fix some tests and bugs

elkowar 2021-07-19 13:27:30 +02:00
parent 2451f6fd49
commit 00abe27c13
51 changed files with 257 additions and 277 deletions

Cargo.lock (generated)

@@ -52,6 +52,12 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+[[package]]
+name = "base64"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
[[package]]
name = "beef"
version = "0.5.0"
@@ -284,6 +290,7 @@ checksum = "c4a1b21a2971cea49ca4613c0e9fe8225ecaf5de64090fddc6002284726e9244"
dependencies = [
 "console",
 "lazy_static",
+ "ron",
 "serde",
 "serde_json",
 "serde_yaml",
@@ -522,6 +529,17 @@ version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+[[package]]
+name = "ron"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "064ea8613fb712a19faf920022ec8ddf134984f100090764a4e1d768f3827f1f"
+dependencies = [
+ "base64",
+ "bitflags",
+ "serde",
+]
[[package]]
name = "rustc_version"
version = "0.3.3"

@@ -32,4 +32,4 @@ simplexpr = { path = "../../projects/simplexpr" }
lalrpop = "0.19.5"
[dev-dependencies]
-insta = "1.7"
+insta = { version = "1.7", features = ["ron"]}

@@ -47,7 +47,7 @@ impl FromAst for TopLevel {
    }
}
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
pub struct Config {
    widget_definitions: HashMap<String, WidgetDefinition>,
    var_definitions: HashMap<VarName, VarDefinition>,

@@ -1,5 +1,7 @@
mod config;
pub mod script_var_definition;
+#[cfg(test)]
+mod test;
pub mod validate;
pub mod var_definition;
pub mod widget_definition;

@@ -12,7 +12,7 @@ use crate::{
    value::{AttrName, VarName},
};
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
pub enum ScriptVarDefinition {
    Poll(PollScriptVar),
    Tail(TailScriptVar),
@@ -27,13 +27,14 @@ impl ScriptVarDefinition {
        }
    }
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
pub enum VarSource {
    // TODO allow for other executors? (python, etc)
    Shell(String),
+    #[serde(skip)]
    Function(fn() -> Result<DynVal, Box<dyn std::error::Error>>),
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
pub struct PollScriptVar {
    pub name: VarName,
    pub command: VarSource,
@@ -55,7 +56,7 @@ impl FromAstElementContent for PollScriptVar {
    }
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
pub struct TailScriptVar {
    pub name: VarName,
    pub command: String,
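Note on the added #[serde(skip)]: serde cannot serialize function pointers, so deriving serde::Serialize on VarSource only compiles if the Function variant is excluded. A minimal standalone sketch of the same pattern (not the eww code, types simplified; assumes serde with the derive feature and the ron crate are available):

use serde::Serialize;

// Simplified stand-in for VarSource: the function-pointer variant cannot be
// serialized, so it is skipped; serializing a Function value fails at runtime.
#[derive(Serialize)]
enum Source {
    Shell(String),
    #[serde(skip)]
    Function(fn() -> Result<String, Box<dyn std::error::Error>>),
}

fn main() {
    let shell = Source::Shell("date".into());
    // Only the Shell variant round-trips through RON.
    println!("{}", ron::ser::to_string(&shell).unwrap()); // Shell("date")
    assert!(ron::ser::to_string(&Source::Function(|| Ok("hi".into()))).is_err());
}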

@@ -0,0 +1,55 @@
---
source: src/config/test.rs
expression: config.unwrap()
---
Config(
  widget_definitions: {
    "bar": WidgetDefinition(
      name: "bar",
      expected_args: [
        AttrName("arg"),
        AttrName("arg2"),
      ],
      widget: WidgetUse(
        name: "text",
        attrs: {
          AttrName("text"): Literal(Span(99, 104, 0), DynVal("bla", None)),
        },
        children: [],
        span: Span(99, 104, 0),
      ),
      span: Span(61, 105, 0),
      args_span: Span(76, 86, 0),
    ),
    "foo": WidgetDefinition(
      name: "foo",
      expected_args: [
        AttrName("arg"),
      ],
      widget: WidgetUse(
        name: "text",
        attrs: {
          AttrName("text"): Literal(Span(44, 51, 0), DynVal("heyho", None)),
        },
        children: [],
        span: Span(44, 51, 0),
      ),
      span: Span(11, 52, 0),
      args_span: Span(26, 31, 0),
    ),
  },
  var_definitions: {
    VarName("some_var"): VarDefinition(
      name: VarName("some_var"),
      initial_value: DynVal("bla", None),
      span: Span(114, 137, 0),
    ),
  },
  script_vars: {
    VarName("stuff"): Tail(TailScriptVar(
      name: VarName("stuff"),
      command: "tail -f stuff",
    )),
  },
)

src/config/test.rs (new file)

@@ -0,0 +1,30 @@
use crate::{
    config::config::Config,
    parser::{
        self,
        ast::{Ast, Span},
        element::FromAst,
        lexer::Lexer,
    },
};

#[test]
fn test_config() {
    let input = r#"
(defwidget foo [arg]
"heyho")
(defwidget bar [arg arg2]
"bla")
(defvar some_var "bla")
(defpollvar stuff :interval "12s" "date")
(deftailvar stuff "tail -f stuff")
"#;
    let lexer = Lexer::new(0, input.to_string());
    let p = parser::parser::ToplevelParser::new();
    let (span, parse_result) = p.parse(0, lexer).unwrap();
    let config = Config::from_ast(Ast::List(span, parse_result));
    insta::with_settings!({sort_maps => true}, {
        insta::assert_ron_snapshot!(config.unwrap());
    });
}
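The sort_maps => true setting matters here because Config stores its definitions in HashMaps, whose iteration order is unspecified; without sorting, the RON snapshot could list keys in a different order on every run. A tiny illustration of the underlying issue (my simplification, not part of this commit):

use std::collections::HashMap;

fn main() {
    let defs: HashMap<&str, u32> = [("foo", 1), ("bar", 2)].into_iter().collect();
    // HashMap iteration order can differ between runs, which would make an
    // unsorted snapshot of it flaky.
    let unsorted: Vec<_> = defs.keys().copied().collect();
    let mut sorted = unsorted.clone();
    sorted.sort_unstable();
    assert_eq!(sorted, vec!["bar", "foo"]); // deterministic once sorted
}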

@@ -12,7 +12,7 @@ use crate::{
    value::{AttrName, VarName},
};
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
pub struct VarDefinition {
    pub name: VarName,
    pub initial_value: DynVal,

@@ -13,7 +13,7 @@ use crate::{
};
use super::widget_use::WidgetUse;
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
pub struct WidgetDefinition {
    pub name: String,
    pub expected_args: Vec<AttrName>,

@@ -11,7 +11,7 @@ use crate::{
    spanned,
    value::AttrName,
};
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
pub struct WidgetUse {
    pub name: String,
    pub attrs: HashMap<AttrName, SimplExpr>,

@@ -7,7 +7,7 @@ use std::fmt::Display;
use super::element::FromAst;
use crate::error::{AstError, AstResult, OptionAstErrorExt};
-#[derive(Eq, PartialEq, Clone, Copy)]
+#[derive(Eq, PartialEq, Clone, Copy, serde::Serialize)]
pub struct Span(pub usize, pub usize, pub usize);
impl Into<simplexpr::Span> for Span {
@@ -121,9 +121,9 @@ impl std::fmt::Display for Ast {
        match self {
            List(_, x) => write!(f, "({})", x.iter().map(|e| format!("{}", e)).join(" ")),
            Array(_, x) => write!(f, "({})", x.iter().map(|e| format!("{}", e)).join(" ")),
-            Keyword(_, x) => write!(f, "{}", x),
+            Keyword(_, x) => write!(f, ":{}", x),
            Symbol(_, x) => write!(f, "{}", x),
-            Value(_, x) => write!(f, "{}", x),
+            Value(_, x) => write!(f, "\"{}\"", x),
            SimplExpr(_, x) => write!(f, "{{{}}}", x),
            Comment(_) => write!(f, ""),
        }

@@ -20,7 +20,7 @@ impl FromAst for Ast {
impl FromAst for String {
    fn from_ast(e: Ast) -> AstResult<Self> {
-        Ok(e.as_value()?.to_string())
+        Ok(e.as_value()?.as_string().unwrap())
    }
}
@@ -78,26 +78,3 @@ impl<C: FromAst, A: FromAst> FromAst for Element<C, A> {
        })
    }
}
-#[cfg(test)]
-mod test {
-    use super::super::{
-        ast::Ast,
-        element::{Element, FromAst},
-        lexer,
-    };
-    use insta;
-    #[test]
-    fn test() {
-        let parser = super::parser::parser::AstParser::new();
-        insta::with_settings!({sort_maps => true}, {
-            let lexer = lexer::Lexer::new(0, "(box :bar 12 :baz \"hi\" foo (bar))".to_string());
-            insta::assert_debug_snapshot!(
-                Element::<Ast, Ast>::from_ast(parser.parse(0, lexer).unwrap()).unwrap()
-            );
-        });
-    }
-}

@@ -66,8 +66,8 @@ regex_rules! {
    r"false" => |_| Token::False,
    r#""(?:[^"\\]|\\.)*""# => |x| Token::StrLit(x),
    r#"[+-]?(?:[0-9]+[.])?[0-9]+"# => |x| Token::NumLit(x),
-    r#"[a-zA-Z_!\?<>/.*-+][^\s{}\(\)\[\](){}]*"# => |x| Token::Symbol(x),
-    r#":\S+"# => |x| Token::Keyword(x),
+    r#":[^\s\)\]}]+"# => |x| Token::Keyword(x),
+    r#"[a-zA-Z_!\?<>/\.\*-\+][^\s{}\(\)\[\](){}]*"# => |x| Token::Symbol(x),
    r#";.*"# => |_| Token::Comment,
    r"[ \t\n\f]+" => |_| Token::Skip
}
@@ -126,7 +126,7 @@ impl Iterator for Lexer {
                let m = LEXER_REGEXES[i].find(string).unwrap();
                (m.end(), i)
            })
-            .next();
+            .min_by_key(|(_, x)| *x);
        let (len, i) = match matched_token {
            Some(x) => x,
@@ -140,7 +140,7 @@ impl Iterator for Lexer {
        let old_pos = self.pos;
        self.pos += len;
        match LEXER_FNS[i](tok_str.to_string()) {
-            Token::Skip => {}
+            Token::Skip | Token::Comment => {}
            token => {
                return Some(Ok((old_pos, token, self.pos)));
            }
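The lexer changes above do two things: the keyword rule is now tried before the (re-escaped) symbol rule, and comments are dropped alongside whitespace instead of being emitted as tokens. The rule list is priority-ordered, so the first pattern that matches at the current position decides the token. A rough sketch of that kind of priority-ordered matching (my simplification using the regex crate, not the actual regex_rules! machinery; helper name is hypothetical):

use regex::Regex;

// Priority-ordered rules: the first pattern that matches at the start of the
// remaining input wins, so ":foo" becomes a keyword before the broader symbol
// rule can claim it.
fn next_token(input: &str) -> Option<(&'static str, usize)> {
    let rules: &[(&'static str, &str)] = &[
        ("keyword", r"^:[^\s\)\]}]+"),
        ("symbol", r"^[a-zA-Z_!?<>][^\s(){}\[\]]*"),
        ("skip", r"^[ \t\n\f]+"),
    ];
    rules.iter().find_map(|&(name, pattern)| {
        // Compiling per call is fine for a sketch; the real lexer caches its regexes.
        Regex::new(pattern).ok()?.find(input).map(|m| (name, m.end()))
    })
}

fn main() {
    assert_eq!(next_token(":interval rest"), Some(("keyword", 9)));
    assert_eq!(next_token("foo rest"), Some(("symbol", 3)));
}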

@@ -54,7 +54,7 @@ fn test() {
        r#"(test "h\"i")"#,
        r#"(test " hi ")"#,
        "(+ (1 2 (* 2 5)))",
-        r#"; test"#,
+        r#"foo ; test"#,
        r#"(f arg ; test
        arg2)"#,
        "\"h\\\"i\""

@@ -26,8 +26,8 @@ extern {
    }
}
-pub Toplevel: Vec<Ast> = {
-    <(<Ast>)*> => <>
+pub Toplevel: (Span, Vec<Ast>) = {
+    <l:@L> <elems:(<Ast>)*> <r:@R> => (Span(l, r, file_id), elems)
}
pub Ast: Ast = {
@@ -40,7 +40,7 @@ pub Ast: Ast = {
    <l:@L> "comment" <r:@R> => Ast::Comment(Span(l, r, file_id)),
};
-Keyword: Ast = <l:@L> <x:"keyword"> <r:@R> => Ast::Keyword(Span(l, r, file_id), x.to_string());
+Keyword: Ast = <l:@L> <x:"keyword"> <r:@R> => Ast::Keyword(Span(l, r, file_id), x[1..].to_string());
Symbol: Ast = <l:@L> <x:"symbol"> <r:@R> => Ast::Symbol(Span(l, r, file_id), x.to_string());
Value: String = {
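With the grammar now slicing the token text via x[1..], Ast::Keyword stores the name without its leading ':' (the lexer's keyword regex guarantees the colon is there), and the Display change above re-attaches it when printing. A minimal sketch of that round trip (hypothetical helper names, not the actual grammar action):

// The keyword rule only ever produces tokens that start with ':', so slicing
// off the first byte is safe.
fn keyword_name(token: &str) -> &str {
    debug_assert!(token.starts_with(':'));
    &token[1..]
}

// Mirrors the Display change: the colon comes back when the AST is printed.
fn display_keyword(name: &str) -> String {
    format!(":{}", name)
}

fn main() {
    let name = keyword_name(":interval");
    assert_eq!(name, "interval");
    assert_eq!(display_keyword(name), ":interval");
}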

@@ -6,8 +6,8 @@ expression: "Element::<Ast, Ast>::from_ast(parser.parse(0, lexer).unwrap()).unwr
Element {
    name: "box",
    attrs: {
-        ":bar": "12",
-        ":baz": "hi",
+        "baz": "hi",
+        "bar": "12",
    },
    children: [
        foo,

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(lol😄 1)\".to_string()))"
---
Ok(
(lol😄 "1"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, r#\"(test \"hi\")\"#.to_string()))"
---
Ok(
(test "hi"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, r#\"(test \"h\\\"i\")\"#.to_string()))"
---
Ok(
(test "h\"i"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, r#\"(test \" hi \")\"#.to_string()))"
---
Ok(
(test " hi "),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(+ (1 2 (* 2 5)))\".to_string()))"
---
Ok(
(+ ("1" "2" (* "2" "5"))),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, r#\"foo ; test\"#.to_string()))"
---
Ok(
foo,
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, r#\"(f arg ; test\n arg2)\"#.to_string()))"
---
Ok(
(f arg arg2),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"\\\"h\\\\\\\"i\\\"\".to_string()))"
---
Ok(
"h\"i",
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(12)\".to_string()))"
---
Ok(
("12"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"1.2\".to_string()))"
---
Ok(
"1.2",
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"-1.2\".to_string()))"
---
Ok(
"-1.2",
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(1 2)\".to_string()))"
---
Ok(
("1" "2"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(1 :foo 1)\".to_string()))"
---
Ok(
("1" :foo "1"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(:foo 1)\".to_string()))"
---
Ok(
(:foo "1"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(:foo->: 1)\".to_string()))"
---
Ok(
(:foo->: "1"),
)

@@ -0,0 +1,8 @@
---
source: src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(foo 1)\".to_string()))"
---
Ok(
(foo "1"),
)

@@ -1,21 +0,0 @@
---
source: src/config.rs
expression: "Element::<Expr, Expr>::from_expr(parser.parse(0, lexer).unwrap()).unwrap()"
---
Element {
name: "box",
attrs: {
":bar": Value<10..12>(12),
":baz": Value<18..22>(hi),
},
children: [
Symbol<23..26>(foo),
List<27..32>(
[
Symbol<28..31>(bar),
],
),
],
span: 0..33,
}

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(lol😄 1)\"))"
---
Ok(
List<0..11>(
[
Symbol<1..8>(lol😄),
Value<9..10>(1),
],
),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(r#\"(test \"hi\")\"#))"
---
Ok(
List<0..11>(
[
Symbol<1..5>(test),
Value<6..10>(hi),
],
),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(r#\"(test \"h\\\"i\")\"#))"
---
Ok(
List<0..13>(
[
Symbol<1..5>(test),
Value<6..12>(h\"i),
],
),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(r#\"(test \" hi \")\"#))"
---
Ok(
List<0..13>(
[
Symbol<1..5>(test),
Value<6..12>( hi ),
],
),
)

@@ -1,25 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(+ (1 2 (* 2 5)))\"))"
---
Ok(
List<0..17>(
[
Symbol<1..2>(+),
List<3..16>(
[
Value<4..5>(1),
Value<6..7>(2),
List<8..15>(
[
Symbol<9..10>(*),
Value<11..12>(2),
Value<13..14>(5),
],
),
],
),
],
),
)

@@ -1,8 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(r#\"; test\"#))"
---
Ok(
Comment<0..6>,
)

@@ -1,15 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(r#\"(f arg ; test\n arg2)\"#))"
---
Ok(
List<0..27>(
[
Symbol<1..2>(f),
Symbol<3..6>(arg),
Comment<7..13>,
Symbol<22..26>(arg2),
],
),
)

@@ -1,8 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"\\\"h\\\\\\\"i\\\"\"))"
---
Ok(
Value<0..6>(h\"i),
)

@@ -1,12 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(12)\"))"
---
Ok(
List<0..4>(
[
Value<1..3>(12),
],
),
)

@@ -1,8 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"1.2\"))"
---
Ok(
Value<0..3>(1.2),
)

@@ -1,8 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"-1.2\"))"
---
Ok(
Value<0..4>(-1.2),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(1 2)\"))"
---
Ok(
List<0..5>(
[
Value<1..2>(1),
Value<3..4>(2),
],
),
)

@@ -1,14 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(1 :foo 1)\"))"
---
Ok(
List<0..10>(
[
Value<1..2>(1),
Number<3..7>(:foo),
Value<8..9>(1),
],
),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(:foo 1)\"))"
---
Ok(
List<0..8>(
[
Number<1..5>(:foo),
Value<6..7>(1),
],
),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(:foo->: 1)\"))"
---
Ok(
List<0..11>(
[
Number<1..8>(:foo->:),
Value<9..10>(1),
],
),
)

@@ -1,13 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"(foo 1)\"))"
---
Ok(
List<0..7>(
[
Symbol<1..4>(foo),
Value<5..6>(1),
],
),
)

@@ -1,8 +0,0 @@
---
source: src/lib.rs
expression: "p.parse(0, lexer::Lexer::new(\"1\"))"
---
Ok(
Value<0..1>(1),
)