fix some tests and bugs
This commit is contained in:
parent 2451f6fd49
commit 00abe27c13
51 changed files with 257 additions and 277 deletions
Cargo.lock (generated): 18 changes

@@ -52,6 +52,12 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
 
+[[package]]
+name = "base64"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
+
 [[package]]
 name = "beef"
 version = "0.5.0"
@@ -284,6 +290,7 @@ checksum = "c4a1b21a2971cea49ca4613c0e9fe8225ecaf5de64090fddc6002284726e9244"
 dependencies = [
  "console",
  "lazy_static",
+ "ron",
  "serde",
  "serde_json",
  "serde_yaml",
@@ -522,6 +529,17 @@ version = "0.6.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
 
+[[package]]
+name = "ron"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "064ea8613fb712a19faf920022ec8ddf134984f100090764a4e1d768f3827f1f"
+dependencies = [
+ "base64",
+ "bitflags",
+ "serde",
+]
+
 [[package]]
 name = "rustc_version"
 version = "0.3.3"

@@ -32,4 +32,4 @@ simplexpr = { path = "../../projects/simplexpr" }
 lalrpop = "0.19.5"
 
 [dev-dependencies]
-insta = "1.7"
+insta = { version = "1.7", features = ["ron"]}
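
Note: enabling insta's "ron" feature pulls in the ron crate (hence the Cargo.lock additions above) and makes `insta::assert_ron_snapshot!` available, which serializes the asserted value through serde. That is also why the config types further down in this commit gain `serde::Serialize` derives. A minimal, self-contained sketch of the pattern (the `Point` type and test name are invented for illustration, not part of this commit):

    #[derive(serde::Serialize)]
    struct Point {
        x: i32,
        y: i32,
    }

    #[test]
    fn point_snapshot() {
        // Serializes the value as RON and compares it against the stored .snap file.
        insta::assert_ron_snapshot!(Point { x: 1, y: 2 });
    }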

@@ -47,7 +47,7 @@ impl FromAst for TopLevel {
     }
 }
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
 pub struct Config {
     widget_definitions: HashMap<String, WidgetDefinition>,
     var_definitions: HashMap<VarName, VarDefinition>,

@@ -1,5 +1,7 @@
 mod config;
 pub mod script_var_definition;
+#[cfg(test)]
+mod test;
 pub mod validate;
 pub mod var_definition;
 pub mod widget_definition;

@@ -12,7 +12,7 @@ use crate::{
     value::{AttrName, VarName},
 };
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
 pub enum ScriptVarDefinition {
     Poll(PollScriptVar),
     Tail(TailScriptVar),
@@ -27,13 +27,14 @@ impl ScriptVarDefinition {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
 pub enum VarSource {
     // TODO allow for other executors? (python, etc)
     Shell(String),
+    #[serde(skip)]
     Function(fn() -> Result<DynVal, Box<dyn std::error::Error>>),
 }
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
 pub struct PollScriptVar {
     pub name: VarName,
     pub command: VarSource,
@@ -55,7 +56,7 @@ impl FromAstElementContent for PollScriptVar {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]
 pub struct TailScriptVar {
     pub name: VarName,
     pub command: String,
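
The `#[serde(skip)]` on the `Function` variant is needed because a fn pointer has no serde representation; skipping the variant lets the rest of `VarSource` derive `Serialize`. A rough standalone sketch of the same idea (the `Source` type here is illustrative, not the real one):

    #[derive(serde::Serialize)]
    enum Source {
        Shell(String),
        // fn pointers cannot be serialized, so this variant is excluded from serialization.
        #[serde(skip)]
        Function(fn() -> String),
    }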

src/config/snapshots/eww_config__config__test__config.snap (new file): 55 lines

@@ -0,0 +1,55 @@
+---
+source: src/config/test.rs
+expression: config.unwrap()
+
+---
+Config(
+  widget_definitions: {
+    "bar": WidgetDefinition(
+      name: "bar",
+      expected_args: [
+        AttrName("arg"),
+        AttrName("arg2"),
+      ],
+      widget: WidgetUse(
+        name: "text",
+        attrs: {
+          AttrName("text"): Literal(Span(99, 104, 0), DynVal("bla", None)),
+        },
+        children: [],
+        span: Span(99, 104, 0),
+      ),
+      span: Span(61, 105, 0),
+      args_span: Span(76, 86, 0),
+    ),
+    "foo": WidgetDefinition(
+      name: "foo",
+      expected_args: [
+        AttrName("arg"),
+      ],
+      widget: WidgetUse(
+        name: "text",
+        attrs: {
+          AttrName("text"): Literal(Span(44, 51, 0), DynVal("heyho", None)),
+        },
+        children: [],
+        span: Span(44, 51, 0),
+      ),
+      span: Span(11, 52, 0),
+      args_span: Span(26, 31, 0),
+    ),
+  },
+  var_definitions: {
+    VarName("some_var"): VarDefinition(
+      name: VarName("some_var"),
+      initial_value: DynVal("bla", None),
+      span: Span(114, 137, 0),
+    ),
+  },
+  script_vars: {
+    VarName("stuff"): Tail(TailScriptVar(
+      name: VarName("stuff"),
+      command: "tail -f stuff",
+    )),
+  },
+)

src/config/test.rs (new file): 30 lines

@@ -0,0 +1,30 @@
+use crate::{
+    config::config::Config,
+    parser::{
+        self,
+        ast::{Ast, Span},
+        element::FromAst,
+        lexer::Lexer,
+    },
+};
+
+#[test]
+fn test_config() {
+    let input = r#"
+(defwidget foo [arg]
+"heyho")
+(defwidget bar [arg arg2]
+"bla")
+(defvar some_var "bla")
+(defpollvar stuff :interval "12s" "date")
+(deftailvar stuff "tail -f stuff")
+"#;
+
+    let lexer = Lexer::new(0, input.to_string());
+    let p = parser::parser::ToplevelParser::new();
+    let (span, parse_result) = p.parse(0, lexer).unwrap();
+    let config = Config::from_ast(Ast::List(span, parse_result));
+    insta::with_settings!({sort_maps => true}, {
+        insta::assert_ron_snapshot!(config.unwrap());
+    });
+}
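
In this test, `widget_definitions` and `var_definitions` are HashMaps, so their iteration order is not deterministic; `insta::with_settings!({sort_maps => true}, ...)` sorts map keys before serializing, which keeps the RON snapshot above stable across runs. Recorded snapshots are then reviewed and accepted with the cargo-insta tooling (for example `cargo insta review`).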

@@ -12,7 +12,7 @@ use crate::{
     value::{AttrName, VarName},
 };
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
 pub struct VarDefinition {
     pub name: VarName,
     pub initial_value: DynVal,

@@ -13,7 +13,7 @@ use crate::{
 };
 
 use super::widget_use::WidgetUse;
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
 pub struct WidgetDefinition {
     pub name: String,
     pub expected_args: Vec<AttrName>,

@@ -11,7 +11,7 @@ use crate::{
     spanned,
     value::AttrName,
 };
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize)]
 pub struct WidgetUse {
     pub name: String,
     pub attrs: HashMap<AttrName, SimplExpr>,

@@ -7,7 +7,7 @@ use std::fmt::Display;
 use super::element::FromAst;
 use crate::error::{AstError, AstResult, OptionAstErrorExt};
 
-#[derive(Eq, PartialEq, Clone, Copy)]
+#[derive(Eq, PartialEq, Clone, Copy, serde::Serialize)]
 pub struct Span(pub usize, pub usize, pub usize);
 
 impl Into<simplexpr::Span> for Span {
@@ -121,9 +121,9 @@ impl std::fmt::Display for Ast {
         match self {
             List(_, x) => write!(f, "({})", x.iter().map(|e| format!("{}", e)).join(" ")),
             Array(_, x) => write!(f, "({})", x.iter().map(|e| format!("{}", e)).join(" ")),
-            Keyword(_, x) => write!(f, "{}", x),
+            Keyword(_, x) => write!(f, ":{}", x),
             Symbol(_, x) => write!(f, "{}", x),
-            Value(_, x) => write!(f, "{}", x),
+            Value(_, x) => write!(f, "\"{}\"", x),
             SimplExpr(_, x) => write!(f, "{{{}}}", x),
             Comment(_) => write!(f, ""),
         }
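
With these two Display changes, keywords print with their ':' prefix again (the grammar hunk below now strips it when building the Ast) and plain values print quoted, so a formatted Ast reads much closer to the original surface syntax; this is what the new parser snapshots such as `(:foo "1")` reflect. A reduced sketch of the formatting idea (the simplified `Node` type is invented for illustration):

    use std::fmt;

    enum Node {
        Keyword(String), // stored without the leading ':'
        Value(String),
    }

    impl fmt::Display for Node {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                // Re-add the ':' that the parser stripped from the token.
                Node::Keyword(x) => write!(f, ":{}", x),
                // Quote literal values so they read like the input source.
                Node::Value(x) => write!(f, "\"{}\"", x),
            }
        }
    }

    fn main() {
        assert_eq!(Node::Keyword("foo".into()).to_string(), ":foo");
        assert_eq!(Node::Value("12".into()).to_string(), "\"12\"");
    }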

@@ -20,7 +20,7 @@ impl FromAst for Ast {
 
 impl FromAst for String {
     fn from_ast(e: Ast) -> AstResult<Self> {
-        Ok(e.as_value()?.to_string())
+        Ok(e.as_value()?.as_string().unwrap())
     }
 }
 
@@ -78,26 +78,3 @@ impl<C: FromAst, A: FromAst> FromAst for Element<C, A> {
         })
     }
 }
-
-#[cfg(test)]
-mod test {
-
-    use super::super::{
-        ast::Ast,
-        element::{Element, FromAst},
-        lexer,
-    };
-
-    use insta;
-
-    #[test]
-    fn test() {
-        let parser = super::parser::parser::AstParser::new();
-        insta::with_settings!({sort_maps => true}, {
-            let lexer = lexer::Lexer::new(0, "(box :bar 12 :baz \"hi\" foo (bar))".to_string());
-            insta::assert_debug_snapshot!(
-                Element::<Ast, Ast>::from_ast(parser.parse(0, lexer).unwrap()).unwrap()
-            );
-        });
-    }
-}

@@ -66,8 +66,8 @@ regex_rules! {
     r"false" => |_| Token::False,
     r#""(?:[^"\\]|\\.)*""# => |x| Token::StrLit(x),
     r#"[+-]?(?:[0-9]+[.])?[0-9]+"# => |x| Token::NumLit(x),
-    r#"[a-zA-Z_!\?<>/.*-+][^\s{}\(\)\[\](){}]*"# => |x| Token::Symbol(x),
-    r#":\S+"# => |x| Token::Keyword(x),
+    r#":[^\s\)\]}]+"# => |x| Token::Keyword(x),
+    r#"[a-zA-Z_!\?<>/\.\*-\+][^\s{}\(\)\[\](){}]*"# => |x| Token::Symbol(x),
     r#";.*"# => |_| Token::Comment,
     r"[ \t\n\f]+" => |_| Token::Skip
 }
@@ -126,7 +126,7 @@ impl Iterator for Lexer {
                 let m = LEXER_REGEXES[i].find(string).unwrap();
                 (m.end(), i)
             })
-            .next();
+            .min_by_key(|(_, x)| *x);
 
         let (len, i) = match matched_token {
             Some(x) => x,
@@ -140,7 +140,7 @@ impl Iterator for Lexer {
             let old_pos = self.pos;
             self.pos += len;
             match LEXER_FNS[i](tok_str.to_string()) {
-                Token::Skip => {}
+                Token::Skip | Token::Comment => {}
                 token => {
                     return Some(Ok((old_pos, token, self.pos)));
                 }
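
Two effects of these lexer changes show up in the snapshots further down: comments are now dropped from the token stream just like `Token::Skip` (so the old `Comment<..>` nodes disappear from the parser snapshots), and the keyword rule no longer swallows a closing delimiter, since `:\S+` became `:[^\s\)\]}]+`. A small standalone check of that regex fix, using the regex crate directly (illustrative only, not code from this commit):

    use regex::Regex;

    fn main() {
        let old = Regex::new(r":\S+").unwrap();
        let new = Regex::new(r":[^\s\)\]}]+").unwrap();
        // The old pattern drags the ')' into the keyword token:
        assert_eq!(old.find("(:foo)").unwrap().as_str(), ":foo)");
        // The new pattern stops at closing delimiters:
        assert_eq!(new.find("(:foo)").unwrap().as_str(), ":foo");
    }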

@@ -54,7 +54,7 @@ fn test() {
         r#"(test "h\"i")"#,
         r#"(test " hi ")"#,
         "(+ (1 2 (* 2 5)))",
-        r#"; test"#,
+        r#"foo ; test"#,
         r#"(f arg ; test
         arg2)"#,
         "\"h\\\"i\""

@@ -26,8 +26,8 @@ extern {
     }
 }
 
-pub Toplevel: Vec<Ast> = {
-    <(<Ast>)*> => <>
+pub Toplevel: (Span, Vec<Ast>) = {
+    <l:@L> <elems:(<Ast>)*> <r:@R> => (Span(l, r, file_id), elems)
 }
 
 pub Ast: Ast = {
@@ -40,7 +40,7 @@ pub Ast: Ast = {
     <l:@L> "comment" <r:@R> => Ast::Comment(Span(l, r, file_id)),
 };
 
-Keyword: Ast = <l:@L> <x:"keyword"> <r:@R> => Ast::Keyword(Span(l, r, file_id), x.to_string());
+Keyword: Ast = <l:@L> <x:"keyword"> <r:@R> => Ast::Keyword(Span(l, r, file_id), x[1..].to_string());
 Symbol: Ast = <l:@L> <x:"symbol"> <r:@R> => Ast::Symbol(Span(l, r, file_id), x.to_string());
 
 Value: String = {
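
The `Toplevel` rule now also returns the span covering the whole input, which is what `src/config/test.rs` above destructures into `(span, parse_result)`, and the `Keyword` rule strips the leading ':' from the token before storing it in the Ast (the Display impl re-adds it when printing). Roughly, the keyword action does the equivalent of this (hypothetical helper name, for illustration):

    fn keyword_name(token: &str) -> String {
        // ":foo" -> "foo"; ':' is a single byte, so slicing at index 1 is safe.
        token[1..].to_string()
    }

    fn main() {
        assert_eq!(keyword_name(":foo"), "foo");
    }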

@@ -6,8 +6,8 @@ expression: "Element::<Ast, Ast>::from_ast(parser.parse(0, lexer).unwrap()).unwrap()"
 Element {
     name: "box",
     attrs: {
-        ":bar": "12",
-        ":baz": "hi",
+        "baz": "hi",
+        "bar": "12",
     },
     children: [
         foo,

src/parser/snapshots/eww_config__parser__test-10.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(lol😄 1)\".to_string()))"
+
+---
+Ok(
+    (lol😄 "1"),
+)

src/parser/snapshots/eww_config__parser__test-11.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, r#\"(test \"hi\")\"#.to_string()))"
+
+---
+Ok(
+    (test "hi"),
+)

src/parser/snapshots/eww_config__parser__test-12.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, r#\"(test \"h\\\"i\")\"#.to_string()))"
+
+---
+Ok(
+    (test "h\"i"),
+)

src/parser/snapshots/eww_config__parser__test-13.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, r#\"(test \" hi \")\"#.to_string()))"
+
+---
+Ok(
+    (test " hi "),
+)

src/parser/snapshots/eww_config__parser__test-14.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(+ (1 2 (* 2 5)))\".to_string()))"
+
+---
+Ok(
+    (+ ("1" "2" (* "2" "5"))),
+)

src/parser/snapshots/eww_config__parser__test-15.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, r#\"foo ; test\"#.to_string()))"
+
+---
+Ok(
+    foo,
+)

src/parser/snapshots/eww_config__parser__test-16.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, r#\"(f arg ; test\n arg2)\"#.to_string()))"
+
+---
+Ok(
+    (f arg arg2),
+)

src/parser/snapshots/eww_config__parser__test-17.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"\\\"h\\\\\\\"i\\\"\".to_string()))"
+
+---
+Ok(
+    "h\"i",
+)

src/parser/snapshots/eww_config__parser__test-2.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(12)\".to_string()))"
+
+---
+Ok(
+    ("12"),
+)

src/parser/snapshots/eww_config__parser__test-3.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"1.2\".to_string()))"
+
+---
+Ok(
+    "1.2",
+)

src/parser/snapshots/eww_config__parser__test-4.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"-1.2\".to_string()))"
+
+---
+Ok(
+    "-1.2",
+)

src/parser/snapshots/eww_config__parser__test-5.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(1 2)\".to_string()))"
+
+---
+Ok(
+    ("1" "2"),
+)

src/parser/snapshots/eww_config__parser__test-6.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(1 :foo 1)\".to_string()))"
+
+---
+Ok(
+    ("1" :foo "1"),
+)

src/parser/snapshots/eww_config__parser__test-7.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(:foo 1)\".to_string()))"
+
+---
+Ok(
+    (:foo "1"),
+)

src/parser/snapshots/eww_config__parser__test-8.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(:foo->: 1)\".to_string()))"
+
+---
+Ok(
+    (:foo->: "1"),
+)

src/parser/snapshots/eww_config__parser__test-9.snap (new file): 8 lines

@@ -0,0 +1,8 @@
+---
+source: src/parser/mod.rs
+expression: "p.parse(0, Lexer::new(0, \"(foo 1)\".to_string()))"
+
+---
+Ok(
+    (foo "1"),
+)

@@ -1,21 +0,0 @@
----
-source: src/config.rs
-expression: "Element::<Expr, Expr>::from_expr(parser.parse(0, lexer).unwrap()).unwrap()"
-
----
-Element {
-    name: "box",
-    attrs: {
-        ":bar": Value<10..12>(12),
-        ":baz": Value<18..22>(hi),
-    },
-    children: [
-        Symbol<23..26>(foo),
-        List<27..32>(
-            [
-                Symbol<28..31>(bar),
-            ],
-        ),
-    ],
-    span: 0..33,
-}

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(lol😄 1)\"))"
-
----
-Ok(
-    List<0..11>(
-        [
-            Symbol<1..8>(lol😄),
-            Value<9..10>(1),
-        ],
-    ),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(r#\"(test \"hi\")\"#))"
-
----
-Ok(
-    List<0..11>(
-        [
-            Symbol<1..5>(test),
-            Value<6..10>(hi),
-        ],
-    ),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(r#\"(test \"h\\\"i\")\"#))"
-
----
-Ok(
-    List<0..13>(
-        [
-            Symbol<1..5>(test),
-            Value<6..12>(h\"i),
-        ],
-    ),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(r#\"(test \" hi \")\"#))"
-
----
-Ok(
-    List<0..13>(
-        [
-            Symbol<1..5>(test),
-            Value<6..12>( hi ),
-        ],
-    ),
-)

@@ -1,25 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(+ (1 2 (* 2 5)))\"))"
-
----
-Ok(
-    List<0..17>(
-        [
-            Symbol<1..2>(+),
-            List<3..16>(
-                [
-                    Value<4..5>(1),
-                    Value<6..7>(2),
-                    List<8..15>(
-                        [
-                            Symbol<9..10>(*),
-                            Value<11..12>(2),
-                            Value<13..14>(5),
-                        ],
-                    ),
-                ],
-            ),
-        ],
-    ),
-)

@@ -1,8 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(r#\"; test\"#))"
-
----
-Ok(
-    Comment<0..6>,
-)

@@ -1,15 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(r#\"(f arg ; test\n arg2)\"#))"
-
----
-Ok(
-    List<0..27>(
-        [
-            Symbol<1..2>(f),
-            Symbol<3..6>(arg),
-            Comment<7..13>,
-            Symbol<22..26>(arg2),
-        ],
-    ),
-)

@@ -1,8 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"\\\"h\\\\\\\"i\\\"\"))"
-
----
-Ok(
-    Value<0..6>(h\"i),
-)

@@ -1,12 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(12)\"))"
-
----
-Ok(
-    List<0..4>(
-        [
-            Value<1..3>(12),
-        ],
-    ),
-)

@@ -1,8 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"1.2\"))"
-
----
-Ok(
-    Value<0..3>(1.2),
-)

@@ -1,8 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"-1.2\"))"
-
----
-Ok(
-    Value<0..4>(-1.2),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(1 2)\"))"
-
----
-Ok(
-    List<0..5>(
-        [
-            Value<1..2>(1),
-            Value<3..4>(2),
-        ],
-    ),
-)

@@ -1,14 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(1 :foo 1)\"))"
-
----
-Ok(
-    List<0..10>(
-        [
-            Value<1..2>(1),
-            Number<3..7>(:foo),
-            Value<8..9>(1),
-        ],
-    ),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(:foo 1)\"))"
-
----
-Ok(
-    List<0..8>(
-        [
-            Number<1..5>(:foo),
-            Value<6..7>(1),
-        ],
-    ),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(:foo->: 1)\"))"
-
----
-Ok(
-    List<0..11>(
-        [
-            Number<1..8>(:foo->:),
-            Value<9..10>(1),
-        ],
-    ),
-)

@@ -1,13 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"(foo 1)\"))"
-
----
-Ok(
-    List<0..7>(
-        [
-            Symbol<1..4>(foo),
-            Value<5..6>(1),
-        ],
-    ),
-)

@@ -1,8 +0,0 @@
----
-source: src/lib.rs
-expression: "p.parse(0, lexer::Lexer::new(\"1\"))"
-
----
-Ok(
-    Value<0..1>(1),
-)