Allow for string-interpolation in simplexpr

This commit is contained in:
elkowar 2021-07-30 16:29:04 +02:00
parent 41093cf0af
commit 5cde0228fb
No known key found for this signature in database
GPG key ID: E321AD71B1D1F27F
28 changed files with 550 additions and 262 deletions

View file

@ -15,7 +15,7 @@ thiserror = "1.0"
maplit = "1.0" maplit = "1.0"
logos = "0.12" logos = "0.12"
once_cell = "1.8" once_cell = "1.8.0"
serde = {version = "1.0", features = ["derive"]} serde = {version = "1.0", features = ["derive"]}
serde_json = "1.0" serde_json = "1.0"

View file

@ -31,6 +31,10 @@ pub enum UnaryOp {
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum SimplExpr { pub enum SimplExpr {
// TODO figure out if that span value here is even necessary,..
// DynVal has span information. However, keeping the span here and the span of the dynval separate
// would allow to separate the span of where a dynval was defined and the
// span of the use-site when a literal is used to replace a varref in the evaluation process.
Literal(Span, DynVal), Literal(Span, DynVal),
VarRef(Span, VarName), VarRef(Span, VarName),
BinOp(Span, Box<SimplExpr>, BinOp, Box<SimplExpr>), BinOp(Span, Box<SimplExpr>, BinOp, Box<SimplExpr>),

View file

@ -51,7 +51,7 @@ fn get_parse_error_span(file_id: usize, err: &lalrpop_util::ParseError<usize, le
lalrpop_util::ParseError::UnrecognizedEOF { location, expected: _ } => Span(*location, *location, file_id), lalrpop_util::ParseError::UnrecognizedEOF { location, expected: _ } => Span(*location, *location, file_id),
lalrpop_util::ParseError::UnrecognizedToken { token, expected: _ } => Span(token.0, token.2, file_id), lalrpop_util::ParseError::UnrecognizedToken { token, expected: _ } => Span(token.0, token.2, file_id),
lalrpop_util::ParseError::ExtraToken { token } => Span(token.0, token.2, file_id), lalrpop_util::ParseError::ExtraToken { token } => Span(token.0, token.2, file_id),
lalrpop_util::ParseError::User { error: LexicalError(l, r, file_id) } => Span(*l, *r, *file_id), lalrpop_util::ParseError::User { error: LexicalError(span) } => *span,
} }
} }

View file

@ -1,3 +1,36 @@
use eww_shared_util::Span;
use itertools::Itertools;
use crate::{ast::BinOp, dynval::DynVal, SimplExpr};
use super::lexer::{LexicalError, Sp, StrLitSegment, Token};
pub fn b<T>(x: T) -> Box<T> { pub fn b<T>(x: T) -> Box<T> {
Box::new(x) Box::new(x)
} }
/// Combine the segments of a lexed string literal (plain text chunks and
/// `{ ... }` interpolation token streams) into a single [`SimplExpr`].
///
/// Literal segments become [`SimplExpr::Literal`] nodes; interpolated
/// segments are re-parsed with the generated lalrpop `ExprParser`. All
/// resulting expressions are then folded together with `BinOp::Plus`
/// (string concatenation in simplexpr). Parse errors from any interpolated
/// segment are propagated to the caller.
pub fn parse_stringlit(
    span: Span,
    segs: Vec<Sp<StrLitSegment>>,
) -> Result<SimplExpr, lalrpop_util::ParseError<usize, Token, LexicalError>> {
    // The file id lives in the third field of the surrounding span.
    let file_id = span.2;
    let parser = crate::simplexpr_parser::ExprParser::new();
    segs.into_iter()
        .filter_map(|(lo, segment, hi)| {
            // Span of this individual segment within the file.
            let span = Span(lo, hi, file_id);
            match segment {
                // Empty literal chunks contribute nothing; drop them here so
                // they don't produce useless `"" + ...` concatenations.
                StrLitSegment::Literal(lit) if lit.is_empty() => None,
                StrLitSegment::Literal(lit) => Some(Ok(SimplExpr::Literal(span, DynVal(lit, span)))),
                StrLitSegment::Interp(toks) => {
                    // Feed the captured tokens back into the expression
                    // parser; lalrpop expects a fallible token stream.
                    let token_stream = toks.into_iter().map(|x| Ok(x));
                    Some(parser.parse(file_id, token_stream))
                }
            }
        })
        // Fold segments left-to-right into a chain of `+` concatenations.
        // `fold_ok` (itertools) short-circuits on the first parse error.
        // NOTE(review): the whole-literal `span` is reused for every BinOp
        // node rather than the joined sub-spans — presumably intentional.
        .fold_ok(None, |acc, cur| match acc {
            Some(ast) => Some(SimplExpr::BinOp(span, Box::new(ast), BinOp::Plus, Box::new(cur))),
            None => Some(cur),
        })
        // A literal consisting only of empty segments (e.g. `""`) becomes
        // an empty-string literal rather than nothing.
        .map(|ast| ast.unwrap_or_else(|| SimplExpr::Literal(span, DynVal(String::new(), span))))
}

View file

@ -1,105 +1,273 @@
use eww_shared_util::{Span, Spanned}; use eww_shared_util::{Span, Spanned};
use logos::Logos;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::{escape, Regex, RegexSet};
static ESCAPE_REPLACE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"\\(.)").unwrap()); pub type Sp<T> = (usize, T, usize);
#[rustfmt::skip] #[derive(Debug, PartialEq, Eq, Clone, strum::Display, strum::EnumString)]
#[derive(Logos, Debug, PartialEq, Eq, Clone, strum::Display, strum::EnumString)] pub enum StrLitSegment {
Literal(String),
Interp(Vec<Sp<Token>>),
}
#[derive(Debug, PartialEq, Eq, Clone, strum::Display, strum::EnumString)]
pub enum Token { pub enum Token {
#[strum(serialize = "+") ] #[token("+") ] Plus, Plus,
#[strum(serialize = "-") ] #[token("-") ] Minus, Minus,
#[strum(serialize = "*") ] #[token("*") ] Times, Times,
#[strum(serialize = "/") ] #[token("/") ] Div, Div,
#[strum(serialize = "%") ] #[token("%") ] Mod, Mod,
#[strum(serialize = "==")] #[token("==")] Equals, Equals,
#[strum(serialize = "!=")] #[token("!=")] NotEquals, NotEquals,
#[strum(serialize = "&&")] #[token("&&")] And, And,
#[strum(serialize = "||")] #[token("||")] Or, Or,
#[strum(serialize = ">") ] #[token(">") ] GT, GT,
#[strum(serialize = "<") ] #[token("<") ] LT, LT,
#[strum(serialize = "?:")] #[token("?:")] Elvis, Elvis,
#[strum(serialize = "=~")] #[token("=~")] RegexMatch, RegexMatch,
#[strum(serialize = "!") ] #[token("!") ] Not, Not,
#[strum(serialize = ",") ] #[token(",") ] Comma, Comma,
#[strum(serialize = "?") ] #[token("?") ] Question, Question,
#[strum(serialize = ":") ] #[token(":") ] Colon, Colon,
#[strum(serialize = "(") ] #[token("(") ] LPren, LPren,
#[strum(serialize = ")") ] #[token(")") ] RPren, RPren,
#[strum(serialize = "[") ] #[token("[") ] LBrack, LBrack,
#[strum(serialize = "]") ] #[token("]") ] RBrack, RBrack,
#[strum(serialize = ".") ] #[token(".") ] Dot, Dot,
#[strum(serialize = "true") ] #[token("true") ] True, True,
#[strum(serialize = "false")] #[token("false")] False, False,
#[regex(r"[a-zA-Z_-]+", |x| x.slice().to_string())]
Ident(String), Ident(String),
#[regex(r"[+-]?(?:[0-9]+[.])?[0-9]+", |x| x.slice().to_string())]
NumLit(String), NumLit(String),
#[regex(r#""(?:[^"\\]|\\.)*""#, |x| ESCAPE_REPLACE_REGEX.replace_all(x.slice(), "$1").to_string())] StringLit(Vec<Sp<StrLitSegment>>),
#[regex(r#"`(?:[^`\\]|\\.)*`"#, |x| ESCAPE_REPLACE_REGEX.replace_all(x.slice(), "$1").to_string())]
#[regex(r#"'(?:[^'\\]|\\.)*'"#, |x| ESCAPE_REPLACE_REGEX.replace_all(x.slice(), "$1").to_string())]
StrLit(String),
Comment,
Skip,
}
#[error] macro_rules! regex_rules {
#[regex(r"[ \t\n\f]+", logos::skip)] ($( $regex:expr => $token:expr),*) => {
#[regex(r";.*", logos::skip)] static LEXER_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| { RegexSet::new(&[
Error, $(format!("^{}", $regex)),*
]).unwrap()});
static LEXER_REGEXES: Lazy<Vec<Regex>> = Lazy::new(|| { vec![
$(Regex::new(&format!("^{}", $regex)).unwrap()),*
]});
static LEXER_FNS: Lazy<Vec<Box<dyn Fn(String) -> Token + Sync + Send>>> = Lazy::new(|| { vec![
$(Box::new($token)),*
]});
}
}
static ESCAPE_REPLACE_REGEX: Lazy<regex::Regex> = Lazy::new(|| Regex::new(r"\\(.)").unwrap());
regex_rules! {
escape(r"+") => |_| Token::Plus,
escape(r"-") => |_| Token::Minus,
escape(r"*") => |_| Token::Times,
escape(r"/") => |_| Token::Div,
escape(r"%") => |_| Token::Mod,
escape(r"==") => |_| Token::Equals,
escape(r"!=") => |_| Token::NotEquals,
escape(r"&&") => |_| Token::And,
escape(r"||") => |_| Token::Or,
escape(r">") => |_| Token::GT,
escape(r"<") => |_| Token::LT,
escape(r"?:") => |_| Token::Elvis,
escape(r"=~") => |_| Token::RegexMatch,
escape(r"!" ) => |_| Token::Not,
escape(r",") => |_| Token::Comma,
escape(r"?") => |_| Token::Question,
escape(r":") => |_| Token::Colon,
escape(r"(") => |_| Token::LPren,
escape(r")") => |_| Token::RPren,
escape(r"[") => |_| Token::LBrack,
escape(r"]") => |_| Token::RBrack,
escape(r".") => |_| Token::Dot,
escape(r"true") => |_| Token::True,
escape(r"false") => |_| Token::False,
r"[ \n\n\f]+" => |_| Token::Skip,
r";.*"=> |_| Token::Comment,
r"[a-zA-Z_-]+" => |x| Token::Ident(x.to_string()),
r"[+-]?(?:[0-9]+[.])?[0-9]+" => |x| Token::NumLit(x.to_string())
}
/// Hand-written lexer for simplexpr, replacing the previous logos-based one
/// so that string literals with nested interpolation (`"foo {expr} bar"`)
/// can be tokenized.
pub struct Lexer<'s> {
    /// Id of the file being lexed; embedded into every emitted `Span`.
    file_id: usize,
    /// Full input text being lexed.
    source: &'s str,
    /// Current byte position within `source`.
    pos: usize,
    /// Set once an unlexable character is hit; stops all further iteration.
    failed: bool,
    /// Byte offset added to all emitted span positions — the position of
    /// `source` within the surrounding file.
    offset: usize,
}
impl<'s> Lexer<'s> {
    /// Create a lexer over `source`, offsetting all emitted spans by
    /// `span_offset` and tagging them with `file_id`.
    pub fn new(file_id: usize, span_offset: usize, source: &'s str) -> Self {
        Lexer { source, offset: span_offset, file_id, failed: false, pos: 0 }
    }

    /// The not-yet-consumed tail of the input.
    fn remaining(&self) -> &'s str {
        &self.source[self.pos..]
    }

    /// Produce the next token, or `None` at end of input / after a failure.
    ///
    /// String literals (delimited by `"`, `'` or `` ` ``) are handled by
    /// `string_lit`; everything else is matched against the rule table
    /// declared via `regex_rules!`. Whitespace and comments are consumed
    /// but not emitted.
    pub fn next_token(&mut self) -> Option<Result<Sp<Token>, LexicalError>> {
        loop {
            // Once a lexical error was reported, stay stopped.
            if self.failed || self.pos >= self.source.len() {
                return None;
            }
            let remaining = self.remaining();
            if remaining.starts_with(&['"', '\'', '`'][..]) {
                // String literal: delegate, then shift the segment span by
                // the global offset of this source slice.
                return self
                    .string_lit()
                    .map(|x| x.map(|(lo, segs, hi)| (lo + self.offset, Token::StringLit(segs), hi + self.offset)));
            } else {
                let match_set = LEXER_REGEX_SET.matches(remaining);
                let matched_token = match_set
                    .into_iter()
                    .map(|i: usize| {
                        // RegexSet only says *which* rules matched; re-run
                        // the individual regex to get the match length.
                        let m = LEXER_REGEXES[i].find(remaining).unwrap();
                        (m.end(), i)
                    })
                    // Pick the rule with the smallest index — rule order in
                    // `regex_rules!` is the priority, not match length.
                    .min_by_key(|(_, x)| *x);
                let (len, i) = match matched_token {
                    Some(x) => x,
                    None => {
                        // Nothing matched at this position: report a
                        // zero-width error span and stop lexing.
                        self.failed = true;
                        return Some(Err(LexicalError(Span(self.pos + self.offset, self.pos + self.offset, self.file_id))));
                    }
                };
                let tok_str = &self.source[self.pos..self.pos + len];
                let old_pos = self.pos;
                self.pos += len;
                match LEXER_FNS[i](tok_str.to_string()) {
                    // Whitespace / comments: consume and keep looping.
                    Token::Skip | Token::Comment => {}
                    token => {
                        return Some(Ok((old_pos + self.offset, token, self.pos + self.offset)));
                    }
                }
            }
        }
    }

    /// Bump `pos` forward until it sits on a UTF-8 character boundary
    /// (needed after byte-wise arithmetic on multi-byte input).
    fn advance_until_char_boundary(&mut self) {
        while self.pos < self.source.len() && !self.source.is_char_boundary(self.pos) {
            self.pos += 1;
        }
    }

    /// Advance past the first occurrence of any character in `pat`,
    /// returning which one was found; `None` (with `pos` at end of input)
    /// if none occurs.
    fn advance_until_one_of(&mut self, pat: &[char]) -> Option<char> {
        for (idx, cur) in self.remaining().char_indices() {
            if let Some(matched) = pat.iter().find(|p| **p == cur) {
                // +1 lands just past the matched char.
                // NOTE(review): assumes the matched delimiter is one byte
                // wide — true for the ASCII delimiters used by callers.
                self.pos += idx + 1;
                return Some(*matched);
            }
        }
        self.pos = self.source.len();
        return None;
    }

    /// Like `advance_until_one_of`, but a backslash escapes the following
    /// character, so escaped delimiters (`\"`, `\{`) are skipped over.
    fn advance_until_unescaped_one_of(&mut self, pat: &[char]) -> Option<char> {
        let mut pattern = pat.to_vec();
        pattern.push('\\');
        match self.advance_until_one_of(pattern.as_slice()) {
            Some('\\') => {
                // Skip the escaped character and keep searching.
                self.pos += 1;
                self.advance_until_char_boundary();
                self.advance_until_unescaped_one_of(pat)
            }
            result => result,
        }
    }

    /// Lex one (possibly interpolated) string literal starting at `pos`.
    ///
    /// Alternates between literal mode (text up to `{` or the closing
    /// quote) and interpolation mode (tokens up to the matching `}`,
    /// lexed recursively via `next_token` — which is what allows nested
    /// string literals inside interpolations). Positions in the returned
    /// segments already include `self.offset`.
    ///
    /// NOTE(review): an unterminated literal makes the inner `?` return
    /// `None`, silently ending the token stream rather than erroring —
    /// confirm that is the intended behavior.
    fn string_lit(&mut self) -> Option<Result<Sp<Vec<Sp<StrLitSegment>>>, LexicalError>> {
        // The opening delimiter also serves as the closing one.
        let quote = self.remaining().chars().next().unwrap();
        let str_lit_start = self.pos;
        self.pos += 1;
        self.advance_until_char_boundary();
        let mut elements = Vec::new();
        let mut in_string_lit = true;
        loop {
            if in_string_lit {
                // Segment span starts at the delimiter that preceded it
                // (the opening quote or the `}` that ended an interpolation).
                let segment_start = self.pos - 1;
                let segment_ender = self.advance_until_unescaped_one_of(&['{', quote][..])?;
                // Content excludes both bounding delimiters; unescape `\x`.
                let lit_content = &self.source[segment_start + 1..self.pos - 1];
                let lit_content = ESCAPE_REPLACE_REGEX.replace_all(lit_content, "$1").to_string();
                elements.push((segment_start + self.offset, StrLitSegment::Literal(lit_content), self.pos + self.offset));
                if segment_ender == '{' {
                    in_string_lit = false;
                } else if segment_ender == quote {
                    return Some(Ok((str_lit_start + self.offset, elements, self.pos + self.offset)));
                }
            } else {
                // Interpolation mode: lex ordinary tokens until the `}`.
                let segment_start = self.pos;
                let mut toks = Vec::new();
                while self.pos < self.source.len() && !self.remaining().starts_with('}') {
                    match self.next_token()? {
                        Ok(tok) => toks.push(tok),
                        Err(err) => return Some(Err(err)),
                    }
                }
                elements.push((segment_start + self.offset, StrLitSegment::Interp(toks), self.pos + self.offset));
                // Step past the closing `}` and resume literal mode.
                self.pos += 1;
                in_string_lit = true;
            }
        }
    }
}
impl<'s> Iterator for Lexer<'s> {
type Item = Result<Sp<Token>, LexicalError>;
fn next(&mut self) -> Option<Self::Item> {
self.next_token()
}
} }
#[derive(Debug, Eq, PartialEq, Copy, Clone)] #[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct LexicalError(pub usize, pub usize, pub usize); pub struct LexicalError(pub Span);
impl Spanned for LexicalError { impl Spanned for LexicalError {
fn span(&self) -> Span { fn span(&self) -> Span {
Span(self.0, self.1, self.2) self.0
} }
} }
impl std::fmt::Display for LexicalError { impl std::fmt::Display for LexicalError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Lexical error at {}..{}", self.0, self.1) write!(f, "Lexical error at {}", self.0)
}
}
pub type SpannedResult<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
pub struct Lexer<'input> {
lexer: logos::SpannedIter<'input, Token>,
byte_offset: usize,
file_id: usize,
}
impl<'input> Lexer<'input> {
pub fn new(file_id: usize, byte_offset: usize, text: &'input str) -> Self {
Lexer { lexer: logos::Lexer::new(text).spanned(), byte_offset, file_id }
}
}
impl<'input> Iterator for Lexer<'input> {
type Item = SpannedResult<Token, usize, LexicalError>;
fn next(&mut self) -> Option<Self::Item> {
let (token, range) = self.lexer.next()?;
let range = (range.start + self.byte_offset, range.end + self.byte_offset);
if token == Token::Error {
Some(Err(LexicalError(range.0, range.1, self.file_id)))
} else {
Some(Ok((range.0, token, range.1)))
}
} }
} }
#[cfg(test)] #[cfg(test)]
#[test] mod test {
fn test_simplexpr_lexer() { use super::*;
use itertools::Itertools; use itertools::Itertools;
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"(foo + - "()" "a\"b" true false [] 12.2)"#).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"" " + music"#).collect_vec()); #[test]
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"foo ; bar"#).collect_vec()); fn test_simplexpr_lexer_basic() {
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"bar "foo""#).collect_vec());
}
#[test]
fn test_simplexpr_lexer_str_interpolate() {
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#" "foo {2 * 2} bar" "#).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#" "foo {(2 * 2) + "{5 + 5}"} bar" "#).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#" "a\"b\{}" "#).collect_vec());
}
// insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"(foo + - "()" "a\"b" true false [] 12.2)"#).collect_vec());
// insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"" " + music"#).collect_vec());
// insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"foo ; bar"#).collect_vec());
} }

View file

@ -1,32 +0,0 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, 0, r#\"\" \" + music\"#).collect_vec()"
---
[
Ok(
(
0,
StrLit(
"\"\u{f001} \"",
),
8,
),
),
Ok(
(
9,
Plus,
10,
),
),
Ok(
(
11,
Ident(
"music",
),
16,
),
),
]

View file

@ -1,16 +0,0 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, 0, r#\"foo ; bar\"#).collect_vec()"
---
[
Ok(
(
0,
Ident(
"foo",
),
3,
),
),
]

View file

@ -1,99 +0,0 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, r#\"(foo + - \"()\" \"a\\\"b\" true false [] 12.2)\"#).collect_vec()"
---
[
Ok(
(
0,
LPren,
1,
),
),
Ok(
(
1,
Ident(
"foo",
),
4,
),
),
Ok(
(
5,
Plus,
6,
),
),
Ok(
(
7,
Minus,
8,
),
),
Ok(
(
9,
StrLit(
"\"()\"",
),
13,
),
),
Ok(
(
14,
StrLit(
"\"a\"b\"",
),
20,
),
),
Ok(
(
21,
True,
25,
),
),
Ok(
(
26,
False,
31,
),
),
Ok(
(
32,
LBrack,
33,
),
),
Ok(
(
33,
RBrack,
34,
),
),
Ok(
(
35,
NumLit(
"12.2",
),
39,
),
),
Ok(
(
39,
RPren,
40,
),
),
]

View file

@ -0,0 +1,33 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, 0, r#\"bar \"foo\"\"#).collect_vec()"
---
[
Ok(
(
0,
Ident(
"bar",
),
3,
),
),
Ok(
(
4,
StringLit(
[
(
4,
Literal(
"foo",
),
9,
),
],
),
9,
),
),
]

View file

@ -0,0 +1,122 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, 0, r#\" \"foo {(2 * 2) + \"{5 + 5}\"} bar\" \"#).collect_vec()"
---
[
Ok(
(
1,
StringLit(
[
(
1,
Literal(
"foo ",
),
7,
),
(
7,
Interp(
[
(
7,
LPren,
8,
),
(
8,
NumLit(
"2",
),
9,
),
(
10,
Times,
11,
),
(
12,
NumLit(
"2",
),
13,
),
(
13,
RPren,
14,
),
(
15,
Plus,
16,
),
(
17,
StringLit(
[
(
17,
Literal(
"",
),
19,
),
(
19,
Interp(
[
(
19,
NumLit(
"5",
),
20,
),
(
21,
Plus,
22,
),
(
23,
NumLit(
"5",
),
24,
),
],
),
24,
),
(
24,
Literal(
"",
),
26,
),
],
),
26,
),
],
),
26,
),
(
26,
Literal(
" bar",
),
32,
),
],
),
32,
),
),
]

View file

@ -0,0 +1,24 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, 0, r#\" \"a\\\"b\\{}\" \"#).collect_vec()"
---
[
Ok(
(
1,
StringLit(
[
(
1,
Literal(
"a\"b{}",
),
10,
),
],
),
10,
),
),
]

View file

@ -0,0 +1,58 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "Lexer::new(0, 0, r#\" \"foo {2 * 2} bar\" \"#).collect_vec()"
---
[
Ok(
(
1,
StringLit(
[
(
1,
Literal(
"foo ",
),
7,
),
(
7,
Interp(
[
(
7,
NumLit(
"2",
),
8,
),
(
9,
Times,
10,
),
(
11,
NumLit(
"2",
),
12,
),
],
),
12,
),
(
12,
Literal(
" bar",
),
18,
),
],
),
18,
),
),
]

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"\\\"foo\\\" + 12.4\"))" expression: "p.parse(0, Lexer::new(0, 0, \"\\\"foo\\\" + 12.4\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"hi[\\\"ho\\\"]\"))" expression: "p.parse(0, Lexer::new(0, 0, \"hi[\\\"ho\\\"]\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"foo.bar.baz\"))" expression: "p.parse(0, Lexer::new(0, 0, \"foo.bar.baz\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"foo.bar[2 + 2] * asdf[foo.bar]\"))" expression: "p.parse(0, Lexer::new(0, 0, \"foo.bar[2 + 2] * asdf[foo.bar]\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"2 + 5\"))" expression: "p.parse(0, Lexer::new(0, 0, \"2 + 5\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"2 * 5 + 1 * 1 + 3\"))" expression: "p.parse(0, Lexer::new(0, 0, \"2 * 5 + 1 * 1 + 3\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"(1 + 2) * 2\"))" expression: "p.parse(0, Lexer::new(0, 0, \"(1 + 2) * 2\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"1 + true ? 2 : 5\"))" expression: "p.parse(0, Lexer::new(0, 0, \"1 + true ? 2 : 5\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"1 + true ? 2 : 5 + 2\"))" expression: "p.parse(0, Lexer::new(0, 0, \"1 + true ? 2 : 5 + 2\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"1 + (true ? 2 : 5) + 2\"))" expression: "p.parse(0, Lexer::new(0, 0, \"1 + (true ? 2 : 5) + 2\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"foo(1, 2)\"))" expression: "p.parse(0, Lexer::new(0, 0, \"foo(1, 2)\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"! false || ! true\"))" expression: "p.parse(0, Lexer::new(0, 0, \"! false || ! true\"))"
--- ---
Ok( Ok(

View file

@ -1,6 +1,6 @@
--- ---
source: crates/simplexpr/src/parser/mod.rs source: crates/simplexpr/src/parser/mod.rs
expression: "p.parse(0, Lexer::new(0, \"1\"))" expression: "p.parse(0, Lexer::new(0, 0, \"1\"))"
--- ---
Ok( Ok(

View file

@ -1,8 +1,7 @@
use crate::ast::{SimplExpr::{self, *}, BinOp::*, UnaryOp::*}; use crate::ast::{SimplExpr::{self, *}, BinOp::*, UnaryOp::*};
use eww_shared_util::{Span, VarName}; use eww_shared_util::{Span, VarName};
use crate::parser::lexer::{Token, LexicalError}; use crate::parser::lexer::{Token, LexicalError, StrLitSegment, Sp};
use crate::parser::lalrpop_helpers::*; use crate::parser::lalrpop_helpers::*;
use lalrpop_util::ParseError;
grammar(fid: usize); grammar(fid: usize);
@ -42,9 +41,7 @@ extern {
"identifier" => Token::Ident(<String>), "identifier" => Token::Ident(<String>),
"number" => Token::NumLit(<String>), "number" => Token::NumLit(<String>),
"string" => Token::StrLit(<String>), "string" => Token::StringLit(<Vec<Sp<StrLitSegment>>>),
"lexer_error" => Token::Error,
} }
} }
@ -62,11 +59,15 @@ Comma<T>: Vec<T> = {
pub Expr: SimplExpr = { pub Expr: SimplExpr = {
#[precedence(level="0")] #[precedence(level="0")]
<l:@L> "lexer_error" <r:@R> =>? { //<l:@L> "lexer_error" <r:@R> =>? {
Err(ParseError::User { error: LexicalError(l, r, fid) }) // Err(ParseError::User { error: LexicalError(l, r, fid) })
}, //},
<l:@L> <x:"string"> <r:@R> =>? parse_stringlit(Span(l, r, fid), x),
<l:@L> <x:"number"> <r:@R> => SimplExpr::literal(Span(l, r, fid), x),
<l:@L> "true" <r:@R> => SimplExpr::literal(Span(l, r, fid), "true".into()),
<l:@L> "false" <r:@R> => SimplExpr::literal(Span(l, r, fid), "false".into()),
<Literal>,
<l:@L> <ident:"identifier"> <r:@R> => VarRef(Span(l, r, fid), VarName(ident.to_string())), <l:@L> <ident:"identifier"> <r:@R> => VarRef(Span(l, r, fid), VarName(ident.to_string())),
"(" <ExprReset> ")", "(" <ExprReset> ")",
@ -109,14 +110,3 @@ pub Expr: SimplExpr = {
}; };
ExprReset = <Expr>; ExprReset = <Expr>;
Literal: SimplExpr = {
<l:@L> <x:StrLit> <r:@R> => SimplExpr::literal(Span(l, r, fid), x),
<l:@L> <x:"number"> <r:@R> => SimplExpr::literal(Span(l, r, fid), x),
<l:@L> "true" <r:@R> => SimplExpr::literal(Span(l, r, fid), "true".into()),
<l:@L> "false" <r:@R> => SimplExpr::literal(Span(l, r, fid), "false".into()),
}
StrLit: String = {
<x:"string"> => x[1..x.len() - 1].to_owned(),
};

View file

@ -14,7 +14,7 @@ Config(
widget: WidgetUse( widget: WidgetUse(
name: "foo", name: "foo",
attrs: Attributes( attrs: Attributes(
span: Span(47, 47, 51), span: Span(51, 61, 62),
attrs: { attrs: {
AttrName("arg"): AttrEntry( AttrName("arg"): AttrEntry(
key_span: Span(52, 56, 0), key_span: Span(52, 56, 0),
@ -52,7 +52,7 @@ Config(
widget: WidgetUse( widget: WidgetUse(
name: "bar", name: "bar",
attrs: Attributes( attrs: Attributes(
span: Span(463, 463, 467), span: Span(467, 478, 479),
attrs: { attrs: {
AttrName("arg"): AttrEntry( AttrName("arg"): AttrEntry(
key_span: Span(468, 472, 0), key_span: Span(468, 472, 0),

View file

@ -38,11 +38,14 @@
(defwidget time [] (defwidget time []
(box :class "time" (box :class "time"
{hour + ":" + min + " " + month + " " + number_day + ", " + year_full})) {"{hour}:{min} {month + "the month" * 2} {number_day}, {year_full}"}))
;{hour + ":" + min + " " + month + " " + number_day + ", " + year_full}))
(defvar music "bruh")
(defpoll music :interval "5s" "playerctl metadata --format '{{ artist }} - {{ title }}' || true") ;(defpoll music :interval "5s" "playerctl metadata --format '{{ artist }} - {{ title }}' || true")
(defpoll volume :interval "16s" "scripts/getvol") (defvar volume "20")
;(defpoll volume :interval "16s" "scripts/getvol")
(defpoll number_day :interval "5h" "date '+%d'") (defpoll number_day :interval "5h" "date '+%d'")
(defpoll month :interval "10h" "date '+%b'") (defpoll month :interval "10h" "date '+%b'")