Fully implement string interpolation with ${} syntax

elkowar 2021-07-31 13:44:23 +02:00
parent 5cde0228fb
commit 24c9fee204
No known key found for this signature in database
GPG key ID: E321AD71B1D1F27F
36 changed files with 440 additions and 163 deletions
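The gist of the change: interpolation inside simplexpr string literals is now written with an explicit ${expr} marker (instead of the previous bare {expr}), interpolations may nest, and a literal brace can be escaped as \{. As a rough usage sketch (not part of the diff; it assumes the lexer API shown in the hunks below, Lexer::new(file_id, offset, source)):

// Hedged sketch: lexing a string literal with the new ${} interpolation syntax,
// mirroring the snapshot tests added in crates/simplexpr/src/parser/lexer.rs.
use simplexpr::parser::lexer::Lexer;

fn main() {
    // Yields a single StringLit token whose segments alternate between
    // Literal("foo "), Interp([NumLit("2"), Times, NumLit("2")]) and Literal(" bar").
    for tok in Lexer::new(0, 0, r#""foo ${2 * 2} bar""#) {
        println!("{:?}", tok);
    }
}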

Cargo.lock generated
View file

@@ -482,6 +482,7 @@ name = "eww_shared_util"
version = "0.1.0"
dependencies = [
"derive_more",
"insta",
"serde",
]

View file

@@ -6,7 +6,7 @@ use codespan_reporting::{
};
use eww_shared_util::Span;
use once_cell::sync::Lazy;
use simplexpr::eval::EvalError;
use simplexpr::{dynval::ConversionError, eval::EvalError};
use yuck::{config::file_provider::YuckFiles, error::AstError, format_diagnostic::ToDiagnostic, gen_diagnostic};
use crate::error::DiagError;
@@ -22,6 +22,8 @@ pub fn anyhow_err_to_diagnostic(err: &anyhow::Error) -> Diagnostic<usize> {
err.diag.clone()
} else if let Some(err) = err.downcast_ref::<AstError>() {
err.to_diagnostic()
} else if let Some(err) = err.downcast_ref::<ConversionError>() {
err.to_diagnostic()
} else if let Some(err) = err.downcast_ref::<EvalError>() {
err.to_diagnostic()
} else {

View file

@@ -6,3 +6,7 @@ edition = "2018"
[dependencies]
serde = {version = "1.0", features = ["derive"]}
derive_more = "0.99"
[dev-dependencies]
insta = "1.7"

View file

@@ -3,3 +3,36 @@ pub mod wrappers;
pub use span::*;
pub use wrappers::*;
#[macro_export]
macro_rules! snapshot_debug {
( $($name:ident => $test:expr),* $(,)?) => {
$(
#[test]
fn $name() { ::insta::assert_debug_snapshot!($test); }
)*
};
}
#[macro_export]
macro_rules! snapshot_string {
( $($name:ident => $test:expr),* $(,)?) => {
$(
#[test]
fn $name() { ::insta::assert_snapshot!($test); }
)*
};
}
#[macro_export]
macro_rules! snapshot_ron {
( $($name:ident => $test:expr),* $(,)?) => {
$(
#[test]
fn $name() {
::insta::with_settings!({sort_maps => true}, {
::insta::assert_ron_snapshot!($test);
});
}
)*
};
}
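These snapshot helper macros are consumed by the lexer tests further down in this commit. A hedged usage sketch (hypothetical test names; requires insta as a dev-dependency, as added to eww_shared_util above):

// Each entry expands into its own #[test] fn wrapping the matching insta assertion.
#[cfg(test)]
mod example {
    use eww_shared_util::snapshot_string;

    snapshot_string! {
        // expands to: #[test] fn uppercase() { ::insta::assert_snapshot!("eww".to_uppercase()); }
        uppercase => "eww".to_uppercase(),
        greeting => format!("hello {}", "world"),
    }
}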

View file

@@ -15,9 +15,13 @@ pub enum EvalError {
#[error("Invalid regex: {0}")]
InvalidRegex(#[from] regex::Error),
// TODO unresolved and unknown are the same for the user,....
#[error("got unresolved variable `{0}`")]
UnresolvedVariable(VarName),
#[error("Unknown variable {0}")]
UnknownVariable(VarName),
#[error(transparent)]
ConversionError(#[from] ConversionError),
@@ -27,9 +31,6 @@ pub enum EvalError {
#[error("Unknown function {0}")]
UnknownFunction(String),
#[error("Unknown variable {0}")]
UnknownVariable(VarName),
#[error("Unable to index into value {0}")]
CannotIndex(String),
@@ -41,6 +42,13 @@ impl EvalError {
pub fn at(self, span: Span) -> Self {
Self::Spanned(span, Box::new(self))
}
pub fn map_in_span(self, f: impl FnOnce(Self) -> Self) -> Self {
match self {
EvalError::Spanned(span, err) => EvalError::Spanned(span, Box::new(err.map_in_span(f))),
other => f(other),
}
}
}
impl Spanned for EvalError {
@@ -145,8 +153,10 @@ impl SimplExpr {
pub fn eval_no_vars(&self) -> Result<DynVal, EvalError> {
match self.eval(&HashMap::new()) {
Ok(x) => Ok(x),
Err(EvalError::UnknownVariable(name)) => Err(EvalError::NoVariablesAllowed(name)),
Err(x) => Err(x),
Err(x) => Err(x.map_in_span(|err| match err {
EvalError::UnknownVariable(name) | EvalError::UnresolvedVariable(name) => EvalError::NoVariablesAllowed(name),
other => other,
})),
}
}
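map_in_span applies the rewrite to the innermost error while keeping any Spanned wrappers intact, which is why eval_no_vars can now turn variable lookups into NoVariablesAllowed even when the error already carries span information. A hedged sketch of the behaviour (hypothetical span and variable; assumes VarName is the usual newtype over String):

// The variable error is rewritten even though it sits inside a Spanned wrapper.
use eww_shared_util::{Span, VarName};
use simplexpr::eval::EvalError;

fn main() {
    let err = EvalError::Spanned(
        Span(0, 4, 0),
        Box::new(EvalError::UnknownVariable(VarName("foo".to_string()))),
    );
    let err = err.map_in_span(|e| match e {
        EvalError::UnknownVariable(name) | EvalError::UnresolvedVariable(name) => {
            EvalError::NoVariablesAllowed(name)
        }
        other => other,
    });
    // Prints roughly: Spanned(Span(0, 4, 0), NoVariablesAllowed(VarName("foo")))
    println!("{:?}", err);
}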

View file

@@ -1,4 +1,5 @@
#![feature(box_patterns)]
#![feature(pattern)]
#![feature(box_syntax)]
#![feature(try_blocks)]
#![feature(unwrap_infallible)]

View file

@@ -1,3 +1,5 @@
use std::str::pattern::Pattern;
use eww_shared_util::{Span, Spanned};
use once_cell::sync::Lazy;
use regex::{escape, Regex, RegexSet};
@@ -63,9 +65,10 @@ macro_rules! regex_rules {
}
static ESCAPE_REPLACE_REGEX: Lazy<regex::Regex> = Lazy::new(|| Regex::new(r"\\(.)").unwrap());
pub static STR_INTERPOLATION_START: &str = "${";
pub static STR_INTERPOLATION_END: &str = "}";
regex_rules! {
escape(r"+") => |_| Token::Plus,
escape(r"-") => |_| Token::Minus,
escape(r"*") => |_| Token::Times,
@@ -100,6 +103,7 @@ regex_rules! {
r"[+-]?(?:[0-9]+[.])?[0-9]+" => |x| Token::NumLit(x.to_string())
}
#[derive(Debug)]
pub struct Lexer<'s> {
file_id: usize,
source: &'s str,
@@ -117,6 +121,10 @@ impl<'s> Lexer<'s> {
&self.source[self.pos..]
}
pub fn continues_with(&self, pat: impl Pattern<'s>) -> bool {
self.remaining().starts_with(pat)
}
pub fn next_token(&mut self) -> Option<Result<Sp<Token>, LexicalError>> {
loop {
if self.failed || self.pos >= self.source.len() {
@@ -125,9 +133,7 @@ impl<'s> Lexer<'s> {
let remaining = self.remaining();
if remaining.starts_with(&['"', '\'', '`'][..]) {
return self
.string_lit()
.map(|x| x.map(|(lo, segs, hi)| (lo + self.offset, Token::StringLit(segs), hi + self.offset)));
return self.string_lit().map(|x| x.map(|(lo, segs, hi)| (lo, Token::StringLit(segs), hi)));
} else {
let match_set = LEXER_REGEX_SET.matches(remaining);
let matched_token = match_set
@@ -148,7 +154,7 @@ impl<'s> Lexer<'s> {
let tok_str = &self.source[self.pos..self.pos + len];
let old_pos = self.pos;
self.pos += len;
self.advance_by(len);
match LEXER_FNS[i](tok_str.to_string()) {
Token::Skip | Token::Comment => {}
token => {
@@ -159,54 +165,56 @@ impl<'s> Lexer<'s> {
}
}
fn advance_until_char_boundary(&mut self) {
fn advance_by(&mut self, n: usize) {
self.pos += n;
while self.pos < self.source.len() && !self.source.is_char_boundary(self.pos) {
self.pos += 1;
}
}
fn advance_until_one_of(&mut self, pat: &[char]) -> Option<char> {
for (idx, cur) in self.remaining().char_indices() {
if let Some(matched) = pat.iter().find(|p| **p == cur) {
self.pos += idx + 1;
return Some(*matched);
fn advance_until_one_of<'a>(&mut self, pat: &[&'a str]) -> Option<&'a str> {
loop {
let remaining = self.remaining();
if remaining.is_empty() {
return None;
} else if let Some(matched) = pat.iter().find(|&&p| remaining.starts_with(p)) {
self.advance_by(matched.len());
return Some(matched);
} else {
self.advance_by(1);
}
}
self.pos = self.source.len();
return None;
}
fn advance_until_unescaped_one_of(&mut self, pat: &[char]) -> Option<char> {
fn advance_until_unescaped_one_of<'a>(&mut self, pat: &[&'a str]) -> Option<&'a str> {
let mut pattern = pat.to_vec();
pattern.push('\\');
pattern.push("\\");
match self.advance_until_one_of(pattern.as_slice()) {
Some('\\') => {
self.pos += 1;
self.advance_until_char_boundary();
Some("\\") => {
self.advance_by(1);
self.advance_until_unescaped_one_of(pat)
}
result => result,
}
}
fn string_lit(&mut self) -> Option<Result<Sp<Vec<Sp<StrLitSegment>>>, LexicalError>> {
let quote = self.remaining().chars().next().unwrap();
pub fn string_lit(&mut self) -> Option<Result<Sp<Vec<Sp<StrLitSegment>>>, LexicalError>> {
let quote = self.remaining().chars().next()?.to_string();
let str_lit_start = self.pos;
self.pos += 1;
self.advance_until_char_boundary();
self.advance_by(quote.len());
let mut elements = Vec::new();
let mut in_string_lit = true;
loop {
if in_string_lit {
let segment_start = self.pos - 1;
let segment_start = self.pos - quote.len();
let segment_ender = self.advance_until_unescaped_one_of(&['{', quote][..])?;
let lit_content = &self.source[segment_start + 1..self.pos - 1];
let segment_ender = self.advance_until_unescaped_one_of(&[STR_INTERPOLATION_START, &quote])?;
let lit_content = &self.source[segment_start + quote.len()..self.pos - segment_ender.len()];
let lit_content = ESCAPE_REPLACE_REGEX.replace_all(lit_content, "$1").to_string();
elements.push((segment_start + self.offset, StrLitSegment::Literal(lit_content), self.pos + self.offset));
if segment_ender == '{' {
if segment_ender == STR_INTERPOLATION_START {
in_string_lit = false;
} else if segment_ender == quote {
return Some(Ok((str_lit_start + self.offset, elements, self.pos + self.offset)));
@@ -214,14 +222,14 @@ impl<'s> Lexer<'s> {
} else {
let segment_start = self.pos;
let mut toks = Vec::new();
while self.pos < self.source.len() && !self.remaining().starts_with('}') {
while self.pos < self.source.len() && !self.remaining().starts_with(STR_INTERPOLATION_END) {
match self.next_token()? {
Ok(tok) => toks.push(tok),
Err(err) => return Some(Err(err)),
}
}
elements.push((segment_start + self.offset, StrLitSegment::Interp(toks), self.pos + self.offset));
self.pos += 1;
self.advance_by(STR_INTERPOLATION_END.len());
in_string_lit = true;
}
}
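string_lit now alternates between literal segments and interpolation segments, entering an interpolation at STR_INTERPOLATION_START (${), finishing at the closing quote, and resolving backslash escapes through ESCAPE_REPLACE_REGEX. A hedged sketch of calling it directly (hypothetical input; the segment layout is shown only approximately):

// string_lit consumes exactly one quoted literal and returns its segments;
// interpolations come back as raw simplexpr tokens.
use simplexpr::parser::lexer::Lexer;

fn main() {
    let mut lexer = Lexer::new(0, 0, r#""x: ${x}" rest"#);
    if let Some(Ok((lo, segments, hi))) = lexer.string_lit() {
        // segments is roughly [Literal("x: "), Interp([Ident("x")]), Literal("")],
        // and the lexer stops right after the closing quote, leaving " rest" unread.
        println!("{}..{}: {:?}", lo, hi, segments);
    }
}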
@@ -254,20 +262,27 @@ impl std::fmt::Display for LexicalError {
#[cfg(test)]
mod test {
use super::*;
use eww_shared_util::snapshot_string;
use itertools::Itertools;
#[test]
fn test_simplexpr_lexer_basic() {
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"bar "foo""#).collect_vec());
macro_rules! v {
($x:literal) => {
Lexer::new(0, 0, $x)
.map(|x| match x {
Ok((l, x, r)) => format!("({}, {:?}, {})", l, x, r),
Err(err) => format!("{}", err),
})
.join("\n")
};
}
#[test]
fn test_simplexpr_lexer_str_interpolate() {
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#" "foo {2 * 2} bar" "#).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#" "foo {(2 * 2) + "{5 + 5}"} bar" "#).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, 0, r#" "a\"b\{}" "#).collect_vec());
snapshot_string! {
basic => v!(r#"bar "foo""#),
interpolation_1 => v!(r#" "foo ${2 * 2} bar" "#),
interpolation_nested => v!(r#" "foo ${(2 * 2) + "${5 + 5}"} bar" "#),
escaping => v!(r#" "a\"b\{}" "#),
comments => v!("foo ; bar"),
weird_char_boundaries => v!(r#"" " + music"#),
symbol_spam => v!(r#"(foo + - "()" "a\"b" true false [] 12.2)"#),
}
// insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"(foo + - "()" "a\"b" true false [] 12.2)"#).collect_vec());
// insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"" " + music"#).collect_vec());
// insta::assert_debug_snapshot!(Lexer::new(0, 0, r#"foo ; bar"#).collect_vec());
}

View file

@@ -0,0 +1,7 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(r#\"bar \"foo\"\"#)"
---
(0, Ident("bar"), 3)
(4, StringLit([(4, Literal("foo"), 9)]), 9)

View file

@@ -0,0 +1,6 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(\"foo ; bar\")"
---
(0, Ident("foo"), 3)

View file

@@ -0,0 +1,6 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(r#\" \"a\\\"b\\{}\" \"#)"
---
(1, StringLit([(1, Literal("a\"b{}"), 10)]), 10)

View file

@@ -0,0 +1,6 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(r#\" \"foo ${2 * 2} bar\" \"#)"
---
(1, StringLit([(1, Literal("foo "), 8), (8, Interp([(8, NumLit("2"), 9), (10, Times, 11), (12, NumLit("2"), 13)]), 13), (13, Literal(" bar"), 19)]), 19)

View file

@@ -0,0 +1,6 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(r#\" \"foo ${(2 * 2) + \"${5 + 5}\"} bar\" \"#)"
---
(1, StringLit([(1, Literal("foo "), 8), (8, Interp([(8, LPren, 9), (9, NumLit("2"), 10), (11, Times, 12), (13, NumLit("2"), 14), (14, RPren, 15), (16, Plus, 17), (18, StringLit([(18, Literal(""), 21), (21, Interp([(21, NumLit("5"), 22), (23, Plus, 24), (25, NumLit("5"), 26)]), 26), (26, Literal(""), 28)]), 28)]), 28), (28, Literal(" bar"), 34)]), 34)

View file

@@ -0,0 +1,17 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(r#\"(foo + - \"()\" \"a\\\"b\" true false [] 12.2)\"#)"
---
(0, LPren, 1)
(1, Ident("foo"), 4)
(5, Plus, 6)
(7, Minus, 8)
(9, StringLit([(9, Literal("()"), 13)]), 13)
(14, StringLit([(14, Literal("a\"b"), 20)]), 20)
(21, True, 25)
(26, False, 31)
(32, LBrack, 33)
(33, RBrack, 34)
(35, NumLit("12.2"), 39)
(39, RPren, 40)

View file

@@ -0,0 +1,8 @@
---
source: crates/simplexpr/src/parser/lexer.rs
expression: "v!(r#\"\" \" + music\"#)"
---
(0, StringLit([(0, Literal("\u{f001} "), 8)]), 8)
(9, Plus, 10)
(11, Ident("music"), 16)

View file

@@ -18,7 +18,7 @@ Config(
attrs: {
AttrName("arg"): AttrEntry(
key_span: Span(52, 56, 0),
value: Literal(Span(57, 61, 0), DynVal("hi", Span(18446744073709551615, 18446744073709551615, 18446744073709551615))),
value: SimplExpr(Span(57, 61, 0), Literal(Span(57, 61, 0), DynVal("hi", Span(57, 61, 0)))),
),
},
),
@@ -56,7 +56,7 @@ Config(
attrs: {
AttrName("arg"): AttrEntry(
key_span: Span(468, 472, 0),
value: Literal(Span(473, 478, 0), DynVal("bla", Span(18446744073709551615, 18446744073709551615, 18446744073709551615))),
value: SimplExpr(Span(473, 478, 0), Literal(Span(473, 478, 0), DynVal("bla", Span(473, 478, 0)))),
),
},
),
@@ -79,7 +79,7 @@ Config(
var_definitions: {
VarName("some_var"): VarDefinition(
name: VarName("some_var"),
initial_value: DynVal("bla", Span(18446744073709551615, 18446744073709551615, 18446744073709551615)),
initial_value: DynVal("bla", Span(89, 94, 0)),
span: Span(72, 95, 0),
),
},

View file

@@ -39,6 +39,9 @@ pub enum AstError {
#[error("{1}")]
ErrorNote(String, #[source] Box<AstError>),
#[error(transparent)]
SimplExpr(#[from] simplexpr::error::Error),
#[error(transparent)]
ConversionError(#[from] dynval::ConversionError),
@@ -94,6 +97,7 @@ impl Spanned for AstError {
AstError::ParseError { file_id, source } => get_parse_error_span(*file_id, source),
AstError::ErrorNote(_, err) => err.span(),
AstError::NoMoreElementsExpected(span) => *span,
AstError::SimplExpr(err) => err.span(),
}
}
}

View file

@@ -118,6 +118,7 @@ impl ToDiagnostic for AstError {
AstError::ErrorNote(note, source) => source.to_diagnostic().with_notes(vec![note.to_string()]),
AstError::ValidationError(source) => source.to_diagnostic(),
AstError::NoMoreElementsExpected(span) => gen_diagnostic!(self, span),
AstError::SimplExpr(source) => source.to_diagnostic(),
}
}
}
@@ -185,12 +186,13 @@ fn lalrpop_error_to_diagnostic<T: std::fmt::Display, E: Spanned + ToDiagnostic>(
impl ToDiagnostic for simplexpr::error::Error {
fn to_diagnostic(&self) -> Diagnostic<usize> {
use simplexpr::error::Error::*;
dbg!(&self);
match self {
ParseError { source, file_id } => lalrpop_error_to_diagnostic(source, *file_id),
ConversionError(error) => error.to_diagnostic(),
Eval(error) => error.to_diagnostic(),
Other(error) => gen_diagnostic!(error),
Spanned(span, error) => gen_diagnostic!(error, span),
Spanned(span, error) => error.to_diagnostic().with_label(span_to_primary_label(*span)),
}
}
}
@@ -205,7 +207,10 @@ impl ToDiagnostic for simplexpr::eval::EvalError {
fn to_diagnostic(&self) -> Diagnostic<usize> {
use simplexpr::eval::EvalError::*;
match self {
UnresolvedVariable(name) | UnknownVariable(name) | NoVariablesAllowed(name) => gen_diagnostic! {
NoVariablesAllowed(name) => gen_diagnostic!(self),
// TODO the note here is confusing when it's an unknown variable being used _within_ a string literal / simplexpr
// it only really makes sense on top-level symbols
UnresolvedVariable(name) | UnknownVariable(name) => gen_diagnostic! {
msg = self,
note = format!("If you meant to use the literal value \"{}\", surround the value in quotes", name)
},

View file

@@ -39,12 +39,29 @@ macro_rules! return_or_put_back {
impl<I: Iterator<Item = Ast>> AstIterator<I> {
return_or_put_back!(expect_symbol, AstType::Symbol, (Span, String) = Ast::Symbol(span, x) => (span, x));
return_or_put_back!(expect_literal, AstType::Literal, (Span, DynVal) = Ast::Literal(span, x) => (span, x));
// return_or_put_back!(expect_literal, AstType::Literal, (Span, DynVal) = Ast::Literal(span, x) => (span, x));
return_or_put_back!(expect_list, AstType::List, (Span, Vec<Ast>) = Ast::List(span, x) => (span, x));
return_or_put_back!(expect_array, AstType::Array, (Span, Vec<Ast>) = Ast::Array(span, x) => (span, x));
pub fn expect_literal(&mut self) -> AstResult<(Span, DynVal)> {
match self.expect_any()? {
// Ast::List(_, _) => todo!(),
// Ast::Array(_, _) => todo!(),
// Ast::Keyword(_, _) => todo!(),
// Ast::Symbol(_, _) => todo!(),
// Ast::Literal(_, _) => todo!(),
Ast::SimplExpr(span, expr) => Ok((span, dbg!(expr.eval_no_vars()).map_err(|e| AstError::SimplExpr(e.into()))?)),
other => {
let span = other.span();
let actual_type = other.expr_type();
self.put_back(other);
Err(AstError::WrongExprType(span, AstType::Literal, actual_type))
}
}
}
pub fn new(span: Span, iter: I) -> Self {
AstIterator { remaining_span: span, iter: itertools::put_back(iter) }
}

View file

@@ -1,8 +1,9 @@
use once_cell::sync::Lazy;
use regex::{Regex, RegexSet};
use regex::{escape, Regex, RegexSet};
use simplexpr::parser::lexer::{STR_INTERPOLATION_END, STR_INTERPOLATION_START};
use super::parse_error;
use eww_shared_util::{AttrName, Span, VarName};
use eww_shared_util::{AttrName, Span, Spanned, VarName};
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Token {
@@ -12,11 +13,10 @@ pub enum Token {
RBrack,
True,
False,
StrLit(String),
NumLit(String),
Symbol(String),
Keyword(String),
SimplExpr(String),
SimplExpr(Vec<(usize, simplexpr::parser::lexer::Token, usize)>),
Comment,
Skip,
}
@@ -30,11 +30,10 @@ impl std::fmt::Display for Token {
Token::RBrack => write!(f, "']'"),
Token::True => write!(f, "true"),
Token::False => write!(f, "false"),
Token::StrLit(x) => write!(f, "\"{}\"", x),
Token::NumLit(x) => write!(f, "{}", x),
Token::Symbol(x) => write!(f, "{}", x),
Token::Keyword(x) => write!(f, "{}", x),
Token::SimplExpr(x) => write!(f, "{{{}}}", x),
Token::SimplExpr(x) => write!(f, "{{{:?}}}", x.iter().map(|x| &x.1)),
Token::Comment => write!(f, ""),
Token::Skip => write!(f, ""),
}
@@ -42,7 +41,7 @@ impl std::fmt::Display for Token {
}
macro_rules! regex_rules {
($( $regex:literal => $token:expr),*) => {
($( $regex:expr => $token:expr),*) => {
static LEXER_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| { RegexSet::new(&[
$(format!("^{}", $regex)),*
]).unwrap()});
@@ -58,15 +57,12 @@ macro_rules! regex_rules {
static ESCAPE_REPLACE_REGEX: Lazy<regex::Regex> = Lazy::new(|| Regex::new(r"\\(.)").unwrap());
regex_rules! {
r"\(" => |_| Token::LPren,
r"\)" => |_| Token::RPren,
r"\[" => |_| Token::LBrack,
r"\]" => |_| Token::RBrack,
r"true" => |_| Token::True,
r"false" => |_| Token::False,
r#""(?:[^"\\]|\\.)*""# => |x| Token::StrLit(ESCAPE_REPLACE_REGEX.replace_all(&x, "$1").to_string()),
r#"`(?:[^`\\]|\\.)*`"# => |x| Token::StrLit(ESCAPE_REPLACE_REGEX.replace_all(&x, "$1").to_string()),
r#"'(?:[^'\\]|\\.)*'"# => |x| Token::StrLit(ESCAPE_REPLACE_REGEX.replace_all(&x, "$1").to_string()),
escape("(") => |_| Token::LPren,
escape(")") => |_| Token::RPren,
escape("[") => |_| Token::LBrack,
escape("]") => |_| Token::RBrack,
escape("true") => |_| Token::True,
escape("false") => |_| Token::False,
r#"[+-]?(?:[0-9]+[.])?[0-9]+"# => |x| Token::NumLit(x),
r#":[^\s\)\]}]+"# => |x| Token::Keyword(x),
r#"[a-zA-Z_!\?<>/\.\*-\+\-][^\s{}\(\)\[\](){}]*"# => |x| Token::Symbol(x),
@@ -85,9 +81,53 @@ impl Lexer {
pub fn new(file_id: usize, source: String) -> Self {
Lexer { source, file_id, failed: false, pos: 0 }
}
}
// TODO string literal interpolation stuff by looking for indexes of {{ and }}?
fn string_lit(&mut self) -> Option<Result<(usize, Token, usize), parse_error::ParseError>> {
let mut simplexpr_lexer = simplexpr::parser::lexer::Lexer::new(self.file_id, self.pos, &self.source[self.pos..]);
match simplexpr_lexer.string_lit() {
Some(Ok((lo, segments, hi))) => {
self.pos = hi;
self.advance_until_char_boundary();
Some(Ok((lo, Token::SimplExpr(vec![(lo, simplexpr::parser::lexer::Token::StringLit(segments), hi)]), hi)))
}
Some(Err(e)) => Some(Err(parse_error::ParseError::LexicalError(e.0))),
None => None,
}
}
fn simplexpr(&mut self) -> Option<Result<(usize, Token, usize), parse_error::ParseError>> {
self.pos += 1;
let mut simplexpr_lexer = simplexpr::parser::lexer::Lexer::new(self.file_id, self.pos, &self.source[self.pos..]);
let mut toks = Vec::new();
let mut end = self.pos;
loop {
match simplexpr_lexer.next_token() {
Some(Ok((lo, tok, hi))) => {
end = hi;
toks.push((lo, tok, hi));
}
Some(Err(err)) => {
dbg!(&simplexpr_lexer);
if simplexpr_lexer.continues_with('}') {
let start = toks.first().map(|x| x.0).unwrap_or(end);
self.pos = end + 1;
self.advance_until_char_boundary();
return Some(Ok((start, Token::SimplExpr(toks), end)));
} else {
return Some(Err(parse_error::ParseError::LexicalError(err.span())));
}
}
None => return None,
}
}
}
fn advance_until_char_boundary(&mut self) {
while self.pos < self.source.len() && !self.source.is_char_boundary(self.pos) {
self.pos += 1;
}
}
}
impl Iterator for Lexer {
type Item = Result<(usize, Token, usize), parse_error::ParseError>;
@@ -97,45 +137,17 @@ impl Iterator for Lexer {
if self.failed || self.pos >= self.source.len() {
return None;
}
let string = &self.source[self.pos..];
if string.starts_with('{') {
let expr_start = self.pos;
let mut in_string = None;
loop {
if self.pos >= self.source.len() {
return None;
}
while !self.source.is_char_boundary(self.pos) {
self.pos += 1;
}
let string = &self.source[self.pos..];
if string.starts_with('}') && in_string.is_none() {
self.pos += 1;
let tok_str = &self.source[expr_start..self.pos];
return Some(Ok((expr_start, Token::SimplExpr(tok_str.to_string()), self.pos - 1)));
} else if string.starts_with('"') || string.starts_with('\'') || string.starts_with('`') {
if let Some(quote) = in_string {
if string.starts_with(quote) {
in_string = None;
}
} else {
in_string = Some(string.chars().next().unwrap());
}
self.pos += 1;
} else if string.starts_with("\\\"") {
self.pos += 2;
} else {
self.pos += 1;
}
}
let remaining = &self.source[self.pos..];
if remaining.starts_with(&['"', '\'', '`'][..]) {
return self.string_lit();
} else if remaining.starts_with('{') {
return self.simplexpr();
} else {
let match_set = LEXER_REGEX_SET.matches(string);
let match_set = LEXER_REGEX_SET.matches(remaining);
let matched_token = match_set
.into_iter()
.map(|i: usize| {
let m = LEXER_REGEXES[i].find(string).unwrap();
let m = LEXER_REGEXES[i].find(remaining).unwrap();
(m.end(), i)
})
.min_by_key(|(_, x)| *x);
@@ -163,12 +175,28 @@ impl Iterator for Lexer {
}
#[cfg(test)]
#[test]
fn test_yuck_lexer() {
mod test {
use super::*;
use eww_shared_util::snapshot_string;
use itertools::Itertools;
insta::assert_debug_snapshot!(Lexer::new(0, r#"(foo + - "text" )"#.to_string()).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, r#"{ bla "} \" }" " \" "}"#.to_string()).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, r#""< \" >""#.to_string()).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, r#"{ " " + music}"#.to_string()).collect_vec());
insta::assert_debug_snapshot!(Lexer::new(0, r#"{ " } ' }" }"#.to_string()).collect_vec());
macro_rules! v {
($x:literal) => {
Lexer::new(0, 0, $x)
.map(|x| match x {
Ok((l, x, r)) => format!("({}, {:?}, {})", l, x, r),
Err(err) => format!("{}", err),
})
.join("\n")
};
}
snapshot_string! {
basic => r#"(foo + - "text" )"#,
escaped_strings => r#"{ bla "} \" }" " \" "}"#,
escaped_quote => r#""< \" >""#,
char_boundary => r#"{ " " + music}"#,
quotes_in_quotes => r#"{ " } ' }" }"#,
}
}
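For context, a hedged sketch (not part of the commit) of what the reworked yuck lexer now produces: quoted strings and {...} blocks are delegated to the simplexpr lexer, so Token::SimplExpr carries an already-lexed token stream instead of a raw String. The module path and the two-argument constructor are taken from the hunks above; treat the exact spans as illustrative only.

// Prints each token in the same "(lo, token, hi)" shape as the v! macro above.
use itertools::Itertools;
use yuck::parser::lexer::Lexer;

fn main() {
    let out = Lexer::new(0, r#"(label :text "hi ${name}")"#.to_string())
        .map(|tok| match tok {
            // The string literal arrives as Token::SimplExpr([StringLit([...])]).
            Ok((l, tok, r)) => format!("({}, {:?}, {})", l, tok, r),
            Err(err) => format!("{}", err),
        })
        .join("\n");
    println!("{}", out);
}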

View file

@@ -18,11 +18,10 @@ extern {
"]" => Token::RBrack,
"true" => Token::True,
"false" => Token::False,
"string" => Token::StrLit(<String>),
"number" => Token::NumLit(<String>),
"symbol" => Token::Symbol(<String>),
"keyword" => Token::Keyword(<String>),
"simplexpr" => Token::SimplExpr(<String>),
"simplexpr" => Token::SimplExpr(<Vec<(usize, simplexpr::parser::lexer::Token, usize)>>),
"comment" => Token::Comment,
}
}
@@ -45,22 +44,17 @@ Keyword: Ast = <l:@L> <x:"keyword"> <r:@R> => Ast::Keyword(Span(l, r, file_id),
Symbol: Ast = <l:@L> <x:"symbol"> <r:@R> => Ast::Symbol(Span(l, r, file_id), x.to_string());
Literal: String = {
<StrLit> => <>,
<Num> => <>,
<Bool> => <>,
};
StrLit: String = {
<x:"string"> => {
x[1..x.len() - 1].to_owned()
},
};
SimplExpr: SimplExpr = {
<l:@L> <x:"simplexpr"> =>? {
let expr = x[1..x.len() - 1].to_string();
simplexpr::parse_string(l + 1, file_id, &expr).map_err(|e| {
ParseError::User { error: parse_error::ParseError::SimplExpr(e) }})
let parser = simplexpr::simplexpr_parser::ExprParser::new();
parser.parse(file_id, x.into_iter().map(Ok))
.map_err(|e| ParseError::User {
error: parse_error::ParseError::SimplExpr(simplexpr::error::Error::from_parse_error(file_id, e))
})
}
}

View file

@@ -0,0 +1,6 @@
---
source: crates/yuck/src/parser/lexer.rs
expression: "r#\"(foo + - \"text\" )\"#"
---
(foo + - "text" )

View file

@@ -0,0 +1,6 @@
---
source: crates/yuck/src/parser/lexer.rs
expression: "r#\"{ \" \" + music}\"#"
---
{ " " + music}

View file

@@ -0,0 +1,6 @@
---
source: crates/yuck/src/parser/lexer.rs
expression: "r#\"\"< \\\" >\"\"#"
---
"< \" >"

View file

@@ -0,0 +1,6 @@
---
source: crates/yuck/src/parser/lexer.rs
expression: "r#\"{ bla \"} \\\" }\" \" \\\" \"}\"#"
---
{ bla "} \" }" " \" "}

View file

@@ -0,0 +1,6 @@
---
source: crates/yuck/src/parser/lexer.rs
expression: "r#\"{ \" } ' }\" }\"#"
---
{ " } ' }" }

View file

@@ -6,9 +6,47 @@ expression: "Lexer::new(0, r#\"{ bla \"} \\\" }\" \" \\\" \"}\"#.to_string()).collect_vec()"
[
Ok(
(
0,
2,
SimplExpr(
"{ bla \"} \\\" }\" \" \\\" \"}",
[
(
2,
Ident(
"bla",
),
5,
),
(
6,
StringLit(
[
(
6,
Literal(
"} \" }",
),
14,
),
],
),
14,
),
(
15,
StringLit(
[
(
15,
Literal(
" \" ",
),
21,
),
],
),
21,
),
],
),
21,
),

View file

@@ -7,8 +7,24 @@ expression: "Lexer::new(0, r#\"\"< \\\" >\"\"#.to_string()).collect_vec()"
Ok(
(
0,
StrLit(
"\"< \" >\"",
SimplExpr(
[
(
0,
StringLit(
[
(
0,
Literal(
"< \" >",
),
8,
),
],
),
8,
),
],
),
8,
),

View file

@@ -6,9 +6,37 @@ expression: "Lexer::new(0, r#\"{ \" \" + music}\"#.to_string()).collect_vec()"
[
Ok(
(
0,
2,
SimplExpr(
"{ \"\u{f001} \" + music}",
[
(
2,
StringLit(
[
(
2,
Literal(
"\u{f001} ",
),
10,
),
],
),
10,
),
(
11,
Plus,
12,
),
(
13,
Ident(
"music",
),
18,
),
],
),
18,
),

View file

@@ -1,16 +0,0 @@
---
source: crates/yuck/src/parser/lexer.rs
expression: "Lexer::new(0, r#\"{ \" } ' }\" }\"#.to_string()).collect_vec()"
---
[
Ok(
(
0,
SimplExpr(
"{ \" } ' }\" }",
),
11,
),
),
]

View file

@@ -41,8 +41,24 @@ expression: "Lexer::new(0, r#\"(foo + - \"text\" )\"#.to_string()).collect_vec()"
Ok(
(
9,
StrLit(
"\"text\"",
SimplExpr(
[
(
9,
StringLit(
[
(
9,
Literal(
"text",
),
15,
),
],
),
15,
),
],
),
15,
),

View file

@@ -4,5 +4,5 @@ expression: "p.parse(0, Lexer::new(0, r#\"(test \"hi\")\"#.to_string()))"
---
Ok(
(test "hi"),
(test {"hi"}),
)

View file

@@ -4,5 +4,5 @@ expression: "p.parse(0, Lexer::new(0, r#\"(test \"h\\\"i\")\"#.to_string()))"
---
Ok(
(test "h"i"),
(test {"h"i"}),
)

View file

@@ -4,5 +4,5 @@ expression: "p.parse(0, Lexer::new(0, r#\"(test \" hi \")\"#.to_string()))"
---
Ok(
(test " hi "),
(test {" hi "}),
)

View file

@@ -4,5 +4,5 @@ expression: "p.parse(0, Lexer::new(0, \"\\\"h\\\\\\\"i\\\"\".to_string()))"
---
Ok(
"h"i",
{"h"i"},
)

View file

@@ -3,6 +3,7 @@
(workspaces)
(music)
(sidestuff)))
(defwidget sidestuff []
(box :class "sidestuff" :orientation "h" :space-evenly false :halign "end"
(slider-vol)
@@ -23,12 +24,12 @@
(defwidget music []
(box :class "music" :orientation "h" :space-evenly false :halign "center"
{ ' ' + music}))
' ${music}'))
(defwidget slider-vol []
(box :class "slider-vol" :orientation "h" :space-evenly "false"
(box :class "label-vol" ""
(scale :min 0 :max 101 :value volume :onchange "amixer -D pulse sset Master {}%"))))
(scale :min 0 :max 101 :value volume :onchange "amixer -D pulse sset Master \{}%"))))
(defwidget slider-ram []
(box :orientation "h" :class "slider-ram" :space-evenly false
@@ -38,14 +39,10 @@
(defwidget time []
(box :class "time"
{"{hour}:{min} {month + "the month" * 2} {number_day}, {year_full}"}))
;{hour + ":" + min + " " + month + " " + number_day + ", " + year_full}))
"${hour}:${min} ${month} ${number_day}, ${year_full}"))
(defvar music "bruh")
;(defpoll music :interval "5s" "playerctl metadata --format '{{ artist }} - {{ title }}' || true")
(defvar volume "20")
;(defpoll volume :interval "16s" "scripts/getvol")
(defpoll music :interval "5s" "playerctl metadata --format '{{ artist }} - {{ title }}' || true")
(defpoll volume :interval "1s" "scripts/getvol")
(defpoll number_day :interval "5h" "date '+%d'")
(defpoll month :interval "10h" "date '+%b'")
@@ -61,5 +58,3 @@
:geometry (geometry :x "0%" :y "0%" :width "100%" :height "4%")
:reserve (struts :side "top" :distance "4%")
(bar))
; asdf

View file

@@ -1,2 +1,2 @@
#!/bin/sh
amixer -D pulse sget Master | grep 'Left:' | awk -F'[][]' '{ print $2 }' | tr -d '%'
amixer -D pulse sget Master | grep 'Left:' | awk -F'[][]' '{ print $2 }' | tr -d '%' | head -1