Remove some unnecessary allocations (#480)

Niclas 2022-07-02 22:00:49 +00:00 committed by GitHub
parent 0b0715fd50
commit b2f60a1f43
3 changed files with 53 additions and 51 deletions
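In short: the battery code stops allocating new Strings just to strip trailing newlines (str::replace builds an owned String, str::trim_end_matches only returns a borrowed &str), and the two lexers stop building their regex patterns with format! and regex::escape at runtime. A minimal, standalone sketch of the first point (the example value is invented, not taken from the diff):

fn main() {
    let raw = "4200000\n"; // e.g. the contents of charge_now under /sys/class/power_supply

    // Old approach: allocates a fresh String just to drop one byte.
    let owned: String = raw.replace("\n", "");

    // New approach: borrows a subslice of the original buffer, no allocation.
    let borrowed: &str = raw.trim_end_matches('\n');

    assert_eq!(owned, borrowed);
    let micro_amp_hours: f64 = borrowed.parse().unwrap();
    println!("{micro_amp_hours}");
}

The diff writes the pattern as a closure, trim_end_matches(|c| c == '\n'), which behaves the same as passing the char '\n' directly.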

View file

@@ -119,8 +119,8 @@ pub fn get_battery_capacity() -> Result<String> {
json.push_str(&format!(
r#"{:?}: {{ "status": "{}", "capacity": {} }},"#,
i.file_name().context("couldn't convert file name to rust string")?,
-s.replace("\n", ""),
-o.replace("\n", "")
+s.trim_end_matches(|c| c == '\n'),
+o.trim_end_matches(|c| c == '\n')
));
if let (Ok(t), Ok(c), Ok(v)) = (
read_to_string(i.join("charge_full")),
@@ -128,13 +128,15 @@ pub fn get_battery_capacity() -> Result<String> {
read_to_string(i.join("voltage_now")),
) {
// (uAh / 1000000) * U = p and that / one million so that we have microwatt
-current +=
-((c.replace("\n", "").parse::<f64>()? / 1000000_f64) * v.replace("\n", "").parse::<f64>()?) / 1000000_f64;
-total +=
-((t.replace("\n", "").parse::<f64>()? / 1000000_f64) * v.replace("\n", "").parse::<f64>()?) / 1000000_f64;
+current += ((c.trim_end_matches(|c| c == '\n').parse::<f64>()? / 1000000_f64)
+* v.trim_end_matches(|c| c == '\n').parse::<f64>()?)
+/ 1000000_f64;
+total += ((t.trim_end_matches(|c| c == '\n').parse::<f64>()? / 1000000_f64)
+* v.trim_end_matches(|c| c == '\n').parse::<f64>()?)
+/ 1000000_f64;
} else if let (Ok(t), Ok(c)) = (read_to_string(i.join("energy_full")), read_to_string(i.join("energy_now"))) {
-current += c.replace("\n", "").parse::<f64>()?;
-total += t.replace("\n", "").parse::<f64>()?;
+current += c.trim_end_matches(|c| c == '\n').parse::<f64>()?;
+total += t.trim_end_matches(|c| c == '\n').parse::<f64>()?;
} else {
log::warn!(
"Failed to get/calculate uWh: the total_avg value of the battery magic var will probably be a garbage \

View file

@@ -2,7 +2,7 @@ use std::str::pattern::Pattern;
use eww_shared_util::{Span, Spanned};
use once_cell::sync::Lazy;
-use regex::{escape, Regex, RegexSet};
+use regex::{Regex, RegexSet};
pub type Sp<T> = (usize, T, usize);
@@ -56,12 +56,12 @@ pub enum Token {
}
macro_rules! regex_rules {
-($( $regex:expr => $token:expr),*) => {
+($( $regex:literal => $token:expr),*) => {
static LEXER_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| { RegexSet::new(&[
-$(format!("^{}", $regex)),*
+$(concat!("^", $regex)),*
]).unwrap()});
static LEXER_REGEXES: Lazy<Vec<Regex>> = Lazy::new(|| { vec![
-$(Regex::new(&format!("^{}", $regex)).unwrap()),*
+$(Regex::new(concat!("^", $regex)).unwrap()),*
]});
static LEXER_FNS: Lazy<Vec<Box<dyn Fn(String) -> Token + Sync + Send>>> = Lazy::new(|| { vec![
$(Box::new($token)),*
@@ -74,37 +74,37 @@ pub static STR_INTERPOLATION_START: &str = "${";
pub static STR_INTERPOLATION_END: &str = "}";
regex_rules! {
-escape(r"+") => |_| Token::Plus,
-escape(r"-") => |_| Token::Minus,
-escape(r"*") => |_| Token::Times,
-escape(r"/") => |_| Token::Div,
-escape(r"%") => |_| Token::Mod,
-escape(r"==") => |_| Token::Equals,
-escape(r"!=") => |_| Token::NotEquals,
-escape(r"&&") => |_| Token::And,
-escape(r"||") => |_| Token::Or,
-escape(r">=") => |_| Token::GE,
-escape(r"<=") => |_| Token::LE,
-escape(r">") => |_| Token::GT,
-escape(r"<") => |_| Token::LT,
-escape(r"?:") => |_| Token::Elvis,
-escape(r"=~") => |_| Token::RegexMatch,
+r"\+" => |_| Token::Plus,
+r"-" => |_| Token::Minus,
+r"\*" => |_| Token::Times,
+r"/" => |_| Token::Div,
+r"%" => |_| Token::Mod,
+r"==" => |_| Token::Equals,
+r"!=" => |_| Token::NotEquals,
+r"&&" => |_| Token::And,
+r"\|\|" => |_| Token::Or,
+r">=" => |_| Token::GE,
+r"<=" => |_| Token::LE,
+r">" => |_| Token::GT,
+r"<" => |_| Token::LT,
+r"\?:" => |_| Token::Elvis,
+r"=~" => |_| Token::RegexMatch,
-escape(r"!" ) => |_| Token::Not,
-escape(r"-" ) => |_| Token::Negative,
+r"!" => |_| Token::Not,
+r"-" => |_| Token::Negative,
-escape(r",") => |_| Token::Comma,
-escape(r"?") => |_| Token::Question,
-escape(r":") => |_| Token::Colon,
-escape(r"(") => |_| Token::LPren,
-escape(r")") => |_| Token::RPren,
-escape(r"[") => |_| Token::LBrack,
-escape(r"]") => |_| Token::RBrack,
-escape(r"{") => |_| Token::LCurl,
-escape(r"}") => |_| Token::RCurl,
-escape(r".") => |_| Token::Dot,
-escape(r"true") => |_| Token::True,
-escape(r"false") => |_| Token::False,
+r"," => |_| Token::Comma,
+r"\?" => |_| Token::Question,
+r":" => |_| Token::Colon,
+r"\(" => |_| Token::LPren,
+r"\)" => |_| Token::RPren,
+r"\[" => |_| Token::LBrack,
+r"\]" => |_| Token::RBrack,
+r"\{" => |_| Token::LCurl,
+r"\}" => |_| Token::RCurl,
+r"\." => |_| Token::Dot,
+r"true" => |_| Token::True,
+r"false" => |_| Token::False,
r"\s+" => |_| Token::Skip,
r";.*"=> |_| Token::Comment,
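Same trick in both lexers: with the matcher changed from $regex:expr to $regex:literal, the macro can glue the "^" anchor on with concat! at compile time instead of calling format! at runtime, and the hand-escaped raw literals (r"\+", r"\|\|", ...) drop the runtime regex::escape calls as well. A reduced, hypothetical sketch of the same idea (names invented, assumes the regex and once_cell crates):

use once_cell::sync::Lazy;
use regex::RegexSet;

// `$pat:literal` lets concat! run at macro-expansion time, so initialising
// the static builds no intermediate Strings for the patterns themselves.
macro_rules! anchored_set {
    ($($pat:literal),* $(,)?) => {
        Lazy::new(|| RegexSet::new([$(concat!("^", $pat)),*]).unwrap())
    };
}

static TOKENS: Lazy<RegexSet> = anchored_set!(r"\+", r"-", r"\*", r"\s+");

fn main() {
    assert!(TOKENS.is_match("+ 1"));
}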

View file

@@ -1,5 +1,5 @@
use once_cell::sync::Lazy;
-use regex::{escape, Regex, RegexSet};
+use regex::{Regex, RegexSet};
use simplexpr::parser::lexer::{STR_INTERPOLATION_END, STR_INTERPOLATION_START};
use super::parse_error;
@@ -41,12 +41,12 @@ impl std::fmt::Display for Token {
}
macro_rules! regex_rules {
-($( $regex:expr => $token:expr),*) => {
+($( $regex:literal => $token:expr),*) => {
static LEXER_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| { RegexSet::new(&[
-$(format!("^{}", $regex)),*
+$(concat!("^", $regex)),*
]).unwrap()});
static LEXER_REGEXES: Lazy<Vec<Regex>> = Lazy::new(|| { vec![
-$(Regex::new(&format!("^{}", $regex)).unwrap()),*
+$(Regex::new(concat!("^", $regex)).unwrap()),*
]});
static LEXER_FNS: Lazy<Vec<Box<dyn Fn(String) -> Token + Sync + Send>>> = Lazy::new(|| { vec![
$(Box::new($token)),*
@@ -57,12 +57,12 @@ macro_rules! regex_rules {
static ESCAPE_REPLACE_REGEX: Lazy<regex::Regex> = Lazy::new(|| Regex::new(r"\\(.)").unwrap());
regex_rules! {
-escape("(") => |_| Token::LPren,
-escape(")") => |_| Token::RPren,
-escape("[") => |_| Token::LBrack,
-escape("]") => |_| Token::RBrack,
-escape("true") => |_| Token::True,
-escape("false") => |_| Token::False,
+r"\(" => |_| Token::LPren,
+r"\)" => |_| Token::RPren,
+r"\[" => |_| Token::LBrack,
+r"\]" => |_| Token::RBrack,
+r"true" => |_| Token::True,
+r"false" => |_| Token::False,
r#"[+-]?(?:[0-9]+[.])?[0-9]+"# => |x| Token::NumLit(x),
r#":[^\s\)\]}]+"# => |x| Token::Keyword(x),
r#"[a-zA-Z_!\?<>/\.\*-\+\-][^\s{}\(\)\[\](){}]*"# => |x| Token::Symbol(x),
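One thing worth noting about the hand-written literals: they are meant to be exactly what regex::escape used to produce at runtime, so the token grammar itself is unchanged. A small standalone sanity check (my own example, assumes only the regex crate):

fn main() {
    // regex::escape backslash-escapes metacharacters and leaves everything else alone.
    assert_eq!(regex::escape("("), r"\(");
    assert_eq!(regex::escape("||"), r"\|\|");
    assert_eq!(regex::escape("?:"), r"\?:");
    assert_eq!(regex::escape("true"), "true");
}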