From cffe9f4085cf28e9a1e62346cfb31ed0470a49ff Mon Sep 17 00:00:00 2001
From: Melody Horn
Date: Tue, 6 Apr 2021 15:30:47 -0600
Subject: appease our wire lord and savior

---
 src/main.rs                     |  2 +-
 src/makefile/functions.rs       | 10 +++++++---
 src/makefile/inference_rules.rs |  2 +-
 src/makefile/input.rs           |  2 +-
 src/makefile/macro.rs           |  5 +----
 src/makefile/token.rs           | 10 +++++-----
 6 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index c3cedee..4b86307 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -84,7 +84,7 @@ fn main() -> Result<()> {
         };
 
         for target in targets {
-            makefile.update_target(&target)?;
+            makefile.update_target(target)?;
         }
     }
     Ok(())
diff --git a/src/makefile/functions.rs b/src/makefile/functions.rs
index a16c268..e6efd1c 100644
--- a/src/makefile/functions.rs
+++ b/src/makefile/functions.rs
@@ -219,7 +219,11 @@ mod text {
         let n = macros.expand(n)?;
         let n: usize = n.parse().wrap_err("while calling `word`")?;
         let text = macros.expand(text)?;
-        Ok(text.split_whitespace().nth(n + 1).unwrap_or("").to_owned())
+        Ok(text
+            .split_whitespace()
+            .nth(n.saturating_add(1))
+            .unwrap_or("")
+            .to_owned())
     }
 
     pub fn words(macros: &MacroSet, words: &TokenString) -> Result {
@@ -655,7 +659,7 @@ mod test {
                 &[
                     TokenString::text(""),
                     TokenString::text("yeet"),
-                    "$(error fail)".parse().unwrap()
+                    "$(error fail)".parse()?
                 ],
                 &MacroSet::new()
             )?,
@@ -681,7 +685,7 @@ mod test {
                 &[
                     TokenString::text("maybe"),
                     TokenString::text(""),
-                    "$(error fail)".parse().unwrap()
+                    "$(error fail)".parse()?
                 ],
                 &MacroSet::new()
             )?,
diff --git a/src/makefile/inference_rules.rs b/src/makefile/inference_rules.rs
index 57756c9..397e651 100644
--- a/src/makefile/inference_rules.rs
+++ b/src/makefile/inference_rules.rs
@@ -72,7 +72,7 @@ mod test {
 
     #[test]
     fn suffix_match() -> R {
-        let rule = InferenceRule::new_suffix(".o".to_string(), ".c".to_string(), vec![]);
+        let rule = InferenceRule::new_suffix(".o".to_owned(), ".c".to_owned(), vec![]);
         assert!(rule.matches("foo.o")?);
         assert!(rule.matches("dir/foo.o")?);
         Ok(())
diff --git a/src/makefile/input.rs b/src/makefile/input.rs
index 601a159..13ced8e 100644
--- a/src/makefile/input.rs
+++ b/src/makefile/input.rs
@@ -348,7 +348,7 @@ impl<'a, 'parent, R: BufRead> MakefileReader<'a, 'parent, R> {
                 Err(err) => return Some((n, Err(err))),
             };
             let line = COMMENT.replace(&line, "");
-            line_pieces.push(line.trim_start().to_string());
+            line_pieces.push(line.trim_start().to_owned());
         }
     }
     let line = line_pieces.join(escaped_newline_replacement);
diff --git a/src/makefile/macro.rs b/src/makefile/macro.rs
index 1c8bd01..2c8747c 100644
--- a/src/makefile/macro.rs
+++ b/src/makefile/macro.rs
@@ -304,10 +304,7 @@ mod test {
                 eagerly_expanded: false,
             },
         );
-        assert_eq!(
-            macros.expand(&"$(oof:;=?)".parse().unwrap())?,
-            "bruh? swag? yeet?"
-        );
+        assert_eq!(macros.expand(&"$(oof:;=?)".parse()?)?, "bruh? swag? yeet?");
         Ok(())
     }
 }
diff --git a/src/makefile/token.rs b/src/makefile/token.rs
index 31adcfc..63c48d9 100644
--- a/src/makefile/token.rs
+++ b/src/makefile/token.rs
@@ -192,21 +192,21 @@ enum Delimiter {
 }
 
 impl Delimiter {
-    fn start(&self) -> &'static str {
+    const fn start(&self) -> &'static str {
         match self {
             Self::Parens => "(",
             Self::Braces => "{",
         }
     }
 
-    fn start_char(&self) -> char {
+    const fn start_char(&self) -> char {
         match self {
             Self::Parens => '(',
             Self::Braces => '{',
         }
     }
 
-    fn end(&self) -> &'static str {
+    const fn end(&self) -> &'static str {
         match self {
             Self::Parens => ")",
             Self::Braces => "}",
@@ -317,7 +317,7 @@ fn text_but_not<'a, E: Err<'a>>(
 ) -> impl FnMut(&'a str) -> IResult<&'a str, TokenString, E> {
     map(
         take_till1(move |c| c == '$' || ends.contains(&c)),
-        |x: &str| TokenString::text(x), // TODO don't allocate an entire Vec for that
+        TokenString::text, // TODO don't allocate an entire Vec for that
     )
 }
 
@@ -349,7 +349,7 @@ fn single_token_but_not<'a, E: Err<'a>>(
     alt((
         text_but_not(tbn_ends),
         macro_expansion,
-        nested_delimiters(ends.clone(), context),
+        nested_delimiters(ends, context),
     ))
 }
 
-- 
cgit v1.2.3