From 82278c3a5aa93204c963a63cc3cfefea1d0fb3fd Mon Sep 17 00:00:00 2001
From: Melody Horn
Date: Sun, 28 Mar 2021 01:45:52 -0600
Subject: lay down boilerplate for function calls

of all the obnoxious GNUisms, this will probably wind up being the
largest. especially if huge makefiles (e.g. Linux) use most of the
functions that GNU offers, meaning we have to implement most of them to
be Linuxable
---
 src/makefile/functions.rs | 31 +++++++++++++++
 src/makefile/mod.rs       |  4 ++
 src/makefile/token.rs     | 96 +++++++++++++++++++++++++++++++----------------
 3 files changed, 98 insertions(+), 33 deletions(-)
 create mode 100644 src/makefile/functions.rs

diff --git a/src/makefile/functions.rs b/src/makefile/functions.rs
new file mode 100644
index 0000000..bc39886
--- /dev/null
+++ b/src/makefile/functions.rs
@@ -0,0 +1,31 @@
+use super::token::TokenString;
+
+pub(crate) fn call(name: &str, args: &[TokenString]) -> TokenString {
+    match name {
+        // Text Functions
+        "filter" => todo!(),
+        "filter-out" => todo!(),
+        "sort" => todo!(),
+
+        // File Name Functions
+        "notdir" => todo!(),
+        "basename" => todo!(),
+        "addprefix" => todo!(),
+        "wildcard" => todo!(),
+
+        // foreach
+        "foreach" => todo!(),
+
+        // call
+        "call" => todo!(),
+
+        // eval
+        "eval" => todo!(),
+
+        // shell
+        "shell" => todo!(),
+
+        // fallback
+        _ => panic!("function not implemented: {}", name),
+    }
+}
diff --git a/src/makefile/mod.rs b/src/makefile/mod.rs
index 0d4cded..1fc8303 100644
--- a/src/makefile/mod.rs
+++ b/src/makefile/mod.rs
@@ -14,6 +14,7 @@ use crate::args::Args;
 
 mod command_line;
 mod conditional;
+mod functions;
 mod inference_rules;
 mod target;
 mod token;
@@ -592,6 +593,9 @@ impl<'a> Makefile<'a> {
                     };
                     result.push_str(&macro_value);
                 }
+                Token::FunctionCall { name, args } => {
+                    result.push_str(&self.expand_macros(&functions::call(name, args), None));
+                }
             }
         }
         result
diff --git a/src/makefile/token.rs b/src/makefile/token.rs
index 86c442b..eca40ed 100644
--- a/src/makefile/token.rs
+++ b/src/makefile/token.rs
@@ -4,9 +4,9 @@ use std::str::FromStr;
 use nom::{
     branch::alt,
     bytes::complete::{tag, take_till1, take_while1},
-    character::complete::anychar,
+    character::complete::{anychar, space1},
     combinator::{all_consuming, map, opt, verify},
-    multi::many1,
+    multi::{many1, separated_list1},
     sequence::{delimited, pair, preceded, separated_pair},
     Finish, IResult,
 };
@@ -110,6 +110,10 @@ pub(crate) enum Token {
         name: String,
         replacement: Option<(TokenString, TokenString)>,
     },
+    FunctionCall {
+        name: String,
+        args: Vec<TokenString>,
+    },
 }
 
 impl fmt::Display for Token {
@@ -124,34 +128,71 @@ impl fmt::Display for Token {
                 name,
                 replacement: Some((r1, r2)),
             } => write!(f, "$({}:{}={})", name, r1, r2),
+            Self::FunctionCall { name, args } => write!(
+                f,
+                "$({} {})",
+                name,
+                args.iter()
+                    .map(|x| format!("{}", x))
+                    .collect::<Vec<String>>()
+                    .join(", ")
+            ),
         }
     }
 }
 
-fn macro_name(input: &str) -> IResult<&str, &str> {
+fn macro_function_name(input: &str) -> IResult<&str, &str> {
     // POSIX says "periods, underscores, digits, and alphabetics from the portable character set"
-    take_while1(|c: char| c == '.' || c == '_' || c.is_alphanumeric())(input)
+    // one GNUism is a function with a - in the name
+    take_while1(|c: char| c == '.' || c == '_' || c.is_alphanumeric() || c == '-')(input)
 }
 
-fn macro_expansion_body<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
+fn macro_expansion_body<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> + 'a {
     let subst = preceded(
         tag(":"),
-        separated_pair(tokens_but_not('='), tag("="), tokens_but_not(end)),
+        separated_pair(
+            tokens_but_not(vec!['=']),
+            tag("="),
+            tokens_but_not(vec![end]),
+        ),
     );
-    map(pair(macro_name, opt(subst)), |(name, replacement)| {
-        Token::MacroExpansion {
+    map(
+        pair(macro_function_name, opt(subst)),
+        |(name, replacement)| Token::MacroExpansion {
             name: name.into(),
             replacement,
-        }
-    })
+        },
+    )
+}
+
+fn function_call_body<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
+    map(
+        separated_pair(
+            macro_function_name,
+            space1,
+            separated_list1(tag(","), tokens_but_not(vec![',', end])),
+        ),
+        |(name, args)| Token::FunctionCall {
+            name: name.into(),
+            args,
+        },
+    )
 }
 
 fn parens_macro_expansion(input: &str) -> IResult<&str, Token> {
-    delimited(tag("$("), macro_expansion_body(')'), tag(")"))(input)
+    delimited(
+        tag("$("),
+        alt((macro_expansion_body(')'), function_call_body(')'))),
+        tag(")"),
+    )(input)
 }
 
 fn braces_macro_expansion(input: &str) -> IResult<&str, Token> {
-    delimited(tag("${"), macro_expansion_body('}'), tag("}"))(input)
+    delimited(
+        tag("${"),
+        alt((macro_expansion_body('}'), function_call_body(')'))),
+        tag("}"),
+    )(input)
 }
 
 fn tiny_macro_expansion(input: &str) -> IResult<&str, Token> {
@@ -176,41 +217,30 @@ fn macro_expansion(input: &str) -> IResult<&str, Token> {
     ))(input)
 }
 
-fn text(input: &str) -> IResult<&str, Token> {
-    map(take_till1(|c| c == '$'), |x: &str| Token::Text(x.into()))(input)
-}
-
-fn text_but_not<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
-    map(take_till1(move |c| c == '$' || c == end), |x: &str| {
-        Token::Text(x.into())
-    })
+fn text_but_not<'a>(ends: Vec<char>) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
+    map(
+        take_till1(move |c| c == '$' || ends.contains(&c)),
+        |x: &str| Token::Text(x.into()),
+    )
 }
 
-fn single_token(input: &str) -> IResult<&str, Token> {
-    alt((text, macro_expansion))(input)
-}
-
-fn single_token_but_not<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
-    alt((text_but_not(end), macro_expansion))
+fn single_token_but_not<'a>(ends: Vec<char>) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
+    alt((text_but_not(ends), macro_expansion))
 }
 
 fn empty_tokens(input: &str) -> IResult<&str, TokenString> {
     map(tag(""), |_| TokenString(vec![Token::Text(String::new())]))(input)
 }
 
-fn tokens(input: &str) -> IResult<&str, TokenString> {
-    alt((map(many1(single_token), TokenString), empty_tokens))(input)
-}
-
-fn tokens_but_not<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, TokenString> {
+fn tokens_but_not<'a>(ends: Vec<char>) -> impl FnMut(&'a str) -> IResult<&'a str, TokenString> {
     alt((
-        map(many1(single_token_but_not(end)), TokenString),
+        map(many1(single_token_but_not(ends)), TokenString),
        empty_tokens,
     ))
 }
 
 fn full_text_tokens(input: &str) -> IResult<&str, TokenString> {
-    all_consuming(tokens)(input)
+    all_consuming(tokens_but_not(vec![]))(input)
 }
 
 pub(crate) fn tokenize(input: &str) -> TokenString {
-- 
cgit v1.2.3
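
The functions::call dispatcher added above only routes GNU make function names to
todo!() stubs for now. As a rough sketch of the semantics two of those stubs will
eventually need, the standalone example below implements GNU make's sort (lexical
sort plus de-duplication) and a literal-word-only filter over plain strings rather
than this crate's TokenString type. The helper names sort_words and filter_words
are invented for illustration and are not part of this commit; filter's '%'
wildcard matching is deliberately left out.

// Standalone sketch only; sort_words and filter_words are hypothetical
// helpers, not part of the patch above.

/// GNU make's $(sort list): sort words lexically and drop duplicates.
fn sort_words(list: &str) -> String {
    let mut words: Vec<&str> = list.split_whitespace().collect();
    words.sort_unstable();
    words.dedup();
    words.join(" ")
}

/// GNU make's $(filter pattern...,text), restricted to literal words
/// (no '%' wildcard support in this sketch).
fn filter_words(patterns: &str, text: &str) -> String {
    let keep: Vec<&str> = patterns.split_whitespace().collect();
    text.split_whitespace()
        .filter(|word| keep.contains(word))
        .collect::<Vec<&str>>()
        .join(" ")
}

fn main() {
    assert_eq!(sort_words("foo bar lose foo"), "bar foo lose");
    assert_eq!(filter_words("foo bar", "foo baz bar qux"), "foo bar");
}

Real implementations would presumably slot into the match in functions.rs, taking
the raw argument TokenStrings and returning a TokenString that expand_macros then
expands, per the call site added in mod.rs.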