use std::cell::Cell;
use std::collections::HashMap;
use std::error::Error as StdError;
use std::fs::File;
use std::io::{BufRead, BufReader, Cursor, Error as IoError, ErrorKind as IoErrorKind, Lines};
use std::iter::Peekable;
use std::path::Path;

use eyre::{bail, eyre, Context, Result};
use lazy_static::lazy_static;
use regex::Regex;

use crate::args::Args;

use super::command_line::CommandLine;
#[cfg(feature = "full")]
use super::conditional::{Line as ConditionalLine, State as ConditionalState};
use super::inference_rules::InferenceRule;
#[cfg(feature = "full")]
use super::r#macro::ExportConfig;
use super::r#macro::{Macro, Set as MacroSet, Source as MacroSource};
use super::target::Target;
use super::token::{tokenize, Token, TokenString};

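/// Rough classification of a logical makefile line, as guessed by `of` before
/// (and, for `Unknown` lines, again after) macro expansion: a line whose first
/// `:` comes before any assignment operator (e.g. `foo.o: foo.c`) is a `Rule`,
/// a line whose assignment operator comes first (e.g. `CC = cc`) is a `Macro`,
/// and anything else (e.g. `$(MYSTERY) stuff`) stays `Unknown` until macros
/// are expanded.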
enum LineType {
    Rule,
    Macro,
    Unknown,
}

impl LineType {
    fn of(line_tokens: &TokenString) -> Self {
        #[cfg(feature = "full")]
        if line_tokens.starts_with("define ") {
            return Self::Macro;
        }

        for token in line_tokens.tokens() {
            if let Token::Text(text) = token {
                let colon_idx = text.find(':');
                #[cfg(not(feature = "full"))]
                let equals_idx = text.find('=');
                #[cfg(feature = "full")]
                let equals_idx = ["=", ":=", "::=", "?=", "+="]
                    .iter()
                    .filter_map(|p| text.find(p))
                    .min();
                match (colon_idx, equals_idx) {
                    (Some(_), None) => {
                        return Self::Rule;
                    }
                    (Some(c), Some(e)) if c < e => {
                        return Self::Rule;
                    }
                    (None, Some(_)) => {
                        return Self::Macro;
                    }
                    (Some(c), Some(e)) if e <= c => {
                        return Self::Macro;
                    }
                    _ => {}
                }
            }
        }
        Self::Unknown
    }
}

#[derive(Debug)]
struct InferenceMatch<'a> {
    s1: &'a str,
    s2: &'a str,
}

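/// Decides whether a rule line actually describes a POSIX suffix (inference)
/// rule: a single target made of one or two suffixes and no prerequisites,
/// e.g. `.c.o:` (build a `.o` from a `.c`) or `.sh:` (single-suffix rule).
/// Special targets such as `.PHONY:` or `.SUFFIXES:` look similar but are
/// excluded by the `SPECIAL_TARGET` pattern.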
fn inference_match<'a>(
    targets: &[&'a str],
    prerequisites: &[String],
) -> Option<InferenceMatch<'a>> {
    lazy_static! {
        static ref INFERENCE_RULE: Regex =
            Regex::new(r"^(?P<s2>(\.[^/.]+)?)(?P<s1>\.[^/.]+)$").unwrap();
        static ref SPECIAL_TARGET: Regex = Regex::new(r"^\.[A-Z]+$").unwrap();
    }

    let inference_match = INFERENCE_RULE.captures(targets[0]);
    let special_target_match = SPECIAL_TARGET.captures(targets[0]);
    let inference_rule = targets.len() == 1
        && prerequisites.is_empty()
        && inference_match.is_some()
        && special_target_match.is_none();
    if inference_rule {
        inference_match.map(|x| InferenceMatch {
            s1: x.name("s1").unwrap().as_str(),
            s2: x.name("s2").unwrap().as_str(),
        })
    } else {
        None
    }
}

struct LineNumbers<Inner, E = IoError>(Inner, usize)
where
    E: StdError + Send + Sync + 'static,
    Inner: Iterator<Item = Result<String, E>>;

impl<Inner, E> LineNumbers<Inner, E>
where
    E: StdError + Send + Sync + 'static,
    Inner: Iterator<Item = Result<String, E>>,
{
    fn new(inner: Inner) -> Self {
        Self(inner, 0)
    }
}

impl<Inner, E> Iterator for LineNumbers<Inner, E>
where
    E: StdError + Send + Sync + 'static,
    Inner: Iterator<Item = Result<String, E>>,
{
    type Item = (usize, Result<String>);

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(|x| {
            self.1 = self.1.saturating_add(1);
            (
                self.1,
                x.with_context(|| format!("failed to read line {} of makefile", self.1)),
            )
        })
    }
}

trait IteratorExt<E: StdError + Send + Sync + 'static>: Iterator<Item = Result<String, E>> {
    fn line_numbered(self) -> LineNumbers<Self, E>
    where
        Self: Sized,
    {
        LineNumbers::new(self)
    }
}

impl<E: StdError + Send + Sync + 'static, I: Iterator<Item = Result<String, E>>> IteratorExt<E>
    for I
{
}

#[derive(Clone, Copy)]
struct NextLineSettings {
    escaped_newline_replacement: &'static str,
    peeking: bool,
    strip_comments: bool,
}

impl Default for NextLineSettings {
    fn default() -> Self {
        Self {
            escaped_newline_replacement: " ",
            peeking: false,
            strip_comments: true,
        }
    }
}

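/// Streaming reader for a single makefile (or an `include`d child makefile).
///
/// As lines are consumed it accumulates inference rules, macro definitions,
/// targets, and the names of included makefiles that could not be found;
/// `finish()` then turns it into a plain `FinishedMakefileReader`.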
pub struct MakefileReader<'a, 'parent, R: BufRead> {
    file_name: String,
    pub inference_rules: Vec<InferenceRule>,
    pub macros: MacroSet<'parent, 'static>,
    pub targets: HashMap<String, Target>,
    built_in_targets: HashMap<String, Target>,
    pub first_non_special_target: Option<String>,
    pub failed_includes: Vec<String>,
    args: &'a Args,
    lines_iter: Peekable<LineNumbers<Lines<R>>>,
    // join with escaped_newline_replacement to get the actual line
    pending_line: Option<(usize, Vec<String>)>,
    #[cfg(feature = "full")]
    conditional_stack: Vec<ConditionalState>,
}

impl<'a, 'parent> MakefileReader<'a, 'parent, BufReader<File>> {
    pub fn read_file(
        args: &'a Args,
        mut macros: MacroSet<'parent, 'static>,
        path: impl AsRef<Path>,
    ) -> Result<Self> {
        #[cfg(feature = "full")]
        if let Some(mut old_makefile_list) = macros.pop("MAKEFILE_LIST") {
            old_makefile_list.text.extend(TokenString::text(format!(
                " {}",
                path.as_ref().to_string_lossy()
            )));
            macros.set("MAKEFILE_LIST".to_owned(), old_makefile_list);
        } else {
            macros.set(
                "MAKEFILE_LIST".to_owned(),
                Macro {
                    source: MacroSource::Builtin,
                    text: TokenString::text(path.as_ref().to_string_lossy()),
                    #[cfg(feature = "full")]
                    eagerly_expanded: false,
                },
            );
        }

        let file = File::open(path.as_ref());
        // TODO handle errors
        let file = file.context("couldn't open makefile!")?;
        let file_reader = BufReader::new(file);
        Self::read(args, macros, file_reader, path.as_ref().to_string_lossy())
    }
}

impl<'a, 'parent, R: BufRead> MakefileReader<'a, 'parent, R> {
    pub fn read(
        args: &'a Args,
        macros: MacroSet<'parent, 'static>,
        source: R,
        name: impl Into<String>,
    ) -> Result<Self> {
        let name = name.into();
        let mut reader = Self {
            file_name: name.clone(),
            inference_rules: Vec::new(),
            macros,
            targets: HashMap::new(),
            built_in_targets: HashMap::new(),
            first_non_special_target: None,
            failed_includes: Vec::new(),
            args,
            lines_iter: source.lines().line_numbered().peekable(),
            pending_line: None,
            #[cfg(feature = "full")]
            conditional_stack: Vec::new(),
        };

        // TODO be smart about this instead, please
        if !args.no_builtin_rules {
            reader.built_in_targets.insert(
                ".SUFFIXES".to_owned(),
                Target {
                    name: ".SUFFIXES".into(),
                    prerequisites: vec![".o", ".c", ".y", ".l", ".a", ".sh", ".f"]
                        .into_iter()
                        .map(String::from)
                        .collect(),
                    commands: vec![],
                    stem: None,
                    already_updated: Cell::new(false),
                },
            );
        }

        reader
            .read_all()
            .wrap_err_with(|| format!("while reading {}", name))?;

        Ok(reader)
    }

    fn read_all(&mut self) -> Result<()> {
        let topmost = NextLineSettings {
            escaped_newline_replacement: " ",
            ..Default::default()
        };
        while let Some((line_number, line)) = self.next_line(topmost) {
            let line = line?;

            // handle include lines
            let original_line = &line;
            if let Some(line) = line
                .strip_prefix("include ")
                .or(line.strip_prefix("-include "))
            {
                // remove extra leading space
                let line = line.trim_start();
                let line = self.expand_macros(&tokenize(line)?)?;
                let fields = line.split_whitespace();
                // POSIX says we only have to handle a single filename, but GNU make
                // handles arbitrarily many filenames, and it's not like that's more work
                for field in fields {
                    let child_macros = self.macros.with_overlay();
                    let child = MakefileReader::read_file(self.args, child_macros, field)
                        .with_context(|| format!("while including {}", field));
                    match child {
                        Ok(child) => {
                            let child = child.finish();
                            self.extend(child);
                        }
                        Err(err) => {
                            if !original_line.starts_with('-') {
                                match err.downcast_ref::<IoError>() {
                                    Some(err) if err.kind() == IoErrorKind::NotFound => {
                                        log::error!(
                                            "{}:{}: included makefile {} not found",
                                            &self.file_name,
                                            line_number,
                                            field,
                                        );
                                        self.failed_includes.push(field.to_owned());
                                    }
                                    _ => {
                                        return Err(err);
                                    }
                                }
                            }
                        }
                    }
                }
                continue;
            }

            if line.trim().is_empty() {
                // handle blank lines
                continue;
            }

            // unfortunately, rules vs macros can't be determined until after
            // macro tokenizing. so that's suboptimal.
            // TODO errors
            let line_tokens: TokenString = line
                .parse()
                .with_context(|| format!("failed to parse line {}", line_number))?;
            let line_type = LineType::of(&line_tokens);

            // before we actually test it, see if it's only visible after expanding macros
            let (line_tokens, line_type) = if let LineType::Unknown = line_type {
                let line_tokens = TokenString::text(
                    self.expand_macros(&line_tokens)
                        .wrap_err_with(|| format!("while parsing line {}", line_number))?
                        .trim(),
                );
                // and let's eval whatever bullshit needs evaling
                #[cfg(feature = "full")]
                {
                    let eval = self.macros.to_eval.take();
                    for eval in eval {
                        let child_macros = self.macros.with_overlay();
                        let child =
                            MakefileReader::read(self.args, child_macros, Cursor::new(eval), "")
                                .context("while evaling")?
                                .finish();
                        self.extend(child);
                    }
                }
                let line_type = LineType::of(&line_tokens);
                (line_tokens, line_type)
            } else {
                (line_tokens, line_type)
            };

            match line_type {
                LineType::Rule => {
                    self.read_rule(&line_tokens, line_number).wrap_err_with(|| {
                        format!(
                            "while parsing rule definition starting on line {}",
                            line_number
                        )
                    })?
                }
                LineType::Macro => {
                    self.read_macro(line_tokens, line_number).wrap_err_with(|| {
                        format!(
                            "while parsing macro definition starting on line {}",
                            line_number
                        )
                    })?
                }
                LineType::Unknown => {
                    if !line_tokens.is_empty() {
                        // TODO handle assignments here
                        #[cfg(feature = "full")]
                        if line_tokens.starts_with("export") {
                            let mut line_tokens = line_tokens;
                            line_tokens.strip_prefix("export");
                            if line_tokens.is_empty() {
                                self.macros.exported = ExportConfig::all_but();
                            } else {
                                let exported = self.expand_macros(&line_tokens)?;
                                self.macros.exported.add_all(exported.split_whitespace());
                            }
                            continue;
                        } else if line_tokens.starts_with("unexport") {
                            let mut line_tokens = line_tokens;
                            line_tokens.strip_prefix("unexport");
                            if line_tokens.is_empty() {
                                self.macros.exported = ExportConfig::only();
                            } else {
                                let exported = self.expand_macros(&line_tokens)?;
                                self.macros.exported.remove_all(exported.split_whitespace());
                            }
                            continue;
                        }
                        bail!(
                            "error: line {}: unknown line \"{}\"",
                            line_number,
                            line_tokens
                        );
                    }
                }
            }
        }

        Ok(())
    }

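    /// Produces the next logical line. Comments are stripped when requested
    /// (an escaped `\#` survives as a literal `#`), backslash-continued lines
    /// are joined with `escaped_newline_replacement`, and, with the "full"
    /// feature, conditional directives (`ifeq`, `ifdef`, `else`, `endif`, ...)
    /// are resolved here so callers never see them. For reference, `read_all`
    /// joins continuations with a single space, while `read_rule` uses a
    /// backslash-plus-newline replacement so command lines keep their
    /// continuations for the shell.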
    fn next_line(&mut self, settings: NextLineSettings) -> Option<(usize, Result<String>)> {
        lazy_static! {
            static ref COMMENT: Regex = Regex::new(r"(^|[^\\])#.*$").unwrap();
        }

        let escaped_newline_replacement = settings.escaped_newline_replacement;

        if let Some((line_number, line)) = self.pending_line.take() {
            if settings.peeking {
                self.pending_line = Some((line_number, line.clone()));
            }
            let line = line.join(escaped_newline_replacement);
            let line = if settings.strip_comments {
                // TODO only do this if we were in don't-strip-comments mode before
                // TODO deduplicate
                COMMENT
                    .replace(&line, "$1")
                    .replace(r"\#", "#")
                    .trim_end()
                    .to_owned()
            } else {
                line
            };
            return Some((line_number, Ok(line)));
        }

        while let Some((line_number, line)) = self.lines_iter.next() {
            let line = match line {
                Ok(x) => x,
                Err(err) => return Some((line_number, Err(err))),
            };

            // handle comments
            let line = if settings.strip_comments {
                COMMENT
                    .replace(&line, "$1")
                    .replace(r"\#", "#")
                    .trim_end()
                    .to_owned()
            } else {
                line
            };

            // handle escaped newlines
            let mut line_pieces = vec![line];
            while line_pieces.last().map_or(false, |p| p.ends_with('\\')) {
                line_pieces.last_mut().map(|x| x.pop());
                if let Some((n, x)) = self.lines_iter.next() {
                    let line = match x {
                        Ok(x) => x,
                        Err(err) => return Some((n, Err(err))),
                    };
                    let line = if settings.strip_comments {
                        COMMENT
                            .replace(&line, "$1")
                            .replace(r"\#", "#")
                            .trim_end()
                            .to_owned()
                    } else {
                        line
                    };
                    line_pieces.push(line.trim_start().to_owned());
                }
            }
            let line = line_pieces.join(escaped_newline_replacement);

            #[cfg(feature = "full")]
            {
                let cond_line = ConditionalLine::from(&line, |t| self.expand_macros(t));
                let cond_line = match cond_line {
                    Ok(x) => x,
                    Err(err) => return Some((line_number, Err(err))),
                };
                if let Some(line) = cond_line {
                    let action = line
                        .action(
                            self.conditional_stack.last(),
                            |name| self.macros.is_defined(name),
                            |t| self.expand_macros(t),
                        )
                        .wrap_err_with(|| {
                            format!("while applying conditional on line {}", line_number)
                        });
                    let action = match action {
                        Ok(x) => x,
                        Err(err) => return Some((line_number, Err(err))),
                    };
                    action.apply_to(&mut self.conditional_stack);
                    continue;
                }

                // skip lines if we need to
                if self
                    .conditional_stack
                    .iter()
                    .any(ConditionalState::skipping)
                {
                    continue;
                }
            }

            if settings.peeking {
                self.pending_line = Some((line_number, line_pieces));
            }
            return Some((line_number, Ok(line)));
        }

        None
    }

    /// Only applies the predicate to the next physical line in the file.
    /// Doesn't apply the escaped newline replacement unless the predicate passes.
    fn next_line_if(
        &mut self,
        settings: NextLineSettings,
        predicate: impl FnOnce(&(usize, Result<String>)) -> bool,
    ) -> Option<(usize, Result<String>)> {
        let peek_settings = NextLineSettings {
            peeking: true,
            ..settings
        };
        if predicate(&self.next_line(peek_settings)?) {
            self.next_line(settings)
        } else {
            None
        }
    }

    fn special_target_has_prereq(&self, target: &str, name: &str, empty_counts: bool) -> bool {
        match self
            .targets
            .get(target)
            .or_else(|| self.built_in_targets.get(target))
        {
            Some(target) => {
                (empty_counts && target.prerequisites.is_empty())
                    || target.prerequisites.iter().any(|e| e == name)
            }
            None => false,
        }
    }

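    /// Parses one rule line. The shapes handled here include, for example,
    /// `clean: ; rm -f *.o` (inline command after `;`), `foo.o bar.o: common.h`
    /// (ordinary target rule), `.c.o:` (suffix/inference rule), and, with the
    /// "full" feature, GNU pattern rules like `%.o: %.c` and static pattern
    /// rules like `$(OBJS): %.o: %.c`. Internal macros `$@`, `$(@D)`, and
    /// `$(@F)` are expanded in the prerequisite list; rule-specific macro
    /// assignments are not implemented yet.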
    fn read_rule(&mut self, line_tokens: &TokenString, line_number: usize) -> Result<()> {
        let (targets, not_targets) = line_tokens
            .split_once(':')
            .ok_or_else(|| eyre!("read_rule couldn't find a ':' on line {}", line_number))?;
        #[cfg(feature = "full")]
        let (static_targets, targets, not_targets) = if not_targets.contains_text(":") {
            // ugh, this is probably a Static Pattern Rule
            let (pattern, not_targets) = not_targets
                .split_once(':')
                .ok_or_else(|| eyre!("bro hold the fuck up it literally just had that"))?;
            (Some(targets), pattern, not_targets)
        } else {
            (None, targets, not_targets)
        };
        let targets = self.expand_macros(&targets)?;
        let targets = targets.split_whitespace().collect::<Vec<_>>();

        let (prerequisites, mut commands) = match not_targets.split_once(';') {
            Some((prerequisites, command)) => {
                // TODO make sure escaped newlines get retroactively treated correctly here
                (prerequisites, vec![command])
            }
            None => (not_targets, vec![]),
        };

        if prerequisites.contains_text("=") {
            log::error!("rule-specific macros are not implemented yet");
            return Ok(());
        }

        let prerequisites = self
            .macros
            .with_lookup(&|macro_name: &str| {
                let macro_pieces = if macro_name.starts_with('@') {
                    // The $@ shall evaluate to the full target name of the
                    // current target.
                    targets.iter()
                } else {
                    bail!("unknown internal macro")
                };
                let macro_pieces = if macro_name.ends_with('D') {
                    macro_pieces
                        .map(|x| {
                            Path::new(x)
                                .parent()
                                .ok_or_else(|| eyre!("no parent"))
                                .map(|x| x.to_string_lossy().into())
                        })
                        .collect::<Result<Vec<_>, _>>()?
                } else if macro_name.ends_with('F') {
                    macro_pieces
                        .map(|x| {
                            Path::new(x)
                                .file_name()
                                .ok_or_else(|| eyre!("no filename"))
                                .map(|x| x.to_string_lossy().into())
                        })
                        .collect::<Result<Vec<_>, _>>()?
                } else {
                    macro_pieces.map(|&x| x.to_owned()).collect::<Vec<_>>()
                };
                Ok(macro_pieces.join(" "))
            })
            .expand(&prerequisites)?;
        let prerequisites = prerequisites
            .split_whitespace()
            .map(|x| x.into())
            .collect::<Vec<String>>();

        let settings = NextLineSettings {
            escaped_newline_replacement: "\\\n",
            strip_comments: false,
            ..Default::default()
        };
        while let Some((_, x)) = self.next_line_if(settings, |(_, x)| {
            x.as_ref()
                .ok()
                .map_or(false, |line| line.starts_with('\t') || line.is_empty())
        }) {
            let mut line = x?;
            if !line.is_empty() {
                assert!(line.starts_with('\t'));
                line.remove(0);
            }
            if line.is_empty() {
                continue;
            }
            commands.push(
                line.parse()
                    .with_context(|| format!("failed to parse line {}", line_number))?,
            );
        }
        let commands = commands
            .into_iter()
            .map(CommandLine::from)
            .collect::<Vec<_>>();

        if targets.is_empty() {
            return Ok(());
        }

        // we don't know yet if it's a target rule or an inference rule (or a GNUish "pattern rule")
        let inference_match = inference_match(&targets, &prerequisites);
        #[cfg(feature = "full")]
        let is_pattern = targets.iter().all(|x| x.contains('%'));

        #[cfg(feature = "full")]
        if is_pattern {
            let new_rule = InferenceRule {
                products: targets.into_iter().map(|x| x.to_owned()).collect(),
                prerequisites,
                commands,
            };

            if let Some(static_targets) = static_targets {
                let static_targets = self.expand_macros(&static_targets)?;
                let static_targets = static_targets.split_whitespace();
                for real_target in static_targets {
                    if new_rule.matches(real_target)? {
                        let new_target = Target {
                            name: real_target.to_owned(),
                            prerequisites: new_rule.prereqs(real_target)?.collect(),
                            commands: new_rule.commands.clone(),
                            stem: new_rule
                                .first_match(real_target)?
                                .and_then(|x| x.get(1).map(|x| x.as_str().to_owned())),
                            already_updated: Cell::new(false),
                        };
                        self.targets.insert(real_target.to_owned(), new_target);
                    }
                }
            } else {
                self.inference_rules.push(new_rule);
            }
            return Ok(());
        }

        // don't interpret things like `.tmp: ; mkdir -p $@` as single-suffix rules
        let inference_match = inference_match.and_then(|inference| {
            if self.special_target_has_prereq(".SUFFIXES", inference.s1, false)
                && (inference.s2.is_empty()
                    || self.special_target_has_prereq(".SUFFIXES", inference.s2, false))
            {
                Some(inference)
            } else {
                log::info!(
                    "{}:{}: looks like {:?} is not a suffix rule because .SUFFIXES is {:?}",
                    &self.file_name,
                    line_number,
                    inference,
                    self.targets
                        .get(".SUFFIXES")
                        .or_else(|| self.built_in_targets.get(".SUFFIXES"))
                        .map(|x| &x.prerequisites)
                );
                None
            }
        });

        if let Some(inference_match) = inference_match {
            let new_rule = InferenceRule::new_suffix(
                inference_match.s1.to_owned(),
                inference_match.s2.to_owned(),
                commands,
            );
            log::trace!(
                "suffix-based inference rule defined by {:?} - {:?}",
                &inference_match,
                &new_rule,
            );
            self.inference_rules.retain(|existing_rule| {
                (&existing_rule.prerequisites, &existing_rule.products)
                    != (&new_rule.prerequisites, &new_rule.products)
            });
            self.inference_rules.push(new_rule);
        } else {
            for target in targets {
                if self.first_non_special_target.is_none() && !target.starts_with('.') {
                    self.first_non_special_target = Some(target.into());
                }
                // TODO handle appending to built-in (it's Complicated)
                match self.targets.get_mut(target) {
                    Some(old_target)
                        if commands.is_empty()
                            && !(target == ".SUFFIXES" && prerequisites.is_empty()) =>
                    {
                        let new_prerequisites = prerequisites
                            .iter()
                            .filter(|x| !old_target.prerequisites.contains(x))
                            .cloned()
                            .collect::<Vec<_>>();
                        old_target.prerequisites.extend(new_prerequisites);
                    }
                    _ => {
                        let new_target = Target {
                            name: target.into(),
                            prerequisites: prerequisites.clone(),
                            commands: commands.clone(),
                            stem: None,
                            already_updated: Cell::new(false),
                        };
                        self.targets.insert(target.into(), new_target);
                    }
                }
            }
        }

        Ok(())
    }

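    /// Parses a macro definition. POSIX `NAME = value` is always accepted;
    /// with the "full" feature the GNU-style forms are recognized as well:
    /// `:=`/`::=` (value expanded immediately), `?=` (only set if not already
    /// defined), `+=` (append), and multi-line `define NAME =` ... `endef`
    /// blocks.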
    fn read_macro(&mut self, mut line_tokens: TokenString, line_number: usize) -> Result<()> {
        let (name, mut value) = if cfg!(feature = "full") && line_tokens.starts_with("define ") {
            line_tokens.strip_prefix("define ");
            if line_tokens.ends_with("=") {
                line_tokens.strip_suffix("=");
                line_tokens.trim_end();
            }
            let mut value = TokenString::empty();
            // TODO what should be done with escaped newlines
            while let Some((_, line)) = self.next_line(Default::default()) {
                let line = line?;
                if line == "endef" {
                    break;
                }
                if !value.is_empty() {
                    value.extend(TokenString::text("\n"));
                }
                value.extend(line.parse()?);
            }
            (line_tokens, value)
        } else {
            line_tokens
                .split_once('=')
                .ok_or_else(|| eyre!("read_macro couldn't find a '=' on line {}", line_number))?
        };
        let name = self.expand_macros(&name)?;

        // GNUisms are annoying, but popular
        let mut expand_value = false;
        let mut skip_if_defined = false;
        let mut append = false;
        #[cfg(feature = "full")]
        let name = if let Some(real_name) = name.strip_suffix("::") {
            expand_value = true;
            real_name
        } else if let Some(real_name) = name.strip_suffix(":") {
            expand_value = true;
            real_name
        } else if let Some(real_name) = name.strip_suffix("?") {
            skip_if_defined = true;
            real_name
        } else if let Some(real_name) = name.strip_suffix("+") {
            append = true;
            real_name
        } else {
            &name
        };
        let name = name.trim();
        value.trim_start();
        let value = if expand_value {
            TokenString::text(
                self.expand_macros(&value)
                    .wrap_err_with(|| format!("while defining {} on line {}", name, line_number))?,
            )
        } else {
            value
        };

        match self.macros.get(name) {
            // We always let command line or MAKEFLAGS macros override macros from the file.
            Some(Macro {
                source: MacroSource::CommandLineOrMakeflags,
                ..
            }) => return Ok(()),
            // We let environment variables override macros from the file
            // only if the command-line argument to do that was given
            Some(Macro {
                source: MacroSource::Environment,
                ..
            }) if self.args.environment_overrides => return Ok(()),
            Some(_) if skip_if_defined => return Ok(()),
            _ => {}
        }

        let value = match self.macros.pop(name) {
            Some(mut old_value) if append => {
                #[cfg(feature = "full")]
                let value = if old_value.eagerly_expanded {
                    TokenString::text(self.expand_macros(&value).wrap_err_with(|| {
                        format!("while defining {} on line {}", name, line_number)
                    })?)
                } else {
                    value
                };
                old_value.text.extend(TokenString::text(" "));
                old_value.text.extend(value);
                old_value
            }
            _ => Macro {
                source: MacroSource::File,
                text: value,
                #[cfg(feature = "full")]
                eagerly_expanded: expand_value,
            },
        };
        self.macros.set(name.into(), value);
        Ok(())
    }

    fn expand_macros(&self, text: &TokenString) -> Result<String> {
        self.macros
            .expand(text)
            .wrap_err_with(|| format!("while expanding \"{}\"", text))
    }

    pub fn finish(self) -> FinishedMakefileReader {
        FinishedMakefileReader {
            inference_rules: self.inference_rules,
            macros: self.macros.data,
            #[cfg(feature = "full")]
            macro_exports: self.macros.exported,
            targets: self.targets,
            first_non_special_target: self.first_non_special_target,
            failed_includes: self.failed_includes,
        }
    }

    fn extend(&mut self, new: FinishedMakefileReader) {
        self.inference_rules.extend(new.inference_rules);
        self.macros.extend(
            new.macros,
            #[cfg(feature = "full")]
            new.macro_exports,
        );
        self.targets.extend(new.targets);
        if self.first_non_special_target.is_none() {
            self.first_non_special_target = new.first_non_special_target;
        }
        self.failed_includes.extend(new.failed_includes);
    }
}

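/// The plain-data result of a completed parse, produced by `finish()` and
/// merged back into a parent reader by `extend` when a makefile is `include`d
/// (or when text is read back through `$(eval ...)`).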
pub struct FinishedMakefileReader {
    pub inference_rules: Vec<InferenceRule>,
    pub macros: HashMap<String, Macro>,
    #[cfg(feature = "full")]
    pub macro_exports: ExportConfig,
    pub targets: HashMap<String, Target>,
    pub first_non_special_target: Option<String>,
    pub failed_includes: Vec<String>,
}

#[cfg(test)]
mod test {
    use super::*;

    type R = Result<()>;

    #[test]
    fn multi_line_dependencies() -> R {
        let file = "
unrelated: example
\tswag
x = 3 4 \\
\t\t5
a: $(x) b \\
\t\tc \\
\t\td
\tfoo";
        let args = Args::empty();
        let makefile =
            MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?.finish();
        assert_eq!(
            makefile.targets["a"].prerequisites,
            vec!["3", "4", "5", "b", "c", "d"]
        );
        Ok(())
    }

    #[cfg(feature = "full")]
    #[test]
    fn basic_conditionals() -> R {
        let file = "
ifeq (1,1)
worked = yes
else ifeq (2,2)
worked = no
else
worked = perhaps
endif
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        assert_eq!(
            makefile.expand_macros(&TokenString::r#macro("worked"))?,
            "yes"
        );
        Ok(())
    }

    #[cfg(feature = "full")]
    #[test]
    fn condition_in_rule() -> R {
        let file = "
a:
ifeq (1,1)
\tfoo
endif
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        assert_eq!(makefile.targets["a"].commands.len(), 1);
        Ok(())
    }

    #[cfg(feature = "full")]
    #[test]
    fn define_syntax() -> R {
        let file = "
define foo =
bar
baz
endef
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        assert_eq!(
            makefile.expand_macros(&TokenString::r#macro("foo"))?,
            "bar\nbaz"
        );
        Ok(())
    }

    #[cfg(feature = "full")]
    #[test]
    fn elseif() -> R {
        let file = "
ifdef CONFIG_CC_OPTIMIZE_FOR_PERFORMANCE
KBUILD_CFLAGS += -O2
else ifdef CONFIG_CC_OPTIMIZE_FOR_PERFORMANCE_O3
KBUILD_CFLAGS += -O3
else ifdef CONFIG_CC_OPTIMIZE_FOR_SIZE
KBUILD_CFLAGS += -Os
endif
FOO = bar
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        assert_eq!(makefile.expand_macros(&TokenString::r#macro("FOO"))?, "bar",);
        Ok(())
    }

    #[test]
    #[cfg(feature = "full")]
    fn eval() -> R {
        // This, for the record, is a terrible misfeature.
        // If you need this, you probably shouldn't be using Make.
        // But a lot of people are using this and still use Make anyway, so here we go,
        // I guess.
        let file = "
PROGRAMS = server client

server_OBJS = server.o server_priv.o server_access.o
server_LIBS = priv protocol

client_OBJS = client.o client_api.o client_mem.o
client_LIBS = protocol

# Everything after this is generic

.PHONY: all
all: $(PROGRAMS)

define PROGRAM_template =
$(1): $$($(1)_OBJS) $$($(1)_LIBS:%=-l%)
ALL_OBJS += $$($(1)_OBJS)
endef

$(foreach prog,$(PROGRAMS),$(eval $(call PROGRAM_template,$(prog))))

$(PROGRAMS):
\t$(LINK.o) $^ $(LDLIBS) -o $@

clean:
\trm -f $(ALL_OBJS) $(PROGRAMS)
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        assert!(makefile.targets.contains_key("server"));
        Ok(())
    }

    #[test]
    fn comment_bullshit() -> R {
        let file = "
foo: bar baz#swag
example: test\\#post
info:
\thello # there
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        let makefile = makefile.finish();
        assert_eq!(
            makefile.targets["foo"],
            Target {
                name: "foo".to_owned(),
                prerequisites: vec!["bar".to_owned(), "baz".to_owned()],
                commands: vec![],
                stem: None,
                already_updated: Cell::new(false)
            }
        );
        assert_eq!(
            makefile.targets["example"],
            Target {
                name: "example".to_owned(),
                prerequisites: vec!["test#post".to_owned()],
                commands: vec![],
                stem: None,
                already_updated: Cell::new(false)
            }
        );
        assert_eq!(
            makefile.targets["info"],
            Target {
                name: "info".to_owned(),
                prerequisites: vec![],
                commands: vec![CommandLine::from(TokenString::text("hello # there"))],
                stem: None,
                already_updated: Cell::new(false)
            }
        );
        Ok(())
    }

    #[test]
    fn sdafjijsafjdoisdf() -> R {
        let file = "
cursed:
\techo this uses the bash variable '$$#' and all that \\
\techo yeah its value is $$# and it's really cool
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        let _makefile = makefile.finish();
        Ok(())
    }

    #[test]
    fn double_suffix_rule() -> R {
        let file = "
.c.o:
\techo yeet
.SUFFIXES:
.l.a:
\techo hey
.SUFFIXES: .test .post
.post.test:
\techo hiiii
";
        let args = Args::empty();
        let makefile = MakefileReader::read(&args, MacroSet::new(), Cursor::new(file), "")?;
        let makefile = makefile.finish();
        assert_eq!(makefile.inference_rules.len(), 2);
        Ok(())
    }
}