Diffstat (limited to 'src/makefile')
-rw-r--r--  src/makefile/mod.rs    | 214
-rw-r--r--  src/makefile/token.rs  | 164
2 files changed, 251 insertions(+), 127 deletions(-)
diff --git a/src/makefile/mod.rs b/src/makefile/mod.rs
index 359355e..138d612 100644
--- a/src/makefile/mod.rs
+++ b/src/makefile/mod.rs
@@ -2,7 +2,7 @@ use std::cell::{Cell, RefCell};
use std::collections::HashMap;
use std::env;
use std::fmt;
-use std::fs::{File, metadata};
+use std::fs::{metadata, File};
use std::io::{BufRead, BufReader};
use std::path::Path;
use std::rc::Rc;
@@ -70,9 +70,10 @@ impl Target {
return true;
}
let exists = metadata(&self.name).is_ok();
- let newer_than_all_dependencies = self.prerequisites
- .iter()
- .all(|t| self.newer_than(&file.get_target(t).borrow()).unwrap_or(false));
+ let newer_than_all_dependencies = self.prerequisites.iter().all(|t| {
+ self.newer_than(&file.get_target(t).borrow())
+ .unwrap_or(false)
+ });
exists && newer_than_all_dependencies
}
@@ -135,7 +136,7 @@ impl CommandLine {
Some('-') => ignore_errors = true,
Some('@') => silent = true,
Some('+') => always_execute = true,
- _ => unreachable!()
+ _ => unreachable!(),
},
_ => break,
}
@@ -152,12 +153,12 @@ impl CommandLine {
}
fn execute(&self, file: &Makefile, target: &Target) {
- let ignore_error = self.ignore_errors ||
- file.args.ignore_errors ||
- file.special_target_has_prereq(".IGNORE", &target.name);
- let silent = (self.silent && !file.args.dry_run) ||
- file.args.silent ||
- file.special_target_has_prereq(".SILENT", &target.name);
+ let ignore_error = self.ignore_errors
+ || file.args.ignore_errors
+ || file.special_target_has_prereq(".IGNORE", &target.name);
+ let silent = (self.silent && !file.args.dry_run)
+ || file.args.silent
+ || file.special_target_has_prereq(".SILENT", &target.name);
let execution_line = file.expand_macros(&self.execution_line, Some(target));
@@ -165,8 +166,8 @@ impl CommandLine {
println!("{}", execution_line);
}
- let should_execute = self.always_execute ||
- !(file.args.dry_run || file.args.question || file.args.touch);
+ let should_execute =
+ self.always_execute || !(file.args.dry_run || file.args.question || file.args.touch);
if !should_execute {
return;
}
@@ -232,8 +233,11 @@ impl Makefile {
for (k, v) in builtin_macros() {
macros.insert(k.into(), (MacroSource::Builtin, v));
}
- targets.extend(builtin_targets().into_iter()
- .map(|t| (t.name.clone(), Rc::new(RefCell::new(t)))));
+ targets.extend(
+ builtin_targets()
+ .into_iter()
+ .map(|t| (t.name.clone(), Rc::new(RefCell::new(t)))),
+ );
}
for (k, v) in env::vars() {
@@ -246,7 +250,13 @@ impl Makefile {
let pieces = r#macro.splitn(2, '=').collect::<Vec<_>>();
match *pieces {
[name, value] => {
- macros.insert(name.into(), (MacroSource::CommandLineOrMAKEFLAGS, TokenString::text(value)));
+ macros.insert(
+ name.into(),
+ (
+ MacroSource::CommandLineOrMAKEFLAGS,
+ TokenString::text(value),
+ ),
+ );
}
_ => {}
}
@@ -357,22 +367,32 @@ impl Makefile {
LineType::Rule => {
let (targets, not_targets) = line_tokens.split_once(':').unwrap();
let targets = self.expand_macros(&targets, None);
- let targets = targets.split_whitespace().map(|x| x.into()).collect::<Vec<String>>();
+ let targets = targets
+ .split_whitespace()
+ .map(|x| x.into())
+ .collect::<Vec<String>>();
let (prerequisites, mut commands) = match not_targets.split_once(';') {
Some((prerequisites, mut command)) => {
while command.ends_with(r"\") && lines_iter.peek().is_some() {
command.strip_suffix(r"\");
- command.extend(tokenize(&lines_iter.next().unwrap().1.unwrap()));
+ command
+ .extend(tokenize(&lines_iter.next().unwrap().1.unwrap()));
}
(prerequisites, vec![command])
}
None => (not_targets, vec![]),
};
let prerequisites = self.expand_macros(&prerequisites, None);
- let prerequisites = prerequisites.split_whitespace().map(|x| x.into()).collect::<Vec<String>>();
-
- while lines_iter.peek().and_then(|(_, x)| x.as_ref().ok())
- .map_or(false, |line| line.starts_with('\t') || line.is_empty()) {
+ let prerequisites = prerequisites
+ .split_whitespace()
+ .map(|x| x.into())
+ .collect::<Vec<String>>();
+
+ while lines_iter
+ .peek()
+ .and_then(|(_, x)| x.as_ref().ok())
+ .map_or(false, |line| line.starts_with('\t') || line.is_empty())
+ {
let line = lines_iter.next().unwrap().1.unwrap();
let mut line: String = line.strip_prefix("\t").unwrap_or(&line).into();
if line.is_empty() {
@@ -381,17 +401,19 @@ impl Makefile {
while line.ends_with('\\') {
match lines_iter.next() {
Some((_, Ok(next_line))) => {
- let next_line = next_line.strip_prefix("\t").unwrap_or(&next_line);
+ let next_line =
+ next_line.strip_prefix("\t").unwrap_or(&next_line);
line.push('\n');
line.push_str(next_line);
}
- _ => break
+ _ => break,
}
}
commands.push(line.parse().unwrap());
}
- let commands = commands.into_iter()
+ let commands = commands
+ .into_iter()
.map(CommandLine::from)
.collect::<Vec<_>>();
@@ -401,14 +423,18 @@ impl Makefile {
// we don't know yet if it's a target rule or an inference rule
lazy_static! {
- static ref INFERENCE_RULE: Regex = Regex::new(r"^(?P<s2>(\.[^/.]+)?)(?P<s1>\.[^/.]+)$").unwrap();
+ static ref INFERENCE_RULE: Regex =
+ Regex::new(r"^(?P<s2>(\.[^/.]+)?)(?P<s1>\.[^/.]+)$").unwrap();
static ref SPECIAL_TARGET: Regex = Regex::new(r"^\.[A-Z]+$").unwrap();
}
let inference_match = INFERENCE_RULE.captures(&targets[0]);
let special_target_match = SPECIAL_TARGET.captures(&targets[0]);
- let inference_rule = targets.len() == 1 && prerequisites.len() == 0 && inference_match.is_some() && special_target_match.is_none();
+ let inference_rule = targets.len() == 1
+ && prerequisites.len() == 0
+ && inference_match.is_some()
+ && special_target_match.is_none();
if inference_rule {
let inference_match = inference_match.unwrap();
let new_rule = InferenceRule {
@@ -417,19 +443,28 @@ impl Makefile {
commands,
};
- self.inference_rules.retain(|existing_rule|
- (&existing_rule.prereq, &existing_rule.product) != (&new_rule.prereq, &new_rule.product));
+ self.inference_rules.retain(|existing_rule| {
+ (&existing_rule.prereq, &existing_rule.product)
+ != (&new_rule.prereq, &new_rule.product)
+ });
self.inference_rules.push(new_rule);
} else {
for target in targets {
- if self.first_non_special_target.is_none() && !target.starts_with('.') {
+ if self.first_non_special_target.is_none()
+ && !target.starts_with('.')
+ {
self.first_non_special_target = Some(target.clone());
}
let mut targets = self.targets.borrow_mut();
match targets.get_mut(&target) {
- Some(old_target) if commands.is_empty() && !(target == ".SUFIXES" && prerequisites.is_empty()) => {
+ Some(old_target)
+ if commands.is_empty()
+ && !(target == ".SUFIXES"
+ && prerequisites.is_empty()) =>
+ {
let mut old_target = old_target.borrow_mut();
- let new_prerequisites = prerequisites.iter()
+ let new_prerequisites = prerequisites
+ .iter()
.filter(|x| !old_target.prerequisites.contains(x))
.cloned()
.collect::<Vec<_>>();
@@ -442,12 +477,15 @@ impl Makefile {
commands: commands.clone(),
already_updated: Cell::new(false),
};
- targets.insert(target.clone(), Rc::new(RefCell::new(new_target)));
+ targets.insert(
+ target.clone(),
+ Rc::new(RefCell::new(new_target)),
+ );
}
}
}
}
- },
+ }
LineType::Macro => {
let (name, mut value) = line_tokens.split_once('=').unwrap();
let name = self.expand_macros(&name, None);
@@ -484,7 +522,11 @@ impl Makefile {
// We always let command line or MAKEFLAGS macros override macros from the file.
Some((MacroSource::CommandLineOrMAKEFLAGS, _)) => continue,
// We let environment variables override macros from the file only if the command-line argument to do that was given
- Some((MacroSource::Environment, _)) if self.args.environment_overrides => continue,
+ Some((MacroSource::Environment, _))
+ if self.args.environment_overrides =>
+ {
+ continue
+ }
_ if skip_if_defined => continue,
_ => {}
}
@@ -495,13 +537,16 @@ impl Makefile {
old_value.extend(TokenString::text(" "));
old_value.extend(value);
old_value
- },
- _ => value
+ }
+ _ => value,
};
self.macros.insert(name.into(), (MacroSource::File, value));
}
LineType::Unknown => {
- panic!("error: line {}: unknown line {:?}", line_number, line_tokens);
+ panic!(
+ "error: line {}: unknown line {:?}",
+ line_number, line_tokens
+ );
}
}
}
@@ -515,7 +560,7 @@ impl Makefile {
Some(target) => {
let target = target.borrow();
target.prerequisites.len() == 0 || target.prerequisites.contains(&name.into())
- },
+ }
None => false,
}
}
@@ -529,7 +574,7 @@ impl Makefile {
let vpath = self.expand_macros(vpath, None);
env::split_paths(&vpath).collect()
}
- _ => vec![]
+ _ => vec![],
};
let targets = self.targets.borrow();
@@ -548,7 +593,9 @@ impl Makefile {
if !targets.contains_key(name) || exists_but_infer_anyway {
// When no target rule is found to update a target, the inference rules shall
// be checked. The suffix of the target to be built...
- let suffix = Path::new(name).extension().map_or_else(String::new, |ext| format!(".{}", ext.to_string_lossy()));
+ let suffix = Path::new(name)
+ .extension()
+ .map_or_else(String::new, |ext| format!(".{}", ext.to_string_lossy()));
// is compared to the list of suffixes specified by the .SUFFIXES special
// targets. If the .s1 suffix is found in .SUFFIXES...
if self.special_target_has_prereq(".SUFFIXES", &suffix) || suffix.is_empty() {
@@ -557,12 +604,14 @@ impl Makefile {
// for the first .s2.s1 rule...
if rule.product == suffix {
// whose prerequisite file ($*.s2) exists.
- let prereq_path = Path::new(name).with_extension(rule.prereq.trim_start_matches('.'));
+ let prereq_path =
+ Path::new(name).with_extension(rule.prereq.trim_start_matches('.'));
let prereq_path_options = if prereq_path.is_absolute() {
vec![prereq_path]
} else {
let mut options = vec![prereq_path.clone()];
- options.extend(vpath_options.iter().map(|vpath| vpath.join(&prereq_path)));
+ options
+ .extend(vpath_options.iter().map(|vpath| vpath.join(&prereq_path)));
options
};
for prereq in prereq_path_options {
@@ -631,7 +680,9 @@ impl Makefile {
let internal_macro_names = &['@', '?', '<', '*'][..];
let internal_macro_suffices = &['D', 'F'][..];
let just_internal = name.len() == 1 && name.starts_with(internal_macro_names);
- let suffixed_internal = name.len() == 2 && name.starts_with(internal_macro_names) && name.ends_with(internal_macro_suffices);
+ let suffixed_internal = name.len() == 2
+ && name.starts_with(internal_macro_names)
+ && name.ends_with(internal_macro_suffices);
let macro_value = if just_internal || suffixed_internal {
let target = target.expect("internal macro but no current target!");
let macro_pieces = if name.starts_with('@') {
@@ -641,9 +692,15 @@ impl Makefile {
} else if name.starts_with('?') {
// The $? macro shall evaluate to the list of prerequisites
// that are newer than the current target.
- target.prerequisites
+ target
+ .prerequisites
.iter()
- .filter(|prereq| self.get_target(prereq).borrow().newer_than(target).unwrap_or(false))
+ .filter(|prereq| {
+ self.get_target(prereq)
+ .borrow()
+ .newer_than(target)
+ .unwrap_or(false)
+ })
.cloned()
.collect()
} else if name.starts_with('<') {
@@ -661,12 +718,26 @@ impl Makefile {
};
let macro_pieces = if name.ends_with('D') {
- macro_pieces.into_iter()
- .map(|x| Path::new(&x).parent().expect("no parent").to_string_lossy().into())
+ macro_pieces
+ .into_iter()
+ .map(|x| {
+ Path::new(&x)
+ .parent()
+ .expect("no parent")
+ .to_string_lossy()
+ .into()
+ })
.collect()
} else if name.ends_with('F') {
- macro_pieces.into_iter()
- .map(|x| Path::new(&x).file_name().expect("no filename").to_string_lossy().into())
+ macro_pieces
+ .into_iter()
+ .map(|x| {
+ Path::new(&x)
+ .file_name()
+ .expect("no filename")
+ .to_string_lossy()
+ .into()
+ })
.collect()
} else {
macro_pieces
@@ -676,19 +747,18 @@ impl Makefile {
} else {
match self.macros.get(name) {
Some((_, macro_value)) => self.expand_macros(&macro_value, target),
- None => {
- String::new()
- }
+ None => String::new(),
}
};
let macro_value = match replacement {
Some((subst1, subst2)) => {
let subst1 = self.expand_macros(subst1, target);
let subst1_suffix = regex::escape(&subst1);
- let subst1_suffix = Regex::new(&format!(r"{}\b", subst1_suffix)).unwrap();
+ let subst1_suffix =
+ Regex::new(&format!(r"{}\b", subst1_suffix)).unwrap();
let subst2 = self.expand_macros(subst2, target);
subst1_suffix.replace_all(&macro_value, subst2).to_string()
- },
+ }
None => macro_value,
};
result.push_str(&macro_value);
@@ -730,8 +800,12 @@ impl fmt::Display for Makefile {
fn builtin_inference_rules() -> Vec<InferenceRule> {
// This is a terrible idea.
macro_rules! prepend_dot {
- ($x:tt) => {concat!(".", stringify!($x))};
- () => {""};
+ ($x:tt) => {
+ concat!(".", stringify!($x))
+ };
+ () => {
+ ""
+ };
}
macro_rules! make {
@@ -789,8 +863,12 @@ fn builtin_inference_rules() -> Vec<InferenceRule> {
fn builtin_macros() -> Vec<(&'static str, TokenString)> {
// Fuck it, might as well.
macro_rules! handle {
- ($value:ident) => {stringify!($value)};
- ($value:literal) => {$value};
+ ($value:ident) => {
+ stringify!($value)
+ };
+ ($value:literal) => {
+ $value
+ };
}
macro_rules! make {
($($name:ident=$value:tt)+) => {vec![$(
@@ -832,18 +910,18 @@ fn builtin_macros() -> Vec<(&'static str, TokenString)> {
}
fn builtin_targets() -> Vec<Target> {
// even i'm not going to do that just for this
- vec![
- Target {
- name: ".SUFFIXES".into(),
- prerequisites: vec![".o", ".c", ".y", ".l", ".a", ".sh", ".f"].into_iter().map(String::from).collect(),
- commands: vec![],
- already_updated: Cell::new(false),
- }
- ]
+ vec![Target {
+ name: ".SUFFIXES".into(),
+ prerequisites: vec![".o", ".c", ".y", ".l", ".a", ".sh", ".f"]
+ .into_iter()
+ .map(String::from)
+ .collect(),
+ commands: vec![],
+ already_updated: Cell::new(false),
+ }]
}
#[cfg(test)]
mod test {
use super::*;
-
}
diff --git a/src/makefile/token.rs b/src/makefile/token.rs
index 8bac34f..720055f 100644
--- a/src/makefile/token.rs
+++ b/src/makefile/token.rs
@@ -2,13 +2,13 @@ use std::fmt;
use std::str::FromStr;
use nom::{
- Finish, IResult,
branch::alt,
bytes::complete::{tag, take_till1, take_while1},
character::complete::anychar,
combinator::{all_consuming, map, opt, verify},
multi::many1,
sequence::{delimited, pair, preceded, separated_pair},
+ Finish, IResult,
};
#[derive(PartialEq, Eq, Clone, Debug)]
@@ -19,7 +19,7 @@ impl TokenString {
Self(vec![Token::Text(text.into())])
}
- pub fn tokens(&self) -> impl Iterator<Item=&Token> {
+ pub fn tokens(&self) -> impl Iterator<Item = &Token> {
self.0.iter()
}
@@ -50,7 +50,7 @@ impl TokenString {
pub fn ends_with(&self, pattern: &str) -> bool {
match self.0.last() {
Some(Token::Text(t)) => t.ends_with(pattern),
- _ => false
+ _ => false,
}
}
@@ -95,25 +95,34 @@ impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Token::Text(t) => write!(f, "{}", t),
- Token::MacroExpansion { name, replacement: None } => write!(f, "$({})", name),
- Token::MacroExpansion { name, replacement: Some((r1, r2)) } => write!(f, "$({}:{}={})", name, r1, r2),
+ Token::MacroExpansion {
+ name,
+ replacement: None,
+ } => write!(f, "$({})", name),
+ Token::MacroExpansion {
+ name,
+ replacement: Some((r1, r2)),
+ } => write!(f, "$({}:{}={})", name, r1, r2),
}
}
}
fn macro_name(input: &str) -> IResult<&str, &str> {
// POSIX says "periods, underscores, digits, and alphabetics from the portable character set"
- take_while1(|c: char| {
- c == '.' || c == '_' || c.is_alphanumeric()
- })(input)
+ take_while1(|c: char| c == '.' || c == '_' || c.is_alphanumeric())(input)
}
fn macro_expansion_body<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
- let subst = preceded(tag(":"), separated_pair(tokens_but_not('='), tag("="), tokens_but_not(end)));
- map(
- pair(macro_name, opt(subst)),
- |(name, replacement)| Token::MacroExpansion { name: name.into(), replacement },
- )
+ let subst = preceded(
+ tag(":"),
+ separated_pair(tokens_but_not('='), tag("="), tokens_but_not(end)),
+ );
+ map(pair(macro_name, opt(subst)), |(name, replacement)| {
+ Token::MacroExpansion {
+ name: name.into(),
+ replacement,
+ }
+ })
}
fn parens_macro_expansion(input: &str) -> IResult<&str, Token> {
@@ -139,7 +148,11 @@ fn tiny_macro_expansion(input: &str) -> IResult<&str, Token> {
}
fn macro_expansion(input: &str) -> IResult<&str, Token> {
- alt((tiny_macro_expansion, parens_macro_expansion, braces_macro_expansion))(input)
+ alt((
+ tiny_macro_expansion,
+ parens_macro_expansion,
+ braces_macro_expansion,
+ ))(input)
}
fn text(input: &str) -> IResult<&str, Token> {
@@ -147,7 +160,9 @@ fn text(input: &str) -> IResult<&str, Token> {
}
fn text_but_not<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, Token> {
- map(take_till1(move |c| c == '$' || c == end), |x: &str| Token::Text(x.into()))
+ map(take_till1(move |c| c == '$' || c == end), |x: &str| {
+ Token::Text(x.into())
+ })
}
fn single_token(input: &str) -> IResult<&str, Token> {
@@ -167,7 +182,10 @@ fn tokens(input: &str) -> IResult<&str, TokenString> {
}
fn tokens_but_not<'a>(end: char) -> impl FnMut(&'a str) -> IResult<&'a str, TokenString> {
- alt((map(many1(single_token_but_not(end)), TokenString), empty_tokens))
+ alt((
+ map(many1(single_token_but_not(end)), TokenString),
+ empty_tokens,
+ ))
}
fn full_text_tokens(input: &str) -> IResult<&str, TokenString> {
@@ -185,15 +203,13 @@ impl FromStr for TokenString {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
- full_text_tokens(s).finish()
- .map(|(_, x)| x)
- .map_err(|_| ())
+ full_text_tokens(s).finish().map(|(_, x)| x).map_err(|_| ())
}
}
#[cfg(test)]
mod test {
- use super::{Token, TokenString, tokenize};
+ use super::{tokenize, Token, TokenString};
impl From<Vec<Token>> for TokenString {
fn from(x: Vec<Token>) -> Self {
@@ -206,13 +222,21 @@ mod test {
}
fn token_macro_expansion(name: impl Into<String>) -> Token {
- Token::MacroExpansion { name: name.into(), replacement: None }
+ Token::MacroExpansion {
+ name: name.into(),
+ replacement: None,
+ }
}
- fn token_macro_expansion_replacement(name: impl Into<String>,
- subst1: impl Into<TokenString>,
- subst2: impl Into<TokenString>) -> Token {
- Token::MacroExpansion { name: name.into(), replacement: Some((subst1.into(), subst2.into())) }
+ fn token_macro_expansion_replacement(
+ name: impl Into<String>,
+ subst1: impl Into<TokenString>,
+ subst2: impl Into<TokenString>,
+ ) -> Token {
+ Token::MacroExpansion {
+ name: name.into(),
+ replacement: Some((subst1.into(), subst2.into())),
+ }
}
#[test]
@@ -226,60 +250,82 @@ mod test {
fn no_replacement() {
let text = "This is a $Q sentence! There are $(BORING) macros in it at ${YEET}!";
let tokens = tokenize(text);
- assert_eq!(tokens, TokenString(vec![
- token_text("This is a "),
- token_macro_expansion("Q"),
- token_text(" sentence! There are "),
- token_macro_expansion("BORING"),
- token_text(" macros in it at "),
- token_macro_expansion("YEET"),
- token_text("!"),
- ]));
+ assert_eq!(
+ tokens,
+ TokenString(vec![
+ token_text("This is a "),
+ token_macro_expansion("Q"),
+ token_text(" sentence! There are "),
+ token_macro_expansion("BORING"),
+ token_text(" macros in it at "),
+ token_macro_expansion("YEET"),
+ token_text("!"),
+ ])
+ );
}
#[test]
fn escaped() {
let text = "This costs $$2 to run, which isn't ideal";
let tokens = tokenize(text);
- assert_eq!(tokens, TokenString(vec![
- token_text("This costs "),
- token_text("$"),
- token_text("2 to run, which isn't ideal"),
- ]));
+ assert_eq!(
+ tokens,
+ TokenString(vec![
+ token_text("This costs "),
+ token_text("$"),
+ token_text("2 to run, which isn't ideal"),
+ ])
+ );
}
#[test]
fn replacement() {
let text = "Can I get a $(DATA:.c=.oof) in this ${SWAG:.yolo=}";
let tokens = tokenize(text);
- assert_eq!(tokens, TokenString(vec![
- token_text("Can I get a "),
- token_macro_expansion_replacement("DATA", vec![token_text(".c")], vec![token_text(".oof")]),
- token_text(" in this "),
- token_macro_expansion_replacement("SWAG", vec![token_text(".yolo")], vec![token_text("")]),
- ]));
+ assert_eq!(
+ tokens,
+ TokenString(vec![
+ token_text("Can I get a "),
+ token_macro_expansion_replacement(
+ "DATA",
+ vec![token_text(".c")],
+ vec![token_text(".oof")]
+ ),
+ token_text(" in this "),
+ token_macro_expansion_replacement(
+ "SWAG",
+ vec![token_text(".yolo")],
+ vec![token_text("")]
+ ),
+ ])
+ );
}
#[test]
fn hell() {
let text = "$(OOF:${ouch:hi=hey} there=$(owie:$(my)=${bones})), bro.";
let tokens = tokenize(text);
- assert_eq!(tokens, TokenString(vec![
- token_macro_expansion_replacement(
- "OOF",
- vec![
- token_macro_expansion_replacement("ouch", vec![token_text("hi")], vec![token_text("hey")]),
- token_text(" there"),
- ],
- vec![
- token_macro_expansion_replacement(
+ assert_eq!(
+ tokens,
+ TokenString(vec![
+ token_macro_expansion_replacement(
+ "OOF",
+ vec![
+ token_macro_expansion_replacement(
+ "ouch",
+ vec![token_text("hi")],
+ vec![token_text("hey")]
+ ),
+ token_text(" there"),
+ ],
+ vec![token_macro_expansion_replacement(
"owie",
vec![token_macro_expansion("my")],
vec![token_macro_expansion("bones")],
- ),
- ],
- ),
- token_text(", bro."),
- ]));
+ ),],
+ ),
+ token_text(", bro."),
+ ])
+ );
}
}