path: root/yapymake
author    Melody Horn <melody@boringcactus.com>  2021-03-25 18:14:38 -0600
committer Melody Horn <melody@boringcactus.com>  2021-03-25 18:14:38 -0600
commit    459141ef81edcaa0f552eeb318cb4b7b7e8ef1c8 (patch)
tree      bfb9b4536994da259762fc252c6685bef2f41287 /yapymake
parent    1f534fbf0c0571feb5d8675e428536436fc58060 (diff)
download  yapymake-459141ef81edcaa0f552eeb318cb4b7b7e8ef1c8.tar.gz
          yapymake-459141ef81edcaa0f552eeb318cb4b7b7e8ef1c8.zip
avoid circular dependencies in parsing
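Why this breaks the cycle: a module-level assignment such as macro_expansion = alt(...) builds its parser eagerly, at import time, so every name it references must already be defined; wrapping the same expression in a def defers the lookup until the parser is actually called, which lets grammar rules that refer to each other coexist in one module. Below is a minimal sketch of the pattern with toy stand-in combinators; tag and alt here are illustrative assumptions, not yapymake's actual implementations.

from typing import Callable, Optional, Tuple, TypeVar

T = TypeVar('T')
# Toy combinators for illustration only, not yapymake's code.
# A parser takes the remaining input and returns (value, rest) on success.
Parser = Callable[[str], Optional[Tuple[T, str]]]

def tag(s: str) -> Parser[str]:
    # Match a literal prefix of the input.
    def parse(text: str) -> Optional[Tuple[str, str]]:
        return (s, text[len(s):]) if text.startswith(s) else None
    return parse

def alt(*parsers: Parser[T]) -> Parser[T]:
    # Try each alternative in order, returning the first success.
    def parse(text: str) -> Optional[Tuple[T, str]]:
        for p in parsers:
            result = p(text)
            if result is not None:
                return result
        return None
    return parse

# Eager, import-time definition: this would raise NameError, because
# `nested` is not defined yet.
#   group = alt(tag('x'), nested)

# Deferred definition: `nested` is only looked up when group() runs,
# so the two rules can refer to each other freely.
def group(text: str) -> Optional[Tuple[str, str]]:
    return alt(tag('x'), nested)(text)

def nested(text: str) -> Optional[Tuple[str, str]]:
    return alt(tag('('), group)(text)  # refers back to group: a cycle

assert group('x') == ('x', '')
assert nested('(') == ('(', '')

The trade-off is that the combinator expression is rebuilt on every call, a small overhead that is usually acceptable in exchange for removing the definition-order constraint.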
Diffstat (limited to 'yapymake')
-rw-r--r--  yapymake/makefile/token.py | 21
1 file changed, 16 insertions(+), 5 deletions(-)
diff --git a/yapymake/makefile/token.py b/yapymake/makefile/token.py
index a0c3f8f..948fcac 100644
--- a/yapymake/makefile/token.py
+++ b/yapymake/makefile/token.py
@@ -12,6 +12,8 @@ __all__ = [
 ]

 class TokenString(Iterable['Token']):
+    _tokens: List['Token']
+
     def __init__(self, my_tokens: List['Token'] = None):
         if my_tokens is None:
             my_tokens = []
@@ -27,6 +29,9 @@ class TokenString(Iterable['Token']):
     def __iter__(self) -> Iterator['Token']:
         return iter(self._tokens)

+    def __repr__(self) -> str:
+        return f'TokenString({repr(self._tokens)})'
+
     def split_once(self, delimiter: str) -> Optional[Tuple['TokenString', 'TokenString']]:
         result0 = []
         self_iter = iter(self._tokens)
@@ -70,8 +75,11 @@ def macro_expansion_body(end: str) -> Parser[MacroToken]:
     subst = preceded(tag(":"), separated_pair(tokens('='), '=', tokens(end)))
     return map_parser(pair(macro_name, opt(subst)), MacroToken)

-parens_macro_expansion = delimited(tag('$('), macro_expansion_body(')'), tag(')'))
-braces_macro_expansion = delimited(tag('${'), macro_expansion_body('}'), tag('}'))
+def parens_macro_expansion(text: str) -> ParseResult[MacroToken]:
+    return delimited(tag('$('), macro_expansion_body(')'), tag(')'))(text)
+
+def braces_macro_expansion(text: str) -> ParseResult[MacroToken]:
+    return delimited(tag('${'), macro_expansion_body('}'), tag('}'))(text)

 def build_tiny_expansion(name_probably: str) -> Token:
     if name_probably == '$':
@@ -79,9 +87,11 @@ def build_tiny_expansion(name_probably: str) -> Token:
     else:
         return MacroToken(name_probably)

-tiny_macro_expansion = map_parser(preceded(tag('$'), verify(any_char, lambda c: c not in ['(', '{'])), build_tiny_expansion)
+def tiny_macro_expansion(text: str) -> ParseResult[MacroToken]:
+    return map_parser(preceded(tag('$'), verify(any_char, lambda c: c not in ['(', '{'])), build_tiny_expansion)(text)

-macro_expansion = alt(tiny_macro_expansion, parens_macro_expansion, braces_macro_expansion)
+def macro_expansion(text: str) -> ParseResult[MacroToken]:
+    return alt(tiny_macro_expansion, parens_macro_expansion, braces_macro_expansion)(text)

 just_text = map_parser(take_till1(lambda c: c == '$'), TextToken)
@@ -100,7 +110,8 @@ empty_tokens = map_parser(tag(''), lambda _: TokenString.text(''))
 def tokens(until: Optional[str] = None) -> Parser[TokenString]:
     return alt(map_parser(many1(single_token(until)), TokenString), empty_tokens)

-full_text_tokens = all_consuming(tokens())
+def full_text_tokens(text: str) -> ParseResult[TokenString]:
+    return all_consuming(tokens())(text)

 def tokenize(text: str) -> TokenString:
     result, _ = full_text_tokens(text)