path: root/yapymake/makefile/token.py
Diffstat (limited to 'yapymake/makefile/token.py')
-rw-r--r--  yapymake/makefile/token.py  40
1 file changed, 24 insertions(+), 16 deletions(-)
diff --git a/yapymake/makefile/token.py b/yapymake/makefile/token.py
index 948fcac..fd7de5b 100644
--- a/yapymake/makefile/token.py
+++ b/yapymake/makefile/token.py
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Iterable, Iterator, List, Optional, Tuple
+from typing import Iterable, Iterator, List, MutableSequence, Optional, Tuple

 from .parse_util import *

@@ -12,18 +12,19 @@ __all__ = [
 ]

 class TokenString(Iterable['Token']):
-    _tokens: List['Token']
+    _tokens: MutableSequence['Token']

-    def __init__(self, my_tokens: List['Token'] = None):
+    def __init__(self, my_tokens: Optional[MutableSequence['Token']] = None):
         if my_tokens is None:
-            my_tokens = []
-        self._tokens = my_tokens
+            self._tokens = []
+        else:
+            self._tokens = my_tokens

     @staticmethod
     def text(body: str) -> 'TokenString':
         return TokenString([TextToken(body)])

-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: object) -> bool:
         return isinstance(other, TokenString) and self._tokens == other._tokens

     def __iter__(self) -> Iterator['Token']:
@@ -33,7 +34,7 @@ class TokenString(Iterable['Token']):
         return f'TokenString({repr(self._tokens)})'

     def split_once(self, delimiter: str) -> Optional[Tuple['TokenString', 'TokenString']]:
-        result0 = []
+        result0: List[Token] = []
         self_iter = iter(self._tokens)
         for t in self_iter:
             if isinstance(t, TextToken) and delimiter in t.text:
@@ -44,13 +45,13 @@ class TokenString(Iterable['Token']):
             result0.append(t)
         return None

-    def lstrip(self):
+    def lstrip(self) -> None:
         first_token = self._tokens[0]
         if isinstance(first_token, TextToken):
             first_token.text = first_token.text.lstrip()
             self._tokens[0] = first_token

-    def rstrip(self):
+    def rstrip(self) -> None:
         last_token = self._tokens[-1]
         if isinstance(last_token, TextToken):
             last_token.text = last_token.text.rstrip()
@@ -67,13 +68,18 @@ class TextToken(Token):
 @dataclass()
 class MacroToken(Token):
     name: str
-    replacement: Optional[Tuple[TokenString, TokenString]] = None
+    replacement: Optional[Tuple[TokenString, TokenString]]

 macro_name = take_while1(lambda c: c.isalnum() or c in ['.', '_'])

 def macro_expansion_body(end: str) -> Parser[MacroToken]:
-    subst = preceded(tag(":"), separated_pair(tokens('='), '=', tokens(end)))
-    return map_parser(pair(macro_name, opt(subst)), MacroToken)
+    subst = preceded(tag(":"), separated_pair(tokens('='), tag('='), tokens(end)))
+
+    def make_token(data: Tuple[str, Optional[Tuple[TokenString, TokenString]]]) -> MacroToken:
+        name, replacement = data
+        return MacroToken(name, replacement)
+
+    return map_parser(pair(macro_name, opt(subst)), make_token)

 def parens_macro_expansion(text: str) -> ParseResult[MacroToken]:
     return delimited(tag('$('), macro_expansion_body(')'), tag(')'))(text)
@@ -85,12 +91,12 @@ def build_tiny_expansion(name_probably: str) -> Token:
     if name_probably == '$':
         return TextToken('$')
     else:
-        return MacroToken(name_probably)
+        return MacroToken(name_probably, None)

-def tiny_macro_expansion(text: str) -> ParseResult[MacroToken]:
+def tiny_macro_expansion(text: str) -> ParseResult[Token]:
     return map_parser(preceded(tag('$'), verify(any_char, lambda c: c not in ['(', '{'])), build_tiny_expansion)(text)

-def macro_expansion(text: str) -> ParseResult[MacroToken]:
+def macro_expansion(text: str) -> ParseResult[Token]:
     return alt(tiny_macro_expansion, parens_macro_expansion, braces_macro_expansion)(text)

 just_text = map_parser(take_till1(lambda c: c == '$'), TextToken)
@@ -114,7 +120,9 @@ def full_text_tokens(text: str) -> ParseResult[TokenString]:
     return all_consuming(tokens())(text)

 def tokenize(text: str) -> TokenString:
-    result, _ = full_text_tokens(text)
+    parsed_result = full_text_tokens(text)
+    assert parsed_result is not None
+    result, _ = parsed_result
     return result

 # TODO handle errors
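
For a sense of how the changed tokenizer behaves end to end, here is a minimal usage sketch. It is not part of this commit; it assumes the package is importable as yapymake.makefile.token and that the parse_util combinators succeed on well-formed input:

    # Hypothetical driver for the tokenizer in this diff.
    from yapymake.makefile.token import tokenize, MacroToken, TextToken

    # '$(CC) -o $@' should yield MacroToken('CC', None), TextToken(' -o '),
    # and MacroToken('@', None): '$(CC)' goes through parens_macro_expansion,
    # the bare '$@' through tiny_macro_expansion, and the literal text in
    # between through just_text.
    for token in tokenize('$(CC) -o $@'):
        if isinstance(token, MacroToken):
            print('macro:', token.name, 'replacement:', token.replacement)
        else:
            print('text :', repr(token.text))

With replacement now a required field, call sites such as build_tiny_expansion must pass None explicitly, which is what this commit does; the assert added in tokenize likewise surfaces a failed parse immediately rather than raising an opaque TypeError when unpacking None.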