"""Backtracking recursive-descent parser for a bracketed-list language.

Grammar (informal):
    list        : '[' elements ']'
    elements    : element (',' element)*
    element     : assign | name | assign_list | list
    assign      : Ident '=' Ident
    assign_list : list '=' list
    name        : Ident
"""
import xtokens as t
import xlexer as xl
from utils import Backtrackable


def parse(char_stream):
    """Parse a complete input: one list followed by end-of-file.

    Raises ValueError if the token stream does not match the grammar.
    """
    tokens = Backtrackable(xl.lex(char_stream))
    _list(tokens)
    _match(tokens, t.Eof)


def _test(tokens, f):
    """Speculatively run sub-parser *f*; report success, always rewinding.

    The backtrack_always() context restores the stream position on exit,
    so a successful probe must be followed by a real (consuming) parse.
    """
    with tokens.backtrack_always():
        try:
            f(tokens)
        except ValueError:
            return False
    return True


def _element(tokens):
    """Parse one element: the first alternative whose speculative parse works.

    Order matters: assign is probed before bare name, and assign_list before
    plain list, so the longer production wins when both share a prefix.
    """
    for alternative in (_assign, _name, _assign_list, _list):
        if _test(tokens, alternative):
            alternative(tokens)
            return
    raise ValueError(f"Invalid token {tokens[0]}")


def _list(tokens):
    """list : '[' elements ']'"""
    _match(tokens, t.Lbrack)
    _elements(tokens)
    _match(tokens, t.Rbrack)


def _elements(tokens):
    """elements : element (',' element)* — at least one element is required."""
    _element(tokens)
    while tokens[0] == t.Comma():
        next(tokens)  # consume the comma separator
        _element(tokens)


def _assign(tokens):
    """assign : Ident '=' Ident"""
    _match(tokens, t.Ident)
    _match(tokens, t.Equal)
    _match(tokens, t.Ident)


def _assign_list(tokens):
    """assign_list : list '=' list"""
    _list(tokens)
    _match(tokens, t.Equal)
    _list(tokens)


def _name(tokens):
    """name : Ident"""
    _match(tokens, t.Ident)


def _match(tokens, token_type):
    """Consume the next token if it is an instance of *token_type*.

    Raises ValueError on a mismatch, which the speculative probes in
    _test() catch to drive alternative selection.
    """
    if not isinstance(tokens[0], token_type):
        raise ValueError(f"Failed to match {tokens[0]} to type {token_type}")
    next(tokens)