def elements(self, ast):
    # Drop None placeholders; return Void for an empty result, the element
    # itself for a singleton, and a Sequence otherwise.
    elements = [e for e in ast if e is not None]
    if not elements:
        return model.Void()
    elif len(elements) == 1:
        return elements[0]
    else:
        return model.Sequence(AST(sequence=elements))
class TOKEN_TREE:
    """A Rust-style token tree: a literal, identifier, lifetime, or punctuation
    token, or a parenthesized/braced/bracketed sequence of nested token trees."""

    tokens: list

    RULE = grammars.Rule(
        ast=None,
        name="TOKEN_TREE",
        exp=grammars.Choice([
            grammars.RuleRef("LITERAL"),
            grammars.RuleRef("IDENT"),
            grammars.RuleRef("LIFETIME"),
            grammars.RuleRef("PUNCT"),
            grammars.Sequence(
                AST(sequence=[
                    grammars.Token("("),
                    grammars.Closure(grammars.RuleRef("TOKEN_TREE")),
                    grammars.Token(")"),
                ])),
            grammars.Sequence(
                AST(sequence=[
                    grammars.Token("{"),
                    grammars.Closure(grammars.RuleRef("TOKEN_TREE")),
                    grammars.Token("}"),
                ])),
            grammars.Sequence(
                AST(sequence=[
                    grammars.Token("["),
                    grammars.Closure(grammars.RuleRef("TOKEN_TREE")),
                    grammars.Token("]"),
                ])),
        ]),
        params=None,
        kwparams=None,
    )

    @classmethod
    def from_ast(cls, ast) -> TOKEN_TREE:
        return cls(ast)
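# Hedged usage sketch, not part of the original file: it assumes TOKEN_TREE is
# a dataclass-like node whose single field `tokens` receives the parsed
# children, so `cls(ast)` stores the AST list directly. The nested-list AST
# below is illustrative, not real parser output.
def _example_token_tree():
    return TOKEN_TREE.from_ast(["(", ["foo", ",", "bar"], ")"])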
def negative(self, ast):
    # Translate negation into a negative lookahead followed by a match of any
    # character: !e /./
    neg = model.NegativeLookahead(ast)
    any = model.Pattern('.')
    return model.Sequence(AST(sequence=[neg, any]))
def sequence(self, ast, *args):
    seq = ast.sequence
    assert isinstance(seq, list), str(seq)
    if len(seq) == 1:
        return seq[0]
    return grammars.Sequence(ast)
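# Hedged wiring sketch, not part of the original file: these callbacks are only
# invoked once they are attached to a TatSu parse. `GRAMMAR_EBNF` (the TatSu
# grammar for the source notation) and `GrammarSemantics` (the class carrying
# elements/negative/sequence above) are assumed names, not shown here.
def _example_translate(source_text, GRAMMAR_EBNF, GrammarSemantics):
    import tatsu

    # Compile the TatSu grammar, then parse the input while routing every
    # matched rule through the semantics callbacks defined above.
    parser = tatsu.compile(GRAMMAR_EBNF)
    return parser.parse(source_text, semantics=GrammarSemantics())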
def node_to_tatsu(node: gll_grammar.RuleNode):
    """Convert a GLL grammar node into the equivalent TatSu grammar expression."""
    match node:
        case gll_grammar.Empty():
            return tatsu_grammars.EmptyClosure()
        case gll_grammar.LabeledNode(name, item):
            return tatsu_grammars.Named(AST(name=name, exp=node_to_tatsu(item)))
        case gll_grammar.StringLiteral(s):
            return tatsu_grammars.Token(ast=s)
        case gll_grammar.CharacterRange(_, _):
            raise NotImplementedError("character ranges")
        case gll_grammar.SymbolName(name):
            return tatsu_grammars.RuleRef(ast=name)
        case gll_grammar.Concatenation(items):
            return tatsu_grammars.Sequence(
                ast=AST(sequence=list(map(node_to_tatsu, items)))
            )
        case gll_grammar.Alternation(items):
            return tatsu_grammars.Choice(ast=list(map(node_to_tatsu, items)))
        case gll_grammar.Option(item):
            return tatsu_grammars.Optional(exp=node_to_tatsu(item))
        case gll_grammar.Repeated(False, item, separator=None, allow_trailing=False):
            return tatsu_grammars.Closure(exp=node_to_tatsu(item))
        case gll_grammar.Repeated(True, item, separator=None, allow_trailing=False):
            return tatsu_grammars.PositiveClosure(exp=node_to_tatsu(item))
        case gll_grammar.Repeated(False, item, separator, allow_trailing=False):
            return tatsu_grammars.Join(
                ast=AST(exp=node_to_tatsu(item), sep=tatsu_grammars.Token(ast=separator))
            )
        case gll_grammar.Repeated(True, item, separator, allow_trailing=False):
            return tatsu_grammars.PositiveJoin(
                ast=AST(exp=node_to_tatsu(item), sep=tatsu_grammars.Token(ast=separator))
            )
        case gll_grammar.Repeated(False, item, separator, allow_trailing=True):
            # Zero or more items with an optional trailing separator:
            # { item separator } [ item ]
            item = node_to_tatsu(item)
            separator = tatsu_grammars.Token(ast=separator)
            return tatsu_grammars.Sequence(
                ast=AST(
                    sequence=[
                        tatsu_grammars.Closure(
                            exp=tatsu_grammars.Sequence(
                                ast=AST(
                                    sequence=[
                                        item,
                                        separator,
                                    ]
                                )
                            ),
                        ),
                        tatsu_grammars.Optional(exp=item),
                    ]
                )
            )
        case gll_grammar.Repeated(True, item, separator, allow_trailing=True):
            # One or more items with an optional trailing separator:
            # item { separator item } [ separator ]
            item = node_to_tatsu(item)
            separator = tatsu_grammars.Token(ast=separator)
            return tatsu_grammars.Sequence(
                ast=AST(
                    sequence=[
                        item,
                        tatsu_grammars.Closure(
                            exp=tatsu_grammars.Sequence(
                                ast=AST(
                                    sequence=[
                                        separator,
                                        item,
                                    ]
                                )
                            ),
                        ),
                        tatsu_grammars.Optional(exp=separator),
                    ]
                )
            )
        case _:
            # should be unreachable
            assert False, repr(node)
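# Hedged usage sketch for node_to_tatsu, not part of the original file. The
# constructor signature of gll_grammar.Repeated is inferred from the match
# patterns above (one_or_more, item, separator, allow_trailing) and is an
# assumption, not confirmed by this excerpt.
def _example_comma_separated_list():
    # Zero or more TOKEN_TREEs separated by commas, no trailing comma allowed;
    # expected to translate to tatsu_grammars.Join(exp=..., sep=',').
    node = gll_grammar.Repeated(
        False,
        gll_grammar.SymbolName("TOKEN_TREE"),
        separator=",",
        allow_trailing=False,
    )
    return node_to_tatsu(node)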