Example #1
python_grammar = Grammar('main_rule')
P = python_grammar

python_grammar.add_rules(
    name=Name(L.Identifier(keep=True)),
    number=NumberLit(L.Number(keep=True)),
    string=StringLit(L.String(keep=True)),
    cat_string=ConcatStringLit(P.string, List(P.string)),
    nl=NL(L.Newline()),
    main_rule=FileNode(List(newlines(), P.stmt, newlines()), L.Termination()),
    decorator=Decorator('@', P.dotted_name, Opt('(', P.arg_list, ')'),
                        L.Newline()),
    decorators=List(P.decorator),
    decorated=Decorated(P.decorators, Or(P.class_def, P.func_def)),
    func_def=FuncDef('def', P.name, P.parameters, ':', P.suite),
    parameters=Pick('(', Opt(P.varargslist), ')'),
    varargslist=Params(
        List(
            SingleParam(
                Opt('*').as_bool(VarArgsFlag), Opt('**').as_bool(KwArgsFlag),
                P.fpdef, Opt('=', P.test)
            ),
            empty_valid=True, sep=","
        ),
    ),
    fpdef=Or(P.name, Pick('(', P.name_list, ')')),
    name_list=TrailList(P.name, sep=','),
    stmt=Or(P.simple_stmt, P.compound_stmt),
    simple_stmt=Pick(Or(P.small_stmt, TrailList(P.small_stmt, sep=';')),
                     L.Newline()),
    small_stmt=(P.expr_stmt | P.print_stmt | P.del_stmt | P.pass_stmt
                | P.flow_stmt
                | P.import_stmt | P.global_stmt | P.exec_stmt | P.assert_stmt),
    expr_stmt=Or(
Example #2
def TrailList(el, sep, empty_valid=False):
    return Pick(List(el, sep=sep, empty_valid=empty_valid), Opt(sep))
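
For reference, the helper just tolerates a dangling separator: Example #1's name_list=TrailList(P.name, sep=',') rule is equivalent to the hand-written fragment below (illustrative sketch only, with P being that example's grammar object).

# Equivalent expansion of Example #1's name_list rule: a trailing comma,
# if present, is matched by Opt(',') and discarded by Pick, so "a, b"
# and "a, b," both yield the same list.
name_list=Pick(List(P.name, sep=','), Opt(',')),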
Example #3
 pattern=Or(FilteredPattern(G.chained_pattern, "when", G.expr),
            G.chained_pattern),
 chained_pattern=Or(
     ChainedNodePattern(
         G.binding_pattern,
         List(
             Or(SelectorLink(G.selector_call, "is", G.binding_pattern),
                FieldLink(".", G.id, "=", G.binding_pattern),
                PropertyLink(".", G.fun_call, "is", G.binding_pattern)))),
     G.binding_pattern),
 binding_pattern=Or(BindingPattern(G.id, "@", G.value_pattern),
                    G.value_pattern),
 value_pattern=Or(
     ExtendedNodePattern(
         Or(UniversalPattern("_"), NodeKindPattern(G.kind_name)),
         Pick("(", c(), List(G.pattern_arg, sep=","), ")")),
     NodeKindPattern(G.kind_name), UniversalPattern("_")),
 pattern_arg=Or(
     NodePatternSelector(
         SelectorCall(
             G.id, Opt(Pick(G.id, "@")), G.id,
             Opt("(", c(), List(G.named_arg, sep=",", empty_valid=False),
                 ")")), "is", G.pattern),
     NodePatternField(G.id, "=", c(), G.detail_value),
     NodePatternProperty(G.fun_call, "is", c(), G.detail_value)),
 detail_value=Or(DetailPattern(G.pattern), DetailExpr(G.expr)),
 selector_call=SelectorCall(
     G.id, Opt(Pick(G.id, "@")), G.id,
     Opt("(", c(), List(G.named_arg, sep=",", empty_valid=False), ")")),
 listcomp=ListComprehension(
     "[", G.expr, "for",
Example #4
        reference(Self.name.cast(FooNode).to_array,
                  through=Name.designated_env))


class Ref(FooNode):
    name = Field()

    @langkit_property(public=True)
    def entity():
        return Self.as_entity.name.entity


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(
    main_rule=List(foo_grammar.block),
    name=Name(Tok(Token.Identifier, keep=True)),
    block=Block(foo_grammar.name, foo_grammar.decl_list, Tok(Token.LBrace),
                foo_grammar.using_list, foo_grammar.ref_list,
                Tok(Token.RBrace)),
    decl_list=Pick(Tok(Token.LPar), List(foo_grammar.decl, empty_valid=True),
                   Tok(Token.RPar)),
    using_list=Pick(Tok(Token.LPar), List(foo_grammar.using, empty_valid=True),
                    Tok(Token.RPar)),
    ref_list=List(foo_grammar.ref, empty_valid=True),
    decl=Decl(foo_grammar.name),
    using=Using(foo_grammar.name),
    ref=Ref(foo_grammar.name),
)
build_and_run(foo_grammar, 'main.py')
print('Done')
Example #5
    visible=Visible('visible', 'if', G.expr),
    depends=Depends('depends', 'on', G.expr),
    range=Range('range', G.value, G.value, G.opt_condition),
    comment=Comment('comment', G.string_literal),

    # Misc options

    # TODO: Here we could add handling of custom (user defined) options that
    # would provide additional features for GPR files.
    # For instance: option source_dir="src/some_feature"
    option=Option(
        'option',
        OptionName(
            Or(Token.OptDefConfigList, Token.OptModules, Token.OptEnv,
               Token.OptAllNoConfY)), Opt('=', G.string_literal)),
    source=Source('source', G.string_literal),

    # Expressions
    expr=Or(
        Pick('(', G.expr, ')'),
        BinaryExpr(
            G.expr,
            Or(Operator.alt_equal_op('='), Operator.alt_diff_op('!='),
               Operator.alt_and_op('&&'), Operator.alt_or_op('||')), G.expr),
        G.identifier,
        G.bool_literal,
        G.tristate_literal,
        NotExpr('!', G.expr),
    ),
)
Example #6
    env_element = Property(Self.children_env.get(Self.tok.symbol).at(0))
    deref_env_element = Property(Self.env_element.null_node, public=True)
    match_env_element = Property(
        Self.env_element.match(
            lambda l=T.Literal.entity: l,
            lambda n=T.Name.entity: n,
            lambda others: others,
        ),
        public=True,
    )


class Plus(Expression):
    left = Field()
    right = Field()


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(
    main_rule=foo_grammar.expression,
    expression=Or(
        Pick('(', foo_grammar.expression, ')'),
        Plus(foo_grammar.atom, '+', foo_grammar.main_rule),
        foo_grammar.atom,
    ),
    atom=Or(
        Literal(Tok(Token.Number, keep=True)),
        Name(Tok(Token.Identifier, keep=True)),
    ),
)
build_and_run(foo_grammar, 'main.py')
print('Done')
Example #7
python_grammar.add_rules(
    name=Id(L.Identifier),
    number=NumberLit(L.Number),
    string=StringLit(L.String),
    cat_string=ConcatStringLit(P.string, List(P.string)),
    nl=NL(L.Newline),
    main_rule=FileNode(
        List(newlines(), P.stmt, newlines()), L.Termination
    ),
    decorator=Decorator(
        '@', P.dotted_name, Opt('(', P.arg_list, ')'), L.Newline
    ),
    decorators=List(P.decorator),
    decorated=Decorated(P.decorators, Or(P.class_def, P.func_def)),
    func_def=FuncDef('def', P.name, P.parameters, ':', P.suite),
    parameters=Pick('(', Opt(P.varargslist), ')'),
    varargslist=Params(
        List(
            SingleParam(
                Opt('*').as_bool(VarArgsFlag), Opt('**').as_bool(KwArgsFlag),
                P.fpdef, Opt('=', P.test)
            ),
            empty_valid=True, sep=","
        ),
    ),
    fpdef=Or(P.name, Pick('(', P.name_list, ')')),
    name_list=TrailList(P.name, sep=','),
    stmt=Or(P.simple_stmt, P.compound_stmt),
    simple_stmt=Pick(Or(P.small_stmt, TrailList(P.small_stmt, sep=';')),
                     L.Newline),
    small_stmt=(
Example #8
        add_to_env(New(T.env_assoc, key=Self.name.suffix_symbol, val=Self)),
        add_env(),
    )


class Dep(FooNode):
    name = Field()

    env_spec = EnvSpec(do(Self.name.referenced_unit))


class Var(FooNode):
    name = Field()
    value = Field()

    env_spec = EnvSpec(
        add_to_env(New(T.env_assoc, key=Self.name.symbol, val=Self)),
    )


G = Grammar('scope')
G.add_rules(
    deps=List(Pick(Dep(Pick('+', G.name)), ';'), empty_valid=True),
    defs=List(Pick(Or(G.scope, G.var), ';'), empty_valid=True),
    scope=Scope(G.name, '{', G.deps, G.defs, '}'),
    var=Var(G.id, '=', G.name),
    id=Id(Token.Identifier),
    name=Or(Prefix(G.name, '.', G.id), G.id),
)
build_and_run(G, ada_main='main.adb')
print('Done')
Example #9
        add_to_env(mappings=New(T.env_assoc, key=Self.name.symbol, val=Self)),
        add_env(),
        reference(
            Self.imports.map(lambda i: i.cast(T.FooNode)),

            # If PropertyDef rewriting omits the following references,
            # env lookup will never reach DerivedRef.referenced_env, so
            # resolution will sometimes fail to reach the definition.
            T.MiddleRef.referenced_env))


grammar = Grammar('main_rule')
grammar.add_rules(
    name=Name(Token.Identifier),
    main_rule=List(
        Or(
            Def('def', grammar.name, grammar.imports, grammar.vars,
                grammar.expr), grammar.expr)),
    imports=Pick('(', List(grammar.derived_ref, empty_valid=True), ')'),
    var=Var(grammar.name, '=', grammar.expr),
    vars=Pick('{', List(grammar.var, empty_valid=True), '}'),
    expr=Or(grammar.atom, grammar.plus),
    atom=Or(grammar.lit, grammar.ref),
    lit=Lit(Token.Number),
    ref=Ref(grammar.name),
    derived_ref=DerivedRef(grammar.name),
    plus=Pick('(', Plus(grammar.expr, '+', grammar.expr), ')'),
)
build_and_run(grammar, 'main.py')
print('Done')
Example #10
class Using(FooNode):
    name = Field()
    env_spec = EnvSpec(
        reference(Self.name.cast(FooNode).to_array,
                  through=Name.designated_env))


class Ref(FooNode):
    name = Field()

    @langkit_property(public=True)
    def entity():
        return Self.as_entity.name.entity


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(
    main_rule=List(foo_grammar.block),
    name=Name(Token.Identifier),
    block=Block(foo_grammar.name, foo_grammar.decl_list, '{',
                foo_grammar.using_list, foo_grammar.ref_list, '}'),
    decl_list=Pick('(', List(foo_grammar.decl, empty_valid=True), ')'),
    using_list=Pick('(', List(foo_grammar.using, empty_valid=True), ')'),
    ref_list=List(foo_grammar.ref, empty_valid=True),
    decl=Decl(foo_grammar.name),
    using=Using(foo_grammar.name),
    ref=Ref(foo_grammar.name),
)
build_and_run(foo_grammar, 'main.py')
print('Done')
Example #11
    @langkit_property(public=True)
    def evaluate_concrete():
        return Self.lhs.evaluate_concrete + Self.rhs.evaluate_concrete

    @langkit_property(public=True)
    def evaluate_entity():
        return Entity.lhs.evaluate_entity + Entity.rhs.evaluate_entity


class Def(FooNode):
    name = Field()
    expr = Field()

    env_spec = EnvSpec(
        add_to_env(mappings=New(T.env_assoc, key=Self.name.symbol, val=Self)))


grammar = Grammar('main_rule')
grammar.add_rules(
    main_rule=List(
        Or(Def('def', Tok(Token.Identifier, keep=True), grammar.expr),
           grammar.expr)),
    expr=Or(grammar.atom, grammar.plus),
    atom=Or(grammar.lit, grammar.ref),
    lit=Lit(Tok(Token.Number, keep=True)),
    ref=Ref(Tok(Token.Identifier, keep=True)),
    plus=Pick('(', Plus(grammar.expr, '+', grammar.expr), ')'),
)
build_and_run(grammar, 'main.py')
print('Done')
Example #12
    var_type_name2 = Field(type=T.Identifier)


class ProjectReference(GPRNode):
    attr_ref = Field(type=T.AttributeReference)


A.add_rules(
    identifier=Identifier(Token.Identifier),
    string_literal=StringLiteral(Token.String),
    num_literal=NumLiteral(Token.Number),
    static_name=Or(Prefix(A.static_name, ".", A.identifier), A.identifier),
    # ----------------------------------------------------------------
    attribute_reference=AttributeReference(
        A.identifier,
        Opt(Pick("(", Or(A.others_designator, A.string_literal), ")")),
    ),
    variable_reference=VariableReference(
        A.identifier,
        Opt(Pick(".", A.identifier)),
        Opt(Pick(".", A.identifier)),
        Opt(Pick("'", A.attribute_reference)),
    ),
    type_reference=TypeReference(
        A.identifier,
        Opt(Pick(".", A.identifier)),
    ),
    builtin_function_call=BuiltinFunctionCall(A.identifier, A.expression_list),
    # ----------------------------------------------------------------
    expression=List(A.term, sep="&", list_cls=TermList),
    expression_list=Terms("(", List(A.expression, sep=",", empty_valid=True),
Example #13
# Langkit imports needed by this snippet
from langkit.dsl import ASTNode, has_abstract_list
from langkit.parsers import Grammar, List, Or, Pick

from lexer_example import Token
from utils import build_and_run


@has_abstract_list
class FooNode(ASTNode):
    pass


class Sequence(FooNode.list):
    pass


class Atom(FooNode):
    token_node = True


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(
    main_rule=foo_grammar.element,
    element=Or(foo_grammar.sequence, foo_grammar.atom),
    sequence=Pick(
        '(', List(foo_grammar.element, list_cls=Sequence, empty_valid=True),
        ')'),
    atom=Atom(Token.Identifier),
)

build_and_run(foo_grammar, 'main.py')

print('Done')
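
For context, a few inputs this grammar accepts, assuming lexer_example's Identifier token matches bare names (illustrative only):

# main_rule is a single element, an element is either an atom or a
# parenthesized sequence, and sequences may be empty (empty_valid=True):
#   a
#   (a b c)
#   (a (b c) ())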
Example #14
def end_liblevel_block():
    return Pick("end", Opt(EndName(A.static_name)))
Example #15
 project=Project(
     A.context_clauses,
     A.project_declaration,
 ),
 # ----------------------------------------------- declarative items
 declarative_items=List(A.declarative_item, empty_valid=True),
 declarative_item=Or(A.simple_declarative_item, A.typed_string_decl,
                     A.package_decl),
 simple_declarative_items=List(A.simple_declarative_item, empty_valid=True),
 simple_declarative_item=Or(
     A.variable_decl,
     A.attribute_decl,
     A.case_construction,
     A.empty_declaration,
 ),
 variable_decl=VariableDecl(A.identifier, Opt(Pick(":", A.type_reference)),
                            ":=", A.expression, ";"),
 attribute_decl=AttributeDecl(
     "for",
     A.identifier,
     Opt(Pick("(", A.associative_array_index, ")")),
     "use",
     A.expression,
     ";",
 ),
 associative_array_index=Or(A.others_designator, A.string_literal_at),
 package_decl=PackageDecl("package", A.identifier,
                          Or(A.package_renaming, A.package_spec), ";"),
 package_renaming=PackageRenaming("renames", List(A.identifier, sep=".")),
 package_extension=PackageExtension("extends", List(A.identifier, sep=".")),
 package_spec=PackageSpec(
Example #16
def end_named_block():
    return Pick("end", Opt(EndName(A.identifier)))
Example #17
from __future__ import absolute_import, division, print_function

from langkit.dsl import ASTNode, Field
from langkit.envs import EnvSpec, add_env, add_to_env
from langkit.expressions import Self
from langkit.parsers import Grammar, List, Pick

from lexer_example import Token
from utils import emit_and_print_errors


class FooNode(ASTNode):
    pass


class Def(FooNode):
    name = Field()
    body = Field()
    env_spec = EnvSpec(add_env(), add_to_env(Self.name, Self))


grammar = Grammar('stmt_rule')
grammar.add_rules(
    def_rule=Def(Token.Identifier, '(', grammar.stmt_rule, ')'),
    stmt_rule=List(grammar.def_rule
                   | Pick('{', List(grammar.stmt_rule, empty_valid=True), '}'),
                   empty_valid=True))
emit_and_print_errors(grammar)

print('Done')
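
For context, the grammar above parses possibly empty sequences of definitions and brace-delimited blocks; a few inputs it accepts, assuming Token.Identifier matches bare names (illustrative only):

#   a()
#   a(b() {c()})
#   {a() {}} b()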
Example #18
    def lower(rule):
        """
        Helper to lower one parser.

        :param liblktlang.GrammarExpr rule: Grammar rule to lower.
        :rtype: Parser
        """
        # For convenience, accept null input rules, as we generally want to
        # forward them as-is to the lower level parsing machinery.
        if rule is None:
            return None

        loc = ctx.lkt_loc(rule)
        with ctx.lkt_context(rule):
            if isinstance(rule, liblktlang.ParseNodeExpr):
                node = resolve_node_ref(rule.f_node_name)

                # Lower the subparsers
                subparsers = [lower(subparser)
                              for subparser in rule.f_sub_exprs]

                # Qualifier nodes are a special case: we produce one subclass
                # or the other depending on whether the subparsers accept the
                # input.
                if node._type.is_bool_node:
                    return Opt(*subparsers, location=loc).as_bool(node)

                # Likewise for enum nodes
                elif node._type.base and node._type.base.is_enum_node:
                    return _Transform(_Row(*subparsers, location=loc),
                                      node.type_ref,
                                      location=loc)

                # For other nodes, always create the node when the subparsers
                # accept the input.
                else:
                    return _Transform(parser=_Row(*subparsers), typ=node,
                                      location=loc)

            elif isinstance(rule, liblktlang.GrammarToken):
                token_name = rule.f_token_name.text
                try:
                    val = tokens[token_name]
                except KeyError:
                    check_source_language(
                        False, 'Unknown token: {}'.format(token_name)
                    )

                match_text = ''
                if rule.f_expr:
                    # The grammar is supposed to maintain this invariant
                    assert isinstance(rule.f_expr, liblktlang.TokenLit)
                    match_text = denoted_string_literal(rule.f_expr)

                return _Token(val=val, match_text=match_text, location=loc)

            elif isinstance(rule, liblktlang.TokenLit):
                return _Token(denoted_string_literal(rule), location=loc)

            elif isinstance(rule, liblktlang.GrammarList):
                return List(lower(rule.f_expr),
                            empty_valid=rule.f_kind.text == '*',
                            list_cls=resolve_node_ref(rule.f_list_type),
                            sep=lower(rule.f_sep),
                            location=loc)

            elif isinstance(rule, (liblktlang.GrammarImplicitPick,
                                   liblktlang.GrammarPick)):
                return Pick(*[lower(subparser) for subparser in rule.f_exprs],
                            location=loc)

            elif isinstance(rule, liblktlang.GrammarRuleRef):
                return getattr(grammar, rule.f_node_name.text)

            elif isinstance(rule, liblktlang.GrammarOrExpr):
                return Or(*[lower(subparser)
                            for subparser in rule.f_sub_exprs],
                          location=loc)

            elif isinstance(rule, liblktlang.GrammarOpt):
                return Opt(lower(rule.f_expr), location=loc)

            elif isinstance(rule, liblktlang.GrammarOptGroup):
                return Opt(*[lower(subparser) for subparser in rule.f_expr],
                           location=loc)

            elif isinstance(rule, liblktlang.GrammarExprList):
                return Pick(*[lower(subparser) for subparser in rule],
                            location=loc)

            elif isinstance(rule, liblktlang.GrammarDiscard):
                return Discard(lower(rule.f_expr), location=loc)

            elif isinstance(rule, liblktlang.GrammarNull):
                return Null(resolve_node_ref(rule.f_name), location=loc)

            elif isinstance(rule, liblktlang.GrammarSkip):
                return Skip(resolve_node_ref(rule.f_name), location=loc)

            elif isinstance(rule, liblktlang.GrammarDontSkip):
                return DontSkip(lower(rule.f_expr),
                                lower(rule.f_dont_skip),
                                location=loc)

            elif isinstance(rule, liblktlang.GrammarPredicate):
                check_source_language(
                    isinstance(rule.f_prop_ref, liblktlang.DotExpr),
                    'Invalid property reference'
                )
                node = resolve_node_ref(rule.f_prop_ref.f_prefix)
                prop_name = rule.f_prop_ref.f_suffix.text
                try:
                    prop = getattr(node, prop_name)
                except AttributeError:
                    check_source_language(
                        False,
                        '{} has no {} property'
                        .format(node._name.camel_with_underscores,
                                prop_name)
                    )
                return Predicate(lower(rule.f_expr), prop, location=loc)

            else:
                raise NotImplementedError('unhandled parser: {}'.format(rule))
Example #19

class Ref(FooNode):
    name = Field(type=Name)

    @langkit_property(public=True)
    def referenced():
        return Self.referenced_env.env_node.as_bare_entity

    @langkit_property(memoized=True)
    def referenced_env():
        return Self.node_env.get(Self.name.symbol).at(0).children_env


class Block(FooNode):
    name = Field(type=Name)
    content = Field(type=Ref.list)

    env_spec = EnvSpec(
        add_env(),
        add_to_env(T.env_assoc.new(key=Self.name.symbol, val=Self),
                   dest_env=Self.node_env),
    )


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(main_rule=Block(
    Name(Token.Identifier), Pick('(', List(Ref(Name(Token.Identifier))), ')')))
build_and_run(foo_grammar, 'main.py')
print('Done')
Example #20
        add_to_env(mappings=New(T.env_assoc, key=Self.name, val=Self)),
        add_env())


class Param(Def):
    tok = Field(type=T.TokenType)
    name = Property(Self.tok.symbol)


class Params(Param.list):
    env_spec = EnvSpec(add_env())


class BlockVar(Def):
    tok = Field(type=T.TokenType)
    name = Property(Self.tok.symbol)


grammar = Grammar('main_rule')
grammar.add_rules(
    main_rule=List(grammar.block),
    block=Block(grammar.params, Tok(Token.Identifier, keep=True),
                grammar.vars),
    params=Pick('(', List(grammar.param, list_cls=Params), ')'),
    param=Param(Tok(Token.Identifier, keep=True)),
    vars=Pick('{', List(grammar.var), '}'),
    var=BlockVar(Tok(Token.Identifier, keep=True)),
)
build_and_run(grammar, 'main.py')
print('Done')
Example #21
    Nodes.Root = Root
    Nodes.Identifier = Identifier
    Nodes.Number = Number

    g = Grammar('main_rule')
    g.add_rules(**{name: parser_fn(Nodes, g)
                   for name, parser_fn in kwargs.items()})
    emit_and_print_errors(g, generate_unparser=True)
    print('')


run(
    'Pick in Or',
    main_rule=lambda T, g: T.Root(
        Or(
            Pick('example', T.Identifier(Token.Identifier)),
            T.Number(Token.Number)
        )
    )
)
run(
    'Toplevel Pick',
    main_rule=lambda T, g: Pick('example', T.Root(g.item)),
    item=lambda T, g: Or(T.Identifier(Token.Identifier),
                         T.Number(Token.Number)),
)
run(
    'Several token kinds for token node (1)',
    main_rule=lambda T, g: T.Root(
        Or(
            T.Identifier(Token.Identifier() | Token.Example()),
Example #22
            Skip(ErrorDecl),
        )),
    compilation_unit=CompilationUnit(
        List(A.context_item, empty_valid=True),
        A.subunit | A.library_item,

        # Optional pragmas attached to the body
        List(A.pragma, empty_valid=True)),

    # This is the main rule. The root node will then be either:
    # * A CompilationUnit node.
    # * A list of CompilationUnit nodes.
    # * A list of pragmas.
    compilation=Or(
        # Special case for No_Body files and gnat.adc
        Pick(List(A.pragma, empty_valid=False), L.Termination),

        # One compilation unit case
        Pick(A.compilation_unit, L.Termination),

        # Several compilation units case
        Pick(List(A.compilation_unit, empty_valid=True), L.Termination),
    ),
    decl_part=DeclarativePart(A.basic_decls),
    entry_body=EntryBody(
        "entry", A.defining_id,
        Opt(
            EntryIndexSpec("(", "for", A.defining_id,
                           "in", A.discrete_subtype_definition,
                           Opt(A.aspect_spec), ")")),
        EntryCompletionFormalParams(Opt(A.param_specs)), A.aspect_spec, "when",
Example #23
    @langkit_property(public=True)
    def referenced():
        return Self.referenced_env.env_node.as_bare_entity

    @langkit_property(memoized=True)
    def referenced_env():
        return Self.node_env.get(Self.name).at(0).children_env


class Block(FooNode):
    name = Field(type=TokenType)
    content = Field(type=Ref.list)

    env_spec = EnvSpec(
        add_env(),
        add_to_env(T.env_assoc.new(key=Self.name.symbol, val=Self),
                   dest_env=Self.node_env),
    )


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(
    main_rule=Block(
        Tok(Token.Identifier, keep=True),
        Pick('(', List(Ref(Tok(Token.Identifier, keep=True))), ')')
    )
)
build_and_run(foo_grammar, 'main.py')
print('Done')