def l_id(text): return Lex.Identifier(text) A.add_rules( project_qualifier=Or( ProjectQualifier.alt_abstract("abstract"), ProjectQualifier.alt_library(l_id("library")), ProjectQualifier.alt_aggregate_library(l_id("aggregate"), l_id("library")), ProjectQualifier.alt_aggregate(l_id("aggregate")), ProjectQualifier.alt_configuration(l_id("configuration")), ProjectQualifier.alt_standard(l_id("standard")), ), project_extension=ProjectExtension("extends", Opt("all").as_bool(AllQualifier), A.string_literal), project_declaration=ProjectDeclaration( Opt(A.project_qualifier), l_id("project"), A.static_name, Opt(A.project_extension), "is", A.declarative_items, "end", A.static_name, ";", ), project=Project( A.context_clauses, A.project_declaration,
# Langkit test script: node hierarchy + grammar, then error emission.
@abstract
class Stmt(FooNode):
    pass


class Def(Stmt):
    id = Field()
    body = Field()
    name = Property(Self.id)
    # Register this node under its identifier's symbol, and open a new
    # lexical environment for its body.
    env_spec = EnvSpec(add_to_env(Self.id.symbol, Self), add_env())
    # NOTE(review): references a private generated name; presumably this
    # is deliberately invalid since the script ends with
    # emit_and_print_errors — confirm against the test's expected output.
    faulty_prop = Property(Self._env_mappings_0)


class Block(Stmt):
    items = Field()
    env_spec = EnvSpec(add_env())


grammar = Grammar('stmts_rule')
grammar.add_rules(
    # A definition: identifier, then an optional parenthesized body.
    def_rule=Def(Tok(Token.Identifier, keep=True),
                 Opt('(', grammar.stmts_rule, ')')),
    # Either a definition or a braced block of statements.
    stmt_rule=(grammar.def_rule
               | Block('{', List(grammar.stmt_rule, empty_valid=True), '}')),
    stmts_rule=List(grammar.stmt_rule))
emit_and_print_errors(grammar)
print('Done')
python_grammar = Grammar('main_rule') P = python_grammar python_grammar.add_rules( name=Id(L.Identifier), number=NumberLit(L.Number), string=StringLit(L.String), cat_string=ConcatStringLit(P.string, List(P.string)), nl=NL(L.Newline), main_rule=FileNode( List(newlines(), P.stmt, newlines()), L.Termination ), decorator=Decorator( '@', P.dotted_name, Opt('(', P.arg_list, ')'), L.Newline ), decorators=List(P.decorator), decorated=Decorated(P.decorators, Or(P.class_def, P.func_def)), func_def=FuncDef('def', P.name, P.parameters, ':', P.suite), parameters=Pick('(', Opt(P.varargslist), ')'), varargslist=Params( List( SingleParam( Opt('*').as_bool(VarArgsFlag), Opt('**').as_bool(KwArgsFlag), P.fpdef, Opt('=', P.test) ), empty_valid=True, sep="," ), ), fpdef=Or(P.name, Pick('(', P.name_list, ')')),
return Opt(*rules).error() def end_liblevel_block(): return Pick("end", Opt(EndName(A.static_name))) def end_named_block(): return Pick("end", Opt(EndName(A.identifier))) A.add_rules( parent_list=List(A.static_name, sep="and", list_cls=ParentList), protected_type_decl=ProtectedTypeDecl(res("protected"), "type", A.defining_id, Opt(A.discriminant_part), A.aspect_spec, "is", Opt("new", A.parent_list, "with"), A.protected_def, sc()), protected_op=Or(A.subp_decl, A.entry_decl, A.aspect_clause, A.pragma), protected_el=Or(A.protected_op, A.component_decl), protected_def=ProtectedDef( PublicPart(List(A.protected_op, empty_valid=True, list_cls=DeclList)), Opt( "private", PrivatePart( List(A.protected_el, empty_valid=True, list_cls=DeclList))), end_named_block()), protected_decl=SingleProtectedDecl(res("protected"), A.defining_id, A.aspect_spec, "is", Opt("new", A.parent_list,
def end_liblevel_block():
    """Parser for the ``end`` marker that closes a library-level block.

    The trailing name is optional: ``Opt`` lets the ``EndName`` wrapper
    around the static name match empty input.
    """
    optional_end_name = Opt(EndName(A.static_name))
    return Pick("end", optional_end_name)
var_type_name2 = Field() class ProjectReference(GPRNode): attr_ref = Field() A.add_rules( identifier=Identifier(Token.Identifier), string_literal=StringLiteral(Token.String), num_literal=NumLiteral(Token.Number), static_name=Or(Prefix(A.static_name, '.', A.identifier), A.identifier), # ---------------------------------------------------------------- attribute_reference=AttributeReference( A.identifier, Opt(Pick("(", Or(A.others_designator, A.string_literal), ")"))), variable_reference=VariableReference(A.identifier, Opt(Pick(".", A.identifier)), Opt(Pick(".", A.identifier)), Opt(Pick("'", A.attribute_reference))), type_reference=TypeReference( A.identifier, Opt(Pick(".", A.identifier)), ), builtin_function_call=BuiltinFunctionCall(A.identifier, A.expression_list), # ---------------------------------------------------------------- expression=List(A.term, sep="&", list_cls=TermList), expression_list=ExprList("(", List(A.expression, sep=",", empty_valid=True), ")"), string_literal_at=StringLiteralAt(A.string_literal,
def end_named_block():
    """Parser for the ``end`` marker of a named block.

    Delegates to ``recover`` so that a missing marker is diagnosed but
    parsing continues; the trailing identifier is optional.
    """
    trailing_id = Opt(A.identifier)
    return recover("end", trailing_id)
# NOTE(review): this chunk starts inside a class whose header is outside
# the excerpt (presumably the ``Decl`` node used in the grammar below).
    # Map the node's name to itself, tagging the metadata with the
    # has_plus qualifier.
    env_spec = EnvSpec(
        add_to_env(mappings=New(T.env_assoc, key=Self.name.symbol, val=Self),
                   metadata=New(Metadata, b=Self.has_plus.as_bool))
    )

    @langkit_property(public=True, return_type=T.Ref.entity.array)
    def entity_items():
        # Identity map: returns the items as entities.
        return Self.as_entity.items.map(lambda i: i)


class Ref(FooNode):
    name = Field()

    @langkit_property(public=True, return_type=Decl.entity)
    def decl():
        # Env lookup by name; raises if the result is not a Decl.
        return Self.children_env.get(Self.name).at(0).cast_or_raise(Decl)


fg = Grammar('main_rule')
fg.add_rules(
    main_rule=List(fg.decl),
    # "+" qualifier, name, then a parenthesized reference list.
    decl=Decl(Opt('+').as_bool(HasPlus), Name(Token.Identifier),
              '(', fg.ref_list, ')'),
    ref_list=List(fg.ref, empty_valid=True),
    ref=Ref(Name(Token.Identifier)),
)
build_and_run(fg, 'main.py')
print('Done')
class BinaryExpr(Expression): lhs = Field() op = Field() rhs = Field() kconfig_grammar = Grammar('main_rule') G = kconfig_grammar kconfig_grammar.add_rules( # Main rule main_rule=RootNode(Opt(G.mainmenu), G.block), # Block rule block=List(Or(G.config, G.menuconfig, G.source, G.menu, G.comment, G.if_rule, G.choice), empty_valid=True), # Config config=Config('config', G.identifier, G.config_options), config_options=List( Or(G.type, G.prompt, G.default, G.depends, G.select, G.imply, G.help, G.range, G.comment, G.def_bool, G.def_tristate, G.option)), config_list=List(G.config, empty_valid=True), # Menuconfig menuconfig=MenuConfig('menuconfig', G.identifier, G.menuconfig_options),
# NOTE(review): chunk starts mid-class — ``defs``/``env_spec`` belong to an
# enclosing node class (presumably ``Scope``) whose header is not visible.
    defs = Field()
    env_spec = EnvSpec(
        add_to_env(New(T.env_assoc, key=Self.name.symbol, val=Self)),
        add_env(),
        # Raise a PropertyError during env population when the node
        # carries the "error" qualifier.
        do(If(Self.error.as_bool, PropertyError(T.FooNode), No(T.FooNode))),
    )


class Var(DefNode):
    name = Field()
    value = Field()
    env_spec = EnvSpec(
        add_to_env(New(T.env_assoc, key=Self.name.symbol, val=Self)),
    )


G = Grammar('main_rule')
G.add_rules(
    main_rule=G.defs,
    defs=List(G.def_rule, empty_valid=True),
    def_rule=Or(G.scope, G.var),
    # Optional "error" keyword, identifier, braced nested definitions.
    scope=Scope(
        Opt('error').as_bool(HasError), Id(Token.Identifier),
        '{', G.defs, '}'),
    var=Var(Id(Token.Identifier), '=', G.name),
    # Dotted names are left-recursive: prefix '.' identifier.
    name=Or(Prefix(G.name, '.', Id(Token.Identifier)),
            Id(Token.Identifier)),
)
build_and_run(G, 'main.py')
print('Done')
# NOTE(review): chunk starts mid-class — the lone ")" closes a construct
# (likely an EnvSpec) begun outside this excerpt.
    )

    # Equality-test properties over env/struct/array views of the node.
    @langkit_property(public=True, return_type=T.BoolType)
    def test_env(other=T.FooNode.entity):
        return Self.children_env.env_orphan == other.children_env.env_orphan

    @langkit_property(public=True, return_type=T.BoolType)
    def test_struct(other=T.FooNode.entity):
        return Self.env_struct == other.env_struct

    @langkit_property(public=True, return_type=T.BoolType)
    def test_array(other=T.FooNode.entity):
        return Self.env_array == other.env_array


class Ref(FooNode):
    name = Field()


fg = Grammar('main_rule')
fg.add_rules(
    main_rule=List(fg.decl),
    # Anonymous boolean qualifier: as_bool() with no node class.
    decl=Decl(
        Opt('+').as_bool(), Tok(Token.Identifier, keep=True),
        '(', fg.ref_list, ')'),
    ref_list=List(fg.ref, empty_valid=True),
    ref=Ref(Tok(Token.Identifier, keep=True)),
)
build_and_run(fg, 'main.py')
print('Done')
field_dont_skip = Field() # Non optional fields field_opt_bool = Field() field_transform = Field() class HasExample(FooNode): enum_node = True qualifier = True foo_grammar = Grammar('main_rule') foo_grammar.add_rules(main_rule=Or(foo_grammar.rule_1, foo_grammar.rule_2), rule_1=ExampleWrapper( Opt(Example("example")), Or(Example("example"), Null(Example)), foo_grammar.sub_rule, Null(Example), DontSkip(Opt(Example("example"))), Opt(Example("example")).as_bool(HasExample), Example("example")), rule_2=ExampleWrapper(Example("example"), Example("example"), Example("example"), Example("example"), Example("example"), HasExample("example"), Example("example")), sub_rule=Opt(Example("example"))) emit_and_print_errors(foo_grammar)
# NOTE(review): the leading ")))" closes constructs begun outside this
# excerpt.
)))


class SelfDecl(FooNode):
    id = Field(type=T.Id)
    md_node = Field(type=T.Id)
    # Key the env entry on the identifier's simple name; the value is the
    # resolved target, and metadata carries the (optionally) resolved
    # md_node.
    env_spec = EnvSpec(
        add_to_env(
            New(T.env_assoc,
                key=Self.id.simple_name.symbol,
                val=Self.id.resolve(Self.node_env)),
            metadata=New(
                T.Metadata,
                node=Self.md_node.then(lambda n: n.resolve(Self.node_env)))))


G = Grammar('main_rule')
G.add_rules(
    main_rule=List(Or(G.scope, G.self_decl, G.foreign_decl)),
    scope=Scope(G.simple_identifier,
                '{', List(G.scope, empty_valid=True), '}'),
    # Dotted identifiers, left-recursive on G.identifier.
    identifier=Or(ScopedId(G.identifier, '.', G.simple_identifier),
                  G.simple_identifier),
    simple_identifier=SimpleId(Token.Identifier),
    foreign_decl=ForeignDecl(G.identifier),
    self_decl=SelfDecl('+', G.identifier, Opt('(', G.identifier, ')')),
)
build_and_run(G, 'main.py')
print('Done')
class Decl(FooNode):
    has_plus = Field()
    name = Field()
    items = Field()
    # Register the declaration under its name; metadata records whether
    # the "+" qualifier was present.
    env_spec = EnvSpec(
        add_to_env(mappings=New(T.env_assoc, key=Self.name.symbol, val=Self),
                   metadata=New(Metadata, b=Self.has_plus.as_bool)))

    @langkit_property(public=True, return_type=T.Ref.entity.array)
    def entity_items():
        # Identity map: expose the items as entities.
        return Self.as_entity.items.map(lambda i: i)


class Ref(FooNode):
    name = Field()


fg = Grammar('main_rule')
fg.add_rules(
    main_rule=List(fg.decl),
    decl=Decl(
        Opt('+').as_bool(HasPlus), Tok(Token.Identifier, keep=True),
        '(', fg.ref_list, ')'),
    ref_list=List(fg.ref, empty_valid=True),
    ref=Ref(Tok(Token.Identifier, keep=True)),
)
# properties_logging turns on property-call tracing in the generated lib.
build_and_run(fg, 'main.py', properties_logging=True)
print('Done')
# NOTE(review): chunk starts mid-class — env_spec/entity_items belong to
# an enclosing node class (presumably ``Decl``).
    # add_to_env_kv: key/value shorthand for a single env mapping.
    env_spec = EnvSpec(
        add_to_env_kv(key=Self.name.symbol, val=Self,
                      metadata=New(Metadata, b=Self.has_plus.as_bool)))

    @langkit_property(public=True, return_type=T.Ref.entity.array)
    def entity_items():
        return Self.as_entity.items.map(lambda i: i)


class Ref(FooNode):
    name = Field()

    @langkit_property(public=True, return_type=Decl.entity)
    def decl():
        # Env lookup by name; raises if the result is not a Decl.
        return Self.children_env.get(Self.name).at(0).cast_or_raise(Decl)


fg = Grammar('main_rule')
fg.add_rules(
    main_rule=List(fg.decl),
    decl=Decl(
        Opt('+').as_bool(HasPlus), Name(Token.Identifier),
        '(', fg.ref_list, ')'),
    ref_list=List(fg.ref, empty_valid=True),
    ref=Ref(Name(Token.Identifier)),
)
build_and_run(fg, 'main.py')
print('Done')
# NOTE(review): the leading tokens are the tail of an EnvSpec (a
# do(If(...)) argument) begun outside this excerpt.
        No(T.FooNode))),
    )


class Var(DefNode):
    name = Field()
    value = Field()
    env_spec = EnvSpec(
        add_to_env_kv(key=Self.name.symbol, val=Self),
    )


G = Grammar('main_rule')
G.add_rules(
    main_rule=G.defs,
    defs=List(G.def_rule, empty_valid=True),
    def_rule=Or(G.scope, G.var),
    scope=Scope(Opt('error').as_bool(HasError), Id(Token.Identifier),
                '{', G.defs, '}'),
    var=Var(Id(Token.Identifier), '=', G.name),
    # Dotted names, left-recursive on G.name.
    name=Or(Prefix(G.name, '.', Id(Token.Identifier)),
            Id(Token.Identifier)),
)
build_and_run(G, 'main.py')
print('Done')
# NOTE(review): chunk starts mid-class — this env_spec belongs to an
# enclosing node class (presumably ``Scope``).
    env_spec = EnvSpec(
        add_to_env(New(T.env_assoc, key=Self.name.symbol, val=Self)),
        add_env(),
        # Raise during env population when the "error" qualifier is set.
        do(If(Self.error.as_bool, PropertyError(T.FooNode), No(T.FooNode))),
    )


class Var(Def):
    name = Field()
    value = Field()
    env_spec = EnvSpec(
        add_to_env(New(T.env_assoc, key=Self.name.symbol, val=Self)),
    )


G = Grammar('main_rule')
G.add_rules(
    main_rule=G.defs,
    defs=List(G.def_rule, empty_valid=True),
    def_rule=Or(G.scope, G.var),
    scope=Scope(
        Opt('error').as_bool(HasError), Tok(Token.Identifier, keep=True),
        '{', G.defs, '}'),
    var=Var(Tok(Token.Identifier, keep=True), '=', G.name),
    # NOTE(review): the Prefix branch takes a bare Tok while the fallback
    # wraps it in Id — possibly intentional for this test; confirm.
    name=Or(Prefix(G.name, '.', Tok(Token.Identifier, keep=True)),
            Id(Tok(Token.Identifier, keep=True))),
)
build_and_run(G, 'main.py')
print('Done')
def create_parser_bool_node(cls, *args):
    """Build the parser for boolean node ``cls``.

    Boolean (qualifier) nodes are parsed as an optional run of
    sub-parsers whose presence or absence is booleanized into the node.
    """
    sub_parsers = Opt(*args)
    return sub_parsers.as_bool(cls)
def end_liblevel_block():
    """Parser for the ``end`` closing a library-level block.

    Uses ``recover`` so the sequence is diagnosed-but-tolerated when
    absent; the trailing static name is optional.
    """
    trailing_name = Opt(A.static_name)
    return recover("end", trailing_name)
class Decl(FooNode):
    has_plus = Field()
    name = Field()
    items = Field()
    # Register the declaration under its name; metadata records whether
    # the "+" qualifier was present.
    env_spec = EnvSpec(
        add_to_env(mappings=New(T.env_assoc, key=Self.name.symbol, val=Self),
                   metadata=New(Metadata, b=Self.has_plus.as_bool)))

    # activate_tracing: emit per-call traces for this property.
    @langkit_property(public=True, return_type=T.Ref.entity.array,
                      activate_tracing=True)
    def entity_items():
        return Self.as_entity.items.map(lambda i: i)


class Ref(FooNode):
    name = Field()


fg = Grammar('main_rule')
fg.add_rules(
    main_rule=List(fg.decl),
    decl=Decl(Opt('+').as_bool(HasPlus), fg.name,
              '(', fg.ref_list, ')'),
    ref_list=List(fg.ref, empty_valid=True),
    ref=Ref(fg.name),
    name=Name(Token.Identifier),
)
build_and_run(fg, 'main.py')
print('Done')
def end_liblevel_block(): return Pick("end", Opt(EndName(A.static_name))) def end_named_block(): return Pick("end", Opt(EndName(A.identifier))) A.add_rules( parent_list=List(A.static_name, sep="and", list_cls=ParentList), protected_type_decl=ProtectedTypeDecl( res("protected"), "type", A.defining_id, Opt(A.discriminant_part), A.aspect_spec, "is", Opt("new", A.parent_list, "with"), A.protected_def, sc() ), protected_op=Or(A.subp_decl, A.entry_decl, A.aspect_clause, A.pragma), protected_el=Or(A.protected_op, A.component_decl), protected_def=ProtectedDef( PublicPart(List(A.protected_op, empty_valid=True, list_cls=DeclList)), Opt("private", PrivatePart(List(A.protected_el, empty_valid=True, list_cls=DeclList))), end_named_block()
class Name(FooNode):
    token_node = True
    sym = Property(Self.symbol, type=T.Symbol)
    # Resolve this name in the parent's node environment; returns the
    # first match (or a null entity if there is none).
    resolve = Property(Self.parent.node_env.get(Self.sym).at(0),
                       type=T.FooNode.entity)


class Def(FooNode):
    name = Field(type=T.Name)
    ref = Field(type=T.Name)
    # Metadata carries the node the optional "ref" resolves to, or a
    # null node when the "+ name" part was not parsed.
    env_spec = EnvSpec(
        add_to_env_kv(key=Self.name.sym, val=Self,
                      metadata=New(Metadata,
                                   node=Self.ref.then(
                                       lambda r: r.resolve.node,
                                       default_val=No(
                                           T.FooNode)))))


grammar = Grammar('main_rule')
grammar.add_rules(main_rule=List(grammar.def_rule),
                  def_rule=Def(grammar.name, Opt('+', grammar.name)),
                  name=Name(Token.Identifier))
build_and_run(grammar, 'main.py')
print('')
print('Done')
def recover(*rules):
    """Parse ``rules`` in sequence, discarding their result.

    The sequence is wrapped as an optional parser marked as an error, so
    a missing sequence is diagnosed while parsing still recovers and
    continues.
    """
    optional_sequence = Opt(*rules)
    return optional_sequence.error()
# NOTE(review): ``lower`` reads ``ctx``, ``tokens`` and ``grammar`` that are
# not defined in this excerpt — presumably closure variables of an
# enclosing lowering routine; confirm before moving this function.
def lower(rule):
    """
    Helper to lower one parser.

    Dispatches on the concrete liblktlang grammar-expression type and
    builds the corresponding Langkit parser combinator, recursing into
    sub-expressions.

    :param liblktlang.GrammarExpr rule: Grammar rule to lower.
    :rtype: Parser
    """
    # For convenience, accept null input rules, as we generally want to
    # forward them as-is to the lower level parsing machinery.
    if rule is None:
        return None

    loc = ctx.lkt_loc(rule)
    with ctx.lkt_context(rule):
        if isinstance(rule, liblktlang.ParseNodeExpr):
            node = resolve_node_ref(rule.f_node_name)

            # Lower the subparsers
            subparsers = [
                lower(subparser) for subparser in rule.f_sub_exprs
            ]

            # Qualifier nodes are a special case: we produce one subclass
            # or the other depending on whether the subparsers accept the
            # input.
            if node._type.is_bool_node:
                return Opt(*subparsers, location=loc).as_bool(node)

            # Likewise for enum nodes
            elif node._type.base and node._type.base.is_enum_node:
                return _Transform(_Row(*subparsers, location=loc),
                                  node.type_ref,
                                  location=loc)

            # For other nodes, always create the node when the subparsers
            # accept the input.
            else:
                return _Transform(parser=_Row(*subparsers), typ=node,
                                  location=loc)

        elif isinstance(rule, liblktlang.GrammarToken):
            token_name = rule.f_token_name.text
            try:
                val = tokens[token_name]
            except KeyError:
                # check_source_language(False, ...) reports the error;
                # presumably it does not return here — confirm.
                check_source_language(
                    False, 'Unknown token: {}'.format(token_name))

            match_text = ''
            if rule.f_expr:
                # The grammar is supposed to maintain this invariant
                assert isinstance(rule.f_expr, liblktlang.TokenLit)
                match_text = denoted_string_literal(rule.f_expr)

            return _Token(val=val, match_text=match_text, location=loc)

        elif isinstance(rule, liblktlang.TokenLit):
            return _Token(denoted_string_literal(rule), location=loc)

        elif isinstance(rule, liblktlang.GrammarList):
            # '*' lists accept empty input; '+' lists do not.
            return List(lower(rule.f_expr),
                        empty_valid=rule.f_kind.text == '*',
                        list_cls=resolve_node_ref(rule.f_list_type),
                        sep=lower(rule.f_sep),
                        location=loc)

        elif isinstance(
                rule,
                (liblktlang.GrammarImplicitPick, liblktlang.GrammarPick)):
            return Pick(*[lower(subparser) for subparser in rule.f_exprs],
                        location=loc)

        elif isinstance(rule, liblktlang.GrammarRuleRef):
            # Reference to another rule of the same grammar.
            return getattr(grammar, rule.f_node_name.text)

        elif isinstance(rule, liblktlang.GrammarOrExpr):
            return Or(
                *[lower(subparser) for subparser in rule.f_sub_exprs],
                location=loc)

        elif isinstance(rule, liblktlang.GrammarOpt):
            return Opt(lower(rule.f_expr), location=loc)

        elif isinstance(rule, liblktlang.GrammarOptGroup):
            return Opt(*[lower(subparser) for subparser in rule.f_expr],
                       location=loc)

        elif isinstance(rule, liblktlang.GrammarExprList):
            return Pick(*[lower(subparser) for subparser in rule],
                        location=loc)

        elif isinstance(rule, liblktlang.GrammarDiscard):
            return Discard(lower(rule.f_expr), location=loc)

        elif isinstance(rule, liblktlang.GrammarNull):
            return Null(resolve_node_ref(rule.f_name), location=loc)

        elif isinstance(rule, liblktlang.GrammarSkip):
            return Skip(resolve_node_ref(rule.f_name), location=loc)

        elif isinstance(rule, liblktlang.GrammarDontSkip):
            return DontSkip(lower(rule.f_expr),
                            lower(rule.f_dont_skip),
                            location=loc)

        elif isinstance(rule, liblktlang.GrammarPredicate):
            # Predicate parsers reference a property as ``Node.prop``.
            check_source_language(
                isinstance(rule.f_prop_ref, liblktlang.DotExpr),
                'Invalid property reference')
            node = resolve_node_ref(rule.f_prop_ref.f_prefix)
            prop_name = rule.f_prop_ref.f_suffix.text
            try:
                prop = getattr(node, prop_name)
            except AttributeError:
                check_source_language(
                    False,
                    '{} has no {} property'.format(
                        node._name.camel_with_underscores, prop_name))
            return Predicate(lower(rule.f_expr), prop, location=loc)

        else:
            raise NotImplementedError('unhandled parser: {}'.format(rule))
def end_named_block():
    """Parser for the ``end`` marker of a named block.

    The ``EndName``-wrapped identifier after ``end`` may be omitted.
    """
    optional_end_name = Opt(EndName(A.identifier))
    return Pick("end", optional_end_name)
Tuple expression. """ exprs = Field(type=Expr.list) lkql_grammar = Grammar('main_rule') G = lkql_grammar # noinspection PyTypeChecker lkql_grammar.add_rules( main_rule=List(Or(G.import_clause, G.decl, G.expr), list_cls=TopLevelList, empty_valid=True), import_clause=Import("import", G.id), query=Query( Opt("from", Or(G.expr, Unpack("*", G.expr))), "select", c(), Or( QueryKind.alt_first(L.Identifier(match_text="first")), QueryKind.alt_all(), ), G.pattern), pattern=Or(OrPattern(G.chained_node_pattern, "or", G.pattern), G.chained_node_pattern), chained_node_pattern=Or( ChainedNodePattern( G.filtered_pattern, List( Or(SelectorLink(G.selector_call, "is", G.filtered_pattern), FieldLink(".", G.id, "is", G.filtered_pattern), PropertyLink(".", G.fun_call, "is", G.filtered_pattern)))), G.filtered_pattern), filtered_pattern=Or(FilteredPattern(G.binding_pattern, "when", G.expr),
class AbstractPresent(GPRNode): pass class OthersDesignator(GPRNode): pass class Choices(GPRNode.list): pass A.add_rules( context_clauses=List(A.with_decl, empty_valid=True), with_decl=WithDecl( Opt("limited").as_bool(Limited), "with", List(A.string_literal, sep=","), ";"), abstract_present=AbstractPresent("abstract"), qualifier_names=QualifierNames(A.identifier, Opt(A.identifier)), project_qualifier=ProjectQualifier( Or(A.abstract_present, A.qualifier_names)), project_extension=ProjectExtension("extends", Opt("all").as_bool(AllQualifier), A.string_literal), project_declaration=ProjectDeclaration(Opt(A.project_qualifier), "project", A.static_name, Opt(A.project_extension), "is", A.declarative_items, "end", A.static_name, ";"), project=Project( A.context_clauses,
# NOTE(review): chunk starts inside a class docstring opened outside this
# excerpt (an enum declaration, given the ``alternatives`` attribute).
    This enumeration type is documented, and thus tests documentation
    generation for it.
    """
    alternatives = ['e_example', 'e_null']


class EnumNode(FooNode):
    enum_1 = Field(type=MyEnum1)
    enum_2 = Field(type=MyEnum2)
    has_plus = Field(type=BoolType)


class Nodes(FooNode):
    nodes = Field(type=EnumNode.list)


foo_grammar = Grammar('main_rule')
foo_grammar.add_rules(
    # Each EnumNode: an enum_1 value (example/null token), an enum_2
    # value (example token), and an optional "+" booleanized qualifier.
    main_rule=Nodes(List(EnumNode(
        Or(Enum(Tok(Token.Example), MyEnum1('e_example')),
           Enum(Tok(Token.Null), MyEnum1('e_null'))),
        Enum(Tok(Token.Example), MyEnum2('e_example')),
        Opt('+').as_bool()
    ))),
)
build_and_run(foo_grammar, 'main.py')
print('Done')
def TrailList(el, sep, empty_valid=False):
    """Parser for a ``sep``-separated list of ``el`` with an optional
    trailing separator.

    :param el: Parser for the list elements.
    :param sep: Separator parser/token.
    :param bool empty_valid: Whether the empty list is accepted.
    """
    elements = List(el, sep=sep, empty_valid=empty_valid)
    trailing_sep = Opt(sep)
    return Pick(elements, trailing_sep)
# Expression node hierarchy for an unparser test (Def/Name are
# presumably declared outside this excerpt).
class Expr(FooNode):
    pass


class Literal(Expr):
    token_node = True


class Ref(Expr):
    name = Field()


class ParentExpr(Expr):
    expr = Field()


class Plus(Expr):
    lhs = Field()
    rhs = Field()


g = Grammar('main_rule')
g.add_rules(main_rule=List(g.def_rule),
            name=Name(Token.Identifier),
            # def <name> [(<args>)] = <expr>
            def_rule=Def('def',
                         g.name,
                         Opt('(', List(g.name, sep=','), ')'),
                         '=',
                         g.expr),
            # Left-recursive binary "+", parenthesized exprs, refs and
            # number literals.
            expr=Or(Plus(g.expr, '+', g.expr),
                    ParentExpr('(', g.expr, ')'),
                    Ref(g.name),
                    Literal(Token.Number)))
build_and_run(g, 'main.py', generate_unparser=True)
print('Done')