def lang_def():
    # Build a grammar whose main rule accepts either 'example' or 'null',
    # producing the corresponding node for each alternative.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Or(Row('example') ^ ExampleNode,
                     Row('null') ^ NullNode)
    )
    return foo_grammar
def lang_def():
    # Grammar with two rules: the main rule wraps a number list rule.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Row('example', foo_grammar.list_rule) ^ BarNode,
        list_rule=Row(List(Tok(Token.Number))) ^ ListNode,
    )
    return foo_grammar
def lang_def():
    # Two-rule grammar; the main rule chains into rule_2.
    # NOTE(review): main_rule binds to `BarCode` while rule_2 binds to
    # `BarNode` — presumably deliberate for an error-checking test; confirm
    # before "fixing" the name.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Row('example', foo_grammar.rule_2) ^ BarCode,
        rule_2=Row('example') ^ BarNode,
    )
    return foo_grammar
def lang_def():
    # Declares the AST root locally, then builds a one-rule grammar on it.
    # NOTE(review): `lit` is not defined in this view — presumably an
    # intentional error case for the test driver; confirm.
    @root_grammar_class
    class FooNode(ASTNode):
        b = Property(lit)

    foo_grammar = Grammar("main_rule")
    foo_grammar.add_rules(main_rule=Row("example") ^ FooNode)
    return foo_grammar
def lang_def():
    # Abstract root node plus one concrete node, with a single parsing rule.
    # NOTE(review): the grammar's entry point is 'main_rulezz' but the only
    # rule defined is 'main_rule' — presumably an intentional unknown-rule
    # error test; confirm before changing.
    @abstract
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class ExampleNode(FooNode):
        pass

    foo_grammar = Grammar('main_rulezz')
    foo_grammar.add_rules(
        main_rule=Row('example') ^ ExampleNode
    )
    return foo_grammar
def lang_def():
    # Abstract root node plus one concrete node.  The rule 'sec_rule' is
    # registered twice across two add_rules() calls (presumably to exercise
    # duplicate-rule diagnostics — left as-is).
    @abstract
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class ExampleNode(FooNode):
        pass

    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        sec_rule=Row('example'),
    )
    foo_grammar.add_rules(
        sec_rule=Row('example'),
        main_rule=Row(foo_grammar.sec_rule) ^ ExampleNode
    )
    # Bug fix: the grammar was built but never returned, so callers of
    # lang_def() received None.  Every sibling lang_def in this file
    # returns the grammar object.
    return foo_grammar
def lang_def():
    # Mutually recursive rules: a definition rule, a statement rule that is
    # either a definition or a braced block of statements, and the list of
    # statements used as the entry point.
    foo_grammar = Grammar('stmts_rule')
    foo_grammar.add_rules(
        def_rule=Row(
            Tok(Token.Identifier, keep=True),
            Opt(Row('(', foo_grammar.stmts_rule, ')')[1])
        ) ^ Def,
        stmt_rule=(
            foo_grammar.def_rule
            | Row('{',
                  List(foo_grammar.stmt_rule, empty_valid=True),
                  '}') ^ Block
        ),
        stmts_rule=List(foo_grammar.stmt_rule)
    )
    return foo_grammar
def lang_def():
    # Grammar with a right-recursive comma-separated expression rule:
    # expression := number | number ',' expression.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Row(
            'example',
            Or(foo_grammar.expression, Row('null') ^ NullNode)
        ) ^ ExampleNode,
        number=Tok(Token.Number) ^ Number,
        expression=Or(
            Row(foo_grammar.number, ',', foo_grammar.expression) ^ Compound,
            foo_grammar.number
        ),
    )
    return foo_grammar
def lang_def():
    # Root node plus a Def node carrying an env spec; the grammar parses a
    # possibly-empty list of definitions and braced statement blocks.
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class Def(FooNode):
        name = Field()
        body = Field()
        env_spec = EnvSpec(add_env=True, add_to_env=(Self.name, Self))

    foo_grammar = Grammar('stmt_rule')
    foo_grammar.add_rules(
        def_rule=Row(
            Tok(Token.Identifier, keep=True),
            '(', foo_grammar.stmt_rule, ')'
        ) ^ Def,
        stmt_rule=List(
            foo_grammar.def_rule
            | Row('{',
                  List(foo_grammar.stmt_rule, empty_valid=True),
                  '}')[1],
            empty_valid=True
        )
    )
    return foo_grammar
def lang_def():
    # Minimal single-rule grammar producing a ConcreteNode.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Row('example') ^ ConcreteNode,
    )
    return foo_grammar
# Full parsing-rule set for a Python(2)-like language: lexical rules, the
# statement grammar (simple/compound statements, imports, control flow) and
# the expression grammar (precedence climbing from `test` down to `atom`,
# plus comprehensions).  NOTE(review): relies on `python_grammar`, the `P`
# rule-reference shortcut, the `L` token set, `newlines()` and many node
# constructors declared elsewhere in this file — left byte-identical because
# the single add_rules() call is too large and order-sensitive to restyle
# safely here.
python_grammar.add_rules( name=Id(L.Identifier), number=NumberLit(L.Number), string=StringLit(L.String), cat_string=ConcatStringLit(P.string, List(P.string)), nl=NL(L.Newline), main_rule=FileNode(List(newlines(), P.stmt, newlines()), L.Termination), decorator=Decorator('@', P.dotted_name, Opt('(', P.arg_list, ')'), L.Newline), decorators=List(P.decorator), decorated=Decorated(P.decorators, Or(P.class_def, P.func_def)), func_def=FuncDef('def', P.name, P.parameters, ':', P.suite), parameters=Pick('(', Opt(P.varargslist), ')'), varargslist=Params( List(SingleParam( Opt('*').as_bool(VarArgsFlag), Opt('**').as_bool(KwArgsFlag), P.fpdef, Opt('=', P.test)), empty_valid=True, sep=","), ), fpdef=Or(P.name, Pick('(', P.name_list, ')')), name_list=TrailList(P.name, sep=','), stmt=Or(P.simple_stmt, P.compound_stmt), simple_stmt=Pick(Or(P.small_stmt, TrailList(P.small_stmt, sep=';')), L.Newline), small_stmt=(P.expr_stmt | P.print_stmt | P.del_stmt | P.pass_stmt | P.flow_stmt | P.import_stmt | P.global_stmt | P.exec_stmt | P.assert_stmt), expr_stmt=Or( AugAssignStmt( P.test_list, Op( Or('+=', '-=', '*=', '/=', '%=', '&=', '|=', '^=', '<<=', '>>=', '**=', '//=')), Or(P.yield_expr, P.test_list)), AssignStmt(P.test_list, List(Pick('=', Or(P.yield_expr, P.test_list)))), P.test_list), print_stmt=Or(PrintStmt('print', P.test_list), StreamPrintStmt('print', '>>', P.test, ',', P.test_list)), del_stmt=DelStmt('del', P.expr_list), pass_stmt=PassStmt('pass'), flow_stmt=Or(P.break_stmt, P.continue_stmt, P.return_stmt, P.raise_stmt, P.yield_stmt), break_stmt=BreakStmt('break'), continue_stmt=ContinueStmt('continue'), return_stmt=ReturnStmt('return', Opt(P.test_list)), yield_stmt=P.yield_expr, raise_stmt=RaiseStmt('raise', Opt(P.test_list)), import_stmt=Or(P.import_name, P.import_from), import_name=ImportName('import', P.dotted_as_names), dot=Dot('.'), import_from=ImportFrom( 'from', Or(P.dotted_name, RelName(List(P.dot), Opt(P.dotted_name))), 'import', Or(ImportStar('*'), Pick('(', 
P.import_as_names, ')'), P.import_as_names), ), as_name=AsNameNode(P.name, 'as', P.name), dotted_as_name=AsNameNode(P.dotted_name, 'as', P.name), import_as_names=TrailList(Or(P.as_name, P.name), sep=','), dotted_as_names=TrailList(Or(P.dotted_as_name, P.dotted_name), sep=','), dotted_name=DottedName(P.dotted_name, '.', P.name) | P.name, global_stmt=GlobalStmt('global', P.name_list), exec_stmt=ExecStmt('exec', P.expr, Opt('in', P.test_list)), assert_stmt=AssertStmt('assert', P.test, Opt(',', P.test)), compound_stmt=(P.if_stmt | P.while_stmt | P.for_stmt | P.try_stmt | P.with_stmt | P.func_def | P.class_def | P.decorated), else_part=ElsePart('else', ':', P.suite), if_stmt=IfStmt( 'if', P.test, ':', P.suite, List('elif', ElifBranch(P.test, ':', P.suite), empty_valid=True), Opt(P.else_part)), while_stmt=WhileStmt('while', P.test, ':', P.suite, Opt(P.else_part)), for_stmt=ForStmt('for', P.expr_list, 'in', P.test_list, ':', P.suite, Opt(P.else_part)), try_stmt=TryStmt( 'try', ':', P.suite, List(ExceptPart('except', Opt(AsNameNode(P.test, Opt('as', P.test))), ':', P.suite), empty_valid=True), Opt(P.else_part), Opt('finally', ':', P.suite), ), with_stmt=WithStmt('with', List(P.with_item, sep=','), ":", P.suite), with_item=AsNameNode(P.test, Opt('as', P.expr)), suite=Or( Pick(newlines(), L.Indent, List(newlines(), P.stmt, newlines()), L.Dedent), P.simple_stmt, ), test=Or( P.lambdef, IfExpr(P.or_test, 'if', P.or_test, 'else', P.test), P.or_test, ), or_test=Or(OrOp(P.or_test, 'or', P.and_test), P.and_test), and_test=Or(AndOp(P.and_test, 'and', P.not_test), P.not_test), not_test=Or(NotOp('not', P.not_test), P.comparison), comparison=Or( CompOp( P.comparison, CompOpKind.alt_lt('<') | CompOpKind.alt_gt('>') | CompOpKind.alt_eq('==') | CompOpKind.alt_gte('>=') | CompOpKind.alt_lte('<=') | CompOpKind.alt_diamond('<>') | CompOpKind.alt_noteq('!=') | CompOpKind.alt_in('in') | CompOpKind.alt_notin('not', 'in') | CompOpKind.alt_isnot('is', 'not') | CompOpKind.alt_is('is'), P.expr), 
P.expr), expr=Or(OrExpr(P.expr, '|', P.xor_expr), P.xor_expr), xor_expr=Or(XorExpr(P.xor_expr, '^', P.and_expr), P.and_expr), and_expr=Or(AndExpr(P.and_expr, '&', P.shift_expr), P.shift_expr), shift_expr=Or(ShiftExpr(P.shift_expr, Op(Or('<<', '>>')), P.arith_expr), P.arith_expr), arith_expr=Or(ArithExpr(P.arith_expr, Op(Or('+', '-')), P.term), P.term), term=Or(Term(P.term, Op(Or('*', '/', '%', '//')), P.factor), P.factor), factor=Or(Factor(Op(Or('+', '-', '~')), P.factor), P.power), power=Or(Power(P.atom_expr, '**', P.factor), P.atom_expr), atom_expr=Or(DottedName(P.atom_expr, ".", P.name), CallExpr(P.atom_expr, '(', P.arg_list, ')'), SubscriptExpr(P.atom_expr, '[', P.subscript_list, ']'), P.atom), dict_assoc=DictAssoc(P.test, ':', P.test), yield_expr=YieldExpr('yield', Opt(P.test_list)), atom=Or(Pick('(', P.yield_expr, ')'), ListGen('(', P.test, P.list_for, ')'), TupleLit('(', Opt(P.test_list), ')'), ListComp('[', P.test, P.list_for, ']'), ListLit('[', P.empty_test_list, ']'), SetComp('{', P.test, P.comp_for, '}'), P.set_lit, DictComp('{', P.dict_assoc, P.comp_for, '}'), DictLit('{', TrailList(P.dict_assoc, sep=','), '}'), InlineEval('`', P.test_list, '`'), P.name, P.number, P.cat_string, P.string), set_lit=SetLit('{', P.empty_test_list, '}'), lambdef=LambdaDef('lambda', P.varargslist, ':', P.test), subscript_list=TrailList(P.subscript, sep=","), subscript=Or( EllipsisExpr('.', '.', '.'), ExtSliceExpr(Opt(P.test), ':', Opt(P.test), ':', Opt(P.test)), SliceExpr(Opt(P.test), ':', Opt(P.test)), P.test, ), expr_list=TrailList(P.expr, ','), test_list=TrailList(P.test, ','), empty_test_list=TrailList(P.test, ',', empty_valid=True), class_def=ClassDef('class', P.name, Opt('(', Opt(P.test_list), ')'), ':', P.suite), arg_list=TrailList(Or( ArgGen(P.test, P.comp_for), ArgAssoc(Opt(P.test, '='), P.test), VarArgs('*', P.test), KwArgs('**', P.test), ), sep=",", empty_valid=True), list_iter=Or(P.list_for, P.list_if), list_for=CompForL('for', P.expr_list, 'in', P.test_list, 
Opt(P.list_iter)), list_if=CompIf('if', P.test, Opt(P.list_iter)), comp_iter=Or(P.comp_for, P.comp_if), comp_for=CompFor('for', P.expr_list, 'in', P.or_test, Opt(P.comp_iter)), comp_if=CompIf('if', P.test, Opt(P.comp_iter)))
# NOTE(review): this span starts inside the body of a class whose header is
# not visible here (fields, env_spec and a traced `entity_items` property of
# a Decl-like node), followed by a complete `Ref` node, grammar construction
# and a build_and_run driver.  Left byte-identical because the enclosing
# class definition is incomplete in this view.
name = Field() items = Field() env_spec = EnvSpec( add_to_env(mappings=New(T.env_assoc, key=Self.name.symbol, val=Self), metadata=New(Metadata, b=Self.has_plus.as_bool)) ) @langkit_property(public=True, return_type=T.Ref.entity.array, activate_tracing=True) def entity_items(): return Self.as_entity.items.map(lambda i: i) class Ref(FooNode): name = Field() fg = Grammar('main_rule') fg.add_rules( main_rule=List(fg.decl), decl=Decl(Opt('+').as_bool(HasPlus), fg.name, '(', fg.ref_list, ')'), ref_list=List(fg.ref, empty_valid=True), ref=Ref(fg.name), name=Name(Token.Identifier), ) build_and_run(fg, 'main.py') print('Done')
from langkit.dsl import ASTNode, Field, T, abstract
from langkit.expressions import Property, Self
from langkit.parsers import Grammar

from lexer_example import Token
from utils import emit_and_print_errors


class FooNode(ASTNode):
    pass


@abstract
class RootNode(FooNode):
    # Dispatch on the concrete node kind: an Expr's name field, or the
    # Name node itself.
    name = Property(Self.match(lambda e=T.Expr: e.name,
                               lambda n=T.Name: n))


class Expr(RootNode):
    name = Field()


class Name(RootNode):
    token_node = True


grammar = Grammar('main_rule')
grammar.add_rules(main_rule=Expr(Name(Token.Identifier)))
emit_and_print_errors(grammar)
print('Done')
from __future__ import absolute_import, division, print_function

from langkit.dsl import ASTNode, Field, abstract
from langkit.parsers import Grammar

from utils import emit_and_print_errors


@abstract
class FooNode(ASTNode):
    pass


class ExampleNode(FooNode):
    pass


class UnreferencedNode(FooNode):
    # This node is never used by any grammar rule; its field is left
    # untyped on purpose so the driver below can report diagnostics.
    untyped_field = Field()


grammar = Grammar('main_rule')
grammar.add_rules(
    main_rule=ExampleNode('example')
)
emit_and_print_errors(grammar)
print('Done')
def lang_def():
    # Minimal single-rule grammar producing a BarNode.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(main_rule=Row('example') ^ BarNode)
    return foo_grammar
""" Check that property memoization is properly rejected when using unsupported argument types. """ from __future__ import absolute_import, division, print_function from langkit.dsl import ASTNode, T from langkit.expressions import langkit_property from langkit.parsers import Grammar from utils import emit_and_print_errors class FooNode(ASTNode): pass class Example(FooNode): @langkit_property(public=True, memoized=True) def prop(a=T.Token.array): return a.length == 0 grammar = Grammar('main_rule') grammar.add_rules(main_rule=Example('example'), ) emit_and_print_errors(grammar) print('Done')
def lang_def():
    # Minimal single-rule grammar producing a BarNode (double-quoted
    # variant of the sibling definitions).
    foo_grammar = Grammar("main_rule")
    foo_grammar.add_rules(main_rule=Row("example") ^ BarNode)
    return foo_grammar
# NOTE(review): this span starts inside the body of a class whose header is
# not visible here (the `rhs` field of an Addition/VarDecl-like node),
# followed by complete Number/Null/Ref node declarations, a small
# variable-declaration grammar and a build_and_run driver.  Left
# byte-identical because the enclosing class definition is incomplete in
# this view.
rhs = Field() class Number(Expr): token_node = True class Null(FooNode): enum_node = True qualifier = True class Ref(Expr): null_qual = Field() name = Field() g = Grammar('main_rule') g.add_rules( main_rule=List(g.var_decl), var_decl=VarDecl('var', g.name, '=', g.expr, ';'), expr=Or(Addition(g.expr, '+', g.expr), g.atom), atom=Or(g.number, g.ref), number=Number(Token.Number), ref=Ref(Null('null'), g.name), name=Name(Token.Identifier), ) build_and_run(g, ada_main=['main.adb']) print('Done')
def lang_def():
    # Main rule: a row wrapping a list of number tokens, each turned into
    # a Literal node.
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Row(List(Tok(Token.Number, keep=True) ^ Literal)),
    )
    return foo_grammar
""" Test that garbage tokens left after the main parsing rule completes does not crash. It used to! """ from __future__ import absolute_import, division, print_function from langkit.dsl import ASTNode, Field from langkit.parsers import Grammar, Tok from lexer_example import Token from utils import build_and_run class FooNode(ASTNode): pass class Literal(FooNode): tok = Field() foo_grammar = Grammar('main_rule') foo_grammar.add_rules(main_rule=Literal(Tok(Token.Number, keep=True)), ) build_and_run(foo_grammar, 'main.py') print('Done')
from __future__ import absolute_import, division, print_function

from langkit.dsl import ASTNode, Field, T
from langkit.parsers import Grammar

from utils import emit_and_print_errors


class FooNode(ASTNode):
    pass


class Example(FooNode):
    # References a non-existent field type; presumably an intentional
    # error case for the emit_and_print_errors driver below.
    tok = Field(type=T.FooNode.does_not_exist)


class ExampleField(FooNode):
    pass


fg = Grammar('main_rule')
fg.add_rules(main_rule=Example(ExampleField()))
emit_and_print_errors(fg)
print('Done')
Test that a warning is emitted when the type of a parsing field is not as specific as it could be. """ from __future__ import absolute_import, division, print_function from langkit.dsl import ASTNode, Field from langkit.parsers import Grammar from utils import emit_and_print_errors class FooNode(ASTNode): pass class ExampleWrapper(FooNode): example = Field(type=FooNode) class Example(FooNode): pass foo_grammar = Grammar('main_rule') foo_grammar.add_rules(main_rule=ExampleWrapper(Example('example'))) emit_and_print_errors(foo_grammar) print('Done')
# NOTE(review): the line above begins inside a module docstring whose
# opening triple quotes are outside this view; left byte-identical.  The
# script declares ExampleWrapper.example with the over-general type
# FooNode to trigger the specificity warning the docstring describes.