def run(name, *args):
    """
    Emit and print the errors we get for the grammar below, with ``*args``
    as a list of NodeMacro classes to apply on BarNode.

    This checks both the consistency of Property diagnostics and that the
    SLOCs generated for NodeMacros are good, i.e. that they reference the
    original definition site.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        # Apply all the macros provided by the caller
        _macros = args

    def lang_def():
        g = Grammar('main_rule')
        g.add_rules(main_rule=Row('example') ^ BarNode)
        return g

    emit_and_print_errors(lang_def)
    print('')
def run(name, expr):
    """
    Emit and print the errors we get for the grammar below, using "expr"
    as a property in BarNode.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        # The property under test
        prop = Property(expr)

    def lang_def():
        g = Grammar('main_rule')
        g.add_rules(
            main_rule=Row('example') ^ BarNode,
        )
        return g

    emit_and_print_errors(lang_def)
    print('')
def run(name, expr):
    """
    Emit and print the errors we get for the grammar below, using "expr"
    as a property in ListNode.
    """
    global FooNode, BarNode, ListNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print("== {} ==".format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        list_node = Field()

    class ListNode(FooNode):
        nb_list = Field()

        # Helper to reach the enclosing BarNode from a list element
        bar_node_parent = Property(Self.parent.cast(BarNode))

        # The property under test
        prop = Property(expr)

    def lang_def():
        g = Grammar("main_rule")
        g.add_rules(
            main_rule=Row("example", g.list_rule) ^ BarNode,
            list_rule=Row(List(Tok(Token.Number))) ^ ListNode,
        )
        return g

    emit_and_print_errors(lang_def)
    print("")
def run(name, expr):
    """
    Emit and print the errors we get for the grammar below, using "expr"
    as a property in BarNode.
    """
    # Only FooNode is actually (re)bound at module level here. The original
    # also declared Compound, Expression, NullNode and Number as globals,
    # but those names are never assigned in this function: stale copy-paste
    # from a sibling test, removed.
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print("== {} ==".format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        # The property under test
        prop = Property(expr)

    def lang_def():
        foo_grammar = Grammar("main_rule")
        foo_grammar.add_rules(main_rule=Row("example") ^ BarNode)
        return foo_grammar

    emit_and_print_errors(lang_def)
    print("")
def run(name, astnode_fn):
    """
    Emit and print the errors we get for the grammar below.

    ``astnode_fn`` receives the ``T`` types holder and must return the node
    type used to build the ``No(...)`` expression in ExampleNode.
    """
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))

    print('== {} =='.format(name))
    astnode = astnode_fn(T)

    @abstract
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    @abstract
    class MiddleNode(FooNode):
        get_random_node = AbstractProperty(type=T.MiddleNode)

    class ExampleNode(MiddleNode):
        # Concrete override: return a null reference of the requested type
        get_random_node = Property(No(astnode))

    @abstract
    class NullNode(FooNode):
        pass

    def lang_def():
        g = Grammar('main_rule')
        g.add_rules(
            main_rule=Or(Row('example') ^ ExampleNode,
                         Row('null') ^ NullNode)
        )
        return g

    emit_and_print_errors(lang_def)
    print('')
def run(abstract_private, concrete_private):
    """
    Emit and print the errors we get for the below grammar for the given
    privacy levels.

    :param abstract_private: Privacy for the abstract property (None, True
        or False, see ``fmt_privacy``).
    :param concrete_private: Privacy for the concrete property.
    """
    fmt_privacy = {
        None: 'default',
        True: 'private',
        False: 'public',
    }
    # Use print as a function everywhere for Python 2/3 consistency: the
    # original mixed Python-2 print statements with print('') calls.
    print('== abstract: {}, concrete: {} =='.format(
        fmt_privacy[abstract_private],
        fmt_privacy[concrete_private]
    ))
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))

    @root_grammar_class
    class AbstractNode(ASTNode):
        prop = AbstractProperty(BoolType, private=abstract_private)

    class ConcreteNode(AbstractNode):
        prop = Property(Literal(True), private=concrete_private)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example') ^ ConcreteNode,
        )
        return foo_grammar

    # On success, show the privacy level each property ended up with
    if emit_and_print_errors(lang_def):
        for fld in (AbstractNode._fields['prop'],
                    ConcreteNode._fields['prop']):
            print(' {}: {}'.format(fld.qualname,
                                   fmt_privacy[fld.is_private]))
    print('')
""" Check that printing diagnostics (in particular quoting the source buffer) works fine when the source buffer is empty. """ from langkit.dsl import ASTNode from utils import emit_and_print_errors class FooNode(ASTNode): pass class Example(FooNode): token_node = True emit_and_print_errors(lkt_file='input.lkt') print('Done')
""" Test that the unparsing machinery rejects lexers with Ignore actions. """ from langkit.dsl import ASTNode from utils import emit_and_print_errors class FooNode(ASTNode): pass class Example(FooNode): token_node = True emit_and_print_errors(lkt_file='foo.lkt', generate_unparser=True) print('Done')
from langkit.diagnostics import Diagnostics
from langkit.expressions import Property, Self
from langkit.parsers import Grammar, Row
from os import path
from utils import emit_and_print_errors


Diagnostics.set_lang_source_dir(path.abspath(__file__))


def make_lang_def(lit):
    """
    Return a lang_def callback whose root node holds a property that
    returns ``lit``.
    """
    def lang_def():
        # NOTE(review): root_grammar_class and ASTNode are not imported in
        # this chunk — presumably brought in elsewhere; verify.
        @root_grammar_class
        class FooNode(ASTNode):
            b = Property(lit)

        foo_grammar = Grammar("main_rule")
        foo_grammar.add_rules(main_rule=Row("example") ^ FooNode)
        return foo_grammar
    return lang_def


# Use print as a function throughout: the original mixed Python-2 print
# statements with print ("") call syntax. Single-argument print(...) emits
# the same output under both Python 2 and Python 3.
print("Valid case")
emit_and_print_errors(make_lang_def(12))

print("Invalid case")
emit_and_print_errors(make_lang_def("lol"))

print("")
print("Done")
defined on an abstract node while all concrete subclasses have it
overridden.
"""

from langkit.dsl import ASTNode
from langkit.expressions import Property, Self

from utils import emit_and_print_errors


class FooNode(ASTNode):
    # We should have no warnings for the whole subgraph of properties here,
    # since the root of the callgraph (the "c" property) has
    # "warn_on_unused=False".
    a = Property(1)
    b = Property(Self.a)
    c = Property(Self.b, warn_on_unused=False)

    # We should get a warning for this one though, which is out of the
    # subgraph.
    d = Property(Self.c)


class Node(FooNode):
    token_node = True


emit_and_print_errors(lkt_file='foo.lkt')
print('Done')
from __future__ import absolute_import, division, print_function

from langkit.dsl import ASTNode, Field, T
from langkit.parsers import Grammar, Tok
from lexer_example import Token
from utils import emit_and_print_errors


class FooNode(ASTNode):
    pass


class Example(FooNode):
    # Deliberately reference a non-existent type to trigger a diagnostic
    tok = Field(type=T.FooNode.does_not_exist)


fg = Grammar('main_rule')
fg.add_rules(
    main_rule=Example(Tok(Token.Example, keep=True)),
)

emit_and_print_errors(fg)
print('Done')
# NOTE(review): this chunk starts in the middle of a grammar rule set (the
# opening of the add_rules() call is not visible) and ends in the middle of
# a print() call.
    ),
    rule_2=ExampleWrapper(
        Example("example"),
        Example("example"),
        Example("example"),
        Example("example"),
        Example("example"),
        HasExample("example"),
        Example("example")
    ),
    sub_rule=Opt(Example("example"))
)

emit_and_print_errors(foo_grammar)

# Inspect each ExampleWrapper field after diagnostics ran
fields = [
    ExampleWrapper.field_opt,
    ExampleWrapper.field_or,
    ExampleWrapper.field_defer,
    ExampleWrapper.field_null,
    ExampleWrapper.field_dont_skip,
    ExampleWrapper.field_opt_bool,
    ExampleWrapper.field_transform
]
for field in fields:
    print("Field {} is {}".format(
        field,
# NOTE(review): this chunk starts in the middle of a class body; the
# decorator for this first property is not visible here.
    def will_doc_prop():
        pass


class Example(FooNode):
    # This property is undocumented but it inherits a documented one, so it
    # should not have a warning.
    @langkit_property(public=True)
    def doc_prop():
        return True

    # This property is undocumented, so it should have a warning
    @langkit_property(public=True)
    def undoc_prop():
        return True

    # This property is documented, so it should not have a warning
    @langkit_property(public=True)
    def will_doc_prop():
        """
        This property is documented.
        """
        return True


grammar = Grammar('item')
grammar.add_rules(item=Example('example'))
emit_and_print_errors(grammar, warning_set=WarningSet())
print('Done')
# NOTE(review): this chunk starts in the middle of a class body; the
# enclosing class header is not visible here.
    @langkit_property(public=True, memoized=True)
    def holder():
        return New(SynthHolder, f=Self)


class Name(Expr):
    token_node = True

    @langkit_property(memoized=True)
    def synth():
        return New(SynthNode, f=Self)


g = Grammar('main_rule')
g.add_rules(main_rule=Or(g.literal, g.name, g.holder),
            literal=Literal(Token.Number),
            name=Name(Token.Identifier),
            holder=ParsedHolder('(', g.name, ')'))
ctx = emit_and_print_errors(g)

# Map DSL names to compiled node types so we can inspect precise types
nodes = {n.dsl_name: n for n in ctx.astnode_types}
for node_name in ['SynthNode', 'AbstractHolder']:
    node = nodes[node_name]
    # original_name.lower is presumably a property on langkit's name type
    # (not the str.lower method, which would be unbound here) — TODO confirm
    fields = {f.original_name.lower: f for f in node.get_fields()}
    f = fields['f']
    print('Precise types for {}:'.format(f.qualname))
    for t in f.precise_types.minimal_matched_types:
        print(' * {}'.format(t.dsl_name))
print('Done')
""" RA22-015: check the Python grammar's concrete syntax. """ import os import sys from utils import emit_and_print_errors, langkit_root, unparse_all_script # Make the Python grammar importable sys.path.append(os.path.join(langkit_root, 'contrib', 'python')) from language import lexer, parser emit_and_print_errors(parser.python_grammar, lexer.python_lexer, unparse_script=unparse_all_script)
# NOTE(review): this chunk starts in the middle of a class body; the class
# header for these two attributes is not visible here.
    error_node = True
    f = Field(type=Name)


def test_synthetic(root):
    @synthetic
    class ErrorDecl(root):
        error_node = True


# Run every test_* function defined above on a fresh root node class,
# resetting langkit's global state between runs.
for func_name in sorted(dir()):
    if not func_name.startswith("test_"):
        continue
    print(f"== {func_name} ==")
    func = locals().get(func_name)

    class FooNode(ASTNode):
        pass

    try:
        func(FooNode)
    except DiagnosticError:
        # The error was emitted at declaration time: nothing else to do
        pass
    else:
        emit_and_print_errors()
    print()
    langkit.reset()

print('Done')
id = Field() body = Field() name = Property(Self.id) env_spec = EnvSpec(add_env=True, add_to_env=add_to_env(Self.id.symbol, Self)) faulty_prop = Property(Self._env_value_1) class Block(Stmt): items = Field() env_spec = EnvSpec(add_env=True) def lang_def(): foo_grammar = Grammar('stmts_rule') foo_grammar.add_rules( def_rule=Row(Tok(Token.Identifier, keep=True), Opt(Row('(', foo_grammar.stmts_rule, ')')[1])) ^ Def, stmt_rule=(foo_grammar.def_rule | Row('{', List(foo_grammar.stmt_rule, empty_valid=True), '}') ^ Block), stmts_rule=List(foo_grammar.stmt_rule)) return foo_grammar emit_and_print_errors(lang_def) print 'Done'
from langkit.dsl import ASTNode, abstract
from langkit.parsers import Grammar
from utils import emit_and_print_errors


@abstract
class FooNode(ASTNode):
    pass


class ExampleNode(FooNode):
    pass


# Deliberately pass a main rule name ('main_rulezz') that does not match the
# rule registered below, to trigger a diagnostic.
grammar = Grammar('main_rulezz')
grammar.add_rules(main_rule=ExampleNode('example'))
emit_and_print_errors(grammar, lkt_file='foo.lkt')
print('Done')
def test(label, lkt_file):
    """
    Generator helper: print a header for ``label``, yield control back to
    the caller, then emit diagnostics for ``lkt_file`` and reset langkit's
    global state.
    """
    print('== {} =='.format(label))
    yield
    emit_and_print_errors(lkt_file=lkt_file)
    langkit.reset()
    print()
from __future__ import absolute_import, division, print_function

from langkit.dsl import ASTNode

from utils import emit_and_print_errors


# Exercise several malformed lkt inputs with the same node declarations
for lkt_file in ('no-dot.lkt', 'bad-prefix.lkt', 'bad-alt.lkt'):
    print('== {} =='.format(lkt_file))

    class FooNode(ASTNode):
        pass

    class Example(FooNode):
        enum_node = True
        alternatives = ['example', 'null', 'def']

    emit_and_print_errors(lkt_file=lkt_file)
    print('')

print('Done')
""" Check that the railroad diagrams pass at least works without crashing. """ import os import sys from utils import emit_and_print_errors, langkit_root # Make the Python grammar importable sys.path.append(os.path.join(langkit_root, 'contrib', 'python')) from language import lexer, parser emit_and_print_errors( parser.python_grammar, lexer.python_lexer, explicit_passes_triggers={'emit railroad diagrams': True} )
from langkit.expressions import If, No, Property, Self
from langkit.parsers import Grammar, Or, Tok
from lexer_example import Token
from utils import emit_and_print_errors


class FooNode(ASTNode):
    pred = Property(True)

    # If-expression whose branches have two unrelated node types: checks
    # how their common ancestor is computed.
    foo_1 = Property(
        If(Self.pred, No(T.BarNode), No(T.Literal)).as_bare_entity,
        public=True
    )


class BarNode(FooNode):
    pass


class Literal(FooNode):
    pass


grammar = Grammar('main_rule')
grammar.add_rules(main_rule=Or(
    BarNode(Tok(Token.Example)),
    Literal(Tok(Token.Number)),
))
emit_and_print_errors(grammar)
print('Done')
# NOTE(review): this chunk starts after this file's import section, which is
# not visible here.
Diagnostics.set_lang_source_dir(path.abspath(__file__))


@root_grammar_class
class FooNode(ASTNode):
    pass


class BarCode(FooNode):
    a = Field()
    # Mutually recursive properties across two node types
    prop_1 = Property(Self.a.prop_2)


class BarNode(FooNode):
    prop_2 = Property(Self.parent.cast(BarCode).prop_1)


def lang_def():
    foo_grammar = Grammar('main_rule')
    foo_grammar.add_rules(
        main_rule=Row('example', foo_grammar.rule_2) ^ BarCode,
        rule_2=Row('example') ^ BarNode,
    )
    return foo_grammar


emit_and_print_errors(lang_def)
print('')

# Use print as a function for Python 2/3 consistency: the original mixed
# print('') calls with a Python-2 print statement here.
print('Done')
""" Test creating an array literal whose element_type is the node type defined by the enclosing class. """ from langkit.dsl import ASTNode, T from langkit.expressions import (ArrayLiteral, Entity, langkit_property) from utils import emit_and_print_errors class FooNode(ASTNode): pass class Example(FooNode): @langkit_property(public=True, return_type=T.Example.entity.array) def entities_array(): return ArrayLiteral([Entity, Entity, Entity], element_type=Example.entity) emit_and_print_errors(lkt_file="expected_concrete_syntax.lkt") print('Done')
class Decl(FooNode):
    assignment = Field()
    example = Field()


class Example(FooNode):
    token_node = True


class Identifier(FooNode):
    token_node = True


class Number(FooNode):
    token_node = True


# Build the grammar and run diagnostics with unparser generation enabled
g = Grammar('main_rule')
g.add_rules(
    main_rule=List(Or(g.decl, g.assignment)),
    assignment=Assignment(g.identifier, '=', g.number, ';'),
    decl=Decl('def', g.decl_assignment, ',', Example('example')),
    decl_assignment=Assignment(g.identifier, '=', g.number),
    identifier=Identifier(Token.Identifier),
    number=Number(Token.Number),
)
emit_and_print_errors(g, generate_unparser=True)
print('Done')
def run(name, match_expr):
    """
    Emit and print the errors we get for the grammar below, using
    "match_expr" as a property in ExampleNode.
    """
    global BodyNode, Compound, Expression, FooNode, NullNode, Number

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @abstract
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    # Small node hierarchy over which the match expression operates

    @abstract
    class BodyNode(FooNode):
        pass

    class NullNode(BodyNode):
        pass

    @abstract
    class Expression(BodyNode):
        pass

    class Number(Expression):
        tok = Field()

    class Compound(Expression):
        prefix = Field()
        suffix = Field()

    class ExampleNode(FooNode):
        body = Field()

        # The property under test
        prop = Property(match_expr)

    def lang_def():
        g = Grammar('main_rule')
        g.add_rules(
            main_rule=Row(
                'example',
                Or(g.expression, Row('null') ^ NullNode)
            ) ^ ExampleNode,

            number=Tok(Token.Number) ^ Number,

            expression=Or(
                Row(g.number, ',', g.expression) ^ Compound,
                g.number
            ),
        )
        return g

    emit_and_print_errors(lang_def)
    print('')
""" Test that invalid uses of abstract fields are duly diagnosed and rejected. """ import glob from langkit.dsl import ASTNode from utils import emit_and_print_errors for lkt_file in sorted(glob.glob('*.lkt')): # Skip the source that contains common declarations for all tests if lkt_file == "nodes.lkt": continue print('== {} =='.format(lkt_file)) class FooNode(ASTNode): pass class Example(FooNode): token_node = True emit_and_print_errors(lkt_file=lkt_file, lkt_semantic_checks=True) print('') print('Done')