def emit_and_print_errors(grammar=None, lexer=None, lkt_file=None,
                          warning_set=default_warning_set,
                          generate_unparser=False,
                          symbol_canonicalizer=None,
                          unparse_script=None,
                          explicit_passes_triggers=None):
    """
    Compile and emit code for the given set of arguments. Return the compile
    context if this was successful, None otherwise.

    :param langkit.parsers.Grammar grammar: The language grammar to use.
    :param langkit.lexer.Lexer lexer: The lexer to use along with the grammar.
        Use `lexer_example.foo_lexer` if left to None.
    :param str|None lkt_file: If provided, file from which to read the Lkt
        language spec.
    :param WarningSet warning_set: Set of warnings to emit.
    :param bool generate_unparser: Whether to generate unparser.
    :param langkit.compile_context.LibraryEntity|None symbol_canonicalizer:
        Symbol canonicalizer to use for this context, if any.
    :param None|str unparse_script: Script to unparse the language spec.
    :param dict|None explicit_passes_triggers: Explicit pass activation
        triggers to forward to ``create_all_passes``. Defaults to an empty
        dict.

    :rtype: None|langkit.compile_context.CompileCtx
    """
    # Use a None sentinel instead of a mutable ``{}`` default: a dict default
    # would be shared across every call to this function.
    if explicit_passes_triggers is None:
        explicit_passes_triggers = {}

    try:
        ctx = prepare_context(grammar, lexer, lkt_file, warning_set,
                              symbol_canonicalizer=symbol_canonicalizer)
        ctx.create_all_passes(
            'build',
            generate_unparser=generate_unparser,
            unparse_script=(UnparseScript(unparse_script)
                            if unparse_script else None),
            explicit_passes_triggers=explicit_passes_triggers)
        ctx.emit()
        # ... and tell about how it went
    except DiagnosticError:
        # If there is a diagnostic error, don't say anything, the diagnostics
        # are enough.
        return None
    else:
        print('Code generation was successful')
        return ctx
    finally:
        # Drop the generated DFA code (it can be big) and reset all global
        # langkit state so the next testcase starts from scratch.
        if lexer is not None:
            lexer._dfa_code = None
        langkit.reset()
def emit_and_print_errors(grammar, lexer=None,
                          warning_set=default_warning_set,
                          generate_unparser=False,
                          symbol_canonicalizer=None):
    """
    Compile the given grammar, emit code for it, and report how it went.

    Return the compile context on success, None if any diagnostic error was
    reported.

    :param langkit.parsers.Grammar grammar: The language grammar to use.
    :param langkit.lexer.Lexer lexer: The lexer to use along with the grammar.
        Use `lexer_example.foo_lexer` if left to None.
    :param WarningSet warning_set: Set of warnings to emit.
    :param bool generate_unparser: Whether to generate unparser.
    :param langkit.compile_context.LibraryEntity|None symbol_canonicalizer:
        Symbol canonicalizer to use for this context, if any.
    :rtype: None|langkit.compile_context.CompileCtx
    """
    # Fall back on the testsuite's example lexer when the caller did not
    # provide one.
    if lexer is None:
        from lexer_example import foo_lexer
        lexer = foo_lexer

    try:
        context = prepare_context(grammar, lexer, warning_set,
                                  symbol_canonicalizer=symbol_canonicalizer)
        context.emit('build', generate_unparser=generate_unparser)
        # ... and tell about how it went
    except DiagnosticError:
        # The diagnostics printed so far are enough: stay silent here
        return None
    else:
        print('Code generation was successful')
        return context
    finally:
        # Free the (potentially big) DFA code and reset langkit's global
        # state for the next testcase.
        lexer._dfa_code = None
        langkit.reset()
def run(label, **kwargs):
    """
    Run one build_and_run-based testcase.

    Print a ``== label ==`` header, declare a minimal node hierarchy, then
    build and run with the given extra keyword arguments. Diagnostic errors
    are reported and skipped rather than propagated.
    """
    print("== {} ==".format(label))

    class FooNode(ASTNode):
        pass

    class Example(FooNode):
        token_node = True

    try:
        build_and_run(lkt_file="expected_concrete_syntax.lkt",
                      version="<version-number>",
                      build_date="<build-date-number>",
                      **kwargs)
    except DiagnosticError:
        print("DiagnosticError: skipping...")

    # Reset global langkit state so testcases are independent
    langkit.reset()
    print("")
def run(md_constructor):
    """
    Emit and print the errors we get for the below grammar.

    `md_constructor` is called to create the lexical environment metadata.
    """
    print('== {} =='.format(md_constructor.__name__))

    class FooNode(ASTNode):
        pass

    class Example(FooNode):
        pass

    try:
        md_constructor()
    except DiagnosticError:
        # The metadata declaration itself was rejected: just reset state
        langkit.reset()
    else:
        # Metadata creation went fine: go on with code emission
        emit_and_print_errors(lkt_file='foo.lkt')
    print('')
def run(md_constructor):
    """
    Emit and print the errors we get for the below grammar.

    `md_constructor` is called to create the lexical environment metadata.
    """
    print('== {} =='.format(md_constructor.__name__))

    class FooNode(ASTNode):
        pass

    class Example(FooNode):
        pass

    grammar = Grammar('main_rule')
    grammar.add_rules(main_rule=Example('example'))

    try:
        md_constructor()
    except DiagnosticError:
        # The metadata declaration itself was rejected: just reset state
        langkit.reset()
    else:
        # Metadata creation went fine: go on with code emission
        emit_and_print_errors(grammar)
    print('')
def test(label, lkt_file):
    # Generator-based test fixture: print a header, give control back to the
    # caller at the `yield` point, then compile the given Lkt spec and reset
    # langkit's global state.
    #
    # NOTE(review): statement boundaries reconstructed from a collapsed
    # source line; assumes `yield` is a bare statement and
    # emit_and_print_errors runs as teardown — confirm against the original
    # file.
    print('== {} =='.format(label))
    yield
    emit_and_print_errors(lkt_file=lkt_file)
    langkit.reset()
    print()
def test(label):
    # Generator-based test fixture: print a header, give control back to the
    # caller at the `yield` point, then reset langkit's global state.
    #
    # NOTE(review): statement boundaries reconstructed from a collapsed
    # source line; assumes `yield` is a bare statement — confirm against the
    # original file.
    print('== {} =='.format(label))
    yield
    langkit.reset()
    print()
Case( Literal('def'), Alt(prev_token_cond=(Token.Var, ), send=Token.LexingFailure, match_size=3), Alt(send=Token.Def, match_size=3))) for name, value in sorted(locals().iteritems()): if not name.startswith('test_'): continue print('== {} =='.format(name)) lexer = Lexer(Token) value(lexer) class FooNode(ASTNode): pass class Example(FooNode): pass grammar = Grammar('main_rule') grammar.add_rules(main_rule=Example('example'), ) emit_and_print_errors(grammar, lexer) langkit.reset() print('') print('Done')
def build_and_run(grammar=None, py_script=None, ada_main=None, lexer=None,
                  lkt_file=None, types_from_lkt=False,
                  lkt_semantic_checks=False, ocaml_main=None,
                  warning_set=default_warning_set, generate_unparser=False,
                  symbol_canonicalizer=None, mains=False,
                  show_property_logging=False,
                  # Captures the module-level ``unparse_script`` value at
                  # function definition time
                  unparse_script=unparse_script,
                  strict_sound_envs: bool = False,
                  case_insensitive: bool = False,
                  version: str = "undefined",
                  build_date: str = "undefined",
                  full_error_traces: bool = True,
                  # None sentinel instead of a mutable ``[]`` default (a list
                  # default would be shared across calls). String annotation
                  # so no typing import is evaluated at runtime.
                  additional_make_args: "Optional[List[str]]" = None):
    """
    Compile and emit code for `ctx` and build the generated library. Then,
    execute the provided scripts/programs, if any.

    An exception is raised if any step fails (the script must return code 0).

    :param langkit.lexer.Lexer lexer: The lexer to use along with the
        grammar. See emit_and_print_errors.
    :param str|None lkt_file: If provided, file from which to read the Lkt
        language spec.
    :param bool types_from_lkt: If true (valid only when `lkt_file` is not
        None), first unparse the DSL and then do the build based on node
        definitions from the unparsing result. False by default.
    :param None|str py_script: If not None, name of the Python script to run
        with the built library available.
    :param None|str|list[str] ada_main: If not None, list of name of main
        source files for Ada programs to build and run with the generated
        library. If the input is a single string, consider it's a single
        main source file.
    :param None|str ocaml_main: If not None, name of the OCaml source file
        to build and run with the built library available.
    :param WarningSet warning_set: Set of warnings to emit.
    :param bool generate_unparser: Whether to generate unparser.
    :param langkit.compile_context.LibraryEntity|None symbol_canonicalizer:
        Symbol canonicalizer to use for this context, if any.
    :param bool mains: Whether to build mains.
    :param bool show_property_logging: If true, any property that has been
        marked with tracing activated will be traced on stdout by default,
        without need for any config file.
    :param None|str unparse_script: Script to unparse the language spec.
    :param strict_sound_envs: Pass --strict-sound-envs to generation.
    :param case_insensitive: See CompileCtx's constructor.
    :param version: See CompileCtx's constructor.
    :param build_date: See CompileCtx's constructor.
    :param full_error_traces: Whether to pass a --full-error-traces argument
        to "manage.py make".
    :param additional_make_args: Additional command-line arguments to pass
        to "manage.py make".
    """
    assert not types_from_lkt or lkt_file is not None

    class Manage(ManageScript):
        # Thin ManageScript subclass that reuses an already-prepared context
        # instead of creating a fresh one.
        def __init__(self, ctx):
            self._cached_context = ctx
            super().__init__(root_dir=os.getcwd())

        def create_context(self, args):
            return self._cached_context

    build_mode = 'dev'

    def manage_run(generate_only, types_from_lkt, additional_args):
        # Prepare a compile context and run "manage.py make" on it. Return
        # the (context, Manage instance) pair on success, raise
        # DiagnosticError if the make command failed.
        #
        # NOTE(review): `generate_only` is accepted but not used in this
        # body — presumably it once selected a generate-only make mode;
        # confirm whether it should forward an argument.
        ctx = prepare_context(grammar, lexer, lkt_file, warning_set,
                              symbol_canonicalizer=symbol_canonicalizer,
                              show_property_logging=show_property_logging,
                              types_from_lkt=types_from_lkt,
                              lkt_semantic_checks=lkt_semantic_checks,
                              case_insensitive=case_insensitive,
                              version=version,
                              build_date=build_date)

        m = Manage(ctx)

        extensions_dir = P.abspath('extensions')
        if P.isdir(extensions_dir):
            ctx.extensions_dir = extensions_dir

        # First build the library. Forward all test.py's arguments to the
        # libmanage call so that manual testcase runs can pass "-g", for
        # instance.
        argv = ['make'] + sys.argv[1:] + ['-vnone']
        if full_error_traces:
            argv.append("--full-error-traces")

        # Generate the public Ada API only when necessary (i.e. if we have
        # mains that do use this API). This reduces the time it takes to run
        # tests.
        if not mains and not ada_main:
            argv.append('--no-ada-api')

        argv.append('--build-mode={}'.format(build_mode))
        for w in WarningSet.available_warnings:
            argv.append('-{}{}'.format('W' if w in warning_set else 'w',
                                       w.name))
        if not pretty_print:
            argv.append('--no-pretty-print')
        if generate_unparser:
            argv.append('--generate-unparser')
        if strict_sound_envs:
            argv.append('--strict-sound-envs')

        # For testsuite performance, do not generate mains unless told
        # otherwise.
        if not mains:
            argv.append('--disable-all-mains')

        argv.extend(additional_args)
        argv.extend(additional_make_args or [])
        return_code = m.run_no_exit(argv)

        # Flush stdout and stderr, so that diagnostics appear
        # deterministically before the script/program output.
        sys.stdout.flush()
        sys.stderr.flush()

        if return_code != 0:
            raise DiagnosticError()

        return ctx, m

    unparse_args = (['--unparse-script', unparse_script]
                    if unparse_script else [])
    if unparse_script and types_from_lkt:
        # RA22-015: Unparse the language to concrete syntax, then use the
        # result to do a full build. Note that we don't unparse the DSL
        # during the second run, as dsl_unparse requires Python sources,
        # which the second run does not have access to.
        manage_run(generate_only=True, types_from_lkt=False,
                   additional_args=unparse_args)
        langkit.reset()
        ctx, m = manage_run(generate_only=False, types_from_lkt=True,
                            additional_args=[])
    else:
        ctx, m = manage_run(generate_only=False, types_from_lkt=False,
                            additional_args=unparse_args)

    # Write a "setenv" script to make developer investigation convenient
    with open('setenv.sh', 'w') as f:
        m.write_setenv(f)

    env = m.derived_env()

    def run(*argv, **kwargs):
        # Run the given command with the library environment, optionally
        # under Valgrind when Valgrind support is enabled.
        valgrind = kwargs.pop('valgrind', False)
        suppressions = kwargs.pop('valgrind_suppressions', [])
        assert not kwargs
        if valgrind_enabled and valgrind:
            argv = valgrind_cmd(list(argv), suppressions)
        subprocess.check_call(argv, env=env)

    if py_script is not None:
        # Run the Python script. Note that in order to use the generated
        # library, we have to use the special Python interpreter the
        # testsuite provides us. See the corresponding code in
        # testuite_support/python_driver.py.
        python_interpreter = os.environ['PYTHON_INTERPRETER']
        run(python_interpreter, py_script)

    if ada_main is not None:
        if isinstance(ada_main, str):
            ada_main = [ada_main]

        # Generate a project file to build the given Ada main and then run
        # the program. Do a static build to improve the debugging experience.
        with open('gen.gpr', 'w') as f:
            f.write(project_template.format(
                main_sources=', '.join('"{}"'.format(m) for m in ada_main)))
        run('gprbuild', '-Pgen', '-q', '-p', '-XLIBRARY_TYPE=static',
            '-XXMLADA_BUILD=static')

        for i, m in enumerate(ada_main):
            assert m.endswith('.adb')
            if i > 0:
                print('')
            if len(ada_main) > 1:
                print('== {} =='.format(m))
            sys.stdout.flush()
            run(P.join('obj', m[:-4]), valgrind=True)

    if ocaml_main is not None:
        # Set up a Dune project
        with open('dune', 'w') as f:
            f.write("""
(executable
  (name {})
  (flags (-w -9))
  (libraries {}))
""".format(ocaml_main, ctx.c_api_settings.lib_name))
        with open('dune-project', 'w') as f:
            f.write('(lang dune 1.6)')

        # Build the ocaml executable
        run('dune', 'build', '--display', 'quiet', '--root', '.',
            './{}.exe'.format(ocaml_main))

        # Run the ocaml executable
        run('./_build/default/{}.exe'.format(ocaml_main),
            valgrind=True, valgrind_suppressions=['ocaml'])