def prepare_context(grammar, lexer=None, library_fields_all_public=False):
    """
    Build a compile context and reset the build directory for code
    generation.

    :param langkit.parsers.Grammar grammar: Grammar for the language this
        context compiles.
    :param langkit.lexer.Lexer lexer: Lexer for the language this context
        compiles. When left to None, fall back to the example lexer.
    :param bool library_fields_all_public: Whether code generation must
        export private fields (disabled by default).
    """
    # Default to the example lexer when none is provided
    if lexer is None:
        from lexer_example import foo_lexer
        lexer = foo_lexer

    # Start from a pristine build directory
    if os.path.exists('build'):
        shutil.rmtree('build')
    os.mkdir('build')

    # Attempt to emit code
    result = CompileCtx(lang_name='Foo', lexer=lexer, grammar=grammar)
    result.library_fields_all_public = library_fields_all_public
    return result
def prepare_context(grammar=None, lexer=None, lkt_file=None,
                    warning_set=default_warning_set,
                    symbol_canonicalizer=None,
                    show_property_logging=False,
                    types_from_lkt=False,
                    lkt_semantic_checks=False,
                    case_insensitive: bool = False,
                    version: Optional[str] = None,
                    build_date: Optional[str] = None,
                    standalone: bool = False):
    """
    Build a compile context and reset the build directory for code
    generation.

    :param langkit.parsers.Grammar grammar: Grammar for the language this
        context compiles.
    :param langkit.lexer.Lexer lexer: Lexer for the language this context
        compiles.
    :param str|None lkt_file: Optional file from which to read the Lkt
        language spec.
    :param WarningSet warning_set: Set of warnings to emit.
    :param langkit.compile_context.LibraryEntity|None symbol_canonicalizer:
        Optional symbol canonicalizer for this context.
    :param bool show_property_logging: See CompileCtx.show_property_logging.
    :param bool types_from_lkt: See CompileCtx.types_from_lkt.
    :param case_insensitive: See CompileCtx's constructor.
    :param version: See CompileCtx's constructor.
    :param build_date: See CompileCtx's constructor.
    :param standalone: See CompileCtx's constructor.
    """
    # Start from a pristine build directory
    if P.exists('build'):
        shutil.rmtree('build')
    os.mkdir('build')

    # Attempt to emit code
    result = CompileCtx(lang_name='Foo',
                        short_name='foo',
                        lexer=lexer,
                        grammar=grammar,
                        symbol_canonicalizer=symbol_canonicalizer,
                        show_property_logging=show_property_logging,
                        lkt_file=lkt_file,
                        types_from_lkt=types_from_lkt,
                        lkt_semantic_checks=lkt_semantic_checks,
                        case_insensitive=case_insensitive,
                        version=version,
                        build_date=build_date,
                        standalone=standalone)
    result.warnings = warning_set
    result.pretty_print = pretty_print
    return result
def prepare_context(grammar, lexer=None, warning_set=default_warning_set):
    """
    Build a compile context and reset the build directory for code
    generation.

    :param langkit.parsers.Grammar grammar: Grammar for the language this
        context compiles.
    :param langkit.lexer.Lexer lexer: Lexer for the language this context
        compiles. When left to None, fall back to the example lexer.
    :param WarningSet warning_set: Set of warnings to emit.
    """
    # Default to the example lexer when none is provided
    if lexer is None:
        from lexer_example import foo_lexer
        lexer = foo_lexer

    # Start from a pristine build directory
    if os.path.exists('build'):
        shutil.rmtree('build')
    os.mkdir('build')

    # Attempt to emit code
    result = CompileCtx(lang_name='Foo', lexer=lexer, grammar=grammar)
    result.warnings = warning_set
    result.pretty_print = pretty_print
    return result
def emit_gdb_helpers(self, ctx: CompileCtx) -> None:
    """
    Write the support files needed by GDB helpers.
    """
    lib_name = ctx.ada_api_settings.lib_name.lower()
    gdbinit_path = os.path.join(self.lib_root, 'gdbinit.py')

    # The ".gdbinit.py" GDB script is emitted unconditionally
    gdbinit_code = ctx.render_template(
        'gdb_py',
        langkit_path=os.path.dirname(os.path.dirname(__file__)),
        lib_name=lib_name,
        prefix=ctx.short_name_or_long,
    )
    write_source_file(gdbinit_path, gdbinit_code, self.post_process_python)

    # The C file that embeds the absolute path to this script in the
    # generated library is emitted only on demand.
    if self.generate_gdb_hook:
        gdb_c_path = os.path.join(self.src_dir,
                                  '{}-gdb.c'.format(lib_name))
        gdb_c_code = ctx.render_template('gdb_c',
                                         gdbinit_path=gdbinit_path,
                                         os_name=os.name)
        write_source_file(gdb_c_path, gdb_c_code, self.post_process_cpp)
        self.project_languages.add('C')
def emit_ocaml_api(self, ctx: CompileCtx) -> None:
    """
    Generate binding for the external OCaml API.

    This emits the .ml/.mli modules, the dune build files and an empty
    opam file in ``self.ocaml_dir``. It is a no-op when the context has no
    OCaml API settings.
    """
    # Nothing to do when OCaml bindings were not requested
    if not ctx.ocaml_api_settings:
        return

    ctx.ocaml_api_settings.init_type_graph()

    # Make sure the output directory exists
    if not os.path.isdir(self.ocaml_dir):
        os.mkdir(self.ocaml_dir)

    # Emit everything with the camel-case naming convention active
    with names.camel:
        # Write an empty ocamlformat file so we can call ocamlformat
        write_source_file(os.path.join(self.ocaml_dir, '.ocamlformat'), '')

        # NOTE(review): this rebinds the "ctx" parameter to the globally
        # registered context — presumably they are the same object; confirm
        # and consider removing this rebinding.
        ctx = get_context()

        # Emit the OCaml module implementation (.ml)
        code = ctx.render_template("ocaml_api/module_ocaml",
                                   c_api=ctx.c_api_settings,
                                   ocaml_api=ctx.ocaml_api_settings)

        ocaml_filename = '{}.ml'.format(ctx.c_api_settings.lib_name)
        write_ocaml_file(
            os.path.join(self.ocaml_dir, ocaml_filename),
            code,
            self.post_process_ocaml,
        )

        # Emit the corresponding module signature (.mli)
        code = ctx.render_template("ocaml_api/module_sig_ocaml",
                                   c_api=ctx.c_api_settings,
                                   ocaml_api=ctx.ocaml_api_settings)

        ocaml_filename = '{}.mli'.format(ctx.c_api_settings.lib_name)
        write_ocaml_file(
            os.path.join(self.ocaml_dir, ocaml_filename),
            code,
            self.post_process_ocaml,
        )

        # Emit dune file to easily compile and install bindings
        code = ctx.render_template("ocaml_api/dune_ocaml",
                                   c_api=ctx.c_api_settings,
                                   ocaml_api=ctx.ocaml_api_settings)
        write_source_file(os.path.join(self.ocaml_dir, 'dune'), code)
        write_source_file(os.path.join(self.ocaml_dir, 'dune-project'),
                          '(lang dune 1.6)')

        # Write an empty opam file to install the lib with dune
        write_source_file(
            os.path.join(self.ocaml_dir,
                         '{}.opam'.format(ctx.c_api_settings.lib_name)),
            '')
def emit_mains(self, ctx: CompileCtx) -> None:
    """
    Write the sources and the project file for the main programs.
    """
    with names.camel_with_underscores:
        # Source for the "parse" main program
        parse_source = ctx.render_template('main_parse_ada')
        write_ada_file(path.join(self.lib_root, 'src-mains'),
                       AdaSourceKind.body, ['Parse'], parse_source,
                       self.post_process_ada)

        # Project file that builds all the mains
        project_source = ctx.render_template(
            'mains_project_file',
            lib_name=ctx.ada_api_settings.lib_name,
            source_dirs=self.main_source_dirs,
            main_programs=self.main_programs)
        write_source_file(self.mains_project, project_source)
def emit_python_api(self, ctx: CompileCtx) -> None:
    """
    Write the Python binding module and its packaging files.
    """
    def emit_py_template(file_path: str, *args: Any,
                         **kwargs: Any) -> None:
        # Render with camel-case naming active, then write the result
        with names.camel:
            rendered = ctx.render_template(*args, **kwargs)
        self.write_python_file(file_path, rendered)

    # The Python module itself
    emit_py_template(
        os.path.join(self.python_pkg_dir, '__init__.py'),
        'python_api/module_py',
        c_api=ctx.c_api_settings,
        pyapi=ctx.python_api_settings,
        module_name=ctx.python_api_settings.module_name)

    # Empty "py.typed" marker so users can easily leverage the type
    # annotations in the generated bindings.
    self.write_source_file(os.path.join(self.python_pkg_dir, "py.typed"),
                           "")

    # setup.py script to easily install the Python binding
    self.write_python_file(
        os.path.join(self.lib_root, 'python', 'setup.py'),
        ctx.render_template('python_api/setup_py'))
def create_context(self, args):
    """Build the compile context for the LKQL language."""
    from langkit.compile_context import ADA_BODY, CompileCtx
    from language.lexer import lkql_lexer
    from language.parser import lkql_grammar

    result = CompileCtx(lang_name='LKQL',
                        short_name='lkql',
                        lexer=lkql_lexer,
                        grammar=lkql_grammar)

    # The implementation body relies on the prelude unit
    result.add_with_clause('Implementation', ADA_BODY,
                           'Liblkqllang.Prelude', use_clause=True)
    return result
def create_context(self, args):
    """Build the compile context for the KConfig language."""
    from langkit.compile_context import CompileCtx
    from language.lexer import kconfig_lexer
    from language.parser import kconfig_grammar

    result = CompileCtx(lang_name='KConfig',
                        lexer=kconfig_lexer,
                        grammar=kconfig_grammar)
    return result
def create_context(self, args):
    """Build the compile context for the lkt language."""
    from langkit.compile_context import CompileCtx
    from language.lexer import lkt_lexer
    from language.parser import lkt_grammar

    result = CompileCtx(lang_name='lkt',
                        lexer=lkt_lexer,
                        grammar=lkt_grammar)
    return result
def create_context(self, args):
    """Build the compile context for the Lkql language."""
    from langkit.compile_context import AdaSourceKind, CompileCtx
    from language.lexer import lkql_lexer
    from language.parser import lkql_grammar

    result = CompileCtx(lang_name='Lkql',
                        short_name='lkql',
                        lexer=lkql_lexer,
                        grammar=lkql_grammar)

    # The implementation body relies on the prelude unit
    result.add_with_clause('Implementation', AdaSourceKind.body,
                           'Liblkqllang.Prelude', use_clause=True)
    return result
def create_context(self, args):
    """Build a Lkt-driven compile context for the current language."""
    # "name", "name_low" and "standalone" are not defined locally: they
    # come from an enclosing scope.
    settings = dict(
        lang_name=name,
        lexer=None,
        grammar=None,
        lkt_file=f"{name_low}.lkt",
        types_from_lkt=True,
        standalone=standalone,
    )
    return CompileCtx(**settings)
def prepare_context(grammar=None, lexer=None, lkt_file=None,
                    warning_set=default_warning_set,
                    symbol_canonicalizer=None,
                    show_property_logging=False):
    """
    Build a compile context and reset the build directory for code
    generation.

    :param langkit.parsers.Grammar grammar: Grammar for the language this
        context compiles.
    :param langkit.lexer.Lexer lexer: Lexer for the language this context
        compiles.
    :param str|None lkt_file: Optional file from which to read the Lkt
        language spec.
    :param WarningSet warning_set: Set of warnings to emit.
    :param langkit.compile_context.LibraryEntity|None symbol_canonicalizer:
        Optional symbol canonicalizer for this context.
    :param bool show_property_logging: See CompileCtx.show_property_logging.
    """
    # Start from a pristine build directory
    if P.exists('build'):
        shutil.rmtree('build')
    os.mkdir('build')

    # Attempt to emit code
    result = CompileCtx(lang_name='Foo',
                        short_name='Foo',
                        lexer=lexer,
                        grammar=grammar,
                        symbol_canonicalizer=symbol_canonicalizer,
                        show_property_logging=show_property_logging,
                        lkt_file=lkt_file)
    result.warnings = warning_set
    result.pretty_print = pretty_print
    return result
def create_context(self, args):
    """Build the compile context for the Dependz language."""
    from langkit.compile_context import CompileCtx
    from language.lexer import dependz_lexer
    from language.grammar import dependz_grammar

    result = CompileCtx(lang_name='Dependz',
                        short_name='LDL',
                        lexer=dependz_lexer,
                        grammar=dependz_grammar)
    return result
def create_context(self, args):
    """Build the compile context for the Lkt language."""
    from langkit.compile_context import CompileCtx, LibraryEntity
    from language.lexer import lkt_lexer
    from language.parser import lkt_grammar

    # Liblktlang ships its own default unit provider
    provider = LibraryEntity('Liblktlang.Default_Provider', 'Create')
    return CompileCtx(lang_name='Lkt',
                      short_name='lkt',
                      lexer=lkt_lexer,
                      grammar=lkt_grammar,
                      default_unit_provider=provider)
def emit_python_api(self, ctx: CompileCtx) -> None:
    """
    Generate the Python binding module.

    Rendered Python sources are optionally reformatted with Black (when
    ``self.pretty_print`` is true and Black is installed). If Black raises
    a ``SyntaxError`` on a rendered module, the unformatted code is still
    written to disk (to ease debugging) and the error is then re-raised.
    """
    def pretty_print(code: str) -> str:
        # Reformat "code" with Black if pretty-printing is enabled and
        # Black is available; otherwise return it unchanged.
        if not self.pretty_print:
            return code

        try:
            from black import FileMode, format_file_contents
            return format_file_contents(code, fast=True, mode=FileMode())
        except ImportError:
            check_source_language(
                False,
                'Black not available, not pretty-printing Python code',
                severity=Severity.warning,
                ok_for_codegen=True)
            return code

    def render_python_template(file_path: str,
                               *args: Any,
                               **kwargs: Any) -> None:
        with names.camel:
            code = ctx.render_template(*args, **kwargs)

        # If pretty-printing failed, write the original code anyway in
        # order to ease debugging, then re-raise the error.
        #
        # Bug fix: the caught exception used to be discarded without ever
        # being stored in "exc", so the trailing "raise" below was dead
        # code and syntax errors went unnoticed. Bind it explicitly.
        exc = None
        try:
            pp_code = pretty_print(code)
        except SyntaxError as pp_error:
            exc = pp_error
            pp_code = code

        write_source_file(file_path, pp_code, self.post_process_python)
        if exc:
            raise exc

    # Emit the Python modules themselves
    render_python_template(os.path.join(self.python_pkg_dir, '__init__.py'),
                           'python_api/module_py',
                           c_api=ctx.c_api_settings,
                           pyapi=ctx.python_api_settings,
                           module_name=ctx.python_api_settings.module_name)

    # Emit the empty "py.typed" file so that users can easily leverage type
    # annotations in the generated bindings.
    write_source_file(os.path.join(self.python_pkg_dir, "py.typed"), "")

    # Emit the setup.py script to easily install the Python binding
    setup_py_file = os.path.join(self.lib_root, 'python', 'setup.py')
    write_source_file(setup_py_file,
                      ctx.render_template('python_api/setup_py'),
                      self.post_process_python)
def compile_rules(self, context: CompileCtx) -> None:
    """
    Pass to turn the lexer DSL into our internal regexp objects.

    This builds one NFA per lexing rule and stores their union in
    ``context.nfa_start``.
    """
    # This pass is expected to run at most once per context
    assert context.nfa_start is None

    regexps = RegexpCollection(case_insensitive=context.case_insensitive)

    # Import patterns into regexps
    for name, pattern, loc in self.patterns:
        with Context(loc):
            regexps.add_pattern(name, pattern)

    # Now turn each rule into a NFA
    nfas = []

    for i, a in enumerate(self.rules):
        assert isinstance(a, RuleAssoc)

        # Check that actions never emit Termination and LexingFailure
        # tokens. These tokens are supposed to be emitted by the lexing
        # engine only.
        def check(token: Action) -> None:
            if token in (self.tokens.Termination,
                         self.tokens.LexingFailure):
                assert isinstance(token, TokenAction)
                error(f'{token.dsl_name} is reserved for automatic actions'
                      f' only')

        # Case actions carry several alternatives: check each emitted
        # token. Ignore actions emit nothing, so there is nothing to
        # check for them.
        if isinstance(a.action, Case.CaseAction):
            for alt in a.action.all_alts:
                check(alt.send)
        elif isinstance(a.action, Ignore):
            pass
        else:
            assert isinstance(a.action, TokenAction)
            check(a.action)

        assert a.location is not None
        with Context(a.location):
            nfa_start, nfa_end = regexps.nfa_for(a.matcher.regexp)
        nfas.append(nfa_start)

        # The first rule that was added must have precedence when multiple
        # rules compete for the longest match. To implement this behavior,
        # we associate increasing ids to each token action.
        nfa_end.label = (i, a.action)

    # Create a big OR for all possible accepted patterns
    context.nfa_start = NFAState()
    for nfa in nfas:
        context.nfa_start.add_transition(None, nfa)
def emit_python_playground(self, ctx: CompileCtx) -> None:
    """
    Write the Python playground script.
    """
    script_path = os.path.join(
        self.scripts_dir,
        '{}_playground'.format(ctx.short_name_or_long))

    script_code = ctx.render_template(
        'python_api/playground_py',
        module_name=ctx.python_api_settings.module_name)
    write_source_file(script_path, script_code, self.post_process_python)

    # The playground script must be directly executable
    os.chmod(script_path, 0o775)
def emit_lib_project_file(self, ctx: CompileCtx) -> None:
    """
    Write the project file for the generated library.
    """
    self._project_file_emitted = True

    project_code = ctx.render_template(
        'project_file',
        lib_name=ctx.ada_api_settings.lib_name,
        os_path=os.path,
        project_path=os.path.dirname(self.main_project_file),
    )
    write_source_file(self.main_project_file, project_code)
def create_context(self, args):
    """Build the compile context for the GPR language."""
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import CompileCtx
    from language.parser import gpr_grammar
    from language.parser.lexer import gpr_lexer

    settings = dict(
        lang_name='GPR',
        lexer=gpr_lexer,
        grammar=gpr_grammar,
        lib_name='GPR_Parser',
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
    )
    return CompileCtx(**settings)
def create_context(self, args):
    """Build the compile context for Libadalang."""
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import ADA_BODY, CompileCtx, LibraryEntity
    from language.lexer import ada_lexer
    from language.grammar import ada_grammar
    from language.documentation import libadalang_docs

    result = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        env_hook_subprogram=LibraryEntity(
            'Libadalang.Unit_Files.Env_Hook', 'Env_Hook'),
        default_unit_provider=LibraryEntity(
            'Libadalang.Unit_Files.Default', 'Default_Unit_Provider'),
        symbol_canonicalizer=LibraryEntity(
            'Libadalang.Sources', 'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Make GNATCOLL and unit-file helpers visible from the C binding body
    c_body_units = ('GNATCOLL.Projects',
                    'GNATCOLL.VFS',
                    'Libadalang.Unit_Files.Projects')
    for unit in c_body_units:
        result.add_with_clause('Analysis.Implementation.C', ADA_BODY, unit,
                               use_clause=True)

    result.add_with_clause('Analysis', ADA_BODY, 'Libadalang.Unit_Files',
                           use_clause=True)

    # Lal needs access to the static expression evaluator, for name
    # resolution of aggregates.
    result.add_with_clause('Analysis.Implementation', ADA_BODY,
                           'Libadalang.Expr_Eval', use_clause=False)

    return result
def create_context(self, args):
    """Build the compile context for Libadalang."""
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import CompileCtx, LibraryEntity
    from language.lexer import ada_lexer
    from language.grammar import ada_grammar
    from language.documentation import libadalang_docs

    # Library entities that the generated library must reference
    env_hook = LibraryEntity('Libadalang.Unit_Files.Env_Hook', 'Env_Hook')
    unit_provider = LibraryEntity('Libadalang.Unit_Files.Default',
                                  'Default_Unit_Provider')
    canonicalizer = LibraryEntity('Libadalang.Sources', 'Canonicalize')

    return CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        env_hook_subprogram=env_hook,
        default_unit_provider=unit_provider,
        symbol_canonicalizer=canonicalizer,
        documentations=libadalang_docs,
    )
def create_context(self, args):
    """Build the compile context for Libadalang."""
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import ADA_BODY, CompileCtx, LibraryEntity
    from ada.lexer import ada_lexer
    from ada.grammar import ada_grammar
    from ada.documentation import libadalang_docs

    result = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        default_unit_provider=LibraryEntity(
            'Libadalang.Internal_Default_Provider', 'Create'),
        symbol_canonicalizer=LibraryEntity('Libadalang.Sources',
                                           'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Internals need to access environment hooks and the symbolizer
    result.add_with_clause('Implementation', ADA_BODY,
                           'Libadalang.Env_Hooks', use_clause=True)
    result.add_with_clause('Implementation', ADA_BODY,
                           'Libadalang.Sources', use_clause=False)

    # Bind Libadalang's custom iterators to the public API
    result.add_with_clause('Iterators', ADA_BODY,
                           'Libadalang.Iterators.Extensions')

    # LAL.Analysis.Is_Keyword is implemented using LAL.Lexer's
    result.add_with_clause('Analysis', ADA_BODY, 'Libadalang.Lexer')

    # Post-process generated sources with the copyright formatters
    result.post_process_ada = ada.copyright.format_ada
    result.post_process_cpp = ada.copyright.format_c
    result.post_process_python = ada.copyright.format_python
    return result
def create_context(self, args):
    """Build and configure the compile context for Libadalang."""
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import (AdaSourceKind, CompileCtx,
                                         LibraryEntity)
    from ada.lexer import ada_lexer
    from ada.grammar import ada_grammar
    from ada.documentation import libadalang_docs

    ctx = CompileCtx(
        lang_name='Ada',
        short_name='lal',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        default_unit_provider=LibraryEntity(
            'Libadalang.Internal_Default_Provider', 'Create'),
        symbol_canonicalizer=LibraryEntity('Libadalang.Sources',
                                           'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Internals need to access environment hooks and the symbolizer
    ctx.add_with_clause('Implementation', AdaSourceKind.body,
                        'Libadalang.Env_Hooks', use_clause=True)
    ctx.add_with_clause('Implementation', AdaSourceKind.body,
                        'Libadalang.Sources', use_clause=False)

    # Bind Libadalang's custom iterators to the public API
    ctx.add_with_clause('Iterators', AdaSourceKind.body,
                        'Libadalang.Iterators.Extensions')

    # LAL.Analysis.Is_Keyword is implemented using LAL.Lexer's
    ctx.add_with_clause('Analysis', AdaSourceKind.body, 'Libadalang.Lexer')

    # LAL.Lexer.Is_Keyword's implementation uses precomputed symbols
    ctx.add_with_clause('Lexer', AdaSourceKind.body,
                        'Libadalang.Implementation')

    # Post-process generated sources with the copyright formatters
    ctx.post_process_ada = ada.copyright.format_ada
    ctx.post_process_cpp = ada.copyright.format_c
    ctx.post_process_python = ada.copyright.format_python
    ctx.post_process_ocaml = ada.copyright.format_ocaml

    # Register our custom exception types
    ctx.register_exception_type(
        package=["GNATCOLL", "Projects"],
        name=names.Name("Invalid_Project"),
        doc_section="libadalang.project_provider",
    )
    ctx.register_exception_type(
        package=["Libadalang", "Project_Provider"],
        name=names.Name("Unsupported_View_Error"),
        doc_section="libadalang.project_provider",
    )
    return ctx
def __init__(self, context: CompileCtx, lib_root: str,
             extensions_dir: Optional[str],
             main_source_dirs: Set[str] = set(),
             main_programs: Set[str] = set(),
             no_property_checks: bool = False,
             generate_ada_api: bool = True,
             generate_gdb_hook: bool = True,
             pretty_print: bool = False,
             post_process_ada: PostProcessFn = None,
             post_process_cpp: PostProcessFn = None,
             post_process_python: PostProcessFn = None,
             post_process_ocaml: PostProcessFn = None,
             coverage: bool = False,
             relative_project: bool = False,
             unparse_script: Optional[str] = None):
    """
    Generate sources for the analysis library. Also emit a tiny program
    useful for testing purposes.

    :param lib_root: Path of the directory in which the library should be
        generated.

    :param extensions_dir: Directory to contain extensions for code
        generation. If None is provided, assume there is no extension.

    :param main_source_dirs: List of source directories to use in the
        project file for mains. Source directories must be relative to the
        mains project file directory (i.e. $BUILD/src-mains).

    :param main_programs: List of names for programs to build in addition
        to the generated library. To each X program, there must be a X.adb
        source file in the $BUILD/src directory.

    :param no_property_checks: If True, do not emit safety checks in the
        generated code for properties. Namely, this disables null checks
        on field access.

    :param generate_ada_api: If True, generate the public Ada API. If
        False and there is no main to generate, do not generate this Ada
        API.

    :param generate_gdb_hook: Whether to generate the
        ".debug_gdb_scripts" section. Good for debugging, but better to
        disable for releases.

    :param pretty_print: If true, pretty-print the generated sources.

    :param post_process_ada: Optional post-processing for generated Ada
        source code.

    :param post_process_cpp: Optional post-processing for generated C++
        source code.

    :param post_process_python: Optional post-processing for generated
        Python source code.

    :param post_process_ocaml: Optional post-processing for generated
        OCaml source code.

    :param coverage: Instrument the generated library to compute its code
        coverage. This requires GNATcoverage.

    :param relative_project: See libmanage's --relative-project option.
    """
    self.context = context
    self.verbosity = context.verbosity

    self.lib_root = lib_root
    self.cache = Cache(os.path.join(self.lib_root, 'obj', 'langkit_cache'))

    self.extensions_dir = extensions_dir

    # TODO: contain the add_template_dir calls to this context (i.e. avoid
    # global mutation).
    if self.extensions_dir:
        add_template_dir(self.extensions_dir)

    for dirpath in keep(self.context.template_lookup_extra_dirs):
        add_template_dir(dirpath)

    self.no_property_checks = no_property_checks
    # The Ada API is mandatory as soon as there are main programs to build
    self.generate_ada_api = generate_ada_api or bool(main_programs)
    self.generate_gdb_hook = generate_gdb_hook
    self.generate_unparser = context.generate_unparser
    self.pretty_print = pretty_print
    self.post_process_ada = post_process_ada
    self.post_process_cpp = post_process_cpp
    self.post_process_python = post_process_python
    self.post_process_ocaml = post_process_ocaml
    self.coverage = coverage
    self.gnatcov = context.gnatcov
    self.relative_project = relative_project

    # Automatically add all source files in the "extensions/src" directory
    # to the generated library project.
    self.extensions_src_dir = None
    if self.extensions_dir:
        src_dir = path.join(self.extensions_dir, 'src')
        if path.isdir(src_dir):
            self.extensions_src_dir = src_dir
            # Hidden files (leading dot) are skipped
            for filename in os.listdir(src_dir):
                filepath = path.join(src_dir, filename)
                if path.isfile(filepath) and not filename.startswith('.'):
                    self.context.additional_source_files.append(filepath)

    self.main_source_dirs = main_source_dirs
    self.main_programs = main_programs

    self.lib_name_low = context.ada_api_settings.lib_name.lower()
    """
    Lower-case name for the generated library.
    """

    self.lib_name_up = context.ada_api_settings.lib_name.upper()
    """
    Upper-case name for the generated library.
    """

    # Paths for the various directories in which code is generated
    self.src_dir = path.join(self.lib_root, "src")
    self.src_mains_dir = path.join(self.lib_root, "src-mains")
    self.scripts_dir = path.join(self.lib_root, "scripts")
    self.python_dir = path.join(self.lib_root, "python")
    self.python_pkg_dir = path.join(
        self.lib_root, "python", context.python_api_settings.module_name)
    self.ocaml_dir = path.join(self.lib_root, "ocaml")

    self.lib_project = path.join(self.lib_root, f"{self.lib_name_low}.gpr")
    self.mains_project = path.join(self.lib_root, "mains.gpr")

    self.dfa_code: DFACodeGenHolder
    """
    Holder for the data structures used to generate code for the lexer
    state machine (DFA). As an optimization, it is left to None if we
    decide not to generate it (i.e. when the already generated sources are
    up-to-date).
    """

    self._project_file_emitted = False
    """
    Whether we emitted a project file for the generated library.

    :type: bool
    """

    self.project_languages = {'Ada'}
    """
    List of GPR names for languages used in the generated library.

    :type: set[str]
    """

    self.library_interfaces = set()
    """
    Set of source file base names for all sources that must appear in the
    "Interfaces" attribute of the generated library project file.

    :type: set[str]
    """

    self.instr_md = InstrumentationMetadata()

    # Add all additional source files to the list of library interfaces
    # and declare them as such in instrumentation metadata.
    for f in context.additional_source_files:
        self.add_library_interface(f, generated=False)

    if self.coverage:
        # Coverage instrumentation requires GNATcoverage support on the
        # context.
        assert self.gnatcov

        # Add the buffer-list unit from GNATcoverage's instrumentation to
        # the list of library interfaces. TODO: hopefully, we should not
        # have to do this anymore after S916-064 is addressed.
        self.library_interfaces.add(self.gnatcov.buffer_list_file(self))

    self.main_project_file = os.path.join(self.lib_root,
                                          f'{self.lib_name_low}.gpr')

    self.unparse_script = unparse_script
    """
    RA22-015: If set to something else than None, then the "dsl unparse"
    pass will be run on the given script.

    :type: langkit.compile_context.UnparseScript|None
    """

    # Determine whether we have user external properties. If so,
    # automatically WITH $.Implementation.Extensions from the body of
    # $.Analysis and $.Implementation.
    if any(prop.user_external
           for prop in context.all_properties(include_inherited=True)):
        for unit in ('Analysis', 'Implementation', 'Implementation.C'):
            context.add_with_clause(
                unit,
                AdaSourceKind.body,
                '{}.Implementation.Extensions'.format(
                    context.ada_api_settings.lib_name),
                use_clause=True)
def create_context(self, args):
    """Build and configure the compile context for Libadalang."""
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import (ADA_BODY, ADA_SPEC, CompileCtx,
                                         LibraryEntity)
    from language.lexer import ada_lexer
    from language.grammar import ada_grammar
    from language.documentation import libadalang_docs

    ctx = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        env_hook_subprogram=LibraryEntity(
            'Libadalang.Env_Hooks', 'Env_Hook'
        ),
        default_unit_provider=LibraryEntity('Libadalang.Unit_Files',
                                            'Default_Provider'),
        symbol_canonicalizer=LibraryEntity('Libadalang.Sources',
                                           'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Make GNATCOLL and the unit providers visible from the C binding body
    for unit in ('GNATCOLL.Projects',
                 'GNATCOLL.VFS',
                 'Libadalang.Project_Provider',
                 'Libadalang.Auto_Provider'):
        ctx.add_with_clause('Implementation.C', ADA_BODY, unit,
                            use_clause=True)

    ctx.add_with_clause('Implementation', ADA_BODY, 'Libadalang.Env_Hooks',
                        use_clause=True)

    # Libadalang needs access to the static expression evaluator, for name
    # resolution of aggregates.
    ctx.add_with_clause('Implementation', ADA_BODY, 'Libadalang.Expr_Eval',
                        use_clause=False)

    ctx.add_with_clause('Implementation', ADA_BODY, 'Libadalang.Doc_Utils',
                        use_clause=False)

    # It also needs access to the literal decoders
    ctx.add_with_clause('Implementation', ADA_BODY, 'Libadalang.Sources',
                        use_clause=False)

    ctx.add_with_clause('Implementation', ADA_BODY,
                        'Ada.Containers.Hashed_Maps', use_clause=False)

    # Our iterators are implemented using internal data structures
    ctx.add_with_clause('Iterators', ADA_SPEC, 'Libadalang.Common',
                        is_private=True)
    ctx.add_with_clause('Iterators', ADA_BODY, 'Libadalang.Implementation',
                        use_clause=True)
    ctx.add_with_clause('Iterators', ADA_BODY, 'Libadalang.Converters',
                        use_clause=True)

    # LAL.Analysis.Is_Keyword is implemented using LAL.Lexer's
    ctx.add_with_clause('Analysis', ADA_BODY, 'Libadalang.Lexer')

    # Post-process generated sources with the copyright formatters
    ctx.post_process_ada = copyright.format_ada
    ctx.post_process_cpp = copyright.format_c
    ctx.post_process_python = copyright.format_python

    return ctx
def create_context(self, args: object) -> CompileCtx:
    """Build the compile context for the RFLX language."""
    # "lexer" and "grammar" are not defined locally: they come from an
    # enclosing scope.
    result = CompileCtx(lang_name="RFLX", lexer=lexer, grammar=grammar)
    return result
def run(self, context: CompileCtx) -> None:
    """
    Apply ``self.pass_fn`` to every non-inherited property in ``context``,
    within each property's diagnostic context.
    """
    properties = context.all_properties(include_inherited=False)
    for prop in properties:
        with prop.diagnostic_context:
            self.pass_fn(prop, context)