def create_context(self, args):
    """Build and return the compilation context for Libadalang."""
    # Late imports on purpose: they must execute only once the coverage
    # computation has actually started.
    from langkit.compile_context import ADA_BODY, CompileCtx, LibraryEntity
    from ada.lexer import ada_lexer
    from ada.grammar import ada_grammar
    from ada.documentation import libadalang_docs

    default_provider = LibraryEntity(
        'Libadalang.Internal_Default_Provider', 'Create')
    canonicalizer = LibraryEntity('Libadalang.Sources', 'Canonicalize')

    ctx = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        default_unit_provider=default_provider,
        symbol_canonicalizer=canonicalizer,
        documentations=libadalang_docs,
    )

    # Internals need to access environment hooks and the symbolizer
    ctx.add_with_clause(
        'Implementation', ADA_BODY, 'Libadalang.Env_Hooks', use_clause=True)
    ctx.add_with_clause(
        'Implementation', ADA_BODY, 'Libadalang.Sources', use_clause=False)

    # Bind Libadalang's custom iterators to the public API
    ctx.add_with_clause(
        'Iterators', ADA_BODY, 'Libadalang.Iterators.Extensions')

    # LAL.Analysis.Is_Keyword is implemented using LAL.Lexer's
    ctx.add_with_clause('Analysis', ADA_BODY, 'Libadalang.Lexer')

    ctx.post_process_ada = ada.copyright.format_ada
    ctx.post_process_cpp = ada.copyright.format_c
    ctx.post_process_python = ada.copyright.format_python

    # Register our custom exception types
    for package_names, exc_name in (
        (("GNATCOLL", "Projects"), "Invalid_Project"),
        (("Libadalang", "Project_Provider"), "Unsupported_View_Error"),
    ):
        ctx.register_exception_type(
            package=[names.Name(n) for n in package_names],
            name=names.Name(exc_name),
            doc_section="libadalang.project_provider",
        )

    return ctx
def create_context(self, args):
    """Build and return the compilation context for LKQL."""
    from langkit.compile_context import ADA_BODY, CompileCtx
    from language.lexer import lkql_lexer
    from language.parser import lkql_grammar

    context = CompileCtx(
        lang_name='LKQL',
        short_name='lkql',
        lexer=lkql_lexer,
        grammar=lkql_grammar,
    )
    # The generated implementation body needs the Prelude unit in scope
    context.add_with_clause(
        'Implementation', ADA_BODY, 'Liblkqllang.Prelude', use_clause=True)
    return context
def create_context(self, args):
    """Build and return the compilation context for LKQL."""
    from langkit.compile_context import AdaSourceKind, CompileCtx
    from language.lexer import lkql_lexer
    from language.parser import lkql_grammar

    context = CompileCtx(
        lang_name='Lkql',
        short_name='lkql',
        lexer=lkql_lexer,
        grammar=lkql_grammar,
    )
    # The generated implementation body needs the Prelude unit in scope
    context.add_with_clause(
        'Implementation', AdaSourceKind.body, 'Liblkqllang.Prelude',
        use_clause=True)
    return context
def create_context(self, args):
    """Build and return the compilation context for Libadalang."""
    # Late imports on purpose: they must execute only once the coverage
    # computation has actually started.
    from langkit.compile_context import ADA_BODY, CompileCtx, LibraryEntity
    from language.lexer import ada_lexer
    from language.grammar import ada_grammar
    from language.documentation import libadalang_docs

    ctx = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        env_hook_subprogram=LibraryEntity(
            'Libadalang.Unit_Files.Env_Hook', 'Env_Hook'),
        default_unit_provider=LibraryEntity(
            'Libadalang.Unit_Files.Default', 'Default_Unit_Provider'),
        symbol_canonicalizer=LibraryEntity(
            'Libadalang.Sources', 'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Units the C binding's body depends on
    c_binding_deps = (
        'GNATCOLL.Projects',
        'GNATCOLL.VFS',
        'Libadalang.Unit_Files.Projects',
    )
    for dep in c_binding_deps:
        ctx.add_with_clause(
            'Analysis.Implementation.C', ADA_BODY, dep, use_clause=True)

    ctx.add_with_clause(
        'Analysis', ADA_BODY, 'Libadalang.Unit_Files', use_clause=True)

    # Lal needs access to the static expression evaluator, for name
    # resolution of aggregates.
    ctx.add_with_clause(
        'Analysis.Implementation', ADA_BODY, 'Libadalang.Expr_Eval',
        use_clause=False)

    return ctx
def __init__(self, context: CompileCtx, lib_root: str,
             extensions_dir: Optional[str],
             main_source_dirs: Optional[Set[str]] = None,
             main_programs: Optional[Set[str]] = None,
             no_property_checks: bool = False,
             generate_ada_api: bool = True,
             generate_gdb_hook: bool = True,
             pretty_print: bool = False,
             post_process_ada: PostProcessFn = None,
             post_process_cpp: PostProcessFn = None,
             post_process_python: PostProcessFn = None,
             post_process_ocaml: PostProcessFn = None,
             coverage: bool = False,
             relative_project: bool = False,
             unparse_script: Optional[str] = None):
    """
    Generate sources for the analysis library. Also emit a tiny program
    useful for testing purposes.

    :param lib_root: Path of the directory in which the library should be
        generated.

    :param extensions_dir: Directory to contain extensions for code
        generation. If None is provided, assume there is no extension.

    :param main_source_dirs: List of source directories to use in the
        project file for mains. Source directories must be relative to the
        mains project file directory (i.e. $BUILD/src-mains). Defaults to
        the empty set.

    :param main_programs: List of names for programs to build in addition
        to the generated library. To each X program, there must be a X.adb
        source file in the $BUILD/src directory. Defaults to the empty set.

    :param no_property_checks: If True, do not emit safety checks in the
        generated code for properties. Namely, this disables null checks on
        field access.

    :param generate_ada_api: If True, generate the public Ada API. If False
        and there is no main to generate, do not generate this Ada API.

    :param generate_gdb_hook: Whether to generate the ".debug_gdb_scripts"
        section. Good for debugging, but better to disable for releases.

    :param pretty_print: If true, pretty-print the generated sources.

    :param post_process_ada: Optional post-processing for generated Ada
        source code.

    :param post_process_cpp: Optional post-processing for generated C++
        source code.

    :param post_process_python: Optional post-processing for generated
        Python source code.

    :param post_process_ocaml: Optional post-processing for generated OCaml
        source code.

    :param coverage: Instrument the generated library to compute its code
        coverage. This requires GNATcoverage.

    :param relative_project: See libmanage's --relative-project option.
    """
    # Fix for mutable default arguments: the previous ``= set()`` defaults
    # were shared across all instances, so a mutation through one instance
    # would silently leak into every other one. Normalize None here instead.
    main_source_dirs = set() if main_source_dirs is None else main_source_dirs
    main_programs = set() if main_programs is None else main_programs

    self.context = context
    self.verbosity = context.verbosity

    self.lib_root = lib_root
    self.cache = Cache(os.path.join(self.lib_root, 'obj', 'langkit_cache'))

    self.extensions_dir = extensions_dir

    # TODO: contain the add_template_dir calls to this context (i.e. avoid
    # global mutation).
    if self.extensions_dir:
        add_template_dir(self.extensions_dir)
    for dirpath in keep(self.context.template_lookup_extra_dirs):
        add_template_dir(dirpath)

    self.no_property_checks = no_property_checks
    # Mains need the Ada API, so force its generation when mains are
    # requested even if generate_ada_api is False.
    self.generate_ada_api = generate_ada_api or bool(main_programs)
    self.generate_gdb_hook = generate_gdb_hook
    self.generate_unparser = context.generate_unparser
    self.pretty_print = pretty_print
    self.post_process_ada = post_process_ada
    self.post_process_cpp = post_process_cpp
    self.post_process_python = post_process_python
    self.post_process_ocaml = post_process_ocaml
    self.coverage = coverage
    self.gnatcov = context.gnatcov
    self.relative_project = relative_project

    # Automatically add all source files in the "extensions/src" directory
    # to the generated library project.
    self.extensions_src_dir = None
    if self.extensions_dir:
        src_dir = path.join(self.extensions_dir, 'src')
        if path.isdir(src_dir):
            self.extensions_src_dir = src_dir
            for filename in os.listdir(src_dir):
                filepath = path.join(src_dir, filename)
                # Skip directories and hidden files (e.g. editor backups)
                if path.isfile(filepath) and not filename.startswith('.'):
                    self.context.additional_source_files.append(filepath)

    self.main_source_dirs = main_source_dirs
    self.main_programs = main_programs

    self.lib_name_low = context.ada_api_settings.lib_name.lower()
    """
    Lower-case name for the generated library.
    """

    self.lib_name_up = context.ada_api_settings.lib_name.upper()
    """
    Upper-case name for the generated library.
    """

    # Paths for the various directories in which code is generated
    self.src_dir = path.join(self.lib_root, "src")
    self.src_mains_dir = path.join(self.lib_root, "src-mains")
    self.scripts_dir = path.join(self.lib_root, "scripts")
    self.python_dir = path.join(self.lib_root, "python")
    self.python_pkg_dir = path.join(
        self.lib_root, "python", context.python_api_settings.module_name)
    self.ocaml_dir = path.join(self.lib_root, "ocaml")

    self.lib_project = path.join(self.lib_root, f"{self.lib_name_low}.gpr")
    self.mains_project = path.join(self.lib_root, "mains.gpr")

    self.dfa_code: DFACodeGenHolder
    """
    Holder for the data structures used to generate code for the lexer
    state machine (DFA). As an optimization, it is left to None if we
    decide not to generate it (i.e. when the already generated sources are
    up-to-date).
    """

    self._project_file_emitted = False
    """
    Whether we emitted a project file for the generated library.

    :type: bool
    """

    self.project_languages = {'Ada'}
    """
    List of GPR names for languages used in the generated library.

    :type: set[str]
    """

    self.library_interfaces = set()
    """
    Set of source file base names for all sources that must appear in the
    "Interfaces" attribute of the generated library project file.

    :type: set[str]
    """

    self.instr_md = InstrumentationMetadata()

    # Add all additional source files to the list of library interfaces and
    # declare them as such in instrumentation metadata.
    for f in context.additional_source_files:
        self.add_library_interface(f, generated=False)

    if self.coverage:
        assert self.gnatcov
        # Add the buffer-list unit from GNATcoverage's instrumentation to
        # the list of library interfaces. TODO: hopefully, we should not
        # have to do this anymore after S916-064 is addressed.
        self.library_interfaces.add(self.gnatcov.buffer_list_file(self))

    self.main_project_file = os.path.join(self.lib_root,
                                          f'{self.lib_name_low}.gpr')

    self.unparse_script = unparse_script
    """
    RA22-015: If set to something else than None, then the "dsl unparse"
    pass will be run on the given script.

    :type: langkit.compile_context.UnparseScript|None
    """

    # Determine whether we have user external properties. If so,
    # automatically WITH $.Implementation.Extensions from the body of
    # $.Analysis and $.Implementation.
    if any(prop.user_external
           for prop in context.all_properties(include_inherited=True)):
        for unit in ('Analysis', 'Implementation', 'Implementation.C'):
            context.add_with_clause(
                unit, AdaSourceKind.body,
                '{}.Implementation.Extensions'.format(
                    context.ada_api_settings.lib_name),
                use_clause=True)
def create_context(self, args):
    """Build and return the compilation context for Libadalang."""
    # Late imports on purpose: they must execute only once the coverage
    # computation has actually started.
    from langkit.compile_context import (ADA_BODY, ADA_SPEC, CompileCtx,
                                         LibraryEntity)
    from language.lexer import ada_lexer
    from language.grammar import ada_grammar
    from language.documentation import libadalang_docs

    ctx = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        env_hook_subprogram=LibraryEntity(
            'Libadalang.Env_Hooks', 'Env_Hook'
        ),
        default_unit_provider=LibraryEntity('Libadalang.Unit_Files',
                                            'Default_Provider'),
        symbol_canonicalizer=LibraryEntity('Libadalang.Sources',
                                           'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Units the C binding's body depends on
    for dep in ('GNATCOLL.Projects',
                'GNATCOLL.VFS',
                'Libadalang.Project_Provider',
                'Libadalang.Auto_Provider'):
        ctx.add_with_clause('Implementation.C', ADA_BODY, dep,
                            use_clause=True)

    ctx.add_with_clause('Implementation', ADA_BODY, 'Libadalang.Env_Hooks',
                        use_clause=True)

    # The implementation body also needs, in order:
    #
    # * the static expression evaluator, for name resolution of aggregates;
    # * the doc utilities;
    # * the literal decoders;
    # * hashed maps from Ada.Containers.
    for dep in ('Libadalang.Expr_Eval',
                'Libadalang.Doc_Utils',
                'Libadalang.Sources',
                'Ada.Containers.Hashed_Maps'):
        ctx.add_with_clause('Implementation', ADA_BODY, dep,
                            use_clause=False)

    # Our iterators are implemented using internal data structures
    ctx.add_with_clause('Iterators', ADA_SPEC, 'Libadalang.Common',
                        is_private=True)
    ctx.add_with_clause('Iterators', ADA_BODY, 'Libadalang.Implementation',
                        use_clause=True)
    ctx.add_with_clause('Iterators', ADA_BODY, 'Libadalang.Converters',
                        use_clause=True)

    # LAL.Analysis.Is_Keyword is implemented using LAL.Lexer's
    ctx.add_with_clause('Analysis', ADA_BODY, 'Libadalang.Lexer')

    ctx.post_process_ada = copyright.format_ada
    ctx.post_process_cpp = copyright.format_c
    ctx.post_process_python = copyright.format_python

    return ctx