def write_source_file(file_path, source, post_process=None):
    """
    Helper to write a source file.

    Return whether the file has been updated.

    :param str file_path: Path of the file to write.
    :param str source: Content of the file to write.
    :param post_process: If provided, callable used to transform the source
        file content just before writing it.
    :type post_process: None | (str) -> str
    :rtype: bool
    """
    context = get_context()
    if post_process:
        source = post_process(source)

    if (not os.path.exists(file_path)
            or context.emitter.cache.is_stale(file_path, source)):
        if context.verbosity.debug:
            printcol('Rewriting stale source: {}'.format(file_path),
                     Colors.OKBLUE)

        # Emit all source files as UTF-8 with "\n" line endings, no matter the
        # current platform.
        with open(file_path, 'w', encoding='utf-8', newline='') as f:
            f.write(source)
        return True
    return False
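# A minimal usage sketch (not from the project): the strip_trailing_ws helper
# below is hypothetical and only illustrates the post_process hook. Calling
# write_source_file requires an active langkit emission context, since it
# consults get_context() for the cache and verbosity settings.
def strip_trailing_ws(source):
    return '\n'.join(line.rstrip() for line in source.splitlines()) + '\n'

# updated = write_source_file('build/src/parse.adb', rendered_source,
#                             post_process=strip_trailing_ws)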
def process_unit(self, unit: lal.AnalysisUnit) -> None:
    """
    Process one LAL analysis unit.
    """
    if unit.diagnostics:
        self.error('Parsing error in {}'.format(unit.filename))
        for diag in unit.diagnostics:
            self.error("{}:{}".format(str(diag.sloc_range.start),
                                      diag.message))

    if not unit.root:
        self.error('{} is empty'.format(unit.filename))

    try:
        decl = (unit.root.cast(lal.CompilationUnit)
                .f_body.cast(lal.LibraryItem).f_item)
        package_decl = decl.cast(lal.BasePackageDecl)
        self.handle_package(package_decl)

        out_file = P.join(self.args.output_dir,
                          P.basename(P.splitext(unit.filename)[0]))

        with open(f"{out_file}.rst", "w") as f:
            f.write("\n".join(line.rstrip() for line in self.lines))
        self.lines = []
    except AssertionError:
        printcol(f"WARNING: Non handled top level decl: {decl}", Colors.RED)
        return
def log_exec(self, argv: List[str]) -> None:
    """
    If verbosity level is debug, log a command we are about to execute.

    :param argv: Arguments for the command to log.
    """
    if self.verbosity.debug:
        printcol(
            'Executing: {}'.format(' '.join(
                pipes.quote(arg) for arg in argv)),
            Colors.CYAN)
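# Note: pipes.quote is deprecated on modern Python (removed in 3.13). A small
# standalone equivalent using shlex.quote, assuming no project-specific
# helpers:
import shlex

def format_command(argv):
    # shlex.quote is the supported replacement for pipes.quote.
    return ' '.join(shlex.quote(arg) for arg in argv)

# format_command(['gprbuild', '-P', 'my project.gpr']) ->
# "gprbuild -P 'my project.gpr'"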
def log_exec(self, args, argv):
    """
    If verbosity level is debug, log a command we are about to execute.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    :param list[str] argv: Arguments for the command to log.
    """
    if args.verbosity.debug:
        printcol('Executing: {}'.format(
            ' '.join(pipes.quote(arg) for arg in argv)
        ), Colors.CYAN)
def log_exec(self, args, argv):
    """
    If verbosity level is debug, log a command we are about to execute.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    :param list[str] argv: Arguments for the command to log.
    """
    if args.verbosity.debug:
        printcol(
            'Executing: {}'.format(' '.join(
                pipes.quote(arg) for arg in argv)),
            Colors.CYAN)
def do_list_optional_passes(self, args: argparse.Namespace) -> None:
    """
    List optional passes and exit.

    :param args: The arguments parsed from the command line invocation of
        manage.py.
    """
    self.prepare_generation(args)
    printcol("Optional passes\n", Colors.CYAN)
    for p in self.context.all_passes:
        if p.is_optional:
            printcol(p.name, Colors.YELLOW)
            print(p.doc)
            print()
def set_quex_path(self):
    """
    If the QUEX_PATH environment variable is defined, do nothing.
    Otherwise, look for the "quex" program and determine Quex's "share"
    install directory: define the QUEX_PATH environment variable with it.
    """
    if 'QUEX_PATH' in os.environ:
        return

    try:
        quex_bin = subprocess.check_output(['which', 'quex']).strip()
    except subprocess.CalledProcessError:
        printcol(
            'Cannot find the "quex" program. Please define the'
            ' QUEX_PATH environment variable to Quex\'s "share"'
            ' install directory', Colors.FAIL)
        raise

    os.environ['QUEX_PATH'] = os.path.join(
        os.path.dirname(os.path.dirname(quex_bin)),
        'share', 'quex')
def set_quex_path(self):
    """
    If the QUEX_PATH environment variable is defined, do nothing.
    Otherwise, look for the "quex" program and determine Quex's "share"
    install directory: define the QUEX_PATH environment variable with it.
    """
    if 'QUEX_PATH' in os.environ:
        return

    try:
        quex_bin = subprocess.check_output(['which', 'quex']).strip()
    except subprocess.CalledProcessError:
        printcol('Cannot find the "quex" program. Please define the'
                 ' QUEX_PATH environment variable to Quex\'s "share"'
                 ' install directory', Colors.FAIL)
        raise

    os.environ['QUEX_PATH'] = os.path.join(
        os.path.dirname(os.path.dirname(quex_bin)),
        'share', 'quex'
    )
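# On Python 3, shelling out to "which" can be avoided; a portable sketch of
# the same lookup (an alternative, not the project's code) using shutil.which:
import os
import shutil

def set_quex_path_portable():
    # Respect an explicit QUEX_PATH, as above.
    if 'QUEX_PATH' in os.environ:
        return
    quex_bin = shutil.which('quex')
    if quex_bin is None:
        raise RuntimeError('Cannot find the "quex" program; please set'
                           ' QUEX_PATH to Quex\'s "share" install directory')
    # <prefix>/bin/quex -> <prefix>/share/quex
    prefix = os.path.dirname(os.path.dirname(quex_bin))
    os.environ['QUEX_PATH'] = os.path.join(prefix, 'share', 'quex')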
def do_generate(self, args):
    """
    Generate source code for libadalang.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    """
    printcol("Generating source for libadalang ...", Colors.HEADER)
    self.context.emit(file_root=self.dirs.build_dir())

    def gnatpp(project_file, glob_pattern):
        try:
            subprocess.check_call([
                'gnatpp',
                '-P{}'.format(project_file),
                '-XLIBRARY_TYPE=relocatable',
                '-rnb',
            ] + glob.glob(glob_pattern), env=self.derived_env())
        except subprocess.CalledProcessError as exc:
            print('Pretty-printing failed: {}'.format(exc), file=sys.stderr)
            sys.exit(1)

    if hasattr(args, 'pretty_print') and args.pretty_print:
        printcol("Pretty-printing sources for Libadalang ...", Colors.HEADER)
        gnatpp(
            self.dirs.build_dir('lib', 'gnat',
                                '{}.gpr'.format(self.lib_name.lower())),
            self.dirs.build_dir('include', self.lib_name.lower(), '*.ad*')
        )
        gnatpp(self.dirs.build_dir('src', 'parse.gpr'),
               self.dirs.build_dir('src', '*.ad*'))

    printcol("Generation complete!", Colors.OKGREEN)
def do_build(self, args):
    """
    Build generated source code.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    """
    # Build the generated library itself
    if args.verbosity.info:
        printcol("Building the generated source code ...", Colors.HEADER)

    lib_project = self.dirs.build_dir(
        'lib', 'gnat', '{}.gpr'.format(self.lib_name.lower()))
    self.gprbuild(args, lib_project, True)

    # Then build the main programs, if any
    disabled_mains = reduce(set.union, args.disable_mains, set())
    mains = (set()
             if args.disable_all_mains else
             self.main_programs - disabled_mains)
    if mains:
        if args.verbosity.info:
            printcol("Building the main programs ...", Colors.HEADER)
        self.gprbuild(args, self.dirs.build_dir('src', 'mains.gpr'), False,
                      mains)

    # On Windows, shared libraries (DLL) are looked up in the PATH, just
    # like binaries (it's LD_LIBRARY_PATH on Unix). For this platform,
    # don't bother and just copy these DLL next to binaries.
    if os.name == 'nt':
        for dll in glob.glob(self.dirs.build_dir('lib', '*.dll')):
            shutil.copy(dll,
                        self.dirs.build_dir('bin', path.basename(dll)))

    if args.verbosity.info:
        printcol("Compilation complete!", Colors.OKGREEN)
def do_build(self, args):
    """
    Build generated source code.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    """
    build_mode = args.build_mode if args.build_mode else 'dev'

    cargs = []
    # Depending on where this is invoked, the "cargs" option may not be set
    if hasattr(args, 'cargs'):
        cargs.extend(args.cargs)

    def gprbuild(project_file, is_dynamic):
        try:
            subprocess.check_call([
                'gprbuild', '-m', '-p', '-j{}'.format(args.jobs),
                '-P{}'.format(project_file),
                '-XBUILD_MODE={}'.format(build_mode),
                '-XLIBRARY_TYPE={}'.format(
                    'relocatable' if is_dynamic else 'static'
                ),
                '-XLIBLANG_SUPPORT_EXTERNALLY_BUILT=false',
                '-X{}_EXTERNALLY_BUILT=false'.format(self.lib_name.upper()),
                '-cargs',
            ] + cargs, env=self.derived_env())
        except subprocess.CalledProcessError as exc:
            print('Build failed: {}'.format(exc), file=sys.stderr)
            sys.exit(1)

    printcol("Building the generated source code ...", Colors.HEADER)
    lib_project = self.dirs.build_dir(
        'lib', 'gnat', '{}.gpr'.format(self.lib_name.lower())
    )
    if args.enable_static:
        gprbuild(lib_project, False)
    if not args.disable_shared:
        gprbuild(lib_project, True)

    printcol("Building the interactive test main ...", Colors.HEADER)
    if args.enable_static:
        gprbuild(self.dirs.build_dir('src', 'parse.gpr'), False)
    if not args.disable_shared:
        gprbuild(self.dirs.build_dir('src', 'parse.gpr'), True)

    # On Windows, shared libraries (DLL) are looked up in the PATH, just
    # like binaries (it's LD_LIBRARY_PATH on Unix). For this platform,
    # don't bother and just copy these DLL next to binaries.
    if os.name == 'nt':
        for dll in glob.glob(self.dirs.build_dir('lib', '*.dll')):
            shutil.copy(dll,
                        self.dirs.build_dir('bin', os.path.basename(dll)))

    printcol("Compilation complete!", Colors.OKGREEN)
def do_generate(self, args):
    """
    Generate source code for libadalang.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    """
    if args.verbosity.info:
        printcol(
            "Generating source for {} ...".format(self.lib_name.lower()),
            Colors.HEADER)
    self.context.emit(file_root=self.dirs.build_dir(),
                      main_programs=self.main_programs,
                      annotate_fields_types=args.annotate_fields_types,
                      generate_lexer=not args.no_compile_quex,
                      compile_only=args.check_only,
                      no_property_checks=args.no_property_checks)

    if args.check_only:
        return

    def gnatpp(project_file, glob_pattern):
        self.check_call(args, 'Pretty-printing', [
            'gnatpp',
            '-P{}'.format(project_file),
            '-XLIBRARY_TYPE=relocatable',
            '-rnb',
            '--insert-blank-lines',
        ] + glob.glob(glob_pattern))

    if hasattr(args, 'pretty_print') and args.pretty_print:
        if args.verbosity.info:
            printcol("Pretty-printing sources for Libadalang ...",
                     Colors.HEADER)
        gnatpp(
            self.dirs.build_dir('lib', 'gnat',
                                '{}.gpr'.format(self.lib_name.lower())),
            self.dirs.build_dir('include', self.lib_name.lower(), '*.ad*'))
        gnatpp(self.dirs.build_dir('src', 'mains.gpr'),
               self.dirs.build_dir('src', '*.ad*'))

    if args.verbosity.info:
        printcol("Generation complete!", Colors.OKGREEN)
def run(self, context: CompileCtx) -> None:
    """
    Run through the execution pipeline.
    """
    assert not self.frozen, 'Invalid attempt to run the pipeline twice'
    self.frozen = True

    for p in self.passes:
        if p.disabled:
            if context.verbosity.debug:
                printcol('Skipping pass: {}'.format(p.name), Colors.YELLOW)
            continue

        if isinstance(p, StopPipeline):
            if context.verbosity.info:
                printcol('Stopping pipeline execution: {}'.format(p.name),
                         Colors.OKBLUE)
            return
        else:
            if (not isinstance(p, MajorStepPass)
                    and context.verbosity.debug):  # no-code-coverage
                printcol('Running pass: {}'.format(p.name), Colors.YELLOW)
            p.run(context)
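# Self-contained sketch of the pass-pipeline idea above; PipelinePass and
# PipelineStop are illustrative stand-ins, not langkit's actual classes.
class PipelinePass:
    def __init__(self, name, fn, disabled=False):
        self.name, self.fn, self.disabled = name, fn, disabled

    def run(self, context):
        self.fn(context)

class PipelineStop(PipelinePass):
    # Marker pass: execution stops when the pipeline reaches one of these.
    def __init__(self, name):
        super().__init__(name, fn=lambda context: None)

def run_pipeline(passes, context):
    for p in passes:
        if p.disabled:
            continue
        if isinstance(p, PipelineStop):
            return
        p.run(context)

run_pipeline(
    [PipelinePass('parse grammar', lambda ctx: ctx.append('parsed')),
     PipelineStop('check only'),
     PipelinePass('emit code', lambda ctx: ctx.append('emitted'))],
    context=[],
)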
def do_build(self, args):
    """
    Build generated source code.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    """
    # Build the generated library itself
    if args.verbosity.info:
        printcol("Building the generated source code ...", Colors.HEADER)

    lib_project = self.dirs.build_dir(
        'lib', 'gnat', '{}.gpr'.format(self.lib_name.lower())
    )
    self.gprbuild(args, lib_project, True)

    # Then build the main programs, if any
    disabled_mains = reduce(set.union, args.disable_mains, set())
    mains = (set()
             if args.disable_all_mains else
             self.main_programs - disabled_mains)
    if mains:
        if args.verbosity.info:
            printcol("Building the main programs ...", Colors.HEADER)
        self.gprbuild(args, self.dirs.build_dir('src', 'mains.gpr'), False,
                      mains)

    # On Windows, shared libraries (DLL) are looked up in the PATH, just
    # like binaries (it's LD_LIBRARY_PATH on Unix). For this platform,
    # don't bother and just copy these DLL next to binaries.
    if os.name == 'nt':
        for dll in glob.glob(self.dirs.build_dir('lib', '*.dll')):
            shutil.copy(dll,
                        self.dirs.build_dir('bin', path.basename(dll)))

    if args.verbosity.info:
        printcol("Compilation complete!", Colors.OKGREEN)
def do_generate(self, args):
    """
    Generate source code for libadalang.

    :param argparse.Namespace args: The arguments parsed from the command
        line invocation of manage.py.
    """
    if args.verbosity.info:
        printcol("Generating source for libadalang ...", Colors.HEADER)
    self.context.emit(file_root=self.dirs.build_dir(),
                      main_programs=self.main_programs,
                      annotate_fields_types=args.annotate_fields_types,
                      generate_lexer=not args.no_compile_quex)

    def gnatpp(project_file, glob_pattern):
        self.check_call(args, 'Pretty-printing', [
            'gnatpp',
            '-P{}'.format(project_file),
            '-XLIBRARY_TYPE=relocatable',
            '-rnb',
        ] + glob.glob(glob_pattern))

    if hasattr(args, 'pretty_print') and args.pretty_print:
        if args.verbosity.info:
            printcol("Pretty-printing sources for Libadalang ...",
                     Colors.HEADER)
        gnatpp(
            self.dirs.build_dir('lib', 'gnat',
                                '{}.gpr'.format(self.lib_name.lower())),
            self.dirs.build_dir('include', self.lib_name.lower(), '*.ad*')
        )
        gnatpp(self.dirs.build_dir('src', 'mains.gpr'),
               self.dirs.build_dir('src', '*.ad*'))

    if args.verbosity.info:
        printcol("Generation complete!", Colors.OKGREEN)
def handle_entity(self, decl: lal.BasicDecl):
    # Get the documentation content
    doc, annotations = self.get_documentation(decl)

    def emit_directive(directive_header):
        self.add_lines([
            directive_header,
        ])
        with self.indent():
            self.add_lines([
                ":package: "
                f"{decl.p_parent_basic_decl.p_fully_qualified_name}"
            ])

    if isinstance(decl, (lal.BasicSubpDecl, lal.ExprFunction)):
        subp_spec = decl.p_subp_spec_or_null()
        prof = strip_ws(subp_spec.text)
        subp_kind = ('procedure'
                     if subp_spec.p_returns is None
                     else 'function')
        emit_directive(f".. ada:{subp_kind}:: {prof}")

        for formal in decl.p_subp_spec_or_null().p_abstract_formal_params:
            formal_doc, annots = self.get_documentation(formal)

            # Only generate a param profile if you have doc to show.
            # TODO: This is weird, because params without doc will not be
            # shown. Ideally it would be better to switch on if any param
            # has doc.
            if formal_doc:
                for i in formal.p_defining_names:
                    fqn = formal.p_formal_type().p_fully_qualified_name
                    self.add_string(f":param {fqn} {i.text}:")
                    with self.indent():
                        self.add_lines(doc)

    elif isinstance(decl, lal.BaseTypeDecl):
        if isinstance(decl, lal.IncompleteTypeDecl):
            return

        prof = f"type {decl.p_relative_name.text}"
        emit_directive(f".. ada:type:: {prof}")

        with self.indent():
            self.add_lines([''])

            # Register components (discriminants and fields)
            comps: Dict[lal.BaseFormalParamDecl,
                        Set[Tuple[lal.DiscriminantValues]]] = {}

            if decl.p_is_access_type():
                pass
            elif decl.p_is_record_type():
                try:
                    for shape in decl.p_shapes():
                        for comp in shape.components:
                            ctx = comp.parent.parent.parent
                            s = comps.setdefault(comp, set())
                            if not ctx.is_a(lal.Variant):
                                s.add(tuple(shape.discriminants_values))
                except lal.PropertyError:
                    # TODO TA20-019: p_shapes will fail on some types that
                    # are considered records, so we should not crash on
                    # this.
                    pass
            else:
                for comp in decl.p_discriminants_list:
                    comps[comp] = set()

            # Emit components
            for comp, discrs in comps.items():
                doc, annots = self.get_documentation(comp)
                for dn in comp.p_defining_names:
                    fqn = comp.p_formal_type().p_fully_qualified_name
                    comp_kind = ("discriminant"
                                 if comp.is_a(lal.DiscriminantSpec)
                                 else "component")
                    self.add_string(f":{comp_kind} {fqn} {dn.text}:")
                    with self.indent():
                        self.add_lines(doc)

    elif isinstance(decl, lal.ObjectDecl):
        default_expr = None

        if decl.f_default_expr and annotations.get('document-value', True):
            # If there is a default expression to describe, do it as an
            # additional description. The title will only contain the name
            # up to the type expression.
            default_expr = decl.f_default_expr.text
            last_token = decl.f_type_expr.token_end
        elif decl.f_renaming_clause:
            # If there is a renaming clause, just put everything until the
            # renaming clause in the title.
            last_token = decl.f_renaming_clause.token_end
        else:
            # By default, go until the type expression
            last_token = decl.f_type_expr.token_end

        descr = strip_ws(lal.Token.text_range(decl.token_start, last_token))
        emit_directive(f".. ada:object:: {descr}")

        with self.indent():
            self.add_lines([''])
            typ = decl.p_type_expression.p_designated_type_decl
            if typ.is_a(lal.AnonymousTypeDecl):
                typ_str = f"``{decl.p_type_expression.text}``"
            else:
                typ_str = typ.p_fully_qualified_name

            if not decl.parent.is_a(lal.GenericFormal):
                self.add_string(f":objtype: {typ_str}")

            if default_expr:
                self.add_string(f":defval: ``{strip_ws(default_expr)}``")

            if decl.f_renaming_clause:
                self.add_string(
                    ":renames: "
                    f"{decl.f_renaming_clause.f_renamed_object.text}")

    elif isinstance(decl, lal.ExceptionDecl):
        name = decl.p_defining_name.text
        emit_directive(f".. ada:exception:: {name}")

    elif isinstance(decl, lal.GenericPackageInstantiation):
        sig = strip_ws(lal.Token.text_range(
            decl.token_start, decl.f_generic_pkg_name.token_end))
        emit_directive(f".. ada:generic-package-instantiation:: {sig}")

        with self.indent():
            self.add_lines([''])
            self.add_string(".. code-block:: ada")
            with self.indent():
                self.add_lines([''] + decl.text.splitlines())
            self.add_lines([''])
            self.add_string(
                ":instpkg: "
                f"{decl.p_designated_generic_decl.p_fully_qualified_name}")

    elif isinstance(decl, lal.GenericFormal):
        self.handle_entity(decl.f_decl)
        return

    else:
        printcol(f"WARNING: Non handled entity: {decl}", Colors.RED)

    with self.indent():
        self.add_lines([''] + doc + [''])
def run(self, context):
    if context.verbosity.info:
        printcol('{}...'.format(self.message), Colors.OKBLUE)
def log_debug(self, msg, color):
    """
    If verbosity level is debug, log a message with given color.
    """
    if self.verbosity.debug:
        printcol(msg, color)
def log_info(self, msg, color):
    """
    If verbosity level is info, log a message with given color.
    """
    if self.verbosity.info:
        printcol(msg, color)
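# For readers outside the langkit codebase: a minimal stand-in for the
# printcol/Colors helpers used throughout these snippets (illustrative only;
# the real helpers live in langkit and define more colors).
class Colors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    YELLOW = '\033[93m'
    CYAN = '\033[96m'
    RED = FAIL = '\033[91m'
    ENDC = '\033[0m'

def printcol(msg, color):
    # Wrap the message in the given ANSI color code and reset afterwards.
    print('{}{}{}'.format(color, msg, Colors.ENDC))

printcol('Compilation complete!', Colors.OKGREEN)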
def _compile(self, compile_only=False):
    """
    Compile the language specification: perform legality checks and type
    inference.
    """
    # Compile the first time, do nothing next times
    if self.compiled:
        return
    self.compiled = True

    assert self.grammar, "Set grammar before compiling"

    if not self.grammar.rules.get(self.main_rule_name, None):
        close_matches = difflib.get_close_matches(
            self.main_rule_name, self.grammar.rules.keys())

        with self.grammar.context():
            check_source_language(
                False,
                'Invalid rule name specified for main rule: "{}". '
                '{}'.format(
                    self.main_rule_name,
                    'Did you mean "{}"?'.format(close_matches[0])
                    if close_matches else ""))

    unreferenced_rules = self.grammar.get_unreferenced_rules()

    check_source_language(
        not unreferenced_rules,
        "The following parsing rules are not used: {}".format(
            ", ".join(sorted(unreferenced_rules))),
        severity=Severity.warning)

    if self.verbosity.info:
        printcol("Compiling the grammar...", Colors.OKBLUE)

    with names.camel_with_underscores:
        # Compute the type of fields for types used in the grammar. Also
        # register its symbol literals.
        for r_name, r in self.grammar.rules.items():
            with r.error_context():
                r.compute_fields_types()
            for sym in r.symbol_literals:
                self.add_symbol_literal(sym)

    # Compute type information, so that it is available for further
    # compilation stages.
    self.compute_types()
    errors_checkpoint()

    with names.camel_with_underscores:
        # Compute properties information, so that it is available for
        # further compilation stages.
        self.compute_properties(compile_only=compile_only)
    errors_checkpoint()

    # Past this point, the set of symbol literals is frozen
    self.finalize_symbol_literals()

    unresolved_types = set([t for t in self.astnode_types
                            if not t.is_type_resolved])
    check_source_language(
        not unresolved_types,
        "The following ASTNode subclasses are not type resolved. They are"
        " not used by the grammar, and their types not annotated:"
        " {}".format(", ".join(t.name().camel for t in unresolved_types)))

    astnodes_files = {
        path.abspath(inspect.getsourcefile(n))
        for n in self.astnode_types
    }

    if compile_only:
        return

    with names.camel_with_underscores:
        for r_name, r in self.grammar.rules.items():
            with r.error_context():
                r.compile()

    if self.annotate_fields_types:
        # Only import lib2to3 if the user needs it
        import lib2to3.main
        lib2to3.main.main(
            "langkit",
            ["-f", "annotate_fields_types", "--no-diff", "-w"]
            + list(astnodes_files))

    for i, astnode in enumerate(
        (astnode
         for astnode in self.astnode_types
         if not astnode.abstract),
        # Compute kind constants for all ASTNode concrete subclasses.
        # Start with 1: the constant 0 is reserved as an
        # error/uninitialized code.
        start=1
    ):
        self.node_kind_constants[astnode] = i

    # Now that all Struct subclasses referenced by the grammar have been
    # typed, iterate over all declared subclasses to register the ones that
    # are unreachable from the grammar. TODO: this kludge will eventually
    # disappear as part of OC22-016.
    for t in self.struct_types + self.astnode_types:
        t.add_to_context()

    errors_checkpoint()
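# The kind-constant numbering above is a plain enumerate() with a start
# offset; a tiny standalone illustration (node names are made up):
concrete_nodes = ['ExprNode', 'StmtNode', 'DeclNode']
# 0 is reserved as an error/uninitialized kind, so concrete kinds start at 1.
node_kind_constants = {node: i
                       for i, node in enumerate(concrete_nodes, start=1)}
assert node_kind_constants == {'ExprNode': 1, 'StmtNode': 2, 'DeclNode': 3}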
def run(self, context: CompileCtx) -> None:
    if context.verbosity.info:
        printcol('{}...'.format(self.message), Colors.OKBLUE)
def do_perf_test(self, args):
    """
    Run the performance regression testsuite.
    """
    from time import time

    self.set_context(args)

    def file_lines(filename):
        with open(filename) as f:
            return len(list(f))

    work_dir = os.path.abspath(args.work_dir)

    if not args.no_recompile:
        # The perf testsuite only needs the "parse" main program
        args.disable_mains = self.main_programs - {"parse"}

        # Build libadalang in production mode inside of the perf testsuite
        # directory.
        args.build_dir = os.path.join(work_dir, "build")
        self.dirs.set_build_dir(args.build_dir)
        args.build_mode = "prod"
        self._mkdir(args.build_dir)
        self.do_make(args)

    # Checkout the code bases that we will use for the perf testsuite
    source_dir = os.path.join(work_dir, "source")
    try:
        os.mkdir(source_dir)
    except OSError:
        pass
    os.chdir(source_dir)
    if not os.path.exists("gnat"):
        subprocess.check_call([
            "svn", "co",
            "svn+ssh://svn.us.adacore.com/Dev/trunk/gnat",
            "-r", "314163"])
    if not os.path.exists("gps"):
        subprocess.check_call(["git", "clone",
                               "ssh://review.eu.adacore.com:29418/gps"])
    os.chdir("gps")
    subprocess.check_call(["git", "checkout",
                           "00b73897a867514732d48ae1429faf97fb07ad7c"])
    os.chdir("..")

    # Make a list of every ada file. Exclude some files that are contained
    # here but that we do not parse correctly. Materialize the list, as it
    # is iterated twice below (file list and line count).
    excluded_patterns = ["@", "a-numeri", "rad-project"]
    ada_files = [
        f for f in self._find_ada_sources(source_dir)
        if all(p not in f for p in excluded_patterns)
    ]
    file_list_name = "ada_file_list"
    with open(file_list_name, "w") as file_list:
        for f in ada_files:
            file_list.write(f + "\n")

    # Get a count of the total number of ada source lines
    lines_count = sum(map(file_lines, ada_files))

    printcol("=================================", Colors.HEADER)
    printcol("= Performance testsuite results =", Colors.HEADER)
    printcol("=================================", Colors.HEADER)
    elapsed_list = []
    for _ in range(args.nb_runs):
        # Execute parse on the file list and get the elapsed time
        t = time()
        subprocess.check_call(["../build/bin/parse", "-s", "-F",
                               file_list_name])
        elapsed = time() - t
        elapsed_list.append(elapsed)

        # Print a very basic report
        print("Parsed {0} lines of Ada code in {1:.2f} seconds".format(
            lines_count, elapsed))

    print("")
    printcol("= Performance summary =", Colors.OKGREEN)
    print("Mean time to parse {0} lines of code : {1:.2f} seconds".format(
        lines_count, sum(elapsed_list) / float(len(elapsed_list))))
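# Condensed, standalone sketch of the timing loop above; the command is
# hypothetical, and time.perf_counter is the usual wall-clock choice on
# Python 3.
import statistics
import subprocess
import time

def benchmark(cmd, runs=5):
    # Run cmd several times and return the mean elapsed wall-clock time.
    elapsed = []
    for _ in range(runs):
        start = time.perf_counter()
        subprocess.check_call(cmd)
        elapsed.append(time.perf_counter() - start)
    return statistics.mean(elapsed)

# mean = benchmark(['./build/bin/parse', '-s', '-F', 'ada_file_list'])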
def _emit(self, file_root, generate_lexer, main_programs):
    """
    Emit native code for all the rules in this grammar as a library: a
    library specification and the corresponding implementation. Also emit a
    tiny program that can parse starting with any parsing rule for testing
    purposes.
    """
    lib_name_low = self.ada_api_settings.lib_name.lower()

    include_path = path.join(file_root, "include")
    src_path = path.join(file_root, "include", lib_name_low)
    lib_path = path.join(file_root, "lib")
    share_path = path.join(file_root, "share", lib_name_low)

    if not path.exists(file_root):
        os.mkdir(file_root)

    if self.verbosity.info:
        printcol("File setup...", Colors.OKBLUE)

    for d in ["include",
              "include/langkit_support",
              "include/{}".format(lib_name_low),
              "share",
              "share/{}".format(lib_name_low),
              "obj", "src", "bin",
              "lib", "lib/gnat"]:
        p = path.join(file_root, d)
        if not path.exists(p):
            os.mkdir(p)

    self.cache = caching.Cache(
        os.path.join(file_root, 'obj', 'langkit_cache'))

    # Create the project file for the generated library
    main_project_file = os.path.join(
        lib_path, "gnat",
        "{}.gpr".format(self.ada_api_settings.lib_name.lower()),
    )
    with open(main_project_file, "w") as f:
        f.write(self.render_template(
            "project_file",
            lib_name=self.ada_api_settings.lib_name,
            os_path=os.path,
            quex_path=os.environ['QUEX_PATH'],
        ))

    # Copy langkit_support sources files to the include prefix and
    # create its own project file.
    from os.path import dirname, abspath, join
    lngk_support_dir = join(dirname(abspath(__file__)), "support")
    for f in itertools.chain(glob(join(lngk_support_dir, "*.adb")),
                             glob(join(lngk_support_dir, "*.ads"))):
        shutil.copy(f, join(include_path, "langkit_support"))
    shutil.copy(join(lngk_support_dir, "langkit_support_installed.gpr"),
                join(lib_path, "gnat", "langkit_support.gpr"))

    # Copy adalog files. TODO: This is kludgeish to the extreme, and is
    # only a workaround for the fact that you can't "with" regular projects
    # from library projects.
    adalog_dir = join(dirname(abspath(__file__)), "adalog")
    for f in glob(join(adalog_dir, "src", "*.ad*")):
        shutil.copy(f, join(include_path, lib_name_low))

    # Copy additional source files from the language specification
    for filepath in self.additional_source_files:
        filename = os.path.basename(filepath)
        shutil.copy(filepath, join(src_path, filename))

    with open(os.path.join(share_path, 'ast-types.txt'), 'w') as f:
        from langkit import astdoc
        astdoc.write_astdoc(self, f)

    if self.verbosity.info:
        printcol("Generating sources... ", Colors.OKBLUE)

    ada_modules = [
        # Top (pure) package
        ("pkg_main", [], False),
        # Unit for initialization primitives
        ("pkg_init", ["init"], True),
        # Unit for analysis primitives
        ("pkg_analysis", ["analysis"], True),
        # Unit for all parsers
        ("parsers/pkg_main", ["analysis", "parsers"], True),
        # Unit for the lexer
        ("lexer/pkg_lexer", ["lexer"], True),
        # Unit for debug helpers
        ("pkg_debug", ["debug"], True),
    ]
    for template_base_name, qual_name, has_body in ada_modules:
        self.write_ada_module(src_path, template_base_name, qual_name,
                              has_body)

    with names.camel_with_underscores:
        write_ada_file(
            path.join(file_root, "src"), ADA_BODY, ["parse"],
            self.render_template("interactive_main_ada", _self=self))

    with names.lower:
        # ... and the Quex C interface
        write_cpp_file(
            path.join(src_path, "quex_interface.h"),
            self.render_template("lexer/quex_interface_header_c",
                                 _self=self))
        write_cpp_file(
            path.join(src_path, "quex_interface.c"),
            self.render_template("lexer/quex_interface_body_c",
                                 _self=self))

    imain_project_file = os.path.join(file_root, "src", "mains.gpr")
    with open(imain_project_file, "w") as f:
        f.write(self.render_template(
            "mains_project_file",
            lib_name=self.ada_api_settings.lib_name,
            main_programs=main_programs))

    self.emit_c_api(src_path, include_path)

    if self.python_api_settings:
        python_path = path.join(file_root, "python")
        if not path.exists(python_path):
            os.mkdir(python_path)
        self.emit_python_api(python_path)

    # Add any sources in $lang_path/extensions/support if it exists
    if self.ext('support'):
        for f in glob(join(self.ext('support'), "*.ad*")):
            shutil.copy(f, src_path)

    if self.verbosity.info:
        printcol("Compiling the quex lexer specification", Colors.OKBLUE)

    quex_file = os.path.join(src_path,
                             "{}.qx".format(self.lang_name.lower))
    quex_spec = self.lexer.emit()
    with open(quex_file, 'w') as f:
        f.write(quex_spec)

    # Generating the lexer C code with Quex is quite long: do it only when
    # the Quex specification changed from last build.
    if generate_lexer and self.cache.is_stale('quex_specification',
                                              quex_spec):
        quex_py_file = path.join(os.environ["QUEX_PATH"], "quex-exe.py")
        subprocess.check_call([
            sys.executable, quex_py_file,
            "-i", quex_file,
            "-o", "quex_lexer",
            "--buffer-element-size", "4",
            "--token-id-offset", "0x1000",
            "--language", "C",
            "--no-mode-transition-check",
            "--single-mode-analyzer",
            "--token-memory-management-by-user",
            "--token-policy", "single",
            "--token-id-prefix", self.lexer.prefix,
        ], cwd=src_path)

    self.cache.save()
def _emit(self, file_root):
    """
    Emit native code for all the rules in this grammar as a library: a
    library specification and the corresponding implementation. Also emit a
    tiny program that can parse starting with any parsing rule for testing
    purposes.
    """
    assert self.grammar, "Set grammar before calling emit"

    unreferenced_rules = self.grammar.get_unreferenced_rules(
        self.main_rule_name
    )
    if unreferenced_rules:
        print(
            "warning: The following parsing rules are not used: {}".format(
                ", ".join(sorted(unreferenced_rules))
            )
        )

    # Compute type information, so that it is available for further
    # compilation stages.
    self.compute_types()

    # Compute properties information, so that it is available for further
    # compilation stages.
    self.compute_properties()

    lib_name_low = self.ada_api_settings.lib_name.lower()

    include_path = path.join(file_root, "include")
    src_path = path.join(file_root, "include", lib_name_low)
    lib_path = path.join(file_root, "lib")
    share_path = path.join(file_root, "share", lib_name_low)

    if not path.exists(file_root):
        os.mkdir(file_root)

    printcol("File setup ...", Colors.OKBLUE)

    for d in ["include",
              "include/langkit_support",
              "include/{}".format(lib_name_low),
              "share",
              "share/{}".format(lib_name_low),
              "obj", "src", "bin",
              "lib", "lib/gnat"]:
        p = path.join(file_root, d)
        if not path.exists(p):
            os.mkdir(p)

    self.cache = caching.Cache(
        os.path.join(file_root, 'obj', 'langkit_cache')
    )

    # Create the project file for the generated library
    main_project_file = os.path.join(
        lib_path, "gnat",
        "{}.gpr".format(self.ada_api_settings.lib_name.lower()),
    )
    with open(main_project_file, "w") as f:
        f.write(self.render_template(
            "project_file",
            lib_name=self.ada_api_settings.lib_name,
            quex_path=os.environ["QUEX_PATH"],
        ))

    # Copy langkit_support sources files to the include prefix and
    # create its own project file.
    from os.path import dirname, abspath, join
    lngk_support_dir = join(dirname(abspath(__file__)), "support")
    for f in itertools.chain(glob(join(lngk_support_dir, "*.adb")),
                             glob(join(lngk_support_dir, "*.ads"))):
        shutil.copy(f, join(include_path, "langkit_support"))
    shutil.copy(join(lngk_support_dir, "langkit_support_installed.gpr"),
                join(lib_path, "gnat", "langkit_support.gpr"))

    printcol("Compiling the grammar...", Colors.OKBLUE)

    with names.camel_with_underscores:
        for r_name, r in self.grammar.rules.items():
            r.compute_fields_types()

        for r_name, r in self.grammar.rules.items():
            r.compile()
            self.rules_to_fn_names[r_name] = r

    not_resolved_types = set()
    for astnode_type in self.astnode_types:
        if not astnode_type.is_type_resolved:
            not_resolved_types.add(astnode_type)
    assert not not_resolved_types, (
        "The following ASTNode subclasses are not type resolved. They are"
        " not used by the grammar, and their types not annotated:"
        " {}".format(
            ", ".join(astnode_type.name().camel
                      for astnode_type in not_resolved_types)
        )
    )

    for i, astnode in enumerate(
        (astnode
         for astnode in self.astnode_types
         if not astnode.abstract),
        # Compute kind constants for all ASTNode concrete subclasses.
        # Start with 2: the constant 0 is reserved as an
        # error/uninitialized code and the constant 1 is reserved for all
        # ASTList nodes.
        start=2
    ):
        self.node_kind_constants[astnode] = i

    with open(os.path.join(share_path, 'ast-types.txt'), 'w') as f:
        astdoc.write_astdoc(self, f)

    # Now that all Struct subclasses referenced by the grammar have been
    # typed, iterate over all declared subclasses to register the ones that
    # are unreachable from the grammar. TODO: this kludge will eventually
    # disappear as part of OC22-016.
    for t in self.struct_types + self.astnode_types:
        t.add_to_context()

    printcol("Generating sources... ", Colors.OKBLUE)

    ada_modules = [
        # Top (pure) package
        ("pkg_main", [], False),
        # Unit for initialization primitives
        ("pkg_init", ["init"], True),
        # Unit for analysis primitives
        ("pkg_analysis", ["analysis"], True),
        # Unit for the root AST node
        ("pkg_ast", ["ast"], True),
        # Unit for generic AST lists
        ("pkg_ast_list", ["ast", "list"], True),
        # Unit for all derived AST nodes
        ("pkg_ast_types", ["ast", "types"], True),
        # Unit for all parsers
        ("parsers/pkg_main", ["ast", "types", "parsers"], True),
        # Unit for the lexer
        ("lexer/pkg_lexer", ["lexer"], True),
    ]
    for template_base_name, qual_name, has_body in ada_modules:
        self.write_ada_module(src_path, template_base_name, qual_name,
                              has_body)

    with names.camel_with_underscores:
        write_ada_file(
            path.join(file_root, "src"), ADA_BODY, ["parse"],
            self.render_template("interactive_main_ada", _self=self)
        )

    with names.lower:
        # ... and the Quex C interface
        write_cpp_file(path.join(src_path, "quex_interface.h"),
                       self.render_template(
                           "lexer/quex_interface_header_c",
                           _self=self))
        write_cpp_file(path.join(src_path, "quex_interface.c"),
                       self.render_template(
                           "lexer/quex_interface_body_c",
                           _self=self))

    imain_project_file = os.path.join(file_root, "src", "parse.gpr")
    with open(imain_project_file, "w") as f:
        f.write(self.render_template(
            "parse_project_file",
            lib_name=self.ada_api_settings.lib_name,
        ))

    self.emit_c_api(src_path, include_path)

    if self.python_api_settings:
        python_path = path.join(file_root, "python")
        if not path.exists(python_path):
            os.mkdir(python_path)
        self.emit_python_api(python_path)

    # Add any sources in $lang_path/extensions/support if it exists
    if self.ext('support'):
        for f in glob(join(self.ext('support'), "*.ad*")):
            shutil.copy(f, src_path)

    printcol("Compiling the quex lexer specification", Colors.OKBLUE)

    quex_file = os.path.join(src_path,
                             "{}.qx".format(self.lang_name.lower))
    quex_spec = self.lexer.emit()
    with open(quex_file, 'w') as f:
        f.write(quex_spec)

    # Generating the lexer C code with Quex is quite long: do it only when
    # the Quex specification changed from last build.
    if self.cache.is_stale('quex_specification', quex_spec):
        quex_py_file = path.join(os.environ["QUEX_PATH"], "quex-exe.py")
        subprocess.check_call([
            sys.executable, quex_py_file,
            "-i", quex_file,
            "-o", "quex_lexer",
            "--buffer-element-size", "4",
            "--token-id-offset", "0x1000",
            "--language", "C",
            "--no-mode-transition-check",
            "--single-mode-analyzer",
            "--token-memory-management-by-user",
            "--token-policy", "single",
        ], cwd=src_path)

    self.cache.save()
def _emit(self, file_root, generate_lexer, main_programs):
    """
    Emit native code for all the rules in this grammar as a library: a
    library specification and the corresponding implementation. Also emit a
    tiny program that can parse starting with any parsing rule for testing
    purposes.
    """
    lib_name_low = self.ada_api_settings.lib_name.lower()

    include_path = path.join(file_root, "include")
    src_path = path.join(file_root, "include", lib_name_low)
    lib_path = path.join(file_root, "lib")
    share_path = path.join(file_root, "share", lib_name_low)

    if not path.exists(file_root):
        os.mkdir(file_root)

    if self.verbosity.info:
        printcol("File setup...", Colors.OKBLUE)

    for d in ["include",
              "include/langkit_support",
              "include/{}".format(lib_name_low),
              "share",
              "share/{}".format(lib_name_low),
              "obj", "src", "bin",
              "lib", "lib/gnat"]:
        p = path.join(file_root, d)
        if not path.exists(p):
            os.mkdir(p)

    self.cache = caching.Cache(
        os.path.join(file_root, 'obj', 'langkit_cache')
    )

    # Create the project file for the generated library
    main_project_file = os.path.join(
        lib_path, "gnat",
        "{}.gpr".format(self.ada_api_settings.lib_name.lower()),
    )
    with open(main_project_file, "w") as f:
        f.write(self.render_template(
            "project_file",
            lib_name=self.ada_api_settings.lib_name,
            os_path=os.path,
            quex_path=os.environ['QUEX_PATH'],
        ))

    # Copy langkit_support sources files to the include prefix and
    # create its own project file.
    from os.path import dirname, abspath, join
    lngk_support_dir = join(dirname(abspath(__file__)), "support")
    for f in itertools.chain(glob(join(lngk_support_dir, "*.adb")),
                             glob(join(lngk_support_dir, "*.ads"))):
        shutil.copy(f, join(include_path, "langkit_support"))
    shutil.copy(join(lngk_support_dir, "langkit_support_installed.gpr"),
                join(lib_path, "gnat", "langkit_support.gpr"))

    # Copy adalog files. TODO: This is kludgeish to the extreme, and is
    # only a workaround for the fact that you can't "with" regular projects
    # from library projects.
    adalog_dir = join(dirname(abspath(__file__)), "adalog")
    for f in glob(join(adalog_dir, "src", "*.ad*")):
        shutil.copy(f, join(include_path, lib_name_low))

    # Copy additional source files from the language specification
    for filepath in self.additional_source_files:
        filename = os.path.basename(filepath)
        shutil.copy(filepath, join(src_path, filename))

    with open(os.path.join(share_path, 'ast-types.txt'), 'w') as f:
        from langkit import astdoc
        astdoc.write_astdoc(self, f)

    if self.verbosity.info:
        printcol("Generating sources... ", Colors.OKBLUE)

    ada_modules = [
        # Top (pure) package
        ("pkg_main", [], False),
        # Unit for initialization primitives
        ("pkg_init", ["init"], True),
        # Unit for analysis primitives
        ("pkg_analysis_interfaces", ["analysis_interfaces"], True),
        # Unit for analysis unit conversions hack
        ("pkg_analysis_internal", ["analysis", "internal"], False),
        # Unit for analysis primitives
        ("pkg_analysis", ["analysis"], True),
        # Unit for the root AST node
        ("pkg_ast", ["ast"], True),
        # Unit for generic AST lists
        ("pkg_ast_list", ["ast", "list"], True),
        # Unit for all derived AST nodes
        ("pkg_ast_types", ["ast", "types"], True),
        # Unit for all parsers
        ("parsers/pkg_main", ["ast", "types", "parsers"], True),
        # Unit for the lexer
        ("lexer/pkg_lexer", ["lexer"], True),
        # Unit for debug helpers
        ("pkg_debug", ["debug"], True),
    ]
    for template_base_name, qual_name, has_body in ada_modules:
        self.write_ada_module(src_path, template_base_name, qual_name,
                              has_body)

    with names.camel_with_underscores:
        write_ada_file(
            path.join(file_root, "src"), ADA_BODY, ["parse"],
            self.render_template("interactive_main_ada", _self=self)
        )

    with names.lower:
        # ... and the Quex C interface
        write_cpp_file(path.join(src_path, "quex_interface.h"),
                       self.render_template(
                           "lexer/quex_interface_header_c",
                           _self=self))
        write_cpp_file(path.join(src_path, "quex_interface.c"),
                       self.render_template(
                           "lexer/quex_interface_body_c",
                           _self=self))

    imain_project_file = os.path.join(file_root, "src", "mains.gpr")
    with open(imain_project_file, "w") as f:
        f.write(self.render_template(
            "mains_project_file",
            lib_name=self.ada_api_settings.lib_name,
            main_programs=main_programs
        ))

    self.emit_c_api(src_path, include_path)

    if self.python_api_settings:
        python_path = path.join(file_root, "python")
        if not path.exists(python_path):
            os.mkdir(python_path)
        self.emit_python_api(python_path)

    # Add any sources in $lang_path/extensions/support if it exists
    if self.ext('support'):
        for f in glob(join(self.ext('support'), "*.ad*")):
            shutil.copy(f, src_path)

    if self.verbosity.info:
        printcol("Compiling the quex lexer specification", Colors.OKBLUE)

    quex_file = os.path.join(src_path,
                             "{}.qx".format(self.lang_name.lower))
    quex_spec = self.lexer.emit()
    with open(quex_file, 'w') as f:
        f.write(quex_spec)

    # Generating the lexer C code with Quex is quite long: do it only when
    # the Quex specification changed from last build.
    if generate_lexer and self.cache.is_stale('quex_specification',
                                              quex_spec):
        quex_py_file = path.join(os.environ["QUEX_PATH"], "quex-exe.py")
        subprocess.check_call([
            sys.executable, quex_py_file,
            "-i", quex_file,
            "-o", "quex_lexer",
            "--buffer-element-size", "4",
            "--token-id-offset", "0x1000",
            "--language", "C",
            "--no-mode-transition-check",
            "--single-mode-analyzer",
            "--token-memory-management-by-user",
            "--token-policy", "single",
        ], cwd=src_path)

    self.cache.save()
def do_perf_test(self, args):
    """
    Run the performance regression testsuite.
    """
    from time import time

    def file_lines(filename):
        with open(filename) as f:
            return len(list(f))

    work_dir = os.path.abspath(args.work_dir)

    if not args.no_recompile:
        # Build libadalang in production mode inside of the perf testsuite
        # directory.
        args.build_dir = os.path.join(work_dir, 'build')
        self.dirs.set_build_dir(args.build_dir)
        args.build_mode = 'prod'
        self._mkdir(args.build_dir)
        self.do_make(args)

    # Checkout the code bases that we will use for the perf testsuite
    os.chdir(work_dir)
    if not os.path.exists('gnat'):
        subprocess.check_call([
            'svn', 'co',
            'svn+ssh://svn.us.adacore.com/Dev/trunk/gnat',
            '-r', '314163'])
    if not os.path.exists('gps'):
        subprocess.check_call(['git', 'clone',
                               'ssh://review.eu.adacore.com:29418/gps'])
    os.chdir('gps')
    subprocess.check_call(['git', 'checkout',
                           '00b73897a867514732d48ae1429faf97fb07ad7c'])
    os.chdir('..')

    # Make a list of every ada file. Exclude some files that are contained
    # here but that we do not parse correctly. Materialize the list, as it
    # is iterated twice below (file list and line count).
    excluded_patterns = ['@', 'a-numeri', 'rad-project']
    ada_files = [
        f for f in self._find_ada_sources(work_dir)
        if all(p not in f for p in excluded_patterns)
    ]
    file_list_name = 'ada_file_list'
    with open(file_list_name, 'w') as file_list:
        for f in ada_files:
            file_list.write(f + '\n')

    # Get a count of the total number of ada source lines
    lines_count = sum(map(file_lines, ada_files))

    printcol("=================================", Colors.HEADER)
    printcol("= Performance testsuite results =", Colors.HEADER)
    printcol("=================================", Colors.HEADER)
    elapsed_list = []
    for _ in range(args.nb_runs):
        # Execute parse on the file list and get the elapsed time
        t = time()
        subprocess.check_call(['build/bin/parse', '-s', '-F',
                               file_list_name])
        elapsed = time() - t
        elapsed_list.append(elapsed)

        # Print a very basic report
        print("Parsed {0} lines of Ada code in {1:.2f} seconds".format(
            lines_count, elapsed))

    print("")
    printcol("= Performance summary =", Colors.OKGREEN)
    print("Mean time to parse {0} lines of code : {1:.2f} seconds".format(
        lines_count, sum(elapsed_list) / float(len(elapsed_list))))
def write_report(text, color=None):
    if color:
        printcol(text, color)
    else:
        print(text)
    print(text, file=f)
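# Standalone sketch of the same tee-to-console-and-file idea; it reuses the
# printcol stand-in sketched earlier, and the report path is hypothetical.
def make_report_writer(f):
    # The returned writer echoes to the console (optionally colored) and
    # always appends the plain text to the report file.
    def write_report(text, color=None):
        if color:
            printcol(text, color)
        else:
            print(text)
        print(text, file=f)
    return write_report

with open('perf_report.txt', 'w') as report:
    write_report = make_report_writer(report)
    write_report('= Performance summary =')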
def _compile(self):
    """
    Compile the language specification: perform legality checks and type
    inference.
    """
    # Compile the first time, do nothing next times
    if self.compiled:
        return
    self.compiled = True

    assert self.grammar, "Set grammar before compiling"

    if not self.grammar.rules.get(self.main_rule_name, None):
        close_matches = difflib.get_close_matches(
            self.main_rule_name, self.grammar.rules.keys()
        )

        with self.grammar.context():
            check_source_language(
                False,
                'Invalid rule name specified for main rule: "{}". '
                '{}'.format(
                    self.main_rule_name,
                    'Did you mean "{}"?'.format(close_matches[0])
                    if close_matches else ""
                )
            )

    unreferenced_rules = self.grammar.get_unreferenced_rules()

    check_source_language(
        not unreferenced_rules,
        "The following parsing rules are not used: {}".format(
            ", ".join(sorted(unreferenced_rules))
        ),
        severity=Severity.warning
    )

    # Compute type information, so that it is available for further
    # compilation stages.
    self.compute_types()
    errors_checkpoint()

    if self.verbosity.info:
        printcol("Compiling the grammar...", Colors.OKBLUE)

    with names.camel_with_underscores:
        # Compute the type of fields for types used in the grammar
        for r_name, r in self.grammar.rules.items():
            r.compute_fields_types()

        # Compute properties information, so that it is available for
        # further compilation stages.
        self.compute_properties()
        errors_checkpoint()

        for r_name, r in self.grammar.rules.items():
            r.compile()
            self.rules_to_fn_names[r_name] = r

    unresolved_types = set([t for t in self.astnode_types
                            if not t.is_type_resolved])
    check_source_language(
        not unresolved_types,
        "The following ASTNode subclasses are not type resolved. They are"
        " not used by the grammar, and their types not annotated:"
        " {}".format(", ".join(t.name().camel for t in unresolved_types))
    )

    astnodes_files = {
        path.abspath(inspect.getsourcefile(n))
        for n in self.astnode_types
    }

    if self.annotate_fields_types:
        # Only import lib2to3 if the user needs it
        import lib2to3.main
        lib2to3.main.main(
            "langkit",
            ["-f", "annotate_fields_types", "--no-diff", "-w"]
            + list(astnodes_files)
        )

    for i, astnode in enumerate(
        (astnode
         for astnode in self.astnode_types
         if not astnode.abstract),
        # Compute kind constants for all ASTNode concrete subclasses.
        # Start with 2: the constant 0 is reserved as an
        # error/uninitialized code and the constant 1 is reserved for all
        # ASTList nodes.
        start=2
    ):
        self.node_kind_constants[astnode] = i

    # Now that all Struct subclasses referenced by the grammar have been
    # typed, iterate over all declared subclasses to register the ones that
    # are unreachable from the grammar. TODO: this kludge will eventually
    # disappear as part of OC22-016.
    for t in self.struct_types + self.astnode_types:
        t.add_to_context()

    errors_checkpoint()