Example #1
    def emit_astdoc(self, ctx):
        """
        If requested, generate the HTML documentation for node types.
        """
        if not self.generate_astdoc:
            return

        from langkit import astdoc

        f = StringIO()
        astdoc.write_astdoc(ctx, f)
        f.seek(0)
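        # Hand the rendered documentation over to write_source_file so it
        # lands in the share directory as ast-types.html.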
        write_source_file(os.path.join(self.share_path, 'ast-types.html'),
                          f.read())

    def _emit(self, file_root):
        """
        Emit native code for all the rules in this grammar as a library:
        a library specification and the corresponding implementation.  Also
        emit a tiny program that can parse starting with any parsing rule for
        testing purposes.
        """
        assert self.grammar, "Set grammar before calling emit"

        unreferenced_rules = self.grammar.get_unreferenced_rules(
            self.main_rule_name
        )
        if unreferenced_rules:
            print(
                "warning: The following parsing rules are not used: {}".format(
                    ", ".join(sorted(unreferenced_rules))
                )
            )

        # Compute type information, so that it is available for further
        # compilation stages.
        self.compute_types()

        # Compute properties information, so that it is available for further
        # compilation stages.
        self.compute_properties()

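        # Installation layout: generated sources go under include/<lib>, the
        # GNAT project file under lib/gnat and the documentation under
        # share/<lib>.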
        lib_name_low = self.ada_api_settings.lib_name.lower()

        include_path = path.join(file_root, "include")
        src_path = path.join(file_root, "include", lib_name_low)
        lib_path = path.join(file_root, "lib")
        share_path = path.join(file_root, "share", lib_name_low)

        if not path.exists(file_root):
            os.mkdir(file_root)

        printcol("File setup ...", Colors.OKBLUE)

        for d in ["include",
                  "include/langkit_support",
                  "include/{}".format(lib_name_low),
                  "share",
                  "share/{}".format(lib_name_low),
                  "obj", "src", "bin",
                  "lib", "lib/gnat"]:
            p = path.join(file_root, d)
            if not path.exists(p):
                os.mkdir(p)

        self.cache = caching.Cache(
            os.path.join(file_root, 'obj', 'langkit_cache')
        )

        # Create the project file for the generated library
        main_project_file = os.path.join(
            lib_path, "gnat",
            "{}.gpr".format(self.ada_api_settings.lib_name.lower()),
        )
        with open(main_project_file, "w") as f:
            f.write(self.render_template(
                "project_file",
                lib_name=self.ada_api_settings.lib_name,
                quex_path=os.environ["QUEX_PATH"],
            ))

        # Copy the langkit_support source files to the include prefix and
        # create its own project file.
        from os.path import dirname, abspath, join
        lngk_support_dir = join(dirname(abspath(__file__)), "support")

        for f in itertools.chain(glob(join(lngk_support_dir, "*.adb")),
                                 glob(join(lngk_support_dir, "*.ads"))):
            shutil.copy(f, join(include_path, "langkit_support"))
        shutil.copy(join(lngk_support_dir, "langkit_support_installed.gpr"),
                    join(lib_path, "gnat", "langkit_support.gpr"))

        printcol("Compiling the grammar...", Colors.OKBLUE)

        with names.camel_with_underscores:
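            # Generated names use Camel_With_Underscores casing in this block.
            # Resolve the types of each rule's fields first, then compile the
            # rules themselves.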
            for r_name, r in self.grammar.rules.items():
                r.compute_fields_types()

            for r_name, r in self.grammar.rules.items():
                r.compile()
                self.rules_to_fn_names[r_name] = r

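        # Sanity check: every ASTNode subclass must be type-resolved at this
        # point, either because the grammar uses it or because its types are
        # annotated explicitly.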
        not_resolved_types = set()
        for astnode_type in self.astnode_types:
            if not astnode_type.is_type_resolved:
                not_resolved_types.add(astnode_type)
        assert not not_resolved_types, (
            "The following ASTNode subclasses are not type resolved. They are"
            " not used by the grammar, and their types are not annotated:"
            " {}".format(
                ", ".join(astnode_type.name().camel
                          for astnode_type in not_resolved_types)
            )
        )

        # Compute kind constants for all ASTNode concrete subclasses. Start
        # with 2: the constant 0 is reserved as an error/uninitialized code
        # and the constant 1 is reserved for all ASTList nodes.
        for i, astnode in enumerate(
            (astnode
             for astnode in self.astnode_types
             if not astnode.abstract),
            start=2
        ):
            self.node_kind_constants[astnode] = i

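        # Emit a plain-text description of the node types in the share
        # directory.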
        with open(os.path.join(share_path, 'ast-types.txt'), 'w') as f:
            from langkit import astdoc
            astdoc.write_astdoc(self, f)

        # Now that all Struct subclasses referenced by the grammar have been
        # typed, iterate over all declared subclasses to register the ones that
        # are unreachable from the grammar.  TODO: this kludge will eventually
        # disappear as part of OC22-016.
        for t in self.struct_types + self.astnode_types:
            t.add_to_context()

        printcol("Generating sources... ", Colors.OKBLUE)

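        # Ada units to generate, as (template base name, qualified unit name,
        # whether a body must be emitted) tuples consumed by write_ada_module.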
        ada_modules = [
            # Top (pure) package
            ("pkg_main",         [], False),
            # Unit for initialization primitives
            ("pkg_init",         ["init"], True),
            # Unit for analysis primitives
            ("pkg_analysis",     ["analysis"], True),
            # Unit for the root AST node
            ("pkg_ast",          ["ast"], True),
            # Unit for generic AST lists
            ("pkg_ast_list",     ["ast", "list"], True),
            # Unit for all derived AST nodes
            ("pkg_ast_types",    ["ast", "types"], True),
            # Unit for all parsers
            ("parsers/pkg_main", ["ast", "types", "parsers"], True),
            # Unit for the lexer
            ("lexer/pkg_lexer",  ["lexer"], True),
        ]

        for template_base_name, qual_name, has_body in ada_modules:
            self.write_ada_module(src_path, template_base_name, qual_name,
                                  has_body)

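        # Emit the body of the interactive "parse" test program into src/.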
        with names.camel_with_underscores:
            write_ada_file(
                path.join(file_root, "src"), ADA_BODY, ["parse"],
                self.render_template("interactive_main_ada", _self=self)
            )

        with names.lower:
            # ... and the Quex C interface
            write_cpp_file(path.join(src_path, "quex_interface.h"),
                           self.render_template(
                               "lexer/quex_interface_header_c",
                               _self=self))
            write_cpp_file(path.join(src_path, "quex_interface.c"),
                           self.render_template(
                               "lexer/quex_interface_body_c",
                               _self=self))

        imain_project_file = os.path.join(file_root, "src", "parse.gpr")
        with open(imain_project_file, "w") as f:
            f.write(self.render_template(
                "parse_project_file",
                lib_name=self.ada_api_settings.lib_name,
            ))

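        # Emit the C API and, when a Python API is configured, the Python
        # binding.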
        self.emit_c_api(src_path, include_path)
        if self.python_api_settings:
            python_path = path.join(file_root, "python")
            if not path.exists(python_path):
                os.mkdir(python_path)
            self.emit_python_api(python_path)

        # Add any sources in $lang_path/extensions/support if it exists
        if self.ext('support'):
            for f in glob(join(self.ext('support'), "*.ad*")):
                shutil.copy(f, src_path)

        printcol("Compiling the quex lexer specification", Colors.OKBLUE)

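        # Regenerate the Quex specification from the lexer definition and
        # write it next to the generated sources.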
        quex_file = os.path.join(src_path,
                                 "{}.qx".format(self.lang_name.lower))
        quex_spec = self.lexer.emit()
        with open(quex_file, 'w') as f:
            f.write(quex_spec)

        # Generating the lexer C code with Quex is quite long: do it only when
        # the Quex specification changed since the last build.
        if self.cache.is_stale('quex_specification', quex_spec):
            quex_py_file = path.join(os.environ["QUEX_PATH"], "quex-exe.py")
            subprocess.check_call([sys.executable, quex_py_file, "-i",
                                   quex_file,
                                   "-o", "quex_lexer",
                                   "--buffer-element-size", "4",
                                   "--token-id-offset",  "0x1000",
                                   "--language", "C",
                                   "--no-mode-transition-check",
                                   "--single-mode-analyzer",
                                   "--token-memory-management-by-user",
                                   "--token-policy", "single"],
                                  cwd=src_path)

        self.cache.save()
Example #3
    def _emit(self, file_root, generate_lexer, main_programs):
        """
        Emit native code for all the rules in this grammar as a library:
        a library specification and the corresponding implementation.  Also
        emit a tiny program that can parse starting with any parsing rule for
        testing purposes.
        """
        lib_name_low = self.ada_api_settings.lib_name.lower()

        include_path = path.join(file_root, "include")
        src_path = path.join(file_root, "include", lib_name_low)
        lib_path = path.join(file_root, "lib")
        share_path = path.join(file_root, "share", lib_name_low)

        if not path.exists(file_root):
            os.mkdir(file_root)

        if self.verbosity.info:
            printcol("File setup...", Colors.OKBLUE)

        for d in ["include",
                  "include/langkit_support",
                  "include/{}".format(lib_name_low),
                  "share",
                  "share/{}".format(lib_name_low),
                  "obj", "src", "bin",
                  "lib", "lib/gnat"]:
            p = path.join(file_root, d)
            if not path.exists(p):
                os.mkdir(p)

        self.cache = caching.Cache(
            os.path.join(file_root, 'obj', 'langkit_cache')
        )

        # Create the project file for the generated library
        main_project_file = os.path.join(
            lib_path, "gnat",
            "{}.gpr".format(self.ada_api_settings.lib_name.lower()),
        )
        with open(main_project_file, "w") as f:
            f.write(self.render_template(
                "project_file",
                lib_name=self.ada_api_settings.lib_name,
                os_path=os.path,
                quex_path=os.environ['QUEX_PATH'],
            ))

        # Copy the langkit_support source files to the include prefix and
        # create its own project file.
        from os.path import dirname, abspath, join
        lngk_support_dir = join(dirname(abspath(__file__)), "support")

        for f in itertools.chain(glob(join(lngk_support_dir, "*.adb")),
                                 glob(join(lngk_support_dir, "*.ads"))):
            shutil.copy(f, join(include_path, "langkit_support"))
        shutil.copy(join(lngk_support_dir, "langkit_support_installed.gpr"),
                    join(lib_path, "gnat", "langkit_support.gpr"))

        # Copy adalog files. TODO: This is kludgeish to the extreme, and is
        # only a workaround for the fact that you cannot "with" regular
        # projects from library projects.
        adalog_dir = join(dirname(abspath(__file__)), "adalog")
        for f in glob(join(adalog_dir, "src", "*.ad*")):
            shutil.copy(f, join(include_path, lib_name_low))

        # Copy additional source files from the language specification
        for filepath in self.additional_source_files:
            filename = os.path.basename(filepath)
            shutil.copy(filepath, join(src_path, filename))

        with open(os.path.join(share_path, 'ast-types.txt'), 'w') as f:
            from langkit import astdoc
            astdoc.write_astdoc(self, f)

        if self.verbosity.info:
            printcol("Generating sources... ", Colors.OKBLUE)

        ada_modules = [
            # Top (pure) package
            ("pkg_main",         [], False),
            # Unit for initialization primitives
            ("pkg_init",         ["init"], True),
            # Unit for analysis interfaces
            ("pkg_analysis_interfaces", ["analysis_interfaces"], True),
            # Unit for analysis unit conversions hack
            ("pkg_analysis_internal", ["analysis", "internal"], False),
            # Unit for analysis primitives
            ("pkg_analysis",     ["analysis"], True),
            # Unit for the root AST node
            ("pkg_ast",          ["ast"], True),
            # Unit for generic AST lists
            ("pkg_ast_list",     ["ast", "list"], True),
            # Unit for all derived AST nodes
            ("pkg_ast_types",    ["ast", "types"], True),
            # Unit for all parsers
            ("parsers/pkg_main", ["ast", "types", "parsers"], True),
            # Unit for the lexer
            ("lexer/pkg_lexer",  ["lexer"], True),
            # Unit for debug helpers
            ("pkg_debug",        ["debug"], True),
        ]

        for template_base_name, qual_name, has_body in ada_modules:
            self.write_ada_module(src_path, template_base_name, qual_name,
                                  has_body)

        with names.camel_with_underscores:
            write_ada_file(
                path.join(file_root, "src"), ADA_BODY, ["parse"],
                self.render_template("interactive_main_ada", _self=self)
            )

        with names.lower:
            # ... and the Quex C interface
            write_cpp_file(path.join(src_path, "quex_interface.h"),
                           self.render_template(
                               "lexer/quex_interface_header_c",
                               _self=self))
            write_cpp_file(path.join(src_path, "quex_interface.c"),
                           self.render_template(
                               "lexer/quex_interface_body_c",
                               _self=self))

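        # Project file covering the requested main programs.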
        imain_project_file = os.path.join(file_root, "src", "mains.gpr")
        with open(imain_project_file, "w") as f:
            f.write(self.render_template(
                "mains_project_file",
                lib_name=self.ada_api_settings.lib_name,
                main_programs=main_programs
            ))

        self.emit_c_api(src_path, include_path)
        if self.python_api_settings:
            python_path = path.join(file_root, "python")
            if not path.exists(python_path):
                os.mkdir(python_path)
            self.emit_python_api(python_path)

        # Add any sources in $lang_path/extensions/support if it exists
        if self.ext('support'):
            for f in glob(join(self.ext('support'), "*.ad*")):
                shutil.copy(f, src_path)

        if self.verbosity.info:
            printcol("Compiling the quex lexer specification", Colors.OKBLUE)

        quex_file = os.path.join(src_path,
                                 "{}.qx".format(self.lang_name.lower))
        quex_spec = self.lexer.emit()
        with open(quex_file, 'w') as f:
            f.write(quex_spec)

        # Generating the lexer C code with Quex is quite long: do it only when
        # lexer generation is requested and the Quex specification changed
        # since the last build.
        if generate_lexer and self.cache.is_stale('quex_specification',
                                                  quex_spec):
            quex_py_file = path.join(os.environ["QUEX_PATH"], "quex-exe.py")
            subprocess.check_call([sys.executable, quex_py_file, "-i",
                                   quex_file,
                                   "-o", "quex_lexer",
                                   "--buffer-element-size", "4",
                                   "--token-id-offset",  "0x1000",
                                   "--language", "C",
                                   "--no-mode-transition-check",
                                   "--single-mode-analyzer",
                                   "--token-memory-management-by-user",
                                   "--token-policy", "single"],
                                  cwd=src_path)

        self.cache.save()
Example #4
    def _emit(self, file_root, generate_lexer, main_programs):
        """
        Emit native code for all the rules in this grammar as a library:
        a library specification and the corresponding implementation.  Also
        emit a tiny program that can parse starting with any parsing rule for
        testing purposes.
        """
        lib_name_low = self.ada_api_settings.lib_name.lower()

        include_path = path.join(file_root, "include")
        src_path = path.join(file_root, "include", lib_name_low)
        lib_path = path.join(file_root, "lib")
        share_path = path.join(file_root, "share", lib_name_low)

        if not path.exists(file_root):
            os.mkdir(file_root)

        if self.verbosity.info:
            printcol("File setup...", Colors.OKBLUE)

        for d in [
                "include", "include/langkit_support",
                "include/{}".format(lib_name_low), "share",
                "share/{}".format(lib_name_low), "obj", "src", "bin", "lib",
                "lib/gnat"
        ]:
            p = path.join(file_root, d)
            if not path.exists(p):
                os.mkdir(p)

        self.cache = caching.Cache(
            os.path.join(file_root, 'obj', 'langkit_cache'))

        # Create the project file for the generated library
        main_project_file = os.path.join(
            lib_path,
            "gnat",
            "{}.gpr".format(self.ada_api_settings.lib_name.lower()),
        )
        with open(main_project_file, "w") as f:
            f.write(
                self.render_template(
                    "project_file",
                    lib_name=self.ada_api_settings.lib_name,
                    os_path=os.path,
                    quex_path=os.environ['QUEX_PATH'],
                ))

        # Copy the langkit_support source files to the include prefix and
        # create its own project file.
        from os.path import dirname, abspath, join
        lngk_support_dir = join(dirname(abspath(__file__)), "support")

        for f in itertools.chain(glob(join(lngk_support_dir, "*.adb")),
                                 glob(join(lngk_support_dir, "*.ads"))):
            shutil.copy(f, join(include_path, "langkit_support"))
        shutil.copy(join(lngk_support_dir, "langkit_support_installed.gpr"),
                    join(lib_path, "gnat", "langkit_support.gpr"))

        # Copy adalog files. TODO: This is kludgeish to the extreme, and is
        # only a workaround for the fact that you cannot "with" regular
        # projects from library projects.
        adalog_dir = join(dirname(abspath(__file__)), "adalog")
        for f in glob(join(adalog_dir, "src", "*.ad*")):
            shutil.copy(f, join(include_path, lib_name_low))

        # Copy additional source files from the language specification
        for filepath in self.additional_source_files:
            filename = os.path.basename(filepath)
            shutil.copy(filepath, join(src_path, filename))

        with open(os.path.join(share_path, 'ast-types.txt'), 'w') as f:
            from langkit import astdoc
            astdoc.write_astdoc(self, f)

        if self.verbosity.info:
            printcol("Generating sources... ", Colors.OKBLUE)

        ada_modules = [
            # Top (pure) package
            ("pkg_main", [], False),
            # Unit for initialization primitives
            ("pkg_init", ["init"], True),
            # Unit for analysis primitives
            ("pkg_analysis", ["analysis"], True),
            # Unit for all parsers
            ("parsers/pkg_main", ["analysis", "parsers"], True),
            # Unit for the lexer
            ("lexer/pkg_lexer", ["lexer"], True),
            # Unit for debug helpers
            ("pkg_debug", ["debug"], True),
        ]

        for template_base_name, qual_name, has_body in ada_modules:
            self.write_ada_module(src_path, template_base_name, qual_name,
                                  has_body)

        with names.camel_with_underscores:
            write_ada_file(
                path.join(file_root, "src"), ADA_BODY, ["parse"],
                self.render_template("interactive_main_ada", _self=self))

        with names.lower:
            # ... and the Quex C interface
            write_cpp_file(
                path.join(src_path, "quex_interface.h"),
                self.render_template("lexer/quex_interface_header_c",
                                     _self=self))
            write_cpp_file(
                path.join(src_path, "quex_interface.c"),
                self.render_template("lexer/quex_interface_body_c",
                                     _self=self))

        imain_project_file = os.path.join(file_root, "src", "mains.gpr")
        with open(imain_project_file, "w") as f:
            f.write(
                self.render_template("mains_project_file",
                                     lib_name=self.ada_api_settings.lib_name,
                                     main_programs=main_programs))

        self.emit_c_api(src_path, include_path)
        if self.python_api_settings:
            python_path = path.join(file_root, "python")
            if not path.exists(python_path):
                os.mkdir(python_path)
            self.emit_python_api(python_path)

        # Add any sources in $lang_path/extensions/support if it exists
        if self.ext('support'):
            for f in glob(join(self.ext('support'), "*.ad*")):
                shutil.copy(f, src_path)

        if self.verbosity.info:
            printcol("Compiling the quex lexer specification", Colors.OKBLUE)

        quex_file = os.path.join(src_path,
                                 "{}.qx".format(self.lang_name.lower))
        quex_spec = self.lexer.emit()
        with open(quex_file, 'w') as f:
            f.write(quex_spec)

        # Generating the lexer C code with Quex is quite long: do it only when
        # lexer generation is requested and the Quex specification changed
        # since the last build.
        if generate_lexer and self.cache.is_stale('quex_specification',
                                                  quex_spec):
            quex_py_file = path.join(os.environ["QUEX_PATH"], "quex-exe.py")
            subprocess.check_call([
                sys.executable, quex_py_file, "-i", quex_file, "-o",
                "quex_lexer", "--buffer-element-size", "4",
                "--token-id-offset", "0x1000", "--language", "C",
                "--no-mode-transition-check", "--single-mode-analyzer",
                "--token-memory-management-by-user", "--token-policy",
                "single", "--token-id-prefix", self.lexer.prefix
            ], cwd=src_path)

        self.cache.save()