Example #1
    def gen_for_module(self, module):
        # attach VS2010-specific data to the model
        module.solution = self.Solution(self, module)

        for t in module.targets.itervalues():
            with error_context(t):
                prj = self.get_project_object(t)
                if prj is None:
                    # TODO: see the TODO in get_project_object()
                    continue
                if prj.name != t.name:
                # TODO: This is only for the solution file; we should remap the name instead of
                #       failing. Note that we don't always control prj.name; it may come from an
                #       external project file.
                    raise Error("project name (\"%s\") differs from target name (\"%s\"), they must be the same" %
                                (prj.name, t.name))
                if prj.version and prj.version not in self.proj_versions:
                    if prj.version > self.proj_versions[-1]:
                        raise Error("project %s is for Visual Studio %.1f and will not work with %.1f" %
                                    (prj.projectfile, prj.version, self.version))
                    else:
                        warning("project %s is for Visual Studio %.1f, not %.1f, will be converted when built",
                                prj.projectfile, prj.version, self.version)

                if self.is_natively_supported(t):
                    self.gen_for_target(t, prj)

                module.solution.add_project(prj)
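
Every example on this page wraps model traversal in `error_context`. Its implementation is not shown here; the following is a minimal sketch of what such a context manager plausibly does, assuming model objects carry a `source_pos` attribute and that `Error` has an optional `pos` field (both are assumptions, not bakefile's confirmed API):

from contextlib import contextmanager

class Error(Exception):
    """Simplified stand-in for bakefile's Error exception."""
    def __init__(self, msg, pos=None):
        super(Error, self).__init__(msg)
        self.msg = msg
        self.pos = pos

@contextmanager
def error_context(context_obj):
    # If an Error escapes without position information, borrow it from the
    # active context object (a target, module, source file, ...) so the
    # user sees where in the input files the problem originated.
    try:
        yield
    except Error as e:
        if e.pos is None:
            # 'source_pos' is a hypothetical attribute name.
            e.pos = getattr(context_obj, "source_pos", None)
        raise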
Example #2
    def _do_format_node(self, n, indent):
        attrs = self._get_quoted_nonempty_attrs(n)
        if n.children:
            children_markup = []
            assert not n.text, "nodes with both text and children not implemented"
            subindent = indent + self.indent_step
            for key, value in n.children:
                if isinstance(value, Node):
                    assert key == value.name
                    children_markup.append(self._do_format_node(value, subindent))
                else:
                    try:
                        v = escape(self.format_value(value))
                        if v:
                            children_markup.append("%s<%s>%s</%s>\n" % (subindent, key, v, key))
                        # else: empty value, don't write that
                    except CannotDetermineError as e:
                        with error_context(value):
                            raise Error("cannot set property \"%s\" to non-constant expression \"%s\" (%s)" %
                                        (key, value, e.msg))
            children_markup = "".join(children_markup)
        else:
            children_markup = None
        text = self.format_value(n.text) if n.text else None
        return self.format_node(n.name, attrs, text, children_markup, indent)
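
As a rough, self-contained illustration of the recursion above (Node, the escaping, and the output format are simplified stand-ins, not bakefile's real classes):

from xml.sax.saxutils import escape

class Node(object):
    # Hypothetical stand-in for the real model class: 'children' is a list
    # of (key, value) pairs where a value is another Node or a plain string.
    def __init__(self, name, text=None, children=None):
        self.name, self.text, self.children = name, text, children or []

def format_node(n, indent="", step="  "):
    # Leaf node: emit <name>text</name> on one line.
    if not n.children:
        return "%s<%s>%s</%s>\n" % (indent, n.name, escape(n.text or ""), n.name)
    subindent = indent + step
    inner = []
    for key, value in n.children:
        if isinstance(value, Node):
            inner.append(format_node(value, subindent, step))
        elif value:  # empty values are skipped, as in the real code
            inner.append("%s<%s>%s</%s>\n" % (subindent, key, escape(value), key))
    return "%s<%s>\n%s%s</%s>\n" % (indent, n.name, "".join(inner), indent, n.name)

root = Node("PropertyGroup", children=[
    ("Configuration", "Debug"),
    ("SubItem", Node("SubItem", text="value")),
])
print(format_node(root))
# <PropertyGroup>
#   <Configuration>Debug</Configuration>
#   <SubItem>value</SubItem>
# </PropertyGroup>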
Example #3
File: native.py  Project: solotic/bakefile
    def _find_linkable_deps(self, target, found, recursed):
        # Note: We must ensure that the dependencies are in the correct link
        #       order for Unix linkers. I.e. all dependencies of a library must
        #       be to the right side of it in the resulting list.
        #
        #       A simple way to accomplish this is to scan the dependencies
        #       backwards (because the 'deps' property must be ordered
        #       Unix-style) _and_ put the recursively found libraries in front
        #       of the parent/dependent one. The result will be in inverse order,
        #       but that's easily corrected by the caller.
        with error_context(target):
            if target in recursed:
                raise Error("circular dependency between targets")
            recursed.add(target)

            project = target.project
            deps = reversed([project.get_target(x.as_py()) for x in target["deps"]])
            todo = (x for x in deps if
                    isinstance(x.type, LibraryType) or isinstance(x.type, SharedLibraryType))
            for t in todo:
                if t in found:
                    continue
                if isinstance(t.type, LibraryType): # dependencies of shared libraries are not transitive
                    self._find_linkable_deps(t, found, recursed)
                found.append(t)
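
The ordering trick described in the comment is easiest to see on concrete data. A simplified, model-free rendition, assuming a hypothetical app that depends on liba and libb, where liba itself needs libb:

def find_linkable_deps(target, deps_of, found, recursed):
    # Simplified version of the method above: 'deps_of' maps a target name
    # to its Unix-ordered list of library dependencies, and recursion is
    # applied to every dependency (the real code skips shared libraries).
    if target in recursed:
        raise RuntimeError("circular dependency between targets")
    recursed.add(target)
    for t in reversed(deps_of.get(target, [])):
        if t in found:
            continue
        find_linkable_deps(t, deps_of, found, recursed)
        found.append(t)

deps_of = {"app": ["liba", "libb"], "liba": ["libb"]}
found = []
find_linkable_deps("app", deps_of, found, set())
# 'found' is ['libb', 'liba']; the caller reverses it to get the
# Unix link order, with liba to the left of the libb it depends on:
print(list(reversed(found)))  # ['liba', 'libb']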
Example #4
def _get_matching_project_config(cfg, prj):
    """
    Returns the best matching project configuration for the given solution configuration.
    """
    with error_context(prj):
        if cfg in prj.configurations:
            return cfg

        # else: try to find a configuration closest to the given one, i.e.
        # the one from which it inherits via the minimal number of
        # intermediate configurations:
        compatibles = []
        for pc in prj.configurations:
            degree = cfg.derived_from(pc)
            if degree:
                compatibles.append((degree, pc))

        if not compatibles:
            # if we don't have any project configurations from which this
            # one inherits, check if we have any which inherit from this
            # one themselves as they should be a reasonably good fallback:
            for pc in prj.configurations:
                degree = pc.derived_from(cfg)
                if degree:
                    compatibles.append((degree, pc))

        if compatibles:
            if len(compatibles) > 1:
                compatibles.sort()
                # It can happen that we have 2 project configurations
                # inheriting from the solution configuration with the same
                # degree. In this case we can't really make the right
                # choice automatically, so we must warn the user.
                degree = compatibles[0][0]
                if compatibles[1][0] == degree:
                    good_ones = [x[1].name for x in compatibles if x[0] == degree]
                    warning("project %s: no unambiguous choice of project configuration to use for the solution configuration \"%s\", equally good candidates are: \"%s\"",
                            prj.projectfile,
                            cfg.name,
                            '", "'.join(good_ones))

            degree, ret = compatibles[0]
            logger.debug("solution config \"%s\" -> project %s config \"%s\" (dg %d)",
                         cfg.name, prj.projectfile, ret.name, degree)
            return ret

        # if all failed, just pick the first config, but at least try to match
        # debug/release setting:
        compatibles = [x for x in prj.configurations if x.is_debug == cfg.is_debug]
        if compatibles:
            ret = compatibles[0]
            warning("project %s: using unrelated project configuration \"%s\" for solution configuration \"%s\"",
                    prj.projectfile, ret.name, cfg.name)
            return ret
        else:
            ret = prj.configurations[0]
            warning("project %s: using incompatible project configuration \"%s\" for solution configuration \"%s\"",
                    prj.projectfile, ret.name, cfg.name)
            return ret
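
The matching above only makes sense if `derived_from` returns the inheritance distance between two configurations (and a falsy value when they are unrelated), which is consistent with how its result is sorted and compared. A hypothetical minimal model of that contract:

class Config(object):
    # Hypothetical stand-in: 'base' chains form the inheritance hierarchy.
    def __init__(self, name, base=None, is_debug=False):
        self.name, self.base, self.is_debug = name, base, is_debug

    def derived_from(self, other):
        # Number of inheritance steps from 'self' up to 'other';
        # 0 (falsy) when 'other' is not an ancestor of 'self'.
        degree, cfg = 0, self
        while cfg is not None:
            if cfg is other:
                return degree
            cfg, degree = cfg.base, degree + 1
        return 0

release = Config("Release")
optimized = Config("Optimized", base=release)
assert optimized.derived_from(release) == 1   # one step away
assert release.derived_from(optimized) == 0   # not derived at all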
Example #5
    def generate(self, project):
        # generate vcxproj files and prepare solutions
        for m in project.modules:
            with error_context(m):
                self.gen_for_module(m)
        # Commit solutions; this must be done after processing all modules
        # because of inter-module dependencies and references.
        for m in project.modules:
            for sub in m.submodules:
                m.solution.add_subsolution(sub.solution)
        for m in project.modules:
            m.solution.write()
Example #6
    def generate(self, project):
        # We need to know build graphs of all targets so that we can generate
        # dependencies on produced files. Worse yet, we need to have them for
        # all modules before generating the output, because of cross-module
        # dependencies.
        # TODO-MT: read only, can be run in parallel
        from bkl.interpreter.passes import PathsNormalizer
        norm = PathsNormalizer(project)
        build_graphs = {}
        for t in project.all_targets():
            with error_context(t):
                if not t.should_build():
                    continue
                norm.set_context(t)
                graph = t.type.get_build_subgraph(self, t)
                for node in graph.all_nodes():
                    node.inputs = [norm.visit(e) for e in node.inputs]
                    node.outputs = [norm.visit(e) for e in node.outputs]
                    node.commands = [norm.visit(e) for e in node.commands]
                build_graphs[t] = graph

        for m in project.modules:
            with error_context(m):
                self._gen_makefile(build_graphs, m)
Example #7
File: analyze.py  Project: Fooway/bakefile
def detect_missing_generated_outputs(model):
    """
    Warns about generated source files not included in sources/headers.
    """
    for t in model.all_targets():
        for srcfile in t.all_source_files():
            with error_context(srcfile):
                if not srcfile["compile-commands"]:
                    continue
                sources = set(ch.name for ch in t.child_parts())
                outputs = set(i for c,i in bkl.expr.enum_possible_values(srcfile["outputs"]))
                for item in outputs:
                    partname = bkl.expr.get_model_name_from_path(item)
                    if partname not in sources:
                        warning("file %s generated from %s is not among sources or headers of target \"%s\"",
                                item, srcfile.filename, t.name, pos=item.pos)
예제 #10
0
def detect_missing_generated_outputs(model):
    """
    Warns about generated source files not included in sources/headers.
    """
    for t in model.all_targets():
        for srcfile in t.all_source_files():
            with error_context(srcfile):
                if not srcfile["compile-commands"]:
                    continue
                sources = set(ch.name for ch in t.child_parts())
                outputs = set(i for c, i in bkl.expr.enum_possible_values(
                    srcfile["outputs"]))
                for item in outputs:
                    partname = bkl.expr.get_model_name_from_path(item)
                    if partname not in sources:
                        warning(
                            "file %s generated from %s is not among sources or headers of target \"%s\"",
                            item,
                            srcfile.filename,
                            t.name,
                            pos=item.pos)
Example #8
    def _gen_makefile(self, build_graphs, module):
        output_value = module.get_variable_value("%s.makefile" % self.name)
        output = output_value.as_native_path_for_output(module)

        paths_info = expr.PathAnchorsInfo(
                dirsep="/", # FIXME - format-configurable
                outfile=output,
                builddir=self.get_module_builddir(module).as_native_path_for_output(module),
                model=module)

        mk_fmt = self.Formatter()
        expr_fmt = self.ExprFormatter(self, paths_info)

        f = io.OutputFile(output, io.EOL_UNIX, creator=self, create_for=module)
        self.on_header(f, module)
        f.write("""
# The directory for the build files, may be overridden on make command line.
builddir = .

""")

        self._gen_settings(module, mk_fmt, expr_fmt, f)

        #FIXME: make this part of the formatter for (future) IdRefExpr
        def _format_dep(t):
            g = build_graphs[t].main
            if len(g.outputs) == 0:
                assert g.name
                if t.parent is not module:
                    raise Error("cross-module dependencies on phony targets (\"%s\") not supported yet" % t.name) # TODO
                out = g.name
            else:
                # FIXME: handle multi-output nodes too
                assert len(g.outputs) == 1
                out = g.outputs[0]
            return expr_fmt.format(out)

        def _get_submodule_deps(main, submodule):
            """
            Return the list of dependencies that 'submodule' has on other submodules
            of 'main'.  Submodules have a dependency if a target from one depends on
            a target from another.
            """
            mod_deps = set()
            project = main.project
            inspect = [submodule] + [p for p in project.modules if p.is_submodule_of(submodule)]
            for mod in inspect:
                for target in mod.targets.itervalues():
                    for dep in target["deps"]:
                        tdep = project.get_target(dep.as_py())
                        tmod = tdep.parent
                        if tmod is main:
                            mod_deps.add(_format_dep(tdep))
                        elif tmod.is_submodule_of(main):
                            while tmod.parent is not main:
                                tmod = tmod.parent
                            if tmod is not submodule:
                                mod_deps.add(tmod.name)
            return sorted(mod_deps)

        # Write the "all" target:
        all_targets = (
                      [_format_dep(t) for t in module.targets.itervalues()] +
                      [sub.name for sub in module.submodules]
                      )
        f.write(mk_fmt.target(name="all", deps=all_targets, commands=None))

        phony_targets = ["all", "clean"]

        targets_from_submodules = OrderedDict()
        submakefiles = OrderedDict()
        for sub in module.submodules:
            subpath = sub.get_variable_value("%s.makefile" % self.name)
            # FIXME: use $dirname(), $basename() functions, this is hacky
            subdir = subpath.get_directory_path()
            subfile = subpath.components[-1]
            submakefiles[sub] = (sub.name,
                                 expr_fmt.format(subdir),
                                 expr_fmt.format(subfile),
                                 _get_submodule_deps(module, sub))
        for subname, subdir, subfile, subdeps in submakefiles.itervalues():
            subcmd = mk_fmt.submake_command(subdir, subfile, "all")
            f.write(mk_fmt.target(name=subname, deps=subdeps, commands=[subcmd]))
            phony_targets.append(subname)

        for t in module.targets.itervalues():
            with error_context(t):
                # collect target's dependencies
                target_deps = []
                for dep in t["deps"]:
                    tdep = module.project.get_target(dep.as_py())
                    tdepstr = _format_dep(tdep)
                    target_deps.append(tdepstr)
                    if tdep.parent is not module:
                        # link external dependencies with submodules to build them
                        tmod = tdep.parent
                        while tmod.parent is not None and tmod.parent is not module:
                            tmod = tmod.parent
                        if tmod in module.submodules:
                            targets_from_submodules[tdepstr] = tmod

                # generate code for the target's build graph:
                graph = build_graphs[t]
                for node in graph.all_nodes():
                    with error_context(node):
                        if node.outputs:
                            out = node.outputs
                        else:
                            out = [node.name]
                            phony_targets.append(expr_fmt.format(out[0]))

                        deps = [expr_fmt.format(i) for i in node.inputs]
                        if node is graph.main:
                            deps += target_deps

                        out_fmt = [expr_fmt.format(x) for x in out]
                        commands_fmt = [expr_fmt.format(c) for c in node.commands]
                        if len(out_fmt) == 1:
                            text = mk_fmt.target(name=out_fmt[0],
                                                 deps=deps,
                                                 commands=commands_fmt)
                        else:
                            text = mk_fmt.multifile_target(
                                                 outfiles=out_fmt,
                                                 deps=deps,
                                                 commands=commands_fmt)
                        f.write(text)
                        all_targets += out_fmt

        # dependencies on submodules to build targets from them:
        if targets_from_submodules:
            f.write("# Targets from sub-makefiles:\n")
            for t, tsub in targets_from_submodules.iteritems():
                f.write(mk_fmt.target(name=t, deps=[submakefiles[tsub][0]], commands=None))

        # Write the "clean" target:
        clean_cmds = self._get_clean_commands(
                        mk_fmt, expr_fmt,
                        (build_graphs[t] for t in module.targets.itervalues()),
                        submakefiles.itervalues())
        f.write(mk_fmt.target(name="clean", deps=[], commands=clean_cmds))

        self.on_phony_targets(f, phony_targets)
        self.on_footer(f, module)

        f.commit()
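
Most of the output above goes through `mk_fmt.target()`. The real formatters are toolset-specific; a rough sketch of the rule such a method might render, under assumed make syntax (this is an illustration, not bakefile's actual Formatter class):

class MakefileFormatterSketch(object):
    # Hypothetical illustration of the formatter interface used above.
    def target(self, name, deps, commands):
        # One make rule: "name: deps" followed by tab-indented commands.
        rule = "%s: %s\n" % (name, " ".join(deps or []))
        for cmd in commands or []:
            rule += "\t%s\n" % cmd
        return rule + "\n"

fmt = MakefileFormatterSketch()
print(fmt.target(name="all", deps=["app", "tests"], commands=None))
# all: app tests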
Example #9
    def get_handler(self, target):
        with error_context(target["file"]):
            return ExternalBuildHandler.get_for_file(target["file"].as_native_path_for_output(target))
Example #10
def _get_matching_project_config(cfg, prj):
    """
    Returns the best matching project configuration for the given solution configuration.
    """
    with error_context(prj):
        # If the project doesn't have any configurations, it means that we
        # failed to parse it properly, presumably because it defines its
        # configurations (and platforms, see _get_matching_project_platform()
        # too) in a separately imported file. Ideal would be to follow the
        # import chain, but this is not trivial, e.g. we would need to parse
        # and evaluate MSBuild functions to find the full path of the file
        # being imported, so for now we just optimistically assume that the
        # project supports all solution configurations because it's the only
        # thing we can do, the only alternative would be to refuse to use it
        # completely.
        if cfg in prj.configurations or not prj.configurations:
            return cfg

        # else: try to find a configuration closest to the given one, i.e.
        # the one from which it inherits via the minimal number of
        # intermediate configurations:
        compatibles = []
        for pc in prj.configurations:
            degree = cfg.derived_from(pc)
            if degree:
                compatibles.append((degree, pc))

        if not compatibles:
            # if we don't have any project configurations from which this
            # one inherits, check if we have any which inherit from this
            # one themselves as they should be a reasonably good fallback:
            for pc in prj.configurations:
                degree = pc.derived_from(cfg)
                if degree:
                    compatibles.append((degree, pc))

        if compatibles:
            if len(compatibles) > 1:
                compatibles.sort()
                # It can happen that we have 2 project configurations
                # inheriting from the solution configuration with the same
                # degree. In this case we can't really make the right
                # choice automatically, so we must warn the user.
                degree = compatibles[0][0]
                if compatibles[1][0] == degree:
                    good_ones = [x[1].name for x in compatibles if x[0] == degree]
                    warning("project %s: no unambiguous choice of project configuration to use for the solution configuration \"%s\", equally good candidates are: \"%s\"",
                            prj.projectfile,
                            cfg.name,
                            '", "'.join(good_ones))

            degree, ret = compatibles[0]
            logger.debug("solution config \"%s\" -> project %s config \"%s\" (dg %d)",
                         cfg.name, prj.projectfile, ret.name, degree)
            return ret

        # if all failed, just pick the first config, but at least try to match
        # debug/release setting:
        compatibles = [x for x in prj.configurations if x.is_debug == cfg.is_debug]
        if compatibles:
            ret = compatibles[0]
            warning("project %s: using unrelated project configuration \"%s\" for solution configuration \"%s\"",
                    prj.projectfile, ret.name, cfg.name)
            return ret
        else:
            ret = prj.configurations[0]
            warning("project %s: using incompatible project configuration \"%s\" for solution configuration \"%s\"",
                    prj.projectfile, ret.name, cfg.name)
            return ret
Example #11
    def _gen_makefile(self, build_graphs, module):
        # Flag indicating whether this makefile actually builds anything.
        self.uses_builddir = False

        output_value = module.get_variable_value("%s.makefile" % self.name)
        output = output_value.as_native_path_for_output(module)

        paths_info = expr.PathAnchorsInfo(
                dirsep="/", # FIXME - format-configurable
                outfile=output,
                builddir=None,
                model=module)

        mk_fmt = self.Formatter()
        expr_fmt = self.ExprFormatter(self, paths_info)

        f = io.OutputFile(output, io.EOL_UNIX, creator=self, create_for=module)
        self.on_header(f, module)

        self._gen_settings(module, mk_fmt, expr_fmt, f)

        #FIXME: make this part of the formatter for (future) IdRefExpr
        def _format_dep(t):
            g = build_graphs[t].main
            if len(g.outputs) == 0:
                assert g.name
                if t.parent is not module:
                    raise Error("cross-module dependencies on phony targets (\"%s\") not supported yet" % t.name) # TODO
                out = g.name
            else:
                # FIXME: handle multi-output nodes too
                assert len(g.outputs) == 1
                out = g.outputs[0]
            return expr_fmt.format(out)

        def _get_submodule_deps(main, submodule):
            """
            Return the list of dependencies that 'submodule' has on other submodules
            of 'main'.  Submodules have a dependency if a target from one depends on
            a target from another.
            """
            mod_deps = set()
            project = main.project
            inspect = [submodule] + [p for p in project.modules if p.is_submodule_of(submodule)]
            for mod in inspect:
                for target in mod.targets.itervalues():
                    for dep in target["deps"]:
                        tdep = project.get_target(dep.as_py())
                        tmod = tdep.parent
                        if tmod is main:
                            mod_deps.add(_format_dep(tdep))
                        elif tmod.is_submodule_of(main):
                            while tmod.parent is not main:
                                tmod = tmod.parent
                            if tmod is not submodule:
                                mod_deps.add(tmod.name)
            return sorted(mod_deps)

        # Write the "all" target:
        all_targets = (
                      [_format_dep(t) for t in module.targets.itervalues()] +
                      [sub.name for sub in module.submodules]
                      )
        f.write(mk_fmt.target(name="all", deps=all_targets, commands=None))

        phony_targets = ["all", "clean"]

        targets_from_submodules = OrderedDict()
        submakefiles = OrderedDict()
        for sub in module.submodules:
            subpath = sub.get_variable_value("%s.makefile" % self.name)
            # FIXME: use $dirname(), $basename() functions, this is hacky
            subdir = subpath.get_directory_path()
            subfile = subpath.components[-1]
            submakefiles[sub] = (sub.name,
                                 expr_fmt.format(subdir),
                                 expr_fmt.format(subfile),
                                 _get_submodule_deps(module, sub))
        for subname, subdir, subfile, subdeps in submakefiles.itervalues():
            subcmd = mk_fmt.submake_command(subdir, subfile, "all")
            f.write(mk_fmt.target(name=subname, deps=subdeps, commands=[subcmd]))
            phony_targets.append(subname)

        for t in module.targets.itervalues():
            with error_context(t):
                # collect target's dependencies
                target_deps = []
                for dep in t["deps"]:
                    tdep = module.project.get_target(dep.as_py())
                    tdepstr = _format_dep(tdep)
                    target_deps.append(tdepstr)
                    if tdep.parent is not module:
                        # link external dependencies with submodules to build them
                        tmod = tdep.parent
                        while tmod.parent is not None and tmod.parent is not module:
                            tmod = tmod.parent
                        if tmod in module.submodules:
                            targets_from_submodules[tdepstr] = tmod

                # generate code for the target's build graph:
                graph = build_graphs[t]
                for node in graph.all_nodes():
                    with error_context(node):
                        if node.outputs:
                            out = node.outputs
                        else:
                            out = [node.name]
                            phony_targets.append(expr_fmt.format(out[0]))

                        deps = [expr_fmt.format(i) for i in node.inputs]
                        if node is graph.main:
                            deps += target_deps

                        out_fmt = [expr_fmt.format(x) for x in out]
                        commands_fmt = [expr_fmt.format(c) for c in node.commands]
                        if len(out_fmt) == 1:
                            text = mk_fmt.target(name=out_fmt[0],
                                                 deps=deps,
                                                 commands=commands_fmt)
                        else:
                            text = mk_fmt.multifile_target(
                                                 outputs=out,
                                                 outfiles=out_fmt,
                                                 deps=deps,
                                                 commands=commands_fmt)
                        f.write(text)
                        all_targets += out_fmt

        # dependencies on submodules to build targets from them:
        if targets_from_submodules:
            f.write("# Targets from sub-makefiles:\n")
            for t, tsub in targets_from_submodules.iteritems():
                f.write(mk_fmt.target(name=t, deps=[submakefiles[tsub][0]], commands=None))

        # Write the "clean" target:
        clean_cmds = self._get_clean_commands(
                        mk_fmt, expr_fmt,
                        (build_graphs[t] for t in module.targets.itervalues()),
                        submakefiles.itervalues())
        f.write(mk_fmt.target(name="clean", deps=[], commands=clean_cmds))

        self.on_phony_targets(f, phony_targets)
        self.on_footer(f, module)

        f.commit()