Example #1
    def configure(self, build_graph: Graph):
        config = ""

        node_ids = {}
        artifact_id = 0
        for layer in build_graph.layers():
            for node in layer:
                for artifact in node.artifacts():
                    config += f"path {artifact.path} #{id}\n"

                    if artifact.dependencies:
                        config += f"deps {' '.join([str(node_ids[node]) for node in artifact.dependencies])}\n"

                    for cmd in artifact.commands:
                        config += "run"
                        for arg in cmd:
                            config += f' "{arg}"'
                        config += '\n'

                    for cmd in artifact.always:
                        config += "always"
                        for arg in cmd:
                            config += f' "{arg}"'
                        config += '\n'

                    # Only the id of the node's final artifact is used by other nodes.
                    node_ids[node] = artifact_id
                    artifact_id += 1

        G_LOGGER.info(f"Generating configuration files in build directory: {self.build_dir}")
        with open(self.config_file, "w") as f:
            G_LOGGER.debug(f"Writing {self.config_file}")
            f.write(config)
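
For reference, the configuration text emitted above consists of `path`, `deps`, `run`, and `always` entries, one `path` line per artifact. A hypothetical fragment for a single artifact (the paths, ids, and commands below are made up for illustration) might look like:

    path build/objs/main.o #3
    deps 0 2
    run "g++" "-c" "src/main.cpp" "-o" "build/objs/main.o"
    always "echo" "rebuilt main.o"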
Example #2
 def __init__(self, path: str, inputs: List["Node"] = []):
     self.path = path
     self.inputs: List[Node] = []
     self.outputs: List[Node] = []
     G_LOGGER.debug(
         f"Constructing {type(self)} with path: {self.path}, with {len(inputs)} inputs: {inputs}"
     )
     for inp in inputs:
         self.add_input(inp)
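
A minimal usage sketch with hypothetical paths, assuming `add_input` wires each given node into `self.inputs`:

    src = Node("src/main.cpp")
    obj = Node("build/main.o", inputs=[src])
    # After construction, obj.inputs should contain src (and src.outputs likely references obj).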
Example #3
    def build(self, nodes: List[Node]) -> Tuple[subprocess.CompletedProcess, float]:
        # Early exit if no targets were provided
        if not nodes:
            G_LOGGER.debug(f"No targets specified, skipping build.")
            return subprocess.CompletedProcess(args=[], returncode=0, stdout=b"", stderr=b"No targets specified"), 0

        paths = [node.path for node in nodes]
        cmd = ["rbuild", "--threads", str(multiprocessing.cpu_count()), f"{self.config_file}"] + paths
        G_LOGGER.verbose(f"Build command: {' '.join(cmd)}\nTarget file paths: {paths}")
        return utils.time_subprocess(cmd)
Example #4
    @staticmethod
    def load(path: str = None) -> "Project":
        f"""
        Load a project from the specified path.

        :param path: The path from which to load the project. Defaults to {os.path.abspath(os.path.join("build", Project.DEFAULT_SAVED_PROJECT_NAME))}

        :returns: The loaded project.
        """
        path = path or os.path.abspath(
            os.path.join("build", Project.DEFAULT_SAVED_PROJECT_NAME))
        G_LOGGER.debug(f"Loading project from {path}")
        with open(path, "rb") as f:
            return pickle.load(f)
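
A minimal usage sketch based on the signature above; the default location is `build/<Project.DEFAULT_SAVED_PROJECT_NAME>`, and the explicit path below is hypothetical:

    project = Project.load()                    # load from the default location
    project = Project.load("build/my_project")  # or from an explicit path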
Example #5
 def _run_linked_node(self, node: LinkedNode, *args,
                      **kwargs) -> subprocess.CompletedProcess:
     loader_path = os.environ.get(paths.loader_path_env_var(), "")
     G_LOGGER.verbose(f"Running linked node: {node}")
     for lib_dir in node.lib_dirs:
         loader_path += f"{os.path.pathsep}{lib_dir}"
     G_LOGGER.debug(f"Using loader paths: {loader_path}")
     G_LOGGER.log(
         f"{paths.loader_path_env_var()}={loader_path} {node.path}\n",
         colors=[Color.BOLD, Color.GREEN])
     env = copy.copy(os.environ)
     env[paths.loader_path_env_var()] = loader_path
     return subprocess.run([node.path], *args, env=env, **kwargs)
Example #6
 def select_nodes(targets: List[ProjectTarget],
                  profile_names: List[str]) -> List[Node]:
     # Create all required profile build directories and populate nodes.
     nodes = []
     for prof_name in profile_names:
         if prof_name not in self.profiles:
             G_LOGGER.critical(
                 f"Profile {prof_name} does not exist in the project. Available profiles: {self.all_profile_names()}"
             )
         # Populate nodes.
         for target in targets:
             if prof_name in target:
                 node = target[prof_name]
                 G_LOGGER.verbose(
                     f"For target: {target}, profile: {prof_name}, found path: {node.path}"
                 )
                 nodes.append(node)
             else:
                 G_LOGGER.debug(
                     f"Skipping target: {target.name} for profile: {prof_name}, as it does not exist."
                 )
     return nodes
Example #7
 def link(
     self,
     input_paths: List[str],
     output_path: str,
     libs: List[str] = [],
     lib_dirs: List[str] = [],
     flags: BuildFlags = BuildFlags()) -> List[str]:
     G_LOGGER.debug(f"self.ldef: {self.ldef}")
     linker_flags = self.ldef.parse_flags(flags)
     lib_dirs = [self.ldef.lib_dir(dir) for dir in lib_dirs]
     # In libs, absolute paths are not prepended with the lib prefix (e.g. -l)
     libs = [
         lib if os.path.isabs(lib) else self.ldef.lib(lib) for lib in libs
     ]
     # The full command.
     cmd = ([self.ldef.executable()] + input_paths + libs + linker_flags +
            lib_dirs + [self.ldef.output(output_path)])
     G_LOGGER.debug(
         f"Linking: input_paths: {input_paths}, libs: {libs}, linker_flags: {linker_flags}, lib_dirs: {lib_dirs}"
     )
     G_LOGGER.verbose(f"Link Command: {' '.join(cmd)}")
     return cmd
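
As a rough sketch of the resulting command (all tokens below are hypothetical; the real ones come from the `self.ldef` helpers), a call such as `link(["main.o"], "app", libs=["m"], lib_dirs=["/opt/lib"])` with a GCC-style toolchain would return a list laid out as [executable] + input_paths + libs + linker_flags + lib_dirs + [output], e.g.:

    ["g++", "main.o", "-lm", "-O3", "-L/opt/lib", "-oapp"]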
Example #8
    def __init__(self, root_dir: str, dirs: Set[str] = set(), exclude_dirs: Set[str] = set(), writable_dirs: Set[str] = set()):
        self.exclude_dirs: Set[str] = exclude_dirs
        # writable_dirs are the only locations to which FileManager is allowed to write.
        self.writable_dirs: Set[str] = writable_dirs
        G_LOGGER.verbose(f"Excluded directories: {exclude_dirs}. Writable directories: {writable_dirs}")

        # Include dirs/"header" files are only considered when searching for includes.
        self.include_dirs: List[str] = []
        self.header_files: List[str] = [] # List to enable header priority

        self.files: Set[str] = set()

        self.root_dir = os.path.abspath(root_dir)
        if not os.path.isdir(self.root_dir):
            G_LOGGER.critical(f"Root Directory: {self.root_dir} does not exist, or is not a directory.")
        self.add_dir(self.root_dir)

        # Remove directories that are within exclude_dirs after converting all directories to abspaths.
        for dir in dirs:
            self.add_dir(dir)
        G_LOGGER.debug(f"Found {len(self.files)} files")
        G_LOGGER.verbose(f"{self.files}")
        # Keep track of all files relevant to building the project.
        self.graph = Graph()
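
A minimal construction sketch with hypothetical directories; `root_dir` must already exist, and anything under `exclude_dirs` is meant to be skipped when collecting files:

    files = FileManager("/home/dev/my_project",
                        dirs={"/home/dev/my_project/include"},
                        exclude_dirs={"/home/dev/my_project/build"})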
Example #9
    def scan(self, node: Node) -> None:
        G_LOGGER.debug(f"Scanning {node.path}")

        # Finds the file path for the file included in `include_path` by the `included_token` token.
        # This always returns an absolute path, since self.find always returns absolute paths.
        def disambiguate_included_file(included_token: str, include_path: str) -> str:
            # TODO: Handle paths that start with ../
            # Such paths should always be relative to the file itself, otherwise it's an error.
            if include_path.startswith(os.path.pardir):
                raise NotImplementedError(f"FileManager does not currently support includes containing {os.path.pardir}")

            candidates = self.find(included_token, search_include_dirs=True)
            if len(candidates) == 0:
                return None

            # TODO: Move this into misc.paths, add test case.
            # Determines how "close together" files are. Smaller numbers mean they are further apart in the tree.
            def file_proximity(path_a: str, path_b: str) -> int:
                return len(os.path.commonpath([path_a, path_b]).split(os.path.sep))

            # Return the path that is closest to the including file
            closest_path = max(candidates, key=lambda candidate: file_proximity(candidate, include_path))

            if len(candidates) > 1:
                G_LOGGER.warning(f"For {include_path}, found multiple possible headers, but determined that {closest_path} best matches include for {included_token}. If this is not the case, please provide a longer path in the include to disambiguate, or manually provide the correct include directories. Note, candidates were: {candidates}")
            return closest_path

        # TODO: Handle relative paths in `included` here.
        # TODO: FIXME: This will not work if the include has escaped characters in it.
        def get_path_include_dir(included_path: str, included_token: str) -> str:
            # included_path is the full path of the included file.
            # included_token is the token used to include the file.
            # Absolute paths do not require include directories.
            if os.path.isabs(included_token):
                return None
            include_dir = included_path[:-len(included_token)]
            if not os.path.isdir(include_dir):
                # It would be completely ridiculous if this message were ever actually displayed.
                G_LOGGER.critical(f"While attempting to find include dir to use for {included_path} (Note: included in {path}), found that {include_dir} does not exist!")
            return os.path.abspath(include_dir)

        # Find all included files in this file. If they are in the project, recurse over them.
        # Otherwise, assume they are external headers.
        include_dirs = set()
        external_includes = set()
        path = node.path
        included_files = _find_included(path)
        for included in included_files:
            # Determines the most likely file path based on an include.
            included_path = disambiguate_included_file(included, path)
            if included_path:
                G_LOGGER.verbose(f"For included token {included}, found path: {included_path}")
                # The include dir needed for included_path depends on how exactly it was included in this file.
                include_dir = get_path_include_dir(included_path, included)
                if include_dir:
                    G_LOGGER.verbose(f"For path {included_path}, using include dir: {include_dir}")
                    include_dirs.add(include_dir)
                # Also recurse over any include directories needed for the path itself
                included_path_node = self.source(included_path)
                if included_path_node.include_dirs is None:
                    G_LOGGER.verbose(f"{included_path_node} does not specify include directories. Scanning file.")
                    self.scan(included_path_node)
                include_dirs.update(included_path_node.include_dirs)
                node.add_input(included_path_node)
            else:
                external_includes.add(included)
        if external_includes:
            G_LOGGER.debug(f"For {path}, could not find headers: {external_includes}. Assuming they are external. If this is not the case, please add the appropriate directories to the project definition.")

        include_dirs = sorted(include_dirs)
        node.include_dirs = include_dirs
        G_LOGGER.debug(f"For {path}, found include dirs: {include_dirs}")
        G_LOGGER.verbose(f"Updated source graph to: {self.graph}")
Example #10
    def build(self,
              targets: List[ProjectTarget] = None,
              profile_names: List[str] = None) -> float:
        """
        Builds the specified targets for this project. Configuration should be run prior to calling this function.

        :param targets: The targets to build. Defaults to all targets.
        :param profile_names: The profiles for which to build the targets. Defaults to all profiles.

        :returns: Time elapsed during the build.
        """
        targets = utils.default_value(targets, self.all_targets())
        profile_names = utils.default_value(profile_names,
                                            self.all_profile_names())
        G_LOGGER.info(
            f"Building targets: {[target.name for target in targets]} for profiles: {profile_names}"
        )
        G_LOGGER.debug(f"Targets: {targets}")

        def select_nodes(targets: List[ProjectTarget],
                         profile_names: List[str]) -> List[Node]:
            # Create all required profile build directories and populate nodes.
            nodes = []
            for prof_name in profile_names:
                if prof_name not in self.profiles:
                    G_LOGGER.critical(
                        f"Profile {prof_name} does not exist in the project. Available profiles: {self.all_profile_names()}"
                    )
                # Populate nodes.
                for target in targets:
                    if prof_name in target:
                        node = target[prof_name]
                        G_LOGGER.verbose(
                            f"For target: {target}, profile: {prof_name}, found path: {node.path}"
                        )
                        nodes.append(node)
                    else:
                        G_LOGGER.debug(
                            f"Skipping target: {target.name} for profile: {prof_name}, as it does not exist."
                        )
            return nodes

        nodes = select_nodes(targets, profile_names)
        if not nodes:
            return

        # Create all required build directories.
        self.files.mkdir(self.common_build_dir)
        profile_build_dirs = [
            self.profiles[prof_name].build_dir for prof_name in profile_names
        ]
        for dir in profile_build_dirs:
            self.files.mkdir(dir)
        G_LOGGER.verbose(
            f"Created build directories: {self.common_build_dir}, {profile_build_dirs}"
        )

        if not self.backend:
            G_LOGGER.critical(
                f"Backend has not been configured. Please call `configure()` prior to attempting to build"
            )
        status, time_elapsed = self.backend.build(nodes)
        if status.returncode:
            G_LOGGER.critical(
                f"Failed with to build. Reconfiguring the project or running a clean build may resolve this."
            )
        G_LOGGER.info(
            f"Built {plural('target', len(targets))} for {plural('profile', len(profile_names))} in {time_elapsed} seconds."
        )
        return time_elapsed
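
A minimal usage sketch based on the docstring above (the target and profile names are hypothetical):

    # Build every target for every profile:
    elapsed = project.build()

    # Or restrict the build to specific targets and profiles:
    elapsed = project.build(targets=[my_lib_target], profile_names=["release"])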
Example #11
    def _target(
        self,
        name: str,
        ext_path: str,
        sources: List[str],
        flags: BuildFlags,
        libs: List[Union[DependencyLibrary, ProjectTarget, Library]],
        compiler: compiler.Compiler,
        include_dirs: List[str],
        linker: linker.Linker,
        depends: List[Dependency],
        internal: bool,
        is_lib: bool,
    ) -> ProjectTarget:

        if not all(isinstance(lib, (ProjectTarget, Library, DependencyLibrary)) for lib in libs):
            G_LOGGER.critical(
                "Libraries must be instances of either sbuildr.Library, sbuildr.dependencies.DependencyLibrary or sbuildr.ProjectTarget"
            )

        if os.path.basename(ext_path) != ext_path:
            G_LOGGER.critical(
                f"Target: {ext_path} looks like a path. Target names should not contain characters that are unsupported by the filesystem."
            )

        dependencies: List[Dependency] = list(depends)  # Create a copy
        for lib in libs:
            if isinstance(lib, DependencyLibrary):
                dependencies.append(lib.dependency)
                # Add all Library targets from dependencies to the file manager's graph, since they are independent of profiles
                # TODO: Add `library` function to FileManager
                self.files.graph.add(lib.library)
                G_LOGGER.verbose(f"Adding {lib.library} to file manager.")
        # Inherit dependencies from any input libraries as well
        for lib in libs:
            if isinstance(lib, ProjectTarget):
                dependencies.extend(lib.dependencies)

        libs: List[Union[ProjectTarget, Library]] = [
            lib.library if isinstance(lib, DependencyLibrary) else lib
            for lib in libs
        ]

        source_nodes: List[CompiledNode] = [
            self.files.source(path) for path in sources
        ]
        G_LOGGER.verbose(
            f"For sources: {sources}, found source paths: {source_nodes}")

        target = ProjectTarget(name=name,
                               internal=internal,
                               is_lib=is_lib,
                               dependencies=dependencies)
        for profile_name, profile in self.profiles.items():
            # Convert all libraries to nodes. These will be inputs to the target.
            # Profile will later convert them to library names and directories.
            lib_nodes: List[Library] = [
                lib[profile_name] if isinstance(lib, ProjectTarget) else lib
                for lib in libs
            ]
            input_nodes = list(lib_nodes)
            G_LOGGER.verbose(
                f"Library inputs for target: {name} are: {input_nodes}")

            # Per-target flags always overwrite profile flags. Use a separate variable
            # so that one profile's flags do not leak into the next profile's.
            target_flags = profile.flags + flags

            # First, add or retrieve object nodes for each source.
            for source_node in source_nodes:
                obj_path = os.path.join(
                    self.common_build_dir,
                    f"{os.path.splitext(os.path.basename(source_node.path))[0]}.o"
                )
                # User-defined includes are always prepended to the ones deduced for SourceNodes.
                obj_node = CompiledNode(obj_path, source_node, compiler,
                                        include_dirs, target_flags)
                input_nodes.append(profile.graph.add(obj_node))

            # Hard links are needed because during linkage, the library must have a clean name.
            hashed_path = os.path.join(self.common_build_dir, ext_path)
            path = os.path.join(profile.build_dir,
                                paths.insert_suffix(ext_path, profile.suffix))
            target[profile_name] = profile.graph.add(
                LinkedNode(path,
                           input_nodes,
                           linker,
                           hashed_path=hashed_path,
                           flags=target_flags))
            G_LOGGER.debug(
                f"Adding target: {name}, with hashed path: {hashed_path}, public path: {path} to profile: {profile_name}"
            )
        return target
Example #12
from sbuildr.project.project import Project
from sbuildr.project.profile import Profile
from sbuildr.tools.flags import BuildFlags
from sbuildr.tools import compiler, linker
from sbuildr.graph.node import Library
from sbuildr.logger import G_LOGGER, SBuildrException, Verbosity

__version__ = "0.6.4"

G_LOGGER.debug(f"Loading SBuildr {__version__} from {__path__}")