Example #1
    def configure(self, build_graph: Graph):
        config = ""

        node_ids = {}
        artifact_id = 0
        for layer in build_graph.layers():
            for node in layer:
                for artifact in node.artifacts():
                    config += f"path {artifact.path} #{id}\n"

                    if artifact.dependencies:
                        config += f"deps {' '.join([str(node_ids[node]) for node in artifact.dependencies])}\n"

                    for cmd in artifact.commands:
                        config += "run"
                        for arg in cmd:
                            config += f' "{arg}"'
                        config += '\n'

                    for cmd in artifact.always:
                        config += "always"
                        for arg in cmd:
                            config += f' "{arg}"'
                        config += '\n'

                    # Only the id for the final artifact is used by other nodes
                    node_ids[node] = artifact_id
                    artifact_id += 1

        G_LOGGER.info(f"Generating configuration files in build directory: {self.build_dir}")
        with open(self.config_file, "w") as f:
            G_LOGGER.debug(f"Writing {self.config_file}")
            f.write(config)
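The configuration this method emits is a simple line-oriented format: one path line per artifact, tagged with its numeric id, followed by optional deps, run, and always lines. For a hypothetical two-artifact graph (paths and commands invented purely for illustration), the generated file would look roughly like:

    path build/foo.o #0
    run "g++" "-c" "foo.cpp" "-o" "build/foo.o"
    path build/libfoo.so #1
    deps 0
    run "g++" "-shared" "build/foo.o" "-o" "build/libfoo.so"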
Example #2
 def uninstall_header(header):
     uninstall_path = os.path.join(header_install_path,
                                   os.path.basename(header))
     if dry_run:
         G_LOGGER.info(
             f"Would remove header: {header} from {uninstall_path}")
     else:
         os.remove(uninstall_path)
         G_LOGGER.info(
             f"Uninstalled header: {header} from {uninstall_path}")
Example #3
 def install_header(header):
     install_path = os.path.join(header_install_path,
                                 os.path.basename(header))
     if dry_run:
         G_LOGGER.info(
             f"Would install header: {header} to {install_path}")
     else:
         if utils.copy_path(header, install_path):
             G_LOGGER.info(
                 f"Installed header: {header} to {install_path}")
Example #4
        def find_dependencies():
            unique_deps: Set[Dependency] = set()
            for target in targets:
                unique_deps.update(target.dependencies)

            required_deps = self.public_header_dependencies + list(unique_deps)
            G_LOGGER.info(f"Fetching dependencies: {required_deps}")
            for dep in required_deps:
                meta = dep.setup()
                self.files.add_include_dir(dep.include_dir())
                for include_dir in meta.include_dirs:
                    self.files.add_include_dir(include_dir)
Example #5
    def export(self, path: str = None) -> None:
        f"""
        Export this project to the specified path. This enables the project to be used with SBuildr's dependency management system, as well as with the command-line sbuildr utility.

        :param path: The path at which to export the project. Defaults to {Project.DEFAULT_SAVED_PROJECT_NAME} in the project's build directory.
        """
        path = path or os.path.join(self.build_dir,
                                    Project.DEFAULT_SAVED_PROJECT_NAME)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        G_LOGGER.info(f"Exporting project to {path}")
        with open(path, "wb") as f:
            pickle.dump(self, f)
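Since export writes the project with pickle.dump, the exported file can be read back with pickle.load. A minimal round-trip sketch, assuming project is an existing Project instance (the loading side is inferred from the pickle.dump call above and is not part of these examples):

    import os
    import pickle

    project.export()  # Writes to Project.DEFAULT_SAVED_PROJECT_NAME in the build directory.

    saved = os.path.join(project.build_dir, Project.DEFAULT_SAVED_PROJECT_NAME)
    with open(saved, "rb") as f:
        loaded_project = pickle.load(f)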
Example #6
 def uninstall_target(target, prof_name):
     node: LinkedNode = target[prof_name]
     uninstall_dir = library_install_path if target.is_lib else executable_install_path
     uninstall_path = os.path.join(uninstall_dir,
                                   os.path.basename(node.path))
     if dry_run:
         G_LOGGER.info(
             f"Would remove target: {node.path} from {uninstall_path}")
     else:
         os.remove(uninstall_path)
         G_LOGGER.info(
             f"Uninstalled target: {node.path} from {uninstall_path}")
Example #7
 def install_target(target, prof_name):
     node: LinkedNode = target[prof_name]
     install_dir = library_install_path if target.is_lib else executable_install_path
     install_path = os.path.join(install_dir,
                                 os.path.basename(node.path))
     if dry_run:
         G_LOGGER.info(
             f"Would install target: {node.path} to {install_path}")
     else:
         if utils.copy_path(node.path, install_path):
             G_LOGGER.info(
                 f"Installed target: {node.path} to {install_path}")
Example #8
    def setup(self, force=False) -> DependencyMetadata:
        """
        Fetch, build, and install the dependency if it does not exist in the cache. After setting up the dependency, all references to libraries in the dependency are updated according to the metadata reported by the builder. If the dependency is found in the cache, the metadata is loaded from the cache instead.

        :param force: Force the dependency to be fetched, built and installed, even if it already exists in the cache.

        :returns: The metadata for this dependency.
        """
        # Create the destination directory for the fetcher
        os.makedirs(self.fetcher.dest_dir, exist_ok=True)

        def update_package_root():
            name = self.fetcher.dependency_name
            self.version = self.fetcher.version()
            dir = f"{name}-{self.version}" if self.version else name
            self.package_root = os.path.join(self.cache_root, Dependency.CACHE_PACKAGES_SUBDIR, dir)

        update_package_root()
        metadata_path = os.path.join(self.package_root, Dependency.METADATA_FILENAME)
        meta = None
        if os.path.exists(metadata_path):
            meta = DependencyMetadata.load(metadata_path)

        if force or meta is None or meta.META_API_VERSION != DependencyMetadata.META_API_VERSION:
            G_LOGGER.info(f"{self.package_root} does not contain package metadata. Fetching dependency.")
            self.fetcher.fetch()
            # Install
            lib_dir = os.path.join(self.package_root, Dependency.PACKAGE_LIBRARY_SUBDIR)
            exec_dir = os.path.join(self.package_root, Dependency.PACKAGE_EXECUTABLE_SUBDIR)
            meta = self.builder.install(self.fetcher.dest_dir, header_dir=self.include_dir(), lib_dir=lib_dir, exec_dir=exec_dir)
            meta.save(metadata_path)

        # TODO: FIXME: Make this more resilient to copies by moving this logic to Project. FileManager already tracks all dependency libraries as Library nodes.
        # Next, update all libraries that have been requested from this dependency.
        for name, lib in self.libraries.items():
            if name not in meta.libraries:
                G_LOGGER.critical(f"Requested library: {name} is not present in dependency: {self.name}")
            metalib = meta.libraries[name]
            lib.path = metalib.path
            lib.libs.extend(metalib.libs)
            lib.lib_dirs.extend(metalib.lib_dirs)
            G_LOGGER.verbose(f"Correcting library: {name} to {lib}")
        return meta
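A minimal usage sketch, modeled on the call sites in Example #4; how the Dependency instance is constructed is not shown in these examples and is assumed to happen elsewhere:

    # `dep` is a Dependency configured elsewhere (constructor not shown in these examples).
    meta = dep.setup()  # Fetches, builds, and installs, or loads cached metadata.
    include_dirs = [dep.include_dir()] + list(meta.include_dirs)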
Example #9
    def clean(self, nuke: bool = False, dry_run: bool = True):
        """
        Removes build directories and project artifacts.

        :param nuke: Whether to remove all build directories associated with the project, including profile build directories.
        :param dry_run: Whether this is a dry-run, in which case SBuildr will only display which directories would be removed rather than removing them. Defaults to True.
        """
        # TODO(3): Add per-target cleaning.
        if dry_run:
            G_LOGGER.warning("Clean dry-run, will not remove files.")

        # By default, cleans all targets for all profiles.
        to_remove = [
            self.profiles[prof_name].build_dir
            for prof_name in self.all_profile_names()
        ] + [self.common_build_dir]
        G_LOGGER.info(
            f"Cleaning targets for profiles: {self.all_profile_names()}")
        if nuke:
            # The nuclear option
            to_remove += [self.build_dir]
            G_LOGGER.info(f"Initiating Nuclear Protocol!")
        # Remove
        for path in to_remove:
            if dry_run:
                G_LOGGER.info(f"Would remove: {path}")
            else:
                self.files.rm(path)
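Because dry_run defaults to True, a bare clean() only reports what would be removed. A minimal sketch, assuming project is a configured Project instance:

    project.clean()                          # Dry run: logs the directories that would be removed.
    project.clean(dry_run=False)             # Removes profile build dirs and the common build dir.
    project.clean(nuke=True, dry_run=False)  # Additionally removes the top-level build directory.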
Example #10
    def fetch(self) -> str:
        super().fetch()
        subprocess.run(["git", "init"], cwd=self.dest_dir, capture_output=True)

        # Stash any local changes made by external sources
        G_LOGGER.info(f"Stashing changes in {self.dest_dir}")
        subprocess.run(["git", "stash"],
                       capture_output=False,
                       cwd=self.dest_dir)

        checkout = self.commit or self.tag or self.branch
        G_LOGGER.info(
            f"Pulling: {self.url} at {checkout} into {self.dest_dir}")
        # TODO: Error checking here? Pull may fail if this is a local repo.
        subprocess.run(
            [
                "git", "pull", "--force", "--recurse-submodules", "--tags",
                self.url, checkout
            ],
            capture_output=False,
            cwd=self.dest_dir,
        )

        G_LOGGER.info(f"Checking out: {checkout}")
        checkout_status = subprocess.run(["git", "checkout", checkout],
                                         capture_output=True,
                                         cwd=self.dest_dir)
        if checkout_status.returncode:
            G_LOGGER.critical(
                f"Failed to checkout {checkout} with:\n{utils.subprocess_output(checkout_status)}"
            )
        return self.dest_dir
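The TODO above notes that the pull may fail (for example, against a local repository). A minimal sketch of how that step could be checked, mirroring the checkout error handling in the same method; this is an assumption about the desired behavior, not part of the original code:

    pull_status = subprocess.run(
        ["git", "pull", "--force", "--recurse-submodules", "--tags", self.url, checkout],
        capture_output=True,
        cwd=self.dest_dir,
    )
    if pull_status.returncode:
        G_LOGGER.critical(
            f"Failed to pull {checkout} with:\n{utils.subprocess_output(pull_status)}")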
Example #11
 def rm(self, path: str) -> bool:
     if _is_in_directories(path, self.writable_dirs):
         shutil.rmtree(path, ignore_errors=True)
         G_LOGGER.info(f"Removed: {path}")
         return True
     return False
Example #12
    def build(self,
              targets: List[ProjectTarget] = None,
              profile_names: List[str] = None) -> float:
        """
        Builds the specified targets for this project. Configuration should be run prior to calling this function.

        :param targets: The targets to build. Defaults to all targets.
        :param profile_names: The profiles for which to build the targets. Defaults to all profiles.

        :returns: Time elapsed during the build.
        """
        targets = utils.default_value(targets, self.all_targets())
        profile_names = utils.default_value(profile_names,
                                            self.all_profile_names())
        G_LOGGER.info(
            f"Building targets: {[target.name for target in targets]} for profiles: {profile_names}"
        )
        G_LOGGER.debug(f"Targets: {targets}")

        def select_nodes(targets: List[ProjectTarget],
                         profile_names: List[str]) -> List[Node]:
            # Create all required profile build directories and populate nodes.
            nodes = []
            for prof_name in profile_names:
                if prof_name not in self.profiles:
                    G_LOGGER.critical(
                        f"Profile {prof_name} does not exist in the project. Available profiles: {self.all_profile_names()}"
                    )
                # Populate nodes.
                for target in targets:
                    if prof_name in target:
                        node = target[prof_name]
                        G_LOGGER.verbose(
                            f"For target: {target}, profile: {prof_name}, found path: {node.path}"
                        )
                        nodes.append(node)
                    else:
                        G_LOGGER.debug(
                            f"Skipping target: {target.name} for profile: {prof_name}, as it does not exist."
                        )
            return nodes

        nodes = select_nodes(targets, profile_names)
        if not nodes:
            return 0.0  # Nothing to build; keep the declared float return type.

        # Create all required build directories.
        self.files.mkdir(self.common_build_dir)
        profile_build_dirs = [
            self.profiles[prof_name].build_dir for prof_name in profile_names
        ]
        for profile_build_dir in profile_build_dirs:
            self.files.mkdir(profile_build_dir)
        G_LOGGER.verbose(
            f"Created build directories: {self.common_build_dir}, {profile_build_dirs}"
        )

        if not self.backend:
            G_LOGGER.critical(
                f"Backend has not been configured. Please call `configure()` prior to attempting to build"
            )
        status, time_elapsed = self.backend.build(nodes)
        if status.returncode:
            G_LOGGER.critical(
                "Failed to build. Reconfiguring the project or running a clean build may resolve this."
            )
        G_LOGGER.info(
            f"Built {plural('target', len(targets))} for {plural('profile', len(profile_names))} in {time_elapsed} seconds."
        )
        return time_elapsed
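A minimal usage sketch; as the backend check above indicates, configure() must run before building (its signature is not shown in these examples), and the profile name used here is an assumption:

    # project.configure(...) must have been called first (see the backend check above).
    elapsed = project.build()                           # All targets for all profiles.
    elapsed = project.build(profile_names=["release"])  # Restrict to one (assumed) profile.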