def install(
    self,
    targets: List[ProjectTarget] = None,
    profile_names: List[str] = None,
    headers: List[str] = None,
    header_install_path: str = paths.default_header_install_path(),
    library_install_path: str = paths.default_library_install_path(),
    executable_install_path: str = paths.default_executable_install_path(),
    dry_run: bool = True,
):
    """
    Install the specified targets for the specified profiles.

    :param targets: The targets to install. Defaults to all non-internal project targets.
    :param profile_names: The profiles for which to install. Defaults to the "release" profile.
    :param headers: The headers to install. Defaults to all headers that are part of the interface as per :func:`interfaces`.
    :param header_install_path: The path to which to install headers. This defaults to one of the default locations for the host OS.
    :param library_install_path: The path to which to install libraries. This defaults to one of the default locations for the host OS.
    :param executable_install_path: The path to which to install executables. This defaults to one of the default locations for the host OS.
    :param dry_run: Whether to perform a dry-run only, with no file copying. Defaults to True.
    """
    targets = utils.default_value(targets, self.install_targets())
    profile_names = utils.default_value(profile_names, [self.install_profile()])
    headers = [self.find(header) for header in headers] if headers is not None else list(self.public_headers)

    if dry_run:
        G_LOGGER.warning("Install dry-run, will not copy files.")

    def install_target(target, prof_name):
        node: LinkedNode = target[prof_name]
        install_dir = library_install_path if target.is_lib else executable_install_path
        install_path = os.path.join(install_dir, os.path.basename(node.path))
        if dry_run:
            G_LOGGER.info(f"Would install target: {node.path} to {install_path}")
        elif utils.copy_path(node.path, install_path):
            G_LOGGER.info(f"Installed target: {node.path} to {install_path}")

    for prof_name in profile_names:
        for target in targets:
            install_target(target, prof_name)

    def install_header(header):
        install_path = os.path.join(header_install_path, os.path.basename(header))
        if dry_run:
            G_LOGGER.info(f"Would install header: {header} to {install_path}")
        elif utils.copy_path(header, install_path):
            G_LOGGER.info(f"Installed header: {header} to {install_path}")

    for header in headers:
        install_header(header)
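# Illustrative usage sketch (assumes a configured Project instance named `project`;
# the profile name below is hypothetical):
#
#     project.install()                                           # dry-run: logs what would be copied
#     project.install(profile_names=["release"], dry_run=False)   # actually copies artifacts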
def run(self, targets: List[ProjectTarget], profile_names: List[str] = []) -> None:
    """
    Runs targets from this project.

    :param targets: The targets to run.
    :param profile_names: The profiles for which to run the targets.
    """
    for target in targets:
        if target.name not in self.executables:
            G_LOGGER.critical(
                f"Could not find target: {target.name} in project executables. Note: Available executables are: {list(self.executables.keys())}"
            )

    def run_target(target: ProjectTarget, prof_name: str):
        G_LOGGER.log(f"\nRunning target: {target}, for profile: {prof_name}", colors=[Color.BOLD, Color.GREEN])
        status = self._run_linked_node(target[prof_name])
        if status.returncode:
            G_LOGGER.critical("Failed to run. Reconfiguring the project or running a clean build may resolve this.")

    for prof_name in profile_names:
        G_LOGGER.log(f"\n{utils.wrap_str(f' Profile: {prof_name} ')}", colors=[Color.BOLD, Color.GREEN])
        for target in targets:
            run_target(target, prof_name)
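# Illustrative usage sketch (hypothetical `project` and target; `project.executable`
# is assumed here as the target-creation helper, not confirmed by this listing):
#
#     tests = project.executable("tests", sources=["tests.cpp"])
#     project.run([tests], profile_names=["debug"])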
def profile(self, name: str, flags: BuildFlags = BuildFlags(), build_dir: str = None, file_suffix: str = "") -> Profile:
    """
    Returns or creates a profile with the specified parameters.

    :param name: The name of this profile.
    :param flags: The flags to use for this profile. These will be applied to all targets for this profile. Per-target flags always take precedence.
    :param build_dir: The directory to use for build artifacts. Defaults to a directory named after the profile within the project's build directory, i.e. ``os.path.join(self.build_dir, name)``.
    :param file_suffix: A file suffix to attach to all artifacts generated for this profile. For example, the default debug profile attaches a ``_debug`` suffix to all library and executable names.

    :returns: :class:`sbuildr.Profile`
    """
    if name not in self.profiles:
        build_dir = self.files.add_writable_dir(self.files.add_exclude_dir(os.path.abspath(build_dir or os.path.join(self.build_dir, name))))
        G_LOGGER.verbose(f"Setting build directory for profile: {name} to: {build_dir}")
        self.profiles[name] = Profile(flags=flags, build_dir=build_dir, suffix=file_suffix)
    return self.profiles[name]
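# Illustrative usage sketch (hypothetical `project`; the O(3) builder follows
# BuildFlags' chaining style and is an assumption):
#
#     debug = project.profile(name="debug", file_suffix="_debug")
#     release = project.profile(name="release", flags=BuildFlags().O(3), file_suffix="_release")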
def interfaces(self, headers: List[str], depends: List[Dependency] = []) -> List[str]:
    """
    Specifies headers that are part of this project's public interface.
    When running the ``install`` command on the CLI, the headers specified via this function will be copied to installation directories.

    :param headers: A list of paths to public headers.
    :param depends: Any dependencies required by the public headers.

    :returns: The absolute paths of the discovered headers.
    """
    discovered_paths = []
    for header in headers:
        candidates = self.files.find(header)
        if len(candidates) == 0:
            G_LOGGER.critical(f"Could not find installation target: {header}")
        if len(candidates) > 1:
            G_LOGGER.critical(
                f"For installation target: {header}, found multiple installation candidates: {candidates}. Please provide a longer path to disambiguate."
            )
        discovered_paths.append(candidates[0])
    self.public_headers = set(discovered_paths)
    self.public_header_dependencies.extend(depends)
    return discovered_paths
def configure_libraries(self):
    for layer in self.graph.layers():
        lib_nodes = [node for node in layer if isinstance(node, Library)]
        for node in lib_nodes:
            # Pull in libraries from this node's inputs.
            lib_inputs = [inp for inp in node.inputs if isinstance(inp, Library)]
            for inp in lib_inputs:
                # Avoid duplicates in libs/lib_dirs.
                if inp.name not in node.libs:
                    node.libs.append(inp.name)
                node.libs.extend(inp.libs)
                # Add the dependency's lib_dirs in addition to the directory containing the dependency itself.
                lib_dirs = ([os.path.dirname(inp.path)] if inp.path else []) + inp.lib_dirs
                lib_dirs = [dir for dir in lib_dirs if dir not in node.lib_dirs]
                node.lib_dirs.extend(lib_dirs)
                G_LOGGER.verbose(f"Adding library: {inp.name}, and library directories: {lib_dirs} to {node}")
                # Lastly, if this input does not have a path, it needs to be removed.
                if not inp.path:
                    node.remove_input(inp)
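# Worked example (hypothetical names): suppose executable `app` has input `liba`, and
# `liba` has input `libb`. Layers are processed in order, so `liba` first gains "b" in
# its libs and libb's containing directory in its lib_dirs; when `app` is processed in
# a later layer, it inherits both from `liba`. Transitive link requirements therefore
# propagate up the build graph without being declared on `app` directly.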
def add_exclude_dir(self, dir: str) -> str:
    absdir = self.abspath(dir)
    self.exclude_dirs.add(absdir)
    # Remove any files that are in the new exclude directory.
    self.files = {file for file in self.files if not _is_in_directory(file, absdir)}
    G_LOGGER.verbose(f"Updated files to: {self.files}")
    return absdir
def compile(self, input_path: str, output_path: str, include_dirs: List[str] = [], flags: BuildFlags = BuildFlags()) -> List[str]:
    compiler_flags = self.cdef.parse_flags(flags)
    includes = [self.cdef.include(dir) for dir in include_dirs]
    # The full command, including the output file and the compile-only flag.
    cmd = [self.cdef.executable(), input_path] + compiler_flags + includes + [self.cdef.compile_only(), self.cdef.output(output_path)]
    G_LOGGER.verbose(f"Compile Command: {' '.join(cmd)}")
    return cmd
def configure(self, build_graph: Graph):
    config = ""
    node_ids = {}
    curr_id = 0
    for layer in build_graph.layers():
        for node in layer:
            for artifact in node.artifacts():
                config += f"path {artifact.path} #{curr_id}\n"
                if artifact.dependencies:
                    config += f"deps {' '.join([str(node_ids[node]) for node in artifact.dependencies])}\n"
                for cmd in artifact.commands:
                    config += "run"
                    for arg in cmd:
                        config += f' "{arg}"'
                    config += "\n"
                for cmd in artifact.always:
                    config += "always"
                    for arg in cmd:
                        config += f' "{arg}"'
                    config += "\n"
                # Only the id for the final artifact is used by other nodes.
                node_ids[node] = curr_id
                curr_id += 1
    G_LOGGER.info(f"Generating configuration files in build directory: {self.build_dir}")
    with open(self.config_file, "w") as f:
        G_LOGGER.debug(f"Writing {self.config_file}")
        f.write(config)
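# Shape of the emitted configuration, reconstructed from the string building above
# (the paths and commands shown are hypothetical):
#
#     path build/main.o #0
#     run "g++" "main.cpp" "-c" "-o" "build/main.o"
#     path build/main #1
#     deps 0
#     run "g++" "build/main.o" "-o" "build/main"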
def include_dir(self) -> str:
    """
    Return the directory containing the headers required for this dependency. Must be called after setup().
    """
    if not self.package_root:
        G_LOGGER.critical("include_dir() must not be called before setup()")
    return os.path.join(self.package_root, Dependency.PACKAGE_HEADER_SUBDIR)
def __init__(self, name: str = None, path: str = None, libs: List[str] = None, lib_dirs: List[str] = None):
    """
    Represents a library.

    :param name: The name of the library.
    :param path: A path to the library.
    :param libs: Names of libraries this library depends on.
    :param lib_dirs: A list of directories required for loading this library. This would generally include directories containing libraries that this library is linked against. For example, if the project requires ``liba``, and ``liba`` is linked against ``libb``, then ``lib_dirs`` should include the containing directory of ``libb``.

    Note that either a name or path must be provided. If a name is provided, then the containing directory for this library should be provided to ``lib_dirs``, unless it is in the default linker/loader search path.
    """
    if not (name or path):
        G_LOGGER.critical("Either a name or path must be provided to find a library")
    # TODO: FIXME: This will not handle non-standard library names (i.e. those not of the form lib<name>.so)
    super().__init__(path)
    self.name = name or paths.libname_to_name(os.path.basename(self.path))
    self.libs = libs or []
    # Default to None rather than [] to avoid Python's mutable default argument pitfall.
    self.lib_dirs = [os.path.abspath(dir) for dir in lib_dirs] if lib_dirs is not None else []
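# Illustrative usage sketch (paths are hypothetical):
#
#     pthread = Library(name="pthread")  # on the default linker/loader search path
#     mylib = Library(path="/opt/mylib/libmylib.so", libs=["dep"], lib_dirs=["/opt/dep/lib"])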
def copy_path(src: str, dst: str) -> bool:
    try:
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copy2(src, dst)
        return True
    except PermissionError:
        G_LOGGER.error(f"Could not write to {dst}. Do you have sufficient privileges?")
        return False
def __init__(self, path: str, inputs: List["Node"] = []):
    self.path = path
    self.inputs: List[Node] = []
    self.outputs: List[Node] = []
    G_LOGGER.debug(f"Constructing {type(self)} with path: {self.path}, with {len(inputs)} inputs: {inputs}")
    for inp in inputs:
        self.add_input(inp)
def fetch(self) -> str:
    """
    Fetches the dependency into the specified location.

    :returns: The directory into which the dependency was fetched.
    """
    if not self.dest_dir:
        G_LOGGER.critical("Cannot fetch before setting destination directory.")
def version(self) -> str:
    """
    Specifies the version of the dependency to be fetched. This is used for caching purposes.

    :returns: A string representing the version - for example, a commit hash or a version number.
    """
    if not self.dest_dir:
        G_LOGGER.critical("Cannot get version before setting destination directory.")
def _file_suffix(path: str, suffix: str, ext: str = None) -> str:
    split = os.path.splitext(os.path.basename(path))
    basename = split[0]
    ext = ext or split[1]
    suffixed = f"{basename}{suffix}{ext if ext else ''}"
    G_LOGGER.verbose(f"_file_suffix received path: {path}, split into {split}. Using suffix: {suffix}, generated final name: {suffixed}")
    return suffixed
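# Expected behavior, derived from the splitext logic above (paths are hypothetical):
#
#     _file_suffix("build/libmath.so", "_debug")     # -> "libmath_debug.so"
#     _file_suffix("build/app", "_release")          # -> "app_release" (no extension)
#     _file_suffix("src/main.cpp", "_v2", ext=".o")  # -> "main_v2.o" (extension overridden)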
def build(self, nodes: List[Node]) -> Tuple[subprocess.CompletedProcess, float]:
    # Early exit if no targets were provided.
    if not nodes:
        G_LOGGER.debug("No targets specified, skipping build.")
        return subprocess.CompletedProcess(args=[], returncode=0, stdout=b"", stderr=b"No targets specified"), 0
    paths = [node.path for node in nodes]
    cmd = ["rbuild", "--threads", str(multiprocessing.cpu_count()), f"{self.config_file}"] + paths
    G_LOGGER.verbose(f"Build command: {' '.join(cmd)}\nTarget file paths: {paths}")
    return utils.time_subprocess(cmd)
def _files_in_dir(self, dir: str):
    dir = self.abspath(dir)
    G_LOGGER.verbose(f"Searching for files in: {dir}")
    files = []
    for path in glob.iglob(os.path.join(dir, "**"), recursive=True):
        if os.path.isfile(path) and not _is_in_directories(path, self.exclude_dirs):
            files.append(os.path.abspath(path))
        else:
            G_LOGGER.verbose(f"Rejecting path: {path}, because it is either not a file, or falls in one of the excluded directories.")
    return files
def uninstall_header(header):
    uninstall_path = os.path.join(header_install_path, os.path.basename(header))
    if dry_run:
        G_LOGGER.info(f"Would remove header: {header} from {uninstall_path}")
    else:
        os.remove(uninstall_path)
        G_LOGGER.info(f"Uninstalled header: {header} from {uninstall_path}")
def get_path_include_dir(included_path: str, included_token: str) -> str:
    # included_path is the full path of the included file.
    # included_token is the token used to include the file.
    # Absolute paths do not require include directories.
    if os.path.isabs(included_token):
        return None
    # The include directory is the prefix of included_path left after stripping the include token.
    include_dir = included_path[:-len(included_token)]
    if not os.path.isdir(include_dir):
        # This should never happen, since included_path must end with included_token.
        G_LOGGER.critical(f"While attempting to find the include directory to use for {included_path}, found that {include_dir} does not exist!")
    return os.path.abspath(include_dir)
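# Worked example (hypothetical paths): for a file included as "proj/util.h" that
# resolves to /usr/local/include/proj/util.h, stripping the token leaves
# "/usr/local/include/", so the compiler would need -I/usr/local/include:
#
#     get_path_include_dir("/usr/local/include/proj/util.h", "proj/util.h")
#     # -> "/usr/local/include"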
def source(self, path: str) -> SourceNode:
    candidates = self.find(path)
    if len(candidates) > 1:
        G_LOGGER.warning(
            f"For {path}, found multiple candidates: {candidates}. Using {candidates[0]}. If this is incorrect, please disambiguate by providing either an absolute path, or a longer relative path."
        )
    elif len(candidates) == 0:
        G_LOGGER.critical(f"Could not find {path}. Does it exist?")
    path = candidates[0]
    node = self.graph.find_node_with_path(path)
    if not node:
        return self.graph.add(SourceNode(path))
    return node
def find_dependencies():
    unique_deps: Set[Dependency] = set()
    for target in targets:
        unique_deps.update(target.dependencies)
    required_deps = self.public_header_dependencies + list(unique_deps)
    G_LOGGER.info(f"Fetching dependencies: {required_deps}")
    for dep in required_deps:
        meta = dep.setup()
        self.files.add_include_dir(dep.include_dir())
        # Register any additional include directories reported by the dependency's metadata.
        for dir in meta.include_dirs:
            self.files.add_include_dir(dir)
def export(self, path: str = None) -> None:
    """
    Export this project to the specified path. This enables the project to be used with SBuildr's dependency management system, as well as with the command-line sbuildr utility.

    :param path: The path at which to export the project. Defaults to ``Project.DEFAULT_SAVED_PROJECT_NAME`` in the project's build directory.
    """
    path = path or os.path.join(self.build_dir, Project.DEFAULT_SAVED_PROJECT_NAME)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    G_LOGGER.info(f"Exporting project to {path}")
    with open(path, "wb") as f:
        pickle.dump(self, f)
def uninstall_target(target, prof_name):
    node: LinkedNode = target[prof_name]
    uninstall_dir = library_install_path if target.is_lib else executable_install_path
    uninstall_path = os.path.join(uninstall_dir, os.path.basename(node.path))
    if dry_run:
        G_LOGGER.info(f"Would remove target: {node.path} from {uninstall_path}")
    else:
        os.remove(uninstall_path)
        G_LOGGER.info(f"Uninstalled target: {node.path} from {uninstall_path}")
def load(path: str = None) -> "Project":
    """
    Load a project from the specified path.

    :param path: The path from which to load the project. Defaults to ``Project.DEFAULT_SAVED_PROJECT_NAME`` in the ``build`` directory under the current working directory.

    :returns: The loaded project.
    """
    path = path or os.path.abspath(os.path.join("build", Project.DEFAULT_SAVED_PROJECT_NAME))
    G_LOGGER.debug(f"Loading project from {path}")
    with open(path, "rb") as f:
        return pickle.load(f)
def str_hash(obj) -> str:
    """
    Returns the hash of a string constructed by joining the elements of the specified iterable of strings.

    :param obj: The iterable of strings to hash.

    :returns: The resulting hash as a hex digest string.
    """
    in_str = " ".join(obj).strip()
    generated_hash = hashlib.md5(in_str.encode()).hexdigest()
    G_LOGGER.verbose(f"Generated hash {generated_hash} from '{in_str}'")
    return generated_hash
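# Minimal standalone sketch of the same technique, runnable with only the standard
# library (the command used as hash input is hypothetical):
import hashlib

cmd = ["g++", "-O2", "-c", "main.cpp"]
digest = hashlib.md5(" ".join(cmd).strip().encode()).hexdigest()
print(digest)  # Identical commands always produce the same digest, enabling caching.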
def fetch(self) -> str:
    super().fetch()
    subprocess.run(["git", "init"], cwd=self.dest_dir, capture_output=True)
    # Stash any local changes made by external sources.
    G_LOGGER.info(f"Stashing changes in {self.dest_dir}")
    subprocess.run(["git", "stash"], capture_output=False, cwd=self.dest_dir)
    checkout = self.commit or self.tag or self.branch
    G_LOGGER.info(f"Pulling: {self.url} at {checkout} into {self.dest_dir}")
    # TODO: Error checking here? Pull may fail if this is a local repo.
    subprocess.run(
        ["git", "pull", "--force", "--recurse-submodules", "--tags", self.url, checkout],
        capture_output=False,
        cwd=self.dest_dir,
    )
    G_LOGGER.info(f"Checking out: {checkout}")
    checkout_status = subprocess.run(["git", "checkout", checkout], capture_output=True, cwd=self.dest_dir)
    if checkout_status.returncode:
        G_LOGGER.critical(f"Failed to checkout {checkout} with:\n{utils.subprocess_output(checkout_status)}")
    return self.dest_dir
def clean(self, nuke: bool = False, dry_run: bool = True):
    """
    Removes build directories and project artifacts.

    :param nuke: Whether to remove all build directories associated with the project, including profile build directories.
    :param dry_run: Whether this is a dry-run, in which case SBuildr will only display which directories would be removed rather than removing them. Defaults to True.
    """
    # TODO(3): Add per-target cleaning.
    if dry_run:
        G_LOGGER.warning("Clean dry-run, will not remove files.")
    # By default, cleans all targets for all profiles.
    to_remove = [self.profiles[prof_name].build_dir for prof_name in self.all_profile_names()] + [self.common_build_dir]
    G_LOGGER.info(f"Cleaning targets for profiles: {self.all_profile_names()}")
    if nuke:
        # The nuclear option.
        to_remove += [self.build_dir]
        G_LOGGER.info("Initiating Nuclear Protocol!")
    # Remove the directories.
    for path in to_remove:
        if dry_run:
            G_LOGGER.info(f"Would remove: {path}")
        else:
            self.files.rm(path)
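# Illustrative usage sketch (hypothetical `project`):
#
#     project.clean()                          # dry-run: only logs what would be removed
#     project.clean(nuke=True, dry_run=False)  # removes all build directories for real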