def install(
    self,
    targets: List[ProjectTarget] = None,
    profile_names: List[str] = None,
    headers: List[str] = None,
    header_install_path: str = paths.default_header_install_path(),
    library_install_path: str = paths.default_library_install_path(),
    executable_install_path: str = paths.default_executable_install_path(),
    dry_run: bool = True,
):
    """
    Install the specified targets for the specified profiles.

    :param targets: The targets to install. Defaults to all non-internal project targets.
    :param profile_names: The profiles for which to install. Defaults to the "release" profile.
    :param headers: The headers to install. Defaults to all headers that are part of the interface as per :func:`interfaces`.
    :param header_install_path: The path to which to install headers. This defaults to one of the default locations for the host OS.
    :param library_install_path: The path to which to install libraries. This defaults to one of the default locations for the host OS.
    :param executable_install_path: The path to which to install executables. This defaults to one of the default locations for the host OS.
    :param dry_run: Whether to perform a dry run only, with no file copying. Defaults to True.
    """
    targets = utils.default_value(targets, self.install_targets())
    profile_names = utils.default_value(profile_names, [self.install_profile()])
    headers = [self.find(header) for header in headers] if headers is not None else list(self.public_headers)

    if dry_run:
        G_LOGGER.warning("Install dry-run, will not copy files.")

    def install_target(target, prof_name):
        node: LinkedNode = target[prof_name]
        install_dir = library_install_path if target.is_lib else executable_install_path
        install_path = os.path.join(install_dir, os.path.basename(node.path))
        if dry_run:
            G_LOGGER.info(f"Would install target: {node.path} to {install_path}")
        elif utils.copy_path(node.path, install_path):
            G_LOGGER.info(f"Installed target: {node.path} to {install_path}")

    for prof_name in profile_names:
        for target in targets:
            install_target(target, prof_name)

    def install_header(header):
        install_path = os.path.join(header_install_path, os.path.basename(header))
        if dry_run:
            G_LOGGER.info(f"Would install header: {header} to {install_path}")
        elif utils.copy_path(header, install_path):
            G_LOGGER.info(f"Installed header: {header} to {install_path}")

    for header in headers:
        install_header(header)
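# Hypothetical usage sketch (not from the source): assumes a `Project` instance named
# `project` with a library target `mylib` already registered. Relies on `dry_run`
# defaulting to True, so the first call only previews the copy destinations:
#
#     project.install(targets=[mylib])                  # Dry run: logs where files would go.
#     project.install(targets=[mylib], dry_run=False)   # Actually copies the files.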
def build(self, targets: List[ProjectTarget] = None, profile_names: List[str] = None) -> float:
    """
    Builds the specified targets for this project. Configuration must be run prior to calling this function.

    :param targets: The targets to build. Defaults to all targets.
    :param profile_names: The profiles for which to build the targets. Defaults to all profiles.

    :returns: Time elapsed during the build.
    """
    targets = utils.default_value(targets, self.all_targets())
    profile_names = utils.default_value(profile_names, self.all_profile_names())
    G_LOGGER.info(f"Building targets: {[target.name for target in targets]} for profiles: {profile_names}")
    G_LOGGER.debug(f"Targets: {targets}")

    def select_nodes(targets: List[ProjectTarget], profile_names: List[str]) -> List[Node]:
        # Validate the requested profiles and gather the corresponding build nodes.
        nodes = []
        for prof_name in profile_names:
            if prof_name not in self.profiles:
                G_LOGGER.critical(
                    f"Profile {prof_name} does not exist in the project. Available profiles: {self.all_profile_names()}"
                )
            # Populate nodes.
            for target in targets:
                if prof_name in target:
                    node = target[prof_name]
                    G_LOGGER.verbose(f"For target: {target}, profile: {prof_name}, found path: {node.path}")
                    nodes.append(node)
                else:
                    G_LOGGER.debug(f"Skipping target: {target.name} for profile: {prof_name}, as it does not exist.")
        return nodes

    nodes = select_nodes(targets, profile_names)
    if not nodes:
        # Nothing to build for the requested targets/profiles.
        return 0.0

    # Create all required build directories.
    self.files.mkdir(self.common_build_dir)
    profile_build_dirs = [self.profiles[prof_name].build_dir for prof_name in profile_names]
    for build_dir in profile_build_dirs:
        self.files.mkdir(build_dir)
    G_LOGGER.verbose(f"Created build directories: {self.common_build_dir}, {profile_build_dirs}")

    if not self.backend:
        G_LOGGER.critical("Backend has not been configured. Please call `configure()` prior to attempting to build.")

    status, time_elapsed = self.backend.build(nodes)
    if status.returncode:
        G_LOGGER.critical("Failed to build. Reconfiguring the project or running a clean build may resolve this.")
    G_LOGGER.info(
        f"Built {plural('target', len(targets))} for {plural('profile', len(profile_names))} in {time_elapsed} seconds."
    )
    return time_elapsed
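# Hypothetical usage sketch (not from the source): assumes `project.configure()` has
# already been called, as required by the docstring above:
#
#     elapsed = project.build(profile_names=["release"])
#     print(f"Release build completed in {elapsed} seconds")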
def run_tests(self, targets: List[ProjectTarget] = None, profile_names: List[str] = None):
    """
    Runs tests from this project. By default, runs all of the project's tests for all profiles.

    :param targets: The test targets to run. Raises an exception if a target is not a test target. Defaults to all test targets.
    :param profile_names: The profiles for which to run the tests. Defaults to all profiles.
    """
    # Apply defaults before validating, so that `targets=None` does not cause iteration over None.
    tests = utils.default_value(targets, self.test_targets())
    profile_names = utils.default_value(profile_names, self.all_profile_names())
    for test in tests:
        if test.name not in self.tests:
            G_LOGGER.critical(
                f"Could not find test: {test.name} in project.\n\tAvailable tests:\n\t\t{list(self.tests.keys())}"
            )
    if not tests:
        G_LOGGER.warning("No tests found. Have you registered tests using project.test()?")
        return

    class TestResult:
        def __init__(self):
            self.failed = 0
            self.passed = 0

    def run_test(test, prof_name):
        G_LOGGER.log(f"\nRunning test: {test}, for profile: {prof_name}", colors=[Color.BOLD, Color.GREEN])
        status = self._run_linked_node(test[prof_name])
        if status.returncode:
            G_LOGGER.log(
                f"\nFAILED {test}, for profile: {prof_name}:\n{test[prof_name].path}",
                colors=[Color.BOLD, Color.RED],
            )
            test_results[prof_name].failed += 1
            failed_targets[prof_name].add(test[prof_name].name)
        else:
            G_LOGGER.log(f"\nPASSED {test}", colors=[Color.BOLD, Color.GREEN])
            test_results[prof_name].passed += 1

    test_results = defaultdict(TestResult)
    failed_targets = defaultdict(set)
    for prof_name in profile_names:
        G_LOGGER.log(f"\n{utils.wrap_str(f' Profile: {prof_name} ')}", colors=[Color.BOLD, Color.GREEN])
        for test in tests:
            run_test(test, prof_name)

    # Display a summary of the results.
    G_LOGGER.log(f"\n{utils.wrap_str(' Test Results Summary ')}\n", colors=[Color.BOLD, Color.GREEN])
    for prof_name, result in test_results.items():
        if result.passed or result.failed:
            G_LOGGER.log(f"Profile: {prof_name}", colors=[Color.BOLD, Color.GREEN])
            if result.passed:
                G_LOGGER.log(f"\tPASSED {plural('test', result.passed)}", colors=[Color.BOLD, Color.GREEN])
            if result.failed:
                G_LOGGER.log(
                    f"\tFAILED {plural('test', result.failed)}: {failed_targets[prof_name]}",
                    colors=[Color.BOLD, Color.RED],
                )
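# Hypothetical usage sketch (not from the source): assumes a test target was registered
# earlier via `project.test()` (referenced in the warning above) and that it returns a
# target handle, e.g. `test_target = project.test(...)`:
#
#     project.run_tests(targets=[test_target], profile_names=["debug"])
#     project.run_tests()   # Run all registered tests for all profiles.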
def configure(self, targets: List[ProjectTarget] = None, profile_names: List[str] = None, BackendType: type = RBuildBackend) -> None:
    """
    Configure does three things:

    1. Finds dependencies for the specified targets. This may involve fetching and building dependencies if they do not exist in the cache.
    2. Configures the project's build graph after discovering libraries for targets. Before configure() is called, a target's libs/lib_dirs lists are not guaranteed to be complete.
    3. Configures the project for building with the specified backend type. This includes generating any build configuration files required by this project's backend.

    This function must be called prior to building.

    :param targets: The targets for which to configure the project. Defaults to all targets.
    :param profile_names: The names of profiles for which to configure the project. Defaults to all profiles.
    :param BackendType: The type of backend to use. Since SBuildr is a meta-build system, it can support multiple backends to perform builds. For example, RBuild (i.e. ``sbuildr.backends.RBuildBackend``) can be used for fast incremental builds. Note that this should be a type rather than an instance of a backend.
    """
    targets = utils.default_value(targets, self.all_targets())
    profile_names = utils.default_value(profile_names, self.all_profile_names())

    def find_dependencies():
        unique_deps: Set[Dependency] = set()
        for target in targets:
            unique_deps.update(target.dependencies)
        required_deps = self.public_header_dependencies + list(unique_deps)
        G_LOGGER.info(f"Fetching dependencies: {required_deps}")
        for dep in required_deps:
            meta = dep.setup()
            self.files.add_include_dir(dep.include_dir())
            for include_dir in meta.include_dirs:
                self.files.add_include_dir(include_dir)

    def configure_graph():
        self.files.scan_all()
        for profile in self.profiles.values():
            profile.configure_libraries()

        def combined_graph():
            # Walk the graph breadth-first starting from the selected nodes; extending the
            # list during iteration also visits each node's inputs. Duplicates are removed
            # by the set() below.
            all_nodes = [target[prof_name] for target in targets for prof_name in profile_names]
            for node in all_nodes:
                all_nodes.extend(node.inputs)
            graph = Graph(set(all_nodes))
            # Rename all the files in the build graph so that their paths include signature hashes.
            for layer in graph.layers():
                for node in layer:
                    if isinstance(node, CompiledNode):
                        signature = node.compiler.signature(node.inputs[0].path, node.include_dirs, node.flags)
                        node.path = paths.insert_suffix(node.path, f".{signature}")
                    elif isinstance(node, LinkedNode):
                        signature = node.linker.signature([inp.path for inp in node.inputs], node.libs, node.lib_dirs, node.flags)
                        node.hashed_path = paths.insert_suffix(node.hashed_path, f".{signature}")
            return graph

        self.graph = combined_graph()

    def configure_backend():
        self.backend = BackendType(self.build_dir)
        self.files.mkdir(self.build_dir)
        self.backend.configure(self.graph)

    find_dependencies()
    configure_graph()
    configure_backend()
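# Hypothetical usage sketch (not from the source): a typical end-to-end flow with
# default targets and profiles, assuming a `Project` instance named `project`:
#
#     project.configure()             # Must precede build(); fetches deps, builds the graph,
#                                     # and generates backend configuration files.
#     project.build()
#     project.run_tests()
#     project.install(dry_run=False)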