Example No. 1
    def fetch(self) -> str:
        """
        Fetches the dependency into the specified location.

        :returns: The directory into which the dependency was fetched.
        """
        if not self.dest_dir:
            G_LOGGER.critical("Cannot fetch before setting destination directory.")
Example No. 2
def get_path_include_dir(included_path: str, included_token: str) -> Optional[str]:
    # included_path is the full path of the included file.
    # included_token is the token used to include the file.
    # Absolute paths do not require include directories.
    if os.path.isabs(included_token):
        return None
    # The include directory is the path with the include token stripped from the end.
    include_dir = included_path[:-len(included_token)]
    if not os.path.isdir(include_dir):
        # This should never be hit: include_dir is an ancestor directory of included_path.
        G_LOGGER.critical(f"While attempting to find the include directory for {included_path} (included as {included_token}), found that {include_dir} does not exist!")
    return os.path.abspath(include_dir)
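
A hedged walkthrough of the slicing logic above: stripping the include token from the end of the resolved path leaves the directory that must be handed to the compiler as an include directory. The paths below are invented for illustration:

# Suppose a source file contains: #include "mylib/utils.hpp"
included_token = "mylib/utils.hpp"
# ...and it was resolved to this file on disk:
included_path = "/home/user/project/include/mylib/utils.hpp"

# Stripping the token from the end of the path yields the include directory:
include_dir = included_path[:-len(included_token)]
print(include_dir)  # /home/user/project/include/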
Example No. 3
def source(self, path: str) -> SourceNode:
    candidates = self.find(path)
    if len(candidates) > 1:
        G_LOGGER.warning(f"For {path}, found multiple candidates: {candidates}. Using {candidates[0]}. If this is incorrect, please disambiguate by providing either an absolute path, or a longer relative path.")
    elif len(candidates) == 0:
        G_LOGGER.critical(f"Could not find {path}. Does it exist?")
    path = candidates[0]
    # Reuse the existing node for this path if one is already in the graph.
    node = self.graph.find_node_with_path(path)
    if not node:
        return self.graph.add(SourceNode(path))
    return node
Example No. 4
    def find(self, path: str) -> str:
        """
        Attempts to locate a path in the project. If no paths are found, or multiple ambiguous paths are found, this raises an exception.

        :param path: The path to find. This may be an absolute path, partial path, or file/directory name.

        :returns: An absolute path to the matching file or directory.
        """
        candidates = self.files.find(path)
        if len(candidates) == 0:
            G_LOGGER.critical(f"Could not find path: {path}")
        elif len(candidates) > 1:
            G_LOGGER.critical(
                f"For path: {path}, found multiple candidates: {candidates}. Please provide a longer path to disambiguate."
            )
        return candidates[0]
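
A hedged usage sketch of find, assuming a project object that exposes this method and the invented layout shown in the comments:

# Hypothetical layout:
#   /home/user/project/src/utils.cpp
#   /home/user/project/include/utils.hpp

# A bare file name is enough when it is unambiguous:
path = project.find("utils.cpp")            # -> /home/user/project/src/utils.cpp

# A longer partial path disambiguates files that share a name:
path = project.find("include/utils.hpp")    # -> /home/user/project/include/utils.hpp

# A path that matches nothing, or matches several candidates equally well,
# goes through G_LOGGER.critical and raises an exception.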
Example No. 5
    def setup(self, force=False) -> DependencyMetadata:
        """
        Fetch, build, and install the dependency if the dependency does not exist in the cache. After setting up the dependency, all references to libraries in the dependency are updated according to the metadata reported by the builder. If the dependency is found in the cache, loads the metadata from the cache instead.

        :param force: Force the dependency to be fetched, built and installed, even if it already exists in the cache.

        :returns: The metadata for this dependency, including its installed libraries and include directories.
        """
        # Create the destination directory for the fetcher
        os.makedirs(self.fetcher.dest_dir, exist_ok=True)

        def update_package_root():
            name = self.fetcher.dependency_name
            self.version = self.fetcher.version()
            dir = f"{name}-{self.version}" if self.version else name
            self.package_root = os.path.join(self.cache_root, Dependency.CACHE_PACKAGES_SUBDIR, dir)

        update_package_root()
        metadata_path = os.path.join(self.package_root, Dependency.METADATA_FILENAME)
        meta = None
        if os.path.exists(metadata_path):
            meta = DependencyMetadata.load(metadata_path)

        if force or meta is None or meta.META_API_VERSION != DependencyMetadata.META_API_VERSION:
            G_LOGGER.info(f"{self.package_root} does not contain package metadata. Fetching dependency.")
            self.fetcher.fetch()
            # Install
            lib_dir = os.path.join(self.package_root, Dependency.PACKAGE_LIBRARY_SUBDIR)
            exec_dir = os.path.join(self.package_root, Dependency.PACKAGE_EXECUTABLE_SUBDIR)
            meta = self.builder.install(self.fetcher.dest_dir, header_dir=self.include_dir(), lib_dir=lib_dir, exec_dir=exec_dir)
            meta.save(metadata_path)

        # TODO: FIXME: Make this more resilient to copies by moving this logic to Project. FileManager already tracks all dependency libraries as Library nodes.
        # Next, update all libraries that have been requested from this dependency.
        for name, lib in self.libraries.items():
            if name not in meta.libraries:
                G_LOGGER.critical(f"Requested library: {name} is not present in dependency: {self.name}")
            metalib = meta.libraries[name]
            lib.path = metalib.path
            lib.libs.extend(metalib.libs)
            lib.lib_dirs.extend(metalib.lib_dirs)
            G_LOGGER.verbose(f"Correcting library: {name} to {lib}")
        return meta
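
A hedged usage sketch of the caching behaviour implemented above; the dependency object is assumed to be already constructed with a fetcher and builder:

# First call: no cached metadata, so the dependency is fetched, built, and installed.
meta = dependency.setup()

# Later calls load DependencyMetadata from the cache instead of rebuilding,
# unless force=True is passed or META_API_VERSION has changed.
meta = dependency.setup()             # fast: loaded from the cache
meta = dependency.setup(force=True)   # re-fetches and reinstalls even if cached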
Example No. 6
def select_nodes(targets: List[ProjectTarget],
                 profile_names: List[str]) -> List[Node]:
    # Nested helper (see Example No. 12 below); `self` refers to the enclosing project.
    # Create all required profile build directories and populate nodes.
    nodes = []
    for prof_name in profile_names:
        if prof_name not in self.profiles:
            G_LOGGER.critical(
                f"Profile {prof_name} does not exist in the project. Available profiles: {self.all_profile_names()}"
            )
        # Populate nodes.
        for target in targets:
            if prof_name in target:
                node = target[prof_name]
                G_LOGGER.verbose(
                    f"For target: {target}, profile: {prof_name}, found path: {node.path}"
                )
                nodes.append(node)
            else:
                G_LOGGER.debug(
                    f"Skipping target: {target.name} for profile: {prof_name}, as it does not exist."
                )
    return nodes
Example No. 7
    def __init__(self, root_dir: str, dirs: Set[str]=set(), exclude_dirs: Set[str]=set(), writable_dirs: Set[str]=set()):
        self.exclude_dirs: Set[str] = exclude_dirs
        # writable_dirs are the only locations to which FileManager is allowed to write.
        self.writable_dirs: Set[str] = writable_dirs
        G_LOGGER.verbose(f"Excluded directories: {exclude_dirs}. Writable directories: {writable_dirs}")

        # Include dirs/"header" files are only considered when searching for includes.
        self.include_dirs: List[str] = []
        self.header_files: List[str] = [] # List to enable header priority

        self.files: Set[str] = set()

        self.root_dir = os.path.abspath(root_dir)
        if not os.path.isdir(self.root_dir):
            G_LOGGER.critical(f"Root Directory: {self.root_dir} does not exist, or is not a directory.")
        self.add_dir(self.root_dir)

        # Add the remaining directories; add_dir converts each to an absolute path and skips anything under exclude_dirs.
        for dir in dirs:
            self.add_dir(dir)
        G_LOGGER.debug(f"Found {len(self.files)} files")
        G_LOGGER.verbose(f"{self.files}")
        # Keep track of all files relevant to building the project.
        self.graph = Graph()
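
A hedged construction sketch based on the signature above; all directory names are invented for illustration:

manager = FileManager(
    root_dir="/home/user/project",
    dirs={"/home/user/project/extra_src"},       # additional directories to scan
    exclude_dirs={"/home/user/project/build"},   # never scanned for sources
    writable_dirs={"/home/user/project/build"},  # the only locations FileManager may write to
)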
Example No. 8
    def install(self, source_dir: str, header_dir: str, lib_dir: str,
                exec_dir: str) -> DependencyMetadata:
        # Configuration scripts should export the project.
        configure_status = subprocess.run(
            [sys.executable, self.build_script_path],
            capture_output=True,
            cwd=source_dir,
            env={"PYTHONPATH": os.path.pathsep.join(sys.path)})
        if configure_status.returncode:
            G_LOGGER.critical(
                f"Failed to run build configuration script: {self.build_script_path} in {source_dir} with:\n{utils.subprocess_output(configure_status)}"
            )

        saved_project = os.path.join(source_dir, self.project_save_path)
        if not os.path.exists(saved_project):
            G_LOGGER.critical(
                f"Project was not saved to: {saved_project}. Please ensure this path is correct, and that the build configuration script in {self.build_script_path} is saving the project"
            )

        project = Project.load(saved_project)
        if project.PROJECT_API_VERSION != Project.PROJECT_API_VERSION:
            G_LOGGER.critical(
                f"This project has an older API version. System Project API version: {Project.PROJECT_API_VERSION}, Project version: {project.PROJECT_API_VERSION}. Please specify the path to which the project is saved by this dependency's build script using the project_save_path parameter."
            )

        self.install_profile = self.install_profile or project.install_profile()
        project.configure(project.install_targets(),
                          profile_names=[self.install_profile])
        project.build(project.install_targets(), [self.install_profile])

        project.install(targets=project.install_targets(),
                        profile_names=[self.install_profile],
                        header_install_path=header_dir,
                        library_install_path=lib_dir,
                        executable_install_path=exec_dir,
                        dry_run=False)

        libraries = {}
        for name, target in project.libraries.items():
            if not target.internal:
                lib = target[self.install_profile]
                libraries[name] = LibraryMetadata(path=os.path.join(
                    lib_dir, paths.name_to_libname(name)),
                                                  libs=lib.libs,
                                                  lib_dirs=lib.lib_dirs)
        include_dirs = project.files.include_dirs
        return DependencyMetadata(libraries, include_dirs)
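
A hedged sketch of how a caller might invoke this installer; the builder object and all directory names are invented for illustration:

import os

# Install the fetched sources into a package root laid out as include/lib/bin subdirectories.
package_root = "/home/user/.cache/deps/mylib-1.0"
meta = builder.install(
    source_dir="/tmp/mylib-src",                       # where the fetcher placed the sources
    header_dir=os.path.join(package_root, "include"),
    lib_dir=os.path.join(package_root, "lib"),
    exec_dir=os.path.join(package_root, "bin"),
)
# The returned DependencyMetadata records the installed libraries and include directories.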
Example No. 9
def add_input(self, node: SourceNode):
    # A CompiledNode is compiled from exactly one source file.
    if len(self.inputs) > 0:
        G_LOGGER.critical(
            f"Cannot create a CompiledNode with more than one source. This node already has one input: {self.inputs}"
        )
    super().add_input(node)
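
A hedged illustration of the single-source constraint above; the constructor arguments mirror the ones used in Example No. 13, but the objects themselves are invented for illustration:

obj = CompiledNode("build/utils.o", source_node, compiler, include_dirs, flags)
# The source given to the constructor presumably becomes the node's only input;
# attaching a second source goes through G_LOGGER.critical and raises.
obj.add_input(another_source_node)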
Example No. 10
def test_critical_raises_exception(self):
    message = "A test message"
    with pytest.raises(SBuildrException) as exc:
        G_LOGGER.critical(message)
    assert exc.match(message)
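
The test relies on critical raising after logging. A minimal sketch of that behaviour, assuming nothing about the real G_LOGGER beyond what the test exercises; the class and exception names mirror the originals, but the implementation is illustrative:

class SBuildrException(Exception):
    pass

class Logger:
    def critical(self, message: str):
        # Log the message, then raise so execution cannot continue past a fatal error.
        print(f"CRITICAL: {message}")
        raise SBuildrException(message)

G_LOGGER = Logger()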
Example No. 11
    def run_tests(self,
                  targets: List[ProjectTarget] = None,
                  profile_names: List[str] = None):
        """
        Run tests from this project. Runs all tests from the project for all profiles by default.

        :param targets: The test targets to run. Raises an exception if a target is not a test target. Defaults to all test targets.
        :param profile_names: The profiles for which to run the tests. Defaults to all profiles.
        """
        # Apply defaults before validating, so that targets=None (the default) does not fail.
        tests = utils.default_value(targets, self.test_targets())
        profile_names = utils.default_value(profile_names,
                                            self.all_profile_names())
        for target in tests:
            if target.name not in self.tests:
                G_LOGGER.critical(
                    f"Could not find test: {target.name} in project.\n\tAvailable tests:\n\t\t{list(self.tests.keys())}"
                )

        if not tests:
            G_LOGGER.warning(
                "No tests found. Have you registered tests using project.test()?"
            )
            return

        class TestResult:
            def __init__(self):
                self.failed = 0
                self.passed = 0

        def run_test(test, prof_name):
            G_LOGGER.log(f"\nRunning test: {test}, for profile: {prof_name}",
                         colors=[Color.BOLD, Color.GREEN])
            status = self._run_linked_node(test[prof_name])
            if status.returncode:
                G_LOGGER.log(
                    f"\nFAILED {test}, for profile: {prof_name}:\n{test[prof_name].path}",
                    colors=[Color.BOLD, Color.RED],
                )
                test_results[prof_name].failed += 1
                failed_targets[prof_name].add(test[prof_name].name)
            else:
                G_LOGGER.log(f"\nPASSED {test}",
                             colors=[Color.BOLD, Color.GREEN])
                test_results[prof_name].passed += 1

        test_results = defaultdict(TestResult)
        failed_targets = defaultdict(set)
        for prof_name in profile_names:
            G_LOGGER.log(f"\n{utils.wrap_str(f' Profile: {prof_name} ')}",
                         colors=[Color.BOLD, Color.GREEN])
            for test in tests:
                run_test(test, prof_name)

        # Display summary
        G_LOGGER.log(f"\n{utils.wrap_str(f' Test Results Summary ')}\n",
                     colors=[Color.BOLD, Color.GREEN])
        for prof_name, result in test_results.items():
            if result.passed or result.failed:
                G_LOGGER.log(f"Profile: {prof_name}",
                             colors=[Color.BOLD, Color.GREEN])
                if result.passed:
                    G_LOGGER.log(f"\tPASSED {plural('test', result.passed)}",
                                 colors=[Color.BOLD, Color.GREEN])
                if result.failed:
                    G_LOGGER.log(
                        f"\tFAILED {plural('test', result.failed)}: {failed_targets[prof_name]}",
                        colors=[Color.BOLD, Color.RED],
                    )
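
A hedged usage sketch based on the signature above; the target and profile names are invented for illustration:

# Run every registered test for every profile:
project.run_tests()

# Run one test target for selected profiles only:
project.run_tests(targets=[test_math], profile_names=["debug", "release"])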
Example No. 12
    def build(self,
              targets: List[ProjectTarget] = None,
              profile_names: List[str] = None) -> float:
        """
        Builds the specified targets for this project. Configuration should be run prior to calling this function.

        :param targets: The targets to build. Defaults to all targets.
        :param profile_names: The profiles for which to build the targets. Defaults to all profiles.

        :returns: Time elapsed during the build.
        """
        targets = utils.default_value(targets, self.all_targets())
        profile_names = utils.default_value(profile_names,
                                            self.all_profile_names())
        G_LOGGER.info(
            f"Building targets: {[target.name for target in targets]} for profiles: {profile_names}"
        )
        G_LOGGER.debug(f"Targets: {targets}")

        def select_nodes(targets: List[ProjectTarget],
                         profile_names: List[str]) -> List[Node]:
            # Create all required profile build directories and populate nodes.
            nodes = []
            for prof_name in profile_names:
                if prof_name not in self.profiles:
                    G_LOGGER.critical(
                        f"Profile {prof_name} does not exist in the project. Available profiles: {self.all_profile_names()}"
                    )
                # Populate nodes.
                for target in targets:
                    if prof_name in target:
                        node = target[prof_name]
                        G_LOGGER.verbose(
                            f"For target: {target}, profile: {prof_name}, found path: {node.path}"
                        )
                        nodes.append(node)
                    else:
                        G_LOGGER.debug(
                            f"Skipping target: {target.name} for profile: {prof_name}, as it does not exist."
                        )
            return nodes

        nodes = select_nodes(targets, profile_names)
        if not nodes:
            # Nothing to build for the requested targets and profiles.
            return 0.0

        # Create all required build directories.
        self.files.mkdir(self.common_build_dir)
        profile_build_dirs = [
            self.profiles[prof_name].build_dir for prof_name in profile_names
        ]
        for dir in profile_build_dirs:
            self.files.mkdir(dir)
        G_LOGGER.verbose(
            f"Created build directories: {self.common_build_dir}, {profile_build_dirs}"
        )

        if not self.backend:
            G_LOGGER.critical(
                "Backend has not been configured. Please call `configure()` prior to attempting to build."
            )
        status, time_elapsed = self.backend.build(nodes)
        if status.returncode:
            G_LOGGER.critical(
                "Failed to build. Reconfiguring the project or running a clean build may resolve this."
            )
        G_LOGGER.info(
            f"Built {plural('target', len(targets))} for {plural('profile', len(profile_names))} in {time_elapsed} seconds."
        )
        return time_elapsed
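
A hedged usage sketch of the configure-then-build flow that the docstring and error messages describe, assuming configure() applies defaults similar to build(); the target and profile names are invented for illustration:

project.configure()                     # sets up the backend; required before build()
elapsed = project.build()               # all targets, all profiles
elapsed = project.build(targets=[my_lib], profile_names=["release"])
print(f"Build finished in {elapsed} seconds")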
Example No. 13
    def _target(
        self,
        name: str,
        ext_path: str,
        sources: List[str],
        flags: BuildFlags,
        libs: List[Union[DependencyLibrary, ProjectTarget, Library]],
        compiler: compiler.Compiler,
        include_dirs: List[str],
        linker: linker.Linker,
        depends: List[Dependency],
        internal: bool,
        is_lib: bool,
    ) -> ProjectTarget:

        if not all(isinstance(lib, (ProjectTarget, Library, DependencyLibrary)) for lib in libs):
            G_LOGGER.critical(
                "Libraries must be instances of either sbuildr.Library, sbuildr.dependencies.DependencyLibrary or sbuildr.ProjectTarget"
            )

        if os.path.basename(ext_path) != ext_path:
            G_LOGGER.critical(
                f"Target: {ext_path} looks like a path. Target names should not contain characters that are unsupported by the filesystem."
            )

        dependencies: List[Dependency] = [] + depends  # Create copy
        for lib in libs:
            if isinstance(lib, DependencyLibrary):
                dependencies.append(lib.dependency)
                # Add all Library targets from dependencies to the file manager's graph, since they are independent of profiles
                # TODO: Add `library` function to FileManager
                self.files.graph.add(lib.library)
                G_LOGGER.verbose(f"Adding {lib.library} to file manager.")
        # Inherit dependencies from any input libraries as well
        for lib in libs:
            if isinstance(lib, ProjectTarget):
                dependencies.extend(lib.dependencies)

        libs: List[Union[ProjectTarget, Library]] = [
            lib.library if isinstance(lib, DependencyLibrary) else lib
            for lib in libs
        ]

        source_nodes: List[CompiledNode] = [
            self.files.source(path) for path in sources
        ]
        G_LOGGER.verbose(
            f"For sources: {sources}, found source paths: {source_nodes}")

        target = ProjectTarget(name=name,
                               internal=internal,
                               is_lib=is_lib,
                               dependencies=dependencies)
        for profile_name, profile in self.profiles.items():
            # Convert all libraries to nodes. These will be inputs to the target.
            # Profile will later convert them to library names and directories.
            lib_nodes: List[Library] = [
                lib[profile_name] if isinstance(lib, ProjectTarget) else lib
                for lib in libs
            ]
            input_nodes = list(lib_nodes)
            G_LOGGER.verbose(
                f"Library inputs for target: {name} are: {input_nodes}")

            # Per-target flags always overwrite profile flags. Use a distinct name so that
            # flags do not accumulate across profiles on successive loop iterations.
            combined_flags = profile.flags + flags

            # First, add or retrieve object nodes for each source.
            for source_node in source_nodes:
                obj_path = os.path.join(
                    self.common_build_dir,
                    f"{os.path.splitext(os.path.basename(source_node.path))[0]}.o"
                )
                # User-defined includes are always prepended to the ones deduced for SourceNodes.
                obj_node = CompiledNode(obj_path, source_node, compiler,
                                        include_dirs, combined_flags)
                input_nodes.append(profile.graph.add(obj_node))

            # Hard links are needed because during linkage, the library must have a clean name.
            hashed_path = os.path.join(self.common_build_dir, ext_path)
            path = os.path.join(profile.build_dir,
                                paths.insert_suffix(ext_path, profile.suffix))
            target[profile_name] = profile.graph.add(
                LinkedNode(path,
                           input_nodes,
                           linker,
                           hashed_path=hashed_path,
                           flags=combined_flags))
            G_LOGGER.debug(
                f"Adding target: {name}, with hashed path: {hashed_path}, public path: {path} to profile: {profile_name}"
            )
        return target
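
A hedged sketch of how this private helper is typically reached through public target-creation calls, assuming wrappers like project.library() and project.executable() forward to _target() with is_lib set accordingly; the exact public signatures are not shown in the snippet above and the arguments are illustrative:

# Presumably forwards to _target(..., is_lib=True):
libmath = project.library("math", sources=["src/factorial.cpp", "src/fibonacci.cpp"],
                          libs=[Library("stdc++")])

# Presumably forwards to _target(..., is_lib=False):
test_math = project.executable("test_math", sources=["tests/test_math.cpp"], libs=[libmath])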