Example #1
    def _finalise_logging(self):
        # remove the file logger we added during setup; the 'fab' logger
        # should have exactly one RotatingFileHandler attached
        fab_logger = logging.getLogger('fab')
        log_file_handlers = list(by_type(fab_logger.handlers, RotatingFileHandler))
        assert len(log_file_handlers) == 1
        fab_logger.removeHandler(log_file_handlers[0])
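The by_type helper used above (and in every example below) simply filters an iterable by isinstance. A minimal sketch of the idea; fab's actual implementation may differ in detail:

from typing import Iterable, Type

def by_type(iterable: Iterable, cls: Type):
    # keep only the items that are instances of the given type
    return filter(lambda item: isinstance(item, cls), iterable)

The teardown above uses it to pick the single RotatingFileHandler out of the 'fab' logger's handler list before removing it.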
Example #2
    def run(self, artefact_store, config):
        """
        Uses multiprocessing, unless disabled in the *config*.

        :param artefact_store:
            Contains artefacts created by previous Steps, and where we add our new artefacts.
            This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process.
        :param config:
            The :class:`fab.build_config.BuildConfig` object where we can read settings
            such as the project workspace folder or the multiprocessing flag.

        """
        super().run(artefact_store, config)

        # get all the source to compile, for all build trees, into one big lump
        build_lists: Dict[str, List] = self.source_getter(artefact_store)
        to_compile = sum(build_lists.values(), [])
        logger.info(f"compiling {len(to_compile)} C files")

        # compile everything in one go
        results = self.run_mp(items=to_compile, func=self._compile_file)
        check_for_errors(results, caller_label=self.name)
        compiled_c = by_type(results, CompiledFile)

        lookup = {compiled_file.analysed_file: compiled_file for compiled_file in compiled_c}
        logger.info(f"compiled {len(lookup)} c files")

        # add the targets' new object files to the artefact store
        target_object_files = artefact_store.setdefault(COMPILED_FILES, defaultdict(set))
        for root, source_files in build_lists.items():
            new_objects = [lookup[af].output_fpath for af in source_files]
            target_object_files[root].update(new_objects)
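A toy sketch of the bookkeeping this step performs, with made-up file names: build_lists, the flattening via sum, and the per-target grouping mirror the code above, while everything else is illustrative.

from collections import defaultdict

# hypothetical build lists: one list of source files per build target
build_lists = {'exe_a': ['a.c', 'shared.c'], 'exe_b': ['b.c', 'shared.c']}

# flatten all targets' sources into one big list, as the step does
to_compile = sum(build_lists.values(), [])

# pretend each source compiled to an object file, then group per target
lookup = {src: src.replace('.c', '.o') for src in to_compile}
target_object_files = defaultdict(set)
for root, source_files in build_lists.items():
    target_object_files[root].update(lookup[src] for src in source_files)

print(dict(target_object_files))
# {'exe_a': {'a.o', 'shared.o'}, 'exe_b': {'b.o', 'shared.o'}} (set order may vary)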
Example #3
def check_for_errors(results, caller_label=None):
    """
    Check an iterable of results for exceptions; if any are found, raise a single
    RuntimeError summarising them all.

    This is a helper function for steps which use multiprocessing,
    getting multiple results back from :meth:`~fab.steps.Step.run_mp` all in one go.

    :param results:
        An iterable of results.
    :param caller_label:
        Optional human-friendly name of the caller for logging.

    """
    caller_label = f'during {caller_label}' if caller_label else ''

    exceptions = list(by_type(results, Exception))
    if exceptions:
        formatted_errors = "\n\n".join(map(str, exceptions))
        raise RuntimeError(
            f"{formatted_errors}\n\n{len(exceptions)} error(s) found {caller_label}"
        )
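A hedged usage sketch. It assumes the by_type sketch from Example #1, and results in the style run_mp returns, where failures arrive as Exception instances rather than being raised:

results = ['ok.o', ValueError('bad flag'), OSError('missing header'), 'also_ok.o']

try:
    check_for_errors(results, caller_label='compile c')
except RuntimeError as err:
    print(err)
    # bad flag
    #
    # missing header
    #
    # 2 error(s) found during compile c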
Example #4
    def run(self, artefact_store, config):
        """
        Uses multiprocessing, unless disabled in the *config*.

        :param artefact_store:
            Contains artefacts created by previous Steps, and where we add our new artefacts.
            This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process.
        :param config:
            The :class:`fab.build_config.BuildConfig` object where we can read settings
            such as the project workspace folder or the multiprocessing flag.

        """
        super().run(artefact_store, config)

        files = list(self.source_getter(artefact_store))
        logger.info(f'preprocessing {len(files)} files')

        results = self.run_mp(items=files, func=self._process_artefact)
        check_for_errors(results, caller_label=self.name)

        log_or_dot_finish(logger)
        artefact_store[self.output_collection] = list(by_type(results, Path))
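For this pattern to work, the per-file worker must return its result rather than raise: a Path on success, the Exception on failure, so that check_for_errors can report failures and by_type(results, Path) keeps only the successes. A hypothetical worker in that shape; this is an assumption about the wiring, not fab's confirmed internals:

from pathlib import Path

def _process_artefact(fpath: Path):
    try:
        output_fpath = fpath.with_suffix('.pre')  # hypothetical output name
        # ... run the real preprocessor, writing output_fpath ...
        return output_fpath
    except Exception as err:
        # return, don't raise: the caller collects errors across all workers
        return err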
Example #5
    def run(self, artefact_store, config):
        """
        Uses multiprocessing, unless disabled in the *config*.

        :param artefact_store:
            Contains artefacts created by previous Steps, and where we add our new artefacts.
            This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process.
        :param config:
            The :class:`fab.build_config.BuildConfig` object where we can read settings
            such as the project workspace folder or the multiprocessing flag.

        """
        super().run(artefact_store, config)

        # get all the source to compile, for all build trees, into one big lump
        build_lists: Dict[str, List] = self.source_getter(artefact_store)
        to_compile = sum(build_lists.values(), [])
        logger.info(f"compiling {len(to_compile)} fortran files")

        # compile everything in multiple passes
        all_compiled: List[CompiledFile] = []  # todo: use set?
        already_compiled_files: Set[Path] = set()  # a quick lookup

        per_pass = []
        while to_compile:

            compile_next = self.get_compile_next(already_compiled_files,
                                                 to_compile)

            logger.info(
                f"\ncompiling {len(compile_next)} of {len(to_compile)} remaining files"
            )
            results_this_pass = self.run_mp(items=compile_next,
                                            func=self.compile_file)
            check_for_errors(results_this_pass, caller_label=self.name)

            # check what we did compile
            compiled_this_pass: Set[CompiledFile] = set(
                by_type(results_this_pass, CompiledFile))
            per_pass.append(len(compiled_this_pass))
            if len(compiled_this_pass) == 0:
                logger.error("nothing compiled this pass")
                break

            # remove compiled files from list
            logger.debug(f"compiled {len(compiled_this_pass)} files")

            # results are not the same instances as passed in, due to mp copying
            compiled_fpaths = {
                i.analysed_file.fpath
                for i in compiled_this_pass
            }
            all_compiled.extend(compiled_this_pass)
            already_compiled_files.update(compiled_fpaths)

            # remove from remaining to compile
            to_compile = set(
                filter(lambda af: af.fpath not in compiled_fpaths, to_compile))

        log_or_dot_finish(logger)
        logger.debug(f"compiled per pass {per_pass}")
        logger.info(f"total fortran compiled {sum(per_pass)}")

        if to_compile:
            logger.error(
                f"there were still {len(to_compile)} files left to compile")
            for af in to_compile:
                logger.debug(af.fpath)
            exit(1)

        # add the targets' new object files to the artefact store
        lookup = {
            compiled_file.analysed_file.fpath: compiled_file
            for compiled_file in all_compiled
        }
        target_object_files = artefact_store.setdefault(
            COMPILED_FILES, defaultdict(set))
        for root, source_files in build_lists.items():
            new_objects = [
                lookup[af.fpath].output_fpath for af in source_files
            ]
            target_object_files[root].update(new_objects)
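The multiple passes exist because Fortran modules must be compiled before the files that use them: each pass compiles whatever has all its prerequisites ready, until nothing is left or nothing can proceed. A self-contained toy of that selection logic; SourceFile and module_deps are hypothetical stand-ins, not fab's API:

from dataclasses import dataclass
from typing import Set

@dataclass(frozen=True)
class SourceFile:
    fpath: str
    module_deps: frozenset = frozenset()

def get_compile_next(already_compiled: Set[str], to_compile):
    # a file is ready once everything it depends on has been compiled
    return [f for f in to_compile
            if all(dep in already_compiled for dep in f.module_deps)]

# c depends on b, b depends on a: compiles in three passes
files = {SourceFile('a.f90'),
         SourceFile('b.f90', frozenset({'a.f90'})),
         SourceFile('c.f90', frozenset({'b.f90'}))}
done: Set[str] = set()
while files:
    batch = get_compile_next(done, files)
    if not batch:
        break  # circular or unsatisfiable dependencies: bail out
    print('pass:', sorted(f.fpath for f in batch))
    done.update(f.fpath for f in batch)
    files -= set(batch)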