Example 1
    def visit(self, candidate: Path) -> List[Path]:
        new_candidates: List[Path] = []
        try:
            task_class = self._extension_map[candidate.suffix]
            reader: TextReader = FileTextReader(candidate)
            hasher: TextReaderAdler32 = TextReaderAdler32(reader)

            if issubclass(task_class, Analyser):
                task: Task = task_class(hasher, self._state)
            elif issubclass(task_class, SingleFileCommand):
                flags = self._command_flags_map.get(task_class, [])
                task = CommandTask(
                    task_class(Path(hasher.filename), self._workspace, flags))
            else:
                message = \
                    f'Unhandled class "{task_class}" in extension map.'
                raise TypeError(message)
            # TODO: Make SQLite connection multiprocess safe
            # self._queue.add_to_queue(task)
            task.run()
            new_candidates.extend(task.products)
            # TODO: The hasher part here likely needs to be
            #       moved once the task is run by the queue
            for _ in hasher.line_by_line():
                pass  # Make sure we've read the whole file.
            file_info = FileInfoDatabase(self._state)
            file_info.add_file_info(candidate, hasher.hash)
        except KeyError:
            # No task is registered for this file's extension; skip the file
            pass
        return new_candidates
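
The hashing in Example 1 works by wrapping a plain file reader in a checksumming decorator: TextReaderAdler32 passes lines through unchanged while accumulating a checksum, and the trailing loop over line_by_line() ensures the whole file has been consumed before hasher.hash is stored. Below is a minimal sketch of such a pair; only the TextReader, FileTextReader and TextReaderAdler32 names and the line_by_line(), hash and filename members are taken from the example above, the rest is illustrative and may differ from the real classes.

import zlib
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Iterator, Union


class TextReader(ABC):
    @property
    @abstractmethod
    def filename(self) -> Union[Path, str]:
        ...

    @abstractmethod
    def line_by_line(self) -> Iterator[str]:
        ...


class FileTextReader(TextReader):
    def __init__(self, path: Path):
        self._path = path

    @property
    def filename(self) -> Path:
        return self._path

    def line_by_line(self) -> Iterator[str]:
        with self._path.open('rt') as handle:
            yield from handle


class TextReaderAdler32(TextReader):
    """Passes text through unchanged while accumulating an Adler-32 hash."""
    def __init__(self, source: TextReader):
        self._source = source
        self._hash = 1  # Adler-32 checksums start from 1

    @property
    def filename(self) -> Union[Path, str]:
        return self._source.filename

    @property
    def hash(self) -> int:
        return self._hash

    def line_by_line(self) -> Iterator[str]:
        for line in self._source.line_by_line():
            self._hash = zlib.adler32(line.encode(), self._hash)
            yield line
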
Example 2
    def run(self, stream=sys.stdout):
        file_view = FileInfoDatabase(self._state)
        print("File View", file=stream)
        for file_info in file_view:
            print(f"  File   : {file_info.filename}", file=stream)
            # Where files are generated in the working directory by third
            # party tools, we cannot guarantee the hashes will be stable
            if file_info.filename.match(f'{self._workspace}/*'):
                print('    Hash : --hidden-- (generated file)', file=stream)
            else:
                print(f"    Hash : {file_info.adler32}", file=stream)

        fortran_view = FortranWorkingState(self._state)
        header = False
        for info in fortran_view:
            if not header:
                print("Fortran View", file=stream)
                header = True
            print(f"  Program unit    : {info.unit.name}", file=stream)
            print(f"    Found in      : {info.unit.found_in}", file=stream)
            print(f"    Prerequisites : {', '.join(info.depends_on)}",
                  file=stream)

        c_view = CWorkingState(self._state)
        header = False
        for info in c_view:
            if not header:
                print("C View", file=stream)
                header = True
            print(f"  Symbol          : {info.symbol.name}", file=stream)
            print(f"    Found in      : {info.symbol.found_in}", file=stream)
            print(f"    Prerequisites : {', '.join(info.depends_on)}",
                  file=stream)
Example 3
    def run(self, source: Path):
        # Start the task queue, then walk the source tree so the visitor
        # can add newly discovered files to it
        self._queue.run()

        visitor = SourceVisitor(self._extend_queue)
        descender = TreeDescent(source)
        descender.descend(visitor)

        # Make sure all queued work has completed, then stop the queue
        self._queue.check_queue_done()
        self._queue.shutdown()

        file_db = FileInfoDatabase(self._state)
        for file_info in file_db:
            print(file_info.filename)
            # Where files are generated in the working directory by third
            # party tools, we cannot guarantee the hashes will be stable
            if file_info.filename.match(f'{self._workspace}/*'):
                print('    hash: --hidden-- (generated file)')
            else:
                print(f'    hash: {file_info.adler32}')

        fortran_db = FortranWorkingState(self._state)
        for fortran_info in fortran_db:
            print(fortran_info.unit.name)
            print('    found in: ' + str(fortran_info.unit.found_in))
            print('    depends on: ' + str(fortran_info.depends_on))

        c_db = CWorkingState(self._state)
        for c_info in c_db:
            print(c_info.symbol.name)
            print('    found in: ' + str(c_info.symbol.found_in))
            print('    depends on: ' + str(c_info.depends_on))
Example 4
    def test_getter(self, tmp_path: Path):
        test_unit = FileInfoDatabase(SqliteStateDatabase(tmp_path))
        # Asking about an unknown file is an error...
        with pytest.raises(FabException):
            test_unit.get_file_info(Path('anything'))

        # ...but once a file has been added its details can be retrieved
        test_unit.add_file_info(Path('teapot.c'), 31337)
        assert test_unit.get_file_info(Path('teapot.c')) \
            == FileInfo(Path('teapot.c'), 31337)
Example 5
    def test_iteration(self, tmp_path: Path):
        test_unit = FileInfoDatabase(SqliteStateDatabase(tmp_path))
        assert list(iter(test_unit)) == []

        test_unit.add_file_info(Path('foo.f90'), 1234)
        assert list(iter(test_unit)) == [FileInfo(Path('foo.f90'), 1234)]

        test_unit.add_file_info(Path('bar/baz.f90'), 5786)
        assert list(iter(test_unit)) == [FileInfo(Path('bar/baz.f90'), 5786),
                                         FileInfo(Path('foo.f90'), 1234)]

        # Add a new version of an existing file
        test_unit.add_file_info(Path('foo.f90'), 987)
        assert list(iter(test_unit)) == [FileInfo(Path('bar/baz.f90'), 5786),
                                         FileInfo(Path('foo.f90'), 987)]
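
Examples 4 and 5 pin down the observable behaviour of FileInfoDatabase: add_file_info() inserts or replaces a record, get_file_info() raises FabException for an unknown file, and iteration yields FileInfo records ordered by filename. A minimal in-memory stand-in with the same behaviour might look like the sketch below; the real class is backed by SqliteStateDatabase, so the storage and the exception message here are purely illustrative.

from dataclasses import dataclass
from pathlib import Path
from typing import Dict, Iterator


class FabException(Exception):
    pass


@dataclass(frozen=True)
class FileInfo:
    filename: Path
    adler32: int


class InMemoryFileInfoDatabase:
    """Illustrative stand-in for FileInfoDatabase, keyed by filename."""
    def __init__(self) -> None:
        self._files: Dict[Path, int] = {}

    def add_file_info(self, filename: Path, adler32: int) -> None:
        # Re-adding an existing file simply records its new hash
        self._files[filename] = adler32

    def get_file_info(self, filename: Path) -> FileInfo:
        if filename not in self._files:
            raise FabException(f'No information about file: {filename}')
        return FileInfo(filename, self._files[filename])

    def __iter__(self) -> Iterator[FileInfo]:
        # Yield records in filename order, as the iteration test expects
        for filename in sorted(self._files):
            yield FileInfo(filename, self._files[filename])
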
Example 6
    def __init__(
        self,
        parent: ttk.Notebook,
        database: StateDatabase,
    ):
        super().__init__(parent)

        file_db = FileInfoDatabase(database)

        # List of known files on the left, with details of the currently
        # selected file alongside it
        self._file_list = FileListFrame(self, file_db)
        self._file_list.pack(side=tk.LEFT,
                             padx=5,
                             pady=5,
                             fill=tk.BOTH,
                             expand=True)

        self._file_details = FileInfoFrame(self, file_db)
        self._file_details.pack(side=tk.LEFT, padx=10, pady=10, fill=tk.Y)

        # Populate the details pane from the initial selection
        self.select_file(self._file_list.get_selected())
Example 7
    def process(self,
                artifact: Artifact,
                discovery: Dict[str, DiscoveryState],
                objects: List[Artifact],
                lock: LockT) -> List[Artifact]:

        new_artifacts: List[Artifact] = []
        new_discovery: Dict[str, DiscoveryState] = {}
        new_objects: List[Artifact] = []
        # Identify tasks that are completely new
        if (artifact.state is New
                and artifact.filetype is Unknown):
            # Use the pathmap list to work out the
            # filetype and starting state
            new_artifact = None
            for pathmap in self._pathmaps:
                if artifact.location in pathmap:
                    new_artifact = Artifact(artifact.location,
                                            pathmap.filetype,
                                            pathmap.state)
            # Assuming we found a match and were able
            # to create the artifact, return it so that
            # it can be added to the queue
            if new_artifact is not None:
                # Also store its hash in the file database
                file_info = FileInfoDatabase(self._database)
                file_info.add_file_info(artifact.location,
                                        new_artifact.hash)
                new_artifacts.append(new_artifact)

        elif artifact.state is Analysed:

            # Work out whether this artifact needs to be
            # included in the build or not - if any of its
            # definitions are mentioned in the (shared)
            # discovery mapping, or if it is defining
            # the target of the build then it should be included

            # TODO: Looping through a list of what could
            # eventually contain every unit/symbol in the build has
            # the potential to become an issue for performance.
            # Longer term we probably want to drop using the shared
            # discovery array in favour of database lookups
            required = False
            for definition in artifact.defines:
                # Is this the target?
                if (definition == self._target
                        or definition in discovery):
                    required = True
                    break

            if required:
                # Update the discovery list to indicate that
                # the definitions from this Artifact are present
                # (but not yet compiled)
                for definition in artifact.defines:
                    if definition not in discovery:
                        new_discovery[definition] = DiscoveryState.SEEN

                # Now check whether the Artifact's dependencies
                # have already been seen and compiled
                compiled = [False]*len(artifact.depends_on)
                for idep, dependency in enumerate(artifact.depends_on):
                    # The compiled check only applies to symbol (str)
                    # dependencies; Path dependencies are skipped here
                    if isinstance(dependency, Path):
                        continue
                    if dependency in discovery:
                        # Are the dependencies compiled?
                        if discovery[dependency] == DiscoveryState.COMPILED:
                            compiled[idep] = True
                    else:
                        # If the dependency isn't in the list at all yet
                        # then add an entry so the system knows we are
                        # expecting it later (for the above check)
                        new_discovery[dependency] = DiscoveryState.AWARE_OF

                # If the dependencies are satisfied (or there weren't
                # any) then this file can be compiled now
                if len(compiled) == 0 or all(compiled):
                    for definition in artifact.defines:
                        task = self._taskmap[(artifact.filetype,
                                              artifact.state)]
                        new_artifacts.extend(task.run([artifact]))
                        new_discovery[definition] = DiscoveryState.COMPILED
                else:
                    # If the dependencies weren't all satisfied, put the
                    # artifact back on the queue for another pass later
                    new_artifacts.append(artifact)
            else:
                # If it wasn't required it could be later, so
                # put it back on the queue, unless the target
                # has been compiled, in which case it wasn't
                # needed at all!
                if (self._target not in discovery
                        or discovery[self._target] != DiscoveryState.COMPILED):
                    new_artifacts.append(artifact)

        elif artifact.state is Compiled:
            # Begin populating the list for linking
            new_objects.append(artifact)
            # But do not return a new artifact - this object
            # is "done" as far as the processing is concerned

            # But, if this is the file containing the target
            # that means everything must have been compiled
            # by this point; so we can do the linking step
            if self._target in artifact.defines:
                task = self._taskmap[(artifact.filetype,
                                      artifact.state)]
                new_artifacts.extend(task.run(objects + [artifact]))

        elif artifact.state is Linked:
            # Nothing to do at present with the final linked
            # executable, but included here for completeness
            pass
        else:
            # If the object specifies any paths in its dependencies
            # then these must exist before it can be processed
            # TODO: This needs more thorough logic and to come from
            # the database eventually
            ready = True
            for dependency in artifact.depends_on:
                if isinstance(dependency, Path):
                    if not dependency.exists():
                        ready = False

            if ready:
                # An artifact with a filetype and state set
                # will have an appropriate task that should
                # be used to run it (though unlike the old
                # implementation this is probably returning
                # the instance of the Task not the class)
                if ((artifact.filetype, artifact.state)
                        in self._taskmap):
                    task = self._taskmap[(artifact.filetype,
                                          artifact.state)]

                    new_artifacts.extend(task.run([artifact]))
            else:
                new_artifacts.append(artifact)

        # Update shared arrays
        lock.acquire()
        objects.extend(new_objects)
        for key, value in new_discovery.items():
            discovery[key] = value
        lock.release()

        return new_artifacts
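
The process() method above drives each artifact through the shared discovery mapping in three stages: a dependency can be merely expected, seen after analysis, or fully compiled. A minimal sketch of those states is given below; the real DiscoveryState enum lives elsewhere in the codebase and may define more than is shown here.

from enum import Enum, auto


class DiscoveryState(Enum):
    AWARE_OF = auto()  # A dependency has been referenced but not yet analysed
    SEEN = auto()      # The defining artifact has been analysed
    COMPILED = auto()  # The definition has been compiled into an object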