Example #1
def move_file(source, dest):
    log('Moving: {} -> {}', relpath(source), relpath(dest))

    # Can't prevent race conditions. But this should catch logic bugs.
    assert not dest.exists()

    repo.rename_with_cache_hint(source, dest)
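
These snippets assume helpers from the surrounding project: a str.format-style log() function, a relpath() helper that shortens paths for display, and (in this example) a repo object from the enclosing scope. A minimal sketch of the two logging helpers, assuming those semantics:

import os
import sys

def log(message, *args):
    # str.format-style interpolation, matching log('{} -> {}', a, b) above.
    print(message.format(*args), file=sys.stderr)

def relpath(path):
    # Render paths relative to the working directory for readable output.
    return os.path.relpath(path)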
Example #2
import sys

def entry_point():
    try:
        # Forward the parsed command line arguments to main() as keywords.
        main(**vars(parse_args()))
    except KeyboardInterrupt:
        log('Operation interrupted.')
        sys.exit(1)
    except UserError as e:
        log('error: {}', e)
        sys.exit(2)
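
Several of these examples raise or catch UserError, whose definition is not shown. A minimal compatible sketch, assuming it formats its message the same way log() does:

class UserError(Exception):
    # Matches call sites like UserError('Path does not exist: {}', path).
    def __init__(self, message, *args):
        super().__init__(message.format(*args))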
Example #3
    def update(self, *, file_checked_progress_fn, data_read_progress_fn):
        """
        Update the hashes of all files in the tree and remove entries for
        files which do not exist anymore.
        """

        # We can't trust hashes computed for files whose mtime is not
        # smaller than the current time: such a file could still be
        # written to without its mtime visibly changing. If we hash such
        # a file, we store 0 as its mtime, which forces re-computing the
        # hash the next time the tree is scanned.
        current_mtime = self._get_current_mtime()

        # List of updated entries.
        new_entries = []

        # Used to look up cache entries by path and mtime while scanning.
        # This includes records from an existing write log. Entries of
        # unchanged paths are copied to new_entries.
        entries_by_path_mtime = {
            (i.path, i.mtime): i
            for i in self._store.get() + self._write_log.records
        }

        for path in iter_regular_files(self._root_path, self._filter_fn):
            # TODO: We're stat'ing the file (at least) a second time;
            # iter_regular_files() already had to stat it.
            stat = _stat_path(path)
            mtime = stat.st_mtime

            # Find a cache entry with correct path and mtime.
            entry = entries_by_path_mtime.get((path, mtime))

            # Hash the file and create a new entry if none was found.
            if entry is None:
                # Force hashing the file again when the mtime is too recent.
                if mtime >= current_mtime:
                    mtime = 0

                # Only log hashing of large files (at least 16 MiB).
                if stat.st_size >= 1 << 24:
                    log('Hashing {} ({}) ...', relpath(path),
                        format_size(stat.st_size))

                file_hash = file_digest(
                    path, progress_fn=data_read_progress_fn)
                entry = CachedFile(path, mtime, file_hash)

                # We're using the write log only to prevent losing the work
                # of hashing files.
                self._write_log.append(entry)

            new_entries.append(entry)
            file_checked_progress_fn()

        # Save the new list of entries.
        self._store.set(new_entries)
        self._write_log.flush()
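
update() delegates the hashing itself to file_digest(), which is not shown. A plausible sketch, assuming a chunked read loop that reports the number of bytes read through progress_fn; the chunk size and hash algorithm here are assumptions:

import hashlib

def file_digest(path, *, progress_fn, chunk_size=1 << 20):
    # Hash the file in 1 MiB chunks so progress can be reported as we go.
    digest = hashlib.sha256()
    with open(path, 'rb') as file:
        while True:
            chunk = file.read(chunk_size)
            if not chunk:
                break
            digest.update(chunk)
            progress_fn(len(chunk))
    return digest.hexdigest()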
Example #4
def initialize_repository(root_dir):
    if not root_dir.exists():
        raise UserError('Path does not exist: {}', root_dir)
    elif not root_dir.is_dir():
        raise UserError('Path is not a directory: {}', root_dir)

    filemaster_dir = root_dir / filemaster_dir_name

    if filemaster_dir.exists():
        raise UserError(
            'Cannot create directory at {} because the path already exists.',
            filemaster_dir)

    # Initialize an empty repository.
    filemaster_dir.mkdir()
    initialize_file_cache(filemaster_dir / _file_cache_store_name)
    initialize_file_index(filemaster_dir / _file_index_store_name)

    log('Initialized empty database at {}.', filemaster_dir)
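
A hypothetical usage sketch; root_dir must be a pathlib.Path, since the code calls .exists() and .is_dir() and joins it with the / operator:

from pathlib import Path

try:
    initialize_repository(Path.cwd())
except UserError as e:
    log('error: {}', e)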
Example #5
# The body yields once, so the contextmanager decorator is needed for
# this to return a context manager as the docstring describes.
from contextlib import contextmanager

@contextmanager
def with_repository(update_cache, *, root_dir=None, clear_cache=False):
    """
    Return a context manager yielding a `Repository` instance for the
    current directory or the specified directory, raising a `UserError`
    if no repository is found.

    The updated index of the repository is saved when the context is left,
    unless an error is thrown.

    :param update_cache:
        Whether to update the cache before returning the repository.
    """

    # Uses root_dir, if specified, otherwise searches for a repository.
    root_dir = find_filemaster_root(root_dir)
    filemaster_dir = root_dir / filemaster_dir_name
    cache_store_path = filemaster_dir / _file_cache_store_name

    with with_file_cache(cache_store_path, root_dir, file_filter_fn) as cache:
        index = FileIndex(store_path=filemaster_dir / _file_index_store_name)

        if clear_cache:
            # Log a different message when clearing the cache, depending
            # on whether it will be updated again afterwards.
            if update_cache:
                log('Recreating the file cache ...')
            else:
                log('Clearing the file cache ...')

            cache.clear()

        if update_cache:
            update_cache_with_status(cache)

        aggregated_files = index.aggregate_files(cache.get_cached_files())
        repo = Repository(root_dir, aggregated_files, cache)

        yield repo

        index.set([i.index_entry for i in repo.aggregated_files])
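
With the contextmanager decorator in place, the function works as its docstring describes. A hypothetical usage sketch:

with with_repository(update_cache=True) as repo:
    # Work with the repository; the index is saved when the block exits
    # without raising.
    for file in repo.aggregated_files:
        ...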
Example #6
def create_directory(path):
    log('Creating directory: {}', relpath(path))
    path.mkdir()
Example #7
def move_file(source, dest):
    log('Would move: {} -> {}', relpath(source), relpath(dest))
Example #8
def create_directory(path):
    log('Would create directory: {}', relpath(path))
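
Examples #7 and #8 are logging-only counterparts of Examples #1 and #6, which suggests a dry-run mode. One plausible arrangement (hypothetical; the project's actual dispatch is not shown) renames the dry-run variants to avoid the name clash and selects a set of operations by flag:

def get_file_operations(dry_run):
    # Hypothetical selector; move_file_dry and create_directory_dry stand
    # for Examples #7 and #8 under illustrative names.
    if dry_run:
        return move_file_dry, create_directory_dry
    return move_file, create_directory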