class NotifyIfDeletedAction(ActionBase):
    """Emits ``file_deleted`` with the relative path of every DELETE event."""

    def __init__(self, path_converter):
        super(NotifyIfDeletedAction, self).__init__()
        self.file_deleted = Signal(str)  # relative_path

        # Renamed from '_patch_converter' (misspelling) for consistency
        # with CheckLongPathAction._path_converter.
        self._path_converter = path_converter

    def add_new_event(self, fs_event):
        """Notifies listeners about a DELETE; other event types are ignored."""
        if fs_event.event_type == DELETE:
            # Strip the link suffix so listeners always see the real name.
            src_path = fs_event.src[: -len(FILE_LINK_SUFFIX)] \
                if fs_event.is_link \
                else fs_event.src
            self.file_deleted.emit(
                self._path_converter.create_relpath(src_path))
class NotifyIfModifiedAction(ActionBase):
    """Emits ``file_modified`` (relative path, mtime) for every MODIFY event."""

    def __init__(self, path_converter):
        super(NotifyIfModifiedAction, self).__init__()
        self.file_modified = Signal(
            str,  # relative_path
            float)  # modified time

        # Renamed from '_patch_converter' (misspelling) for consistency
        # with CheckLongPathAction._path_converter.
        self._path_converter = path_converter

    def add_new_event(self, fs_event):
        """Notifies listeners about a MODIFY; other event types are ignored."""
        if fs_event.event_type == MODIFY:
            # Strip the link suffix so listeners always see the real name.
            src_path = fs_event.src[: -len(FILE_LINK_SUFFIX)] \
                if fs_event.is_link \
                else fs_event.src
            self.file_modified.emit(
                self._path_converter.create_relpath(src_path), fs_event.mtime)
class CheckLongPathAction(ActionBase):
    """Suppresses events on paths that exceed the relative-path or the
    file-name byte-length limits, tracking the set of known long paths."""

    def __init__(self, path_converter, max_relpath_len, long_paths):
        super(CheckLongPathAction, self).__init__()
        self.long_path_added = Signal(FilePath)
        self.long_path_removed = Signal(FilePath)
        self._path_converter = path_converter
        # Max length of a relative path in UTF-8 bytes.
        self._max_relpath_len = max_relpath_len
        # Shared (caller-owned) set of currently known long paths.
        self._long_paths = long_paths

    def _on_new_event(self, fs_event):
        # CREATE/MODIFY on a long path: remember it and drop the event.
        if fs_event.event_type in (CREATE, MODIFY) and \
                self._is_path_long(fs_event.src):
            self._long_paths.add(fs_event.src)
            self.long_path_added.emit(fs_event.src)
            self.event_suppressed(fs_event)
        elif fs_event.event_type == MOVE and \
                self._is_path_long(fs_event.dst):
            if self._is_path_long(fs_event.src):
                # long -> long: just re-key the tracked path, drop the event.
                self._long_paths.discard(fs_event.src)
                self._long_paths.add(fs_event.dst)
                self.long_path_added.emit(fs_event.dst)
                self.long_path_removed.emit(fs_event.src)
                self.event_suppressed(fs_event)
            else:
                # ok -> long: the file effectively disappears from sync,
                # so downgrade the MOVE to a DELETE of the source.
                self._long_paths.add(fs_event.dst)
                self.long_path_added.emit(fs_event.dst)
                fs_event.event_type = DELETE
                self.event_passed(fs_event)
        elif fs_event.event_type == MOVE and \
                self._is_path_long(fs_event.src):
            # long -> ok: the file effectively appears in sync,
            # so upgrade the MOVE to a CREATE of the destination.
            fs_event.event_type = CREATE
            self.event_passed(fs_event)
        elif fs_event.event_type == DELETE and \
                self._is_path_long(fs_event.src):
            # Deleted long path: forget it and drop the event.
            self._long_paths.discard(fs_event.src)
            self.long_path_removed.emit(fs_event.src)
            self.event_suppressed(fs_event)
        else:
            self.event_passed(fs_event)

    def _is_path_long(self, path):
        # Limits are measured in UTF-8 bytes, not characters.
        rel_path = self._path_converter.create_relpath(path)
        file_name = rel_path.split('/')[-1]
        return len(bytes(rel_path.encode('utf-8'))) > \
               self._max_relpath_len or \
               len(bytes(file_name.encode('utf-8'))) > MAX_FILE_NAME_LEN
# Example 4
class NotifyIfMovedAction(ActionBase):
    """Emits ``file_moved`` with (old, new) relative paths for MOVE events."""

    def __init__(self, path_converter):
        super(NotifyIfMovedAction, self).__init__()
        self.file_moved = Signal(str, str)  # old, new file path

        # Renamed from '_patch_converter' (misspelling) for consistency
        # with CheckLongPathAction._path_converter.
        self._path_converter = path_converter

    def add_new_event(self, fs_event):
        """Notifies listeners about a MOVE; other event types are ignored."""
        if fs_event.event_type == MOVE:
            # Strip the link suffix so listeners see real file names.
            src_path = fs_event.src[: -len(FILE_LINK_SUFFIX)] \
                if fs_event.is_link \
                else fs_event.src
            # BUGFIX: the destination must be derived from fs_event.dst;
            # previously fs_event.src was used here for link events,
            # making both emitted paths equal.
            dst_path = fs_event.dst[: -len(FILE_LINK_SUFFIX)] \
                if fs_event.is_link \
                else fs_event.dst
            self.file_moved.emit(
                self._path_converter.create_relpath(src_path),
                self._path_converter.create_relpath(dst_path))
# Example 5
class MoveFileRecentCopyAction(ActionBase):
    """Promotes a file's 'recent copy' into the hash-named synced copy and
    registers a copy reference; returns the event on any inconsistency."""

    def __init__(self, root, copies_storage):
        super(MoveFileRecentCopyAction, self).__init__()
        self._root = root
        self._copies_storage = copies_storage

        self.copy_added = Signal(str)  # emitted with the new file hash

    def _on_new_event(self, fs_event):
        # Target copy is named after the file's new hash.
        file_synced_copy_name = FilePath(
            join(get_copies_dir(self._root), fs_event.new_hash)).longpath
        file_recent_copy_name = FilePath(fs_event.file_recent_copy).longpath

        # Reference is added up-front and rolled back below on move failure.
        self._copies_storage.add_copy_reference(
            fs_event.new_hash,
            reason="MoveFileRecentCopyAction {}".format(fs_event.src))

        if exists(file_recent_copy_name):
            if not exists(file_synced_copy_name):
                try:
                    shutil.move(file_recent_copy_name, file_synced_copy_name)
                    self.copy_added.emit(fs_event.new_hash)
                except (OSError, IOError):
                    # Move failed: undo the reference and retry the event.
                    self._copies_storage.remove_copy_reference(
                        fs_event.new_hash,
                        reason="MoveFileRecentCopyAction {}".format(
                            fs_event.src))
                    self.event_returned(fs_event)
                    return
                # Size mismatch means the file changed mid-flight; retry.
                if stat(file_synced_copy_name).st_size != fs_event.file_size:
                    self.event_returned(fs_event)
                    return
            fs_event.file_synced_copy = FilePath(file_synced_copy_name)

            self.event_passed(fs_event)
        else:
            # Recent copy vanished; retry the event.
            self.event_returned(fs_event)

    def _is_sutable(self, fs_event):
        # NOTE(review): 'sutable' (sic) — presumably an ActionBase hook
        # name; do not rename without updating the base class.
        return (not fs_event.is_dir and fs_event.event_type in (CREATE, MODIFY)
                and fs_event.file_recent_copy)
class MakeFileRecentCopyAction(ActionBase):
    """Makes a 'recent copy' of the event's file inside the copies dir,
    suppressing the event when disk space is insufficient."""

    def __init__(self, root):
        super(MakeFileRecentCopyAction, self).__init__()
        self._root = root
        # (fs_event, path, is_error) — is_error distinguishes the upfront
        # free-space check (False) from an actual ENOSPC failure (True).
        self.no_disk_space = Signal(object, str, bool)

    def _on_new_event(self, fs_event):
        # Upfront check: need room for the file plus its signature file.
        if fs_event.file_size + get_signature_file_size(fs_event.file_size) > \
                get_free_space_by_filepath(fs_event.src):
            self.no_disk_space.emit(fs_event, fs_event.src, False)
            self.event_suppressed(fs_event)
            return

        # Copy name is keyed by the event id, not the file hash.
        file_recent_copy_name = FilePath(
            join(get_copies_dir(self._root),
                 'recent_copy_' + str(fs_event.id)))
        fs_event.file_recent_copy = file_recent_copy_name
        recent_copy_longpath = FilePath(file_recent_copy_name).longpath
        try:
            copy_file(FilePath(fs_event.src).longpath, recent_copy_longpath)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOSPC:
                self.no_disk_space.emit(fs_event, fs_event.src, True)
                self.event_suppressed(fs_event)
                return

            # Any other I/O error: retry the event later.
            self.event_returned(fs_event)
            return

        # Size mismatch means the source changed during the copy; retry.
        recent_copy_size = stat(recent_copy_longpath).st_size
        if recent_copy_size != fs_event.file_size:
            self.event_returned(fs_event)
            return

        self.event_passed(fs_event)

    def _is_sutable(self, fs_event):
        # NOTE(review): 'sutable' (sic) — presumably an ActionBase hook
        # name; do not rename without updating the base class.
        return (not fs_event.is_dir and fs_event.event_type in (CREATE, MODIFY)
                and fs_event.file_size and not fs_event.is_link)
# Example 7
class FilesystemMonitor(QObject):
    """
    Class provides all functions needed to work with filesystem
    in scope of project
    """
    # Reserve room under the OS limit for generated name suffixes.
    max_file_name_length = MAX_FILE_NAME_LEN - 5
    selective_sync_conflict_suffix = "selective sync conflict"

    started = pyqtSignal()
    stopped = pyqtSignal()
    process_offline = pyqtSignal(bool)

    def __init__(self,
                 root,
                 events_processing_delay,
                 copies_storage,
                 get_sync_dir_size,
                 conflict_file_suffix='',
                 tracker=None,
                 storage=None,
                 excluded_dirs=(),
                 parent=None,
                 max_relpath_len=3096,
                 db_file_created_cb=None):
        """
        @param root Sync directory root path
        @param events_processing_delay Delay for FsEventActions processing
        @param copies_storage Storage for file copies
        @param get_sync_dir_size Callable returning sync dir size
        @param conflict_file_suffix Suffix for conflict file names
        @param tracker Optional statistics tracker
        @param storage Optional Storage instance; created here if None
        @param excluded_dirs Paths excluded from sync
        @param parent Qt parent object
        @param max_relpath_len Max relative path length in bytes
        @param db_file_created_cb Callback passed to Storage creation
        """
        QObject.__init__(self, parent=parent)
        # Needed for multiprocessing on frozen (packaged) Windows builds.
        freeze_support()

        self._tracker = tracker

        self._root = root

        self._path_converter = PathConverter(self._root)
        self._storage = storage if storage else Storage(
            self._path_converter, db_file_created_cb)
        self._copies_storage = copies_storage
        self._copies_storage.delete_copy.connect(self.on_delete_copy)
        # Re-export storage signals as monitor attributes.
        self.possibly_sync_folder_is_removed = \
            self._storage.possibly_sync_folder_is_removed
        self.db_or_disk_full = self._storage.db_or_disk_full
        self._get_sync_dir_size = get_sync_dir_size
        self._conflict_file_suffix = conflict_file_suffix

        self._rsync = Rsync

        _hide_files = HIDDEN_FILES
        _hide_dirs = HIDDEN_DIRS

        # Drop leftover 'recent_copy_*' files from a previous run.
        self._clean_recent_copies()

        self._actions = FsEventActions(
            self._root,
            events_processing_delay=events_processing_delay,
            path_converter=self._path_converter,
            storage=self._storage,
            copies_storage=self._copies_storage,
            rsync=self._rsync,
            tracker=self._tracker,
            parent=None,
            max_relpath_len=max_relpath_len,
        )

        # Main FS watcher for regular files.
        self._watch = WatchdogHandler(root=FilePath(self._root).longpath,
                                      hidden_files=_hide_files,
                                      hidden_dirs=_hide_dirs)

        # Separate watcher for in-progress '*.download' files.
        self._download_watch = WatchdogHandler(root=FilePath(
            self._root).longpath,
                                               hidden_files=_hide_files,
                                               hidden_dirs=_hide_dirs,
                                               patterns=['*.download'],
                                               is_special=True)

        self._observer = ObserverWrapper(self._storage,
                                         self._get_sync_dir_size,
                                         self._tracker,
                                         parent=None)
        self._observer.event_handled.connect(
            self._observer.on_event_is_handled_slot)
        # Report back to the observer whether each event passed (False)
        # or was suppressed (True).
        self._actions.event_passed.connect(
            lambda ev: self._observer.event_handled.emit(ev, False))
        self._actions.event_suppressed.connect(
            lambda ev: self._observer.event_handled.emit(ev, True))

        # Add FS root for events tracking
        self._observer.schedule(self._watch, root)

        self._local_processor = LocalProcessor(self._root, self._storage,
                                               self._path_converter,
                                               self._tracker)
        self.event_is_arrived = self._local_processor.event_is_arrived
        self._quiet_processor = QuietProcessor(self._root, self._storage,
                                               self._path_converter,
                                               self.Exceptions)

        self._files_list = FilesList(self._storage, self._root)

        # Actions and observer run in a dedicated worker thread.
        self._thread = QThread()
        self._thread.started.connect(self._on_thread_started)
        self._actions.moveToThread(self._thread)
        self._observer.moveToThread(self._thread)

        self._watch.event_is_arrived.connect(self._on_event_arrived)
        self._download_watch.event_is_arrived.connect(self._on_event_arrived)
        self._actions.event_passed.connect(self._local_processor.process)

        self._local_events_flag = False
        self._actions.event_passed.connect(self._set_local_events_flag)

        # Re-export action signals as monitor attributes.
        self.error_happens = self._actions.error_happens
        self.no_disk_space = self._actions.no_disk_space
        self.idle = self._actions.idle
        self.working = self._actions.working
        self.file_added_to_ignore = self._actions.file_added_to_ignore
        self.file_removed_from_ignore = self._actions.file_removed_from_ignore
        self.file_added_to_indexing = self._actions.file_added_to_indexing
        self.file_removed_from_indexing = self._actions.file_removed_from_indexing
        self.file_added = self._actions.file_added
        self.file_modified = self._actions.file_modified
        # Aggregate deletions from both actions and quiet processor.
        self.file_deleted = Signal(str)
        self._actions.file_deleted.connect(self.file_deleted)
        self._quiet_processor.file_deleted.connect(self.file_deleted)
        self._quiet_processor.file_modified.connect(self.file_modified)
        self.file_moved = self._quiet_processor.file_moved
        self._actions.file_moved.connect(lambda o, n: self.file_moved(o, n))
        self.access_denied = self._quiet_processor.access_denied

        # Keep the files list model in sync with file events.
        self.file_list_changed = self._files_list.file_list_changed
        self.file_added.connect(self._files_list.on_file_added)
        self.file_deleted.connect(self._files_list.on_file_deleted)
        self.file_moved.connect(self._files_list.on_file_moved)
        self.file_modified.connect(self._files_list.on_file_modified)
        self.idle.connect(self._files_list.on_idle)

        self.process_offline.connect(self._observer.process_offline_changes)

        self.copy_added = Signal(str)
        self._actions.copy_added.connect(self.copy_added)

        self._actions.rename_file.connect(self._rename_file)

        self.special_file_event = Signal(
            str,  # path
            int,  # event type
            str)  # new path
        self._special_files = list()
        self._excluded_dirs = list(map(FilePath, excluded_dirs))

        # Online processing is blocked until initial sync allows it.
        self._online_processing_allowed = False
        self._online_modifies_processing_allowed = False

        self._paths_with_modify_quiet = set()

    def on_initial_sync_finished(self):
        """Forwards sync completion to actions; emits idle if nothing pends."""
        logger.debug("on_initial_sync_finished")
        self._actions.on_initial_sync_finished()
        pending = self._actions.get_fs_events_count()
        if pending or self._observer.is_processing_offline:
            return

        self.idle.emit()

    def _on_processed_offline_changes(self):
        """Emits idle when no fs events remain after offline processing."""
        logger.debug("_on_processed_offline_changes")
        if self._actions.get_fs_events_count():
            return

        self.idle.emit()

    def on_initial_sync_started(self):
        """Forwards sync start to actions and blocks online processing."""
        logger.debug("on_initial_sync_started")
        self._actions.on_initial_sync_started()
        self._online_processing_allowed = \
            self._online_modifies_processing_allowed = False

    def start_online_processing(self):
        """Enables online processing; first call triggers an offline pass."""
        logger.debug("start_online_processing")
        first_call = not self._online_processing_allowed
        if first_call:
            logger.debug("start_online_processing, emit process_offline")
            self.process_offline.emit(self._online_modifies_processing_allowed)
        self._online_processing_allowed = True

    def start_online_modifies_processing(self):
        """Enables online modifies; first call triggers an offline pass."""
        logger.debug("start_online_modifies_processing")
        first_call = not self._online_modifies_processing_allowed
        if first_call:
            logger.debug(
                "start_online_modifies_processing, emit process_offline")
            self.process_offline.emit(True)
        self._online_modifies_processing_allowed = True

    def get_root(self):
        """Returns the sync directory root path."""
        return self._root

    def root_exists(self):
        """Returns True if the sync root exists and is a directory."""
        return op.isdir(self._root)

    def _on_thread_started(self):
        """Wires offline-change signals and kicks off actions and observer.

        Runs in the worker thread (connected to QThread.started).
        """
        logger.info("Start monitoring of '%s'", self._root)
        self._observer.offline_event_occured.connect(self._on_event_arrived)
        self._observer.processed_offline_changes.connect(
            self._on_processed_offline_changes)
        self.started.emit()
        self._actions.start.emit()
        self._observer.start.emit()
        self._local_events_flag = False

    @benchmark
    def start(self):
        """Activates the observer, starts the worker thread and files list.

        If the thread already runs, re-runs the thread-start wiring instead.
        """
        logger.debug("start")
        self._observer.set_active()
        if not self._thread.isRunning():
            self._thread.start()
        else:
            self._on_thread_started()
        self._files_list.start()

    def stop(self):
        """Disconnects offline signals and stops actions/observer/files list."""
        logger.info("stopped monitoring")
        try:
            self._observer.offline_event_occured.disconnect(
                self._on_event_arrived)
        except RuntimeError:
            # Signal may already be disconnected (e.g. stop before start).
            logger.warning("Can't disconnect offline_event_occured")
        try:
            self._observer.processed_offline_changes.disconnect(
                self._on_processed_offline_changes)
        except RuntimeError:
            logger.warning("Can't disconnect processed_offline_changes")
        self._actions.stop()
        self._observer.stop()
        self._files_list.stop()
        self.stopped.emit()

    def quit(self):
        """Stops monitoring and joins the worker thread."""
        self.stop()
        self._thread.quit()
        self._thread.wait()

    def is_processing(self, file_path):
        """Returns True if the relative path is currently being processed."""
        return self._actions.is_processing(
            self._path_converter.create_abspath(file_path))

    def is_known(self, file_path):
        """Returns True if the path (link suffix stripped) is in storage."""
        trimmed = file_path[:-len(FILE_LINK_SUFFIX)] \
            if file_path.endswith(FILE_LINK_SUFFIX) \
            else file_path
        return self._storage.get_known_file(trimmed) is not None

    def process_offline_changes(self):
        """Triggers one offline pass if local events arrived since last call."""
        if not self._local_events_flag:
            return

        self.process_offline.emit(self._online_modifies_processing_allowed)
        self._local_events_flag = False

    def _set_local_events_flag(self, fs_event):
        """Remembers that a local (non-offline) fs event passed through."""
        if fs_event.is_offline:
            return

        self._local_events_flag = True

    def clean_storage(self):
        """Clears the storage DB and removes file links under the root."""
        self._storage.clean()
        delete_file_links(self._root)

    def clean_copies(self, with_files=True):
        """Clears the copies storage, optionally removing copy files too."""
        self._copies_storage.clean(with_files=with_files)

    def move_files_to_copies(self):
        """Moves every known file into the copies dir (named by its hash),
        removes the emptied folder tree and clears the storage DB."""
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            files_with_hashes = session\
                .query(File.relative_path, File.file_hash) \
                .filter(File.is_folder == 0) \
                .all()
            copies_dir = get_copies_dir(self._root)
            for (file, hashsum) in files_with_hashes:
                hash_path = op.join(copies_dir, hashsum)
                file_path = self._path_converter.create_abspath(file)
                if not op.exists(hash_path):
                    try:
                        os.rename(file_path, hash_path)
                    except Exception as e:
                        logger.error("Error moving file to copy: %s", e)
                # Removes the original if the copy already existed; no-op
                # otherwise (the rename above already moved it away).
                remove_file(file_path)
        abs_path = FilePath(self._root).longpath
        # Top-level folders of the sync root, excluding hidden system dirs.
        folders_plus_hidden = [
            self._path_converter.create_abspath(f)
            for f in os.listdir(abs_path) if f not in HIDDEN_DIRS
        ]
        for folder in folders_plus_hidden:
            if not op.isdir(folder):
                continue

            try:
                remove_dir(folder)
            except Exception as e:
                logger.error("Error removing dir '%s' (%s)", folder, e)
        logger.info("Removed all files and folders")
        self._storage.clean()

    def clean(self):
        """Removes all known files, then all known folders (shortest paths
        first, so parents go last), then clears the storage DB."""
        for path in self._storage.get_known_files():
            try:
                remove_file(path)
            except Exception as e:
                logger.error("Error removing file '%s' (%s)", path, e)
        for path in sorted(self._storage.get_known_folders(), key=len):
            try:
                remove_dir(path)
            except Exception as e:
                logger.error("Error removing dir '%s' (%s)", path, e)
        logger.info("Removed all files and folders")
        self._storage.clean()

    def accept_delete(self,
                      path,
                      is_directory=False,
                      events_file_id=None,
                      is_offline=True):
        '''
        Processes file deletion

        @param path Name of file relative to sync directory [unicode]
        @param is_directory Whether path is a directory [bool]
        @param events_file_id Expected file id for consistency checks
        @param is_offline Whether the file is stored offline [bool]
        '''

        full_path = self._path_converter.create_abspath(path)
        object_type = 'directory' if is_directory else 'file'

        logger.debug("Deleting '%s' %s...", path, object_type)
        if is_directory:
            self._quiet_processor.delete_directory(full_path, events_file_id)
        else:
            self._quiet_processor.delete_file(full_path, events_file_id,
                                              is_offline)
        self.file_removed_from_indexing.emit(FilePath(full_path), True)

        logger.info("'%s' %s is deleted", path, object_type)

    def set_patch_uuid(self, patch_path, diff_file_uuid):
        """Moves a patch file into the patches dir under its uuid name."""
        shutil.move(patch_path, self.get_patch_path(diff_file_uuid))

    def get_patch_path(self, diff_file_uuid):
        """Returns the absolute path of a patch file by its uuid."""
        return os.path.join(get_patches_dir(self._root), diff_file_uuid)

    def create_directory(self, path, events_file_id):
        """Creates a directory for the given relative path.

        On AssertionError (parent missing/inconsistent), queues a quiet
        DELETE event for the parent dir and re-raises.
        """
        full_path = self._path_converter.create_abspath(path)
        try:
            self._quiet_processor.create_directory(
                full_path,
                events_file_id=events_file_id,
                wrong_file_id=self.Exceptions.WrongFileId)
        except AssertionError:
            self._on_event_arrived(
                FsEvent(DELETE,
                        op.dirname(full_path),
                        True,
                        is_offline=True,
                        quiet=True))
            raise

    def apply_patch(self, filename, patch, new_hash, old_hash, events_file_id):
        '''
        Applies given patch for the file specified

        @param filename Name of file relative to sync directory [unicode]
        @param patch Patch data [dict]
        @param new_hash Expected hash after patching
        @param old_hash Hash before patching
        @param events_file_id Expected file id for consistency checks
        '''

        full_fn = self._path_converter.create_abspath(filename)

        try:
            self._apply_patch(full_fn,
                              patch,
                              new_hash,
                              old_hash,
                              events_file_id=events_file_id)
        except AssertionError:
            # Inconsistent state: queue a quiet DELETE for the parent dir.
            self._on_event_arrived(
                FsEvent(DELETE,
                        op.dirname(full_fn),
                        True,
                        is_offline=True,
                        quiet=True))
            raise

    def accept_move(self,
                    src,
                    dst,
                    is_directory=False,
                    events_file_id=None,
                    is_offline=True):
        """Processes a file or directory move.

        @param src Source path relative to sync dir [unicode]
        @param dst Destination path relative to sync dir [unicode]
        @param is_directory Whether paths denote a directory [bool]
        @param events_file_id Expected file id for consistency checks
        @param is_offline Whether the file is stored offline [bool]
        @raise Exceptions.FileAlreadyExists, Exceptions.FileNotFound,
               Exceptions.WrongFileId
        """
        src_full_path = self._path_converter.create_abspath(src)
        dst_full_path = self._path_converter.create_abspath(dst)

        try:
            object_type = 'directory' if is_directory else 'file'
            logger.debug("Moving '%s' %s to '%s'...", src, object_type, dst)
            if is_directory:
                self._quiet_processor.move_directory(
                    src_full_path,
                    dst_full_path,
                    events_file_id,
                    self.Exceptions.FileAlreadyExists,
                    self.Exceptions.FileNotFound,
                    wrong_file_id=self.Exceptions.WrongFileId)
            else:
                self._quiet_processor.move_file(
                    src_full_path,
                    dst_full_path,
                    events_file_id,
                    self.Exceptions.FileAlreadyExists,
                    self.Exceptions.FileNotFound,
                    wrong_file_id=self.Exceptions.WrongFileId,
                    is_offline=is_offline)
            logger.info("'%s' %s is moved to '%s'", src, object_type, dst)
            self.file_removed_from_indexing.emit(FilePath(src_full_path), True)
        except AssertionError:
            # Inconsistent state: queue a quiet DELETE for the destination
            # parent dir.
            self._on_event_arrived(
                FsEvent(DELETE,
                        op.dirname(dst_full_path),
                        True,
                        is_offline=True,
                        quiet=True))
            raise

    def change_events_file_id(self, old_id, new_id):
        """Replaces a file's events id in storage."""
        self._storage.change_events_file_id(old_id, new_id)

    class Exceptions(object):
        """ User-defined exceptions are stored here """
        class FileNotFound(Exception):
            """ File doesn't exist exception"""
            def __init__(self, file):
                self.file = file

            def __str__(self):
                return repr(self.file)

        class FileAlreadyExists(Exception):
            """ File already exists exception (for move) """
            def __init__(self, path):
                self.path = path

            def __str__(self):
                return "File already exists {}".format(self.path)

        class AccessDenied(Exception):
            """ Access denied exception (for move or delete) """
            def __init__(self, path):
                self.path = path

            def __str__(self):
                return "Access denied for {}".format(self.path)

        class WrongFileId(Exception):
            """ Wrong file id exception """
            def __init__(self, path, file_id_expected=None, file_id_got=None):
                self.path = path
                self.file_id_expected = file_id_expected
                self.file_id_got = file_id_got

            def __str__(self):
                return "Wrong file id for {}. Expected id {}. Got id {}".format(
                    self.path, self.file_id_expected, self.file_id_got)

        class CopyDoesNotExists(Exception):
            """ Copy missing for the given hash exception """
            def __init__(self, hash):
                self.hash = hash

            def __str__(self):
                return "Copy with hash {} does not exists".format(self.hash)

    def _apply_patch(self,
                     filename,
                     patch,
                     new_hash,
                     old_hash,
                     silent=True,
                     events_file_id=None):
        """Applies a patch to a file, verifying the resulting hash.

        @param filename Absolute path of the file to patch
        @param patch Path of the patch file
        @param new_hash Expected hash after patching
        @param old_hash Hash before patching (rebound from patch result)
        @param silent Passed through to the quiet processor
        @param events_file_id Expected file id for consistency checks
        @raise AssertionError when the patched hash does not match new_hash
        """
        start_time = time.time()
        patch_size = os.stat(patch).st_size
        success = False
        # Removed a redundant bare 'except: raise' clause: exceptions
        # propagate identically without it, and 'finally' still runs.
        try:
            patched_new_hash, old_hash = self._quiet_processor.patch_file(
                filename,
                patch,
                silent=silent,
                events_file_id=events_file_id,
                wrong_file_id=self.Exceptions.WrongFileId)
            assert patched_new_hash == new_hash
            success = True
            self.copy_added.emit(new_hash)
        except Rsync.AlreadyPatched:
            # File already carries the patch; count as success.
            success = True
        finally:
            # Report statistics even when patching fails.
            if self._tracker:
                try:
                    file_size = os.stat(filename).st_size
                except OSError:
                    file_size = 0
                duration = time.time() - start_time
                self._tracker.monitor_patch_accept(file_size, patch_size,
                                                   duration, success)

    def generate_conflict_file_name(self,
                                    filename,
                                    is_folder=False,
                                    name_suffix=None,
                                    with_time=True):
        """Generates a unique conflict name for the given path.

        Inserts '(suffix date)' before the extension, trimming name parts
        to honor max_file_name_length (in UTF-8 bytes), and appends an
        index while the resulting path already exists.

        @param filename Path relative to sync directory [unicode]
        @param is_folder Whether path denotes a folder [bool]
        @param name_suffix Conflict suffix; defaults to
            self._conflict_file_suffix
        @param with_time Include today's date in the suffix [bool]
        @return Conflict file path [FilePath]
        """
        orig_filename = filename
        directory, filename = op.split(filename)
        original_ext = ''
        if is_folder:
            original_name = filename
        else:
            # consider ext as 2 '.'-delimited last filename substrings
            # if they don't contain spaces
            dots_list = filename.split('.')
            name_parts_len = len(dots_list)
            for k in range(1, min(name_parts_len, 3)):
                if ' ' in dots_list[-k]:
                    break

                # BUGFIX: prepend the k-th substring from the END
                # (dots_list[-k]); the previous dots_list[k] picked parts
                # from the front for names with 3+ dot-parts, producing a
                # scrambled extension.
                original_ext = '.{}{}'.format(dots_list[-k], original_ext)
                name_parts_len -= 1
            original_name = '.'.join(dots_list[:name_parts_len])

        index = 0
        if name_suffix is None:
            name_suffix = self._conflict_file_suffix
        date_today = date.today().strftime('%d-%m-%y') if with_time else ''
        suffix = '({} {})'.format(name_suffix, date_today)
        # Keep the suffix under a third of the name budget (UTF-8 bytes).
        while len(bytes(suffix.encode('utf-8'))) > \
                int(self.max_file_name_length / 3):
            suffix = suffix[int(len(suffix) / 2):]

        name = '{}{}{}'.format(original_name, suffix, original_ext)
        # Trim name (then extension) until the whole name fits the budget.
        while True:
            to_cut = len(bytes(name.encode('utf-8'))) - \
                     self.max_file_name_length
            if to_cut <= 0:
                break
            if len(original_name) > to_cut:
                original_name = original_name[:-to_cut]
            else:
                remained = to_cut - len(original_name) + 1
                original_name = original_name[:1]
                if remained < len(original_ext):
                    original_ext = original_ext[remained:]
                else:
                    original_ext = original_ext[int(len(original_ext) / 2):]
            name = '{}{}{}'.format(original_name, suffix, original_ext)

        # Append a numeric index until the path is unique.
        while op.exists(
                self._path_converter.create_abspath(
                    FilePath(op.join(directory, name)))):
            index += 1
            name = '{}{} {}{}'.format(original_name, suffix, index,
                                      original_ext)
        conflict_file_name = FilePath(op.join(directory, name))
        logger.info(
            "Generated conflict file name: %s, original name: %s, "
            "is_folder: %s, name_suffix: %s, with_time: %s",
            conflict_file_name, orig_filename, is_folder, name_suffix,
            with_time)
        return conflict_file_name

    def move_file(self, src, dst, is_offline=True):
        """Renames a file (or dir) inside the sync root.

        @param src Source path relative to sync dir [unicode]
        @param dst Destination path relative to sync dir [unicode]
        @param is_offline Whether the file is stored offline; forced True
            for directories [bool]
        @raise Exceptions.FileNotFound, Exceptions.FileAlreadyExists,
               Exceptions.AccessDenied, OSError
        """
        src_full_path = self._path_converter.create_abspath(src)
        dst_full_path = self._path_converter.create_abspath(dst)
        # Directories are always treated as offline.
        is_offline = True if op.isdir(src_full_path) else is_offline
        src_hard_path = self._quiet_processor.get_hard_path(
            src_full_path, is_offline)
        dst_hard_path = self._quiet_processor.get_hard_path(
            dst_full_path, is_offline)

        if not op.exists(src_hard_path):
            raise self.Exceptions.FileNotFound(src_full_path)
        elif op.exists(dst_hard_path):
            raise self.Exceptions.FileAlreadyExists(dst_full_path)

        dst_parent_folder_path = op.dirname(dst_full_path)
        if not op.exists(dst_parent_folder_path):
            # Missing destination parent: queue a quiet DELETE for it.
            self._on_event_arrived(
                FsEvent(DELETE,
                        dst_parent_folder_path,
                        True,
                        is_offline=True,
                        quiet=True))

        try:
            os.rename(src_hard_path, dst_hard_path)
        except OSError as e:
            logger.warning("Can't move file (dir) %s. Reason: %s",
                           src_full_path, e)
            if e.errno == errno.EACCES:
                self._quiet_processor.access_denied()
                raise self.Exceptions.AccessDenied(src_full_path)
            else:
                raise e

    def copy_file(self, src, dst, is_directory=False, is_offline=True):
        """Copies a file or directory tree inside the sync root.

        @raise Exceptions.FileNotFound when the source is missing
        """
        if is_directory:
            # Directories are always treated as offline.
            is_offline = True
        src_abs = self._path_converter.create_abspath(src)
        dst_abs = self._path_converter.create_abspath(dst)
        src_hard = self._quiet_processor.get_hard_path(src_abs, is_offline)
        dst_hard = self._quiet_processor.get_hard_path(dst_abs, is_offline)

        if not op.exists(src_hard):
            raise self.Exceptions.FileNotFound(src_abs)

        if is_directory:
            shutil.copytree(src_abs, dst_abs)
        else:
            common.utils.copy_file(src_hard, dst_hard)

    def restore_file_from_copy(self,
                               file_name,
                               copy_hash,
                               events_file_id,
                               search_by_id=False):
        """Restores a file's content from the copy with the given hash.

        @param file_name Path relative to sync directory [unicode]
        @param copy_hash Hash naming the copy to restore from
        @param events_file_id Expected file id for consistency checks
        @param search_by_id Look up the file by id instead of path [bool]
        @return Previous file hash as reported by the quiet processor
        @raise Exceptions.WrongFileId, Exceptions.CopyDoesNotExists
        """
        try:
            old_hash = self._quiet_processor.create_file_from_copy(
                file_name,
                copy_hash,
                silent=True,
                events_file_id=events_file_id,
                search_by_id=search_by_id,
                wrong_file_id=self.Exceptions.WrongFileId,
                copy_does_not_exists=self.Exceptions.CopyDoesNotExists)
        except AssertionError:
            # Inconsistent state: queue a quiet DELETE for the parent dir.
            self._on_event_arrived(
                FsEvent(DELETE,
                        op.dirname(
                            self._path_converter.create_abspath(file_name)),
                        True,
                        is_offline=True,
                        quiet=True))
            raise

        return old_hash

    def create_file_from_copy(self,
                              file_name,
                              copy_hash,
                              events_file_id,
                              search_by_id=False):
        """Create a file from a stored copy, discarding the previous hash."""
        self.restore_file_from_copy(
            file_name, copy_hash,
            events_file_id=events_file_id,
            search_by_id=search_by_id)

    @benchmark
    def make_copy_from_existing_files(self, copy_hash):
        """Build the copy for copy_hash out of files already on disk."""
        processor = self._quiet_processor
        processor.make_copy_from_existing_files(copy_hash)

    def create_empty_file(self,
                          file_name,
                          file_hash,
                          events_file_id,
                          search_by_id=False,
                          is_offline=True):
        """Create an empty (placeholder) file.

        On an internal AssertionError a quiet offline DELETE event is
        emitted for the parent directory before re-raising.
        """
        try:
            self._quiet_processor.create_empty_file(
                file_name,
                file_hash,
                silent=True,
                events_file_id=events_file_id,
                search_by_id=search_by_id,
                wrong_file_id=self.Exceptions.WrongFileId,
                is_offline=is_offline)
        except AssertionError:
            parent = op.dirname(
                self._path_converter.create_abspath(file_name))
            self._on_event_arrived(
                FsEvent(DELETE, parent, True, is_offline=True, quiet=True))
            raise

    def on_delete_copy(self, hash, with_signature=True):
        """Delete the stored copy for a hash, optionally its signature too.

        Failures are treated as best-effort: the sync folder itself may
        have been removed, in which case the corresponding handler runs.
        """
        if not hash:
            logger.error("Invalid hash '%s'", hash)
            return

        copy_path = op.join(get_copies_dir(self._root), hash)
        try:
            remove_file(copy_path)
            logger.info("File copy deleted %s", copy_path)
            if with_signature:
                signature_path = op.join(
                    get_signatures_dir(self._root), hash)
                remove_file(signature_path)
                logger.info(
                    "File copy signature deleted %s", signature_path)
        except Exception as e:
            logger.error(
                "Can't delete copy. "
                "Possibly sync folder is removed %s", e)
            self.possibly_sync_folder_is_removed()

    def delete_old_signatures(self, delete_all=False):
        """Drop outdated signature files from the signatures directory."""
        logger.debug("Deleting old signatures...")
        signatures_dir = get_signatures_dir(self._root)
        self._quiet_processor.delete_old_signatures(
            signatures_dir, delete_all)

    def path_exists(self, path, is_offline=True):
        """Return True if the path's hard path exists on disk."""
        abs_path = self._path_converter.create_abspath(path)
        return op.exists(
            self._quiet_processor.get_hard_path(abs_path, is_offline))

    def rename_excluded(self, rel_path):
        """Rename a dir excluded from sync to its selective-sync
        conflict name."""
        logger.debug("Renaming excluded dir %s", rel_path)
        conflict_name = self.generate_conflict_file_name(
            rel_path,
            name_suffix=self.selective_sync_conflict_suffix,
            with_time=False)
        self.move_file(rel_path, conflict_name)

    def _rename_file(self, abs_path):
        rel_path = self._path_converter.create_relpath(abs_path)
        new_path = self.generate_conflict_file_name(rel_path,
                                                    is_folder=False,
                                                    name_suffix="",
                                                    with_time=True)
        self.move_file(rel_path, new_path)

    def db_file_exists(self):
        """Proxy to storage: True if the DB file exists and is usable."""
        storage = self._storage
        return storage.db_file_exists()

    def _clean_recent_copies(self):
        """Remove leftover '*.recent_copy_N' files from the copies dir."""
        pattern = op.join(get_copies_dir(self._root),
                          "*.recent_copy_[0-9]*")
        for stale_copy in glob.glob(pattern):
            os.remove(stale_copy)

    def add_special_file(self, path):
        """Register a special file and attach it to the proper watch."""
        self._special_files.append(path)
        # Files outside the sync root go to the download watch.
        watch = self._download_watch \
            if path not in FilePath(self._root) else None
        self._observer.add_special_file(path, watch)

    def remove_special_file(self, path):
        """Unregister a special file; stop observing it if outside root."""
        logger.debug("Removing special file %s...", path)
        if path not in FilePath(self._root):
            self._observer.remove_special_file(path)
        try:
            self._special_files.remove(path)
        except ValueError:
            logger.warning("Can't remove special file %s from list %s", path,
                           self._special_files)

    def change_special_file(self, old_file, new_file):
        """Swap a special-file registration: add the new one first so the
        path is never unwatched, then drop the old one."""
        self.add_special_file(new_file)
        self.remove_special_file(old_file)

    def _on_event_arrived(self, fs_event, is_special=False):
        """Entry point for raw filesystem events.

        Routes special-file events to the special_file_event signal;
        optionally drops online MODIFY events; strips surrounding
        whitespace from newly created/moved names (renaming on disk);
        rewrites MOVEs in/out of the hidden dir (or of names starting
        with '._') as CREATE/DELETE; ignores events inside the hidden
        dir; and finally forwards the event to the actions chain, or
        renames the target if it is an excluded dir.
        """
        logger.debug(
            "Event arrived %s, special %s, online_processing_allowed: %s, "
            "online_modifies_processing_allowed: %s", fs_event, is_special,
            self._online_processing_allowed,
            self._online_modifies_processing_allowed)
        if is_special or fs_event.src in self._special_files:
            self.special_file_event.emit(fs_event.src, fs_event.event_type,
                                         fs_event.dst)
        elif fs_event.is_offline or self._online_processing_allowed:
            if not self._online_modifies_processing_allowed and \
                    not fs_event.is_offline and fs_event.event_type == MODIFY:
                return
            elif fs_event.src in self._paths_with_modify_quiet \
                    and fs_event.event_type in (CREATE, MODIFY):
                # Paths marked quiet are reprocessed as quiet offline events
                fs_event.is_offline = True
                fs_event.quiet = True

            # Only CREATE and MOVE introduce a (possibly new) name on disk
            path = fs_event.src if fs_event.event_type == CREATE \
                else fs_event.dst if fs_event.event_type == MOVE else ""
            name = op.basename(path)
            parent_path = op.dirname(path)
            stripped_name = name.strip()
            if stripped_name != name:
                # Name has leading/trailing whitespace: rename on disk,
                # falling back to a conflict name if the target exists
                new_path = op.join(parent_path, stripped_name)
                if op.exists(new_path):
                    new_path = self.generate_conflict_file_name(
                        new_path,
                        is_folder=fs_event.is_dir,
                        name_suffix="",
                        with_time=True)
                logger.debug("Renaming '%s' to '%s'...", path, new_path)
                os.rename(FilePath(path).longpath, FilePath(new_path).longpath)

                path = new_path

                if fs_event.event_type == CREATE:
                    fs_event.src = new_path
                elif fs_event.event_type == MOVE:
                    fs_event.dst = new_path

            hidden_dir = FilePath(
                self._path_converter.create_abspath(HIDDEN_DIRS[0]))
            if fs_event.event_type == MOVE:
                # A move out of the hidden dir (or from a '._' name) looks
                # like a creation; a move into it looks like a deletion
                if FilePath(fs_event.src) in hidden_dir or \
                        op.basename(fs_event.src).startswith('._'):
                    fs_event.event_type = CREATE
                    fs_event.src = fs_event.dst
                    fs_event.dst = None
                elif FilePath(fs_event.dst) in hidden_dir or \
                        op.basename(fs_event.dst).startswith('._'):
                    fs_event.event_type = DELETE
                    fs_event.dst = None
            if FilePath(fs_event.src) in hidden_dir or \
                    op.basename(fs_event.src).startswith('._'):
                # Events inside the hidden dir are ignored entirely
                return

            if FilePath(path) in self._excluded_dirs:
                self.rename_excluded(self._path_converter.create_relpath(path))
            else:
                self._actions.add_new_event(fs_event)

    def get_long_paths(self):
        """Return paths too long for the filesystem, as tracked by
        the actions chain."""
        actions = self._actions
        return actions.get_long_paths()

    def set_excluded_dirs(self, excluded_dirs):
        """Replace the excluded-dirs list with FilePath-wrapped entries."""
        self._excluded_dirs = [FilePath(d) for d in excluded_dirs]

    def remove_dir_from_excluded(self, directory):
        """Remove one dir from the excluded list, logging on failure."""
        try:
            self._excluded_dirs.remove(directory)
        except Exception as e:
            # Best effort: missing entries are only worth a warning
            logger.warning("Can't remove excluded dir %s from %s. Reason: %s",
                           directory, self._excluded_dirs, e)

    def sync_events_file_id(self, file_path, events_file_id, is_folder):
        """Forward an events-file-id sync request to the quiet processor."""
        self._quiet_processor.sync_events_file_id(
            file_path, events_file_id, is_folder)

    def sync_events_file_id_by_old_id(self, events_file_id,
                                      old_events_file_id):
        """Forward an id-remap request to the quiet processor."""
        processor = self._quiet_processor
        processor.sync_events_file_id_by_old_id(
            events_file_id, old_events_file_id)

    def set_collaboration_folder_icon(self, folder_name):
        """Mark a top-level folder with the collaboration custom icon."""
        set_custom_folder_icon('collaboration', self._root, folder_name)

    def reset_collaboration_folder_icon(self, folder_name):
        """Clear the collaboration custom icon from a folder."""
        reset_custom_folder_icon(
            self._root, folder_name, resource_name='collaboration')

    def reset_all_collaboration_folder_icons(self):
        """Reset collaboration icons on every top-level folder in root."""
        root_folders = [
            entry for entry in os.listdir(self._root)
            if op.isdir(self._path_converter.create_abspath(entry))
        ]
        logger.debug("root_folders %s", root_folders)
        for folder in root_folders:
            self.reset_collaboration_folder_icon(folder)

    def get_excluded_dirs_to_change(self,
                                    excluded_dirs,
                                    src_path,
                                    dst_path=None):
        """Compute excluded dirs to drop/add when src_path moves to
        dst_path.

        Returns (dirs_to_delete, dirs_to_add). Re-rooted dirs are added
        only when the destination itself is not inside an excluded dir.
        """
        src_path = FilePath(src_path)
        if dst_path:
            dst_path = FilePath(dst_path)
        excluded_dirs = [FilePath(d) for d in excluded_dirs]
        dirs_to_delete = [ed for ed in excluded_dirs if ed in src_path]
        dirs_to_add = []
        if dst_path is not None and \
                not is_contained_in_dirs(dst_path, excluded_dirs):
            # we have to add new excluded dirs only if folder is not moved
            # to excluded dir
            src_len = len(src_path)
            dirs_to_add = [dst_path + d[src_len:] for d in dirs_to_delete]
        logger.debug(
            "get_excluded_dirs_to_change. "
            "excluded_dirs %s, src_path %s, dst_path %s, "
            "dirs_to_delete %s, dirs_to_add %s", excluded_dirs, src_path,
            dst_path, dirs_to_delete, dirs_to_add)
        return dirs_to_delete, dirs_to_add

    def change_excluded_dirs(self, dirs_to_delete, dirs_to_add):
        """Apply a computed excluded-dirs change set."""
        for stale in dirs_to_delete:
            self.remove_dir_from_excluded(stale)
        self._excluded_dirs.extend(dirs_to_add)

    def clear_excluded_dirs(self):
        """Forget all excluded dirs."""
        self._excluded_dirs = []

    def get_fs_events_count(self):
        """Number of filesystem events pending in the actions chain."""
        actions = self._actions
        return actions.get_fs_events_count()

    def force_create_copies(self):
        """Invalidate stored hashes and signatures, then reprocess all
        offline changes so copies get rebuilt."""
        self._storage.clear_files_hash_mtime()
        self.delete_old_signatures(delete_all=True)
        self._local_events_flag = True
        self.process_offline_changes()

    def get_file_list(self):
        """Return the cached file list."""
        files_list = self._files_list
        return files_list.get()

    def get_actual_events_file_id(self, path, is_folder=None):
        """Return the events_file_id known for path, or None if unknown."""
        abs_path = self._path_converter.create_abspath(path)
        known = self._storage.get_known_file(abs_path, is_folder=is_folder)
        return known.events_file_id if known else None

    def is_directory(self, path):
        """True if the path maps to a directory on disk."""
        return op.isdir(self._path_converter.create_abspath(path))

    def set_waiting(self, to_wait):
        """Toggle the actions chain's waiting state."""
        actions = self._actions
        actions.set_waiting(to_wait)

    def set_path_quiet(self, path):
        """Suppress modify notifications for a path."""
        logger.debug("Setting path %s quiet...", path)
        quiet_path = FilePath(path)
        self._paths_with_modify_quiet.add(quiet_path)

    def clear_paths_quiet(self):
        """Drop all quiet-path suppressions."""
        logger.debug("Clearing quiet paths...")
        self._paths_with_modify_quiet.clear()

    def delete_files_with_empty_events_file_ids(self):
        """Purge storage rows lacking an events_file_id; signal if any
        were removed."""
        deleted_any = self._storage.delete_files_with_empty_events_file_ids()
        if deleted_any:
            self.working.emit()

    def is_file_in_storage(self, events_file_id):
        """Return storage's record for the id (truthy when present)."""
        storage = self._storage
        return storage.get_known_file_by_id(events_file_id)
# Removed extraction artifact ("예제 #8" sample marker and stray "0"):
# the bare identifier would raise NameError at module import.
class Storage(object):
    """
    Interface for requesting info on registered files and folders.

    Backed by a SQLite database (SQLAlchemy) with one ``File`` row per
    known file/folder. Writing sessions may be serialized with an
    internal RLock when requested (``locked=True``).
    """
    def __init__(self, path_converter, db_file_created_cb=None):
        """
        @param path_converter Relative <-> absolute path converter
        @param db_file_created_cb Optional callback invoked whenever a
            fresh DB file is (re)created
        """
        self._pc = path_converter

        self.possibly_sync_folder_is_removed = Signal()
        self.db_or_disk_full = Signal()

        self._db_file = self._pc.create_abspath('.pvtbox/storage.db')
        logger.debug("DB file: %s", self._db_file)
        new_db_file = not exists(self._db_file)
        if new_db_file and callable(db_file_created_cb):
            db_file_created_cb()

        make_dirs(self._db_file)

        if not new_db_file:
            # Database migration. It can be executed before opening db
            try:
                upgrade_db("storage_db", db_filename=self._db_file)
            except Exception as e:
                # Migration failed: drop the old file and start fresh
                remove_file(self._db_file)
                new_db_file = True
                logger.warning(
                    "Can't upgrade storage db. "
                    "Reason: (%s) Creating...", e)
                if callable(db_file_created_cb):
                    db_file_created_cb()

        self._engine = create_engine('sqlite:///{}'.format(
            FilePath(self._db_file)),
                                     connect_args={
                                         'timeout': 60 * 1000,
                                         'check_same_thread': False,
                                     })
        self._engine.pool_timeout = 60 * 60 * 1000
        self._Session = sessionmaker(bind=self._engine)

        Base.metadata.create_all(self._engine, checkfirst=True)

        if new_db_file:
            try:
                stamp_db("storage_db", db_filename=self._db_file)
            except Exception as e:
                logger.error("Error stamping storage db: %s", e)

        self._lock = threading.RLock()

    @contextmanager
    def create_session(self, read_only=True, locked=False):
        """Yield a session; commit on success, roll back on error.

        @param read_only Disable flushing for read-only use [bool]
        @param locked Serialize write sessions with the storage lock [bool]
        """
        session = self._Session()
        session.expire_on_commit = False
        session.autoflush = False
        if read_only:
            session.flush = lambda: None

        if not read_only and locked:
            logger.debug("session %s acquiring lock...", hex(id(session)))
            self._lock.acquire()
            logger.debug("session %s acquired lock.", hex(id(session)))

        try:
            yield session
            session.commit()
        except OperationalError as e:
            logger.warning("OperationalError: %s", e)
            # Keep a reference to the original error: the inner handler
            # previously rebound ``e`` and Python 3 unbinds the name when
            # that handler exits, breaking the is_db_or_disk_full() check.
            op_error = e
            try:
                session.rollback()
            except Exception as rollback_error:
                logger.warning(
                    "OperationalError, exception while trying to rollback session: %s",
                    rollback_error)
            if is_db_or_disk_full(op_error):
                self.db_or_disk_full.emit()
            else:
                raise
        except Exception as e:
            logger.warning("Exception: %s", e)
            session.rollback()
            raise
        finally:
            if not read_only and locked:
                self._lock.release()
                logger.debug("session %s released lock.", hex(id(session)))
            session.close()

    @with_session(True)
    def _get_known_paths(self,
                         is_folder,
                         parent_dir=None,
                         exclude_dirs=None,
                         session=None):
        """Return absolute paths of known files or folders.

        @param is_folder Select folders (True) or files (False)
        @param parent_dir Limit results to this dir's subtree
        @param exclude_dirs Drop results inside any of these dirs
        @return [FilePath]
        """
        query = session.query(File.relative_path)
        query = query.filter(File.is_folder == is_folder)
        paths = query.all()
        if parent_dir:
            parent_dir = self._pc.create_relpath(parent_dir)
            paths = [p for p in paths
                     if is_contained_in(p[0], parent_dir)]
        if exclude_dirs:
            # Keep a path only if it lies outside ALL excluded dirs.
            # (The previous per-dir accumulation duplicated entries and
            # kept paths excluded by one dir but not another.)
            paths = [p for p in paths
                     if not any(is_contained_in(p[0], ed)
                                for ed in exclude_dirs)]

        return [FilePath(self._pc.create_abspath(x[0])) for x in paths]

    @benchmark
    def get_known_files(self,
                        parent_dir=None,
                        exclude_dirs=None,
                        session=None):
        """
        Returns absolute paths of files known at the moment.

        @param parent_dir Name of parent dir to limit results to [unicode]
        @return Known files paths (absolute) [(unicode, )]
        """

        return self._get_known_paths(is_folder=False,
                                     parent_dir=parent_dir,
                                     exclude_dirs=exclude_dirs,
                                     session=session)

    @benchmark
    def get_known_folders(self,
                          parent_dir=None,
                          exclude_dirs=None,
                          session=None):
        """
        Returns absolute paths of folders known at the moment

        @param parent_dir Name of parent dir to limit results to [unicode]
        @return Known folders paths (absolute) [(unicode, )]
        """

        return self._get_known_paths(is_folder=True,
                                     parent_dir=parent_dir,
                                     exclude_dirs=exclude_dirs,
                                     session=session)

    @with_session(True)
    def get_known_file(self, abs_path, is_folder=None, session=None):
        """Return the File row for abs_path, or None.

        @param is_folder Optionally restrict to folders/files.
        """
        rel_path = self._pc.create_relpath(abs_path)

        query = session.query(File).filter(File.relative_path == rel_path)
        if is_folder is not None:
            # Bug fix: the filtered query was previously discarded, so
            # the is_folder restriction never applied.
            query = query.filter(File.is_folder == is_folder)

        return query.one_or_none()

    @with_session(True)
    def get_known_file_by_id(self, file_id, session=None):
        """Return the File row with the given events_file_id, or None."""
        return session.query(File)\
            .filter(File.events_file_id == file_id)\
            .one_or_none()

    @with_session(False)
    def get_new_file(self, abs_path, is_folder, session=None):
        """Build (but do not persist) a new File row for abs_path."""
        rel_path = self._pc.create_relpath(abs_path)

        file = File(relative_path=rel_path, is_folder=is_folder)

        return file

    def update_file_signature(self, file, signature):
        """Pickle and store the signature for a file."""
        signature_path = self._pc.create_abspath(file.signature_rel_path)
        make_dirs(signature_path)
        with open(signature_path, 'wb') as f:
            # protocol=2 keeps signatures readable by older clients
            dump(signature, f, protocol=2)

    def get_file_signature(self, file):
        """Load a file's pickled signature; None if absent/corrupt."""
        abs_path = self._pc.create_abspath(file.signature_rel_path)
        try:
            with open(abs_path, 'rb') as f:
                return load(f)
        except (IOError, OSError, EOFError):
            return None

    @with_session(False)
    def save_file(self, file, session=None):
        """Merge (insert-or-update) a File row; return the merged row."""
        return session.merge(file)

    @with_session(False)
    def delete_file(self, file, session=None):
        """Delete one File row."""
        session.delete(file)

    def clean(self):
        """Delete all rows from the files table (best effort)."""
        try:
            self._engine.execute("delete from files")
            logger.info("Cleaned storage data base")
        except Exception as e:
            logger.error("Failed to clean DB (%s)", e)
            if not self.db_file_exists():
                # bare raise preserves the original traceback
                raise

    @with_session(False)
    def delete_directories(self, dirs=None, session=None):
        """Delete rows contained in any of the given dirs.

        @param dirs Absolute dir paths (None/empty means no-op); the
            previous shared mutable ``[]`` default was replaced by None.
        @return Relative paths of deleted (non-folder) files.
        """
        paths_deleted = []
        if not dirs:
            return paths_deleted

        files = session.query(File).all()
        dirs_rel = [self._pc.create_relpath(p) for p in dirs]
        for file in files:
            if is_contained_in_dirs(file.relative_path, dirs_rel):
                if not file.is_folder:
                    paths_deleted.append(file.relative_path)
                session.delete(file)
        return paths_deleted

    def db_file_exists(self):
        """True if the DB file exists and is non-empty."""
        return exists(self._db_file) and getsize(self._db_file) > 0

    @with_session(False, True)
    def change_events_file_id(self, old_id, new_id, session=None):
        """Remap a row's events_file_id from old_id to new_id."""
        file = self.get_known_file_by_id(old_id, session=session)
        if file:
            file.events_file_id = new_id
            logger.debug("Changed events_file_id for %s from %s to %s",
                         file.relative_path, old_id, new_id)
        else:
            logger.warning("Could not find file with events_file_id = %s",
                           old_id)

    @with_session(True)
    def get_known_folder_children(self, parent_dir_rel_path, session=None):
        """Return the folder row and every row beneath it."""
        path_like = parent_dir_rel_path + '/%'
        children = session.query(File)\
            .filter(
            or_(File.relative_path == parent_dir_rel_path,
                File.relative_path.like(path_like)))\
            .all()
        return children

    @with_session(False, True)
    def delete_known_folder_children(self, parent_dir_rel_path, session=None):
        """Delete the folder row and every row beneath it."""
        path_like = parent_dir_rel_path + '/%'
        session.query(File)\
            .filter(
            or_(File.relative_path == parent_dir_rel_path,
                File.relative_path.like(path_like)))\
            .delete(synchronize_session=False)

    @with_session(False, True)
    def move_known_folder_children(self,
                                   old_dir_rel_path,
                                   new_dir_rel_path,
                                   session=None):
        """Re-root all rows under old_dir_rel_path to new_dir_rel_path."""
        path_like = old_dir_rel_path + '/%'
        files = session.query(File) \
            .filter(
            or_(File.relative_path == old_dir_rel_path,
                File.relative_path.like(path_like))) \
            .all()
        mappings = [{
            'id':
            f.id,
            'relative_path':
            FilePath(
                join(new_dir_rel_path,
                     relpath(f.relative_path, old_dir_rel_path)))
        } for f in files]
        session.bulk_update_mappings(File, mappings)

    @with_session(True)
    def hash_in_storage(self, file_hash, session=None):
        """True if any row has the given hash; None for an empty hash."""
        if not file_hash:
            return None

        files_count = session.query(func.count())\
            .select_from(File)\
            .filter(File.file_hash == file_hash)\
            .scalar()
        return files_count > 0

    @with_session(False, True)
    def clear_files_hash_mtime(self, session=None):
        """Reset hash and mtime for every non-folder row."""
        session.execute(
            update(File).where(File.is_folder == 0).values(file_hash=None,
                                                           mtime=0))

    @with_session(True)
    def get_last_files(self, limit, offset=0, session=None):
        """Return non-folder rows ordered by mtime, newest first."""
        files = session.query(File) \
            .filter(File.is_folder == 0) \
            .order_by(File.mtime.desc()) \
            .offset(offset).limit(limit) \
            .all()
        return files

    def get_file_by_hash(self, hash, exclude, session):
        """Return some row with the given hash whose id is not excluded."""
        return session.query(File) \
            .filter(File.file_hash == hash) \
            .filter(File.id.notin_(exclude)) \
            .first()

    @with_session(False, True)
    def delete_files_with_empty_events_file_ids(self, session=None):
        """Purge rows lacking an events_file_id.

        Folder rows take their whole subtree with them.
        @return True if anything was deleted [bool]
        """
        files_with_empty_ids = session.query(File) \
            .filter(File.events_file_id.is_(None)) \
            .all()
        for file in files_with_empty_ids:
            if file.is_folder:
                self.delete_known_folder_children(file.relative_path,
                                                  session=session)
                type_str = "folder"
            else:
                session.delete(file)
                type_str = "file"
            logger.debug("Deleted %s %s with empty events_file_id", type_str,
                         file.relative_path)
        return bool(files_with_empty_ids)
# Removed extraction artifact ("예제 #9" sample marker and stray "0"):
# the bare identifier would raise NameError at module import.
class EventStrategy(object):
    """
    Desribe the handling stratrgy for file events
    """
    DUMMY_PAGE_SIZE = 500

    def __init__(self,
                 db,
                 event,
                 get_download_backups_mode,
                 is_smart_sync=False):
        """Build a handling strategy for one file event.

        @param db Events database facade
        @param event Event being handled (may be None)
        @param get_download_backups_mode Callable returning backups mode
        @param is_smart_sync Whether smart sync is enabled [bool]
        """
        super(EventStrategy, self).__init__()
        self.db = db
        self.event = event
        self.event_id = event.id if event else 0
        self.file_id = event.file.id if event and event.file else 0

        self._is_smart_sync = is_smart_sync
        self._cached_file_path = None
        self._events_queue = None
        self._download_backups = get_download_backups_mode()
        self._force_move = False

        self.change_processing_events_counts = Signal(int,
                                                      int)  # (local, remote)
        self.append_local_event = Signal(Event, str, str, int, bool)
        self.rename_or_delete_dst_path = Signal(str, int, Session, bool)

    ''' Public methods templates ==============================================
    '''

    @atomic
    def apply(self,
              session=None,
              fs=None,
              excluded_dirs=None,
              patches_storage=None,
              collaborated_folders=(),
              events_queue=None):
        """Apply the event atomically.

        Moves the file first if needed, applies the concrete event,
        records the resulting file state, updates the collaboration
        folder icon, and expunges parent rows from the session.

        @param session DB session (supplied by @atomic)
        @param fs Filesystem facade
        @param excluded_dirs Dirs excluded from sync
        @param patches_storage Storage for patches
        @param collaborated_folders Collaborated folder identifiers
        @param events_queue Queue of pending events
        """
        event = self.event
        assert event.file_id

        logger.debug('applying %s', self)
        change_name = True
        parent_found = True
        if event.type != 'delete':
            self._events_queue = events_queue
            change_name, parent_found = self._apply_move_if_needed(
                session, fs, excluded_dirs, patches_storage, events_queue)

        if parent_found:
            self._apply_event(session, fs, excluded_dirs, patches_storage)
        if event.state == 'received' and not event.file.excluded:
            # update file strategy cannot apply patch
            return

        logger.debug('after _apply_event %s', self)
        self._set_actual_file_state_to_this(
            session,
            update_file_event=not event.file.excluded,
            change_name=change_name)

        if event.is_folder and event.type != 'delete':
            self.set_collaborated_folder_icon(session, fs,
                                              collaborated_folders)
        self.db.expunge_parents(event.file, session)
        logger.debug('applied %s', self)

    def _apply_event(self, session, fs, excluded_dirs, patches_storage):
        pass

    def _create_file_from_copy(self, path, fs, search_by_id=False):
        pass

    def _apply_move_if_needed(self, session, fs, excluded_dirs,
                              patches_storage, events_queue):
        """Move/rename the file on disk if the event places it elsewhere.

        Handles four cases based on whether the source/destination paths
        are inside excluded dirs, retrying after resolving conflicts.

        @return (change_name, parent_found) tuple of bools
        @raise SkipEventForNow when the move cannot be done yet
        @raise SkipExcludedMove when the move targets an excluded dir
            and exclusion processing has not been prepared
        """
        event = self.event
        assert event.file_id
        parent_found = True
        folder = self.find_folder_by_uuid(session, event.folder_uuid)
        if folder == event.file.folder and event.file_name == event.file.name:
            # Neither parent folder nor name changed: nothing to move
            return True, parent_found

        move_events = list(
            filter(
                lambda e: e.server_event_id and e.type == 'move' and
                (not event.server_event_id or e.server_event_id > event.
                 server_event_id), event.file.events))
        if move_events and not self._force_move and event.is_folder:
            # skip this if we have subsequent moves
            return False, parent_found

        # Calculate object path for further use
        event_path = event.file.path

        if folder and not folder.is_existing and not folder.excluded:
            logger.debug("Parent folder does not exist for %s", event_path)
            parent_found = False
            if self._process_parent_not_found(session):
                fs.accept_delete(event_path,
                                 is_directory=event.is_folder,
                                 events_file_id=event.file_id,
                                 is_offline=event.file.is_offline)
            return True, parent_found

        logger.debug('moving %s', event.file)
        new_path = ('/'.join([folder.path, event.file_name])
                    if folder else event.file_name)

        # Check whether event paths are excluded from sync
        is_path_excluded = is_contained_in_dirs(event_path, excluded_dirs)
        is_new_path_excluded = is_contained_in_dirs(new_path, excluded_dirs)

        # Both source and destination paths are excluded
        if is_path_excluded and is_new_path_excluded:
            assert False, 'Excluded-excluded must never occur'
        # None of source and destination paths are excluded
        elif not is_path_excluded and not is_new_path_excluded:
            # Regular move event processing
            try:
                fs.accept_move(event_path,
                               new_path,
                               is_directory=event.is_folder,
                               events_file_id=event.file_id,
                               is_offline=event.file.is_offline)
            except fs.Exceptions.FileAlreadyExists:
                # Destination occupied: try to rename/delete it, then retry
                if event.file.event_id and not event.file.is_deleted:
                    if not self._rename_or_delete_dst_path(
                            new_path, session, event.file.is_offline):
                        raise SkipEventForNow()
                    else:
                        # retry move after renaming new path
                        return self._apply_move_if_needed(
                            session, fs, excluded_dirs, patches_storage,
                            events_queue)
            except fs.Exceptions.FileNotFound:
                subsequent_local_moves_deletes = list(
                    filter(
                        lambda ev: ev.id > event.id and ev.type in
                        ('delete', 'move') and ev.state in
                        ('occured', 'conflicted', 'sent'), event.file.events))
                if not subsequent_local_moves_deletes and \
                        not self.check_previous_delete(
                            session, events_queue, fs):
                    # file/folder moved or deleted locally and
                    # no events in db for now
                    # so wait
                    logger.warning("Source file (folder) %s not found.",
                                   event_path)
                    raise SkipEventForNow()
            except fs.Exceptions.WrongFileId:
                if not self.event.is_folder or \
                        not self._apply_folder_delete_if_any(session, fs):
                    raise SkipEventForNow()

                # retry move after deleting folder
                return self._apply_move_if_needed(session, fs, excluded_dirs,
                                                  patches_storage,
                                                  events_queue)
            except Exception as e:
                # ignore move if file is unavailable
                logger.warning("Can't move file (folder) %s. Reason %s",
                               event_path, e)
                raise SkipEventForNow()

            event.file.name = event.file_name
            event.file.folder = folder
            if folder:
                event.file.folder_id = folder.id
        # Source path is excluded
        elif is_path_excluded and not is_new_path_excluded:
            # Object leaves exclusion: materialize it at the destination
            self.event.file.excluded = False
            self.event.file.folder = folder
            if event.is_folder:
                # Create directory at destination path
                fs.create_directory(new_path, self.event.file_id)
            else:
                # Create file at destination path
                if self.event.file_size:
                    self._create_file_from_copy(new_path, fs)
                else:
                    fs.create_empty_file(new_path,
                                         self.event.file_hash,
                                         self.event.file_id,
                                         is_offline=self.event.file.is_offline)
        # Destination path is excluded
        elif not is_path_excluded and is_new_path_excluded:
            if not hasattr(self, '_excluded_ready') or \
                    not self._excluded_ready:
                self._excluded_ready = False
                raise SkipExcludedMove

            self.event.file.excluded = True
            self.event.file.event_id = None
            if not self.event.is_folder:
                self.event.state = 'received'
            else:
                self.db.mark_child_excluded(self.event.file_id, session)

            # Delete object at source path
            fs.accept_delete(event_path,
                             is_directory=event.is_folder,
                             is_offline=event.file.is_offline)
        return True, parent_found

    def _rename_or_delete_dst_path(self, path, session, is_offline=True):
        """Ask handlers to rename or delete the destination path.

        Returns True on success, False when a handler reported failure
        via RenameDstPathFailed.
        """
        try:
            self.rename_or_delete_dst_path.emit(
                path, self.event.file_id, session, is_offline)
        except RenameDstPathFailed:
            return False
        else:
            return True

    @db_read
    def ready_to_apply(self, session=None, is_deleted=False):
        # Read-only wrapper around _ready_to_apply (session supplied by
        # the @db_read decorator when not given).
        return self._ready_to_apply(session, is_deleted=is_deleted)

    @benchmark
    def _ready_to_apply(self, session, is_deleted=False, files=None):
        # The event can be applied when: the local file is not being
        # modified, it is a folder event or may be skipped safely,
        # it is not deleted, and it has reached an applicable state.
        # NOTE(review): `files` is accepted but unused here — presumably
        # kept for signature compatibility with overrides; confirm.
        ready = (not self.event.file.is_locally_modified and
                 (self.event.is_folder or self.ready_to_skip(session=session))
                 and not is_deleted
                 and self.event.state in ('registered', 'sent', 'downloaded'))

        return ready

    @atomic
    def ready_to_register(self, session):
        # Atomic wrapper around _ready_to_register.
        return self._ready_to_register(session)

    def _ready_to_register(self, session):
        # Base strategy never registers; concrete strategies override.
        return False

    @benchmark
    @db_read
    def ready_to_skip(self, session=None):
        """Check whether this event may be skipped.

        True (truthy) when the event is saved (has an id) and the file
        either has no applied event, or its applied / last-skipped event
        already matches this event's chain, or the applied event is a
        delete.
        """
        event = self.event
        file = event.file
        file_event = file.event
        # file.event may be unset although event_id is — presumably the
        # relationship is not loaded; fall back to a direct query.
        if file.event_id and not file.event:
            file_event = session.query(Event) \
                .filter(Event.id == file.event_id) \
                .one_or_none()
        return (event.id and
                (not file_event or file.event_id == event.last_event_id
                 or file_event.type == 'delete' or not event.last_event_id or
                 (file.last_skipped_event_id
                  and file.last_skipped_event_id == event.last_event_id)))

    @db_read
    def skip_if_file_will_be_deleted(self, session):
        '''Should be overriden in concrete strategy, if it can be skipped.

        Base implementation: skip when the file no longer exists.'''
        return not self.event.file.is_existing

    @atomic
    def skip(self, session, min_server_event_id=0, fs=None):
        """Mark this event as skipped.

        A delete event becomes the file's applied event; any other event
        is recorded in last_skipped_event_id, and a dummy delete is
        queued when the file has no delete event yet.
        """
        if self.event.type == 'delete':
            self.event.file.event_id = self.event.id
            if fs:
                # NOTE(review): presumably clears the fs-side
                # events_file_id mapping for this file — confirm.
                fs.sync_events_file_id_by_old_id(None, self.event.file_id)
        else:
            self.event.file.last_skipped_event_id = self.event.id
            # add dummy delete if there is no delete event for file
            file = session.query(File) \
                .filter(File.id == self.event.file_id) \
                .one_or_none()
            if min_server_event_id and file:
                delete_events = list(
                    filter(lambda e: e.type == 'delete', file.events))
                if not delete_events:
                    self._add_dummy_delete(file, self.event,
                                           min_server_event_id, session)

    def postpone_after_save(self):
        ''' Should be overriden in concrete strategy
            if it must be postponed after save in db.
            Base implementation never postpones.'''
        return False

    @db_read
    def file_will_be_deleted(self, session, file=None):
        """Check whether *file* (default: this event's file) ends in a
        delete event."""
        target = file or self.event.file
        return self._file_will_be_deleted(session=session, file=target)

    @benchmark
    def _file_will_be_deleted(self, session, file):
        # The file is doomed when its newest event is a delete.
        events = file.events
        return events and events[-1].type == 'delete'

    @db_read
    def get_old_uuid(self, session):
        """Return the uuid of the previous event, or None if absent."""
        last = self.event.last_event
        return last.uuid if last else None

    def set_collaborated_folder_icon(self, session, fs, collaborated_folders):
        # Intentional no-op in the base strategy; overridden where
        # collaborated-folder icons are relevant.
        pass

    def set_excluded_ready(self):
        """Flag that the excluded-move handling may proceed
        (consulted where SkipExcludedMove is raised)."""
        self._excluded_ready = True

    ''' Utility functions ===================================================
    '''

    def _apply_patch(self, fs, file_path, patch_uuid):
        try:
            fs.apply_patch(file_path, fs.get_patch_path(patch_uuid),
                           self.event.file_hash,
                           self.event.file_hash_before_event,
                           self.event.file_id)
        except Exception as e:
            logger.error("Can't apply patch %s for file %s. Error %s",
                         patch_uuid, file_path, e)
            return False
        return True

    def _get_last_nonconflicted_state(self, session, fs):
        """Return the newest applied (sent/downloaded), non-delete event
        with a server id, up to this event's last_event_id.

        Per the original contract, a conflicted copy of the current file
        state is also made.
        """
        assert self.event.file_id
        assert self.event.last_event_id, \
            'Getting last nonconflicted state for create event'

        last_state = session.query(Event) \
            .filter(Event.server_event_id.isnot(None)) \
            .filter(Event.file_id == self.event.file_id) \
            .filter(Event.state.in_(['sent', 'downloaded'])) \
            .filter(Event.type != 'delete') \
            .filter(Event.id <= self.event.last_event_id) \
            .order_by(Event.id.desc()).first()

        assert last_state is not None, \
            'Probably getting last nonconflicted state for create event'

        return last_state

    def _set_actual_file_state_to_this(self,
                                       session,
                                       update_file_event=True,
                                       change_name=True):
        """Point the file's actual state at this event.

        Optionally updates file.event/event_id, and for create/move
        events also the file name and parent folder.
        """
        file = self.event.file

        if update_file_event:
            file.event = self.event
            file.event_id = self.event.id
            file.last_skipped_event_id = None

        renames_file = self.event.type in ('create', 'move')
        if renames_file and change_name:
            file.name = self.event.file_name
            if not file.folder:
                folder = self.find_folder_by_uuid(
                    session, self.event.folder_uuid)
                file.folder_id = folder.id if folder else None

        logger.debug(
            "_set_actual_file_state_to_this. "
            "event_id %s, file.event_id %s, file.name %s", self.event.id,
            file.event_id, file.name)

    def find_folder_by_uuid(self, session, uuid):
        """Return the folder File row with the given *uuid*.

        Returns None when *uuid* is falsy.
        Raises FolderUUIDNotFound when lookup fails (no match, multiple
        matches, or a db error).
        """
        if not uuid:
            return None

        try:
            folder = session.query(File) \
                .filter(File.is_folder) \
                .filter(File.uuid == uuid) \
                .one()
        except Exception as e:
            # Bug fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; narrow it and keep the cause.
            raise FolderUUIDNotFound(uuid) from e

        return folder

    def _get_target_path(self, session):
        if self.event.state in ('occured', 'sent', 'conflicted'):
            target_path = self.event.file.path
        else:
            target_path = self.db.get_path_from_event(self.event, session)
        return target_path

    def __str__(self):
        # Human-readable strategy description, e.g. "MoveStrategy: <event>".
        return '{}: {}'.format(self.__class__.__name__, self.event)

    def check_event_path_excluded(self, excluded_dirs):
        # Base strategies never treat the event path as excluded;
        # overridden where exclusion matters.
        return False

    @db_read
    def get_file_path(self, session):
        """Return the event file's path, memoised on the instance."""
        cached = self._cached_file_path
        if not cached:
            cached = self._cached_file_path = self.event.file.path
        return cached

    @atomic
    def event_newer_than_applied(self, session):
        # True when no event has been applied to the file yet, or when
        # this event has a server id and the applied event has none or
        # an older (smaller) one.
        # NOTE: relies on `or`/`and` precedence and may return a truthy
        # non-bool value (e.g. a server_event_id) instead of True.
        return not self.event.file.event or \
               self.event.server_event_id and \
               (not self.event.file.event.server_event_id or
                self.event.server_event_id >
                self.event.file.event.server_event_id)

    def make_conflicting_copy(self, fs):
        """Create a conflicted copy of the file; concrete strategies
        must override.

        Raises:
            NotImplementedError: always, in the base strategy.
        """
        # Bug fix: `raise NotImplemented()` raised a TypeError because
        # NotImplemented is a constant, not an exception type.
        raise NotImplementedError()

    @benchmark
    def is_event_skipped(self, session):
        """True if a later event for this file was already skipped."""
        event = self.event
        try:
            session.expire(event)
        except Exception:
            # Instance may belong to another session; reload it.
            event = session.query(Event).filter(Event.id == event.id).one()

        last_skipped = event.file.last_skipped_event_id
        return last_skipped and event.id < last_skipped

    def force_move(self):
        # Set the force-move flag on this strategy instance.
        self._force_move = True

    def _add_dummy_delete_events(self, session=None):
        """Queue dummy delete events for the children of this (folder)
        event's file, then persist them in bulk."""
        assert self.event.file.is_folder

        self._dummy_deletes = []
        self.db.get_files_by_folder_uuid(self.event.file.uuid,
                                         self._files_page_processor_cb,
                                         include_folders=True,
                                         include_deleted=False,
                                         session=session)
        self._save_dummy_delete_events(session)

    def _files_page_processor_cb(self, files_page, folders_uuids, session):
        """Page callback for _add_dummy_delete_events: queue a dummy
        delete for every file on the page that is still alive and still
        inside the folder tree being deleted."""
        file_ids = [f.id for f in files_page]
        all_events = session.query(Event) \
                .filter(Event.file_id.in_(tuple(file_ids))).all()
        for file in files_page:
            if self._events_queue.is_processing_stopped():
                raise ProcessingAborted
            # This file's events, newest server_event_id first.
            events = filter(lambda e: e.file_id == file.id, all_events)
            events = sorted(events,
                            key=lambda e: e.server_event_id
                            if e.server_event_id else 0,
                            reverse=True)
            # Skip files already deleted (newest event is a delete, or a
            # delete with a negative server id — presumably a dummy) and
            # files moved out of the folder tree.
            if events and not (
                    events[0].type == 'delete' or events[-1].type == 'delete'
                    and events[-1].server_event_id
                    and events[-1].server_event_id < 0
                    or events[0].type == 'move' and events[0].server_event_id
                    and events[0].folder_uuid not in folders_uuids):
                min_server_event_id = self._events_queue\
                    .get_min_server_event_id()
                session.expire(file)
                if not file.uuid:
                    # Assign a uuid so the dummy delete can reference it.
                    file.uuid = generate_uuid()
                    for one_event in events:
                        one_event.file_uuid = file.uuid
                self._add_dummy_delete(file, events[0], min_server_event_id,
                                       session)
                self._events_queue.cancel_file_download(file.id, session)
                self._events_queue.change_processing_events_counts(
                    remote_inc=1)
                self._events_queue.events_added.set()
                # Flush in pages to keep the pending batch bounded.
                if len(self._dummy_deletes) >= self.DUMMY_PAGE_SIZE:
                    self._save_dummy_delete_events(session)

        self._events_queue.allow_loading_remotes()

    def _save_dummy_delete_events(self, session):
        """Bulk-insert the accumulated dummy delete mappings, if any."""
        if not self._dummy_deletes:
            return

        logger.debug("Saving %s dummy deletes in db",
                     len(self._dummy_deletes))
        try:
            session.bulk_insert_mappings(Event, self._dummy_deletes)
        finally:
            # Always reset the buffer, even when the insert failed.
            self._dummy_deletes = []

    def _add_dummy_delete(self,
                          file,
                          event,
                          server_event_id,
                          session,
                          add_to_dummies=True):
        """Build a synthetic 'delete' event for *file* derived from
        *event* and register it (batched or directly in the session).

        Returns the new Event instance.
        """
        # Serialized form mirrors a server delete message; the dummy
        # gets *server_event_id* as its id.
        msg = {
            'event_id': server_event_id,
            'event_type': 'delete',
            'is_folder': file.is_folder,
            'uuid': file.uuid,
            'event_uuid': event.uuid,
            'file_name': event.file_name,
            'file_name_before_event': event.file_name,
            'file_size': event.file_size,
            'last_event_id': event.server_event_id,
            'file_hash_before_event': event.file_hash,
            'parent_folder_uuid': event.folder_uuid,
            'timestamp': calendar.timegm(event.timestamp.utctimetuple()),
        }
        logger.debug("Formed dummy delete message '%s'...", msg)

        new_event, _ = deserialize_event(msg)
        new_event.last_event_id = event.id
        new_event.file_id = file.id
        new_event.state = 'downloaded'
        if add_to_dummies:
            if hasattr(self, "_dummy_deletes"):
                # many dummy deletes: buffer the mapping for bulk insert
                self._dummy_deletes.append(self.db.get_mapping(new_event))
            else:
                # one dummy delete: add it to the session directly
                session.add(new_event)
        return new_event

    def _update_excluded_dirs(self,
                              fs,
                              excluded_dirs,
                              session=None,
                              signals=None,
                              change_in_db=True):
        """Recompute excluded dirs affected by this folder delete/move,
        notify via *signals*, and optionally clear flags in the db."""
        assert self.event.is_folder

        if not excluded_dirs:
            return

        logger.debug("Updating excluded dirs")
        if self.event.type == 'delete':
            src_path = self.db.get_path_by_events(self.event, session)
            dst_path = None
        else:  # self.event.type == 'move'
            try:
                prev_event = self.event.file.events[-2]
            except IndexError:
                logger.warning("No prev event for %s", self.event)
                src_path = ""
            else:
                src_path = self.db.get_path_by_events(prev_event, session)
            dst_path = self.db.get_path_by_events(self.event, session)
        dirs_to_delete, dirs_to_add = fs.get_excluded_dirs_to_change(
            excluded_dirs, src_path, dst_path)
        # NOTE(review): returns early when nothing is to be deleted,
        # even if dirs_to_add is non-empty — confirm this is intended.
        if not dirs_to_delete:
            return

        if dirs_to_add:
            # Re-added dirs stay excluded, so don't clear flags in db.
            change_in_db = False

        signals.change_excluded_dirs.emit(dirs_to_delete, dirs_to_add)
        if change_in_db:
            for path in dirs_to_delete:
                self._mark_dir_not_excluded(path, session)

    def _mark_dir_not_excluded(self, path, session):
        """Clear the `excluded` flag on folders matching *path* and
        propagate to their children."""
        try:
            folders = self.db.find_folders_by_future_path(path,
                                                          session=session,
                                                          include_deleted=True)
        except Exception:
            logger.error("Error finding folders %s by path", path)
            return
        assert folders, "Excluded dir has to be in db"

        excluded_folders = (f for f in folders if f.excluded)
        for folder in excluded_folders:
            folder.excluded = False
            self.db.mark_child_excluded(folder.id,
                                        session,
                                        is_excluded=False)

    def _apply_folder_delete_if_any(self, session, fs):
        """Apply a pending downloaded delete for the folder at this
        event's path, if any exists.

        Returns True when a delete was applied.
        """
        path = self.event.file.path
        actual_file_id = fs.get_actual_events_file_id(path, is_folder=True)
        logger.debug(
            "Trying to delete folder %s "
            "with actual events_file_id %s...", path, actual_file_id)
        delete_events = session.query(Event) \
            .filter(Event.file_id == actual_file_id) \
            .filter(Event.type == 'delete') \
            .filter(Event.state == 'downloaded') \
            .all()
        # Keep only deletes newer than the file's applied event,
        # ordered by server id (newest last).
        delete_events = sorted(filter(
            lambda e: not e.file.event_id or e.id > e.file.event_id,
            delete_events),
                               key=lambda e: e.server_event_id)
        if not delete_events:
            return False

        # Apply the newest delete and remove the folder from disk.
        delete_events[-1].file.event_id = delete_events[-1].id
        fs.accept_delete(path,
                         is_directory=True,
                         events_file_id=actual_file_id)
        # NOTE(review): args presumably (local_inc, remote_inc) — one
        # remote event was consumed out of band; confirm.
        self.change_processing_events_counts.emit(0, -1)
        return True

    def _generate_uuid(self, session):
        """Return a freshly generated uuid no existing Event uses."""
        while True:
            candidate = generate_uuid()
            clashes = session.query(Event) \
                .filter(Event.uuid == candidate).all()
            if not clashes:
                return candidate
            logger.warning("Events with uuid '%s' exist %s",
                           candidate, clashes)

    def _process_parent_not_found(self, session):
        """Compensate for a missing parent by adding dummy delete
        events for the file (and, for folders, its children).

        Returns False when a later move/delete event will resolve the
        situation, True when dummies were added.
        """
        with self.db.db_lock:
            later_events = [e for e in self.event.file.events
                            if e.id > self.event.id]
            if any(e.type in ('move', 'delete') for e in later_events):
                # don't do anything
                return False

            if self.event.is_folder:
                self._add_dummy_delete_events(session)
                delattr(self, "_dummy_deletes")

            server_event_id = self._events_queue.get_min_server_event_id()
            self._add_dummy_delete(self.event.file, self.event,
                                   server_event_id, session)
            return True

    def _check_previous_delete(self, event, file_events, session, events_queue,
                               fs):
        # Hook for delete-aware strategies; returns
        # (has_deletes, add_dummy, new_delete_event).
        # Base implementation reports no previous deletes.
        return False, False, None

    @atomic
    def check_previous_delete(self, session=None, events_queue=None, fs=None):
        """Let the concrete strategy inspect previous deletes for this
        event's file and persist whatever it requests.

        Returns True when previous deletes were found and handled.
        """
        # Re-fetch the event; it may have been removed meanwhile.
        event = session.query(Event) \
            .filter(Event.id == self.event.id) \
            .one_or_none()
        if not event:
            return False

        has_deletes, \
        add_dummy, \
        new_delete_event = self._check_previous_delete(
            event, event.file.events, session, events_queue, fs)

        if not has_deletes:
            return False

        if new_delete_event:
            new_delete_event.file_id = event.file_id
            session.add(new_delete_event)

        if add_dummy:
            min_server_event_id = events_queue.get_min_server_event_id()
            self._add_dummy_delete(event.file, event, min_server_event_id,
                                   session)
            # One more remote event to process.
            events_queue.change_processing_events_counts(remote_inc=1)

        return True

    @atomic
    def add_dummy_if_parent_deleted(self, session=None, events_queue=None):
        """Add a dummy delete for this event's file when its parent
        folder is already registered as deleted.

        Returns True when a dummy delete was added.
        """
        logger.debug("Adding dummy when parent is deleted...")
        event = session.query(Event) \
            .filter(Event.id == self.event.id) \
            .one_or_none()
        # Bug fix: the original evaluated event.file.events BEFORE the
        # None-check on event, raising AttributeError when the event had
        # been removed from the db.
        if not event:
            return False
        if any(e.type == 'delete' for e in event.file.events):
            return False

        folder = self.find_folder_by_uuid(session, event.folder_uuid)
        if not folder or not folder.is_deleted_registered:
            return False

        if not self.event.file.folder_id:
            self.event.file.folder_id = folder.id
        min_server_event_id = events_queue.get_min_server_event_id()
        self._add_dummy_delete(event.file, event, min_server_event_id, session)
        events_queue.change_processing_events_counts(remote_inc=1)
        return True

    def _check_offline(self, session):
        """Propagate the parent folder's offline flag to this file.

        Returns 0 when nothing had to change, otherwise the value
        reported by db.make_offline (presumably a not-applied count).
        """
        event = self.event
        folder = self.find_folder_by_uuid(session, event.folder_uuid)
        needs_offline = (folder and folder.is_offline
                         and not event.file.is_offline)
        if not needs_offline:
            return 0
        return self.db.make_offline(event.file_uuid,
                                    session=session,
                                    is_offline=True)
# Example #10 (scraping artifact; original residue read "예제 #10" / "0")
class QuietProcessor(object):
    def __init__(self, root, storage, path_converter, exceptions):
        """Set up the processor.

        root: root directory the processor operates under.
        storage: known-files storage (sessions, lookups, persistence).
        path_converter: absolute <-> relative path converter for root.
        exceptions: exception types container kept for callers.
        """
        self._root = root
        self._storage = storage
        self._path_converter = path_converter
        self._exceptions = exceptions

        # Counter for temp names, guarded by its lock.
        self._tmp_id = 0
        self._tmp_id_lock = RLock()

        self._init_temp_dir()

        # Outgoing notifications; arguments are relative path(s)
        # (file_modified also carries the mtime).
        self.file_moved = Signal(str, str)
        self.file_deleted = Signal(str)
        self.file_modified = Signal(str, float)
        self.access_denied = Signal(str)

    def delete_file(self, full_path, events_file_id=None, is_offline=True):
        """Delete the file at *full_path* from disk and storage, then
        emit file_deleted with its relative path.

        When events_file_id is given, the storage record is looked up by
        that id first (its path wins); if no record matches the id, the
        deletion is skipped entirely.
        """
        # Normalize to NFC so lookups match storage's canonical form.
        full_path = unicodedata.normalize('NFC', full_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if not file:
                if events_file_id is not None:
                    logger.warning("Skipping file deletion because "
                                   "file with same events_file_id not found")
                    return
                file = self._storage.get_known_file(full_path, session=session)
            else:
                full_path = _full_path

            if file:
                try:
                    remove_file(self.get_hard_path(full_path, is_offline))
                except OSError as e:
                    logger.warning("Can't remove file. Reason: %s", e)
                    if e.errno == errno.EACCES:
                        # Permission problem: report via access handling.
                        self._raise_access_denied(full_path)
                    else:
                        raise e
                self._storage.delete_file(file, session=session)

        # Emitted even when the file was unknown to storage.
        self.file_deleted.emit(self._path_converter.create_relpath(full_path))

    def delete_directory(self, full_path, events_file_id=None):
        """Delete the directory at *full_path* (via a temp rename) and
        forget its children in storage, emitting file_deleted for each.

        When events_file_id is given but unknown, the deletion is
        skipped entirely.
        """
        full_path = unicodedata.normalize('NFC', full_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if file:
                full_path = _full_path
            elif events_file_id is not None:
                logger.warning("Skipping directory deletion because "
                               "directory with same events_file_id not found")
                return

            rel_path = self._path_converter.create_relpath(full_path)
            files = self._storage.get_known_folder_children(rel_path,
                                                            session=session)
            try:
                # Move the dir into the temp area first, then remove it
                # there; clears any stale temp entry of the same name.
                temp_path = join(self._temp_dir, basename(full_path))
                if isdir(temp_path):
                    remove_dir(temp_path, suppress_not_exists_exception=True)
                elif isfile(temp_path):
                    remove_file(temp_path)
                if isdir(full_path):
                    os.rename(full_path, temp_path)
                    try:
                        remove_dir(temp_path,
                                   suppress_not_exists_exception=True)
                    except Exception:
                        logger.debug("Dir %s delete failed", temp_path)
            except OSError as e:
                logger.warning("Can't remove dir %s. Reason: %s", full_path, e)
                if e.errno == errno.EACCES:
                    self._raise_access_denied(full_path)
                elif e.errno != errno.ENOENT:  # directory does not exist
                    raise e

            # Collect paths before the records are removed.
            deleted_paths = [f.relative_path for f in files]
            self._storage.delete_known_folder_children(rel_path,
                                                       session=session)

        for path in deleted_paths:
            self.file_deleted.emit(path)

    def create_directory(self, full_path, events_file_id, wrong_file_id=None):
        """Create the directory *full_path* (if unknown to storage) and
        bind its record to *events_file_id*.

        Raises wrong_file_id(...) when the known record is already bound
        to a different events_file_id.
        """
        full_path = unicodedata.normalize('NFC', full_path)

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            # Parent must already exist on disk.
            assert exists(dirname(full_path))
            file = self._storage.get_known_file(full_path,
                                                True,
                                                session=session)

            if file is None:
                mkdir(full_path)
                file = self._storage.get_new_file(full_path,
                                                  True,
                                                  session=session)
            elif events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                logger.error("Wrong file id for %s. Expected %s. Got %s",
                             full_path, events_file_id,
                             file.events_file_id if file else None)
                raise wrong_file_id(full_path, events_file_id,
                                    file.events_file_id)

            file.events_file_id = events_file_id
            self._storage.save_file(file, session=session)

    def patch_file(self,
                   full_fn,
                   patch_archive,
                   silent=True,
                   events_file_id=None,
                   wrong_file_id=None,
                   silent_doc=None):
        """(signature unchanged below)"""

    def move_file(self,
                  src_full_path,
                  dst_full_path,
                  events_file_id=None,
                  already_exists=None,
                  file_not_found=None,
                  wrong_file_id=None,
                  is_offline=True):
        """Move a file on disk, update its storage record, and emit
        file_moved(src_rel_path, dst_rel_path).

        already_exists / file_not_found are optional exception factories
        consulted by _check_paths_exist; wrong_file_id is raised when
        the known record is bound to a different events_file_id.
        """
        dst_full_path = unicodedata.normalize('NFC', dst_full_path)
        dst_rel_path = self._path_converter.create_relpath(dst_full_path)
        src_full_path = unicodedata.normalize('NFC', src_full_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if not file:
                file = self._storage.get_known_file(src_full_path,
                                                    False,
                                                    session=session)
            else:
                src_full_path = _full_path
            src_rel_path = self._path_converter.create_relpath(src_full_path)
            # No-op moves and missing/conflicting paths bail out early.
            if src_rel_path == dst_rel_path or not self._check_paths_exist(
                    src_full_path, dst_full_path, already_exists,
                    file_not_found):
                return

            assert exists(dirname(dst_full_path))
            if file:
                if events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 dst_full_path, events_file_id,
                                 file.events_file_id)
                    raise wrong_file_id(src_full_path, events_file_id,
                                        file.events_file_id)

                file.relative_path = self._path_converter.create_relpath(
                    dst_full_path)
                try:
                    shutil.move(src=self.get_hard_path(src_full_path,
                                                       is_offline),
                                dst=self.get_hard_path(dst_full_path,
                                                       is_offline))
                except OSError as e:
                    logger.warning("Can't move file. Reason: %s", e)
                    if e.errno == errno.EACCES:
                        self._raise_access_denied(src_full_path)
                    else:
                        raise e
                self._storage.save_file(file, session=session)

            # Fix: the signal was invoked directly (self.file_moved(...));
            # every other signal in this class is fired via .emit().
            self.file_moved.emit(src_rel_path, dst_rel_path)

    def move_directory(self,
                       src_full_path,
                       dst_full_path,
                       events_file_id=None,
                       already_exists=None,
                       file_not_found=None,
                       wrong_file_id=None):
        """Move a directory on disk, relocate its children's records in
        storage, and emit file_moved(src_rel_path, dst_rel_path).

        already_exists / file_not_found are optional exception factories
        consulted by _check_paths_exist; wrong_file_id is raised when
        the known record is bound to a different events_file_id.
        """
        dst_full_path = unicodedata.normalize('NFC', dst_full_path)
        dst_rel_path = self._path_converter.create_relpath(dst_full_path)
        src_full_path = unicodedata.normalize('NFC', src_full_path)

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if not file:
                file = self._storage.get_known_file(src_full_path,
                                                    True,
                                                    session=session)
            else:
                src_full_path = _full_path

            src_rel_path = self._path_converter.create_relpath(src_full_path)
            # No-op moves and missing/conflicting paths bail out early.
            if src_rel_path == dst_rel_path or not self._check_paths_exist(
                    src_full_path, dst_full_path, already_exists,
                    file_not_found):
                return

            assert exists(dirname(dst_full_path))
            if file:
                if events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 src_full_path, events_file_id,
                                 file.events_file_id if file else None)
                    raise wrong_file_id(src_full_path, events_file_id,
                                        file.events_file_id)
            try:
                os.rename(src_full_path, dst_full_path)
            except OSError as e:
                logger.warning("Can't move dir %s. Reason: %s", src_full_path,
                               e)
                if e.errno == errno.EACCES:
                    self._raise_access_denied(src_full_path)
                else:
                    raise e

            self._storage.move_known_folder_children(src_rel_path,
                                                     dst_rel_path,
                                                     session=session)

            # Fix: fire via .emit() (was called directly) and pass
            # dst_rel_path as-is, matching move_file (the str() wrapper
            # was redundant).
            self.file_moved.emit(src_rel_path, dst_rel_path)

    def create_file_from_copy(self,
                              file_rel_path,
                              copy_hash,
                              silent,
                              events_file_id,
                              search_by_id=False,
                              wrong_file_id=None,
                              copy_does_not_exists=None):
        """Create the file at *file_rel_path* from the stored copy
        identified by *copy_hash*.

        When copy_does_not_exists is given and no copy exists (nor can
        one be rebuilt from existing files), that exception is raised.
        """
        dst_full_path = self._path_converter.create_abspath(file_rel_path)
        copy_full_path = join(get_copies_dir(self._root), copy_hash)
        copy_missing = (copy_does_not_exists is not None
                        and not exists(copy_full_path))
        if copy_missing and not self.make_copy_from_existing_files(copy_hash):
            raise copy_does_not_exists(copy_hash)
        return self._create_file(copy_full_path, dst_full_path, silent,
                                 copy_hash, events_file_id, search_by_id,
                                 wrong_file_id)

    def make_copy_from_existing_files(self, copy_hash):
        """Rebuild the copy file for *copy_hash* from some known file
        on disk whose content still matches that hash.

        Returns True if the copy already exists or was rebuilt,
        False if no suitable source file could be found.
        """
        copy_full_path = join(get_copies_dir(self._root), copy_hash)
        if exists(copy_full_path):
            return True

        tmp_full_path = self._get_temp_path(copy_full_path)
        with self._storage.create_session(read_only=True,
                                          locked=False) as session:
            excludes = []
            while True:
                file = self._storage.get_file_by_hash(copy_hash,
                                                      exclude=excludes,
                                                      session=session)
                if not file:
                    # no remaining candidates in storage
                    return False

                file_path = self._path_converter.create_abspath(
                    file.relative_path)
                if not exists(file_path):
                    excludes.append(file.id)
                    continue

                try:
                    copy_file(file_path, tmp_full_path)
                    # Re-hash the copied data: the source may have
                    # changed since it was indexed in storage.
                    # (renamed from `hash` to avoid shadowing the builtin)
                    tmp_hash = Rsync.hash_from_block_checksum(
                        Rsync.block_checksum(tmp_full_path))
                    if tmp_hash == copy_hash:
                        os.rename(tmp_full_path, copy_full_path)
                        return True
                    else:
                        excludes.append(file.id)
                        remove_file(tmp_full_path)
                except Exception as e:
                    logger.warning("Can't operate tmp file %s. Reason: (%s)",
                                   tmp_full_path, e)
                    if file.id not in excludes:
                        excludes.append(file.id)
                    try:
                        remove_file(tmp_full_path)
                    except Exception:
                        # tmp file is stuck (e.g. locked); continue with
                        # a fresh temp name instead of failing outright
                        tmp_full_path = self._get_temp_path(copy_full_path)

    def _get_temp_path(self, copy_full_path):
        """Return a not-yet-existing ``<copy_full_path>_<id>.tmp`` path,
        incrementing the shared tmp counter under its lock."""
        while True:
            with self._tmp_id_lock:
                self._tmp_id += 1
            candidate = "{}_{}.tmp".format(copy_full_path, self._tmp_id)
            if not exists(candidate):
                return candidate

    def create_empty_file(self,
                          file_rel_path,
                          file_hash,
                          silent,
                          events_file_id,
                          search_by_id=False,
                          wrong_file_id=None,
                          is_offline=True):
        """Create an empty file (or, when not offline, a link
        placeholder) for *file_rel_path* via ``_create_file`` with no
        source copy."""
        target_path = self._path_converter.create_abspath(file_rel_path)
        self._create_file(None, target_path, silent, file_hash,
                          events_file_id, search_by_id, wrong_file_id,
                          is_offline)

    def _create_file(self,
                     src_full_path,
                     dst_full_path,
                     silent,
                     file_hash,
                     events_file_id,
                     search_by_id,
                     wrong_file_id,
                     is_offline=True):
        """Materialize a file at dst_full_path, either from a copy at
        src_full_path or as an empty file/placeholder, and update
        storage accordingly.

        Returns the hash previously stored for the file (old_hash).
        Raises wrong_file_id (if provided) when the stored
        events_file_id conflicts with the given one.
        """

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file = None
            file_exists = False
            was_updated = True
            if search_by_id:
                # Prefer lookup by events_file_id; if found, the stored
                # path wins over the caller-supplied destination.
                file, _full_path = self._get_file_by_id(
                    events_file_id, session)
                if file:
                    dst_full_path = _full_path

            assert exists(dirname(dst_full_path))
            hard_path = self.get_hard_path(dst_full_path, is_offline)
            if not file:
                file = self._storage.get_known_file(dst_full_path,
                                                    is_folder=False,
                                                    session=session)
                # Conflicting id for an already-known path is an error
                # only when the caller supplied a wrong_file_id class.
                if file and events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 dst_full_path, events_file_id,
                                 file.events_file_id)
                    raise wrong_file_id(dst_full_path, events_file_id,
                                        file.events_file_id)
            if file:
                # Precedence: (exists(dst) and is_offline) or
                # (exists(hard_path) and not is_offline) — the file is
                # considered present only in the form matching its mode.
                file_exists = file.file_hash == file_hash and \
                              (exists(dst_full_path) and is_offline or
                               exists(hard_path) and not is_offline)
                logger.debug(
                    "The fact that file %s with same hash "
                    "already exists in storage and filesystem is %s",
                    dst_full_path, file_exists)

            if file is None:
                # if search_by_id and wrong_file_id:
                #     logger.error("Wrong file id for %s. Expected %s. Got None",
                #                  dst_full_path, events_file_id)
                #     raise wrong_file_id(dst_full_path,
                #                         events_file_id,
                #                         None)

                file = self._storage.get_new_file(dst_full_path,
                                                  False,
                                                  session=session)
                was_updated = False
            old_hash = file.file_hash

            signature = None
            if not file_exists:
                if src_full_path:
                    # create file from copy
                    if not exists(get_signature_path(file_hash)):
                        signature = Rsync.block_checksum(src_full_path)
                    # Copy to a temp name first so the destination is
                    # replaced atomically by the rename below.
                    tmp_full_path = self._get_temp_path(src_full_path)
                    copy_file(src_full_path, tmp_full_path)
                    try:
                        remove_file(dst_full_path)
                        os.rename(tmp_full_path, dst_full_path)
                        # NOTE(review): mtime is taken from the link
                        # placeholder (dst + FILE_LINK_SUFFIX) before it
                        # is removed — confirm the placeholder always
                        # exists at this point.
                        copy_time(dst_full_path + FILE_LINK_SUFFIX,
                                  dst_full_path)
                        remove_file(dst_full_path + FILE_LINK_SUFFIX)
                    except Exception as e:
                        logger.warning(
                            "Can't rename to dst file %s. "
                            "Reason: %s", dst_full_path, e)
                        try:
                            remove_file(tmp_full_path)
                        except Exception:
                            pass
                        raise e
                else:
                    # No source copy: create an empty file (offline) or
                    # a suffixed placeholder (online-only).
                    create_empty_file(hard_path)
                    if not is_offline:
                        self.write_events_file_id(hard_path, events_file_id)
                        set_ext_invisible(hard_path)
                    if hard_path.endswith(FILE_LINK_SUFFIX):
                        copy_time(dst_full_path, hard_path)
                        remove_file(dst_full_path)
                    else:
                        copy_time(hard_path, dst_full_path)
                        remove_file(dst_full_path + FILE_LINK_SUFFIX)

            # NOTE(review): metadata is persisted and file_modified is
            # emitted only when silent=True — confirm non-silent callers
            # handle storage updates elsewhere (e.g. via watcher events).
            if silent:
                file.mtime = os.stat(hard_path).st_mtime
                file.size = os.stat(hard_path).st_size
                file.file_hash = file_hash
                file.events_file_id = events_file_id
                file.was_updated = was_updated
                logger.debug("Saving file. id=%s", file.events_file_id)
                self._storage.save_file(file, session=session)
                if src_full_path and signature:
                    # create file from copy
                    self._storage.update_file_signature(file, signature)
                if was_updated:
                    self.file_modified.emit(file.relative_path, file.mtime)

            return old_hash

    def sync_events_file_id(self, file_path, events_file_id, is_folder):
        """Store *events_file_id* on the known file at *file_path*;
        log a warning if the path is unknown to storage."""
        abs_path = self._path_converter.create_abspath(file_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            known_file = self._storage.get_known_file(abs_path,
                                                      is_folder=is_folder,
                                                      session=session)
            if not known_file:
                logger.warning("Can't sync events_file_id for path %s",
                               file_path)
                return
            known_file.events_file_id = events_file_id
            self._storage.save_file(known_file, session=session)

    def sync_events_file_id_by_old_id(self, events_file_id,
                                      old_events_file_id):
        """Replace *old_events_file_id* with *events_file_id* on the
        matching known file, if any."""
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            known_file, _ = self._get_file_by_id(old_events_file_id, session)
            if not known_file:
                logger.debug("Can't sync events_file_id for old_id %s",
                             old_events_file_id)
                return
            known_file.events_file_id = events_file_id
            self._storage.save_file(known_file, session=session)

    def _get_file_by_id(self, events_file_id, session):
        """Return ``(file, abspath)`` for *events_file_id*, or
        ``(None, None)`` when the id is empty or unknown."""
        if not events_file_id:
            return None, None
        found = self._storage.get_known_file_by_id(events_file_id, session)
        if not found:
            logger.warning("Can't find file by id %s", events_file_id)
            return found, None
        return found, self._path_converter.create_abspath(
            found.relative_path)

    def _check_paths_exist(self, src_full_path, dst_full_path, already_exists,
                           file_not_found):
        """Validate a move: return True when src exists and dst does
        not. Raises *already_exists*/*file_not_found* (when provided)
        for the corresponding conflicts; otherwise returns False."""
        if exists(dst_full_path):
            if not exists(src_full_path):
                # dst already in place and src gone: treat as done
                logger.debug(
                    "Destination exists %s, source does not exist %s."
                    " Moving accepted", dst_full_path, src_full_path)
                return False
            if already_exists:
                raise already_exists(dst_full_path)
            return False

        if exists(src_full_path):
            return True

        if file_not_found:
            raise file_not_found(src_full_path)
        return False

    def delete_old_signatures(self, signatures_dir, delete_all=False):
        """Delete signature files from *signatures_dir*.

        When *delete_all* is False only signatures whose hash is no
        longer present in storage are removed; otherwise every file in
        the directory is deleted. Failures are logged, not raised.

        We believe that signatures dir contains only signature files
        and no subdirs.
        """
        try:
            signatures_to_delete = os.listdir(signatures_dir)
        except Exception as e:
            logger.warning("Can't delete old signatures. Reason: %s", e)
            return

        if not delete_all:
            # taking storage lock to prevent adding new signatures
            # during deletion
            with self._storage.create_session(read_only=False,
                                              locked=True) as session:
                # BUGFIX: materialize the list while the session is
                # open. The previous lazy filter() only invoked
                # hash_in_storage() when consumed below — i.e. after
                # the session context had already exited.
                signatures_to_delete = [
                    h for h in signatures_to_delete
                    if not self._storage.hash_in_storage(h, session=session)]

        try:
            for signature in signatures_to_delete:
                remove_file(join(signatures_dir, signature))
        except Exception as e:
            logger.warning("Can't delete old signatures. Reason: %s", e)

    def _init_temp_dir(self):
        """Reset the temp directory under the sync root: remove any
        stale one (best effort), then create it fresh."""
        stale_dir = get_temp_dir(self._root)
        self._temp_dir = stale_dir
        if exists(stale_dir):
            try:
                remove_dir(stale_dir)
            except Exception as e:
                logger.warning("Can't remove temp dir. Reason: %s", e)

        self._temp_dir = get_temp_dir(self._root, create=True)

    def _raise_access_denied(self, full_path):
        # Notify listeners about the denied path, then abort the
        # current operation with the service-level exception.
        self.access_denied(full_path)
        raise self._exceptions.AccessDenied(full_path)

    def get_hard_path(self, full_path, is_offline=True):
        """Return the on-disk path backing *full_path*: the path itself
        for offline files, the link-suffixed placeholder otherwise."""
        if is_offline:
            return full_path
        return full_path + FILE_LINK_SUFFIX

    def write_events_file_id(self, hard_path, events_file_id):
        """Serialize *events_file_id* with pickle into the placeholder
        file at *hard_path* (overwriting it)."""
        serialized = pickle.dumps(events_file_id)
        with open(hard_path, 'wb') as stream:
            stream.write(serialized)
# --- Example #11 ---
class Settings(object):
    """Modal settings dialog.

    Reads current values from the service config (``cfg``) and the main
    application config (``main_cfg``) into the UI, writes them back on
    accept, and drives the sync-folder migration and SmartSync
    directory-selection flows.
    """
    class _MigrationFailed(ExpectedError):
        # Raised internally to abort the sync-dir migration flow;
        # always caught in _on_sync_folder_location_button_clicked.
        pass

    def __init__(self,
                 cfg,
                 main_cfg,
                 start_service,
                 exit_service,
                 parent=None,
                 size=None,
                 migrate=False,
                 dp=1,
                 get_offline_dirs=lambda: None,
                 set_offline_dirs=lambda o, no: None):
        """Build the dialog UI, show license info and wire signals.

        *dp* is a font scale factor; *migrate*=True opens the dialog
        straight into the sync-folder migration flow.
        """
        super(Settings, self).__init__()
        self._cfg = cfg
        self._main_cfg = main_cfg
        self._start_service = start_service
        self._exit_service = exit_service
        self._parent = parent
        self._size = size
        self._dp = dp
        self._get_offline_dirs = get_offline_dirs
        self._set_offline_dirs = set_offline_dirs

        self._dialog = QDialog(parent)
        self._dialog.setWindowIcon(QIcon(':/images/icon.png'))
        self._dialog.setAttribute(Qt.WA_MacFrameworkScaled)
        self._ui = settings.Ui_Dialog()
        self._ui.setupUi(self._dialog)
        self._max_root_len = get_max_root_len(self._cfg)
        self._migrate = migrate
        self._migration = None
        self._migration_cancelled = False

        try:
            # An unknown license constant raises KeyError here, leaving
            # the account widgets in their default (hidden) state.
            self._ui.account_type.setText(
                license_display_name_from_constant(self._cfg.license_type))
            self._ui.account_type.setVisible(True)
            self._ui.account_type_header.setVisible(True)
            self._ui.account_upgrade.setVisible(True)
        except KeyError:
            pass
        upgrade_license_types = (FREE_LICENSE, FREE_TRIAL_LICENSE)
        if self._cfg.license_type in upgrade_license_types:
            self._ui.account_upgrade.setText('<a href="{}">{}</a>'.format(
                GET_PRO_URI.format(self._cfg.host), tr('Upgrade')))
            self._ui.account_upgrade.setTextFormat(Qt.RichText)
            self._ui.account_upgrade.setTextInteractionFlags(
                Qt.TextBrowserInteraction)
            self._ui.account_upgrade.setOpenExternalLinks(True)
            self._ui.account_upgrade.setAlignment(Qt.AlignLeft)
        else:
            self._ui.account_upgrade.setText("")

        self._ui.centralWidget.setFrameShape(QFrame.NoFrame)
        self._ui.centralWidget.setLineWidth(1)

        self._ui.language_comboBox.addItem(tr('English'))
        self._ui.language_comboBox.setEnabled(False)

        self._connect_slots()
        self._set_fonts()
        self._ui.tabWidget.setCurrentIndex(0)

        self._smart_sync_dialog = None

        self.logged_out = Signal(bool)
        self.logging_disabled_changed = Signal(bool)

        # FIXMe: without line below app crashes on exit after settings opened
        self._dialog.mousePressEvent = self.on_mouse_press_event

    def on_mouse_press_event(self, ev):
        # Intentionally a no-op; see the FIXME note in __init__.
        pass

    def _connect_slots(self):
        """Wire UI widget signals to their handlers (limits, buttons,
        hover icon swaps)."""
        ui = self._ui

        ui.logout_button.clicked.connect(self._logout)

        ui.download_auto_radioButton.clicked.connect(
            lambda: ui.download_limit_edit.setEnabled(
                False) or ui.download_limit_edit.clear())
        ui.download_limit_radioButton.clicked.connect(
            lambda: ui.download_limit_edit.setEnabled(True))

        ui.upload_auto_radioButton.clicked.connect(
            lambda: ui.upload_limit_edit.setEnabled(
                False) or ui.upload_limit_edit.clear())
        ui.upload_limit_radioButton.clicked.connect(
            lambda: ui.upload_limit_edit.setEnabled(True))

        ui.buttonBox.accepted.connect(self._dialog.accept)
        ui.buttonBox.rejected.connect(self._dialog.reject)

        ui.smart_sync_button.clicked.connect(
            self._on_smart_sync_button_clicked)

        ui.location_button.clicked.connect(
            self._on_sync_folder_location_button_clicked)

        # Swap button icons on mouse enter/leave for hover feedback.
        ui.location_button.enterEvent = lambda _: \
            ui.location_button.setIcon(QIcon(
                ':/images/settings/pencil_hovered.svg'))
        ui.location_button.leaveEvent = lambda _: \
            ui.location_button.setIcon(QIcon(
                ':/images/settings/pencil.svg'))
        ui.smart_sync_button.enterEvent = lambda _: \
            ui.smart_sync_button.setIcon(QIcon(
                ':/images/settings/folder_sync_hovered.svg'))
        ui.smart_sync_button.leaveEvent = lambda _: \
            ui.smart_sync_button.setIcon(QIcon(
                ':/images/settings/folder_sync.svg'))
        ui.logout_button.enterEvent = lambda _: \
            ui.logout_button.setIcon(QIcon(
                ':/images/settings/logout_hovered.svg'))
        ui.logout_button.leaveEvent = lambda _: \
            ui.logout_button.setIcon(QIcon(
                ':/images/settings/logout.svg'))

    def _set_fonts(self):
        """Scale fonts of all tab controls by the *dp* factor, keeping
        each control's family and boldness."""
        ui = self._ui
        controls = [ui.tabWidget, ui.language_comboBox]
        controls.extend([c for c in ui.tabWidget.findChildren(QLabel)])
        controls.extend([c for c in ui.tabWidget.findChildren(QLineEdit)])
        controls.extend([c for c in ui.tabWidget.findChildren(QPushButton)])
        controls.extend([c for c in ui.tabWidget.findChildren(QCheckBox)])
        controls.extend([c for c in ui.tabWidget.findChildren(QRadioButton)])

        for control in controls:
            font = control.font()
            font_size = control.font().pointSize() * self._dp
            if font_size > 0:
                control_font = QFont(font.family(), font_size)
                control_font.setBold(font.bold())
                control.setFont(control_font)

    def _logout(self):
        """Ask whether to keep or wipe local files, emit ``logged_out``
        and close the dialog. A closed message box (empty answer)
        cancels the logout."""
        userAnswer = msgbox(tr('Keep local files on device?'),
                            buttons=[
                                (tr('Clear all'), 'Wipe'),
                                (tr('Keep'), 'Keep'),
                            ],
                            parent=self._dialog,
                            default_index=1,
                            enable_close_button=True)

        if userAnswer == '':
            return

        wipe_all = userAnswer == 'Wipe'
        if not wipe_all:
            self._cfg.set_settings({'user_password_hash': ""})

        self.logged_out.emit(wipe_all)

        self._dialog.reject()

    def show(self, on_finished):
        """Populate the UI and show the dialog modally; apply settings
        on accept, then invoke *on_finished*."""
        def finished():
            if self._dialog.result() == QDialog.Accepted:
                self._apply_settings()
            self._dialog.finished.disconnect(finished)
            on_finished()

        self._setup_to_ui()
        if self._migrate:
            self._ui.tabWidget.setCurrentIndex(1)  # Account page
            QTimer.singleShot(100,
                              self._on_sync_folder_location_button_clicked)
        self._dialog.finished.connect(finished)
        self._dialog.raise_()
        self._dialog.setModal(True)
        self._dialog.show()

    def _setup_to_ui(self):
        """Populate all UI widgets from the current config values."""
        ui = self._ui
        cfg = self._cfg

        portable = is_portable()

        if cfg.get_setting('lang', None) is None:
            self._ui.language_comboBox.setCurrentIndex(0)
        else:
            lang = cfg.lang if cfg.lang in get_available_languages() else 'en'
            assert lang in get_available_languages()
            for i in range(1, ui.language_comboBox.count()):
                if ui.language_comboBox.itemText(i) == lang:
                    ui.language_comboBox.setCurrentIndex(i)
                    break

        ui.location_edit.setText(
            FilePath(cfg.sync_directory) if cfg.sync_directory else '')
        ui.location_button.setEnabled(not portable)
        if portable:
            ui.location_button.setToolTip(tr("Disabled in portable version"))
        ui.email_label.setText(cfg.user_email if cfg.user_email else '')

        # Check the manual radio button and fill the edit when a limit
        # is set; otherwise click the "auto" button so its slot also
        # disables/clears the edit.
        def set_limit(limit, auto_btn, manual_btn, edit):
            edit.setValidator(QRegExpValidator(QRegExp("\\d{1,9}")))
            if limit:
                manual_btn.setChecked(True)
                edit.setText(str(limit))
            else:
                auto_btn.setChecked(True)
                auto_btn.click()

        set_limit(limit=cfg.download_limit,
                  auto_btn=ui.download_auto_radioButton,
                  manual_btn=ui.download_limit_radioButton,
                  edit=ui.download_limit_edit)
        set_limit(limit=cfg.upload_limit,
                  auto_btn=ui.upload_auto_radioButton,
                  manual_btn=ui.upload_limit_radioButton,
                  edit=ui.upload_limit_edit)

        ui.autologin_checkbox.setChecked(self._main_cfg.autologin)
        ui.autologin_checkbox.setEnabled(not portable)
        if portable:
            ui.autologin_checkbox.setToolTip(
                tr("Disabled in portable version"))
        ui.tracking_checkbox.setChecked(cfg.send_statistics)
        ui.autoupdate_checkbox.setChecked(self._main_cfg.autoupdate)
        ui.download_backups_checkBox.setChecked(cfg.download_backups)
        ui.is_smart_sync_checkBox.setChecked(cfg.smart_sync)
        ui.disable_logging_checkBox.setChecked(self._main_cfg.logging_disabled)

        # Disable smart sync for free license
        if not cfg.license_type or cfg.license_type == FREE_LICENSE:
            ui.is_smart_sync_checkBox.setText(
                tr("SmartSync+ is not available for your license"))
            ui.is_smart_sync_checkBox.setChecked(False)
            ui.is_smart_sync_checkBox.setCheckable(False)
            ui.smart_sync_button.setEnabled(False)

        ui.startup_checkbox.setChecked(is_in_system_startup())
        ui.startup_checkbox.setEnabled(not portable)
        if portable:
            ui.startup_checkbox.setToolTip(tr("Disabled in portable version"))

    def _apply_settings(self):
        """Persist UI values into both configs and sync the system
        startup registration with the checkbox state."""
        service_settings, main_settings = self._get_configs_from_ui()
        if main_settings['logging_disabled'] != \
                self._main_cfg.logging_disabled:
            self.logging_disabled_changed.emit(
                main_settings['logging_disabled'])
        self._cfg.set_settings(service_settings)
        self._main_cfg.set_settings(main_settings)
        if self._ui.startup_checkbox.isChecked():
            if not is_in_system_startup():
                add_to_system_startup()
        else:
            if is_in_system_startup():
                remove_from_system_startup()

    def _config_is_changed(self):
        """Return True if any UI value differs from the stored
        service or main config values."""
        service_settings, main_settings = self._get_configs_from_ui()
        for param, value in service_settings.items():
            if self._cfg.get_setting(param) != value:
                return True
        for param, value in main_settings.items():
            if self._main_cfg.get_setting(param) != value:
                return True

        return False

    def _get_configs_from_ui(self):
        """Return ``(service_settings, main_settings)`` dicts built
        from the current widget states."""
        ui = self._ui
        return {
            'lang': (str(ui.language_comboBox.currentText())
                     if ui.language_comboBox.currentIndex() > 0 else None),
            'upload_limit': (0 if ui.upload_auto_radioButton.isChecked()
                             or not ui.upload_limit_edit.text() else int(
                                 ui.upload_limit_edit.text())),
            'download_limit': (0 if ui.download_auto_radioButton.isChecked()
                               or not ui.download_limit_edit.text() else int(
                                   ui.download_limit_edit.text())),
            'send_statistics':
            bool(ui.tracking_checkbox.isChecked()),
            'download_backups':
            bool(ui.download_backups_checkBox.isChecked()),
            'smart_sync':
            bool(ui.is_smart_sync_checkBox.isChecked()),
            'autologin':
            bool(ui.autologin_checkbox.isChecked()),
        }, {
            'autologin': bool(ui.autologin_checkbox.isChecked()),
            'autoupdate': bool(ui.autoupdate_checkbox.isChecked()),
            'logging_disabled': bool(ui.disable_logging_checkBox.isChecked()),
            'download_backups': bool(ui.download_backups_checkBox.isChecked()),
        }

    def _on_smart_sync_button_clicked(self):
        """Open the SmartSync dialog and forward the user's offline /
        online directory selection."""
        self._get_offline_dirs()
        root = str(self._ui.location_edit.text())
        self._smart_sync_dialog = SmartSyncDialog(self._dialog)
        offline, online = self._smart_sync_dialog.show(root_path=root,
                                                       hide_dotted=True)
        if offline or online:
            logger.info("Directories set to be offline: (%s)",
                        ", ".join(map(lambda s: u"'%s'" % s, offline)))
            self._set_offline_dirs(offline, online)

    def offline_dirs(self, offline_dirs):
        """Push the set of offline dirs (as absolute paths) into the
        SmartSync dialog, if it is open."""
        root = str(self._ui.location_edit.text())
        pc = PathConverter(root)
        offline_dirs_abs_paths = set(
            map(lambda p: pc.create_abspath(p), offline_dirs))
        if self._smart_sync_dialog:
            self._smart_sync_dialog.set_offline_paths(offline_dirs_abs_paths)

    def _on_sync_folder_location_button_clicked(self):
        """Let the user pick a new sync-folder location, validate it,
        and run the migration with a progress dialog.

        Validation failures raise _MigrationFailed, which is caught and
        logged below; when in migrate mode the dialog is accepted in
        any case.
        """
        selected_folder = QFileDialog.getExistingDirectory(
            self._dialog, tr('Choose Pvtbox folder location'),
            get_parent_dir(FilePath(self._cfg.sync_directory)))
        selected_folder = ensure_unicode(selected_folder)

        try:
            if not selected_folder:
                raise self._MigrationFailed("Folder is not selected")

            if len(selected_folder + "/Pvtbox") > self._max_root_len:
                if not self._migrate:
                    msgbox(tr("Destination path too long. "
                              "Please select shorter path."),
                           tr("Path too long"),
                           parent=self._dialog)
                raise self._MigrationFailed("Destination path too long")

            free_space = get_free_space(selected_folder)
            selected_folder = get_data_dir(dir_parent=selected_folder,
                                           create=False)
            if FilePath(selected_folder) == FilePath(self._cfg.sync_directory):
                raise self._MigrationFailed("Same path selected")

            if FilePath(selected_folder) in FilePath(self._cfg.sync_directory):
                msgbox(tr("Can't migrate into existing Pvtbox folder.\n"
                          "Please choose other location"),
                       tr("Invalid Pvtbox folder location"),
                       parent=self._dialog)
                raise self._MigrationFailed(
                    "Can't migrate into existing Pvtbox folder")

            if self._size and free_space < self._size:
                logger.debug(
                    "No disk space in %s. Free space: %s. Needed: %s.",
                    selected_folder, free_space, self._size)
                # NOTE(review): selected_folder is passed as a second
                # argument to tr() rather than via str.format() — confirm
                # the "{}" placeholder is actually substituted here.
                msgbox(tr(
                    "Insufficient disk space for migration to\n{}.\n"
                    "Please clean disk", selected_folder),
                       tr("No disk space"),
                       parent=self._dialog)
                raise self._MigrationFailed(
                    "Insufficient disk space for migration")

            self._migration_cancelled = False
            dialog = QProgressDialog(self._dialog)
            dialog.setWindowTitle(tr('Migrating to new Pvtbox folder'))
            dialog.setWindowIcon(QIcon(':/images/icon.svg'))
            dialog.setModal(True)
            dialog.setMinimum(0)
            dialog.setMaximum(100)
            dialog.setMinimumSize(400, 80)
            dialog.setAutoClose(False)

            def progress(value):
                # Forward migration progress (0-100) to the dialog.
                logger.debug("Migration dialog progress received: %s", value)
                dialog.setValue(value)

            def migration_failed(error):
                logger.warning("Migration failed with error: %s", error)
                msgbox(error,
                       tr('Migration to new Pvtbox folder error'),
                       parent=dialog)
                dialog.cancel()
                self._migration_cancelled = True
                done()

            def cancel():
                logger.debug("Migration dialog cancelled")
                self._migration_cancelled = True
                self._migration.cancel()

            def done():
                # Disconnect everything before closing so late signals
                # can't re-enter the handlers.
                logger.debug("Migration done")
                try:
                    self._migration.progress.disconnect(progress)
                    self._migration.failed.disconnect(migration_failed)
                    self._migration.done.disconnect(done)
                    dialog.canceled.disconnect(cancel)
                except Exception as e:
                    logger.warning("Can't disconnect signal %s", e)
                dialog.hide()
                dialog.done(QDialog.Accepted)
                dialog.close()

            self._migration = SyncDirMigration(self._cfg, parent=self._dialog)
            self._migration.progress.connect(progress, Qt.QueuedConnection)
            self._migration.failed.connect(migration_failed,
                                           Qt.QueuedConnection)
            self._migration.done.connect(done, Qt.QueuedConnection)
            dialog.canceled.connect(cancel)
            self._exit_service()
            old_dir = self._cfg.sync_directory
            self._migration.migrate(old_dir, selected_folder)

            def on_finished():
                logger.info("Migration dialog closed")
                if not self._migration_cancelled:
                    logger.debug("Setting new location")
                    self._ui.location_edit.setText(FilePath(selected_folder))

                    disable_file_logging(logger)
                    shutil.rmtree(op.join(old_dir, '.pvtbox'),
                                  ignore_errors=True)
                    set_root_directory(FilePath(selected_folder))
                    enable_file_logging(logger)

                    make_dir_hidden(get_patches_dir(selected_folder))

                self._start_service()

            dialog.finished.connect(on_finished)
            dialog.show()

        except self._MigrationFailed as e:
            logger.warning("Sync dir migration failed. Reason: %s", e)
        finally:
            if self._migrate:
                self._dialog.accept()
# --- Example #12 ---
class DelayAction(ActionBase):
    """Delays and coalesces filesystem events before dispatching them
    to a worker thread pool.

    Events arriving for the same source path are merged into one pending
    event; an event is dispatched only after staying delayed for the
    configured processing delay.  Offline events are kept in a separately
    sorted queue.  Emits idle/working transitions and per-file
    "added to / removed from indexing" notifications.
    """

    # One worker thread per CPU core, at least one.
    workers_count = max(multiprocessing.cpu_count(), 1)
    # Cap on the number of events processed concurrently.
    processing_events_limit = workers_count * 8

    def __init__(self, events_processing_delay, tracker):
        """
        @param events_processing_delay
            Seconds an event must stay delayed before it may be
            dispatched [float]
        @param tracker
            Statistics/error tracker instance or None
        """
        super(DelayAction, self).__init__()
        self.idle = Signal()
        self.working = Signal()
        self.file_added_to_indexing = Signal(FilePath)
        self.file_removed_from_indexing = Signal(FilePath, bool)

        self._started = False
        self._delay = events_processing_delay
        self._tracker = tracker
        self._offline_delay = 0.5
        self._delayed_paths = {}  # src path -> pending fs_event
        self._delayed_move_dst_paths = set()  # dst paths of pending MOVEs
        self._lock = threading.RLock()
        self._time_provider = time.time
        self._executor = ThreadPoolExecutor(max_workers=self.workers_count)
        self._processing_events = set()  # src paths currently in workers
        self._batch_checking_paths = set()
        self._loud_events = set()  # src paths of non-quiet events
        self._is_idle = True

        # Cached priority-sorted list of offline events; None means stale
        # and will be rebuilt on next use.
        self._sorted_offline_events = None

    def _on_new_event(self, fs_event):
        """Merge fs_event into the delayed-events state (thread-safe)."""
        if not self._started:
            return
        if not fs_event.time:
            fs_event.time = self._time_provider()
        with self._lock:
            # Ignore events on destinations of not-yet-dispatched moves.
            if fs_event.src in self._delayed_move_dst_paths:
                return

            if not fs_event.quiet:
                if not self._loud_events:
                    logger.debug("Added first loud event")
                    # self.working.emit()
                self._loud_events.add(fs_event.src)
                self.file_added_to_indexing.emit(fs_event.src)

            prev_event = self._delayed_paths.get(fs_event.src, None)
            prev_sort_key = self._event_sort_key(prev_event) if prev_event \
                else None
            # A loud event arriving for a quiet pending one makes it loud.
            if prev_event and prev_event.quiet:
                prev_event.quiet = fs_event.quiet
            if fs_event.event_type is MOVE or prev_event is None or \
                    prev_event.event_type is not MOVE and fs_event.is_offline:
                # The new event replaces the pending one; preserve
                # "loudness" of the event it replaces.
                if prev_event and not prev_event.quiet and fs_event.quiet:
                    fs_event.quiet = False
                    fs_event.is_offline = False
                self._delayed_paths[fs_event.src] = fs_event
                self._update_event_size_and_time(fs_event)
            elif prev_event:
                self._update_event_size_and_time(prev_event)

            # Invalidate the sorted offline cache if ordering may change.
            new_event = self._delayed_paths[fs_event.src]
            if new_event.is_offline and \
                    (not prev_event or
                     prev_sort_key != self._event_sort_key(new_event)):
                self._sorted_offline_events = None

            if fs_event.event_type in (MOVE,) and not fs_event.is_dir:
                self._delayed_move_dst_paths.add(fs_event.dst)

            logger.debug(
                '%s processing fs_events, %s delayed fs_events, '
                '%s delayed move paths',
                len(self._processing_events),
                len(self._delayed_paths),
                len(self._delayed_move_dst_paths))

    def _update_event_size_and_time(self, fs_event):
        """Refresh fs_event.file_size / fs_event.mtime from disk.

        Prefers the MOVE destination when it exists; if neither path is a
        file, mtime falls back to the event's own timestamp.  On a stat
        failure the event is re-delayed.
        """
        src_longpath = FilePath(fs_event.src).longpath
        dst_longpath = FilePath(fs_event.dst).longpath \
            if fs_event.dst and fs_event.event_type == MOVE \
            else None
        if dst_longpath and isfile(dst_longpath):
            path = dst_longpath
        elif isfile(src_longpath):
            path = src_longpath
        else:
            fs_event.mtime = fs_event.time
            return

        try:
            st = stat(path)
        except Exception:
            # Was a bare except: narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.  Re-delay the event on failure.
            self._on_new_event(fs_event)
            return

        fs_event.file_size = st.st_size
        fs_event.mtime = st.st_mtime

    def dispatch(self):
        """Move expired delayed events into the worker pool, emitting
        idle/working transitions as the workload changes."""
        with self._lock:
            events_to_add = self.processing_events_limit - \
                            len(self._processing_events)
            if events_to_add <= 0 or not self._delayed_paths and self._is_idle:
                return

            if self._is_idle and self._loud_events:
                self.working.emit()
                self._is_idle = False
            elif not self._is_idle and \
                    (not self._loud_events or
                     not self._processing_events and not self._delayed_paths):
                self.idle.emit()
                self._is_idle = True
                if self._loud_events:
                    logger.warning("Loud events unclean %s", self._loud_events)
                    for path in self._loud_events:
                        self.file_removed_from_indexing(FilePath(path), False)
                    self._loud_events.clear()

            expired_events, offline_events = self._get_expired_events(
                events_to_add)
            self._batch_checking_paths.update(
                set(event.src for event in expired_events),
                set(event.src for event in offline_events))

        # File checks and submission happen outside the lock.
        if expired_events or offline_events:
            self._batch_check_if_changing(expired_events, offline_events)
        for fs_event in expired_events:
            if not self._executor or events_to_add <= 0:
                break
            self._process_event(fs_event)
            events_to_add -= 1
        for fs_event in offline_events:
            if not self._executor or events_to_add <= 0:
                break
            self._process_event(fs_event)
            events_to_add -= 1
        with self._lock:
            self._batch_checking_paths.clear()

    def _event_sort_key(self, fs_event):
        """Return the dispatch-ordering key (smaller sorts first).

        Directory creations get the highest priority, then directories,
        creations, deletions, moves, modifications.  Quiet events get a
        much smaller sub-priority multiplier; offline events are further
        ordered newest-mtime first; shorter paths win ties.
        """
        priority = (
            10 if fs_event.is_dir and fs_event.event_type == CREATE
            else 9 if fs_event.is_dir
            else 8 if fs_event.event_type == CREATE
            else 7 if fs_event.event_type == DELETE
            else 6 if fs_event.event_type == MOVE
            else 5)
        sub_priority = 1 if fs_event.quiet else 1000000
        addition = -int(fs_event.mtime) if fs_event.is_offline \
            else 0
        return 1 - priority * sub_priority * 1000 + addition + \
               len(fs_event.src)

    def _process_event(self, fs_event):
        """Submit fs_event to the worker pool, or re-delay it if its
        path is already being processed."""
        try:
            with self._lock:
                if fs_event.src in self._processing_events:
                    logger.debug('fs_event.src in self._processing_events')
                    self._on_new_event(fs_event)
                    return

                # Register and submit under the same lock so there is no
                # window in which the path is neither delayed nor marked
                # as processing (previously check and add were separate).
                if self._executor:
                    self._processing_events.add(fs_event.src)
                    self._executor.submit(self._pass_event, fs_event)
        except Exception as e:
            tb = traceback.format_list(traceback.extract_stack())
            if self._tracker:
                self._tracker.error(tb, str(e))

            logger.error('Filesystem monitor actions exception: %s\n%s',
                         e, tb)
            self.event_returned(fs_event)

    def _pass_event(self, fs_event):
        """Worker-side wrapper: pass the event on, then clear the
        processing/loud bookkeeping regardless of the outcome."""
        try:
            self.event_passed(fs_event)
        finally:
            with self._lock:
                self._processing_events.discard(fs_event.src)
                if self._loud_events:
                    self._loud_events.discard(fs_event.src)
                    if not self._loud_events:
                        logger.debug("All loud events processed")

                if not fs_event.quiet:
                    self.file_removed_from_indexing(
                        FilePath(fs_event.src),
                        fs_event.event_type in (DELETE, MOVE))

                logger.debug(
                    '%s processing fs_events, %s delayed fs_events, '
                    '%s delayed move paths, '
                    'workers count: %s',
                    len(self._processing_events), len(self._delayed_paths),
                    len(self._delayed_move_dst_paths),
                    self.workers_count)

    def is_processing(self, file_path):
        """Return True if file_path is anywhere in the pipeline:
        processing, a pending move destination, in a batch check, or
        delayed as a non-offline file event."""
        file_path = FilePath(file_path)
        with self._lock:
            return (
                file_path in self._processing_events
                or file_path in self._delayed_move_dst_paths
                or file_path in self._batch_checking_paths
                or (file_path in self._delayed_paths
                    and not self._delayed_paths.get(file_path).is_offline
                    and not self._delayed_paths.get(file_path).is_dir)
            )

    def _get_expired_events(self, events_to_add):
        """Pop up to events_to_add expired (and, if room remains,
        offline) events from the delayed state.

        @return tuple (expired_events, offline_events) of lists
        """
        if events_to_add <= 0:
            # Previously returned bare None, which callers unpacking a
            # 2-tuple would crash on; return an empty pair instead.
            return [], []
        now = self._time_provider()
        with self._lock:
            expired_events = filter(lambda event:
                                    not event.is_offline and
                                    event.time + self._delay <= now,
                                    self._delayed_paths.values())
            sorted_expired_events = sorted(
                expired_events,
                key=self._event_sort_key)

            processing_paths = self._processing_events.copy()
            result_expired_events = []
            result_offline_events = []
            for event in sorted_expired_events:
                if not self._to_exclude_path(event.src, processing_paths):
                    result_expired_events.append(event)
                    processing_paths.add(event.src)
                    events_to_add -= 1
                    if events_to_add == 0:
                        break

            if events_to_add > 0:
                if self._sorted_offline_events is None:
                    logger.debug("Sorting offline events...")
                    offline_events = filter(lambda event: event.is_offline,
                                            self._delayed_paths.values())
                    self._sorted_offline_events = sorted(
                        offline_events,
                        key=self._event_sort_key)

                # i tracks the index in the live list: deleting at i
                # shifts the next candidate into position i, while a
                # skipped (excluded) entry advances i past it.
                i = 0
                for event in self._sorted_offline_events[:]:
                    if self._to_exclude_path(event.src, processing_paths):
                        i += 1
                        continue

                    result_offline_events.append(event)
                    processing_paths.add(event.src)
                    del self._sorted_offline_events[i]
                    events_to_add -= 1
                    if events_to_add == 0:
                        break

            for delayed_event in result_offline_events:
                self._delayed_paths.pop(delayed_event.src, None)
            for delayed_event in result_expired_events:
                self._delayed_paths.pop(delayed_event.src, None)
                if delayed_event.event_type in (MOVE,):
                    self._delayed_move_dst_paths.discard(delayed_event.dst)

            logger.debug(
                "get_expired_events: "
                "%d expired events, "
                "%d expired offline events, "
                "%d events delayed, "
                "elapsed %s",
                len(result_expired_events), len(result_offline_events),
                len(self._delayed_paths),
                self._time_provider() - now)

            if not result_expired_events and not result_offline_events \
                    and self._delayed_paths:
                logger.debug("delayed_paths: %s, processing_paths: %s",
                             log_sequence(self._delayed_paths),
                             log_sequence(processing_paths))

            return result_expired_events, result_offline_events

    def _to_exclude_path(self, path, processing_paths):
        # NOTE(review): relies on `path in p` containment semantics of the
        # stored path objects (presumably FilePath sub-path matching, plain
        # substring for str) — confirm against FilePath.__contains__.
        return any(map(lambda p: path in p, processing_paths))

    def start(self):
        """Reset all pipeline state and (re)create the worker pool."""
        with self._lock:
            if self._started:
                return
            self._started = True
            self._is_idle = True

            self._delayed_paths = dict()
            self._delayed_move_dst_paths = set()
            self._processing_events = set()
            self._batch_checking_paths = set()
            self._loud_events = set()
            self._sorted_offline_events = None

        if not self._executor:
            self._executor = ThreadPoolExecutor(max_workers=self.workers_count)

    def stop(self):
        """Drop all pending state, shut the worker pool down (waiting for
        in-flight tasks) and emit idle."""
        logger.debug("Stopping fs events processing")
        with self._lock:
            self._delayed_paths.clear()
            self._delayed_move_dst_paths.clear()
            self._processing_events.clear()
            self._batch_checking_paths.clear()
            # todo: mb notify about files removed from indexing
            self._loud_events.clear()
            if not self._started:
                return
        with self._lock:
            executor = self._executor
            self._executor = None
            self._started = False
        if executor:
            executor.shutdown(wait=True)
        logger.debug("monitor idle emitted")
        self.idle.emit()

    def _batch_check_if_changing(self, expired_events, offline_events):
        """Drop (return to the delay queue) events whose file changed on
        disk since the event was recorded; both lists are mutated."""
        logger.debug('batch check if changing %s files',
                     len(expired_events) + len(offline_events))
        for fs_event in set(expired_events):
            src_longpath = FilePath(fs_event.src).longpath
            dst_longpath = FilePath(fs_event.dst).longpath \
                if fs_event.dst and fs_event.event_type == MOVE \
                else None
            if dst_longpath and isfile(dst_longpath):
                path = dst_longpath
            elif isfile(src_longpath):
                path = src_longpath
            else:
                continue

            try:
                st = stat(path)
            except Exception as e:
                logger.warning("Can't get stat for %s. Reason: %s", path, e)
                expired_events.remove(fs_event)
                self.event_returned.emit(fs_event)
                continue

            if fs_event.file_size != st.st_size or \
                    fs_event.mtime != st.st_mtime:
                # File is still changing: give it another delay round.
                expired_events.remove(fs_event)
                self.event_returned.emit(fs_event)
                continue

            if fs_event.dst:
                fs_event.mtime += 0.1

        for fs_event in set(offline_events):
            src_longpath = FilePath(fs_event.src).longpath
            if isfile(src_longpath):
                try:
                    st = stat(src_longpath)
                except Exception:
                    # Was a bare except: narrowed so interrupts propagate.
                    offline_events.remove(fs_event)
                    self.event_returned.emit(fs_event)
                    continue

                if fs_event.file_size != st.st_size or \
                        fs_event.mtime != st.st_mtime:
                    fs_event.is_offline = False
                    offline_events.remove(fs_event)
                    self.event_returned.emit(fs_event)

    def get_fs_events_count(self):
        """Return the number of loud (non-quiet) events in flight."""
        return len(self._loud_events)
class LoadInfoFromStorageAction(ActionBase):
    """Enriches filesystem events with data known to the storage
    (old hash, mtime, size, signature) and filters out events for
    file links that cannot be resolved."""

    def __init__(self, storage):
        super(LoadInfoFromStorageAction, self).__init__()
        self._storage = storage
        # Emitted when a freshly created file must be renamed because a
        # link with the same name already exists on disk.
        self.rename_file = Signal(FilePath)

    def _on_new_event(self, fs_event):
        # A non-directory path carrying the link suffix is a file link;
        # storage knows the file under the suffix-less name.
        is_link = (not fs_event.is_dir
                   and fs_event.src.endswith(FILE_LINK_SUFFIX))
        fs_event.is_link = is_link
        lookup_path = (fs_event.src[:-len(FILE_LINK_SUFFIX)] if is_link
                       else fs_event.src)

        fs_event.file = self._storage.get_known_file(lookup_path)
        fs_event.in_storage = fs_event.file is not None

        must_suppress = False
        if fs_event.in_storage:
            if (not is_link
                    and self._check_file_exists_on_fs(
                        fs_event.src + FILE_LINK_SUFFIX)
                    and fs_event.event_type == CREATE):
                # A link for this known file is already on disk: ask for
                # a rename instead of processing the creation.
                self.rename_file.emit(fs_event.src)
                return self.event_suppressed(fs_event)

            if (is_link and fs_event.event_type == MODIFY
                    and not self._get_events_file_id_from_link(fs_event)):
                # Link contents are unreadable/invalid: drop the event.
                must_suppress = True

            self._load_info_from_storage(fs_event)
        elif is_link:
            # Unknown link — possibly a copy of an existing file link.
            must_suppress = not self._get_copy_info_from_storage(fs_event)
            if not must_suppress:
                set_ext_invisible(fs_event.src)

        if must_suppress:
            # Best effort: the broken link file itself is removed.
            try:
                remove_file(fs_event.src)
            except Exception:
                pass
            return self.event_suppressed(fs_event)

        if is_link:
            # Report the real file's size, not the tiny link file's.
            fs_event.file_size = fs_event.old_size

        self.event_passed(fs_event)

    def _load_info_from_storage(self, fs_event):
        """Copy directory flag and, for files, the stored attributes."""
        fs_event.is_dir = fs_event.file.is_folder
        if fs_event.is_dir:
            return
        self._load_file_info_from_storage(fs_event)

    def _load_file_info_from_storage(self, fs_event):
        """Fill old_* attributes from the storage record."""
        known = fs_event.file
        fs_event.old_hash = known.file_hash
        fs_event.old_mtime = known.mtime
        fs_event.old_size = known.size
        fs_event.old_signature = self._storage.get_file_signature(known)

    def _check_file_exists_on_fs(self, file):
        """True if the given path exists on disk (long-path aware)."""
        return op.exists(FilePath(file).longpath)

    def _get_events_file_id_from_link(self, fs_event):
        """Read the storage file id pickled inside the link file.

        Returns the id as int, or None if the link cannot be read.
        NOTE(review): pickle.load on an on-disk file — assumed the link
        is only ever produced by this application; confirm links can't
        arrive from untrusted sources.
        """
        try:
            with open(fs_event.src, 'rb') as link_file:
                return int(pickle.load(link_file))
        except Exception:
            return None

    def _get_copy_info_from_storage(self, fs_event):
        """Resolve a link copy via the id stored inside it.

        Returns True when the referenced file is known and the event was
        populated from it; False otherwise.
        """
        known_id = self._get_events_file_id_from_link(fs_event)
        if not known_id:
            return False

        fs_event.file = self._storage.get_known_file_by_id(known_id)
        if not fs_event.file:
            return False

        self._load_info_from_storage(fs_event)
        # The copy already matches the stored content and signature.
        fs_event.new_hash = fs_event.old_hash
        fs_event.file_size = fs_event.old_size
        fs_event.new_signature = fs_event.old_signature
        fs_event.file = None
        return True
# 예제 #14 (scrape-artifact separator, commented out to keep the file parseable)
# 0
class FilesList(object):
    """Keeps an mtime-ordered, bounded cache of recently changed files
    and serves a deduplicated "recent files" list, backfilling from
    storage when the in-memory cache runs low."""

    # A modification within this many seconds of the stored mtime still
    # counts as part of the original creation, not a real modification.
    CREATE_TOLERANCE_INTERVAL = 30

    def __init__(self, storage, root):
        """
        @param storage  Storage instance providing get_last_files()
        @param root  Sync root directory used to resolve relative paths
        """
        super(FilesList, self).__init__()
        self._storage = storage
        self._pc = PathConverter(root)

        # rel_path -> (rel_path, is_dir, mtime, was_updated)
        self._files_dict = dict()
        self._store_limit = FILE_LIST_COUNT_LIMIT * 10

        self.file_list_changed = Signal()

        self._lock = RLock()
        self._last_sent = None

    def on_file_added(self, rel_path, is_dir, mtime):
        """Record a newly added file; directories are not listed."""
        if is_dir:
            return

        with self._lock:
            # is_dir is always False here (guarded above); was_updated
            # starts False for fresh files.
            self._files_dict[rel_path] = (rel_path, is_dir, mtime, False)
        self.file_list_changed.emit()

    def on_file_deleted(self, rel_path):
        """Drop rel_path and everything nested under it."""
        with self._lock:
            for file_path in self._files_dict.copy():
                if FilePath(file_path) in FilePath(rel_path) or \
                                file_path == rel_path:
                    self._files_dict.pop(file_path)
        self.file_list_changed.emit()

    def on_file_moved(self, old_path, new_path):
        """Re-key old_path and all entries nested under it to new_path,
        preserving their attributes."""
        with self._lock:
            for file_path in self._files_dict.copy():
                if FilePath(file_path) in FilePath(old_path) or \
                        file_path == old_path:
                    path = str(
                        FilePath(join(new_path, relpath(file_path, old_path))))
                    old_file = self._files_dict.pop(file_path)
                    self._files_dict[path] = (path, old_file[1], old_file[2],
                                              old_file[3])

        self.file_list_changed.emit()

    def on_file_modified(self, rel_path, mtime):
        """Record a modification; it counts as a real update only when
        it happens beyond the create-tolerance window."""
        with self._lock:
            old_mtime = self._files_dict.get(rel_path,
                                             ('', False, 0, False))[2]
            modified = (mtime - old_mtime) > self.CREATE_TOLERANCE_INTERVAL
            # can't modify directory
            self._files_dict[rel_path] = (rel_path, False, mtime, modified)
        self.file_list_changed.emit()

    def on_idle(self):
        """Trim the cache when the system goes idle."""
        self._clear_old()

    def get(self):
        """Return up to FILE_LIST_COUNT_LIMIT newest files that still
        exist on disk, or None if nothing changed since the last call.

        Entries whose file (and file link) vanished are evicted; when
        the cache runs low it is refilled from storage and the scan
        restarts.
        """
        files_to_return = []
        offset = 0
        while len(files_to_return) < FILE_LIST_COUNT_LIMIT:
            with self._lock:
                files = list(self._files_dict.values())
            # sort by mtime, newest first
            files.sort(key=lambda f: f[2], reverse=True)

            for item in files:
                path = item[0]

                abs_path = self._pc.create_abspath(path)
                if exists(abs_path) or exists(abs_path + FILE_LINK_SUFFIX):
                    files_to_return.append(item)
                    if len(files_to_return) >= FILE_LIST_COUNT_LIMIT:
                        break
                else:
                    # Stale entry: the file disappeared from disk.
                    logger.warning("File does not exists: %s", path)
                    with self._lock:
                        self._files_dict.pop(path, None)

            with self._lock:
                # Read the size under the lock like every other access
                # (was previously read unlocked).
                cache_low = len(self._files_dict) < FILE_LIST_COUNT_LIMIT * 2
            if cache_low:
                loaded = self._load_from_storage(offset)
                offset += self._store_limit
                if loaded:
                    # New data arrived: rescan from scratch.
                    files_to_return = []
                else:
                    break

        if self._last_sent != files_to_return:
            self._last_sent = files_to_return
            return files_to_return
        return None

    def clear(self):
        """Forget all cached entries."""
        with self._lock:
            self._files_dict.clear()

    def start(self):
        """Warm the cache from storage and reset change detection."""
        self._load_from_storage()
        self._last_sent = None

    def stop(self):
        """Drop all cached entries."""
        with self._lock:
            self.clear()

    def _clear_old(self):
        """Trim the cache to the newest _store_limit entries."""
        with self._lock:
            if len(self._files_dict) <= self._store_limit:
                return

            files = list(self._files_dict.values())
            files.sort(key=lambda f: f[2], reverse=True)
            self._files_dict = {f[0]: f for f in files[:self._store_limit]}

    def _load_from_storage(self, offset=0):
        """Merge a page of recent files from storage into the cache.

        @return number of records loaded [int]
        """
        files_stored = self._storage.get_last_files(self._store_limit, offset)
        stored_dict = {
            file.relative_path:
            (file.relative_path, file.is_folder, file.mtime, file.was_updated)
            for file in files_stored
        }
        with self._lock:
            self._files_dict.update(stored_dict)
        return len(files_stored)
# 예제 #15 (scrape-artifact separator, commented out to keep the file parseable)
# 0
class Copies(object):
    """
    Interface for reference counting of files copies
    """
    def __init__(self,
                 root,
                 db_file_created_cb=None,
                 extended_logging=True,
                 to_upgrade=True):
        """
        @param root  Sync root directory; copies.db lives in its copies dir
        @param db_file_created_cb  Callable invoked whenever a fresh db
            file is about to be created (or None)
        @param extended_logging  Enables the dedicated 'copies_logger'
        @param to_upgrade  Run schema migration before opening an
            existing db file
        """
        self.possibly_sync_folder_is_removed = Signal()
        self.delete_copy = Signal(
            str,  # copy hash
            bool)  # with signature
        self.db_or_disk_full = Signal()

        self._db_file = join(get_copies_dir(root), 'copies.db')
        new_db_file = not exists(self._db_file)
        if new_db_file and callable(db_file_created_cb):
            db_file_created_cb()

        if to_upgrade and not new_db_file:
            # Database migration. It can be executed before opening db
            try:
                upgrade_db("copies_db", db_filename=self._db_file)
            except Exception as e:
                # Migration failed: drop the old db and recreate it empty.
                remove_file(self._db_file)
                new_db_file = True
                logger.warning(
                    "Can't upgrade copies db. "
                    "Reason: (%s) Creating...", e)
                if callable(db_file_created_cb):
                    db_file_created_cb()

        self._engine = create_engine('sqlite:///{}'.format(
            FilePath(self._db_file)))
        self._Session = sessionmaker(bind=self._engine)

        Base.metadata.create_all(self._engine, checkfirst=True)

        if new_db_file:
            # Mark the freshly created db as being at the current
            # schema revision so future upgrades start from here.
            try:
                stamp_db("copies_db", db_filename=self._db_file)
            except Exception as e:
                logger.error("Error stamping copies db: %s", e)

        self._lock = RLock()
        self._root = root

        self._extended_logging = extended_logging

        if not self._extended_logging:
            self._logger = None
        else:
            self._logger = logging.getLogger('copies_logger')
            self._logger.debug("Copies init")

        # hash -> pending refcount delta; applied in bulk by
        # commit_last_changes()
        self._last_changes = defaultdict(int)

    @contextmanager
    def create_session(self):
        """Context manager yielding a session under the instance lock.

        Commits on success; on OperationalError rolls back, signals a
        possibly removed sync folder, and emits db_or_disk_full when the
        error indicates a full db/disk (otherwise re-raises).  Any other
        exception rolls back and propagates.  The session is always
        closed.
        """
        with self._lock:
            session = self._Session()
            session.expire_on_commit = False
            session.autoflush = False

            try:
                yield session
                session.commit()
            except OperationalError as e:
                try:
                    session.rollback()
                except Exception:
                    pass
                finally:
                    self.possibly_sync_folder_is_removed()
                    logger.error("Possibly sync folder is removed %s", e)

                if is_db_or_disk_full(e):
                    self.db_or_disk_full.emit()
                else:
                    raise
            except:
                session.rollback()
                raise
            finally:
                session.close()

    def add_copy_reference(self, hash, reason="", postponed=False):
        """Increment the reference count for a copy hash.

        With postponed=True the change is only accumulated in
        _last_changes until commit_last_changes(); otherwise the row is
        updated (or created) immediately.
        """
        if postponed:
            self._last_changes[hash] += 1
            copy_count = self._last_changes[hash]
        else:
            with self.create_session() as session:
                copy = session.query(Copy)\
                    .filter(Copy.hash == hash)\
                    .one_or_none()
                if copy is None:
                    copy = Copy(hash=hash, count=0)
                copy.count += 1
                copy_count = copy.count
                session.merge(copy)

        logger.debug(
            "File copy reference added, %s, "
            "count: %s, postponed is %s", hash, copy_count, postponed)

        if not self._extended_logging:
            return

        copies_dir = get_copies_dir(self._root)
        self._logger.debug(
            "File copy reference added, %s, count: %s. "
            "postponed is %s. File exists: %s. "
            "Reason: %s", hash, copy_count, postponed,
            exists(join(copies_dir, hash)), reason)

    def remove_copy_reference(self, hash, reason="", postponed=False):
        """Decrement the reference count for a copy hash.

        Mirrors add_copy_reference(): postponed changes only touch
        _last_changes; immediate ones update the db row.  Removing a
        reference for an unknown hash is logged and ignored.
        """
        if postponed:
            self._last_changes[hash] -= 1
            copy_count = self._last_changes[hash]
        else:
            with self.create_session() as session:
                copy = session.query(Copy)\
                    .filter(Copy.hash == hash)\
                    .one_or_none()
                if not copy:
                    logger.warning(
                        "Trying to remove copy reference "
                        "for non-existant copy %s", hash)
                    return

                else:
                    copy.count -= 1
                    copy_count = copy.count
                session.merge(copy)

        logger.debug(
            "File copy reference removed, %s, "
            "count: %s, postponed is %s", hash, copy_count, postponed)

        if not self._extended_logging:
            return

        copies_dir = get_copies_dir(self._root)
        self._logger.debug(
            "File copy reference removed, %s, count: %s, "
            "postponed is %s. File exists: %s. Reason: %s", hash, copy_count,
            postponed, exists(join(copies_dir, hash)), reason)

    def commit_last_changes(self):
        """Apply all postponed refcount deltas to the db in bulk.

        Processes hashes in pages of DB_PAGE_SIZE (the `hashes` list is
        consumed slice by slice); existing rows get count updates, absent
        hashes get inserted with their delta as the initial count.
        Clears the postponed state afterwards.
        """
        if not self._last_changes:
            return

        hashes = list(self._last_changes.keys())
        hashes_len = len(hashes)
        with self.create_session() as session:
            mappings = []
            insert_mappings = []
            for i in range(0, hashes_len, DB_PAGE_SIZE):
                # Consume the next page off the front of the list.
                hashes_portion = hashes[:DB_PAGE_SIZE]
                hashes = hashes[DB_PAGE_SIZE:]
                copies = session.query(Copy)\
                    .filter(Copy.hash.in_(tuple(hashes_portion)))\
                    .all()
                mappings.extend([
                    dict(id=copy.id,
                         count=copy.count + self._last_changes[copy.hash])
                    for copy in copies
                ])
                hashes_absent = set(hashes_portion) - {c.hash for c in copies}
                insert_mappings.extend([
                    dict(hash=hash, count=self._last_changes[hash])
                    for hash in hashes_absent
                ])

            session.bulk_update_mappings(Copy, mappings)
            session.bulk_insert_mappings(Copy, insert_mappings)

        logger.debug("Commited last copies changes for %s hashes",
                     len(self._last_changes))
        if self._extended_logging:
            self._logger.debug("Commited last copies changes for %s",
                               self._last_changes)

        self.clear_last_changes()

    def clear_last_changes(self):
        """Drop all postponed refcount deltas without applying them."""
        self._last_changes.clear()

    def copy_exists(self, hash):
        """True if the copy file for this hash exists on disk."""
        return exists(self.get_copy_file_path(hash))

    def get_copy_size(self, hash):
        """Size in bytes of the copy file, or 0 if it does not exist."""
        copy_path = self.get_copy_file_path(hash)
        return stat(copy_path).st_size if exists(copy_path) else 0

    def get_copy_file_path(self, hash):
        """Absolute path of the copy file for this hash."""
        return join(get_copies_dir(self._root), hash)

    def clean(self, with_files=True, with_signatures=True):
        """Delete all copies from the db (and optionally their files
        via the delete_copy signal), then roll the extended log over."""
        with self.create_session() as session:
            self._log_db(session)

            if with_files:
                copies = session.query(Copy).all()
                for copy in copies:
                    if copy.hash:
                        self.delete_copy(copy.hash, with_signatures)

        try:
            self._engine.execute("delete from copies")
            logger.info("Cleaned copies data base")
        except Exception as e:
            logger.error("Failed to clean copies DB (%s)", e)
            # Re-raise only if the db file is still there; a missing file
            # means the sync folder itself is gone.
            if not self.db_file_exists():
                raise e

        if self._extended_logging:
            do_rollover(self._logger, use_root=False)

    def clean_unnecessary(self):
        """Delete copies whose reference count dropped to zero or below,
        both their files (via delete_copy) and their db rows."""
        with self.create_session() as session:
            self._log_db(session)

            copies = session.query(Copy).filter(Copy.count <= 0).all()
            for copy in copies:
                if copy.hash:
                    self.delete_copy(copy.hash, True)

            session.query(Copy).filter(Copy.count <= 0).delete()

    def remove_copies_not_in_db(self):
        """Remove files from the copies dir that have no db row,
        keeping the db itself and in-progress .download/.info files."""
        with self.create_session() as session:
            copies = session.query(Copy).all()
            exclude_files = {copy.hash for copy in copies}
        exclude_files.add('copies.db')
        copies_dir = get_copies_dir(self._root)
        try:
            files_to_delete = set(listdir(copies_dir)) - exclude_files
            files_to_delete = map(lambda f: join(copies_dir, f),
                                  files_to_delete)
            list(
                map(
                    remove_file,
                    filter(
                        lambda f: isfile(f) and not f.endswith('.download') and
                        not f.endswith('.info'), files_to_delete)))
        except Exception as e:
            self.possibly_sync_folder_is_removed()
            logger.warning("Can't remove copies files. Reason: %s", e)

    def db_file_exists(self):
        """True if the db file exists and is non-empty."""
        return exists(self._db_file) and getsize(self._db_file) > 0

    def _log_db(self, session):
        """Dump every copy row (and file existence) to the extended log."""
        if not self._extended_logging:
            return

        copies_dir = get_copies_dir(self._root)
        copies = session.query(Copy).all()
        for i, copy in enumerate(copies):
            self._logger.debug("Copy %s: %s. File exists: %s", i, copy,
                               exists(join(copies_dir, copy.hash)))
# 예제 #16 (scrape-artifact separator, commented out to keep the file parseable)
# 0
class ObserverWrapper(QObject):
    """
    Wrapper for watchdog's Observer performing checks for offline events
    on observing start
    """

    # Emitted after a filesystem event has been handled (event, suppressed)
    event_handled = pyqtSignal(FsEvent, bool)

    # Connected to _start() in __init__; emit to begin observing
    start = pyqtSignal()

    # Emitted once the offline-changes scan of a root has completed
    processed_offline_changes = pyqtSignal()

    def __init__(self, storage, get_sync_dir_size, tracker=None, parent=None):
        """
        Constructor
        @param storage Storage class instance [Storage]
        @param get_sync_dir_size
            Callable returning the sync directory size (used for stats)
        @param tracker
            Statistics event tracker instance [stat_tracking.Tracker] or None
        @param parent Parent QObject or None
        """
        QObject.__init__(self, parent=parent)
        self._storage = storage
        self._observer = None  # watchdog Observer; created in _start()
        self._active = True
        self._root_handlers = {}  # root path -> (event_handler, recursive)
        self._tracker = tracker
        self._reset_stats()
        self._started = False
        self.is_processing_offline = False
        self._processed_offline_changes = False
        self._get_sync_dir_size = get_sync_dir_size
        self._special_dirs = dict()  # special parent dir -> watchdog watch
        self._special_files = set()  # special files tracked without a watch
        self._lock = RLock()
        self.start.connect(self._start)

        # Signal to be emitted when detecting offline changes
        self.offline_event_occured = Signal(FsEvent, bool)

    def has_processed_offline_changes(self):
        """Return True once the offline-changes scan has finished."""
        return bool(self._processed_offline_changes)

    def _reset_stats(self):
        """Zero out all event-counting state used for tracker reports."""
        self._start_time = 0
        self._offline_stats_count = 0
        self._start_stats_sended = False
        # Per-event-type counters, keyed like 'file_CREATE' / 'dir_DELETE'
        self._online_stats = defaultdict(int)
        self._offline_stats = defaultdict(int)

    def set_active(self, active=True):
        """
        Enable or disable the wrapper; _start() is a no-op while inactive.

        @param active New active state [bool]
        """
        logger.debug("set_active: %s", active)
        self._active = active

    def _start(self):
        """
        Starts monitoring of roots added with schedule().
        Creates wrapped observer instance
        """
        if not self._active:
            return
        logger.debug("Start")
        self._reset_stats()
        # Initialize observer
        self._observer = Observer()
        self._observer.start()
        # Detect offline changes (if any)
        self.is_processing_offline = False
        self._processed_offline_changes = False
        self._started = True

        # MODIFY events are deliberately not emitted during the startup scan
        self.process_offline_changes(process_modifies=False)
        # Register roots added previously
        with self._lock:
            # stop() may have run while the offline scan was in progress
            if not self._active or not self._started:
                return
            for root, (event_handler,
                       recursive) in self._root_handlers.items():
                self._schedule_root(event_handler, root, recursive)

    def _schedule_root(self, event_handler, root, recursive):
        """Register a handler for ``root`` on the wrapped observer.

        Returns the watchdog watch object, or None when no observer
        is currently running.
        """
        logger.info("Starting watching root '%s'...", root)
        observer = self._observer
        if not observer:
            return None
        return observer.schedule(event_handler, root, recursive=recursive)

    def stop(self):
        """
        Stops monitoring of roots added with schedule()
        """

        logger.debug("Stop")
        self._active = False
        if not self._started:
            logger.warning("Already stopped")
            return

        logger.info("Stop watching all roots")
        self._started = False
        self._processed_offline_changes = False
        try:
            # Teardown of the observer thread; may raise if it already died
            self._observer.unschedule_all()
            self._observer.stop()
            self._observer.join()
        except Exception as e:
            logger.error('Exception while stopping fs observer: %s', e)
        self._observer = None

    def schedule(self, event_handler, root, recursive=True):
        """
        Register given event handler to be used for events from given root path

        @param event_handler
            Observer class instance [watchdog.observers.BaseObserver]
        @param root Path (absolute) to process event from [unicode]
        @param recursive
            Flag enabling processing events from nested folders/files [bool]
        @return watchdog watch object, or None when observing not started
        """
        long_root = FilePath(root).longpath
        self._root_handlers[long_root] = (event_handler, recursive)
        if not self._started:
            return None
        return self._schedule_root(event_handler, long_root, recursive)

    def process_offline_changes(self, process_modifies):
        """Scan every scheduled root for changes made while not watching.

        @param process_modifies
            When True, also emit quiet MODIFY events for unchanged-set
            files [bool]
        """
        logger.debug("process_offline_changes")
        if self.is_processing_offline or not self._started:
            logger.debug(
                "process_offline_changes, already processing offline changes")
            return

        self.is_processing_offline = True
        # Snapshot the keys: _check_root may mutate handler bookkeeping
        for root in list(self._root_handlers):
            self._check_root(root, process_modifies)

    def _check_root(self, root, process_modifies):
        """
        Check given root path for offline events

        Compares the storage's known files/folders against the actual
        filesystem contents and emits CREATE/DELETE offline events for the
        differences; optionally emits quiet MODIFY events for files present
        on both sides.  Bails out early whenever the wrapper is deactivated
        or stopped mid-scan.

        @param root Path (absolute) to be checked [unicode]
        @param process_modifies
            When True, also emit MODIFY events for surviving files [bool]
        """
        # Special dirs are watched separately; skip the offline diff
        if FilePath(root) in self._special_dirs:
            return

        logger.info("Checking root '%s' folder for offline changes...", root)

        self._start_time = time.time()

        logger.debug("Obtaining known files from storage...")
        known_files = set(self._storage.get_known_files())
        if not self._active or not self._started:
            return
        logger.debug("Known files: %s", len(known_files))

        logger.debug("Obtaining actual files and folders from filesystem...")
        actual_folders, actual_files = get_files_dir_list(
            root,
            exclude_dirs=self._root_handlers[root][0].hidden_dirs,
            exclude_files=self._root_handlers[root][0].hidden_files)
        if not self._active or not self._started:
            return
        logger.debug("Actual folders: %s", len(actual_folders))
        logger.debug("Actual files: %s", len(actual_files))

        # Special files are tracked elsewhere; drop them from the diff
        actual_files = set(map(FilePath, actual_files)) - self._special_files
        actual_folders = set(map(FilePath, actual_folders))

        if not self._active or not self._started:
            return

        self._offline_stats['file_COUNT'] = len(actual_files)

        logger.debug("Finding files that were created...")
        files_created = actual_files.difference(known_files)
        if not self._active or not self._started:
            return

        logger.debug("Finding files that were deleted...")
        files_deleted = known_files.difference(actual_files)
        if not self._active or not self._started:
            return

        for path in files_deleted.copy():
            if not self._active or not self._started:
                return

            # A file replaced by its link counterpart (same path plus the
            # link suffix) is neither a real delete nor a real create —
            # drop both pseudo-events.
            path_plus_suffix = path + FILE_LINK_SUFFIX
            if path_plus_suffix in files_created:
                files_deleted.discard(path)
                files_created.discard(path_plus_suffix)

        logger.debug("Obtaining known folders from storage...")
        known_folders = set(self._storage.get_known_folders())

        if not self._active or not self._started:
            return
        logger.debug("Known folders: %s", len(known_folders))

        logger.debug("Finding folders that were created...")
        # Longest paths first (len of path string as a depth proxy)
        folders_created = sorted(actual_folders.difference(known_folders),
                                 key=len,
                                 reverse=True)
        if not self._active or not self._started:
            return

        logger.debug("Finding folders that were deleted...")
        folders_deleted = sorted(known_folders.difference(actual_folders),
                                 key=len,
                                 reverse=True)

        if not self._active or not self._started:
            return

        logger.info("Folders found: %s (created: %s, deleted: %s)",
                    len(actual_folders), len(folders_created),
                    len(folders_deleted))

        self._offline_stats['dir_COUNT'] = len(actual_folders)

        logger.debug("Appending deleted files to processing...")
        for filename in files_deleted:
            if not self._active or not self._started:
                return
            self._emit_offline_event(
                FsEvent(event_type=DELETE,
                        src=filename,
                        is_dir=False,
                        is_offline=True))

        logger.debug("Appending deleted folders to processing...")
        for foldername in folders_deleted:
            if not self._active or not self._started:
                return
            self._emit_offline_event(
                FsEvent(event_type=DELETE,
                        src=foldername,
                        is_dir=True,
                        is_offline=True))

        logger.debug("Appending created files to processing...")
        for filename in files_created:
            if not self._active or not self._started:
                return
            self._emit_offline_event(
                FsEvent(event_type=CREATE,
                        src=filename,
                        is_dir=False,
                        is_offline=True))

        logger.debug("Appending created folders to processing...")
        for foldername in folders_created:
            if not self._active or not self._started:
                return
            self._emit_offline_event(
                FsEvent(event_type=CREATE,
                        src=foldername,
                        is_dir=True,
                        is_offline=True))

        # The offline scan is considered complete before the optional
        # MODIFY pass below.
        self._processed_offline_changes = True
        self.is_processing_offline = False
        # NOTE(review): "ofline" typo lives in the log message text
        logger.debug("Emitting ofline events processed signal")
        self.processed_offline_changes.emit()

        if not process_modifies:
            return

        logger.debug("Finding files with possible modifications...")
        same_files = actual_files.intersection(known_files)

        if not self._active or not self._started:
            return

        logger.info(
            "Files found: %s (created: %s, deleted: %s, remaining: %s)",
            len(actual_files), len(files_created), len(files_deleted),
            len(same_files))

        logger.debug("Appending possible modified files to processing...")
        for filename in same_files:
            if not self._active or not self._started:
                return
            # Actual file modification will be checked by event filters
            # applied in WatchdogHandler instance
            self._emit_offline_event(
                FsEvent(
                    event_type=MODIFY,
                    src=filename,
                    is_dir=False,
                    is_offline=True,
                    quiet=True,
                ))
        logger.debug("work complete")

    def _emit_offline_event(self, fs_event):
        """Emit an offline event, counting it for start-stats tracking."""
        assert fs_event.is_offline
        # Decremented again in on_event_is_handled_slot once handled
        self._offline_stats_count += 1
        self.offline_event_occured.emit(fs_event, False)

    def on_event_is_handled_slot(self, fs_event, suppressed=False):
        """
        Slot to process FsEventFilters.event_is_handled signal

        @param fs_event Event being reported [FsEvent]
        @param suppressed True when the event was filtered out [bool]
        """

        logger.info("on_event_is_handled_slot: %s", fs_event)

        assert len(self._root_handlers) > 0

        # Offline and online events are counted in separate tables
        stat = self._offline_stats if fs_event.is_offline \
            else self._online_stats

        # Determine name of stat counter, e.g. 'file_CREATE' / 'dir_DELETE'
        prefix = 'dir_' if fs_event.is_dir else 'file_'
        event_name = event_names[fs_event.event_type]

        if suppressed:
            # Only suppressed file events are tracked
            if not fs_event.is_dir:
                stat['file_IGNORED'] += 1
        else:
            # Increment counter corresponding to event obtained
            stat[prefix + event_name] += 1

        if not fs_event.is_offline:
            # Keep running totals in sync for online events
            counter = prefix + 'COUNT'
            if event_name == 'CREATE':
                self._online_stats[counter] += 1
            elif event_name == 'DELETE':
                self._online_stats[counter] -= 1
            return

        # Offline bookkeeping: each emitted offline event must come back
        if self._offline_stats_count <= 0:
            logger.warning(
                "FsEventFilters handled more offline events than "
                "have been emitted by ObserverWrapper")
            return

        self._offline_stats_count -= 1
        # All emitted events have been handled
        if self._offline_stats_count == 0:
            # Online total counts should be based on offline ones
            self._online_stats['file_COUNT'] += \
                self._offline_stats['file_COUNT']
            self._online_stats['dir_COUNT'] += \
                self._offline_stats['dir_COUNT']
            # Send stats accumulated
            self._send_start_stats()

    def _send_start_stats(self):
        """Report startup (offline-scan) statistics to the tracker, once."""
        if self._start_stats_sended:
            return

        duration = time.time() - self._start_time
        logger.info("ObserverWrapper started in %s seconds", duration)

        if self._tracker:
            stats = self._offline_stats
            self._tracker.monitor_start(stats['file_COUNT'],
                                        stats['dir_COUNT'],
                                        stats['file_CREATE'],
                                        stats['file_MODIFY'],
                                        stats['file_MOVE'],
                                        stats['file_DELETE'],
                                        stats['dir_CREATE'],
                                        stats['dir_DELETE'],
                                        self._get_sync_dir_size(), duration)
        self._start_stats_sended = True

    def _send_stop_stats(self):
        """Report accumulated online statistics to the tracker on stop."""
        duration = time.time() - self._start_time
        logger.info("ObserverWrapper worked %s seconds", duration)

        if self._tracker:
            stats = self._online_stats
            self._tracker.monitor_stop(stats['file_COUNT'],
                                       stats['dir_COUNT'],
                                       stats['file_CREATE'],
                                       stats['file_MODIFY'],
                                       stats['file_MOVE'],
                                       stats['file_DELETE'],
                                       stats['dir_CREATE'],
                                       stats['dir_MOVE'],
                                       stats['dir_DELETE'],
                                       duration,
                                       stats['file_IGNORED'])

    def add_special_file(self, path, event_handler):
        """Track a special path.

        With a handler, the file's parent directory is scheduled
        (non-recursively) and the watch is remembered; without one the
        path is merely recorded in the special-files set.
        """
        # Observer has to be started here. So watch is not None
        with self._lock:
            if event_handler:
                parent_dir = FilePath(op.dirname(path))
                self._special_dirs[parent_dir] = self.schedule(
                    event_handler, parent_dir, recursive=False)
            else:
                self._special_files.add(FilePath(path))

    def remove_special_file(self, path):
        """
        Stop tracking a special file previously added with add_special_file.

        @param path Path (absolute) of the special file [unicode]
        """
        logger.debug("Removing special file %s...", path)
        with self._lock:
            special_dir = FilePath(op.dirname(path))
            if special_dir in self._special_dirs:
                # The file was watched via its parent dir: drop the watch
                # and the handler bookkeeping added by schedule()
                watch = self._special_dirs.pop(special_dir)
                self._root_handlers.pop(special_dir.longpath)
                if self._started:
                    self._observer.unschedule(watch)
                logger.debug("Unscheduled path %s", path)
            elif FilePath(path) in self._special_files:
                self._special_files.discard(FilePath(path))
            else:
                logger.warning("Can't remove special file %s from %s and %s",
                               path, self._special_dirs, self._special_files)