def __init__(self, path_converter):
        """
        Constructor.

        @param path_converter Converter used to build paths relative to
            the sync root — TODO confirm type [PathConverter]
        """
        super(NotifyIfModifiedAction, self).__init__()
        # Emitted when a file modification is detected
        self.file_modified = Signal(
            str,  # relative_path
            float)  # modified time

        # NOTE(review): attribute is spelled '_patch_converter' although it
        # holds a path converter; kept as-is since sibling methods read it
        self._patch_converter = path_converter
Example #2
0
    def __init__(self, storage, get_sync_dir_size, tracker=None, parent=None):
        """
        Constructor
        @param storage Storage class instance [Storage]
        @param get_sync_dir_size Callable returning the sync dir size
        @param tracker
            Statistics event tracker instance [stat_tracking.Tracker] or None
        @param parent Parent QObject or None
        """
        QObject.__init__(self, parent=parent)
        self._storage = storage
        # Watchdog observer is created lazily; None until started
        self._observer = None
        self._active = True
        # Maps watched root paths to their event handlers
        self._root_handlers = {}
        self._tracker = tracker
        self._reset_stats()
        self._started = False
        # Offline-changes processing state flags
        self.is_processing_offline = False
        self._processed_offline_changes = False
        self._get_sync_dir_size = get_sync_dir_size
        # Special dirs/files handled outside of normal processing
        self._special_dirs = dict()
        self._special_files = set()
        self._lock = RLock()
        self.start.connect(self._start)

        # Signal to be emitted when detecting offline changes
        self.offline_event_occured = Signal(FsEvent, bool)
 def __init__(self, path_converter, max_relpath_len, long_paths):
     """
     Constructor.

     @param path_converter Converter used to build relative paths
     @param max_relpath_len Max allowed relative path length in bytes [int]
     @param long_paths Shared set tracking currently-known long paths [set]
     """
     super(CheckLongPathAction, self).__init__()
     # Emitted when a too-long path is detected / goes away
     self.long_path_added = Signal(FilePath)
     self.long_path_removed = Signal(FilePath)
     self._path_converter = path_converter
     self._max_relpath_len = max_relpath_len
     self._long_paths = long_paths
Example #4
0
    def __init__(self, storage, root):
        """
        Constructor.

        @param storage Storage class instance [Storage]
        @param root Sync directory root path
        """
        super(FilesList, self).__init__()

        # Emitted whenever the cached file list changes
        self.file_list_changed = Signal()

        self._storage = storage
        self._pc = PathConverter(root)
        self._lock = RLock()

        # In-memory cache of file entries, capped well above the UI limit
        self._files_dict = {}
        self._store_limit = FILE_LIST_COUNT_LIMIT * 10
        # Last snapshot sent out; None until first emission
        self._last_sent = None
Example #5
0
    def __init__(self,
                 root,
                 db_file_created_cb=None,
                 extended_logging=True,
                 to_upgrade=True):
        """
        Constructor. Opens (creating/migrating if needed) the copies db.

        @param root Sync directory root path
        @param db_file_created_cb Callback invoked whenever a fresh db file
            has to be created, or None
        @param extended_logging Whether to enable the dedicated copies
            logger [bool]
        @param to_upgrade Whether to attempt db schema migration [bool]
        """
        self.possibly_sync_folder_is_removed = Signal()
        self.delete_copy = Signal(
            str,  # copy hash
            bool)  # with signature
        self.db_or_disk_full = Signal()

        self._db_file = join(get_copies_dir(root), 'copies.db')
        new_db_file = not exists(self._db_file)
        if new_db_file and callable(db_file_created_cb):
            db_file_created_cb()

        if to_upgrade and not new_db_file:
            # Database migration. It can be executed before opening db
            try:
                upgrade_db("copies_db", db_filename=self._db_file)
            except Exception as e:
                # Migration failed: drop the db and start from scratch
                remove_file(self._db_file)
                new_db_file = True
                logger.warning(
                    "Can't upgrade copies db. "
                    "Reason: (%s) Creating...", e)
                if callable(db_file_created_cb):
                    db_file_created_cb()

        self._engine = create_engine('sqlite:///{}'.format(
            FilePath(self._db_file)))
        self._Session = sessionmaker(bind=self._engine)

        # Create any missing tables; no-op for existing ones
        Base.metadata.create_all(self._engine, checkfirst=True)

        if new_db_file:
            # Mark the fresh db with the current schema revision
            try:
                stamp_db("copies_db", db_filename=self._db_file)
            except Exception as e:
                logger.error("Error stamping copies db: %s", e)

        self._lock = RLock()
        self._root = root

        self._extended_logging = extended_logging

        if not self._extended_logging:
            self._logger = None
        else:
            self._logger = logging.getLogger('copies_logger')
            self._logger.debug("Copies init")

        # Counts of recent changes keyed by copy hash
        self._last_changes = defaultdict(int)
class NotifyIfDeletedAction(ActionBase):
    """Emits a notification signal for every DELETE fs event it sees."""

    def __init__(self, path_converter):
        super(NotifyIfDeletedAction, self).__init__()
        # Emitted with the relative path of the deleted file
        self.file_deleted = Signal(str)  # relative_path

        self._patch_converter = path_converter

    def add_new_event(self, fs_event):
        """Emit file_deleted for DELETE events; ignore everything else."""
        if fs_event.event_type != DELETE:
            return
        src_path = fs_event.src
        if fs_event.is_link:
            # Strip the link suffix to report the real file path
            src_path = src_path[: -len(FILE_LINK_SUFFIX)]
        self.file_deleted.emit(
            self._patch_converter.create_relpath(src_path))
Example #7
0
 def __init__(self, storage, path_converter, tracker=None):
     """
     Constructor.

     @param storage Storage class instance [Storage]
     @param path_converter Converter used to build relative paths
     @param tracker Statistics event tracker or None
     """
     super(UpdateStorageAction, self).__init__()
     self._storage = storage
     self._path_converter = path_converter
     # Emitted once an event has been applied to storage
     self.event_processed = Signal(FsEvent)
     self._tracker = tracker
     self._waiting = False
class NotifyIfModifiedAction(ActionBase):
    """Emits a notification signal for every MODIFY fs event it sees."""

    def __init__(self, path_converter):
        super(NotifyIfModifiedAction, self).__init__()
        # Emitted with the relative path and mtime of the modified file
        self.file_modified = Signal(
            str,  # relative_path
            float)  # modified time

        self._patch_converter = path_converter

    def add_new_event(self, fs_event):
        """Emit file_modified for MODIFY events; ignore everything else."""
        if fs_event.event_type != MODIFY:
            return
        src_path = fs_event.src
        if fs_event.is_link:
            # Strip the link suffix to report the real file path
            src_path = src_path[: -len(FILE_LINK_SUFFIX)]
        self.file_modified.emit(
            self._patch_converter.create_relpath(src_path), fs_event.mtime)
Example #9
0
    def __init__(self):
        """Create the outcome signals and wire their logging slots."""
        super(ActionBase, self).__init__()
        self._active = True

        # One signal per possible outcome of event processing
        self.event_passed = Signal(FsEvent)
        self.event_returned = Signal(FsEvent)
        self.event_spawned = Signal(FsEvent)
        self.event_suppressed = Signal(FsEvent)
        self.event_is_processing = Signal(FsEvent)

        # Debug-log every outcome ('passed' slot is a deliberate no-op)
        self.event_spawned.connect(self._on_event_spawned)
        self.event_suppressed.connect(self._on_event_suppressed)
        self.event_returned.connect(self._on_event_returned)
        self.event_passed.connect(self._on_event_passed)
Example #10
0
    def notify(self, signal_type, source_name, signal_data):
        """Forward *signal_data* to the signals handler as a view signal.

        Does nothing when signal_data is None.
        """
        if signal_data is None:
            return

        view_signal = Signal(
            Signal.SOURCE_VIEW, source_name, signal_type, signal_data)

        with self._mutex:
            self._signals_handler.notify(view_signal)
class CheckLongPathAction(ActionBase):
    """Suppresses or rewrites fs events whose paths are too long to sync.

    Paths whose UTF-8 relative path exceeds *max_relpath_len* (or whose
    file name exceeds MAX_FILE_NAME_LEN) are tracked in the shared
    *long_paths* set and reported via long_path_added/long_path_removed.
    """

    def __init__(self, path_converter, max_relpath_len, long_paths):
        super(CheckLongPathAction, self).__init__()
        self.long_path_added = Signal(FilePath)
        self.long_path_removed = Signal(FilePath)
        self._path_converter = path_converter
        self._max_relpath_len = max_relpath_len
        self._long_paths = long_paths

    def _on_new_event(self, fs_event):
        if fs_event.event_type in (CREATE, MODIFY) and \
                self._is_path_long(fs_event.src):
            # New/changed file at a too-long path: track and suppress
            self._long_paths.add(fs_event.src)
            self.long_path_added.emit(fs_event.src)
            self.event_suppressed(fs_event)
        elif fs_event.event_type == MOVE and \
                self._is_path_long(fs_event.dst):
            if self._is_path_long(fs_event.src):
                # Move between two long paths: track the new location only
                self._long_paths.discard(fs_event.src)
                self._long_paths.add(fs_event.dst)
                self.long_path_added.emit(fs_event.dst)
                self.long_path_removed.emit(fs_event.src)
                self.event_suppressed(fs_event)
            else:
                # Moved from a valid path to a long one: treat as deletion
                self._long_paths.add(fs_event.dst)
                self.long_path_added.emit(fs_event.dst)
                fs_event.event_type = DELETE
                self.event_passed(fs_event)
        elif fs_event.event_type == MOVE and \
                self._is_path_long(fs_event.src):
            # Moved from a long path to a valid one: treat as creation
            fs_event.event_type = CREATE
            self.event_passed(fs_event)
        elif fs_event.event_type == DELETE and \
                self._is_path_long(fs_event.src):
            # Long path removed: stop tracking it
            self._long_paths.discard(fs_event.src)
            self.long_path_removed.emit(fs_event.src)
            self.event_suppressed(fs_event)
        else:
            self.event_passed(fs_event)

    def _is_path_long(self, path):
        """Return True if *path* exceeds the relpath or file-name limits.

        Lengths are measured in UTF-8 bytes, matching filesystem limits.
        """
        rel_path = self._path_converter.create_relpath(path)
        file_name = rel_path.split('/')[-1]
        # str.encode already returns bytes; the original bytes() wrapper
        # was redundant and has been dropped
        return len(rel_path.encode('utf-8')) > self._max_relpath_len or \
            len(file_name.encode('utf-8')) > MAX_FILE_NAME_LEN
Example #12
0
    def __init__(self, path_converter, db_file_created_cb=None):
        """
        Constructor. Opens (creating/migrating if needed) the storage db.

        @param path_converter Converter used to build absolute paths
        @param db_file_created_cb Callback invoked whenever a fresh db file
            has to be created, or None
        """
        self._pc = path_converter

        self.possibly_sync_folder_is_removed = Signal()
        self.db_or_disk_full = Signal()

        self._db_file = self._pc.create_abspath('.pvtbox/storage.db')
        logger.debug("DB file: %s", self._db_file)
        new_db_file = not exists(self._db_file)
        if new_db_file and callable(db_file_created_cb):
            db_file_created_cb()

        make_dirs(self._db_file)

        if not new_db_file:
            # Database migration. It can be executed before opening db
            try:
                upgrade_db("storage_db", db_filename=self._db_file)
            except Exception as e:
                # Migration failed: drop the db and start from scratch
                remove_file(self._db_file)
                new_db_file = True
                logger.warning(
                    "Can't upgrade storage db. "
                    "Reason: (%s) Creating...", e)
                if callable(db_file_created_cb):
                    db_file_created_cb()

        # Long timeouts: the db may be contended across threads
        self._engine = create_engine('sqlite:///{}'.format(
            FilePath(self._db_file)),
                                     connect_args={
                                         'timeout': 60 * 1000,
                                         'check_same_thread': False,
                                     })
        self._engine.pool_timeout = 60 * 60 * 1000
        self._Session = sessionmaker(bind=self._engine)

        # Create any missing tables; no-op for existing ones
        Base.metadata.create_all(self._engine, checkfirst=True)

        if new_db_file:
            # Mark the fresh db with the current schema revision
            try:
                stamp_db("storage_db", db_filename=self._db_file)
            except Exception as e:
                logger.error("Error stamping storage db: %s", e)

        self._lock = threading.RLock()
Example #13
0
class NotifyIfMovedAction(ActionBase):
    """Emits a notification signal for every MOVE fs event it sees."""

    def __init__(self, path_converter):
        super(NotifyIfMovedAction, self).__init__()
        # Emitted with the old and new relative paths of the moved file
        self.file_moved = Signal(str, str)  # old, new file path

        self._patch_converter = path_converter

    def add_new_event(self, fs_event):
        """Emit file_moved for MOVE events; ignore everything else."""
        if fs_event.event_type == MOVE:
            src_path = fs_event.src[: -len(FILE_LINK_SUFFIX)] \
                if fs_event.is_link \
                else fs_event.src
            # BUG FIX: the destination must be derived from fs_event.dst;
            # the original sliced fs_event.src in the is_link branch, so
            # moved links reported a wrong destination path
            dst_path = fs_event.dst[: -len(FILE_LINK_SUFFIX)] \
                if fs_event.is_link \
                else fs_event.dst
            self.file_moved.emit(
                self._patch_converter.create_relpath(src_path),
                self._patch_converter.create_relpath(dst_path))
Example #14
0
    def __init__(self, config_file, check=True):
        """
        Constructor.

        @param config_file Path to the configuration file
        @param check Whether to validate the config on first load [bool]
        """
        # Emitted when some parameter values have changed
        # dict: name -> namedtuple(old_value, new_value)
        self.settings_changed = Signal(dict)

        self.encrypt = True
        self.config = None
        self.config_file_name = config_file
        self._defaults = set()

        # Load (and optionally validate) the configuration right away
        self.refresh(check=check)
Example #15
0
    def __init__(self,
                 db,
                 event,
                 get_download_backups_mode,
                 is_smart_sync=False):
        """
        Constructor.

        @param db Database instance
        @param event Event to process, or None
        @param get_download_backups_mode Callable returning the current
            backups download mode
        @param is_smart_sync Whether smart sync is enabled [bool]
        """
        super(EventStrategy, self).__init__()

        self.db = db
        self.event = event
        # Cache ids defensively: event and event.file may be absent
        self.event_id = event.id if event else 0
        self.file_id = event.file.id if event and event.file else 0

        self._is_smart_sync = is_smart_sync
        self._cached_file_path = None
        self._events_queue = None
        self._download_backups = get_download_backups_mode()
        self._force_move = False

        self.change_processing_events_counts = Signal(int,
                                                      int)  # (local, remote)
        self.append_local_event = Signal(Event, str, str, int, bool)
        self.rename_or_delete_dst_path = Signal(str, int, Session, bool)
Example #16
0
class MoveFileRecentCopyAction(ActionBase):
    """Moves a file's recent copy into the synced-copies store.

    On success the event gains a file_synced_copy attribute and is passed
    on; on any failure the event is returned for reprocessing.
    """

    def __init__(self, root, copies_storage):
        super(MoveFileRecentCopyAction, self).__init__()
        self._root = root
        self._copies_storage = copies_storage

        # Emitted with the hash of a newly stored copy
        self.copy_added = Signal(str)

    def _on_new_event(self, fs_event):
        # Target name in the copies store is the file's content hash
        file_synced_copy_name = FilePath(
            join(get_copies_dir(self._root), fs_event.new_hash)).longpath
        file_recent_copy_name = FilePath(fs_event.file_recent_copy).longpath

        # Take a reference first so the copy is not garbage-collected
        self._copies_storage.add_copy_reference(
            fs_event.new_hash,
            reason="MoveFileRecentCopyAction {}".format(fs_event.src))

        if exists(file_recent_copy_name):
            if not exists(file_synced_copy_name):
                try:
                    shutil.move(file_recent_copy_name, file_synced_copy_name)
                    self.copy_added.emit(fs_event.new_hash)
                except (OSError, IOError):
                    # Roll back the reference taken above, then retry later
                    self._copies_storage.remove_copy_reference(
                        fs_event.new_hash,
                        reason="MoveFileRecentCopyAction {}".format(
                            fs_event.src))
                    self.event_returned(fs_event)
                    return
                # Size mismatch means the file changed mid-copy: retry
                if stat(file_synced_copy_name).st_size != fs_event.file_size:
                    self.event_returned(fs_event)
                    return
            fs_event.file_synced_copy = FilePath(file_synced_copy_name)

            self.event_passed(fs_event)
        else:
            # Recent copy vanished: retry the event
            self.event_returned(fs_event)

    def _is_sutable(self, fs_event):
        # Only non-directory CREATE/MODIFY events that have a recent copy
        return (not fs_event.is_dir and fs_event.event_type in (CREATE, MODIFY)
                and fs_event.file_recent_copy)
Example #17
0
class LocalProcessor(object):
    """Converts local fs events into messages and forwards them onward."""

    def __init__(self, root, storage, path_converter, tracker):
        self._root = root
        self._path_converter = path_converter
        self._tracker = tracker
        self._storage = storage
        # Emitted with (message dict, originating fs event)
        self.event_is_arrived = Signal(dict, object)

    def process(self, fs_event):
        """ creates message about event

        For MOVE events the message carries 'src'/'dst' and the old hash;
        for all other events it carries 'path', 'hash' and 'old_hash'.
        """

        template_message = dict(event=fs_event.event_type,
                                time=time.time(),
                                type=DIRECTORY if fs_event.is_dir else FILE)
        # For links, report the real path without the link suffix
        src = fs_event.src if not fs_event.is_link \
            else fs_event.src[: -len(FILE_LINK_SUFFIX)]
        dst = fs_event.dst if not fs_event.is_link or \
                              fs_event.event_type != MOVE \
            else fs_event.dst[: -len(FILE_LINK_SUFFIX)]

        if fs_event.event_type == MOVE:
            template_message['src'] = self._path_converter.create_relpath(src)
            template_message['dst'] = self._path_converter.create_relpath(dst)
            template_message['hash'] = fs_event.old_hash
        else:
            template_message['path'] = self._path_converter.create_relpath(src)
            template_message['hash'] = fs_event.new_hash
            template_message['old_hash'] = fs_event.old_hash
        template_message['file_size'] = fs_event.file_size

        # Fail fast if nobody is connected to receive the message
        self.event_is_arrived.check_connected()
        self.event_is_arrived(template_message, fs_event)
        if fs_event.event_type != MOVE:
            path = template_message['path']
        else:
            path = template_message['src']

        logger.info("Event type %s for path '%s'",
                    event_names[template_message['event']], path)
class MakeFileRecentCopyAction(ActionBase):
    """Makes a 'recent copy' of the event's file in the copies dir.

    The event gains a file_recent_copy attribute on success; on failure
    the event is suppressed (no disk space) or returned for retry.
    """

    def __init__(self, root):
        super(MakeFileRecentCopyAction, self).__init__()
        self._root = root
        # Emitted with (event, path, is_os_error) when disk space runs out
        self.no_disk_space = Signal(object, str, bool)

    def _on_new_event(self, fs_event):
        # Pre-check: file plus its signature must fit on disk
        if fs_event.file_size + get_signature_file_size(fs_event.file_size) > \
                get_free_space_by_filepath(fs_event.src):
            self.no_disk_space.emit(fs_event, fs_event.src, False)
            self.event_suppressed(fs_event)
            return

        # Recent copy name is keyed by the event id to stay unique
        file_recent_copy_name = FilePath(
            join(get_copies_dir(self._root),
                 'recent_copy_' + str(fs_event.id)))
        fs_event.file_recent_copy = file_recent_copy_name
        recent_copy_longpath = FilePath(file_recent_copy_name).longpath
        try:
            copy_file(FilePath(fs_event.src).longpath, recent_copy_longpath)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOSPC:
                # Ran out of space mid-copy
                self.no_disk_space.emit(fs_event, fs_event.src, True)
                self.event_suppressed(fs_event)
                return

            # Any other I/O error: retry the event later
            self.event_returned(fs_event)
            return

        # Size mismatch means the file changed mid-copy: retry
        recent_copy_size = stat(recent_copy_longpath).st_size
        if recent_copy_size != fs_event.file_size:
            self.event_returned(fs_event)
            return

        self.event_passed(fs_event)

    def _is_sutable(self, fs_event):
        # Only non-empty, non-directory, non-link CREATE/MODIFY events
        return (not fs_event.is_dir and fs_event.event_type in (CREATE, MODIFY)
                and fs_event.file_size and not fs_event.is_link)
Example #19
0
class ActionBase(object):
    """Base class for fs event processing actions.

    Each action receives events via add_new_event() and reports the
    outcome through one of its signals: passed, returned, spawned or
    suppressed.
    """

    def __init__(self):
        super(ActionBase, self).__init__()
        self._active = True
        # One signal per possible outcome of event processing
        self.event_passed = Signal(FsEvent)
        self.event_returned = Signal(FsEvent)
        self.event_spawned = Signal(FsEvent)
        self.event_suppressed = Signal(FsEvent)
        self.event_is_processing = Signal(FsEvent)

        # Wire debug-logging slots for each outcome
        self.event_passed.connect(self._on_event_passed)
        self.event_returned.connect(self._on_event_returned)
        self.event_suppressed.connect(self._on_event_suppressed)
        self.event_spawned.connect(self._on_event_spawned)

    def set_active(self, active=True):
        """Enable or disable this action; inactive actions pass events on."""
        self._active = active

    def _on_event_passed(self, fs_event):
        # The common case; intentionally not logged
        pass

    def _on_event_returned(self, fs_event):
        logger.debug('%s RETURNED the event %s for additional processing',
                     self.__class__.__name__, fs_event)

    def _on_event_spawned(self, fs_event):
        logger.debug('%s SPAWNED new event %s for processing',
                     self.__class__.__name__, fs_event)

    def _on_event_suppressed(self, fs_event):
        # Fixed typo in the log message ('SUPRESSED' -> 'SUPPRESSED')
        logger.debug('%s SUPPRESSED the event %s', self.__class__.__name__,
                     fs_event)

    def add_new_event(self, fs_event):
        """Process *fs_event* if suitable, otherwise pass it through."""
        if self._is_sutable(fs_event):
            logger.debug('%s received the event %s', self.__class__.__name__,
                         fs_event)
            self.event_is_processing(fs_event)
            self._on_new_event(fs_event)
        else:
            self.event_passed(fs_event)

    def _is_sutable(self, fs_event):
        # NOTE(review): method name kept (sic) for compatibility with
        # subclasses overriding it
        return self._active

    def _on_new_event(self, fs_event):
        # Default behaviour: pass the event unchanged
        self.event_passed(fs_event)
Example #20
0
    def __init__(self, root, storage, path_converter, exceptions):
        """
        Constructor.

        @param root Sync directory root path
        @param storage Storage class instance [Storage]
        @param path_converter Converter used to build relative paths
        @param exceptions Exceptions container used by this processor
        """
        self._root = root
        self._storage = storage
        self._path_converter = path_converter
        self._exceptions = exceptions

        # Counter (and its lock) for generating unique temporary ids
        self._tmp_id = 0
        self._tmp_id_lock = RLock()

        # Notification signals
        self.file_moved = Signal(str, str)
        self.file_deleted = Signal(str)
        self.file_modified = Signal(str, float)
        self.access_denied = Signal(str)

        self._init_temp_dir()
Example #21
0
    def __init__(self,
                 root,
                 hidden_dirs,
                 hidden_files,
                 patterns=None,
                 is_special=False):
        """
        Constructor.

        @param root Watched directory root path
        @param hidden_dirs Directory names (relative to root) to ignore
        @param hidden_files File name suffixes to ignore
        @param patterns Glob patterns to watch, or None for all
        @param is_special Whether this handler watches special files [bool]
        """
        self._hidden_files = hidden_files
        self._hidden_dirs = hidden_dirs
        self._is_special = is_special

        # Ignore hidden files anywhere (by suffix), plus each hidden dir
        # and its contents under the root
        ignore_patterns = ['*' + hidden_file for hidden_file in hidden_files]
        for hidden_dir in hidden_dirs:
            ignore_patterns.append(
                FilePath(op.join(root, hidden_dir)).longpath)
            ignore_patterns.append(
                FilePath(op.join(root, hidden_dir, '*')).longpath)
        super(WatchdogHandler, self).__init__(ignore_patterns=ignore_patterns,
                                              patterns=patterns)

        # Emitted for every fs event observed under root
        self.event_is_arrived = Signal(FsEvent, bool)
Example #22
0
    def __init__(self, events_processing_delay, tracker):
        """
        Constructor.

        @param events_processing_delay Delay before processing events
        @param tracker Statistics event tracker or None
        """
        super(DelayAction, self).__init__()

        # State signals
        self.idle = Signal()
        self.working = Signal()
        self.file_added_to_indexing = Signal(FilePath)
        self.file_removed_from_indexing = Signal(FilePath, bool)

        self._delay = events_processing_delay
        self._tracker = tracker
        self._offline_delay = 0.5
        self._time_provider = time.time

        self._started = False
        self._is_idle = True
        self._lock = threading.RLock()
        # workers_count is expected as a class-level attribute
        self._executor = ThreadPoolExecutor(max_workers=self.workers_count)

        # Bookkeeping for delayed / in-flight events
        self._delayed_paths = {}
        self._delayed_move_dst_paths = set()
        self._processing_events = set()
        self._batch_checking_paths = set()
        self._loud_events = set()
        self._sorted_offline_events = None
Example #23
0
    def __init__(self,
                 root,
                 events_processing_delay,
                 copies_storage,
                 get_sync_dir_size,
                 conflict_file_suffix='',
                 tracker=None,
                 storage=None,
                 excluded_dirs=(),
                 parent=None,
                 max_relpath_len=3096,
                 db_file_created_cb=None):
        """
        Constructor. Wires up fs watching, event processing and signals.

        @param root Sync directory root path
        @param events_processing_delay Delay before processing fs events
        @param copies_storage Copies storage instance
        @param get_sync_dir_size Callable returning the sync dir size
        @param conflict_file_suffix Suffix for conflict file names [str]
        @param tracker Statistics event tracker or None
        @param storage Storage instance, or None to create one
        @param excluded_dirs Directories excluded from sync
        @param parent Parent QObject or None
        @param max_relpath_len Max allowed relative path length [int]
        @param db_file_created_cb Callback invoked when a db file is created
        """
        QObject.__init__(self, parent=parent)
        freeze_support()

        self._tracker = tracker

        self._root = root

        # Storage is created here unless one is injected (e.g. for tests)
        self._path_converter = PathConverter(self._root)
        self._storage = storage if storage else Storage(
            self._path_converter, db_file_created_cb)
        self._copies_storage = copies_storage
        self._copies_storage.delete_copy.connect(self.on_delete_copy)
        # Re-export storage signals under this object's interface
        self.possibly_sync_folder_is_removed = \
            self._storage.possibly_sync_folder_is_removed
        self.db_or_disk_full = self._storage.db_or_disk_full
        self._get_sync_dir_size = get_sync_dir_size
        self._conflict_file_suffix = conflict_file_suffix

        self._rsync = Rsync

        _hide_files = HIDDEN_FILES
        _hide_dirs = HIDDEN_DIRS

        # Drop leftover recent copies from a previous run
        self._clean_recent_copies()

        # Pipeline of actions applied to every fs event
        self._actions = FsEventActions(
            self._root,
            events_processing_delay=events_processing_delay,
            path_converter=self._path_converter,
            storage=self._storage,
            copies_storage=self._copies_storage,
            rsync=self._rsync,
            tracker=self._tracker,
            parent=None,
            max_relpath_len=max_relpath_len,
        )

        # Main watcher for the sync root
        self._watch = WatchdogHandler(root=FilePath(self._root).longpath,
                                      hidden_files=_hide_files,
                                      hidden_dirs=_hide_dirs)

        # Separate watcher for in-progress '*.download' files
        self._download_watch = WatchdogHandler(root=FilePath(
            self._root).longpath,
                                               hidden_files=_hide_files,
                                               hidden_dirs=_hide_dirs,
                                               patterns=['*.download'],
                                               is_special=True)

        self._observer = ObserverWrapper(self._storage,
                                         self._get_sync_dir_size,
                                         self._tracker,
                                         parent=None)
        self._observer.event_handled.connect(
            self._observer.on_event_is_handled_slot)
        # Inform the observer whether each event was passed or suppressed
        self._actions.event_passed.connect(
            lambda ev: self._observer.event_handled.emit(ev, False))
        self._actions.event_suppressed.connect(
            lambda ev: self._observer.event_handled.emit(ev, True))

        # Add FS root for events tracking
        self._observer.schedule(self._watch, root)

        self._local_processor = LocalProcessor(self._root, self._storage,
                                               self._path_converter,
                                               self._tracker)
        self.event_is_arrived = self._local_processor.event_is_arrived
        self._quiet_processor = QuietProcessor(self._root, self._storage,
                                               self._path_converter,
                                               self.Exceptions)

        self._files_list = FilesList(self._storage, self._root)

        # Actions and observer run in a dedicated worker thread
        self._thread = QThread()
        self._thread.started.connect(self._on_thread_started)
        self._actions.moveToThread(self._thread)
        self._observer.moveToThread(self._thread)

        self._watch.event_is_arrived.connect(self._on_event_arrived)
        self._download_watch.event_is_arrived.connect(self._on_event_arrived)
        self._actions.event_passed.connect(self._local_processor.process)

        self._local_events_flag = False
        self._actions.event_passed.connect(self._set_local_events_flag)

        # Re-export action signals under this object's interface
        self.error_happens = self._actions.error_happens
        self.no_disk_space = self._actions.no_disk_space
        self.idle = self._actions.idle
        self.working = self._actions.working
        self.file_added_to_ignore = self._actions.file_added_to_ignore
        self.file_removed_from_ignore = self._actions.file_removed_from_ignore
        self.file_added_to_indexing = self._actions.file_added_to_indexing
        self.file_removed_from_indexing = self._actions.file_removed_from_indexing
        self.file_added = self._actions.file_added
        self.file_modified = self._actions.file_modified
        # file_deleted aggregates deletions from actions and quiet processor
        self.file_deleted = Signal(str)
        self._actions.file_deleted.connect(self.file_deleted)
        self._quiet_processor.file_deleted.connect(self.file_deleted)
        self._quiet_processor.file_modified.connect(self.file_modified)
        self.file_moved = self._quiet_processor.file_moved
        self._actions.file_moved.connect(lambda o, n: self.file_moved(o, n))
        self.access_denied = self._quiet_processor.access_denied

        # Keep the cached file list in sync with all file signals
        self.file_list_changed = self._files_list.file_list_changed
        self.file_added.connect(self._files_list.on_file_added)
        self.file_deleted.connect(self._files_list.on_file_deleted)
        self.file_moved.connect(self._files_list.on_file_moved)
        self.file_modified.connect(self._files_list.on_file_modified)
        self.idle.connect(self._files_list.on_idle)

        self.process_offline.connect(self._observer.process_offline_changes)

        self.copy_added = Signal(str)
        self._actions.copy_added.connect(self.copy_added)

        self._actions.rename_file.connect(self._rename_file)

        # Emitted for events on special files (downloads etc.)
        self.special_file_event = Signal(
            str,  # path
            int,  # event type
            str)  # new path
        self._special_files = list()
        self._excluded_dirs = list(map(FilePath, excluded_dirs))

        # Online processing is gated until explicitly allowed
        self._online_processing_allowed = False
        self._online_modifies_processing_allowed = False

        self._paths_with_modify_quiet = set()
Example #24
0
class FilesystemMonitor(QObject):
    """
    Class provides all functions needed to work with filesystem
    in scope of project.

    Wires together watchdog handlers, offline-change observation, fs event
    processing and the known-files list, and exposes Qt signals for
    lifecycle and offline processing.
    """
    # Reserve some room below the fs limit for name decorations (suffixes).
    max_file_name_length = MAX_FILE_NAME_LEN - 5
    # Suffix used when renaming dirs that collide with selective sync.
    selective_sync_conflict_suffix = "selective sync conflict"

    # Emitted from the worker thread when monitoring starts/stops.
    started = pyqtSignal()
    stopped = pyqtSignal()
    # Triggers offline-change processing; the bool tells the observer
    # whether processing of online modifies is currently allowed.
    process_offline = pyqtSignal(bool)

    def __init__(self,
                 root,
                 events_processing_delay,
                 copies_storage,
                 get_sync_dir_size,
                 conflict_file_suffix='',
                 tracker=None,
                 storage=None,
                 excluded_dirs=(),
                 parent=None,
                 max_relpath_len=3096,
                 db_file_created_cb=None):
        """
        Constructor

        @param root Sync directory root path [unicode]
        @param events_processing_delay Delay before processing fs events
        @param copies_storage Copies storage instance
        @param get_sync_dir_size Callable returning sync dir size
        @param conflict_file_suffix Suffix for conflict file names [str]
        @param tracker Statistics event tracker instance or None
        @param storage Storage instance; created internally when None
        @param excluded_dirs Dirs excluded from sync [iterable]
        @param parent Parent QObject or None
        @param max_relpath_len Maximum relative path length [int]
        @param db_file_created_cb Callback invoked when db file is created
        """
        QObject.__init__(self, parent=parent)
        # Required for multiprocessing support in frozen (packaged) builds.
        freeze_support()

        self._tracker = tracker

        self._root = root

        self._path_converter = PathConverter(self._root)
        self._storage = storage if storage else Storage(
            self._path_converter, db_file_created_cb)
        self._copies_storage = copies_storage
        self._copies_storage.delete_copy.connect(self.on_delete_copy)
        self.possibly_sync_folder_is_removed = \
            self._storage.possibly_sync_folder_is_removed
        self.db_or_disk_full = self._storage.db_or_disk_full
        self._get_sync_dir_size = get_sync_dir_size
        self._conflict_file_suffix = conflict_file_suffix

        self._rsync = Rsync

        _hide_files = HIDDEN_FILES
        _hide_dirs = HIDDEN_DIRS

        # Drop temporary '*.recent_copy_*' leftovers from previous runs.
        self._clean_recent_copies()

        self._actions = FsEventActions(
            self._root,
            events_processing_delay=events_processing_delay,
            path_converter=self._path_converter,
            storage=self._storage,
            copies_storage=self._copies_storage,
            rsync=self._rsync,
            tracker=self._tracker,
            parent=None,
            max_relpath_len=max_relpath_len,
        )

        # Watch for regular fs events under the sync root.
        self._watch = WatchdogHandler(root=FilePath(self._root).longpath,
                                      hidden_files=_hide_files,
                                      hidden_dirs=_hide_dirs)

        # Separate watch matching only '*.download' special files.
        self._download_watch = WatchdogHandler(root=FilePath(
            self._root).longpath,
                                               hidden_files=_hide_files,
                                               hidden_dirs=_hide_dirs,
                                               patterns=['*.download'],
                                               is_special=True)

        self._observer = ObserverWrapper(self._storage,
                                         self._get_sync_dir_size,
                                         self._tracker,
                                         parent=None)
        self._observer.event_handled.connect(
            self._observer.on_event_is_handled_slot)
        self._actions.event_passed.connect(
            lambda ev: self._observer.event_handled.emit(ev, False))
        self._actions.event_suppressed.connect(
            lambda ev: self._observer.event_handled.emit(ev, True))

        # Add FS root for events tracking
        self._observer.schedule(self._watch, root)

        self._local_processor = LocalProcessor(self._root, self._storage,
                                               self._path_converter,
                                               self._tracker)
        self.event_is_arrived = self._local_processor.event_is_arrived
        self._quiet_processor = QuietProcessor(self._root, self._storage,
                                               self._path_converter,
                                               self.Exceptions)

        self._files_list = FilesList(self._storage, self._root)

        # Actions and observer run in a dedicated worker thread.
        self._thread = QThread()
        self._thread.started.connect(self._on_thread_started)
        self._actions.moveToThread(self._thread)
        self._observer.moveToThread(self._thread)

        self._watch.event_is_arrived.connect(self._on_event_arrived)
        self._download_watch.event_is_arrived.connect(self._on_event_arrived)
        self._actions.event_passed.connect(self._local_processor.process)

        self._local_events_flag = False
        self._actions.event_passed.connect(self._set_local_events_flag)

        # Re-export helper signals so external consumers connect via self.
        self.error_happens = self._actions.error_happens
        self.no_disk_space = self._actions.no_disk_space
        self.idle = self._actions.idle
        self.working = self._actions.working
        self.file_added_to_ignore = self._actions.file_added_to_ignore
        self.file_removed_from_ignore = self._actions.file_removed_from_ignore
        self.file_added_to_indexing = self._actions.file_added_to_indexing
        self.file_removed_from_indexing = self._actions.file_removed_from_indexing
        self.file_added = self._actions.file_added
        self.file_modified = self._actions.file_modified
        # file_deleted aggregates deletions from both actions and
        # the quiet processor.
        self.file_deleted = Signal(str)
        self._actions.file_deleted.connect(self.file_deleted)
        self._quiet_processor.file_deleted.connect(self.file_deleted)
        self._quiet_processor.file_modified.connect(self.file_modified)
        self.file_moved = self._quiet_processor.file_moved
        self._actions.file_moved.connect(lambda o, n: self.file_moved(o, n))
        self.access_denied = self._quiet_processor.access_denied

        # Keep the aggregated files list in sync with file events.
        self.file_list_changed = self._files_list.file_list_changed
        self.file_added.connect(self._files_list.on_file_added)
        self.file_deleted.connect(self._files_list.on_file_deleted)
        self.file_moved.connect(self._files_list.on_file_moved)
        self.file_modified.connect(self._files_list.on_file_modified)
        self.idle.connect(self._files_list.on_idle)

        self.process_offline.connect(self._observer.process_offline_changes)

        self.copy_added = Signal(str)
        self._actions.copy_added.connect(self.copy_added)

        self._actions.rename_file.connect(self._rename_file)

        self.special_file_event = Signal(
            str,  # path
            int,  # event type
            str)  # new path
        self._special_files = list()
        self._excluded_dirs = list(map(FilePath, excluded_dirs))

        # Online processing stays blocked until initial sync allows it.
        self._online_processing_allowed = False
        self._online_modifies_processing_allowed = False

        self._paths_with_modify_quiet = set()

    def on_initial_sync_finished(self):
        """Forward initial-sync completion; emit idle when fully quiescent."""
        logger.debug("on_initial_sync_finished")
        self._actions.on_initial_sync_finished()
        pending = self._actions.get_fs_events_count()
        offline_busy = self._observer.is_processing_offline
        if not pending and not offline_busy:
            self.idle.emit()

    def _on_processed_offline_changes(self):
        """Emit idle after offline processing if no fs events remain queued."""
        logger.debug("_on_processed_offline_changes")
        pending = self._actions.get_fs_events_count()
        if not pending:
            self.idle.emit()

    def on_initial_sync_started(self):
        """Forward initial-sync start and block online event processing."""
        logger.debug("on_initial_sync_started")
        self._actions.on_initial_sync_started()
        # Both gates are closed until start_online_* is called again.
        self._online_modifies_processing_allowed = False
        self._online_processing_allowed = False

    def start_online_processing(self):
        """Allow online event processing; trigger offline pass on first call."""
        logger.debug("start_online_processing")
        first_call = not self._online_processing_allowed
        if first_call:
            logger.debug("start_online_processing, emit process_offline")
            self.process_offline.emit(self._online_modifies_processing_allowed)
        self._online_processing_allowed = True

    def start_online_modifies_processing(self):
        """Allow online MODIFY processing; trigger offline pass on first call."""
        logger.debug("start_online_modifies_processing")
        first_call = not self._online_modifies_processing_allowed
        if first_call:
            logger.debug(
                "start_online_modifies_processing, emit process_offline")
            self.process_offline.emit(True)
        self._online_modifies_processing_allowed = True

    def get_root(self):
        """Return the path of the monitored sync directory."""
        return self._root

    def root_exists(self):
        """Return True if the sync root currently exists as a directory."""
        return op.isdir(self._root)

    def _on_thread_started(self):
        """Connect observer signals and kick off actions/observer processing.

        Runs once the worker thread starts (also re-invoked by start() when
        the thread is already running).
        """
        logger.info("Start monitoring of '%s'", self._root)
        # (Re)connect offline-change signals; stop() disconnects them.
        self._observer.offline_event_occured.connect(self._on_event_arrived)
        self._observer.processed_offline_changes.connect(
            self._on_processed_offline_changes)
        self.started.emit()
        self._actions.start.emit()
        self._observer.start.emit()
        self._local_events_flag = False

    @benchmark
    def start(self):
        """Begin monitoring, starting the worker thread when necessary."""
        logger.debug("start")
        self._observer.set_active()
        if not self._thread.isRunning():
            # _on_thread_started fires via the thread's started signal.
            self._thread.start()
        else:
            self._on_thread_started()
        self._files_list.start()

    def stop(self):
        """Stop monitoring: disconnect observer signals and halt components."""
        logger.info("stopped monitoring")
        disconnections = (
            (self._observer.offline_event_occured,
             self._on_event_arrived,
             "Can't disconnect offline_event_occured"),
            (self._observer.processed_offline_changes,
             self._on_processed_offline_changes,
             "Can't disconnect processed_offline_changes"),
        )
        for signal, slot, warning in disconnections:
            try:
                signal.disconnect(slot)
            except RuntimeError:
                # Already disconnected (e.g. stop() called twice).
                logger.warning(warning)
        self._actions.stop()
        self._observer.stop()
        self._files_list.stop()
        self.stopped.emit()

    def quit(self):
        """Stop monitoring and shut down the worker thread, blocking."""
        self.stop()
        self._thread.quit()
        # Wait for the thread's event loop to finish before returning.
        self._thread.wait()

    def is_processing(self, file_path):
        """Return True if the file at file_path is currently being processed."""
        abs_path = self._path_converter.create_abspath(file_path)
        return self._actions.is_processing(abs_path)

    def is_known(self, file_path):
        """Return True if the path (sans file-link suffix) is in storage."""
        if file_path.endswith(FILE_LINK_SUFFIX):
            # Strip the link suffix to look up the underlying file.
            suffix_len = len(FILE_LINK_SUFFIX)
            file_path = file_path[:-suffix_len]
        known = self._storage.get_known_file(file_path)
        return known is not None

    def process_offline_changes(self):
        """Emit process_offline if local fs events arrived since last call."""
        if not self._local_events_flag:
            return
        self.process_offline.emit(self._online_modifies_processing_allowed)
        self._local_events_flag = False

    def _set_local_events_flag(self, fs_event):
        """Remember that a live (non-offline) fs event passed through."""
        if fs_event.is_offline:
            return
        self._local_events_flag = True

    def clean_storage(self):
        """Clear the storage DB and remove file links under the root."""
        self._storage.clean()
        delete_file_links(self._root)

    def clean_copies(self, with_files=True):
        """Clear copies storage, optionally deleting the copy files too."""
        self._copies_storage.clean(with_files=with_files)

    def move_files_to_copies(self):
        """Move every known file into the copies dir (named by its hash),
        then delete remaining top-level folders and clean the storage DB.
        """
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            files_with_hashes = session\
                .query(File.relative_path, File.file_hash) \
                .filter(File.is_folder == 0) \
                .all()
            copies_dir = get_copies_dir(self._root)
            for (file, hashsum) in files_with_hashes:
                hash_path = op.join(copies_dir, hashsum)
                file_path = self._path_converter.create_abspath(file)
                # Only the first file with a given hash becomes the copy.
                if not op.exists(hash_path):
                    try:
                        os.rename(file_path, hash_path)
                    except Exception as e:
                        logger.error("Error moving file to copy: %s", e)
                # Remove the original if still present (duplicate hash
                # or failed rename above).
                remove_file(file_path)
        abs_path = FilePath(self._root).longpath
        # Collect top-level entries, skipping the service (hidden) dirs.
        folders_plus_hidden = [
            self._path_converter.create_abspath(f)
            for f in os.listdir(abs_path) if f not in HIDDEN_DIRS
        ]
        for folder in folders_plus_hidden:
            if not op.isdir(folder):
                continue

            try:
                remove_dir(folder)
            except Exception as e:
                logger.error("Error removing dir '%s' (%s)", folder, e)
        logger.info("Removed all files and folders")
        self._storage.clean()

    def clean(self):
        """Delete every known file and folder, then clear the storage DB."""
        for known_file in self._storage.get_known_files():
            try:
                remove_file(known_file)
            except Exception as e:
                logger.error("Error removing file '%s' (%s)", known_file, e)
        known_folders = self._storage.get_known_folders()
        # Shortest paths first, so parents are handled before children.
        for known_folder in sorted(known_folders, key=len):
            try:
                remove_dir(known_folder)
            except Exception as e:
                logger.error("Error removing dir '%s' (%s)", known_folder, e)
        logger.info("Removed all files and folders")
        self._storage.clean()

    def accept_delete(self,
                      path,
                      is_directory=False,
                      events_file_id=None,
                      is_offline=True):
        '''
        Processes file deletion

        @param path Name of file relative to sync directory [unicode]
        @param is_directory Whether path is a directory [bool]
        @param events_file_id Events file id for storage sync
        @param is_offline Whether the file is offline [bool]
        '''
        full_path = self._path_converter.create_abspath(path)
        object_type = 'directory' if is_directory else 'file'
        logger.debug("Deleting '%s' %s...", path, object_type)

        if is_directory:
            self._quiet_processor.delete_directory(full_path, events_file_id)
        else:
            self._quiet_processor.delete_file(
                full_path, events_file_id, is_offline)
        # Deleted paths no longer count as "being indexed".
        self.file_removed_from_indexing.emit(FilePath(full_path), True)

        logger.info("'%s' %s is deleted", path, object_type)

    def set_patch_uuid(self, patch_path, diff_file_uuid):
        """Move a patch file into place under its diff uuid name."""
        destination = self.get_patch_path(diff_file_uuid)
        shutil.move(patch_path, destination)

    def get_patch_path(self, diff_file_uuid):
        """Return the absolute path of the patch with the given uuid."""
        return os.path.join(get_patches_dir(self._root), diff_file_uuid)

    def create_directory(self, path, events_file_id):
        """Create a directory at the relative path via the quiet processor.

        On AssertionError (inconsistent parent), a quiet offline DELETE
        event for the parent dir is injected and the error re-raised.
        """
        full_path = self._path_converter.create_abspath(path)
        try:
            self._quiet_processor.create_directory(
                full_path,
                events_file_id=events_file_id,
                wrong_file_id=self.Exceptions.WrongFileId)
        except AssertionError:
            parent_delete = FsEvent(DELETE,
                                    op.dirname(full_path),
                                    True,
                                    is_offline=True,
                                    quiet=True)
            self._on_event_arrived(parent_delete)
            raise

    def apply_patch(self, filename, patch, new_hash, old_hash, events_file_id):
        '''
        Applies given patch for the file specified

        @param filename Name of file relative to sync directory [unicode]
        @param patch Patch data [dict]
        '''
        full_fn = self._path_converter.create_abspath(filename)
        try:
            self._apply_patch(full_fn,
                              patch,
                              new_hash,
                              old_hash,
                              events_file_id=events_file_id)
        except AssertionError:
            # Parent dir is inconsistent: inject a quiet offline delete.
            parent_delete = FsEvent(DELETE,
                                    op.dirname(full_fn),
                                    True,
                                    is_offline=True,
                                    quiet=True)
            self._on_event_arrived(parent_delete)
            raise

    def accept_move(self,
                    src,
                    dst,
                    is_directory=False,
                    events_file_id=None,
                    is_offline=True):
        """Move src to dst (paths relative to sync dir) via quiet processor.

        @raise Exceptions.FileAlreadyExists, Exceptions.FileNotFound,
               Exceptions.WrongFileId
        """
        src_full_path = self._path_converter.create_abspath(src)
        dst_full_path = self._path_converter.create_abspath(dst)
        object_type = 'directory' if is_directory else 'file'

        try:
            logger.debug("Moving '%s' %s to '%s'...", src, object_type, dst)
            if is_directory:
                self._quiet_processor.move_directory(
                    src_full_path,
                    dst_full_path,
                    events_file_id,
                    self.Exceptions.FileAlreadyExists,
                    self.Exceptions.FileNotFound,
                    wrong_file_id=self.Exceptions.WrongFileId)
            else:
                self._quiet_processor.move_file(
                    src_full_path,
                    dst_full_path,
                    events_file_id,
                    self.Exceptions.FileAlreadyExists,
                    self.Exceptions.FileNotFound,
                    wrong_file_id=self.Exceptions.WrongFileId,
                    is_offline=is_offline)
            logger.info("'%s' %s is moved to '%s'", src, object_type, dst)
            self.file_removed_from_indexing.emit(FilePath(src_full_path), True)
        except AssertionError:
            # Destination parent is inconsistent: quiet offline delete.
            self._on_event_arrived(
                FsEvent(DELETE,
                        op.dirname(dst_full_path),
                        True,
                        is_offline=True,
                        quiet=True))
            raise

    def change_events_file_id(self, old_id, new_id):
        """Replace an events file id in storage with a new one."""
        self._storage.change_events_file_id(old_id, new_id)

    class Exceptions(object):
        """ User-defined exceptions are stored here """
        class FileNotFound(Exception):
            """ File doesn't exist exception"""
            def __init__(self, file):
                # Path of the missing file
                self.file = file

            def __str__(self):
                return repr(self.file)

        class FileAlreadyExists(Exception):
            """ File already exists exception (for move) """
            def __init__(self, path):
                # Destination path that already exists
                self.path = path

            def __str__(self):
                return "File already exists {}".format(self.path)

        class AccessDenied(Exception):
            """ Access denied exception (for move or delete) """
            def __init__(self, path):
                # Path the operation was denied for
                self.path = path

            def __str__(self):
                return "Access denied for {}".format(self.path)

        class WrongFileId(Exception):
            """ Wrong file id exception """
            def __init__(self, path, file_id_expected=None, file_id_got=None):
                self.path = path
                self.file_id_expected = file_id_expected
                self.file_id_got = file_id_got

            def __str__(self):
                return "Wrong file id for {}. Expected id {}. Got id {}".format(
                    self.path, self.file_id_expected, self.file_id_got)

        class CopyDoesNotExists(Exception):
            """ No stored copy exists for the given hash """
            def __init__(self, hash):
                self.hash = hash

            def __str__(self):
                return "Copy with hash {} does not exists".format(self.hash)

    def _apply_patch(self,
                     filename,
                     patch,
                     new_hash,
                     old_hash,
                     silent=True,
                     events_file_id=None):
        """Apply a patch file to filename and report stats to the tracker.

        @param filename Absolute path of the file to patch
        @param patch Path to the patch file
        @param new_hash Expected hash of the patched file
        @param old_hash Hash of the file before patching
        @param silent Suppress fs events for the change [bool]
        @param events_file_id Events file id for storage sync
        @raise Exceptions.WrongFileId and whatever patch_file raises
        """
        start_time = time.time()
        patch_size = os.stat(patch).st_size
        success = False
        try:
            patched_new_hash, old_hash = self._quiet_processor.patch_file(
                filename,
                patch,
                silent=silent,
                events_file_id=events_file_id,
                wrong_file_id=self.Exceptions.WrongFileId)
            assert patched_new_hash == new_hash
            success = True
            self.copy_added.emit(new_hash)
        except Rsync.AlreadyPatched:
            # Patch was applied earlier; treat as success.
            success = True
        # NOTE: the original had a bare 'except: raise' here, which is a
        # no-op (unhandled exceptions propagate anyway) — removed.
        finally:
            # Stats are reported even on failure; other exceptions still
            # propagate after the finally block runs.
            if self._tracker:
                try:
                    file_size = os.stat(filename).st_size
                except OSError:
                    file_size = 0
                duration = time.time() - start_time
                self._tracker.monitor_patch_accept(file_size, patch_size,
                                                   duration, success)

    def generate_conflict_file_name(self,
                                    filename,
                                    is_folder=False,
                                    name_suffix=None,
                                    with_time=True):
        """Generate a unique conflict name for the given relative path.

        The extension is taken as up to 2 trailing '.'-separated parts of
        the name that contain no spaces; a '(suffix date)' marker is
        inserted before the extension and the result is trimmed so the
        name fits into max_file_name_length bytes (UTF-8).

        @param filename Path relative to sync dir [unicode]
        @param is_folder Whether the path is a folder (no extension) [bool]
        @param name_suffix Marker text; conflict suffix used when None
        @param with_time Include current date in the marker [bool]
        @return Unique conflict path relative to sync dir [FilePath]
        """
        orig_filename = filename
        directory, filename = op.split(filename)
        original_ext = ''
        if is_folder:
            original_name = filename
        else:
            # consider ext as 2 '.'-delimited last filename substrings
            # if they don't contain spaces
            dots_list = filename.split('.')
            name_parts_len = len(dots_list)
            for k in range(1, min(name_parts_len, 3)):
                if ' ' in dots_list[-k]:
                    break

                # Bug fix: take parts from the tail (-k), matching the
                # check above. The old code used dots_list[k] (from the
                # head), so 'a.tar.gz' produced '.gz.tar' and
                # 'a.b.c.d' produced '.b' instead of '.d'.
                original_ext = '.{}{}'.format(dots_list[-k], original_ext)
                name_parts_len -= 1
            original_name = '.'.join(dots_list[:name_parts_len])

        index = 0
        if name_suffix is None:
            name_suffix = self._conflict_file_suffix
        date_today = date.today().strftime('%d-%m-%y') if with_time else ''
        suffix = '({} {})'.format(name_suffix, date_today)
        # Keep the marker to at most a third of the allowed byte length.
        while len(suffix.encode('utf-8')) > \
                int(self.max_file_name_length / 3):
            suffix = suffix[int(len(suffix) / 2):]

        name = '{}{}{}'.format(original_name, suffix, original_ext)
        # Shrink name (then extension) until the UTF-8 length fits.
        while True:
            to_cut = len(name.encode('utf-8')) - \
                     self.max_file_name_length
            if to_cut <= 0:
                break
            if len(original_name) > to_cut:
                original_name = original_name[:-to_cut]
            else:
                remained = to_cut - len(original_name) + 1
                original_name = original_name[:1]
                if remained < len(original_ext):
                    original_ext = original_ext[remained:]
                else:
                    original_ext = original_ext[int(len(original_ext) / 2):]
            name = '{}{}{}'.format(original_name, suffix, original_ext)

        # Append a numeric index until the name is unique on disk.
        while op.exists(
                self._path_converter.create_abspath(
                    FilePath(op.join(directory, name)))):
            index += 1
            name = '{}{} {}{}'.format(original_name, suffix, index,
                                      original_ext)
        conflict_file_name = FilePath(op.join(directory, name))
        logger.info(
            "Generated conflict file name: %s, original name: %s, "
            "is_folder: %s, name_suffix: %s, with_time: %s",
            conflict_file_name, orig_filename, is_folder, name_suffix,
            with_time)
        return conflict_file_name

    def move_file(self, src, dst, is_offline=True):
        """Rename src to dst (paths relative to sync dir).

        @raise Exceptions.FileNotFound, Exceptions.FileAlreadyExists,
               Exceptions.AccessDenied
        """
        src_full_path = self._path_converter.create_abspath(src)
        dst_full_path = self._path_converter.create_abspath(dst)
        # Directories are always treated as offline objects.
        if op.isdir(src_full_path):
            is_offline = True
        src_hard_path = self._quiet_processor.get_hard_path(
            src_full_path, is_offline)
        dst_hard_path = self._quiet_processor.get_hard_path(
            dst_full_path, is_offline)

        if not op.exists(src_hard_path):
            raise self.Exceptions.FileNotFound(src_full_path)
        if op.exists(dst_hard_path):
            raise self.Exceptions.FileAlreadyExists(dst_full_path)

        dst_parent_folder_path = op.dirname(dst_full_path)
        if not op.exists(dst_parent_folder_path):
            # Missing parent: report a quiet offline delete for it.
            self._on_event_arrived(
                FsEvent(DELETE,
                        dst_parent_folder_path,
                        True,
                        is_offline=True,
                        quiet=True))

        try:
            os.rename(src_hard_path, dst_hard_path)
        except OSError as e:
            logger.warning("Can't move file (dir) %s. Reason: %s",
                           src_full_path, e)
            if e.errno != errno.EACCES:
                raise e
            self._quiet_processor.access_denied()
            raise self.Exceptions.AccessDenied(src_full_path)

    def copy_file(self, src, dst, is_directory=False, is_offline=True):
        """Copy src to dst (relative paths); directories copied recursively.

        @raise Exceptions.FileNotFound
        """
        # Directories are always treated as offline objects.
        if is_directory:
            is_offline = True
        src_full_path = self._path_converter.create_abspath(src)
        dst_full_path = self._path_converter.create_abspath(dst)
        src_hard_path = self._quiet_processor.get_hard_path(
            src_full_path, is_offline)
        dst_hard_path = self._quiet_processor.get_hard_path(
            dst_full_path, is_offline)

        if not op.exists(src_hard_path):
            raise self.Exceptions.FileNotFound(src_full_path)

        if is_directory:
            shutil.copytree(src_full_path, dst_full_path)
        else:
            common.utils.copy_file(src_hard_path, dst_hard_path)

    def restore_file_from_copy(self,
                               file_name,
                               copy_hash,
                               events_file_id,
                               search_by_id=False):
        """Recreate the file content from a stored copy.

        @return Hash of the previous file content
        @raise Exceptions.WrongFileId, Exceptions.CopyDoesNotExists
        """
        try:
            old_hash = self._quiet_processor.create_file_from_copy(
                file_name,
                copy_hash,
                silent=True,
                events_file_id=events_file_id,
                search_by_id=search_by_id,
                wrong_file_id=self.Exceptions.WrongFileId,
                copy_does_not_exists=self.Exceptions.CopyDoesNotExists)
        except AssertionError:
            # Parent dir is inconsistent: inject a quiet offline delete.
            parent_dir = op.dirname(
                self._path_converter.create_abspath(file_name))
            self._on_event_arrived(
                FsEvent(DELETE, parent_dir, True, is_offline=True,
                        quiet=True))
            raise

        return old_hash

    def create_file_from_copy(self,
                              file_name,
                              copy_hash,
                              events_file_id,
                              search_by_id=False):
        """Create the file from a stored copy, ignoring the previous hash."""
        self.restore_file_from_copy(file_name,
                                    copy_hash,
                                    events_file_id=events_file_id,
                                    search_by_id=search_by_id)

    @benchmark
    def make_copy_from_existing_files(self, copy_hash):
        """Produce a copy with the given hash from files already on disk."""
        self._quiet_processor.make_copy_from_existing_files(copy_hash)

    def create_empty_file(self,
                          file_name,
                          file_hash,
                          events_file_id,
                          search_by_id=False,
                          is_offline=True):
        """Create an empty placeholder file registered under file_hash.

        @raise Exceptions.WrongFileId
        """
        try:
            self._quiet_processor.create_empty_file(
                file_name,
                file_hash,
                silent=True,
                events_file_id=events_file_id,
                search_by_id=search_by_id,
                wrong_file_id=self.Exceptions.WrongFileId,
                is_offline=is_offline)
        except AssertionError:
            # Parent dir is inconsistent: inject a quiet offline delete.
            parent_dir = op.dirname(
                self._path_converter.create_abspath(file_name))
            self._on_event_arrived(
                FsEvent(DELETE, parent_dir, True, is_offline=True,
                        quiet=True))
            raise

    def on_delete_copy(self, hash, with_signature=True):
        """Delete the copy file (and optionally its signature) for hash."""
        if not hash:
            logger.error("Invalid hash '%s'", hash)
            return

        copy = op.join(get_copies_dir(self._root), hash)
        try:
            remove_file(copy)
            logger.info("File copy deleted %s", copy)
            if with_signature:
                signature = op.join(get_signatures_dir(self._root), hash)
                remove_file(signature)
                logger.info("File copy signature deleted %s", signature)
        except Exception as e:
            logger.error(
                "Can't delete copy. "
                "Possibly sync folder is removed %s", e)
            self.possibly_sync_folder_is_removed()

    def delete_old_signatures(self, delete_all=False):
        """Purge outdated signature files from the signatures directory."""
        logger.debug("Deleting old signatures...")
        signatures_dir = get_signatures_dir(self._root)
        self._quiet_processor.delete_old_signatures(signatures_dir,
                                                    delete_all)

    def path_exists(self, path, is_offline=True):
        """Return True if the hard path for the relative path exists."""
        abs_path = self._path_converter.create_abspath(path)
        hard_path = self._quiet_processor.get_hard_path(abs_path, is_offline)
        return op.exists(hard_path)

    def rename_excluded(self, rel_path):
        """Rename an excluded dir using the selective-sync conflict suffix."""
        logger.debug("Renaming excluded dir %s", rel_path)
        conflict_name = self.generate_conflict_file_name(
            rel_path,
            name_suffix=self.selective_sync_conflict_suffix,
            with_time=False)
        self.move_file(rel_path, conflict_name)

    def _rename_file(self, abs_path):
        """Rename a file in place to a date-stamped conflict name."""
        rel_path = self._path_converter.create_relpath(abs_path)
        conflict_name = self.generate_conflict_file_name(rel_path,
                                                         is_folder=False,
                                                         name_suffix="",
                                                         with_time=True)
        self.move_file(rel_path, conflict_name)

    def db_file_exists(self):
        """Return True if the storage database file is present on disk."""
        return self._storage.db_file_exists()

    def _clean_recent_copies(self):
        """Remove leftover '*.recent_copy_N' temp files from the copies dir."""
        mask = op.join(get_copies_dir(self._root), "*.recent_copy_[0-9]*")
        # Plain loop instead of list(map(...)): side effects only, no
        # throwaway list (Perflint PERF101-style cleanup).
        for recent_copy in glob.glob(mask):
            os.remove(recent_copy)

    def add_special_file(self, path):
        """Register a special file; use the download watch when it lies
        outside the sync root."""
        self._special_files.append(path)
        inside_root = path in FilePath(self._root)
        watch = None if inside_root else self._download_watch
        self._observer.add_special_file(path, watch)

    def remove_special_file(self, path):
        """Unregister a special file previously added via add_special_file."""
        logger.debug("Removing special file %s...", path)
        if path not in FilePath(self._root):
            # Only files outside the root have their own observer entry.
            self._observer.remove_special_file(path)
        try:
            self._special_files.remove(path)
        except ValueError:
            logger.warning("Can't remove special file %s from list %s", path,
                           self._special_files)

    def change_special_file(self, old_file, new_file):
        """Replace tracking of old_file with new_file."""
        self.add_special_file(new_file)
        self.remove_special_file(old_file)

    def _on_event_arrived(self, fs_event, is_special=False):
        """Route an incoming fs event: special-file signal, whitespace
        rename, hidden-dir filtering, excluded-dir renaming or the
        actions queue.

        @param fs_event Arrived event [FsEvent]; may be mutated in place
        @param is_special Event came from the special-files watch [bool]
        """
        logger.debug(
            "Event arrived %s, special %s, online_processing_allowed: %s, "
            "online_modifies_processing_allowed: %s", fs_event, is_special,
            self._online_processing_allowed,
            self._online_modifies_processing_allowed)
        if is_special or fs_event.src in self._special_files:
            # Special files bypass normal processing entirely.
            self.special_file_event.emit(fs_event.src, fs_event.event_type,
                                         fs_event.dst)
        elif fs_event.is_offline or self._online_processing_allowed:
            if not self._online_modifies_processing_allowed and \
                    not fs_event.is_offline and fs_event.event_type == MODIFY:
                # Live MODIFY events are dropped until explicitly allowed.
                return
            elif fs_event.src in self._paths_with_modify_quiet \
                    and fs_event.event_type in (CREATE, MODIFY):
                # Quiet paths: force offline handling, suppress notifications.
                fs_event.is_offline = True
                fs_event.quiet = True

            # Only CREATE/MOVE introduce a (new) name that may need trimming.
            path = fs_event.src if fs_event.event_type == CREATE \
                else fs_event.dst if fs_event.event_type == MOVE else ""
            name = op.basename(path)
            parent_path = op.dirname(path)
            stripped_name = name.strip()
            if stripped_name != name:
                # Names with leading/trailing whitespace get renamed on disk.
                new_path = op.join(parent_path, stripped_name)
                if op.exists(new_path):
                    new_path = self.generate_conflict_file_name(
                        new_path,
                        is_folder=fs_event.is_dir,
                        name_suffix="",
                        with_time=True)
                logger.debug("Renaming '%s' to '%s'...", path, new_path)
                os.rename(FilePath(path).longpath, FilePath(new_path).longpath)

                path = new_path

                if fs_event.event_type == CREATE:
                    fs_event.src = new_path
                elif fs_event.event_type == MOVE:
                    fs_event.dst = new_path

            # Moves into/out of the hidden service dir (or '._*' files)
            # degrade to plain CREATE/DELETE events.
            hidden_dir = FilePath(
                self._path_converter.create_abspath(HIDDEN_DIRS[0]))
            if fs_event.event_type == MOVE:
                if FilePath(fs_event.src) in hidden_dir or \
                        op.basename(fs_event.src).startswith('._'):
                    fs_event.event_type = CREATE
                    fs_event.src = fs_event.dst
                    fs_event.dst = None
                elif FilePath(fs_event.dst) in hidden_dir or \
                        op.basename(fs_event.dst).startswith('._'):
                    fs_event.event_type = DELETE
                    fs_event.dst = None
            if FilePath(fs_event.src) in hidden_dir or \
                    op.basename(fs_event.src).startswith('._'):
                # Events for hidden/service files are ignored entirely.
                return

            if FilePath(path) in self._excluded_dirs:
                self.rename_excluded(self._path_converter.create_relpath(path))
            else:
                self._actions.add_new_event(fs_event)

    def get_long_paths(self):
        """Return the long paths tracked by the actions object (delegation)."""
        return self._actions.get_long_paths()

    def set_excluded_dirs(self, excluded_dirs):
        """Replace the excluded dirs list, wrapping each entry in FilePath."""
        self._excluded_dirs = [FilePath(d) for d in excluded_dirs]

    def remove_dir_from_excluded(self, directory):
        """
        Remove the given directory from the excluded dirs list.

        Best-effort: a missing entry is logged, not raised.
        """
        try:
            self._excluded_dirs.remove(directory)
        except ValueError as e:
            # list.remove raises only ValueError; catching it specifically
            # (instead of Exception) avoids masking unrelated bugs
            logger.warning("Can't remove excluded dir %s from %s. Reason: %s",
                           directory, self._excluded_dirs, e)

    def sync_events_file_id(self, file_path, events_file_id, is_folder):
        """Forward events_file_id sync for file_path to the quiet processor."""
        self._quiet_processor.sync_events_file_id(file_path, events_file_id,
                                                  is_folder)

    def sync_events_file_id_by_old_id(self, events_file_id,
                                      old_events_file_id):
        """Forward events_file_id re-mapping by old id to the quiet processor."""
        self._quiet_processor.sync_events_file_id_by_old_id(
            events_file_id, old_events_file_id)

    def set_collaboration_folder_icon(self, folder_name):
        """Apply the 'collaboration' custom icon to folder_name under root."""
        set_custom_folder_icon('collaboration', self._root, folder_name)

    def reset_collaboration_folder_icon(self, folder_name):
        """Remove the 'collaboration' custom icon from folder_name."""
        reset_custom_folder_icon(self._root,
                                 folder_name,
                                 resource_name='collaboration')

    def reset_all_collaboration_folder_icons(self):
        """
        Reset the collaboration icon on every top-level directory in the
        sync root.
        """
        root_folders = [
            f for f in os.listdir(self._root)
            if op.isdir(self._path_converter.create_abspath(f))
        ]
        logger.debug("root_folders %s", root_folders)
        # Plain loop instead of list(map(...)): the previous form built a
        # throwaway list purely for side effects
        for folder_name in root_folders:
            self.reset_collaboration_folder_icon(folder_name)

    def get_excluded_dirs_to_change(self,
                                    excluded_dirs,
                                    src_path,
                                    dst_path=None):
        """
        Compute how the excluded dirs list changes when src_path is moved
        (or deleted, when dst_path is None).

        Returns (dirs_to_delete, dirs_to_add): excluded dirs located under
        src_path, and the same dirs re-rooted under dst_path — the latter
        only when the destination itself does not fall into an excluded dir.
        """
        src_path = FilePath(src_path)
        if dst_path:
            dst_path = FilePath(dst_path)
        excluded_dirs = [FilePath(d) for d in excluded_dirs]
        dirs_to_delete = [ed for ed in excluded_dirs if ed in src_path]
        dirs_to_add = []
        if dst_path is not None and \
                not is_contained_in_dirs(dst_path, excluded_dirs):
            # we have to add new excluded dirs only if folder is not moved
            # to excluded dir
            src_len = len(src_path)
            dirs_to_add = [dst_path + d[src_len:] for d in dirs_to_delete]
        logger.debug(
            "get_excluded_dirs_to_change. "
            "excluded_dirs %s, src_path %s, dst_path %s, "
            "dirs_to_delete %s, dirs_to_add %s", excluded_dirs, src_path,
            dst_path, dirs_to_delete, dirs_to_add)
        return dirs_to_delete, dirs_to_add

    def change_excluded_dirs(self, dirs_to_delete, dirs_to_add):
        """Apply a computed excluded-dirs change: remove, then add."""
        for gone in dirs_to_delete:
            self.remove_dir_from_excluded(gone)
        self._excluded_dirs.extend(dirs_to_add)

    def clear_excluded_dirs(self):
        """Forget all excluded dirs."""
        self._excluded_dirs = []

    def get_fs_events_count(self):
        """Return the pending filesystem events count (delegation)."""
        return self._actions.get_fs_events_count()

    def force_create_copies(self):
        """
        Force regeneration of file copies: drop stored hashes/mtimes and
        all signatures, then re-run offline change processing.
        """
        self._storage.clear_files_hash_mtime()
        self.delete_old_signatures(delete_all=True)
        self._local_events_flag = True
        self.process_offline_changes()

    def get_file_list(self):
        """Return the current file list snapshot (delegation)."""
        return self._files_list.get()

    def get_actual_events_file_id(self, path, is_folder=None):
        """Return the stored events_file_id for path, or None if unknown."""
        known = self._storage.get_known_file(
            self._path_converter.create_abspath(path), is_folder=is_folder)
        return known.events_file_id if known else None

    def is_directory(self, path):
        """True when the given path resolves to an existing directory."""
        return op.isdir(self._path_converter.create_abspath(path))

    def set_waiting(self, to_wait):
        """Propagate the waiting flag to the actions object."""
        self._actions.set_waiting(to_wait)

    def set_path_quiet(self, path):
        """Mark path so its CREATE/MODIFY events are handled quietly."""
        logger.debug("Setting path %s quiet...", path)
        self._paths_with_modify_quiet.add(FilePath(path))

    def clear_paths_quiet(self):
        """Forget all paths previously marked quiet."""
        logger.debug("Clearing quiet paths...")
        self._paths_with_modify_quiet.clear()

    def delete_files_with_empty_events_file_ids(self):
        """Purge storage records lacking events_file_id; emit working if any."""
        if self._storage.delete_files_with_empty_events_file_ids():
            self.working.emit()

    def is_file_in_storage(self, events_file_id):
        """
        Truthy check for presence in storage.

        Note: returns the storage record (or None), not a strict bool.
        """
        return self._storage.get_known_file_by_id(events_file_id)
# Example #25
class Storage(object):
    """
    Interface for requesting info on registered files and folders.

    Backed by a sqlite database (via sqlalchemy) stored inside the sync
    root. Emits db_or_disk_full when the db layer reports that the
    database or disk is full.
    """
    def __init__(self, path_converter, db_file_created_cb=None):
        """
        Constructor

        @param path_converter Converter used to build absolute/relative
            paths inside the sync root
        @param db_file_created_cb Optional callable invoked whenever a new
            (empty) database file is created
        """
        self._pc = path_converter

        self.possibly_sync_folder_is_removed = Signal()
        self.db_or_disk_full = Signal()

        self._db_file = self._pc.create_abspath('.pvtbox/storage.db')
        logger.debug("DB file: %s", self._db_file)
        new_db_file = not exists(self._db_file)
        if new_db_file and callable(db_file_created_cb):
            db_file_created_cb()

        make_dirs(self._db_file)

        if not new_db_file:
            # Database migration. It can be executed before opening db
            try:
                upgrade_db("storage_db", db_filename=self._db_file)
            except Exception as e:
                # Unrecoverable migration failure: recreate db from scratch
                remove_file(self._db_file)
                new_db_file = True
                logger.warning(
                    "Can't upgrade storage db. "
                    "Reason: (%s) Creating...", e)
                if callable(db_file_created_cb):
                    db_file_created_cb()

        self._engine = create_engine('sqlite:///{}'.format(
            FilePath(self._db_file)),
                                     connect_args={
                                         'timeout': 60 * 1000,
                                         'check_same_thread': False,
                                     })
        self._engine.pool_timeout = 60 * 60 * 1000
        self._Session = sessionmaker(bind=self._engine)

        Base.metadata.create_all(self._engine, checkfirst=True)

        if new_db_file:
            try:
                stamp_db("storage_db", db_filename=self._db_file)
            except Exception as e:
                logger.error("Error stamping storage db: %s", e)

        self._lock = threading.RLock()

    @contextmanager
    def create_session(self, read_only=True, locked=False):
        """
        Context manager yielding a sqlalchemy session.

        Commits on success, rolls back on error. OperationalError caused
        by a full db/disk is converted to a db_or_disk_full signal instead
        of propagating.

        @param read_only Disable flushes for read-only usage
        @param locked Serialize write sessions with the storage lock
            (effective only when read_only is False)
        """
        session = self._Session()
        session.expire_on_commit = False
        session.autoflush = False
        if read_only:
            # Make flush a no-op so a read-only session never writes
            session.flush = lambda: None

        if not read_only and locked:
            logger.debug("session %s acquiring lock...", hex(id(session)))
            self._lock.acquire()
            logger.debug("session %s acquired lock.", hex(id(session)))

        try:
            yield session
            session.commit()
        except OperationalError as e:
            logger.warning("OperationalError: %s", e)
            try:
                session.rollback()
            except Exception as rollback_err:
                # FIX: do not rebind 'e' here - 'except ... as' unbinds its
                # target after the block, which made is_db_or_disk_full(e)
                # below raise NameError whenever rollback failed
                logger.warning(
                    "OperationalError, exception while trying to rollback session: %s",
                    rollback_err)
            if is_db_or_disk_full(e):
                self.db_or_disk_full.emit()
            else:
                raise
        except Exception as e:
            logger.warning("Exception: %s", e)
            session.rollback()
            raise
        finally:
            if not read_only and locked:
                self._lock.release()
                logger.debug("session %s released lock.", hex(id(session)))
            session.close()

    @with_session(True)
    def _get_known_paths(self,
                         is_folder,
                         parent_dir=None,
                         exclude_dirs=None,
                         session=None):
        """
        Return absolute paths of known files or folders.

        @param is_folder Select folders (True) or files (False)
        @param parent_dir Optional dir to limit results to
        @param exclude_dirs Optional dirs whose contents are skipped
        """
        query = session.query(File.relative_path)
        query = query.filter(File.is_folder == is_folder)
        paths = query.all()
        if parent_dir:
            parent_dir = self._pc.create_relpath(parent_dir)
            paths = [p for p in paths if is_contained_in(p[0], parent_dir)]
        if exclude_dirs:
            # FIX: keep only paths contained in NONE of the excluded dirs.
            # The previous per-dir loop appended a path once per
            # non-matching dir, producing duplicates and keeping paths
            # that were excluded by one dir but not another.
            paths = [
                p for p in paths
                if not any(is_contained_in(p[0], ed) for ed in exclude_dirs)
            ]

        return [FilePath(self._pc.create_abspath(x[0])) for x in paths]

    @benchmark
    def get_known_files(self,
                        parent_dir=None,
                        exclude_dirs=None,
                        session=None):
        """
        Returns absolute paths of files known at the moment.

        @param parent_dir Name of parent dir to limit results to [unicode]
        @param exclude_dirs Dirs whose contents are skipped
        @return Known files paths (absolute) [(unicode, )]
        """

        return self._get_known_paths(is_folder=False,
                                     parent_dir=parent_dir,
                                     exclude_dirs=exclude_dirs,
                                     session=session)

    @benchmark
    def get_known_folders(self,
                          parent_dir=None,
                          exclude_dirs=None,
                          session=None):
        """
        Returns absolute paths of folders known at the moment

        @param parent_dir Name of parent dir to limit results to [unicode]
        @param exclude_dirs Dirs whose contents are skipped
        @return Known folders paths (absolute) [(unicode, )]
        """

        return self._get_known_paths(is_folder=True,
                                     parent_dir=parent_dir,
                                     exclude_dirs=exclude_dirs,
                                     session=session)

    @with_session(True)
    def get_known_file(self, abs_path, is_folder=None, session=None):
        """
        Return the File record for abs_path, or None.

        @param is_folder Optional filter: restrict match to folders/files
        """
        rel_path = self._pc.create_relpath(abs_path)

        query = session.query(File).filter(File.relative_path == rel_path)
        if is_folder is not None:
            # FIX: Query.filter returns a NEW query; the result was
            # previously discarded, so the is_folder filter never applied
            query = query.filter(File.is_folder == is_folder)

        return query.one_or_none()

    @with_session(True)
    def get_known_file_by_id(self, file_id, session=None):
        """Return the File record with the given events_file_id, or None."""
        return session.query(File)\
            .filter(File.events_file_id == file_id)\
            .one_or_none()

    @with_session(False)
    def get_new_file(self, abs_path, is_folder, session=None):
        """Build (without persisting) a new File record for abs_path."""
        rel_path = self._pc.create_relpath(abs_path)

        file = File(relative_path=rel_path, is_folder=is_folder)

        return file

    def update_file_signature(self, file, signature):
        """Pickle the signature to the file's signature path on disk."""
        signature_path = self._pc.create_abspath(file.signature_rel_path)
        make_dirs(signature_path)
        with open(signature_path, 'wb') as f:
            # protocol=2 keeps the on-disk format py2-compatible
            dump(signature, f, protocol=2)

    def get_file_signature(self, file):
        """Load the pickled signature for file; None when missing/corrupt."""
        abs_path = self._pc.create_abspath(file.signature_rel_path)
        try:
            with open(abs_path, 'rb') as f:
                return load(f)
        except (IOError, OSError, EOFError):
            return None

    @with_session(False)
    def save_file(self, file, session=None):
        """Merge (insert or update) the File record; return managed copy."""
        return session.merge(file)

    @with_session(False)
    def delete_file(self, file, session=None):
        """Delete the given File record."""
        session.delete(file)

    def clean(self):
        """Delete all file records; re-raise only when the db file exists."""
        try:
            self._engine.execute("delete from files")
            logger.info("Cleaned storage data base")
        except Exception as e:
            logger.error("Failed to clean DB (%s)", e)
            if not self.db_file_exists():
                # bare raise preserves the original traceback
                raise

    @with_session(False)
    def delete_directories(self, dirs=None, session=None):
        """
        Delete records for everything contained in the given dirs.

        FIX: default was a mutable [] (shared across calls); None with a
        falsy check is equivalent and safe.

        @param dirs Absolute dir paths to purge
        @return Relative paths of deleted file (non-folder) records
        """
        paths_deleted = []
        if not dirs:
            return paths_deleted

        files = session.query(File).all()
        dirs_rel = [self._pc.create_relpath(p) for p in dirs]
        for file in files:
            if is_contained_in_dirs(file.relative_path, dirs_rel):
                if not file.is_folder:
                    paths_deleted.append(file.relative_path)
                session.delete(file)
        return paths_deleted

    def db_file_exists(self):
        """True when the db file exists and is non-empty."""
        return exists(self._db_file) and getsize(self._db_file) > 0

    @with_session(False, True)
    def change_events_file_id(self, old_id, new_id, session=None):
        """Rewrite events_file_id old_id -> new_id on the matching record."""
        file = self.get_known_file_by_id(old_id, session=session)
        if file:
            file.events_file_id = new_id
            logger.debug("Changed events_file_id for %s from %s to %s",
                         file.relative_path, old_id, new_id)
        else:
            logger.warning("Could not find file with events_file_id = %s",
                           old_id)

    @with_session(True)
    def get_known_folder_children(self, parent_dir_rel_path, session=None):
        """Return the folder record plus all records under it."""
        path_like = parent_dir_rel_path + '/%'
        children = session.query(File)\
            .filter(
            or_(File.relative_path == parent_dir_rel_path,
                File.relative_path.like(path_like)))\
            .all()
        return children

    @with_session(False, True)
    def delete_known_folder_children(self, parent_dir_rel_path, session=None):
        """Delete the folder record plus all records under it."""
        path_like = parent_dir_rel_path + '/%'
        session.query(File)\
            .filter(
            or_(File.relative_path == parent_dir_rel_path,
                File.relative_path.like(path_like)))\
            .delete(synchronize_session=False)

    @with_session(False, True)
    def move_known_folder_children(self,
                                   old_dir_rel_path,
                                   new_dir_rel_path,
                                   session=None):
        """Re-root relative paths of a folder and its children in bulk."""
        path_like = old_dir_rel_path + '/%'
        files = session.query(File) \
            .filter(
            or_(File.relative_path == old_dir_rel_path,
                File.relative_path.like(path_like))) \
            .all()
        mappings = [{
            'id':
            f.id,
            'relative_path':
            FilePath(
                join(new_dir_rel_path,
                     relpath(f.relative_path, old_dir_rel_path)))
        } for f in files]
        session.bulk_update_mappings(File, mappings)

    @with_session(True)
    def hash_in_storage(self, file_hash, session=None):
        """True when any record has this hash; None for a falsy hash."""
        if not file_hash:
            return None

        files_count = session.query(func.count())\
            .select_from(File)\
            .filter(File.file_hash == file_hash)\
            .scalar()
        return files_count > 0

    @with_session(False, True)
    def clear_files_hash_mtime(self, session=None):
        """Reset hash and mtime on all non-folder records."""
        session.execute(
            update(File).where(File.is_folder == 0).values(file_hash=None,
                                                           mtime=0))

    @with_session(True)
    def get_last_files(self, limit, offset=0, session=None):
        """Return up to limit newest (by mtime) file records."""
        files = session.query(File) \
            .filter(File.is_folder == 0) \
            .order_by(File.mtime.desc()) \
            .offset(offset).limit(limit) \
            .all()
        return files

    def get_file_by_hash(self, hash, exclude, session):
        """Return any record with the given hash whose id is not excluded."""
        return session.query(File) \
            .filter(File.file_hash == hash) \
            .filter(File.id.notin_(exclude)) \
            .first()

    @with_session(False, True)
    def delete_files_with_empty_events_file_ids(self, session=None):
        """
        Delete all records whose events_file_id is NULL.

        Folder records are purged together with their children.
        @return True when anything was deleted
        """
        files_with_empty_ids = session.query(File) \
            .filter(File.events_file_id.is_(None)) \
            .all()
        for file in files_with_empty_ids:
            if file.is_folder:
                self.delete_known_folder_children(file.relative_path,
                                                  session=session)
                type_str = "folder"
            else:
                session.delete(file)
                type_str = "file"
            logger.debug("Deleted %s %s with empty events_file_id", type_str,
                         file.relative_path)
        return bool(files_with_empty_ids)
# Example #26
    def __init__(self,
                 cfg,
                 main_cfg,
                 start_service,
                 exit_service,
                 parent=None,
                 size=None,
                 migrate=False,
                 dp=1,
                 get_offline_dirs=lambda: None,
                 set_offline_dirs=lambda o, no: None):
        """
        Build the settings dialog.

        @param cfg Config exposing at least license_type and host
        @param main_cfg Main application config
        @param start_service Callable to start the service
        @param exit_service Callable to stop the service
        @param parent Parent widget for the dialog
        @param size Initial size hint (usage not visible here)
        @param migrate Migration mode flag — presumably sync-dir
            migration; confirm against callers
        @param dp Scale factor (presumably device pixels) — confirm
        @param get_offline_dirs Callable fetching offline dirs
        @param set_offline_dirs Callable storing offline dirs
        """
        super(Settings, self).__init__()
        self._cfg = cfg
        self._main_cfg = main_cfg
        self._start_service = start_service
        self._exit_service = exit_service
        self._parent = parent
        self._size = size
        self._dp = dp
        self._get_offline_dirs = get_offline_dirs
        self._set_offline_dirs = set_offline_dirs

        # Qt dialog and generated UI setup
        self._dialog = QDialog(parent)
        self._dialog.setWindowIcon(QIcon(':/images/icon.png'))
        self._dialog.setAttribute(Qt.WA_MacFrameworkScaled)
        self._ui = settings.Ui_Dialog()
        self._ui.setupUi(self._dialog)
        self._max_root_len = get_max_root_len(self._cfg)
        self._migrate = migrate
        self._migration = None
        self._migration_cancelled = False

        # Show account type when the license constant is known;
        # license_display_name_from_constant raises KeyError otherwise
        try:
            self._ui.account_type.setText(
                license_display_name_from_constant(self._cfg.license_type))
            self._ui.account_type.setVisible(True)
            self._ui.account_type_header.setVisible(True)
            self._ui.account_upgrade.setVisible(True)
        except KeyError:
            pass
        # Offer an upgrade link only for free/trial licenses
        upgrade_license_types = (FREE_LICENSE, FREE_TRIAL_LICENSE)
        if self._cfg.license_type in upgrade_license_types:
            self._ui.account_upgrade.setText('<a href="{}">{}</a>'.format(
                GET_PRO_URI.format(self._cfg.host), tr('Upgrade')))
            self._ui.account_upgrade.setTextFormat(Qt.RichText)
            self._ui.account_upgrade.setTextInteractionFlags(
                Qt.TextBrowserInteraction)
            self._ui.account_upgrade.setOpenExternalLinks(True)
            self._ui.account_upgrade.setAlignment(Qt.AlignLeft)
        else:
            self._ui.account_upgrade.setText("")

        self._ui.centralWidget.setFrameShape(QFrame.NoFrame)
        self._ui.centralWidget.setLineWidth(1)

        # Only English is available; the combo box is shown but disabled
        self._ui.language_comboBox.addItem(tr('English'))
        self._ui.language_comboBox.setEnabled(False)

        self._connect_slots()
        self._set_fonts()
        self._ui.tabWidget.setCurrentIndex(0)

        self._smart_sync_dialog = None

        self.logged_out = Signal(bool)
        self.logging_disabled_changed = Signal(bool)

        # FIXME: without line below app crashes on exit after settings opened
        self._dialog.mousePressEvent = self.on_mouse_press_event
# Example #27
class EventStrategy(object):
    """
    Describe the handling strategy for file events
    """
    DUMMY_PAGE_SIZE = 500

    def __init__(self,
                 db,
                 event,
                 get_download_backups_mode,
                 is_smart_sync=False):
        """
        @param db Events database facade
        @param event Event to be handled (may be None)
        @param get_download_backups_mode Callable returning the current
            backups-download mode
        @param is_smart_sync Whether smart sync is enabled
        """
        super(EventStrategy, self).__init__()
        self.db = db
        self.event = event
        if event:
            self.event_id = event.id
            self.file_id = event.file.id if event.file else 0
        else:
            self.event_id = 0
            self.file_id = 0
        self._is_smart_sync = is_smart_sync
        self._cached_file_path = None
        self._events_queue = None
        self._download_backups = get_download_backups_mode()
        self._force_move = False

        # args: (local count, remote count)
        self.change_processing_events_counts = Signal(int, int)
        self.append_local_event = Signal(Event, str, str, int, bool)
        self.rename_or_delete_dst_path = Signal(str, int, Session, bool)

    ''' Public methods templates ==============================================
    '''

    @atomic
    def apply(self,
              session=None,
              fs=None,
              excluded_dirs=None,
              patches_storage=None,
              collaborated_folders=(),
              events_queue=None):
        """
        Apply this event atomically: move the file if needed, run the
        strategy-specific _apply_event hook, then record the event as the
        file's actual state.

        @param session Db session (supplied by @atomic)
        @param fs Filesystem facade
        @param excluded_dirs Dirs excluded from sync
        @param patches_storage Patches storage facade
        @param collaborated_folders Folders with collaboration enabled
        @param events_queue Events queue (kept for later use by hooks)
        """
        event = self.event
        assert event.file_id

        logger.debug('applying %s', self)
        change_name = True
        parent_found = True
        if event.type != 'delete':
            self._events_queue = events_queue
            change_name, parent_found = self._apply_move_if_needed(
                session, fs, excluded_dirs, patches_storage, events_queue)

        # Skip _apply_event when the parent folder is missing
        if parent_found:
            self._apply_event(session, fs, excluded_dirs, patches_storage)
        if event.state == 'received' and not event.file.excluded:
            # update file strategy cannot apply patch
            return

        logger.debug('after _apply_event %s', self)
        self._set_actual_file_state_to_this(
            session,
            update_file_event=not event.file.excluded,
            change_name=change_name)

        if event.is_folder and event.type != 'delete':
            self.set_collaborated_folder_icon(session, fs,
                                              collaborated_folders)
        self.db.expunge_parents(event.file, session)
        logger.debug('applied %s', self)

    def _apply_event(self, session, fs, excluded_dirs, patches_storage):
        """Hook: apply the event to the filesystem; overridden by subclasses."""
        pass

    def _create_file_from_copy(self, path, fs, search_by_id=False):
        """Hook: create a file at path from a copy; overridden by subclasses."""
        pass

    def _apply_move_if_needed(self, session, fs, excluded_dirs,
                              patches_storage, events_queue):
        """
        Move/rename the file when the event's folder or name differs from
        the file's current state, honouring excluded dirs.

        @return (change_name, parent_found) tuple of bools: whether the
            caller should update the file name, and whether the parent
            folder exists
        @raise SkipEventForNow When the move cannot be done yet
        @raise SkipExcludedMove When the destination is excluded and
            excluded processing was not enabled via set_excluded_ready
        """
        event = self.event
        assert event.file_id
        parent_found = True
        folder = self.find_folder_by_uuid(session, event.folder_uuid)
        # Nothing to do when neither parent folder nor name changed
        if folder == event.file.folder and event.file_name == event.file.name:
            return True, parent_found

        move_events = list(
            filter(
                lambda e: e.server_event_id and e.type == 'move' and
                (not event.server_event_id or e.server_event_id > event.
                 server_event_id), event.file.events))
        if move_events and not self._force_move and event.is_folder:
            # skip this if we have subsequent moves
            return False, parent_found

        # Calculate object path for further use
        event_path = event.file.path

        if folder and not folder.is_existing and not folder.excluded:
            logger.debug("Parent folder does not exist for %s", event_path)
            parent_found = False
            if self._process_parent_not_found(session):
                fs.accept_delete(event_path,
                                 is_directory=event.is_folder,
                                 events_file_id=event.file_id,
                                 is_offline=event.file.is_offline)
            return True, parent_found

        logger.debug('moving %s', event.file)
        new_path = ('/'.join([folder.path, event.file_name])
                    if folder else event.file_name)

        # Check whether event paths are excluded from sync
        is_path_excluded = is_contained_in_dirs(event_path, excluded_dirs)
        is_new_path_excluded = is_contained_in_dirs(new_path, excluded_dirs)

        # Both source and destination paths are excluded
        if is_path_excluded and is_new_path_excluded:
            assert False, 'Excluded-excluded must never occur'
        # None of source and destination paths are excluded
        elif not is_path_excluded and not is_new_path_excluded:
            # Regular move event processing
            try:
                fs.accept_move(event_path,
                               new_path,
                               is_directory=event.is_folder,
                               events_file_id=event.file_id,
                               is_offline=event.file.is_offline)
            except fs.Exceptions.FileAlreadyExists:
                # Destination occupied: try to rename/remove it, then retry
                if event.file.event_id and not event.file.is_deleted:
                    if not self._rename_or_delete_dst_path(
                            new_path, session, event.file.is_offline):
                        raise SkipEventForNow()
                    else:
                        # retry move after renaming new path
                        return self._apply_move_if_needed(
                            session, fs, excluded_dirs, patches_storage,
                            events_queue)
            except fs.Exceptions.FileNotFound:
                subsequent_local_moves_deletes = list(
                    filter(
                        lambda ev: ev.id > event.id and ev.type in
                        ('delete', 'move') and ev.state in
                        ('occured', 'conflicted', 'sent'), event.file.events))
                if not subsequent_local_moves_deletes and \
                        not self.check_previous_delete(
                            session, events_queue, fs):
                    # file/folder moved or deleted locally and
                    # no events in db for now
                    # so wait
                    logger.warning("Source file (folder) %s not found.",
                                   event_path)
                    raise SkipEventForNow()
            except fs.Exceptions.WrongFileId:
                if not self.event.is_folder or \
                        not self._apply_folder_delete_if_any(session, fs):
                    raise SkipEventForNow()

                # retry move after deleting folder
                return self._apply_move_if_needed(session, fs, excluded_dirs,
                                                  patches_storage,
                                                  events_queue)
            except Exception as e:
                # ignore move if file is unavailable
                logger.warning("Can't move file (folder) %s. Reason %s",
                               event_path, e)
                raise SkipEventForNow()

            event.file.name = event.file_name
            event.file.folder = folder
            if folder:
                event.file.folder_id = folder.id
        # Source path is excluded
        elif is_path_excluded and not is_new_path_excluded:
            # Moving OUT of an excluded dir: materialize the object at the
            # destination instead of moving
            self.event.file.excluded = False
            self.event.file.folder = folder
            if event.is_folder:
                # Create directory at destination path
                fs.create_directory(new_path, self.event.file_id)
            else:
                # Create file at destination path
                if self.event.file_size:
                    self._create_file_from_copy(new_path, fs)
                else:
                    fs.create_empty_file(new_path,
                                         self.event.file_hash,
                                         self.event.file_id,
                                         is_offline=self.event.file.is_offline)
        # Destination path is excluded
        elif not is_path_excluded and is_new_path_excluded:
            # Requires explicit opt-in via set_excluded_ready
            if not hasattr(self, '_excluded_ready') or \
                    not self._excluded_ready:
                self._excluded_ready = False
                raise SkipExcludedMove

            self.event.file.excluded = True
            self.event.file.event_id = None
            if not self.event.is_folder:
                self.event.state = 'received'
            else:
                self.db.mark_child_excluded(self.event.file_id, session)

            # Delete object at source path
            fs.accept_delete(event_path,
                             is_directory=event.is_folder,
                             is_offline=event.file.is_offline)
        return True, parent_found

    def _rename_or_delete_dst_path(self, path, session, is_offline=True):
        """
        Request renaming/removal of an occupied destination path via the
        rename_or_delete_dst_path signal.

        @return False when the handler reports RenameDstPathFailed,
            True otherwise
        """
        try:
            self.rename_or_delete_dst_path.emit(
                path, self.event.file_id, session, is_offline)
        except RenameDstPathFailed:
            return False
        else:
            return True

    @db_read
    def ready_to_apply(self, session=None, is_deleted=False):
        """Read-only wrapper around _ready_to_apply."""
        return self._ready_to_apply(session, is_deleted=is_deleted)

    @benchmark
    def _ready_to_apply(self, session, is_deleted=False, files=None):
        """
        Core readiness check: file not locally modified, not deleted,
        event in an applicable state, and (for non-folders) skippable.
        """
        ready = (not self.event.file.is_locally_modified and
                 (self.event.is_folder or self.ready_to_skip(session=session))
                 and not is_deleted
                 and self.event.state in ('registered', 'sent', 'downloaded'))

        return ready

    @atomic
    def ready_to_register(self, session):
        """Atomic wrapper around _ready_to_register."""
        return self._ready_to_register(session)

    def _ready_to_register(self, session):
        """Hook for subclasses; the base strategy never registers events."""
        return False

    @benchmark
    @db_read
    def ready_to_skip(self, session=None):
        """
        Whether this event may be skipped given the file's current state.

        True when the event is persisted and the file either has no
        current event, already matches this event's predecessor, was
        deleted, or this event's predecessor was the last skipped one.
        """
        event = self.event
        file = event.file
        file_event = file.event
        if file.event_id and not file.event:
            # relationship not populated; load the file's current event
            file_event = session.query(Event) \
                .filter(Event.id == file.event_id) \
                .one_or_none()
        return (event.id and
                (not file_event or file.event_id == event.last_event_id
                 or file_event.type == 'delete' or not event.last_event_id or
                 (file.last_skipped_event_id
                  and file.last_skipped_event_id == event.last_event_id)))

    @db_read
    def skip_if_file_will_be_deleted(self, session):
        """
        Whether to skip this event because its file is already gone.

        Should be overridden in a concrete strategy if it can be skipped.
        """
        return not self.event.file.is_existing

    @atomic
    def skip(self, session, min_server_event_id=0, fs=None):
        """
        Mark this event as skipped.

        A delete event becomes the file's current event; any other event
        is recorded as last skipped, and a dummy delete is appended when
        the file has no delete event and min_server_event_id is given.
        """
        if self.event.type == 'delete':
            self.event.file.event_id = self.event.id
            if fs:
                fs.sync_events_file_id_by_old_id(None, self.event.file_id)
        else:
            self.event.file.last_skipped_event_id = self.event.id
            # add dummy delete if there is no delete event for file
            file = session.query(File) \
                .filter(File.id == self.event.file_id) \
                .one_or_none()
            if min_server_event_id and file:
                delete_events = list(
                    filter(lambda e: e.type == 'delete', file.events))
                if not delete_events:
                    self._add_dummy_delete(file, self.event,
                                           min_server_event_id, session)

    def postpone_after_save(self):
        """
        Hook: return True when the event must be postponed after being
        saved in db. The base strategy never postpones.
        """
        return False

    @db_read
    def file_will_be_deleted(self, session, file=None):
        """Whether the given file (default: this event's file) ends in a
        delete event."""
        target = file or self.event.file
        return self._file_will_be_deleted(session=session, file=target)

    @benchmark
    def _file_will_be_deleted(self, session, file):
        """Truthy when the file's newest event is a deletion."""
        events = file.events
        return events and events[-1].type == 'delete'

    @db_read
    def get_old_uuid(self, session):
        """Return the uuid of the event's predecessor, or None."""
        previous = self.event.last_event
        return previous.uuid if previous else None

    def set_collaborated_folder_icon(self, session, fs, collaborated_folders):
        """Hook: mark collaborated folders with an icon; overridden by
        subclasses."""
        pass

    def set_excluded_ready(self):
        """Allow processing of a move whose destination dir is excluded."""
        self._excluded_ready = True

    ''' Utility functions ===================================================
    '''

    def _apply_patch(self, fs, file_path, patch_uuid):
        """
        Apply the patch identified by patch_uuid to file_path via fs.

        @return True on success, False (after logging) on any failure
        """
        try:
            fs.apply_patch(file_path,
                           fs.get_patch_path(patch_uuid),
                           self.event.file_hash,
                           self.event.file_hash_before_event,
                           self.event.file_id)
        except Exception as e:
            logger.error("Can't apply patch %s for file %s. Error %s",
                         patch_uuid, file_path, e)
            return False
        else:
            return True

    def _get_last_nonconflicted_state(self, session, fs):
        """Return the newest synced ('sent'/'downloaded'), non-delete event
        of this file that precedes the current one.

        NOTE(review): despite the old docstring, no conflicted copy is made
        here - the caller is expected to do that.
        """
        assert self.event.file_id
        assert self.event.last_event_id, \
            'Getting last nonconflicted state for create event'

        query = (session.query(Event)
                 .filter(Event.server_event_id.isnot(None))
                 .filter(Event.file_id == self.event.file_id)
                 .filter(Event.state.in_(['sent', 'downloaded']))
                 .filter(Event.type != 'delete')
                 .filter(Event.id <= self.event.last_event_id)
                 .order_by(Event.id.desc()))
        result = query.first()

        assert result is not None, \
            'Probably getting last nonconflicted state for create event'
        return result

    def _set_actual_file_state_to_this(self,
                                       session,
                                       update_file_event=True,
                                       change_name=True):
        """Point the file's current state at this event.

        Optionally updates file.event(_id) and, for create/move events,
        the stored name and parent folder reference.
        """
        file = self.event.file

        if update_file_event:
            file.event = self.event
            file.event_id = self.event.id
            file.last_skipped_event_id = None

        if change_name and self.event.type in ('create', 'move'):
            file.name = self.event.file_name
            if not file.folder:
                parent = self.find_folder_by_uuid(session,
                                                  self.event.folder_uuid)
                file.folder_id = parent.id if parent else None

        logger.debug(
            "_set_actual_file_state_to_this. "
            "event_id %s, file.event_id %s, file.name %s", self.event.id,
            file.event_id, file.name)

    def find_folder_by_uuid(self, session, uuid):
        """Return the folder File row with the given uuid.

        @param session Active db session
        @param uuid Folder uuid; falsy means root and yields None
        @raise FolderUUIDNotFound when no single matching folder exists
        """
        if not uuid:
            return None

        try:
            folder = session.query(File) \
                .filter(File.is_folder) \
                .filter(File.uuid == uuid) \
                .one()
        except Exception:
            # Fix: was a bare 'except:', which also swallowed SystemExit
            # and KeyboardInterrupt; narrow to Exception so only lookup
            # failures (e.g. NoResultFound) are translated.
            raise FolderUUIDNotFound(uuid)

        return folder

    def _get_target_path(self, session):
        """Return the filesystem path the event targets: the file's current
        path for locally-originated states, otherwise the path rebuilt
        from the event chain in db."""
        local_states = ('occured', 'sent', 'conflicted')
        if self.event.state in local_states:
            return self.event.file.path
        return self.db.get_path_from_event(self.event, session)

    def __str__(self):
        # "<ConcreteStrategyName>: <event>"
        return '%s: %s' % (type(self).__name__, self.event)

    def check_event_path_excluded(self, excluded_dirs):
        """Base strategies never treat the event path as excluded."""
        return False

    @db_read
    def get_file_path(self, session):
        """Return the event file's path, caching it on first access."""
        cached = self._cached_file_path
        if not cached:
            cached = self.event.file.path
            self._cached_file_path = cached
        return cached

    @atomic
    def event_newer_than_applied(self, session):
        """True-ish when this event supersedes the file's applied event.

        Truthiness of the original expression is preserved: when the file
        already has an applied event the raw sub-expression is returned.
        """
        applied = self.event.file.event
        if not applied:
            return True
        return self.event.server_event_id and \
            (not applied.server_event_id or
             self.event.server_event_id > applied.server_event_id)

    def make_conflicting_copy(self, fs):
        """Create a conflicted copy for this event; must be overridden.

        @raise NotImplementedError always, in this base implementation
        """
        # Fix: 'raise NotImplemented()' raised a TypeError because
        # NotImplemented is a constant, not an exception class.
        raise NotImplementedError()

    @benchmark
    def is_event_skipped(self, session):
        """True-ish when this event precedes the file's last skipped one."""
        event = self.event
        try:
            session.expire(event)
        except Exception:
            # Instance is detached from this session - reload it instead.
            event = session.query(Event).filter(Event.id == event.id).one()

        last_skipped = event.file.last_skipped_event_id
        return last_skipped and event.id < last_skipped

    def force_move(self):
        # Ask processing to treat this event as a forced move.
        self._force_move = True

    def _add_dummy_delete_events(self, session=None):
        """Queue dummy delete events for all live children of this folder."""
        assert self.event.file.is_folder

        self._dummy_deletes = []
        self.db.get_files_by_folder_uuid(self.event.file.uuid,
                                         self._files_page_processor_cb,
                                         include_folders=True,
                                         include_deleted=False,
                                         session=session)
        # Flush whatever the page callback accumulated.
        self._save_dummy_delete_events(session)

    def _files_page_processor_cb(self, files_page, folders_uuids, session):
        """Page callback for get_files_by_folder_uuid: queue dummy delete
        events for files not already deleted (or being deleted) and not
        moved out of the deleted subtree.

        @param files_page Batch of File rows to examine
        @param folders_uuids Uuids of folders belonging to the subtree
        @param session Active db session
        @raise ProcessingAborted when event processing was stopped
        """
        file_ids = [f.id for f in files_page]
        all_events = session.query(Event) \
                .filter(Event.file_id.in_(tuple(file_ids))).all()
        for file in files_page:
            if self._events_queue.is_processing_stopped():
                raise ProcessingAborted
            events = filter(lambda e: e.file_id == file.id, all_events)
            # Newest first by server_event_id; local-only events
            # (server_event_id falsy) sort to the end.
            events = sorted(events,
                            key=lambda e: e.server_event_id
                            if e.server_event_id else 0,
                            reverse=True)
            # Skip the file when its newest event is a delete, when its
            # oldest event is a dummy delete (negative server id), or when
            # a synced move took it outside this subtree.
            if events and not (
                    events[0].type == 'delete' or events[-1].type == 'delete'
                    and events[-1].server_event_id
                    and events[-1].server_event_id < 0
                    or events[0].type == 'move' and events[0].server_event_id
                    and events[0].folder_uuid not in folders_uuids):
                min_server_event_id = self._events_queue\
                    .get_min_server_event_id()
                session.expire(file)
                if not file.uuid:
                    # Local-only file: mint a uuid so the dummy delete and
                    # the file's history reference the same object.
                    file.uuid = generate_uuid()
                    for one_event in events:
                        one_event.file_uuid = file.uuid
                self._add_dummy_delete(file, events[0], min_server_event_id,
                                       session)
                self._events_queue.cancel_file_download(file.id, session)
                self._events_queue.change_processing_events_counts(
                    remote_inc=1)
                self._events_queue.events_added.set()
                # Flush in pages to bound memory usage.
                if len(self._dummy_deletes) >= self.DUMMY_PAGE_SIZE:
                    self._save_dummy_delete_events(session)

        self._events_queue.allow_loading_remotes()

    def _save_dummy_delete_events(self, session):
        """Bulk-insert accumulated dummy delete mappings, then clear them."""
        if not self._dummy_deletes:
            return

        logger.debug("Saving %s dummy deletes in db",
                     len(self._dummy_deletes))
        try:
            session.bulk_insert_mappings(Event, self._dummy_deletes)
        finally:
            # Never retry the same batch, even when the insert failed.
            self._dummy_deletes = []

    def _add_dummy_delete(self,
                          file,
                          event,
                          server_event_id,
                          session,
                          add_to_dummies=True):
        """Create a synthetic 'delete' event for file, mirroring fields of
        the given event.

        @param file File row the dummy delete refers to
        @param event Event whose name/size/uuid fields the dummy mirrors
        @param server_event_id Server event id assigned to the dummy
            (callers pass a min/negative id for locally generated deletes)
        @param session Active db session
        @param add_to_dummies When True the event is stored: batched via
            self._dummy_deletes when that attribute exists, otherwise
            added to the session directly
        @return The new (unsaved when batched) delete Event
        """
        msg = {
            'event_id': server_event_id,
            'event_type': 'delete',
            'is_folder': file.is_folder,
            'uuid': file.uuid,
            'event_uuid': event.uuid,
            'file_name': event.file_name,
            'file_name_before_event': event.file_name,
            'file_size': event.file_size,
            'last_event_id': event.server_event_id,
            'file_hash_before_event': event.file_hash,
            'parent_folder_uuid': event.folder_uuid,
            'timestamp': calendar.timegm(event.timestamp.utctimetuple()),
        }
        logger.debug("Formed dummy delete message '%s'...", msg)

        new_event, _ = deserialize_event(msg)
        new_event.last_event_id = event.id
        new_event.file_id = file.id
        # Marked 'downloaded' so the dummy is processed as a remote event.
        new_event.state = 'downloaded'
        if add_to_dummies:
            if hasattr(self, "_dummy_deletes"):
                # many dummy deletes
                self._dummy_deletes.append(self.db.get_mapping(new_event))
            else:
                # one dummy delete
                session.add(new_event)
        return new_event

    def _update_excluded_dirs(self,
                              fs,
                              excluded_dirs,
                              session=None,
                              signals=None,
                              change_in_db=True):
        """Recompute the excluded-dirs set affected by this folder's
        delete or move event and notify via signals.

        @param fs Filesystem facade providing get_excluded_dirs_to_change
        @param excluded_dirs Current excluded dir paths; empty - no-op
        @param session Active db session (optional)
        @param signals Object exposing change_excluded_dirs signal
            (NOTE(review): assumed non-None whenever dirs actually change;
            confirm callers always pass it)
        @param change_in_db When True (and no dirs are added) clear the
            'excluded' flag in db for the removed dirs
        """
        assert self.event.is_folder

        if not excluded_dirs:
            return

        logger.debug("Updating excluded dirs")
        if self.event.type == 'delete':
            src_path = self.db.get_path_by_events(self.event, session)
            dst_path = None
        else:  # self.event.type == 'move'
            try:
                # Event preceding this move gives the old location
                prev_event = self.event.file.events[-2]
            except IndexError:
                logger.warning("No prev event for %s", self.event)
                src_path = ""
            else:
                src_path = self.db.get_path_by_events(prev_event, session)
            dst_path = self.db.get_path_by_events(self.event, session)
        dirs_to_delete, dirs_to_add = fs.get_excluded_dirs_to_change(
            excluded_dirs, src_path, dst_path)
        if not dirs_to_delete:
            return

        if dirs_to_add:
            # Dirs merely renamed inside the excluded set: keep db flags
            change_in_db = False

        signals.change_excluded_dirs.emit(dirs_to_delete, dirs_to_add)
        if change_in_db:
            for path in dirs_to_delete:
                self._mark_dir_not_excluded(path, session)

    def _mark_dir_not_excluded(self, path, session):
        """Clear the 'excluded' flag on folders matching path, including
        their children in db."""
        try:
            folders = self.db.find_folders_by_future_path(path,
                                                          session=session,
                                                          include_deleted=True)
        except Exception:
            logger.error("Error finding folders %s by path", path)
            return

        assert folders, "Excluded dir has to be in db"

        for folder in (f for f in folders if f.excluded):
            folder.excluded = False
            self.db.mark_child_excluded(folder.id,
                                        session,
                                        is_excluded=False)

    def _apply_folder_delete_if_any(self, session, fs):
        """Apply a pending downloaded delete event for the folder at this
        event file's path, when one exists.

        @param session Active db session
        @param fs Filesystem facade
        @return True when a delete was applied to the filesystem
        """
        path = self.event.file.path
        actual_file_id = fs.get_actual_events_file_id(path, is_folder=True)
        logger.debug(
            "Trying to delete folder %s "
            "with actual events_file_id %s...", path, actual_file_id)
        delete_events = session.query(Event) \
            .filter(Event.file_id == actual_file_id) \
            .filter(Event.type == 'delete') \
            .filter(Event.state == 'downloaded') \
            .all()
        # Keep only deletes newer than the file's applied event,
        # ordered oldest-to-newest by server id.
        delete_events = sorted(filter(
            lambda e: not e.file.event_id or e.id > e.file.event_id,
            delete_events),
                               key=lambda e: e.server_event_id)
        if not delete_events:
            return False

        # Record the newest delete as the file's applied event.
        delete_events[-1].file.event_id = delete_events[-1].id
        fs.accept_delete(path,
                         is_directory=True,
                         events_file_id=actual_file_id)
        self.change_processing_events_counts.emit(0, -1)
        return True

    def _generate_uuid(self, session):
        """Generate an event uuid guaranteed unique within the db."""
        while True:
            candidate = generate_uuid()
            clashes = session.query(Event) \
                .filter(Event.uuid == candidate).all()
            if not clashes:
                return candidate
            logger.warning("Events with uuid '%s' exist %s",
                           candidate, clashes)

    def _process_parent_not_found(self, session):
        """Handle a missing parent folder by dummy-deleting this file.

        Returns False (nothing done) when a later move/delete event already
        exists for the file, True otherwise.
        """
        with self.db.db_lock:
            later_events = [e for e in self.event.file.events
                            if e.id > self.event.id]
            if any(e.type in ('move', 'delete') for e in later_events):
                # A later event will resolve the situation - do nothing.
                return False

            if self.event.is_folder:
                # Dummy-delete the whole subtree first.
                self._add_dummy_delete_events(session)
                delattr(self, "_dummy_deletes")

            server_event_id = self._events_queue.get_min_server_event_id()
            self._add_dummy_delete(self.event.file, self.event,
                                   server_event_id, session)
            return True

    def _check_previous_delete(self, event, file_events, session, events_queue,
                               fs):
        """Hook for concrete strategies; base reports no previous delete.

        @return (has_deletes, add_dummy, new_delete_event)
        """
        return False, False, None

    @atomic
    def check_previous_delete(self, session=None, events_queue=None, fs=None):
        """Run strategy-specific previous-delete handling for this event.

        @return True when delete handling took place
        """
        event = session.query(Event) \
            .filter(Event.id == self.event.id) \
            .one_or_none()
        if not event:
            return False

        result = self._check_previous_delete(
            event, event.file.events, session, events_queue, fs)
        has_deletes, add_dummy, new_delete_event = result

        if not has_deletes:
            return False

        if new_delete_event:
            new_delete_event.file_id = event.file_id
            session.add(new_delete_event)

        if add_dummy:
            min_id = events_queue.get_min_server_event_id()
            self._add_dummy_delete(event.file, event, min_id, session)
            events_queue.change_processing_events_counts(remote_inc=1)

        return True

    @atomic
    def add_dummy_if_parent_deleted(self, session=None, events_queue=None):
        """Add a dummy delete for this file when its parent folder is
        already registered as deleted.

        @return True when a dummy delete was added
        """
        logger.debug("Adding dummy when parent is deleted...")
        event = session.query(Event) \
            .filter(Event.id == self.event.id) \
            .one_or_none()
        # Fix: check for a missing event *before* dereferencing
        # event.file.events - the original computed has_deletes first and
        # raised AttributeError when the event was gone.
        if not event:
            return False
        if any(e.type == 'delete' for e in event.file.events):
            return False

        folder = self.find_folder_by_uuid(session, event.folder_uuid)
        if not folder or not folder.is_deleted_registered:
            return False

        if not self.event.file.folder_id:
            self.event.file.folder_id = folder.id
        min_server_event_id = events_queue.get_min_server_event_id()
        self._add_dummy_delete(event.file, event, min_server_event_id, session)
        events_queue.change_processing_events_counts(remote_inc=1)
        return True

    def _check_offline(self, session):
        """Propagate the parent folder's offline flag to this event's file.

        @return Result of db.make_offline (count of not-applied items),
            or 0 when nothing was done
        """
        event = self.event
        parent = self.find_folder_by_uuid(session, event.folder_uuid)
        if not (parent and parent.is_offline) or event.file.is_offline:
            return 0
        return self.db.make_offline(event.file_uuid,
                                    session=session,
                                    is_offline=True)
Example #28
0
class QuietProcessor(object):
    def __init__(self, root, storage, path_converter, exceptions):
        """
        Constructor
        @param root Sync root directory path
        @param storage Storage instance used for known-file bookkeeping
        @param path_converter Converter between absolute and relative paths
        @param exceptions Exceptions container (stored as-is, not
            inspected here)
        """
        self._root = root
        self._storage = storage
        self._path_converter = path_converter
        self._exceptions = exceptions

        # Monotonic counter (guarded by lock) for unique .tmp names
        self._tmp_id = 0
        self._tmp_id_lock = RLock()

        self._init_temp_dir()

        # Signals: (src_rel_path, dst_rel_path); (rel_path);
        # (rel_path, mtime); (full_path)
        self.file_moved = Signal(str, str)
        self.file_deleted = Signal(str)
        self.file_modified = Signal(str, float)
        self.access_denied = Signal(str)

    def delete_file(self, full_path, events_file_id=None, is_offline=True):
        """Remove a file from disk and from storage, then emit file_deleted.

        When events_file_id is given but unknown, deletion is skipped.
        """
        full_path = unicodedata.normalize('NFC', full_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, found_path = self._get_file_by_id(events_file_id, session)
            if file:
                # Prefer the path recorded for the events_file_id.
                full_path = found_path
            else:
                if events_file_id is not None:
                    logger.warning("Skipping file deletion because "
                                   "file with same events_file_id not found")
                    return
                file = self._storage.get_known_file(full_path, session=session)

            if file:
                try:
                    remove_file(self.get_hard_path(full_path, is_offline))
                except OSError as e:
                    logger.warning("Can't remove file. Reason: %s", e)
                    if e.errno != errno.EACCES:
                        raise e
                    self._raise_access_denied(full_path)
                self._storage.delete_file(file, session=session)

        self.file_deleted.emit(self._path_converter.create_relpath(full_path))

    def delete_directory(self, full_path, events_file_id=None):
        """Remove a directory tree from disk and its known children from
        storage, then emit file_deleted for every removed child.

        @param full_path Absolute directory path
        @param events_file_id Id used to locate the dir; when given but
            unknown, deletion is skipped
        """
        full_path = unicodedata.normalize('NFC', full_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if file:
                full_path = _full_path
            elif events_file_id is not None:
                logger.warning("Skipping directory deletion because "
                               "directory with same events_file_id not found")
                return

            rel_path = self._path_converter.create_relpath(full_path)
            files = self._storage.get_known_folder_children(rel_path,
                                                            session=session)
            try:
                # Rename into the temp dir first so the tree vanishes from
                # the root quickly; removing its contents then proceeds on
                # the temp copy. Any stale entry there is cleared first.
                temp_path = join(self._temp_dir, basename(full_path))
                if isdir(temp_path):
                    remove_dir(temp_path, suppress_not_exists_exception=True)
                elif isfile(temp_path):
                    remove_file(temp_path)
                if isdir(full_path):
                    os.rename(full_path, temp_path)
                    try:
                        remove_dir(temp_path,
                                   suppress_not_exists_exception=True)
                    except Exception:
                        # Leftovers remain in temp dir; best-effort only
                        logger.debug("Dir %s delete failed", temp_path)
            except OSError as e:
                logger.warning("Can't remove dir %s. Reason: %s", full_path, e)
                if e.errno == errno.EACCES:
                    self._raise_access_denied(full_path)
                elif e.errno != errno.ENOENT:  # ignore: dir already gone
                    raise e

            deleted_paths = [f.relative_path for f in files]
            self._storage.delete_known_folder_children(rel_path,
                                                       session=session)

        for path in deleted_paths:
            self.file_deleted.emit(path)

    def create_directory(self, full_path, events_file_id, wrong_file_id=None):
        """Create a directory (when unknown) and bind it to events_file_id.

        Raises wrong_file_id (when provided) if the stored directory is
        already bound to a different events_file_id.
        """
        full_path = unicodedata.normalize('NFC', full_path)

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            assert exists(dirname(full_path))
            file = self._storage.get_known_file(full_path,
                                                True,
                                                session=session)

            if file is None:
                # Unknown directory: create it on disk and in storage.
                mkdir(full_path)
                file = self._storage.get_new_file(full_path,
                                                  True,
                                                  session=session)
            else:
                id_mismatch = (events_file_id and file.events_file_id and
                               file.events_file_id != events_file_id)
                if id_mismatch and wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 full_path, events_file_id,
                                 file.events_file_id if file else None)
                    raise wrong_file_id(full_path, events_file_id,
                                        file.events_file_id)

            file.events_file_id = events_file_id
            self._storage.save_file(file, session=session)

    def patch_file(self,
                   full_fn,
                   patch_archive,
                   silent=True,
                   events_file_id=None,
                   wrong_file_id=None):
        """Apply a patch archive to a local file.

        @param full_fn Absolute path of the file to patch
        @param patch_archive Path of the patch archive
        @param silent When True, the storage record is updated and
            file_modified is emitted
        @param events_file_id Id used to locate the file (optional)
        @param wrong_file_id Exception class raised on unknown file or
            events_file_id mismatch (optional)
        @return (new_hash, old_hash) tuple
        """
        full_fn = unicodedata.normalize('NFC', full_fn)

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if file:
                full_fn = _full_path
            else:
                file = self._storage.get_known_file(full_fn,
                                                    is_folder=False,
                                                    session=session)
            # Raise when the file is unknown or bound to another id
            if (file is None or file and events_file_id and
                file.events_file_id
                and file.events_file_id != events_file_id) and \
                        wrong_file_id:
                logger.error("Wrong file id for %s. Expected %s. Got %s",
                             full_fn, events_file_id,
                             file.events_file_id if file else None)
                raise wrong_file_id(full_fn, events_file_id,
                                    file.events_file_id if file else None)

            # file = self._storage.get_new_file(full_fn, False,
            #                                   session=session)

            assert exists(dirname(full_fn))
            hash, signature, old_hash = Rsync.accept_patch(
                patch_archive=patch_archive,
                unpatched_file=full_fn,
                known_old_hash=file.file_hash if file else None,
                root=self._root)

            if silent:
                # NOTE(review): assumes file is not None here; with a falsy
                # wrong_file_id and an unknown file this would raise
                # AttributeError - confirm callers always pass wrong_file_id
                file.mtime = os.stat(full_fn).st_mtime
                file.size = os.stat(full_fn).st_size
                file.file_hash = hash
                file.events_file_id = events_file_id
                file.was_updated = True
                self._storage.save_file(file, session=session)
                self._storage.update_file_signature(file, signature)
                self.file_modified.emit(file.relative_path, file.mtime)

        return hash, old_hash

    def move_file(self,
                  src_full_path,
                  dst_full_path,
                  events_file_id=None,
                  already_exists=None,
                  file_not_found=None,
                  wrong_file_id=None,
                  is_offline=True):
        """Move a file on disk and in storage, then signal file_moved.

        @param src_full_path Source absolute path
        @param dst_full_path Destination absolute path
        @param events_file_id Id used to locate the file (optional)
        @param already_exists Exception class passed to _check_paths_exist
        @param file_not_found Exception class passed to _check_paths_exist
        @param wrong_file_id Exception class for events_file_id mismatch
        @param is_offline Selects the hard-path flavour used for the move
        """
        dst_full_path = unicodedata.normalize('NFC', dst_full_path)
        dst_rel_path = self._path_converter.create_relpath(dst_full_path)
        src_full_path = unicodedata.normalize('NFC', src_full_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if not file:
                file = self._storage.get_known_file(src_full_path,
                                                    False,
                                                    session=session)
            else:
                src_full_path = _full_path
            src_rel_path = self._path_converter.create_relpath(src_full_path)
            # No-op move, or path precondition failed
            if src_rel_path == dst_rel_path or not self._check_paths_exist(
                    src_full_path, dst_full_path, already_exists,
                    file_not_found):
                return

            assert exists(dirname(dst_full_path))
            if file:
                if events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 dst_full_path, events_file_id,
                                 file.events_file_id)
                    raise wrong_file_id(src_full_path, events_file_id,
                                        file.events_file_id)

                file.relative_path = self._path_converter.create_relpath(
                    dst_full_path)
                try:
                    shutil.move(src=self.get_hard_path(src_full_path,
                                                       is_offline),
                                dst=self.get_hard_path(dst_full_path,
                                                       is_offline))
                except OSError as e:
                    logger.warning("Can't move file. Reason: %s", e)
                    if e.errno == errno.EACCES:
                        self._raise_access_denied(src_full_path)
                    else:
                        raise e
                self._storage.save_file(file, session=session)

            # NOTE(review): signal invoked directly, not via .emit() as
            # file_deleted/file_modified are - presumably Signal is
            # callable; confirm against the Signal implementation
            self.file_moved(src_rel_path, dst_rel_path)

    def move_directory(self,
                       src_full_path,
                       dst_full_path,
                       events_file_id=None,
                       already_exists=None,
                       file_not_found=None,
                       wrong_file_id=None):
        """Move a directory on disk and its known children in storage,
        then signal file_moved.

        @param src_full_path Source absolute path
        @param dst_full_path Destination absolute path
        @param events_file_id Id used to locate the dir (optional)
        @param already_exists Exception class passed to _check_paths_exist
        @param file_not_found Exception class passed to _check_paths_exist
        @param wrong_file_id Exception class for events_file_id mismatch
        """
        dst_full_path = unicodedata.normalize('NFC', dst_full_path)
        dst_rel_path = self._path_converter.create_relpath(dst_full_path)
        src_full_path = unicodedata.normalize('NFC', src_full_path)

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file, _full_path = self._get_file_by_id(events_file_id, session)
            if not file:
                file = self._storage.get_known_file(src_full_path,
                                                    True,
                                                    session=session)
            else:
                src_full_path = _full_path

            src_rel_path = self._path_converter.create_relpath(src_full_path)
            # No-op move, or path precondition failed
            if src_rel_path == dst_rel_path or not self._check_paths_exist(
                    src_full_path, dst_full_path, already_exists,
                    file_not_found):
                return

            assert exists(dirname(dst_full_path))
            if file:
                if events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 src_full_path, events_file_id,
                                 file.events_file_id if file else None)
                    raise wrong_file_id(src_full_path, events_file_id,
                                        file.events_file_id)
            try:
                os.rename(src_full_path, dst_full_path)
            except OSError as e:
                logger.warning("Can't move dir %s. Reason: %s", src_full_path,
                               e)
                if e.errno == errno.EACCES:
                    self._raise_access_denied(src_full_path)
                else:
                    raise e

            self._storage.move_known_folder_children(src_rel_path,
                                                     dst_rel_path,
                                                     session=session)

            # NOTE(review): direct call (not .emit) and the str() wrapper
            # look inconsistent with move_file - presumably equivalent;
            # confirm against the Signal implementation
            self.file_moved(src_rel_path, str(dst_rel_path))

    def create_file_from_copy(self,
                              file_rel_path,
                              copy_hash,
                              silent,
                              events_file_id,
                              search_by_id=False,
                              wrong_file_id=None,
                              copy_does_not_exists=None):
        """Materialize a file at file_rel_path from the stored copy
        identified by copy_hash.

        Raises copy_does_not_exists (when provided) if the copy is absent
        and cannot be rebuilt from existing files.
        """
        dst_full_path = self._path_converter.create_abspath(file_rel_path)
        copy_full_path = join(get_copies_dir(self._root), copy_hash)
        copy_missing = copy_does_not_exists is not None \
            and not exists(copy_full_path)
        if copy_missing and not self.make_copy_from_existing_files(copy_hash):
            raise copy_does_not_exists(copy_hash)
        return self._create_file(copy_full_path, dst_full_path, silent,
                                 copy_hash, events_file_id, search_by_id,
                                 wrong_file_id)

    def make_copy_from_existing_files(self, copy_hash):
        """Try to rebuild the copy file for copy_hash from any known file
        with the same hash that is still present on disk.

        @param copy_hash Hash identifying the copy
        @return True when the copy exists (already or after rebuild),
            False when no usable candidate file was found
        """
        copy_full_path = join(get_copies_dir(self._root), copy_hash)
        if exists(copy_full_path):
            return True

        tmp_full_path = self._get_temp_path(copy_full_path)
        with self._storage.create_session(read_only=True,
                                          locked=False) as session:
            excludes = []
            while True:
                file = self._storage.get_file_by_hash(copy_hash,
                                                      exclude=excludes,
                                                      session=session)
                if not file:
                    # No more candidates - the copy cannot be rebuilt
                    return False

                file_path = self._path_converter.create_abspath(
                    file.relative_path)
                if not exists(file_path):
                    excludes.append(file.id)
                    continue

                try:
                    copy_file(file_path, tmp_full_path)
                    # Verify the candidate's content really matches
                    hash = Rsync.hash_from_block_checksum(
                        Rsync.block_checksum(tmp_full_path))
                    if hash == copy_hash:
                        os.rename(tmp_full_path, copy_full_path)
                        return True
                    else:
                        excludes.append(file.id)
                        remove_file(tmp_full_path)
                except Exception as e:
                    logger.warning("Can't operate tmp file %s. Reason: (%s)",
                                   tmp_full_path, e)
                    if file.id not in excludes:
                        excludes.append(file.id)
                    try:
                        remove_file(tmp_full_path)
                    except Exception:
                        # Tmp file stuck (e.g. locked): use a fresh name
                        tmp_full_path = self._get_temp_path(copy_full_path)

    def _get_temp_path(self, copy_full_path):
        """Return a unique, non-existing '<copy_full_path>_<n>.tmp' name."""
        while True:
            with self._tmp_id_lock:
                self._tmp_id += 1
            candidate = "{}_{}.tmp".format(copy_full_path, self._tmp_id)
            if not exists(candidate):
                return candidate

    def create_empty_file(self,
                          file_rel_path,
                          file_hash,
                          silent,
                          events_file_id,
                          search_by_id=False,
                          wrong_file_id=None,
                          is_offline=True):
        """Create a placeholder file for file_rel_path with no copy source."""
        destination = self._path_converter.create_abspath(file_rel_path)
        # src_full_path=None tells _create_file there is no copy to use.
        self._create_file(None, destination, silent, file_hash,
                          events_file_id, search_by_id, wrong_file_id,
                          is_offline)

    def _create_file(self,
                     src_full_path,
                     dst_full_path,
                     silent,
                     file_hash,
                     events_file_id,
                     search_by_id,
                     wrong_file_id,
                     is_offline=True):
        """Materialize a file on disk and reconcile its storage record.

        @param src_full_path Source file to copy from, or None to create
            an empty file / online-only placeholder instead
        @param dst_full_path Destination absolute path (may be redirected
            to the stored path when search_by_id finds the record)
        @param silent If truthy, the storage record is updated and
            file_modified emitted here (see NOTE at the bottom)
        @param file_hash Content hash expected for the destination
        @param events_file_id Remote event id to associate with the file
        @param search_by_id If truthy, look the record up by
            events_file_id first
        @param wrong_file_id Exception factory raised on an id mismatch,
            or falsy to skip that check
        @param is_offline False for online-only placeholders stored under
            the FILE_LINK_SUFFIX "hard path"
        @return the hash previously stored for this file (None if the
            record was just created)
        """

        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            file = None
            file_exists = False
            was_updated = True
            if search_by_id:
                # Prefer lookup by remote id: if the file was moved, its
                # stored path overrides the caller-supplied destination.
                file, _full_path = self._get_file_by_id(
                    events_file_id, session)
                if file:
                    dst_full_path = _full_path

            # Caller is responsible for the parent directory existing.
            assert exists(dirname(dst_full_path))
            hard_path = self.get_hard_path(dst_full_path, is_offline)
            if not file:
                file = self._storage.get_known_file(dst_full_path,
                                                    is_folder=False,
                                                    session=session)
                # A known record whose id differs from the event's id is a
                # conflict; surface it through the wrong_file_id factory.
                if file and events_file_id and file.events_file_id and \
                        file.events_file_id != events_file_id and \
                        wrong_file_id:
                    logger.error("Wrong file id for %s. Expected %s. Got %s",
                                 dst_full_path, events_file_id,
                                 file.events_file_id)
                    raise wrong_file_id(dst_full_path, events_file_id,
                                        file.events_file_id)
            if file:
                # Skip (re)creation when the hash matches AND the expected
                # on-disk form (real file when offline, placeholder when
                # online-only) is already present.
                file_exists = file.file_hash == file_hash and \
                              (exists(dst_full_path) and is_offline or
                               exists(hard_path) and not is_offline)
                logger.debug(
                    "The fact that file %s with same hash "
                    "already exists in storage and filesystem is %s",
                    dst_full_path, file_exists)

            if file is None:
                # if search_by_id and wrong_file_id:
                #     logger.error("Wrong file id for %s. Expected %s. Got None",
                #                  dst_full_path, events_file_id)
                #     raise wrong_file_id(dst_full_path,
                #                         events_file_id,
                #                         None)

                file = self._storage.get_new_file(dst_full_path,
                                                  False,
                                                  session=session)
                was_updated = False
            old_hash = file.file_hash

            signature = None
            if not file_exists:
                if src_full_path:
                    # create file from copy
                    if not exists(get_signature_path(file_hash)):
                        signature = Rsync.block_checksum(src_full_path)
                    # Copy to a temp name first so a partially-copied file
                    # is never left at the destination path.
                    tmp_full_path = self._get_temp_path(src_full_path)
                    copy_file(src_full_path, tmp_full_path)
                    try:
                        remove_file(dst_full_path)
                        os.rename(tmp_full_path, dst_full_path)
                        # Sync timestamp with the placeholder, then drop
                        # the placeholder: the file is now offline.
                        copy_time(dst_full_path + FILE_LINK_SUFFIX,
                                  dst_full_path)
                        remove_file(dst_full_path + FILE_LINK_SUFFIX)
                    except Exception as e:
                        logger.warning(
                            "Can't rename to dst file %s. "
                            "Reason: %s", dst_full_path, e)
                        # Best-effort temp cleanup; the original error
                        # is re-raised regardless.
                        try:
                            remove_file(tmp_full_path)
                        except Exception:
                            pass
                        raise e
                else:
                    create_empty_file(hard_path)
                    if not is_offline:
                        # Placeholder carries its remote id embedded and is
                        # marked invisible via set_ext_invisible.
                        self.write_events_file_id(hard_path, events_file_id)
                        set_ext_invisible(hard_path)
                    # Keep exactly one of (real file, placeholder) on disk,
                    # carrying the timestamp across.
                    if hard_path.endswith(FILE_LINK_SUFFIX):
                        copy_time(dst_full_path, hard_path)
                        remove_file(dst_full_path)
                    else:
                        copy_time(hard_path, dst_full_path)
                        remove_file(dst_full_path + FILE_LINK_SUFFIX)

            if silent:
                # NOTE(review): the storage record is saved and
                # file_modified emitted only on the silent path --
                # presumably the non-silent path relies on filesystem-event
                # processing to update storage; confirm with callers.
                file.mtime = os.stat(hard_path).st_mtime
                file.size = os.stat(hard_path).st_size
                file.file_hash = file_hash
                file.events_file_id = events_file_id
                file.was_updated = was_updated
                logger.debug("Saving file. id=%s", file.events_file_id)
                self._storage.save_file(file, session=session)
                if src_full_path and signature:
                    # create file from copy
                    self._storage.update_file_signature(file, signature)
                if was_updated:
                    self.file_modified.emit(file.relative_path, file.mtime)

            return old_hash

    def sync_events_file_id(self, file_path, events_file_id, is_folder):
        """Attach events_file_id to the known file at file_path.

        Logs a warning and does nothing when the path is unknown
        to storage.
        """
        abs_path = self._path_converter.create_abspath(file_path)
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            known_file = self._storage.get_known_file(
                abs_path, is_folder=is_folder, session=session)
            if not known_file:
                logger.warning("Can't sync events_file_id for path %s",
                               file_path)
                return

            known_file.events_file_id = events_file_id
            self._storage.save_file(known_file, session=session)

    def sync_events_file_id_by_old_id(self, events_file_id,
                                      old_events_file_id):
        """Re-key a file's stored events_file_id, locating the record by
        its previous id."""
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            known_file, _ = self._get_file_by_id(old_events_file_id, session)
            if not known_file:
                logger.debug("Can't sync events_file_id for old_id %s",
                             old_events_file_id)
                return

            known_file.events_file_id = events_file_id
            self._storage.save_file(known_file, session=session)

    def _get_file_by_id(self, events_file_id, session):
        """Look up a known file by its events_file_id.

        Returns a (file, absolute_path) tuple; both elements are None when
        the id is falsy or unknown to storage.
        """
        if not events_file_id:
            return None, None

        found = self._storage.get_known_file_by_id(events_file_id, session)
        if not found:
            logger.warning("Can't find file by id %s", events_file_id)
            return found, None

        return found, self._path_converter.create_abspath(
            found.relative_path)

    def _check_paths_exist(self, src_full_path, dst_full_path, already_exists,
                           file_not_found):
        if exists(dst_full_path):
            if exists(src_full_path):
                if already_exists:
                    raise already_exists(dst_full_path)
                else:
                    return False
            else:
                logger.debug(
                    "Destination exists %s, source does not exist %s."
                    " Moving accepted", dst_full_path, src_full_path)
                return False

        if not exists(src_full_path):
            if file_not_found:
                raise file_not_found(src_full_path)
            else:
                return False

        return True

    def delete_old_signatures(self, signatures_dir, delete_all=False):
        """Delete signature files from signatures_dir.

        @param signatures_dir Directory holding signature files (assumed
            flat: signature files only, no subdirs)
        @param delete_all If False, keep signatures whose hash is still
            referenced in storage; if True, delete everything
        """
        # we believe that signatures dir contains only signature files
        # and no subdirs
        try:
            signatures_to_delete = os.listdir(signatures_dir)
        except Exception as e:
            logger.warning("Can't delete old signatures. Reason: %s", e)
            return

        if not delete_all:
            # taking storage lock to prevent adding new signatures
            # during deletion
            with self._storage.create_session(read_only=False,
                                              locked=True) as session:
                # BUG FIX: the previous code built a lazy filter() here and
                # consumed it after the 'with' block, so hash_in_storage()
                # ran on a closed session outside the lock. Materialize
                # the list while the session is still open.
                signatures_to_delete = [
                    h for h in signatures_to_delete
                    if not self._storage.hash_in_storage(h, session=session)]

        try:
            # Best-effort removal; a single failure aborts with a warning.
            for name in signatures_to_delete:
                remove_file(join(signatures_dir, name))
        except Exception as e:
            logger.warning("Can't delete old signatures. Reason: %s", e)

    def _init_temp_dir(self):
        # (Re)initialize the temp dir under the sync root: best-effort
        # removal of any leftover dir from a previous run, then recreate.
        self._temp_dir = get_temp_dir(self._root)
        if exists(self._temp_dir):
            try:
                remove_dir(self._temp_dir)
            except Exception as e:
                # Non-fatal: we still recreate below even if cleanup failed.
                logger.warning("Can't remove temp dir. Reason: %s", e)

        # create=True presumably makes the directory if missing --
        # TODO confirm against get_temp_dir's definition.
        self._temp_dir = get_temp_dir(self._root, create=True)

    def _raise_access_denied(self, full_path):
        # Notify first, then raise: access_denied looks like a signal or
        # callback invoked with the offending path -- confirm its type.
        self.access_denied(full_path)
        raise self._exceptions.AccessDenied(full_path)

    def get_hard_path(self, full_path, is_offline=True):
        """Return the on-disk path for full_path.

        Offline files live at full_path itself; online-only placeholders
        carry the FILE_LINK_SUFFIX extension.
        """
        if is_offline:
            return full_path
        return full_path + FILE_LINK_SUFFIX

    def write_events_file_id(self, hard_path, events_file_id):
        """Persist events_file_id (pickled) into the file at hard_path."""
        payload = pickle.dumps(events_file_id)
        with open(hard_path, 'wb') as stream:
            stream.write(payload)
# Example #29
    def __init__(self, path_converter):
        """Set up the moved-file notification action.

        @param path_converter Helper translating between absolute and
            relative paths
        """
        super(NotifyIfMovedAction, self).__init__()
        # NOTE(review): attribute is spelled "_patch_converter" though it
        # holds a path converter -- likely a historical typo, kept as-is
        # because other methods may reference this exact name.
        self._patch_converter = path_converter
        # Emitted with (old_path, new_path) when a file move is detected.
        self.file_moved = Signal(str, str)
    def __init__(self, path_converter):
        """Set up the deleted-file notification action.

        @param path_converter Helper translating between absolute and
            relative paths
        """
        super(NotifyIfDeletedAction, self).__init__()
        # NOTE(review): attribute is spelled "_patch_converter" though it
        # holds a path converter -- likely a historical typo, kept as-is
        # because other methods may reference this exact name.
        self._patch_converter = path_converter
        # Emitted with the relative path of the deleted file.
        self.file_deleted = Signal(str)