def __init__(self, db_file):
    self._db_file = db_file
    self._has_events = True

    new_db_file = not exists(self._db_file)
    if not new_db_file:
        # Database migration. It can be executed before opening db
        try:
            upgrade_db("stats_db", db_filename=self._db_file)
        except Exception as e:
            remove_file(self._db_file)
            new_db_file = True
            logger.warning("Can't upgrade stats db. "
                           "Reason: (%s) Creating...", e)

    self._engine = create_engine('sqlite:///{}'.format(
        FilePath(self._db_file)))
    self._Session = sessionmaker(bind=self._engine)

    Base.metadata.create_all(self._engine, checkfirst=True)

    if new_db_file:
        try:
            stamp_db("stats_db", db_filename=self._db_file)
        except Exception as e:
            logger.error("Error stamping stats db: %s", e)

    logger.debug("Stats DB init")
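# A minimal sketch of the remove_file()/remove_dir() helpers this code
# relies on (their real implementations live elsewhere in the codebase and
# may differ). Call sites below sometimes wrap them in try/except and
# sometimes call them bare, so the assumed contract is: delete if present,
# tolerate "already gone", propagate real errors. The
# suppress_not_exists_exception keyword mirrors its use in
# delete_directory() further down.
import errno
import os
import shutil

def remove_file(path):
    # Remove a file; a missing file is not treated as an error.
    try:
        os.remove(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise

def remove_dir(path, suppress_not_exists_exception=False):
    # Recursively remove a directory tree, optionally ignoring a missing dir.
    try:
        shutil.rmtree(path)
    except FileNotFoundError:
        if not suppress_not_exists_exception:
            raise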
def _stop_service(self):
    try:
        remove_file(self._port_file)
    except Exception as e:
        logger.warning("Removing port file exception: %s", e)

    platform = get_platform()
    logger.debug("Stopping service process. Platform: %s", platform)
    result = 0
    try:
        while True:
            if self._service_process is None or platform == 'Windows':
                kill_all_services()
                logger.debug("All services killed")
                result = 1
            else:
                self._service_process.terminate()
                # 0: clean exit, -15: terminated by SIGTERM
                if self._service_process.wait(timeout=10) in (0, -15):
                    result = 1
            if result != 0:
                break
            else:
                logger.debug("Service killing: result == 0!")
                self._service_process = None
                time.sleep(0.5)
    except OSError as e:
        if e.errno == errno.ESRCH:
            pass
        else:
            logger.warning("Stopping service exception: %s", e)
    except Exception as e:
        logger.warning("Stopping service exception: %s", e)

    logger.debug("Stopping service returned %s", result)
    self._close_stderr_log()
def move_files_to_copies(self):
    with self._storage.create_session(read_only=False,
                                      locked=True) as session:
        files_with_hashes = session \
            .query(File.relative_path, File.file_hash) \
            .filter(File.is_folder == 0) \
            .all()

    copies_dir = get_copies_dir(self._root)
    for (file, hashsum) in files_with_hashes:
        hash_path = op.join(copies_dir, hashsum)
        file_path = self._path_converter.create_abspath(file)
        if not op.exists(hash_path):
            try:
                os.rename(file_path, hash_path)
            except Exception as e:
                logger.error("Error moving file to copy: %s", e)
        remove_file(file_path)

    abs_path = FilePath(self._root).longpath
    folders_plus_hidden = [
        self._path_converter.create_abspath(f)
        for f in os.listdir(abs_path)
        if f not in HIDDEN_DIRS
    ]
    for folder in folders_plus_hidden:
        if not op.isdir(folder):
            continue
        try:
            remove_dir(folder)
        except Exception as e:
            logger.error("Error removing dir '%s' (%s)", folder, e)

    logger.info("Removed all files and folders")
    self._storage.clean()
def _delete_empty_file_or_folder(self):
    self._is_deleting = True
    fullname = self._get_full_name(cancel=False, existing_file=True)
    if fullname:
        self._fullname = fullname
    if not self._fullname:
        self._is_deleting = False
        return

    self._fullname = FilePath(self._fullname)
    logger.debug("Removing special file %s", self._fullname)
    if not self._in_data_dir:
        self._sync.remove_special_file(self._fullname)
    try:
        if self._is_folder:
            remove_dir(self._fullname)
        else:
            remove_file(self._fullname)
    except Exception as e:
        logger.warning("Can't delete file or folder %s. Reason %s",
                       self._fullname, e)
    self._update_spec_files()
    if self._in_data_dir:
        self._sync.remove_special_file(self._fullname)
    self._fullname = ""
    self._is_deleting = False
def _clean_spec_files(self):
    logger.debug("Cleaning spec files from '%s'...", self._filename)
    try:
        with open(self._filename, 'rb') as f:
            self._spec_files = pickle.load(f)
        for share_hash in self._spec_files:
            path, is_directory = self._spec_files[share_hash]
            try:
                if is_directory:
                    remove_dir(path)
                else:
                    remove_file(path)
                logger.debug("Special file (folder) removed %s", path)
            except Exception as e:
                logger.warning("Can't delete file or folder %s. Reason %s",
                               path, e)
    except Exception as e:
        logger.warning("Failed to load special files data (%s)", e)
        try:
            remove_file(self._filename)
        except Exception:
            pass

    self._spec_files = dict()
    try:
        with open(self._filename, 'wb') as f:
            pickle.dump(self._spec_files, f, protocol=2)
    except Exception as e:
        logger.warning("Failed to save special files data (%s)", e)
def run(self):
    while True:
        try:
            task = self.que_manager.getQue(self.get_que_name)
            if task is None:
                time.sleep(0.5)
                continue
            image_dir = task['image_dir']
            for f in os.listdir(image_dir):
                img_path = os.path.join(image_dir, f)
                md5 = utils.calc_md5(img_path)
                if utils.get_key(md5, rcli=utils.g_md5_redis) is not None:
                    # md5 duplicated, remove image and process next
                    utils.remove_file(img_path)
                    continue
                # md5 unique, record to redis
                utils.set_key(md5, 1, rcli=utils.g_md5_redis)
                # image quality check (quality_thres is assumed to be
                # defined at module or config level)
                score = image_util.calc_quality(img_path)
                if score < quality_thres:
                    utils.remove_file(img_path)
                    continue
                # put to feature detect queue; use a new name so the
                # incoming task dict is not overwritten
                new_task = {'img_path': img_path}
                self.que_manager.putQue(json.dumps(new_task),
                                        utils.msg_queues['feature_detect'])
        except Exception as e:
            log_util.error("ImageETL run error: %s", str(e))
            time.sleep(0.5)
def delete_file(self, full_path, events_file_id=None, is_offline=True):
    full_path = unicodedata.normalize('NFC', full_path)
    with self._storage.create_session(read_only=False,
                                      locked=True) as session:
        file, _full_path = self._get_file_by_id(events_file_id, session)
        if not file:
            if events_file_id is not None:
                logger.warning("Skipping file deletion because "
                               "file with same events_file_id not found")
                return
            file = self._storage.get_known_file(full_path, session=session)
        else:
            full_path = _full_path

        if file:
            try:
                remove_file(self.get_hard_path(full_path, is_offline))
            except OSError as e:
                logger.warning("Can't remove file. Reason: %s", e)
                if e.errno == errno.EACCES:
                    self._raise_access_denied(full_path)
                else:
                    raise e
            self._storage.delete_file(file, session=session)

        self.file_deleted.emit(
            self._path_converter.create_relpath(full_path))
def _clear_share_download(self):
    data_dir = self._cfg.sync_directory if self._cfg else get_data_dir()
    downloads_dir = get_downloads_dir(data_dir=data_dir, create=True)
    download_name = op.join(downloads_dir, self._current_share_hash)
    if self._is_folder:
        remove_dir(download_name)
    else:
        remove_file(download_name)
def install_update(self):
    if not self._update_ready or self._status == UPDATER_STATUS_INSTALLING:
        return False

    self._status = UPDATER_STATUS_INSTALLING
    self.emit_status()
    logger.info('Installing update')
    try:
        assert self._update_file_path and isfile(self._update_file_path)
        logger.debug("self._update_file_path %s", self._update_file_path)
        path, name = split(self._update_file_path)
        old_cwd = os.getcwd()
        os.chdir(path)
        system = get_platform()
        if system == 'Windows':
            from common.config import load_config

            config = load_config()
            root = config.sync_directory
            log_basename = time.strftime('%Y%m%d_%H%M%S.log')
            log_filename = get_bases_filename(root, log_basename)
            if not self._is_ascii(log_filename):
                log_filename = log_basename
            args = [name, '/verysilent', '/Log={}'.format(log_filename)]
            if is_portable():
                args.append('/PATH={}'.format(get_application_path()))
            # On Python 3.7+ these flags are also available as
            # subprocess.CREATE_NEW_PROCESS_GROUP and
            # subprocess.DETACHED_PROCESS (Windows only)
            subprocess.Popen(
                args,
                creationflags=0x00000200    # CREATE_NEW_PROCESS_GROUP
                | 0x00000008,               # DETACHED_PROCESS
                close_fds=True)
        elif system == 'Darwin':
            bundle_path = normpath(
                join(get_application_path(), '..', '..', '..', '..'))
            logger.debug("bundle_path: %s", bundle_path)
            subprocess.call(
                ['ditto', '-xk', self._update_file_path, bundle_path])
            subprocess.call(
                ['xattr', '-d', '-r', 'com.apple.quarantine', bundle_path])
            logger.debug("Update completed, restart")
            remove_file(get_cfg_filename('lock'))
            if is_portable():
                launcher_path = normpath(
                    join(bundle_path, "..", "Pvtbox-Mac.command"))
            else:
                launcher_path = bundle_path
            subprocess.call(['open', launcher_path])
        os.chdir(old_cwd)
        Application.exit()
    except Exception as e:
        logger.warning("Can't install update. Reason: %s", e)
        self._status = UPDATER_STATUS_INSTALL_ERROR
        self.emit_status()
        return False

    self._status = UPDATER_STATUS_INSTALLED
    self.emit_status()
    return True
def clean(self):
    logger.debug("Cleaning download files %s", self.download_path)
    try:
        remove_file(self.download_path)
    except Exception:
        pass
    try:
        remove_file(self._info_path)
    except Exception:
        pass
def _sync_uploads_file(self):
    try:
        with open(self._filename, 'wb') as f:
            pickle.dump(self.task_to_report, f)
    except Exception as e:
        logger.error("Failed to save upload task data (%s)", e)
        try:
            remove_file(self._filename)
        except Exception:
            pass
def archive():
    # uses function attributes to track progress:
    # archive.size, archive.progress, archive.stop
    logs_dir = get_bases_dir(self._config.sync_directory)
    log_files = glob("{}{}*.log".format(logs_dir, os.sep))
    log_sizes = list(map(os.path.getsize, log_files))
    # mark overall size
    archive.size = sum(log_sizes)

    old_archives = glob("{}{}2*_logs.zip".format(logs_dir, os.sep))
    try:
        list(map(remove_file, old_archives))
    except Exception as e:
        logger.warning("Can't delete old archives. Reason: (%s)", e)

    if get_free_space(logs_dir) < archive.size // 5:
        # archive.size // 5 is approx future archive size
        msg = tr("Insufficient disk space to archive logs. "
                 "Please clean disk")
        self._parent.show_tray_notification(msg)
        self._sending_error.emit()
        raise SendingError(msg)

    archive_name = time.strftime('%Y%m%d_%H%M%S_logs.zip')
    archive_path = "{}{}{}".format(logs_dir, os.sep, archive_name)
    archive_dir = op.dirname(archive_path)
    f = zipfile.ZipFile(archive_path, "w",
                        compression=zipfile.ZIP_DEFLATED,
                        compresslevel=9)
    try:
        with cwd(archive_dir):
            for i, log_file in enumerate(log_files):
                if not op.isfile(log_file):
                    continue
                f.write(op.basename(log_file))
                # mark progress
                archive.progress += log_sizes[i]
                if archive.stop:
                    return
    except Exception as e:
        msg = tr("Can't archive logs.")
        logger.warning(msg + " Reason: (%s)", e)
        self._parent.show_tray_notification(msg)
        self._sending_error.emit()
        raise SendingError(msg)
    finally:
        f.close()
        if archive.stop:
            remove_file(archive_path)

    return archive_path
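# Hedged usage sketch (hypothetical caller, not from the source): archive()
# reads and updates its own function attributes, so they must be primed
# before the first call; another thread may set archive.stop = True to
# cancel, in which case archive() returns None and removes the partial zip.
archive.size = 0
archive.progress = 0
archive.stop = False
archive_path = archive()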
def __init__(self, root,
             db_file_created_cb=None,
             extended_logging=True,
             to_upgrade=True):
    self.possibly_sync_folder_is_removed = Signal()
    self.delete_copy = Signal(str,      # copy hash
                              bool)     # with signature
    self.db_or_disk_full = Signal()

    self._db_file = join(get_copies_dir(root), 'copies.db')
    new_db_file = not exists(self._db_file)
    if new_db_file and callable(db_file_created_cb):
        db_file_created_cb()

    if to_upgrade and not new_db_file:
        # Database migration. It can be executed before opening db
        try:
            upgrade_db("copies_db", db_filename=self._db_file)
        except Exception as e:
            remove_file(self._db_file)
            new_db_file = True
            logger.warning("Can't upgrade copies db. "
                           "Reason: (%s) Creating...", e)
            if callable(db_file_created_cb):
                db_file_created_cb()

    self._engine = create_engine('sqlite:///{}'.format(
        FilePath(self._db_file)))
    self._Session = sessionmaker(bind=self._engine)

    Base.metadata.create_all(self._engine, checkfirst=True)

    if new_db_file:
        try:
            stamp_db("copies_db", db_filename=self._db_file)
        except Exception as e:
            logger.error("Error stamping copies db: %s", e)

    self._lock = RLock()
    self._root = root
    self._extended_logging = extended_logging
    if not self._extended_logging:
        self._logger = None
    else:
        self._logger = logging.getLogger('copies_logger')
        self._logger.debug("Copies init")

    self._last_changes = defaultdict(int)
def _close_info_file(self, to_remove=False):
    if not self._info_file:
        return

    try:
        self._info_file.close()
        if to_remove:
            remove_file(self._info_path)
    except Exception as e:
        logger.debug("Can't close or remove info file "
                     "for task id %s. Reason: %s", self.id, e)
    self._info_file = None
def _on_cleanup(self):
    if self._downloads:
        return

    logger.debug("Cleaning all download files in %s",
                 self._cleanup_directories)
    for paths in chain(
            (iglob(join(d, '*.download'))
             for d in self._cleanup_directories),
            (iglob(join(d, '*.info'))
             for d in self._cleanup_directories)):
        for path in paths:
            try:
                remove_file(path)
            except Exception:
                pass
def clean(self):
    files = self._storage.get_known_files()
    for file in files:
        try:
            remove_file(file)
        except Exception as e:
            logger.error("Error removing file '%s' (%s)", file, e)

    folders = self._storage.get_known_folders()
    for folder in sorted(folders, key=len):
        try:
            remove_dir(folder)
        except Exception as e:
            logger.error("Error removing dir '%s' (%s)", folder, e)

    logger.info("Removed all files and folders")
    self._storage.clean()
def _complete_download(self, force_complete=False):
    if (not self._wanted_chunks or force_complete) and \
            not self._finished:
        logger.debug("download %s completed", self.id)
        self._nodes_requested_chunks.clear()
        for node_id in self._nodes_last_receive_time.keys():
            self.abort_data.emit(node_id, self.id, None)

        if not force_complete:
            self.download_finishing.emit()

        if not force_complete and self.file_hash:
            hash_check_result = self._check_file_hash()
            if hash_check_result is not None:
                return hash_check_result

        self._started = False
        self._finished = True
        self.stop_download_chunks()
        self._close_info_file(to_remove=True)
        if not self._close_file():
            return False

        try:
            if force_complete:
                remove_file(self.download_path)
                self.download_complete.emit(self)
            else:
                shutil.move(self.download_path, self.file_path)
                self._send_end_statistic()
                self.download_complete.emit(self)
                if self.file_hash:
                    self.copy_added.emit(self.file_hash)
        except EnvironmentError as e:
            logger.error("Download task %s can't (re)move file. "
                         "Reason: %s", self.id, e)
            self._send_error_statistic()
            self.download_failed.emit(self)
            self.possibly_sync_folder_is_removed.emit()
            return False

        result = True
    else:
        result = not self._wanted_chunks
    return result
def __init__(self, path_converter, db_file_created_cb=None):
    self._pc = path_converter
    self.possibly_sync_folder_is_removed = Signal()
    self.db_or_disk_full = Signal()

    self._db_file = self._pc.create_abspath('.pvtbox/storage.db')
    logger.debug("DB file: %s", self._db_file)
    new_db_file = not exists(self._db_file)
    if new_db_file and callable(db_file_created_cb):
        db_file_created_cb()
    make_dirs(self._db_file)

    if not new_db_file:
        # Database migration. It can be executed before opening db
        try:
            upgrade_db("storage_db", db_filename=self._db_file)
        except Exception as e:
            remove_file(self._db_file)
            new_db_file = True
            logger.warning("Can't upgrade storage db. "
                           "Reason: (%s) Creating...", e)
            if callable(db_file_created_cb):
                db_file_created_cb()

    self._engine = create_engine(
        'sqlite:///{}'.format(FilePath(self._db_file)),
        connect_args={
            'timeout': 60 * 1000,
            'check_same_thread': False,
        })
    self._engine.pool_timeout = 60 * 60 * 1000
    self._Session = sessionmaker(bind=self._engine)

    Base.metadata.create_all(self._engine, checkfirst=True)

    if new_db_file:
        try:
            stamp_db("storage_db", db_filename=self._db_file)
        except Exception as e:
            logger.error("Error stamping storage db: %s", e)

    self._lock = threading.RLock()
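# Hedged refactor sketch (not in the source): the upgrade-or-recreate
# pattern above is repeated for the stats, copies and storage databases.
# Assuming upgrade_db()/stamp_db() behave as used above, the common part
# could be extracted like this:
def _prepare_db_file(alias, db_file, db_file_created_cb=None):
    # Returns True if a fresh DB file must be stamped after create_all().
    new_db_file = not exists(db_file)
    if new_db_file and callable(db_file_created_cb):
        db_file_created_cb()
    if not new_db_file:
        # Database migration. It can be executed before opening db
        try:
            upgrade_db(alias, db_filename=db_file)
        except Exception as e:
            remove_file(db_file)
            new_db_file = True
            logger.warning("Can't upgrade %s. Reason: (%s) Creating...",
                           alias, e)
            if callable(db_file_created_cb):
                db_file_created_cb()
    return new_db_file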
def _open_uploads_file(self):
    logger.info("Loading upload task data from '%s'...", self._filename)
    # Load previously stored upload task data
    try:
        if not op.exists(self._filename):
            with open(self._filename, 'wb') as f:
                pickle.dump(self.task_to_report, f)
        # Not reported complete upload tasks info as task_id: info
        with open(self._filename, 'rb') as f:
            self.task_to_report = pickle.load(f)
    except Exception as e:
        logger.error("Failed to load upload task data (%s)", e)
        self.task_to_report = None
        try:
            remove_file(self._filename)
        except Exception:
            pass
def on_delete_copy(self, hash, with_signature=True):
    if not hash:
        logger.error("Invalid hash '%s'", hash)
        return

    copy = op.join(get_copies_dir(self._root), hash)
    try:
        remove_file(copy)
        logger.info("File copy deleted %s", copy)
        if not with_signature:
            return

        signature = op.join(get_signatures_dir(self._root), hash)
        remove_file(signature)
        logger.info("File copy signature deleted %s", signature)
    except Exception as e:
        logger.error("Can't delete copy. "
                     "Possibly sync folder is removed %s", e)
        self.possibly_sync_folder_is_removed()
def _on_new_event(self, fs_event):
    if fs_event.src.endswith(FILE_LINK_SUFFIX) and not fs_event.is_dir:
        fs_event.is_link = True
        path = fs_event.src[:-len(FILE_LINK_SUFFIX)]
    else:
        fs_event.is_link = False
        path = fs_event.src

    fs_event.file = self._storage.get_known_file(path)
    fs_event.in_storage = fs_event.file is not None

    suppress_event = False
    if fs_event.in_storage:
        if not fs_event.is_link and self._check_file_exists_on_fs(
                fs_event.src + FILE_LINK_SUFFIX) and \
                fs_event.event_type == CREATE:
            self.rename_file.emit(fs_event.src)
            return self.event_suppressed(fs_event)
        elif fs_event.is_link and fs_event.event_type == MODIFY:
            if not self._get_events_file_id_from_link(fs_event):
                suppress_event = True
        self._load_info_from_storage(fs_event)
    else:
        # possibly copy of file link
        if fs_event.is_link:
            suppress_event = not self._get_copy_info_from_storage(fs_event)
            if not suppress_event:
                set_ext_invisible(fs_event.src)

    if suppress_event:
        try:
            remove_file(fs_event.src)
        except Exception:
            pass
        return self.event_suppressed(fs_event)

    if fs_event.is_link:
        fs_event.file_size = fs_event.old_size

    self.event_passed(fs_event)
def _update_spec_files(self, path=None, is_directory=False):
    if not self._current_share_hash:
        return

    current_hash = self._current_share_hash
    if path:
        self._spec_files[current_hash] = (path, is_directory)
    else:
        self._spec_files.pop(current_hash, None)

    try:
        with open(self._filename, 'wb') as f:
            pickle.dump(self._spec_files, f, protocol=2)
        logger.debug("Saved special files data for hashes %s",
                     list(self._spec_files.keys()))
    except Exception as e:
        logger.error("Failed to save special files data (%s)", e)
        try:
            remove_file(self._filename)
        except Exception:
            pass
def make_copy_from_existing_files(self, copy_hash):
    copy_full_path = join(get_copies_dir(self._root), copy_hash)
    if exists(copy_full_path):
        return True

    tmp_full_path = self._get_temp_path(copy_full_path)
    with self._storage.create_session(read_only=True,
                                      locked=False) as session:
        excludes = []
        while True:
            file = self._storage.get_file_by_hash(copy_hash,
                                                  exclude=excludes,
                                                  session=session)
            if not file:
                return False

            file_path = self._path_converter.create_abspath(
                file.relative_path)
            if not exists(file_path):
                excludes.append(file.id)
                continue

            try:
                copy_file(file_path, tmp_full_path)
                hash = Rsync.hash_from_block_checksum(
                    Rsync.block_checksum(tmp_full_path))
                if hash == copy_hash:
                    os.rename(tmp_full_path, copy_full_path)
                    return True
                else:
                    excludes.append(file.id)
                    remove_file(tmp_full_path)
            except Exception as e:
                logger.warning("Can't operate tmp file %s. Reason: (%s)",
                               tmp_full_path, e)
                if file.id not in excludes:
                    excludes.append(file.id)
                try:
                    remove_file(tmp_full_path)
                except Exception:
                    # can't reuse the temp path; pick a fresh one
                    tmp_full_path = self._get_temp_path(copy_full_path)
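# Hedged helper sketch (hypothetical, not from the source): the invariant
# make_copy_from_existing_files() maintains is that a copy lives at
# get_copies_dir(root)/<hash>, where <hash> is derived from the file's
# rsync block checksum. A standalone validity check under that assumption:
def copy_is_valid(root, copy_hash):
    copy_path = join(get_copies_dir(root), copy_hash)
    return exists(copy_path) and Rsync.hash_from_block_checksum(
        Rsync.block_checksum(copy_path)) == copy_hash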
def delete_directory(self, full_path, events_file_id=None):
    full_path = unicodedata.normalize('NFC', full_path)
    with self._storage.create_session(read_only=False,
                                      locked=True) as session:
        file, _full_path = self._get_file_by_id(events_file_id, session)
        if file:
            full_path = _full_path
        elif events_file_id is not None:
            logger.warning("Skipping directory deletion because "
                           "directory with same events_file_id not found")
            return

        rel_path = self._path_converter.create_relpath(full_path)
        files = self._storage.get_known_folder_children(rel_path,
                                                        session=session)
        try:
            temp_path = join(self._temp_dir, basename(full_path))
            if isdir(temp_path):
                remove_dir(temp_path, suppress_not_exists_exception=True)
            elif isfile(temp_path):
                remove_file(temp_path)
            if isdir(full_path):
                os.rename(full_path, temp_path)
                try:
                    remove_dir(temp_path,
                               suppress_not_exists_exception=True)
                except Exception:
                    logger.debug("Dir %s delete failed", temp_path)
        except OSError as e:
            logger.warning("Can't remove dir %s. Reason: %s", full_path, e)
            if e.errno == errno.EACCES:
                self._raise_access_denied(full_path)
            elif e.errno != errno.ENOENT:   # directory does not exist
                raise e

        deleted_paths = [f.relative_path for f in files]
        self._storage.delete_known_folder_children(rel_path,
                                                   session=session)
        for path in deleted_paths:
            self.file_deleted.emit(path)
def run(self):
    while True:
        try:
            # get task from finish queue
            task = self.que_manager.getQue(self.get_que_name)
            if task is None:
                time.sleep(0.5)
                continue
            log_util.debug("get one task from ingest queue: %s", task)
            # ingest or delete image
            task = json.loads(task.decode())
            if task['ingest']:
                # ingest image
                im_type = 'jpg'
                if image_util.is_gif(task['img_path']):
                    im_type = 'gif'
                self.ingest_one(task['img_path'], im_type)
            else:
                # delete image
                utils.remove_file(task['img_path'])
        except Exception as e:
            log_util.error("Ingestor run error: %s", str(e))
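# Hedged producer sketch (hypothetical; the actual queue key is not shown
# in this code, so 'ingest' below is an assumption). Tasks on the ingest
# queue are JSON objects shaped like those consumed in run() above:
ingest_task = {'img_path': '/data/images/0001.jpg', 'ingest': True}
que_manager.putQue(json.dumps(ingest_task), utils.msg_queues['ingest'])
# A deletion request is the same shape with 'ingest': False.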
def upload(path):
    # uses function attributes to track progress:
    # upload.size, upload.progress, upload.stop
    upload.size = op.getsize(path)
    res = self._parent.web_api.upload_file(path, "application/zip",
                                           callback)
    was_error = False
    filename = ""   # default, in case the upload was stopped mid-error
    msg = tr("Can't upload archive file")
    if res and "result" in res:
        if res["result"] == "success":
            filename = res.get("file_name", "")
        else:
            was_error = True
            msg = str(res.get("info", msg))
    else:
        was_error = True
    if was_error and not upload.stop:
        self._parent.show_tray_notification(msg)
        self._sending_error.emit()
        raise SendingError(msg)

    remove_file(path)
    return filename
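# Hedged glue sketch (hypothetical driver, not from the source): upload()
# follows the same function-attribute protocol as archive(), and removes
# the archive file itself once the upload finishes.
upload.size = 0
upload.progress = 0
upload.stop = False
archive_path = archive()
if archive_path:
    uploaded_name = upload(archive_path)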
def delete_old_signatures(self, signatures_dir, delete_all=False):
    # we believe that signatures dir contains only signature files
    # and no subdirs
    try:
        signatures_to_delete = os.listdir(signatures_dir)
    except Exception as e:
        logger.warning("Can't delete old signatures. Reason: %s", e)
        return

    if not delete_all:
        # taking storage lock to prevent adding new signatures
        # during deletion
        with self._storage.create_session(read_only=False,
                                          locked=True) as session:
            # materialize the result while the session is open: a lazy
            # filter() would query the session after it is closed
            signatures_to_delete = [
                h for h in signatures_to_delete
                if not self._storage.hash_in_storage(h, session=session)
            ]
    try:
        list(map(lambda s: remove_file(join(signatures_dir, s)),
                 signatures_to_delete))
    except Exception as e:
        logger.warning("Can't delete old signatures. Reason: %s", e)
def _download_update_job(self, req):
    if self._stopped:
        return

    logger.debug("Update download")
    # don't shadow the os module with the platform name
    system = get_platform()
    if system == 'Windows':
        suffix = '.exe'
    elif system == 'Darwin':
        suffix = '.zip'
    else:
        suffix = ''
    update_file = NamedTemporaryFile(prefix='Pvtbox_', suffix=suffix,
                                     delete=False)
    size = \
        int(float(req.headers.get('content-length', 0)) / 1024 / 1024) + 1
    downloaded = 0
    checksum = hashlib.md5()
    self.downloading_update.emit(downloaded, size)
    logger.debug("Downloading update, %s of %s", downloaded, size)
    try:
        for chunk in req.iter_content(chunk_size=1024 * 1024):
            if self._stopped:
                break
            if chunk:   # filter out keep-alive new chunks
                update_file.write(chunk)
                checksum.update(chunk)
                downloaded += 1
                if not self._stopped:
                    self.downloading_update.emit(downloaded, size)
                    logger.debug("Downloading update, %s of %s",
                                 downloaded, size)
    except Exception as e:
        logger.error("Error downloading update %s", e)
        self._status = UPDATER_STATUS_DOWNLOAD_ERROR
        if not self._stopped:
            self.emit_status()
    finally:
        update_file.close()

    if self._stopped:
        return

    success = checksum.hexdigest() == self._md5
    if success:
        logger.debug("Update downloaded successfully, hashsum matches")
        self._update_file_path = update_file.name
        self._status = UPDATER_STATUS_READY
    else:
        logger.warning(
            "Update download failed: hashsum mismatch, "
            "expected: %s, actual: %s",
            self._md5, checksum.hexdigest())
        self._status = UPDATER_STATUS_DOWNLOAD_ERROR
        remove_file(update_file.name)

    self.emit_status()
    self._downloading_update = False
    self._update_ready = success
    self.update_ready.emit(success)
def _delete(self, dirs=(), files=()):
    # immutable defaults avoid the shared-mutable-default-argument pitfall
    for dir_path in dirs:
        remove_dir(dir_path)
    for file_path in files:
        remove_file(file_path)
def _create_file(self, src_full_path, dst_full_path,
                 silent, file_hash, events_file_id, search_by_id,
                 wrong_file_id, is_offline=True):
    with self._storage.create_session(read_only=False,
                                      locked=True) as session:
        file = None
        file_exists = False
        was_updated = True
        if search_by_id:
            file, _full_path = self._get_file_by_id(events_file_id,
                                                    session)
            if file:
                dst_full_path = _full_path

        assert exists(dirname(dst_full_path))
        hard_path = self.get_hard_path(dst_full_path, is_offline)
        if not file:
            file = self._storage.get_known_file(dst_full_path,
                                                is_folder=False,
                                                session=session)
        if file and events_file_id and file.events_file_id and \
                file.events_file_id != events_file_id and \
                wrong_file_id:
            logger.error("Wrong file id for %s. Expected %s. Got %s",
                         dst_full_path, events_file_id,
                         file.events_file_id)
            raise wrong_file_id(dst_full_path,
                                events_file_id,
                                file.events_file_id)

        if file:
            file_exists = file.file_hash == file_hash and \
                (exists(dst_full_path) and is_offline or
                 exists(hard_path) and not is_offline)
            logger.debug(
                "The fact that file %s with same hash "
                "already exists in storage and filesystem is %s",
                dst_full_path, file_exists)
        if file is None:
            # if search_by_id and wrong_file_id:
            #     logger.error("Wrong file id for %s. Expected %s. Got None",
            #                  dst_full_path, events_file_id)
            #     raise wrong_file_id(dst_full_path,
            #                         events_file_id,
            #                         None)
            file = self._storage.get_new_file(dst_full_path,
                                              False,
                                              session=session)
            was_updated = False

        old_hash = file.file_hash
        signature = None
        if not file_exists:
            if src_full_path:
                # create file from copy
                if not exists(get_signature_path(file_hash)):
                    signature = Rsync.block_checksum(src_full_path)
                tmp_full_path = self._get_temp_path(src_full_path)
                copy_file(src_full_path, tmp_full_path)
                try:
                    remove_file(dst_full_path)
                    os.rename(tmp_full_path, dst_full_path)
                    copy_time(dst_full_path + FILE_LINK_SUFFIX,
                              dst_full_path)
                    remove_file(dst_full_path + FILE_LINK_SUFFIX)
                except Exception as e:
                    logger.warning("Can't rename to dst file %s. "
                                   "Reason: %s", dst_full_path, e)
                    try:
                        remove_file(tmp_full_path)
                    except Exception:
                        pass
                    raise e
            else:
                create_empty_file(hard_path)
                if not is_offline:
                    self.write_events_file_id(hard_path, events_file_id)
                    set_ext_invisible(hard_path)
                if hard_path.endswith(FILE_LINK_SUFFIX):
                    copy_time(dst_full_path, hard_path)
                    remove_file(dst_full_path)
                else:
                    copy_time(hard_path, dst_full_path)
                    remove_file(dst_full_path + FILE_LINK_SUFFIX)

        if silent:
            file.mtime = os.stat(hard_path).st_mtime
            file.size = os.stat(hard_path).st_size
            file.file_hash = file_hash
            file.events_file_id = events_file_id
            file.was_updated = was_updated
            logger.debug("Saving file. id=%s", file.events_file_id)
            self._storage.save_file(file, session=session)
            if src_full_path and signature:
                # create file from copy
                self._storage.update_file_signature(file, signature)

        if was_updated:
            self.file_modified.emit(file.relative_path, file.mtime)

    return old_hash