def _on_new_event(self, fs_event):
    # Suppress the event if the file plus its signature would not fit
    # on the disk that holds the source path
    if fs_event.file_size + get_signature_file_size(fs_event.file_size) > \
            get_free_space_by_filepath(fs_event.src):
        self.no_disk_space.emit(fs_event, fs_event.src, False)
        self.event_suppressed(fs_event)
        return

    file_recent_copy_name = FilePath(
        join(get_copies_dir(self._root),
             'recent_copy_' + str(fs_event.id)))
    fs_event.file_recent_copy = file_recent_copy_name
    recent_copy_longpath = FilePath(file_recent_copy_name).longpath
    try:
        copy_file(FilePath(fs_event.src).longpath, recent_copy_longpath)
    except (OSError, IOError) as e:
        if e.errno == errno.ENOSPC:
            self.no_disk_space.emit(fs_event, fs_event.src, True)
            self.event_suppressed(fs_event)
            return

        self.event_returned(fs_event)
        return

    # Return the event for reprocessing if the file changed size
    # while the recent copy was being made
    recent_copy_size = stat(recent_copy_longpath).st_size
    if recent_copy_size != fs_event.file_size:
        self.event_returned(fs_event)
        return

    self.event_passed(fs_event)
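# A minimal sketch, under assumptions, of the free-space helper used
# above; the real implementation is not shown in this section, so the
# name and body below are illustrative only.
def _sketch_get_free_space_by_filepath(path):
    import os.path as op
    import shutil

    # Free bytes on the filesystem that contains the given path
    return shutil.disk_usage(op.dirname(path)).free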
def _on_download_success(self, task_id, file_info, download_path):
    logger.info("Download task SUCCESS obj_id='%s'", task_id)
    # remove successful task
    self._tasks[self._current_share_hash].remove(task_id)
    self._downloaded_tasks[self._current_share_hash].add(task_id)
    copy_file(download_path, file_info.fullname)
    self._finish_task_download()
def make_copy_from_existing_files(self, copy_hash):
    copy_full_path = join(get_copies_dir(self._root), copy_hash)
    if exists(copy_full_path):
        return True

    tmp_full_path = self._get_temp_path(copy_full_path)
    with self._storage.create_session(read_only=True,
                                      locked=False) as session:
        excludes = []
        while True:
            # Look for a known file with the wanted hash,
            # skipping candidates that have already failed
            file = self._storage.get_file_by_hash(
                copy_hash, exclude=excludes, session=session)
            if not file:
                return False

            file_path = self._path_converter.create_abspath(
                file.relative_path)
            if not exists(file_path):
                excludes.append(file.id)
                continue

            try:
                copy_file(file_path, tmp_full_path)
                # Verify that the candidate still has the wanted content
                tmp_hash = Rsync.hash_from_block_checksum(
                    Rsync.block_checksum(tmp_full_path))
                if tmp_hash == copy_hash:
                    os.rename(tmp_full_path, copy_full_path)
                    return True
                else:
                    excludes.append(file.id)
                    remove_file(tmp_full_path)
            except Exception as e:
                logger.warning("Can't operate tmp file %s. Reason: (%s)",
                               tmp_full_path, e)
                if file.id not in excludes:
                    excludes.append(file.id)
                try:
                    remove_file(tmp_full_path)
                except Exception:
                    # Could not clean up; continue with a fresh temp path
                    tmp_full_path = self._get_temp_path(copy_full_path)
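# A minimal sketch of the temp-path helper used above, assuming it only
# needs to produce a unique sibling of the final destination for the
# write-then-rename pattern; the real implementation is not shown here.
def _sketch_get_temp_path(final_path):
    import uuid

    # Unique name next to the destination so os.rename stays on the
    # same filesystem and remains atomic
    return final_path + '.tmp_' + uuid.uuid4().hex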
def prepare_data_in_folders(self, x, y, path):
    """
    Copies images into their corresponding class folders.

    :param x: list of image paths
    :param y: list of classes/labels
    :param path: base path where class folders will be created
    """
    full_path = os.path.abspath(path)
    if os.path.exists(full_path):
        # wipe data left over from previous executions
        remove_dir(full_path)
    create_dir(full_path)

    for f, label in zip(x, y):
        fname = os.path.basename(f)
        cls = get_value(label, self.classes)
        dst = os.path.join(full_path, cls)
        if not os.path.exists(dst):
            create_dir(dst)
        dst = os.path.join(dst, fname)
        copy_file(f, dst)
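# Example usage (hypothetical paths and labels), assuming `self.classes`
# maps each label in `y` to a class name via `get_value`:
#
#   prep.prepare_data_in_folders(
#       x=['/data/raw/cat_001.jpg', '/data/raw/dog_042.jpg'],
#       y=[0, 1],
#       path='data/train')
#
# Resulting layout: data/train/<class_name>/<image_name> per image.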
def migrate(self, old_dir, new_dir):
    logger.info("Starting sync dir migration from %s to %s",
                old_dir, new_dir)
    old_dir = FilePath(old_dir).longpath
    new_dir = FilePath(new_dir).longpath
    old_files = get_filelist(old_dir)
    old_dirs = get_dir_list(old_dir)
    # +1 accounts for creating the new root directory itself
    total_count = len(old_files) + len(old_dirs) + 1
    progress = 0
    sent_progress = 0
    logger.debug("Migration progress: %s/%s (%s%%)",
                 0, total_count, sent_progress)

    count = 1
    copied_dirs = []
    copied_files = []
    make_dirs(new_dir, is_folder=True)
    copied_dirs.append(new_dir)
    logger.debug("Migration progress: %s/%s (%s%%)",
                 count, total_count, sent_progress)
    self.progress.emit(sent_progress)

    for src_dir in old_dirs:
        if self._cancelled.isSet():
            self._delete(dirs=copied_dirs)
            logger.debug("Migration done because cancelled")
            self.done.emit()
            return

        new_dir_path = ensure_unicode(op.join(
            new_dir, op.relpath(src_dir, start=old_dir)))
        try:
            make_dirs(new_dir_path, is_folder=True)
        except Exception as e:
            logger.error("Make dirs error: %s", e)
            self.failed.emit(str(e))
            self._delete(dirs=copied_dirs)
            return

        copied_dirs.append(new_dir_path)
        count += 1
        progress = int(count / total_count * 100)
        if progress > sent_progress:
            sent_progress = progress
            self.progress.emit(sent_progress)
        logger.debug("Migration progress: %s/%s (%s%%)",
                     count, total_count, sent_progress)

    for src_file in old_files:
        if self._cancelled.isSet():
            self._delete(dirs=copied_dirs, files=copied_files)
            logger.debug("Migration done because cancelled")
            self.done.emit()
            return

        if src_file in HIDDEN_FILES:
            continue

        new_file_path = ensure_unicode(op.join(
            new_dir, op.relpath(src_file, start=old_dir)))
        logger.info("Copying file %s to %s", src_file, new_file_path)
        try:
            copy_file(src_file, new_file_path, preserve_file_date=True)
        except Exception as e:
            logger.error("Copy file error: %s", e)
            self.failed.emit(str(e))
            self._delete(dirs=copied_dirs, files=copied_files)
            return

        copied_files.append(new_file_path)
        count += 1
        progress = int(count / total_count * 100)
        if progress > sent_progress:
            sent_progress = progress
            self.progress.emit(sent_progress)
        logger.debug("Migration progress: %s/%s (%s%%)",
                     count, total_count, sent_progress)

    logger.debug("Saving new config")
    self._cfg.set_settings(dict(sync_directory=FilePath(new_dir)))
    self._cfg.sync()
    logger.info("New config saved")
    logger.debug("Updating shortcuts")
    create_shortcuts(new_dir)
    remove_shortcuts(old_dir)
    logger.debug("Resetting custom folder icons")
    reset_all_custom_folder_icons(old_dir)
    logger.debug("Migration done")
    self.done.emit()
    logger.info("Migration thread end")
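# Worked example of the progress arithmetic above: with 49 directories
# and 150 files, total_count = 49 + 150 + 1 = 200; after 50 processed
# entries, progress = int(50 / 200 * 100) = 25. The `sent_progress`
# guard ensures each percentage value is emitted at most once.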
def add_to_sync_dir(paths, move, callback):
    '''
    Copies the given paths (files or directories) into the sync
    directory. If a destination path already exists, a new name
    in the sync directory is generated

    @param paths to be copied [list]
    '''
    # Get sync directory path
    root = params.cfg.sync_directory
    if not root:
        logger.warning("Sync directory is not set")
        return

    logger.debug("Copying %d paths", len(paths))
    signals.show.emit()
    result_paths = []
    offline_paths = []
    online_paths = []
    for path in paths:
        is_file = op.isfile(path)
        path = FilePath(path)
        # Path is in sync directory already
        if path in FilePath(root):
            logger.debug("Path '%s' is in sync directory '%s' already",
                         path, root)
            result_paths.append(path)
            if is_file and not move:
                if path.endswith(FILE_LINK_SUFFIX):
                    online_paths.append(path)
                else:
                    offline_paths.append(path)
            continue

        if not op.exists(path.longpath):
            logger.warning(
                "Path requested for copying does not exist "
                "or not enough rights are granted to access it: '%s'",
                FilePath(path))
            Application.show_tray_notification(
                tr("Failed to copy to synchronized directory. "
                   "Specified path does not exist."),
                tr("Sharing"))
            continue

        basename = op.basename(path)
        destname = get_next_name(FilePath(op.join(root, basename)).longpath)
        if not _check_free_space(path, root, move, is_file):
            continue

        file_dir = 'file' if is_file else 'dir'
        logger.debug("Copying (moving) %s '%s' into sync directory...",
                     file_dir, path)
        # Emit corresponding signal
        signals.copying_started.emit(path)
        # Copy or move file or directory into sync directory
        try:
            if move:
                shutil.move(path, destname)
            elif is_file:
                copy_file(path, destname)
            else:
                shutil.copytree(path, destname)
        except Exception as e:
            logger.error("Failed to copy (move) '%s' "
                         "into sync directory (%s)", path, e)
            signals.copying_failed.emit(path)
            continue

        # Emit corresponding signal
        signals.copying_finished.emit(path)
        result_paths.append(destname)
        logger.debug("Copied successfully")

    if offline_paths:
        signals.offline_paths.emit(offline_paths, False, True)
    if online_paths:
        signals.offline_paths.emit(online_paths, True, True)

    logger.debug("All paths copied")
    if callable(callback):
        callback(result_paths)
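# A minimal sketch of the collision-avoiding name helper used above,
# assuming a " (N)" suffix scheme; the real implementation is not shown
# in this section, so treat the naming convention as an assumption.
def _sketch_get_next_name(path):
    import os.path as op

    if not op.exists(path):
        return path
    stem, ext = op.splitext(path)
    n = 2
    # Append an increasing counter until the name is free
    while op.exists('{} ({}){}'.format(stem, n, ext)):
        n += 1
    return '{} ({}){}'.format(stem, n, ext)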
def _create_file(self, src_full_path, dst_full_path,
                 silent, file_hash, events_file_id, search_by_id,
                 wrong_file_id, is_offline=True):
    with self._storage.create_session(
            read_only=False, locked=True) as session:
        file = None
        file_exists = False
        was_updated = True
        if search_by_id:
            file, _full_path = self._get_file_by_id(
                events_file_id, session)
            if file:
                dst_full_path = _full_path

        assert exists(dirname(dst_full_path))

        hard_path = self.get_hard_path(dst_full_path, is_offline)
        if not file:
            file = self._storage.get_known_file(
                dst_full_path, is_folder=False, session=session)
        if file and events_file_id and file.events_file_id and \
                file.events_file_id != events_file_id and \
                wrong_file_id:
            logger.error("Wrong file id for %s. Expected %s. Got %s",
                         dst_full_path, events_file_id,
                         file.events_file_id)
            raise wrong_file_id(dst_full_path,
                                events_file_id,
                                file.events_file_id)

        if file:
            file_exists = (file.file_hash == file_hash and
                           ((exists(dst_full_path) and is_offline) or
                            (exists(hard_path) and not is_offline)))
            logger.debug(
                "File %s with same hash already exists "
                "in storage and filesystem: %s",
                dst_full_path, file_exists)

        if file is None:
            # if search_by_id and wrong_file_id:
            #     logger.error("Wrong file id for %s. Expected %s. Got None",
            #                  dst_full_path, events_file_id)
            #     raise wrong_file_id(dst_full_path,
            #                         events_file_id,
            #                         None)
            file = self._storage.get_new_file(
                dst_full_path, False, session=session)
            was_updated = False

        old_hash = file.file_hash
        signature = None
        if not file_exists:
            if src_full_path:
                # create file from copy
                if not exists(get_signature_path(file_hash)):
                    signature = Rsync.block_checksum(src_full_path)
                tmp_full_path = self._get_temp_path(src_full_path)
                copy_file(src_full_path, tmp_full_path)
                try:
                    remove_file(dst_full_path)
                    os.rename(tmp_full_path, dst_full_path)
                    copy_time(dst_full_path + FILE_LINK_SUFFIX,
                              dst_full_path)
                    remove_file(dst_full_path + FILE_LINK_SUFFIX)
                except Exception as e:
                    logger.warning("Can't rename to dst file %s. "
                                   "Reason: %s", dst_full_path, e)
                    try:
                        remove_file(tmp_full_path)
                    except Exception:
                        pass
                    raise e
            else:
                # no source copy: create an empty placeholder
                create_empty_file(hard_path)
                if not is_offline:
                    self.write_events_file_id(hard_path, events_file_id)
                    set_ext_invisible(hard_path)
                if hard_path.endswith(FILE_LINK_SUFFIX):
                    copy_time(dst_full_path, hard_path)
                    remove_file(dst_full_path)
                else:
                    copy_time(hard_path, dst_full_path)
                    remove_file(dst_full_path + FILE_LINK_SUFFIX)

        if silent:
            # silent mode: record on-disk mtime/size so the file
            # monitor does not report this write as a new change
            file.mtime = os.stat(hard_path).st_mtime
            file.size = os.stat(hard_path).st_size

        file.file_hash = file_hash
        file.events_file_id = events_file_id
        file.was_updated = was_updated
        logger.debug("Saving file. id=%s", file.events_file_id)
        self._storage.save_file(file, session=session)
        if src_full_path and signature:
            # create file from copy
            self._storage.update_file_signature(file, signature)

        if was_updated:
            self.file_modified.emit(file.relative_path, file.mtime)

        return old_hash
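# A plausible sketch, inferred from the branches above, of the hard-path
# helper: offline files live at the real destination path, online-only
# placeholders at the destination path plus FILE_LINK_SUFFIX. This is an
# assumption; the real helper is not shown in this section.
def _sketch_get_hard_path(dst_full_path, is_offline):
    return dst_full_path if is_offline else dst_full_path + FILE_LINK_SUFFIX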
def _accept_patch(patch_info, patch_data, unpatched_file, root):
    file_blocks_hashes = SortedDict()
    blocksize = patch_info['blocksize']
    temp_name = os.path.join(get_patches_dir(root),
                             '.patching_' + generate_uuid())

    blocks = SortedDict(
        (int(k), v) for k, v in patch_info['blocks'].items())
    source_file = None
    if op.exists(unpatched_file):
        source_file = open(unpatched_file, "rb")
    with open(temp_name, "wb") as temp_file:
        # count = 0
        # min = 999999999.0
        # max = 0.0
        # avg = 0.0
        # sum = 0.0
        for offset, block in blocks.items():
            # count += 1
            # start_time = time.time()
            block_offset = int(block['offset'])
            if block['new']:
                # block payload is carried in the patch itself
                patch_data.seek(block_offset)
                data_size = block['data_size']
                data = patch_data.read(data_size)
            elif block['from_patch']:
                # block duplicates another patched block; reuse its data
                patch_offset = blocks[block_offset]['offset']
                data_size = blocks[block_offset].get(
                    'data_size', blocksize)
                patch_data.seek(patch_offset)
                data = patch_data.read(data_size)
            else:
                # block is unchanged; take it from the source file
                if source_file is None:
                    raise IOError("Source file not found")
                source_file.seek(block_offset)
                data = source_file.read(blocksize)
            temp_file.seek(offset)
            temp_file.write(data)
            file_blocks_hashes[offset] = block['hash']
            # diff = time.time() - start_time
            # min = diff if diff < min else min
            # max = diff if diff > max else max
            # avg = diff if avg == 0 else (avg + diff) / 2
            # sum += diff
            # logger.debug(
            #     'processed block %s:%s in %s', count, len(blocks), diff)
        # logger.debug(
        #     'processing blocks time:%s, min:%s, max:%s, avg:%s',
        #     sum, min, max, avg)
    if source_file:
        source_file.close()

    logger.debug('calculating patched file signature')
    file_signature = Rsync.block_checksum(temp_name, blocksize=blocksize)
    logger.debug('calculated patched file signature')
    if file_signature != file_blocks_hashes:
        remove_file(temp_name)
        raise IOError(
            "Invalid patch result, expected signature: {}, actual: {}"
            .format(file_blocks_hashes, file_signature))

    new_hash = patch_info['new_hash']
    logger.debug('moving patched file')
    copy = join(get_copies_dir(root), new_hash)
    if not exists(copy):
        copy_file(temp_name, copy)
    shutil.move(temp_name, unpatched_file)
    logger.debug('moved patched file')
    return new_hash, file_blocks_hashes, patch_info['old_hash']
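# Shape of `patch_info` as consumed above, reconstructed from the reads
# in this function (keys are real, the sample values are illustrative):
#
#   {
#       'blocksize': 65536,
#       'old_hash': '<hash of the unpatched file>',
#       'new_hash': '<hash of the patched file>',
#       'blocks': {
#           # target offset -> block descriptor
#           '0': {'new': True, 'offset': 0,          # offset in patch data
#                 'data_size': 65536, 'hash': '<block checksum>'},
#           '65536': {'new': False, 'from_patch': True,
#                     'offset': 0,                   # target offset of the
#                     'hash': '<block checksum>'},   # reused 'new' block
#           '131072': {'new': False, 'from_patch': False,
#                      'offset': 131072,             # offset in source file
#                      'hash': '<block checksum>'},
#       },
#   }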