def create_dl_jobs(node_id: str, local_path: str,
                   exclude: 'List[re.Pattern]', jobs: list) -> int:
    """Populates the passed ``jobs`` list with download partials;
    recurses into folders via traverse_dl_folder."""

    local_path = local_path if local_path else ''

    node = cache.get_node(node_id)
    if not node.is_available():
        return 0

    if node.is_folder():
        return traverse_dl_folder(node_id, local_path, exclude, jobs)

    loc_name = node.name

    for reg in exclude:
        if re.match(reg, loc_name):
            logger.info('Skipping download of "%s" because of exclusion pattern.' % loc_name)
            return 0

    flp = os.path.join(local_path, loc_name)
    if os.path.isfile(flp):
        logger.info('Skipping download of existing file "%s".' % loc_name)
        if os.path.getsize(flp) != node.size:
            logger.info('Skipped file "%s" has a different size than the local file.' % loc_name)
            return SIZE_MISMATCH
        return 0

    prog = progress.FileProgress(node.size)
    fo = partial(download_file, node_id, local_path, pg_handler=prog)
    jobs.append(fo)

    return 0
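
# A hypothetical sketch of how the jobs produced above might be consumed,
# modeled on overwrite_action below. The function name, the argparse
# attribute names (node, path, exclude, max_retries), and the flag-style
# OR-combination of return values are assumptions, not taken from this module:
def _download_action_sketch(args: argparse.Namespace) -> int:
    excl = [re.compile(e) for e in args.exclude]
    jobs = []
    # create_dl_jobs may return e.g. SIZE_MISMATCH before any job runs
    ret_val = create_dl_jobs(args.node, args.path, excl, jobs)
    ql = QueuedLoader(max_retries=args.max_retries)
    ql.add_jobs(jobs)
    return ret_val | ql.start()  # assumed: combinable return-code flags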
def create_upload_jobs(dirs: list, path: str, parent_id: str, overwr: bool,
                       force: bool, dedup: bool, exclude: list,
                       exclude_paths: list, jobs: list) -> int:
    """Creates an upload job if the passed path is a file; delegates to
    directory traversal otherwise. Detects soft links that point to an
    already queued directory.

    :param dirs: list of inodes of the directories traversed so far
    """

    if os.path.realpath(path) in [os.path.realpath(p) for p in exclude_paths]:
        logger.info('Skipping upload of path "%s".' % path)
        return 0

    if not os.access(path, os.R_OK):
        logger.error('Path "%s" is not accessible.' % path)
        return INVALID_ARG_RETVAL

    if os.path.isdir(path):
        ino = os.stat(path).st_ino
        if ino in dirs:
            logger.warning('Duplicate directory detected: "%s".' % path)
            return DUPLICATE_DIR
        dirs.append(ino)
        return traverse_ul_dir(dirs, path, parent_id, overwr, force, dedup,
                               exclude, exclude_paths, jobs)
    elif os.path.isfile(path):
        short_nm = os.path.basename(path)

        for reg in exclude:
            if re.match(reg, short_nm):
                logger.info('Skipping upload of "%s" because of exclusion pattern.' % short_nm)
                return 0

        prog = progress.FileProgress(os.path.getsize(path))
        fo = partial(upload_file, path, parent_id, overwr, force, dedup,
                     pg_handler=prog)
        jobs.append(fo)

    return 0
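
# The inode check above is what stops symlink cycles: os.stat() follows the
# link, so a directory reachable a second time via a soft link yields the
# same st_ino. A self-contained, hypothetical demonstration (POSIX-only,
# since it creates a symlink); not part of the original module:
def _demo_inode_check():
    import os, tempfile

    base = tempfile.mkdtemp()
    sub = os.path.join(base, 'sub')
    os.mkdir(sub)
    os.symlink(base, os.path.join(sub, 'loop'))  # cycle: sub/loop -> base

    dirs = []
    for path in (base, os.path.join(sub, 'loop')):
        ino = os.stat(path).st_ino  # stat() follows the symlink
        print(path, 'duplicate' if ino in dirs else 'new')
        dirs.append(ino)
    # prints the base directory as 'new' and sub/loop as 'duplicate'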
def overwrite_action(args: argparse.Namespace) -> int:
    if not os.path.isfile(args.file):
        logger.error('Invalid file: "%s".' % args.file)
        return INVALID_ARG_RETVAL

    prog = progress.FileProgress(os.path.getsize(args.file))
    ql = QueuedLoader(max_retries=args.max_retries)
    job = partial(overwrite, args.node, args.file, pg_handler=prog)
    ql.add_jobs([job])

    return ql.start()
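
# The jobs queued here are argument-frozen callables, so a runner only has
# to invoke them. A toy stand-in for QueuedLoader's job loop (its real
# queueing and retry behaviour is not shown in this section); the function
# name and failure flag are hypothetical:
def _run_jobs(jobs: list, max_retries: int = 0) -> int:
    ret_val = 0
    for job in jobs:
        for attempt in range(max_retries + 1):
            try:
                job()  # e.g. a partial of overwrite or upload_file
                break
            except Exception:
                if attempt == max_retries:
                    ret_val |= 1  # hypothetical failure flag
    return ret_val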
def upload_stream_action(args: argparse.Namespace) -> int:
    if not cache.get_node(args.parent):
        logger.critical('Invalid upload folder.')
        return INVALID_ARG_RETVAL

    # the stream's total size is unknown up front, hence FileProgress(0)
    prog = progress.FileProgress(0)
    ql = QueuedLoader(max_retries=0)
    job = partial(upload_stream, sys.stdin.buffer, args.name, args.parent,
                  args.deduplicate, pg_handler=prog)
    ql.add_jobs([job])

    return ql.start()
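
# upload_stream receives sys.stdin.buffer directly, so it has to consume the
# data incrementally. A chunked read loop of the kind such an uploader might
# use; the helper name and chunk size are assumptions for illustration:
def _read_chunks(stream, chunk_size: int = 64 * 1024):
    """Yields a binary stream in fixed-size chunks until EOF."""
    while True:
        chunk = stream.read(chunk_size)
        if not chunk:
            break
        yield chunk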