Example #1
def overwrite_action(args: argparse.Namespace) -> int:
    if not os.path.isfile(args.file):
        logger.error('Invalid file.')
        return INVALID_ARG_RETVAL

    # Progress handler sized to the local file; the job reports progress through it.
    prog = progress.FileProgress(os.path.getsize(args.file))
    ql = QueuedLoader(max_retries=args.max_retries)
    # Bind all arguments up front; QueuedLoader invokes the partial when the job runs.
    job = partial(overwrite, args.node, args.file, pg_handler=prog)
    ql.add_jobs([job])

    return ql.start()
Example #2
def download_action(args: argparse.Namespace) -> int:
    excl_re = regex_helper(args)  # exclusion regexes built from the CLI arguments

    jobs = []
    ret_val = 0
    # Collect download jobs into the list and OR in any error flags.
    ret_val |= create_dl_jobs(args.node, args.path, excl_re, jobs)

    ql = QueuedLoader(args.max_connections, max_retries=args.max_retries)
    ql.add_jobs(jobs)

    return ret_val | ql.start()  # combine job-creation errors with the loader's exit status
Example #3
def upload_stream_action(args: argparse.Namespace) -> int:
    if not cache.get_node(args.parent):
        logger.critical('Invalid upload folder')
        return INVALID_ARG_RETVAL

    prog = progress.FileProgress(0)  # total size unknown when reading from stdin
    ql = QueuedLoader(max_retries=0)  # no retries: the stream can only be read once
    job = partial(upload_stream,
                  sys.stdin.buffer, args.name, args.parent, args.deduplicate, pg_handler=prog)
    ql.add_jobs([job])

    return ql.start()
Example #4
def upload_action(args: argparse.Namespace) -> int:
    excl_re = regex_helper(args)

    jobs = []
    ret_val = 0
    for path in args.path:
        # Skip missing paths, but record the error in the combined return value.
        if not os.path.exists(path):
            logger.error('Path "%s" does not exist.' % path)
            ret_val |= INVALID_ARG_RETVAL
            continue

        ret_val |= create_upload_jobs(path, args.parent, args.overwrite, args.force, args.deduplicate, excl_re, jobs)

    ql = QueuedLoader(args.max_connections, max_retries=args.max_retries)
    ql.add_jobs(jobs)

    return ret_val | ql.start()
Example #5
def upload_action(args: argparse.Namespace) -> int:
    if not cache.get_node(args.parent):
        logger.critical('Invalid upload folder.')
        return INVALID_ARG_RETVAL

    excl_re = regex_helper(args)

    jobs = []
    ret_val = 0
    for path in args.path:
        if not os.path.exists(path):
            logger.error('Path "%s" does not exist.' % path)
            ret_val |= INVALID_ARG_RETVAL
            continue

        ret_val |= create_upload_jobs([], path, args.parent, args.overwrite, args.force,
                                      args.deduplicate, excl_re, args.exclude_path, jobs)

    ql = QueuedLoader(args.max_connections, max_retries=args.max_retries)
    ql.add_jobs(jobs)

    return ret_val | ql.start()
Example #6
def upload_action(args: argparse.Namespace) -> int:
    if not cache.get_node(args.parent):
        logger.critical('Invalid upload folder.')
        return INVALID_ARG_RETVAL

    excl_re = regex_helper(args)

    jobs = []
    ret_val = 0
    for path in args.path:
        if not os.path.exists(path):
            logger.error('Path "%s" does not exist.' % path)
            ret_val |= INVALID_ARG_RETVAL
            continue

        ret_val |= create_upload_jobs([], path, args.parent, args.overwrite, args.force,
                                      args.deduplicate, args.remove_source_files,
                                      excl_re, args.exclude_path, jobs)

    ql = QueuedLoader(args.max_connections, max_retries=args.max_retries)
    ql.add_jobs(jobs)

    return ret_val | ql.start()
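
All of the actions above share the signature def action(args: argparse.Namespace) -> int and return a bit-flag exit code. Below is a minimal, self-contained sketch of how such a function can be dispatched from an argparse subcommand; the command name and the stub action are hypothetical and not taken from the examples above.

import argparse
import sys


def noop_action(args: argparse.Namespace) -> int:
    # Stand-in for one of the *_action functions above.
    print('would process:', ', '.join(args.path))
    return 0


def main() -> int:
    parser = argparse.ArgumentParser(prog='example')
    subparsers = parser.add_subparsers(dest='command')
    subparsers.required = True

    up = subparsers.add_parser('upload')
    up.add_argument('path', nargs='+')
    up.set_defaults(func=noop_action)  # each subcommand points at its action function

    args = parser.parse_args()
    return args.func(args)  # the action's int return value becomes the exit code


if __name__ == '__main__':
    sys.exit(main())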