def main():
    """Entry point: build the CLI parser, attach plugins, initialize the
    API/cache layers as needed, and dispatch the selected sub-command.

    Exits the process with the action's return value, or INIT_FAILED_RETVAL
    when an initialization step fails.
    """
    opt_parser, subparsers = get_parser()

    # Attach version-compatible plugins. Their messages are buffered because
    # logging is not configured until after the arguments are parsed.
    plugin_messages = [str(plugins.Plugin)]
    for plugin in plugins.Plugin:
        if not plugin.check_version(__version__):
            plugin_messages.append('Script version is not compatible with "%s".' % plugin)
            continue
        attach_log = []
        plugin.attach(subparsers, attach_log)
        plugin_messages.extend(attach_log)

    args = opt_parser.parse_args()
    set_log_level(args)
    for message in plugin_messages:
        logger.info(message)

    set_encoding(force_utf=args.utf)
    check_py_version()

    # Initialize only the layers the chosen action actually needs.
    if args.func not in offline_actions and not common.init(CACHE_PATH):
        sys.exit(INIT_FAILED_RETVAL)
    if args.func not in nocache_actions and not db.init(CACHE_PATH, args.check):
        sys.exit(INIT_FAILED_RETVAL)
    if args.func != sync_action and not check_cache():
        sys.exit(INIT_FAILED_RETVAL)

    format.init(args.color)

    if args.no_wait:
        # hidden flag: disable the back-off delay between API requests
        common.BackOffRequest._wait = lambda: None

    # Resolve remote path arguments ('child'/'parent'/'node') to node IDs
    # before dispatching to the action handler.
    resolve_remote_path_args(args, ['child', 'parent', 'node'],
                             incl_trash=args.action not in no_autores_trash_actions)

    # call appropriate sub-parser action
    if args.func:
        sys.exit(args.func(args))
import unittest
import logging
import os
import io
import random
import string
import mmap

from acdcli.api import account, common, content, metadata, trash, oauth
from acdcli.api.common import RequestError
from acdcli.utils import hashing

logging.basicConfig(level=logging.INFO)

# disable the exponential back-off between requests to speed up the tests
common.BackOffRequest._wait = lambda: None

path = os.path.join(os.path.dirname(__file__), 'cache_files')
common.init(path)


def gen_rand_nm() -> str:
    """Return a random 64-character alphanumeric file name."""
    return str.join('', (random.choice(string.ascii_letters + string.digits) for _ in range(64)))


def gen_rand_sz() -> int:
    """Return a random file size between 1 byte and 32 KiB."""
    return random.randint(1, 32 * 1024)


def gen_rand_file(size=None) -> tuple:
    """Create a file with a random name and random content in the CWD.

    :param size: file size in bytes; a fresh random size is drawn when omitted
    :return: tuple of (file name, size)
    """
    # BUG FIX: the default used to be ``size=gen_rand_sz()``, which is
    # evaluated only once at import time, so every no-argument call produced
    # files of the very same "random" size.
    if size is None:
        size = gen_rand_sz()
    fn = gen_rand_nm()
    with open(fn, 'wb') as f:
        f.write(os.urandom(size))
    return fn, size
def setUp(self):
    # (Re-)initialize the ACD API layer from the test fixture directory before
    # each test. NOTE(review): presumably this re-reads cached OAuth/endpoint
    # data from `path` so each test starts from a known session -- confirm.
    common.init(path)
def main():
    """CLI entry point: re-encode stdout/stderr to UTF-8 if needed, build the
    argument parser, attach plugins, initialize the API and node cache, and
    dispatch the selected sub-command.

    Exits the process with the action's return value, or INIT_FAILED_RETVAL
    when an initialization step fails.
    """
    # Force stdout/stderr to UTF-8 when the encoding is unknown or a tty is
    # not already UTF-8, so non-ASCII node names print cleanly.
    utf_flag = False
    tty_flag = sys.stdout.isatty()
    # BUG FIX: sys.stdout.encoding may be None (e.g. with redirected streams);
    # the old `str.lower(sys.stdout.encoding)` raised TypeError before the
    # `not enc` check below could ever handle that case.
    enc = sys.stdout.encoding
    enc = enc.lower() if enc else None
    if not enc or (tty_flag and enc != 'utf-8'):
        import io
        sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding='utf-8')
        sys.stderr = io.TextIOWrapper(sys.stderr.detach(), encoding='utf-8')
        utf_flag = True

    # reusable argument attached to several sub-parsers below
    max_ret = Argument('--max-retries', '-r', action='store', type=int, default=0,
                       help='set the maximum number of retries [default: 0]')

    opt_parser = argparse.ArgumentParser(
        prog=_app_name, formatter_class=argparse.RawTextHelpFormatter,
        epilog='Hints: \n'
               ' * Remote locations may be specified as path in most cases, e.g. "/folder/file", or via ID \n'
               ' * If you need to enter a node ID that contains a leading dash (minus) sign, '
               'precede it by two dashes and a space, e.g. \'-- -xfH...\'\n'
               ' * actions marked with [+] have optional arguments'
               '')
    opt_parser.add_argument('-v', '--verbose', action='count',
                            help='prints some info messages to stderr; use "-vv" to also get sqlalchemy info')
    opt_parser.add_argument('-d', '--debug', action='count',
                            help='prints info and debug to stderr; use "-dd" to also get sqlalchemy debug messages')
    opt_parser.add_argument('-c', '--color', default=format.ColorMode['never'], choices=format.ColorMode.keys(),
                            help='"never" [default] turns coloring off, '
                                 '"always" turns coloring on '
                                 'and "auto" colors listings when stdout is a tty '
                                 '[uses the Linux-style LS_COLORS environment variable]')
    opt_parser.add_argument('-nw', '--no-wait', action='store_true', help=argparse.SUPPRESS)

    # one sub-parser per action; the chosen handler lands in args.func
    subparsers = opt_parser.add_subparsers(title='action', dest='action')
    subparsers.required = True

    vers_sp = subparsers.add_parser('version', aliases=['v'], help='print version and exit\n')
    vers_sp.set_defaults(func=print_version_action)

    sync_sp = subparsers.add_parser('sync', aliases=['s'],
                                    help='[+] refresh node list cache; necessary for many actions')
    sync_sp.add_argument('--full', '-f', action='store_true', help='force a full sync')
    sync_sp.set_defaults(func=sync_action)

    old_sync_sp = subparsers.add_parser('old-sync', add_help=False)
    old_sync_sp.set_defaults(func=old_sync_action)

    clear_sp = subparsers.add_parser('clear-cache', aliases=['cc'], help='clear node cache [offline operation]\n\n')
    clear_sp.set_defaults(func=clear_action)

    tree_sp = subparsers.add_parser('tree', aliases=['t'], help='[+] print directory tree [offline operation]')
    tree_sp.add_argument('--include-trash', '-t', action='store_true')
    tree_sp.add_argument('node', nargs='?', default=None, help='root node for the tree')
    tree_sp.set_defaults(func=tree_action)

    list_c_sp = subparsers.add_parser('children', aliases=['ls', 'dir'],
                                      help='[+] list folder\'s children [offline operation]\n\n')
    list_c_sp.add_argument('--include-trash', '-t', action='store_true')
    list_c_sp.add_argument('--recursive', '-r', action='store_true')
    list_c_sp.add_argument('node')
    list_c_sp.set_defaults(func=children_action)

    find_sp = subparsers.add_parser('find', aliases=['f'],
                                    help='find nodes by name [offline operation] [case insensitive]')
    find_sp.add_argument('name')
    find_sp.set_defaults(func=find_action)

    find_hash_sp = subparsers.add_parser('find-md5', aliases=['fh'],
                                         help='find files by MD5 hash [offline operation]\n\n')
    find_hash_sp.add_argument('md5')
    find_hash_sp.set_defaults(func=find_md5_action)

    # hidden parent parser that holds arguments shared by upload/download;
    # built on a throwaway ArgumentParser so it never shows up as an action
    dummy_p = argparse.ArgumentParser().add_subparsers()
    re_dummy_sp = dummy_p.add_parser('', add_help=False)
    re_dummy_sp.add_argument('--max-connections', '-x', action='store', type=int, default=1,
                             help='set the maximum concurrent connections [default: 1]')
    max_ret.attach(re_dummy_sp)
    re_dummy_sp.add_argument('--exclude-ending', '-xe', action='append', dest='exclude_fe', default=[],
                             help='exclude files whose endings match the given string, e.g. "bak" [case insensitive]')
    re_dummy_sp.add_argument('--exclude-regex', '-xr', action='append', dest='exclude_re', default=[],
                             # raw string: '\.' is not a valid escape sequence and triggers
                             # a warning on recent Python versions (same runtime value)
                             help='exclude files whose names match the given regular expression,'
                                  r' e.g. "^thumbs\.db$" [case insensitive]')

    upload_sp = subparsers.add_parser('upload', aliases=['ul'], parents=[re_dummy_sp],
                                      help='[+] file and directory upload to a remote destination')
    upload_sp.add_argument('--overwrite', '-o', action='store_true',
                           help='overwrite if local modification time is higher or local ctime is higher than remote '
                                'modification time and local/remote file sizes do not match.')
    upload_sp.add_argument('--force', '-f', action='store_true', help='force overwrite')
    upload_sp.add_argument('--deduplicate', '-d', action='store_true', help='exclude duplicate files from upload')
    upload_sp.add_argument('path', nargs='+', help='a path to a local file or directory')
    upload_sp.add_argument('parent', help='remote parent folder')
    upload_sp.set_defaults(func=upload_action)

    overwrite_sp = subparsers.add_parser('overwrite', aliases=['ov'],
                                         help='overwrite file A [remote] with content of file B [local]')
    max_ret.attach(overwrite_sp)
    overwrite_sp.add_argument('node')
    overwrite_sp.add_argument('file')
    overwrite_sp.set_defaults(func=overwrite_action)

    download_sp = subparsers.add_parser('download', aliases=['dl'], parents=[re_dummy_sp],
                                        help='download a remote folder or file; will skip existing local files\n\n')
    download_sp.add_argument('node')
    download_sp.add_argument('path', nargs='?', default=None, help='local download path [optional]')
    download_sp.set_defaults(func=download_action)

    cr_fo_sp = subparsers.add_parser('create', aliases=['c', 'mkdir'],
                                     help='create folder using an absolute path\n\n')
    cr_fo_sp.add_argument('new_folder', help='an absolute folder path, e.g. "/my/dir/"; trailing slash is optional')
    cr_fo_sp.set_defaults(func=create_action)

    trash_sp = subparsers.add_parser('list-trash', aliases=['lt'],
                                     help='[+] list trashed nodes [offline operation]')
    trash_sp.add_argument('--recursive', '-r', action='store_true')
    trash_sp.set_defaults(func=list_trash_action)

    m_trash_sp = subparsers.add_parser('trash', aliases=['rm'], help='move node to trash')
    m_trash_sp.add_argument('node')
    m_trash_sp.set_defaults(func=trash_action)

    rest_sp = subparsers.add_parser('restore', aliases=['re'], help='restore node from trash\n\n')
    rest_sp.add_argument('node', help='ID of the node')
    rest_sp.set_defaults(func=restore_action)

    move_sp = subparsers.add_parser('move', aliases=['mv'], help='move node A into folder B')
    move_sp.add_argument('child')
    move_sp.add_argument('parent')
    move_sp.set_defaults(func=move_action)

    rename_sp = subparsers.add_parser('rename', aliases=['rn'], help='rename a node\n\n')
    rename_sp.add_argument('node')
    rename_sp.add_argument('name')
    rename_sp.set_defaults(func=rename_action)

    res_sp = subparsers.add_parser('resolve', aliases=['rs'], help='resolve a path to a node ID\n\n')
    res_sp.add_argument('path')
    res_sp.set_defaults(func=resolve_action)

    # maybe the child operations should not be exposed
    # they can be used for creating hardlinks
    add_c_sp = subparsers.add_parser('add-child', aliases=['ac'], help='add a node to a parent folder')
    add_c_sp.add_argument('parent')
    add_c_sp.add_argument('child')
    add_c_sp.set_defaults(func=add_child_action)

    rem_c_sp = subparsers.add_parser('remove-child', aliases=['rc'],
                                     help='remove a node from a parent folder\n\n')
    rem_c_sp.add_argument('parent')
    rem_c_sp.add_argument('child')
    rem_c_sp.set_defaults(func=remove_child_action)

    usage_sp = subparsers.add_parser('usage', aliases=['u'], help='show drive usage data')
    usage_sp.set_defaults(func=usage_action)

    quota_sp = subparsers.add_parser('quota', aliases=['q'], help='show drive quota [raw JSON]')
    quota_sp.set_defaults(func=quota_action)

    meta_sp = subparsers.add_parser('metadata', aliases=['m'], help='print a node\'s metadata [raw JSON]')
    meta_sp.add_argument('node')
    meta_sp.set_defaults(func=metadata_action)

    de_sp = subparsers.add_parser('delete-everything', add_help=False)
    de_sp.set_defaults(func=delete_everything_action)

    # useful for interactive mode
    dn_sp = subparsers.add_parser('init', aliases=['i'], add_help=False)
    dn_sp.set_defaults(func=None)

    # dump sql database creation sequence to stdout
    dmp_sp = subparsers.add_parser('dumpsql', add_help=False)
    dmp_sp.set_defaults(func=dump_sql_action)

    fuse_sp = subparsers.add_parser('mount', add_help=False)
    fuse_sp.add_argument('path')
    fuse_sp.set_defaults(func=mount_action)

    # Let plugins attach their own sub-parsers; messages are buffered because
    # logging is not configured until set_log_level has run.
    plugin_log = [str(plugins.Plugin)]
    for plugin in plugins.Plugin:
        if plugin.check_version(__version__):
            log = []
            plugin.attach(subparsers, log)
            plugin_log.extend(log)
        else:
            plugin_log.append('Script version is not compatible with "%s".' % plugin)

    args = opt_parser.parse_args()
    set_log_level(args)
    for msg in plugin_log:
        logger.info(msg)
    if utf_flag:
        logger.info('Stdout/stderr encoding changed to UTF-8.')

    migrate_cache_files()

    # initialize only the layers the chosen action needs
    if args.func not in offline_actions:
        if not common.init(CACHE_PATH):
            sys.exit(INIT_FAILED_RETVAL)
    if args.func not in nocache_actions:
        if not db.init(CACHE_PATH):
            sys.exit(INIT_FAILED_RETVAL)
    check_cache_age()
    format.init(args.color)

    if args.no_wait:
        # hidden -nw flag: disable the back-off delay between API requests
        common.BackOffRequest._wait = lambda: None

    # resolve remote path arguments to node IDs before dispatch
    autoresolve_attrs = ['child', 'parent', 'node']
    resolve_remote_path_args(args, autoresolve_attrs,
                             incl_trash=args.action not in no_autores_trash_actions)

    # call appropriate sub-parser action
    if args.func:
        sys.exit(args.func(args))
import unittest
import logging
import os
import io
import random
import string
import mmap

from acdcli.api import account, common, content, metadata, trash, oauth
from acdcli.api.common import RequestError
from acdcli.utils import hashing

logging.basicConfig(level=logging.INFO)

# disable the exponential back-off between requests to speed up the tests
common.BackOffRequest._wait = lambda: None

path = os.path.join(os.path.dirname(__file__), 'cache_files')
common.init(path)


def gen_rand_nm() -> str:
    """Return a random 64-character alphanumeric file name."""
    return str.join('', (random.choice(string.ascii_letters + string.digits) for _ in range(64)))


def gen_rand_sz() -> int:
    """Return a random file size between 1 byte and 32 KiB."""
    return random.randint(1, 32 * 1024)


def gen_rand_file(size=None) -> tuple:
    """Create a file with a random name and random content in the CWD.

    :param size: file size in bytes; a fresh random size is drawn when omitted
    :return: tuple of (file name, size)
    """
    # BUG FIX 1: the default used to be ``size=gen_rand_sz()``, evaluated once
    # at import time, so every no-argument call got the same "random" size.
    # BUG FIX 2: the ``return fn, size`` was missing even though the signature
    # declares ``-> tuple`` (and the twin definition of this helper returns it),
    # so callers received None instead of the (name, size) pair.
    if size is None:
        size = gen_rand_sz()
    fn = gen_rand_nm()
    with open(fn, 'wb') as f:
        f.write(os.urandom(size))
    return fn, size
def main():
    """CLI entry point: switch stdout/stderr to UTF-8 on non-UTF-8 ttys, build
    the argument parser, attach plugins, initialize the API and node cache,
    and dispatch the selected sub-command via ``args.func``.
    """
    utf_flag = False
    if sys.stdout.isatty():
        # NOTE(review): sys.stdout.encoding can be None in exotic setups, and
        # str.lower(None) would raise TypeError here -- confirm a tty always
        # reports an encoding on the supported platforms.
        if str.lower(sys.stdout.encoding) != 'utf-8':
            import io
            sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding='utf-8')
            sys.stderr = io.TextIOWrapper(sys.stderr.detach(), encoding='utf-8')
            utf_flag = True  # remembered so the switch can be logged once logging is set up

    opt_parser = argparse.ArgumentParser(
        prog=_app_name, formatter_class=argparse.RawTextHelpFormatter,
        epilog='Hints: \n'
               ' * Remote locations may be specified as path in most cases, e.g. "/folder/file", or via ID \n'
               ' * If you need to enter a node ID that contains a leading dash (minus) sign, '
               'precede it by two dashes and a space, e.g. \'-- -xfH...\'\n'
               ' * actions marked with [+] have optional arguments'
               '')
    opt_parser.add_argument('-v', '--verbose', action='count',
                            help='prints some info messages to stderr; use "-vv" to also get sqlalchemy info.')
    opt_parser.add_argument('-d', '--debug', action='count', help='turn on debug mode')
    opt_parser.add_argument('-nw', '--no-wait', action='store_true', help=argparse.SUPPRESS)

    # one sub-parser per action; the chosen action's handler lands in args.func
    subparsers = opt_parser.add_subparsers(title='action', dest='action')
    subparsers.required = True

    sync_sp = subparsers.add_parser('sync', aliases=['s'],
                                    help='[+] refresh node list cache; necessary for many actions')
    sync_sp.add_argument('--full', '-f', action='store_true', help='force a full sync')
    sync_sp.set_defaults(func=sync_action)

    old_sync_sp = subparsers.add_parser('old-sync', add_help=False)
    old_sync_sp.set_defaults(func=old_sync_action)

    clear_sp = subparsers.add_parser('clear-cache', aliases=['cc'], help='clear node cache [offline operation]')
    clear_sp.set_defaults(func=clear_action)

    tree_sp = subparsers.add_parser('tree', aliases=['t'], help='[+] print directory tree [offline operation]')
    tree_sp.add_argument('--include-trash', '-t', action='store_true')
    tree_sp.add_argument('node', nargs='?', default=None, help='root node for the tree')
    tree_sp.set_defaults(func=tree_action)

    list_c_sp = subparsers.add_parser('children', aliases=['ls', 'dir'],
                                      help='[+] list folder\'s children [offline operation]')
    list_c_sp.add_argument('--include-trash', '-t', action='store_true')
    list_c_sp.add_argument('--recursive', '-r', action='store_true')
    list_c_sp.add_argument('node')
    list_c_sp.set_defaults(func=children_action)

    find_sp = subparsers.add_parser('find', aliases=['f'],
                                    help='find nodes by name [offline operation] [case insensitive]')
    find_sp.add_argument('name')
    find_sp.set_defaults(func=find_action)

    find_hash_sp = subparsers.add_parser('find-md5', aliases=['fh'],
                                         help='find files by MD5 hash [offline operation]')
    find_hash_sp.add_argument('md5')
    find_hash_sp.set_defaults(func=find_md5_action)

    # parent parser carrying the exclusion arguments shared by upload/download.
    # NOTE(review): registering it as a real 'dummy' action makes it user-
    # selectable as a side effect -- confirm this is intended rather than a
    # standalone (hidden) ArgumentParser.
    re_dummy_sp = subparsers.add_parser('dummy', add_help=False)
    re_dummy_sp.add_argument('--exclude-ending', '-xe', action='append', dest='exclude_fe', default=[],
                             help='exclude files whose endings match the given string, e.g. "bak" [case insensitive]')
    re_dummy_sp.add_argument('--exclude-regex', '-xr', action='append', dest='exclude_re', default=[],
                             help='exclude files whose names match the given regular expression,'
                                  ' e.g. "^thumbs\.db$" [case insensitive]')

    upload_sp = subparsers.add_parser('upload', aliases=['ul'], parents=[re_dummy_sp],
                                      help='[+] file and directory upload to a remote destination')
    upload_sp.add_argument('--overwrite', '-o', action='store_true',
                           help='overwrite if local modification time is higher or local ctime is higher than remote '
                                'modification time and local/remote file sizes do not match.')
    upload_sp.add_argument('--force', '-f', action='store_true', help='force overwrite')
    upload_sp.add_argument('path', nargs='+', help='a path to a local file or directory')
    upload_sp.add_argument('parent', help='remote parent folder')
    upload_sp.set_defaults(func=upload_action)

    overwrite_sp = subparsers.add_parser('overwrite', aliases=['ov'],
                                         help='overwrite file A [remote] with content of file B [local]')
    overwrite_sp.add_argument('node')
    overwrite_sp.add_argument('file')
    overwrite_sp.set_defaults(func=overwrite_action)

    download_sp = subparsers.add_parser('download', aliases=['dl'], parents=[re_dummy_sp],
                                        help='download a remote folder or file; will overwrite local files')
    download_sp.add_argument('node')
    download_sp.add_argument('path', nargs='?', default=None, help='local download path [optional]')
    download_sp.set_defaults(func=download_action)

    cr_fo_sp = subparsers.add_parser('create', aliases=['c', 'mkdir'],
                                     help='create folder using an absolute path')
    cr_fo_sp.add_argument('new_folder',
                          help='an absolute folder path, e.g. "/my/dir/"; trailing slash is optional')
    cr_fo_sp.set_defaults(func=create_action)

    trash_sp = subparsers.add_parser('list-trash', aliases=['lt'],
                                     help='[+] list trashed nodes [offline operation]')
    trash_sp.add_argument('--recursive', '-r', action='store_true')
    trash_sp.set_defaults(func=list_trash_action)

    m_trash_sp = subparsers.add_parser('trash', aliases=['rm'], help='move node to trash')
    m_trash_sp.add_argument('node')
    m_trash_sp.set_defaults(func=trash_action)

    rest_sp = subparsers.add_parser('restore', aliases=['re'], help='restore from trash')
    rest_sp.add_argument('node', help='ID of the node')
    rest_sp.set_defaults(func=restore_action)

    move_sp = subparsers.add_parser('move', aliases=['mv'], help='move node A into folder B')
    move_sp.add_argument('child')
    move_sp.add_argument('parent')
    move_sp.set_defaults(func=move_action)

    rename_sp = subparsers.add_parser('rename', aliases=['rn'], help='rename a node')
    rename_sp.add_argument('node')
    rename_sp.add_argument('name')
    rename_sp.set_defaults(func=rename_action)

    res_sp = subparsers.add_parser('resolve', aliases=['rs'], help='resolve a path to a node ID')
    res_sp.add_argument('path')
    res_sp.set_defaults(func=resolve_action)

    # maybe the child operations should not be exposed
    # they can be used for creating hardlinks
    add_c_sp = subparsers.add_parser('add-child', aliases=['ac'], help='add a node to a parent folder')
    add_c_sp.add_argument('parent')
    add_c_sp.add_argument('child')
    add_c_sp.set_defaults(func=add_child_action)

    rem_c_sp = subparsers.add_parser('remove-child', aliases=['rc'],
                                     help='remove a node from a parent folder')
    rem_c_sp.add_argument('parent')
    rem_c_sp.add_argument('child')
    rem_c_sp.set_defaults(func=remove_child_action)

    usage_sp = subparsers.add_parser('usage', aliases=['u'], help='show drive usage data')
    usage_sp.set_defaults(func=usage_action)

    quota_sp = subparsers.add_parser('quota', aliases=['q'], help='show drive quota [raw JSON]')
    quota_sp.set_defaults(func=quota_action)

    meta_sp = subparsers.add_parser('metadata', aliases=['m'], help='print a node\'s metadata [raw JSON]')
    meta_sp.add_argument('node')
    meta_sp.set_defaults(func=metadata_action)

    # useful for interactive mode
    dn_sp = subparsers.add_parser('init', aliases=['i'], add_help=False)
    dn_sp.set_defaults(func=None)

    # Let plugins attach their own sub-parsers; their messages are buffered
    # because logging is not configured until set_log_level has run.
    plugin_log = [str(plugins.Plugin)]
    for plugin in plugins.Plugin:
        if plugin.check_version(__version__):
            log = []
            plugin.attach(subparsers, log)
            plugin_log.extend(log)
        else:
            plugin_log.append('Script version is not compatible with "%s".' % plugin)

    args = opt_parser.parse_args()
    set_log_level(args)
    for msg in plugin_log:
        logger.info(msg)
    if utf_flag:
        logger.info('Stdout/stderr encoding changed to UTF-8.')

    migrate_cache_files()

    # offline actions
    if args.func not in [clear_action, tree_action, children_action,
                         list_trash_action, find_action, resolve_action]:
        if not common.init(CACHE_PATH):
            sys.exit(INIT_FAILED_RETVAL)
    # online actions
    if args.func not in [usage_action, quota_action]:
        # NOTE(review): unlike the other init calls, db.init's result is not
        # checked here -- confirm failure is acceptable for these actions.
        db.init(CACHE_PATH)

    if args.no_wait:
        # hidden -nw flag: disable the back-off delay between API requests
        common.BackOffRequest._wait = lambda: None

    # resolve remote path arguments to node IDs before dispatch
    autoresolve_attrs = ['child', 'parent', 'node']
    resolve_remote_path_args(args, autoresolve_attrs, [upload_action, list_trash_action])

    # call appropriate sub-parser action
    if args.func:
        sys.exit(args.func(args))