def download_task(args):
    # Command handler for "download": build a task query from the parsed
    # command-line arguments, then download completed tasks — optionally
    # polling in a "watch" loop until everything matching has finished.
    assert len(args) or args.input or args.all or args.category, 'Not enough arguments'
    lixian_download_tools.get_tool(args.tool) # check tool
    # Options forwarded to download_single_task/download_multiple_tasks.
    download_args = {'tool': args.tool,
                     'output': args.output,
                     'output_dir': args.output_dir,
                     'delete': args.delete,
                     'resuming': args._args['continue'],  # 'continue' is a Python keyword, so read the raw arg dict
                     'overwrite': args.overwrite,
                     'mini_hash': args.mini_hash,
                     'no_hash': not args.hash,
                     'no_bt_dir': not args.bt_dir,
                     'save_torrent_file': args.save_torrent_file,
                     'colors': args.colors}
    client = create_client(args)
    query = lixian_query.build_query(client, args)
    query.query_once()
    def sleep(n):
        # Sleep for n seconds; n may be an int or a string such as "30",
        # "30s", "10m" or "1h" (seconds / minutes / hours).
        assert isinstance(n, (int, basestring)), repr(n)
        import time
        if isinstance(n, basestring):
            n, u = re.match(r'^(\d+)([smh])?$', n.lower()).groups()
            n = int(n) * {None: 1, 's': 1, 'm': 60, 'h': 3600}[u]
        time.sleep(n)
    if args.watch_present:
        # Watch only the tasks already matched: poll until every matched
        # download job has completed, downloading as they finish.
        assert not args.output, 'not supported with watch option yet'
        tasks = query.pull_completed()
        while True:
            if tasks:
                download_multiple_tasks(client, tasks, download_args)
            if not query.download_jobs:
                break
            if not tasks:
                # Nothing completed this round; wait before polling again.
                sleep(args.watch_interval)
            query.refresh_status()
            tasks = query.pull_completed()
    elif args.watch:
        # Full watch mode: additionally re-run the searches each round so
        # newly added tasks matching the query are picked up while waiting.
        assert not args.output, 'not supported with watch option yet'
        tasks = query.pull_completed()
        while True:
            if tasks:
                download_multiple_tasks(client, tasks, download_args)
            if (not query.download_jobs) and (not query.queries):
                break
            if not tasks:
                sleep(args.watch_interval)
            query.refresh_status()
            query.query_search()
            tasks = query.pull_completed()
    else:
        # One-shot mode: download whatever is ready right now.
        tasks = query.peek_download_jobs()
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, tasks[0], download_args)
        else:
            download_multiple_tasks(client, tasks, download_args)
def download_task(args):
    """Download the tasks selected on the command line with the configured tool.

    Dispatches on how tasks were selected: explicit ids/links (or --input
    file), --all tasks on the account, otherwise prints usage help.
    """
    import lixian_download_tools
    download = lixian_download_tools.get_tool(args.tool)
    # Options forwarded to the per-task download helpers.
    download_args = {
        "output": args.output,
        "output_dir": args.output_dir,
        "delete": args.delete,
        "resuming": args._args["continue"],  # 'continue' is a Python keyword, so read the raw arg dict
        "overwrite": args.overwrite,
        "mini_hash": args.mini_hash,
        "no_hash": not args.hash,
    }
    client = XunleiClient(args.username, args.password, args.cookies)
    # (removed: unused local `links = None`)
    if len(args) or args.input:
        tasks = find_tasks_to_download(client, args)
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, download, tasks[0], download_args)
        else:
            download_multiple_tasks(client, download, tasks, download_args)
    elif args.all:
        tasks = client.read_all_tasks()
        download_multiple_tasks(client, download, tasks, download_args)
    else:
        usage(doc=lixian_help.download, message="Not enough arguments")
def download_task(args):
    """Parse the "download" command line, then download the selected tasks."""
    args = parse_login_command_line(args,
                                    ['tool', 'output', 'output-dir', 'input'],
                                    ['delete', 'continue', 'overwrite', 'torrent', 'all', 'mini-hash', 'hash'],
                                    alias={'o': 'output', 'i': 'input', 'c': 'continue', 'bt': 'torrent'},
                                    default={'tool': get_config('tool', 'wget'),
                                             'delete': get_config('delete'),
                                             'continue': get_config('continue'),
                                             'output-dir': get_config('output-dir'),
                                             'mini-hash': get_config('mini-hash'),
                                             'hash': get_config('hash', True)},
                                    help=lixian_help.download)
    import lixian_download_tools
    download = lixian_download_tools.get_tool(args.tool)
    # Options forwarded to the per-task download helpers.
    download_args = {'output': args.output,
                     'output_dir': args.output_dir,
                     'delete': args.delete,
                     'resuming': args._args['continue'],  # 'continue' is a Python keyword, so read the raw arg dict
                     'overwrite': args.overwrite,
                     'mini_hash': args.mini_hash,
                     'no_hash': not args.hash}
    client = XunleiClient(args.username, args.password, args.cookies)
    # (removed: unused local `links = None`)
    if len(args) or args.input:
        tasks = find_tasks_to_download(client, args)
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, download, tasks[0], download_args)
        else:
            download_multiple_tasks(client, download, tasks, download_args)
    elif args.all:
        #tasks = client.read_all_completed()
        tasks = client.read_all_tasks()
        download_multiple_tasks(client, download, tasks, download_args)
    else:
        usage(doc=lixian_help.download, message='Not enough arguments')
def download_task(args):
    # Command handler for "download": build a task query, then download
    # completed tasks with the selected tool — optionally polling in a
    # "watch" loop until everything matching has finished.
    import lixian_download_tools
    download = lixian_download_tools.get_tool(args.tool)
    # Options forwarded to download_single_task/download_multiple_tasks.
    download_args = {'output': args.output,
                     'output_dir': args.output_dir,
                     'delete': args.delete,
                     'resuming': args._args['continue'],  # 'continue' is a Python keyword, so read the raw arg dict
                     'overwrite': args.overwrite,
                     'mini_hash': args.mini_hash,
                     'no_hash': not args.hash,
                     'no_bt_dir': not args.bt_dir,
                     'save_torrent_file': args.save_torrent_file,
                     'colors': args.colors}
    client = create_client(args)
    assert len(args) or args.input or args.all or args.category, 'Not enough arguments'
    query = lixian_query.build_query(client, args)
    query.query_once()
    def sleep(n):
        # Sleep for n seconds; n may be an int or a string such as "30",
        # "30s", "10m" or "1h" (seconds / minutes / hours).
        assert isinstance(n, (int, basestring)), repr(n)
        import time
        if isinstance(n, basestring):
            n, u = re.match(r'^(\d+)([smh])?$', n.lower()).groups()
            n = int(n) * {None: 1, 's': 1, 'm': 60, 'h': 3600}[u]
        time.sleep(n)
    if args.watch_present:
        # Watch only the tasks already matched: poll until every matched
        # download job has completed, downloading as they finish.
        assert not args.output, 'not supported with watch option yet'
        tasks = query.pull_completed()
        while True:
            if tasks:
                download_multiple_tasks(client, download, tasks, download_args)
            if not query.download_jobs:
                break
            if not tasks:
                # Nothing completed this round; wait before polling again.
                sleep(args.watch_interval)
            query.refresh_status()
            tasks = query.pull_completed()
    elif args.watch:
        # Full watch mode: additionally re-run the searches each round so
        # newly added tasks matching the query are picked up while waiting.
        assert not args.output, 'not supported with watch option yet'
        tasks = query.pull_completed()
        while True:
            if tasks:
                download_multiple_tasks(client, download, tasks, download_args)
            if (not query.download_jobs) and (not query.queries):
                break
            if not tasks:
                sleep(args.watch_interval)
            query.refresh_status()
            query.query_search()
            tasks = query.pull_completed()
    else:
        # One-shot mode: download whatever is ready right now.
        tasks = query.peek_download_jobs()
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, download, tasks[0], download_args)
        else:
            download_multiple_tasks(client, download, tasks, download_args)
def download_file(client, path, task, options):
    # Download a single file for `task` to `path` with the configured tool,
    # retrying unfinished transfers and verifying size and hash afterwards.
    download_tool = lixian_download_tools.get_tool(options['tool'])
    resuming = options.get('resuming')
    overwrite = options.get('overwrite')
    mini_hash = options.get('mini_hash')
    no_hash = options.get('no_hash')
    url = str(task['xunlei_url'])
    def download1(download, path):
        # Run one download attempt, honoring the resume/overwrite policy for
        # a pre-existing file at `path`.
        if not os.path.exists(path):
            download()
        elif not resuming:
            if overwrite:
                download()
            else:
                raise Exception('%s already exists. Please try --continue or --overwrite' % path)
        else:
            if download.finished():
                pass
            else:
                download()
    def download1_checked(client, url, path, size):
        # Retry until the tool reports completion (at most 10 extra rounds),
        # then assert the on-disk size matches the expected task size.
        download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming)
        checked = 0
        while checked < 10:
            download1(download, path)
            if download.finished():
                break
            else:
                checked += 1
        assert os.path.getsize(path) == size, 'incorrect downloaded file size (%s != %s)' % (os.path.getsize(path), size)
    def download2(client, url, path, task):
        size = task['size']
        # With --mini-hash while resuming, a quick partial-hash match of an
        # already-downloaded file is trusted and the download is skipped.
        if mini_hash and resuming and verify_mini_hash(path, task):
            return
        download1_checked(client, url, path, size)
        verify = verify_basic_hash if no_hash else verify_hash
        if not verify(path, task):
            with colors(options.get('colors')).yellow():
                print 'hash error, redownloading...'
            # Keep the corrupt file aside and download once more from scratch.
            os.rename(path, path + '.error')
            download1_checked(client, url, path, size)
            if not verify(path, task):
                raise Exception('hash check failed')
    download2(client, url, path, task)
def download_file(client, path, task, options):
    # Download a single file for `task` to `path` with the configured tool,
    # optionally switching to another VOD node first, retrying unfinished
    # transfers and verifying size and hash afterwards.
    download_tool = lixian_download_tools.get_tool(options['tool'])
    resuming = options.get('resuming')
    overwrite = options.get('overwrite')
    mini_hash = options.get('mini_hash')
    no_hash = options.get('no_hash')
    url = str(task['xunlei_url'])
    if options['node']:
        if options['node'] == 'best' or options['node'] == 'fastest':
            from lixian_util import parse_size
            # Node speed detection is only worth it for large files.
            if task['size'] >= parse_size(options['node_detection_threshold']):
                url = lixian_nodes.use_fastest_node(url, options['vod_nodes'], client.get_gdriveid())
        else:
            # An explicit node name was given; rewrite the URL to use it.
            url = lixian_nodes.switch_node(url, options['node'], client.get_gdriveid())
    def download1(download, path):
        # Run one download attempt, honoring the resume/overwrite policy for
        # a pre-existing file at `path`.
        if not os.path.exists(path):
            download()
        elif not resuming:
            if overwrite:
                download()
            else:
                raise Exception('%s already exists. Please try --continue or --overwrite' % path)
        else:
            if download.finished():
                pass
            else:
                download()
    def download1_checked(client, url, path, size):
        # Retry until the tool reports completion (at most 10 extra rounds),
        # then assert the on-disk size matches the expected task size.
        download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming)
        checked = 0
        while checked < 10:
            download1(download, path)
            if download.finished():
                break
            else:
                checked += 1
        assert os.path.getsize(path) == size, 'incorrect downloaded file size (%s != %s)' % (os.path.getsize(path), size)
    def download2(client, url, path, task):
        size = task['size']
        # With --mini-hash while resuming, a quick partial-hash match of an
        # already-downloaded file is trusted and the download is skipped.
        if mini_hash and resuming and verify_mini_hash(path, task):
            return
        download1_checked(client, url, path, size)
        verify = verify_basic_hash if no_hash else verify_hash
        if not verify(path, task):
            with colors(options.get('colors')).yellow():
                print 'hash error, redownloading...'
            # Keep the corrupt file aside and download once more from scratch.
            os.rename(path, path + '.error')
            download1_checked(client, url, path, size)
            if not verify(path, task):
                raise Exception('hash check failed')
    download2(client, url, path, task)
def download_file(client, path, task, options):
    # Download a single file for `task` to `path` with the configured tool,
    # retrying unfinished transfers and verifying size and hash afterwards.
    download_tool = lixian_download_tools.get_tool(options["tool"])
    resuming = options.get("resuming")
    overwrite = options.get("overwrite")
    mini_hash = options.get("mini_hash")
    no_hash = options.get("no_hash")
    url = str(task["xunlei_url"])
    def download1(download, path):
        # Run one download attempt, honoring the resume/overwrite policy for
        # a pre-existing file at `path`.
        if not os.path.exists(path):
            download()
        elif not resuming:
            if overwrite:
                download()
            else:
                raise Exception("%s already exists. Please try --continue or --overwrite" % path)
        else:
            if download.finished():
                pass
            else:
                download()
    def download1_checked(client, url, path, size):
        # Retry until the tool reports completion (at most 10 extra rounds),
        # then assert the on-disk size matches the expected task size.
        download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming)
        checked = 0
        while checked < 10:
            download1(download, path)
            if download.finished():
                break
            else:
                checked += 1
        assert os.path.getsize(path) == size, "incorrect downloaded file size (%s != %s)" % (
            os.path.getsize(path),
            size,
        )
    def download2(client, url, path, task):
        size = task["size"]
        # With --mini-hash while resuming, a quick partial-hash match of an
        # already-downloaded file is trusted and the download is skipped.
        if mini_hash and resuming and verify_mini_hash(path, task):
            return
        download1_checked(client, url, path, size)
        verify = verify_basic_hash if no_hash else verify_hash
        if not verify(path, task):
            with colors(options.get("colors")).yellow():
                print "hash error, redownloading..."
            # Keep the corrupt file aside and download once more from scratch.
            os.rename(path, path + ".error")
            download1_checked(client, url, path, size)
            if not verify(path, task):
                raise Exception("hash check failed")
    download2(client, url, path, task)
def download_task(args):
    """Download the tasks selected on the command line with the configured tool.

    Dispatches on how tasks were selected: explicit ids/links (or --input
    file), --all tasks on the account, otherwise prints usage help.
    """
    import lixian_download_tools
    download = lixian_download_tools.get_tool(args.tool)
    # Options forwarded to the per-task download helpers.
    download_args = {'output': args.output,
                     'output_dir': args.output_dir,
                     'delete': args.delete,
                     'resuming': args._args['continue'],  # 'continue' is a Python keyword, so read the raw arg dict
                     'overwrite': args.overwrite,
                     'mini_hash': args.mini_hash,
                     'no_hash': not args.hash,
                     'no_bt_dir': not args.bt_dir,
                     'save_torrent_file': args.save_torrent_file}
    client = XunleiClient(args.username, args.password, args.cookies)
    # (removed: unused local `links = None`)
    if len(args) or args.input:
        tasks = find_tasks_to_download(client, args)
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, download, tasks[0], download_args)
        else:
            download_multiple_tasks(client, download, tasks, download_args)
    elif args.all:
        tasks = client.read_all_tasks()
        download_multiple_tasks(client, download, tasks, download_args)
    else:
        usage(doc=lixian_help.download, message='Not enough arguments')
def download_task(args):
    """Download the tasks selected on the command line with the configured tool.

    Dispatches on how tasks were selected: explicit ids/links (or --input
    file), --all tasks on the account, otherwise prints usage help.
    """
    import lixian_download_tools
    download = lixian_download_tools.get_tool(args.tool)
    # Options forwarded to the per-task download helpers.
    download_args = {'output': args.output,
                     'output_dir': args.output_dir,
                     'delete': args.delete,
                     'resuming': args._args['continue'],  # 'continue' is a Python keyword, so read the raw arg dict
                     'overwrite': args.overwrite,
                     'mini_hash': args.mini_hash,
                     'no_hash': not args.hash}
    client = XunleiClient(args.username, args.password, args.cookies)
    # (removed: unused local `links = None`)
    if len(args) or args.input:
        tasks = find_tasks_to_download(client, args)
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, download, tasks[0], download_args)
        else:
            download_multiple_tasks(client, download, tasks, download_args)
    elif args.all:
        tasks = client.read_all_tasks()
        download_multiple_tasks(client, download, tasks, download_args)
    else:
        usage(doc=lixian_help.download, message='Not enough arguments')
def download_task(args):
    """Parse the "download" command line, then download the selected tasks."""
    args = parse_login_command_line(
        args,
        ["tool", "output", "output-dir", "input"],
        ["delete", "continue", "overwrite", "torrent", "all", "mini-hash", "hash"],
        alias={"o": "output", "i": "input", "c": "continue", "bt": "torrent"},
        default={
            "tool": get_config("tool", "wget"),
            "delete": get_config("delete"),
            "continue": get_config("continue"),
            "output-dir": get_config("output-dir"),
            "mini-hash": get_config("mini-hash"),
            "hash": get_config("hash", True),
        },
        help=lixian_help.download,
    )
    import lixian_download_tools
    download = lixian_download_tools.get_tool(args.tool)
    # Options forwarded to the per-task download helpers.
    download_args = {
        "output": args.output,
        "output_dir": args.output_dir,
        "delete": args.delete,
        "resuming": args._args["continue"],  # 'continue' is a Python keyword, so read the raw arg dict
        "overwrite": args.overwrite,
        "mini_hash": args.mini_hash,
        "no_hash": not args.hash,
    }
    client = XunleiClient(args.username, args.password, args.cookies)
    # (removed: unused local `links = None`)
    if len(args) or args.input:
        tasks = find_tasks_to_download(client, args)
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, download, tasks[0], download_args)
        else:
            download_multiple_tasks(client, download, tasks, download_args)
    elif args.all:
        # tasks = client.read_all_completed()
        tasks = client.read_all_tasks()
        download_multiple_tasks(client, download, tasks, download_args)
    else:
        usage(doc=lixian_help.download, message="Not enough arguments")
def download_file(client, path, task, options):
    # Download a single file for `task` to `path` with the configured tool,
    # optionally switching to a faster VOD node first, retrying unfinished
    # transfers and verifying size and hash afterwards.
    download_tool = lixian_download_tools.get_tool(options['tool'])
    resuming = options.get('resuming')
    overwrite = options.get('overwrite')
    mini_hash = options.get('mini_hash')
    no_hash = options.get('no_hash')
    url = str(task['xunlei_url'])
    if options['node']:
        if options['node'] == 'best' or options['node'] == 'fastest':
            from lixian_util import parse_size
            # Node speed detection is only worth it for large files.
            if task['size'] >= parse_size(options['node_detection_threshold']):
                url = lixian_nodes.use_fastest_node(url, options['vod_nodes'], client.get_gdriveid())
        elif options['node'] == 'fast':
            from lixian_util import parse_size
            # "fast" settles for the first node above an acceptable speed
            # instead of probing for the absolute fastest one.
            if task['size'] >= parse_size(options['node_detection_threshold']):
                url = lixian_nodes.use_fast_node(url, options['vod_nodes'], parse_size(options['node_detection_acceptable']), client.get_gdriveid())
        else:
            # An explicit node name was given; rewrite the URL to use it.
            url = lixian_nodes.switch_node(url, options['node'], client.get_gdriveid())
    def download1(download, path):
        # Run one download attempt, honoring the resume/overwrite policy for
        # a pre-existing file at `path`.
        if not os.path.exists(path):
            download()
        elif not resuming:
            if overwrite:
                download()
            else:
                raise Exception('%s already exists. Please try --continue or --overwrite' % path)
        else:
            if download.finished():
                pass
            else:
                download()
    def download1_checked(client, url, path, size):
        # Retry until the tool reports completion (at most 10 extra rounds),
        # then assert the on-disk size matches the expected task size.
        download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming)
        checked = 0
        while checked < 10:
            download1(download, path)
            if download.finished():
                break
            else:
                checked += 1
        assert os.path.getsize(path) == size, 'incorrect downloaded file size (%s != %s)' % (os.path.getsize(path), size)
    def download2(client, url, path, task):
        size = task['size']
        # With --mini-hash while resuming, a quick partial-hash match of an
        # already-downloaded file is trusted and the download is skipped.
        if mini_hash and resuming and verify_mini_hash(path, task):
            return
        download1_checked(client, url, path, size)
        verify = verify_basic_hash if no_hash else verify_hash
        if not verify(path, task):
            with colors(options.get('colors')).yellow():
                print 'hash error, redownloading...'
            # Keep the corrupt file aside and download once more from scratch.
            os.rename(path, path + '.error')
            download1_checked(client, url, path, size)
            if not verify(path, task):
                raise Exception('hash check failed')
    download2(client, url, path, task)
def download_file(client, path, task, options):
    # Download a single file for `task` to `path` with the configured tool.
    # Synchronous mode retries and verifies size/hash; async mode hands the
    # job to the tool (e.g. a queueing downloader) and returns immediately.
    download_tool = lixian_download_tools.get_tool(options['tool'])
    resuming = options.get('resuming')
    overwrite = options.get('overwrite')
    mini_hash = options.get('mini_hash')
    no_hash = options.get('no_hash')
    async = options.get('async')
    url = str(task['xunlei_url'])
    if options['node']:
        # Rewrite the URL to go through the requested VOD node.
        url = switch_node(client, url, options['node'])
    def download1(download, path):
        # Run one download attempt, honoring the resume/overwrite policy for
        # a pre-existing file at `path`.
        if not os.path.exists(path):
            download()
        elif not resuming:
            if overwrite:
                download()
            else:
                raise Exception('%s already exists. Please try --continue or --overwrite' % path)
        else:
            if download.finished():
                pass
            else:
                download()
    def download1_checked(client, url, path, size):
        # Retry until the tool reports completion (at most 10 extra rounds),
        # then assert the on-disk size matches the expected task size.
        download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming)
        checked = 0
        while checked < 10:
            download1(download, path)
            if download.finished():
                break
            else:
                checked += 1
        assert os.path.getsize(path) == size, 'incorrect downloaded file size (%s != %s)' % (os.path.getsize(path), size)
    def download2(client, url, path, task):
        size = task['size']
        # With --mini-hash while resuming, a quick partial-hash match of an
        # already-downloaded file is trusted and the download is skipped.
        if mini_hash and resuming and verify_mini_hash(path, task):
            return
        download1_checked(client, url, path, size)
        verify = verify_basic_hash if no_hash else verify_hash
        if not verify(path, task):
            with colors(options.get('colors')).yellow():
                print 'hash error, redownloading...'
            # Keep the corrupt file aside and download once more from scratch.
            os.rename(path, path + '.error')
            download1_checked(client, url, path, size)
            if not verify(path, task):
                raise Exception('hash check failed')
    def download_async(client, url, path, task):
        # Fire-and-forget submission; no size/hash verification is possible
        # because the transfer completes after this function returns.
        size=task['size']
        download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming, async=True)
        download1(download, path)
    def download3(client, url, path, task, async):
        # Dispatch on the --async flag.
        if async:
            download_async(client, url, path, task)
        else:
            download2(client, url, path, task)
    download3(client, url, path, task, async)
def download_task(args):
    # Command handler for "download": build a task query from the parsed
    # command-line arguments, then download completed tasks — optionally
    # polling in a "watch" loop until everything matching has finished.
    assert len(args) or args.input or args.all or args.category, "Not enough arguments"
    lixian_download_tools.get_tool(args.tool)  # check tool
    # Options forwarded to download_single_task/download_multiple_tasks.
    download_args = {
        "tool": args.tool,
        "output": args.output,
        "output_dir": args.output_dir,
        "delete": args.delete,
        "resuming": args._args["continue"],  # 'continue' is a Python keyword, so read the raw arg dict
        "overwrite": args.overwrite,
        "mini_hash": args.mini_hash,
        "no_hash": not args.hash,
        "no_bt_dir": not args.bt_dir,
        "save_torrent_file": args.save_torrent_file,
        "colors": args.colors,
    }
    client = create_client(args)
    query = lixian_query.build_query(client, args)
    query.query_once()
    def sleep(n):
        # Sleep for n seconds; n may be an int or a string such as "30",
        # "30s", "10m" or "1h" (seconds / minutes / hours).
        assert isinstance(n, (int, basestring)), repr(n)
        import time
        if isinstance(n, basestring):
            n, u = re.match(r"^(\d+)([smh])?$", n.lower()).groups()
            n = int(n) * {None: 1, "s": 1, "m": 60, "h": 3600}[u]
        time.sleep(n)
    if args.watch_present:
        # Watch only the tasks already matched: poll until every matched
        # download job has completed, downloading as they finish.
        assert not args.output, "not supported with watch option yet"
        tasks = query.pull_completed()
        while True:
            if tasks:
                download_multiple_tasks(client, tasks, download_args)
            if not query.download_jobs:
                break
            if not tasks:
                # Nothing completed this round; wait before polling again.
                sleep(args.watch_interval)
            query.refresh_status()
            tasks = query.pull_completed()
    elif args.watch:
        # Full watch mode: additionally re-run the searches each round so
        # newly added tasks matching the query are picked up while waiting.
        assert not args.output, "not supported with watch option yet"
        tasks = query.pull_completed()
        while True:
            if tasks:
                download_multiple_tasks(client, tasks, download_args)
            if (not query.download_jobs) and (not query.queries):
                break
            if not tasks:
                sleep(args.watch_interval)
            query.refresh_status()
            query.query_search()
            tasks = query.pull_completed()
    else:
        # One-shot mode: download whatever is ready right now.
        tasks = query.peek_download_jobs()
        if args.output:
            # --output renames a single file, so exactly one task is allowed.
            assert len(tasks) == 1
            download_single_task(client, tasks[0], download_args)
        else:
            download_multiple_tasks(client, tasks, download_args)
def download_task(args): assert len(args) or args.input or args.all or args.category, 'Not enough arguments' lixian_download_tools.get_tool(args.tool) # check tool download_args = {'tool': args.tool, 'output': args.output, 'output_dir': args.output_dir, 'delete': args.delete, 'resuming': args._args['continue'], 'async': args._args['async'], 'overwrite': args.overwrite, 'mini_hash': args.mini_hash, 'no_hash': not args.hash, 'no_bt_dir': not args.bt_dir, 'save_torrent_file': args.save_torrent_file, 'node': args.node, 'node_detection_threshold': args.node_detection_threshold, 'node_detection_acceptable': args.node_detection_acceptable, 'vod_nodes': args.vod_nodes, 'colors': args.colors} client = create_client(args) query = lixian_query.build_query(client, args) query.query_once() def sleep(n): assert isinstance(n, (int, basestring)), repr(n) import time if isinstance(n, basestring): n, u = re.match(r'^(\d+)([smh])?$', n.lower()).groups() n = int(n) * {None: 1, 's': 1, 'm': 60, 'h': 3600}[u] time.sleep(n) if args.watch_present: assert not args.output, 'not supported with watch option yet' tasks = query.pull_completed() while True: if tasks: download_multiple_tasks(client, tasks, download_args) if not query.download_jobs: break if not tasks: sleep(args.watch_interval) query.refresh_status() tasks = query.pull_completed() elif args.watch: assert not args.output, 'not supported with watch option yet' old_tasks = [] tasks = query.pull_completed() while True: try: new_tasks = [] for nt in tasks: is_new = True for ot in old_tasks: if nt['id'] == ot['id']: is_new = False break if is_new: new_tasks.append(nt) if new_tasks: download_multiple_tasks(client, new_tasks, download_args) if (not query.download_jobs) and (not query.queries): break if not new_tasks: sleep(args.watch_interval) query.refresh_status() query.query_search() old_tasks = tasks tasks = query.pull_completed() except Exception as e: print 'An error cought and passed in watch mode...' 
print e else: tasks = query.peek_download_jobs() if args.output: assert len(tasks) == 1 download_single_task(client, tasks[0], download_args) else: download_multiple_tasks(client, tasks, download_args)