def export_aria2_conf(args):
    client = XunleiClient(args.username, args.password, args.cookies)
    import lixian_query
    tasks = lixian_query.search_tasks(client, args)
    files = []
    for task in tasks:
        if task["type"] == "bt":
            subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
            if not subs:
                continue
            if single_file:
                files.append((subs[0]["xunlei_url"], subs[0]["name"], None))
            else:
                for f in subs:
                    files.append((f["xunlei_url"], f["name"], task["name"]))
        else:
            files.append((task["xunlei_url"], task["name"], None))
    output = ""
    for url, name, dir in files:
        if type(url) == unicode:
            url = url.encode(default_encoding)
        output += url + "\n"
        output += " out=" + name.encode(default_encoding) + "\n"
        if dir:
            output += " dir=" + dir.encode(default_encoding) + "\n"
        output += " header=Cookie: gdriveid=" + client.get_gdriveid() + "\n"
    return output
def export_aria2_conf(args):
    client = create_client(args)
    import lixian_query
    tasks = lixian_query.search_tasks(client, args)
    files = []
    for task in tasks:
        if task['type'] == 'bt':
            subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
            if not subs:
                continue
            if single_file:
                files.append((subs[0]['xunlei_url'], subs[0]['name'], None))
            else:
                for f in subs:
                    files.append((f['xunlei_url'], f['name'], task['name']))
        else:
            files.append((task['xunlei_url'], task['name'], None))
    output = ''
    for url, name, dir in files:
        if type(url) == unicode:
            url = url.encode(default_encoding)
        output += url + '\n'
        output += ' out=' + name.encode(default_encoding) + '\n'
        if dir:
            output += ' dir=' + dir.encode(default_encoding) + '\n'
        output += ' header=Cookie: gdriveid=' + client.get_gdriveid() + '\n'
    return output
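# A minimal usage sketch (hypothetical caller, example file name): the string
# returned by export_aria2_conf follows aria2's input-file format, i.e. a URI
# line followed by indented per-download options, so it can be written to disk
# and consumed with `aria2c --input-file=lixian.aria2.txt`.
def write_aria2_conf_example(args):
    conf = export_aria2_conf(args)
    with open('lixian.aria2.txt', 'wb') as output_stream:
        output_stream.write(conf)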
def speed_test(args):
    '''
    usage: lx speed_test [--vod-nodes=0-50] [id|name]
    '''
    assert len(args)
    client = create_client(args)
    import lixian_query
    tasks = lixian_query.search_tasks(client, args)
    if not tasks:
        raise Exception('No task found')
    task = tasks[0]
    urls = []
    if task['type'] == 'bt':
        subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
        if not subs:
            raise Exception('No files found')
        subs = [f for f in subs if f['size'] > 1000*1000] or subs  # skip files with length < 1M
        if single_file:
            urls.append((subs[0]['xunlei_url'], subs[0]['name'], None))
        else:
            for f in subs:
                urls.append((f['xunlei_url'], f['name'], task['name']))
    else:
        urls.append((task['xunlei_url'], task['name'], None))
    url, filename, dirname = urls[0]
    name = dirname + '/' + filename if dirname else filename
    test_file(client, url, name, args)
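# `test_file` is called above but not defined in this section. The helper below
# is only an assumption of what such a probe could look like: it streams the
# first seconds of the file through urllib2 with the gdriveid cookie and
# reports an average speed. The real implementation (e.g. --vod-nodes support)
# is not shown here.
import time
import urllib2

def test_file_sketch(client, url, name, args):
    request = urllib2.Request(str(url), headers={
        'Cookie': 'gdriveid=' + str(client.get_gdriveid()),
        'Referer': str(client.get_referer()),
    })
    response = urllib2.urlopen(request)
    start = time.time()
    downloaded = 0
    while time.time() - start < 10:  # probe for roughly ten seconds
        chunk = response.read(64 * 1024)
        if not chunk:
            break
        downloaded += len(chunk)
    elapsed = max(time.time() - start, 0.001)
    print '%s: %.1f KB/s' % (name.encode(default_encoding), downloaded / 1024.0 / elapsed)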
def get_download_task_info(args, client):
    import lixian_query
    tasks = lixian_query.search_tasks(client, args)
    files = []
    for task in tasks:
        if task['type'] == 'bt':
            subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
            if not subs:
                continue
            if single_file:
                files.append((subs[0]['xunlei_url'], subs[0]['name'], None))
            else:
                for f in subs:
                    files.append((f['xunlei_url'], f['name'], task['name']))
        else:
            files.append((task['xunlei_url'], task['name'], None))
    return files
def export_download_urls(args):
    '''
    usage: lx export-download-urls [id|name]...
    '''
    assert len(args) or args.all or args.category, 'Not enough arguments'
    client = create_client(args)
    import lixian_query
    tasks = lixian_query.search_tasks(client, args)
    urls = []
    for task in tasks:
        if task['type'] == 'bt':
            subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
            if not subs:
                continue
            if single_file:
                urls.append((subs[0]['xunlei_url'], subs[0]['name'], None))
            else:
                for f in subs:
                    urls.append((f['xunlei_url'], f['name'], task['name']))
        else:
            urls.append((task['xunlei_url'], task['name'], None))
    for url, _, _ in urls:
        print url
def download_single_task(client, download, task, options):
    output = options.get('output')
    output = output and os.path.expanduser(output)
    output_dir = options.get('output_dir')
    output_dir = output_dir and os.path.expanduser(output_dir)
    delete = options.get('delete')
    resuming = options.get('resuming')
    overwrite = options.get('overwrite')
    mini_hash = options.get('mini_hash')
    no_hash = options.get('no_hash')
    no_bt_dir = options.get('no_bt_dir')
    save_torrent_file = options.get('save_torrent_file')

    assert client.get_gdriveid()
    if task['status_text'] != 'completed':
        if 'files' not in task:
            with colors(options.get('colors')).yellow():
                print 'skip task %s as the status is %s' % (task['name'].encode(default_encoding), task['status_text'])
            return

    def download1(client, url, path, size):
        if not os.path.exists(path):
            download(client, url, path)
        elif not resuming:
            if overwrite:
                download(client, url, path)
            else:
                raise Exception('%s already exists. Please try --continue or --overwrite' % path)
        else:
            assert os.path.getsize(path) <= size, 'existing file bigger than expected, unsafe to continue nor overwrite'
            if os.path.getsize(path) < size:
                download(client, url, path, resuming)
            elif os.path.getsize(path) == size:
                pass
            else:
                raise NotImplementedError()

    def download2(client, url, path, task):
        size = task['size']
        if mini_hash and resuming and verify_mini_hash(path, task):
            return
        download1(client, url, path, size)
        verify = verify_basic_hash if no_hash else verify_hash
        if not verify(path, task):
            with colors(options.get('colors')).yellow():
                print 'hash error, redownloading...'
            os.remove(path)
            download1(client, url, path, size)
            if not verify(path, task):
                raise Exception('hash check failed')

    download_url = str(task['xunlei_url'])
    if output:
        output_path = output
        output_dir = os.path.dirname(output)
        output_name = os.path.basename(output)
    else:
        output_name = escape_filename(task['name']).encode(default_encoding)
        output_dir = output_dir or '.'
        output_path = os.path.join(output_dir, output_name)

    referer = str(client.get_referer())
    gdriveid = str(client.get_gdriveid())

    if task['type'] == 'bt':
        files, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
        if single_file:
            dirname = output_dir
        else:
            if no_bt_dir:
                output_path = os.path.dirname(output_path)
            dirname = output_path
        assert dirname  # dirname must be non-empty, otherwise dirname + os.path.sep + ... might be dangerous
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        for t in skipped:
            with colors(options.get('colors')).yellow():
                print 'skip task %s/%s (%s) as the status is %s' % (t['id'], t['index'], t['name'].encode(default_encoding), t['status_text'])
        if mini_hash and resuming and verify_mini_bt_hash(dirname, files):
            print task['name'].encode(default_encoding), 'is already done'
            if delete and 'files' not in task:
                client.delete_task(task)
            return
        if not single_file:
            with colors(options.get('colors')).green():
                print output_name + '/'
        for f in files:
            name = f['name']
            if f['status_text'] != 'completed':
                print 'Skipped %s file %s ...' % (f['status_text'], name.encode(default_encoding))
                continue
            if not single_file:
                print name.encode(default_encoding), '...'
            else:
                with colors(options.get('colors')).green():
                    print name.encode(default_encoding), '...'
            # XXX: if file name is escaped, hashing bt won't get correct file
            splitted_path = map(escape_filename, name.split('\\'))
            name = os.path.join(*splitted_path).encode(default_encoding)
            path = dirname + os.path.sep + name  # fix issue #82
            if splitted_path[:-1]:
                subdir = os.path.join(*splitted_path[:-1]).encode(default_encoding)
                subdir = dirname + os.path.sep + subdir  # fix issue #82
                if not os.path.exists(subdir):
                    os.makedirs(subdir)
            download_url = str(f['xunlei_url'])
            download2(client, download_url, path, f)
        if save_torrent_file:
            info_hash = str(task['bt_hash'])
            if single_file:
                torrent = os.path.join(dirname, escape_filename(task['name']).encode(default_encoding) + '.torrent')
            else:
                torrent = os.path.join(dirname, info_hash + '.torrent')
            if not os.path.exists(torrent):
                content = client.get_torrent_file_by_info_hash(info_hash)
                with open(torrent, 'wb') as output_stream:
                    output_stream.write(content)
        if not no_hash:
            torrent_file = client.get_torrent_file(task)
            print 'Hashing bt ...'
            from lixian_progress import SimpleProgressBar
            bar = SimpleProgressBar()
            file_set = [f['name'].encode('utf-8').split('\\') for f in files] if 'files' in task else None
            verified = lixian_hash_bt.verify_bt(output_path, lixian_hash_bt.bdecode(torrent_file)['info'], file_set=file_set, progress_callback=bar.update)
            bar.done()
            if not verified:
                # note that we don't delete bt download folder if hash failed
                raise Exception('bt hash check failed')
    else:
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)
        with colors(options.get('colors')).green():
            print output_name, '...'
        download2(client, download_url, output_path, task)
    if delete and 'files' not in task:
        client.delete_task(task)
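# `download_single_task` above receives the transfer function as its `download`
# parameter. The helper below is not the project's downloader; it is a minimal
# urllib2-based sketch matching the two call shapes used above,
# download(client, url, path) and download(client, url, path, resuming).
import os
import urllib2

def simple_download_sketch(client, url, path, resuming=False):
    headers = {'Cookie': 'gdriveid=' + str(client.get_gdriveid())}
    mode = 'wb'
    if resuming and os.path.exists(path):
        # request only the missing tail and append to the partial file
        headers['Range'] = 'bytes=%d-' % os.path.getsize(path)
        mode = 'ab'
    response = urllib2.urlopen(urllib2.Request(str(url), headers=headers))
    with open(path, mode) as output_stream:
        while True:
            chunk = response.read(64 * 1024)
            if not chunk:
                break
            output_stream.write(chunk)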
def download_single_task(client, task, options):
    output = options.get('output')
    output = output and os.path.expanduser(output)
    output_dir = options.get('output_dir')
    output_dir = output_dir and os.path.expanduser(output_dir)
    delete = options.get('delete')
    resuming = options.get('resuming')
    overwrite = options.get('overwrite')
    mini_hash = options.get('mini_hash')
    no_hash = options.get('no_hash')
    no_bt_dir = options.get('no_bt_dir')
    save_torrent_file = options.get('save_torrent_file')

    assert client.get_gdriveid()
    if task['status_text'] != 'completed':
        if 'files' not in task:
            with colors(options.get('colors')).yellow():
                print 'skip task %s as the status is %s' % (task['name'].encode(default_encoding), task['status_text'])
            return

    if output:
        output_path = output
        output_dir = os.path.dirname(output)
        output_name = os.path.basename(output)
    else:
        output_name = safe_encode_native_path(escape_filename(task['name']))
        output_dir = output_dir or '.'
        output_path = os.path.join(output_dir, output_name)

    if task['type'] == 'bt':
        files, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
        if single_file:
            dirname = output_dir
        else:
            if no_bt_dir:
                output_path = os.path.dirname(output_path)
            dirname = output_path
        assert dirname  # dirname must be non-empty, otherwise dirname + os.path.sep + ... might be dangerous
        ensure_dir_exists(dirname)
        for t in skipped:
            with colors(options.get('colors')).yellow():
                print 'skip task %s/%s (%s) as the status is %s' % (str(t['id']), t['index'], t['name'].encode(default_encoding), t['status_text'])
        if mini_hash and resuming and verify_mini_bt_hash(dirname, files):
            print task['name'].encode(default_encoding), 'is already done'
            if delete and 'files' not in task:
                client.delete_task(task)
            return
        if not single_file:
            with colors(options.get('colors')).green():
                print output_name + '/'
        for f in files:
            name = f['name']
            if f['status_text'] != 'completed':
                print 'Skipped %s file %s ...' % (f['status_text'], name.encode(default_encoding))
                continue
            if not single_file:
                print name.encode(default_encoding), '...'
            else:
                with colors(options.get('colors')).green():
                    print name.encode(default_encoding), '...'
            # XXX: if file name is escaped, hashing bt won't get correct file
            splitted_path = map(escape_filename, name.split('\\'))
            name = safe_encode_native_path(os.path.join(*splitted_path))
            path = dirname + os.path.sep + name  # fix issue #82
            if splitted_path[:-1]:
                subdir = safe_encode_native_path(os.path.join(*splitted_path[:-1]))
                subdir = dirname + os.path.sep + subdir  # fix issue #82
                ensure_dir_exists(subdir)
            download_file(client, path, f, options)
        if save_torrent_file:
            info_hash = str(task['bt_hash'])
            if single_file:
                torrent = os.path.join(dirname, escape_filename(task['name']).encode(default_encoding) + '.torrent')
            else:
                torrent = os.path.join(dirname, info_hash + '.torrent')
            if not os.path.exists(torrent):
                content = client.get_torrent_file_by_info_hash(info_hash)
                with open(torrent, 'wb') as output_stream:
                    output_stream.write(content)
        if not no_hash:
            torrent_file = client.get_torrent_file(task)
            print 'Hashing bt ...'
            from lixian_progress import SimpleProgressBar
            bar = SimpleProgressBar()
            file_set = [f['name'].encode('utf-8').split('\\') for f in files] if 'files' in task else None
            verified = lixian_hash_bt.verify_bt(output_path, lixian_hash_bt.bdecode(torrent_file)['info'], file_set=file_set, progress_callback=bar.update)
            bar.done()
            if not verified:
                # note that we don't delete bt download folder if hash failed
                raise Exception('bt hash check failed')
    else:
        ensure_dir_exists(output_dir)
        with colors(options.get('colors')).green():
            print output_name, '...'
        download_file(client, output_path, task, options)
    if delete and 'files' not in task:
        client.delete_task(task)
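# `safe_encode_native_path` and `ensure_dir_exists` are used above but not
# defined in this section. The definitions below are assumptions sketching the
# behavior implied by the older variants of download_single_task; the actual
# helpers may differ (e.g. in how encoding errors are handled).
import os

def safe_encode_native_path(path):
    # encode a unicode path with the configured default encoding
    return path.encode(default_encoding)

def ensure_dir_exists(dirname):
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)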
def addarpc(args):
    '''
    usage: python lixian_cli.py addarpc url
           python lixian_cli.py addarpc -i url.txt
           python lixian_cli.py addarpc --input=url.txt
           cat url | python lixian_cli.py addarpc -I
           python lixian_cli.py addarpc 1.torrent
           python lixian_cli.py addarpc torrent-info-hash
           python lixian_cli.py addarpc --bt http://xxx/xxx.torrent
    '''
    assert len(args) or args.input or args.I
    client = create_client(args)
    referer = client.get_referer()
    tasks = lixian_query.find_tasks_to_download(client, args)
    print('\033[35mAll tasks added. Checking status...\033[0m')
    #jsonrpc = get_config('aria2jsonrpc')
    #if jsonrpc is None:
    #    global DEFAULT_JSONRPC
    #    jsonrpc = DEFAULT_JSONRPC
    host = DEFAULT_HOST
    if args.dev is True:
        host = DEV_HOST
    port = DEFAULT_PORT
    jsonrpc = SERVER_URI_FORMAT.format(host, port)
    print("\033[35mAria2c jsonrpc server: %s\033[0m" % jsonrpc)

    columns = ['id', 'status', 'name']
    if get_config('n'):
        columns.insert(0, 'n')
    if args.size:
        columns.append('size')
    output_tasks(tasks, columns, args)

    files = []
    for task in tasks:
        if task['type'] == 'bt':
            subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
            if not subs:
                continue
            if single_file:
                files.append((subs[0]['xunlei_url'], subs[0]['name'], None, task['original_url']))
            else:
                for f in subs:
                    files.append((f['xunlei_url'], f['name'], task['name'], task['original_url']))
        else:
            files.append((task['xunlei_url'], task['name'], None, task['original_url']))

    for url, name, dir, original_url in files:
        if type(url) == unicode:
            url = url.encode(default_encoding)
        if dir:
            dir = dir.encode(default_encoding)
        urls = [url]
        if original_url.startswith("http"):
            urls.append(original_url)
        jsonreq = json.dumps({
            "jsonrpc": "2.0",
            "id": "qwer",
            "method": "aria2.addUri",
            "params": [
                urls,
                {
                    "out": name.encode(default_encoding),
                    "continue": "true",
                    "header": ['Cookie: gdriveid=%s' % client.get_gdriveid()],
                    "referer": referer
                }
            ]
        })
        c = urllib2.urlopen(jsonrpc, jsonreq)
        result = c.read()
        if result is None or result == "":
            print("\033[31mCan't add aria2 task %s\033[0m" % name)
        else:
            result = json.loads(result.decode(default_encoding))
            print("\033[32mAdded aria2 task [id=%s] %s\033[0m" % (result[u"result"], name))
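# DEFAULT_HOST, DEV_HOST, DEFAULT_PORT and SERVER_URI_FORMAT are referenced by
# addarpc but not defined in this section. The values below are illustrative
# assumptions only (6800 and /jsonrpc are aria2c's RPC defaults); the real
# module may point elsewhere.
DEFAULT_HOST = 'localhost'
DEV_HOST = '127.0.0.1'
DEFAULT_PORT = 6800
SERVER_URI_FORMAT = 'http://{0}:{1}/jsonrpc'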
def download_single_task(client, task, options):
    output = options.get("output")
    output = output and os.path.expanduser(output)
    output_dir = options.get("output_dir")
    output_dir = output_dir and os.path.expanduser(output_dir)
    delete = options.get("delete")
    resuming = options.get("resuming")
    overwrite = options.get("overwrite")
    mini_hash = options.get("mini_hash")
    no_hash = options.get("no_hash")
    no_bt_dir = options.get("no_bt_dir")
    save_torrent_file = options.get("save_torrent_file")

    assert client.get_gdriveid()
    if task["status_text"] != "completed":
        if "files" not in task:
            with colors(options.get("colors")).yellow():
                print "skip task %s as the status is %s" % (task["name"].encode(default_encoding), task["status_text"])
            return

    if output:
        output_path = output
        output_dir = os.path.dirname(output)
        output_name = os.path.basename(output)
    else:
        output_name = escape_filename(task["name"]).encode(default_encoding)
        output_dir = output_dir or "."
        output_path = os.path.join(output_dir, output_name)

    if task["type"] == "bt":
        files, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
        if single_file:
            dirname = output_dir
        else:
            if no_bt_dir:
                output_path = os.path.dirname(output_path)
            dirname = output_path
        assert dirname  # dirname must be non-empty, otherwise dirname + os.path.sep + ... might be dangerous
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        for t in skipped:
            with colors(options.get("colors")).yellow():
                print "skip task %s/%s (%s) as the status is %s" % (str(t["id"]), t["index"], t["name"].encode(default_encoding), t["status_text"])
        if mini_hash and resuming and verify_mini_bt_hash(dirname, files):
            print task["name"].encode(default_encoding), "is already done"
            if delete and "files" not in task:
                client.delete_task(task)
            return
        if not single_file:
            with colors(options.get("colors")).green():
                print output_name + "/"
        for f in files:
            name = f["name"]
            if f["status_text"] != "completed":
                print "Skipped %s file %s ..." % (f["status_text"], name.encode(default_encoding))
                continue
            if not single_file:
                print name.encode(default_encoding), "..."
            else:
                with colors(options.get("colors")).green():
                    print name.encode(default_encoding), "..."
            # XXX: if file name is escaped, hashing bt won't get correct file
            splitted_path = map(escape_filename, name.split("\\"))
            name = os.path.join(*splitted_path).encode(default_encoding)
            path = dirname + os.path.sep + name  # fix issue #82
            if splitted_path[:-1]:
                subdir = os.path.join(*splitted_path[:-1]).encode(default_encoding)
                subdir = dirname + os.path.sep + subdir  # fix issue #82
                if not os.path.exists(subdir):
                    os.makedirs(subdir)
            download_file(client, path, f, options)
        if save_torrent_file:
            info_hash = str(task["bt_hash"])
            if single_file:
                torrent = os.path.join(dirname, escape_filename(task["name"]).encode(default_encoding) + ".torrent")
            else:
                torrent = os.path.join(dirname, info_hash + ".torrent")
            if not os.path.exists(torrent):
                content = client.get_torrent_file_by_info_hash(info_hash)
                with open(torrent, "wb") as output_stream:
                    output_stream.write(content)
        if not no_hash:
            torrent_file = client.get_torrent_file(task)
            print "Hashing bt ..."
            from lixian_progress import SimpleProgressBar
            bar = SimpleProgressBar()
            file_set = [f["name"].encode("utf-8").split("\\") for f in files] if "files" in task else None
            verified = lixian_hash_bt.verify_bt(output_path, lixian_hash_bt.bdecode(torrent_file)["info"], file_set=file_set, progress_callback=bar.update)
            bar.done()
            if not verified:
                # note that we don't delete bt download folder if hash failed
                raise Exception("bt hash check failed")
    else:
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)
        with colors(options.get("colors")).green():
            print output_name, "..."
        download_file(client, output_path, task, options)
    if delete and "files" not in task:
        client.delete_task(task)