def get_task_process(self, nm_list=None, bt_list=None):
    """Query download progress for the given normal (nm) and BT task ids.

    Returns a list of dicts with task_id, cid, status, process,
    leave_time, speed and lixian_url for each task the server reports.
    """
    # Avoid mutable default arguments (shared across calls).
    nm_list = [] if nm_list is None else nm_list
    bt_list = [] if bt_list is None else bt_list
    params = dict(
        callback="rebuild",
        list=",".join(str(x) for x in nm_list + bt_list),
        nm_list=",".join(str(x) for x in nm_list),
        bt_list=",".join(str(x) for x in bt_list),
        uid=self.uid,
        noCacheIE=self._now,
    )
    r = self.session.get(self.GET_TASK_PROCESS, params=params)
    if r.error:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    result = []
    records = args[0].get("Process", {}).get("Record", []) if args else []
    for task in records:
        result.append(dict(
            task_id=int(task['tid']),
            cid=task.get('cid', None),
            status=self.d_status.get(int(task['download_status']), "waiting"),
            process=task['percent'],
            leave_time=task['leave_time'],
            speed=int(task['speed']),
            lixian_url=task.get('lixian_url', None),
        ))
    return result
def bt_task_check(self, url):
    """Ask the server to inspect a torrent URL.

    Returns the decoded queryUrl(...) fields as a dict, or {} when the
    reply carries fewer fields than expected.
    """
    target = self.QUERY_URL % {
        "url": urllib.quote_plus(url),
        "random": self._random,
        "cachetime": self._now,
    }
    resp = self.session.get(target)
    if resp.error:
        resp.raise_for_status()
    # queryUrl(flag,infohash,fsize,bt_title,is_full,subtitle,subformatsize,size_list,valid_list,file_icon,findex,random)
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if len(args) < 12:
        return {}
    keys = ("flag", "cid", "size", "title", "is_full", "subtitle",
            "subformatsize", "size_list", "valid_list", "file_icon",
            "findex", "random")
    return dict(zip(keys, args[:12]))
def bt_task_check(self, url):
    """Inspect a torrent URL and return its metadata plus file list.

    Returns {} when the reply is short or reports a zero size.
    """
    resp = self.session.get(self.QUERY_URL, params={
        "callback": "queryUrl",
        "u": url,
        "random": self._random,
        "tcache": self._now})
    resp.raise_for_status()
    # queryUrl(flag,infohash,fsize,bt_title,is_full,subtitle,subformatsize,size_list,valid_list,file_icon,findex,random)
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if len(args) < 12:
        return {}
    if not args[2]:
        return {}
    result = dict(
        flag=args[0],
        cid=args[1],
        size=args[2],
        title=title_fix(args[3]),
        is_full=args[4],
        random=args[11],
    )
    # args[5:11] are parallel per-file arrays; zip them into file dicts.
    result['filelist'] = [
        dict(title=sub, formatsize=fmt, size=sz, file_icon=icon,
             ext="", index=idx, valid=int(ok))
        for sub, fmt, sz, ok, icon, idx in zip(*args[5:11])
    ]
    return result
def webfilemail_url_analysis(self, url):
    """Submit *url* for web-file-mail analysis and return the first
    argument of the JS callback response."""
    query = {
        "action": "webfilemail_url_analysis",
        "url": url,
        "cachetime": self._now,
    }
    resp = self.session.get(self.WEBFILEMAIL_INTERFACE_URL, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def get_vip_info(self):
    """Fetch the account's VIP information and return the decoded dict."""
    query = {"cachetime": self._now, "callback": "jsonp1234567890"}
    resp = self.session.get(self.VIP_INFO_URL, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def delay_task(self, task_ids):
    """Request a deadline extension for the given tasks.

    Returns True when the server reports result == 1.
    """
    # The server expects each task id suffixed with "_1".
    ids = ",".join(str(tid) + "_1" for tid in task_ids)
    resp = self.session.get(
        self.TASK_DELAY_URL % dict(ids=ids, cachetime=self._now))
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0].get("result") == 1)
def get_wait_time(self, task_id, key=None):
    """Ask the download-check endpoint how long *task_id* must wait.

    *key* is an optional server-issued token. Returns the decoded
    response dict, or {} when the callback carried no arguments.
    """
    query = dict(callback="download_check_respo", t=self._now, taskid=task_id)
    if key:
        query["key"] = key
    resp = self.session.get(self.GET_WAIT_TIME_URL, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    if not args:
        return {}
    return args[0]
def batch_task_check(self, url_list):
    """POST a batch of URLs for pre-add checking.

    Returns the decoded begin_task_batch_resp payload, or {} when the
    callback carried no arguments.
    """
    payload = dict(url="\r\n".join(url_list), random=self._random)
    resp = self.session.post(self.BATCH_TASK_CHECK_URL, data=payload)
    resp.raise_for_status()
    DEBUG(pformat(resp.content))
    # The response is an HTML page embedding the JS callback call.
    m = re.search("""(parent.begin_task_batch_resp.*?)</script>""", resp.content)
    assert m
    func, args = parser_js_function_call(m.group(1))
    DEBUG(pformat(args))
    assert args
    return args[0] if args else {}
def delete_task(self, task_ids):
    """Delete the given tasks; True when the server reports result == 1."""
    target = self.TASK_DELETE_URL % dict(
        ids=urllib.quote_plus(",".join(task_ids)),
        cachetime=self._now)
    resp = self.session.get(target)
    if resp.error:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0].get("result") == 1)
def vod_get_bt_pic(self, cid, bindex=None):
    """Get gcid and screenshot info for files of a BT task.

    * max length of bindex is 18
    """
    # Avoid a mutable default argument (shared across calls).
    bindex = [] if bindex is None else bindex
    params = {"jsonp": "jsonp1234567890",
              "t": self._now,
              "info_hash": cid,
              "req_list": "/".join(map(str, bindex))}
    r = self.session.get(self.VOD_GET_BT_PIC % params)
    r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0].get("resp", {})
def delete_task(self, task_ids):
    """Delete the given tasks; True when the server reports result == 1."""
    query = {"type": "0",
             "taskids": ",".join(task_ids),
             "noCacheIE": self._now}
    resp = self.session.get(self.TASK_DELETE_URL, params=query)
    if resp.error:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0].get("result") == 1)
def delete_task(self, task_ids):
    """Delete tasks via POST; True when the server reports result == 1."""
    resp = self.session.post(
        self.TASK_DELETE_URL,
        params={"type": "0", "t": self._now},
        data={"databases": "0", "taskids": ",".join(map(str, task_ids))},
    )
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0].get("result") == 1)
def _torrent_update(self, filename, fp):
    """Upload a torrent file (*fp* is a readable file object).

    Returns the parsed btResult dict, or {} when the server reports a
    falsy ret_value.
    """
    resp = self.session.post(self.TORRENT_UPDATE_URL,
                             data={"random": self._random},
                             files={'filepath': (filename, fp)})
    DEBUG(pformat(resp.content))
    if resp.error:
        resp.raise_for_status()
    # The result JS is embedded in the returned HTML.
    m = re.search("""btResult =(.*?);</script>""", resp.content)
    assert m
    func, args = parser_js_function_call(m.group(1))
    DEBUG(pformat(args))
    assert args
    if args and args[0]['ret_value']:
        return args[0]
    return {}
def vod_get_list_pic(self, gcids):
    """Fetch VOD screenshot info for a list of gcids."""
    query = {
        "callback": "jsonp1234567890",
        "t": self._now,
        "ids": "",  # urlhash
        "gcid": ",".join(gcids),
        "rate": 0,
    }
    resp = self.session.get(self.VOD_GET_LIST_PIC, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def cloud_play_get_url(self, url):
    """Resolve a cloud-play stream URL for *url*.

    Returns the first argument of the JSONP callback response.
    """
    params = {
        "url": url,
        "platform": 1,
        "userid": self.session.cookies["userid"],
        "jsonp": "jsonp1234567890",
    }
    r = self.session.get(self.CLOUD_PLAY_GET_URL, params=params)
    # Fix: was a leftover bare `print r.content`; route through DEBUG
    # like every other method in this API.
    DEBUG(pformat(r.content))
    if r.error:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def get_free_url(self, nm_list=None, bt_list=None):
    """Fetch free (non-member) download URLs for the given task ids."""
    # Avoid mutable default arguments (shared across calls).
    nm_list = [] if nm_list is None else nm_list
    bt_list = [] if bt_list is None else bt_list
    info = {}
    params = dict(key=info.get("key", ""),
                  list=",".join(str(x) for x in nm_list + bt_list),
                  nm_list=",".join(str(x) for x in nm_list),
                  bt_list=",".join(str(x) for x in bt_list),
                  uid=self.uid,
                  t=self._now)
    r = self.session.get(self.GET_FREE_URL, params=params)
    r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0] if args else {}
def vod_get_process(self, url_list):
    """Query VOD transcode progress for each URL in *url_list*."""
    id_list = "####".join("list_bt_%d" % i for i in range(len(url_list)))
    query = {
        "callback": "jsonp1234567890",
        "t": self._now,
        "url_list": "####".join(url_list),
        "id_list": id_list,
        # NOTE(review): "palform" looks like a typo for "platform" but is
        # kept verbatim since it is the parameter name the server sees.
        "palform": 0,
    }
    resp = self.session.get(self.VOD_GET_PROCESS, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def vod_get_bt_pic(self, cid, bindex=None):
    """Get VOD pictures for files of the BT task with infohash *cid*."""
    # Avoid a mutable default argument (shared across calls).
    bindex = [] if bindex is None else bindex
    params = {
        "callback": "jsonp1234567890",
        "t": self._now,
        "infohash": cid,
        "bindex": ",".join(map(str, bindex)),
    }
    r = self.session.get(self.VOD_GET_BT_PIC, params=params)
    if r.error:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def _torrent_upload(self, filename, fp):
    """Upload a torrent file and return the parsed result dict.

    Returns {} when the response matches neither known embedding or the
    server reports a falsy ret_value.
    """
    resp = self.session.post(self.TORRENT_UPDATE_URL,
                             data={"random": self._random},
                             files={"filepath": (filename, fp)})
    DEBUG(pformat(resp.content))
    resp.raise_for_status()
    # The result JS is embedded in the HTML in one of two forms.
    m = re.search("""btResult =(.*?);</script>""", resp.content)
    if not m:
        m = re.search(r"""(parent\.edit_bt_list.*?);\s*</script>""", resp.content)
    if not m:
        return {}
    func, args = parser_js_function_call(m.group(1))
    DEBUG(pformat(args))
    assert args
    if args and args[0]["ret_value"]:
        return args[0]
    return {}
def check_login(self):
    """Verify the current session is logged in.

    On success caches uid, vip state, nickname, username and the task
    page URL on self, and returns True; otherwise returns False.
    """
    resp = self.session.get(self.CHECK_LOGIN_URL)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    if not args or args[0].get("result") != 1:
        return False
    data = args[0]["data"]
    self.uid = int(data.get("userid"))
    self.isvip = data.get("vipstate")
    self.nickname = data.get("nickname")
    self.username = data.get("usrname")
    self.task_url = self.TASK_URL % self.uid
    return True
def vod_get_bt_pic(self, cid, bindex=None):
    """Get gcid/screenshot info for files of the BT task *cid*."""
    # Avoid a mutable default argument (shared across calls).
    bindex = [] if bindex is None else bindex
    params = {
        "jsonp": "jsonp1234567890",
        "t": self._now,
        "info_hash": cid,
        "req_list": "/".join(map(str, bindex)),
    }
    r = self.session.get(self.VOD_GET_BT_PIC % params)
    if r.error:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0].get("resp", {})
def get_wait_time(self, task_id, key=None):
    """Query the wait time for *task_id*; returns the response dict or {}."""
    query = dict(callback="download_check_respo", t=self._now, taskid=task_id)
    if key:
        query["key"] = key
    resp = self.session.get(self.GET_WAIT_TIME_URL, params=query)
    if resp.error:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    if not args:
        return {}
    return args[0]
def delete_task(self, task_ids):
    """Delete tasks by id; True when the server reports result == 1."""
    query = {"type": "0",
             "databases": "0",
             "taskids": ",".join(task_ids),
             "noCacheIE": self._now}
    resp = self.session.get(self.TASK_DELETE_URL, params=query)
    if resp.error:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0].get("result") == 1)
def get_free_url(self, nm_list=None, bt_list=None):
    """Fetch free (non-member) download URLs for the given task ids."""
    # Avoid mutable default arguments (shared across calls).
    nm_list = [] if nm_list is None else nm_list
    bt_list = [] if bt_list is None else bt_list
    info = {}
    params = dict(
        key=info.get("key", ""),
        list=",".join(str(x) for x in nm_list + bt_list),
        nm_list=",".join(str(x) for x in nm_list),
        bt_list=",".join(str(x) for x in bt_list),
        uid=self.uid,
        t=self._now)
    r = self.session.get(self.GET_FREE_URL, params=params)
    r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0] if args else {}
def _get_bt_list(self, tid, cid):
    """Fetch the file list of BT task *tid*/*cid*; {} when empty.

    Raises LiXianAPIException when the server answers with an error
    string instead of a result object.
    """
    resp = self.session.get(self.FILL_BT_LIST % dict(
        tid=tid, cid=cid, uid=self.uid, cachetime=self._now))
    if resp.error:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if not args:
        return {}
    if isinstance(args[0], basestring):
        # Same behavior as the old two-expression `raise E, v` form.
        raise LiXianAPIException(args[0])
    return args[0].get("Result", {})
def _torrent_upload(self, filename, fp):
    """Upload a torrent and return the parsed btResult payload.

    Returns {} when the response matches neither known embedding or the
    server reports a falsy ret_value.
    """
    resp = self.session.post(self.TORRENT_UPDATE_URL,
                             data={"random": self._random},
                             files={'filepath': (filename, fp)})
    DEBUG(pformat(resp.content))
    resp.raise_for_status()
    m = re.search("""btResult =(.+);</script> btRtcode =""", resp.content)
    if not m:
        m = re.search(r"""(parent\.edit_bt_list.*?);\s*</script>""", resp.content)
    if not m:
        return {}
    func, args = parser_js_function_call(m.group(1))
    DEBUG(pformat(args))
    assert args
    if args and args[0]['ret_value']:
        return args[0]
    return {}
def _get_showtask(self, pagenum, st):
    """Fetch one page of the task list (type *st*, *pagenum* tasks)."""
    # The server also reads the page size from this cookie.
    self.session.cookies["pagenum"] = str(pagenum)
    query = {"callback": "json1234567890",
             "t": self._now,
             "type_id": st,
             "page": 1,
             "tasknum": pagenum,
             "p": 1,
             "interfrom": "task"}
    resp = self.session.get(self.SHOWTASK_UNFRSH_URL, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0] if args else {}
def _get_bt_list(self, tid, cid):
    """Fetch the BT file list for task *tid* / infohash *cid*.

    Raises LiXianAPIException when the server answers with an error string.
    """
    self.session.cookies["pagenum"] = str(2000)
    query = dict(callback="fill_bt_list", tid=tid, infoid=cid, g_net=1,
                 p=1, uid=self.uid, noCacheIE=self._now)
    resp = self.session.get(self.FILL_BT_LIST, params=query)
    resp.raise_for_status()
    # The body starts with a UTF-8 BOM ('\xef\xbb\xbf'); skip those 3 bytes.
    func, args = parser_js_function_call(resp.content[3:])
    DEBUG(pformat(args))
    if not args:
        return {}
    if isinstance(args[0], basestring):
        raise LiXianAPIException(args[0])
    return args[0].get("Result", {})
def vod_get_play_url(self, url, bindex=-1):
    """Resolve a VOD play URL for *url* (BT file index *bindex*)."""
    query = {
        "callback": "jsonp1234567890",
        "t": self._now,
        "check": 0,
        "url": url,
        "format": 225536,  # 225536:g, 282880:p
        "bindex": bindex,
        "isipad": 0,
    }
    resp = self.session.get(self.VOD_GET_PLAY_URL, params=query,
                            headers={'referer': 'http://222.141.53.5/iplay.html'})
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def vod_get_bt_pic(self, cid, bindex=None):
    """Get gcid and screenshot info for files of a BT task.

    * max length of bindex is 18
    """
    # Avoid a mutable default argument (shared across calls).
    bindex = [] if bindex is None else bindex
    params = {
        "jsonp": "jsonp1234567890",
        "t": self._now,
        "info_hash": cid,
        "req_list": "/".join(map(str, bindex)),
    }
    r = self.session.get(self.VOD_GET_BT_PIC % params)
    r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0].get("resp", {})
def vod_get_play_url(self, url, bindex=-1):
    """Resolve a VOD play URL for *url* (BT file index *bindex*)."""
    query = {
        "callback": "jsonp1234567890",
        "t": self._now,
        "check": 0,
        "url": url,
        "format": 225536,  # 225536:g, 282880:p
        "bindex": bindex,
        "isipad": 0,
    }
    resp = self.session.get(self.VOD_GET_PLAY_URL, params=query)
    if resp.error or resp.status_code != 200:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return args[0]
def delete_task(self, task_ids):
    """Delete tasks via POST; True when the server reports result == 1."""
    resp = self.session.post(
        self.TASK_DELETE_URL,
        params={"type": "0", "t": _now()},
        data={"databases": "0", "taskids": ",".join(map(str, task_ids))})
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0].get("result") == 1)
def _get_bt_list(self, tid, cid):
    """Fetch the BT file list for task *tid* / infohash *cid*.

    Raises LiXianAPIException when the server answers with an error string.
    """
    self.session.cookies["pagenum"] = str(2000)
    query = dict(callback="fill_bt_list", tid=tid, infoid=cid,
                 g_net=1, p=1, uid=self.uid, noCacheIE=_now())
    resp = self.session.get(self.FILL_BT_LIST, params=query)
    resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if not args:
        return {}
    if isinstance(args[0], basestring):
        raise LiXianAPIException(args[0])
    return args[0].get("Result", {})
def get_free_url(self, nm_list=None, bt_list=None):
    """Fetch free (non-member) download URLs for the given task ids."""
    # Avoid mutable default arguments (shared across calls).
    nm_list = [] if nm_list is None else nm_list
    bt_list = [] if bt_list is None else bt_list
    # A wait-time key used to be fetched first:
    #   info = self.get_wait_time(task_id)
    #   if info.get("result") != 0: return {}
    info = {}
    params = dict(key=info.get("key", ""),
                  list=",".join(str(x) for x in nm_list + bt_list),
                  nm_list=",".join(str(x) for x in nm_list),
                  bt_list=",".join(str(x) for x in bt_list),
                  uid=self.uid,
                  t=self._now)
    r = self.session.get(self.GET_FREE_URL, params=params)
    if r.error or r.status_code != 200:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0] if args else {}
def _get_bt_list(self, tid, cid):
    """Fetch the BT file list for task *tid* / infohash *cid*.

    Raises LiXianAPIException when the server answers with an error string.
    """
    query = dict(callback="fill_bt_list", tid=tid, infoid=cid,
                 g_net=1, p=1, uid=self.uid, noCacheIE=self._now)
    resp = self.session.get(self.FILL_BT_LIST, params=query,
                            cookies=dict(pagenum=2000))
    if resp.error:
        resp.raise_for_status()
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if not args:
        return {}
    if isinstance(args[0], basestring):
        raise LiXianAPIException(args[0])
    return args[0].get("Result", {})
def get_free_url(self, nm_list=None, bt_list=None):
    """Fetch free (non-member) download URLs for the given task ids."""
    # Avoid mutable default arguments (shared across calls).
    nm_list = [] if nm_list is None else nm_list
    bt_list = [] if bt_list is None else bt_list
    # A wait-time key used to be fetched first:
    #   info = self.get_wait_time(task_id)
    #   if info.get("result") != 0: return {}
    info = {}
    params = dict(
        key=info.get("key", ""),
        list=",".join(str(x) for x in nm_list + bt_list),
        nm_list=",".join(str(x) for x in nm_list),
        bt_list=",".join(str(x) for x in bt_list),
        uid=self.uid,
        t=self._now)
    r = self.session.get(self.GET_FREE_URL, params=params)
    if r.error or r.status_code != 200:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    return args[0] if args else {}
def _get_bt_list(self, tid, cid):
    """Fetch the BT file list for task *tid* / infohash *cid*.

    Raises LiXianAPIException when the server answers with an error string.
    """
    self.session.cookies["pagenum"] = str(2000)
    query = dict(callback="fill_bt_list", tid=tid, infoid=cid, g_net=1,
                 p=1, uid=self.uid, noCacheIE=self._now)
    resp = self.session.get(self.FILL_BT_LIST, params=query)
    if resp.error or resp.status_code != 200:
        resp.raise_for_status()
    # The body starts with a UTF-8 BOM ('\xef\xbb\xbf'); skip those 3 bytes.
    func, args = parser_js_function_call(resp.content[3:])
    DEBUG(pformat(args))
    if not args:
        return {}
    if isinstance(args[0], basestring):
        raise LiXianAPIException(args[0])
    return args[0].get("Result", {})
def task_check(self, url):
    """Pre-check *url* before adding a download task.

    Returns the decoded queryCid fields as a dict, or {} when the reply
    carries fewer fields than expected.
    """
    r = self.session.get(self.TASK_CHECK_URL, params={
        "callback": "queryCid",
        "url": url,
        "random": self._random,
        "tcache": self._now})
    r.raise_for_status()
    # queryCid(cid,gcid,file_size,avail_space,tname,goldbean_need,silverbean_need,is_full,random)
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    # Fix: args[8] is read below, so a full reply has 9 fields; the old
    # `len(args) < 8` guard allowed an IndexError on an 8-field reply.
    if len(args) < 9:
        return {}
    result = dict(
        cid=args[0],
        gcid=args[1],
        size=args[2],
        title=title_fix(args[4]),
        goldbean_need=args[5],
        silverbean_need=args[6],
        is_full=args[7],
        random=args[8])
    return result
def share(self, emails, tasks, msg="", task_list=None):
    """Share the tasks whose ids are in *tasks* with the given emails.

    *task_list* defaults to the full task list; BT tasks are currently
    skipped (TODO upstream). Returns True when the server reports success.
    """
    if task_list is None:
        task_list = self.get_task_list()
    payload = []
    idx = 0
    for task in task_list:
        if task["task_id"] not in tasks:
            continue
        if task["task_type"] == "bt":
            # TODO: sharing BT tasks is not implemented.
            continue
        if not task["lixian_url"]:
            continue
        url_params = parse_url(task["lixian_url"])
        payload.append({
            "cid_%d" % idx: task["cid"],
            "file_size_%d" % idx: task["size"],
            "gcid_%d" % idx: url_params.get("g", ""),
            "url_%d" % idx: task["url"],
            "title_%d" % idx: task["taskname"],
            "section_%d" % idx: url_params.get("scn", ""),
        })
        idx += 1
    form = dict(
        uid=self.uid,
        sessionid=self.get_cookie("sessionid"),
        msg=msg,
        resv_email=";".join(emails),
        data=json.dumps(payload),
    )
    resp = self.session.post(self.SHARE_URL, form)
    resp.raise_for_status()
    # Response shape: forward_res(1,"ok",649513164808429);
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0] == 1)
def task_check(self, url):
    """Pre-check *url*; returns the queryCid fields or {} when short."""
    target = self.TASK_CHECK_URL % {
        "url": urllib.quote_plus(url),
        "random": self._random,
        "cachetime": self._now}
    resp = self.session.get(target)
    if resp.error:
        resp.raise_for_status()
    # queryCid(cid,gcid,file_size,tname,goldbean_need,silverbean_need,is_full,random)
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if len(args) < 8:
        return {}
    keys = ("cid", "gcid", "size", "title", "goldbean_need",
            "silverbean_need", "is_full", "random")
    return dict(zip(keys, args[:8]))
def task_check(self, url):
    """Pre-check *url* before adding a download task.

    Returns the decoded queryCid fields as a dict, or {} when the reply
    carries fewer fields than expected.
    """
    r = self.session.get(self.TASK_CHECK_URL, params={
        "callback": "queryCid",
        "url": url,
        "random": self._random,
        "tcache": self._now})
    r.raise_for_status()
    # queryCid(cid,gcid,file_size,avail_space,tname,goldbean_need,silverbean_need,is_full,random)
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    # Fix: the comment above documents 9 fields and args[8] is read
    # below; the old `len(args) < 8` guard allowed an IndexError on an
    # 8-field reply.
    if len(args) < 9:
        return {}
    result = dict(
        cid=args[0],
        gcid=args[1],
        size=args[2],
        title=title_fix(args[4]),
        goldbean_need=args[5],
        silverbean_need=args[6],
        is_full=args[7],
        random=args[8])
    return result
def get_task_process(self, nm_list=None, bt_list=None, with_summary=False):
    """Query download progress for the given normal and BT task ids.

    Returns a list of per-task dicts; when *with_summary* is true,
    returns (list, summary) where summary is the "Task" section of the
    server response.
    """
    # Avoid mutable default arguments (shared across calls).
    nm_list = [] if nm_list is None else nm_list
    bt_list = [] if bt_list is None else bt_list
    params = dict(
        callback="rebuild",
        list=",".join(str(x) for x in nm_list + bt_list),
        nm_list=",".join(str(x) for x in nm_list),
        bt_list=",".join(str(x) for x in bt_list),
        uid=self.uid,
        noCacheIE=self._now,
    )
    r = self.session.get(self.GET_TASK_PROCESS, params=params)
    if r.error:
        r.raise_for_status()
    function, args = parser_js_function_call(r.content)
    DEBUG(pformat(args))
    assert args
    args = args[0]
    result = []
    for task in args.get("Process", {}).get("Record", []) if args else []:
        status = None
        if task.get('fsize', u'0B') == u'0B':
            # A zero-size record: the task is owned by another account.
            status = 'failed'
        result.append(dict(
            task_id=int(task['tid']),
            cid=task.get('cid', None),
            status=status or self.d_status.get(int(task['download_status']), "waiting"),
            process=task['percent'],
            leave_time=task['leave_time'],
            speed=int(task['speed']),
            lixian_url=task.get('lixian_url', None),
        ))
    if with_summary:
        return result, args.get("Process", {}).get("Task", {})
    return result
def bt_task_check(self, url):
    """Inspect a torrent URL and return its metadata plus file list.

    Returns {} when the reply is short or reports a zero size.
    """
    resp = self.session.get(self.QUERY_URL, params={
        "callback": "queryUrl",
        "u": url,
        "random": self._random,
        "tcache": self._now})
    if resp.error:
        resp.raise_for_status()
    # queryUrl(flag,infohash,fsize,bt_title,is_full,subtitle,subformatsize,size_list,valid_list,file_icon,findex,random)
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if len(args) < 12:
        return {}
    if not args[2]:
        return {}
    result = dict(
        flag=args[0],
        cid=args[1],
        size=args[2],
        title=title_fix(args[3]),
        is_full=args[4],
        random=args[11],
    )
    # args[5:11] are parallel per-file arrays; zip them into file dicts.
    filelist = []
    for sub, fmt, sz, ok, icon, idx in zip(*args[5:11]):
        filelist.append(dict(
            title=sub,
            formatsize=fmt,
            size=sz,
            file_icon=icon,
            ext="",
            index=idx,
            valid=int(ok),
        ))
    result['filelist'] = filelist
    return result
def share(self, emails, tasks, msg="", task_list=None):
    """Share the tasks whose ids are in *tasks* with the given emails.

    *task_list* defaults to the full task list; BT tasks are currently
    skipped (TODO upstream). Returns True when the server reports success.
    """
    if task_list is None:
        task_list = self.get_task_list()
    payload = []
    idx = 0
    for task in task_list:
        if task["task_id"] not in tasks:
            continue
        if task["task_type"] == "bt":
            # TODO: sharing BT tasks is not implemented.
            continue
        if not task["lixian_url"]:
            continue
        url_params = parse_url(task['lixian_url'])
        payload.append({
            "cid_%d" % idx: task["cid"],
            "file_size_%d" % idx: task["size"],
            "gcid_%d" % idx: url_params.get("g", ""),
            "url_%d" % idx: task["url"],
            "title_%d" % idx: task["taskname"],
            "section_%d" % idx: url_params.get("scn", "")
        })
        idx += 1
    form = dict(uid=self.uid,
                sessionid=self.get_cookie("sessionid"),
                msg=msg,
                resv_email=";".join(emails),
                data=json.dumps(payload))
    resp = self.session.post(self.SHARE_URL, form)
    if resp.error:
        resp.raise_for_status()
    # Response shape: forward_res(1,"ok",649513164808429);
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    assert args
    return bool(args and args[0] == 1)
def task_check(self, url):
    """Pre-check *url*; returns the queryCid fields or {} when short."""
    target = self.TASK_CHECK_URL % {
        "url": urllib.quote_plus(url),
        "random": self._random,
        "cachetime": self._now
    }
    resp = self.session.get(target)
    if resp.error:
        resp.raise_for_status()
    # queryCid(cid,gcid,file_size,tname,goldbean_need,silverbean_need,is_full,random)
    func, args = parser_js_function_call(resp.content)
    DEBUG(pformat(args))
    if len(args) < 8:
        return {}
    keys = ("cid", "gcid", "size", "title", "goldbean_need",
            "silverbean_need", "is_full", "random")
    return dict(zip(keys, args[:8]))