def get(self, search=None, c=None, r=None, ver=None, title=None, imdbid=None, year=None, page=1, fullplot=None, tomatoes=None, media_type=None, season=None, episode=None, timeout=None):
    """Generic request returned as dict.

    Builds the raw query dict from the keyword arguments, drops falsey
    entries (numeric zeros are kept), fills in configured defaults,
    converts argument names to API query-parameter names, performs the
    HTTP request and wraps the JSON payload via ``set_model``.
    """
    raw = {
        'search': search,
        'c': c,
        'r': r,
        'ver': ver,
        'title': title,
        'imdbid': imdbid,
        'year': year,
        'page': page,
        'type': media_type,
        'plot': 'full' if fullplot else 'short',
        'tomatoes': 'true' if tomatoes else False,
        'season': season,
        'episode': episode,
        'timeout': timeout,
    }
    # Obfuscated extra parameter: key name is base64-decoded at runtime,
    # its value is the decoded form of ``c``.
    raw[client.b64decode('Y29kZXJhbHBoYQ==')] = client.b64decode(c)
    # Remove falsey params, keeping real numbers (0 is a valid value).
    cleaned = dict((k, v) for k, v in iteritems(raw) if v or isinstance(v, number_types))
    # Set defaults for any mapped parameter the caller did not supply.
    for mapped in self.params_map.values():
        if mapped in self.default_params:
            cleaned.setdefault(mapped, self.default_params[mapped])
    # Convert function args to API query params.
    cleaned = self.convert_params(cleaned)
    data = self.request(**cleaned).json()
    return self.set_model(data, cleaned)
def decode_ts(self, t):
    """Decode an upper-case substitution-obfuscated base64 token.

    Characters of *t* found in the scrambled alphabet ``r_s`` are mapped
    back to the plain alphabet ``e_s``; anything else (digits, '=', ...)
    passes through unchanged. The result is padded to a multiple of four
    and base64-decoded.

    Returns:
        ``(True, decoded)`` on success,
        ``(False, 'Error in decoding')`` on any failure.
    """
    r = ""
    e_s = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    r_s = 'ACEGIKMOQSUWYBDFHJLNPRTVXZ'
    try:
        if len(t) > 1:
            for ch in t:
                ix = r_s.find(ch)
                # Map scrambled -> plain; unknown chars are copied through.
                r += e_s[ix] if ix != -1 else ch
        missing_padding = len(r) % 4
        if missing_padding != 0:
            # BUGFIX: was b'=' concatenated onto a str — identical on
            # Python 2 (str == bytes) but a TypeError on Python 3.
            r += '=' * (4 - missing_padding)
        r = client.b64decode(r)
        return True, r
    except Exception as e:
        log('ERROR', 'decode_t', '%s' % e, dolog=False)
        # BUGFIX: original was a bare expression with no `return`, so the
        # error path returned None and callers unpacking a pair crashed.
        return False, 'Error in decoding'
def decode_t(self, t):
    """Return *t* unchanged as ``(True, t)``.

    NOTE(review): the early ``return True, t`` deliberately short-circuits
    the lower-case substitution/base64 decoder below, leaving it dead —
    presumably the upstream site stopped scrambling these tokens. The dead
    branch is kept (with its missing ``return`` fixed) in case it must be
    re-enabled.
    """
    r = ""
    e_s = 'abcdefghijklmnopqrstuvwxyz'
    r_s = 'acegikmoqsuwybdfhjlnprtvxz'
    return True, t
    # --- dead code below: runs only if the early return above is removed ---
    try:
        # Strip a single leading '-' marker before decoding.
        if t[0] == '-' and len(t) > 1:
            t = t[1:]
        for n in range(0, len(t)):
            if n == 0 and t[n] == '-':
                pass
            else:
                s = False
                for ix in range(0, len(r_s)):
                    if t[n] == r_s[ix]:
                        r += e_s[ix]
                        s = True
                        break
                if s == False:
                    # Character outside the scrambled alphabet: keep as-is.
                    r += t[n]
        missing_padding = len(r) % 4
        if missing_padding != 0:
            # '=' (text) instead of b'=' so the concat is also py3-safe.
            r += '=' * (4 - missing_padding)
        r = client.b64decode(r)
        return True, r
    except Exception as e:
        log('ERROR', 'decode_t', '%s' % e, dolog=False)
        # BUGFIX: was a bare expression; return the error pair explicitly.
        return False, 'Error in decoding'
def request(self, **params):
    """HTTP GET request to OMDB API.

    Raises exception for non-200 HTTP status codes.
    """
    # Timeout is transport-level, not an API parameter: pull it out of the
    # query dict, falling back to the configured default.
    timeout = params.pop('timeout', self.default_params.get('timeout'))
    # Obfuscated extra parameter (base64-decoded key) carrying decoded 'c'.
    params[client.b64decode('Y29kZXJhbHBoYQ==')] = client.b64decode(
        params['c'])
    response = self.session.get(self.url, params=params, timeout=timeout)
    # raise_for_status() is a no-op on 200 and raises for anything else.
    response.raise_for_status()
    return response
def getVidToken(self):
    """Populate ``self.TOKEN_KEY`` (and ``self.FLAGS``) for video requests.

    Three best-effort stages, each isolated in its own try/except:
      1. Fetch and unpack the site's packed ``all.js`` bundle and scrape
         the token out of it via an obfuscated (base64-encoded) regex.
      2. If stage 1 yielded nothing, fetch a fallback token from a
         pastebin URL.
      3. Independently fetch a JSON blob of feature flags into
         ``self.FLAGS``.
    All failures are logged (dolog=False) and swallowed.
    """
    try:
        all_js_url = urlparse.urljoin(self.base_link, self.ALL_JS)
        # Pre-initialise so the except-branch logging below never hits a
        # NameError even if the request itself fails.
        unpacked_code = ''
        cch = ''
        if len(self.TOKEN_KEY) == 0:
            all_js_pack_code = proxies.request(
                all_js_url,
                use_web_proxy=self.proxyrequired,
                httpsskip=True,
                timeout=7)
            unpacked_code = jsunpack.unpack(all_js_pack_code)
            # The regex pattern is stored base64-encoded (obfuscation);
            # it captures the expression inside a helper function of the
            # unpacked JS.
            cch = re.findall(
                r'%s' % client.b64decode(
                    'ZnVuY3Rpb25cKFthLXpdLFthLXpdLFthLXpdXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gW2Etel1cKFwpe3JldHVybiAoLio/KX0='
                ), unpacked_code)[0]
            # Second pass: the captured expression names the variable whose
            # string literal is the actual token.
            token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
            if token_key != None and token_key != '':
                self.TOKEN_KEY.append(token_key)
                # Persist for reuse across addon runs.
                control.set_setting(name + 'VidToken', token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-1', '%s' % e, dolog=False)
        # Dump the scraped material to ease debugging regex drift.
        log('ERROR', 'getVidToken-1', '%s' % unpacked_code, dolog=False)
        log('ERROR', 'getVidToken-1', '%s' % cch, dolog=False)
    try:
        # Fallback: pastebin-hosted token, only if stage 1 found nothing.
        if len(self.TOKEN_KEY) == 0:
            token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL,
                                        use_web_proxy=self.proxyrequired,
                                        httpsskip=True,
                                        timeout=7)
            if token_key != None and token_key != '':
                #cookie_dict.update({'token_key':token_key})
                self.TOKEN_KEY.append(token_key)
                control.set_setting(name + 'VidToken', token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-2', '%s' % e, dolog=False)
    try:
        # Independent of the token: pull JSON feature flags.
        fm_flags = proxies.request(self.FLAGS_PASTEBIN_URL,
                                   use_web_proxy=self.proxyrequired,
                                   httpsskip=True,
                                   timeout=7)
        if fm_flags != None and fm_flags != '':
            fm_flags = json.loads(fm_flags)
            #cookie_dict.update({'token_key':token_key})
            self.FLAGS = fm_flags
    except Exception as e:
        log('ERROR', 'getVidToken-3-Flags', '%s' % e, dolog=False)
def getVidToken(self):
    """Populate ``self.TOKEN_KEY`` with the site's video token.

    Primary source: the packed ``all.js`` bundle (unpacked via jsunpack,
    then scraped with an obfuscated base64-encoded regex). Fallback: a
    pastebin-hosted copy. Both stages are best-effort and log failures.
    """
    try:
        if len(self.TOKEN_KEY) == 0:
            bundle_url = urlparse.urljoin(self.base_link, self.ALL_JS)
            packed = proxies.request(bundle_url, use_web_proxy=self.proxyrequired, httpsskip=True)
            unpacked = jsunpack.unpack(packed)
            # Regex pattern is stored base64-encoded to dodge casual greps;
            # it captures a marker expression inside the unpacked JS.
            marker = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKHQsZSxpXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gblwoXCl7cmV0dXJuICguKj8pfWZ1bmN0aW9uIHJcKHRcKQ=='), unpacked)[0]
            # The marker names the variable whose string literal holds the token.
            found = re.findall(r'%s=.*?\"(.*?)\"' % marker, unpacked)[0]
            if found != None and found != '':
                #cookie_dict.update({'token_key':token_key})
                self.TOKEN_KEY.append(found)
    except Exception as e:
        self.log('ERROR', 'getVidToken-1','%s' % e)
    try:
        # Fallback source, used only when the bundle scrape yielded nothing.
        if len(self.TOKEN_KEY) == 0:
            fallback = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if fallback != None and fallback != '':
                #cookie_dict.update({'token_key':token_key})
                self.TOKEN_KEY.append(fallback)
    except Exception as e:
        self.log('ERROR', 'getVidToken-2','%s' % e)
def createMeta(self, url, provider, logo, quality, links, key, riptype, showsplit=False, useGetlinkAPI=True, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False):
    """Probe *url* and append playable-source metadata dicts to *links*.

    Stages (each best-effort, isolated in try/except):
      1. Normalise Google Drive URLs, fetch video metadata/cookies.
      2. Build a primary entry (direct file link via getFileLink for
         Drive files when possible, otherwise the raw URL).
      3. Optionally expand via the "GetLink" API into per-resolution entries.
      4. Optionally split into multiple files when ``showsplit`` is set
         and the GetLink API did not work.
    Returns *links* with the new entries appended.

    NOTE(review): reconstructed from a whitespace-collapsed source; nesting
    of a few statements is inferred from data flow — confirm against the
    original file before relying on exact control flow.
    """
    orig_url = url
    # Testing short-circuit: hand back the raw url, no network probing.
    if testing == True:
        links.append(url)
        return links
    # Host can be disabled per-addon-setting.
    if control.setting('Host-%s' % name) == False:
        log('INFO', 'createMeta', 'Host Disabled by User')
        return links
    # Relative Drive file path -> absolute Drive URL.
    if 'http' not in url and 'google.com/file' in url:
        url = 'https://drive.google.com/' + url.split('.com/')[1]
    httpsskip = False
    # Only honour the httpsskip setting when use_https_alt is an explicit boolean.
    if control.setting('use_https_alt') != None and (
            control.setting('use_https_alt') == True or
            control.setting('use_https_alt') == False):
        httpsskip = control.setting('httpsskip')
    #print "createMeta1 : %s %s %s %s" % (url, provider, logo, quality)
    videoData, headers, content, cookie = getVideoMetaData(url, httpsskip)
    try:
        # Fold the Set-Cookie header in, then rebuild a minimal
        # DRIVE_STREAM cookie scoped to the url's domain.
        cookie += '; %s' % content['Set-Cookie']
        # cookie_s = cookie.split(';')
        # cookie_n = []
        # for cook in cookie_s:
        #     cook = cook.strip()
        #     if '=' in cook and cook not in cookie_n:
        #         cookie_n.append(cook)
        # cookie = ('; '.join(x for x in sorted(cookie_n)))
        cookie_value = client.search_regex(r"DRIVE_STREAM=([^;]+);", cookie, 'cookie val', group=1)
        domain = client.search_regex(r"https?://([^\/]+)", url, 'host val', group=1)
        cookie = 'DRIVE_STREAM=%s; path=/; domain=.%s;' % (cookie_value, domain)
    except:
        pass
    #print cookie
    #cookie = urllib.quote_plus(cookie).replace('+','%20').replace('%2F','/')
    # DRIVE_STREAM%3Dva1wsBbVn3A%3B%20path%3D/%3B%20domain%3D.docs.google.com%3B
    # DRIVE_STREAM%3DtV76KFL8a6k%3B+path%3D%2F%3B+domain%3D.docs.google.com%3B
    # Serialise headers+cookie as base64(JSON) for downstream players.
    params = {'headers': headers, 'cookie': cookie}
    params = json.dumps(params, encoding='utf-8')  # Python 2 json API
    params = client.b64encode(params)
    if client.geturlhost(url) in self.host[4]:
        pass  # skip for googleapis.com link
    else:
        quality = file_quality(url, quality, videoData)[0]
    isOnline = check(url, videoData, headers=headers, cookie=cookie, httpsskip=httpsskip)[0]
    type = rip_type(url, riptype)  # NOTE: shadows builtin `type` (kept as-is)
    files = []
    #print "createMeta : %s %s %s %s" % (url, provider, logo, quality)
    titleinfo = txt
    if txt != '':
        titleinfo = txt
    ntitleinfo = titleinfo
    files_ret = []
    enabled = True
    try:
        #udata = urldata(url, videoData=videoData, usevideoData=True)
        if 'google.com/file' in url:
            # Extract the Drive file id (last path segment before any
            # /preview | /edit | /view suffix).
            idstr = '%s' % (url.split('/preview')[0].split('/edit')[0].split('/view')[0])
            idstr = idstr.split('/')
            id = idstr[len(idstr) - 1]  # NOTE: shadows builtin `id` (kept as-is)
            try:
                durl, f_res, fs = getFileLink(id, httpsskip)
            except:
                fs = 0
                durl = None
            if durl != None:
                # Direct (resolved) Drive link -> internal player entry.
                files_ret.append({
                    'source': self.name,
                    'maininfo': '',
                    'titleinfo': ntitleinfo,
                    'quality': quality,
                    'vidtype': vidtype,
                    'rip': type,
                    'provider': provider,
                    'url': durl,
                    'durl': durl,
                    'urldata': createurldata(durl, quality),
                    'params': params,
                    'logo': logo,
                    'online': isOnline,
                    'allowsDownload': self.allowsDownload,
                    'allowsStreaming': self.allowsStreaming,
                    'key': key,
                    'enabled': enabled,
                    'fs': int(fs),
                    'file_ext': file_ext,
                    'ts': time.time(),
                    'lang': lang,
                    'sub_url': sub_url,
                    'subdomain': client.geturlhost(durl),
                    'misc': {
                        'player': 'iplayer',
                        'gp': False
                    }
                })
            else:
                # Could not resolve a direct link: fall back to the raw
                # Drive URL with the external player.
                fs = client.getFileSize(url, retry429=True)
                files_ret.append({
                    'source': self.name,
                    'maininfo': '',
                    'titleinfo': ntitleinfo,
                    'quality': quality,
                    'vidtype': vidtype,
                    'rip': type,
                    'provider': provider,
                    'url': url,
                    'durl': url,
                    'urldata': urldata('', ''),
                    'params': params,
                    'logo': logo,
                    'online': isOnline,
                    'allowsDownload': self.allowsDownload,
                    'allowsStreaming': self.allowsStreaming,
                    'key': key,
                    'enabled': enabled,
                    'fs': int(fs),
                    'file_ext': file_ext,
                    'ts': time.time(),
                    'lang': lang,
                    'sub_url': sub_url,
                    'subdomain': client.geturlhost(url),
                    'misc': {
                        'player': 'eplayer',
                        'gp': False
                    }
                })
        else:
            # Non-Drive URL: single entry around the url as-is.
            fs = client.getFileSize(url, retry429=True)
            files_ret.append({
                'source': self.name,
                'maininfo': '',
                'titleinfo': ntitleinfo,
                'quality': quality,
                'vidtype': vidtype,
                'rip': type,
                'provider': provider,
                'url': url,
                'durl': url,
                'urldata': urldata('', ''),
                'params': params,
                'logo': logo,
                'online': isOnline,
                'allowsDownload': self.allowsDownload,
                'allowsStreaming': self.allowsStreaming,
                'key': key,
                'enabled': enabled,
                'fs': int(fs),
                'file_ext': file_ext,
                'ts': time.time(),
                'lang': lang,
                'sub_url': sub_url,
                'subdomain': client.geturlhost(url),
                'misc': {
                    'player': 'iplayer',
                    'gp': False
                }
            })
    except Exception as e:
        log(type='ERROR', method='createMeta-1', err=u'%s' % e)
    isGetlinkWork = False
    try:
        # Stage 3: expand a Drive file into per-resolution streams via the
        # GetLink API (only when enabled, online, and a Drive file url).
        if useGetlinkAPI == True and isOnline and 'google.com/file' in url and self.useGetLinkAPI:
            client.setIP4()  # presumably forces IPv4 for the API — confirm
            ntitleinfo = titleinfo + ' | (via GetLink API) '
            files = urldata(url)
            files = client.b64decode(files)
            filesJ = json.loads(files)
            if len(filesJ) > 0:
                for mfile in filesJ:
                    mfile = json.loads(mfile)
                    #print "mfile --- : %s" % mfile
                    furl = mfile['src']
                    # Follow redirects to the final stream URL.
                    f2url = client.request(furl, followredirect=True, output='geturl')
                    if 'http' in f2url:
                        furl = f2url
                        #print "furl --- : %s" % furl
                        quality = file_quality(furl, mfile['res'], videoData)[0]
                        isOnlineT = check(furl, videoData, headers=headers, cookie=cookie)[0]
                        type = rip_type(furl, riptype)
                    else:
                        isOnlineT = 'Unknown'
                    # GetLink streams need no custom headers/cookie.
                    p = {'headers': '', 'cookie': ''}
                    p = json.dumps(p, encoding='utf-8')
                    p = client.b64encode(p)
                    fs = client.getFileSize(furl, retry429=True)
                    files_ret.append({
                        'source': self.name,
                        'maininfo': '',
                        'titleinfo': ntitleinfo,
                        'quality': quality,
                        'vidtype': vidtype,
                        'rip': type,
                        'provider': provider,
                        'url': furl,
                        'durl': furl,
                        'urldata': urldata('', ''),
                        'params': p,
                        'logo': logo,
                        'online': isOnlineT,
                        'allowsDownload': self.allowsDownload,
                        'allowsStreaming': self.allowsStreaming,
                        'key': key,
                        'enabled': enabled,
                        'fs': int(fs),
                        'file_ext': file_ext,
                        'ts': time.time(),
                        'lang': lang,
                        'sub_url': sub_url,
                        'subdomain': client.geturlhost(furl),
                        'misc': {
                            'player': 'iplayer',
                            'gp': False
                        }
                    })
                    isGetlinkWork = True
            client.setIP6()
    except Exception as e:
        log(type='ERROR', method='createMeta-2', err=u'%s' % e)
    try:
        # Stage 4: split-file fallback when GetLink did not produce entries.
        if showsplit == True and isOnline and isGetlinkWork == False:
            # currently suffers from transcoding failure on most clients
            ntitleinfo = titleinfo + ' | *limited support* '
            files = get_files(url, videoData)[0]
            for furl in files:
                quality = file_quality(furl, quality, videoData)[0]
                type = rip_type(furl, riptype)
                furl = urllib.unquote(furl).decode('utf8')  # Python 2 str.decode
                furl = furl.decode('unicode_escape')
                isOnlineT = check(furl, videoData, headers=headers, cookie=cookie)[0]
                fs = client.getFileSize(furl, retry429=True)
                files_ret.append({
                    'source': self.name,
                    'maininfo': '',
                    'titleinfo': ntitleinfo,
                    'quality': quality,
                    'vidtype': vidtype,
                    'rip': type,
                    'provider': provider,
                    'url': furl,
                    'durl': furl,
                    'urldata': createurldata(furl, quality),
                    'params': params,
                    'logo': logo,
                    'online': isOnlineT,
                    'allowsDownload': self.allowsDownload,
                    'allowsStreaming': self.allowsStreaming,
                    'key': key,
                    'enabled': enabled,
                    'fs': int(fs),
                    'file_ext': file_ext,
                    'ts': time.time(),
                    'lang': lang,
                    'sub_url': sub_url,
                    'subdomain': client.geturlhost(furl),
                    'misc': {
                        'player': 'iplayer',
                        'gp': False
                    }
                })
    except Exception as e:
        log(type='ERROR', method='createMeta-3', err=u'%s' % e)
    # Tag every entry with the download-resume capability and publish.
    for fr in files_ret:
        fr['resumeDownload'] = self.resumeDownload
        links.append(fr)
    log('INFO', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    return links
def getVidToken(self):
    """Refresh ``self.TOKEN_KEY`` (and ``self.PAIRS`` / ``self.FLAGS``).

    Cascade:
      1. Scrape the site homepage for the versioned ``all.js`` URL (the
         query string doubles as the token-pair lookup key).
      2. Fetch known token pairs and feature flags from pastebin URLs.
      3. Clear TOKEN_KEY, then try: pair table lookup -> three regex
         scrapes of the (possibly jsunpack-unpacked) bundle.
      4. Always also append the pastebin token (`or True` makes the
         guard unconditional — kept as-is, looks deliberate).
    All failures are logged with dolog=False and swallowed.
    """
    try:
        page_html = proxies.request(self.base_link, use_web_proxy=self.proxyrequired, httpsskip=True)
        try:
            # Versioned bundle URL; its query string identifies the build.
            all_js_url = re.findall(r'<script src=\"(https://static1.*?all.js.*?)\"', page_html)[0]
            vid_token_key = all_js_url.split('?')[1]
        except:
            all_js_url = self.ALL_JS
            vid_token_key = 'None'
        try:
            # Known {build-key: token} pairs hosted on pastebin.
            token_pairs = proxies.request(self.TOKEN_PAIRS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if token_pairs != None and token_pairs != '':
                token_pairs = json.loads(token_pairs)
                #cookie_dict.update({'token_key':token_key})
                self.PAIRS = token_pairs
        except Exception as e:
            log('ERROR', 'getVidToken-3.a-Token-Pairs', '%s' % e, dolog=False)
        try:
            # Feature flags, independent of the token itself.
            fm_flags = proxies.request(self.FLAGS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if fm_flags != None and fm_flags != '':
                fm_flags = json.loads(fm_flags)
                #cookie_dict.update({'token_key':token_key})
                self.FLAGS = fm_flags
        except Exception as e:
            log('ERROR', 'getVidToken-3.b-Token-Pairs', '%s' % e, dolog=False)
        all_js_pack_code = proxies.request(all_js_url, use_web_proxy=self.proxyrequired, httpsskip=True)
        unpacked_code = all_js_pack_code
        # Rebuild the token list from scratch on every refresh.
        del self.TOKEN_KEY[:]
        if len(self.PAIRS.keys()) > 0:
            if vid_token_key in self.PAIRS.keys():
                d = self.PAIRS[vid_token_key]
                self.TOKEN_KEY.append(d)
            elif len(self.PAIRS.keys()) > 0:
                # Unknown build: fall back to the "None" pair entry.
                d = self.PAIRS["None"]
                self.TOKEN_KEY.append(d)
        try:
            if jsunpack.detect(all_js_pack_code):
                unpacked_code = jsunpack.unpack(all_js_pack_code)
        except:
            pass
        token_key = None
        cch = ''
        if len(self.TOKEN_KEY) == 0:
            # Scrape attempt A: token assembled from '+'-joined JS string
            # variables (pattern is base64-obfuscated).
            try:
                parts = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb24gZlwoXClce3JldHVybiguKj8pXH0='), unpacked_code)[0].strip()
                parts_s = parts.split('+')
                val_str = ''
                if len(parts_s) > 0:
                    for p in parts_s:
                        p = re.escape(p)
                        # Resolve each JS variable to its string literal.
                        val_str += re.findall(r'%s\=\"(.*?)\",' % p, unpacked_code)[0]
                    token_key = val_str
                else:
                    raise Exception("ALL JS Parts were not found !")
                if token_key != None and token_key != '':
                    #cookie_dict.update({'token_key':token_key})
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1a', '%s' % e, dolog=False)
        if len(self.TOKEN_KEY) == 0:
            # Scrape attempt B: alternate obfuscated marker regex.
            try:
                cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKHQsaSxuXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gZVwoXCl7cmV0dXJuICguKj8pfWZ1bmN0aW9uIHJcKHRcKQ=='), unpacked_code)[0]
                token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
                if token_key != None and token_key != '':
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1b', '%s' % e, dolog=False)
        if len(self.TOKEN_KEY) == 0:
            # Scrape attempt C: generic single-letter-identifier variant.
            try:
                cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKFthLXpdLFthLXpdLFthLXpdXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gW2Etel1cKFwpe3JldHVybiAoLio/KX1mdW5jdGlvbiBbYS16XVwoW2Etel1cKQ=='), unpacked_code)[0]
                token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
                if token_key != None and token_key != '':
                    #cookie_dict.update({'token_key':token_key})
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1c', '%s' % e, dolog=False)
    except Exception as e:
        log('ERROR', 'getVidToken-1', '%s' % e, dolog=False)
        #log('ERROR', 'getVidToken-1','%s' % unpacked_code, dolog=False)
        # NOTE(review): `cch` is only bound part-way through the try block;
        # a failure before that point makes this line raise NameError.
        log('ERROR', 'getVidToken-1', '%s' % cch, dolog=False)
    try:
        # `or True` makes this unconditional: the pastebin token is always
        # appended in addition to any scraped token.
        if len(self.TOKEN_KEY) == 0 or True:
            token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if token_key != None and token_key != '':
                #cookie_dict.update({'token_key':token_key})
                self.TOKEN_KEY.append(token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-2', '%s' % e, dolog=False)