def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None, page_url=None):
    """Build source-metadata dicts for every quality variant of `url` and append
    them to `links`.

    Each entry describes one playable stream (quality, filesize, rip type,
    playback params, etc.) for the host this class wraps.  `links` is mutated
    in place and also returned.  When `testing` is True the raw url is appended
    and the method returns immediately.
    """
    files_ret = []
    orig_url = url
    # Test mode: skip all processing, just echo the url back.
    if testing == True:
        links.append(url)
        return links
    # `name` is a module-level host identifier; user can disable this host in settings.
    if control.setting('Host-%s' % name) == False:
        log('INFO','createMeta','Host Disabled by User')
        return links
    try:
        # Placeholder (empty) b64/JSON blobs used when no per-stream data exists.
        urldata = client.b64encode(json.dumps('', encoding='utf-8'))
        params = client.b64encode(json.dumps('', encoding='utf-8'))
        online = check(url)
        # getAllQuals returns one dict per available quality plus an optional subtitle url.
        vidurls, err, sub_url_t = getAllQuals(url, online)
        if vidurls == None:
            log(type='ERROR',method='createMeta-1', err=u'%s' % err)
            return links
        if sub_url_t != None:
            sub_url = sub_url_t
        seq = 0
        for vv in vidurls:
            durl = vv['page']
            vidurl = vv['file']
            if vidurl != None:
                quality = vv['label']
                fs = vv['fs']
                try:
                    log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (durl,quality,fs))
                    files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq})
                except Exception as e:
                    log(type='ERROR',method='createMeta', err=u'%s' % e)
                    # Fallback entry: `urlhost` is a module-level host label; rip type unknown.
                    files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':seq})
            seq += 1
    except Exception as e:
        log('ERROR', 'createMeta', '%s' % e)
    # Register each finished entry as a partial source, then hand it to the caller.
    # (Every dict built above contains 'key', so the guard is effectively always true.)
    for fr in files_ret:
        if fr != None and 'key' in fr.keys():
            control.setPartialSource(fr,self.name)
            links.append(fr)
    if len(files_ret) > 0:
        log('SUCCESS', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    else:
        log('FAIL', 'createMeta', 'Failed in processing %s link >>> %s' % (provider, orig_url), dolog=self.init)
    log('INFO', 'createMeta', 'Completed', dolog=self.init)
    return links
def decode(url, page_url):
    """Resolve an xstreamcdn embed url into a list of stream dicts.

    Extracts the video id from `url`, POSTs to the xstreamcdn source API and
    returns ``(items, err)`` where `items` is a list of dicts with keys
    ``quality``/``src``/``fs``/``online``/``params``/``urldata`` and `err` is
    '' on success or an error description.
    """
    items = []
    err = ''
    try:
        id = re.compile('//.+?/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(url)[0]
        headersx = { 'Referer': 'https://www.xstreamcdn.com/v/%s' % id, 'User-Agent': client.agent() }
        post_data = {'r': page_url, 'd': 'www.xstreamcdn.com'}
        api_url = 'https://www.xstreamcdn.com/api/source/%s' % id
        page_data = client.request(api_url, post=client.encodePostData(post_data), headers=headersx)
        j_data = json.loads(page_data)
        success = j_data['success']
        if success == False:
            # On failure the API puts its message in 'data' (may be absent).
            try:
                msd = j_data['data']
            except:
                msd = ""
            raise Exception( 'API returned error: %s | Data: %s | Return msg: %s' % (api_url, post_data, msd))
        else:
            srcs = j_data['data']
            for src in srcs:
                q = src['label']
                u = src['file']
                fs = client.getFileSize(u, retry429=True, headers=headersx)
                online = check(u)
                # Follow redirects to the final media url when possible.
                u1 = client.request(u, output='geturl')
                if u1 != None:
                    u = u1
                urldata = client.b64encode(json.dumps('', encoding='utf-8'))
                # Fix: the original first assigned an empty-params blob that was
                # immediately overwritten below — that dead store is removed.
                paramsx = {'headers': headersx}
                params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
                items.append({ 'quality': q, 'src': u, 'fs': fs, 'online': online, 'params': params, 'urldata': urldata })
        if len(items) == 0:
            raise Exception('No videos found !')
    except Exception as e:
        err = 'xtreamcdn Error: %s' % e
    return items, err
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None):
    """Resolve every quality variant of `url` and append one metadata dict per
    playable stream to `links` (mutated in place, also returned).

    Unlike the sibling variant, this one resolves each page url via `resolve`
    and probes the filesize itself with `client.getFileSize`.
    """
    # Test mode: echo the url back without any network work.
    if testing == True:
        links.append(url)
        return links
    # `name` is a module-level host identifier; host may be disabled in settings.
    if control.setting('Host-%s' % name) == False:
        log('INFO','createMeta','Host Disabled by User')
        return links
    orig_url = url
    # Empty placeholder blobs for per-stream data/params.
    urldata = client.b64encode(json.dumps('', encoding='utf-8'))
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    online = check(url)
    vidurls, err, sub_url_t = getAllQuals(url, online)
    if vidurls == None:
        log(type='ERROR',method='createMeta-1', err=u'%s' % err)
        return links
    if sub_url_t != None:
        sub_url = sub_url_t
    files_ret = []
    for vv in vidurls:
        durl = vv['page']
        # r1/r2 (error and params from resolve) are intentionally ignored here.
        vidurl, r1, r2 = resolve(durl, online)
        if vidurl != None:
            quality = vv['label']
            try:
                #vidurl_t = client.request(vidurl, output='geturl')
                fs = client.getFileSize(vidurl)
                fs = int(fs)
            except Exception as e:
                log(type='ERROR',method='createMeta', err=u'%s' % e)
                fs = 0
            try:
                log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (vidurl,quality,fs))
                files_ret.append({'source':self.name, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'poster':poster, 'sub_url':sub_url, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}})
            except Exception as e:
                log(type='ERROR',method='createMeta', err=u'%s' % e)
                # Fallback entry with unknown rip type; `urlhost` is module-level.
                files_ret.append({'source':urlhost, 'maininfo':'', 'titleinfo':txt, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'url':durl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':fs, 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'misc':{'player':'iplayer', 'gp':False}})
    for fr in files_ret:
        links.append(fr)
    log('INFO', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    return links
def T3DonlineFilms(url):
    """Resolve a 3donlinefilms.com query-string url into a direct file url.

    `url` is a urlencoded query string carrying 'page', 'file' and 'src_file'
    entries.  Returns ``(u, params, error)`` where `u` is the playable url (or
    None on failure), `params` is a b64/JSON blob holding request headers, and
    `error` is '' on success.
    """
    error = ''
    # Fix: pre-initialise the outputs.  The original left `u` and `params`
    # unbound when an early statement raised, so the `return` itself crashed
    # with NameError instead of reporting the caught error.
    u = None
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    try:
        data = urlparse.parse_qs(url)
        headers = {}
        b = data['page'][0]
        # Visit the page first to obtain the session cookie.
        cook = client.request(b, output='cookie')
        l0 = 'http://3donlinefilms.com/update.php'
        post_data = {'file': data['src_file'][0]}
        cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
        # (The original also set Referer to the site root here, but it was
        # overwritten before use — dead store removed.)
        headers['Referer'] = data['page'][0]
        headers['User-Agent'] = client.agent()
        headers['Cookie'] = cookie
        try:
            # Best-effort "update" ping the site expects before serving the file.
            ret = client.request(l0, post=client.encodePostData(post_data), output='extended', XHR=True, cookie=cookie)
        except:
            pass
        u = '%s?file=%s' % (data['file'][0], data['src_file'][0].replace( ' ', ''))
        paramsx = {'headers': headers}
        params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
    except Exception as e:
        error = '%s' % e
    return u, params, error
def createurldata(mfile, qual):
    """Wrap a media url and quality label into the b64-encoded JSON list format
    the players expect: ``[{'label', 'type', 'src', 'file', 'res'}]``.

    On any failure an empty string is encoded instead, so the caller always
    gets a decodable blob.
    """
    ret = ''
    try:
        #mfile = urllib.quote(mfile)
        # Coerce both values to unicode (Python 2) so json.dumps is consistent.
        mfile = unicode(mfile)
        qual = unicode(qual)
        files = []
        jsondata = { 'label': qual, 'type': 'video/mp4', 'src': mfile, 'file': mfile, 'res': qual }
        # Round-trip through JSON to normalise the dict contents.
        jsondata = json.loads(json.dumps(jsondata))
        #print jsondata
        files.append(jsondata)
        if len(files) > 0:
            ret = files
    except Exception as e:
        # Error path logs to stdout (legacy); `ret` stays '' and is encoded below.
        print "Error in createurldata"
        print "URL : %s | Qual: %s" % (mfile, qual)
        print "Error: %s" % e
    #print ret
    ret = json.dumps(ret, encoding='utf-8')
    #print "urldata ------ %s" % ret
    return client.b64encode(ret)
def createurldata(mfile, qual):
    """Encode a single media url + quality label as the b64/JSON list format
    used by the players: ``[{'label', 'type', 'src', 'file', 'res'}]``.

    Falls back to encoding an empty string if anything goes wrong, so the
    caller always receives a decodable blob.
    """
    payload = ''
    try:
        src = unicode(mfile)
        res = unicode(qual)
        # Round-trip through JSON to normalise the entry's contents.
        entry = json.loads(json.dumps({
            'label': res,
            'type': 'video/mp4',
            'src': src,
            'file': src,
            'res': res,
        }))
        payload = [entry]
    except Exception as e:
        log('ERROR', 'createurldata', '%s - %s' % (mfile, e))
    return client.b64encode(json.dumps(payload, encoding='utf-8'))
def urldata(url, videoData=None, usevideoData=False): ret = '' #print "urldata ----------- %s" % url if url != None and url == '': pass else: try: if usevideoData == True and videoData != None and videoData != '': #print "urldata using videoData" files = [] res_split = videoData.split('&') for res in res_split: if 'fmt_stream_map' in res: file_data = res.split('=')[1] file_data = urllib.unquote(file_data).decode('utf8') files_split = file_data.split(',') for file in files_split: mfile = file.split('|')[1] qual = file_quality(mfile, '360p')[0] jsondata = { 'label': qual, 'type': 'video/mp4', 'src': mfile, 'file': mfile, 'res': qual } jsondata = json.loads(json.dumps(jsondata)) files.append(jsondata) #print mfile break if len(files) > 0: ret = files elif 'google.com/file' in url: #print "urldata using getlink API" r_split = url.split('/') getlinkurl = 'http://api.getlinkdrive.com/getlink?url=https://drive.google.com/file/d/%s/view' % r_split[ len(r_split) - 2] print "Getlink-API URL: %s" % getlinkurl c = 0 files = [] while ret == '' or len(files) == 0 and c < 3: ret = client.request(getlinkurl, IPv4=True) ret = ret.split('},{') for r in ret: r = r.replace('{', '').replace('}', '').replace( '[', '').replace(']', '') files.append('{%s}' % r) c += 1 if len(files) > 0: ret = files except Exception as e: print "Error in urldata" print "URL : %s" % url print e #print ret ret = json.dumps(ret, encoding='utf-8') #print "urldata ------ %s" % ret return client.b64encode(ret)
def urldata(url, qual):
    """Resolve `url` and wrap the resulting media link with `qual` into the
    b64-encoded JSON list the players consume.

    Any failure (including a failed resolve) yields the encoded empty string,
    so the caller always gets a decodable blob.
    """
    try:
        mfile, err, sub_url, page_html = resolve(url)
        entry = {
            "label": qual,
            "type": "video/mp4",
            "src": mfile,
            "file": mfile,
            "res": qual,
        }
        return client.b64encode(json.dumps([entry], encoding='utf-8'))
    except:
        return client.b64encode(json.dumps('', encoding='utf-8'))
def resolve(url, page_url=None, **kwargs):
    """Resolve `url` to a playable link.

    Returns a ``(url, error, params)`` tuple: the 3D-film family of hosts is
    delegated to `T3DonlineFilms`; anything else is passed through after an
    availability check.  `params` is a b64/JSON blob (empty by default).
    """
    empty_params = client.b64encode(json.dumps('', encoding='utf-8'))
    special_hosts = ('3donlinefilms.com', '3dmoviesfullhd.com', 'freedocufilms.com')
    if any(host in url for host in special_hosts):
        # Delegate entirely; T3DonlineFilms supplies url, error and params.
        return T3DonlineFilms(url)
    if check(url) == False:
        return (None, 'Error in check !', empty_params)
    return (url, '', empty_params)
def resolve(url):
    """Resolve `url` to a playable link.

    Returns ``(url, params, error)``: 3donlinefilms links are delegated to
    `T3DonlineFilms`; everything else is passed through after an availability
    check.  `params` is a b64/JSON blob (empty by default).
    """
    empty_params = client.b64encode(json.dumps('', encoding='utf-8'))
    if '3donlinefilms.com' in url:
        # Delegate entirely; T3DonlineFilms supplies url, params and error.
        return T3DonlineFilms(url)
    if check(url) == False:
        return None, empty_params, 'Error in check !'
    return url, empty_params, ''
def initAndSleep(self):
    """Bootstrap the session for this host: fetch the video token, then build
    the cookie/header set (CF cookie, reqkey cookie, session cookie) used by
    all later requests.  All state lands in ``self.headers``.
    """
    try:
        self.TOKEN_KEY = []
        self.getVidToken()
        if len(self.TOKEN_KEY) > 0:
            log('SUCCESS', 'initAndSleep', 'Vid Token: %s' % client.b64encode(self.TOKEN_KEY[0]))
        else:
            log('FAIL', 'initAndSleep', 'Vid Token Not retrieved !')
        t_base_link = self.base_link
        self.headers = {'X-Requested-With': 'XMLHttpRequest'}
        self.headers['Referer'] = t_base_link
        ua = client.randomagent()
        self.headers['User-Agent'] = ua
        #get cf cookie
        cookie1 = proxies.request(url=t_base_link, headers=self.headers, output='cookie', use_web_proxy=self.proxyrequired, httpsskip=True)
        self.headers['Cookie'] = cookie1
        # get reqkey cookie (decoded from a JS challenge page; empty on failure)
        try:
            token_url = urlparse.urljoin(t_base_link, self.token_link)
            r1 = proxies.request(token_url, headers=self.headers, httpsskip=True)
            reqkey = self.decodeJSFCookie(r1)
        except:
            reqkey = ''
        # get session cookie; server timestamp is rounded down to the hour
        serverts = str(((int(time.time())/3600)*3600))
        query = {'ts': serverts}
        # First token attempt; retry with the fallback flag if it raises.
        try:
            tk = self.__get_token(query)
        except:
            tk = self.__get_token(query, True)
        query.update(tk)
        hash_url = urlparse.urljoin(t_base_link, self.hash_menu_link)
        hash_url = hash_url + '?' + urllib.urlencode(query)
        r1, headers, content, cookie2 = proxies.request(hash_url, headers=self.headers, limit='0', output='extended', httpsskip=True)
        #cookie = cookie1 + '; ' + cookie2 + '; user-info=null; reqkey=' + reqkey
        # Combine all cookies into the final header used for the session.
        cookie = '%s; %s; user-info=null; reqkey=%s' % (cookie1 , cookie2 , reqkey)
        self.headers['Cookie'] = cookie
        log('SUCCESS', 'initAndSleep', 'Cookies : %s for %s' % (cookie,self.base_link))
    except Exception as e:
        log('ERROR','initAndSleep', '%s' % e)
def T3DonlineFilms(url):
    """Resolve a 3donlinefilms/freedocufilms/3dmoviesfullhd query-string url.

    `url` is a urlencoded query string carrying 'page', 'file' and 'src_file'
    entries.  Returns ``(u, error, params)`` — note the order differs from the
    sibling module's variant — where `u` is the direct file url (or None on
    failure), `error` is '' on success, and `params` is a b64/JSON blob with
    the request headers to replay.
    """
    error = ''
    # Fix: pre-initialise the outputs.  The original left `u` and `params`
    # unbound when an early statement raised, so the `return` itself crashed
    # with NameError instead of reporting the caught error.
    u = None
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    try:
        data = urlparse.parse_qs(url)
        headers = {}
        if '3donlinefilms.com' in url:
            headers['Referer'] = 'https://3donlinefilms.com'
            l0 = 'https://3donlinefilms.com/update.php'
        elif 'freedocufilms.com' in url:
            headers['Referer'] = 'https://freedocufilms.com'
            l0 = 'https://freedocufilms.com/update.php'
        else:
            headers['Referer'] = 'https://3dmoviesfullhd.com'
            l0 = 'https://3dmoviesfullhd.com/update.php'
        u = data['file'][0]
        # Site serves files only from the www. host.
        u = u.replace('//freedocufilms','//www.freedocufilms')
        page = data['page'][0]
        # Visit the page first to obtain the session cookie.
        cook = client.request(page, output='cookie')
        post_data = {'file':data['src_file'][0]}
        cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
        headers['Referer'] = data['page'][0]
        headers['User-Agent'] = client.agent()
        headers['Cookie'] = cookie
        try:
            # Best-effort "update" ping the site expects before serving the file.
            ret = client.request(l0, post=client.encodePostData(post_data), output='extended', XHR=True, cookie=cookie)
        except:
            pass
        paramsx = {'headers':headers}
        params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
    except Exception as e:
        error = '%s' % e
    return u, error, params
def resolve(url, online=None, page_url=None, **kwargs):
    """Scrape every mp4 link out of the page at `url`.

    Returns ``(video_url, error, params)``: `video_url` is a list of mp4 urls
    (or None on failure), `error` is '' on success, and `params` is a b64/JSON
    blob carrying the headers (Referer/UA/Cookie) needed to replay the request.
    If `online` is None an availability check is performed first.
    """
    try:
        if online == None:
            if check(url) == False:
                raise Exception('Video not available')
        video_url = None
        headersx = {'Referer': url, 'User-Agent': client.agent()}
        page_data, head, ret, cookie = client.request(url, output='extended', headers=headersx)
        # Prefer the Set-Cookie value from the raw response headers if present.
        try:
            cookie = re.findall(r'Set-Cookie:(.*)', str(ret), re.MULTILINE)[0].strip()
        except:
            pass
        headersx['Cookie'] = cookie
        mp4_vids = re.findall(r'\"(http.*?.mp4.*?)\"', page_data)
        items = []
        for u in mp4_vids:
            # Percent-encode spaces.  (The original also called
            # .replace('&', '&') — a no-op, presumably intended to decode
            # '&amp;'; removed since it had no effect.)
            items.append(u.strip().replace(' ', '%20'))
        if len(items) > 0:
            video_url = items
        else:
            raise Exception('Video not available')
        paramsx = {'headers': headersx}
        params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
        return (video_url, '', params)
    except Exception as e:
        e = '{}'.format(e)
        return (None, e, None)
def process(self, url, q, r, headers, page_url):
    """Turn a raw host url into a list of stream-item dicts.

    Branches per host family (vcstream.to, the 3D-film sites, cooltvseries,
    generic fallback).  Each item carries quality, rip type, source url,
    filesize, online flag and b64-encoded params/urldata.  `q` (quality) and
    `r` (rip type) act as defaults that branches may refine.
    """
    items = []
    try:
        if 'vcstream.to' in url:
            # Pull the file id from the embed url and query the player API.
            id = re.compile('//.+?/(?:embed|f)/([0-9a-zA-Z-_]+)').findall(url)[0]
            headersx = {'Referer': url, 'User-Agent': client.agent()}
            page_data = client.request('https://vcstream.to/player?fid=%s&page=embed' % id, headers=headersx)
            # The sources array is embedded in JS; strip escapes to get JSON.
            srcs = re.findall(r'sources:.\[(.*?)\]', page_data)[0]
            srcs = srcs.replace('\\n','').replace('\\','')
            srcs = '''[%s]''' % srcs
            j_data = json.loads(srcs)
            for j in j_data:
                t = j['name']
                label = j['label']
                u = j['src']
                if label.lower() == 'raw':
                    q = source_utils.check_sd_url(t)
                else:
                    q = label
                r = source_utils.check_sd_url_rip(t)
                # Retry the size probe without custom headers if it fails.
                fs = client.getFileSize(u, retry429=True, headers=headers)
                if fs == None or int(fs) == 0:
                    fs = client.getFileSize(u, retry429=True)
                q = qual_based_on_fs(q,fs)
                online = check(u)
                urldata = client.b64encode(json.dumps('', encoding='utf-8'))
                params = client.b64encode(json.dumps('', encoding='utf-8'))
                if headers != None:
                    paramsx = {'headers':headers}
                    params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
                items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})
        elif '3donlinefilms.com' in url or '3dmoviesfullhd.com' in url or 'freedocufilms.com' in url:
            # These sites need a cookie handshake plus an update.php ping
            # before the file url responds; size comes from a HEAD-style probe.
            data = urlparse.parse_qs(url)
            headers = {}
            if '3donlinefilms.com' in url:
                headers['Referer'] = 'http://3donlinefilms.com'
                l0 = 'https://3donlinefilms.com/update.php'
            elif 'freedocufilms.com' in url:
                headers['Referer'] = 'http://freedocufilms.com'
                l0 = 'https://freedocufilms.com/update.php'
            else:
                headers['Referer'] = 'http://3dmoviesfullhd.com'
                l0 = 'https://3dmoviesfullhd.com/update.php'
            page = data['page'][0]
            cook = client.request(page, output='cookie')
            post_data = {'file':data['src_file'][0]}
            cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
            headers['Referer'] = page
            headers['User-Agent'] = client.agent()
            headers['Cookie'] = cookie
            u = data['file'][0]
            # Site serves files only from the www. host.
            u = u.replace('//freedocufilms','//www.freedocufilms')
            try:
                ret = client.request(l0, post=client.encodePostData(post_data),headers=headers, output='extended', XHR=True, cookie=cookie)
            except Exception as e:
                log(type='FAIL', method='process', err='%s' % e, dolog=False, logToControl=False, doPrint=True)
            ret = client.request(u, output='headers', headers=headers, XHR=True)
            # Filesize from the Content-Length response header; 0 if absent.
            try:
                fs = int(re.findall(r'Content-Length:(.*)', str(ret), re.MULTILINE)[0].strip())
            except:
                fs = 0
            q = qual_based_on_fs(q,fs)
            online = False
            if int(fs) > 0:
                online = True
            urldata = client.b64encode(json.dumps('', encoding='utf-8'))
            paramsx = {'headers':headers}
            params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
            # Streaming disabled for this host family; download only.
            items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False, 'allowsDownload':True})
        elif 'cooltvseries.com' in url:
            # Follow redirects to the real file, then append a dummy extension hint.
            urlx = client.request(url, output='geturl', headers=headers)
            urlx = '%s?e=file.mp4' % urlx
            fs = client.getFileSize(url, retry429=True, headers=headers)
            if fs == None or int(fs) == 0:
                fs = client.getFileSize(url, retry429=True)
            q = qual_based_on_fs(q,fs)
            online = check(url)
            urldata = client.b64encode(json.dumps('', encoding='utf-8'))
            params = client.b64encode(json.dumps('', encoding='utf-8'))
            if headers != None:
                paramsx = {'headers':headers}
                params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
            allowsDownload = True
            items.append({'quality':q, 'riptype':r, 'src':urlx, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})
        else:
            # Generic fallback: probe size/availability of the url as-is.
            fs = client.getFileSize(url, retry429=True, headers=headers)
            if fs == None or int(fs) == 0:
                fs = client.getFileSize(url, retry429=True)
            q = qual_based_on_fs(q,fs)
            online = check(url)
            urldata = client.b64encode(json.dumps('', encoding='utf-8'))
            params = client.b64encode(json.dumps('', encoding='utf-8'))
            if headers != None:
                paramsx = {'headers':headers}
                params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
            allowsDownload = True
            # HLS playlists cannot be downloaded as a single file.
            if '.m3u8' in url:
                allowsDownload = False
            items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})
    except Exception as e:
        log(type='ERROR',method='process', err=u'%s' % e)
    # Last-resort fallback: if every branch failed, emit one generic item.
    if len(items) == 0:
        fs = client.getFileSize(url, retry429=True, headers=headers)
        if fs == None or int(fs) == 0:
            fs = client.getFileSize(url, retry429=True)
        q = qual_based_on_fs(q,fs)
        online = check(url)
        urldata = client.b64encode(json.dumps('', encoding='utf-8'))
        params = client.b64encode(json.dumps('', encoding='utf-8'))
        if headers != None:
            paramsx = {'headers':headers}
            params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
        items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})
    return items
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None, page_url=None):
    """Build a metadata dict for a MEGA link and append it to `links`.

    Resolves the direct download url via `mega.get_mega_dl_link`; on failure
    the entry is still emitted but flagged offline with '*File Unavailable*'.
    `links` is mutated in place and returned.
    """
    files_ret = []
    orig_url = url
    # Test mode: echo the url back without any network work.
    if testing == True:
        links.append(url)
        return links
    # `name` is a module-level host identifier; host may be disabled in settings.
    if control.setting('Host-%s' % name) == False:
        log('INFO','createMeta','Host Disabled by User')
        return links
    try:
        urldata = client.b64encode(json.dumps('', encoding='utf-8'))
        params = client.b64encode(json.dumps('', encoding='utf-8'))
        online = check(url)
        titleinfo = txt
        maininfo = ''
        fs = 0
        try:
            furl, fs, file_ext1, err = mega.get_mega_dl_link(url)
            if err != '':
                raise Exception(err)
            if file_ext1 != None:
                file_ext = file_ext1
            # Non-video files get the extension surfaced in the title info.
            if file_ext not in ['.mp4','.mkv','.avi']:
                titleinfo = '%s%s' % (txt+' ' if len(txt)>0 else '', file_ext+' file')
            quality = qual_based_on_fs(quality, fs)
            if int(fs) == 0:
                fs = client.getFileSize(furl)
            urldata = createurldata(furl, quality)
        except Exception as e:
            # Resolution failed: mark offline but still emit an entry below.
            online = False
            log('FAIL', 'createMeta-1', '%s - %s' % (url,e))
            maininfo = '*File Unavailable*'
        try:
            log(type='INFO',method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (url,quality,fs))
            files_ret.append({'source':self.name, 'maininfo':maininfo, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':riptype, 'provider':provider, 'orig_url':orig_url, 'durl':url, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'online':online, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':0})
        except Exception as e:
            log('ERROR', 'createMeta-2', '%s - %s' % (url,e))
            # Fallback entry with unknown rip type; `urlhost` is module-level.
            files_ret.append({'source':urlhost, 'maininfo':maininfo, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'durl':url, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':self.netloc[0], 'page_url':page_url, 'misc':{'player':'iplayer', 'gp':False}, 'seq':0})
    except Exception as e:
        log('ERROR', 'createMeta', '%s' % e)
    # Register each finished entry as a partial source, then hand it to the caller.
    # (Every dict built above contains 'key', so the guard is effectively always true.)
    for fr in files_ret:
        if fr != None and 'key' in fr.keys():
            control.setPartialSource(fr,self.name)
            links.append(fr)
    if len(files_ret) > 0:
        log('SUCCESS', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    else:
        log('FAIL', 'createMeta', 'Failed in processing %s link >>> %s' % (provider, orig_url), dolog=self.init)
    log('INFO', 'createMeta', 'Completed', dolog=self.init)
    return links
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False, poster=None, headers=None, page_url=None):
    """Build metadata dicts for vidcloud.icu links and append them to `links`.

    Three url shapes are handled:
      * ``vidcloud.icu/load``      — unsupported, raises immediately;
      * ``vidcloud.icu/download``  — scrape mp4 urls off the download page;
      * anything else              — scrape the server list from the page and
        recurse through ``resolvers.createMeta`` per server.
    `links` is mutated in place and returned.
    """
    files_ret = []
    orig_url = url
    # Test mode: echo the url back without any network work.
    if testing == True:
        links.append(url)
        return links
    # `name` is a module-level host identifier; host may be disabled in settings.
    if control.setting('Host-%s' % name) == False:
        log('INFO', 'createMeta', 'Host Disabled by User')
        return links
    try:
        if 'vidcloud.icu/load' in url:
            raise Exception('No mp4 Video found')
        elif 'vidcloud.icu/download' in url:
            headersx = {'Referer': url, 'User-Agent': client.agent()}
            page_data, head, ret, cookie = client.request(
                url, output='extended', headers=headersx)
            # Prefer the Set-Cookie value from the raw response headers.
            try:
                cookie = re.findall(r'Set-Cookie:(.*)', str(ret),
                                    re.MULTILINE)[0].strip()
            except:
                pass
            headersx['Cookie'] = cookie
            mp4_vids = re.findall(r'\"(http.*?.mp4.*?)\"', page_data)
            items = []
            for u in mp4_vids:
                # NOTE(review): .replace('&', '&') is a no-op — presumably the
                # intent was to decode '&amp;'; confirm against the page markup.
                u = u.strip().replace(' ', '%20').replace('&', '&')
                fs = client.getFileSize(u, headers=headersx)
                q = qual_based_on_fs(quality, fs)
                online = check(u, headers=headersx)
                urldata = client.b64encode(json.dumps('', encoding='utf-8'))
                params = client.b64encode(json.dumps('', encoding='utf-8'))
                # NOTE(review): this packs the `headers` parameter, not the
                # locally-built `headersx` used for the actual requests —
                # looks like a headers/headersx mix-up; verify before changing.
                if headersx != None:
                    paramsx = {'headers': headers}
                    params = client.b64encode(
                        json.dumps(paramsx, encoding='utf-8'))
                items.append({ 'quality': q, 'riptype': riptype, 'src': u, 'fs': fs, 'online': online, 'params': params, 'urldata': urldata, 'allowsStreaming': False })
            seq = 0
            for item in items:
                durl = url
                vidurl = item['src']
                allowsStreaming = item['allowsStreaming']
                quality = item['quality']
                riptype = item['riptype']
                fs = item['fs']
                online = item['online']
                params = item['params']
                urldata = item['urldata']
                try:
                    log(type='INFO', method='createMeta', err=u'durl:%s ; res:%s; fs:%s' % (durl, quality, fs))
                    files_ret.append({ 'source': self.name, 'maininfo': txt, 'titleinfo': '', 'quality': quality, 'vidtype': vidtype, 'rip': riptype, 'provider': provider, 'orig_url': orig_url, 'url': vidurl, 'durl': durl, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'allowsDownload': self.allowsDownload, 'resumeDownload': self.resumeDownload, 'allowsStreaming': allowsStreaming, 'key': key, 'enabled': True, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'poster': poster, 'subdomain': client.geturlhost(url), 'page_url': page_url, 'misc': { 'player': 'iplayer', 'gp': True }, 'seq': seq })
                except Exception as e:
                    log(type='ERROR', method='createMeta', err=u'%s' % e)
                    # Fallback entry with unknown rip type; `urlhost` is module-level.
                    files_ret.append({ 'source': urlhost, 'maininfo': txt, 'titleinfo': '', 'quality': quality, 'vidtype': vidtype, 'rip': 'Unknown', 'provider': provider, 'orig_url': orig_url, 'url': vidurl, 'durl': durl, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'allowsDownload': self.allowsDownload, 'resumeDownload': self.resumeDownload, 'allowsStreaming': allowsStreaming, 'key': key, 'enabled': True, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'poster': poster, 'subdomain': client.geturlhost(url), 'page_url': page_url, 'misc': { 'player': 'iplayer', 'gp': True }, 'seq': seq })
                seq += 1
        elif url != None:
            online = True
            result = client.request(orig_url, httpsskip=True)
            if 'Sorry, this video reuploading' in result:
                online = False
            if online == True:
                # Collect the alternate-server urls from the embed page.
                vids = client.parseDOM(
                    result, 'ul', attrs={'class': 'list-server-items'})[0]
                vids = client.parseDOM(vids, 'li', attrs={'class': 'linkserver'}, ret='data-video')
                vids = list(set(vids))
                for video_url in vids:
                    video_urlx = video_url
                    if 'http' not in video_urlx:
                        video_urlx = 'http:' + video_urlx
                    if video_urlx != None and 'vidcloud.icu/load' not in video_urlx:
                        # Non-vidcloud server: follow redirects, then recurse
                        # into the generic resolver dispatcher.
                        log(type='INFO', method='createMeta', err=u'url:%s requires additional processing' % video_urlx)
                        video_url1 = '%s' % client.request(
                            video_urlx, followredirect=True, httpsskip=True, output='geturl')
                        if video_url1 != None and 'http' in video_url1 and 'vidcloud.icu' not in video_url1:
                            try:
                                files_ret = resolvers.createMeta(
                                    video_url1, provider, logo, quality, files_ret, key, poster=poster, riptype=riptype, vidtype=vidtype, sub_url=sub_url, testing=testing, headers=headers, page_url=page_url)
                            except Exception as e:
                                log(type='ERROR', method='createMeta', err=u'%s' % e)
                    elif video_urlx != None and 'vidcloud.icu/load' in video_urlx:
                        # vidcloud 'load' link: convert to its download page and
                        # recurse only if it actually exposes mp4 urls.
                        log(type='INFO', method='createMeta', err=u'url:%s requires additional processing' % video_urlx)
                        id = re.findall(r'id=(.*?)&', video_urlx)[0]
                        u = 'https://vidcloud.icu/download?id=%s' % id
                        res = client.request(u)
                        mp4_vids = re.findall(r'http.*?mp4', res)
                        if len(mp4_vids) > 0:
                            try:
                                files_ret = resolvers.createMeta(
                                    u, provider, logo, quality, files_ret, key, poster=poster, riptype=riptype, vidtype=vidtype, sub_url=sub_url, testing=testing, headers=headers, page_url=page_url, urlhost='vidcloud.icu')
                            except Exception as e:
                                log(type='ERROR', method='createMeta', err=u'%s' % e)
                        elif len(mp4_vids) == 0 and video_url == vids[
                                len(vids) - 1] and len(files_ret) == 0:
                            # Last server, still nothing found anywhere: give up.
                            raise Exception('No mp4 Video found')
    except Exception as e:
        log('FAIL', 'createMeta', '%s' % e)
    # Register each finished entry as a partial source, then hand it to the caller.
    for fr in files_ret:
        if fr != None and 'key' in fr.keys():
            control.setPartialSource(fr, self.name)
            links.append(fr)
    if len(files_ret) > 0:
        log('SUCCESS', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    else:
        log('FAIL', 'createMeta', 'Failed in processing %s link >>> %s' % (provider, orig_url), dolog=self.init)
    log('INFO', 'createMeta', 'Completed', dolog=self.init)
    return links
def createMeta(self, url, provider, logo, quality, links, key, riptype, showsplit=False, useGetlinkAPI=True, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False):
    """Build metadata dicts for a Google Drive link and append them to `links`.

    Up to three passes populate `files_ret`:
      1. the direct link (via `getFileLink` for google.com/file urls);
      2. the GetLink API variants (when `useGetlinkAPI` and the file is online);
      3. split-file variants (when `showsplit` and pass 2 produced nothing).
    `links` is mutated in place and returned.
    """
    orig_url = url
    # Test mode: echo the url back without any network work.
    if testing == True:
        links.append(url)
        return links
    # `name` is a module-level host identifier; host may be disabled in settings.
    if control.setting('Host-%s' % name) == False:
        log('INFO', 'createMeta', 'Host Disabled by User')
        return links
    # Normalise scheme-less drive file urls.
    if 'http' not in url and 'google.com/file' in url:
        url = 'https://drive.google.com/' + url.split('.com/')[1]
    httpsskip = False
    if control.setting('use_https_alt') != None and (
            control.setting('use_https_alt') == True or control.setting('use_https_alt') == False):
        httpsskip = control.setting('httpsskip')
    videoData, headers, content, cookie = getVideoMetaData(url, httpsskip)
    # Reduce the cookie jar to the single DRIVE_STREAM cookie drive playback needs.
    try:
        cookie += '; %s' % content['Set-Cookie']
        # cookie_s = cookie.split(';')
        # cookie_n = []
        # for cook in cookie_s:
        #     cook = cook.strip()
        #     if '=' in cook and cook not in cookie_n:
        #         cookie_n.append(cook)
        # cookie = ('; '.join(x for x in sorted(cookie_n)))
        cookie_value = client.search_regex(r"DRIVE_STREAM=([^;]+);", cookie, 'cookie val', group=1)
        domain = client.search_regex(r"https?://([^\/]+)", url, 'host val', group=1)
        cookie = 'DRIVE_STREAM=%s; path=/; domain=.%s;' % (cookie_value, domain)
    except:
        pass
    params = {'headers': headers, 'cookie': cookie}
    params = json.dumps(params, encoding='utf-8')
    params = client.b64encode(params)
    if client.geturlhost(url) in self.host[4]:
        pass # skip for googleapis.com link
    else:
        quality = file_quality(url, quality, videoData)[0]
    isOnline = check(url, videoData, headers=headers, cookie=cookie, httpsskip=httpsskip)[0]
    type = rip_type(url, riptype)
    files = []
    titleinfo = txt
    if txt != '':
        titleinfo = txt
    ntitleinfo = titleinfo
    files_ret = []
    enabled = True
    # Pass 1: the direct link itself.
    try:
        #udata = urldata(url, videoData=videoData, usevideoData=True)
        if 'google.com/file' in url:
            # Strip the /preview|/edit|/view suffix to isolate the file id.
            idstr = '%s' % (url.split('/preview')[0].split('/edit')
                            [0].split('/view')[0])
            idstr = idstr.split('/')
            id = idstr[len(idstr) - 1]
            try:
                durl, f_res, fs = getFileLink(id, httpsskip)
            except:
                fs = 0
                durl = None
            if durl != None:
                files_ret.append({ 'source': self.name, 'maininfo': '', 'titleinfo': ntitleinfo, 'quality': quality, 'vidtype': vidtype, 'rip': type, 'provider': provider, 'url': durl, 'durl': durl, 'urldata': createurldata(durl, quality), 'params': params, 'logo': logo, 'online': isOnline, 'allowsDownload': self.allowsDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': enabled, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(durl), 'misc': { 'player': 'iplayer', 'gp': False } })
            else:
                # No direct link: fall back to the original url via eplayer.
                fs = client.getFileSize(url, retry429=True)
                files_ret.append({ 'source': self.name, 'maininfo': '', 'titleinfo': ntitleinfo, 'quality': quality, 'vidtype': vidtype, 'rip': type, 'provider': provider, 'url': url, 'durl': url, 'urldata': urldata('', ''), 'params': params, 'logo': logo, 'online': isOnline, 'allowsDownload': self.allowsDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': enabled, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(url), 'misc': { 'player': 'eplayer', 'gp': False } })
        else:
            fs = client.getFileSize(url, retry429=True)
            files_ret.append({ 'source': self.name, 'maininfo': '', 'titleinfo': ntitleinfo, 'quality': quality, 'vidtype': vidtype, 'rip': type, 'provider': provider, 'url': url, 'durl': url, 'urldata': urldata('', ''), 'params': params, 'logo': logo, 'online': isOnline, 'allowsDownload': self.allowsDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': enabled, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(url), 'misc': { 'player': 'iplayer', 'gp': False } })
    except Exception as e:
        log(type='ERROR', method='createMeta-1', err=u'%s' % e)
    # Pass 2: GetLink API variants (forces IPv4 while querying).
    isGetlinkWork = False
    try:
        if useGetlinkAPI == True and isOnline and 'google.com/file' in url and self.useGetLinkAPI:
            client.setIP4()
            ntitleinfo = titleinfo + ' | (via GetLink API) '
            files = urldata(url)
            files = client.b64decode(files)
            filesJ = json.loads(files)
            if len(filesJ) > 0:
                for mfile in filesJ:
                    mfile = json.loads(mfile)
                    #print "mfile --- : %s" % mfile
                    furl = mfile['src']
                    f2url = client.request(furl, followredirect=True, output='geturl')
                    if 'http' in f2url:
                        furl = f2url
                        #print "furl --- : %s" % furl
                        quality = file_quality(furl, mfile['res'], videoData)[0]
                        isOnlineT = check(furl, videoData, headers=headers, cookie=cookie)[0]
                        type = rip_type(furl, riptype)
                    else:
                        isOnlineT = 'Unknown'
                    # GetLink urls need no headers/cookie; pack empty params.
                    p = {'headers': '', 'cookie': ''}
                    p = json.dumps(p, encoding='utf-8')
                    p = client.b64encode(p)
                    fs = client.getFileSize(furl, retry429=True)
                    files_ret.append({ 'source': self.name, 'maininfo': '', 'titleinfo': ntitleinfo, 'quality': quality, 'vidtype': vidtype, 'rip': type, 'provider': provider, 'url': furl, 'durl': furl, 'urldata': urldata('', ''), 'params': p, 'logo': logo, 'online': isOnlineT, 'allowsDownload': self.allowsDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': enabled, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(furl), 'misc': { 'player': 'iplayer', 'gp': False } })
                isGetlinkWork = True
            client.setIP6()
    except Exception as e:
        log(type='ERROR', method='createMeta-2', err=u'%s' % e)
    # Pass 3: split files, only if GetLink produced nothing.
    try:
        if showsplit == True and isOnline and isGetlinkWork == False:
            # currently suffers from transcoding failure on most clients
            ntitleinfo = titleinfo + ' | *limited support* '
            files = get_files(url, videoData)[0]
            for furl in files:
                quality = file_quality(furl, quality, videoData)[0]
                type = rip_type(furl, riptype)
                furl = urllib.unquote(furl).decode('utf8')
                furl = furl.decode('unicode_escape')
                isOnlineT = check(furl, videoData, headers=headers, cookie=cookie)[0]
                fs = client.getFileSize(furl, retry429=True)
                files_ret.append({ 'source': self.name, 'maininfo': '', 'titleinfo': ntitleinfo, 'quality': quality, 'vidtype': vidtype, 'rip': type, 'provider': provider, 'url': furl, 'durl': furl, 'urldata': createurldata(furl, quality), 'params': params, 'logo': logo, 'online': isOnlineT, 'allowsDownload': self.allowsDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': enabled, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(furl), 'misc': { 'player': 'iplayer', 'gp': False } })
    except Exception as e:
        log(type='ERROR', method='createMeta-3', err=u'%s' % e)
    for fr in files_ret:
        fr['resumeDownload'] = self.resumeDownload
        links.append(fr)
    log('INFO', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    return links
def createMeta(self, url, provider, logo, quality, links, key, vidtype='Movie', lang='en', txt=''): urldata = client.b64encode(json.dumps('', encoding='utf-8')) params = client.b64encode(json.dumps('', encoding='utf-8')) online = check(url) files_ret = [] try: files_ret.append({ 'source': self.name, 'maininfo': '', 'titleinfo': '', 'quality': quality, 'vidtype': vidtype, 'rip': 'BRRIP', 'provider': provider, 'url': url, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'key': key, 'enabled': True, 'ts': time.time(), 'lang': lang, 'misc': { 'player': 'eplayer', 'gp': False } }) except Exception as e: print "ERROR host_youtube.py > createMeta : %s" % e.args files_ret.append({ 'source': urlhost, 'maininfo': '', 'titleinfo': '', 'quality': quality, 'vidtype': vidtype, 'rip': 'Unknown', 'provider': provider, 'url': url, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'key': key, 'enabled': True, 'ts': time.time(), 'lang': lang, 'misc': { 'player': 'eplayer', 'gp': False } }) for fr in files_ret: links.append(fr) return links
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False, poster=None, headers=None):
    """Resolve a mega.nz link to a direct-download url and append its
    source-metadata dict to ``links``.

    On resolver failure the entry is still appended but marked offline;
    if even building the primary dict fails, a generic 'Unknown' fallback
    entry is appended. Returns the (mutated) ``links`` list.
    """
    # Test harness short-circuit: record the raw url only.
    if testing == True:
        links.append(url)
        return links
    # Respect the per-host enable/disable user setting.
    if control.setting('Host-%s' % name) == False:
        log('INFO', 'createMeta', 'Host Disabled by User')
        return links

    urldata = client.b64encode(json.dumps('', encoding='utf-8'))
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    orig_url = url
    online = check(url)
    entries = []
    titleinfo = txt
    fs = 0
    try:
        # mega resolver yields (direct_url, filesize, extension).
        furl, fs, file_ext = mega.get_mega_dl_link(url)
        quality = qual_based_on_fs(quality, fs)
        if int(fs) == 0:
            # Resolver gave no size; probe the direct url instead.
            fs = client.getFileSize(furl)
        urldata = createurldata(furl, quality)
    except Exception as e:
        online = False
        log('FAIL', 'createMeta-1', '%s - %s' % (url, e))

    try:
        entries.append({
            'source': self.name, 'maininfo': '', 'titleinfo': titleinfo,
            'quality': quality, 'vidtype': vidtype, 'rip': riptype,
            'provider': provider, 'durl': url, 'url': url,
            'urldata': urldata, 'params': params, 'logo': logo,
            'allowsDownload': self.allowsDownload,
            'resumeDownload': self.resumeDownload,
            'allowsStreaming': self.allowsStreaming,
            'online': online, 'key': key, 'enabled': True, 'fs': int(fs),
            'file_ext': file_ext, 'ts': time.time(), 'lang': lang,
            'sub_url': sub_url, 'poster': poster,
            'subdomain': self.netloc[0],
            'misc': {'player': 'iplayer', 'gp': False},
        })
    except Exception as e:
        log('ERROR', 'createMeta-2', '%s - %s' % (url, e))
        entries.append({
            'source': urlhost, 'maininfo': '', 'titleinfo': titleinfo,
            'quality': quality, 'vidtype': vidtype, 'rip': 'Unknown',
            'provider': provider, 'durl': url, 'url': url,
            'urldata': urldata, 'params': params, 'logo': logo,
            'online': online,
            'allowsDownload': self.allowsDownload,
            'resumeDownload': self.resumeDownload,
            'allowsStreaming': self.allowsStreaming,
            'key': key, 'enabled': True, 'fs': int(fs),
            'file_ext': file_ext, 'ts': time.time(), 'lang': lang,
            'sub_url': sub_url, 'poster': poster,
            'subdomain': self.netloc[0],
            'misc': {'player': 'iplayer', 'gp': False},
        })

    for entry in entries:
        links.append(entry)
    log('INFO', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    return links
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False):
    """Append a source-metadata entry for this host to ``links``.

    This host cannot be size-probed, so a fixed 5 GB upper-bound file size
    is assumed. On failure to build the primary entry, a generic 'Unknown'
    fallback is appended. Returns the (mutated) ``links`` list.
    """
    # Test harness short-circuit: record the raw url only.
    if testing == True:
        links.append(url)
        return links
    # Respect the per-host enable/disable user setting.
    if control.setting('Host-%s' % name) == False:
        log('INFO', 'createMeta', 'Host Disabled by User')
        return links

    orig_url = url
    urldata = client.b64encode(json.dumps('', encoding='utf-8'))
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    online = check(url)
    entries = []
    # No size probe available for this host: assume a 5 GB upper bound.
    assumed_size = 5 * 1024 * 1024 * 1024

    # Fields common to the primary entry and the fallback entry.
    shared = {
        'maininfo': '', 'titleinfo': '', 'quality': quality,
        'vidtype': vidtype, 'provider': provider, 'url': url, 'durl': url,
        'urldata': urldata, 'params': params, 'logo': logo, 'online': online,
        'allowsDownload': self.allowsDownload,
        'resumeDownload': self.resumeDownload,
        'allowsStreaming': self.allowsStreaming,
        'key': key, 'enabled': True, 'fs': assumed_size,
        'file_ext': file_ext, 'ts': time.time(), 'lang': lang,
        'sub_url': sub_url, 'subdomain': self.netloc[0],
        'misc': {'player': 'eplayer', 'gp': False},
    }
    try:
        entries.append(dict(shared, source=self.name, rip=riptype))
    except Exception as e:
        log(type='ERROR', method='createMeta', err=u'%s' % e)
        entries.append(dict(shared, source=urlhost, rip='Unknown'))

    for entry in entries:
        links.append(entry)
    log('INFO', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    return links
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext = '.mp4', testing=False, poster=None, headers=None, page_url=None):
    """Resolve an openload url (normalizing oload.tv embeds to /f/ pages),
    handle the pairing workflow, scrape the file title for quality/rip
    detection, and append one source-metadata dict to ``links``.

    Returns the (mutated) ``links`` list.
    """
    files_ret = []
    # Normalize mirror domain and embed paths to the canonical file page.
    url = url.replace('oload.tv','openload.co').replace('/embed/','/f/')
    orig_url = url
    # Test harness short-circuit: record the raw url only.
    if testing == True:
        links.append(url)
        return links
    # Respect the per-host enable/disable user setting.
    if control.setting('Host-%s' % name) == False:
        log('INFO','createMeta','Host Disabled by User')
        return links
    try:
        durl = url
        # Empty base64-encoded JSON placeholders for urldata/params.
        urldata = client.b64encode(json.dumps('', encoding='utf-8'))
        params = client.b64encode(json.dumps('', encoding='utf-8'))
        a1 = None
        if control.setting('use_openload_pairing') == True:
            # isPairingRequired returns (required_flag, resolved_url_or_None);
            # probed twice — presumably the first call can trigger pairing and
            # the retry picks up the paired result (TODO confirm).
            isPairRequired, a1 = isPairingRequired(url)
            if isPairRequired == True:
                isPairRequired, a1 = isPairingRequired(url)
            #print "isPairRequired %s" % isPairRequired
        else:
            isPairRequired = False
        page_html = client.request(url)
        if a1 != None:
            # Pairing already produced a playable url.
            vidurl = a1
        else:
            vidurl, err, sub_url_t, r1 = resolve(url, usePairing=False)
            if sub_url == None:
                sub_url = sub_url_t
            if vidurl != None:
                # Non-pairing resolve succeeded, so pairing is moot.
                isPairRequired = False
        # Human-readable pairing status shown in the source list.
        pair = ''
        if isPairRequired == True:
            pair = ' *Pairing required* '
            if isPairingDone():
                pair = ' *Paired* '
        if vidurl == None:
            vidurl = url
        # check() returns a 6-tuple; only online / fs / sub_url are used here.
        online, r1, r2, fs, r3, sub_url_t = check(vidurl, videoData=page_html, usePairing=False, embedpage=True)
        if online == False:
            pair = ' *Vid Unavailable* '
        # Scrape a display title from the page, trying progressively more
        # specific DOM locations.
        file_title = ''
        try:
            file_title = client.parseDOM(page_html, 'title')[0]
        except:
            try:
                file_title = client.parseDOM(page_html, 'span', attrs = {'class': 'title'})[0]
            except:
                try:
                    file_title = client.parseDOM(page_html, 'h3', attrs = {'class': 'other-title-bold'})[0]
                except:
                    pass
        if sub_url == None:
            sub_url = sub_url_t
        titleinfo = txt
        try:
            log(type='INFO',method='createMeta', err=u'pair:%s; online:%s; durl:%s ; res:%s; fs:%s' % (isPairRequired,online,vidurl,quality,fs))
            files_ret.append({'source':self.name, 'maininfo':pair, 'titleinfo':titleinfo, 'quality':file_quality(vidurl, quality, file_title), 'vidtype':vidtype, 'rip':rip_type(vidurl, riptype, file_title), 'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'pair':isPairRequired, 'player':'iplayer', 'gp':False}, 'seq':0})
        except Exception as e:
            # Fallback generic entry (rip unknown, external player).
            log(type='ERROR',method='createMeta-3', err=u'%s' % e)
            files_ret.append({'source':urlhost, 'maininfo':pair, 'titleinfo':titleinfo, 'quality':quality, 'vidtype':vidtype, 'rip':'Unknown' ,'provider':provider, 'orig_url':orig_url, 'url':vidurl, 'durl':durl, 'urldata':urldata, 'params':params, 'logo':logo, 'online':online, 'allowsDownload':self.allowsDownload, 'resumeDownload':self.resumeDownload, 'allowsStreaming':self.allowsStreaming, 'key':key, 'enabled':True, 'fs':int(fs), 'file_ext':file_ext, 'ts':time.time(), 'lang':lang, 'sub_url':sub_url, 'poster':poster, 'subdomain':client.geturlhost(url), 'page_url':page_url, 'misc':{'pair':isPairRequired, 'player':'eplayer', 'gp':False}, 'seq':0})
    except Exception as e:
        log('ERROR', 'createMeta', '%s' % e)
    # Register each valid entry as a partial source before exposing it.
    for fr in files_ret:
        if fr != None and 'key' in fr.keys():
            control.setPartialSource(fr,self.name)
            links.append(fr)
    if len(files_ret) > 0:
        log('SUCCESS', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    else:
        log('FAIL', 'createMeta', 'Failed in processing %s link >>> %s' % (provider, orig_url), dolog=self.init)
    log('INFO', 'createMeta', 'Completed', dolog=self.init)
    return links
def createMeta(url, provider, logo, quality, links, key, vidtype='Movie', lang='en', txt=''): if url == None or url == '': print "resolvers > __init__.py > createMeta : url:%s prov:%s" % ( url, provider) return links url = url.strip() quality = fixquality(quality) links_m = [] urldata = client.b64encode(json.dumps('', encoding='utf-8')) params = client.b64encode(json.dumps('', encoding='utf-8')) try: urlhost = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0] for host in sourceHostsCall: print "Searching %s in %s" % (urlhost, host['host']) if urlhost in host['host']: print "Found %s in %s" % (urlhost, host['host']) return host['call'].createMeta(url, provider, logo, quality, links, key, vidtype=vidtype, lang=lang, txt=txt) print "urlhost '%s' not found in host/resolver plugins" % urlhost quality = file_quality(url, quality) type = rip_type(url, quality) links_m.append({ 'source': urlhost, 'maininfo': '', 'titleinfo': '', 'quality': quality, 'vidtype': vidtype, 'rip': type, 'provider': provider, 'url': url, 'urldata': urldata, 'params': params, 'logo': logo, 'online': 'Unknown', 'key': key, 'enabled': True, 'ts': time.time(), 'lang': lang, 'misc': { 'player': 'eplayer', 'gp': False } }) except Exception as e: print "ERROR resolvers > __init__.py > createMeta : %s url: %s" % ( e.args, url) #quality = file_quality(url, quality) #type = rip_type(url, quality) #links_m.append({'source':urlhost, 'maininfo':'', 'titleinfo':'', 'quality':quality, 'rip':type, 'provider':provider, 'url':url, 'urldata':urldata, 'params':params, 'logo':logo, 'online':'Unknown', 'key':key, 'enabled':True}) links += [l for l in links_m] return links
def initAndSleep(self):
    """Prime the scraper session.

    Fetches the video token, builds the default request headers, and
    collects the cookies needed for later API calls — either via plain
    HTTP(S) requests (CloudFlare cookie + optional token 'reqkey' +
    session cookie) or via Selenium when captcha handling is enabled.
    All failures are logged; the method never raises.
    """
    try:
        self.TOKEN_KEY = []
        self.getVidToken()
        if len(self.TOKEN_KEY) > 0:
            log('SUCCESS', 'initAndSleep', 'Vid Token: %s' % client.b64encode(self.TOKEN_KEY[0]))
        else:
            log('FAIL', 'initAndSleep', 'Vid Token Not retrieved !')
        t_base_link = self.base_link
        self.headers = {'X-Requested-With': 'XMLHttpRequest'}
        self.headers['Referer'] = t_base_link
        ua = client.randomagent()
        self.headers['User-Agent'] = ua
        cookie1 = ''
        cookie2 = ''
        reqkey = ''
        # get cf cookie
        if USE_SELENIUM == False or self.captcha == False or self.use_selenium == False:
            cookie1 = proxies.request(url=t_base_link, headers=self.headers, output='cookie', use_web_proxy=self.proxyrequired, httpsskip=True)
            cookie1 = make_cookie_str(cookie1)
            self.headers['Cookie'] = cookie1
            # get reqkey cookie (best-effort; kept for parity with upstream flow)
            try:
                if USE_TOKEN == True:
                    token_url = urlparse.urljoin(t_base_link, self.token_link)
                    r1 = proxies.request(token_url, headers=self.headers, httpsskip=True)
                    if r1 == None:
                        raise Exception('%s not reachable !' % token_url)
                    reqkey = self.decodeJSFCookie(r1)
            except Exception as e:
                reqkey = ''
                log('FAIL','initAndSleep', 'Not using reqkey: %s' % e, dolog=False)
            # get session cookie; timestamp is truncated to the hour so the
            # server-side cache key stays stable within the hour.
            serverts = str(((int(time.time())/3600)*3600))
            # BUGFIX: this literal was previously written as
            # {'ts': serverts, '_', '634'} which mixes dict- and set-item
            # syntax and is a SyntaxError; '_' is a cache-buster parameter.
            query = {'ts': serverts, '_': '634'}
            hash_url = urlparse.urljoin(t_base_link, self.hash_menu_link)
            hash_url = hash_url + '?' + urllib.urlencode(query)
            cookie2 = proxies.request(url=hash_url, headers=self.headers, output='cookie', httpsskip=True)
            cookie2 = make_cookie_str(cookie2)
        else:
            log('INFO','initAndSleep', 'Attempting Selenium Retrieval - Start')
            try:
                my_cookies_via_sel, pg_src = seleniumca.getMyCookies(base_url=t_base_link)
                # Split out the CloudFlare id cookie; everything else is the
                # session cookie string.
                for x in my_cookies_via_sel:
                    if x['name'] == '__cfduid':
                        cookie1 = '%s=%s' % (x['name'],x['value'])
                        my_cookies_via_sel.remove(x)
                        break
                log('INFO','initAndSleep', 'cookie1: %s' % cookie1)
                cookie2 = (';'.join('%s=%s' % (x['name'],x['value']) for x in my_cookies_via_sel))
                log('INFO','initAndSleep', 'cookie2: %s' % cookie2)
            except Exception as e:
                log('ERROR','initAndSleep', '%s' % e)
            log('INFO','initAndSleep', 'Attempting Selenium Retrieval - End')
        cookie = '%s; %s' % (cookie1, cookie2)
        self.headers['Cookie'] = cookie
        log('SUCCESS', 'initAndSleep', 'Cookies : %s for %s' % (cookie,self.base_link))
    except Exception as e:
        log('ERROR','initAndSleep', '%s' % e)
def createMeta(url, provider, logo, quality, links, key, riptype=None, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False, urlhost=None, poster=None, headers=None):
    """Route ``url`` to the matching host plugin's createMeta, skipping
    urls that were already processed into ``links``.

    When no registered plugin claims the url's host, a generic
    streaming-only metadata entry is appended instead. Returns the
    (mutated) ``links`` list.
    """
    if url == None or url == '':
        return links
    url = url.strip()
    # Skip urls already present in links. BUGFIX: links can contain raw url
    # strings (testing mode appends the bare url) or dicts without a 'durl'
    # key (older host plugins); indexing those with item['durl'] raised
    # TypeError/KeyError. Guard by item type and use .get() for dicts.
    for item in links:
        if isinstance(item, dict):
            if url == item.get('durl'):
                log("%s has already been processed" % url)
                return links
        elif url == item:
            log("%s has already been processed" % url)
            return links
    quality = fixquality(quality)
    links_m = []
    urldata = client.b64encode(json.dumps('', encoding='utf-8'))
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    try:
        if urlhost == None:
            # Last two dotted components of the netloc, with a looser
            # fallback pattern for multi-part hosts.
            try:
                urlhost = re.findall(r'([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
            except:
                urlhost = re.findall(r'([\w]+[.][\w]+).*$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                urlhost = urlhost.split('.')[1]
        if urlhost != None:
            # Default the rip type when the caller did not specify one.
            if riptype == None:
                riptype_def = 'BRRIP'
            else:
                riptype_def = riptype
            # Dispatch to the first plugin that claims this host.
            for host in sourceHostsCall:
                log("Searching %s in %s" % (urlhost, host['host']), logToControl=False)
                if urlhost in host['host']:
                    log("Found %s in %s" % (urlhost, host['host']))
                    return host['call'].createMeta(url, provider, logo, quality, links, key, riptype_def, vidtype=vidtype, lang=lang, sub_url=sub_url, txt=txt, file_ext=file_ext, testing=testing, poster=poster, headers=headers)
        # No plugin matched: emit a generic, externally-played entry.
        log("urlhost '%s' not found in host/resolver plugins - creating generic meta for external services" % urlhost)
        quality = file_quality(url, quality)
        if riptype == None:
            type = rip_type(url, quality)
        else:
            type = riptype
        links_m.append({
            'source': urlhost, 'maininfo': '', 'titleinfo': '',
            'quality': quality, 'vidtype': vidtype, 'rip': type,
            'provider': provider, 'url': url, 'durl': url,
            'urldata': urldata, 'params': params, 'logo': logo,
            'online': 'Unknown', 'allowsDownload': False,
            'resumeDownload': False, 'allowsStreaming': True,
            'key': key, 'enabled': True, 'fs': int(0),
            'file_ext': file_ext, 'ts': time.time(), 'lang': lang,
            'sub_url': sub_url, 'poster': poster, 'subdomain': urlhost,
            'misc': {'player': 'eplayer', 'gp': False},
        })
    except Exception as e:
        log(type='ERROR', err="createMeta : %s url: %s" % (e.args, url))
    links += [l for l in links_m]
    return links
def createMeta(self, url, provider, logo, quality, links, key, vidtype='Movie', lang='en', txt=''): url = url.replace('oload.tv', 'openload.co') urldata = client.b64encode(json.dumps('', encoding='utf-8')) params = client.b64encode(json.dumps('', encoding='utf-8')) if control.setting('use_openload_pairing') == True or control.setting( 'is_uss_installed') == False: isPairRequired = isPairingRequired(url) #print "isPairRequired %s" % isPairRequired else: isPairRequired = False vidurl, err = resolve(url, usePairing=False) pair = '' if isPairRequired == True: pair = ' *Pairing required* ' if isPairingDone(url): pair = ' *Paired* ' if vidurl == None: vidurl = url online, r1, r2 = check(vidurl, usePairing=False, embedpage=True) files_ret = [] try: files_ret.append({ 'source': self.name, 'maininfo': pair, 'titleinfo': '', 'quality': file_quality(url, quality), 'vidtype': vidtype, 'rip': rip_type(url, quality), 'provider': provider, 'url': vidurl, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'key': key, 'enabled': True, 'ts': time.time(), 'lang': lang, 'misc': { 'pair': isPairRequired, 'player': 'iplayer', 'gp': False } }) except Exception as e: print "ERROR host_openload.py > createMeta : %s" % e.args files_ret.append({ 'source': urlhost, 'maininfo': pair, 'titleinfo': '', 'quality': quality, 'vidtype': vidtype, 'rip': 'Unknown', 'provider': provider, 'url': vidurl, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'key': key, 'enabled': True, 'ts': time.time(), 'lang': lang, 'misc': { 'pair': isPairRequired, 'player': 'eplayer', 'gp': False } }) for fr in files_ret: links.append(fr) return links
def createMeta(self, url, provider, logo, quality, links, key, riptype, vidtype='Movie', lang='en', sub_url=None, txt='', file_ext='.mp4', testing=False):
    """Resolve an openload url (normalizing oload.tv embeds to /f/ pages),
    handle the pairing workflow, and append one source-metadata dict to
    ``links``. Returns the (mutated) ``links`` list.
    """
    # Test harness short-circuit: record the raw url only.
    if testing == True:
        links.append(url)
        return links
    # Respect the per-host enable/disable user setting.
    if control.setting('Host-%s' % name) == False:
        log('INFO', 'createMeta', 'Host Disabled by User')
        return links
    # Normalize mirror domain and embed paths to the canonical file page.
    url = url.replace('oload.tv', 'openload.co').replace('/embed/', '/f/')
    orig_url = url
    durl = url
    if testing == False:
        log(type='INFO', method='createMeta-1', err=u'creating meta for url: %s' % url)
    # Empty base64-encoded JSON placeholders for urldata/params.
    urldata = client.b64encode(json.dumps('', encoding='utf-8'))
    params = client.b64encode(json.dumps('', encoding='utf-8'))
    a1 = None
    if control.setting('use_openload_pairing') == True:
        # isPairingRequired returns (required_flag, resolved_url_or_None);
        # probed twice — presumably the first call can trigger pairing and
        # the retry picks up the paired result (TODO confirm).
        isPairRequired, a1 = isPairingRequired(url)
        if isPairRequired == True:
            isPairRequired, a1 = isPairingRequired(url)
        #print "isPairRequired %s" % isPairRequired
    else:
        isPairRequired = False
    if a1 != None:
        # Pairing already produced a playable url.
        vidurl = a1
    else:
        vidurl, err, sub_url_t = resolve(url, usePairing=False)
        if sub_url == None:
            sub_url = sub_url_t
        if vidurl != None:
            # Non-pairing resolve succeeded, so pairing is moot.
            isPairRequired = False
    # Human-readable pairing status shown in the source list.
    pair = ''
    if isPairRequired == True:
        pair = ' *Pairing required* '
        if isPairingDone():
            pair = ' *Paired* '
    if vidurl == None:
        vidurl = url
    # check() returns a 6-tuple; only online / fs / sub_url are used here.
    online, r1, r2, fs, r3, sub_url_t = check(vidurl, usePairing=False, embedpage=True)
    if sub_url == None:
        sub_url = sub_url_t
    files_ret = []
    titleinfo = txt
    if testing == False:
        log(type='INFO', method='createMeta-2', err=u'pair: %s online: %s resolved url: %s' % (isPairRequired, online, vidurl))
    try:
        files_ret.append({ 'source': self.name, 'maininfo': pair, 'titleinfo': titleinfo, 'quality': file_quality(vidurl, quality), 'vidtype': vidtype, 'rip': rip_type(vidurl, riptype), 'provider': provider, 'url': vidurl, 'durl': durl, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'allowsDownload': self.allowsDownload, 'resumeDownload': self.resumeDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': True, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(url), 'misc': { 'pair': isPairRequired, 'player': 'iplayer', 'gp': False } })
    except Exception as e:
        # Fallback generic entry (rip unknown, external player).
        log(type='ERROR', method='createMeta-3', err=u'%s' % e)
        files_ret.append({ 'source': urlhost, 'maininfo': pair, 'titleinfo': titleinfo, 'quality': quality, 'vidtype': vidtype, 'rip': 'Unknown', 'provider': provider, 'url': vidurl, 'durl': durl, 'urldata': urldata, 'params': params, 'logo': logo, 'online': online, 'allowsDownload': self.allowsDownload, 'resumeDownload': self.resumeDownload, 'allowsStreaming': self.allowsStreaming, 'key': key, 'enabled': True, 'fs': int(fs), 'file_ext': file_ext, 'ts': time.time(), 'lang': lang, 'sub_url': sub_url, 'subdomain': client.geturlhost(url), 'misc': { 'pair': isPairRequired, 'player': 'eplayer', 'gp': False } })
    for fr in files_ret:
        links.append(fr)
    log('INFO', 'createMeta', 'Successfully processed %s link >>> %s' % (provider, orig_url), dolog=self.init)
    return links
def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_options=None, key=None, testing=False):
    """Scrape trailer/extra video sources for a title.

    Fetches the TA JSON pointer at ``url``, scrapes the listing page for
    extra-video entries (trailers, featurettes, deleted scenes, ...),
    resolves each entry's direct file urls and qualities, then feeds them
    through ``resolvers.createMeta`` to build the final sources list.
    Never raises; failures are logged and an (possibly empty) list is
    returned.
    """
    try:
        sources = []
        # Respect the per-provider enable/disable user setting.
        if control.setting('Provider-%s' % name) == False:
            log('INFO','get_sources','Provider Disabled by User')
            log('INFO', 'get_sources', 'Completed')
            return sources
        if url == None:
            log('FAIL','get_sources','url == None. Could not find a matching title: %s' % cleantitle.title_from_key(key), dolog=not testing)
            log('INFO', 'get_sources', 'Completed')
            return sources
        UA = client.agent()
        # get TA JSON data from tadata api
        result = proxies.request(url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
        resultx = json.loads(str(result))
        ta_url = resultx['url']
        poster = resultx['image'] if 'image' in resultx else None
        #print ta_url
        result = proxies.request(ta_url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
        # get types of videos available
        # NOTE(review): 'interview' is assigned as an extra_type below but has
        # no entry in this map, so interview items raise KeyError at
        # extras.append and are silently dropped by the except handler —
        # confirm whether that is intended.
        types = {'trailer':'Trailer', 'feature_trailer':'Trailer', 'theatrical_trailer':'Trailer', 'behind_the_scenes':'Behind the scenes', 'deleted_scene':'Deleted Scenes', 'featurette':'Featurette', 'featured_box':'Featurette', 'music_video':'Music Video', 'misc_scene':'Misc.'}
        # Site quality labels -> normalized resolution labels.
        quality_maps = {'4k':'4K','2k':'2K','1080p':'1080p', 'HD':'720p', 'M':'480p', 'S':'360p'}
        extras = []
        items = client.parseDOM(result, 'div', attrs = {'id':'featured_c'})[0]
        m_title = client.parseDOM(items, 'div', attrs = {'class':'movie_info'})
        #print m_title
        fail_bool = False
        for video in m_title:
            try:
                # Light throttle between per-item page fetches.
                time.sleep(0.1)
                # Strip literal escape-sequence residue left by the scrape.
                video = video.replace('rttttttttttt','')
                video = video.replace('rtttttttttt','')
                video = video.replace('\r','')
                video = video.replace('\t','')
                video = video.replace('\n','')
                title = client.parseDOM(video, 'a', attrs = {'class':'m_title'})[0]
                ta_tage_url = client.parseDOM(video, 'a', ret = 'href')[0]
                if 'http' not in ta_tage_url:
                    ta_tage_url = urlparse.urljoin(self.base_link, ta_tage_url)
                try:
                    vid_date = client.parseDOM(video, 'span', attrs = {'class':'m_date'})[0]
                    vid_date = vid_date.replace(',','')
                except:
                    vid_date = ''
                # Classify the extra by its title text.
                # Trailers
                if title.lower() == 'trailer':
                    extra_type = 'trailer'
                elif title.lower() == 'feature trailer':
                    extra_type = 'feature_trailer'
                elif title.lower() == 'theatrical trailer':
                    extra_type = 'theatrical_trailer'
                # Behind the scenes
                elif 'behind the scenes' in title.lower():
                    extra_type = 'behind_the_scenes'
                # Featurette
                elif 'featurette' in title.lower():
                    extra_type = 'featurette'
                # Music Video
                elif 'music video' in title.lower():
                    extra_type = 'music_video'
                # Interview
                elif 'interview' in title.lower():
                    extra_type = 'interview'
                    if title.lower().startswith('interview') or title.lower().startswith('generic interview'):
                        # Keep only the subject portion after "Interview - ".
                        title = title.split('nterview - ')[-1].split('nterview- ')[-1]
                # Deleted scene
                elif 'deleted scene' in title.lower():
                    extra_type = 'deleted_scene'
                # Trailers
                elif 'trailer' in title.lower():
                    extra_type = 'trailer'
                else:
                    extra_type = 'misc_scene'
                # process ta_tage_url
                #print ta_tage_url
                result = proxies.request(ta_tage_url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
                data = None
                # Prefer the inline packed-JS player payload when present.
                js = re.findall(r'eval\(function\(w,i,s,e\).*;', result)
                if len(js) > 0:
                    data = js[0]
                else:
                    # Otherwise extract the helper token and fetch the payload
                    # from the helper endpoint (``tau``), trying progressively
                    # looser page patterns.
                    try:
                        jsd = re.findall(r'src="/util/client.js?c=(.*?)"><', result)[0].strip()
                    except:
                        try:
                            jsd = re.findall(r'</style>rttr<!-- (.*?) -->rrttrtt<div id=\"embed_box\">', result)[0].strip()
                        except:
                            jsd = re.findall(r'</style>.*<!-- (.*?) -->.*<div id=\"embed_box\">', result, flags=re.DOTALL)[0].strip()
                    jsd_url = tau % (urllib.quote_plus(jsd), client.b64encode(str(int(time.time()))), client.b64encode(ta_tage_url), client.b64encode(UA), control.setting('ver'), client.b64encode(control.setting('ca')))
                    data = proxies.request(jsd_url)
                    if data == None:
                        log('ERROR', 'get_sources-1', '%s' % jsd_url, dolog=True)
                if data != None:
                    # HTTP 423 body marks the helper as locked/unavailable.
                    if str(data) == '423':
                        fail_bool = True
                        raise Exception("Helper site is currently unavailable !")
                    try:
                        data = unwise2.unwise_process(data)
                    except:
                        raise Exception("unwise2 could not process data")
                else:
                    raise Exception("URL Post Data Unavailable")
                files = re.findall(r'source src="([^"]+)"', data)
                quals = re.findall(r'res=\"(.*?)\"', data)
                # Keep one file per distinct quality.
                processed = []
                for i in range(0, len(files)):
                    v_file = files[i]
                    if quals[i] in quality_maps.keys():
                        quality = quality_maps[quals[i]]
                    else:
                        quality = '720p'
                    #print extra_type
                    if quality not in processed:
                        #print v_file
                        processed.append(quality)
                        extras.append( {'etype': extra_type, 'date': vid_date, 'type': types[extra_type], 'url' : v_file, 'quality': quality, 'title': title, 'thumb': poster} )
                if testing == True and len(extras) > 0:
                    break
            except Exception as e:
                log('ERROR', 'get_sources-2', '%s' % e, dolog=True)
                if fail_bool == True:
                    # Helper outage: abort the whole scrape, not just this item.
                    raise Exception("%s" % e)
        links = []
        #print extras
        for extra in extras:
            links = resolvers.createMeta(extra['url'], self.name, self.logo, extra['quality'], links, key, vidtype=extra['type'], testing=testing, txt=extra['title'], poster=extra['thumb'])
            if testing == True and len(links) > 0:
                break
        for i in links:
            sources.append(i)
        if len(sources) == 0:
            log('FAIL','get_sources','Could not find a matching title: %s' % cleantitle.title_from_key(key))
        else:
            log('SUCCESS', 'get_sources','%s sources : %s' % (cleantitle.title_from_key(key), len(sources)))
        log('INFO', 'get_sources', 'Completed')
        return sources
    except Exception as e:
        log('ERROR', 'get_sources', '%s' % e)
        log('INFO', 'get_sources', 'Completed')
        return sources