def testSiteAlts(self, site):
    """Check that *site* serves the expected main-page content.

    Tries a direct request first, then a web-proxied request, then one
    more proxied attempt after a short pause.  Side effects: updates
    self.speedtest with the duration of the last attempt and sets
    self.proxyrequired when only a proxied request succeeds.

    Returns True when self.MainPageValidatingContent is found, else False.
    """
    def _probe(use_web_proxy):
        # One timed fetch; returns (validated, http_status).
        started = time.time()
        http_res, content = proxies.request(url=site, output='response',
                                            use_web_proxy=use_web_proxy)
        self.speedtest = time.time() - started
        ok = content is not None and content.find(self.MainPageValidatingContent) > -1
        return ok, http_res

    try:
        ok, http_res = _probe(False)
        if ok:
            log('SUCCESS', 'testSite', 'HTTP Resp : %s for %s' % (http_res, site))
            return True
        log('FAIL', 'testSite',
            'Validation content Not Found. HTTP Resp : %s for %s' % (http_res, site))

        ok, http_res = _probe(True)
        if ok:
            self.proxyrequired = True
            log('SUCCESS', 'testSite', 'HTTP Resp : %s via proxy for %s' % (http_res, site))
            return True

        time.sleep(2.0)  # brief pause before the final proxied retry
        ok, http_res = _probe(True)
        if ok:
            self.proxyrequired = True
            log('SUCCESS', 'testSite', 'HTTP Resp : %s via proxy for %s' % (http_res, site))
            return True

        log('FAIL', 'testSite',
            'Validation content Not Found. HTTP Resp : %s via proxy for %s' % (http_res, site))
        return False
    except Exception as e:
        log('ERROR', 'testSite', '%s' % e)
        return False
def ymovies_info(self, url, proxy_options=None):
    """Resolve a ymovies title page to a (year, quality, href) tuple.

    For non-ajax URLs the title page is fetched first to scrape the
    numeric film id, then the ajax info endpoint (self.info_link2) is
    queried.  Returns None when anything fails (best-effort scraper).
    """
    try:
        if 'ajax' not in url:
            # Plain title page: extract the film id, then switch to the
            # structured info endpoint.
            r = proxies.request(url, headers=self.headers, IPv4=True,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired)
            film_id = re.findall(r'var id = (.*);', r)[0]
            url = self.info_link2 % film_id
        r = proxies.request('%s?link_web=%s' % (url, self.base_link),
                            headers=self.headers, IPv4=True,
                            proxy_options=proxy_options,
                            use_web_proxy=self.proxyrequired)
        r = r.replace('\\', '')  # strip JSON escaping from the html payload
        q = client.parseDOM(r, 'div', attrs={'class': 'jtip-quality'})[0]
        q = cleantitle.getQuality(q)
        y = client.parseDOM(r, 'div', attrs={'class': 'jt-info'})[0]
        h = client.parseDOM(r, 'div', attrs={'class': 'jtip-bottom'})[0]
        h = client.parseDOM(h, 'a', ret='href')[0].replace(
            self.base_link, self.base_link + '/')
        return (y, q, h)
    except Exception:
        # Any fetch/parse failure yields None for the caller to skip.
        return
def testSite(self):
    """Validate self.base_link, falling back to a web proxy.

    Probes directly, then via proxy, then via proxy again after a 2s
    pause.  Updates self.speedtest with the last attempt's duration and
    sets self.proxyrequired when only a proxied request validates.
    Returns True on success, False otherwise.
    """
    def _probe(use_web_proxy):
        # One timed fetch; returns (validated, http_status, raw_content).
        started = time.time()
        http_res, content = proxies.request(url=self.base_link, output='response',
                                            use_web_proxy=use_web_proxy)
        self.speedtest = time.time() - started
        ok = content is not None and content.find(self.MainPageValidatingContent) > -1
        return ok, http_res, content

    try:
        ok, http_res, content = _probe(False)
        if ok:
            self.log('SUCCESS', 'testSite', 'HTTP Resp : %s for %s' % (http_res, self.base_link), dolog=True)
            return True
        self.log('ERROR', 'testSite', 'HTTP Resp : %s for %s' % (http_res, self.base_link), dolog=True)

        ok, http_res, content = _probe(True)
        if ok:
            self.proxyrequired = True
            self.log('SUCCESS', 'testSite', 'HTTP Resp : %s via proxy for %s' % (http_res, self.base_link), dolog=True)
            return True

        time.sleep(2.0)  # brief pause before the final proxied retry
        ok, http_res, content = _probe(True)
        if ok:
            self.proxyrequired = True
            self.log('SUCCESS', 'testSite', 'HTTP Resp : %s via proxy for %s' % (http_res, self.base_link), dolog=True)
            return True

        self.log('ERROR', 'testSite', 'HTTP Resp : %s via proxy for %s' % (http_res, self.base_link), dolog=True)
        # Dump the page body to aid debugging validation failures.
        self.log('ERROR', 'testSite', content, dolog=True)
        return False
    except Exception as e:
        self.log('ERROR', 'testSite', '%s' % e, dolog=True)
        return False
def getSetServerTs(self):
    """Return the site's 'data-ts' timestamp string, or None on failure.

    Follows redirects from the home page to the current domain, then
    scrapes the first data-ts attribute.  The network calls are inside
    the try (the original let request failures propagate).
    """
    try:
        geturl = proxies.request('https://bmovies.is/home', output='geturl')
        res = proxies.request(geturl)
        myts_raw = re.findall(r'data-ts="(.*?)"', res)[0]
        # int() round-trip validates that the scraped value is numeric.
        return str(int(myts_raw))
    except Exception:
        pass
    return None
def getVidToken(self):
    """Populate self.TOKEN_KEY with the site's video token key.

    Tries two base64-hidden regex probes against the unpacked all.js
    bundle, then always appends the pastebin-hosted fallback key (the
    'or True' below makes the fallback unconditional).  All failures
    are logged quietly; callers inspect len(self.TOKEN_KEY).
    """
    unpacked_code = ''
    cch = ''
    try:
        all_js_url = urlparse.urljoin(self.base_link, self.ALL_JS)
        if len(self.TOKEN_KEY) == 0:
            try:
                all_js_pack_code = proxies.request(all_js_url, use_web_proxy=self.proxyrequired, httpsskip=True)
                unpacked_code = jsunpack.unpack(all_js_pack_code)
                # b64-encoded regex locating the obfuscated token function
                cch = re.findall(r'%s' % common.client.b64decode('ZnVuY3Rpb25cKHQsaSxuXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gZVwoXCl7cmV0dXJuICguKj8pfWZ1bmN0aW9uIHJcKHRcKQ=='), unpacked_code)[0]
                token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
                if token_key:
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1', '%s' % e, dolog=False)
        if len(self.TOKEN_KEY) == 0:
            try:
                # Second probe: looser, character-class based signature.
                cch = re.findall(r'%s' % common.client.b64decode('ZnVuY3Rpb25cKFthLXpdLFthLXpdLFthLXpdXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gW2Etel1cKFwpe3JldHVybiAoLio/KX1mdW5jdGlvbiBbYS16XVwoW2Etel1cKQ=='), unpacked_code)[0]
                token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
                if token_key:
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.2', '%s' % e, dolog=False)
    except Exception as e:
        log('ERROR', 'getVidToken-1', '%s' % e, dolog=False)
        log('ERROR', 'getVidToken-1', '%s' % unpacked_code, dolog=False)
        log('ERROR', 'getVidToken-1', '%s' % cch, dolog=False)
    try:
        # NOTE(review): 'or True' makes this unconditional — the pastebin
        # key is always appended as a last-resort candidate.
        if len(self.TOKEN_KEY) == 0 or True:
            token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if token_key:
                self.TOKEN_KEY.append(token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-2', '%s' % e, dolog=False)
def getVidToken(self):
    """Fetch and persist the video token key, plus provider flags.

    Scrapes the key out of the packed all.js bundle and stores it via
    control.set_setting; falls back to a pastebin-hosted copy.  A third
    step refreshes self.FLAGS from its pastebin JSON.  All failures are
    logged quietly; callers inspect self.TOKEN_KEY.
    """
    # Hoisted so the except handler below can always reference them
    # (the original bound them after a fail-able urljoin call).
    unpacked_code = ''
    cch = ''
    try:
        all_js_url = urlparse.urljoin(self.base_link, self.ALL_JS)
        if len(self.TOKEN_KEY) == 0:
            all_js_pack_code = proxies.request(all_js_url, use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7)
            unpacked_code = jsunpack.unpack(all_js_pack_code)
            # b64-encoded regex locating the obfuscated token function
            cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKFthLXpdLFthLXpdLFthLXpdXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gW2Etel1cKFwpe3JldHVybiAoLio/KX0='), unpacked_code)[0]
            token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
            if token_key:
                self.TOKEN_KEY.append(token_key)
                control.set_setting(name + 'VidToken', token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-1', '%s' % e, dolog=False)
        log('ERROR', 'getVidToken-1', '%s' % unpacked_code, dolog=False)
        log('ERROR', 'getVidToken-1', '%s' % cch, dolog=False)
    try:
        if len(self.TOKEN_KEY) == 0:
            token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7)
            if token_key:
                self.TOKEN_KEY.append(token_key)
                control.set_setting(name + 'VidToken', token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-2', '%s' % e, dolog=False)
    try:
        fm_flags = proxies.request(self.FLAGS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True, timeout=7)
        if fm_flags:
            self.FLAGS = json.loads(fm_flags)
    except Exception as e:
        log('ERROR', 'getVidToken-3-Flags', '%s' % e, dolog=False)
def getSetServerTs(self):
    """Scrape the fmovies landing page's 'data-ts' value.

    Returns the timestamp as a string, or None when fetching or
    parsing fails.
    """
    try:
        landing = proxies.request('https://fmovies.taxi', output='geturl',
                                  httpsskip=True, timeout=7)
        page = proxies.request(landing, httpsskip=True, timeout=7)
        ts_raw = re.findall(r'data-ts="(.*?)"', page)[0]
        return str(int(ts_raw))
    except:
        pass
    return None
def initAndSleep(self):
    """Refresh the video token, request headers and session cookies.

    Builds a Cookie header from three pieces: the Cloudflare cookie,
    the hash-menu session cookie, and the decoded 'reqkey' value.
    Failures are logged and swallowed.
    """
    try:
        self.TOKEN_KEY = []
        self.getVidToken()
        if len(self.TOKEN_KEY) > 0:
            log('SUCCESS', 'initAndSleep', 'Vid Token: %s' % client.b64encode(self.TOKEN_KEY[0]))
        else:
            log('FAIL', 'initAndSleep', 'Vid Token Not retrieved !')

        target = self.base_link
        self.headers = {'X-Requested-With': 'XMLHttpRequest'}
        self.headers['Referer'] = target
        self.headers['User-Agent'] = client.randomagent()

        # Cloudflare clearance cookie.
        cookie1 = proxies.request(url=target, headers=self.headers,
                                  output='cookie',
                                  use_web_proxy=self.proxyrequired,
                                  httpsskip=True)
        self.headers['Cookie'] = cookie1

        # 'reqkey' cookie decoded from the token page (best effort).
        try:
            token_url = urlparse.urljoin(target, self.token_link)
            token_page = proxies.request(token_url, headers=self.headers, httpsskip=True)
            reqkey = self.decodeJSFCookie(token_page)
        except:
            reqkey = ''

        # Session cookie keyed on the hour-aligned server timestamp.
        serverts = str(((int(time.time())/3600)*3600))
        query = {'ts': serverts}
        try:
            token_dict = self.__get_token(query)
        except:
            token_dict = self.__get_token(query, True)
        query.update(token_dict)

        hash_url = urlparse.urljoin(target, self.hash_menu_link)
        hash_url = hash_url + '?' + urllib.urlencode(query)
        r1, headers, content, cookie2 = proxies.request(hash_url,
                                                        headers=self.headers,
                                                        limit='0',
                                                        output='extended',
                                                        httpsskip=True)

        cookie = '%s; %s; user-info=null; reqkey=%s' % (cookie1, cookie2, reqkey)
        self.headers['Cookie'] = cookie
        log('SUCCESS', 'initAndSleep', 'Cookies : %s for %s' % (cookie, self.base_link))
    except Exception as e:
        log('ERROR', 'initAndSleep', '%s' % e)
def testSiteAlts(self, site):
    """Point the provider at *site* and verify its main page.

    Sets self.base_link, initializes headers/cookies, then checks the
    response against every candidate in self.MainPageValidatingContent.
    Records the request duration in self.speedtest.  Returns True when
    any validating string is found, else False.
    """
    try:
        self.base_link = site
        if self.disabled:
            log('INFO', 'testSite', 'Plugin Disabled')
            return False
        self.initAndSleep()

        started = time.time()
        http_res, content = proxies.request(url=site, headers=self.headers,
                                            output='response',
                                            use_web_proxy=False,
                                            httpsskip=True)
        self.speedtest = time.time() - started

        if content is not None:
            for marker in self.MainPageValidatingContent:
                if content.find(marker) > -1:
                    log('SUCCESS', 'testSite', 'HTTP Resp : %s for %s' % (http_res, site))
                    return True

        log(
            'FAIL', 'testSite',
            'Validation content Not Found. HTTP Resp : %s for %s' %
            (http_res, site))
        return False
    except Exception as e:
        log('ERROR', 'testSite', '%s' % e)
        return False
def ymovies_info_season(self, title, season, proxy_options=None):
    """Search ymovies for a show season.

    Tries '<title> Season <n>' first, then the bare title.  Each search
    is retried up to three times.  Returns a list of (href, title)
    tuples, or None when every attempt fails.
    """
    try:
        r = None  # ensure a defined value even if both queries fail
        queries = ['%s Season %s' % (cleantitle.query(title), season),
                   cleantitle.query(title)]
        for qm in queries:
            try:
                q = '/search/%s.html' % (urllib.quote_plus(qm))
                q = urlparse.urljoin(self.base_link, q)
                for i in range(3):  # retry transient fetch failures
                    r = proxies.request(q, IPv4=True,
                                        proxy_options=proxy_options,
                                        use_web_proxy=self.proxyrequired)
                    if r is not None:
                        break
                r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
                r = [(client.parseDOM(i, 'a', ret='href'),
                      client.parseDOM(i, 'a', ret='title')) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if i[0] and i[1]]
                if r is not None and len(r) > 0:
                    break  # got results; no need to try the next query
            except Exception:
                pass
        return r
    except Exception:
        return
def get_show(self, tvshowtitle, season, imdb=None, tvdb=None, year=None, proxy_options=None, key=None):
    """Search the site for *tvshowtitle* and return its relative URL.

    A result matches only when both the cleaned title and the year
    agree.  Returns a utf-8 encoded path, or None on any failure
    (logged with the show title).
    """
    try:
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_show', 'Provider Disabled by User')
            return None
        t = cleantitle.get(tvshowtitle)
        year = '%s' % year
        q = urlparse.urljoin(self.base_link, self.search_link)
        q = q % urllib.quote_plus(tvshowtitle)
        r = proxies.request(q, proxy_options=proxy_options,
                            use_web_proxy=self.proxyrequired, IPv4=True)
        r = client.parseDOM(r, 'ul', attrs={'class': 'items'})
        r = client.parseDOM(r, 'li')
        if len(r) == 0:
            raise Exception('Could not find a matching show title: %s' % tvshowtitle)
        # (href, title, year) per result item.
        r = [(client.parseDOM(i, 'a', ret='href'),
              client.parseDOM(i, 'a', ret='title'),
              re.findall(r'\d{4}', i)) for i in r]
        r = [(i[0][0], i[1][0], i[2][-1]) for i in r if i[0] and i[1] and i[2]]
        r = [i for i in r if t == cleantitle.get(i[1]) and year == i[2]]
        r = r[0][0]
        # Strip any scheme/host, keep the site-relative path.
        url = re.findall('(?://.+?|)(/.+)', r)[0]
        url = client.replaceHTMLCodes(url)
        url = url.encode('utf-8')
        return url
    except Exception as e:
        log('ERROR', 'get_show', '%s: %s' % (tvshowtitle, e), dolog=self.init)
        return
def get_movie(self, imdb, title, year, proxy_options=None, key=None):
    """Find a movie page URL matching *title* and *year*.

    Tries the title as-is and with '&' spelled out, queries the API
    search endpoint, and confirms each candidate's year through
    self.ymovies_info.  Returns [url, None] on success, None otherwise.
    """
    try:
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_movie', 'Provider Disabled by User')
            return None
        variations = [title, title.replace('&', 'and')]
        for title in variations:
            try:
                t = cleantitle.get(title)
                q = self.api_search % (urllib.quote_plus(
                    cleantitle.query(title).replace(' ', '-')), self.base_link)
                r = proxies.request(q, headers=self.headers, IPv4=True,
                                    proxy_options=proxy_options,
                                    use_web_proxy=self.proxyrequired)
                r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
                # (href, title, data-url) per result item.
                r = [(client.parseDOM(i, 'a', ret='href'),
                      client.parseDOM(i, 'a', ret='title'),
                      client.parseDOM(i, 'a', ret='data-url')) for i in r]
                r = [(i[0][0], i[1][0], i[2][0]) for i in r if i[0] and i[1] and i[2]]
                # Keep at most two title-exact candidates.
                r = [(i[0], i[2]) for i in r if t == cleantitle.get(i[1])][:2]
                for i in r:
                    try:
                        u = i[1]
                        if 'http' not in u:
                            u = urlparse.urljoin(self.base_link, u)
                        y, q, h = self.ymovies_info(u)
                        if 'http' not in h:
                            h = urlparse.urljoin(self.base_link, h)
                        # Reject candidates whose release year disagrees.
                        if str(y).strip() != str(year).strip() or h is None:
                            raise Exception()
                        url = h
                        return [url, None]
                    except Exception:
                        pass
            except Exception:
                pass
    except Exception as e:
        log('ERROR', 'get_movie', '%s: %s' % (title, e), dolog=self.init)
    return
def get_servers(self, page_url, proxy_options=None):
    """Fetch the ajax server list for a film page.

    Derives the film id from the trailing '.<id>' of *page_url*, calls
    the /ajax/film/servers endpoint on the page's own host, and wraps
    the returned html fragment in a minimal document.
    """
    host_base = 'https://%s' % client.geturlhost(page_url)
    film_id = page_url.rsplit('.', 1)[1]
    server_url = urlparse.urljoin(host_base, '/ajax/film/servers/%s' % film_id)
    log('INFO', 'get_servers', server_url, dolog=False)
    result = proxies.request(server_url, headers=self.headers,
                             referer=page_url, limit='0',
                             proxy_options=proxy_options,
                             use_web_proxy=self.proxyrequired,
                             httpsskip=True)
    fragment = json.loads(result)['html'].replace('\n', '').replace('\\', '')
    return '<html><body><div id="servers-container">%s</div></body></html>' % fragment
def setNewCookies(self):
    """Rotate the User-Agent and refresh the cookie for base_link.

    Stores the new cookie on both self.cookie and the Cookie header;
    failures are logged and swallowed.
    """
    try:
        self.headers['User-Agent'] = client.randomagent()
        self.cookie = proxies.request(url=self.base_link,
                                      headers=self.headers,
                                      output='cookie',
                                      use_web_proxy=self.proxyrequired)
        self.headers['Cookie'] = self.cookie
    except Exception as e:
        log('ERROR', 'setNewCookies', '%s' % e)
def setNewCookies(self, site):
    """Rotate the User-Agent and refresh the CF cookie for *site*.

    Stores the cookie on self.cookie and the Cookie header; raises (and
    logs) when no cookie could be retrieved.
    """
    try:
        self.headers['User-Agent'] = client.randomagent()
        self.cookie = proxies.request(url=site, headers=self.headers,
                                      output='cookie',
                                      use_web_proxy=self.proxyrequired)
        if self.cookie is None:
            raise Exception('Retrieved cookie None')
        self.headers['Cookie'] = self.cookie
        log('SUCCESS', 'setNewCookies', 'CF Cookie : %s for %s' % (self.cookie, site))
    except Exception as e:
        log('ERROR', 'setNewCookies', '%s' % e)
def getVidToken(self):
    """Populate self.TOKEN_KEY from the packed all.js bundle.

    Falls back to the pastebin-hosted key when scraping fails.  Errors
    are logged via self.log; callers inspect len(self.TOKEN_KEY).
    """
    try:
        all_js_url = urlparse.urljoin(self.base_link, self.ALL_JS)
        if len(self.TOKEN_KEY) == 0:
            all_js_pack_code = proxies.request(all_js_url,
                                               use_web_proxy=self.proxyrequired,
                                               httpsskip=True)
            unpacked_code = jsunpack.unpack(all_js_pack_code)
            # b64-encoded regex locating the obfuscated token function
            cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKHQsZSxpXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gblwoXCl7cmV0dXJuICguKj8pfWZ1bmN0aW9uIHJcKHRcKQ=='), unpacked_code)[0]
            token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
            if token_key:
                self.TOKEN_KEY.append(token_key)
    except Exception as e:
        self.log('ERROR', 'getVidToken-1', '%s' % e)
    try:
        if len(self.TOKEN_KEY) == 0:
            token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL,
                                        use_web_proxy=self.proxyrequired,
                                        httpsskip=True)
            if token_key:
                self.TOKEN_KEY.append(token_key)
    except Exception as e:
        self.log('ERROR', 'getVidToken-2', '%s' % e)
def get_show(self, imdb=None, tvdb=None, tvshowtitle=None, year=None, season=None, proxy_options=None, key=None, testing=False):
    """Search the provider for *tvshowtitle*.

    Returns a list of {'page', 'title', 'poster'} dicts whose titles
    occur inside *tvshowtitle*, or None on failure.

    NOTE(review): the pagination scaffolding (range(100) plus a bare
    raise past the last page) is retained from the original, but the
    function returns after processing the first page.
    """
    try:
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_show', 'Provider Disabled by User')
            return None
        last_page = None  # renamed from 'max' to avoid shadowing the builtin
        for pg in range(100):
            query_url = urlparse.urljoin(self.base_link, self.search_link) % (
                pg, urllib.quote_plus(cleantitle.query(tvshowtitle)))
            if last_page is not None and pg > int(last_page):
                raise  # no active exception: deliberately aborts via the outer handler
            log(type='INFO', method='get_show', err='Searching - %s' % (query_url),
                dolog=False, logToControl=False, doPrint=True)
            result = proxies.request(query_url, proxy_options=proxy_options,
                                     use_web_proxy=self.proxyrequired)
            if last_page is None:
                pages = client.parseDOM(result, 'a', attrs={'class': 'page gradient'})
                last_page = pages[-1]
            url_data = client.parseDOM(result, 'div', attrs={'class': 'ajuste4'})
            links_data = []
            for data in url_data:
                url = urlparse.urljoin(self.base_link,
                                       client.parseDOM(data, 'a', ret='href')[0])
                titlex = client.parseDOM(data, 'img', ret='alt')[0]
                try:
                    poster = urlparse.urljoin(self.base_link,
                                              client.parseDOM(data, 'img', ret='src')[0])
                except Exception:
                    poster = None
                if titlex in tvshowtitle:
                    links_data.append({'page': url, 'title': titlex, 'poster': poster})
                if testing == True:
                    break  # one result is enough in test mode
            return links_data
        return
    except Exception as e:
        log('ERROR', 'get_show', '%s: %s' % (tvshowtitle, e), dolog=self.init)
        return
def ymovies_info(self, url, proxy_options=None):
    """Fetch the quick-info tooltip for a title.

    Retries the request up to three times, then returns (year, quality)
    scraped from the tooltip markup, or None on any failure.
    """
    try:
        u = urlparse.urljoin(self.base_link, self.info_link)
        r = None
        for i in range(3):  # retry transient fetch failures
            r = proxies.request(u % url, IPv4=True,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired)
            if r is not None:
                break
        q = client.parseDOM(r, 'div', attrs={'class': 'jtip-quality'})[0]
        y = client.parseDOM(r, 'div', attrs={'class': 'jt-info'})
        # Keep only the 4-digit year among the info fields.
        y = [i.strip() for i in y if i.strip().isdigit() and len(i.strip()) == 4][0]
        return (y, q)
    except Exception:
        return
def get_movie(self, imdb, title, year, proxy_options=None, key=None):
    """Find a movie page path matching *title* and *year*.

    Tries the title as-is and with '&' spelled out.  Candidate years
    are checked through a cached ymovies_info lookup.  Returns a
    (path, '') tuple on success, None otherwise.
    """
    try:
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_movie', 'Provider Disabled by User')
            return None
        variations = [title, title.replace('&', 'and')]
        for title in variations:
            try:
                t = cleantitle.get(title)
                q = '/search/%s.html' % (urllib.quote_plus(cleantitle.query(title)))
                q = urlparse.urljoin(self.base_link, q)
                r = None
                for i in range(3):  # retry transient fetch failures
                    r = proxies.request(q, IPv4=True,
                                        proxy_options=proxy_options,
                                        use_web_proxy=self.proxyrequired)
                    if r is not None:
                        break
                r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
                r = [(client.parseDOM(i, 'a', ret='href'),
                      client.parseDOM(i, 'a', ret='title')) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if i[0] and i[1]]
                # At most two title-exact candidates, each paired with
                # the numeric id at the end of its href.
                r = [i[0] for i in r if t == cleantitle.get(i[1])][:2]
                r = [(i, re.findall(r'(\d+)', i)[-1]) for i in r]
                for i in r:
                    try:
                        y, q = cache.get(self.ymovies_info, 9000, i[1],
                                         proxy_options=proxy_options)
                        if y != year:
                            raise Exception()  # wrong release year
                        return urlparse.urlparse(i[0]).path, ''
                    except Exception:
                        pass
            except Exception:
                pass
    except Exception as e:
        log('ERROR', 'get_movie', '%s: %s' % (title, e), dolog=self.init)
    return
def testSite(self):
    """Validate self.base_link with a single direct request.

    Skips entirely when the plugin is disabled.  Records the request
    duration in self.speedtest.  Returns True when the validating
    content is found, else False.
    """
    try:
        if self.disabled:
            self.log('INFO', 'testSite', 'Plugin Disabled', dolog=True)
            return False
        self.initAndSleep()
        started = time.time()
        http_res, content = proxies.request(url=self.base_link,
                                            headers=self.headers,
                                            output='response',
                                            use_web_proxy=False)
        self.speedtest = time.time() - started
        if content is not None and content.find(self.MainPageValidatingContent) > -1:
            self.log('SUCCESS', 'testSite', 'HTTP Resp : %s for %s' % (http_res, self.base_link), dolog=True)
            return True
        self.log('ERROR', 'testSite', 'HTTP Resp : %s for %s' % (http_res, self.base_link), dolog=True)
        return False
    except Exception as e:
        self.log('ERROR', 'testSite', '%s' % e, dolog=True)
        return False
def request_via_proxy(self, url, proxy_name, proxy_url, close=True, redirect=True, followredirect=False, error=False, proxy=None, post=None, headers=None, mobile=False, limit=None, referer=None, cookie=None, output='', timeout='30', httpsskip=False, use_web_proxy=False, use_web_proxy_as_backup=False, XHR=False, IPv4=False):
    """Thin pass-through: forward every argument to proxies.request()."""
    return proxies.request(
        url=url,
        proxy_name=proxy_name,
        proxy_url=proxy_url,
        close=close,
        redirect=redirect,
        followredirect=followredirect,
        error=error,
        proxy=proxy,
        post=post,
        headers=headers,
        mobile=mobile,
        limit=limit,
        referer=referer,
        cookie=cookie,
        output=output,
        timeout=timeout,
        httpsskip=httpsskip,
        use_web_proxy=use_web_proxy,
        use_web_proxy_as_backup=use_web_proxy_as_backup,
        XHR=XHR,
        IPv4=IPv4)
def initAndSleep(self):
    """Prime request headers (Referer, User-Agent, CF cookie) for base_link."""
    try:
        target = self.base_link
        self.headers = {'X-Requested-With': 'XMLHttpRequest'}
        self.headers['Referer'] = target
        self.headers['User-Agent'] = client.randomagent()
        # Cloudflare clearance cookie for subsequent requests.
        cookie = proxies.request(url=target, headers=self.headers,
                                 output='cookie',
                                 use_web_proxy=self.proxyrequired,
                                 httpsskip=True)
        self.headers['Cookie'] = cookie
        log('SUCCESS', 'initAndSleep', 'Cookies : %s for %s' % (cookie, self.base_link))
    except Exception as e:
        log('ERROR', 'initAndSleep', '%s' % e)
def get_show(self, tvshowtitle, season, imdb=None, tvdb=None, year=None, proxy_options=None, key=None):
    """Search the site for *tvshowtitle* and return its relative URL.

    A result matches only when both the cleaned title and the year
    agree.  Returns a utf-8 encoded path, or None on failure.
    """
    try:
        t = cleantitle.get(tvshowtitle)
        q = urlparse.urljoin(self.base_link, self.search_link)
        q = q % urllib.quote_plus(tvshowtitle)
        r = proxies.request(q, proxy_options=proxy_options,
                            use_web_proxy=self.proxyrequired, IPv4=True)
        r = client.parseDOM(r, 'ul', attrs={'class': 'items'})
        r = client.parseDOM(r, 'li')
        # (href, title, year) per result item.
        r = [(client.parseDOM(i, 'a', ret='href'),
              client.parseDOM(i, 'a', ret='title'),
              re.findall(r'\d{4}', i)) for i in r]
        r = [(i[0][0], i[1][0], i[2][-1]) for i in r if i[0] and i[1] and i[2]]
        r = [i for i in r if t == cleantitle.get(i[1]) and year == i[2]]
        r = r[0][0]
        # Strip any scheme/host, keep the site-relative path.
        url = re.findall('(?://.+?|)(/.+)', r)[0]
        url = client.replaceHTMLCodes(url)
        url = url.encode('utf-8')
        return url
    except Exception as e:
        print(e)  # parenthesized form is valid on both py2 and py3
        return
def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_options=None, key=None, testing=False):
    """Collect playable sources for a title.

    Builds candidate watch-page slugs from the title metadata (several
    title/year/season permutations), scrapes each page's g2g/ytid
    iframes, follows nested frames (including two-part '.php'/'2.php'
    hosts), and registers links via resolvers.createMeta.

    Returns a list of source dicts (possibly empty).
    """
    try:
        sources = []
        if url is None:
            return sources

        def _slug(t):
            # "Some: Title!" -> "some-title" (py2 str.translate signature).
            return (t.translate(None, '\/:*?"\'<>|!,')).replace(' ', '-').replace('--', '-').lower()

        url_arr = []
        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
        episodic = 'episode' in data and 'season' in data

        if episodic:
            url_arr.append(_slug(data['title']) + "/s%s/e%s" % (data['season'], data['episode']))
        else:
            url_arr.append(_slug(data['title']))
            url_arr.append(_slug(data['title']) + "-%s" % (data['year']))
        # Variants built from the title up to its first ':' (best effort).
        try:
            url_arr.append(_slug(data['title'].split(':')[0]))
        except Exception:
            pass
        if episodic:
            try:
                url_arr.append(_slug(data['title'].split(':')[0]) + "/s%s/e%s" % (data['season'], data['episode']))
            except Exception:
                pass
        else:
            try:
                url_arr.append(_slug(data['title'].split(':')[0]))
                url_arr.append(_slug(data['title'].split(':')[0]) + "-%s" % (data['year']))
            except Exception:
                pass
        url_arr = list(set(url_arr))

        links = []
        for url in url_arr:
            try:
                url = urlparse.urljoin(self.base_link, self.watch_link % url)
                # geturl probe first: skip slugs that do not resolve.
                r = proxies.request(url, output='geturl', proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
                if r is None:
                    raise Exception()
                r = result = proxies.request(url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
                quality = '720p'
                r = re.sub(r'[^\x00-\x7F]+', ' ', r)  # strip non-ascii before regex parsing
                if not episodic:
                    y = re.findall(r'Date\s*:\s*.+?>.+?(\d{4})', r)
                    y = y[0] if len(y) > 0 else None
                    if 'year' in data and y is not None and data['year'] != y:
                        raise Exception()  # wrong release year
                q = client.parseDOM(r, 'title')
                q = q[0] if len(q) > 0 else None
                quality = '1080p' if ' 1080' in q else '720p'
                try:
                    frames = client.parseDOM(result, 'iframe', ret='src')
                    for r in [i for i in frames if 'g2g' in i or 'ytid' in i]:
                        try:
                            if 'http' not in r and self.urlhost in r:
                                r = 'http:' + r
                            elif 'http' not in r:
                                r = self.base_link + r
                            r = proxies.request(r, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
                            r = re.sub(r'[^\x00-\x7F]+', ' ', r)
                            r = client.parseDOM(r, 'iframe', ret='src')[0]
                            part2 = False
                            if '.php' in r:
                                # Two-part hosts expose a sibling '2.php' frame.
                                r = self.base_link + r
                                rx = r.replace('.php', '2.php')
                                r = proxies.request(r, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
                                r = re.sub(r'[^\x00-\x7F]+', ' ', r)
                                r = client.parseDOM(r, 'iframe', ret='src')[0]
                                try:
                                    rx = proxies.request(rx, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, IPv4=True)
                                    rx = re.sub(r'[^\x00-\x7F]+', ' ', rx)
                                    rx = client.parseDOM(rx, 'iframe', ret='src')[0]
                                    if 'http' not in rx:
                                        rx = 'http:' + rx
                                    part2 = True
                                except Exception:
                                    pass
                            if 'http' not in r:
                                r = 'http:' + r
                            if 'youtube' in r:
                                vidtype = 'Trailer'
                                qualityt = '720p'
                                r = r.replace('?showinfo=0', '')
                            else:
                                vidtype = 'Movie'
                                qualityt = quality
                            if part2:
                                links = resolvers.createMeta(r, self.name, self.logo, qualityt, links, key, vidtype=vidtype, txt='Part-1')
                                links = resolvers.createMeta(rx, self.name, self.logo, qualityt, links, key, vidtype=vidtype, txt='Part-2')
                            else:
                                links = resolvers.createMeta(r, self.name, self.logo, qualityt, links, key, vidtype=vidtype)
                        except Exception:
                            pass
                except Exception as e:
                    control.log('ERROR %s get_sources3 > %s' % (self.name, e.args))
            except Exception:
                pass
        for i in links:
            sources.append(i)
        return sources
    except Exception as e:
        control.log('ERROR %s get_sources > %s' % (self.name, e))
        return sources
def testSiteAlts(self, site):
    """Resolve *site* to its current domain and validate its main page.

    Updates self.base_link to the redirect target, then probes it
    directly, via proxy (using an http: downgrade of the URL), and via
    proxy once more after a 2s pause.  On success also refreshes
    self.cookie / the Cookie header.  Sets self.proxyrequired when
    only a proxied request validates.  Returns True/False.
    """
    try:
        ua = client.randomagent()
        self.headers['User-Agent'] = ua
        # Follow redirects so base_link reflects the site's current domain.
        self.base_link = proxies.request(url=site, headers=self.headers, output='geturl', use_web_proxy=False, httpsskip=True).strip("/")

        x1 = time.time()
        http_res, content = proxies.request(url=self.base_link, headers=self.headers, output='response', use_web_proxy=False, httpsskip=True)
        self.speedtest = time.time() - x1
        if content is not None and content.find(self.MainPageValidatingContent) > -1:
            x1 = time.time()
            self.cookie = proxies.request(url=self.base_link, headers=self.headers, output='cookie', use_web_proxy=False, httpsskip=True)
            self.speedtest = time.time() - x1
            self.headers['Cookie'] = self.cookie
            log('SUCCESS', 'testSite', 'HTTP Resp : %s for %s' % (http_res, self.base_link))
            log('SUCCESS', 'testSite', 'Cookie Resp : %s for %s' % (self.cookie, self.base_link))
            return True

        log(
            'FAIL', 'testSite',
            'Validation content Not Found. HTTP Resp : %s for %s' %
            (http_res, self.base_link))

        # Proxy attempt over plain http (some proxies reject https).
        x1 = time.time()
        http_res, content = proxies.request(url=self.base_link.replace('https:', 'http:'), headers=self.headers, output='response', use_web_proxy=True, httpsskip=True)
        self.speedtest = time.time() - x1
        if content is not None and content.find(self.MainPageValidatingContent) > -1:
            self.proxyrequired = True
            x1 = time.time()
            self.cookie = proxies.request(url=self.base_link, headers=self.headers, output='cookie', use_web_proxy=True, httpsskip=True)
            self.speedtest = time.time() - x1
            self.headers['Cookie'] = self.cookie
            log(
                'SUCCESS', 'testSite',
                'HTTP Resp : %s via proxy for %s' % (http_res, self.base_link))
            log(
                'SUCCESS', 'testSite',
                'Cookie Resp : %s for %s' % (self.cookie, self.base_link))
            return True

        time.sleep(2.0)  # brief pause before the final proxied retry
        x1 = time.time()
        http_res, content = proxies.request(url=self.base_link, headers=self.headers, output='response', use_web_proxy=True, httpsskip=True)
        self.speedtest = time.time() - x1
        if content is not None and content.find(self.MainPageValidatingContent) > -1:
            self.proxyrequired = True
            log(
                'SUCCESS', 'testSite',
                'HTTP Resp : %s via proxy for %s' % (http_res, self.base_link))
            return True

        log(
            'FAIL', 'testSite',
            'Validation content Not Found. HTTP Resp : %s via proxy for %s' %
            (http_res, self.base_link))
        return False
    except Exception as e:
        log('ERROR', 'testSite', '%s' % e)
        return False
def getVidToken(self):
    """Derive the video token key from the site's all.js bundle.

    Attempt order: (1) the pre-shared self.PAIRS mapping keyed by the
    all.js cache-buster, (2) three regex probes against the (possibly
    jsunpack-unpacked) bundle, (3) the pastebin fallback — which is
    currently always appended as well ('or True').  Also refreshes
    self.PAIRS and self.FLAGS from their pastebin JSON blobs.
    """
    # Hoisted so the except handler below can always reference it
    # (the original could hit the handler before binding cch).
    cch = ''
    try:
        page_html = proxies.request(self.base_link, use_web_proxy=self.proxyrequired, httpsskip=True)
        try:
            all_js_url = re.findall(r'<script src=\"(https://static1.*?all.js.*?)\"', page_html)[0]
            vid_token_key = all_js_url.split('?')[1]  # cache-buster keys PAIRS
        except Exception:
            all_js_url = self.ALL_JS
            vid_token_key = 'None'
        try:
            token_pairs = proxies.request(self.TOKEN_PAIRS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if token_pairs:
                self.PAIRS = json.loads(token_pairs)
        except Exception as e:
            log('ERROR', 'getVidToken-3.a-Token-Pairs', '%s' % e, dolog=False)
        try:
            fm_flags = proxies.request(self.FLAGS_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if fm_flags:
                self.FLAGS = json.loads(fm_flags)
        except Exception as e:
            log('ERROR', 'getVidToken-3.b-Token-Pairs', '%s' % e, dolog=False)

        all_js_pack_code = proxies.request(all_js_url, use_web_proxy=self.proxyrequired, httpsskip=True)
        unpacked_code = all_js_pack_code
        del self.TOKEN_KEY[:]  # rebuild the candidate list from scratch

        if len(self.PAIRS.keys()) > 0:
            if vid_token_key in self.PAIRS.keys():
                self.TOKEN_KEY.append(self.PAIRS[vid_token_key])
            elif len(self.PAIRS.keys()) > 0:
                self.TOKEN_KEY.append(self.PAIRS["None"])

        try:
            if jsunpack.detect(all_js_pack_code):
                unpacked_code = jsunpack.unpack(all_js_pack_code)
        except Exception:
            pass

        token_key = None
        if len(self.TOKEN_KEY) == 0:
            # Probe 1: token assembled from '+'-joined string parts.
            try:
                parts = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb24gZlwoXClce3JldHVybiguKj8pXH0='), unpacked_code)[0].strip()
                parts_s = parts.split('+')
                val_str = ''
                if len(parts_s) > 0:
                    for p in parts_s:
                        p = re.escape(p)
                        val_str += re.findall(r'%s\=\"(.*?)\",' % p, unpacked_code)[0]
                    token_key = val_str
                else:
                    raise Exception("ALL JS Parts were not found !")
                if token_key:
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1a', '%s' % e, dolog=False)
        if len(self.TOKEN_KEY) == 0:
            # Probe 2: named-argument function signature.
            try:
                cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKHQsaSxuXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gZVwoXCl7cmV0dXJuICguKj8pfWZ1bmN0aW9uIHJcKHRcKQ=='), unpacked_code)[0]
                token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
                if token_key:
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1b', '%s' % e, dolog=False)
        if len(self.TOKEN_KEY) == 0:
            # Probe 3: looser, character-class based signature.
            try:
                cch = re.findall(r'%s' % client.b64decode('ZnVuY3Rpb25cKFthLXpdLFthLXpdLFthLXpdXCl7XCJ1c2Ugc3RyaWN0XCI7ZnVuY3Rpb24gW2Etel1cKFwpe3JldHVybiAoLio/KX1mdW5jdGlvbiBbYS16XVwoW2Etel1cKQ=='), unpacked_code)[0]
                token_key = re.findall(r'%s=.*?\"(.*?)\"' % cch, unpacked_code)[0]
                if token_key:
                    self.TOKEN_KEY.append(token_key)
            except Exception as e:
                log('ERROR', 'getVidToken-1.1c', '%s' % e, dolog=False)
    except Exception as e:
        log('ERROR', 'getVidToken-1', '%s' % e, dolog=False)
        log('ERROR', 'getVidToken-1', '%s' % cch, dolog=False)
    try:
        # NOTE(review): 'or True' makes this unconditional — the pastebin
        # key is always appended as a last-resort candidate.
        if len(self.TOKEN_KEY) == 0 or True:
            token_key = proxies.request(self.TOKEN_KEY_PASTEBIN_URL, use_web_proxy=self.proxyrequired, httpsskip=True)
            if token_key:
                self.TOKEN_KEY.append(token_key)
    except Exception as e:
        log('ERROR', 'getVidToken-2', '%s' % e, dolog=False)
def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_options=None, key=None, testing=False):
    """Resolve playable source links for a title.

    `url` is either a direct page URL or a querystring of metadata
    (title/tvshowtitle/year/season/episode). In the latter case the site
    is searched first and matching result pages are collected into `urls`.
    Each candidate page is scraped for `link_server_*` URLs which are fed
    to resolvers.createMeta.

    Returns a list of source dicts (each containing a 'key' entry);
    empty list on failure. Never raises.

    BUG FIX: the search-failure fallback used `urls == [self.base_link]`
    (a no-op comparison); it now assigns the fallback list.
    """
    try:
        sources = []
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_sources', 'Provider Disabled by User')
            log('INFO', 'get_sources', 'Completed')
            return sources
        if url == None:
            log('FAIL', 'get_sources', 'url == None. Could not find a matching title: %s' % cleantitle.title_from_key(key), dolog=not testing)
            log('INFO', 'get_sources', 'Completed')
            return sources
        urls = []
        vidtype = 'Movie'
        if not str(url).startswith('http'):
            # url is metadata (querystring) — search the site for matching pages.
            try:
                data = urlparse.parse_qs(url)
                data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
                title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
                if 'year' in data:
                    year = data['year']
                if 'season' in data:
                    query = {'keyword': '%s %s %s' % (title, 'season', data['season'])}
                else:
                    query = {'keyword': title}
                search_url = urlparse.urljoin(self.base_link, '/search.html')
                search_url = search_url + '?' + urllib.urlencode(query)
                result = proxies.request(search_url, headers=self.headers, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True)
                r = client.parseDOM(result, 'div', attrs={'class': 'wrapper'})
                try:
                    # The second 'wrapper' div holds the result grid.
                    r = r[1]
                except:
                    raise Exception()
                # Collect (href, title) pairs from each result figure.
                r1 = client.parseDOM(r, 'figure')
                r2 = []
                for res in r1:
                    l = client.parseDOM(res, 'a', ret='href')[0]
                    t = client.parseDOM(res, 'div', attrs={'class': 'title'})[0]
                    r = (l, t)
                    r2.append(r)
                r = r2
                if 'season' in data:
                    vidtype = 'Show'
                    episode = int(data['episode'])
                    # Strip "(...)" annotations, then split "Title N" into
                    # (href, title, season-number) and filter on both.
                    r = [(i[0], re.sub(' \(\w*\)', '', i[1])) for i in r]
                    url = [(i[0], re.findall('(.+?) (\d+)$', i[1])) for i in r]
                    url = [(i[0], i[1][0][0], i[1][0][1]) for i in url if len(i[1]) > 0]
                    url = [i for i in url if cleantitle.get(title) in cleantitle.get(i[1])]
                    url = [i for i in url if '%01d' % int(data['season']) == '%01d' % int(i[2])]
                    ep_url = []
                    for i in url:
                        result = proxies.request(urlparse.urljoin(self.base_link, i[0]), headers=self.headers, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True)
                        t = client.parseDOM(result, 'div', attrs={'class': 'eps'})
                        for tt in t:
                            if 'watch' in tt:
                                tt = client.parseDOM(tt, 'div', attrs={'class': 'server'})[0]
                                section_links = client.parseDOM(tt, 'a', ret='href')
                                for a_link in section_links:
                                    # Episode slugs are zero-padded to 2 digits (<100) or 3.
                                    if episode < 100:
                                        f_key = '-episode-%02d-' % episode
                                    else:
                                        f_key = '-episode-%03d-' % episode
                                    if f_key in a_link:
                                        log('INFO', 'get_sources', 'episode url = %s' % a_link)
                                        ep_url.append(a_link)
                                        break
                    for i in ep_url:
                        urls.append(urlparse.urljoin(self.base_link, i))
                else:
                    for i in r:
                        if cleantitle.get(title) in cleantitle.get(i[1]):
                            urls.append(urlparse.urljoin(self.base_link, i[0]))
            except:
                # FIX: was `urls == [self.base_link]` (comparison, no effect);
                # the intent is clearly to fall back to the site root.
                urls = [self.base_link]
        links_m = []
        page = None
        for url in urls:
            try:
                log('INFO', 'get_sources', 'url == %s' % url, dolog=False, doPrint=True)
                page_url = url
                page = result = proxies.request(url, headers=self.headers, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True)
                quality = '480p'
                type = 'BRRIP'
                atr = ''
                qtr = ''
                try:
                    # Site historically misspells the class as 'quanlity'.
                    qtr = client.parseDOM(result, 'span', attrs={'class': 'quanlity'})[0]
                    # q, t = cleantitle.getQuality(atr)
                    # if q != None:
                    #     quality = q
                    #     type = t
                except:
                    try:
                        qtr = client.parseDOM(result, 'span', attrs={'class': 'quality'})[0]
                    except:
                        pass
                try:
                    quality = source_utils.check_sd_url(qtr)
                    type = source_utils.check_sd_url_rip(qtr)
                except Exception as e:
                    quality = '480p'
                    type = 'BRRIP'
                try:
                    atr = client.parseDOM(result, 'span', attrs={'class': 'year'})[0]
                except:
                    atr = ''
                try:
                    atr_release = client.parseDOM(result, 'div', attrs={'class': 'meta'})[1]
                except:
                    atr_release = ''
                # NOTE(review): `data`/`year` only exist when the search branch
                # ran (non-http url); direct-url calls would NameError here and
                # be swallowed by the per-url except — pre-existing behavior.
                if 'season' in data:
                    vidtype = 'Show'
                    pass
                else:
                    vidtype = 'Movie'
                    # Reject pages whose year matches neither badge nor meta.
                    resultx = result if str(int(year)) in atr else None
                    if resultx == None:
                        resultx = result if str(int(year)) in atr_release else None
                    if resultx == None:
                        raise Exception()
                try:
                    poster = client.parseDOM(page, 'div', attrs={'class': 'detail-l'})[0]
                    poster = client.parseDOM(poster, 'img', ret='src')[0]
                    if 'http' not in poster:
                        poster = 'http:' + poster
                except:
                    poster = None
                #print result
                #r = client.parseDOM(result, 'article', attrs = {'class': 'player current'})[0]
                #r = client.parseDOM(r, 'iframe', ret='src')[0]
                #r = r.split('?')
                try:
                    # Pass 1: follow each link_server_* page and pull its iframe src.
                    servers = re.findall(r'link_server_.*\"(.*)\";', page)
                    servers = list(set(servers))
                    for server in servers:
                        try:
                            if 'http' not in server:
                                server = 'http:' + server
                            result = proxies.request(server, headers=self.headers, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, httpsskip=True)
                            server = client.parseDOM(result, 'iframe', ret='src')[0]
                            if len(server) > 0:
                                if 'http' not in server:
                                    server = 'http:' + server
                                l = resolvers.createMeta(server, self.name, self.logo, quality, [], key, poster=poster, riptype=type, vidtype=vidtype, testing=testing, page_url=page_url)
                                for ll in l:
                                    if ll != None and 'key' in ll.keys():
                                        links_m.append(ll)
                        except Exception as e:
                            pass
                        if testing and len(links_m) > 0:
                            break
                except Exception as e:
                    pass
                try:
                    # Pass 2: also try the raw link_server_* URLs directly.
                    servers = re.findall(r'link_server_.*\"(.*)\";', page)
                    servers = list(set(servers))
                    for server in servers:
                        if server != None:
                            if 'http' not in server:
                                server = 'http:' + server
                            try:
                                l = resolvers.createMeta(server, self.name, self.logo, quality, [], key, poster=poster, riptype=type, vidtype=vidtype, testing=testing, page_url=page_url)
                                for ll in l:
                                    if ll != None and 'key' in ll.keys():
                                        links_m.append(ll)
                            except:
                                pass
                except:
                    pass
                # Only the first successfully-scraped page is used.
                break
            except:
                pass
        for link in links_m:
            if link != None and 'key' in link.keys():
                sources.append(link)
        if len(sources) == 0:
            log('FAIL', 'get_sources', 'Could not find a matching title: %s' % cleantitle.title_from_key(key))
        else:
            log('SUCCESS', 'get_sources', '%s sources : %s' % (cleantitle.title_from_key(key), len(sources)))
        log('INFO', 'get_sources', 'Completed')
        return sources
    except Exception as e:
        log('ERROR', 'get_sources', '%s' % e)
        log('INFO', 'get_sources', 'Completed')
        return sources
def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_options=None, key=None, testing=False):
    """Resolve playable sources for a title page.

    `url` is either a plain URL string or a (page_url, episode) pair.
    The page's numeric media id is extracted from the URL slug, the
    site's server-list AJAX endpoint is queried, and each server entry
    is de-obfuscated (uncensored1/2/3) to reach the playlist JSON.
    Trailers (YouTube links found on the page) are added first unless
    `testing` is set.

    Returns a list of source dicts; empty list on failure. Never raises.
    """
    #try:
    try:
        sources = []
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_sources', 'Provider Disabled by User')
            return sources
        if url == None:
            log('FAIL', 'get_sources', 'url == None. Could not find a matching title: %s' % cleantitle.title_from_key(key), dolog=not testing)
            return sources
        base_link = self.base_link
        # `url` may be a (page_url, episode) tuple or a bare string; the
        # media id is the last "-<digits>" group in the page URL slug.
        try:
            if url[0].startswith('http'):
                base_link = url[0]
            mid = re.findall('-(\d+)', url[0])[-1]
        except:
            if url.startswith('http'):
                base_link = url
            mid = re.findall('-(\d+)', url)[-1]
        try:
            if len(url[1]) > 0:
                episode = url[1]
            else:
                episode = None
        except:
            episode = None
        #print mid
        links_m = []
        trailers = []
        headers = {'Referer': self.base_link}
        u = urlparse.urljoin(self.base_link, url[0])
        #print u
        #r = client.request(u, headers=headers, IPv4=True)
        r = proxies.request(u, headers=headers, IPv4=True, proxy_options=proxy_options, use_web_proxy=self.proxyrequired)
        # Quality/riptype from the page badge; default to 480p BRRIP.
        try:
            elem = client.parseDOM(r, 'span', attrs={'class': 'quality'})[0]
            qual = source_utils.check_sd_url(elem)
            riptype = source_utils.check_sd_url_rip(elem)
        except Exception as e:
            qual = '480p'
            riptype = 'BRRIP'
        try:
            poster = client.parseDOM(r, 'div', attrs={'class': 'dm-thumb'})[0]
            poster = client.parseDOM(poster, 'img', ret='src')[0]
        except:
            poster = None
        if testing == False:
            # Harvest YouTube trailer links from the raw page HTML.
            try:
                #regex = r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"
                #matches = re.finditer(regex, r, re.MULTILINE)
                matches = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+').findall(r)
                for match in matches:
                    try:
                        #print match
                        if 'youtube.com' in match:
                            match = match.replace('embed/', 'watch?v=')
                            trailers.append(match)
                    except:
                        pass
            except Exception as e:
                pass
            for trailer in trailers:
                links_m = resolvers.createMeta(trailer, self.name, self.logo, '720p', links_m, key, vidtype='Trailer', testing=testing)
        try:
            # Server list comes back as JSON-wrapped HTML from an XHR endpoint.
            u = urlparse.urljoin(self.base_link, self.server_link % mid)
            #print u
            #r = client.request(u, headers=headers, XHR=True, IPv4=True)
            r = proxies.request(u, headers=headers, XHR=True, IPv4=True, proxy_options=proxy_options, use_web_proxy=self.proxyrequired)
            r = json.loads(r)['html']
            r = client.parseDOM(r, 'div', attrs={'class': 'pas-list'})
            ids = client.parseDOM(r, 'li', ret='data-id')
            servers = client.parseDOM(r, 'li', ret='data-server')
            labels = client.parseDOM(r, 'a', ret='title')
            r = zip(ids, servers, labels)
            for eid in r:
                #print r
                try:
                    sub_url = None
                    # Episode number from the entry label; 0 means "movie".
                    try:
                        ep = re.findall('episode.*?(\d+):.*?', eid[2].lower())[0]
                    except:
                        ep = 0
                    if (episode is None) or (int(ep) == int(episode)):
                        url = urlparse.urljoin(self.base_link, self.token_link % (eid[0], mid))
                        #script = client.request(url, IPv4=True)
                        script = proxies.request(url, IPv4=True, proxy_options=proxy_options, use_web_proxy=self.proxyrequired)
                        #print script
                        # Pick the de-obfuscation routine matching the token
                        # script's format; each yields the x/y url params.
                        if '$_$' in script:
                            params = self.uncensored1(script)
                        elif script.startswith('[]') and script.endswith('()'):
                            params = self.uncensored2(script)
                        elif '_x=' in script and '_y=' in script:
                            params = self.uncensored3(script)
                        else:
                            raise Exception()
                        u = urlparse.urljoin(self.base_link, self.sourcelink % (eid[0], params['x'], params['y']))
                        #print u
                        #r = client.request(u, IPv4=True)
                        r = proxies.request(u, IPv4=True, proxy_options=proxy_options, use_web_proxy=self.proxyrequired)
                        # Fall back to the embed endpoint when the source
                        # endpoint returns nothing.
                        if r == None or len(r) == 0:
                            u = urlparse.urljoin(self.base_link, self.embed_link % (eid[0]))
                            #print u
                            #r = client.request(u, IPv4=True)
                            r = proxies.request(u, IPv4=True, proxy_options=proxy_options, use_web_proxy=self.proxyrequired)
                        # Playlist JSON: either jwplayer-style 'playlist' or a
                        # bare 'src'; normalize to a list of file URLs.
                        try:
                            url = json.loads(r)['playlist'][0]['sources']
                        except:
                            url = [{'file': json.loads(r)['src']}]
                        try:
                            url = [i['file'] for i in url]
                        except:
                            url = [url['file']]
                        try:
                            sub_url = json.loads(r)['playlist'][0]['tracks'][0]['file']
                        except:
                            pass
                        vidtype = 'Movie'
                        if int(ep) > 0:
                            vidtype = 'Show'
                        for s in url:
                            links_m = resolvers.createMeta(s, self.name, self.logo, qual, links_m, key, poster=poster, riptype=riptype, vidtype=vidtype, sub_url=sub_url, testing=testing)
                except:
                    pass
        except:
            pass
        sources += [l for l in links_m]
        if len(sources) == 0:
            log('FAIL', 'get_sources', 'Could not find a matching title: %s' % cleantitle.title_from_key(key))
            return sources
        log('SUCCESS', 'get_sources', '%s sources : %s' % (cleantitle.title_from_key(key), len(sources)), dolog=not testing)
        return sources
    except Exception as e:
        log('ERROR', 'get_sources', '%s' % e, dolog=not testing)
        return sources
def get_movie(self, imdb, title, year, proxy_options=None, key=None, testing=False):
    """Search the site for a movie matching `title`/`year`.

    Pages through search results (up to 100 pages, bounded by the
    scraped pagination max), verifies candidates by year, and extracts
    the player file/label/src entries from the matching page.

    Returns a list of link-data dicts on a match, otherwise None.
    Note: bare `raise` is used as a loop-escape hack — it lands in the
    outer except, which logs and returns None.
    """
    try:
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_movie', 'Provider Disabled by User')
            return None
        headers = {'Referer': self.base_link, 'User-Agent': self.user_agent}
        # `max` (shadows the builtin) holds the last pagination page index.
        max = None
        # 3D markers are not part of the site's titles.
        title = title.replace('(3D)', '').strip()
        title = title.replace('3D', '').strip()
        for pg in range(100):
            query_url = urlparse.urljoin(self.base_link, self.search_link) % (pg, urllib.quote_plus(cleantitle.query(title)))
            if max != None and int(pg) >= int(max):
                # Past the last results page — bail out via the outer except.
                raise
            log(type='INFO', method='get_movie', err='Searching - %s' % (query_url), dolog=False, logToControl=False, doPrint=True)
            result = proxies.request(query_url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, headers=headers, timeout=60)
            if max == None:
                # Scrape the pagination links once to learn the page count.
                try:
                    max1 = client.parseDOM(result, 'a', attrs={'class': 'page gradient'})
                    max = int(max1[len(max1) - 1]) - 1
                except:
                    pass
            url_data = client.parseDOM(result, 'div', attrs={'class': 'ajuste4'})
            #print url_data
            if len(url_data) == 0:
                # No results on this page — bail out via the outer except.
                raise
            links_data = []
            for data in url_data:
                data = client.parseDOM(data, 'div', attrs={'class': 'view'})[0]
                url = urlparse.urljoin(self.base_link, client.parseDOM(data, 'a', ret='href')[0])
                titlex = client.parseDOM(data, 'img', ret='alt')[0]
                try:
                    poster = urlparse.urljoin(self.base_link, client.parseDOM(data, 'img', ret='src')[0])
                except:
                    poster = None
                # Loose title match in either direction, plus fuzzy helper.
                if title in titlex or titlex in title or lose_match_title(title, titlex):
                    url = url.replace(' ', '%20')
                    log(type='INFO', method='get_movie', err='Verifying - %s' % url, dolog=False, logToControl=False, doPrint=True)
                    result = proxies.request(url, proxy_options=proxy_options, use_web_proxy=self.proxyrequired, headers=headers, timeout=60)
                    # Year is confirmed against the 'rating' block text.
                    ex_title = client.parseDOM(result, 'div', attrs={'class': 'rating'})[0]
                    if year in ex_title:
                        #print result
                        # Player config appears as unquoted JS object literals;
                        # pair outdate.php entries with the src/type entries.
                        all_links = re.findall(r'outdate\.php.*\n.*({.*file.*:.*})', result)
                        all_srcs = re.findall(r'{.*file.*:.*type.*}', result)
                        try:
                            srt = re.findall(r'\"(.*srt.*)\"', result)[0]
                            srt = urlparse.urljoin(self.base_link, srt)
                        except:
                            srt = None
                        for sn in range(len(all_links)):
                            # Quote the bare JS keys so json.loads accepts them.
                            datax2 = all_links[sn].replace('file', '\'file\'').replace('\'', '"')
                            datax1 = all_srcs[sn].replace('file', '"file"').replace('label', '"label"').replace('type', '"type"')
                            data_j1 = json.loads(datax1)
                            file = data_j1['file']
                            label = data_j1['label']
                            data_j2 = json.loads(datax2)
                            src_file = data_j2['file']
                            link_data = {'file': file, 'title': titlex, 'label': label, 'page': url, 'srt': srt, 'src_file': src_file, 'poster': poster}
                            links_data.append(link_data)
                        return links_data
        return
    except Exception as e:
        log('ERROR', 'get_movie', '%s: %s' % (title, e), dolog=self.init)
        return
def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_options=None, key=None, testing=False):
    """Resolve playable sources for a (page_url, episode) pair.

    Fetches the page, optionally harvests YouTube trailer links (skipped
    when `testing`), then collects `player-data` anchors — filtered by
    episode when one is given — and hands each to resolvers.createMeta
    with the scraped quality/riptype/poster.

    Returns a list of source dicts; empty list on failure. Never raises.

    CLEANUP: removed the dead local `headers` (it was assigned twice and
    never used — the request passes self.headers directly).
    """
    try:
        sources = []
        if control.setting('Provider-%s' % name) == False:
            log('INFO', 'get_sources', 'Provider Disabled by User')
            return sources
        if url == None:
            log('FAIL', 'get_sources', 'url == None. Could not find a matching title: %s' % cleantitle.title_from_key(key), dolog=not testing)
            return sources
        links_m = []
        trailers = []
        sub_url = None
        u = url[0]
        ep = url[1]
        #r = client.request(u, headers=headers IPv4=True)
        r = proxies.request(u, headers=self.headers, IPv4=True, proxy_options=proxy_options, use_web_proxy=self.proxyrequired)
        if testing == False:
            # Harvest YouTube trailer links from the raw page HTML.
            try:
                #regex = r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"
                #matches = re.finditer(regex, r, re.MULTILINE)
                matches = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+').findall(r)
                for match in matches:
                    try:
                        #print match
                        if 'youtube.com' in match:
                            match = match.replace('embed/', 'watch?v=')
                            trailers.append(match)
                    except:
                        pass
            except Exception as e:
                pass
            for trailer in trailers:
                links_m = resolvers.createMeta(trailer, self.name, self.logo, '720p', links_m, key, vidtype='Trailer', testing=testing)
        try:
            # Player sources live in `player-data` attributes; when an
            # episode is requested, filter anchors by `episode-data`.
            if ep == None:
                srcs = client.parseDOM(r, 'a', ret='player-data')
            else:
                srcs = client.parseDOM(r, 'a', ret='player-data', attrs={'episode-data': str(ep)})
            # Quality/riptype from the page badge; default to 480p BRRIP.
            try:
                elem = client.parseDOM(r, 'span', attrs={'class': 'quality'})[0]
                qual = source_utils.check_sd_url(elem)
                riptype = source_utils.check_sd_url_rip(elem)
            except Exception as e:
                qual = '480p'
                riptype = 'BRRIP'
            try:
                poster = client.parseDOM(r, 'div', attrs={'class': 'dm-thumb'})[0]
                poster = client.parseDOM(poster, 'img', ret='src')[0]
            except:
                poster = None
            for s in srcs:
                try:
                    # Protocol-relative URLs need an explicit scheme.
                    if s.startswith('//'):
                        s = 'https:%s' % s
                    links_m = resolvers.createMeta(s, self.name, self.logo, qual, links_m, key, poster=poster, riptype=riptype, vidtype='Movie', sub_url=sub_url, testing=testing)
                    if testing == True and len(links_m) > 0:
                        break
                except:
                    pass
        except:
            pass
        sources += [l for l in links_m]
        if len(sources) == 0:
            log('FAIL', 'get_sources', 'Could not find a matching title: %s' % cleantitle.title_from_key(key))
            return sources
        log('SUCCESS', 'get_sources', '%s sources : %s' % (cleantitle.title_from_key(key), len(sources)), dolog=not testing)
        return sources
    except Exception as e:
        log('ERROR', 'get_sources', '%s' % e, dolog=not testing)
        return sources