def movie(self, imdb, title, localtitle, aliases, year):
    try:
        url = {'imdb': imdb, 'title': title, 'year': year}
        url = urllib.urlencode(url)
        return url
    except:
        failure = traceback.format_exc()
        log_utils.log('UltraHD - Exception: \n' + str(failure))
        return
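# Usage sketch (hedged): the scraper framework, not this module, supplies the
# arguments; the values below are illustrative only. Key order in the encoded
# string may vary, since urlencode walks a plain dict.
#
#   url = scraper.movie('tt0816692', 'Interstellar', 'Interstellar', [], '2014')
#   # e.g. 'imdb=tt0816692&title=Interstellar&year=2014'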
def resolver(url, debrid):
    try:
        # Pick the configured resolver whose name matches the requested
        # debrid service (raises IndexError into the except if none match).
        debrid_resolver = [r for r in debrid_resolvers if r.name == debrid][0]
        debrid_resolver.login()
        _host, _media_id = debrid_resolver.get_host_and_id(url)
        stream_url = debrid_resolver.get_media_url(_host, _media_id)
        return stream_url
    except Exception as e:
        log_utils.log('%s Resolve Failure: %s' % (debrid, e), log_utils.LOGWARNING)
        return None
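# Usage sketch (hedged): assumes debrid_resolvers was populated elsewhere in
# this module (e.g. from the resolveurl add-on); the service name must match
# a configured resolver exactly. The URL and 'Real-Debrid' are illustrative.
#
#   stream_url = resolver('http://somehost.com/abc123', 'Real-Debrid')
#   if stream_url:
#       pass  # hand the direct link to the player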
def __getTrakt(url, post=None):
    try:
        url = urlparse.urljoin(BASE_URL, url)
        post = json.dumps(post) if post else None
        headers = {'Content-Type': 'application/json', 'trakt-api-key': V2_API_KEY, 'trakt-api-version': 2}

        if getTraktCredentialsInfo():
            headers.update({'Authorization': 'Bearer %s' % control.setting('trakt.token')})

        result = client.request(url, post=post, headers=headers, output='extended', error=True)
        resp_code = result[1]
        resp_header = result[2]
        result = result[0]

        if resp_code in ['500', '502', '503', '504', '520', '521', '522', '524']:
            log_utils.log('Temporary Trakt Error: %s' % resp_code, log_utils.LOGWARNING)
            return
        elif resp_code in ['404']:
            log_utils.log('Object Not Found: %s' % resp_code, log_utils.LOGWARNING)
            return
        # elif resp_code in ['429']:
        #     log_utils.log('Trakt Rate Limit Reached: %s' % resp_code, log_utils.LOGWARNING)
        #     return

        if resp_code not in ['401', '405']:
            return result, resp_header

        # 401/405: the OAuth token expired or was revoked. Refresh it,
        # persist the new pair, and retry the original request once.
        oauth = urlparse.urljoin(BASE_URL, '/oauth/token')
        opost = {'client_id': V2_API_KEY, 'client_secret': CLIENT_SECRET,
                 'redirect_uri': REDIRECT_URI, 'grant_type': 'refresh_token',
                 'refresh_token': control.setting('trakt.refresh')}

        result = client.request(oauth, post=json.dumps(opost), headers=headers)
        result = utils.json_loads_as_str(result)

        token, refresh = result['access_token'], result['refresh_token']

        control.setSetting(id='trakt.token', value=token)
        control.setSetting(id='trakt.refresh', value=refresh)

        headers['Authorization'] = 'Bearer %s' % token

        result = client.request(url, post=post, headers=headers, output='extended', error=True)
        return result[0], result[2]
    except Exception as e:
        log_utils.log('Unknown Trakt Error: %s' % e, log_utils.LOGWARNING)
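# Usage sketch (hedged): __getTrakt returns (body, headers) on success and
# None on temporary or 404 errors. '/movies/trending' is a real Trakt v2
# route, but the surrounding handling is illustrative.
#
#   result = __getTrakt('/movies/trending')
#   if result:
#       trending = utils.json_loads_as_str(result[0])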
def sources(self, url, hostDict, hostprDict):
    try:
        sources = []

        if url is None:
            return sources

        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

        title = data['title'].replace(':', '').lower()
        year = data['year']

        query = '%s %s' % (data['title'], data['year'])
        query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

        url = urlparse.urljoin(self.base_link, self.post_link)
        post = 'do=search&subaction=search&search_start=0&full_search=0&result_from=1&story=%s' % urllib.quote_plus(query)

        r = client.request(url, post=post)
        r = client.parseDOM(r, 'div', attrs={'class': 'box-out margin'})
        r = [(dom_parser2.parse_dom(i, 'div', attrs={'class': 'news-title'})) for i in r if data['imdb'] in i]
        r = [(dom_parser2.parse_dom(i[0], 'a', req='href')) for i in r if i]
        r = [(i[0].attrs['href'], i[0].content) for i in r if i]

        hostDict = hostprDict + hostDict

        for item in r:
            try:
                name = item[1]

                # Skip results whose bracketed year does not match the query.
                y = re.findall('\((\d{4})\)', name)[0]
                if not y == year:
                    raise Exception()

                s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', name)
                s = s[0] if s else '0'

                # Fetch the detail page and pull (release name, link) pairs.
                html = client.request(item[0])
                html = dom_parser2.parse_dom(html, 'div', attrs={'id': 'r-content'})
                links = re.findall('\s*<b><a href=.+?>(.+?)</b>.+?<u><b><a href="(.+?)".+?</a></b></u>', html[0].content, re.DOTALL)

                u = [(i[0], i[1], s) for i in links if i]

                for name, url, size in u:
                    try:
                        # Test each quality tag for membership in the release
                        # name (not each character of the name against the list).
                        if '4K' in name:
                            quality = '4K'
                        elif '1080p' in name:
                            quality = '1080p'
                        elif '720p' in name:
                            quality = '720p'
                        elif any(i in name for i in ['dvdscr', 'r5', 'r6']):
                            quality = 'SCR'
                        elif any(i in name for i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts']):
                            quality = 'CAM'
                        else:
                            quality = '720p'

                        info = []

                        if '3D' in name or '.3D.' in url:
                            info.append('3D')
                            quality = '1080p'
                        if any(i in name for i in ['hevc', 'h265', 'x265']):
                            info.append('HEVC')

                        try:
                            # Normalise the size to GB; treat a decimal comma
                            # as a decimal point before converting.
                            size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', size)[-1]
                            div = 1 if size.endswith(('Gb', 'GiB', 'GB')) else 1024
                            size = float(re.sub('[^0-9.]', '', size.replace(',', '.'))) / div
                            size = '%.2f GB' % size
                            info.append(size)
                        except:
                            pass

                        info = ' | '.join(info)

                        url = client.replaceHTMLCodes(url)
                        url = url.encode('utf-8')

                        if any(x in url for x in ['.rar', '.zip', '.iso', 'turk']):
                            continue

                        if 'ftp' in url:
                            host = 'COV'
                            direct = True
                        else:
                            direct = False
                            host = 'turbobit.net'
                        # if not host in hostDict: continue

                        host = client.replaceHTMLCodes(host)
                        host = host.encode('utf-8')

                        sources.append({'source': host, 'quality': quality, 'language': 'en',
                                        'url': url, 'info': info, 'direct': direct, 'debridonly': False})
                    except:
                        pass
            except:
                pass

        return sources
    except:
        failure = traceback.format_exc()
        log_utils.log('UltraHD - Exception: \n' + str(failure))
        return sources
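# Shape of each appended source dict (values illustrative):
#
#   {'source': 'turbobit.net', 'quality': '1080p', 'language': 'en',
#    'url': 'http://...', 'info': 'HEVC | 2.10 GB',
#    'direct': False, 'debridonly': False}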
def request(url, close=True, redirect=True, error=False, proxy=None, post=None,
            headers=None, mobile=False, XHR=False, limit=None, referer=None,
            cookie=None, compression=True, output='', timeout='30'):
    try:
        if not url:
            return

        handlers = []

        if proxy is not None:
            handlers += [urllib2.ProxyHandler({'http': '%s' % proxy}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            # These Python versions break SSL verification for some hosts;
            # fall back to an unverified context.
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)
            except:
                pass

        if url.startswith('//'):
            url = 'http:' + url

        _headers = {}
        try:
            _headers.update(headers)
        except:
            pass

        if 'User-Agent' in _headers:
            pass
        elif not mobile == True:
            # headers['User-Agent'] = agent()
            _headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            _headers['User-Agent'] = 'Apple-iPhone/701.341'

        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer

        if 'Accept-Language' not in _headers:
            _headers['Accept-Language'] = 'en-US'

        if 'X-Requested-With' in _headers:
            pass
        elif XHR == True:
            _headers['X-Requested-With'] = 'XMLHttpRequest'

        if 'Cookie' in _headers:
            pass
        elif cookie is not None:
            _headers['Cookie'] = cookie

        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'

        if redirect == False:
            # old implementation
            # class NoRedirection(urllib2.HTTPErrorProcessor):
            #     def http_response(self, request, response): return response
            # opener = urllib2.build_opener(NoRedirection)
            # opener = urllib2.install_opener(opener)

            # Return the 3xx response itself instead of following it.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            try:
                del _headers['Referer']
            except:
                pass

        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)

        url = utils.byteify(url)

        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            if response.code == 503:
                cf_result = response.read(5242880)
                try:
                    encoding = response.info().getheader('Content-Encoding')
                except:
                    encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()

                if 'cf-browser-verification' in cf_result:
                    # Cloudflare browser check: solve it once, cache the
                    # clearance cookie, then retry the original request.
                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
                    if not netloc.endswith('/'):
                        netloc += '/'
                    ua = _headers['User-Agent']
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    _headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post)
                    _add_request_header(request, _headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                else:
                    log_utils.log('Request-Error (%s): %s' % (str(response.code), url), 'DEBUG')
                    if error == False:
                        return
            else:
                log_utils.log('Request-Error (%s): %s' % (str(response.code), url), 'DEBUG')
                if error == False:
                    return

        if output == 'cookie':
            try:
                result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                result = cf
            except:
                pass
            if close == True:
                response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close == True:
                response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close == True:
                response.close()
            return result

        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)
            if close == True:
                response.close()
            return result

        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = 0  # keep the return type numeric when the header is missing
            response.close()
            return content

        if limit == '0':
            result = response.read(224 * 1024)
        elif limit is not None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)

        try:
            encoding = response.info().getheader('Content-Encoding')
        except:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        if 'sucuri_cloudproxy_js' in result:
            # Sucuri cloud proxy: compute the JS-set cookie and retry.
            su = sucuri().get(result)
            _headers['Cookie'] = su

            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)
            response = urllib2.urlopen(request, timeout=int(timeout))

            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try:
                encoding = response.info().getheader('Content-Encoding')
            except:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        if 'Blazingfast.io' in result and 'xhr.open' in result:
            # BlazingFast anti-bot page: fetch the challenge cookie and redo
            # the request with the basic helper.
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
            ua = _headers['User-Agent']
            _headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
            result = _basic_request(url, headers=_headers, post=post, timeout=timeout, limit=limit)

        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1]) for item in response.info().items()])
            except:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            if close == True:
                response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close == True:
                response.close()
            return result
    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url), 'DEBUG')
        return
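# Usage sketches (hedged; URLs illustrative). The output parameter selects
# what request() returns:
#
#   html = request('http://example.com')                     # decoded body (default)
#   final = request('http://example.com', output='geturl')   # post-redirect URL
#   ck = request('http://example.com', output='cookie')      # 'name=value; ...' string
#   body, code, hdrs, sent, ck = request('http://example.com', output='extended')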