def __getlinks(self, e, h, url, key):
    """Resolve the encrypted stream links for an episode.

    Performs a GET on the plain page (``url`` without the trailing
    '/stream') to scrape the CSRF token from the <meta> tag, POSTs the
    episode parameters to the '/stream' endpoint as an XHR, then runs
    the returned payload through two chained CryptoJS-style AES
    decryption rounds keyed off the CSRF token.

    :param e: episode identifier sent to the endpoint
    :param h: hoster/hash identifier sent to the endpoint
    :param url: base page URL (without '/stream')
    :param key: solved reCAPTCHA token
    :return: decoded link data, or None on any failure
    """
    try:
        url = url + '/stream'
        # First request: fetch the plain page (strip the 7-char '/stream'
        # suffix again) only to read the csrf-token <meta> tag.
        oRequest = cRequestHandler(url[:-7])
        oRequest.removeBreakLines(False)
        oRequest.removeNewLines(False)
        r = oRequest.request()
        csrf = dom_parser.parse_dom(r, "meta",
                                    attrs={"name": "csrf-token"})[0].attrs["content"]
        # Second request: XHR POST carrying the episode parameters.
        # (The previous revision also built an unused `params` dict here;
        # the parameters are added individually via addParameters.)
        oRequest = cRequestHandler(url)
        oRequest.removeBreakLines(False)
        oRequest.removeNewLines(False)
        oRequest.addHeaderEntry('X-CSRF-TOKEN', csrf)
        oRequest.addHeaderEntry('X-Requested-With', 'XMLHttpRequest')
        oRequest.addParameters('e', e)
        oRequest.addParameters('h', h)
        oRequest.addParameters('lang', 'de')
        oRequest.addParameters('q', '')
        oRequest.addParameters('grecaptcha', key)
        oRequest.setRequestType(1)  # POST
        sHtmlContent = oRequest.request()
        mainData = utils.byteify(json.loads(sHtmlContent))
        # The base64 payload is split across several response fields.
        tmp = mainData.get('d', '') + mainData.get('c', '') + mainData.get(
            'iv', '') + mainData.get('f', '') + mainData.get(
                'h', '') + mainData.get('b', '')
        tmp = utils.byteify(json.loads(base64.b64decode(tmp)))
        # Decryption round 1: key = base64 of the reversed CSRF token.
        salt = unhexlify(tmp['s'])
        ciphertext = base64.b64decode(tmp['ct'][::-1])
        b = base64.b64encode(csrf[::-1])
        tmp = utils.cryptoJS_AES_decrypt(ciphertext, b, salt)
        tmp = utils.byteify(json.loads(base64.b64decode(tmp)))
        # Decryption round 2: key = every second CSRF character taken
        # from the end, plus a trailing '1'/'0' flag from mainData['e'].
        ciphertext = base64.b64decode(tmp['ct'][::-1])
        salt = unhexlify(tmp['s'])
        b = ''
        a = csrf
        for idx in range(len(a) - 1, 0, -2):
            b += a[idx]
        if mainData.get('e', None):
            b += '1'
        else:
            b += '0'
        tmp = utils.cryptoJS_AES_decrypt(ciphertext, str(b), salt)
        return utils.byteify(json.loads(tmp))
    except Exception:
        return
def __getlinks(self, e, h, url, key):
    """Resolve the encrypted stream links for an episode (cloudscraper variant).

    GETs the plain page to scrape the CSRF token, POSTs the episode
    parameters to the '/stream' endpoint as an XHR, then runs the payload
    through two chained CryptoJS-style AES decryption rounds keyed off
    the CSRF token.

    :param e: episode identifier sent to the endpoint
    :param h: hoster/hash identifier sent to the endpoint
    :param url: base page URL (without '/stream')
    :param key: solved reCAPTCHA token
    :return: decoded link data, or None on any failure
    """
    try:
        url = url + '/stream'
        params = {'e': e, 'h': h, 'lang': 'de', 'q': '', 'grecaptcha': key}
        # GET the plain page (strip the 7-char '/stream' suffix) just to
        # read the csrf-token <meta> tag.
        r = self.scraper.get(url[:-7])
        csrf = dom_parser.parse_dom(r.content, "meta",
                                    attrs={"name": "csrf-token"})[0].attrs["content"]
        # XHR POST of the episode parameters to the /stream endpoint.
        sHtmlContent = self.scraper.post(url,
                                         headers={'X-CSRF-TOKEN': csrf,
                                                  'X-Requested-With': 'XMLHttpRequest'},
                                         data=params).content
        helper = json.loads(sHtmlContent)
        mainData = utils.byteify(helper)
        # The base64 payload is split across several response fields.
        tmp = mainData.get('d', '') + mainData.get('c', '') + mainData.get(
            'iv', '') + mainData.get('f', '') + mainData.get(
                'h', '') + mainData.get('b', '')
        tmp = utils.byteify(json.loads(base64.b64decode(tmp)))
        # Decryption round 1: key = base64 of the reversed CSRF token.
        salt = unhexlify(tmp['s'])
        ciphertext = base64.b64decode(tmp['ct'][::-1])
        b = base64.b64encode(csrf[::-1])
        tmp = utils.cryptoJS_AES_decrypt(ciphertext, b, salt)
        tmp = utils.byteify(json.loads(base64.b64decode(tmp)))
        # Decryption round 2: key = every second CSRF character taken
        # from the end, plus a trailing '1'/'0' flag from mainData['e'].
        ciphertext = base64.b64decode(tmp['ct'][::-1])
        salt = unhexlify(tmp['s'])
        b = ''
        a = csrf
        for idx in range(len(a) - 1, 0, -2):
            b += a[idx]
        if mainData.get('e', None):
            b += '1'
        else:
            b += '0'
        tmp = utils.cryptoJS_AES_decrypt(ciphertext, str(b), salt)
        return utils.byteify(json.loads(tmp))
    except Exception:
        return
def solve(self, url, siteKey):
    """Solve a reCAPTCHA through the 9kw.eu captcha service.

    Uploads the sitekey/page URL, then polls once per second for the
    answer until solved, ``self.time`` seconds elapse, or ``self.IsAlive``
    goes False.

    :param url: page URL the captcha appears on
    :param siteKey: reCAPTCHA site key
    :return: the solved token ('' if unsolved); None if no API key is set
    """
    if self.ApiKey == "":
        control.infoDialog("Kein Captcha9KW API-Key eingetragen!")
        return
    token = ''
    post = {
        'apikey': self.ApiKey,
        'action': 'usercaptchaupload',
        'interactive': '1',
        'json': '1',
        'file-upload-01': siteKey,
        'oldsource': 'recaptchav2',
        'pageurl': url,
        'maxtimeout': self.time
    }
    if self.SolveType == 'true':
        post['selfsolve'] = '1'
    try:
        data = client.request('https://www.9kw.eu/index.cgi', post=post)
        if data:
            data = utils.byteify(json.loads(data))
            if 'captchaid' in data:
                captchaid = data['captchaid']
                tries = 0
                # Poll once per second until solved, timed out or aborted.
                while tries < self.time and self.IsAlive:
                    tries += 1
                    xbmc.sleep(1000)
                    data = client.request(
                        'https://www.9kw.eu/index.cgi?apikey=' + self.ApiKey +
                        '&action=usercaptchacorrectdata&json=1&id=' + captchaid)
                    if data:
                        print(str(data))
                        data = utils.byteify(json.loads(data))
                        # .get() keeps a not-yet-ready response (no 'answer'
                        # key) from raising KeyError and aborting all
                        # remaining polls via the outer except.
                        token = data.get('answer', '')
                        if token is not None and token != '':
                            break
    except Exception as e:
        print('9kw Error: ' + str(e))
    return token
def _post(self, url, data=None):
    """POST ``data`` to the API and return the parsed response.

    :param url: path relative to ``self.base_url``, or a full token URL
    :param data: form fields to send (defaults to an empty dict)
    :return: parsed JSON (dict/list), raw byteified body if not JSON,
             or None when no token is held and this is not a token request
    """
    if data is None:
        # Avoid the shared mutable-default-argument pitfall.
        data = {}
    if self.token == '' and 'token' not in url:
        # Not authenticated yet and this is not the token request itself.
        return None
    headers = {'Authorization': 'Bearer %s' % self.token}
    if 'token' not in url:
        url = self.base_url + url
    response = requests.post(url, data=data, headers=headers).text
    try:
        resp = utils.json_loads_as_str(response)
    except Exception:
        # Body was not JSON - hand back the raw (byteified) text.
        resp = utils.byteify(response)
    return resp
def _get(self, url):
    """GET ``url`` from the REST API with bearer authentication.

    Refreshes the access token and retries once when the service answers
    with 'badToken'.

    :param url: path relative to ``self.rest_base_url``
    :return: parsed JSON, or the raw byteified body if not JSON
    """
    original_url = url
    url = self.rest_base_url + url
    headers = {'User-Agent': self.user_agent,
               'Authorization': 'Bearer {0}'.format(self.token)}
    response = requests.get(url, headers=headers).text
    if 'badToken' in response:
        # Token expired: refresh and retry once. The recursive call
        # already returns the parsed result, so hand it straight back
        # instead of pushing an already-parsed object through the JSON
        # parser again (which only worked by accident via the except).
        self.refreshToken()
        return self._get(original_url)
    try:
        resp = utils.json_loads_as_str(response)
    except Exception:
        resp = utils.byteify(response)
    return resp
def solve(self, url, siteKey):
    """Solve an (invisible) reCAPTCHA through the 2captcha.com service.

    Uploads the sitekey/page URL, then polls once per second for the
    answer until solved, ``self.time`` seconds elapse, or ``self.IsAlive``
    goes False.

    :param url: page URL the captcha appears on
    :param siteKey: reCAPTCHA site key
    :return: the solved token ('' if unsolved); None if no API key is set
    """
    if self.ApiKey == "":
        control.infoDialog("Kein 2Captcha API-Key eingetragen!")
        return
    token = ''
    post = {
        'key': self.ApiKey,
        'method': 'userrecaptcha',
        'invisible': '1',
        'json': '1',
        'googlekey': siteKey,
        'pageurl': url
    }
    try:
        data = client.request('https://2captcha.com/in.php', post=post)
        if data:
            data = utils.byteify(json.loads(data))
            if 'status' in data and data['status'] == 1:
                captchaid = data['request']
                tries = 0
                # Poll once per second until solved, timed out or aborted.
                while tries < self.time and self.IsAlive:
                    tries += 1
                    xbmc.sleep(1000)
                    data = client.request(
                        'https://2captcha.com/res.php?key=' + self.ApiKey +
                        '&action=get&json=1&id=' + captchaid)
                    if data:
                        print(str(data))
                        data = utils.byteify(json.loads(data))
                        # .get() keeps a malformed poll response from
                        # raising KeyError and aborting remaining polls.
                        if data.get('status') == 1 and data.get('request', '') != '':
                            token = data['request']
                            break
    except Exception as e:
        print('2Captcha Error: ' + str(e))
    return token
def _get(self, url):
    """GET ``url`` from the REST API, authenticating via an auth_token
    query parameter.

    Refreshes the token and retries once on 'bad_token' / 'Bad Request'.

    :param url: path relative to ``self.rest_base_url``
    :return: parsed JSON, or the raw byteified body if not JSON
    """
    original_url = url
    url = self.rest_base_url + url
    # Append the token with the correct query-string separator.
    if '?' not in url:
        url += "?auth_token=%s" % self.token
    else:
        url += "&auth_token=%s" % self.token
    response = requests.get(url).text
    if 'bad_token' in response or 'Bad Request' in response:
        # Token expired: refresh and retry once. The recursive call
        # already returns the parsed result, so return it directly
        # instead of re-parsing an already-parsed object.
        self.refreshToken()
        return self._get(original_url)
    try:
        resp = utils.json_loads_as_str(response)
    except Exception:
        resp = utils.byteify(response)
    return resp
def _get(self, url):
    """GET ``url`` from the REST API, authenticating via an auth_token
    query parameter.

    Refreshes the token and retries once on 'bad_token' / 'Bad Request'.

    :param url: path relative to ``self.rest_base_url``
    :return: parsed JSON, or the raw byteified body if not JSON
    """
    original_url = url
    url = self.rest_base_url + url
    # Append the token with the correct query-string separator.
    if '?' not in url:
        url += "?auth_token=%s" % self.token
    else:
        url += "&auth_token=%s" % self.token
    response = requests.get(url).text
    if 'bad_token' in response or 'Bad Request' in response:
        # Token expired: refresh and retry once. The recursive call
        # already returns the parsed result, so return it directly
        # instead of re-parsing an already-parsed object.
        self.refreshToken()
        return self._get(original_url)
    try:
        resp = utils.json_loads_as_str(response)
    except Exception:
        resp = utils.byteify(response)
    return resp
def request(url, check, close=True, redirect=True, error=False, proxy=None,
            post=None, headers=None, mobile=False, XHR=False, limit=None,
            referer=None, cookie=None, compression=True, output='',
            timeout='30'):
    """Fetch ``url`` via client.request; if the expected ``check`` marker
    is missing from the response, retry through up to three randomly
    chosen proxy prefixes from get().

    :param url: target URL
    :param check: substring that must appear in a valid response
    :return: the response (any client.request output), or None on failure
    """
    try:
        r = client.request(url, close=close, redirect=redirect, proxy=proxy,
                           post=post, headers=headers, mobile=mobile, XHR=XHR,
                           limit=limit, referer=referer, cookie=cookie,
                           compression=compression, output=output,
                           timeout=timeout)
        if r is not None and error is not False:
            return r
        if check in str(r) or str(r) == '':
            return r
        # Direct fetch failed the check: pick up to three proxies at
        # random. A single shuffle suffices (the previous revision
        # sorted by random.random twice, which was redundant).
        proxies = list(get())
        random.shuffle(proxies)
        proxies = proxies[:3]
        for p in proxies:
            p += urllib.quote_plus(url)
            if post is not None:
                if isinstance(post, dict):
                    post = utils.byteify(post)
                    post = urllib.urlencode(post)
                # Tunnel the original query string through the proxy URL.
                p += urllib.quote_plus('?%s' % post)
            r = client.request(p, close=close, redirect=redirect, proxy=proxy,
                               headers=headers, mobile=mobile, XHR=XHR,
                               limit=limit, referer=referer, cookie=cookie,
                               compression=compression, output=output,
                               timeout='20')
            if check in str(r) or str(r) == '':
                return r
    except Exception:
        # Best-effort: any failure just yields None.
        pass
def request(url, check, close=True, redirect=True, error=False, proxy=None,
            post=None, headers=None, mobile=False, XHR=False, limit=None,
            referer=None, cookie=None, compression=True, output='',
            timeout='30'):
    """Fetch ``url`` via client.request; if the expected ``check`` marker
    is missing from the response, retry through up to three randomly
    chosen proxy prefixes from get().

    :param url: target URL
    :param check: substring that must appear in a valid response
    :return: the response (any client.request output), or None on failure
    """
    try:
        r = client.request(url, close=close, redirect=redirect, proxy=proxy,
                           post=post, headers=headers, mobile=mobile, XHR=XHR,
                           limit=limit, referer=referer, cookie=cookie,
                           compression=compression, output=output,
                           timeout=timeout)
        if r is not None and error is not False:
            return r
        if check in str(r) or str(r) == '':
            return r
        # Direct fetch failed the check: pick up to three proxies at
        # random. A single shuffle suffices (the previous revision
        # sorted by random.random twice, which was redundant).
        proxies = list(get())
        random.shuffle(proxies)
        proxies = proxies[:3]
        for p in proxies:
            p += urllib.quote_plus(url)
            if post is not None:
                if isinstance(post, dict):
                    post = utils.byteify(post)
                    post = urllib.urlencode(post)
                # Tunnel the original query string through the proxy URL.
                p += urllib.quote_plus('?%s' % post)
            r = client.request(p, close=close, redirect=redirect, proxy=proxy,
                               headers=headers, mobile=mobile, XHR=XHR,
                               limit=limit, referer=referer, cookie=cookie,
                               compression=compression, output=output,
                               timeout='20')
            if check in str(r) or str(r) == '':
                return r
    except Exception:
        # Best-effort: any failure just yields None.
        pass
def request(url, close=True, redirect=True, error=False, proxy=None,
            post=None, headers=None, mobile=False, XHR=False, limit=None,
            referer=None, cookie=None, compression=True, output='',
            timeout='30'):
    """Fetch ``url`` via urllib2 with scraper conveniences (Python 2).

    Builds an opener (optional proxy, cookie jar, relaxed SSL on affected
    2.7.9-2.7.11 builds), fills in default headers (random User-Agent,
    Referer, XHR, Cookie, gzip), handles Cloudflare 503 browser
    verification and Sucuri/Blazingfast protection pages, and returns a
    value depending on ``output``:

    ``''``          response body (default, capped at 5 MB)
    ``'cookie'``    cookie string
    ``'geturl'``    final URL after redirects
    ``'headers'``   response headers
    ``'chunk'``     first 16 KB, only for bodies >= 2 MB
    ``'file_size'`` Content-Length ('0' if unknown)
    ``'extended'``  (body, status code, response headers, request headers, cookie)

    Returns None on failure, or on HTTP errors unless ``error`` is truthy.
    """
    try:
        if not url:
            return
        handlers = []
        # Optional HTTP proxy.
        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                         urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        # Cookie jar only when cookies must be captured/reported back.
        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(),
                         urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        # 2.7.9-2.7.11 verify certs by default; disable so scraping of
        # sites with bad certificates keeps working.
        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass
        if url.startswith('//'):
            url = 'http:' + url
        # Merge caller headers over defaults (caller values win).
        _headers = {}
        try:
            _headers.update(headers)
        except:
            pass
        if 'User-Agent' in _headers:
            pass
        elif not mobile == True:
            # headers['User-Agent'] = agent()
            _headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            _headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if not 'Accept-Language' in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR == True:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif not cookie == None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'
        if redirect == False:
            # old implementation
            # class NoRedirection(urllib2.HTTPErrorProcessor):
            #     def http_response(self, request, response): return response
            # opener = urllib2.build_opener(NoRedirection)
            # opener = urllib2.install_opener(opener)
            # Return 3xx responses as-is instead of following them.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl
                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302
            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)
            try:
                del _headers['Referer']
            except:
                pass
        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)
        url = utils.byteify(url)
        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)
        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            # A 503 may be a Cloudflare browser-verification page: fetch a
            # clearance cookie (cached 168h) and retry once.
            if response.code == 503:
                cf_result = response.read(5242880)
                try:
                    encoding = response.info().getheader('Content-Encoding')
                except:
                    encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()
                if 'cf-browser-verification' in cf_result:
                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                          urlparse.urlparse(url).netloc)
                    if not netloc.endswith('/'):
                        netloc += '/'
                    ua = _headers['User-Agent']
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    _headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post)
                    _add_request_header(request, _headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                else:
                    log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
                    if error == False:
                        return
            else:
                log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
                if error == False:
                    return
        if output == 'cookie':
            try:
                result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                # Cloudflare clearance cookie wins when present.
                result = cf
            except:
                pass
            if close == True:
                response.close()
            return result
        elif output == 'geturl':
            result = response.geturl()
            if close == True:
                response.close()
            return result
        elif output == 'headers':
            result = response.headers
            if close == True:
                response.close()
            return result
        elif output == 'chunk':
            # First 16 KB, but only for sufficiently large bodies.
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)
            if close == True:
                response.close()
            return result
        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = '0'
            response.close()
            return content
        # Read the body (capped at 5 MB by default).
        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        try:
            encoding = response.info().getheader('Content-Encoding')
        except:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        # Sucuri cloudproxy challenge: solve it, re-request with the cookie.
        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)
            _headers['Cookie'] = su
            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)
            try:
                encoding = response.info().getheader('Content-Encoding')
            except:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        # Blazingfast protection: retry via _basic_request with its cookie.
        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                  urlparse.urlparse(url).netloc)
            ua = _headers['User-Agent']
            _headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
            result = _basic_request(url, headers=_headers, post=post,
                                    timeout=timeout, limit=limit)
        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1])
                                         for item in response.info().items()])
            except:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            if close == True:
                response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close == True:
                response.close()
            return result
    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url), log_utils.LOGDEBUG)
        return
def request(url, close=True, redirect=True, error=False, verify=True,
            proxy=None, post=None, headers=None, mobile=False, XHR=False,
            limit=None, referer=None, cookie=None, compression=True,
            output='', timeout='20'):
    """Fetch ``url`` via urllib.request with scraper conveniences.

    Builds an opener (optional proxy, cookie jar, optional relaxed SSL),
    fills in default headers (random desktop/mobile User-Agent, Referer,
    XHR, Cookie, gzip), handles Cloudflare 503 browser verification (via
    cloudscraper2) and Sucuri protection pages, and returns a value
    depending on ``output``:

    ``''``          response body (default, capped at 5 MB)
    ``'cookie'``    cookie string
    ``'geturl'``    final URL after redirects
    ``'headers'``   response headers
    ``'location'``  the Location response header
    ``'chunk'``     first 16 KB, only for bodies >= 2 MB
    ``'file_size'`` Content-Length ('0' if unknown)
    ``'extended'``  (body, status code, response headers, request headers, cookie)

    Returns None on failure, or on HTTP errors unless ``error`` is truthy.

    BUGFIX: a previous revision issued the request once UNGUARDED right
    before the guarded ``try`` below, so every call hit the server twice
    and an HTTPError from the first call bypassed the 503/Cloudflare
    handling entirely. Only the guarded call remains.
    """
    try:
        if not url:
            return
        handlers = []
        # Optional HTTP proxy.
        if proxy is not None:
            handlers += [
                urllib.request.ProxyHandler({'http': '%s' % (proxy)}),
                urllib.request.HTTPHandler
            ]
            opener = urllib.request.build_opener(*handlers)
            opener = urllib.request.install_opener(opener)
        # Cookie jar only when cookies must be captured/reported back.
        if output == 'cookie' or output == 'extended' or not close:
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib.request.HTTPHandler(),
                urllib.request.HTTPSHandler(),
                urllib.request.HTTPCookieProcessor(cookies)
            ]
            opener = urllib.request.build_opener(*handlers)
            opener = urllib.request.install_opener(opener)
        try:
            import platform
            node = platform.node().lower()
        except BaseException:
            node = ''
        # Explicitly requested unverified SSL.
        if verify == False and sys.version_info >= (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl._create_unverified_context()
                handlers += [urllib.request.HTTPSHandler(context=ssl_context)]
                opener = urllib.request.build_opener(*handlers)
                opener = urllib.request.install_opener(opener)
            except BaseException:
                pass
        # Relax verification on affected 2.7 builds and on XboxOne.
        if verify and ((2, 7, 8) < sys.version_info < (2, 7, 12)
                       or platform.uname()[1] == 'XboxOne'):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib.request.HTTPSHandler(context=ssl_context)]
                opener = urllib.request.build_opener(*handlers)
                opener = urllib.request.install_opener(opener)
            except BaseException:
                pass
        if url.startswith('//'):
            url = 'http:' + url
        # Merge caller headers over defaults (caller values win).
        _headers = {}
        try:
            _headers.update(headers)
        except BaseException:
            pass
        if 'User-Agent' in _headers:
            pass
        elif mobile:
            _headers['User-Agent'] = cache.get(randommobileagent, 1)
        else:
            _headers['User-Agent'] = cache.get(randomagent, 1)
        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if 'Accept-Language' not in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif cookie is not None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'
        if not redirect:
            # Return 3xx responses as-is instead of following them.
            class NoRedirectHandler(urllib.request.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl
                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302
            opener = urllib.request.build_opener(NoRedirectHandler())
            urllib.request.install_opener(opener)
            try:
                del _headers['Referer']
            except BaseException:
                pass
        url = utils.byteify(url)
        request = urllib.request.Request(url)
        if post is not None:
            if isinstance(post, dict):
                post = utils.byteify(post)
                post = urlencode(post)
            if len(post) > 0:
                request = urllib.request.Request(url, data=post)
            else:
                # Empty body: still force POST and claim Content-type is set.
                request.get_method = lambda: 'POST'
                request.has_header = lambda header_name: (
                    header_name == 'Content-type'
                    or urllib.request.Request.has_header(request, header_name))
        if limit == '0':
            request.get_method = lambda: 'HEAD'
        _add_request_header(request, _headers)
        try:
            response = urllib.request.urlopen(request, timeout=int(timeout))
        except urllib.error.HTTPError as response:
            # A 503 may be a Cloudflare browser-verification page: fetch a
            # clearance cookie and retry (at most twice).
            if response.code == 503:
                cf_result = response.read()
                try:
                    encoding = response.info().getheader('Content-Encoding')
                except BaseException:
                    encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=BytesIO(cf_result)).read()
                if 'cf-browser-verification' in cf_result:
                    from cloudscraper2 import CloudScraper as cfscrape
                    _cf_lim = 0
                    while 'cf-browser-verification' in cf_result and _cf_lim <= 1:
                        _cf_lim += 1
                        netloc = '%s://%s/' % (urlparse(url).scheme,
                                               urlparse(url).netloc)
                        ua = _headers['User-Agent']
                        try:
                            cf = cache.get(cfscrape.get_cookie_string, 1,
                                           netloc, ua)[0]
                        except BaseException:
                            try:
                                cf = cfscrape.get_cookie_string(url, ua)[0]
                            except BaseException:
                                cf = None
                        finally:
                            _headers['Cookie'] = cf
                        request = urllib.request.Request(url, data=post)
                        _add_request_header(request, _headers)
                        try:
                            response = urllib.request.urlopen(
                                request, timeout=int(timeout))
                            cf_result = 'Success'
                        except urllib.error.HTTPError as response:
                            # Cached cookie did not work: drop it and retry.
                            cache.remove(cfscrape.get_cookie_string, netloc, ua)
                            cf_result = response.read()
                else:
                    xbmc.log('Request-Error (%s): %s' % (str(response.code), url),
                             xbmc.LOGDEBUG)
                    if not error:
                        return
            else:
                xbmc.log('Request-Error (%s): %s' % (str(response.code), url),
                         xbmc.LOGDEBUG)
                if not error:
                    return
        if output == 'cookie':
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except BaseException:
                pass
            try:
                # Cloudflare clearance cookie wins when present.
                result = cf
            except BaseException:
                pass
            if close:
                response.close()
            return result
        elif output == 'geturl':
            result = response.geturl()
            if close:
                response.close()
            return result
        elif output == 'headers':
            result = response.headers
            if close:
                response.close()
            return result
        elif output == 'location':
            result = response.headers
            if close:
                response.close()
            return result['Location']
        elif output == 'chunk':
            # First 16 KB, but only for sufficiently large bodies.
            try:
                content = int(response.headers['Content-Length'])
            except BaseException:
                content = (2049 * 1024)
            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)
            if close:
                response.close()
            return result
        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except BaseException:
                content = '0'
            response.close()
            return content
        # Read the body (capped at 5 MB by default).
        if limit == '0':
            result = response.read(1 * 1024)
        elif limit is not None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        try:
            encoding = response.headers['Content-Encoding']
        except BaseException:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=BytesIO(result)).read()
        # Sucuri cloudproxy challenge: solve it, re-request with the cookie.
        if b'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)
            _headers['Cookie'] = su
            request = urllib.request.Request(url, data=post)
            _add_request_header(request, _headers)
            response = urllib.request.urlopen(request, timeout=int(timeout))
            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)
            try:
                encoding = response.info().getheader('Content-Encoding')
            except BaseException:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=BytesIO(result)).read()
        if six.PY3 and isinstance(result, bytes):
            result = result.decode('utf-8')
        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1])
                                         for item in response.info().items()])
            except BaseException:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except BaseException:
                pass
            try:
                cookie = cf
            except BaseException:
                pass
            if close:
                response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close:
                response.close()
            return result
    except Exception as e:
        xbmc.log('Request-Error: (%s) => %s' % (str(e), url), xbmc.LOGDEBUG)
        return
def __getTrakt(url, post=None):
    """Call the Trakt v2 API and return (body, response headers).

    Sends ``post`` as JSON when given, otherwise GETs. On 401/405 the
    OAuth access token is refreshed with the stored refresh token and the
    call is retried once. Returns None on server errors, rate limiting,
    404, or any exception.
    """
    try:
        url = urllib_parse.urljoin(BASE_URL, url)
        post = json.dumps(post) if post else None
        headers = {
            'Content-Type': 'application/json',
            'trakt-api-key': V2_API_KEY,
            'trakt-api-version': 2
        }
        if getTraktCredentialsInfo():
            headers.update({
                'Authorization': 'Bearer %s' % control.setting('trakt.token')
            })
        result = client.request(url, post=post, headers=headers,
                                output='extended', error=True)
        result = utils.byteify(result)
        resp_code = result[1]
        resp_header = result[2]
        result = result[0]
        if resp_code in [
            '423', '500', '502', '503', '504', '520', '521', '522', '524'
        ]:
            log_utils.log('Trakt Error: %s' % str(resp_code))
            control.infoDialog('Trakt Error: ' + str(resp_code), sound=True)
            return
        elif resp_code in ['429']:
            log_utils.log('Trakt Rate Limit Reached: %s' % str(resp_code))
            control.infoDialog('Trakt Rate Limit Reached: ' + str(resp_code),
                               sound=True)
            return
        elif resp_code in ['404']:
            log_utils.log('Object Not Found : %s' % str(resp_code))
            return
        if resp_code not in ['401', '405']:
            return result, resp_header
        # Access token rejected: refresh it with the stored refresh token
        # and retry the original request once.
        oauth = urllib_parse.urljoin(BASE_URL, '/oauth/token')
        opost = {
            'client_id': V2_API_KEY,
            'client_secret': CLIENT_SECRET,
            'redirect_uri': REDIRECT_URI,
            'grant_type': 'refresh_token',
            'refresh_token': control.setting('trakt.refresh')
        }
        result = client.request(oauth, post=json.dumps(opost), headers=headers)
        result = utils.json_loads_as_str(result)
        token, refresh = result['access_token'], result['refresh_token']
        # SECURITY: the access token is a credential - never print/log it
        # (a previous revision printed it to the log here).
        control.setSetting(id='trakt.token', value=token)
        control.setSetting(id='trakt.refresh', value=refresh)
        headers['Authorization'] = 'Bearer %s' % token
        result = client.request(url, post=post, headers=headers,
                                output='extended', error=True)
        result = utils.byteify(result)
        return result[0], result[2]
    except Exception:
        log_utils.log('getTrakt Error', 1)
        pass
def __getTrakt(url, post=None):
    """Call the Trakt v2 API and return (body, response headers).

    Sends ``post`` as JSON when given, otherwise GETs. On 401/405 the
    OAuth access token is refreshed with the stored refresh token and the
    call is retried once. Returns None on server errors, 404, or any
    exception.
    """
    try:
        url = urllib_parse.urljoin(BASE_URL, url)
        post = json.dumps(post) if post else None
        headers = {
            'Content-Type': 'application/json',
            'trakt-api-key': V2_API_KEY,
            'trakt-api-version': 2
        }
        if getTraktCredentialsInfo():
            headers.update({
                'Authorization': 'Bearer %s' % control.setting('trakt.token')
            })
        result = client.request(url, post=post, headers=headers,
                                output='extended', error=True)
        result = utils.byteify(result)
        resp_code = result[1]
        resp_header = result[2]
        result = result[0]
        if resp_code in [
            '500', '502', '503', '504', '520', '521', '522', '524'
        ]:
            log_utils.log('Temporary Trakt Error: %s' % resp_code,
                          log_utils.LOGWARNING)
            return
        elif resp_code in ['404']:
            log_utils.log(
                '[Kpolyamass] Trakt error: Object Not Found : %s' % resp_code,
                log_utils.LOGWARNING)
            return
        if resp_code not in ['401', '405']:
            return result, resp_header
        # Access token rejected: refresh it with the stored refresh token
        # and retry the original request once.
        oauth = urllib_parse.urljoin(BASE_URL, '/oauth/token')
        opost = {
            'client_id': V2_API_KEY,
            'client_secret': CLIENT_SECRET,
            'redirect_uri': REDIRECT_URI,
            'grant_type': 'refresh_token',
            'refresh_token': control.setting('trakt.refresh')
        }
        result = client.request(oauth, post=json.dumps(opost), headers=headers)
        result = utils.json_loads_as_str(result)
        token, refresh = result['access_token'], result['refresh_token']
        control.setSetting(id='trakt.token', value=token)
        control.setSetting(id='trakt.refresh', value=refresh)
        headers['Authorization'] = 'Bearer %s' % token
        result = client.request(url, post=post, headers=headers,
                                output='extended', error=True, timeout='25')
        result = utils.byteify(result)
        return result[0], result[2]
    except Exception as e:
        failure = traceback.format_exc()
        log_utils.log('Trakt - Exception: \n' + str(failure))
        # FIX: was an unqualified `LOGWARNING` (NameError inside this
        # handler); the constant lives on log_utils, as used above.
        log_utils.log('Unknown Trakt Error: %s' % e, log_utils.LOGWARNING)
        pass
def request(url, close=True, redirect=True, error=False, proxy=None,
            post=None, headers=None, mobile=False, XHR=False, limit=None,
            referer=None, cookie=None, compression=True, output='',
            timeout='30'):
    """Fetch ``url`` via urllib2 with scraper conveniences (Python 2).

    Builds an opener (optional proxy, cookie jar, relaxed SSL on affected
    2.7.9-2.7.11 builds), fills in default headers (random User-Agent,
    Referer, XHR, Cookie, gzip), handles Cloudflare 503 browser
    verification and Sucuri/Blazingfast protection pages, and returns a
    value depending on ``output``:

    ``''``          response body (default, capped at 5 MB)
    ``'cookie'``    cookie string
    ``'geturl'``    final URL after redirects
    ``'headers'``   response headers
    ``'chunk'``     first 16 KB, only for bodies >= 2 MB
    ``'file_size'`` Content-Length ('0' if unknown)
    ``'extended'``  (body, status code, response headers, request headers, cookie)

    Returns None on failure, or on HTTP errors unless ``error`` is truthy.
    """
    try:
        if not url:
            return
        handlers = []
        # Optional HTTP proxy.
        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http': '%s' % (proxy)}),
                         urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        # Cookie jar only when cookies must be captured/reported back.
        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(),
                         urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        # 2.7.9-2.7.11 verify certs by default; disable so scraping of
        # sites with bad certificates keeps working.
        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass
        if url.startswith('//'):
            url = 'http:' + url
        # Merge caller headers over defaults (caller values win).
        _headers = {}
        try:
            _headers.update(headers)
        except:
            pass
        if 'User-Agent' in _headers:
            pass
        elif not mobile == True:
            # headers['User-Agent'] = agent()
            _headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            _headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if not 'Accept-Language' in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR == True:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif not cookie == None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'
        if redirect == False:
            # old implementation
            # class NoRedirection(urllib2.HTTPErrorProcessor):
            #     def http_response(self, request, response): return response
            # opener = urllib2.build_opener(NoRedirection)
            # opener = urllib2.install_opener(opener)
            # Return 3xx responses as-is instead of following them.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl
                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302
            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)
            try:
                del _headers['Referer']
            except:
                pass
        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)
        url = utils.byteify(url)
        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)
        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            # A 503 may be a Cloudflare browser-verification page: fetch a
            # clearance cookie (cached 168h) and retry once.
            if response.code == 503:
                cf_result = response.read(5242880)
                try:
                    encoding = response.info().getheader('Content-Encoding')
                except:
                    encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()
                if 'cf-browser-verification' in cf_result:
                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                          urlparse.urlparse(url).netloc)
                    if not netloc.endswith('/'):
                        netloc += '/'
                    ua = _headers['User-Agent']
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    _headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post)
                    _add_request_header(request, _headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                else:
                    log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
                    if error == False:
                        return
            else:
                log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
                if error == False:
                    return
        if output == 'cookie':
            try:
                result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                # Cloudflare clearance cookie wins when present.
                result = cf
            except:
                pass
            if close == True:
                response.close()
            return result
        elif output == 'geturl':
            result = response.geturl()
            if close == True:
                response.close()
            return result
        elif output == 'headers':
            result = response.headers
            if close == True:
                response.close()
            return result
        elif output == 'chunk':
            # First 16 KB, but only for sufficiently large bodies.
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = (2049 * 1024)
            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)
            if close == True:
                response.close()
            return result
        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except:
                content = '0'
            response.close()
            return content
        # Read the body (capped at 5 MB by default).
        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        try:
            encoding = response.info().getheader('Content-Encoding')
        except:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        # Sucuri cloudproxy challenge: solve it, re-request with the cookie.
        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)
            _headers['Cookie'] = su
            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)
            try:
                encoding = response.info().getheader('Content-Encoding')
            except:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        # Blazingfast protection: retry via _basic_request with its cookie.
        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme,
                                  urlparse.urlparse(url).netloc)
            ua = _headers['User-Agent']
            _headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
            result = _basic_request(url, headers=_headers, post=post,
                                    timeout=timeout, limit=limit)
        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1])
                                         for item in response.info().items()])
            except:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            if close == True:
                response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close == True:
                response.close()
            return result
    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url), log_utils.LOGDEBUG)
        return
def request(
    url,
    close=True,
    redirect=True,
    error=False,
    proxy=None,
    post=None,
    headers=None,
    mobile=False,
    XHR=False,
    limit=None,
    referer=None,
    cookie=None,
    compression=True,
    output="",
    timeout="30",
):
    """Fetch *url* with urllib2, working around common anti-bot walls.

    Parameters
    ----------
    url : str
        Target URL. A scheme-relative ``//host/...`` URL gets ``http:`` prepended.
    close : bool
        Close the response object before returning (default True).
    redirect : bool
        When False, HTTP 30x responses are returned as-is instead of followed.
    error : bool
        When False (default), an HTTP error that is not recovered from makes the
        function return None; when True, processing continues on the error response.
    proxy : str or None
        Optional ``host:port`` HTTP proxy.
    post : dict or str or None
        POST payload; a dict is url-encoded (its presence makes this a POST).
    headers : dict or None
        Extra request headers; merged into the defaults built below.
    mobile : bool
        When True, send a hard-coded iPhone User-Agent instead of a cached random one.
    XHR : bool
        When True, add ``X-Requested-With: XMLHttpRequest``.
    limit : str/int or None
        Read cap in KiB; ``'0'`` means 224 KiB, None means 5 MiB max.
    referer : str or None
        Optional Referer header.
    cookie : str or None
        Optional Cookie header value.
    compression : bool
        Advertise ``Accept-Encoding: gzip`` (only when no read limit is set).
    output : str
        Selects the return value: '' (body), 'cookie', 'geturl', 'headers',
        'chunk', 'file_size', or 'extended'
        (tuple ``(body, code, response_headers, request_headers, cookie)``).
    timeout : str or int
        Socket timeout in seconds (converted with ``int``).

    Returns
    -------
    Varies with *output* (see above); returns None on unrecovered errors —
    the whole body is wrapped in a catch-all that only logs.
    """
    try:
        # Guard: nothing to do without a URL.
        if not url:
            return

        handlers = []

        # Optional HTTP proxy support; installs a global opener (process-wide
        # side effect of urllib2.install_opener).
        if not proxy == None:
            handlers += [
                urllib2.ProxyHandler({"http": "%s" % (proxy)}),
                urllib2.HTTPHandler,
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        # A cookie jar is only needed when cookies must be reported back
        # ('cookie'/'extended') or the connection is kept open.  NOTE(review):
        # `cookies` is referenced later inside try/except blocks that rely on a
        # NameError being swallowed when this branch did not run.
        if output == "cookie" or output == "extended" or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies),
            ]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        # Python 2.7.9-2.7.11 enabled strict cert verification by default;
        # disable it there so scraping self-signed/misconfigured hosts works.
        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            try:
                import ssl

                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass

        # Scheme-relative URL -> assume plain http.
        if url.startswith("//"):
            url = "http:" + url

        # Build the effective header dict; caller-supplied headers win over
        # every default below (each default is skipped when already present).
        _headers = {}
        try:
            _headers.update(headers)
        except:
            pass
        if "User-Agent" in _headers:
            pass
        elif not mobile == True:
            # headers['User-Agent'] = agent()
            _headers["User-Agent"] = cache.get(randomagent, 1)
        else:
            _headers["User-Agent"] = "Apple-iPhone/701.341"
        if "Referer" in _headers:
            pass
        elif referer is not None:
            _headers["Referer"] = referer
        if not "Accept-Language" in _headers:
            _headers["Accept-Language"] = "en-US"
        if "X-Requested-With" in _headers:
            pass
        elif XHR == True:
            _headers["X-Requested-With"] = "XMLHttpRequest"
        if "Cookie" in _headers:
            pass
        elif not cookie == None:
            _headers["Cookie"] = cookie
        if "Accept-Encoding" in _headers:
            pass
        elif compression and limit is None:
            _headers["Accept-Encoding"] = "gzip"

        if redirect == False:
            # old implementation
            # class NoRedirection(urllib2.HTTPErrorProcessor):
            #     def http_response(self, request, response): return response
            # opener = urllib2.build_opener(NoRedirection)
            # opener = urllib2.install_opener(opener)

            # Return 30x responses directly instead of following them.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            # Drop the Referer when not following redirects (presumably so the
            # raw 30x target does not see it — TODO confirm intent).
            try:
                del _headers["Referer"]
            except:
                pass

        # Dict payloads are url-encoded; byteify strips unicode (Py2 helper).
        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)

        url = utils.byteify(url)
        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)

        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            # HTTP 503 may be a Cloudflare browser-verification challenge:
            # solve it via the cached cfcookie helper and retry once.
            if response.code == 503:
                cf_result = response.read(5242880)
                try:
                    encoding = response.info().getheader("Content-Encoding")
                except:
                    encoding = None
                if encoding == "gzip":
                    cf_result = gzip.GzipFile(
                        fileobj=StringIO.StringIO(cf_result)
                    ).read()

                if "cf-browser-verification" in cf_result:
                    netloc = "%s://%s" % (
                        urlparse.urlparse(url).scheme,
                        urlparse.urlparse(url).netloc,
                    )
                    if not netloc.endswith("/"):
                        netloc += "/"
                    ua = _headers["User-Agent"]
                    # cache.get memoizes the solved cookie for 168 hours.
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    _headers["Cookie"] = cf
                    request = urllib2.Request(url, data=post)
                    _add_request_header(request, _headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                else:
                    log_utils.log(
                        "Request-Error (%s): %s" % (str(response.code), url),
                        log_utils.LOGDEBUG,
                    )
                    if error == False:
                        return
            else:
                log_utils.log(
                    "Request-Error (%s): %s" % (str(response.code), url),
                    log_utils.LOGDEBUG,
                )
                if error == False:
                    return

        # --- output modes that return before reading the full body ---
        if output == "cookie":
            # Prefer the jar contents; fall back to a solved Cloudflare cookie.
            # NOTE(review): if neither `cookies` nor `cf` exists, `result` is
            # unbound here and the outer except returns None — confirm intended.
            try:
                result = "; ".join(
                    ["%s=%s" % (i.name, i.value) for i in cookies]
                )
            except:
                pass
            try:
                result = cf
            except:
                pass
            if close == True:
                response.close()
            return result

        elif output == "geturl":
            # Final URL after any redirects.
            result = response.geturl()
            if close == True:
                response.close()
            return result

        elif output == "headers":
            result = response.headers
            if close == True:
                response.close()
            return result

        elif output == "chunk":
            # Return the first 16 KiB, but only for bodies >= 2 MiB
            # (missing Content-Length is treated as "big enough").
            try:
                content = int(response.headers["Content-Length"])
            except:
                content = 2049 * 1024
            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)
            if close == True:
                response.close()
            return result

        elif output == "file_size":
            # NOTE(review): fallback is the string '0' while the success path
            # returns an int — callers presumably tolerate both; verify.
            try:
                content = int(response.headers["Content-Length"])
            except:
                content = "0"
            response.close()
            return content

        # --- read the body (capped) ---
        # limit == '0' -> 224 KiB, explicit limit -> limit KiB, else 5 MiB.
        if limit == "0":
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        try:
            encoding = response.info().getheader("Content-Encoding")
        except:
            encoding = None
        if encoding == "gzip":
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        # Sucuri cloudproxy JS challenge: solve for a cookie and re-request.
        if "sucuri_cloudproxy_js" in result:
            su = sucuri().get(result)

            _headers["Cookie"] = su

            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)

            response = urllib2.urlopen(request, timeout=int(timeout))

            if limit == "0":
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try:
                encoding = response.info().getheader("Content-Encoding")
            except:
                encoding = None
            if encoding == "gzip":
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        # Blazingfast anti-bot page: fetch a cached cookie and redo the whole
        # request through _basic_request with it.
        if "Blazingfast.io" in result and "xhr.open" in result:
            netloc = "%s://%s" % (
                urlparse.urlparse(url).scheme,
                urlparse.urlparse(url).netloc,
            )
            ua = _headers["User-Agent"]
            _headers["Cookie"] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
            result = _basic_request(
                url, headers=_headers, post=post, timeout=timeout, limit=limit
            )

        if output == "extended":
            # Title-case the response header names; fall back to the raw
            # headers object if that fails.
            try:
                response_headers = dict(
                    [(item[0].title(), item[1]) for item in response.info().items()]
                )
            except:
                response_headers = response.headers
            response_code = str(response.code)
            # Same jar-then-cf cookie resolution as the 'cookie' mode above;
            # note this rebinds the `cookie` parameter.
            try:
                cookie = "; ".join(
                    ["%s=%s" % (i.name, i.value) for i in cookies]
                )
            except:
                pass
            try:
                cookie = cf
            except:
                pass
            if close == True:
                response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close == True:
                response.close()
            return result
    except Exception as e:
        # Catch-all boundary: log and return None rather than propagate.
        log_utils.log("Request-Error: (%s) => %s" % (str(e), url), log_utils.LOGDEBUG)
        return