def request(url, post=None, headers=None, mobile=False, safe=False, timeout='30'):
    """Fetch *url* through a Cloudflare-protected site.

    Uses a cached User-Agent and clearance cookie (168h TTL); when the
    response status is '503' (challenge page) both are force-refreshed
    (TTL 0) and the request retried once.  Returns the page body, or
    None on any failure.
    """
    try:
        # Fix: the original did a no-op ``headers.update(headers)`` inside
        # try/except purely to turn None into {}; do that explicitly.
        if headers is None:
            headers = {}
        agent = cache.get(cloudflareAgent, 168)
        if not 'User-Agent' in headers: headers['User-Agent'] = agent
        u = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        cookie = cache.get(cloudflareCookie, 168, u, post, headers, mobile, safe, timeout)
        result = client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout, output='response', error=True)
        if result[0] == '503':
            # Challenge hit: regenerate agent and cookie, retry once.
            agent = cache.get(cloudflareAgent, 0)
            headers['User-Agent'] = agent
            cookie = cache.get(cloudflareCookie, 0, u, post, headers, mobile, safe, timeout)
            result = client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout)
        else:
            result = result[1]
        return result
    except:
        return
def cloudflare(url, post, headers, mobile, safe, timeout):
    """Solve the Cloudflare IUAM JavaScript challenge for *url*.

    Fetches the challenge page, extracts the jschl_vc token plus the
    obfuscated arithmetic, computes the answer, waits the mandatory delay,
    then submits the answer and returns the clearance cookie (None on
    failure).
    """
    try:
        result = client.request(url, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout, error=True)
        # Hidden form token that must be echoed back.
        jschl = re.compile('name="jschl_vc" value="(.+?)"/>').findall(result)[0]
        # Seed value of the challenge arithmetic inside setTimeout(...).
        init = re.compile('setTimeout\(function\(\){\s*.*?.*:(.*?)};').findall(result)[0]
        # Sequence of +=/-=/*= operations applied to the seed.
        builder = re.compile(r"challenge-form\'\);\s*(.*)a.v").findall(result)[0]
        decryptVal = parseJSString(init)
        lines = builder.split(';')
        for line in lines:
            if len(line) > 0 and '=' in line:
                sections = line.split('=')
                line_val = parseJSString(sections[1])
                # NOTE(review): eval() on site-derived text — assumed to be
                # reduced to a plain number by parseJSString; still fragile.
                decryptVal = int(eval(str(decryptVal) + sections[0][-1] + str(line_val)))
        # Cloudflare's answer is the computed value plus the hostname length.
        answer = decryptVal + len(urlparse.urlparse(url).netloc)
        query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (url, jschl, answer)
        if 'type="hidden" name="pass"' in result:
            passval = re.compile('name="pass" value="(.*?)"').findall(result)[0]
            query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (url, urllib.quote_plus(passval), jschl, answer)
        # Answers submitted too quickly are rejected by Cloudflare.
        time.sleep(5)
        cookie = client.request(query, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout, output='cookie', error=True)
        return cookie
    except:
        pass
def solvemedia(data):
    """Drive a manual SolveMedia captcha; return the challenge/response pair."""
    try:
        frames = client.parseDOM(data, 'iframe', ret='src')
        frames = [src for src in frames if 'api.solvemedia.com' in src]
        if not len(frames) > 0:
            return
        page = client.request(frames[0], referer='')
        media = client.parseDOM(page, 'iframe', ret='src')
        media += client.parseDOM(page, 'img', ret='src')
        media = [src for src in media if '/papi/media' in src][0]
        media = 'http://api.solvemedia.com' + media
        typed = keyboard(media)
        post = {}
        form = client.parseDOM(page, 'form', attrs = {'action': 'verify.noscript'})[0]
        for field in client.parseDOM(form, 'input', ret='name', attrs = {'type': 'hidden'}):
            post.update({field: client.parseDOM(form, 'input', ret='value', attrs = {'name': field})[0]})
        post.update({'adcopy_response': typed})
        client.request('http://api.solvemedia.com/papi/verify.noscript', post=urllib.urlencode(post))
        return {'adcopy_challenge': post['adcopy_challenge'], 'adcopy_response': 'manual_challenge'}
    except:
        pass
def getTVrageEpisode(tvrage, title, date, season, episode):
    """Resolve (season, episode) via TVRage, falling back to epguides CSV.

    Matches first by air date (dd/Mon/yyyy), then by cleaned episode
    title.  Returns a (season, episode) string tuple, or None.
    """
    monthMap = {'01':'Jan', '02':'Feb', '03':'Mar', '04':'Apr', '05':'May', '06':'Jun', '07':'Jul', '08':'Aug', '09':'Sep', '10':'Oct', '11':'Nov', '12':'Dec'}
    title = cleantitle.tv(title)
    try:
        page = client.request('http://www.tvrage.com/shows/id-%s/episode_list/all' % tvrage, timeout='5')
        rows = re.compile('<td.+?><a.+?title=.+?season.+?episode.+?>(\d+?)x(\d+?)<.+?<td.+?>(\d+?/.+?/\d+?)<.+?<td.+?>.+?href=.+?>(.+?)<').findall(page.replace('\n',''))
        wanted = '%02d/%s/%s' % (int(date.split('-')[2]), monthMap[date.split('-')[1]], date.split('-')[0])
        hits = [row for row in rows if wanted == row[2]]
        if len(hits) == 1:
            return (str('%01d' % int(hits[0][0])), str('%01d' % int(hits[0][1])))
        hits = [row for row in rows if title == cleantitle.tv(row[3])]
        if len(hits) == 1:
            return (str('%01d' % int(hits[0][0])), str('%01d' % int(hits[0][1])))
    except:
        pass
    try:
        page = client.request('http://epguides.com/common/exportToCSV.asp?rage=%s' % tvrage, timeout='5')
        rows = re.compile('\d+?,(\d+?),(\d+?),.+?,(\d+?/.+?/\d+?),"(.+?)",.+?,".+?"').findall(page)
        # epguides uses a two-digit year.
        wanted = '%02d/%s/%s' % (int(date.split('-')[2]), monthMap[date.split('-')[1]], date.split('-')[0][-2:])
        hits = [row for row in rows if wanted == row[2]]
        if len(hits) == 1:
            return (str('%01d' % int(hits[0][0])), str('%01d' % int(hits[0][1])))
        hits = [row for row in rows if title == cleantitle.tv(row[3])]
        if len(hits) == 1:
            return (str('%01d' % int(hits[0][0])), str('%01d' % int(hits[0][1])))
    except:
        pass
def solvemedia(data):
    """Drive a manual SolveMedia captcha; return the challenge/response pair."""
    try:
        frames = [i for i in client.parseDOM(data, "iframe", ret="src") if 'api.solvemedia.com' in i]
        if not len(frames) > 0:
            return
        page = client.request(frames[0], referer='')
        candidates = client.parseDOM(page, "iframe", ret="src") + client.parseDOM(page, "img", ret="src")
        challenge = [i for i in candidates if '/papi/media' in i][0]
        answer = keyboard('http://api.solvemedia.com' + challenge)
        post = {}
        form = client.parseDOM(page, "form", attrs = { "action": "verify.noscript" })[0]
        hidden = client.parseDOM(form, "input", ret="name", attrs = { "type": "hidden" })
        for field in hidden:
            post.update({field: client.parseDOM(form, "input", ret="value", attrs = { "name": field })[0]})
        post.update({'adcopy_response': answer})
        client.request('http://api.solvemedia.com/papi/verify.noscript', post=urllib.urlencode(post))
        return {'adcopy_challenge': post['adcopy_challenge'], 'adcopy_response': 'manual_challenge'}
    except:
        pass
def knnQuery(self, center, k):
    """Find the leaf holding *center*, then ask its server for the k nearest keys."""
    locate = 'FINDLEAF$' + str(center) + '$' + self.root['fileName']
    parts = client.request(self.root['serverID'], locate).split('$')
    knn = 'KNNQUERY$' + parts[1] + '$' + str(center) + '$' + str(k)
    return client.request(parts[0], knn)
def windowQuery1(self, left, right):
    """Range query over [left, right): locate the leaf for *left* and scan from it."""
    # left is included, right is not.
    locate = 'FINDLEAF$' + str(left) + '$' + self.root['fileName']
    parts = client.request(self.root['serverID'], locate).split('$')
    scan = 'WINDOWQUERY1$' + parts[1] + '$' + str(left) + '$' + str(right)
    return client.request(parts[0], scan)
def insertInTree(self, key, data):
    """Persist *data*, locate the leaf for *key*, and insert the pointer there."""
    ptr = self.saveContent(key, data)
    locate = 'FINDLEAF$' + str(key) + '$' + self.root['fileName']
    parts = client.request(self.root['serverID'], locate).split('$')
    insert = 'INSERTINLEAF$' + parts[1] + '$' + str(key) + '$' + ptr
    client.request(parts[0], insert)
    return 'SUCCESS'
def getNewLeaf(self, key):
    """Allocate a new leaf file on the best server for *key*; record it in metadata."""
    self.readMetaData()
    self.readServerData()
    server = self.getBestServer(key)
    self.fileCount[server]['leafCount'] += 1
    leaf = {'serverID': server, 'fileName': 'L' + ('%09d' % self.fileCount[server]['leafCount'])}
    client.request(leaf['serverID'], 'CREATELEAF$' + leaf['fileName'])
    self.writeMetaData()
    return leaf
def request(url, post=None, headers=None, mobile=False, safe=False, timeout='60'):
    """Fetch *url* with a cached (3h) Cloudflare cookie; refresh and retry on 503.

    Returns the body, or None on failure.
    """
    try:
        base = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        cookie = cache.get(cloudflare, 3, base, post, headers, mobile, safe, timeout)
        result = client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout, output='response', error=True)
        if 'HTTP Error 503' in result[0]:
            cookie = cache.get(cloudflare, 0, base, post, headers, mobile, safe, timeout)
            return client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout)
        return result[1]
    except:
        return
def PLAYLINK(name,url,iconimage):
    # Scrape the embedded player iframe from the page, unpack its packed
    # JS to recover the videomega stream URL, then list and play it.
    link = open_url(url)
    try: url=re.compile('src="(.+?)" allowFullScreen></iframe>').findall(link)[0]
    except: url=re.compile("src='(.+?)' allowFullScreen></iframe>").findall(link)[0]
    # Appended to the stream URL so the player sends a browser User-Agent.
    ua='|User-Agent=Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.86 Safari/537.36'
    #### THANKS TO LAMBDA ####
    import client
    import jsunpack
    # The iframe's ?ref= query value identifies the video.
    url = urlparse.urlparse(url).query
    url = urlparse.parse_qsl(url)[0][1]
    url = 'http://videomega.tv/cdn.php?ref=%s' % url
    result = client.request(url)
    unpacked = ''
    packed = result.split('\n')
    for i in packed:
        # Lines that are not packed JS are kept verbatim.
        try: unpacked += jsunpack.unpack(i)
        except: unpacked += i
    result = unpacked
    result = re.sub('\s\s+', ' ', result)
    url = re.compile('"video".+?"src"\s*\,\s*"(.+?)"').findall(result)
    url += client.parseDOM(result, 'source', ret='src', attrs = {'type': 'video.+?'})
    url = url[0]+ua
    #### THANKS TO LAMBDA ####
    # NOTE(review): ``icon`` (not the ``iconimage`` parameter) is used
    # below — presumably a module-level value; verify.
    ok=True
    liz=xbmcgui.ListItem(name, iconImage=icon,thumbnailImage=icon);
    liz.setInfo( type="Video", infoLabels={ "Title": name } )
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=url,listitem=liz)
    xbmc.Player ().play(url, liz, False)
def find_link(url, html=''):
    """Try every ``finder*`` function in this module against *url*'s page.

    Builds referer/host headers, fetches the page (unless *html* is
    supplied), then calls each module global whose name contains 'finder'
    until one returns a playable link.  Returns that link, or None.
    """
    global limit
    limit += 1
    log('Finding in : %s' % url)
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except:
        referer = 'http://' + urlparse.urlparse(url).netloc
    host = urlparse.urlparse(url).netloc
    headers = {'Referer': referer, 'Host': host, 'User-Agent': client.agent(), 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'}
    if html == '':
        url = manual_url_fix(url)
        html = client.request(url, headers=headers)
        html = manual_html_fix(url, html, headers)
    ref = url
    fs = list(globals().copy())
    for f in fs:
        if 'finder' in f:
            # Fix: look the finder up directly instead of eval()-ing its
            # name; also drops the unreachable ``break`` after ``return``.
            resolved = globals()[f](html, ref)
            if resolved:
                log('Resolved with %s: %s' % (f, resolved))
                return resolved
    return
def request(url, mobile=False, timeout="30"):
    """Fetch *url* with a cached (168h) Cloudflare cookie; refresh and retry on 503."""
    try:
        base = "%s://%s" % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        cookie = cache.get(cloudflare, 168, base, mobile, timeout)
        result = client.request(url, cookie=cookie, mobile=mobile, timeout=timeout, output="response", error=True)
        if "HTTP Error 503" in result[0]:
            cookie = cache.get(cloudflare, 0, base, mobile, timeout)
            return client.request(url, cookie=cookie, mobile=mobile, timeout=timeout)
        return result[1]
    except:
        return
def odnoklassniki(url):
    """Resolve an ok.ru video URL into quality-tagged stream URLs.

    Queries the videoPlayerMetadata endpoint and returns a list of
    {'quality', 'url'} dicts — all HD/1080p variants plus at most one SD
    fallback — or None when nothing was found.
    """
    try:
        vid = re.compile('//.+?/.+?/([\w]+)').findall(url)[0]
        meta = client.request('http://ok.ru/dk?cmd=videoPlayerMetadata&mid=%s' % vid)
        meta = re.sub(r'[^\x00-\x7F]+', ' ', meta)
        videos = json.loads(meta)['videos']
        # Fix: start from empty lists so a failing comprehension cannot
        # leave hd/sd undefined (the old code NameError'd on hd + sd[:1]
        # and returned None even when some qualities were available).
        hd = []
        sd = []
        try: hd += [{'quality': '1080p', 'url': i['url']} for i in videos if i['name'] == 'full']
        except: pass
        try: hd += [{'quality': 'HD', 'url': i['url']} for i in videos if i['name'] == 'hd']
        except: pass
        try: sd += [{'quality': 'SD', 'url': i['url']} for i in videos if i['name'] == 'sd']
        except: pass
        try: sd += [{'quality': 'SD', 'url': i['url']} for i in videos if i['name'] == 'low']
        except: pass
        try: sd += [{'quality': 'SD', 'url': i['url']} for i in videos if i['name'] == 'lowest']
        except: pass
        try: sd += [{'quality': 'SD', 'url': i['url']} for i in videos if i['name'] == 'mobile']
        except: pass
        url = hd + sd[:1]
        if not url == []: return url
    except:
        return
def get_remote_task_status(self):
    """Ask the remote server for this task's status; FAILED when the call errs."""
    reply = client.request(self.server, {"code": "TaskStatus", "task_key": self.remote_task_key})
    return reply["status"] if reply["code"] == "OK" else TaskStatus.FAILED
def run(args):
    """Send the JSON params from the CLI through client.request; pretty-print both sides."""
    (request, response) = client.request(json.loads(args.params))
    print('REQUEST ----------------------------------------------------------')
    print(json.dumps(request, ensure_ascii=False, indent=4))
    print('RESPONSE ---------------------------------------------------------')
    print(json.dumps(response, indent=4))
def get_remote_task_result(self):
    """Fetch this task's result from the remote server; empty list when the call errs."""
    reply = client.request(self.server, {"code": "TaskResult", "task_key": self.remote_task_key})
    return reply["result"] if reply["code"] == "OK" else []
def vk(url):
    """Resolve a vk.com video URL into quality-tagged stream URLs.

    Extracts oid/id (query string or path), obtains the embed hash, then
    reads the getEmbed API response, preferring 720 > 540 > 480 and
    falling back to 360 then 240.  Returns [{'quality','url'}, ...] or None.
    """
    try:
        try:
            oid, id = urlparse.parse_qs(urlparse.urlparse(url).query)['oid'][0], urlparse.parse_qs(urlparse.urlparse(url).query)['id'][0]
        except:
            oid, id = re.compile('\/video(.*)_(.*)').findall(url)[0]
        try:
            hash = urlparse.parse_qs(urlparse.urlparse(url).query)['hash'][0]
        except:
            hash = vk_hash(oid, id)
        api = 'http://api.vk.com/method/video.getEmbed?oid=%s&video_id=%s&embed_hash=%s' % (oid, id, hash)
        data = client.request(api)
        data = re.sub(r'[^\x00-\x7F]+', ' ', data)
        try:
            data = json.loads(data)['response']
        except:
            data = vk_private(oid, id)
        url = []
        # Collect the preferred qualities first.
        for quality, key in (('HD', 'url720'), ('SD', 'url540'), ('SD', 'url480')):
            try: url += [{'quality': quality, 'url': data[key]}]
            except: pass
        if not url == []: return url
        try: url += [{'quality': 'SD', 'url': data['url360']}]
        except: pass
        if not url == []: return url
        try: url += [{'quality': 'SD', 'url': data['url240']}]
        except: pass
        if not url == []: return url
    except:
        return
def request(url, timeout='30'):
    """Fetch *url* with a cached (168h) Cloudflare cookie; refresh and retry on 503."""
    try:
        base = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        cookie = cache.get(cloudflare, 168, base, timeout)
        result = client.request(url, cookie=cookie, timeout=timeout, output='response', error=True)
        if 'HTTP Error 503' in result[0]:
            cookie = cache.get(cloudflare, 0, base, timeout)
            return client.request(url, cookie=cookie, timeout=timeout)
        return result[1]
    except:
        return
def resolve(url):
    """Extract stream variants for the video id embedded in *url*'s page.

    Pulls the escaped stream list that follows the id, unescapes it, tags
    each stream via tag(), and returns them ordered 1080p, HD, SD (None
    when nothing resolves).
    """
    try:
        vid = (urlparse.urlparse(url).path).split("/")[-1]
        page = client.request(url)
        page = page.replace("\r", "").replace("\n", "").replace("\t", "")
        page = page.split('"%s"' % vid)[-1].split("]]")[0]
        streams = re.compile('\d*,\d*,\d*,"(.+?)"').findall(page)
        streams = [s.replace("\\u003d", "=").replace("\\u0026", "&") for s in streams][::-1]
        streams = sum([tag(s) for s in streams], [])
        picked = []
        for wanted in ("1080p", "HD", "SD"):
            try: picked += [[s for s in streams if s["quality"] == wanted][0]]
            except: pass
        if picked == []: return
        return picked
    except:
        return
def retrieve_messages():
    """ Retrieve the list of messages and return them as a dictionary. """
    params = {'Request': 'getLibraryMessages', 'LibraryID': config.get_config('qt_library')}
    (request, response) = client.request(params)
    return response.get('Result')
def finder30(html, url):
    """Follow the 'click here' redirect on the page and resolve the target link."""
    try:
        page = client.request(url, referer=urlparse.urlparse(url).netloc)
        target = re.findall('href="(.+?)">click here...', page)[0]
        return find_link(target + '&referer=http://rojedirecta.me')
    except:
        return
def is_following(self, friend):
    """ True if the user is following "friend". Otherwise, False. """
    # GET/relations/self.username:friend.username\0
    reply = client.request('GET/relations/%s:%s\0' % (self.username, friend.username))
    return reply == 'true'
def request(url, post=None, headers=None, mobile=False, safe=False, timeout='30'):
    """Fetch *url* with the Cloudflare cookie; return the raw response tuple.

    Returns None on any failure.
    """
    try:
        log("Cloudflare request")
        # NOTE(review): ``cookie`` is not defined in this function — it
        # presumably refers to a module-level value; if no such global
        # exists, the NameError is swallowed by the bare except and this
        # always returns None.  Verify against the enclosing module.
        result = client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout, output='response', error=True)
        log("Cloudflare response: %s " % result)
        return result
    except:
        return
def finder77(html, url):
    """Unquote the page and re-run finder4; keep the link only if it answers."""
    try:
        decoded = urllib.unquote(html)
        candidate = finder4(decoded, url)
        if client.request(candidate) is not None:
            return candidate
        return
    except:
        return
def unfollow(self, friend):
    """ Record the user's unfollow to "friend". """
    # DELETE/relations/self.username:friend.username\0
    reply = client.request('DELETE/relations/%s:%s\0' % (self.username, friend.username))
    if reply != 'success':
        raise ErrorProcessingRequest()
def exists(self):
    """ True if there's an active user with the provided username in the system. Otherwise, False. """
    # GET/credential/self.username\0
    reply = client.request('GET/credential/%s\0' % self.username)
    return reply == 'true'
def assign_remote_task(self):
    """Register this task on the remote server; store its key and report success."""
    payload = {"code": "TaskAssign", "job_id": self.job_id, "query": self.query, "query_index": self.query_index, "node": self.node}
    reply = client.request(self.server, payload)
    if reply["code"] != "OK":
        return False
    self.remote_task_key = reply["task_key"]
    return True
def check(url):
    """True when the openload file behind *url* still exists and is playable."""
    try:
        file_id = re.compile('//.+?/(?:embed|f)/([0-9a-zA-Z-_]+)').findall(url)[0]
        page = client.request('https://openload.co/embed/%s/' % file_id)
        if page is None:
            return False
        return '>We are sorry!<' not in page
    except:
        return False
def request(url, post=None, headers=None, mobile=False, safe=False, timeout='60'):
    """Fetch *url* via a Cloudflare-protected site, forcing one obfuscated header.

    The common.Decode() literals produce a fixed header name/value pair
    (kept opaque in the source on purpose).  A clearance cookie is cached
    for 3h in the 'cookies' table and refreshed on HTTP 503.  Returns the
    body, or None on failure.
    """
    try:
        if headers is None:
            headers = {common.Decode('ouLb26Vv1Mq74w=='): common.Decode('uN7a0qWb3Nu25w==')}
        else:
            headers[common.Decode('ouLb26Vv1Mq74w==')] = common.Decode('uN7a0qWb3Nu25w==')
        u = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        # The cookie is cached against the decoded header only, not the
        # caller's full header set.
        cookie = cache.get(cloudflare, 3, u, post, {common.Decode('ouLb26Vv1Mq74w=='): common.Decode('uN7a0qWb3Nu25w==')}, mobile, safe, timeout, table='cookies')
        result = client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout, output='response', error=True)
        if 'HTTP Error 503' in result[0]:
            # Challenge hit: regenerate the cookie (TTL 0) and retry once.
            cookie = cache.get(cloudflare, 0, u, post, {common.Decode('ouLb26Vv1Mq74w=='): common.Decode('uN7a0qWb3Nu25w==')}, mobile, safe, timeout, table='cookies')
            result = client.request(url, cookie=cookie, post=post, headers=headers, mobile=mobile, safe=safe, timeout=timeout)
        else:
            result= result[1]
        return result
    except:
        return
def txxx(self, url):
    """Scrape the txxx download link from *url* and start playback."""
    try:
        xbmc.executebuiltin("Dialog.Close(busydialog)")
        page = client.request(url)
        link = re.findall(r'''<div class="download__link".+?<a href="(.*?)"''', page)[0]
        link = CLEANUP(link)
        xbmc.executebuiltin("Dialog.Close(busydialog)")
        xbmc.Player().play(link)
    except:
        return
def menu():
    # Build the channel directory: scrape /channels/, convert each tile
    # to a Kodi directory entry, and display the result.
    lover.checkupdates()
    try:
        url = urlparse.urljoin(base_domain, 'channels/')
        c = client.request(url)
        r = dom_parser2.parse_dom( c, 'div', {'class': ['item-col', 'item--channel', 'col']})
        r = [(dom_parser2.parse_dom(i, 'a', req=['href','title']), dom_parser2.parse_dom(i, 'img', req='src')) for i in r if i]
        # Use the default category image when a tile has no <img>.
        # NOTE(review): the first 8 entries are skipped — presumably
        # non-channel header tiles; confirm against the live page.
        r = [(i[0][0].attrs['href'], i[0][0].attrs['title'], i[1][0].attrs['src'] if i[1] else urlparse.urljoin(base_domain,'core/images/catdefault.jpg')) for i in r[8:]]
        if (not r):
            log_utils.log('Scraping Error in %s:: Content of request: %s' % (base_name.title(), str(c)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File', duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log('Fatal Error in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for i in r:
        try:
            name = kodi.sortX(i[1].encode('utf-8'))
            fanarts = xbmc.translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({ 'name': name, 'url': i[0], 'mode': content_mode, 'icon': i[2], 'fanart': fanarts, 'folder': True })
        except Exception as e:
            log_utils.log('Error adding menu item %s in %s:: Error: %s' % (i[1].title(), base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def check(url):
    """True unless the hoster page reports 'File not found' in its title span."""
    try:
        page = client.request(url)
        if page is None:
            return False
        titles = client.parseDOM(page, 'span', attrs={'class': 'para_title'})
        if any('File not found' in t for t in titles):
            raise Exception()
        return True
    except:
        return False
def resolve(url):
    """Resolve a streamcloud.eu link: replay its hidden form and pull the file URL."""
    try:
        file_id = re.compile('//.+?/([\w]+)').findall(url)[0]
        page_url = 'http://streamcloud.eu/%s' % file_id
        page = client.request(page_url)
        post = {}
        form = client.parseDOM(page, 'form', attrs = {'class': 'proform'})[0]
        for field in client.parseDOM(form, 'input', ret='name', attrs = {'type': 'hidden'}):
            post.update({field: client.parseDOM(form, 'input', ret='value', attrs = {'name': field})[0]})
        # Skip the countdown page by requesting step 2 directly.
        body = urllib.urlencode(post).replace('op=download1', 'op=download2')
        page = client.request(page_url, post=body)
        return re.compile('file *: *"(http.+?)"').findall(page)[-1]
    except:
        return
def eporner(self, url):
    """Resolve an eporner page into (label, mp4-src) pairs, best quality first."""
    try:
        page = client.request(url)
        vid, vhash = re.findall(r"""{\s*vid:\s*'([^']+)',\s*hash\s*:\s*["\']([\da-f]{32})""", page)[0]
        # The 32-hex hash is re-encoded 8 hex chars at a time in base 36.
        hash_code = ''.join(self.encode_base_n(int(vhash[pos:pos + 8], 16), 36) for pos in range(0, 32, 8))
        load_url = 'https://www.eporner.com/xhr/video/%s?hash=%s&device=generic&domain=www.eporner.com&fallback=false&embed=false&supportedFormats=mp4' % (vid, hash_code)
        data = client.request(load_url).replace("\/", "/")
        sources = json.loads(data).get("sources", {}).get('mp4', {})
        pairs = [(label, sources[label].get("src")) for label in sources]
        return sorted(pairs, key=lambda x: int(re.search('(\d+)', x[0]).group(1)), reverse=True)
    except:
        return
def content(url, searched=False):
    # List video thumbs from *url*: scrape each grid tile, build playable
    # Kodi entries, then queue the next page (unless this is a search).
    if base_domain not in url:
        url = base_domain + url
    try:
        c = client.request(url)
        r = re.findall('<div class="un-grid--thumb--content">(.*?)<div class="un-grid--thumb--info">', c)
    except Exception as e:
        if (not searched):
            log_utils.log('Fatal Error in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
            kodi.notify(msg='Fatal Error', duration=4000, sound=True)
            quit()
        else:
            # NOTE(review): falling through leaves ``r`` undefined, so the
            # loop below raises NameError during a search — verify intent.
            pass
    dirlst = []
    for i in r:
        try:
            name = re.findall('alt="(.*?)"', i)[0]
            url2 = re.findall('<a href="(.*?)"', i)[0]
            icon = re.findall('<img src="(.*?)"', i)[0]
            time = re.findall('<div class="thumb__duration">(.*?)</div>', i)[0]
            if not base_domain in url2:
                url2 = base_domain + url2
            if not 'https' in icon:
                icon = 'https:' + icon
            fanarts = xbmc.translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({ 'name': name + '[COLOR yellow] [ ' + time + ' ][/COLOR]', 'url': url2, 'mode': player_mode, 'icon': icon, 'fanart': fanarts, 'folder': False })
        except Exception as e:
            log_utils.log('Error adding menu item. %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst, stopend=True, isVideo=True, isDownloadable=True)
    else:
        if (not searched):
            kodi.notify(msg='No Content Found')
            quit()
    if searched:
        return str(len(r))
    if not searched:
        search_pattern = '''href=['"]([^'"]+)['"]\s+title="Next\s+Page"'''
        parse = base_domain
        helper.scraper().get_next_page(content_mode, url, search_pattern, filename, parse)
def menu():
    """Build the category directory from /categories via BeautifulSoup."""
    lover.checkupdates()
    try:
        url = urljoin(base_domain, 'categories')
        c = client.request(url)
        soup = BeautifulSoup(c, 'html5lib')
        content = soup.find('div', class_={'ID-list-category'})
        if (not content):
            log_utils.log('Scraping Error in %s:: Content of request: %s' % (base_name.title(), str(c)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File', duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log('Fatal Error in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for a in content.find_all('a'):
        try:
            title = a.span.text.title()
            url = a['href']
            if not base_domain in url:
                url = base_domain + url
            icon = translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/icon.png' % filename))
            fanarts = translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({ 'name': title, 'url': url, 'mode': content_mode, 'icon': icon, 'fanart': fanarts, 'folder': True })
        except Exception as e:
            # Fix: the old handler logged ``i[1].title()`` but no ``i``
            # exists in this function (the loop variable is ``a``), so the
            # handler itself raised NameError and aborted the menu build.
            log_utils.log('Error adding menu item in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def menu():
    """Build the top-level directory from the <h1> links on /videos."""
    scraper_updater.check(filename)
    try:
        c = client.request(urlparse.urljoin(base_domain, 'videos'))
        headings = dom_parser2.parse_dom(c, 'h1')
        anchors = [dom_parser2.parse_dom(h, 'a', req='href') for h in headings if h]
        entries = [(urlparse.urljoin(base_domain, a[0].attrs['href']), a[0].content) for a in anchors if a]
        if (not entries):
            log_utils.log('Scraping Error in %s:: Content of request: %s' % (base_name.title(), str(c)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File', duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log('Fatal Error in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for link, label in entries:
        try:
            name = kodi.sortX(label.encode('utf-8')).title()
            icon = xbmc.translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/icon.png' % filename))
            fanarts = xbmc.translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({ 'name': name, 'url': link, 'mode': content_mode, 'icon': icon, 'fanart': fanarts, 'folder': True })
        except Exception as e:
            log_utils.log('Error adding menu item %s in %s:: Error: %s' % (label.title(), base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def download_subs(url):
    """Download a subtitle file into the subtitles folder; return its path ('' on error)."""
    try:
        payload = client.request(url)
        target = os.path.join(subtitles_path, url.split('/')[-1])
        target = xbmc.translatePath(target)
        with open(target, 'w') as out:
            out.write(payload)
        return target
    except:
        return ''
def girlfriendvideos(self, url):
    """Return the absolute video URL scraped from a girlfriendvideos page."""
    try:
        page = client.request(url).replace('\\', '')
        path = re.findall(r"""<video src="([^"]+)""", page)[0]
        return 'http://www.girlfriendvideos.com' + path
    except:
        return
def getTVShowTranslation(self, thetvdb, lang):
    """Fetch the localized SeriesName for *thetvdb* from TheTVDB (utf-8 str, or None)."""
    try:
        # The base64 literal is the TheTVDB API key.
        url = 'http://thetvdb.com/api/%s/series/%s/%s.xml' % ('MUQ2MkYyRjkwMDMwQzQ0NA=='.decode('base64'), thetvdb, lang)
        xml = client.request(url)
        name = client.parseDOM(xml, 'SeriesName')[0]
        name = client.replaceHTMLCodes(name)
        return name.encode('utf-8')
    except:
        pass
def resolve(url):
    """Resolve an allvid embed: rebuild the mp4 URL from the thumb host and token.

    Returns the direct video URL, or None on failure (consistent with the
    other resolvers in this file).  The dead ``pass #print`` debug lines
    of the original are removed.
    """
    try:
        page = client.request(url)
        # Host prefix comes from the preview-image URL.
        host = re.compile('<img src="(.*?)/i/').findall(page)[0]
        # Token sits between |mp4| markers in the packed JS keyword table.
        token = re.compile('\|mp4\|(.*?)\|').findall(page)[0]
        return host + "/" + token + "/v.mp4"
    except:
        return
def resolve(url):
    """Resolve a filepup link to its direct mp4 URL (None on failure).

    The original had its try/except commented out and littered debug
    prints; error handling is restored to match the sibling resolvers.
    """
    try:
        file_id = re.compile('//.+?/.+?/([\w]+)').findall(url)[0]
        page = client.request('http://www.filepup.net/play/%s' % file_id)
        return re.compile('"video/mp4".*?"(.*?)"').findall(page)[0]
    except:
        return
def main():
    """Print a transaction's vsize and fee (mBTC and sat/vB) via Electrum RPC."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--host', default='localhost')
    parser.add_argument("txid")
    args = parser.parse_args()
    conn = client.Client((args.host, 50001))
    tx, = conn.call([client.request("blockchain.transaction.get", args.txid, True)])
    # Fetch every spent previous transaction in one batched call.
    requests = [client.request("blockchain.transaction.get", vin["txid"], True) for vin in tx["vin"]]
    fee = 0
    for vin, prev_tx in zip(tx["vin"], conn.call(requests)):
        fee += prev_tx["vout"][vin["vout"]]["value"]
    # Fee = sum(inputs) - sum(outputs).
    fee -= sum(vout["value"] for vout in tx["vout"])
    print(f'vSize = {tx["vsize"]}, Fee = {1e3 * fee:.2f} mBTC = {1e8 * fee / tx["vsize"]:.2f} sat/vB')
def resolve(url):
    # Thanks to Lambda for the resolver :)
    try:
        # Resolve an openload file through the dlticket API (captcha-aware).
        if check(url) == False:
            return
        file_id = re.compile('//.+?/(?:embed|f)/([0-9a-zA-Z-_]+)').findall(url)[0]
        ticket_url = 'https://api.openload.io/1/file/dlticket?file=%s' % file_id
        data = json.loads(client.request(ticket_url))
        cap = data['result']['captcha_url']
        if not cap == None:
            cap = captcha.keyboard(cap)
        # The API enforces a minimum wait before the ticket is redeemable.
        time.sleep(data['result']['wait_time'])
        dl_url = 'https://api.openload.io/1/file/dl?file=%s&ticket=%s' % (file_id, data['result']['ticket'])
        if not cap == None:
            dl_url += '&captcha_response=%s' % urllib.quote(cap)
        data = json.loads(client.request(dl_url))
        return data['result']['url'] + '?mime=true'
    except:
        return
def menu():
    """Build the category directory from the '<div class="small item">' tiles."""
    try:
        url = urlparse.urljoin(base_domain, 'categories')
        c = client.request(url)
        r = re.findall('<div class="small item">(.*?)</div>', c, flags=re.DOTALL)
        if (not r):
            log_utils.log('Scraping Error in %s:: Content of request: %s' % (base_name.title(), str(c)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File', duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log('Fatal Error in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for i in r:
        try:
            name = re.findall('<h3 class="h4">(.*?)</h3>', i, flags=re.DOTALL)[0]
            url = re.findall('<a href="(.*?)"', i, flags=re.DOTALL)[0]
            icon = re.findall('<img src="(.*?)"', i, flags=re.DOTALL)[0]
            desc = re.findall('<p>(.*?)</p>', i, flags=re.DOTALL)[0]
            fanarts = xbmc.translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({ 'name': name, 'url': url, 'mode': content_mode, 'icon': icon, 'fanart': fanarts, 'description': desc, 'folder': True })
        except Exception as e:
            # Fix: the old handler logged ``i[1].title()`` — the second
            # *character* of the raw HTML snippet — which was meaningless.
            log_utils.log('Error adding menu item in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def request(url, check, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30'):
    """Fetch *url* and verify the body contains *check*; retry via proxies on failure.

    The direct response is accepted when *error* was requested, or when the
    body contains *check* (or is empty).  Otherwise up to three randomly
    chosen proxy prefixes from get() are tried.  Returns the first
    acceptable response, else None.
    """
    try:
        r = client.request(url, close=close, redirect=redirect, proxy=proxy, post=post, headers=headers, mobile=mobile, XHR=XHR, limit=limit, referer=referer, cookie=cookie, compression=compression, output=output, timeout=timeout)
        if r is not None and error is not False:
            return r
        if check in str(r) or str(r) == '':
            return r
        # NOTE(review): sorted by a random key twice — a single shuffle
        # would suffice.
        proxies = sorted(get(), key=lambda x: random.random())
        proxies = sorted(proxies, key=lambda x: random.random())
        proxies = proxies[:3]
        for p in proxies:
            # Proxy entries are URL prefixes; the quoted target (and post
            # body, when given) is appended.
            p += urllib.quote_plus(url)
            if post is not None:
                if isinstance(post, dict):
                    post = utils.byteify(post)
                    post = urllib.urlencode(post)
                p += urllib.quote_plus('?%s' % post)
            r = client.request(p, close=close, redirect=redirect, proxy=proxy, headers=headers, mobile=mobile, XHR=XHR, limit=limit, referer=referer, cookie=cookie, compression=compression, output=output, timeout='20')
            if check in str(r) or str(r) == '':
                return r
    except:
        pass
def menu():
    """Build the category directory from the categories grid (link/icon/name triples)."""
    lover.checkupdates()
    try:
        url = urlparse.urljoin(base_domain, 'categories')
        c = client.request(url)
        r = re.findall('<div class="categories_list p**n-categories sixteen-column action">(.*?)id="countryFlags">', c, flags=re.DOTALL)[0]
        pattern = '''<a href="(.*?)".+?data-original="(.*?)".+?<p>(.*?)</p>'''
        matches = re.findall(pattern, r, flags=re.DOTALL)
        if (not r):
            log_utils.log('Scraping Error in %s:: Content of request: %s' % (base_name.title(), str(c)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File', duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log('Fatal Error in %s:: Error: %s' % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for link, icon, name in matches:
        try:
            if not base_domain in link:
                link = base_domain + link
            fanarts = xbmc.translatePath(os.path.join('special://home/addons/script.xxxodus.artwork', 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({ 'name': name, 'url': link, 'mode': content_mode, 'icon': icon, 'fanart': fanarts, 'folder': True })
        except Exception as e:
            # Fix: the old handler referenced an undefined ``i`` (the loop
            # unpacks link/icon/name), so it raised NameError itself.
            log_utils.log('Error adding menu item %s in %s:: Error: %s' % (name.title(), base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def pornhd(self, url):
    """Let the user pick a pornhd quality variant and play it.

    Scrapes <source> src/label pairs, shows a selection dialog, then plays
    the chosen URL.  Quits the plugin when nothing is selected.
    """
    page = client.request(url)
    pattern = r'''<source\s+src=['"]([^'"]+).*?label=['"](.*?)['"]'''
    variants = re.findall(pattern, page)
    names = []
    srcs = []
    xbmc.executebuiltin("Dialog.Close(busydialog)")
    for src, quality in sorted(variants, reverse=False):
        names.append(kodi.giveColor(quality, 'white', True))
        srcs.append(src)
    selected = kodi.dialog.select('Select a link.', names)
    if selected < 0:
        kodi.notify(msg='No option selected.')
        kodi.idle()
        quit()
    else:
        url2 = srcs[selected]
        # Fix: removed leftover debug code — an extra fetch of url2 whose
        # body was only shown in a blocking dialog.ok() dump, plus a
        # commented-out hard-coded URL.
        xbmc.Player().play(url2)
def resolve(url):
    """Resolve a streamin.to page or embed URL to a direct media URL.

    Returns the last matching ``file: "..."`` URL from the mobile embed
    page, or None if anything goes wrong (best-effort resolver).
    """
    try:
        page = url.replace('/embed-', '/')
        media_id = re.compile('//.+?/([\w]+)').findall(page)[0]
        embed_url = 'http://streamin.to/embed-%s.html' % media_id
        html = client.request(embed_url, mobile=True)
        return re.compile("file *: *[\'|\"](http.+?)[\'|\"]").findall(html)[-1]
    except:
        return
def viewDialog(url):
    """Load text from a URL (via client.request) or a local file path and
    display it in the addon's TextBox modal window.

    Stores the text in the module-global ``msg_text`` that TextBox reads.
    """
    global msg_text
    if url.startswith('http'):
        msg_text = client.request(url)
    else:
        with open(url, mode='r') as handle:
            msg_text = handle.read()
    # Imported for its side effects; the TextWindow call itself is unused.
    from resources.lib.pyxbmct_.github import xxxtext
    window = TextBox('XXX-O-DUS')
    window.doModal()
    del window
def content(url, searched=False):
    """List the videos found on ``url``.

    When ``searched`` is True, suppress the fatal-error UI and return the
    number of results as a string; otherwise build the directory and queue
    the next-page item.
    """
    r = None
    try:
        c = client.request(url)
        soup = BeautifulSoup(c, 'html5lib')
        # BUG FIX: the original wrapped the first find() in try/except to fall
        # back to the div container, but find() returns None instead of
        # raising, so the fallback branch was unreachable.  Check for None
        # explicitly.
        r = soup.find('section', class_={'video-list'})
        if r is None:
            r = soup.find('div', class_={'video-list__wrapper'})
    except Exception as e:
        if (not searched):
            log_utils.log(
                'Fatal Error in %s:: Error: %s'
                % (base_name.title(), str(e)), log_utils.LOGERROR)
            kodi.notify(msg='Fatal Error', duration=4000, sound=True)
            quit()
    # BUG FIX: when a searched request failed (or no container was found),
    # ``r`` was undefined/None and the find_all below raised.
    if r is None:
        if searched:
            return str(0)
        kodi.notify(msg='No Content Found')
        quit()
    items = r.find_all('li', class_={'video-list__item'})
    dirlst = []
    for data in items:
        try:
            name = data.p.text
            url2 = data.a['href']
            icon = data.img['data-src']
            if not base_domain in url2:
                url2 = base_domain + url2
            fanarts = xbmc.translatePath(
                os.path.join('special://home/addons/script.xxxodus.artwork',
                             'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({
                'name': name, 'url': url2, 'mode': player_mode,
                'icon': icon, 'fanart': fanarts, 'folder': False
            })
        except Exception as e:
            log_utils.log(
                'Error adding menu item. %s:: Error: %s'
                % (base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst, stopend=True, isVideo=True, isDownloadable=True)
    else:
        if (not searched):
            kodi.notify(msg='No Content Found')
            quit()
    if searched:
        # BUG FIX: len() of the container Tag counted every child node
        # (including whitespace text nodes); report the number of actual
        # video items instead.
        return str(len(items))
    if not searched:
        search_pattern = '''href=['"]([^'"]+)['"]\s*rel="next"'''
        parse = base_domain
        helper.scraper().get_next_page(content_mode, url, search_pattern, filename, parse)
def menu():
    """List the videos from the site's front page and queue the next page."""
    lover.checkupdates()
    try:
        url = base_domain
        html = client.request(url)
        blocks = re.findall('<div class="item-block item-normal col" >(.+?)</div>',
                            html, flags=re.DOTALL)
    except Exception as e:
        log_utils.log(
            'Fatal Error in %s:: Error: %s'
            % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    searched = False
    for block in blocks:
        try:
            link = re.findall('<a href="(.*?)"', block, flags=re.DOTALL)[0]
            name = re.findall('title="(.*?)"', block, flags=re.DOTALL)[0]
            icon = re.findall('<img src="(.*?)"', block, flags=re.DOTALL)[0]
            if not base_domain in link:
                link = base_domain + link
            fanarts = xbmc.translatePath(
                os.path.join('special://home/addons/script.xxxodus.artwork',
                             'resources/art/%s/fanart.jpg' % base_name))
            dirlst.append({'name': name, 'url': link, 'mode': player_mode,
                           'icon': icon, 'fanart': fanarts, 'folder': False})
        except Exception as e:
            log_utils.log(
                'Error adding menu item. %s:: Error: %s'
                % (base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst, stopend=True, isVideo=True, isDownloadable=True)
    else:
        if not searched:
            kodi.notify(msg='No Content Found')
            quit()
    if not searched:
        search_pattern = '''<a\s+href=['"]([^'"]+)['"]\s+class=['"]next['"]'''
        parse = base_domain
        helper.scraper().get_next_page(content_mode, url, search_pattern, filename, parse)
def perfectgirls(self, url):
    """Return (label, src) pairs for the page's video sources, sorted so the
    highest numeric resolution in the label comes first, or None on failure.
    """
    try:
        html = client.request(url)
        pattern = r'''source\s*src=\"([^"]+)\"\s*res=\"\d+\"\s*label="([^"]+)"'''
        pairs = [(label, src) for src, label in re.findall(pattern, html)]
        return sorted(pairs,
                      key=lambda item: int(re.search('(\d+)', item[0]).group(1)),
                      reverse=True)
    except:
        return
def menu():
    """Build the category directory from the site's /categories page."""
    lover.checkupdates()
    try:
        page = client.request(urlparse.urljoin(base_domain, 'categories'))
        block = re.findall('<ul class="allcats page clearfix">(.*?)</ul>',
                           page, flags=re.DOTALL)[0]
        cats = re.findall(r""".+?<a\s*href=['"]([^'"]+).+?>([^'"]+)<""",
                          block, flags=re.DOTALL)
        if not cats:
            log_utils.log(
                'Scraping Error in %s:: Content of request: %s'
                % (base_name.title(), str(page)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File',
                        duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log(
            'Fatal Error in %s:: Error: %s'
            % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    art_base = 'special://home/addons/script.xxxodus.artwork'
    for link, label in cats:
        try:
            if not base_domain in link:
                link = base_domain + link
            icon = xbmc.translatePath(
                os.path.join(art_base, 'resources/art/%s/icon.png' % filename))
            fanarts = xbmc.translatePath(
                os.path.join(art_base, 'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({'name': label, 'url': link, 'mode': content_mode,
                           'icon': icon, 'fanart': fanarts, 'folder': True})
        except:
            pass
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def resolve(url): # Thanks to Lambda for the resolver :)
    # Decode the host's JSFuck-style obfuscated player script and return the
    # direct media URL.  NOTE(review): ``s3.decode('unicode_escape')`` below
    # is a Python 2 str method; this block will not run unmodified on
    # Python 3 -- confirm the addon still targets Python 2 Kodi.
    #
    # Lookup table for the obfuscation: each key is an obfuscated accessor
    # token (used as ``O.<key>``) and the value is the digit or character it
    # decodes to.
    O = { '___': 0, '$$$$': "f", '__$': 1, '$_$_': "a", '_$_': 2, '$_$$': "b", '$$_$': "d", '_$$': 3, '$$$_': "e", '$__': 4, '$_$': 5, '$$__': "c", '$$_': 6, '$$$': 7, '$___': 8, '$__$': 9, '$_': "constructor", '$$': "return", '_$': "o", '_': "u", '__': "t", }
    url = url.replace('/f/', '/embed/')
    import client, jsunpack
    result = client.request(url)
    # Grab and unpack the p.a.c.k.e.d ``eval(function...)`` payload.
    result = re.search('>\s*(eval\(function.*?)</script>', result, re.DOTALL).group(1)
    result = jsunpack.unpack(result)
    result = result.replace('\\\\', '\\')
    # Isolate the ``O=...`` obfuscated program and the expression it invokes.
    result = re.search('(O=.*?)(?:$|</script>)', result, re.DOTALL).group(1)
    result = re.search('O\.\$\(O\.\$\((.*?)\)\(\)\)\(\);', result)
    s1 = result.group(1)
    s1 = s1.replace(' ', '')
    s1 = s1.replace('(![]+"")', 'false')  # JSFuck idiom for the literal "false"
    s3 = ''
    # Walk the '+'-joined token stream, translating each token via O.
    for s2 in s1.split('+'):
        if s2.startswith('O.'):
            # Direct accessor: O.<key> decodes via the lookup table.
            s3 += str(O[s2[2:]])
        elif '[' in s2 and ']' in s2:
            # Indexed form: the bracketed key selects a character position.
            key = s2[s2.find('[') + 3:-1]
            s3 += s2[O[key]]
        else:
            # Plain quoted literal: strip the surrounding quote characters.
            s3 += s2[1:-1]
    # Undo the JavaScript escaping to recover the embedded HTML fragment.
    s3 = s3.replace('\\\\', '\\')
    s3 = s3.decode('unicode_escape')
    s3 = s3.replace('\\/', '/')
    s3 = s3.replace('\\\\"', '"')
    s3 = s3.replace('\\"', '"')
    # The recovered fragment contains the real <source src="..."> tag.
    url = re.search('<source\s+src="([^"]+)', s3).group(1)
    return url
def menu():
    """Build the category directory from /categories?o=al via BeautifulSoup."""
    lover.checkupdates()
    try:
        html = client.request(urlparse.urljoin(base_domain, 'categories?o=al'))
        soup = BeautifulSoup(html, 'html.parser')
        cats = soup.find_all('div', class_={'category-wrapper'})
        if not cats:
            log_utils.log(
                'Scraping Error in %s:: Content of request: %s'
                % (base_name.title(), str(html)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File',
                        duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log(
            'Fatal Error in %s:: Error: %s'
            % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for cat in cats:
        try:
            name = cat.a['data-mxptext']
            icon = cat.img['data-thumb_url']
            link = cat.a['href']
            if not base_domain in link:
                link = base_domain + link
            fanarts = xbmc.translatePath(
                os.path.join('special://home/addons/script.xxxodus.artwork',
                             'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({'name': name, 'url': link, 'mode': content_mode,
                           'icon': icon, 'fanart': fanarts, 'folder': True})
        except Exception as e:
            log_utils.log(
                'Error adding menu item %s in %s:: Error: %s'
                % (name.title(), base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def menu():
    """Build the category directory using dom_parser2.

    Extracts (href, name, thumbnail) per '.item' div; parentheses in names
    are rewritten to square brackets for display.
    """
    try:
        html = client.request(urlparse.urljoin(base_domain, 'categories'))
        items = dom_parser2.parse_dom(html, 'div', {'class': 'item'})
        triples = [(dom_parser2.parse_dom(i, 'a', req='href'),
                    dom_parser2.parse_dom(i, 'strong'),
                    dom_parser2.parse_dom(i, 'img', req='data-src'))
                   for i in items]
        r = [(t[0][0].attrs['href'],
              t[1][0].content.replace('(', '[ ').replace(')', ' ]'),
              t[2][0].attrs['data-src'])
             for t in triples if t]
        if not r:
            log_utils.log(
                'Scraping Error in %s:: Content of request: %s'
                % (base_name.title(), str(html)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File',
                        duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log(
            'Fatal Error in %s:: Error: %s'
            % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    for link, label, thumb in r:
        try:
            name = kodi.sortX(label.encode('utf-8'))
            fanarts = xbmc.translatePath(
                os.path.join('special://home/addons/script.wankbank.artwork',
                             'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({'name': name, 'url': link, 'mode': content_mode,
                           'icon': thumb, 'fanart': fanarts, 'folder': True})
        except Exception as e:
            log_utils.log(
                'Error adding menu item %s in %s:: Error: %s'
                % (label.title(), base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()
def watchmygf(self, url):
    """Decrypt the page's obfuscated video_url with fundec, follow the
    resulting redirect with a browser User-Agent, and start playback of the
    final stream URL.
    """
    ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
    page = client.request(url)
    play = re.findall('''video_url.+?['"](.*?)['"]''', page, flags=re.DOTALL)[0]
    rnd = re.findall(r'''rnd:\s+['"](.*?)['"]''', page, flags=re.DOTALL)[0]
    licence = re.findall(r'''license_code:\s+['"](.*?)['"]''', page, flags=re.DOTALL)[0]
    from resources.lib.modules import fundec
    decrypted = fundec.decryptHash(play, licence, 16)
    follow = ('%s?rnd=%s' % (decrypted, rnd))
    # Follow the hash-validated URL; the CDN redirects to the playable file.
    response = requests.get(follow, headers={'User-Agent': ua}, stream=True)
    xbmc.Player().play(response.url)
def menu():
    """Build the channel directory for this site.

    Merges professional channels (from /video) with amateur channels (from
    /amateur/videos/), tags each name accordingly, sorts by name, and hands
    the result to buildDirectory().  Calls quit() on fatal errors.
    """
    lover.checkupdates()
    try:
        url = urlparse.urljoin(base_domain, 'video')
        c = client.request(url)
        r = dom_parser2.parse_dom(c, 'a', {'class': 'sidebar_section_item'})
        r = [i for i in r if 'channels' in i.attrs['href']]
        r = [(urlparse.urljoin(base_domain, i.attrs['href']),
              i.content + ' - [ Professional ]') for i in r]
        url = urlparse.urljoin(base_domain, 'amateur/videos/')
        c = client.request(url)
        e = dom_parser2.parse_dom(c, 'a', {'class': 'sidebar_section_item'})
        e = [i for i in e if 'channels' in i.attrs['href']]
        r += [(urlparse.urljoin(base_domain, i.attrs['href']),
               i.content + ' - [ Amateur ]') for i in e]
        r = sorted(r, key=lambda x: x[1])
        if (not r):
            log_utils.log(
                'Scraping Error in %s:: Content of request: %s'
                % (base_name.title(), str(c)), log_utils.LOGERROR)
            kodi.notify(msg='Scraping Error: Info Added To Log File',
                        duration=6000, sound=True)
            quit()
    except Exception as e:
        log_utils.log(
            'Fatal Error in %s:: Error: %s'
            % (base_name.title(), str(e)), log_utils.LOGERROR)
        kodi.notify(msg='Fatal Error', duration=4000, sound=True)
        quit()
    dirlst = []
    # FIX: dropped the unused ``urls = []`` local that was never read.
    for i in r:
        try:
            name = i[1]
            icon = xbmc.translatePath(
                os.path.join('special://home/addons/script.xxxodus.artwork',
                             'resources/art/%s/icon.png' % filename))
            fanarts = xbmc.translatePath(
                os.path.join('special://home/addons/script.xxxodus.artwork',
                             'resources/art/%s/fanart.jpg' % filename))
            dirlst.append({'name': name, 'url': i[0], 'mode': content_mode,
                           'icon': icon, 'fanart': fanarts, 'folder': True})
        except Exception as e:
            log_utils.log(
                'Error adding menu item %s in %s:: Error: %s'
                % (i[1].title(), base_name.title(), str(e)), log_utils.LOGERROR)
    if dirlst:
        buildDirectory(dirlst)
    else:
        kodi.notify(msg='No Menu Items Found')
        quit()