def find_link(url, html=''):
    """Resolve a playable link for *url*.

    Fetches the page (unless pre-fetched *html* is supplied) and runs every
    finder* function in this module until one returns a result.

    Returns the resolved link string, or None when nothing matches.
    """
    log('Finding in : %s' % url)
    # Prefer an explicit referer smuggled in the query string; otherwise
    # fall back to the page's own host.
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except Exception:
        referer = 'http://' + urlparse.urlparse(url).netloc
    host = urlparse.urlparse(url).netloc
    headers = {
        'Referer': referer,
        'Host': host,
        'User-Agent': client.agent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    if html == '':
        url = manual_url_fix(url)
        html = client.request(url, headers=headers)
        html = manual_html_fix(url, html, headers)
    ref = url
    # Dispatch through the module namespace instead of eval() on a built
    # string -- same lookup and call, no code-string evaluation.  The
    # unreachable `break` after `return` in the original was dropped.
    for name in list(globals()):
        if 'finder' in name:
            resolved = globals()[name](html, ref)
            if resolved:
                log('Resolved with %s: %s' % (name, resolved))
                return resolved
    return
def find_link(url, html=''):
    """Resolve a playable link for *url*, counting recursion depth.

    Same flow as the sibling find_link, but bumps the module-level `limit`
    counter on every call (used elsewhere to cap recursive resolution).

    Returns the resolved link string, or None when nothing matches.
    """
    global limit
    limit += 1
    log('Finding in : %s' % url)
    # Prefer an explicit referer smuggled in the query string; otherwise
    # fall back to the page's own host.
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except Exception:
        referer = 'http://' + urlparse.urlparse(url).netloc
    host = urlparse.urlparse(url).netloc
    headers = {
        'Referer': referer,
        'Host': host,
        'User-Agent': client.agent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    if html == '':
        url = manual_url_fix(url)
        html = client.request(url, headers=headers)
        html = manual_html_fix(url, html, headers)
    ref = url
    # Dispatch through the module namespace instead of eval() on a built
    # string -- same lookup and call, no code-string evaluation.  The
    # unreachable `break` after `return` in the original was dropped.
    for name in list(globals()):
        if 'finder' in name:
            resolved = globals()[name](html, ref)
            if resolved:
                log('Resolved with %s: %s' % (name, resolved))
                return resolved
    return
def find_link(url, html=''):
    """Resolve a playable link for *url* (requests-session variant).

    Fetches the page (unless pre-fetched *html* is supplied), re-fetching
    through a `requests` session for a few hosts that need one, then runs
    every finder* function in this module until one returns a result.

    Returns the resolved link string, or None when nothing matches.
    """
    log('Finding in : %s' % url)
    # Prefer an explicit referer smuggled in the query string; otherwise
    # fall back to the page's own host.
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except Exception:
        referer = 'http://' + urlparse.urlparse(url).netloc
    host = urlparse.urlparse(url).netloc
    headers = {
        'Referer': referer,
        'Host': host,
        'User-Agent': client.agent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    if html == '' or html is None:
        url = manual_url_fix(url)
        html = client.request(url, headers=headers)
        # NOTE(review): `and` binds tighter than `or`, so this triggers for
        # any livetvcdn/shadow URL, but for a blog URL only when it lacks
        # 'goto/'.  Kept as-is -- confirm the grouping is intentional.
        if 'livetvcdn' in url or 'shadow' in url or 'blog' in url and 'goto/' not in url:
            import requests
            s = requests.Session()
            r = s.get(url, headers=headers)
            html = r.text
    ref = url
    # Dispatch through the module namespace instead of eval() on a built
    # string -- same lookup and call, no code-string evaluation.  The
    # unreachable `break` after `return` in the original was dropped.
    for name in list(globals()):
        if 'finder' in name:
            resolved = globals()[name](html, ref)
            if resolved:
                log('Resolved with %s: %s' % (name, resolved))
                return resolved
    return
def find_link(url, html=''):
    """Resolve a playable link for *url* (requests-session variant).

    Fetches the page (unless pre-fetched *html* is supplied), re-fetching
    through a `requests` session for a few hosts that need one, then runs
    every finder* function in this module until one returns a result.

    Returns the resolved link string, or None when nothing matches.
    """
    log('Finding in : %s' % url)
    # Prefer an explicit referer smuggled in the query string; otherwise
    # fall back to the page's own host.
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except Exception:
        referer = 'http://' + urlparse.urlparse(url).netloc
    host = urlparse.urlparse(url).netloc
    headers = {
        'Referer': referer,
        'Host': host,
        'User-Agent': client.agent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    if html == '' or html is None:
        url = manual_url_fix(url)
        html = client.request(url, headers=headers)
        # NOTE(review): `and` binds tighter than `or`, so this triggers for
        # any livetvcdn/shadow URL, but for a blog URL only when it lacks
        # 'goto/'.  Kept as-is -- confirm the grouping is intentional.
        if 'livetvcdn' in url or 'shadow' in url or 'blog' in url and 'goto/' not in url:
            import requests
            s = requests.Session()
            r = s.get(url, headers=headers)
            html = r.text
    ref = url
    # Dispatch through the module namespace instead of eval() on a built
    # string -- same lookup and call, no code-string evaluation.  The
    # unreachable `break` after `return` in the original was dropped.
    for name in list(globals()):
        if 'finder' in name:
            resolved = globals()[name](html, ref)
            if resolved:
                log('Resolved with %s: %s' % (name, resolved))
                return resolved
    return
def finder30(html, url):
    """Follow a 'click here...' redirect page and resolve its target.

    Re-fetches *url* with its own host as referer, pulls the anchor that
    precedes the 'click here...' text, and recurses into find_link.

    Returns the resolved link, or None on any failure.
    """
    try:
        html = client.request(url, referer=urlparse.urlparse(url).netloc)
        # Raw pattern with the trailing dots escaped: the original
        # 'click here...' treated them as match-anything wildcards.
        url = re.findall(r'href="(.+?)">click here\.\.\.', html)[0]
        # NOTE(review): 'rojedirecta' looks like a typo for 'rojadirecta',
        # but the value may be a deliberate mirror domain -- left untouched.
        resolved = find_link(url + '&referer=http://rojedirecta.me')
        return resolved
    except Exception:
        return
def finder30(html, url):
    """Follow a 'click here...' redirect page and resolve its target.

    Re-fetches *url* with its own host as referer, pulls the anchor that
    precedes the 'click here...' text, and recurses into find_link.

    Returns the resolved link, or None on any failure.
    """
    try:
        html = client.request(url, referer=urlparse.urlparse(url).netloc)
        # Raw pattern with the trailing dots escaped: the original
        # 'click here...' treated them as match-anything wildcards.
        url = re.findall(r'href="(.+?)">click here\.\.\.', html)[0]
        # NOTE(review): 'rojedirecta' looks like a typo for 'rojadirecta',
        # but the value may be a deliberate mirror domain -- left untouched.
        resolved = find_link(url + '&referer=http://rojedirecta.me')
        return resolved
    except Exception:
        return
def finder77(html, url):
    """Percent-decode the page, delegate to finder4, and verify the result.

    Returns the candidate link only when a request to it succeeds;
    otherwise None.
    """
    try:
        html = urllib.unquote(html)
        url = finder4(html, url)
        # Identity check against None (was `!= None`); only hand the link
        # back if it actually responds.
        if client.request(url) is not None:
            return url
        return
    except Exception:
        return
def finder77(html, url):
    """Percent-decode the page, delegate to finder4, and verify the result.

    Returns the candidate link only when a request to it succeeds;
    otherwise None.
    """
    try:
        html = urllib.unquote(html)
        url = finder4(html, url)
        # Identity check against None (was `!= None`); only hand the link
        # back if it actually responds.
        if client.request(url) is not None:
            return url
        return
    except Exception:
        return
def finder117(html, ref):
    """Resolve a zunox.hk proxy player link.

    Fetches the player's proxy.php JSON descriptor and builds an .flv URL
    with playback headers appended after a '|' separator.

    Returns the assembled URL, or None when *ref* is not a zunox page.
    """
    # Guard clause: only zunox referers are handled here.
    if 'zunox' not in ref:
        return
    # Raw regex with the dot escaped (the original 'proxy.php' dot was a
    # wildcard); behavior on real pages is unchanged.
    url = 'http://zunox.hk/players/' + re.findall(r'(proxy\.php\?id=[^"\']+)', html)[0]
    h2 = client.request(url)
    import json
    j = json.loads(h2)
    # split(':')[0] already strips any port, so the original trailing
    # .replace(':80','') was dead code.
    host = urlparse.urlparse(j['url']).netloc.split(':')[0]
    url = j['url'].replace(':80', '') + '.flv' + '|%s' % urllib.urlencode({
        'User-agent': client.agent(),
        'X-Requested-With': constants.get_shockwave(),
        'Referer': ref,
        'Host': host,
        'Connection': 'keep-alive',
        # Fixed misspelled header key (was 'Accept-Encodeing', which no
        # server would ever honor).
        'Accept-Encoding': 'gzip, deflate, lzma, sdch'
    })
    return url
def find_link(url):
    """Resolve a playable link for *url* (entity/percent-decoding variant).

    Unescapes both the URL and the fetched page (HTML entities and percent
    escapes, best-effort) before trying every finder* function.

    Returns the resolved link string, or None when nothing matches.
    """
    # Prefer an explicit referer smuggled in the query string; otherwise
    # fall back to the page's own host.
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except Exception:
        referer = 'http://' + urlparse.urlparse(url).netloc
    # Original had a duplicated `host = host = ...` assignment.
    host = urlparse.urlparse(url).netloc
    headers = {
        'Referer': referer,
        'Host': host,
        'User-Agent': client.agent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    # Best-effort HTML-entity unescape of the URL itself.
    try:
        import HTMLParser
        h = HTMLParser.HTMLParser()
        url = h.unescape(url)
    except Exception:
        pass
    html = client.request(url, headers=headers)
    # Best-effort percent- and entity-decoding of the page body.
    try:
        html = urllib.unquote(html)
    except Exception:
        pass
    try:
        import HTMLParser
        h = HTMLParser.HTMLParser()
        html = h.unescape(html)
    except Exception:
        pass
    if 'livetv.sx' in url:
        # livetv.sx is re-fetched through a cookie-carrying session.
        import requests
        s = requests.Session()
        s.headers.update(headers)
        html = s.get(url).text
    if '@3C' in html:
        # Some pages obfuscate percent escapes as '@XX'; restore and decode.
        html = html.replace('@', '%')
        html = urllib.unquote(html)
    ref = url
    # Dispatch through the module namespace instead of eval() on a built
    # string; the unreachable `break` after `return` was dropped.
    for name in list(globals()):
        if 'finder' in name:
            resolved = globals()[name](html, ref)
            if resolved:
                return resolved
    return
def find_link(url):
    """Resolve a playable link for *url* (entity/percent-decoding variant).

    Unescapes both the URL and the fetched page (HTML entities and percent
    escapes, best-effort) before trying every finder* function.

    Returns the resolved link string, or None when nothing matches.
    """
    # Prefer an explicit referer smuggled in the query string; otherwise
    # fall back to the page's own host.
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except Exception:
        referer = 'http://' + urlparse.urlparse(url).netloc
    # Original had a duplicated `host = host = ...` assignment.
    host = urlparse.urlparse(url).netloc
    headers = {
        'Referer': referer,
        'Host': host,
        'User-Agent': client.agent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    # Best-effort HTML-entity unescape of the URL itself.
    try:
        import HTMLParser
        h = HTMLParser.HTMLParser()
        url = h.unescape(url)
    except Exception:
        pass
    html = client.request(url, headers=headers)
    # Best-effort percent- and entity-decoding of the page body.
    try:
        html = urllib.unquote(html)
    except Exception:
        pass
    try:
        import HTMLParser
        h = HTMLParser.HTMLParser()
        html = h.unescape(html)
    except Exception:
        pass
    if 'livetv.sx' in url:
        # livetv.sx is re-fetched through a cookie-carrying session.
        import requests
        s = requests.Session()
        s.headers.update(headers)
        html = s.get(url).text
    if '@3C' in html:
        # Some pages obfuscate percent escapes as '@XX'; restore and decode.
        html = html.replace('@', '%')
        html = urllib.unquote(html)
    ref = url
    # Dispatch through the module namespace instead of eval() on a built
    # string; the unreachable `break` after `return` was dropped.
    for name in list(globals()):
        if 'finder' in name:
            resolved = globals()[name](html, ref)
            if resolved:
                return resolved
    return
def __init__(self):
    """Store the zunox base URL and eagerly fetch its schedule frame."""
    self.base = 'http://zunox.hk'
    # Build the frame URL from self.base (was hard-coded twice); the
    # resulting URL is byte-identical to the original.
    self.html = client.request(self.base + '/scheduleframe.php')
def __init__(self):
    """Capture the goatd.net landing page at construction time."""
    base_url = 'http://goatd.net/'
    self.base = base_url
    self.html = client.request(base_url)