def request(url, resolverList=None):
    """Resolve a hoster *url* into a playable stream URL.

    Tries the addon's custom resolver classes first, then falls back to
    URLResolver 3.x. The resolved URL is probed with a lightweight HTTP
    request before being returned. Returns False when nothing resolves;
    raises (to the caller's broad except) when the probe fails.
    """
    u = url
    url = False
    # Custom Resolvers: match the link's host against registered resolver classes.
    try:
        host = client.host(u)
        r = [i['class'] for i in info() if host in i['host']][0]
        r = __import__(r, globals(), locals(), [], -1)
        url = r.resolve(u)
        if url == False:
            raise Exception()
    except:
        pass
    # URLResolvers 3.0.0 fallback — only attempted when the custom pass failed.
    try:
        if not url == False:
            raise Exception()
        logger.debug('Trying URL Resolver for %s' % u, __name__)
        hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True,
                                          include_universal=False)
        if hmf.valid_url() == True:
            url = hmf.resolve()
        else:
            url = False
    except:
        pass
    # FIX: previously an unresolved url (False/None) fell through to
    # url.rsplit / url.startswith and raised AttributeError; return cleanly.
    if url == False or url == None:
        return False
    # Optional per-URL headers are carried after a '|' separator.
    try:
        headers = url.rsplit('|', 1)[1]
    except:
        headers = ''
    headers = urllib.quote_plus(headers).replace('%3D', '=').replace(
        '%26', '&') if ' ' in headers else headers
    headers = dict(urlparse.parse_qsl(headers))
    if url.startswith('http') and '.m3u8' in url:
        # HLS: just follow redirects to confirm the playlist is reachable.
        result = client.request(url.split('|')[0], headers=headers,
                                output='geturl', timeout='20')
        if result == None: raise Exception()
    elif url.startswith('http'):
        # Plain HTTP: fetch a chunk to confirm the stream is playable.
        result = client.request(url.split('|')[0], headers=headers,
                                output='chunk', timeout='20')
        if result == None:
            logger.debug('Resolved %s but unable to play' % url, __name__)
            raise Exception()
    return url
def resolve(self, url, resolverList):
    """Run *url* through the resolver chain; return the playable URL or False."""
    logger.debug('ORIGINAL URL [%s]' % url, __name__)
    try:
        resolved = resolvers.request(url, resolverList)
        if not resolved:
            raise Exception()
        logger.debug('RESOLVED URL [%s]' % resolved, __name__)
        return resolved
    except:
        return False
def sources(self, url):
    """Scrape embed links from a BadtameezDil post page.

    Returns a list of source dicts ({'source', 'parts', 'quality',
    'provider', 'url', 'direct'}); an empty list on any failure.
    """
    # FIX: srcs was initialised inside the try, so an exception raised
    # before that line made `except: return srcs` a NameError.
    srcs = []
    try:
        logger.debug('SOURCES URL %s' % url, __name__)
        quality = 'HD'
        result = ''
        try:
            result = client.request(url)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')
        # Only the main post-video container is scanned for embeds.
        result = client.parseDOM(result, "div",
                                 attrs={"class": "single-post-video"})[0]
        items = re.compile(
            '(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(result)
        for item in items:
            if item[1].endswith('png'):
                continue  # skip poster/thumbnail images
            host = client.host(item[1])
            url = item[1]
            parts = [url]
            #parts = client.parseDOM(result, "script", ret="data-config")
            #for i in range(0, len(parts)):
            #    if parts[i].startswith('//'):
            #        parts[i]='http:%s'%parts[i]
            #host = client.host(parts[0])
            #if len(parts) > 1 :
            #    url = "##".join(parts)
            #else :
            #    url = parts[0]
            srcs.append({
                'source': host,
                'parts': len(parts),
                'quality': quality,
                'provider': 'BadtameezDil',
                'url': "##".join(parts),
                'direct': False
            })
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def getAuthToken(self):
    # Builds a signed wmsAuthSign token for the stream server.
    # All endpoints/keys are base64-obfuscated in-source. Returns the token
    # string with every 12th trailing character stripped, or None on error
    # (logged). Python 2 only: uses `print` statement and integer division.
    url = base64.b64decode(
        'aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC8lcy5waHA/d21zQXV0aFNpZ249')
    try:
        userAgent = self.getUserAgent()
        logger.debug('Final UserAgent : %s' % userAgent, __name__)
        # The remote user-agent string doubles as a config blob:
        # chars [:4] = endpoint filename, char [4] = validity minutes,
        # chars [:10] = part of the shared signing secret.
        filename = userAgent[:4]
        import datetime, hashlib
        timesegment = datetime.datetime.utcnow().strftime(
            "%m/%d/%Y %H:%M:%S")
        validtime = userAgent[4]
        headers = {
            'User-Agent':
            base64.b64decode(
                'UGFrJTIwVFYvMS4wIENGTmV0d29yay84MDguMi4xNiBEYXJ3aW4vMTYuMy4w'
            )
        }
        # Server-side IP echo; the client IP is part of the signature.
        ipstring = client.request(base64.b64decode(
            "aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC9pcF9jaGVjay5waHA="),
            headers=headers)
        ipadd = ipstring.split('Address: ')[1]
        # Signature input: ip + secret + ua-prefix + timestamp + validity.
        s = "%s%s%s%s" % (ipadd,
                          base64.b64decode("dHVtYmluamlhamF5bmFqYW5h") +
                          userAgent[:10], timesegment, validtime)
        dd = base64.b64decode(
            "c2VydmVyX3RpbWU9JXMmaGFzaF92YWx1ZT0lcyZ2YWxpZG1pbnV0ZXM9JXM="
        ) % (timesegment,
             base64.b64encode(hashlib.md5(s).hexdigest().lower()), validtime)
        url = (url % filename) + base64.b64encode(dd)
        headers = {
            'User-Agent':
            cache.get(self.getDeviceID, 600000, table='live_cache'),
            'Authorization':
            base64.b64decode(
                'QmFzaWMgWW05emMyZGliM056T21kdmIyUm5aMjl2WkE9PQ==')
        }
        res = client.request(url, headers=headers)
        # The server pads the token: drop every 12th char of the last 59.
        s = list(res)
        for i in range((len(s) - 59) / 12):
            ind = len(s) - 59 + (12 * (i))
            if ind < len(s):
                print ind
                s[ind] = ''
        return ''.join(s)
    except Exception as e:
        logger.error(e)
def sources(self, url):
    # Scrape iBollyTV sources: one optional <center> embed plus one source
    # per table row, fanned out to worker threads when 'hypermode' is on.
    # Results accumulate in self.srcs (filled by self.source per row).
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        quality = ''
        self.srcs = []
        if url == None:
            return self.srcs
        try:
            result = client.request(url)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = client.parseDOM(result, "div",
                                 attrs={"class": "col-md-12 mt20"})[0]
        # Primary embed inside a <center> tag, if present.
        try:
            item = client.parseDOM(result, "center")[0]
            url = re.compile(
                '(SRC|src|data-config)=\"(.+?)\"').findall(item)[0][1]
            host = client.host(url)
            self.srcs.append({'source': host, 'parts': '1',
                              'quality': quality, 'provider': 'iBollyTV',
                              'url': url, 'direct': False})
        except:
            pass
        # hypermode = parallel per-row scraping via worker threads.
        hypermode = False if control.setting('hypermode') == 'false' else True
        threads = []
        try:
            result = client.parseDOM(result, "div",
                                     attrs={"class": "table-responsive"})[0]
            result = client.parseDOM(result, "tbody")[0]
            result = client.parseDOM(result, "tr")
            for item in result:
                if hypermode:
                    threads.append(workers.Thread(self.source, item))
                else:
                    self.source(item)
            if hypermode:
                [i.start() for i in threads]
                # Busy-wait until every worker thread has finished.
                stillWorking = True
                while stillWorking:
                    stillWorking = False
                    stillWorking = [True for x in threads
                                    if x.is_alive() == True]
        except:
            pass
        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except:
        return self.srcs
def livetv(self):
    # Build (or reuse) the cached channel-list JSON for the 'ditto' live
    # provider, then parse it into self.srcs.
    # Returns (generateJSON, srcs); None on error (traceback printed).
    try:
        # cache.get returning truthy means the cached JSON has expired
        # (removeJSON ran); a missing file also forces regeneration.
        generateJSON = cache.get(self.removeJSON, 168, __name__,
                                 table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            url = self.live_link
            result = client.request(url, headers=self.headers)
            # (href, title, second <img src>) per channel tile.
            channels = re.findall(
                '<div class="subpattern.*?\s*<a href="(.*?)" title="(.*?)".*?\s*<img src=".*?".*?\s*<img src="(.*?)"',
                result)
            channelList = {}
            for url, title, icon in channels:
                title = title.replace("&", "And")
                #from ashock.modules import livemeta
                #names = cache.get(livemeta.source().getLiveNames, 200, table='live_cache')
                #title = cleantitle.live(title)
                #if title == 'SKIP':
                #    continue
                # Religious channels are deliberately filtered out.
                if 'temple' in title.lower():
                    continue
                url = self.channel_link % url
                channelList[title] = {
                    'icon': icon,
                    'url': url,
                    'provider': 'lditto',
                    'source': 'ditto',
                    'direct': False,
                    'quality': 'HD',
                    'content': 'live'
                }
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(channelList, outfile, sort_keys=True, indent=2)
        # Always (re)parse the on-disk JSON into source dicts.
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except:
        import traceback
        traceback.print_exc()
        pass
def resolve(url):
    """Pick the first mp4 entry out of the page's `sources: [...]` list."""
    try:
        page = client.request(url)
        entries = re.findall('sources: \[(.+?)\]', page)[0]
        entries = entries.split(',')
        # If no mp4 entry is found, the raw entry list falls through
        # unchanged (matching the historical behaviour).
        url = entries
        for entry in entries:
            cleaned = entry.replace('\"', '')
            if 'mp4' in cleaned:
                url = cleaned
                break
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        return False
def getSwiftAuthToken(self, postUrl, auth, stripping):
    # Fetch a fresh auth token from *postUrl* using the Swift user-agent
    # and the given Authorization header. When *stripping* is truthy the
    # server response is de-padded (every 12th char of the last 59 is
    # dropped — same scheme as getAuthToken). Returns the token's query
    # string, '?'-prefixed. Python 2 only (`print` statement, int division).
    logger.debug("Generating new token", __name__)
    headers = {
        'User-Agent': self.getSwiftUserAgent(),
        'Authorization': auth
    }
    res = client.request(postUrl, headers=headers, redirect=False)
    s = list(res)
    if stripping:
        for i in range((len(s) - 59) / 12):
            ind = len(s) - 59 + (12 * (i))
            if ind < len(s):
                print ind
                s[ind] = ''
    ret = ''.join(s)
    # Keep only the query part of the returned URL.
    return '?' + ret.split('?')[1]
def sources(self, url):
    # Pubfilm scraper: collect EZWebPlayer links from the page, filter by
    # episode number when present, then extract gvideo file/label pairs
    # from each player page. Returns a list of source dicts; [] on error.
    # NOTE: the local `url` is reused for several shapes (str, list of
    # tuples, list of dicts) — statement order matters throughout.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        url = urlparse.urljoin(self.base_link, url)
        # '?episode=N' suffix distinguishes an episode from a movie.
        content = re.compile('(.+?)\?episode=\d*$').findall(url)
        content = 'movie' if len(content) == 0 else 'episode'
        try:
            url, episode = re.compile(
                '(.+?)\?episode=(\d*)$').findall(url)[0]
        except:
            pass
        result = client.request(url)
        # Pair each EZWebPlayer link with its anchor text.
        url = zip(
            client.parseDOM(result, 'a', ret='href',
                            attrs={'target': 'EZWebPlayer'}),
            client.parseDOM(result, 'a', attrs={'target': 'EZWebPlayer'}))
        # Keep (href, last number in anchor text) — the episode number.
        url = [(i[0], re.compile('(\d+)').findall(i[1])) for i in url]
        url = [(i[0], i[1][-1]) for i in url if len(i[1]) > 0]
        if content == 'episode':
            url = [i for i in url if i[1] == '%01d' % int(episode)]
        links = [client.replaceHTMLCodes(i[0]) for i in url]
        for u in links:
            try:
                result = client.request(u)
                result = re.findall('sources\s*:\s*\[(.+?)\]', result)[0]
                result = re.findall(
                    '"file"\s*:\s*"(.+?)".+?"label"\s*:\s*"(.+?)"', result)
                # Map player labels onto the addon's quality buckets.
                url = [{'url': i[0], 'quality': '1080p'}
                       for i in result if '1080' in i[1]]
                url += [{'url': i[0], 'quality': 'HD'}
                        for i in result if '720' in i[1]]
                url += [{'url': i[0], 'quality': 'SD'}
                        for i in result if '480' in i[1]]
                url += [{'url': i[0], 'quality': 'SCR'}
                        for i in result if '360' in i[1]]
                for i in url:
                    srcs.append({'source': 'gvideo', 'parts': '1',
                                 'quality': i['quality'],
                                 'provider': 'Pubfilm', 'url': i['url'],
                                 'direct': True, 'debridonly': False})
            except:
                pass
        logger.debug('SOURCES URL %s' % srcs, __name__)
        return srcs
    except:
        return srcs
def getUserAgent(self):
    """Fetch the rotating user-agent/config string from the remote endpoint.

    Endpoint URL and credentials are base64-obfuscated in-source.
    """
    auth_headers = {
        'User-Agent':
        base64.b64decode('cDl4VE1nV2hFclpxZGlFWU1iV045bFVvd0xGMFdWM3I='),
        'Authorization':
        base64.b64decode(
            'QmFzaWMgWVcxMVpHbHNZbUZ5YW1GdWFUcHFZVzUxWjJWeWJXRnVhbUZ1YVE9PQ=='
        )
    }
    endpoint = base64.b64decode(
        'aHR0cHM6Ly93d3cuYm94dHZoZC5jb20vdG9wL3Bha2luZGlhdjIzcC5waHA=')
    useragent = client.request(endpoint, headers=auth_headers)
    logger.debug('UserAgent : %s' % useragent, __name__)
    return useragent
def sources(self, url):
    # Scrape one source per table row of the page's bordered table,
    # fanned out to worker threads when 'hypermode' is on. Rows are
    # handled by self.source, which appends into self.srcs.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        if url == None:
            return self.srcs
        url = '%s%s' % (self.base_link, url)
        try:
            result = client.request(url)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')
        result = client.parseDOM(result, "table",
                                 attrs={"class": "table table-bordered"})[0]
        result = client.parseDOM(result, "tbody")[0]
        result = client.parseDOM(result, "tr")
        # hypermode = parallel per-row scraping via worker threads.
        hypermode = False if control.setting(
            'hypermode') == 'false' else True
        threads = []
        for item in result:
            if hypermode:
                threads.append(workers.Thread(self.source, item))
            else:
                self.source(item)
        if hypermode:
            [i.start() for i in threads]
            # Busy-wait until every worker thread has finished.
            stillWorking = True
            while stillWorking:
                stillWorking = False
                stillWorking = [
                    True for x in threads if x.is_alive() == True
                ]
        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except:
        return self.srcs
def resolve(self, url, resolverList):
    # Resolve a cinefun content id to a playable HLS URL via the (base64-
    # obfuscated) iOS API, appending an iOS user-agent hint after '|'.
    # The triple-quoted block below is a retired web-scrape path kept as
    # dead code; `u` therefore always starts as None.
    logger.debug('ORIGINAL URL [%s]' % url, __name__)
    u = None
    '''
    try :
        #headers = {'User-agent': 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10'}
        agent = cache.get(client.randomagent, 1)
        headers = {'User-agent': agent}
        result = client.request('https://cinefuntv.com/watchnow.php?content=%s' % url, headers=headers, redirect=False)
        u = re.findall('var cms_url = [\'"](.*?)[\'"]', result)[0]
        u += '|%s' % urllib.urlencode({'User-agent': agent})
        url = u
    except:
        u = None
    '''
    if u == None:
        try:
            headers = {
                'User-agent':
                base64.b64decode(
                    'Q0ZVTlRWLzMuMSBDRk5ldHdvcmsvNzU4LjAuMiBEYXJ3aW4vMTUuMC4w'
                )
            }
            result = client.request(base64.b64decode(
                'aHR0cHM6Ly9jaW5lZnVudHYuY29tL3NtdGFsbmMvY29udGVudC5waHA/Y21kPWRldGFpbHMmQCZkZXZpY2U9aW9zJnZlcnNpb249MCZjb250ZW50aWQ9JXMmc2lkPSZ1PWMzMjgxOTMwQHRyYnZuLmNvbQ=='
            ) % url, headers=headers, redirect=False)
            links = json.loads(result)
            # Prefer HLS; fall back to the Samsung, then Panasonic URL.
            u = links[0]['HLSURL']
            if u == '':
                u = links[0]['SamsungURL']
            if u == '':
                u = links[0]['PanasonicURL']
            u += "|%s" % urllib.urlencode({
                'User-Agent':
                'AppleCoreMedia/1.0.0.13A452 (iPhone; U; CPU OS 9_0_2 like Mac OS X; en_gb)'
            })
            url = u
        except:
            url = None
    # NOTE(review): validateUrl's result is discarded — the (possibly
    # invalid) url is returned regardless; confirm whether that's intended.
    result = client.validateUrl(url)
    logger.debug('RESOLVED URL [%s]' % url, __name__)
    return url
def resolve(url):
    """Return the highest-resolution <source> src found on the page.

    Tries 720 down to 240; on total failure the page-fetch exception
    bubbles into the handler and False is returned.
    """
    res = ['720', '480', '360', '240']
    try:
        page = client.request(url)
        for quality in res:
            try:
                url = client.parseDOM(page, name="source",
                                      attrs={"res": quality},
                                      ret="src")[0]
                break
            except:
                pass
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        return False
def resolve(url):
    """Unpack p.a.c.k.e.d player JS (when present) and pull out the file URL."""
    try:
        page = client.request(url)
        packed = re.search('(eval\(function.*?)\s*</script>', page,
                           re.DOTALL)
        # Packed pages get unpacked first; plain pages are searched as-is.
        js = jsunpack.unpack(packed.group(1)) if packed else page
        match = re.search('file\s*:\s*[\'|"]([^\'|"]+)', js)
        url = match.group(1) if match else None
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        return False
def resolve(self, url, resolverList):
    """Resolve every '##'-separated part of *url*.

    Returns a list of resolved links, or False if any part fails.
    """
    try:
        # FIX: str.split always returns a non-empty list, so the old
        # `else: url = urlparse.urlparse(url).path` branch was unreachable
        # dead code and has been removed.
        parts = url.split('##')
        links = []
        for item in parts:
            r = resolvers.request(item, resolverList)
            if not r:
                raise Exception()
            links.append(r)
        url = links
        logger.debug('RESOLVED URL [%s]' % url, __name__)
        return url
    except:
        return False
def get(self, url, idx=True, provider=None, lang=None):
    # Populate self.list either from a named provider module's full list
    # or from an IMDB list URL, then (when idx) post-process and build
    # the movie directory. Returns self.list; None on logged error.
    logger.debug('url [%s] provider [%s] lang [%s] ' %
                 (url, provider, lang), self.__class__)
    self.lang = lang
    try:
        # NOTE(review): if urlparse fails AND provider is None, `u` is
        # unbound at the elif below — relies on the outer except.
        try:
            u = urlparse.urlparse(url).netloc.lower()
        except:
            pass
        if not provider == None:
            # Dynamically import resources.lib.sources.<provider> and
            # cache its full scan list for 48 hours.
            call = __import__('resources.lib.sources.%s' % provider,
                              globals(), locals(), ['source'], -1).source()
            self.list = cache.get(call.scn_full_list, 48, url, lang,
                                  provider)
            self.worker()
        elif u in self.imdb_link:
            self.list = cache.get(self.imdb_list, 48, url)
            if idx == True:
                self.worker()
        if idx == True:
            self.movieDirectory(self.list, lang=lang)
        return self.list
    except Exception as e:
        logger.error(e, __name__)
        pass
def livetv(self):
    # Build (or reuse) the cached channel-list JSON for the 'cinefun'
    # live provider from its JSON channel feed, then parse into self.srcs.
    # Returns (generateJSON, srcs); None on error (traceback printed).
    try:
        # Truthy when the cached JSON has expired; a missing file also
        # forces regeneration.
        generateJSON = cache.get(self.removeJSON, 168, __name__,
                                 table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            url = self.live_link
            result = client.request(url, headers=self.headers)
            result = json.loads(result)
            channelList = {}
            for channel in result:
                title = channel['Title']
                #title = cleantitle.live(title)
                #if title == 'SKIP':
                #    continue
                icon = channel['ThumbnailURL']
                cUrl = channel['ContentId']
                channelList[title] = {
                    'icon': icon,
                    'url': cUrl,
                    'provider': 'cinefun',
                    'source': 'cinefun',
                    'direct': False,
                    'quality': 'HD',
                    'content': 'live'
                }
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(channelList, outfile, sort_keys=True, indent=2)
        # Always (re)parse the on-disk JSON into source dicts.
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except:
        import traceback
        traceback.print_exc()
        pass
def resolve(url):
    # Generic page resolver: fetch the page with a random agent, scrape
    # candidate stream sources, pick one, and return it with the headers
    # (Referer + Cookie) appended in '|'-encoded form. False on failure.
    try:
        headers = {'User-Agent': client.randomagent()}
        # 'extended' output returns (body, status, resp-headers,
        # request-headers, cookie); the request headers are reused below.
        result, response_code, response_headers, headers, cookie = client.request(
            url, headers=headers, output='extended')
        headers.update({'Referer': url})
        headers.update({'Cookie': cookie})
        # NOTE(review): sibling modules call urlparse.urlparse(...) —
        # this bare urlparse(url) only works if this module does
        # `from urlparse import urlparse`; confirm the import style.
        scheme = urlparse(url).scheme
        result_blacklist = []
        source_list = scrape_sources(result, result_blacklist, scheme)
        source = pick_source(source_list)
        url = source + append_headers(headers)
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        import traceback
        traceback.print_exc()
        return False
def resolve(self, url, resolverList):
    """Append the SolidPlay user-agent hint ('|User-Agent=...') to *url*."""
    logger.debug('ORIGINAL URL [%s]' % url, __name__)
    agent = self.getSolidPlayUserAgent()
    url = '%s|User-Agent=%s' % (url, agent)
    logger.debug('RESOLVED URL [%s]' % url, __name__)
    #result = client.validateUrl(url)
    logger.debug('VALID RESOLVED URL [%s]' % url, __name__)
    return url
def resolve(self, url, resolverList):
    # Resolve a live stream from the page's window.pl_data blob.
    # Primary path: base64-decode the file and AES key, AES-CBC decrypt
    # (zero IV) and strip padding/non-printables. Fallback: the page's
    # <source type="application/x-mpegurl"> tag. Returns
    # '<link>|Referer=<page url>' or False.
    try:
        logger.debug('ORIGINAL URL [%s]' % url, __name__)
        result = client.request(url, headers=self.headers)
        playdata = 'window.pl_data = (\{.*?"key":.*?\}\})'
        result = re.findall(playdata, result)[0]
        try:
            result = json.loads(result)
            link = result['live']['channel_list'][0]['file']
            key = result['live']['key']
            link = link.decode('base64')
            key = key.decode('base64')
            # AES-CBC with an all-zero IV; trailing NULs are padding.
            de = pyaes.new(key, pyaes.MODE_CBC, IV='\0' * 16)
            link = de.decrypt(link).replace('\x00', '').split('\0')[0]
            # Keep printable ASCII (plus whitespace) only.
            link = re.sub('[^\s!-~]', '', link)
        except:
            link = client.parseDOM(result, "source",
                                   attrs={"type": "application/x-mpegurl"},
                                   ret="src")[0]
        logger.debug('URL : [%s]' % link, __name__)
        url = '%s|Referer=%s' % (link.strip(), url)
        # NOTE(review): validateUrl's result is discarded — confirm intent.
        result = client.validateUrl(url)
        logger.debug('RESOLVED URL [%s]' % url, __name__)
        return url
    except:
        return False
def sources(self, url):
    # NOTE(review): this scraper looks unfinished — it logs in, fetches a
    # hard-coded erosnow profile URL, parses the JSON, then discards it
    # and always returns the empty `srcs` list.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        quality = ''
        srcs = []
        if url == None:
            return srcs
        self.login()
        #url = urlparse.urljoin(self.base_link_1, url)
        url = 'http://erosnow.com/profiles/1000218?platform=2&q=auto'
        try:
            result = client.request(url)
        except:
            result = ''
        # Parsed but never used — see note above.
        result = json.loads(result)
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def resolve(self, url, resolverList):
    """Log in via the site's JSON endpoint; return [url] on success, False otherwise."""
    logger.debug('ORIGINAL URL [%s]' % url, __name__)
    try:
        payload = {
            'el': self.user,
            'pw': self.password,
            'mobile': '',
            'callingcode': '',
            'type': 'json',
            'fbid': ''
        }
        # NOTE(review): built but never passed to client.request — confirm
        # whether the Referer header was meant to be sent.
        h = {'Referer': self.base_link}
        result = client.request(self.login_link,
                                post=urllib.urlencode(payload))
        result = json.loads(result)
        # Missing 'success' key raises KeyError -> except path -> False.
        t = result['success']
        logger.debug('RESOLVED URL [%s]' % url, __name__)
        return [url]
    except:
        return False
def sources(self, url):
    """Collect iframe embeds from the post's "entry" div as tamilyogi sources."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        # Quality is inferred from the URL itself.
        quality = 'HD' if 'hd' in url.lower() else 'SD'
        html = client.request(url)
        strainer = SoupStrainer("div", {"class": "entry"})
        entry = BeautifulSoup(html, parseOnlyThese=strainer)
        try:
            for frame in entry.findAll('iframe'):
                url = frame.get('src')
                host = client.host(url)
                srcs.append({
                    'source': host,
                    'parts': '1',
                    'quality': quality,
                    'provider': 'tamilyogi',
                    'url': url,
                    'direct': False
                })
        except:
            pass
        return srcs
    except:
        return srcs
def resolve(url):
    """Resolve a mediaplaybox video page to a direct media URL.

    Tries the download anchor in the "divider" block first, then the
    schema.org contentURL meta tag. False on total failure.
    """
    try:
        # Normalise: extract the video id and build the mobile URL.
        url = url + '#'
        url = re.compile(
            'http://www.mediaplaybox.com/video/(.+?)#').findall(url)[0]
        url = 'http://www.mediaplaybox.com/mobile?vinf=%s' % url
        result = client.request(url, debug=True)
        # Preferred path: anchor inside the "divider" div.
        try:
            url = client.parseDOM(result, "div",
                                  attrs={"class": "divider"})[0]
            url = client.parseDOM(url, "a", ret="href")
            url = url[0]
            url = url.replace('_ipod.mp4', '.flv')
            return url
        except:
            pass
        # Fallback: the itemprop=contentURL meta tag.
        try:
            url = client.parseDOM(result, "meta",
                                  attrs={"itemprop": "contentURL"},
                                  ret="content")[0]
        except:
            pass
        logger.debug('URL [%s]' % url, __name__)
        return url
    except:
        return False
def sources(self, url):
    """Scrape the single direct HD stream from a mDitto page."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        oUrl = urlparse.urljoin(self.base_link_1, url)
        try:
            result = client.request(oUrl)
        except:
            result = ''
        # csrf is never used afterwards, but a missing token aborts the
        # scrape here via IndexError (kept as an implicit guard).
        csrf = client.parseDOM(result, "meta",
                               attrs={"name": "csrf-token"},
                               ret="content")[0]
        wrapper = client.parseDOM(result, "div",
                                  attrs={"class": "video-wrapper"})[0]
        stream = client.parseDOM(wrapper, "source", ret="src")[0]
        stream = '%s|Referer=%s' % (stream, oUrl)
        srcs.append({
            'source': "Ditto",
            'parts': '1',
            'quality': "HD",
            'provider': 'mDitto',
            'url': stream,
            'direct': True
        })
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def resolve(self, url, resolverList):
    """Attach the auth token and a randomised iOS user-agent hint to *url*."""
    try:
        logger.debug('ORIGINAL URL [%s]' % url, __name__)
        token = self.getAuthToken()
        logger.debug('AuthToken %s' % token, __name__)
        url += token
        # Headers follow a '|' separator; add one if not present yet.
        if '|' not in url:
            url += '|'
        import random
        builds = ['13G35', '13G36', '14A403', '14A456', '14B72', '14B150']
        devices = ['iPhone', 'iPad', 'iPod']
        versions = ['9.3.4', '9.3.5', '10.0.2', '10.1', '10.1.1']
        url += 'User-Agent=AppleCoreMedia/1.0.0.%s (%s; U; CPU OS %s like Mac OS X; en_gb)' % (
            random.choice(builds), random.choice(devices),
            random.choice(versions))
        result = client.validateUrl(url)
        logger.debug('RESOLVED URL [%s]' % url, __name__)
        return url
    except Exception as e:
        logger.error(e)
        return False
def sources(self, url):
    # movie25 scraper: normalise the watch URL (numeric ids become a
    # watch-...-online-free path), read the advertised quality, then
    # decode every base64 redirect link on the page into hoster URLs.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        sources = []
        if url == None:
            return sources
        if url.isdigit():
            url = '/watch-%s-online-free-%s.html' % (url, url)
        url = urlparse.urljoin(self.base_link, url)
        result = proxy.request(url, 'ovie')
        # 'Quality<...>' label on the page; collapse to CAM/SCR/SD.
        quality = re.compile('Quality(.+?)<').findall(
            result.replace('\n', ''))
        quality = quality[0].strip() if quality else 'SD'
        if quality == 'CAM' or quality == 'TS':
            quality = 'CAM'
        elif quality == 'SCREENER':
            quality = 'SCR'
        else:
            quality = 'SD'
        dupes = []
        # Every quoted string on the page is a candidate link.
        links = re.findall('\'(.+?)\'', result) + re.findall(
            '\"(.+?)\"', result)
        links = [proxy.parse(i) for i in links]
        links = [i for i in links if i.startswith('http')]
        # De-duplicate while preserving first-seen order.
        links = [x for y, x in enumerate(links) if x not in links[:y]]
        for i in links:
            try:
                url = i
                # The real hoster URL is base64-encoded in the query string.
                url = urlparse.urlparse(url).query
                url = url.decode('base64')
                url = re.findall('((?:http|https)://.+?/.+?)(?:&|$)',
                                 url)[0]
                url = client.replaceHTMLCodes(url)
                url = url.encode('utf-8')
                if url in dupes:
                    raise Exception()
                dupes.append(url)
                # Host label = last two dotted components of the netloc.
                host = re.findall(
                    '([\w]+[.][\w]+)$',
                    urlparse.urlparse(url.strip().lower()).netloc)[0]
                host = host.encode('utf-8')
                sources.append({
                    'provider': 'movie25',
                    'source': host,
                    'quality': quality,
                    'language': 'en',
                    'url': url,
                    'direct': False,
                    'debridonly': False
                })
            except:
                pass
        return sources
    except:
        return sources
def sources(self, url):
    # HindiLinks4U scraper. The post body alternates <p> blocks:
    # server-name paragraph, then links paragraph — hence the [1::],
    # [::2], [1::2] slicing below. Each anchor is fetched (mobile view)
    # and its embed src extracted; multi-part links are '##'-joined.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        quality = ''
        srcs = []
        if url == None:
            return srcs
        try:
            result = client.request(url)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')
        quality = ''
        result = client.parseDOM(
            result, name="div",
            attrs={"class": "entry-content rich-content"})[0]
        result = client.parseDOM(result, name="p")
        try:
            host = ''
            urls = []
            # Drop the intro paragraph; then even = servers, odd = links.
            result = result[1::]
            serversList = result[::2]
            linksList = result[1::2]
            for i in range(0, len(serversList)):
                try:
                    links = linksList[i]
                    urls = client.parseDOM(links, name="a", ret="href")
                    for j in range(0, len(urls)):
                        try:
                            # Resolve each anchor to its embedded player src.
                            item = client.request(urls[j], mobile=True)
                            item = client.parseDOM(item, "td")[0]
                            item = re.compile(
                                '(SRC|src|data-config)=\"(.+?)\"').findall(
                                    item)[0][1]
                            urls[j] = item
                        except:
                            pass
                    if len(urls) > 1:
                        url = "##".join(urls)
                    else:
                        url = urls[0]
                    host = client.host(urls[0])
                    srcs.append({
                        'source': host,
                        'parts': str(len(urls)),
                        'quality': quality,
                        'provider': 'HindiLinks4U',
                        'url': url,
                        'direct': False
                    })
                except:
                    pass
        except:
            pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def sources(self, url):
    # tamilgun scraper: four independent extraction passes over the page —
    # packed-JS player sources, iframes in #videoframe, iframes in
    # .entry-excerpt, and a vdf-data-json YouTube fallback.
    # NOTE(review): results accumulate in self.srcs (never reset here),
    # while the local `srcs` is reused for the parsed JSON — confirm that
    # cross-call accumulation is intended.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        # Quality is inferred from the URL itself.
        if 'hd' in url.lower():
            quality = 'HD'
        else:
            quality = 'SD'
        html = client.request(url)
        # Pass 1: packed player JS with a sources:[...] array.
        try:
            linkcode = jsunpack.unpack(html).replace('\\', '')
            srcs = json.loads(re.findall('sources:(.*?)\}\)', linkcode)[0])
            for source in srcs:
                url = source['file']
                host = client.host(url)
                self.srcs.append({
                    'source': host,
                    'parts': '1',
                    'quality': quality,
                    'provider': 'tamilgun',
                    'url': url,
                    'direct': False
                })
        except:
            pass
        # Pass 2: iframes inside div#videoframe.
        mlink = SoupStrainer('div', {'id': 'videoframe'})
        videoclass = BeautifulSoup(html, parseOnlyThese=mlink)
        try:
            links = videoclass.findAll('iframe')
            for link in links:
                url = link.get('src')
                host = client.host(url)
                self.srcs.append({
                    'source': host,
                    'parts': '1',
                    'quality': quality,
                    'provider': 'tamilgun',
                    'url': url,
                    'direct': False
                })
        except:
            pass
        # Pass 3: iframes inside div.entry-excerpt (http links only).
        mlink = SoupStrainer('div', {'class': 'entry-excerpt'})
        videoclass = BeautifulSoup(html, parseOnlyThese=mlink)
        try:
            links = videoclass.findAll('iframe')
            for link in links:
                if 'http' in str(link):
                    url = link.get('src')
                    host = client.host(url)
                    self.srcs.append({
                        'source': host,
                        'parts': '1',
                        'quality': quality,
                        'provider': 'tamilgun',
                        'url': url,
                        'direct': False
                    })
        except:
            pass
        # Pass 4: YouTube id from the vdf-data-json blob.
        try:
            sources = json.loads(
                re.findall('vdf-data-json">(.*?)<', html)[0])
            url = 'https://www.youtube.com/watch?v=%s' % sources['videos'][
                0]['youtubeID']
            host = client.host(url)
            self.srcs.append({
                'source': host,
                'parts': '1',
                'quality': quality,
                'provider': 'tamilgun',
                'url': url,
                'direct': False
            })
        except:
            pass
        return self.srcs
    except:
        return self.srcs
def sources(self, url):
    # world4u scraper: search the site's RSS feed for "<title> <year>",
    # filter posts by cleaned title + year, derive quality/size/codec
    # info from the release name, then collect all external links from
    # the matching post's "entry" div.
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        quality = ''
        srcs = []
        if url == None:
            return srcs
        # url is a querystring carrying imdb/title/year.
        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '')
                     for i in data])
        imdb, title, year = data.get('imdb'), data.get('title'), data.get(
            'year')
        # Two mirrors; pick one at random per call.
        self.base_link = random.choice(
            [self.base_link_1, self.base_link_2])
        query = '%s %s' % (title, year)
        query = self.search_link % (urllib.quote_plus(query))
        query = urlparse.urljoin(self.base_link, query)
        result = client.request(query)
        result = result.decode('iso-8859-1').encode('utf-8')
        posts = client.parseDOM(result, "item")
        items = []
        # First pass: collect candidate posts (skip trailers), grabbing
        # the release name, link and an optional size string.
        for post in posts:
            try:
                t = client.parseDOM(post, 'title')[0]
                if 'trailer' in cleantitle.movie(t):
                    raise Exception()
                try:
                    s = re.findall(
                        '((?:\d+\.\d+|\d+\,\d+|\d+)(?:GB|GiB|MB|MiB|mb|gb))',
                        t)[0]
                except:
                    s = '0'
                i = client.parseDOM(post, 'link')[0]
                items += [{'name': t, 'url': i, 'size': s}]
            except:
                pass
        title = cleantitle.movie(title)
        # Second pass: match title/year, derive quality + info, then
        # scrape the post page for outbound links.
        for item in items:
            try:
                name = item.get('name')
                # Strip year/episode/3D markers and everything after.
                t = re.sub(
                    '(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)',
                    '', name)
                #searchTitle = re.compile('(.+?) \d{4}').findall(searchTitle)[0]
                #searchTitle = cleantitle.movie(searchTitle)
                if cleantitle.movie(title) == cleantitle.movie(t):
                    y = re.findall(
                        '[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]',
                        name)[-1].upper()
                    if not y == year:
                        raise Exception()
                    # Tokenise the part of the name after the year to
                    # detect format/quality flags.
                    fmt = re.sub(
                        '(.+)(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*)(\.|\)|\]|\s)',
                        '', name.upper())
                    fmt = re.split('\.|\(|\)|\[|\]|\s|\-', fmt)
                    fmt = [i.lower() for i in fmt]
                    # Skip subbed/dubbed releases and extras.
                    if any(
                            i.endswith(('subs', 'sub', 'dubbed', 'dub'))
                            for i in fmt):
                        raise Exception()
                    if any(i in ['extras'] for i in fmt):
                        raise Exception()
                    if '1080p' in fmt:
                        quality = '1080p'
                    elif '720p' in fmt:
                        quality = 'HD'
                    else:
                        quality = 'SD'
                    # Screener/cam markers override the resolution bucket.
                    if any(i in ['dvdscr', 'r5', 'r6'] for i in fmt):
                        quality = 'SCR'
                    elif any(i in [
                            'camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam',
                            'dvdts', 'cam', 'telesync', 'ts'
                    ] for i in fmt):
                        quality = 'CAM'
                    info = []
                    if '3d' in fmt:
                        info.append('3D')
                    # Normalise the size string to "%.2f GB".
                    try:
                        size = re.findall(
                            '((?:\d+\.\d+|\d+\,\d+|\d+)(?:GB|GiB|MB|MiB|mb|gb))',
                            item.get('size'))[-1]
                        div = 1 if size.endswith(('GB', 'GiB')) else 1024
                        size = float(re.sub('[^0-9|/.|/,]', '',
                                            size)) / div
                        size = '%.2f GB' % size
                        info.append(size)
                    except:
                        pass
                    if any(i in ['hevc', 'h265', 'x265'] for i in fmt):
                        info.append('HEVC')
                    info = ' | '.join(info)
                    movieurl = item.get('url')
                    result = client.request(movieurl)
                    result = result.decode('iso-8859-1').encode('utf-8')
                    result = result.replace('\n', '').replace('\t', '')
                    result = client.parseDOM(result, 'div',
                                             attrs={'class': 'entry'})[0]
                    #result = client.parseDOM(result, 'div', attrs={'class':'separator'})
                    #result = re.findall('<div class=\"wpz-sc-box(.+?)<div class=\"wpz-sc-box download', result)
                    links = client.parseDOM(result, 'a',
                                            attrs={'target': '_blank'},
                                            ret='href')
                    for link in links:
                        if 'http' in link:
                            #if urlresolver.HostedMediaFile(url= link):
                            host = client.host(link)
                            srcs.append({
                                'source': host,
                                'parts': '1',
                                'quality': quality,
                                'provider': 'world4u',
                                'url': link,
                                'direct': False,
                                'info': info
                            })
            except:
                pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        import traceback
        traceback.print_exc()
        return srcs