def livetv(self):
    """Build (or reuse) the Swift live-TV channel JSON and parse it.

    Returns a tuple (generateJSON, srcs) where generateJSON is truthy when
    the channel JSON file was regenerated and srcs is the parsed channel
    list. Returns None on failure (the traceback is printed, not raised).
    """
    try:
        # The cache decides (once per 168 hours) whether the JSON must be
        # rebuilt; a missing file forces a rebuild regardless.
        generateJSON = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            # FIX: a Basic-auth header dict (built from the cached Swift
            # password) was previously constructed here and immediately
            # overwritten — dead code removed. The API is queried without
            # an Authorization header, exactly as before.
            headers = {
                'User-Agent': 'Dalvik/1.6.0 (Linux; U; Android 4.4.2; SM-G900F Build/KOT49H)'
            }
            category_url = 'http://swiftstreamz.com/SwiftStream/api.php'
            result = client.request(category_url, headers=headers)
            items = json.loads(result)['LIVETV']
            self.channelList = {}
            categories = ['INDIAN TV', 'SOUTH INDIAN', 'PUNJABI TV']
            for item in items:
                if item['category_name'] in categories:
                    url = '%s?cat_id=%s' % (category_url, item['cid'])
                    # getSwiftChannels is expected to populate
                    # self.channelList as a side effect; its return value
                    # was assigned to an unused local before — dropped.
                    self.getSwiftChannels(url, headers)
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(self.channelList, outfile, sort_keys=True, indent=2)
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except:
        import traceback
        traceback.print_exc()
def movie(self, imdb, title, year):
    """Search onemovies for a movie and return its site path.

    Matches the cleaned title against the first two search hits, then
    verifies the release year via onemovies_info. Returns the URL path
    of the matching result, or None when nothing matches (all errors
    are swallowed).
    """
    try:
        t = cleantitle.get(title)
        headers = {'X-Requested-With': 'XMLHttpRequest'}
        query = urllib.urlencode({'keyword': title})
        url = urlparse.urljoin(self.base_link, self.search_link)
        r = client.request(url, post=query, headers=headers)
        r = json.loads(r)['content']
        # Pair result hrefs with their display titles.
        r = zip(
            client.parseDOM(r, 'a', ret='href', attrs={'class': 'ss-title'}),
            client.parseDOM(r, 'a', attrs={'class': 'ss-title'}))
        # FIX: compare with the already-cleaned title `t` instead of
        # cleaning it a second time (cleantitle.get(t)); this matches how
        # the sibling episode() method compares titles.
        r = [i[0] for i in r if t == cleantitle.get(i[1])][:2]
        # The trailing digits of the href are the site's video id.
        r = [(i, re.findall('(\d+)', i)[-1]) for i in r]
        for i in r:
            try:
                # onemovies_info returns (year, quality); reject mismatches.
                y, q = cache.get(self.onemovies_info, 9000, i[1])
                if not y == year: raise Exception()
                return urlparse.urlparse(i[0]).path
            except:
                pass
    except:
        return
def livetv(self, generateJSON=False):
    """Fetch (when needed) and parse the live-TV channel file.

    generateJSON -- pass True to force regeneration; with the default
    False the 168-hour cache and file existence decide. (Previously the
    parameter was accepted but unconditionally overwritten.)

    Returns a tuple (retValue, srcs); retValue is 1 for a local file,
    the fetcher's return value otherwise. Returns None on failure.
    """
    try:
        retValue = 0
        # FIX: honour an explicit generateJSON=True from the caller; only
        # consult the cache when the caller did not force a rebuild.
        if not generateJSON:
            generateJSON = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        # A missing channel file always forces a rebuild.
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            fileFetcher = FileFetcher(self.fileName, control.addon)
            if control.setting('livelocal') == 'true':
                retValue = 1
            else:
                retValue = fileFetcher.fetchFile()
            # Negative return values signal a failed download.
            if retValue < 0:
                raise Exception()
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile()
        return (retValue, self.srcs)
    except:
        import traceback
        traceback.print_exc()
def getSwiftAuth(self, url): stripping = True result = cache.get(self.getSwiftCache, 600000, table='live_cache') if result["DATA"][0]["HelloUrl"] in url or result["DATA"][0][ "HelloUrl1"] in url: postUrl = result["DATA"][0]["HelloLogin"] auth = 'Basic %s' % base64.b64encode( result["DATA"][0]["PasswordHello"]) stripping = False elif result["DATA"][0]["LiveTvUrl"] in url: postUrl = result["DATA"][0]["LiveTvLogin"] auth = 'Basic %s' % base64.b64encode( result["DATA"][0]["PasswordLiveTv"]) elif result["DATA"][0]["nexgtvUrl"] in url: postUrl = result["DATA"][0]["nexgtvToken"] auth = 'Basic %s' % base64.b64encode( result["DATA"][0]["nexgtvPass"]) stripping = False elif '.m3u8' not in url: print 'skip auth' else: postUrl = result["DATA"][0]["loginUrl"] auth = 'Basic %s' % base64.b64encode(result["DATA"][0]["Password"]) if postUrl: return self.getSwiftAuthToken(postUrl, auth, stripping) return url
def get(self, url, idx=True, provider=None, network=None):
    """Return the cached show listing for url, sorted by show name.

    When idx is True the listing is also rendered into the tv-show
    directory. Returns None if anything fails (the error is logged).
    """
    try:
        fetched = cache.get(self.shows, 168, url, provider, network,
                            table='rel_shows')
        self.list = sorted(fetched, key=lambda entry: entry['name'])
        if idx == True:
            self.tvshowDirectory(self.list)
        return self.list
    except Exception as err:
        logger.error(err, __name__)
def movie(self, imdb, title, year):
    """Return the cached DesiRulez URL whose title matches, or None."""
    try:
        wanted = cleantitle.movie(title)
        # First cached entry with a matching cleaned title wins.
        for entry in cache.get(self.desiRulezCache, 168):
            if cleantitle.movie(entry['title']) == wanted:
                return entry['url']
    except:
        pass
def get(self, url, idx=True, provider=None, lang=None):
    """Populate self.list with movies for url and optionally render them.

    With a named provider, the provider module under
    resources.lib.sources is imported dynamically and its full list is
    used; otherwise an IMDb-hosted url is served from imdb_list. Returns
    the list, or None on error (logged).
    """
    logger.debug('url [%s] provider [%s] lang [%s] ' % (url, provider, lang),
                 self.__class__)
    self.lang = lang
    try:
        try:
            # Host part of the url selects the IMDb branch below.
            u = urlparse.urlparse(url).netloc.lower()
        except:
            pass
        # NOTE(review): if urlparse fails AND provider is None, `u` is
        # unbound and the outer except logs a NameError — confirm intended.
        if not provider == None:
            # Python 2 __import__ (level=-1) of the provider module.
            call = __import__('resources.lib.sources.%s' % provider,
                              globals(), locals(), ['source'], -1).source()
            self.list = cache.get(call.scn_full_list, 48, url, lang, provider)
            self.worker()
        elif u in self.imdb_link:
            self.list = cache.get(self.imdb_list, 48, url)
            if idx == True:
                self.worker()
        if idx == True:
            self.movieDirectory(self.list, lang=lang)
        return self.list
    except Exception as e:
        logger.error(e, __name__)
        pass
def get(self, tvshowtitle, year, imdb, tvdb, idx=True):
    """Return the season list for a show.

    With idx True (the default) the list comes from a 24-hour cache and
    is rendered into the season directory; otherwise tvdb_list is
    queried directly and nothing is rendered.
    """
    if idx != True:
        # Direct, uncached lookup — no directory rendering.
        self.list = self.tvdb_list(tvshowtitle, year, imdb, tvdb,
                                   self.info_lang)
        return self.list
    self.list = cache.get(self.tvdb_list, 24, tvshowtitle, year, imdb,
                          tvdb, self.info_lang)
    self.seasonDirectory(self.list)
    return self.list
def getAuthToken(self):
    """Build a signed playlist URL token for the dynns service.

    Derives a hash from the caller's public IP, a shared secret, the
    current UTC time and a validity window taken from the user agent,
    requests the (obfuscated) token endpoint, then strips padding
    characters from the response at fixed 12-byte intervals.
    Returns the cleaned token string, or None on error (logged).
    """
    # Obfuscated endpoint template; '%s' is filled with a UA-derived name.
    url = base64.b64decode(
        'aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC8lcy5waHA/d21zQXV0aFNpZ249')
    try:
        userAgent = self.getUserAgent()
        logger.debug('Final UserAgent : %s' % userAgent, __name__)
        # First 4 chars name the endpoint; char 4 is the validity minutes.
        filename = userAgent[:4]
        import datetime, hashlib
        timesegment = datetime.datetime.utcnow().strftime(
            "%m/%d/%Y %H:%M:%S")
        validtime = userAgent[4]
        headers = {
            'User-Agent':
            base64.b64decode(
                'UGFrJTIwVFYvMS4wIENGTmV0d29yay84MDguMi4xNiBEYXJ3aW4vMTYuMy4w'
            )
        }
        # Discover our public IP via the service's ip_check endpoint.
        ipstring = client.request(base64.b64decode(
            "aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC9pcF9jaGVjay5waHA="),
                                  headers=headers)
        ipadd = ipstring.split('Address: ')[1]
        # Hash input: ip + secret + UA prefix + timestamp + validity.
        s = "%s%s%s%s" % (ipadd,
                          base64.b64decode("dHVtYmluamlhamF5bmFqYW5h") +
                          userAgent[:10], timesegment, validtime)
        dd = base64.b64decode(
            "c2VydmVyX3RpbWU9JXMmaGFzaF92YWx1ZT0lcyZ2YWxpZG1pbnV0ZXM9JXM="
        ) % (timesegment, base64.b64encode(
            hashlib.md5(s).hexdigest().lower()), validtime)
        url = (url % filename) + base64.b64encode(dd)
        headers = {
            'User-Agent':
            cache.get(self.getDeviceID, 600000, table='live_cache'),
            'Authorization':
            base64.b64decode(
                'QmFzaWMgWW05emMyZGliM056T21kdmIyUm5aMjl2WkE9PQ==')
        }
        res = client.request(url, headers=headers)
        # The response embeds junk characters every 12 bytes in its last
        # 59-byte region; blank them out. Note: Python 2 integer division.
        s = list(res)
        for i in range((len(s) - 59) / 12):
            ind = len(s) - 59 + (12 * (i))
            if ind < len(s):
                print ind
                s[ind] = ''
        return ''.join(s)
    except Exception as e:
        logger.error(e)
def livetv(self):
    """Build (when stale) the dittoTV channel JSON and parse it.

    Scrapes the live listing page, writes the channel map to
    self.fileName under the add-on data path, and parses it into
    self.srcs. Returns (generateJSON, srcs); returns None after
    printing the traceback on failure.
    """
    try:
        # Regenerate at most every 168 hours, or when the file is missing.
        generateJSON = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            url = self.live_link
            result = client.request(url, headers=self.headers)
            # Scrape (url, title, icon) triples out of the listing HTML.
            channels = re.findall(
                '<div class="subpattern.*?\s*<a href="(.*?)" title="(.*?)".*?\s*<img src=".*?".*?\s*<img src="(.*?)"',
                result)
            channelList = {}
            for url, title, icon in channels:
                # '&' breaks downstream handling, replace with 'And'.
                title = title.replace("&", "And")
                #from ashock.modules import livemeta
                #names = cache.get(livemeta.source().getLiveNames, 200, table='live_cache')
                #title = cleantitle.live(title)
                #if title == 'SKIP':
                #    continue
                # Devotional channels are deliberately excluded.
                if 'temple' in title.lower():
                    continue
                url = self.channel_link % url
                channelList[title] = {
                    'icon': icon,
                    'url': url,
                    'provider': 'lditto',
                    'source': 'ditto',
                    'direct': False,
                    'quality': 'HD',
                    'content': 'live'
                }
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(channelList, outfile, sort_keys=True, indent=2)
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except:
        import traceback
        traceback.print_exc()
        pass
def __init__(self):
    # Site endpoints for the dittoTV live listing and per-channel pages.
    self.base_link = 'http://www.dittotv.com'
    self.live_link = 'http://origin.dittotv.com/livetv/all/0'
    self.channel_link = 'http://origin.dittotv.com%s'
    # Browser-like request headers; the User-Agent is a random agent
    # cached (presumably for 1 hour) so it stays stable across requests.
    self.headers = {
        'Accept': 'text/html,application/xhtml+xml,q=0.9,image/jxr,*/*',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'keep-alive',
        'User-Agent': cache.get(client.randomagent, 1, table='live_cache'),
        'Referer': 'http://www.dittotv.com/livetv'
    }
    # Parsed channel sources, filled by livetv().
    self.srcs = []
    # On-disk cache file for the scraped channel list.
    self.fileName = 'ditto.json'
    self.filePath = os.path.join(control.dataPath, self.fileName)
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
    """Resolve a onemovies episode URL path for a show/season/episode.

    url carries the show data as a query string. Searches for
    '<show> - Season <n>', verifies title, season and (within +/-1)
    the premiere year, then returns the site path with an '?episode=N'
    suffix. Returns None when nothing matches (all errors swallowed).
    """
    try:
        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
        t = cleantitle.get(data['tvshowtitle'])
        # Accept premiere year +/- 1 to tolerate metadata drift.
        year = re.findall('(\d{4})', premiered)[0]
        years = [str(year), str(int(year) + 1), str(int(year) - 1)]
        season = '%01d' % int(season)
        episode = '%01d' % int(episode)
        headers = {'X-Requested-With': 'XMLHttpRequest'}
        query = urllib.urlencode(
            {'keyword': '%s - Season %s' % (data['tvshowtitle'], season)})
        url = urlparse.urljoin(self.base_link, self.search_link)
        r = client.request(url, post=query, headers=headers)
        r = json.loads(r)['content']
        # Pair result hrefs with their display titles.
        r = zip(
            client.parseDOM(r, 'a', ret='href', attrs={'class': 'ss-title'}),
            client.parseDOM(r, 'a', attrs={'class': 'ss-title'}))
        # Keep results shaped '<title> - season <n>' matching show+season.
        r = [(i[0], re.findall('(.+?) - season (\d+)$', i[1].lower()))
             for i in r]
        r = [(i[0], i[1][0][0], i[1][0][1]) for i in r if len(i[1]) > 0]
        r = [i for i in r if t == cleantitle.get(i[1])]
        r = [i[0] for i in r if season == '%01d' % int(i[2])][:2]
        # Trailing digits of the href are the site's video id.
        r = [(i, re.findall('(\d+)', i)[-1]) for i in r]
        for i in r:
            try:
                # onemovies_info returns (year, quality).
                y, q = cache.get(self.onemovies_info, 9000, i[1])
                if not y in years: raise Exception()
                return urlparse.urlparse(
                    i[0]).path + '?episode=%01d' % int(episode)
            except:
                pass
    except:
        return
def livetv(self):
    """Refresh the cinefun channel JSON when stale and parse it.

    Returns a tuple (rebuild flag, parsed sources); returns None after
    printing the traceback on failure.
    """
    try:
        rebuild = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        # A missing channel file always forces regeneration.
        if not os.path.exists(self.filePath):
            rebuild = 1
        if rebuild:
            logger.debug('Generating %s JSON' % __name__, __name__)
            response = client.request(self.live_link, headers=self.headers)
            # Map every listed channel title to its source descriptor.
            channels = {}
            for entry in json.loads(response):
                channels[entry['Title']] = {
                    'icon': entry['ThumbnailURL'],
                    'url': entry['ContentId'],
                    'provider': 'cinefun',
                    'source': 'cinefun',
                    'direct': False,
                    'quality': 'HD',
                    'content': 'live'
                }
            target = os.path.join(control.dataPath, self.fileName)
            with open(target, 'w') as handle:
                json.dump(channels, handle, sort_keys=True, indent=2)
        parser = LiveParser(self.fileName, control.addon)
        self.srcs = parser.parseFile(decode=False)
        return (rebuild, self.srcs)
    except:
        import traceback
        traceback.print_exc()
def search(self, query=None, lang=None):
    """Run a movie search, prompting with the on-screen keyboard if needed.

    Stores the effective query on self.query, fills self.list from the
    IMDb search results, renders the movie directory and returns the
    list. Returns None for an empty/cancelled query or on any error.
    """
    try:
        if query is not None:
            self.query = query
        else:
            # Pop the keyboard; a cancelled dialog yields no query.
            prompt = control.lang(30201).encode('utf-8')
            keyboard = control.keyboard('', prompt)
            keyboard.doModal()
            self.query = keyboard.getText() if keyboard.isConfirmed() else None
        if self.query is None or self.query == '':
            return
        url = self.search_link % urllib.quote_plus(self.query)
        self.list = cache.get(self.imdb_list, 0, url)
        self.worker()
        self.movieDirectory(self.list)
        return self.list
    except:
        return
def sources(self, url):
    """Resolve playable Google-video sources from ymovies playlist links.

    url is a list of (movielink, cookies, referer) triples produced by
    episode()/movie(). Each playlist JSON is fetched with the matching
    cookie and referer, and every entry becomes a source dict with a
    quality derived from its label. Returns the list of source dicts
    (whatever was collected so far if an error occurs).
    """
    try:
        srcs = []
        for movielink, cookies, referer in url:
            # print ("YMOVIES SOURCES", movielink, cookies, referer)
            headers = {
                'Referer': referer,
                'User-Agent': cache.get(client.randomagent, 1),
                'X-Requested-With': 'XMLHttpRequest'
            }
            result = client.request(movielink, headers=headers,
                                    cookie=cookies)
            result = json.loads(result)
            # print ("YMOVIES SOURCE PLAYLIST", result)
            links = result['playlist'][0]['sources']
            for item in links:
                videoq = item['label']
                url = item['file']
                # Map the site's resolution label onto our quality tags.
                if "1080" in videoq:
                    quality = "1080p"
                elif "720" in videoq:
                    quality = "HD"
                else:
                    quality = "SD"
                url = client.replaceHTMLCodes(url)
                url = url.encode('utf-8')
                srcs.append({
                    'source': 'gvideo',
                    'quality': quality,
                    'provider': 'Ymovies',
                    'url': url,
                    'direct': True,
                    'debridonly': False
                })
        return srcs
    except:
        return srcs
def root(self): self.addDirectoryItem(30860, 'movieLangNavigator', 'movies.png','DefaultMovies.png') #self.addDirectoryItem(90114, 'desiLiveNavigator', 'tv-live.png','DefaultMovies.png') #self.addDirectoryItem(90115, 'liveEPGNavigator', 'tv-epg.png','DefaultMovies.png') self.addDirectoryItem(30861, 'desiTVNavigator', 'tv-vod.png','DefaultMovies.png') self.addDirectoryItem(90116, 'openSettings&query=0.0', 'settings.png', 'DefaultMovies.png') self.addDirectoryItem(90117, 'clearCache', 'clearcache.png', 'DefaultMovies.png') self.addDirectoryItem(30864, 'changelog', 'changelog.png', 'DefaultMovies.png') from ashock.modules import cache from ashock.modules import changelog cache.get(changelog.get, 600000000, control.addonInfo('version'), table='changelog') cache.get(self.donation, 600000000, control.addonInfo('version'), table='changelog') #cache.get(control.resetSettings, 600000000, 'true', control.addonInfo('version'), table='changelog') cache.get(analytics.sendAnalytics, 600000000, ("Installed-%s" % control.addonInfo('version')), table='changelog') self.endDirectory()
def getSwiftPlayUserAgent(self):
    """Return the player User-Agent string from the cached Swift config."""
    config = cache.get(self.getSwiftCache, 600000, table='live_cache')
    return config["DATA"][0]["Agent"]
def livetv(self):
    """Build (when stale) the 'solid' IPTV channel JSON and parse it.

    Pulls the panel API listing with cached credentials, keeps channels
    from a whitelist of Indian-language categories, writes the channel
    map to disk and parses it into self.srcs. Returns
    (generateJSON, srcs); returns None after printing the traceback on
    failure.
    """
    try:
        # Regenerate at most every 168 hours, or when the file is missing.
        generateJSON = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            result = cache.get(self.getSolidCache, 48, table='live_cache')
            mainUrl = result["DATA"][0]["MainURL"]
            username = result["DATA"][0]["Username"]
            password = result["DATA"][0]["Password"]
            # b64: '%s/panel_api.php?mode=live&username=%s&password=%s'
            category_url = base64.b64decode(
                'JXMvcGFuZWxfYXBpLnBocD9tb2RlPWxpdmUmdXNlcm5hbWU9JXMmcGFzc3dvcmQ9JXM='
            ) % (mainUrl, username, password)
            result = client.request(category_url, headers=self.headers)
            result = json.loads(result)
            # Only these panel categories are surfaced to the user.
            categories = [
                'HINDI', 'PUNJABI', 'TAMIL LIVE', 'SOUTH INDIAN',
                'NORTH INDIAN', 'INDIAN', 'MALAYALAM/TAMIL/TELUGU'
            ]
            liveCategories = result['categories']['live']
            liveChannels = result['available_channels']
            serverInfo = result['server_info']
            userInfo = result['user_info']
            categoryId = []
            for category in liveCategories:
                if category['category_name'] in categories:
                    categoryId.append(category['category_id'])
            for i in range(0, len(categoryId)):
                for channelKey in liveChannels:
                    channel = liveChannels[channelKey]
                    print channel
                    if channel['category_id'] == categoryId[i] and channel[
                            'live'] == '1':
                        # b64: 'http://%s:%s/live/%s/%s/%s.ts'
                        url = base64.b64decode(
                            'aHR0cDovLyVzOiVzL2xpdmUvJXMvJXMvJXMudHM=') % (
                                serverInfo['url'], serverInfo['port'],
                                userInfo['username'], userInfo['password'],
                                channel['stream_id'])
                        title = channel['name']
                        try:
                            # Strip any '<prefix>: ' from the channel name.
                            title = re.compile('.*: ([\w\s]*)').findall(
                                title)[0].strip()
                        except:
                            pass
                        icon = channel['stream_icon']
                        # NOTE(review): self.channelList is never initialised
                        # in this method (unlike the sibling swift livetv);
                        # presumably set in __init__ — verify.
                        self.channelList[title] = {
                            'icon': icon,
                            'url': url,
                            'provider': 'solid',
                            'source': 'solid',
                            'direct': False,
                            'quality': 'HD'
                        }
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(self.channelList, outfile, sort_keys=True, indent=2)
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except:
        import traceback
        traceback.print_exc()
        pass
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
    """Collect ymovies playlist request triples for a TV episode.

    url carries the show data as a query string. Finds search results
    whose cleaned title contains '<show>season<n>', walks each result
    to its watch page, locates the requested episode in the server's
    episode list, and builds a signed playlist URL plus the cookie and
    referer needed to fetch it. Returns a list of
    [request_url, cookies, referer] triples in self.super_url, or None
    on error.
    """
    self.super_url = []
    try:
        headers = {}
        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
        title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
            'title']
        year = data['year']
        title = cleantitle.getsearch(title)
        cleanmovie = cleantitle.get(title)
        data['season'], data['episode'] = season, episode
        # Season is matched by substring against the cleaned result title.
        seasoncheck = "season%s" % season
        checktitle = cleanmovie + seasoncheck
        self.super_url = []
        showlist = []
        query = "/search/%s.html" % (urllib.quote_plus(title))
        query = urlparse.urljoin(self.base_link, query)
        link = client.request(query)
        r = client.parseDOM(link, 'div', attrs={'class': 'ml-item'})
        for links in r:
            season_url = client.parseDOM(links, 'a', ret='href')[0]
            title = client.parseDOM(links, 'a', ret='title')[0]
            title = title.encode('utf-8')
            season_url = season_url.encode('utf-8')
            title = cleantitle.get(title)
            # print "YMOVIES check URLS %s %s %s %s" % (seasoncheck, season_url, cleanmovie, title)
            if checktitle in title:
                # print "YMOVIES PASSED %s" % (season_url)
                showlist.append(season_url)
        for seasonlist in showlist:
            # print ('YMOVIES TV' , seasonlist)
            mylink = client.request(seasonlist)
            # Follow the 'Watch movie' button to the player page.
            referer = re.findall(
                r'<a class="mod-btn mod-btn-watch" href="(.*?)" title="Watch movie">',
                mylink, re.I | re.DOTALL)[0]
            # print ('YMOVIES REFERER' , referer)
            epurl = client.request(referer)
            # Player parameters are embedded as JS literals on the page.
            i_d = re.findall(r'id: "(.*?)"', epurl, re.I | re.DOTALL)[0]
            server = re.findall(r'server: "(.*?)"', epurl,
                                re.I | re.DOTALL)[0]
            type = re.findall(r'type: "(.*?)"', epurl, re.I | re.DOTALL)[0]
            episode_id = re.findall(r'episode_id: "(.*?)"', epurl,
                                    re.I | re.DOTALL)[0]
            request_url = self.base_link + '/ajax/v3_movie_get_episodes/' + i_d + '/' + server + '/' + episode_id + '/' + type + '.html'
            headers = {
                'Referer': referer,
                'User-Agent': cache.get(client.randomagent, 1),
                'X-Requested-With': 'XMLHttpRequest'
            }
            episodelink = client.request(request_url, headers=headers)
            # Episode list for the chosen server, one <li> per episode.
            pattern = 'episodes-server-%s"(.+?)/ul>' % server
            match = re.findall(pattern, episodelink, re.DOTALL)[0]
            # print "YMOVIES EPISODELINK %s" % match
            blocks = re.compile('<li(.+?)/li>', re.DOTALL).findall(match)
            for fragment in blocks:
                epnumber = re.findall('title="Episode\s+(\d+):',
                                      fragment)[0]
                # Compare zero-padded episode numbers.
                episode = "%02d" % (int(episode))
                epnumber = "%02d" % (int(epnumber))
                # print "EPISODE NUMBER %s %s" % (epnumber, episode)
                if epnumber == episode:
                    epid = re.findall('id="episode-(\d+)"', fragment)[0]
                    episode_id = epid
                    # print "EPISODE NNUMBER Passed %s %s" % (epnumber, episode)
            # print ("YMOVIES REQUEST", episode_id)
            # Sign the playlist request with the site token scheme.
            token = self.__get_token()
            # print ("YMOVIES TOKEN", token)
            cookies = '%s%s%s=%s' % (self.key1, episode_id, self.key2,
                                     token)
            # print ("YMOVIES cookies", cookies)
            url_hash = urllib.quote(
                self.__uncensored(episode_id + self.key, token))
            # print ("YMOVIES hash", url_hash)
            url = urlparse.urljoin(
                self.base_link, self.playlist % (episode_id, url_hash))
            request_url = url
            # print ("YMOVIES REQUEST", request_url)
            self.super_url.append([request_url, cookies, referer])
        # print ("YMOVIES SELFURL", self.super_url)
        return self.super_url
    except:
        return
def livetv(self):
    """Build (when stale) the dynns playlist JSON and parse it.

    Fetches the device playlist XML (endpoints are base64-obfuscated),
    keeps channels in the 'Indian' category, writes the channel map to
    disk and parses it into self.srcs. Returns (generateJSON, srcs);
    returns None silently on failure.
    """
    try:
        # Regenerate at most every 168 hours, or when the file is missing.
        generateJSON = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            userAgent = self.getUserAgent()
            # Device serial is the last dot-separated UA component.
            deviceid = userAgent.split('.')[-1]
            # b64: the service's ip_check endpoint.
            url = base64.b64decode(
                "aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC9pcF9jaGVjay5waHA=")
            result = client.request(url, headers=self.headers)
            self.ipAddress = re.findall('Address: (.*)', result)[0]
            TIME = time.time()
            second = str(TIME).split('.')[0]
            first = int(second) + int(base64.b64decode('NjkyOTY5Mjk='))
            # NOTE(review): token is computed but never used below —
            # possibly leftover from an older auth scheme; verify.
            token = base64.b64encode(
                base64.b64decode('JXNAMm5kMkAlcw==') % (str(first), second))
            headers = {
                #'Authorization': base64.b64decode('QmFzaWMgWVdSdGFXNUFZWE5rWmpwaGMyUm1jWGRsY25SNQ=='),
                # b64 key decodes to 'User-Agent'.
                base64.b64decode("VXNlci1BZ2VudA=="):
                cache.get(self.getDeviceID, 600000, table='live_cache')
            }
            # b64: playlist endpoint, '%s' filled with the device serial.
            url = base64.b64decode(
                'aHR0cHM6Ly9hcHMuZHlubnMuY29tL2FwcHMvb3V0cHV0LnBocC9wbGF5bGlzdD90eXBlPXhtbCZkZXZpY2VTbj0lcw=='
            ) % deviceid
            result = client.request(url, headers=headers)
            result = client.parseDOM(result, "items")
            channelList = {}
            for channel in result:
                category = client.parseDOM(channel, "programCategory")[0]
                if category == 'Indian':
                    title = client.parseDOM(channel, "programTitle")[0]
                    #from ashock.modules import livemeta
                    #names = cache.get(livemeta.source().getLiveNames, 200, table='live_cache')
                    #title = cleantitle.live(title)
                    #if title == 'SKIP':
                    #    continue
                    poster = client.parseDOM(channel, "programImage")[0]
                    url = client.parseDOM(channel, "programURL")[0]
                    channelList[title] = {
                        'icon': poster,
                        'url': url,
                        'provider': 'dynns',
                        'source': 'dynns',
                        'direct': False,
                        'quality': 'HD'
                    }
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(channelList, outfile, sort_keys=True, indent=2)
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except:
        pass
def livetv(self):
    """Build (when stale) the aggregated IPTV channel JSON and parse it.

    Loads a JSON list of playlist descriptors (from a local file when
    'livelocal' is enabled, otherwise from self.base_location), fetches
    each enabled playlist, extracts channels with the descriptor's own
    regex, writes the combined map to disk and parses it into
    self.srcs. Returns (generateJSON, srcs); returns None on failure
    (traceback printed and error logged).
    """
    try:
        # Regenerate at most every 168 hours, or when the file is missing.
        generateJSON = cache.get(self.removeJSON, 168, __name__, table='live_cache')
        if not os.path.exists(self.filePath):
            generateJSON = 1
        if generateJSON:
            logger.debug('Generating %s JSON' % __name__, __name__)
            # Disabled experiment kept as-is (dead string literal below).
            '''
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(self.base_link, outfile, sort_keys=True, indent=2)
            filename = open(self.filePath)
            result = filename.read()
            filename.close()
            self.base_link = json.loads(result)
            '''
            channelList = {}
            if control.setting('livelocal') == 'true':
                self.base_location = os.path.join(control.dataPath,
                                                  'iptv_base.local')
                file = open(self.base_location)
                result = file.read()
                file.close()
            else:
                result = client.request(self.base_location)
            self.base_link = json.loads(result)
            for item in self.base_link:
                try:
                    enabled = item['enabled']
                    if enabled == "false":
                        logger.debug('Skipping %s' % item['link'], __name__)
                        continue
                    type = item['source']
                    link = item['link']
                    regex = item['regex']
                    # A '|' suffix on the link carries extra headers.
                    headers = link.rsplit('|', 1)[1]
                    link = link.rsplit('|', 1)[0]
                except:
                    # No '|' present: rsplit[1] raised, so no headers.
                    headers = None
                # Disabled experiment kept as-is (dead string literal).
                '''
                if control.setting('livelocal') == 'true' :
                    self.base_location = os.path.join(control.dataPath, 'test_base.local')
                    file = open(self.base_location)
                    result = file.read()
                    file.close()
                else:
                    result = client.request(link, timeout=5)
                '''
                logger.debug('Fetching %s' % link, __name__)
                result = client.request(link, timeout=5)
                if result == None:
                    continue
                result = result.replace('\r', '')
                # Each descriptor supplies its own (source,title,url) regex.
                result = re.findall(regex, result, re.IGNORECASE)
                for source, title, cUrl in result:
                    title = title.strip()
                    #title = cleantitle.live(title)
                    #if title == 'SKIP':
                    #    continue
                    if not headers == None:
                        cUrl = '%s|%s' % (cUrl, headers)
                    # Key combines title and source type to avoid clashes.
                    channelList['%s||%s' % (title, type)] = {
                        'icon': '',
                        'url': cUrl,
                        'provider': 'iptv',
                        'source': type,
                        'direct': False,
                        'quality': 'HD'
                    }
                logger.debug(
                    'Fetched [%s] from %s' % (len(channelList), link),
                    __name__)
            filePath = os.path.join(control.dataPath, self.fileName)
            with open(filePath, 'w') as outfile:
                json.dump(channelList, outfile, sort_keys=True, indent=2)
        liveParser = LiveParser(self.fileName, control.addon)
        self.srcs = liveParser.parseFile(decode=False)
        return (generateJSON, self.srcs)
    except Exception as e:
        import traceback
        traceback.print_exc()
        logger.error(e)
        pass
def sources(self, url):
    """Resolve playable source dicts for a onemovies url.

    url may carry a '?episode=N' suffix for TV episodes. Queries the
    server list for the video id, maps numeric server ids to hosters
    (presumably: 2-11 direct gvideo, 14 openload, 12 videowood —
    verify against the site), and returns a list of source dicts
    (possibly empty); never raises.
    """
    try:
        srcs = []
        if url == None: return srcs
        url = urlparse.urljoin(self.base_link, url)
        url = referer = url.replace('/watching.html', '')
        try:
            # Split off the episode number appended by episode().
            url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
        except:
            episode = None
        # Trailing digits of the path are the site's video id.
        vid_id = re.findall('-(\d+)', url)[-1]
        # onemovies_info returns (year, quality); normalise the label.
        quality = cache.get(self.onemovies_info, 9000, vid_id)[1].lower()
        if quality == 'cam' or quality == 'ts':
            quality = 'CAM'
        elif quality == 'hd':
            quality = 'HD'
        else:
            quality = 'SD'
        try:
            headers = {
                'X-Requested-With': 'XMLHttpRequest',
                'Referer': url
            }
            u = urlparse.urljoin(self.base_link, self.server_link % vid_id)
            r = client.request(u, headers=headers)
            r = client.parseDOM(r, 'div', attrs={'class': 'les-content'})
            # Pair each onclick payload with the link's episode number.
            r = zip(client.parseDOM(r, 'a', ret='onclick'),
                    client.parseDOM(r, 'a'))
            r = [(i[0], ''.join(re.findall('(\d+)', i[1])[:1])) for i in r]
            if not episode == None:
                r = [i[0] for i in r if '%01d' % int(i[1]) == episode]
            else:
                r = [i[0] for i in r]
            # onclick carries '(server_id, link_id)' number pairs.
            r = [re.findall('(\d+),(\d+)', i) for i in r]
            r = [i[0][:2] for i in r if len(i) > 0]
            links = []
            links += [{
                'source': 'gvideo',
                'url': self.direct_link + i[1],
                'direct': True
            } for i in r if 2 <= int(i[0]) <= 11]
            links += [{
                'source': 'openload.co',
                'url': self.embed_link + i[1],
                'direct': False
            } for i in r if i[0] == '14']
            links += [{
                'source': 'videowood.tv',
                'url': self.embed_link + i[1],
                'direct': False
            } for i in r if i[0] == '12']
            # Headers are smuggled to the player via a '|' url suffix.
            head = '|' + urllib.urlencode(headers)
            for i in links:
                srcs.append({
                    'source': i['source'],
                    'quality': quality,
                    'provider': 'Onemovies',
                    'url': urlparse.urljoin(self.base_link, i['url']) + head,
                    'direct': i['direct'],
                    'debridonly': False
                })
        except:
            pass
        return srcs
    except:
        return srcs