def idleForPlayback(self):
    """Block (up to ~20s) while Kodi's busy dialog is on screen.

    Polls the busydialog window; bails out as soon as it disappears so
    playback can proceed without the spinner fighting the player window.
    """
    for _ in range(200):
        if control.condVisibility('Window.IsActive(busydialog)') == 1:
            control.idle()
        else:
            break
        control.sleep(100)
def setView(content, viewDict=None):
    """Apply the user's saved view mode for the given container content type.

    Looks up a per-skin saved view in the 'views' table once the container
    reports the expected content; otherwise falls back to viewDict[skin].
    """
    skin = control.skin
    for i in range(0, 200):
        if control.condVisibility('Container.Content(%s)' % content):
            try:
                record = (skin, content)
                dbcon = database.connect(control.databaseFile)
                dbcur = dbcon.cursor()
                # Fixed: parameterized query — the original interpolated
                # values straight into the SQL string, which is
                # injection-prone and breaks on values containing quotes.
                dbcur.execute(
                    "SELECT * FROM views WHERE skin = ? AND view_type = ?",
                    (record[0], record[1]))
                view = dbcur.fetchone()
                view = view[2]
                if view is None: raise Exception()
                return control.execute('Container.SetViewMode(%s)' % str(view))
            except:
                # No saved view (or DB error): use the addon's default map.
                try: return control.execute('Container.SetViewMode(%s)' % str(viewDict[skin]))
                except: return
        else:
            # Content type not reported: fall back to the default map.
            try: return control.execute('Container.SetViewMode(%s)' % str(viewDict[skin]))
            except: return
        control.sleep(100)
def onPlayBackStarted(self):
    """Playback-start hook: clear the busy dialog, optionally show the
    load-time toast, resume from a stored bookmark offset and fetch
    subtitles when the user has them enabled."""
    # Let the busy dialog clear (max ~20s).
    for _ in range(200):
        if control.condVisibility('Window.IsActive(busydialog)') == 1:
            control.idle()
        else:
            break
        control.sleep(100)

    # Optional toast showing how long the stream took to start.
    if control.setting('playback_info') == 'true':
        elapsedTime = '%s %s %s' % (
            control.lang(30464).encode('utf-8'),
            int((time.time() - self.loadingTime)),
            control.lang(30465).encode('utf-8'))
        control.infoDialog(elapsedTime, heading=self.name)

    # Resume from bookmark; '0' means start from the beginning.
    try:
        if self.offset == '0': raise Exception()
        self.seekTime(float(self.offset))
    except:
        pass

    # Best-effort subtitle fetch; retries without season/episode on failure.
    try:
        if not control.setting('subtitles') == 'true': raise Exception()
        try:
            subtitle = subtitles.get(self.name, self.imdb, self.season, self.episode)
        except:
            subtitle = subtitles.get(self.name, self.imdb, '', '')
    except:
        pass
def service(self):
    """Background service loop: run a library update at most every 6 hours.

    The last-run timestamp is persisted both in a window property (fast,
    per-session) and the 'service' table (survives restarts). The loop
    exits when Kodi signals shutdown via xbmc.abortRequested.
    """
    # Seed the last-run timestamp from the DB (or a far-past epoch default).
    try:
        control.makeFile(control.dataPath)
        dbcon = database.connect(control.libcacheFile)
        dbcur = dbcon.cursor()
        dbcur.execute("CREATE TABLE IF NOT EXISTS service (""setting TEXT, ""value TEXT, ""UNIQUE(setting)"");")
        dbcur.execute("SELECT * FROM service WHERE setting = 'last_run'")
        fetch = dbcur.fetchone()
        if fetch is None:
            serviceProperty = "1970-01-01 23:59:00.000000"
            dbcur.execute("INSERT INTO service Values (?, ?)", ('last_run', serviceProperty))
            dbcon.commit()
        else:
            serviceProperty = str(fetch[1])
        dbcon.close()
    except:
        try: return dbcon.close()
        except: return

    try:
        control.window.setProperty(self.property, serviceProperty)
    except:
        return

    # Poll every 10s until Kodi shuts down.
    while (not xbmc.abortRequested):
        try:
            serviceProperty = control.window.getProperty(self.property)
            interval = datetime.timedelta(hours=6)
            lastRun = datetime.datetime.strptime(serviceProperty, '%Y-%m-%d %H:%M:%S.%f')
            now = datetime.datetime.now()
            # Not due yet — raising skips straight to the sleep below.
            if not abs(now - lastRun) > interval: raise Exception()
            # Don't update while playing or while the library is scanning.
            if (control.player.isPlaying() or control.condVisibility('Library.IsScanningVideo')): raise Exception()
            serviceProperty = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
            control.window.setProperty(self.property, serviceProperty)
            # Persist the new timestamp; best-effort, never fatal.
            try:
                dbcon = database.connect(control.libcacheFile)
                dbcur = dbcon.cursor()
                dbcur.execute("CREATE TABLE IF NOT EXISTS service (""setting TEXT, ""value TEXT, ""UNIQUE(setting)"");")
                dbcur.execute("DELETE FROM service WHERE setting = 'last_run'")
                dbcur.execute("INSERT INTO service Values (?, ?)", ('last_run', serviceProperty))
                dbcon.commit()
                dbcon.close()
            except:
                try: dbcon.close()
                except: pass
            if not control.setting('service_update') == 'true': raise Exception()
            info = control.setting('service_notification') or 'true'
            self.update(None, info=info)
        except:
            pass
        control.sleep(10000)
def play(self, name, title, year, imdb, tmdb, tvdb, tvrage, season, episode, tvshowtitle, alter, date, meta, url):
    """Collect sources for an item, pick one (dialog or direct) and play it.

    The `url` argument doubles as a mode switch: 'dialog://' forces the
    source-selection dialog, 'direct://' forces autoplay of the top source.
    Returns the resolved url, or None on failure.
    """
    control.log('############# PLAY # %s' % url)
    try:
        # Launched from outside the plugin (e.g. library): reset playlist
        # and hand Kodi a dummy resolved item first.
        if not control.infoLabel('Container.FolderPath').startswith('plugin://'):
            control.playlist.clear()
            control.resolve(int(sys.argv[1]), True, control.item(path=''))
        control.execute('Dialog.Close(okdialog)')

        if imdb == '0': imdb = '0000000'
        imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
        content = 'movie' if tvshowtitle == None else 'episode'

        self.sources = self.getSources(name, title, year, imdb, tmdb, tvdb, tvrage, season, episode, tvshowtitle, alter, date)
        if self.sources == []: raise Exception()
        self.sources = self.sourcesFilter()

        # Decide between the selection dialog and direct autoplay.
        if control.window.getProperty('PseudoTVRunning') == 'True':
            url = self.sourcesDirect()
        elif url == 'dialog://':
            url = self.sourcesDialog()
        elif url == 'direct://':
            url = self.sourcesDirect()
        elif not control.infoLabel('Container.FolderPath').startswith('plugin://') and control.setting('autoplay_library') == 'false':
            url = self.sourcesDialog()
        elif control.infoLabel('Container.FolderPath').startswith('plugin://') and control.setting('autoplay') == 'false':
            url = self.sourcesDialog()
        else:
            url = self.sourcesDirect()

        if url == None: raise Exception()
        if url == 'close://': return
        if control.setting('playback_info') == 'true':
            control.infoDialog(self.selectedSource, heading=name)
        control.sleep(200)
        from resources.lib.libraries.player import player
        player().run(content, name, url, year, imdb, tvdb, meta)
        return url
    except:
        control.infoDialog(control.lang(30501).encode('utf-8'))
def run(self, title, year, season, episode, imdb, tvdb, url, meta):
    """Start playback of a resolved url and keep bookkeeping state.

    Builds a display name, restores the resume offset from bookmarks,
    attaches artwork/info to the list item, publishes trakt ids via a
    window property and blocks in keepPlaybackAlive() until playback ends.
    """
    try:
        control.sleep(200)
        self.totalTime = 0
        self.currentTime = 0
        self.content = 'movie' if season == None or episode == None else 'episode'
        self.title = title
        self.year = year
        # Human-readable name: "Title (Year)" or "Title S01E02".
        if self.content == 'movie':
            self.name = urllib.quote_plus(title) + urllib.quote_plus(' (%s)' % year)
        else:
            self.name = urllib.quote_plus(title) + urllib.quote_plus(' S%02dE%02d' % (int(season), int(episode)))
        self.name = urllib.unquote_plus(self.name)
        self.season = '%01d' % int(season) if self.content == 'episode' else None
        self.episode = '%01d' % int(episode) if self.content == 'episode' else None
        self.DBID = None
        self.imdb = imdb if not imdb == None else '0'
        self.tvdb = tvdb if not tvdb == None else '0'
        # Only keep ids that are actually known.
        self.ids = {'imdb': self.imdb, 'tvdb': self.tvdb}
        self.ids = dict((k, v) for k, v in self.ids.iteritems() if not v == '0')
        self.offset = bookmarks().get(self.name, self.year)
        poster, thumb, meta = self.getMeta(meta)
        self.meta = meta
        item = control.item(path=url)
        item.setArt({
            'icon': thumb,
            'thumb': thumb,
            'poster': poster,
            'tvshow.poster': poster,
            'season.poster': poster
        })
        item.setInfo(type='Video', infoLabels=meta)
        # Inside a plugin container: play directly; otherwise let Kodi
        # resolve the item we were invoked with.
        if 'plugin' in control.infoLabel('Container.PluginName'):
            control.player.play(url, item)
        control.resolve(int(sys.argv[1]), True, item)
        control.window.setProperty('script.trakt.ids', json.dumps(self.ids))
        self.keepPlaybackAlive(meta)
        control.window.clearProperty('script.trakt.ids')
    except:
        return
def run(self, url):
    """Play `url` using the focused list item's label/icon, then wait
    (up to 4 minutes) for video playback to actually begin."""
    label = control.infoLabel('ListItem.Label')
    icon = control.infoLabel('ListItem.Icon')
    item = control.item(path=url, iconImage=icon, thumbnailImage=icon)
    item.setInfo(type='Video', infoLabels={'title': label})
    control.player.play(url, item)
    for _ in range(240):
        if self.isPlayingVideo():
            break
        control.sleep(1000)
def run(self, url):
    """Start playback of `url`, labelled after the focused list item, and
    poll until the video is playing (or ~240s have elapsed)."""
    title = control.infoLabel('ListItem.Label')
    image = control.infoLabel('ListItem.Icon')
    item = control.item(path=url, iconImage=image, thumbnailImage=image)
    item.setInfo(type='Video', infoLabels={'title': title})
    control.player.play(url, item)
    waited = 0
    while waited < 240:
        if self.isPlayingVideo():
            break
        control.sleep(1000)
        waited += 1
def play(self, name, title, year, imdb, tmdb, tvdb, tvrage, season, episode, tvshowtitle, alter, date, meta, url):
    """Gather sources for the item, choose one and hand it to the player.

    `url` acts as a mode flag: 'dialog://' shows the selection dialog,
    'direct://' autoplays the best source. Returns the chosen url.
    """
    try:
        # Outside-plugin launch (library): clear playlist and resolve a
        # placeholder item so Kodi's invocation is satisfied.
        if not control.infoLabel('Container.FolderPath').startswith('plugin://'):
            control.playlist.clear()
            control.resolve(int(sys.argv[1]), True, control.item(path=None))
        control.execute('Dialog.Close(okdialog)')

        if imdb == '0': imdb = '0000000'
        imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
        content = 'movie' if tvshowtitle == None else 'episode'

        self.sources = self.getSources(name, title, year, imdb, tmdb, tvdb, tvrage, season, episode, tvshowtitle, alter, date)
        if self.sources == []: raise Exception()
        self.sources = self.sourcesFilter()

        # Pick dialog vs direct autoplay.
        if control.window.getProperty('PseudoTVRunning') == 'True':
            url = self.sourcesDirect()
        elif url == 'dialog://':
            url = self.sourcesDialog()
        elif url == 'direct://':
            url = self.sourcesDirect()
        elif not control.infoLabel('Container.FolderPath').startswith('plugin://') and control.setting('autoplay_library') == 'false':
            url = self.sourcesDialog()
        elif control.infoLabel('Container.FolderPath').startswith('plugin://') and control.setting('autoplay') == 'false':
            url = self.sourcesDialog()
        else:
            url = self.sourcesDirect()

        if url == None: raise Exception()
        if url == 'close://': return
        if control.setting('playback_info') == 'true':
            control.infoDialog(self.selectedSource, heading=name)
        try: self.progressDialog.close()
        except: pass
        control.sleep(200)
        from resources.lib.libraries.player import player
        player().run(content, name, url, year, imdb, tvdb, meta)
        return url
    except:
        control.infoDialog(control.lang(30501).encode('utf-8'))
def run(self, url, selectGame, side):
    """Spawn the Java stream-proxy helper for a game/side pair, then
    resolve `url` and block while the video plays."""
    if selectGame == None or side == None:
        return
    control.resolve(int(sys.argv[1]), True, control.item(path=url))
    cmd = ['java', '-jar', jarFile, selectGame, side]
    startupinfo = None
    if os.name == 'nt':
        # Suppress the console window on Windows.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    self.process = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    startupinfo=startupinfo)
    if os.name == 'posix':
        success = False
        success, output = FuckNeulionClient.request_proxy_hack(selectGame, side)
    control.sleep(1000)
    control.resolve(int(sys.argv[1]), True, control.item(path=url))
    # Wait up to ~4 minutes for playback to start...
    for _ in range(240):
        if self.isPlayingVideo():
            break
        control.sleep(1000)
    # ...then hold until it ends, plus a short grace period.
    while self.isPlayingVideo():
        control.sleep(1000)
    control.sleep(5000)
def run(self, url, selectGame, side):
    """Launch the Java proxy for the selected game feed and keep this
    handler alive while the resulting video plays."""
    if selectGame == None or side == None:
        return
    control.resolve(int(sys.argv[1]), True, control.item(path=url))
    command = ['java', '-jar', jarFile, selectGame, side]
    startupinfo = None
    if os.name == 'nt':
        # Hide the spawned console window on Windows.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    self.process = subprocess.Popen(command,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    startupinfo=startupinfo)
    if os.name == 'posix':
        success = False
        success, output = FuckNeulionClient.request_proxy_hack(selectGame, side)
    control.sleep(1000)
    control.resolve(int(sys.argv[1]), True, control.item(path=url))
    attempts = 0
    while attempts < 240 and not self.isPlayingVideo():
        control.sleep(1000)
        attempts += 1
    while self.isPlayingVideo():
        control.sleep(1000)
    control.sleep(5000)
def setView(content, viewDict=None):
    """Apply the saved per-skin view mode once the container reports the
    expected content type; fall back to viewDict[skin] otherwise.

    Polls up to ~20s for the container content to match.
    """
    # Fixed: `skin` was assigned inside the try block, so any failure
    # before that line left the except-branch fallback referencing an
    # unbound name (silently skipping the viewDict fallback).
    skin = control.skin
    for i in range(0, 200):
        if control.condVisibility('Container.Content(%s)' % content):
            try:
                record = (skin, content)
                dbcon = database.connect(control.databaseFile)
                dbcur = dbcon.cursor()
                # Fixed: parameterized query instead of %-interpolated SQL
                # (injection-prone, breaks on quotes in values).
                dbcur.execute(
                    "SELECT * FROM views WHERE skin = ? AND view_type = ?",
                    (record[0], record[1]))
                view = dbcur.fetchone()
                view = view[2]
                if view is None: raise Exception()
                return control.execute('Container.SetViewMode(%s)' % str(view))
            except:
                try: return control.execute('Container.SetViewMode(%s)' % str(viewDict[skin]))
                except: return
        control.sleep(100)
def onPlayBackStarted(self):
    """Minimal playback-start hook: brief pause, then dismiss any busy UI."""
    control.sleep(200)
    control.idle()
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape stream sources from the Fmovies site for a movie/episode.

    `url` is either a direct page url or a querystring of metadata
    (title/year/season/episode) used to search the site first.
    Returns a list of {'source','quality','provider','url'} dicts.

    Fixes vs original: `url == self.base_link` and `quality == 'CAM'` were
    no-op comparisons where assignments were intended; statements broken
    across lines reconstructed; Py2 debug prints removed.
    """
    try:
        sources = []
        # Current hour as an epoch timestamp; the site's token scheme
        # apparently keys off this — TODO confirm against __get_token.
        myts = str(((int(time.time()) / 3600) * 3600))
        if url == None: return sources

        if not str(url).startswith('http'):
            # Metadata querystring: search the site for the item's page.
            try:
                data = urlparse.parse_qs(url)
                data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
                title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
                year = re.findall('(\d{4})', data['premiered'])[0] if 'tvshowtitle' in data else data['year']
                try: episode = data['episode']
                except: pass
                query = {'keyword': title, 's': ''}
                search_url = urlparse.urljoin(self.base_link, '/search')
                search_url = search_url + '?' + urllib.urlencode(query)
                result = client.request(search_url)
                r = client.parseDOM(result, 'div', attrs={'class': '[^"]*movie-list[^"]*'})[0]
                r = client.parseDOM(r, 'div', attrs={'class': 'item'})
                r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', attrs={'class': 'name'})) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0]
                r = [(re.sub('http.+?//.+?/', '/', i[0]), re.sub('&#\d*;', '', i[1])) for i in r]
                if 'season' in data:
                    # Shows are listed as "Title N" — match title and season.
                    url = [(i[0], re.findall('(.+?) (\d*)$', i[1])) for i in r]
                    url = [(i[0], i[1][0][0], i[1][0][1]) for i in url if len(i[1]) > 0]
                    url = [i for i in url if cleantitle.get(title) in cleantitle.get(i[1])]
                    url = [i for i in url if '%01d' % int(data['season']) == '%01d' % int(i[2])]
                else:
                    url = [i for i in r if cleantitle.get(title) in cleantitle.get(i[1])]
                url = url[0][0]
                url = urlparse.urljoin(self.base_link, url)
                r2 = url.split('.')[-1]
            except:
                # FIX: was `url == self.base_link` (comparison, a no-op),
                # leaving `url` as the unmatched querystring on failure.
                url = self.base_link

        try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0]
        except: pass

        referer = url
        result = client.request(url, limit='0')
        result, headers, content, cookie1 = client.request(url, limit='0', output='extended')

        # Hit the menu-bar endpoint to obtain the second session cookie.
        hash_url = urlparse.urljoin(self.base_link, '/user/ajax/menu-bar')
        query = {'ts': myts}
        query.update(self.__get_token(query))
        hash_url = hash_url + '?' + urllib.urlencode(query)
        r1, headers, content, cookie2 = client.request(hash_url, limit='0', output='extended', cookie=cookie1)

        # Sanity-check the page belongs to the right year.
        alina = client.parseDOM(result, 'title')[0]
        atr = [i for i in client.parseDOM(result, 'title') if len(re.findall('(\d{4})', i)) > 0][-1]
        if 'season' in data:
            result = result if year in atr or data['year'] in atr else None
        else:
            result = result if year in atr else None

        try: quality = client.parseDOM(result, 'span', attrs={'class': 'quality'})[0].lower()
        except: quality = 'hd'
        if quality == 'cam' or quality == 'ts': quality = 'CAM'
        elif quality == 'hd' or 'hd ' in quality: quality = 'HD'
        else: quality = 'SD'

        result = client.parseDOM(result, 'ul', attrs={'data-range-id': "0"})
        servers = []
        servers = zip(client.parseDOM(result, 'a', ret='data-id'), client.parseDOM(result, 'a'))
        servers = [(i[0], re.findall('(\d+)', i[1])) for i in servers]
        servers = [(i[0], ''.join(i[1][:1])) for i in servers]
        try: servers = [i for i in servers if '%01d' % int(i[1]) == '%01d' % int(episode)]
        except: pass

        for s in servers[:4]:
            try:
                time.sleep(0.2)
                hash_url = urlparse.urljoin(self.base_link, self.hash_link)
                query = {'ts': myts, 'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                hash_url = hash_url + '?' + urllib.urlencode(query)
                headers['Referer'] = urlparse.urljoin(url, s[0])
                headers['Cookie'] = cookie1 + ';' + cookie2 + ';user-info=null; MarketGidStorage=%7B%220%22%3A%7B%22svspr%22%3A%22%22%2C%22svsds%22%3A3%2C%22TejndEEDj%22%3A%22MTQ4MTM2ODE0NzM0NzQ4NTMyOTAx%22%7D%2C%22C48532%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368147359%7D%2C%22C77945%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368147998%7D%2C%22C77947%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368148109%7D%7D'
                result = client.request(hash_url, headers=headers, limit='0')
                time.sleep(0.3)
                query = {'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                # NOTE(review): this appends a querystring to `url` on every
                # loop iteration, compounding across servers — looks
                # unintended but preserved; verify against the site API.
                url = url + '?' + urllib.urlencode(query)
                result = json.loads(result)
                quality = 'SD'
                if s[1] == '1080': quality = '1080p'
                if s[1] == '720': quality = 'HD'
                # FIX: was `quality == 'CAM'` (comparison, a no-op).
                if s[1] == 'CAM': quality = 'CAM'
                query = result['params']
                query['mobile'] = '0'
                query.update(self.__get_token(query))
                grabber = result['grabber'] + '?' + urllib.urlencode(query)
                result = client.request(grabber, headers=headers, referer=url, limit='0')
                result = json.loads(result)
                result = result['data']
                result = [i['file'] for i in result if 'file' in i]
                for i in result:
                    if 'google' in i:
                        try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'provider': 'Fmovies', 'url': i})
                        except: pass
                    else:
                        try: sources.append({'source': 'gvideo', 'quality': quality, 'provider': 'Fmovies', 'url': i})
                        except: pass
                control.sleep(410)
            except:
                pass

        if quality == 'CAM':
            for i in sources: i['quality'] = 'CAM'
        return sources
    except:
        return sources
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape embed sources from MoviesHD for the given page path.

    Posts to /ajax/nembeds.php with a token/elid pair scraped from the
    page, then classifies the returned iframes (gvideo / openload /
    videomega). Returns a list of source dicts.

    Fixes vs original: the `url` rewrite was duplicated identically in
    both branches of the episode/movie check (unified); stray Py2 debug
    prints removed.
    """
    try:
        sources = []
        if url == None: return sources

        url1 = urlparse.urljoin(self.base_link, url)
        result, headers, content, cookie = client.request(url1, output='extended')

        # Bearer token hides in the __utmx cookie; fall back to 'false'.
        try:
            auth = re.findall('__utmx=(.+)', cookie)[0].split(';')[0]
            auth = 'Bearer %s' % urllib.unquote_plus(auth)
        except:
            auth = 'Bearer false'
        headers['Authorization'] = auth
        headers['X-Requested-With'] = 'XMLHttpRequest'
        headers['Cookie'] = cookie

        u = '/ajax/nembeds.php'
        u = urlparse.urljoin(self.base_link, u)
        action = 'getEpisodeEmb' if '/episode/' in url else 'getMovieEmb'
        # NOTE(review): movies also get the '/tv-series' prefix — both
        # branches of the original did exactly this; verify intended.
        url = urlparse.urljoin(self.base_link, '/tv-series' + url)
        headers['Referer'] = url
        control.sleep(200)

        # elid is the base64'd current epoch; token/idEl scraped from page.
        elid = urllib.quote(base64.encodestring(str(int(time.time()))).strip())
        token = re.findall("var\s+tok\s*=\s*'([^']+)", result)[0]
        idEl = re.findall('elid\s*=\s*"([^"]+)', result)[0]
        post = {'action': action, 'idEl': idEl, 'token': token, 'elid': elid}
        post = urllib.urlencode(post)

        r = client.request(u, post=post, headers=headers, output='cookie2')
        r = str(json.loads(r))
        r = client.parseDOM(r, 'iframe', ret='.+?') + client.parseDOM(r, 'IFRAME', ret='.+?')

        links = []
        for i in r:
            try:
                links += [{'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'url': i}]
            except:
                pass
        links += [{'source': 'openload', 'quality': 'SD', 'url': i} for i in r if 'openload.co' in i]
        links += [{'source': 'videomega', 'quality': 'SD', 'url': i} for i in r if 'videomega.tv' in i]

        for i in links:
            sources.append({'source': i['source'], 'quality': i['quality'], 'provider': 'MoviesHD', 'url': i['url']})
        return sources
    except Exception as e:
        control.log('ERROR moviesHD %s' % e)
        return sources
def playItem(self, content, name, year, imdb, tvdb, source):
    """Resolve one of the candidate sources (walking the directory's
    sibling items for extras of the same source/quality) and play it.

    Fixes vs original: the `if w.is_alive() == True:` statement was broken
    mid-expression across lines (reconstructed); `== True`/`== False`
    comparisons replaced with direct truth tests; `next` renamed so the
    builtin is not shadowed.
    """
    try:
        control.resolve(int(sys.argv[1]), True, control.item(path=''))
        control.execute('Dialog.Close(okdialog)')

        # Collect candidate sources from the directory items after (nxt)
        # and before (prv) the current position; `seen` dedupes paths.
        nxt = [] ; prv = [] ; seen = []
        meta = None
        for i in range(1, 10000):
            try:
                u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
                if u in seen: raise Exception()
                seen.append(u)
                u = dict(urlparse.parse_qsl(u.replace('?', '')))
                if 'meta' in u: meta = u['meta']
                u = json.loads(u['source'])[0]
                nxt.append(u)
            except:
                break
        for i in range(-10000, 0)[::-1]:
            try:
                u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
                if u in seen: raise Exception()
                seen.append(u)
                u = dict(urlparse.parse_qsl(u.replace('?', '')))
                if 'meta' in u: meta = u['meta']
                u = json.loads(u['source'])[0]
                prv.append(u)
            except:
                break

        items = json.loads(source)
        source, quality = items[0]['source'], items[0]['quality']
        # Prefer same host+quality, then same quality from other hosts.
        items = [i for i in items + nxt + prv if i['quality'] == quality and i['source'] == source][:10]
        items += [i for i in nxt + prv if i['quality'] == quality and not i['source'] == source][:10]

        self.progressDialog = control.progressDialog
        self.progressDialog.create(control.addonInfo('name'), '')
        self.progressDialog.update(0)

        block = None  # host that timed out; skip its remaining entries
        for i in range(len(items)):
            try:
                self.progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
                if items[i]['source'] == block: raise Exception()

                w = workers.Thread(self.sourcesResolve, items[i]['url'], items[i]['provider'])
                w.start()

                # Wait for the resolver; `m` flags that a captcha/keyboard
                # appeared, which earns the resolver extra time below.
                m = ''
                for x in range(3600):
                    if self.progressDialog.iscanceled(): return self.progressDialog.close()
                    if xbmc.abortRequested == True: return sys.exit()
                    k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                    if k: m += '1'; m = m[-1]
                    if (not w.is_alive() or x > 30) and not k: break
                    time.sleep(1)
                for x in range(30):
                    if m == '': break
                    if self.progressDialog.iscanceled(): return self.progressDialog.close()
                    if xbmc.abortRequested == True: return sys.exit()
                    if not w.is_alive(): break
                    time.sleep(1)
                if w.is_alive(): block = items[i]['source']

                if self.url == None: raise Exception()

                try: self.progressDialog.close()
                except: pass
                control.sleep(200)
                if control.setting('playback_info') == 'true':
                    control.infoDialog(items[i]['label'], heading=name)
                from resources.lib.libraries.player import player
                player().run(content, name, self.url, year, imdb, tvdb, meta)
                return self.url
            except:
                pass

        try: self.progressDialog.close()
        except: pass
        raise Exception()
    except:
        control.infoDialog(control.lang(30501).encode('utf-8'))
        pass
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape Fmovies stream sources (variant using tuple-indexed
    client.request results for cookies/headers).

    Fixes vs original: `url == self.base_link` and `quality == 'CAM'`
    were no-op comparisons where assignments were intended; statements
    broken mid-expression across lines reconstructed; debug prints removed.
    """
    try:
        sources = []
        # Hour-granular timestamp used by the site's token scheme —
        # TODO confirm against __get_token.
        myts = str(((int(time.time()) / 3600) * 3600))
        if url == None: return sources

        if not str(url).startswith('http'):
            try:
                data = urlparse.parse_qs(url)
                data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
                title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
                year = re.findall('(\d{4})', data['premiered'])[0] if 'tvshowtitle' in data else data['year']
                try: episode = data['episode']
                except: pass
                query = {'keyword': title, 's': ''}
                search_url = urlparse.urljoin(self.base_link, '/search')
                search_url = search_url + '?' + urllib.urlencode(query)
                result = client.request(search_url)
                r = client.parseDOM(result, 'div', attrs={'class': '[^"]*movie-list[^"]*'})[0]
                r = client.parseDOM(r, 'div', attrs={'class': 'item'})
                r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', attrs={'class': 'name'})) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0]
                r = [(re.sub('http.+?//.+?/', '/', i[0]), re.sub('&#\d*;', '', i[1])) for i in r]
                if 'season' in data:
                    url = [(i[0], re.findall('(.+?) (\d*)$', i[1])) for i in r]
                    url = [(i[0], i[1][0][0], i[1][0][1]) for i in url if len(i[1]) > 0]
                    url = [i for i in url if cleantitle.get(title) in cleantitle.get(i[1])]
                    url = [i for i in url if '%01d' % int(data['season']) == '%01d' % int(i[2])]
                else:
                    url = [i for i in r if cleantitle.get(title) in cleantitle.get(i[1])]
                url = url[0][0]
                url = urlparse.urljoin(self.base_link, url)
                r2 = url.split('.')[-1]
            except:
                # FIX: was `url == self.base_link` (no-op comparison).
                url = self.base_link

        try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0]
        except: pass

        referer = url
        result = client.request(url, limit='0')
        r = client.request(url, limit='0', output='extended')
        cookie1 = r[4]
        headers = r[3]
        r1 = r[0]

        # Fetch the second session cookie from the menu-bar endpoint.
        hash_url = urlparse.urljoin(self.base_link, '/user/ajax/menu-bar')
        query = {'ts': myts}
        query.update(self.__get_token(query))
        hash_url = hash_url + '?' + urllib.urlencode(query)
        r = client.request(hash_url, limit='0', output='extended', cookie=cookie1)
        cookie2 = r[4]
        headers = r[3]
        r1 = r[0]

        alina = client.parseDOM(result, 'title')[0]
        atr = [i for i in client.parseDOM(result, 'title') if len(re.findall('(\d{4})', i)) > 0][-1]
        if 'season' in data:
            result = result if year in atr or data['year'] in atr else None
        else:
            result = result if year in atr else None

        try: quality = client.parseDOM(result, 'span', attrs={'class': 'quality'})[0].lower()
        except: quality = 'hd'
        if quality == 'cam' or quality == 'ts': quality = 'CAM'
        elif quality == 'hd' or 'hd ' in quality: quality = 'HD'
        else: quality = 'SD'

        result = client.parseDOM(result, 'ul', attrs={'data-range-id': "0"})
        servers = []
        servers = zip(client.parseDOM(result, 'a', ret='data-id'), client.parseDOM(result, 'a'))
        servers = [(i[0], re.findall('(\d+)', i[1])) for i in servers]
        servers = [(i[0], ''.join(i[1][:1])) for i in servers]
        try: servers = [i for i in servers if '%01d' % int(i[1]) == '%01d' % int(episode)]
        except: pass

        for s in servers[:4]:
            try:
                time.sleep(0.2)
                hash_url = urlparse.urljoin(self.base_link, self.hash_link)
                query = {'ts': myts, 'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                hash_url = hash_url + '?' + urllib.urlencode(query)
                headers['Referer'] = urlparse.urljoin(url, s[0])
                headers['Cookie'] = cookie1 + ';' + cookie2 + ';user-info=null; MarketGidStorage=%7B%220%22%3A%7B%22svspr%22%3A%22%22%2C%22svsds%22%3A3%2C%22TejndEEDj%22%3A%22MTQ4MTM2ODE0NzM0NzQ4NTMyOTAx%22%7D%2C%22C48532%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368147359%7D%2C%22C77945%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368147998%7D%2C%22C77947%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368148109%7D%7D'
                result = client.request(hash_url, headers=headers, limit='0')
                time.sleep(0.3)
                query = {'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                # NOTE(review): compounds a querystring onto `url` each
                # iteration — preserved from the original; verify intended.
                url = url + '?' + urllib.urlencode(query)
                result = json.loads(result)
                quality = 'SD'
                if s[1] == '1080': quality = '1080p'
                if s[1] == '720': quality = 'HD'
                # FIX: was `quality == 'CAM'` (no-op comparison).
                if s[1] == 'CAM': quality = 'CAM'
                query = result['params']
                query['mobile'] = '0'
                query.update(self.__get_token(query))
                grabber = result['grabber'] + '?' + urllib.urlencode(query)
                if not grabber.startswith('http'): grabber = 'http:' + grabber
                result = client.request(grabber, headers=headers, referer=url, limit='0')
                result = json.loads(result)
                result = result['data']
                result = [i['file'] for i in result if 'file' in i]
                for i in result:
                    if 'google' in i:
                        try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'provider': 'Fmovies', 'url': i})
                        except: pass
                    else:
                        try: sources.append({'source': 'gvideo', 'quality': quality, 'provider': 'Fmovies', 'url': i})
                        except: pass
                control.sleep(410)
            except:
                pass

        if quality == 'CAM':
            for i in sources: i['quality'] = 'CAM'
        return sources
    except:
        return sources
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape Fmovies stream sources (variant with year-tolerant show
    matching and fresh XHR headers per server).

    Fixes vs original: `url == self.base_link` and `quality == 'CAM'`
    were no-op comparisons where assignments were intended; statements
    broken mid-expression across lines reconstructed; debug prints removed.
    """
    try:
        sources = []
        # Hour-granular timestamp for the site's token scheme —
        # TODO confirm against __get_token.
        myts = str(((int(time.time()) / 3600) * 3600))
        if url == None: return sources

        if not str(url).startswith('http'):
            try:
                data = urlparse.parse_qs(url)
                data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
                title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
                year = re.findall('(\d{4})', data['premiered'])[0] if 'tvshowtitle' in data else data['year']
                try: episode = data['episode']
                except: pass
                query = {'keyword': title, 's': ''}
                search_url = urlparse.urljoin(self.base_link, '/search')
                search_url = search_url + '?' + urllib.urlencode(query)
                result = client.request(search_url)
                r = client.parseDOM(result, 'div', attrs={'class': '[^"]*movie-list[^"]*'})[0]
                r = client.parseDOM(r, 'div', attrs={'class': 'item'})
                r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', attrs={'class': 'name'})) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0]
                r = [(re.sub('http.+?//.+?/', '/', i[0]), re.sub('&#\d*;', '', i[1])) for i in r]
                if 'season' in data:
                    # Strip parenthesized suffixes, then match "Title N".
                    r = [(i[0], re.sub(' \(\w*\)', '', i[1])) for i in r]
                    url = [(i[0], re.findall('(.+?) (\d+)$', i[1])) for i in r]
                    url = [(i[0], i[1][0][0], i[1][0][1]) for i in url if len(i[1]) > 0]
                    url = [i for i in url if cleantitle.get(title) in cleantitle.get(i[1])]
                    url = [i for i in url if '%01d' % int(data['season']) == '%01d' % int(i[2])]
                else:
                    url = [i for i in r if cleantitle.get(title) in cleantitle.get(i[1])]
                url = url[0][0]
                url = urlparse.urljoin(self.base_link, url)
                r2 = url.split('.')[-1]
            except:
                # FIX: was `url == self.base_link` (no-op comparison).
                url = self.base_link

        try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0]
        except: pass

        referer = url
        result = client.request(url, limit='0')
        r = client.request(url, limit='0', output='extended')
        cookie1 = r[4] ; headers = r[3] ; r1 = r[0]

        hash_url = urlparse.urljoin(self.base_link, '/user/ajax/menu-bar')
        query = {'ts': myts}
        query.update(self.__get_token(query))
        hash_url = hash_url + '?' + urllib.urlencode(query)
        r = client.request(hash_url, limit='0', output='extended', cookie=cookie1)
        cookie2 = r[4] ; headers = r[3] ; r1 = r[0]

        alina = client.parseDOM(result, 'title')[0]
        atr = [i for i in client.parseDOM(result, 'title') if len(re.findall('(\d{4})', i)) > 0][-1]
        if 'season' in data:
            # Accept year +/- 1 for shows (sites list air vs premiere year).
            years = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1)]
            mychk = False
            for y in years:
                if y in atr: mychk = True
            result = result if mychk == True else None
        else:
            result = result if year in atr else None

        try: quality = client.parseDOM(result, 'span', attrs={'class': 'quality'})[0].lower()
        except: quality = 'hd'
        if quality == 'cam' or quality == 'ts': quality = 'CAM'
        elif quality == 'hd' or 'hd ' in quality: quality = 'HD'
        else: quality = 'SD'

        result = client.parseDOM(result, 'ul', attrs={'data-range-id': "0"})
        servers = []
        servers = zip(client.parseDOM(result, 'a', ret='data-id'), client.parseDOM(result, 'a'))
        servers = [(i[0], re.findall('(\d+)', i[1])) for i in servers]
        servers = [(i[0], ''.join(i[1][:1])) for i in servers]
        try: servers = [i for i in servers if '%01d' % int(i[1]) == '%01d' % int(episode)]
        except: pass

        for s in servers[:4]:
            try:
                headers = {'X-Requested-With': 'XMLHttpRequest'}
                time.sleep(0.2)
                hash_url = urlparse.urljoin(self.base_link, self.hash_link)
                query = {'ts': myts, 'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                hash_url = hash_url + '?' + urllib.urlencode(query)
                headers['Referer'] = urlparse.urljoin(url, s[0])
                headers['Cookie'] = cookie1 + ';' + cookie2 + ';user-info=null; MarketGidStorage=%7B%220%22%3A%7B%22svspr%22%3A%22%22%2C%22svsds%22%3A3%2C%22TejndEEDj%22%3A%22MTQ4MTM2ODE0NzM0NzQ4NTMyOTAx%22%7D%2C%22C48532%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368147359%7D%2C%22C77945%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368147998%7D%2C%22C77947%22%3A%7B%22page%22%3A1%2C%22time%22%3A1481368148109%7D%7D'
                result = client.request(hash_url, headers=headers, limit='0')
                time.sleep(0.3)
                query = {'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                # NOTE(review): compounds a querystring onto `url` each
                # iteration — preserved from the original; verify intended.
                url = url + '?' + urllib.urlencode(query)
                result = json.loads(result)
                quality = 'SD'
                if s[1] == '1080': quality = '1080p'
                if s[1] == '720': quality = 'HD'
                # FIX: was `quality == 'CAM'` (no-op comparison).
                if s[1] == 'CAM': quality = 'CAM'
                query = result['params']
                query['mobile'] = '0'
                query.update(self.__get_token(query))
                grabber = result['grabber'] + '?' + urllib.urlencode(query)
                if not grabber.startswith('http'): grabber = 'http:' + grabber
                result = client.request(grabber, headers=headers, referer=url, limit='0')
                result = json.loads(result)
                result = result['data']
                result = [i['file'] for i in result if 'file' in i]
                for i in result:
                    if 'google' in i:
                        try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'provider': 'Fmovies', 'url': i})
                        except: pass
                    else:
                        try: sources.append({'source': 'gvideo', 'quality': quality, 'provider': 'Fmovies', 'url': i})
                        except: pass
                control.sleep(410)
            except:
                pass

        if quality == 'CAM':
            for i in sources: i['quality'] = 'CAM'
        return sources
    except:
        return sources
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape playable stream links from Fmovies for a movie or episode.

    `url` is either an absolute page URL or a plugin query string
    (title/year/season/episode); in the latter case the site is searched
    first to locate the page.  Returns a list of dicts with keys
    'source', 'quality', 'provider', 'url' (empty list on any failure).
    """
    try:
        sources = []

        if url == None: return sources

        if not str(url).startswith('http'):
            try:
                data = urlparse.parse_qs(url)
                data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

                title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
                year = re.findall('(\d{4})', data['premiered'])[0] if 'tvshowtitle' in data else data['year']
                try: episode = data['episode']
                except: pass

                # Search the site for the title and collect (href, name) pairs.
                query = {'keyword': title, 's': ''}
                search_url = urlparse.urljoin(self.base_link, '/search')
                search_url = search_url + '?' + urllib.urlencode(query)
                result = client.request(search_url)

                r = client.parseDOM(result, 'div', attrs={'class': '[^"]*movie-list[^"]*'})[0]
                r = client.parseDOM(r, 'div', attrs={'class': 'item'})
                r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', attrs={'class': 'name'})) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0]
                r = [(re.sub('http.+?//.+?/', '/', i[0]), re.sub('&#\d*;', '', i[1])) for i in r]

                if 'season' in data:
                    # Show listings end with the season number ("Title 2").
                    url = [(i[0], re.findall('(.+?) (\d*)$', i[1])) for i in r]
                    url = [(i[0], i[1][0][0], i[1][0][1]) for i in url if len(i[1]) > 0]
                    url = [i for i in url if cleantitle.get(title) == cleantitle.get(i[1])]
                    url = [i for i in url if '%01d' % int(data['season']) == '%01d' % int(i[2])]
                else:
                    url = [i for i in r if cleantitle.get(title) == cleantitle.get(i[1])]

                url = url[0][0]
                url = urlparse.urljoin(self.base_link, url)
                # Trailing path token: film id used by the ajax info endpoint.
                r2 = url.split('.')[-1]
            except:
                # BUGFIX: was `url == self.base_link` — a no-op comparison.
                # Fall back to the site root when the search fails.
                url = self.base_link

        try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0]
        except: pass

        referer = url

        # NOTE: the original issued a plain client.request(url) here and
        # immediately discarded it; the extended request below is the only
        # one whose result is used.
        result, headers, content, cookie = client.request(url, limit='0', output='extended')

        # Sanity-check the page title mentions the requested year.
        atr = [i for i in client.parseDOM(result, 'title') if len(re.findall('(\d{4})', i)) > 0][-1]
        if 'season' in data:
            result = result if year in atr or data['year'] in atr else None
        else:
            result = result if year in atr else None

        try: quality = client.parseDOM(result, 'span', attrs={'class': 'quality'})[0].lower()
        except: quality = 'hd'
        if quality == 'cam' or quality == 'ts': quality = 'CAM'
        elif quality == 'hd' or 'hd ' in quality: quality = 'HD'
        else: quality = 'SD'

        result = client.parseDOM(result, 'ul', attrs={'data-range-id': "0"})

        # (server id, numeric label) pairs; the label is an episode number
        # for shows, so filter on it when we have one.
        servers = zip(client.parseDOM(result, 'a', ret='data-id'), client.parseDOM(result, 'a'))
        servers = [(i[0], re.findall('(\d+)', i[1])) for i in servers]
        servers = [(i[0], ''.join(i[1][:1])) for i in servers]
        try: servers = [i for i in servers if '%01d' % int(i[1]) == '%01d' % int(episode)]
        except: pass

        for s in servers[:4]:
            try:
                headers = {'X-Requested-With': 'XMLHttpRequest'}

                time.sleep(0.2)

                # First call: token-signed info endpoint returns the grabber
                # URL and its parameters.
                hash_url = urlparse.urljoin(self.base_link, self.hash_link)
                query = {'id': s[0], 'update': '0', 'film': r2}
                query.update(self.__get_token(query))
                hash_url = hash_url + '?' + urllib.urlencode(query)

                headers['Referer'] = urlparse.urljoin(url, s[0])
                headers['Cookie'] = cookie

                result = client.request(hash_url, headers=headers, limit='0')

                query = {'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                url = url + '?' + urllib.urlencode(query)

                result = json.loads(result)

                quality = 'SD'
                if s[1] == '1080': quality = '1080p'
                if s[1] == '720': quality = 'HD'
                # BUGFIX: was `quality == 'CAM'` (comparison, not
                # assignment) so CAM quality was never recorded.
                if s[1] == 'CAM': quality = 'CAM'

                # Second call: the grabber endpoint returns the file list.
                query = result['params']
                query['mobile'] = '0'
                query.update(self.__get_token(query))
                grabber = result['grabber'] + '?' + urllib.urlencode(query)

                result = client.request(grabber, headers=headers, referer=url, limit='0')
                result = json.loads(result)
                result = result['data']
                result = [i['file'] for i in result if 'file' in i]

                for i in result:
                    if 'google' in i:
                        try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'provider': 'Fmovies', 'url': i})
                        except: pass
                    else:
                        try: sources.append({'source': 'gvideo', 'quality': quality, 'provider': 'Fmovies', 'url': i})
                        except: pass

                control.sleep(410)
            except:
                pass

        if quality == 'CAM':
            for i in sources: i['quality'] = 'CAM'

        return sources
    except:
        return sources
def run(self):
    """Background download worker.

    Reads the queued items persisted in the 'rel_dl' cache table and
    downloads each one into a library folder derived from its name
    ("Title (2016)" -> Movies, "Title S01E02" -> TV Shows, otherwise
    Uncategorised), publishing progress through Kodi window properties.
    Python 2 code (urllib2, print statements, `except Exception, e`).
    """
    # Dummy factory: cache.get with a huge timeout effectively just reads
    # whatever item list was previously stored under table 'rel_dl'.
    def download(): return []
    result = cache.get(download, 600000000, table='rel_dl')

    for item in result:
        self.name = item['name']
        self.image = item['image']
        self.url = item['url']

        # Strip characters that are illegal in filenames.
        sysname = self.name.translate(None, '\/:*?"<>|').strip('.')

        # Optional request headers ride after a '|' separator in the URL.
        url = self.url.split('|')[0]
        try: headers = dict(urlparse.parse_qsl(self.url.rsplit('|', 1)[1]))
        except: headers = dict('')

        ext = os.path.splitext(urlparse.urlparse(url).path)[1][1:].lower()
        if not ext in ['mp4', 'mkv', 'flv', 'avi', 'mpg']: ext = 'mp4'

        # Classify by name suffix; later matches override earlier ones.
        hdlr = re.compile('.+? ([(]\d{4}[)]|S\d*E\d*)$').findall(self.name)
        if len(hdlr) == 0: self.content = 'Uncategorised'

        hdlr = re.compile('.+? (S\d*E\d*)$').findall(self.name)
        if len(hdlr) > 0: self.content = 'TVShows'

        hdlr = re.compile('.+? [(](\d{4})[)]$').findall(self.name)
        if len(hdlr) > 0: self.content = 'Movies'

        if self.content == 'Movies':
            dest = os.path.join(downloadPath, 'Movies')
            control.makeFile(dest)
            dest = os.path.join(dest, sysname)
            control.makeFile(dest)
        elif self.content == 'TVShows':
            d = re.compile('(.+?) S(\d*)E(\d*)$').findall(sysname)[0]
            dest = os.path.join(downloadPath, 'TV Shows')
            control.makeFile(dest)
            dest = os.path.join(dest, d[0])
            control.makeFile(dest)
            dest = os.path.join(dest, 'Season %01d' % int(d[1]))
            control.makeFile(dest)
        else:
            dest = os.path.join(downloadPath, 'Uncategorised')
            control.makeFile(dest)

        dest = os.path.join(dest, sysname + '.' + ext)

        control.infoDialog(self.name + ' Is Downloading', 'Downloads Started', self.image, time=7000)

        try:
            req = urllib2.Request(url, headers=headers)
            resp = urllib2.urlopen(req, timeout=30)
        except Exception,e:
            removeDownload(self.url)
            print '%s ERROR - File Failed To Open' % (dest)
            continue

        try: self.size = int(resp.headers['Content-Length'])
        except: self.size = 0

        # A known Content-Length is required for the percent math below.
        if self.size < 1:
            removeDownload(self.url)
            print '%s Unknown filesize - Unable to download' % (dest)
            continue

        try: resumable = 'bytes' in resp.headers['Accept-Ranges'].lower()
        except: resumable = False

        # Read in 1 MiB chunks (or the whole file if smaller).
        size = 1024 * 1024
        if self.size < size: size = self.size
        gb = '%.2f GB' % (float(self.size) / 1073741824)

        start = time.clock()

        total = 0 ; notify = 0 ; errors = 0 ; count = 0 ; resume = 0 ; sleep = 0

        self.clear()

        # `property` here is a module-level prefix string (shadows the
        # builtin), presumably defined earlier in this file.
        control.window.setProperty(property + '.status', 'downloading')
        control.window.setProperty(property + '.name', str(self.name))
        control.window.setProperty(property + '.image', str(self.image))
        control.window.setProperty(property + '.size', str(gb))

        f = control.openFile(dest, 'wb')

        chunk = None
        # Up to 5 chunks are buffered in memory before being flushed; this
        # lets a resume re-request bytes that were read but not yet written.
        chunks = []

        while True:
            downloaded = total
            for c in chunks: downloaded += len(c)

            percent = min(100 * downloaded / self.size, 100)

            self.speed = str(int((downloaded / 1024) / (time.clock() - start))) + ' KB/s'
            self.percent = str(percent) + '%'

            control.window.setProperty(property + '.percent', str(self.percent))
            control.window.setProperty(property + '.speed', str(self.speed))

            # Toast roughly every 10%.
            if percent >= notify:
                control.infoDialog('Downloaded %s' % self.percent, self.name, self.image, time=5000)
                notify += 10

            chunk = None
            error = False

            try:
                chunk = resp.read(size)
                if not chunk:
                    # EOF before ~99% is treated as a dropped connection.
                    if self.percent < 99:
                        error = True
                    else:
                        while len(chunks) > 0:
                            c = chunks.pop(0)
                            f.write(c)
                            del c
                        f.close()
                        print '%s download complete' % (dest)
                        break
            except Exception, e:
                print str(e)
                error = True
                sleep = 10
                errno = 0
                if hasattr(e, 'errno'): errno = e.errno
                if errno == 10035:
                    # 'A non-blocking socket operation could not be
                    # completed immediately' — transient, just retry.
                    pass
                if errno == 10054:
                    # 'An existing connection was forcibly closed by the
                    # remote host' — force a resume.
                    errors = 10
                    sleep = 30
                if errno == 11001:
                    # 'getaddrinfo failed' — force a resume.
                    errors = 10
                    sleep = 30

            if chunk:
                errors = 0
                chunks.append(chunk)
                if len(chunks) > 5:
                    c = chunks.pop(0)
                    f.write(c)
                    total += len(c)
                    del c

            if error:
                errors += 1
                count += 1
                print '%d Error(s) whilst downloading %s' % (count, dest)
                control.sleep(sleep*1000)

            # Resume (Range request) after any error on a resumable server,
            # or after 10 consecutive errors otherwise.
            if (resumable and errors > 0) or errors >= 10:
                if (not resumable and resume >= 50) or resume >= 500:
                    # Give up!
                    print '%s download canceled - too many error whilst downloading' % (dest)
                    break

                resume += 1
                errors = 0
                if resumable:
                    # Drop buffered-but-unwritten chunks and create a new
                    # response starting at the last byte actually written.
                    chunks = []
                    print 'Download resumed (%d) %s' % (resume, dest)
                    h = headers ; h['Range'] = 'bytes=%d-' % int(total)
                    try: resp = urllib2.urlopen(urllib2.Request(url, headers=h), timeout=10)
                    except: resp = None
                else:
                    # Use existing response.
                    pass

            # External stop request via the window property.
            if control.window.getProperty(property + '.status') == 'stop':
                control.infoDialog('Process Complete', 'Downloads', time=5000)
                return self.clear()

    self.clear()
def get_sources(self, url, hosthdDict, hostDict, locDict): try: sources = [] if url == None: return sources u = urlparse.urljoin(self.base_link, url) r = client.request(u) #control.log('R %s' % r) r = re.findall("load_player\(\s*'([^']+)'\s*,\s*'?(\d+)\s*'?", r) r = list(set(r)) r = [i for i in r if i[1] == '0' or int(i[1]) >= 720] control.log('R %s' % r) links = [] for p in r: try: print ('P',p ) headers = {'X-Requested-With': 'XMLHttpRequest', 'Referer': u} player = urlparse.urljoin(self.base_link, '/ajax/movie/load_player') post = urllib.urlencode({'id': p[0], 'quality': p[1]}) control.sleep(220) result = client.request(player, post=post, headers=headers) control.log('result %s' % result) frame = client.parseDOM(result, 'iframe', ret='src') embed = client.parseDOM(result, 'embed', ret='flashvars') if frame: if 'player.php' in frame[0]: frame = client.parseDOM(result, 'input', ret='value', attrs={'type': 'hidden'})[0] headers = {'Referer': urlparse.urljoin(self.base_link, frame[0])} url = client.request(frame, headers=headers, output='geturl') links += [ {'source': 'gvideo', 'url': url, 'quality': client.googletag(url)[0]['quality']}] elif 'openload.' in frame[0]: links += [{'source': 'openload.co', 'url': frame[0], 'quality': 'HQ'}] elif 'videomega.' in frame[0]: links += [{'source': 'videomega.tv', 'url': frame[0], 'quality': 'HQ'}] elif embed: url = urlparse.parse_qs(embed[0])['fmt_stream_map'][0] url = [i.split('|')[-1] for i in url.split(',')] for i in url: try: links.append({'source': 'gvideo', 'url': i, 'quality': client.googletag(i)[0]['quality'],'direct': True}) except: pass except: pass for i in links: #sources.append({'source': i['source'], 'quality': i['quality'], 'provider': 'Xmovies', 'url': i['url'], 'direct': i['direct'], 'debridonly': False}) sources.append({'source': i['source'], 'quality': i['quality'], 'provider': 'Xmovies', 'url': i['url']}) return sources except Exception as e: control.log('ERROR XMOVIES %s' % e) return sources
def get_sources(self, url, hosthdDict, hostDict, locDict): try: sources = [] if url == None: return sources url1 = urlparse.urljoin(self.base_link, url) result, headers, content, cookie = client.request(url1, output='extended') auth = re.findall('__utmx=(.+)', cookie)[0].split(';')[0] auth = 'Bearer %s' % urllib.unquote_plus(auth) print cookie headers['Authorization'] = auth headers['X-Requested-With'] = 'XMLHttpRequest' #headers['Content-Type']='application/x-www-form-urlencoded; charset=UTF-8' #headers['Accept'] = 'application/json, text/javascript, */*; q=0.01' headers['Cookie'] = cookie u = '/ajax/embeds.php' u = urlparse.urljoin(self.base_link, u) #action = 'getEpisodeEmb' if '/episode/' in url else 'getMovieEmb' if '/episode/' in url: url = urlparse.urljoin(self.base_link, '/tv-series'+ url) action = 'getEpisodeEmb' else: action = 'getMovieEmb' url = urlparse.urljoin(self.base_link, '/tv-series' + url) headers['Referer'] = url control.sleep(200) elid = urllib.quote(base64.encodestring(str(int(time.time()))).strip()) token = re.findall("var\s+tok\s*=\s*'([^']+)", result)[0] idEl = re.findall('elid\s*=\s*"([^"]+)', result)[0] post = {'action': action, 'idEl': idEl, 'token': token, 'elid': elid} post = urllib.urlencode(post) print post print headers r = client.request(u, post=post, headers=headers, output='cookie2') print("####",r) r = str(json.loads(r)) r = client.parseDOM(r, 'iframe', ret='.+?') + client.parseDOM(r, 'IFRAME', ret='.+?') links = [] for i in r: try: links += [{'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'url': i}] except: pass links += [{'source': 'openload', 'quality': 'SD', 'url': i} for i in r if 'openload.co' in i] links += [{'source': 'videomega', 'quality': 'SD', 'url': i} for i in r if 'videomega.tv' in i] for i in links: sources.append({'source': i['source'], 'quality': i['quality'], 'provider': 'MoviesHD', 'url': i['url']}) return sources except: return sources
def get_sources(self, url, hosthdDict, hostDict, locDict): try: sources = [] if url == None: return sources u = urlparse.urljoin(self.base_link, url) r = client.request(u) #control.log('R %s' % r) r = re.findall("load_player\(\s*'([^']+)'\s*,\s*'?(\d+)\s*'?", r) r = list(set(r)) r = [i for i in r if i[1] == '0' or int(i[1]) >= 720] control.log('R %s' % r) links = [] for p in r: try: print('P', p) headers = { 'X-Requested-With': 'XMLHttpRequest', 'Referer': u } player = urlparse.urljoin(self.base_link, '/ajax/movie/load_player') post = urllib.urlencode({'id': p[0], 'quality': p[1]}) control.sleep(220) result = client.request(player, post=post, headers=headers) control.log('result %s' % result) frame = client.parseDOM(result, 'iframe', ret='src') embed = client.parseDOM(result, 'embed', ret='flashvars') if frame: if 'player.php' in frame[0]: frame = client.parseDOM(result, 'input', ret='value', attrs={'type': 'hidden'})[0] headers = { 'Referer': urlparse.urljoin(self.base_link, frame[0]) } url = client.request(frame, headers=headers, output='geturl') links += [{ 'source': 'gvideo', 'url': url, 'quality': client.googletag(url)[0]['quality'] }] elif 'openload.' in frame[0]: links += [{ 'source': 'openload.co', 'url': frame[0], 'quality': 'HQ' }] elif 'videomega.' in frame[0]: links += [{ 'source': 'videomega.tv', 'url': frame[0], 'quality': 'HQ' }] elif embed: url = urlparse.parse_qs(embed[0])['fmt_stream_map'][0] url = [i.split('|')[-1] for i in url.split(',')] for i in url: try: links.append({ 'source': 'gvideo', 'url': i, 'quality': client.googletag(i)[0]['quality'], 'direct': True }) except: pass except: pass for i in links: #sources.append({'source': i['source'], 'quality': i['quality'], 'provider': 'Xmovies', 'url': i['url'], 'direct': i['direct'], 'debridonly': False}) sources.append({ 'source': i['source'], 'quality': i['quality'], 'provider': 'Xmovies', 'url': i['url'] }) return sources except Exception as e: control.log('ERROR XMOVIES %s' % e) return sources
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape playable stream links from Fmovies for a movie or episode.

    `url` is either an absolute page URL or a plugin query string
    (title/year/season/episode); in the latter case the site is searched
    first to locate the page.  Returns a list of dicts with keys
    'source', 'quality', 'provider', 'url' (empty list on any failure).
    """
    try:
        sources = []

        if url == None: return sources

        if not str(url).startswith('http'):
            try:
                data = urlparse.parse_qs(url)
                data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

                title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
                year = re.findall('(\d{4})', data['premiered'])[0] if 'tvshowtitle' in data else data['year']
                try: episode = data['episode']
                except: pass

                # Search the site for the title and collect (href, name) pairs.
                query = {'keyword': title, 's': ''}
                search_url = urlparse.urljoin(self.base_link, '/search')
                search_url = search_url + '?' + urllib.urlencode(query)
                result = client.request(search_url)

                r = client.parseDOM(result, 'div', attrs={'class': '[^"]*movie-list[^"]*'})[0]
                r = client.parseDOM(r, 'div', attrs={'class': 'item'})
                r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', attrs={'class': 'name'})) for i in r]
                r = [(i[0][0], i[1][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0]
                r = [(re.sub('http.+?//.+?/', '/', i[0]), re.sub('&#\d*;', '', i[1])) for i in r]

                if 'season' in data:
                    # Show listings end with the season number ("Title 2").
                    url = [(i[0], re.findall('(.+?) (\d*)$', i[1])) for i in r]
                    url = [(i[0], i[1][0][0], i[1][0][1]) for i in url if len(i[1]) > 0]
                    url = [i for i in url if cleantitle.get(title) == cleantitle.get(i[1])]
                    url = [i for i in url if '%01d' % int(data['season']) == '%01d' % int(i[2])]
                else:
                    url = [i for i in r if cleantitle.get(title) == cleantitle.get(i[1])]

                url = url[0][0]
                url = urlparse.urljoin(self.base_link, url)
                # Trailing path token: film id used by the ajax info endpoint.
                r2 = url.split('.')[-1]
            except:
                # BUGFIX: was `url == self.base_link` — a no-op comparison.
                # Fall back to the site root when the search fails.
                url = self.base_link

        try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0]
        except: pass

        referer = url

        # NOTE: the original issued a plain client.request(url) here and
        # immediately discarded it; the extended request below is the only
        # one whose result is used.
        result, headers, content, cookie = client.request(url, limit='0', output='extended')

        # Sanity-check the page title mentions the requested year.
        atr = [i for i in client.parseDOM(result, 'title') if len(re.findall('(\d{4})', i)) > 0][-1]
        if 'season' in data:
            result = result if year in atr or data['year'] in atr else None
        else:
            result = result if year in atr else None

        try: quality = client.parseDOM(result, 'span', attrs={'class': 'quality'})[0].lower()
        except: quality = 'hd'
        if quality == 'cam' or quality == 'ts': quality = 'CAM'
        elif quality == 'hd' or 'hd ' in quality: quality = 'HD'
        else: quality = 'SD'

        result = client.parseDOM(result, 'ul', attrs={'data-range-id': "0"})

        # (server id, numeric label) pairs; the label is an episode number
        # for shows, so filter on it when we have one.
        servers = zip(client.parseDOM(result, 'a', ret='data-id'), client.parseDOM(result, 'a'))
        servers = [(i[0], re.findall('(\d+)', i[1])) for i in servers]
        servers = [(i[0], ''.join(i[1][:1])) for i in servers]
        try: servers = [i for i in servers if '%01d' % int(i[1]) == '%01d' % int(episode)]
        except: pass

        for s in servers[:4]:
            try:
                headers = {'X-Requested-With': 'XMLHttpRequest'}

                time.sleep(0.7)

                # First call: token-signed info endpoint returns the grabber
                # URL and its parameters.
                hash_url = urlparse.urljoin(self.base_link, self.hash_link)
                query = {'id': s[0], 'update': '0', 'film': r2}
                query.update(self.__get_token(query))
                hash_url = hash_url + '?' + urllib.urlencode(query)

                headers['Referer'] = urlparse.urljoin(url, s[0])
                headers['Cookie'] = cookie

                result = client.request(hash_url, headers=headers, limit='0')

                time.sleep(0.6)

                query = {'id': s[0], 'update': '0'}
                query.update(self.__get_token(query))
                url = url + '?' + urllib.urlencode(query)

                result = json.loads(result)

                quality = 'SD'
                if s[1] == '1080': quality = '1080p'
                if s[1] == '720': quality = 'HD'
                # BUGFIX: was `quality == 'CAM'` (comparison, not
                # assignment) so CAM quality was never recorded.
                if s[1] == 'CAM': quality = 'CAM'

                # Second call: the grabber endpoint returns the file list.
                query = result['params']
                query['mobile'] = '0'
                query.update(self.__get_token(query))
                grabber = result['grabber'] + '?' + urllib.urlencode(query)

                result = client.request(grabber, headers=headers, referer=url, limit='0')
                result = json.loads(result)
                result = result['data']
                result = [i['file'] for i in result if 'file' in i]

                for i in result:
                    if 'google' in i:
                        try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'provider': 'Fmovies', 'url': i})
                        except: pass
                    else:
                        try: sources.append({'source': 'gvideo', 'quality': quality, 'provider': 'Fmovies', 'url': i})
                        except: pass

                control.sleep(410)
            except:
                pass

        if quality == 'CAM':
            for i in sources: i['quality'] = 'CAM'

        return sources
    except:
        return sources
def playItem(self, content, name, year, imdb, tvdb, source):
    """Resolve one of the scraped sources and hand it to the player.

    Collects sibling list items' sources (next/prev in the directory
    listing), prefers items matching the first item's source+quality,
    then resolves candidates one by one on a worker thread behind a
    progress dialog until one yields a playable self.url.
    """
    try:
        control.resolve(int(sys.argv[1]), True, control.item(path=''))
        control.execute('Dialog.Close(okdialog)')

        next = []
        prev = []
        total = []
        meta = None

        # Walk forward through the directory listing collecting each
        # item's encoded source; stop at the first repeat or failure.
        for i in range(1, 10000):
            try:
                u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
                if u in total: raise Exception()
                total.append(u)
                u = dict(urlparse.parse_qsl(u.replace('?', '')))
                if 'meta' in u: meta = u['meta']
                u = json.loads(u['source'])[0]
                next.append(u)
            except:
                break

        # Same walk backwards (negative offsets, nearest first).
        for i in range(-10000, 0)[::-1]:
            try:
                u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
                if u in total: raise Exception()
                total.append(u)
                u = dict(urlparse.parse_qsl(u.replace('?', '')))
                if 'meta' in u: meta = u['meta']
                u = json.loads(u['source'])[0]
                prev.append(u)
            except:
                break

        items = json.loads(source)
        source, quality = items[0]['source'], items[0]['quality']

        # Prefer same source + quality, then same quality from any source.
        items = [i for i in items + next + prev if i['quality'] == quality and i['source'] == source][:10]
        items += [i for i in next + prev if i['quality'] == quality and not i['source'] == source][:10]

        self.progressDialog = control.progressDialog
        self.progressDialog.create(control.addonInfo('name'), '')
        self.progressDialog.update(0)

        # Host whose last resolve attempt timed out; skipped afterwards.
        block = None

        for i in range(len(items)):
            try:
                self.progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))

                if items[i]['source'] == block: raise Exception()

                # Resolve on a worker thread so the dialog stays responsive.
                w = workers.Thread(self.sourcesResolve, items[i]['url'], items[i]['provider'])
                w.start()

                # m is '1' while a virtual keyboard (e.g. captcha prompt)
                # is open, '' otherwise.
                m = ''

                for x in range(3600):
                    if self.progressDialog.iscanceled(): return self.progressDialog.close()
                    if xbmc.abortRequested == True: return sys.exit()

                    k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                    if k:
                        m += '1'
                        m = m[-1]

                    # Give up after ~30s unless a keyboard is being shown.
                    if (w.is_alive() == False or x > 30) and not k: break

                    time.sleep(1)

                # If a keyboard appeared, allow up to 30 extra seconds.
                for x in range(30):
                    if m == '': break
                    if self.progressDialog.iscanceled(): return self.progressDialog.close()
                    if xbmc.abortRequested == True: return sys.exit()
                    if w.is_alive() == False: break
                    time.sleep(1)

                # Thread still running: remember this host and skip it.
                if w.is_alive() == True: block = items[i]['source']

                if self.url == None: raise Exception()

                try: self.progressDialog.close()
                except: pass

                control.sleep(200)

                if control.setting('playback_info') == 'true':
                    control.infoDialog(items[i]['label'], heading=name)

                from resources.lib.libraries.player import player
                player().run(content, name, self.url, year, imdb, tvdb, meta)
                return self.url
            except:
                pass

        try: self.progressDialog.close()
        except: pass

        # Nothing resolved — fall through to the failure notification.
        raise Exception()
    except:
        control.infoDialog(control.lang(30501).encode('utf-8'))
        pass
def resolve(self, url): try: m3u8 = [ '#EXTM3U', '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",DEFAULT=YES,AUTOSELECT=YES,NAME="Stream 1",URI="{audio_stream}"', '', '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=0,NAME="{stream_name}",AUDIO="audio"', '{video_stream}' ] query = urlparse.parse_qs(url) query = dict([(key, query[key][0]) if query[key] else (key, '') for key in query]) auth = 'http://streamtorrent.tv/api/torrent/%s/%s.m3u8?json=true' % ( query['vid_id'], query['stream_id']) r = client.request(auth) r = json.loads(r) try: url = r['url'] except: url = None if not url == None: def dialog(url): try: self.disableScraper = control.yesnoDialog( 'To watch this video visit from any device', '[COLOR skyblue]%s[/COLOR]' % url, '', 'Torba', 'Cancel', 'Settings') except: pass workers.Thread(dialog, url).start() control.sleep(3000) for i in range(100): try: if not control.condVisibility( 'Window.IsActive(yesnoDialog)'): break r = client.request(auth) r = json.loads(r) try: url = r['url'] except: url = None if url == None: break workers.Thread(dialog, url).start() control.sleep(3000) except: pass if self.disableScraper: control.openSettings(query='2.0') return '' control.execute('Dialog.Close(yesnoDialog)') if not url == None: return stream_name = '%sp' % (query['height']) video_stream = r[stream_name] if not 'audio' in r: return video_stream audio_stream = r['audio'] content = ('\n'.join(m3u8)).format( **{ 'audio_stream': audio_stream, 'stream_name': stream_name, 'video_stream': video_stream }) path = os.path.join(control.dataPath, 'torbase.m3u8') control.makeFile(control.dataPath) control.deleteFile(path) file = control.openFile(path, 'w') file.write(content) file.close() return path except: return
def run(self):
    """Background download worker.

    Reads the queued items persisted in the 'rel_dl' cache table and
    downloads each one into a library folder derived from its name
    ("Title (2016)" -> Movies, "Title S01E02" -> TVShows, otherwise
    Uncategorised), publishing progress through Kodi window properties.
    Python 2 code (urllib2, print statements, `except Exception, e`).
    """
    # Dummy factory: cache.get with a huge timeout effectively just reads
    # whatever item list was previously stored under table 'rel_dl'.
    def download(): return []
    result = cache.get(download, 600000000, table='rel_dl')

    for item in result:
        self.name = item['name']
        self.image = item['image']
        self.url = item['url']

        # Strip characters that are illegal in filenames.
        sysname = self.name.translate(None, '\/:*?"<>|').strip('.')

        # Optional request headers ride after a '|' separator in the URL.
        url = self.url.split('|')[0]
        try: headers = dict(urlparse.parse_qsl(self.url.rsplit('|', 1)[1]))
        except: headers = dict('')

        ext = os.path.splitext(urlparse.urlparse(url).path)[1][1:].lower()
        if not ext in ['mp4', 'mkv', 'flv', 'avi', 'mpg']: ext = 'mp4'

        # Classify by name suffix; later matches override earlier ones.
        hdlr = re.compile('.+? ([(]\d{4}[)]|S\d*E\d*)$').findall(self.name)
        if len(hdlr) == 0: self.content = 'Uncategorised'

        hdlr = re.compile('.+? (S\d*E\d*)$').findall(self.name)
        if len(hdlr) > 0: self.content = 'TVShows'

        hdlr = re.compile('.+? [(](\d{4})[)]$').findall(self.name)
        if len(hdlr) > 0: self.content = 'Movies'

        if self.content == 'Movies':
            dest = os.path.join(downloadPath, 'Movies')
            control.makeFile(dest)
            dest = os.path.join(dest, sysname)
            control.makeFile(dest)
        elif self.content == 'TVShows':
            d = re.compile('(.+?) S(\d*)E(\d*)$').findall(sysname)[0]
            dest = os.path.join(downloadPath, 'TVShows')
            control.makeFile(dest)
            dest = os.path.join(dest, d[0])
            control.makeFile(dest)
            dest = os.path.join(dest, 'Season %01d' % int(d[1]))
            control.makeFile(dest)
        else:
            dest = os.path.join(downloadPath, 'Uncategorised')
            control.makeFile(dest)

        dest = os.path.join(dest, sysname + '.' + ext)

        control.infoDialog(self.name + ' Is Downloading', 'Downloads Started', self.image, time=7000)

        try:
            req = urllib2.Request(url, headers=headers)
            resp = urllib2.urlopen(req, timeout=30)
        except Exception, e:
            removeDownload(self.url)
            print '%s ERROR - File Failed To Open' % (dest)
            continue

        try: self.size = int(resp.headers['Content-Length'])
        except: self.size = 0

        # A known Content-Length is required for the percent math below.
        if self.size < 1:
            removeDownload(self.url)
            print '%s Unknown filesize - Unable to download' % (dest)
            continue

        try: resumable = 'bytes' in resp.headers['Accept-Ranges'].lower()
        except: resumable = False

        # Read in 1 MiB chunks (or the whole file if smaller).
        size = 1024 * 1024
        if self.size < size: size = self.size
        gb = '%.2f GB' % (float(self.size) / 1073741824)

        start = time.clock()

        total = 0
        notify = 0
        errors = 0
        count = 0
        resume = 0
        sleep = 0

        self.clear()

        # `property` here is a module-level prefix string (shadows the
        # builtin), presumably defined earlier in this file.
        control.window.setProperty(property + '.status', 'downloading')
        control.window.setProperty(property + '.name', str(self.name))
        control.window.setProperty(property + '.image', str(self.image))
        control.window.setProperty(property + '.size', str(gb))

        f = control.openFile(dest, 'wb')

        chunk = None
        # Up to 5 chunks are buffered in memory before being flushed; this
        # lets a resume re-request bytes that were read but not yet written.
        chunks = []

        while True:
            downloaded = total
            for c in chunks: downloaded += len(c)

            percent = min(100 * downloaded / self.size, 100)

            self.speed = str(int((downloaded / 1024) / (time.clock() - start))) + ' KB/s'
            self.percent = str(percent) + '%'

            control.window.setProperty(property + '.percent', str(self.percent))
            control.window.setProperty(property + '.speed', str(self.speed))

            # Toast roughly every 10%.
            if percent >= notify:
                control.infoDialog('Downloaded %s' % self.percent, self.name, self.image, time=5000)
                notify += 10

            chunk = None
            error = False

            try:
                chunk = resp.read(size)
                if not chunk:
                    # EOF before ~99% is treated as a dropped connection.
                    if self.percent < 99:
                        error = True
                    else:
                        while len(chunks) > 0:
                            c = chunks.pop(0)
                            f.write(c)
                            del c
                        f.close()
                        print '%s download complete' % (dest)
                        break
            except Exception, e:
                print str(e)
                error = True
                sleep = 10
                errno = 0
                if hasattr(e, 'errno'): errno = e.errno
                if errno == 10035:
                    # 'A non-blocking socket operation could not be
                    # completed immediately' — transient, just retry.
                    pass
                if errno == 10054:
                    # 'An existing connection was forcibly closed by the
                    # remote host' — force a resume.
                    errors = 10
                    sleep = 30
                if errno == 11001:
                    # 'getaddrinfo failed' — force a resume.
                    errors = 10
                    sleep = 30

            if chunk:
                errors = 0
                chunks.append(chunk)
                if len(chunks) > 5:
                    c = chunks.pop(0)
                    f.write(c)
                    total += len(c)
                    del c

            if error:
                errors += 1
                count += 1
                print '%d Error(s) whilst downloading %s' % (count, dest)
                control.sleep(sleep * 1000)

            # Resume (Range request) after any error on a resumable server,
            # or after 10 consecutive errors otherwise.
            if (resumable and errors > 0) or errors >= 10:
                if (not resumable and resume >= 50) or resume >= 500:
                    # Give up!
                    print '%s download canceled - too many error whilst downloading' % (dest)
                    break

                resume += 1
                errors = 0
                if resumable:
                    # Drop buffered-but-unwritten chunks and create a new
                    # response starting at the last byte actually written.
                    chunks = []
                    print 'Download resumed (%d) %s' % (resume, dest)
                    h = headers
                    h['Range'] = 'bytes=%d-' % int(total)
                    try: resp = urllib2.urlopen(urllib2.Request(url, headers=h), timeout=10)
                    except: resp = None
                else:
                    # Use existing response.
                    pass

            # External stop request via the window property.
            if control.window.getProperty(property + '.status') == 'stop':
                control.infoDialog('Process Complete', 'Downloads', time=5000)
                return self.clear()

    self.clear()
def resolve(self, url): try: m3u8 = [ '#EXTM3U', '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",DEFAULT=YES,AUTOSELECT=YES,NAME="Stream 1",URI="{audio_stream}"', '', '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=0,NAME="{stream_name}",AUDIO="audio"', '{video_stream}' ] query = urlparse.parse_qs(url) query = dict([(key, query[key][0]) if query[key] else (key, '') for key in query]) auth = 'http://streamtorrent.tv/api/torrent/%s/%s.m3u8?json=true' % (query['vid_id'], query['stream_id']) r = client.request(auth) r = json.loads(r) try: url = r['url'] except: url = None if not url == None: def dialog(url): try: self.disableScraper = control.yesnoDialog('To watch this video visit from any device', '[COLOR skyblue]%s[/COLOR]' % url, '', 'Torba', 'Cancel', 'Settings') except: pass workers.Thread(dialog, url).start() control.sleep(3000) for i in range(100): try: if not control.condVisibility('Window.IsActive(yesnoDialog)'): break r = client.request(auth) r = json.loads(r) try: url = r['url'] except: url = None if url == None: break workers.Thread(dialog, url).start() control.sleep(3000) except: pass if self.disableScraper: control.openSettings(query='2.0') return '' control.execute('Dialog.Close(yesnoDialog)') if not url == None: return stream_name = '%sp' % (query['height']) video_stream = r[stream_name] if not 'audio' in r: return video_stream audio_stream = r['audio'] content = ('\n'.join(m3u8)).format(**{'audio_stream': audio_stream, 'stream_name': stream_name, 'video_stream': video_stream}) path = os.path.join(control.dataPath, 'torbase.m3u8') control.makeFile(control.dataPath) ; control.deleteFile(path) file = control.openFile(path, 'w') ; file.write(content) ; file.close() return path except: return