def get(self, tvshowtitle, year, imdb, tvdb, season=None, episode=None, idx=True, provider=None, url=None):
    """Fetch a show's episode list from the given provider and render it
    as an episode directory.

    Returns the populated episode list, or None when scraping fails
    (the localized "no results" info dialog is shown instead of raising).
    """
    try:
        if idx:
            if provider is not None:
                # Late-import the provider's scraper module and ask it
                # for the show's episode listing.
                call = __import__('resources.lib.sources.%s' % provider, globals(), locals(), ['source'], -1).source()
                self.list = call.episodes(tvshowtitle, url)
            if self.list == []:
                # Nothing scraped: surface the localized "no results" string.
                raise Exception(control.lang(30516).encode('utf-8'))
            # Backfill placeholder metadata the directory view expects.
            self.list = self.super_info(self.list)
            try:
                logger.debug('Before Episode Directory', __name__)
                self.episodeDirectory(self.list, provider)
                logger.debug('After Episode Directory', __name__)
            except Exception as e:
                logger.error(e)
            return self.list
    except Exception as e:
        logger.error(e)
        control.infoDialog(control.lang(30516).encode('utf-8'))
def sources(self, url):
    """Scrape embedded player links from a post's entry-content block.

    Returns a list of source dicts (source, parts, quality, scraper,
    url, direct); empty list when nothing is found or on failure.
    """
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url is None:
            return srcs
        url = urlparse.urljoin(self.base_link, url)
        try:
            result = client.request(url, referer=self.base_link)
        except Exception:
            result = ''
        # Normalize encoding and strip layout whitespace before parsing.
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')
        items = client.parseDOM(result, "div", attrs={"class": "entry-content"})
        # Hoisted: one pattern reused for every content block below.
        link_re = re.compile(r'(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]')
        for item in items:
            try:
                # First embedded player URL in this block.
                url = link_re.findall(item)[0][1]
                host = client.host(url)
                srcs.append({'source': host, 'parts': '1', 'quality': 'HD', 'scraper': self.name, 'url': url, 'direct': False})
            except Exception:
                pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return srcs
def sources(self, url):
    """Collect every direct <source src=...> link on the page as an HD
    source entry appended to self.srcs."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        if url == None:
            return self.srcs
        try:
            html = client.request(url, referer=self.base_link)
        except:
            html = ''
        for src in client.parseDOM(html, "source", ret="src"):
            try:
                self.srcs.append({
                    'source': client.host(src),
                    'parts': '1',
                    'quality': 'HD',
                    'scraper': self.name,
                    'url': src,
                    'direct': False
                })
            except:
                pass
        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return self.srcs
def resolve(url):
    """Resolve an embed page to a playable media URL.

    Handles EnkDekoder-obfuscated pages (link hidden in the player's
    flashvars param) and plain pages (file:"..." pattern), then unwraps
    video_url / iframe indirection.  Returns the URL or False on failure.
    """
    try:
        result = client.request(url)
        dek = EnkDekoder.dekode(result)
        if dek is not None:
            # Obfuscated page: the real link lives in the player flashvars.
            url = client.parseDOM(dek, "param", attrs={"name": "flashvars"}, ret="value")[0]
        else:
            dek = result
            url = re.compile(r'file*:*"(http.+?)"').findall(dek)[0]
        if re.search(';video_url', url):
            url = re.findall(';video_url=(.+?)&', url)[0]
        elif re.search('iframe src=', url):
            url = re.findall('<iframe src="(.+?)"', url)[0]
        # Prefer the full-quality flv over the iPod rendition, and the
        # 'edit' variant over the 'preview' one.
        url = url.replace('_ipod.mp4', '.flv')
        url = url.replace('preview', 'edit')
        logger.debug('URL [%s]' % url, __name__)
        return url
    except:
        return False
def sources(self, url):
    """Gather embedded player links from the single-post-video block,
    skipping poster images that share the src attribute."""
    try:
        logger.debug('SOURCES URL %s' % url, __name__)
        quality = 'HD'
        srcs = []
        try:
            page = client.request(url)
        except:
            page = ''
        page = page.decode('iso-8859-1').encode('utf-8')
        page = page.replace('\n', '').replace('\t', '')
        page = client.parseDOM(page, "div", attrs={"class": "single-post-video"})[0]
        for attr, link in re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(page):
            # png hits are poster frames, not streams.
            if link.endswith('png'):
                continue
            host = client.host(link)
            url = link
            parts = [url]
            srcs.append({'source': host, 'parts': len(parts), 'quality': quality, 'scraper': self.name, 'url': "##".join(parts), 'direct': False})
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def resolve(url):
    """Pull the jwplayer 'file' URL out of the embed page.

    Returns the stream URL, or None when the request or parse fails.
    """
    try:
        page = client.request(url)
        stream = re.findall('file: "(.+?)"', page)[0]
        logger.debug('URL [%s]' % stream, __name__)
        return stream
    except:
        return
def resolve(url):
    """Build the playu.net embed URL from the video id and extract the
    jwplayer 'file' link.  Returns False when resolution fails."""
    try:
        embed = 'http://playu.net/embed-%s.html' % str(getVideoID(url))
        page = client.request(embed)
        stream = re.findall('file: "(.+?)"', page)[0]
        logger.debug('URL [%s]' % stream, __name__)
        return stream
    except:
        return False
def sources(self, url):
    """Search the site feed for a movie/episode release and queue
    matching 'Single Link' posts via self.source().

    *url* is a urlencoded query string (title/tvshowtitle, year,
    season, episode).  Returns self.srcs; unchanged list on failure.
    """
    try:
        logger.debug('SOURCES URL %s' % url, __name__)
        if url == None:
            return []
        data = urlparse.parse_qs(url)
        # Flatten {key: [value]} into {key: value} ('' when empty).
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
        title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
        cleanedTitle = cleantitle.get(title)
        # hdlr is the token a matching post title must carry:
        # SxxExx for episodes, the release year for movies.
        hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
        query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
        # Strip characters the site search chokes on.
        query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
        url = self.search_link % urllib.quote_plus(query)
        url = urlparse.urljoin(self.base_link, url)
        r = client.request(url)
        posts = client.parseDOM(r, 'item')
        items = []
        for post in posts:
            try:
                t = client.parseDOM(post, 'title')[0]
                post = post.replace('\n', '').replace('\t', '')
                # Only the section between the red 'Single Link' markers
                # holds the links we want.
                post = re.compile('<span style="color: #ff0000">Single Link</b></span><br />(.+?)<span style="color: #ff0000">').findall(post)[0]
                u = re.findall('<a href="(http(?:s|)://.+?)">', post)
                items += [(t, i) for i in u]
            except:
                pass
        for item in items:
            try:
                name = client.replaceHTMLCodes(item[0])
                # Strip everything from the year/SxxExx/3D tag onward so
                # bare titles can be compared.
                linkTitle = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
                if not cleanedTitle == cleantitle.get(linkTitle):
                    raise Exception()
                # Last year/SxxExx token in the post title must match hdlr.
                year = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
                if not year == hdlr:
                    raise Exception()
                self.source(item)
            except:
                pass
        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except:
        return self.srcs
def sources(self, url):
    """Scrape mirror links from a proxied links table.

    Each row's 'gtfo' query parameter is a base64-encoded target URL;
    quality drops to CAM when the row is tagged [CAM]/[TS], SD otherwise.
    Returns a list of source dicts; empty (or partial) on failure.
    """
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        quality = ''
        srcs = []
        if url == None:
            return srcs
        url = urlparse.urljoin(self.base_link, url)
        result = proxy.request(url, 'link_ite')
        links = client.parseDOM(result, 'table', attrs={'class': 'link_ite.+?'})
        for i in links:
            try:
                url = client.parseDOM(i, 'a', ret='href')
                # Keep the last anchor carrying the obfuscated 'gtfo' param.
                url = [x for x in url if 'gtfo' in x][-1]
                url = proxy.parse(url)
                url = urlparse.parse_qs(
                    urlparse.urlparse(url).query)['gtfo'][0]
                url = base64.b64decode(url)
                url = client.replaceHTMLCodes(url)
                url = url.encode('utf-8')
                # Bare host name (e.g. 'example.com') from the link netloc.
                host = re.findall(
                    '([\w]+[.][\w]+)$',
                    urlparse.urlparse(url.strip().lower()).netloc)[0]
                host = host.encode('utf-8')
                quality = client.parseDOM(i, 'div', attrs={'class': 'quality'})
                if any(x in ['[CAM]', '[TS]'] for x in quality):
                    quality = 'CAM'
                else:
                    quality = 'SD'
                quality = quality.encode('utf-8')
                srcs.append({
                    'source': host,
                    'parts': '1',
                    'quality': quality,
                    'scraper': self.name,
                    'url': url,
                    'direct': False
                })
            except:
                pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def testManual():
    """Entry point for manual scraper testing: flush the scraper cache,
    verify the cache database is reachable, then run the movie tests.

    Exits the process when the database cannot be opened.
    """
    desiscrapers.clear_cache()
    try:
        # Connectivity check only; the connection/cursor are not reused here.
        dbcon = database.connect(control.cacheFile)
        dbcur = dbcon.cursor()
    except:
        logger.debug("Desiscrapers Testing Mode", 'Error connecting to db')
        sys.exit()
    testManualMovies()
def resolve(url, allowDebrid=False):
    """Resolve a hoster page URL to a playable stream URL.

    Tries this addon's own resolver classes first (matched by host via
    info()), then falls back to the urlresolver add-on.  The resolved
    URL may carry a '|key=val&...' header suffix; those headers are used
    to verify the stream is actually reachable before returning.

    Returns the playable URL; exceptions propagate to the caller when
    the resolved URL cannot be played.
    """
    u = url
    url = False
    # Custom Resolvers
    try:
        host = client.host(u)
        # First resolver class whose declared host list matches.
        r = [i['class'] for i in info() if host in i['host']][0]
        r = __import__(r, globals(), locals(), [], -1)
        url = r.resolve(u)
        if url == False:
            raise Exception()
    except:
        pass
    # URLResolvers
    try:
        # Skip when a custom resolver already produced a URL.
        if not url == False:
            raise Exception()
        logger.debug('Trying URL Resolver for %s' % u, __name__)
        hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True, include_universal=allowDebrid)
        if hmf.valid_url() == True:
            url = hmf.resolve()
        else:
            url = False
    except:
        pass
    try:
        # Optional '|key=val&...' header suffix appended by resolvers.
        headers = url.rsplit('|', 1)[1]
    except:
        headers = ''
    # Re-quote only when the suffix contains raw spaces.
    headers = urllib.quote_plus(headers).replace('%3D', '=').replace(
        '%26', '&') if ' ' in headers else headers
    headers = dict(urlparse.parse_qsl(headers))
    if url.startswith('http') and '.m3u8' in url:
        # HLS: a geturl probe is enough to validate the playlist.
        result = client.request(url.split('|')[0], headers=headers, output='geturl', timeout='20')
        if result == None:
            raise Exception()
    elif url.startswith('http'):
        # Progressive: fetch one chunk to prove the link is alive.
        result = client.request(url.split('|')[0], headers=headers, output='chunk', timeout='20')
        if result == None:
            logger.debug('Resolved %s but unable to play' % url, __name__)
            raise Exception()
    return url
def sources(self, url):
    """Harvest links from a post's centered paragraphs, following
    digibolly.se redirect pages through to their embedded iframe."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        #url = urlparse.urljoin(self.base_link, url)
        try:
            result = client.request(url, referer=self.base_link)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')
        result = client.parseDOM(
            result, "div",
            attrs={"class": "entry-content clearfix single-post-content"})
        result = client.parseDOM(result, "p", attrs={"style": "text-align: center;"})
        for item in client.parseDOM(result, "a", ret="href"):
            try:
                url = item
                if 'digibolly.se' in url:
                    # Redirect page; the real player sits in an iframe.
                    page = client.request(url)
                    url = re.findall('<iframe src="(.+?)"', page, re.IGNORECASE)[0]
                srcs.append({
                    'source': client.host(url),
                    'parts': '1',
                    'quality': 'HD',
                    'scraper': self.name,
                    'url': url,
                    'direct': False
                })
            except:
                pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return srcs
def resolve(url):
    """Pick the mp4 entry out of a jwplayer 'sources: [...]' list.

    Returns the mp4 URL string, or False on request/parse failure.
    NOTE(review): when no entry contains 'mp4', the split list itself is
    returned rather than a string — callers appear to rely only on
    truthiness, but confirm before changing.
    """
    try:
        result = client.request(url)
        url = re.findall('sources: \[(.+?)\]', result)[0]
        url = url.split(',')
        for i in url:
            i = i.replace('\"', '')
            if 'mp4' in i:
                url = i
                break
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        return False
def testManualMovies():
    """Run every configured test movie through the cinevood scraper,
    shuffling each scraper's links as the real player would."""
    num_movies = len(movies)
    if num_movies > 0:
        logger.debug('Desiscrapers Testing mode active', 'please wait')
        index = 0
        for movie in movies:
            index += 1
            title = movie['title']
            year = movie['year']
            imdb = movie['imdb']
            logger.debug(" Scraping movie {} of {}".format(index, num_movies))
            links_scraper = desiscrapers.scrape_movie(
                title, year, imdb, host=['cinevood'])
            links_scraper = links_scraper()
            for scraper_links in links_scraper:
                if scraper_links:
                    random.shuffle(scraper_links)
def resolve(url):
    """Choose the best available <source res=...> stream, preferring
    720 > 480 > 360 > 240.  Falls through to the input URL when no
    resolution matches; returns False on request failure."""
    try:
        page = client.request(url)
        for res in ('720', '480', '360', '240'):
            try:
                url = client.parseDOM(page, name="source", attrs={"res": res}, ret="src")[0]
                break
            except:
                pass
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        return False
def sources(self, url):
    """Einthusan pages stream directly: emit a single direct HD source
    pointing at the movie page itself."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        sources = []
        if url == None:
            return sources
        link = urlparse.urljoin(self.base_link, self.movie_link % url)
        # Warm the page (session/cookies) before handing the URL out.
        r = self.request(link)
        sources.append({'source': 'einthusan', 'quality': 'HD', 'scraper': self.name, 'url': link, 'direct': True, 'debridonly': False})
        logger.debug('SOURCES URL %s' % link, __name__)
    except:
        pass
    return sources
def source(self, url):
    """Follow a detail page to its player page and emit one source per
    jwplayer 'sources' entry, mapping the label to a quality tag.

    Returns a list of source dicts; empty (or partial) on failure.
    """
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        # 'extended' output also yields the request headers; the Referer
        # there anchors the relative player link below.
        result, response_code, response_headers, headers, cookie = client.request(url, output='extended')
        result = result.replace('\n', '').replace('\t', '').replace('\r', '')
        referer = headers.get('Referer')
        result = client.parseDOM(result, 'div', attrs={"class": "detail ls_item"})[0]
        link = client.parseDOM(result, 'div', attrs={"class": "loaer_detai"})[0]
        link = client.parseDOM(link, 'a', ret='href')[0]
        link = urlparse.urljoin(referer, link)
        result = client.request(link)
        result = re.compile('sources:\s\[(.+?)\]').findall(result)[0]
        # Re-wrap so the jwplayer source list parses as JSON.
        result = '[%s]' % result
        result = json.loads(result)
        for item in result:
            url = item.get('file')
            label = item.get('label')
            if '1080p' in label:
                quality = '1080p'
            elif '720p' in label:
                quality = 'HD'
            elif '360p' in label:
                quality = 'SD'
            else:
                # Unknown label: assume screener quality.
                quality = 'SCR'
            host = client.host(url)
            srcs.append({'source': host, 'parts': '1', 'quality': quality, 'scraper': self.name, 'url': url, 'direct': False})
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def get(self, url, idx=True, provider=None, lang=None):
    """Load an IMDb-backed movie list (cached 48h), enrich it via
    worker(), and optionally render it as a movie directory.

    Returns self.list, or None when anything fails (error is logged).
    NOTE(review): if urlparse raises, `u` is left unbound and the
    membership test below hits the outer except — the error is logged
    rather than crashing, but this looks unintentional; confirm.
    """
    logger.debug(
        'url [%s] provider [%s] lang [%s] ' % (url, provider, lang),
        self.__class__)
    self.lang = lang
    try:
        try:
            u = urlparse.urlparse(url).netloc.lower()
        except:
            pass
        if u in self.imdb_link:
            self.list = cache.get(self.imdb_list, 48, url)  # 48h TTL
        if idx == True:
            self.worker()
        if idx == True:
            self.movieDirectory(self.list, lang=lang)
        return self.list
    except Exception as e:
        logger.error(e, __name__)
        pass
def resolve(url):
    """Unpack p.a.c.k.e.d jwplayer javascript when present and pull the
    'file' stream URL out of it.

    Returns the URL, None when no file entry exists, or False on error.
    """
    try:
        page = client.request(url)
        packed = re.search('(eval\(function.*?)\s*</script>', page, re.DOTALL)
        # Deobfuscate only when the page is actually packed.
        js = jsunpack.unpack(packed.group(1)) if packed else page
        match = re.search('file\s*:\s*[\'|"]([^\'|"]+)', js)
        url = match.group(1) if match else None
        logger.debug('URL [%s]' % url, __name__)
        return url
    except Exception as e:
        return False
def sources(self, url):
    """Pair each red host label with its following link paragraph and
    delegate per-pair extraction to self.source().

    Extends self.srcs in place and returns it; on failure the partial
    list is returned after logging.
    """
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        if url is None:
            # Same shape as the success path: a (here empty) list.
            return []
        result = client.request(url)
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')
        result = client.parseDOM(result, "div", attrs={"class": "post-content bottom"})[0]
        items = client.parseDOM(result, "p")
        hosts = client.parseDOM(result, "span", attrs={"style": "color: red;"})
        # Only paragraphs that actually contain anchors carry links.
        links = [item for item in items if 'a href' in item]
        for pair in zip(hosts, links):
            self.srcs.extend(self.source(pair))
        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except Exception as e:
        logger.error(e)
        return self.srcs
def sources(self, url):
    """List every anchor inside the #list-dl block as an HD source."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        target = urlparse.urljoin(self.base_link, url)
        try:
            html = client.request(target, referer=self.base_link)
        except:
            html = ''
        # Normalize encoding and strip layout whitespace before parsing.
        html = html.decode('iso-8859-1').encode('utf-8')
        html = html.replace('\n', '').replace('\t', '')
        block = client.parseDOM(html, "div", attrs={"id": "list-dl"})
        for link in client.parseDOM(block, "a", ret="href"):
            try:
                srcs.append({
                    'source': client.host(link),
                    'parts': '1',
                    'quality': 'HD',
                    'scraper': self.name,
                    'url': link,
                    'direct': False
                })
            except:
                pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return srcs
def sources(self, url):
    """Collect iframe embeds from the post's entry div; quality is HD
    when the page URL itself mentions 'hd', SD otherwise."""
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        quality = 'HD' if 'hd' in url.lower() else 'SD'
        html = client.request(url)
        # Restrict parsing to the entry div only.
        entry = BeautifulSoup(html, parseOnlyThese=SoupStrainer("div", {"class": "entry"}))
        try:
            for frame in entry.findAll('iframe'):
                src = frame.get('src')
                srcs.append({
                    'source': client.host(src),
                    'parts': '1',
                    'quality': quality,
                    'scraper': self.name,
                    'url': src,
                    'direct': False
                })
        except:
            pass
        return srcs
    except:
        return srcs
def resolve(url):
    """Resolve a mediaplaybox video page to a direct stream URL.

    Prefers the mobile page's download anchor (with the iPod mp4
    swapped for the flv); falls back to the contentURL meta tag.
    Returns False when the video id cannot be extracted.
    """
    try:
        # Trailing '#' guarantees the non-greedy id capture terminates.
        url = url + '#'
        url = re.compile('http://www.mediaplaybox.com/video/(.+?)#').findall(url)[0]
        url = 'http://www.mediaplaybox.com/mobile?vinf=%s' % url
        result = client.request(url, debug=True)
        try:
            url = client.parseDOM(result, "div", attrs={"class": "divider"})[0]
            url = client.parseDOM(url, "a", ret="href")
            url = url[0]
            # Prefer the full flv over the iPod rendition.
            url = url.replace('_ipod.mp4', '.flv')
            return url
        except:
            pass
        # Fallback: schema.org contentURL meta tag.
        try:
            url = client.parseDOM(result, "meta", attrs={"itemprop": "contentURL"}, ret="content")[0]
        except:
            pass
        logger.debug('URL [%s]' % url, __name__)
        return url
    except:
        return False
def resolve(url):
    """Resolve an apnasave embed to a direct URL, preferring the <hd>
    variant exposed by the player config over the plain <src> one.

    Returns the input *url* unchanged when the inner steps fail before
    a better link is found, or False on unexpected errors.
    """
    try:
        rUrl = None
        hdUrl = None
        try:
            result = client.request(url)
            rUrl = client.parseDOM(result, name="source", ret="src")[0]
            videoId = getVideoID(rUrl)
            # The player config holds the actual media variants.
            rUrl = 'http://www.apnasave.in/media/player/config_embed.php?vkey=%s' % videoId
            result = client.request(rUrl)
            try:
                hdUrl = client.parseDOM(result, name="hd")[0]
                url = hdUrl
            except:
                pass
            # No HD variant: fall back to the standard src element.
            if hdUrl == None:
                url = client.parseDOM(result, name="src")[0]
        except:
            pass
        logger.debug('URL [%s]' % url, __name__)
        return url
    except:
        return False
def super_info(self, items):
    """Backfill each scraped episode dict with placeholder metadata.

    *items* is expected to be self.list; entries are updated in place
    and the list is returned.  Returns None on failure (error logged).
    """
    logger.debug('INSIDE SUPER_INFO', __name__)
    try:
        for i, item in enumerate(items):
            # Default a missing season to '0'; everything else is
            # stubbed since these providers expose no real metadata.
            season = item.get('season')
            if season is None:
                season = '0'
            self.list[i].update({
                'season': season,
                'episode': self.list[i]['name'],
                'imdb': '0',
                'tvdb': '0',
                'year': '0',
                'poster': '0',
                'banner': '0',
                'fanart': '0',
                'thumb': '0',
                'premiered': '0',
                'duration': '30'
            })
        logger.debug('COMPLETE SUPER_INFO', __name__)
        return self.list
    except Exception as e:
        logger.error(e)
        pass
def sourcesResolve(self, item):
    """Resolve a selected source item to its final playable URL.

    Direct items are used as-is; otherwise resolvers.request() is
    invoked (optionally with debrid).  f4mTester URLs get the display
    name/poster appended to their query string.  Stores the result in
    self.url and returns it; on failure self.url is None and the
    method returns None.
    """
    try:
        logger.debug('selected url : %s' % item['url'], __name__)
        logger.debug('selected item : %s' % item, __name__)
        u = url = item['url']
        if url == None or url == False:
            raise Exception()
        direct = item['direct']
        if not direct == True:
            logger.debug('Resolving [%s]' % url, __name__)
            try:
                allowDebrid = bool(control.setting('allow_debrid'))
            except:
                allowDebrid = False
            from resources.lib import resolvers
            u = resolvers.request(url, allowDebrid=allowDebrid)
            if 'plugin.video.f4mTester' in u:
                # f4mTester wants the display name/poster on its query string.
                try:
                    title = item['name']
                    title = urllib.quote_plus(title.encode('utf-8'))
                    iconImage = item['poster']
                except:
                    pass
                u += '&name=%s&iconImage=%s' % (title, iconImage)
            logger.debug('Resolved [%s]' % u, __name__)
            if u == False:
                raise Exception()
            url = u
        try:
            # File extension with query string / part markers stripped.
            ext = url.split('?')[0].split('&')[0].split('|')[0].rsplit(
                '.')[-1].replace('/', '').lower()
        except:
            ext = None
        # Archives cannot be streamed.
        if ext == 'rar':
            raise Exception()
        self.url = url
        return url
    except:
        self.url = None
        return
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import sys import urlparse from aftershock.common import analytics, logger, control params = dict(urlparse.parse_qsl(sys.argv[2].replace('?',''))) logger.debug(params, __name__) action = params.get('action') name = params.get('name').decode('UTF-8') if params.get('name') else None title = params.get('title').decode('UTF-8') if params.get('title') else None year = params.get('year') season = params.get('season') episode = params.get('episode').decode('utf-8') if params.get('episode') else None tvshowtitle = params.get('tvshowtitle').decode('utf-8') if params.get('tvshowtitle') else None date = params.get('date') url = params.get('url') image = params.get('image') meta = params.get('meta').decode('utf-8') if params.get('meta') else None query = params.get('query') source = params.get('source') content = params.get('content')
def sources(self, url):
    """Walk alternating server-name / link paragraphs in the post body
    and bundle each server's (possibly multi-part) links into a source.

    Multi-part URLs are joined with '##'; each link is additionally
    fetched (mobile UA) to unwrap the embedded player src.
    Returns a list of source dicts; empty (or partial) on failure.
    """
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs
        result = client.request(url)
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')
        result = client.parseDOM(
            result, name="div",
            attrs={"class": "entry-content rich-content"})[0]
        result = client.parseDOM(result, name="p")
        try:
            quality = host = ''
            urls = []
            # Drop the header paragraph, then paragraphs alternate:
            # even index = server name, odd index = that server's links.
            result = result[1::]
            serversList = result[::2]
            linksList = result[1::2]
            for i in range(0, len(serversList)):
                try:
                    links = linksList[i]
                    urls = client.parseDOM(links, name="a", ret="href")
                    for j in range(0, len(urls)):
                        try:
                            # Follow each link (mobile page) and pull the
                            # embedded player URL out of the first cell.
                            item = client.request(urls[j], mobile=True)
                            item = client.parseDOM(item, "td")[0]
                            item = re.compile(
                                '(SRC|src|data-config)=\"(.+?)\"').findall(
                                    item)[0][1]
                            urls[j] = item
                        except:
                            pass
                    if len(urls) > 1:
                        url = "##".join(urls)
                    else:
                        url = urls[0]
                    host = client.host(urls[0])
                    srcs.append({
                        'source': host,
                        'parts': str(len(urls)),
                        'quality': quality,
                        'scraper': self.name,
                        'url': url,
                        'direct': False
                    })
                except:
                    pass
        except:
            pass
        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except:
        return srcs
def episodes(self, title, url):
    """Scrape a desirulez forum thread list into episode entries.

    Tries each mirror base link until one serves a page containing
    'threadtitle'.  Thread titles are filtered to 'Online' posts, the
    show title and surrounding text are stripped down to the date, and
    the season number is guessed from digits in the title.  The first
    entry additionally carries the rel=next pagination link.
    Returns the (possibly empty) episode list.
    """
    try:
        episodes = []
        links = [self.base_link_1, self.base_link_2, self.base_link_3]
        tvshowurl = url
        for base_link in links:
            try:
                # url may arrive with a mirror prefix; strip it before
                # re-anchoring on the current mirror.
                url = url.replace(base_link, '')
                result = client.request(base_link + '/' + url)
                if result == None:
                    raise Exception()
            except:
                result = ''
            if 'threadtitle' in result:
                break
        rawResult = result.decode('windows-1252').encode('utf-8')
        # Both read and unread thread headings carry episode threads.
        result = client.parseDOM(
            rawResult, "h3", attrs={"class": "title threadtitle_unread"})
        result += client.parseDOM(rawResult, "h3", attrs={"class": "threadtitle"})
        for item in result:
            name = client.parseDOM(item, "a", attrs={"class": "title"})
            name += client.parseDOM(
                item, "a", attrs={"class": "title threadtitle_unread"})
            if type(name) is list:
                name = name[0]
            url = client.parseDOM(item, "a", ret="href")
            if type(url) is list:
                url = url[0]
            # Only streaming threads ('Online') are wanted here.
            if "Online" not in name:
                continue
            name = name.replace(title, '')
            if not title == 'awards':
                try:
                    # Keep just the leading 'day Month year'-style date.
                    name = re.compile('([\d{1}|\d{2}]\w.+\d{4})').findall(
                        name)[0]
                except:
                    pass
            name = name.strip()
            try:
                # Season is guessed from digits embedded in the show title.
                season = title.lower()
                season = re.compile('[0-9]+').findall(season)[0]
            except:
                season = '0'
            episodes.append({
                'season': season,
                'tvshowtitle': title,
                'title': name,
                'name': name,
                'url': url,
                'provider': 'desirulez',
                'tvshowurl': tvshowurl
            })
        # Pagination link is stored on the first episode only.
        next = client.parseDOM(rawResult, "span", attrs={"class": "prev_next"})
        next = client.parseDOM(next, "a", attrs={"rel": "next"}, ret="href")[0]
        episodes[0].update({'next': next})
    except Exception as e:
        logger.error(e)
    logger.debug(episodes, __name__)
    return episodes
def resolve(url):
    """Resolve a playwire player config to a progressive mp4 URL.

    Reads the publisher/hosting/video ids from the config JSON, fetches
    the v2 player manifest, then probes the CDN's SD rendition and
    falls back to the mobile one.  Returns the last probed URL (even
    when both probes failed) or False on error.
    """
    try:
        result = client.request(url)
        data = json.loads(result)
        # Ids live either at the top level or under 'settings',
        # depending on the config version.
        try:
            publisherId = data['publisherId']
            hostingId = data['hostingId']
            videoId = data['content']['videoId']
        except:
            publisherId = data['settings']['publisherId']
            hostingId = data['settings']['hostingId']
            videoId = data['settings']['videoId']
        url = 'https://config.playwire.com/videos/v2/%s/player.json' % str(videoId)
        result = client.request(url)
        data = json.loads(result)
        src = data['src']
        # (account_id, video_id) extracted from the f4m manifest path.
        video_info = re.compile('config.playwire.com/(.+?)/videos/v2/(.+?)/manifest.f4m').findall(src)[0]
        # try best url
        #url = 'https://config.playwire.com/' + video_info[0] + '/videos/v2/' + video_info[1] + '/abr-non-hd.m3u8'
        #logger.debug('PLAYWIRE HD URL : %s ' % url)
        #try :
        #    result = client.request(url, output='geturl', timeout=10)
        #    if result == None:
        #        raise Exception()
        #except:
        #    pass
        # try sd url
        result = None
        if result == None:
            url = 'http://cdn.phoenix.intergi.com/' + video_info[0] + '/videos/' + video_info[1] + '/video-sd.mp4?hosting_id=' + video_info[0]
            try:
                # A chunk read proves the rendition actually streams.
                result = client.request(url, output='chunk', timeout=10)
                if result == None:
                    raise Exception()
            except:
                pass
        if result == None:
            # try mobile url
            url = 'http://cdn.phoenix.intergi.com/' + video_info[0] + '/videos/' + video_info[1] + '/video-mobile.mp4?hosting_id=' + video_info[0]
            try:
                result = client.request(url, output='chunk', timeout=10)
                if result == None:
                    raise Exception()
            except:
                pass
        '''
        try :
            publisherId = data['publisherId']
            hostingId = data['hostingId']
            videoId = data['content']['videoId']
        except:
            publisherId = data['settings']['publisherId']
            hostingId = data['settings']['hostingId']
            videoId = data['settings']['videoId']
        url = 'https://cdn.video.playwire.com/%s/videos/%s/video-sd.mp4?hosting_id=%s' % (publisherId, videoId, hostingId)
        '''
        logger.debug('URL [%s]' % url, __name__)
        return url
    except:
        return False