def __search(self, titles, year):
    """Search the site for *titles* released in *year*.

    Builds a search query from the first title, scrapes the result
    cards ('karatula' divs), and returns the domain-stripped href of
    the first card whose title contains the normalized wanted title
    and whose embedded 4-digit year matches *year*.

    Returns None when nothing matches or on any error (scraper
    convention: callers treat None as "no result").
    """
    try:
        query = self.search_link % (urllib.quote_plus(cleantitle.query(titles[0]) + ' ' + year))
        query = urlparse.urljoin(self.base_link, query)
        # Normalized form of the first non-empty title; used for the containment match below.
        t = [cleantitle.get(i) for i in set(titles) if i][0]
        r = client.request(query)
        r = client.parseDOM(r, 'div', attrs={'class': 'karatula'})
        for i in r:
            anchors = client.parseDOM(i, 'a', ret='title')
            if not anchors:
                # A card without a titled anchor previously raised
                # IndexError and the bare except aborted the whole scan.
                continue
            title = anchors[0]
            years_found = re.findall(r'(\d{4})', title)
            if not years_found:
                # Likewise: a result without a year should be skipped,
                # not abort iteration over the remaining candidates.
                continue
            y = years_found[0]
            title = cleantitle.get_simple(title)
            if t in title and y == year:
                x = dom_parser.parse_dom(i, 'a', req='href')
                # x[0][0] is the attrs dict of the first matched anchor.
                return source_utils.strip_domain(x[0][0]['href'])
        return
    except Exception:
        # Narrowed from bare except: so SystemExit/KeyboardInterrupt propagate.
        return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
    """Resolve a TV show to its site URL.

    Queries the site's JSON search endpoint with a slugged title and
    returns the absolute URL of the first series whose normalized
    original name equals the normalized wanted title; None otherwise
    (including on any error).
    """
    try:
        wanted = cleantitle.get_simple(tvshowtitle)
        slug = cleantitle.geturl(tvshowtitle).replace('-', '+')
        search_url = urlparse.urljoin(self.base_link, self.search_link % slug)
        payload = json.loads(client.request(search_url))
        for show in payload['series']:
            if cleantitle.get_simple(show['original_name']) == wanted:
                return urlparse.urljoin(self.base_link, show['seo_name'])
        return
    except:
        return
def sources(self, url, hostDict, hostprDict):
    """Collect debrid-only sources for a movie/episode *url* (query string).

    Parses the plugin query string, builds a site search, filters result
    anchors whose text matches the wanted title and S01E01/year marker,
    then fans each candidate out to self._get_sources on worker threads.

    Returns the accumulated self._sources list (possibly empty); never raises.
    """
    try:
        self._sources = []
        if url is None:
            return self._sources

        # Scraper only yields premium links; bail out when debrid is disabled.
        if debrid.status() is False:
            raise Exception()

        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

        title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
        # hdlr is the marker a valid result title must contain: SxxEyy or the year.
        hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
        query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
        query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

        query = self.search_link % cleantitle.geturl(query)
        url = urlparse.urljoin(self.base_link, query)

        r = client.request(url)
        posts = dom_parser2.parse_dom(r, 'div', {'class': 'eTitle'})
        posts = [dom_parser2.parse_dom(i.content, 'a', req='href') for i in posts if i]
        # (href, visible text with tags stripped) pairs.
        posts = [(i[0].attrs['href'], re.sub('<.+?>', '', i[0].content)) for i in posts if i]
        posts = [
            (i[0], i[1]) for i in posts
            if (cleantitle.get_simple(i[1].split(hdlr)[0]) == cleantitle.get(title)
                and hdlr.lower() in i[1].lower())
        ]

        self.hostDict = hostDict + hostprDict
        threads = [workers.Thread(self._get_sources, i) for i in posts]
        for t in threads:
            t.start()
        # join() already blocks until each thread finishes; the original
        # additionally busy-waited on is_alive() afterwards, which was dead code.
        for t in threads:
            t.join()

        return self._sources
    except Exception:
        return self._sources
def _manage_pack(self):
    """Pick a download URL from self.files.

    For movie content the largest file wins (any entry carrying
    'is_largest'). Otherwise the file name is normalized and, unless it
    looks like a low-quality re-encode ('furk320') or a sample, matched
    against self.filtering_list tokens; the last matching file wins.

    Returns the chosen 'url_dl' value, or None when nothing matched
    (previously `url` was left unbound and `return url` raised
    UnboundLocalError, which callers had to swallow).
    """
    url = None
    for entry in self.files:
        if self.content_type == 'movie':
            if 'is_largest' in entry:
                url = entry['url_dl']
        else:
            # cleantitle.get_simple presumably normalizes the file name — TODO confirm.
            name = cleantitle.get_simple(entry['name']).lower()
            if 'furk320' not in name and 'sample' not in name:
                for token in self.filtering_list:
                    if token in name:
                        url = entry['url_dl']
    return url
def search(self, title, year):
    """Look *title*/*year* up on each mirror in self.base_link.

    Hits a JSON endpoint per mirror and keeps the (title, url) pair of
    the first child post whose title, normalized up to the year, equals
    the normalized wanted title and actually contains the year. Mirrors
    are tried in order; a later success overwrites an earlier one.

    Returns a list with at most one (title, url) tuple ([] when nothing
    matched), or None on an unexpected outer failure.
    """
    try:
        found = []
        for mirror in self.base_link:
            try:
                query = urlparse.urljoin(
                    mirror, self.search_link % (urllib.quote(title), year))
                payload = json.loads(client.request(query))
                post = payload['data']['children'][0]['data']
                if cleantitle.get_simple(post['title'].split(year)[0]) != cleantitle.get(title):
                    raise Exception()
                if year not in post['title']:
                    raise Exception()
                found = [(post['title'], post['url'])]
            except BaseException:
                pass
        return found
    except BaseException:
        return
def sources(self, url, hostDict, hostprDict):
    """Offer the local Kodi library copy of a movie/episode as a source.

    Looks the item up via Kodi's JSON-RPC VideoLibrary API (matching by
    imdb id or normalized title within year +/- 1), skips .strm stubs,
    and builds a single 'local'/'direct' source entry with quality,
    codec/channel info and file size.

    Returns a list with at most one source dict; on any failure logs via
    source_utils.scraper_error and returns whatever was accumulated.
    """
    sources = []
    try:
        if url is None:
            return sources

        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

        content_type = 'episode' if 'tvshowtitle' in data else 'movie'
        # Library year metadata is often off by one, so match a 3-year window.
        years = (data['year'], str(int(data['year']) + 1), str(int(data['year']) - 1))

        if content_type == 'movie':
            title = cleantitle.get_simple(data['title']).lower()
            localtitle = cleantitle.get_simple(data['localtitle']).lower()  # NOTE(review): computed but never used
            ids = [data['imdb']]

            r = control.jsonrpc(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovies", "params": {"filter":{"or": [{"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}]}, "properties": ["imdbnumber", "title", "originaltitle", "file"]}, "id": 1}' % years)
            r = unicode(r, 'utf-8', errors='ignore')
            r = json.loads(r)['result']['movies']
            # Match by imdb id first, fall back to normalized (original)title.
            r = [
                i for i in r
                if str(i['imdbnumber']) in ids or title in [
                    cleantitle.get_simple(i['title']),
                    cleantitle.get_simple(i['originaltitle'])
                ]
            ]
            if r == []:
                return sources
            # Prefer a real media file over a .strm stub.
            r = [i for i in r if not i['file'].encode('utf-8').endswith('.strm')][0]

            r = control.jsonrpc(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovieDetails", "params": {"properties": ["streamdetails", "file"], "movieid": %s }, "id": 1}' % str(r['movieid']))
            r = unicode(r, 'utf-8', errors='ignore')
            r = json.loads(r)['result']['moviedetails']

        elif content_type == 'episode':
            title = cleantitle.get_simple(data['tvshowtitle']).lower()
            localtitle = cleantitle.get_simple(data['localtvshowtitle']).lower()  # NOTE(review): computed but never used
            season, episode = data['season'], data['episode']

            r = control.jsonrpc(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": {"filter":{"or": [{"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}]}, "properties": ["imdbnumber", "title"]}, "id": 1}' % years)
            r = unicode(r, 'utf-8', errors='ignore')
            r = json.loads(r)['result']['tvshows']
            # Strip a trailing " (YEAR)" style suffix from library titles before matching.
            r = [
                i for i in r
                if title in (cleantitle.get_simple(i['title']).lower()
                             if not ' (' in i['title']
                             else cleantitle.get_simple(i['title']).split(' (')[0])
            ][0]

            r = control.jsonrpc(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodes", "params": {"filter":{"and": [{"field": "season", "operator": "is", "value": "%s"}, {"field": "episode", "operator": "is", "value": "%s"}]}, "properties": ["file"], "tvshowid": %s }, "id": 1}' % (str(season), str(episode), str(r['tvshowid'])))
            r = unicode(r, 'utf-8', errors='ignore')
            r = json.loads(r)['result']['episodes']
            r = [i for i in r if not i['file'].encode('utf-8').endswith('.strm')][0]

            r = control.jsonrpc(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodeDetails", "params": {"properties": ["streamdetails", "file"], "episodeid": %s }, "id": 1}' % str(r['episodeid']))
            r = unicode(r, 'utf-8', errors='ignore')
            r = json.loads(r)['result']['episodedetails']

        url = r['file'].encode('utf-8')

        try:
            quality = int(r['streamdetails']['video'][0]['width'])
        except:
            quality = -1

        # BUGFIX: the original used four independent `if` statements on the
        # reassigned `quality`; under Python 2's str/int ordering '4K' >= 1920
        # was True, so 4K was always downgraded to '1080p', and widths in
        # 1900-1919 fell through every branch leaving a raw int.
        if quality > 1920:
            quality = '4K'
        elif quality >= 1920:
            quality = '1080p'
        elif quality >= 1280:
            quality = '720p'
        else:
            quality = 'SD'

        info = []

        try:
            f = control.openFile(url)
            s = f.size()
            f.close()
            dsize = float(s) / 1024 / 1024 / 1024
            isize = '%.2f GB' % dsize
            info.insert(0, isize)
        except:
            dsize = 0
            pass

        try:
            c = r['streamdetails']['video'][0]['codec']
            if c == 'avc1':
                c = 'h264'
            info.append(c)
        except:
            pass

        try:
            ac = r['streamdetails']['audio'][0]['codec']
            if ac == 'dca':
                ac = 'dts'
            if ac == 'dtshd_ma':
                ac = 'dts-hd ma'
            info.append(ac)
        except:
            pass

        try:
            ach = r['streamdetails']['audio'][0]['channels']
            if ach == 1:
                ach = 'mono'
            if ach == 2:
                ach = '2.0'
            if ach == 6:
                ach = '5.1'
            if ach == 8:
                ach = '7.1'
            info.append(ach)
        except:
            pass

        info = ' | '.join(info)
        info = info.encode('utf-8')

        sources.append({
            'source': '0',
            'quality': quality,
            'language': 'en',
            'url': url,
            'info': info,
            'local': True,
            'direct': True,
            'debridonly': False,
            'size': dsize
        })
        return sources
    except:
        source_utils.scraper_error('library')
        return sources