def search_newznab(self, url_base, apikey, **params):
    ''' Searches Newznab/Torznab for movie.

    url_base (str): base url for all requests (https://indexer.com/)
    apikey (str): api key for indexer
    params (dict): parameters to url-encode and append to the url

    Builds the request from url_base plus the encoded **params and hands the
    raw response to parse_newznab_xml.

    Returns list of dicts of search results (empty list on failure).
    '''
    query = urllib.parse.urlencode(params)
    url = '{}api?apikey={}&{}'.format(url_base, apikey, query)

    # Log the same url with the api key masked.
    logging.info('SEARCHING: {}api?apikey=APIKEY&{}'.format(url_base, query))

    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    try:
        if use_proxy and proxy.whitelist(url) is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text
        return self.parse_newznab_xml(response)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Newz/TorzNab backlog search.', exc_info=True)
        return []
def search(imdbid):
    ''' Performs a backlog search on ThePirateBay.

    imdbid (str): imdb id to search for

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Performing backlog search on ThePirateBay for {}.'.format(imdbid))

    url = 'https://www.thepiratebay.org/search/{}/0/99/200'.format(imdbid)
    # 'lw=s' cookie requests the simple/lightweight page layout.
    headers = {'Cookie': 'lw=s'}

    try:
        if use_proxy and proxy.whitelist('https://www.thepiratebay.org') is True:
            response = Url.open(url, proxy_bypass=True, headers=headers).text
        else:
            response = Url.open(url, headers=headers).text

        return ThePirateBay.parse(response, imdbid) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('ThePirateBay search failed.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Performs a backlog search on YTS.

    imdbid (str): imdb id used as the query term
    term (str): search term, passed through to the parser

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Performing backlog search on YTS for {}.'.format(imdbid))

    url = 'https://yts.ag/api/v2/list_movies.json?limit=1&query_term={}'.format(imdbid)

    try:
        if use_proxy and proxy.whitelist('https://www.yts.ag') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        if not response:
            return []

        data = json.loads(response)['data']
        if data['movie_count'] < 1:
            return []
        return YTS.parse(data['movies'][0], imdbid, term)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('YTS search failed.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Performs a backlog search on TorrentDownloads.

    imdbid (str): imdb id, passed through to the parser
    term (str): search term used in the rss query

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Performing backlog search on TorrentDownloads for {}.'.format(imdbid))

    url = 'http://www.torrentdownloads.me/rss.xml?type=search&search={}'.format(term)

    try:
        if use_proxy and proxy.whitelist('http://www.torrentdownloads.me') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return TorrentDownloads.parse(response, imdbid) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('TorrentDownloads search failed.', exc_info=True)
        return []
def _get_rss(self):
    ''' Get latest uploads from all indexers.

    Queries every enabled NewzNab (feed_type 'nzb') or TorzNab indexer and
    collects the parsed releases from each.

    Returns list of dicts with parsed release info.
    '''
    proxy_enabled = core.CONFIG['Server']['Proxy']['enabled']

    if self.feed_type == 'nzb':
        indexers = core.CONFIG['Indexers']['NewzNab'].values()
    else:
        indexers = core.CONFIG['Indexers']['TorzNab'].values()

    results = []

    for indexer in indexers:
        # indexer is (url, apikey, enabled) — presumably; verify against config schema.
        if indexer[2] is False:
            continue
        url_base = indexer[0]
        logging.info('Fetching latest RSS from {}.'.format(url_base))
        if url_base[-1] != '/':
            url_base = url_base + '/'
        apikey = indexer[1]

        url = '{}api?t=movie&cat=2000&extended=1&offset=0&apikey={}'.format(url_base, apikey)
        # Log the same url with the api key masked.
        logging.info('RSS_SYNC: {}api?t=movie&cat=2000&extended=1&offset=0&apikey=APIKEY'.format(url_base))

        try:
            if proxy_enabled and proxy.whitelist(url) is True:
                response = Url.open(url, proxy_bypass=True).text
            else:
                response = Url.open(url).text

            # BUG FIX: the original returned here, inside the loop, so only the
            # first enabled indexer was ever queried (and any prior results were
            # overwritten). Accumulate per-indexer results instead.
            indexer_results = self.parse_newznab_xml(response)
            logging.info('Found {} results from {}.'.format(len(indexer_results), url_base))
            results += indexer_results
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception:
            # Best-effort: a failing indexer must not abort the others.
            logging.error('Newz/TorzNab rss get xml.', exc_info=True)

    return results
def search(imdbid, term):
    ''' Search api for movie

    imdbid (str): imdb id #

    Returns list of dicts of parsed releases
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Performing backlog search on Rarbg for {}.'.format(imdbid))

    # Respect the shared rate-limit cooldown before hitting the api.
    if RarBG.timeout:
        now = datetime.datetime.now()
        while RarBG.timeout > now:
            time.sleep(1)
            now = datetime.datetime.now()

    try:
        url = ('https://www.torrentapi.org/pubapi_v2.php'
               '?token={}'
               '&mode=search'
               '&search_imdb={}'
               '&category=movies'
               '&format=json_extended'
               '&app_id=Watcher'.format(RarBG.token(), imdbid))

        # Arm a 2-second cooldown for the next request.
        RarBG.timeout = datetime.datetime.now() + datetime.timedelta(seconds=2)

        if use_proxy and proxy.whitelist('https://www.torrentapi.org') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        torrents = json.loads(response).get('torrent_results')
        if not torrents:
            logging.info('Nothing found on Rarbg.')
            return []
        return RarBG.parse(torrents, imdbid=imdbid)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Rarbg search failed.', exc_info=True)
        return []
def search_newznab(self, url_base, apikey, type_, q=None, imdbid=None):
    ''' Searches Newznab/Torznab for movie.

    url_base (str): base url for all requests (https://indexer.com/)
    apikey (str): api key for indexer
    type_ (str): one of 'movie' or 'search'
    q (str): query to use with type_ == 'search'
    imdbid (str): imdbid

    Returns list of dicts of search results (empty list on failure or
    unknown type_).
    '''
    url = '{}api?apikey={}&cat=2000&extended=1'.format(url_base, apikey)

    if type_ == 'movie':
        # Newznab's imdbid parameter omits the 'tt' prefix.
        url += '&t=movie&imdbid={}'.format(imdbid[2:])
    elif type_ == 'search':
        url += '&t=search&q={}'.format(q)
    else:
        logging.error('Unknown search method {}'.format(type_))
        return []

    # Log the url with the api key masked.
    logging.info('SEARCHING: {}'.format(url.replace(apikey, 'APIKEY')))

    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    try:
        if use_proxy and proxy.whitelist(url) is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        results = self.parse_newznab_xml(response, imdbid=imdbid)
        logging.info('Found {} results from {}.'.format(len(results), url_base))
        return results
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Newz/TorzNab backlog search.', exc_info=True)
        return []
def get_rss():
    ''' Gets latest rss feed from api

    Returns list of dicts of parsed releases
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Fetching latest RSS from Rarbg.')

    # Respect the shared rate-limit cooldown before hitting the api.
    if RarBG.timeout:
        now = datetime.datetime.now()
        while RarBG.timeout > now:
            time.sleep(1)
            now = datetime.datetime.now()

    try:
        url = ('https://www.torrentapi.org/pubapi_v2.php'
               '?token={}'
               '&mode=list'
               '&category=movies'
               '&format=json_extended'
               '&app_id=Watcher'.format(RarBG.token()))

        # Arm a 2-second cooldown for the next request.
        RarBG.timeout = datetime.datetime.now() + datetime.timedelta(seconds=2)

        if use_proxy and proxy.whitelist('https://www.torrentapi.org') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        torrents = json.loads(response).get('torrent_results')
        if not torrents:
            logging.info('Nothing found in Rarbg RSS.')
            return []
        return RarBG.parse(torrents)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Rarbg RSS fetch failed.', exc_info=True)
        return []
def get_rss():
    ''' Fetches the latest rss feed from YTS.

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Fetching latest RSS from YTS.')

    url = 'https://yts.ag/rss/0/all/all/0'

    try:
        if use_proxy and proxy.whitelist('https://www.yts.ag') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return YTS.parse_rss(response) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('YTS RSS fetch failed.', exc_info=True)
        return []
def get_rss():
    ''' Fetches the latest movie listing from ThePirateBay.

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Fetching latest RSS from ThePirateBay.')

    url = 'https://www.thepiratebay.org/browse/201/0/3/0'
    # 'lw=s' cookie requests the simple/lightweight page layout.
    headers = {'Cookie': 'lw=s'}

    try:
        if use_proxy and proxy.whitelist('https://www.thepiratebay.org') is True:
            response = Url.open(url, proxy_bypass=True, headers=headers).text
        else:
            response = Url.open(url, headers=headers).text

        return ThePirateBay.parse(response, None) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('ThePirateBay RSS fetch failed.', exc_info=True)
        return []
def get_rss():
    ''' Fetches the latest movie rss feed from Torrentz2.

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    proxy_enabled = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Fetching latest RSS from Torrentz2.')

    url = 'https://www.torrentz2.eu/feed?f=movies'

    try:
        # BUG FIX: the whitelist check used the truncated domain
        # 'https://www.torrentz2.e', which can never match the real host
        # (torrentz2.eu), so the proxy bypass was silently never applied.
        if proxy_enabled and proxy.whitelist('https://www.torrentz2.eu') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        if response:
            return Torrentz2.parse(response, None)
        else:
            return []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Torrentz2 RSS fetch failed.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Performs a backlog search on Torrentz2.

    imdbid (str): imdb id, passed through to the parser
    term (str): search term used in the feed query

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    proxy_enabled = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Searching Torrentz2 for {}.'.format(term))

    url = 'https://www.torrentz2.eu/feed?f={}'.format(term)

    try:
        # BUG FIX: the whitelist check used the truncated domain
        # 'https://www.torrentz2.e', which can never match the real host
        # (torrentz2.eu), so the proxy bypass was silently never applied.
        if proxy_enabled and proxy.whitelist('https://www.torrentz2.eu') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        if response:
            return Torrentz2.parse(response, imdbid)
        else:
            return []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Torrentz2 search failed.', exc_info=True)
        return []
def search(self, imdbid, term):
    ''' Performs a backlog search on SkyTorrents.

    imdbid (str): imdb id, passed through to the parser
    term (str): search term used in the rss query

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Performing backlog search on SkyTorrents for {}.'.format(imdbid))

    url = 'https://www.skytorrents.in/rss/all/ed/1/{}'.format(term)

    try:
        if use_proxy and proxy.whitelist('https://www.skytorrents.in') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return self.parse(response, imdbid) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('SkyTorrents search failed.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Performs a search on Zooqle.

    imdbid (str): imdb id, passed through to the parser
    term (str): search term used in the query

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Searching Zooqle for {}.'.format(term))

    url = 'https://zooqle.com/search?q={}&fmt=rss'.format(term)

    try:
        if use_proxy and proxy.whitelist('https://www.zooqle.com') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return Zooqle.parse(response, imdbid, term) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Zooqle search failed.', exc_info=True)
        return []
def get_rss(self):
    ''' Fetches the latest rss feed from TorrentDownloads.

    Returns list of dicts of parsed releases (empty list on failure).
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']
    logging.info('Fetching latest RSS from TorrentDownloads.')

    url = 'http://www.torrentdownloads.me/rss2/last/4'

    try:
        if use_proxy and proxy.whitelist('http://www.torrentdownloads.me') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return self.parse(response, None) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('TorrentDownloads RSS fetch failed.', exc_info=True)
        return []