def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        table_body = html.find('tbody')

        # Continue only if at least one release is found
        if not table_body:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        for row in table_body('tr'):
            cells = row('td')
            # Data rows carry at least 4 cells (info, size, seeders, leechers)
            if len(cells) < 4:
                continue

            try:
                title = download_url = None
                info_cell = cells[0].a
                if info_cell:
                    title = info_cell.get_text()
                    download_url = self._get_download_link(urljoin(self.urls['base_url'], info_cell.get('href')))

                if not all([title, download_url]):
                    continue

                # Append codec tag so downstream quality parsing recognizes the release
                title = '{name} {codec}'.format(name=title, codec='x264')

                if self.custom_url:
                    if not validate_url(self.custom_url):
                        sickrage.app.log.warning("Invalid custom url: {}".format(self.custom_url))
                        return results
                    # Rebase the download link onto the user-configured mirror
                    download_url = urljoin(self.custom_url, download_url.split(self.urls['base_url'])[1])

                seeders = try_int(cells[2].get_text(strip=True))
                leechers = try_int(cells[3].get_text(strip=True))

                torrent_size = cells[1].get_text()
                size = convert_size(torrent_size, -1)

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def get_scene_numbering_for_show(indexer_id, indexer):
    """
    Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings
    for an entire show. Both the keys and values of the dict are tuples.
    Will be empty if there are no scene numbers set
    """
    if indexer_id is None:
        return {}

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    mappings = {}

    for entry in sickrage.app.main_db.db.get_many('scene_numbering', indexer_id, with_doc=True):
        doc = entry['doc']

        scene_season = try_int(doc['scene_season'])
        scene_episode = try_int(doc['scene_episode'])

        # Skip rows for other indexers and rows with no scene numbering set at all
        if try_int(doc['indexer']) != indexer or (scene_season or scene_episode) == 0:
            continue

        mappings[(try_int(doc['season']), try_int(doc['episode']))] = (scene_season, scene_episode)

    return mappings
def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode):
    """
    Reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering

    :param indexer_id: int
    :param sceneSeason: int
    :param sceneEpisode: int
    :return: (int, int) a tuple of (season, episode)
    """
    if indexer_id is None or sceneSeason is None or sceneEpisode is None:
        return sceneSeason, sceneEpisode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for entry in sickrage.app.main_db.db.get_many('tv_episodes', indexer_id, with_doc=True):
        doc = entry['doc']
        if (doc['indexer'] == indexer
                and doc['scene_season'] == sceneSeason
                and doc['scene_episode'] == sceneEpisode):
            # First matching episode wins, mirroring the original list lookup
            return try_int(doc.get("season")), try_int(doc.get("episode"))

    # No XEM mapping found; echo back the scene numbering unchanged
    return sceneSeason, sceneEpisode
def search(self, search_strings, age=0, series_id=None, series_provider_id=None, season=None, episode=None, **kwargs):
    """
    Search a provider and parse the results.

    :param search_strings: A dict with mode (key) and the search value (value)
    :param age: Not used
    :param ep_obj: Not used
    :returns: A list of search results (structure)
    """
    results = []

    # Base query parameters; 'q' is rewritten for every search string below
    search_params = {
        'q': '* category:TV',
        's': 'dt',
        'v': 't',
        'sd': 'd',
    }

    for mode in search_strings:
        sickrage.app.log.debug('Search mode: {}'.format(mode))

        for search_string in search_strings[mode]:
            if mode != 'RSS':
                sickrage.app.log.debug('Search string: {}'.format(search_string))

            search_params['q'] = '{} category:TV'.format(search_string)
            search_params['fmt'] = 'rss'
            search_params['pg'] = 1

            # Walk result pages (up to 10) until the feed reports exhaustion
            while search_params['pg'] < 11:
                data = self.cache.get_rss_feed(self.urls['search'], params=search_params)
                if not data or not data.get('feed'):
                    sickrage.app.log.debug('No data returned from provider')
                    break

                results += self.parse(data, mode)

                feed = data['feed']
                total_results = try_int(feed.get('opensearch_totalresults'))
                start_index = try_int(feed.get('opensearch_startindex'))
                items_per_page = try_int(feed.get('opensearch_itemsperpage'))

                # Stop when the next page would run past the reported total
                if not total_results or start_index + items_per_page > total_results:
                    break

                search_params['pg'] += 1

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    if not data.startswith("<rss"):
        sickrage.app.log.info("Expected rss but got something else, is your mirror failing?")
        return results

    feed = feedparser.parse(data)

    for entry in feed.entries:
        try:
            title, download_url = entry.title, entry.link
            if not (title and download_url):
                continue

            info = self.regex.search(entry.description)
            if not info:
                continue

            seeders = try_int(info.group("seeders"))
            leechers = try_int(info.group("leechers"))

            category = entry.category
            if category != 'all':
                sickrage.app.log.warning(
                    'skytorrents.in has added categories! Please report this so it can be updated: Category={cat}, '
                    'Title={title}'.format(cat=category, title=title))

            size = convert_size(info.group('size'), -1)

            # The info hash is the second-to-last path segment of the link
            try:
                info_hash = download_url.rsplit('/', 2)[1]
            except IndexError:
                info_hash = ''

            result = {
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers,
                'hash': info_hash
            }

            if mode != "RSS":
                sickrage.app.log.debug("Found result: {}".format(title))

            results.append(result)
        except Exception:
            sickrage.app.log.error("Failed parsing provider")

    return results
def get_xem_absolute_numbering_for_show(indexer_id, indexer):
    """
    Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings
    for an entire show. Both the keys and values of the dict are tuples.
    Will be empty if there are no scene numbers set in xem
    """
    result = {}

    if indexer_id is None:
        return result

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for entry in sickrage.app.main_db.db.get_many('tv_episodes', indexer_id, with_doc=True):
        doc = entry['doc']

        scene_absolute_number = try_int(doc.get('scene_absolute_number'))

        # Skip other indexers and episodes without a scene absolute number
        if try_int(doc['indexer']) != indexer or scene_absolute_number == 0:
            continue

        result[try_int(doc.get('absolute_number'))] = scene_absolute_number

    return result
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    def _process_title(title):
        # Quality, if no literal is defined it's HDTV
        if 'calidad' not in title:
            title += ' HDTV x264'
        else:
            # Map the provider's Spanish quality labels to standard quality strings
            title = title.replace('(calidad baja)', 'HDTV x264')
            title = title.replace('(Buena calidad)', '720p HDTV x264')
            title = title.replace('(Alta calidad)', '720p HDTV x264')
            title = title.replace('(calidad regular)', 'DVDrip x264')
            title = title.replace('(calidad media)', 'DVDrip x264')

        # Language, all results from this provider have spanish audio, we append it to title
        # (avoid to download undesired torrents)
        title += ' SPANISH AUDIO-ELITETORRENT'

        return title

    with bs4_parser(data) as html:
        torrent_table = html.find('table', class_='fichas-listado')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # First row is the column header; need at least one data row
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        for row in torrent_rows[1:]:
            try:
                title = _process_title(row.find('a', class_='nombre')['title'])
                download_url = self.urls['base_url'] + row.find('a')['href']
                if not all([title, download_url]):
                    continue

                seeders = try_int(row.find('td', class_='semillas').get_text(strip=True))
                leechers = try_int(row.find('td', class_='clientes').get_text(strip=True))

                # seeders are not well reported. Set 1 in case of 0
                seeders = max(1, seeders)

                # Provider does not provide size
                size = -1

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                     'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def get_indexer_numbering(indexer_id, indexer, sceneSeason, sceneEpisode, fallback_to_xem=True):
    """
    Returns a tuple, (season, episode) with the TVDB numbering for (sceneSeason, sceneEpisode)
    (this works like the reverse of get_scene_numbering)
    """
    if indexer_id is None or sceneSeason is None or sceneEpisode is None:
        return sceneSeason, sceneEpisode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    for entry in sickrage.app.main_db.db.get_many('scene_numbering', indexer_id, with_doc=True):
        doc = entry['doc']
        if (doc['indexer'] == indexer
                and doc['scene_season'] == sceneSeason
                and doc['scene_episode'] == sceneEpisode):
            return try_int(doc.get("season")), try_int(doc.get("episode"))

    # No custom mapping stored; optionally fall back to XEM numbering
    if fallback_to_xem:
        return get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode)

    return sceneSeason, sceneEpisode
def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode):
    """
    Reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering

    :param indexer_id: int
    :param sceneSeason: int
    :param sceneEpisode: int
    :return: (int, int) a tuple of (season, episode)
    """
    if indexer_id is None or sceneSeason is None or sceneEpisode is None:
        return sceneSeason, sceneEpisode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for episode_doc in sickrage.app.main_db.get_many('tv_episodes', indexer_id):
        if (episode_doc['indexer'] == indexer
                and episode_doc['scene_season'] == sceneSeason
                and episode_doc['scene_episode'] == sceneEpisode):
            # First matching episode wins, mirroring the original list lookup
            return try_int(episode_doc.get("season")), try_int(episode_doc.get("episode"))

    return sceneSeason, sceneEpisode
def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode, session=None):
    """
    Reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering

    :param indexer_id: int
    :param sceneSeason: int
    :param sceneEpisode: int
    :return: (int, int) a tuple of (season, episode)
    """
    if not all([indexer_id, sceneSeason, sceneEpisode]):
        return sceneSeason, sceneEpisode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer, session=session)

    try:
        episode_row = session.query(TVEpisode).filter_by(showid=indexer_id,
                                                         indexer=indexer,
                                                         scene_season=sceneSeason,
                                                         scene_episode=sceneEpisode).one()
    except (orm.exc.NoResultFound, orm.exc.MultipleResultsFound):
        # No unique XEM mapping; fall back to the scene numbering itself
        return sceneSeason, sceneEpisode

    return try_int(episode_row.season), try_int(episode_row.episode)
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_rows = html.find_all('tr', class_='torrent')

        if len(torrent_rows) < 1:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        for result in torrent_rows:
            try:
                # skip if torrent has been nuked due to poor quality
                if result.find('img', alt='Nuked'):
                    continue

                download_url = urljoin(self.urls['base_url'] + '/',
                                       result.find('span', title='Download').parent['href'])
                title = result.find('a', title='View torrent').get_text(strip=True)
                if not all([title, download_url]):
                    continue

                seeders = try_int(result('td', class_="number_column")[1].text)
                leechers = try_int(result('td', class_="number_column")[2].text)

                # Only convert the size column when it actually looks like "<num> <unit>B"
                size = -1
                if re.match(r'\d+([,.]\d+)?\s*[KkMmGgTt]?[Bb]', result('td', class_="number_column")[0].text):
                    size = convert_size(result('td', class_="number_column")[0].text.strip(), -1)

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def get_xem_numbering_for_show(indexer_id, indexer, session=None):
    """
    Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings
    for an entire show. Both the keys and values of the dict are tuples.
    Will be empty if there are no scene numbers set in xem
    """
    result = {}

    if not indexer_id:
        return result

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer, session=session)

    for episode_row in session.query(TVEpisode).filter_by(showid=indexer_id):
        scene_season = try_int(episode_row.scene_season)
        scene_episode = try_int(episode_row.scene_episode)

        # Skip other indexers and episodes without any scene numbering set
        if try_int(episode_row.indexer) != indexer or (scene_season or scene_episode) == 0:
            continue

        result[(try_int(episode_row.season), try_int(episode_row.episode))] = (scene_season, scene_episode)

    return result
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find('table', class_='tl')
        if not torrent_table:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Continue only if at least one Release is found
        torrent_rows = torrent_table.find_all('tr')
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # First two rows are headers; data rows carry at least 8 cells
        for result in torrent_rows[2:]:
            cells = result('td')
            if len(cells) < 8:
                continue

            try:
                title = cells[2].find('a').get_text(strip=True)
                download_url = cells[0].find_all('a')[1].get('href')
                if not (title and download_url):
                    continue

                seeders = try_int(cells[5].get_text(strip=True), 0)
                leechers = try_int(cells[6].get_text(strip=True), 0)

                torrent_size = cells[4].get_text()
                size = convert_size(torrent_size, -1, ['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider.')

    return results
def find_xem_numbering(indexer_id, indexer, season, episode):
    """
    Returns the scene numbering, as retrieved from xem. Refreshes/Loads as needed.

    :param indexer_id: int
    :param season: int
    :param episode: int
    :return: (int, int) a tuple of scene_season, scene_episode, or None if there is
             no special mapping.
    """
    if indexer_id is None or season is None or episode is None:
        return season, episode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for entry in sickrage.app.main_db.db.get_many('tv_episodes', indexer_id, with_doc=True):
        doc = entry['doc']
        if (doc['indexer'] == indexer
                and doc['season'] == season
                and doc['episode'] == episode
                and doc['scene_season'] != 0
                and doc['scene_episode'] != 0):
            return try_int(doc.get("scene_season")), try_int(doc.get("scene_episode"))

    # Implicit None when no non-zero scene mapping exists (matches original)
def parse(self, data, mode, **kwargs):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS
    :return: A list of items found
    """
    results = []

    for row in data.values():
        title, download_url = self._process_title_and_url(row)
        if not all([title, download_url]):
            continue

        results += [{
            'title': title,
            'link': download_url,
            'size': try_int(row.get('Size'), -1),
            'seeders': try_int(row.get('Seeders')),
            'leechers': try_int(row.get('Leechers'))
        }]

        sickrage.app.log.debug("Found result: {}".format(title))

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    for item in data:
        try:
            title, download_url = item['title'], item['link']
            if not all([title, download_url]):
                continue

            results += [{
                'title': title,
                'link': download_url,
                'size': convert_size(item['nyaa_size'], -1, units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB']),
                'seeders': try_int(item['nyaa_seeders']),
                'leechers': try_int(item['nyaa_leechers'])
            }]

            if mode != 'RSS':
                sickrage.app.log.debug("Found result: {}".format(title))
        except Exception:
            sickrage.app.log.error('Failed parsing provider')

    return results
def get_xem_numbering_for_show(indexer_id, indexer):
    """
    Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings
    for an entire show. Both the keys and values of the dict are tuples.
    Will be empty if there are no scene numbers set in xem
    """
    mappings = {}

    if indexer_id is None:
        return mappings

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for episode_doc in sickrage.app.main_db.get_many('tv_episodes', indexer_id):
        scene_season = try_int(episode_doc.get('scene_season'))
        scene_episode = try_int(episode_doc.get('scene_episode'))

        # Skip other indexers and episodes without any scene numbering set
        if try_int(episode_doc['indexer']) != indexer or (scene_season or scene_episode) == 0:
            continue

        key = (try_int(episode_doc.get('season')), try_int(episode_doc.get('episode')))
        mappings[key] = (scene_season, scene_episode)

    return mappings
def get_xem_numbering_for_show(indexer_id, indexer):
    """
    Returns a dict of (season, episode) : (sceneSeason, sceneEpisode) mappings
    for an entire show. Both the keys and values of the dict are tuples.
    Will be empty if there are no scene numbers set in xem
    """
    mappings = {}

    if indexer_id is None:
        return mappings

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for episode_doc in sickrage.app.main_db.get_many('tv_episodes', indexer_id):
        scene_season = try_int(episode_doc.get('scene_season'))
        scene_episode = try_int(episode_doc.get('scene_episode'))

        # Skip other indexers and episodes without any scene numbering set
        if try_int(episode_doc['indexer']) != indexer or (scene_season or scene_episode) == 0:
            continue

        key = (try_int(episode_doc.get('season')), try_int(episode_doc.get('episode')))
        mappings[key] = (scene_season, scene_episode)

    return mappings
def parse_date_time(self, d, t, network):
    """
    Parse date and time string into local time

    :param d: date string
    :param t: time string
    :param network: network to use as base
    :return: datetime object containing local time
    """
    network_tz = self.get_network_timezone(network)

    hour = minute = 0
    match = self.time_regex.search(t)
    if match:
        hour = try_int(match.group('hour'))
        minute = try_int(match.group('minute'))

        meridiem = match.group('meridiem')
        meridiem = meridiem[0].lower() if meridiem else ''

        # Convert 12-hour clock values to a 24-hour clock
        if meridiem == 'a' and hour == 12:
            hour -= 12
        elif meridiem == 'p' and hour != 12:
            hour += 12

    # Clamp out-of-range values back to zero
    if not 0 <= hour <= 23:
        hour = 0
    if not 0 <= minute <= 59:
        minute = 0

    result = datetime.fromordinal(max(try_int(d), 1))
    return result.replace(hour=hour, minute=minute, tzinfo=network_tz)
def find_xem_numbering(indexer_id, indexer, season, episode, session=None):
    """
    Returns the scene numbering, as retrieved from xem. Refreshes/Loads as needed.

    :param indexer_id: int
    :param season: int
    :param episode: int
    :param session: SQLAlchemy session used for both the refresh and the lookup
    :return: (int, int) a tuple of scene_season, scene_episode, or None if there is
             no special mapping.
    """
    if not all([indexer_id, season, episode]):
        return season, episode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    # FIX: pass the caller's session through to the refresh, matching the other
    # session-based helpers in this module (previously the session was dropped).
    xem_refresh(indexer_id, indexer, session=session)

    try:
        dbData = session.query(TVEpisode).filter_by(
            showid=indexer_id, indexer=indexer, season=season,
            episode=episode).filter(TVEpisode.scene_season != 0,
                                    TVEpisode.scene_episode != 0).one()
        return try_int(dbData.scene_season), try_int(dbData.scene_episode)
    except (orm.exc.NoResultFound, orm.exc.MultipleResultsFound):
        # FIX: also treat duplicate rows as "no unique mapping" instead of
        # letting MultipleResultsFound propagate (consistent with the sibling
        # session-based XEM lookup).
        return None
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    for item in data:
        try:
            title, download_url = item['title'], item['link']
            if not all([title, download_url]):
                continue

            results += [{
                'title': title,
                'link': download_url,
                'size': convert_size(item['nyaa_size'], -1, units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB']),
                'seeders': try_int(item['nyaa_seeders']),
                'leechers': try_int(item['nyaa_leechers'])
            }]

            if mode != 'RSS':
                sickrage.app.log.debug("Found result: {}".format(title))
        except Exception:
            sickrage.app.log.error('Failed parsing provider')

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS
    :return: A list of items found
    """
    results = []

    for row in data.values():
        title, download_url = self._process_title_and_url(row)
        if not all([title, download_url]):
            continue

        results += [{
            'title': title,
            'link': download_url,
            'size': try_int(row.get('Size'), -1),
            'seeders': try_int(row.get('Seeders')),
            'leechers': try_int(row.get('Leechers'))
        }]

        sickrage.app.log.debug("Found result: {}".format(title))

    return results
def get_indexer_numbering(indexer_id, indexer, sceneSeason, sceneEpisode, fallback_to_xem=True, session=None):
    """
    Returns a tuple, (season, episode) with the TVDB numbering for (sceneSeason, sceneEpisode)
    (this works like the reverse of get_scene_numbering)
    """
    if not all([indexer_id, sceneSeason, sceneEpisode]):
        return sceneSeason, sceneEpisode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    try:
        numbering = session.query(MainDB.SceneNumbering).filter_by(indexer_id=indexer_id,
                                                                   indexer=indexer,
                                                                   scene_season=sceneSeason,
                                                                   scene_episode=sceneEpisode).one()
    except orm.exc.NoResultFound:
        # No custom mapping stored; optionally consult XEM instead
        if fallback_to_xem:
            return get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode, session=session)
        return sceneSeason, sceneEpisode

    return try_int(numbering.season), try_int(numbering.episode)
def find_xem_numbering(indexer_id, indexer, season, episode):
    """
    Returns the scene numbering, as retrieved from xem. Refreshes/Loads as needed.

    :param indexer_id: int
    :param season: int
    :param episode: int
    :return: (int, int) a tuple of scene_season, scene_episode, or None if there is
             no special mapping.
    """
    if indexer_id is None or season is None or episode is None:
        return season, episode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    xem_refresh(indexer_id, indexer)

    for episode_doc in sickrage.app.main_db.get_many('tv_episodes', indexer_id):
        if (episode_doc['indexer'] == indexer
                and episode_doc['season'] == season
                and episode_doc['episode'] == episode
                and episode_doc['scene_season'] != 0
                and episode_doc['scene_episode'] != 0):
            return try_int(episode_doc.get("scene_season")), try_int(episode_doc.get("scene_episode"))

    # Implicit None when no non-zero scene mapping exists (matches original)
def parse(self, data, mode, **kwargs):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS
    :return: A list of items found
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='table-responsive results')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        for result in torrent_rows[1:]:
            cells = result('td')
            if len(cells) < 9:
                continue

            try:
                info = cells[1].find('a')
                title = info.get_text(strip=True)
                download_url = info.get('href')
                if not (title and download_url):
                    continue

                # Rebuild the download link from the torrent id embedded in the href
                # (a row whose href lacks '/<digits>-' raises here and is skipped)
                torrent_id = re.search(r'/(\d+)-', download_url)
                download_url = self.urls['download'] % torrent_id.group(1)

                seeders = try_int(cells[7].get_text(strip=True), 0)
                leechers = try_int(cells[8].get_text(strip=True), 0)

                torrent_size = cells[5].get_text()
                # French size units (octets: O/KO/MO/...)
                size = convert_size(torrent_size, -1, ['O', 'KO', 'MO', 'GO', 'TO', 'PO'])

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider.')

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find('div', class_='boxContent')
        torrent_table = torrent_table.find('table') if torrent_table else None
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Skip column headers
        for row in torrent_rows[1:]:
            cells = row('td')

            try:
                title = cells[1].find('a').get_text()
                download_url = urljoin(self.urls['base_url'], cells[2].find(title='Download').parent['href'])
                if not all([title, download_url]):
                    continue

                seeders = try_int(cells[6].get_text(strip=True))
                leechers = try_int(cells[7].get_text(strip=True))

                torrent_size = cells[4].get_text()
                # Insert a space before the 2-char unit suffix so convert_size can parse it
                torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:]
                size = convert_size(torrent_size, -1)

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                sickrage.app.log.exception('Failed parsing provider.')

    return results
def parse(self, data, mode):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS
    :return: A list of items found
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='table table-striped')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Skip column headers
        for result in torrent_rows[1:]:
            cells = result('td')
            # NOTE(review): guard requires 5 cells but cells[5] is read below, so a
            # row with exactly 5 cells lands in the except handler — confirm intent
            if len(cells) < 5:
                continue

            try:
                title = cells[0].find('a', class_='torrent-name').get_text(strip=True)
                download_url = urljoin(self.urls['base_url'], cells[0].find('a', target='_blank')['href'])
                if not (title and download_url):
                    continue

                seeders = try_int(cells[4].get_text(strip=True), 1)
                leechers = try_int(cells[5].get_text(strip=True), 0)

                torrent_size = cells[3].get_text()
                size = convert_size(torrent_size, -1)

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }

                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error('Failed parsing provider.')

    return results
def load_from_indexer(self, cache=True, tvapi=None):
    """
    Populate show metadata from the configured indexer.

    :param cache: use the indexer API cache when True
    :param tvapi: optional pre-built indexer API instance to reuse
    :raises indexer_attributenotfound: when the indexer entry has no series name
    """
    # FIX: compare by value, not identity — `is not` against a constant only
    # works by accident for interned small ints.
    if self.indexer != INDEXER_TVRAGE:
        sickrage.app.log.debug(
            str(self.indexer_id) + ": Loading show info from " + IndexerApi(self.indexer).name)

        t = tvapi
        if not t:
            lINDEXER_API_PARMS = IndexerApi(self.indexer).api_params.copy()
            lINDEXER_API_PARMS['cache'] = cache
            lINDEXER_API_PARMS['language'] = self.lang or sickrage.app.config.indexer_default_language

            if self.dvdorder != 0:
                lINDEXER_API_PARMS['dvdorder'] = True

            t = IndexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

        myEp = t[self.indexer_id]
        if not myEp:
            return

        try:
            self.name = myEp['seriesname'].strip()
        except AttributeError:
            raise indexer_attributenotfound(
                "Found %s, but attribute 'seriesname' was empty." % self.indexer_id)

        self.overview = safe_getattr(myEp, 'overview', self.overview)
        self.classification = safe_getattr(myEp, 'classification', self.classification)
        self.genre = safe_getattr(myEp, 'genre', self.genre)
        self.network = safe_getattr(myEp, 'network', self.network)
        self.runtime = try_int(safe_getattr(myEp, 'runtime', self.runtime))
        self.imdb_id = safe_getattr(myEp, 'imdbid', self.imdb_id)

        try:
            self.airs = (safe_getattr(myEp, 'airsdayofweek') + " " + safe_getattr(myEp, 'airstime')).strip()
        except Exception:  # FIX: was a bare except — don't swallow SystemExit/KeyboardInterrupt
            self.airs = ''

        try:
            self.startyear = try_int(
                str(safe_getattr(myEp, 'firstaired') or datetime.date.min).split('-')[0])
        except Exception:  # FIX: was a bare except
            self.startyear = 0

        self.status = safe_getattr(myEp, 'status', self.status)
    else:
        sickrage.app.log.warning(
            str(self.indexer_id) + ": NOT loading info from " + IndexerApi(self.indexer).name +
            " as it is temporarily disabled.")

    object_session(self).commit()
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    def process_column_header(td):
        # Prefer the icon tooltip when the header cell is an image link
        result = ''
        if td.a and td.a.img:
            result = td.a.img.get('title', td.a.get_text(strip=True))
        if not result:
            result = td.get_text(strip=True)
        return result

    with bs4_parser(data) as html:
        torrent_table = html.find('table', attrs={'id': 'torrent_table'})
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers'
        labels = [process_column_header(label) for label in torrent_rows[0]('td')]

        # Skip column headers
        for row in torrent_rows[1:]:
            try:
                cells = row('td')
                if len(cells) < len(labels):
                    continue

                # Columns are located by header label rather than fixed index
                title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True)
                download = cells[labels.index('Name /Year')].find('a', title='Download')['href']
                download_url = urljoin(self.urls['base_url'], download)
                if not all([title, download_url]):
                    continue

                seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))

                torrent_size = cells[labels.index('Size')].get_text(strip=True)
                size = convert_size(torrent_size, -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                     'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find('table', {'id': 'torrent_table'})

        # Continue only if at least one release is found
        if not torrent_table:
            # FIX: the argument was previously passed as a stray positional and
            # never substituted into the '{}' placeholder — format it in.
            sickrage.app.log.debug('Data returned from provider does not contain any {}torrents'.format(
                'ranked ' if self.ranked else ''))
            return results

        torrent_body = torrent_table.find('tbody')
        torrent_rows = torrent_body.contents
        # drop alternating elements (presumably separator/text nodes) — TODO confirm markup
        del torrent_rows[1::2]

        for row in torrent_rows[1:]:
            try:
                torrent = row('td')
                if len(torrent) <= 1:
                    break

                all_as = (torrent[1])('a')

                notinternal = row.find('img', src='/static//common/user_upload.png')
                if self.ranked and notinternal:
                    sickrage.app.log.debug('Found a user uploaded release, Ignoring it..')
                    continue

                freeleech = row.find('img', src='/static//common/browse/freeleech.png')
                if self.freeleech and not freeleech:
                    continue

                title = all_as[2].string
                download_url = urljoin(self.urls['base_url'], all_as[0].attrs['href'])
                if not all([title, download_url]):
                    continue

                seeders = try_int((row('td')[6]).text.replace(',', ''))
                leechers = try_int((row('td')[7]).text.replace(',', ''))

                size = convert_size(row.find('td', class_='nobr').find_next_sibling('td').string, -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                     'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as soup:
        torrent_table = soup.find('table', class_='listing')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # Each release spans two rows (description row + stats row); skip a
        # leading header row when the first row has fewer than two cells
        a = 1 if len(torrent_rows[0]('td')) < 2 else 0

        for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]):
            try:
                desc_top = top.find('td', class_='desc-top')
                title = desc_top.get_text(strip=True)
                download_url = desc_top.find('a')['href']
                if not all([title, download_url]):
                    continue

                # Stats cell matches 'S:<n>L:<n>C:<n>ID:<n>' once spaces are removed
                stats = bot.find('td', class_='stats').get_text(strip=True)
                sl = re.match(
                    r'S:(?P<seeders>\d+)L:(?P<leechers>\d+)C:(?:\d+)ID:(?:\d+)',
                    stats.replace(' ', ''))
                seeders = try_int(sl.group('seeders'), 0)
                leechers = try_int(sl.group('leechers'), 0)

                desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True)
                size = convert_size(desc_bottom.split('|')[1].strip('Size: '), -1)

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (decoded JSON dict)
    :param mode: search mode
    :return: search results
    """
    results = []

    error_code = data.pop('error', {})
    if error_code.get('code'):
        if error_code.get('code') != 2:
            # Fix: log.warning('{0}', descr) passed the message and value as
            # two arguments, so the literal '{0}' was logged; log the
            # description itself.
            sickrage.app.log.warning(
                error_code.get('descr', 'Error code 2 - no description available'))
        return results

    account_ok = data.pop('user', {}).get('can_leech')
    if not account_ok:
        sickrage.app.log.warning(
            'Sorry, your account is not allowed to download, check your ratio'
        )
        return results

    torrent_rows = data.pop('torrents', {})
    if not torrent_rows:
        sickrage.app.log.debug('Provider has no results for this search')
        return results

    for row in torrent_rows:
        try:
            title = row.get('name')
            download_url = row.get('download_link')
            if not all([title, download_url]):
                continue

            seeders = try_int(row.get('seeders'))
            leechers = try_int(row.get('leechers'))
            # API reports size in bytes already; no unit conversion needed.
            size = try_int(row.get('size'), -1)

            results += [{
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers
            }]

            if mode != 'RSS':
                sickrage.app.log.debug("Found result: {}".format(title))
        except Exception:
            sickrage.app.log.error("Failed parsing provider.")

    return results
def search(self, search_strings, age=0, ep_obj=None, **kwargs):
    """
    Search the provider's JSON torrent API.

    :param search_strings: dict mapping search mode (e.g. 'RSS', 'Episode')
        to a list of search strings
    :param age: unused
    :param ep_obj: episode object; supplies the TVDB id for non-RSS searches
    :return: list of parsed search results
    """
    results = []

    if not self.login():
        return results

    # Search Params
    search_params = {
        'app_id': self.app_id,
        'category': 'tv',
        'min_seeders': try_int(self.minseed),
        'min_leechers': try_int(self.minleech),
        'limit': 100,
        'format': 'json_extended',
        'ranked': try_int(self.ranked),
        'token': self.token,
        'sort': 'last',
        'mode': 'list',
    }

    for mode in search_strings:
        sickrage.app.log.debug("Search Mode: %s" % mode)

        if mode == 'RSS':
            search_params['search_string'] = None
            search_params['search_tvdb'] = None
        else:
            search_params[
                'sort'] = self.sorting if self.sorting else 'seeders'
            search_params['mode'] = 'search'
            search_params['search_tvdb'] = ep_obj.show.indexerid

        for search_string in search_strings[mode]:
            if mode != 'RSS':
                sickrage.app.log.debug("Search string: %s " % search_string)
                if self.ranked:
                    sickrage.app.log.debug(
                        'Searching only ranked torrents')

            search_params['search_string'] = search_string

            # Check if token is still valid before search
            if not self.login():
                continue

            # throttle: sleep 2 secs between requests to avoid API rate limits
            sleep(2)

            try:
                data = self.session.get(self.urls['api'], params=search_params, random_ua=True).json()
                results += self.parse(data, mode)
            except Exception:
                sickrage.app.log.debug("No data returned from provider")

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    def process_column_header(td):
        # Prefer the icon's title attribute (icon-only header cells), then
        # fall back to the cell text.
        td_title = ''
        if td.img:
            td_title = td.img.get('title', td.get_text(strip=True))
        if not td_title:
            td_title = td.get_text(strip=True)
        return td_title

    with bs4_parser(data) as html:
        torrent_table = html.find('table', id='sortabletable')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # Resolve column positions by header label instead of fixed indexes.
        labels = [process_column_header(label) for label in torrent_rows[0]('td')]

        # Skip column headers
        for result in torrent_rows[1:]:
            try:
                title = result.find('div', class_='tooltip-target').get_text(strip=True)

                # skip if torrent has been nuked due to poor quality
                if title.startswith('Nuked.'):
                    continue

                download_url = result.find(
                    'img', title='Click to Download this Torrent in SSL!').parent['href']
                if not all([title, download_url]):
                    continue

                cells = result('td')

                seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))

                torrent_size = cells[labels.index('Size')].get_text(strip=True)
                size = convert_size(torrent_size, -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find("table", border="1")
        torrent_rows = torrent_table("tr") if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers"
        labels = [label.get_text(strip=True) for label in torrent_rows[0]("td")]

        # Skip the header row; every other row is one release.
        for result in torrent_rows[1:]:
            try:
                cells = result("td")

                # Direct download link and details link live in the same cell.
                link = cells[labels.index("Name")].find("a", href=re.compile(r"download.php\?id="))["href"]
                download_url = urljoin(self.urls['base_url'], link)

                title_element = cells[labels.index("Name")].find("a", href=re.compile(r"details.php\?id="))
                title = title_element.get("title", "") or title_element.get_text(strip=True)

                if not all([title, download_url]):
                    continue

                if self.freeleech:
                    # Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F L]</font>)
                    freeleech = cells[labels.index("Name")].find("font", color="green")
                    # \xa0 is the non-breaking space between F and L
                    if not freeleech or freeleech.get_text(strip=True) != "[F\xa0L]":
                        continue

                seeders = try_int(cells[labels.index("Seeders")].get_text(strip=True))
                leechers = try_int(cells[labels.index("Leechers")].get_text(strip=True))

                torrent_size = cells[labels.index("Size")].get_text(strip=True)
                size = convert_size(torrent_size, -1)

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': ''}

                if mode != "RSS":
                    sickrage.app.log.debug("Found result: {}".format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider.")

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    def process_column_header(td):
        # Header cells may carry their label in the anchor's title attribute;
        # fall back to the cell text.
        result = ''
        if td.a:
            result = td.a.get('title')
        if not result:
            result = td.get_text(strip=True)
        return result

    with bs4_parser(data) as html:
        torrent_table = html.find('table', id='torrenttable')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # Resolve column positions by header label instead of fixed indexes.
        labels = [process_column_header(label) for label in torrent_rows[0]('th')]

        for row in torrent_rows[1:]:
            cells = row('td')

            try:
                name = cells[labels.index('Name')]
                title = name.find('a').get_text(strip=True)

                download_url = row.find('td', class_='quickdownload').find('a')
                if not all([title, download_url]):
                    continue

                download_url = urljoin(self.urls['base_url'], download_url['href'])

                seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
                size = convert_size(cells[labels.index('Size')].get_text(), -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider.")

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS
    :return: A list of items found
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='table-responsive results')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        for result in torrent_rows[1:]:
            cells = result('td')
            if len(cells) < 9:
                continue

            try:
                info = cells[1].find('a')
                title = info.get_text(strip=True)
                download_url = info.get('href')
                if not (title and download_url):
                    continue

                # Rewrite the details link into a direct download link; skip
                # rows whose href carries no numeric torrent id instead of
                # raising AttributeError on a failed match.
                torrent_id = re.search(r'/(\d+)-', download_url)
                if not torrent_id:
                    continue
                download_url = self.urls['download'] % torrent_id.group(1)

                seeders = try_int(cells[7].get_text(strip=True), 0)
                leechers = try_int(cells[8].get_text(strip=True), 0)

                # French provider: sizes are in octet units (O/KO/MO/...)
                torrent_size = cells[5].get_text()
                size = convert_size(torrent_size, -1, ['O', 'KO', 'MO', 'GO', 'TO', 'PO'])

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider.')

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (decoded JSON dict)
    :param mode: search mode
    :return: search results
    """
    results = []

    rows = data.pop('torrents', {})

    if not self._check_auth_from_data(data):
        return results

    for entry in rows:
        try:
            name = entry.pop('title', '')
            info_hash = entry.pop('infoHash', '')
            magnet = 'magnet:?xt=urn:btih:' + info_hash
            # Both the title and the hash must be present to build a result.
            if not (name and info_hash):
                continue

            peers = entry.pop('swarm', {})
            seeders = try_int(peers.pop('seeders', 0))
            leechers = try_int(peers.pop('leechers', 0))

            # Filter unseeded torrent
            if seeders < min(self.minseed, 1):
                if mode != 'RSS':
                    sickrage.app.log.debug(
                        "Discarding torrent because it doesn't meet the minimum "
                        "seeders: {0}. Seeders: {1}".format(
                            name, seeders))
                continue

            size = convert_size(entry.pop('size', -1)) or -1

            results.append({
                'title': name,
                'link': magnet,
                'size': size,
                'seeders': seeders,
                'leechers': leechers,
                'pubdate': None,
            })

            if mode != 'RSS':
                sickrage.app.log.debug('Found result: {}'.format(name))
        except Exception:
            sickrage.app.log.error('Failed parsing provider')

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='torrent_table')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Catégorie, Release, Date, DL, Size, C, S, L
        labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')]

        # Skip column headers
        for result in torrent_rows[1:]:
            try:
                cells = result('td')
                if len(cells) < len(labels):
                    continue

                title = cells[labels.index('Release')].get_text(strip=True)
                download_url = urljoin(self.urls['base_url'],
                                       cells[labels.index('DL')].find('a', class_='tooltip')['href'])
                if not all([title, download_url]):
                    continue

                seeders = try_int(cells[labels.index('S')].get_text(strip=True))
                leechers = try_int(cells[labels.index('L')].get_text(strip=True))

                # Header label is localized: 'Size' (en) or 'Taille' (fr);
                # sizes use octet units (O/KO/MO/...)
                size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')
                units = ['O', 'KO', 'MO', 'GO', 'TO', 'PO']
                size = convert_size(cells[size_index].get_text(), -1, units)

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': ''}

                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (decoded JSON dict)
    :param mode: search mode
    :return: search results
    """
    results = []

    torrents = data.pop('torrents', {})

    if not self._check_auth_from_data(data):
        return results

    for torrent in torrents:
        try:
            title = torrent.pop('title', '')
            info_hash = torrent.pop('infoHash', '')
            # Both title and hash are required to build a usable magnet.
            if not (title and info_hash):
                continue
            download_url = 'magnet:?xt=urn:btih:' + info_hash

            swarm_info = torrent.pop('swarm', {})
            seeders = try_int(swarm_info.pop('seeders', 0))
            leechers = try_int(swarm_info.pop('leechers', 0))

            # Filter unseeded torrent
            if seeders < min(self.minseed, 1):
                if mode != 'RSS':
                    sickrage.app.log.debug("Discarding torrent because it doesn't meet the minimum "
                                           "seeders: {0}. Seeders: {1}".format(title, seeders))
                continue

            size = convert_size(torrent.pop('size', -1)) or -1

            if mode != 'RSS':
                sickrage.app.log.debug('Found result: {}'.format(title))

            results.append({
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers,
                'pubdate': None,
            })
        except Exception:
            sickrage.app.log.error('Failed parsing provider')

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='torrent_table')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Catégorie, Release, Date, DL, Size, C, S, L
        labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')]

        # Skip column headers
        for row in torrent_rows[1:]:
            try:
                cells = row('td')
                if len(cells) < len(labels):
                    continue

                title = cells[labels.index('Release')].get_text(strip=True)
                download = cells[labels.index('DL')].find('a', class_='tooltip')['href']
                download_url = urljoin(self.urls['base_url'], download)
                if not all([title, download_url]):
                    continue

                seeders = try_int(cells[labels.index('S')].get_text(strip=True))
                leechers = try_int(cells[labels.index('L')].get_text(strip=True))

                # Header label is localized: 'Size' (en) or 'Taille' (fr);
                # sizes use octet units (O/KO/MO/...)
                size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')
                units = ['O', 'KO', 'MO', 'GO', 'TO', 'PO']
                size = convert_size(cells[size_index].get_text(), -1, units)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

    return results
def search(self, search_strings, age=0, ep_obj=None):
    """
    Search the provider API once per configured search string.

    :param search_strings: dict mapping search mode to lists of strings
    :param age: unused
    :param ep_obj: episode object; supplies the TVDB id for non-RSS modes
    :return: list of parsed search results
    """
    results = []

    if not self.login():
        return results

    # Base query parameters; mode-specific keys are filled in below.
    search_params = {
        'app_id': 'sickrage',
        'category': 'tv',
        'min_seeders': try_int(self.minseed),
        'min_leechers': try_int(self.minleech),
        'limit': 100,
        'format': 'json_extended',
        'ranked': try_int(self.ranked),
        'token': self.token,
        'sort': 'last',
        'mode': 'list',
    }

    for mode in search_strings:
        sickrage.app.log.debug("Search Mode: %s" % mode)

        if mode == 'RSS':
            search_params['search_string'] = None
            search_params['search_tvdb'] = None
        else:
            search_params['sort'] = self.sorting or 'seeders'
            search_params['mode'] = 'search'
            search_params['search_tvdb'] = ep_obj.show.indexerid

        for search_string in search_strings[mode]:
            if mode != 'RSS':
                sickrage.app.log.debug("Search string: %s " % search_string)
                if self.ranked:
                    sickrage.app.log.debug('Searching only ranked torrents')

            search_params['search_string'] = search_string

            # Check if token is still valid before search
            if not self.login():
                continue

            # sleep 5 secs per request
            sleep(5)

            try:
                data = sickrage.app.wsession.get(self.urls['api'], params=search_params).json()
            except Exception:
                sickrage.app.log.debug("No data returned from provider")
            else:
                results += self.parse(data, mode)

    return results
def parse(self, data, mode):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS
    :return: A list of items found
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='table table-striped')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Skip column headers
        for result in torrent_rows[1:]:
            cells = result('td')
            # The row is read up to cells[5] (leechers), so require at least
            # six cells; the old `< 5` check let 5-cell rows raise IndexError.
            if len(cells) < 6:
                continue

            try:
                title = cells[0].find('a', class_='torrent-name').get_text(strip=True)
                download_url = urljoin(self.urls['base_url'], cells[0].find('a', target='_blank')['href'])
                if not (title and download_url):
                    continue

                seeders = try_int(cells[4].get_text(strip=True), 1)
                leechers = try_int(cells[5].get_text(strip=True), 0)

                torrent_size = cells[3].get_text()
                size = convert_size(torrent_size, -1)

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }

                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error('Failed parsing provider.')

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: raw RSS response body
    :param mode: search mode
    :return: search results
    """
    results = []

    if not data.startswith("<rss"):
        sickrage.app.log.info("Expected rss but got something else, is your mirror failing?")
        return results

    feed = feedparser.parse(data)
    for entry in feed.entries:
        try:
            title, download_url = entry.title, entry.link
            if not (title and download_url):
                continue

            # Seeders/leechers/size are embedded in the description text.
            match = self.regex.search(entry.description)
            if not match:
                continue

            seeders = try_int(match.group("seeders"))
            leechers = try_int(match.group("leechers"))

            category = entry.category
            if category != 'all':
                sickrage.app.log.warning(
                    'skytorrents.in has added categories! Please report this so it can be updated: Category={cat}, '
                    'Title={title}'.format(cat=category, title=title))

            size = convert_size(match.group('size'), -1)

            # The link path is .../<info_hash>/<name>; a too-short path just
            # leaves the hash empty.
            try:
                info_hash = download_url.rsplit('/', 2)[1]
            except IndexError:
                info_hash = ''

            found = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                     'leechers': leechers, 'hash': info_hash}

            if mode != "RSS":
                sickrage.app.log.debug("Found result: {}".format(title))

            results.append(found)
        except Exception:
            sickrage.app.log.error("Failed parsing provider")

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find(class_='ttable_headinner')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if at least one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug('Data returned from provider does not contain any torrents')
            return results

        # Catégorie, Release, Date, DL, Size, C, S, L
        labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]

        # Skip column headers
        for result in torrent_rows[1:]:
            try:
                cells = result('td')
                if len(cells) < len(labels):
                    continue

                # Extract the numeric torrent id from the details link; skip
                # the row instead of raising when the href has no id.
                # (Renamed from `id`, which shadowed the builtin.)
                id_match = re.search('id=([0-9]+)', cells[labels.index('Nom')].find('a')['href'])
                if not id_match:
                    continue
                torrent_id = id_match.group(1)

                title = cells[labels.index('Nom')].get_text(strip=True)
                download_url = urljoin(self.urls['download'], '?id={0}&name={1}'.format(torrent_id, title))
                if not all([title, download_url]):
                    continue

                seeders = try_int(cells[labels.index('S')].get_text(strip=True))
                leechers = try_int(cells[labels.index('L')].get_text(strip=True))

                # Header label is localized: 'Size' (en) or 'Taille' (fr)
                size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')
                torrent_size = cells[size_index].get_text()
                size = convert_size(torrent_size, -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as soup:
        torrent_table = soup.find('table', class_='listing')
        torrent_rows = torrent_table('tr') if torrent_table else []

        # Continue only if one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # Each release spans two rows (desc-top / desc-bot); skip a leading
        # single-cell header row if present so the pairs line up.
        a = 1 if len(torrent_rows[0]('td')) < 2 else 0

        for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]):
            try:
                title = download_url = ""
                desc_top = top.find('td', class_='desc-top')
                if desc_top:
                    title = desc_top.get_text(strip=True)
                    download_url = desc_top.find('a')['href']

                if not all([title, download_url]):
                    continue

                stats = bot.find('td', class_='stats').get_text(strip=True)
                sl = re.match(r'S:(?P<seeders>\d+)L:(?P<leechers>\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', ''))
                if not sl:
                    # Unexpected stats layout - skip rather than raise
                    continue

                seeders = try_int(sl.group('seeders'))
                leechers = try_int(sl.group('leechers'))

                desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True)
                # Take the text after the 'Size:' label. The previous
                # str.strip('Size: ') stripped a *character set* from both
                # ends and could eat leading digits or trailing unit letters.
                size = convert_size(desc_bottom.split('|')[1].split(':', 1)[-1].strip(), -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def parse(self, data, mode, **kwargs):
    """
    Parse search results from data

    :param data: response data (decoded JSON dict)
    :param mode: search mode
    :return: search results
    """
    results = []

    error_code = data.pop('error', {})
    if error_code.get('code'):
        if error_code.get('code') != 2:
            # Fix: log.warning('{0}', descr) passed the message and value as
            # two arguments, so the literal '{0}' was logged; log the
            # description itself.
            sickrage.app.log.warning(error_code.get('descr', 'Error code 2 - no description available'))
        return results

    account_ok = data.pop('user', {}).get('can_leech')
    if not account_ok:
        sickrage.app.log.warning('Sorry, your account is not allowed to download, check your ratio')
        return results

    torrent_rows = data.pop('torrents', {})
    if not torrent_rows:
        sickrage.app.log.debug('Provider has no results for this search')
        return results

    for row in torrent_rows:
        try:
            title = row.get('name')
            download_url = row.get('download_link')
            if not all([title, download_url]):
                continue

            seeders = try_int(row.get('seeders'))
            leechers = try_int(row.get('leechers'))
            # API reports size in bytes already; no unit conversion needed.
            size = try_int(row.get('size'), -1)

            results += [
                {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
            ]

            if mode != 'RSS':
                sickrage.app.log.debug("Found result: {}".format(title))
        except Exception:
            sickrage.app.log.error("Failed parsing provider.")

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_rows = html.find_all('tr', class_='torrent')

        if len(torrent_rows) < 1:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        for result in torrent_rows:
            try:
                # skip if torrent has been nuked due to poor quality
                if result.find('img', alt='Nuked'):
                    continue

                download_url = urljoin(self.urls['base_url'] + '/', result.find('span', title='Download').parent['href'])
                title = result.find('a', title='View torrent').get_text(strip=True)
                if not all([title, download_url]):
                    continue

                # number_column cells: [0] size, [1] seeders, [2] leechers
                seeders = try_int(result('td', class_="number_column")[1].text, 0)
                leechers = try_int(result('td', class_="number_column")[2].text, 0)

                # Only convert the size cell when its text actually looks
                # like a size (e.g. "1.2 GB"); otherwise report unknown (-1).
                size = -1
                if re.match(r'\d+([,.]\d+)?\s*[KkMmGgTt]?[Bb]', result('td', class_="number_column")[0].text):
                    size = convert_size(result('td', class_="number_column")[0].text.strip(), -1)

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': ''}

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_rows = html.find_all('tr')
        for row in torrent_rows:
            for torrent in row.find_all('td'):
                for link in torrent.find_all('a'):
                    try:
                        # The category icon preceding the link carries the
                        # file type in its CSS classes.
                        fileType = ''.join(link.find_previous('i')["class"])
                        # Fix: the old code .encode()d the normalized string,
                        # producing bytes that can never equal the str
                        # "Series" on Python 3, so no result ever matched.
                        # NOTE(review): if this still targets Python 2,
                        # confirm the str comparison holds there too.
                        fileType = unicodedata.normalize('NFKD', fileType)

                        if fileType == "Series":
                            title = link.get_text(strip=True)
                            download_url = self.get_download_url(link.get('href'))
                            if not all([title, download_url]):
                                continue

                            # size
                            size = convert_size(link.findNext('td').text, -1)

                            # Filter unseeded torrent
                            seeders = try_int(link.find_next('img', alt='seeders').parent.text, 0)
                            leechers = try_int(link.find_next('img', alt='leechers').parent.text, 0)

                            if mode != 'RSS':
                                sickrage.app.log.debug("Found result: {}".format(title))

                            results += [{
                                'title': title,
                                'link': download_url,
                                'size': size,
                                'seeders': seeders,
                                'leechers': leechers,
                            }]
                    except Exception:
                        sickrage.app.log.error("Failed parsing provider")

    return results
def get_indexer_absolute_numbering(indexer_id, indexer, sceneAbsoluteNumber, fallback_to_xem=True, scene_season=None):
    """
    Returns a tuple, (season, episode, absolute_number) with the TVDB absolute numbering for (sceneAbsoluteNumber)
    (this works like the reverse of get_absolute_numbering)
    """
    if indexer_id is None or sceneAbsoluteNumber is None:
        return sceneAbsoluteNumber

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    # First explicit scene-numbering mapping that matches wins.
    for entry in sickrage.app.main_db.get_many('scene_numbering', indexer_id):
        if entry['indexer'] != indexer or entry['scene_absolute_number'] != sceneAbsoluteNumber:
            continue
        if scene_season is not None and entry['scene_season'] != scene_season:
            continue
        return try_int(entry.get("absolute_number"))

    # No explicit mapping: optionally fall back to the XEM mapping.
    if fallback_to_xem:
        return get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNumber, scene_season)

    return sceneAbsoluteNumber
def get_absolute_number_from_season_and_episode(show, season, episode):
    """
    Find the absolute number for a show episode

    :param show: Show object
    :param season: Season number
    :param episode: Episode number
    :return: The absolute number, or None if it cannot be determined
    """
    if not (season and episode):
        return None

    matches = [ep for ep in sickrage.app.main_db.get_many('tv_episodes', show.indexerid)
               if ep['season'] == season and ep['episode'] == episode]

    # Only trust the lookup when it is unambiguous.
    if len(matches) == 1:
        absolute_number = try_int(matches[0].get("absolute_number"))
        sickrage.app.log.debug(
            "Found absolute number %s for show %s S%02dE%02d" % (absolute_number, show.name, season, episode))
        return absolute_number

    sickrage.app.log.debug(
        "No entries for absolute number for show %s S%02dE%02d" % (show.name, season, episode))
    return None
def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNumber, scene_season=None):
    """
    Reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering

    :param indexer_id: int
    :param sceneAbsoluteNumber: int
    :return: int
    """
    if indexer_id is None or sceneAbsoluteNumber is None:
        return sceneAbsoluteNumber

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    # Make sure the XEM mappings are present before querying them.
    xem_refresh(indexer_id, indexer)

    # First episode whose XEM scene numbering matches wins.
    for entry in sickrage.app.main_db.get_many('tv_episodes', indexer_id):
        if entry['indexer'] != indexer or entry['scene_absolute_number'] != sceneAbsoluteNumber:
            continue
        if scene_season is not None and entry['scene_season'] != scene_season:
            continue
        return try_int(entry.get('absolute_number'))

    return sceneAbsoluteNumber
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (HTML)
    :param mode: search mode
    :return: search results
    """
    results = []

    with bs4_parser(data) as html:
        torrent_table = html.find('table', attrs={'id': 'torrenttable'})
        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

        # Continue only if one Release is found
        if len(torrent_rows) < 2:
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        # Reuse the rows already collected above instead of running
        # find_all('tr') against the table a second time.
        for result in torrent_rows[1:]:
            try:
                title = result.find("td", class_="name").find("a").get_text(strip=True)
                download_url = urljoin(self.urls['base_url'],
                                       result.find("td", class_="quickdownload").find("a")["href"])
                if not all([title, download_url]):
                    continue

                seeders = try_int(result.find('td', attrs={'class': 'seeders'}).text, 0)
                leechers = try_int(result.find('td', attrs={'class': 'leechers'}).text, 0)

                # Only convert the size cell when its text actually looks
                # like a size; otherwise report unknown (-1).
                size = -1
                if re.match(r'\d+([,.]\d+)?\s*[KkMmGgTt]?[Bb]', result('td', class_="listcolumn")[1].text):
                    size = convert_size(result('td', class_="listcolumn")[1].text.strip(), -1)

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': ''}

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider.")

    return results
def search(self, search_strings, age=0, ep_obj=None, **kwargs):
    """
    Search a provider and parse the results.

    :param search_strings: A dict with mode (key) and the search value (value)
    :param age: Not used
    :param ep_obj: Not used
    :returns: A list of search results (structure)
    """
    results = []

    # Base query parameters; 'q', 'fmt' and 'pg' are set per request.
    search_params = {
        'q': '* category:TV',
        's': 'dt',
        'v': 't',
        'sd': 'd',
    }

    for mode in search_strings:
        sickrage.app.log.debug('Search mode: {}'.format(mode))

        for search_string in search_strings[mode]:
            if mode != 'RSS':
                sickrage.app.log.debug('Search string: {}'.format(search_string))

            search_params['q'] = '{} category:TV'.format(search_string)
            search_params['fmt'] = 'rss'

            # Walk result pages 1..10, stopping early when the provider
            # returns nothing or the feed is exhausted.
            for page in range(1, 11):
                search_params['pg'] = page

                data = self.cache.get_rss_feed(self.urls['search'], params=search_params)
                if not data or not data.get('feed'):
                    sickrage.app.log.debug('No data returned from provider')
                    break

                results += self.parse(data, mode)

                total_results = try_int(data['feed'].get('opensearch_totalresults'))
                start_index = try_int(data['feed'].get('opensearch_startindex'))
                items_per_page = try_int(data['feed'].get('opensearch_itemsperpage'))

                if not total_results or start_index + items_per_page > total_results:
                    break

    return results
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (iterable of feed entries)
    :param mode: search mode
    :return: search results
    """
    results = []

    for entry in data:
        try:
            title = entry['title']
            download_url = entry['link']
            if not (title and download_url):
                continue

            seeders = try_int(entry['nyaa_seeders'])
            leechers = try_int(entry['nyaa_leechers'])

            # Filter unseeded torrent
            if seeders < min(self.minseed, 1):
                if mode != 'RSS':
                    sickrage.app.log.debug("Discarding torrent because it doesn't meet the "
                                           "minimum seeders: {}. Seeders: {}".format(title, seeders))
                continue

            size = convert_size(entry['nyaa_size'], -1, units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

            # Use a fresh name for the result dict rather than rebinding the
            # loop variable.
            found = {
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers
            }

            if mode != 'RSS':
                sickrage.app.log.debug('Found result: {}'.format(title))

            results.append(found)
        except Exception:
            sickrage.app.log.error('Failed parsing provider')

    return results
def find_scene_numbering(indexer_id, indexer, season, episode):
    """
    Same as get_scene_numbering(), but returns None if scene numbering is not set
    """
    if indexer_id is None or season is None or episode is None:
        return season, episode

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    # Return the first non-zero scene mapping for this episode, if any;
    # falls through to an implicit None when nothing matches.
    for entry in sickrage.app.main_db.get_many('scene_numbering', indexer_id):
        if (entry['indexer'] == indexer
                and entry['season'] == season
                and entry['episode'] == episode
                and entry['scene_season'] != 0
                and entry['scene_episode'] != 0):
            return try_int(entry.get("scene_season")), try_int(entry.get("scene_episode"))
def parse(self, data, mode):
    """
    Parse search results from data

    :param data: response data (decoded JSON dict keyed by result id)
    :param mode: search mode
    :return: search results
    """
    results = []

    if not (data and "total_found" in data and int(data["total_found"]) > 0):
        sickrage.app.log.debug("Data returned from provider does not contain any torrents")
        return results

    # Drop the bookkeeping key so only torrent entries remain.
    del data["total_found"]

    for torrent in data.values():
        try:
            title = torrent["title"]
            seeders = try_int(torrent["seeds"], 1)
            leechers = try_int(torrent["leechs"], 0)
            t_hash = torrent["torrent_hash"]
            torrent_size = torrent["torrent_size"]
            if not (t_hash and torrent_size):
                continue

            download_url = torrent["magnet"]
            size = convert_size(torrent_size, -1)

            if not (title and download_url):
                continue

            found = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                     'leechers': leechers, 'hash': t_hash}

            if mode != 'RSS':
                sickrage.app.log.debug("Found result: {}".format(title))

            results.append(found)
        except Exception:
            sickrage.app.log.error("Failed parsing provider.")

    return results