def search(self, search_params, age=0, ep_obj=None):
    """Search the provider and return a list of result dicts.

    :param search_params: dict mapping a search mode (e.g. 'RSS') to a list
        of search strings.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of dicts with keys 'title', 'link', 'size', 'seeders',
        'leechers', 'hash', sorted by seeders (descending) per mode.
    """
    results = []

    for mode in search_params:
        items = []
        logger.debug(_(f'Search Mode: {mode}'))
        for search_string in search_params[mode]:
            if search_string == '':
                continue

            # The site expects space-separated terms, not dots.
            search_string = str(search_string).replace('.', ' ')
            logger.debug(_(f'Search String: {search_string}'))

            last_page = False
            for page in range(0, self.max_pages):
                if last_page:
                    break

                logger.debug('Processing page {0} of results'.format(page))
                search_url = self.urls['search'].format(search_string, page)
                data = self.get_url(search_url, returns='text')
                if not data:
                    logger.debug(_('No data returned from provider'))
                    continue

                try:
                    with BS4Parser(data, 'html5lib') as html:
                        table_header = html.find('tr', class_='bordo')
                        torrent_table = table_header.find_parent('table') if table_header else None
                        if not torrent_table:
                            # FIX: logger.exception must only be called from an
                            # exception handler; outside one it logs a spurious
                            # "NoneType: None" traceback. Use logger.error here.
                            logger.error('Could not find table of torrents')
                            continue

                        torrent_rows = torrent_table('tr')

                        # Continue only if one Release is found
                        if len(torrent_rows) < 6 or len(torrent_rows[2]('td')) == 1:
                            logger.debug('Data returned from provider does not contain any torrents')
                            last_page = True
                            continue

                        # Fewer than a full page of rows means this is the last page.
                        if len(torrent_rows) < 45:
                            last_page = True

                        # Skip header rows (first 2) and footer rows (last 3).
                        for result in torrent_rows[2:-3]:
                            result_cols = result('td')
                            if len(result_cols) == 1:
                                # Ignore empty rows in the middle of the table
                                continue
                            try:
                                title = result('td')[1].get_text(strip=True)
                                torrent_size = result('td')[2].get_text(strip=True)
                                info_hash = result('td')[3].find('input', class_='downarrow')['value'].upper()
                                download_url = self._magnet_from_result(info_hash, title)
                                seeders = try_int(result('td')[5].get_text(strip=True))
                                leechers = try_int(result('td')[6].get_text(strip=True))
                                size = convert_size(torrent_size) or -1
                            except (AttributeError, IndexError, TypeError):
                                # Malformed row; skip it.
                                continue

                            # Inject a quality tag derived from the row when the
                            # title itself carries an HD marker from self.hdtext.
                            filename_qt = self._reverseQuality(self._episodeQuality(result))
                            for text in self.hdtext:
                                title1 = title
                                title = title.replace(text, filename_qt)
                                if title != title1:
                                    break

                            if Quality.nameQuality(title) == Quality.UNKNOWN:
                                title += filename_qt

                            if not self._is_italian(title) and not self.subtitle:
                                logger.debug('Torrent is subtitled, skipping: {0}'.format(title))
                                continue

                            if self.engrelease and not self._is_english(title):
                                logger.debug('Torrent isn\'t english audio/subtitled, skipping: {0}'.format(title))
                                continue

                            # NOTE(review): in these regexes the braces sit inside a
                            # character class, so '{', '1', ',', '2', '}' are literal
                            # characters; presumably [Ss]\d{1,2} was intended —
                            # preserved as-is to avoid changing matching behavior.
                            search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0]
                            show_title = search_show
                            ep_params = ''
                            rindex = re.search(r'([Ss][\d{1,2}]+)', title)
                            if rindex:
                                show_title = title[:rindex.start()]
                                ep_params = title[rindex.start():]
                            # Normalize the show part of the title to the searched name
                            # when the scraped title merely extends it.
                            if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower():
                                new_title = search_show + ep_params
                                title = new_title

                            if not all([title, download_url]):
                                continue

                            if self._is_season_pack(title):
                                title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                logger.debug('Discarding torrent because it doesn\'t meet the minimum seeders or leechers: {0} (S:{1} L:{2})'.format(title, seeders, leechers))
                                continue

                            item = {
                                'title': title,
                                'link': download_url,
                                'size': size,
                                'seeders': seeders,
                                'leechers': leechers,
                                'hash': info_hash
                            }
                            if mode != 'RSS':
                                logger.debug(_(f'Found result: {title} with {seeders} seeders and {leechers} leechers'))

                            items.append(item)
                except Exception as error:
                    logger.exception('Failed parsing provider. Error: {0}'.format(error))

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
def search(self, search_params, age=0, ep_obj=None):
    """Search the provider (login required) and return a list of result dicts.

    :param search_params: dict mapping a search mode (e.g. 'RSS') to a list
        of search strings.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of dicts with keys 'title', 'link', 'size', 'seeders',
        'leechers', 'hash' (always empty here), sorted by seeders
        (descending) per mode. Empty list if login fails.
    """
    results = []
    if not self.login():
        return results

    self.categories = "cat=" + str(self.cat)

    for mode in search_params:
        items = []
        logger.debug(_(f"Search Mode: {mode}"))
        for search_string in search_params[mode]:
            if mode == 'RSS':
                # RSS only walks the first two pages.
                self.page = 2

            last_page = 0
            y = int(self.page)

            if search_string == '':
                continue

            # The site expects space-separated terms, not dots.
            search_string = str(search_string).replace('.', ' ')

            for x in range(0, y):
                # Pagination offset: 20 results per page.
                z = x * 20
                if last_page:
                    break

                if mode != 'RSS':
                    search_url = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string)
                else:
                    search_url = self.urls['search_page'].format(z, self.categories)

                if mode != 'RSS':
                    logger.debug(_(f"Search String: {search_string}"))

                data = self.get_url(search_url, returns='text')
                if not data:
                    logger.debug("No data returned from provider")
                    continue

                try:
                    with BS4Parser(data, 'html5lib') as html:
                        torrent_table = html.find('table', class_='copyright')
                        torrent_rows = torrent_table('tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 3:
                            logger.debug("Data returned from provider does not contain any torrents")
                            last_page = 1
                            continue

                        # Fewer than a full page of rows means this is the last page.
                        if len(torrent_rows) < 42:
                            last_page = 1

                        # Skip the two header rows.
                        for result in torrent_table('tr')[2:]:
                            try:
                                link = result.find('td').find('a')
                                title = link.string
                                download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:]
                                leechers = result('td')[3]('td')[0].text
                                leechers = int(leechers.strip('[]'))
                                seeders = result('td')[3]('td')[1].text
                                seeders = int(seeders.strip('[]'))
                                # Size is listed bare; the site reports it in GB.
                                torrent_size = result('td')[3]('td')[3].text.strip('[]') + " GB"
                                size = convert_size(torrent_size) or -1
                            except (AttributeError, TypeError):
                                # Malformed row; skip it.
                                continue

                            # Inject a quality tag derived from the row when the
                            # title itself carries an HD marker from self.hdtext.
                            filename_qt = self._reverseQuality(self._episodeQuality(result))
                            for text in self.hdtext:
                                title1 = title
                                title = title.replace(text, filename_qt)
                                if title != title1:
                                    break

                            if Quality.nameQuality(title) == Quality.UNKNOWN:
                                title += filename_qt

                            if not self._is_italian(result) and not self.subtitle:
                                logger.debug("Torrent is subtitled, skipping: {0} ".format(title))
                                continue

                            if self.engrelease and not self._is_english(result):
                                logger.debug("Torrent isnt english audio/subtitled , skipping: {0} ".format(title))
                                continue

                            # NOTE(review): in these regexes the braces sit inside a
                            # character class, so '{', '1', ',', '2', '}' are literal
                            # characters; presumably [Ss]\d{1,2} was intended —
                            # preserved as-is to avoid changing matching behavior.
                            search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0]
                            show_title = search_show
                            # FIX: initialize ep_params unconditionally (matches the
                            # sibling search() implementations); previously it was
                            # only bound inside the `if rindex:` branch.
                            ep_params = ''
                            rindex = re.search(r'([Ss][\d{1,2}]+)', title)
                            if rindex:
                                show_title = title[:rindex.start()]
                                ep_params = title[rindex.start():]
                            # Normalize the show part of the title to the searched name
                            # when the scraped title merely extends it.
                            if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower():
                                new_title = search_show + ep_params
                                title = new_title

                            if not all([title, download_url]):
                                continue

                            if self._is_season_pack(title):
                                title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    logger.debug("Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                                continue

                            item = {
                                'title': title,
                                'link': download_url,
                                'size': size,
                                'seeders': seeders,
                                'leechers': leechers,
                                'hash': ''
                            }
                            if mode != 'RSS':
                                logger.debug("Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers))

                            items.append(item)
                except Exception:
                    logger.exception("Failed parsing provider. Traceback: {0}".format(traceback.format_exc()))

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
        results += items

    return results
def search(self, search_params, age=0, ep_obj=None):
    """Search the provider and return a list of result dicts.

    :param search_params: dict mapping a search mode (e.g. "RSS") to a list
        of search strings.
    :param age: unused, kept for interface compatibility.
    :param ep_obj: unused, kept for interface compatibility.
    :return: list of dicts with keys "title", "link", "size", "seeders",
        "leechers", "hash", sorted by seeders (descending) per mode.
    """
    results = []

    for mode in search_params:
        items = []
        logger.debug(_("Search Mode: {mode}".format(mode=mode)))
        for search_string in search_params[mode]:
            if search_string == "":
                continue

            # The site expects space-separated terms, not dots.
            search_string = str(search_string).replace(".", " ")
            logger.debug(
                _("Search String: {search_string}".format(search_string=search_string)))

            last_page = False
            for page in range(0, self.max_pages):
                if last_page:
                    break

                logger.debug("Processing page {0} of results".format(page))
                search_url = self.urls["search"].format(search_string, page)
                data = self.get_url(search_url, returns="text")
                if not data:
                    logger.debug(_("No data returned from provider"))
                    continue

                try:
                    with BS4Parser(data, "html5lib") as html:
                        table_header = html.find("tr", class_="bordo")
                        torrent_table = table_header.find_parent("table") if table_header else None
                        if not torrent_table:
                            # FIX: logger.exception must only be called from an
                            # exception handler; outside one it logs a spurious
                            # "NoneType: None" traceback. Use logger.error here.
                            logger.error("Could not find table of torrents")
                            continue

                        torrent_rows = torrent_table("tr")

                        # Continue only if one Release is found
                        if len(torrent_rows) < 6 or len(torrent_rows[2]("td")) == 1:
                            logger.debug("Data returned from provider does not contain any torrents")
                            last_page = True
                            continue

                        # Fewer than a full page of rows means this is the last page.
                        if len(torrent_rows) < 45:
                            last_page = True

                        # Skip header rows (first 2) and footer rows (last 3).
                        for result in torrent_rows[2:-3]:
                            result_cols = result("td")
                            if len(result_cols) == 1:
                                # Ignore empty rows in the middle of the table
                                continue
                            try:
                                title = result("td")[1].get_text(strip=True)
                                torrent_size = result("td")[2].get_text(strip=True)
                                info_hash = result("td")[3].find("input", class_="downarrow")["value"].upper()
                                download_url = self._magnet_from_result(info_hash, title)
                                seeders = try_int(result("td")[5].get_text(strip=True))
                                leechers = try_int(result("td")[6].get_text(strip=True))
                                size = convert_size(torrent_size) or -1
                            except (AttributeError, IndexError, TypeError):
                                # Malformed row; skip it.
                                continue

                            # Inject a quality tag derived from the row when the
                            # title itself carries an HD marker from self.hdtext.
                            filename_qt = self._reverseQuality(self._episodeQuality(result))
                            for text in self.hdtext:
                                title1 = title
                                title = title.replace(text, filename_qt)
                                if title != title1:
                                    break

                            if Quality.nameQuality(title) == Quality.UNKNOWN:
                                title += filename_qt

                            if not self._is_italian(title) and not self.subtitle:
                                logger.debug("Torrent is subtitled, skipping: {0}".format(title))
                                continue

                            if self.engrelease and not self._is_english(title):
                                logger.debug("Torrent isn't english audio/subtitled, skipping: {0}".format(title))
                                continue

                            # NOTE(review): in these regexes the braces sit inside a
                            # character class, so '{', '1', ',', '2', '}' are literal
                            # characters; presumably [Ss]\d{1,2} was intended —
                            # preserved as-is to avoid changing matching behavior.
                            search_show = re.split(r"([Ss][\d{1,2}]+)", search_string)[0]
                            show_title = search_show
                            ep_params = ""
                            rindex = re.search(r"([Ss][\d{1,2}]+)", title)
                            if rindex:
                                show_title = title[:rindex.start()]
                                ep_params = title[rindex.start():]
                            # Normalize the show part of the title to the searched name
                            # when the scraped title merely extends it.
                            if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower():
                                new_title = search_show + ep_params
                                title = new_title

                            if not all([title, download_url]):
                                continue

                            if self._is_season_pack(title):
                                title = re.sub(r"([Ee][\d{1,2}\-?]+)", "", title)

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                logger.debug("Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                                continue

                            item = {
                                "title": title,
                                "link": download_url,
                                "size": size,
                                "seeders": seeders,
                                "leechers": leechers,
                                "hash": info_hash
                            }
                            if mode != "RSS":
                                logger.debug(
                                    _("Found result: {title} with {seeders} seeders and {leechers} leechers".format(
                                        title=title, seeders=seeders, leechers=leechers)))

                            items.append(item)
                except Exception as error:
                    logger.exception("Failed parsing provider. Error: {0}".format(error))

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get("seeders", 0)), reverse=True)
        results += items

    return results