def provider_name_auto_numbered(new_provider):
    """
    Make the provider's name produce a unique provider id, autonumbering if needed.

    If the id derived from the provider name is already taken, append ``_1``.
    If that is taken as well, keep bumping the suffix (``_2``, ``_3``, ...)
    until the id is unique.

    :param new_provider: Provider object.
    :return: The provider, renamed so its id is unique.
    """
    counter = 1
    while new_provider.get_id() in (existing.get_id() for existing in providers.sorted_provider_list()):
        numbered_suffix = re.match(r'(.+_)(\d+)$', new_provider.name)
        if numbered_suffix:
            # The name already carries a number suffix; bump the counter and
            # swap the suffix for the new value.
            counter += 1
            new_provider.name = f'{numbered_suffix.group(1)}{counter}'
        else:
            # First collision: start the numbering by appending the counter.
            new_provider.name = f'{new_provider.name}_{counter}'
    return new_provider
def enabled_providers(search_type):
    """Return providers based on search type: daily, backlog and manual search."""
    from medusa import providers
    attr_name = 'enable_{}'.format(search_type)
    selected = []
    for candidate in providers.sorted_provider_list(app.RANDOMIZE_PROVIDERS):
        if not candidate.is_active():
            continue
        if candidate.get_id() in app.BROKEN_PROVIDERS:
            continue
        # Provider must expose the flag for this search type, and it must be on.
        if hasattr(candidate, attr_name) and getattr(candidate, attr_name):
            selected.append(candidate)
    return selected
def enabled_providers(search_type):
    """Return providers based on search type: daily, backlog and manual search."""
    from medusa import providers

    def _is_wanted(provider):
        # Active, not flagged broken, and has the per-search-type flag enabled.
        attr_name = 'enable_{}'.format(search_type)
        return (provider.is_active()
                and provider.get_id() not in app.BROKEN_PROVIDERS
                and hasattr(provider, attr_name)
                and getattr(provider, attr_name))

    return [provider
            for provider in providers.sorted_provider_list(app.RANDOMIZE_PROVIDERS)
            if _is_wanted(provider)]
def _migrate_v11(self):
    """Convert all ratio values for torrent providers when '' -> -1."""
    from medusa.providers.generic_provider import GenericProvider
    for provider in providers.sorted_provider_list():
        # Only torrent providers carry a seed ratio setting.
        if provider.provider_type != GenericProvider.TORRENT:
            continue
        if provider.ratio == '':
            # Empty string means "no ratio configured": normalize to -1.
            provider.ratio = -1
        elif isinstance(provider.ratio, str):
            # NOTE(review): int() will raise on non-integer strings like
            # '1.5' — confirm ratios were always stored as whole numbers.
            provider.ratio = int(provider.ratio)
def saveProviders(self, provider_order, **kwargs):
    """
    Save Provider related settings.

    :param provider_order: space-separated string of '<provider_id>:<0|1>'
        tokens giving the provider order and enabled state.
    :param kwargs: remaining per-provider form fields; the packed
        newznab/torrentrss/torznab definition strings are popped out first.
    """
    # Custom provider definitions travel as packed strings; pop them so they
    # are not forwarded to the generic per-provider setting handlers below.
    newznab_string = kwargs.pop('newznab_string', '')
    torrentrss_string = kwargs.pop('torrentrss_string', '')
    torznab_string = kwargs.pop('torznab_string', '')
    self._save_newznab_providers(newznab_string)
    self._save_rsstorrent_providers(torrentrss_string)
    self._save_torznab_providers(torznab_string)

    def ordered_providers(names, providers):
        # Yield providers in the order their ids appear in `names`,
        # then any providers that were never named.
        reminder = {}
        for name in names:
            for provider in providers:
                reminder[provider.get_id()] = provider
                if provider.get_id() == name:
                    yield provider
        # NOTE: this `else` is a for-else on the outer loop; since the loop
        # has no `break`, it always runs after the names are exhausted.
        else:
            # Leftovers: iterating a set, so their relative order is not
            # guaranteed.
            rest = set(reminder).difference(set(names))
            for provider in rest:
                yield reminder[provider]

    # Parse '<id>:<enabled>' tokens into an ordered id -> enabled-int map.
    ordered_names = OrderedDict()
    provider_order_list = provider_order.split()
    for provider_setting in provider_order_list:
        cur_provider, cur_setting = provider_setting.split(':')
        enabled = try_int(cur_setting)
        ordered_names[cur_provider] = enabled

    providers_enabled = []
    providers_disabled = []
    all_providers = providers.sorted_provider_list()

    for provider in ordered_providers(ordered_names, all_providers):
        name = provider.get_id()
        if ordered_names.get(name):
            provider.enabled = True
            providers_enabled.append(name)
        else:
            # Unlisted providers (missing from ordered_names) also land here.
            provider.enabled = False
            providers_disabled.append(name)

        # Apply the remaining per-provider form fields.
        self._set_common_settings(provider, **kwargs)
        if isinstance(provider, TorrentProvider):
            self._set_torrent_settings(provider, **kwargs)

    # Persist the order: enabled providers first, then the disabled ones.
    app.PROVIDER_ORDER = providers_enabled + providers_disabled
    app.instance.save_config()
    ui.notifications.message('Configuration Saved', os.path.join(app.CONFIG_FILE))
    return self.redirect('/config/providers/')
def saveProviders(self, provider_order, **kwargs):
    """
    Save Provider related settings.

    :param provider_order: space-separated string of '<provider_id>:<0|1>'
        tokens giving the provider order and enabled state.
    :param kwargs: remaining per-provider form fields; the packed
        newznab/torrentrss/torznab definition strings are popped out first.
    """
    # Custom provider definitions travel as packed strings; pop them so they
    # are not forwarded to the generic per-provider setting handlers below.
    newznab_string = kwargs.pop('newznab_string', '')
    torrentrss_string = kwargs.pop('torrentrss_string', '')
    torznab_string = kwargs.pop('torznab_string', '')
    self._save_newznab_providers(newznab_string)
    self._save_rsstorrent_providers(torrentrss_string)
    self._save_torznab_providers(torznab_string)

    def ordered_providers(names, providers):
        # Yield providers in the order their ids appear in `names`,
        # then any providers that were never named.
        reminder = {}
        for name in names:
            for provider in providers:
                reminder[provider.get_id()] = provider
                if provider.get_id() == name:
                    yield provider
        # NOTE: this `else` is a for-else on the outer loop; since the loop
        # has no `break`, it always runs after the names are exhausted.
        else:
            # Leftovers: iterating a set, so their relative order is not
            # guaranteed.
            rest = set(reminder).difference(set(names))
            for provider in rest:
                yield reminder[provider]

    # Parse '<id>:<enabled>' tokens into an ordered id -> enabled-int map.
    ordered_names = OrderedDict()
    provider_order_list = provider_order.split()
    for provider_setting in provider_order_list:
        cur_provider, cur_setting = provider_setting.split(':')
        enabled = try_int(cur_setting)
        ordered_names[cur_provider] = enabled

    providers_enabled = []
    providers_disabled = []
    all_providers = providers.sorted_provider_list()

    for provider in ordered_providers(ordered_names, all_providers):
        name = provider.get_id()
        if ordered_names.get(name):
            provider.enabled = True
            providers_enabled.append(name)
        else:
            # Unlisted providers (missing from ordered_names) also land here.
            provider.enabled = False
            providers_disabled.append(name)

        # Apply the remaining per-provider form fields.
        self._set_common_settings(provider, **kwargs)
        if isinstance(provider, TorrentProvider):
            self._set_torrent_settings(provider, **kwargs)

    # Persist the order: enabled providers first, then the disabled ones.
    app.PROVIDER_ORDER = providers_enabled + providers_disabled
    app.instance.save_config()
    ui.notifications.message('Configuration Saved', os.path.join(app.CONFIG_FILE))
    return self.redirect('/config/providers/')
def _save_provider_order(self, sorted_providers):
    """
    Save the provider order.

    :param sorted_providers: list of dicts, each with an 'id' key and a
        'config' dict holding at least the 'enabled' flag.
    """
    def ordered_providers(names, providers):
        # Yield providers in the order their ids appear in `names`,
        # then any providers that were never named.
        reminder = {}
        for name in names:
            for provider in providers:
                reminder[provider.get_id()] = provider
                if provider.get_id() == name:
                    yield provider
        # NOTE: this `else` is a for-else on the outer loop; since the loop
        # has no `break`, it always runs after the names are exhausted.
        else:
            # Leftovers: iterating a set, so their relative order is not
            # guaranteed.
            rest = set(reminder).difference(set(names))
            for provider in rest:
                yield reminder[provider]

    # id -> enabled flag, in the order the client sent the providers.
    ordered_names = OrderedDict()
    for sorted_provider in sorted_providers:
        ordered_names[
            sorted_provider['id']] = sorted_provider['config']['enabled']

    providers_enabled = []
    providers_disabled = []
    all_providers = providers.sorted_provider_list()

    for provider in ordered_providers(ordered_names, all_providers):
        name = provider.get_id()
        if ordered_names.get(name):
            provider.enabled = True
            providers_enabled.append(name)
        else:
            # Unlisted providers (missing from ordered_names) also land here.
            provider.enabled = False
            providers_disabled.append(name)

        # Find the incoming config for this provider; skip providers the
        # client did not send settings for.
        new_settings = [
            prov for prov in sorted_providers if prov.get('id') == name
        ]
        if not new_settings:
            continue

        self._set_common_settings(provider, new_settings[0]['config'])
        if isinstance(provider, TorrentProvider):
            self._set_torrent_settings(provider, new_settings[0]['config'])

    # Persist the order: enabled providers first, then the disabled ones.
    app.PROVIDER_ORDER = providers_enabled + providers_disabled
    app.instance.save_config()
def saveProviders(self, provider_order, **kwargs):
    """Save Provider related settings."""
    # The packed definition strings for custom providers are handled by the
    # dedicated savers; pop them so they never reach the generic handlers.
    self._save_newznab_providers(kwargs.pop('newznab_string', ''))
    self._save_rsstorrent_providers(kwargs.pop('torrentrss_string', ''))
    self._save_torznab_providers(kwargs.pop('torznab_string', ''))

    all_providers = providers.sorted_provider_list()
    ordered_ids = []

    # Flip the enabled flag for every '<id>:<0|1>' token in the order string.
    for token in provider_order.split():
        provider_id, enabled_flag = token.split(':')
        ordered_ids.append(provider_id)
        matching = next(
            (prov for prov in all_providers
             if prov.get_id() == provider_id and hasattr(prov, 'enabled')),
            None)
        if matching is not None:
            matching.enabled = bool(try_int(enabled_flag))

    # Apply the remaining per-provider form fields from kwargs.
    for prov in all_providers:
        self._set_common_settings(prov, **kwargs)
        if isinstance(prov, (TorrentRssProvider, TorznabProvider)):
            self._set_torrent_settings(prov, **kwargs)

    # app.NEWZNAB_DATA = '!!!'.join([x.config_string() for x in app.newznabProviderList])
    app.PROVIDER_ORDER = ordered_ids
    app.instance.save_config()
    ui.notifications.message('Configuration Saved', os.path.join(app.CONFIG_FILE))
    return self.redirect('/config/providers/')
def get(self, identifier, path_param=None):
    """
    Query provider information.

    With no identifier, return the full list of providers as JSON.
    With an identifier, return that provider's JSON, or — when
    ``path_param`` is ``'results'`` — a paginated list of its cached
    search results.

    :param identifier: provider id. E.g.: myawesomeprovider
    :param path_param: optional sub-resource; only 'results' is recognized.
    """
    # Optional filters for the cached-results lookup.
    show_slug = self._parse(self.get_argument('showslug', default=None), str)
    season = self._parse(self.get_argument('season', default=None), str)
    episode = self._parse(self.get_argument('episode', default=None), str)

    if not identifier:
        # return a list of provider id's
        provider_list = providers.sorted_provider_list()
        return self._ok([provider.to_json() for provider in provider_list])

    provider = providers.get_provider_class(identifier)
    if not provider:
        return self._not_found('Provider not found')

    if not path_param == 'results':
        # Plain provider lookup: no sub-resource requested.
        return self._ok(provider.to_json())

    provider_results = provider.cache.get_results(show_slug=show_slug, season=season, episode=episode)

    arg_page = self._get_page()
    arg_limit = self._get_limit(default=50)

    def data_generator():
        """Read log lines based on the specified criteria."""
        # 1-based start index of the requested page.
        start = arg_limit * (arg_page - 1) + 1

        for item in provider_results[start - 1:start - 1 + arg_limit]:
            # 'episodes' is stored as a '|'-delimited string, e.g. '|1|2|'.
            episodes = [int(ep) for ep in item['episodes'].strip('|').split('|') if ep != '']
            yield {
                'identifier': item['identifier'],
                'release': item['name'],
                'season': item['season'],
                'episodes': episodes,
                # For now if episodes is 0 or (multiepisode) mark as season pack.
                'seasonPack': len(episodes) == 0 or len(episodes) > 1,
                'indexer': item['indexer'],
                'seriesId': item['indexerid'],
                'showSlug': show_slug,
                'url': item['url'],
                'time': datetime.fromtimestamp(item['time']),
                'quality': item['quality'],
                'releaseGroup': item['release_group'],
                'dateAdded': datetime.fromtimestamp(item['date_added']),
                'version': item['version'],
                'seeders': item['seeders'],
                'size': item['size'],
                'leechers': item['leechers'],
                'pubdate': parser.parse(item['pubdate']).replace(microsecond=0) if item['pubdate'] else None,
                'provider': {
                    'id': provider.get_id(),
                    'name': provider.name,
                    'imageName': provider.image_name()
                }
            }

    if not len(provider_results):
        return self._not_found('Provider cache results not found')

    return self._paginate(data_generator=data_generator)
            # NOTE(review): the lines down to `return do_test` are the tail
            # of a `generator(...)` factory whose start is outside this view;
            # indentation reconstructed — confirm against the original file.
            print("Quality not in common.ANY, %r %s" % (quality, size))
            continue

    return do_test


# TODO: py.test parameters
if __name__ == '__main__':
    print("""
    ==================
    STARTING - Search TESTS
    ==================
    ######################################################################
    """)
    # create the test methods
    # One test per (forced/normal, fixture, torrent provider) combination.
    for forceSearch in (True, False):
        for name, data in TESTS.items():
            filename = name.replace(' ', '_')
            for provider in providers.sorted_provider_list():
                if provider.provider_type == GenericProvider.TORRENT:
                    if forceSearch:
                        test_name = 'test_manual_%s_%s_%s' % (filename, data["tvdbid"], provider.name)
                    else:
                        test_name = 'test_%s_%s_%s' % (filename, data["tvdbid"], provider.name)
                    # Attach the generated test method to the TestCase class.
                    test = generator(data, name, provider)
                    setattr(SearchTest, test_name, test)

    SUITE = unittest.TestLoader().loadTestsFromTestCase(SearchTest)
    unittest.TextTestRunner(verbosity=2).run(SUITE)
def search_providers(show, episodes, forced_search=False, down_cur_quality=False,
                     manual_search=False, manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param show: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: for a manual search, True/False (any results found);
        otherwise a list of the best search results.
    """
    found_results = {}  # per-provider: {episode_number: [results]}
    final_results = []
    manual_search_results = []
    did_search = False

    # build name cache for show
    name_cache.build_name_cache(show)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = [
            x for x in sorted_provider_list(app.RANDOMIZE_PROVIDERS)
            if x.is_active() and x.enable_manualsearch
        ]
    else:
        log.info(u'Using backlog search providers')
        providers = [
            x for x in sorted_provider_list(app.RANDOMIZE_PROVIDERS)
            if x.is_active() and x.enable_backlog
        ]

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        # Tag the thread name with the provider for log readability.
        threading.currentThread(
        ).name = original_thread_name + u' :: [' + cur_provider.name + u']'

        if cur_provider.anime_only and not show.is_anime:
            log.debug(u'{0} is not an anime, skipping', show.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search for episode when manually searching when in sponly
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        # Search loop: at most two passes (initial mode + optional fallback).
        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', show.name)
            else:
                log.info(u'Performing season pack search for {0}', show.name)

            try:
                search_results = cur_provider.find_search_results(
                    show, episodes, search_mode, forced_search,
                    down_cur_quality, manual_search, manual_search_type)
            except AuthException as error:
                log.error(u'Authentication error: {0}', ex(error))
                break
            except socket_timeout as error:
                log.debug(
                    u'Connection timed out (sockets) while searching {0}. Error: {1!r}',
                    cur_provider.name, ex(error))
                break
            except (requests.exceptions.HTTPError,
                    requests.exceptions.TooManyRedirects) as error:
                log.debug(u'HTTP error while searching {0}. Error: {1!r}',
                          cur_provider.name, ex(error))
                break
            except requests.exceptions.ConnectionError as error:
                log.debug(
                    u'Connection error while searching {0}. Error: {1!r}',
                    cur_provider.name, ex(error))
                break
            except requests.exceptions.Timeout as error:
                log.debug(
                    u'Connection timed out while searching {0}. Error: {1!r}',
                    cur_provider.name, ex(error))
                break
            except requests.exceptions.ContentDecodingError as error:
                log.debug(
                    u'Content-Encoding was gzip, but content was not compressed while searching {0}.'
                    u' Error: {1!r}', cur_provider.name, ex(error))
                break
            except Exception as error:
                # BUG FIX: this previously tested `u'ECONNRESET' in error`,
                # a membership test on the exception object itself, which
                # raises TypeError on Python 3. Test the exception message
                # (via ex(), as used for formatting above) instead.
                if u'ECONNRESET' in ex(error) or (
                        hasattr(error, u'errno')
                        and error.errno == errno.ECONNRESET):
                    log.warning(
                        u'Connection reseted by peer while searching {0}. Error: {1!r}',
                        cur_provider.name, ex(error))
                else:
                    log.debug(traceback.format_exc())
                    log.error(
                        u'Unknown exception while searching {0}. Error: {1!r}',
                        cur_provider.name, ex(error))
                break

            did_search = True

            if search_results:
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    if cur_ep in found_results[cur_provider.name]:
                        found_results[cur_provider.
                                      name][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[
                            cur_provider.name][cur_ep] = search_results[cur_ep]

                    # Sort the list by seeders if possible
                    if cur_provider.provider_type == u'torrent' or getattr(
                            cur_provider, u'torznab', None):
                        found_results[cur_provider.name][cur_ep].sort(
                            key=lambda d: int(d.seeders), reverse=True)
                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fallback when doing manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiate')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with episodes that we where looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [
                    episode_obj.episode for episode_obj in episodes
                ] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results
                        and cur_provider.cache.update_cache_manual_search(
                            search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[cur_provider.name]:
            best_season_result = pick_best_result(
                found_results[cur_provider.name][SEASON_RESULT])

        highest_quality_overall = 0
        for cur_episode in found_results[cur_provider.name]:
            for cur_result in found_results[cur_provider.name][cur_episode]:
                if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
                    highest_quality_overall = cur_result.quality
        log.debug(u'The highest quality of any match is {0}',
                  Quality.qualityStrings[highest_quality_overall])

        # see if every episode is wanted
        if best_season_result:
            searched_seasons = {str(x.season) for x in episodes}

            # get the quality of the season nzb
            season_quality = best_season_result.quality
            log.debug(u'The quality of the season {0} is {1}',
                      best_season_result.provider.provider_type,
                      Quality.qualityStrings[season_quality])
            main_db_con = db.DBConnection()
            selection = main_db_con.select(
                'SELECT episode '
                'FROM tv_episodes '
                'WHERE showid = ?'
                ' AND ( season IN ( {0} ) )'.format(
                    ','.join(searched_seasons)), [show.indexerid])

            all_eps = [int(x[b'episode']) for x in selection]
            log.debug(u'Episode list: {0}', all_eps)

            all_wanted = True
            any_wanted = False
            for cur_ep_num in all_eps:
                for season in {x.season for x in episodes}:
                    if not show.want_episode(season, cur_ep_num,
                                             season_quality,
                                             down_cur_quality):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then
            # just download this and be done with it (unless single episodes are preferred)
            if all_wanted and best_season_result.quality == highest_quality_overall:
                log.info(
                    u'All episodes in this season are needed, downloading {0} {1}',
                    best_season_result.provider.provider_type,
                    best_season_result.name)
                ep_objs = []
                for cur_ep_num in all_eps:
                    for season in {x.season for x in episodes}:
                        ep_objs.append(show.get_episode(season, cur_ep_num))
                best_season_result.episodes = ep_objs

                # Remove provider from thread name before return results
                threading.currentThread().name = original_thread_name

                return [best_season_result]
            elif not any_wanted:
                log.debug(
                    u'No episodes in this season are needed at this quality, ignoring {0} {1}',
                    best_season_result.provider.provider_type,
                    best_season_result.name)
            else:
                if best_season_result.provider.provider_type == GenericProvider.NZB:
                    log.debug(
                        u'Breaking apart the NZB and adding the individual ones to our results'
                    )

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzb_splitter.split_result(
                        best_season_result)
                    for cur_result in individual_results:
                        # NOTE(review): ep_number stays unbound if a split
                        # result somehow carries zero episodes — confirm
                        # split_result never yields empty results.
                        if len(cur_result.episodes) == 1:
                            ep_number = cur_result.episodes[0].episode
                        elif len(cur_result.episodes) > 1:
                            ep_number = MULTI_EP_RESULT

                        if ep_number in found_results[cur_provider.name]:
                            found_results[cur_provider.name][ep_number].append(
                                cur_result)
                        else:
                            found_results[cur_provider.name][ep_number] = [
                                cur_result
                            ]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent,
                    # creating multi-ep result for it.
                    log.info(
                        u'Adding multi-ep result for full-season torrent.'
                        u' Undesired episodes can be skipped in torrent client if desired!'
                    )
                    ep_objs = []
                    for cur_ep_num in all_eps:
                        for season in {x.season for x in episodes}:
                            ep_objs.append(show.get_episode(
                                season, cur_ep_num))
                    best_season_result.episodes = ep_objs

                    if MULTI_EP_RESULT in found_results[cur_provider.name]:
                        found_results[cur_provider.name][
                            MULTI_EP_RESULT].append(best_season_result)
                    else:
                        found_results[cur_provider.name][MULTI_EP_RESULT] = [
                            best_season_result
                        ]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[cur_provider.name]:
            for _multi_result in found_results[
                    cur_provider.name][MULTI_EP_RESULT]:
                log.debug(
                    u'Seeing if we want to bother with multi-episode result {0}',
                    _multi_result.name)

                # Filter result by ignore/required/whitelist/blacklist/quality, etc
                multi_result = pick_best_result(_multi_result)
                if not multi_result:
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    # if we have results for the episode
                    if ep_obj.episode in found_results[cur_provider.name] and \
                            len(found_results[cur_provider.name][ep_obj.episode]) > 0:
                        not_needed_eps.append(ep_obj.episode)
                    else:
                        needed_eps.append(ep_obj.episode)

                log.debug(
                    u'Single-ep check result is needed_eps: {0}, not_needed_eps: {1}',
                    needed_eps, not_needed_eps)

                if not needed_eps:
                    log.debug(
                        u'All of these episodes were covered by single episode results,'
                        u' ignoring this multi-episode result')
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    if ep_obj.episode in multi_results:
                        multi_not_needed_eps.append(ep_obj.episode)
                    else:
                        multi_needed_eps.append(ep_obj.episode)

                log.debug(
                    u'Multi-ep check result is multi_needed_eps: {0}, multi_not_needed_eps: {1}',
                    multi_needed_eps, multi_not_needed_eps)

                if not multi_needed_eps:
                    log.debug(
                        u'All of these episodes were covered by another multi-episode nzb, '
                        u'ignoring this multi-ep result')
                    continue

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result
                    if ep_obj.episode in found_results[cur_provider.name]:
                        log.debug(
                            u'A needed multi-episode result overlaps with a single-episode result for episode {0},'
                            u' removing the single-episode results from the list',
                            ep_obj.episode,
                        )
                        del found_results[cur_provider.name][ep_obj.episode]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[cur_provider.name]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if not found_results[cur_provider.name][cur_ep]:
                continue

            # if all results were rejected move on to the next episode
            best_result = pick_best_result(
                found_results[cur_provider.name][cur_ep])
            if not best_result:
                continue

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(final_results):
                for best_resultEp in best_result.episodes:
                    if best_resultEp in result.episodes:
                        if result.quality < best_result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

    if not did_search:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for backlog searches.'
            u' Please check your settings.')

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # If results in manual search return True, else False
        return any(manual_search_results)
    else:
        return final_results
def saveProviders(self, newznab_string='', torrentrss_string='', provider_order=None, **kwargs): """ Save Provider related settings """ results = [] provider_str_list = provider_order.split() provider_list = [] newznab_provider_dict = dict( zip([x.get_id() for x in app.newznabProviderList], app.newznabProviderList)) finished_names = [] # add all the newznab info we got into our list if newznab_string: for curNewznabProviderStr in newznab_string.split('!!!'): if not curNewznabProviderStr: continue cur_name, cur_url, cur_key, cur_cat = curNewznabProviderStr.split( '|') cur_url = config.clean_url(cur_url) new_provider = NewznabProvider(cur_name, cur_url, api_key=cur_key, cat_ids=cur_cat) cur_id = new_provider.get_id() # if it already exists then update it if cur_id in newznab_provider_dict: newznab_provider_dict[cur_id].name = cur_name newznab_provider_dict[cur_id].url = cur_url newznab_provider_dict[cur_id].api_key = cur_key newznab_provider_dict[cur_id].cat_ids = split_and_strip( cur_cat) # a 0 in the key spot indicates that no key is needed if cur_key == '0': newznab_provider_dict[cur_id].needs_auth = False else: newznab_provider_dict[cur_id].needs_auth = True try: newznab_provider_dict[cur_id].search_mode = str( kwargs['{id}_search_mode'.format( id=cur_id)]).strip() except (AttributeError, KeyError): pass # these exceptions are actually catching unselected checkboxes try: newznab_provider_dict[ cur_id].search_fallback = config.checkbox_to_value( kwargs['{id}_search_fallback'.format( id=cur_id)]) except (AttributeError, KeyError): newznab_provider_dict[ cur_id].search_fallback = 0 # these exceptions are actually catching unselected checkboxes try: newznab_provider_dict[ cur_id].enable_daily = config.checkbox_to_value( kwargs['{id}_enable_daily'.format(id=cur_id)]) except (AttributeError, KeyError): newznab_provider_dict[ cur_id].enable_daily = 0 # these exceptions are actually catching unselected checkboxes try: newznab_provider_dict[ cur_id].enable_manualsearch = 
config.checkbox_to_value( kwargs['{id}_enable_manualsearch'.format( id=cur_id)]) except (AttributeError, KeyError): newznab_provider_dict[ cur_id].enable_manualsearch = 0 # these exceptions are actually catching unselected checkboxes try: newznab_provider_dict[ cur_id].enable_backlog = config.checkbox_to_value( kwargs['{id}_enable_backlog'.format( id=cur_id)]) except (AttributeError, KeyError): newznab_provider_dict[ cur_id].enable_backlog = 0 # these exceptions are actually catching unselected checkboxes else: app.newznabProviderList.append(new_provider) finished_names.append(cur_id) # delete anything that is missing for cur_provider in app.newznabProviderList: if cur_provider.get_id() not in finished_names: app.newznabProviderList.remove(cur_provider) # Update the custom newznab provider list NewznabProvider.save_newnab_providers() torrent_rss_provider_dict = dict( zip([x.get_id() for x in app.torrentRssProviderList], app.torrentRssProviderList)) finished_names = [] if torrentrss_string: for curTorrentRssProviderStr in torrentrss_string.split('!!!'): if not curTorrentRssProviderStr: continue cur_name, cur_url, cur_cookies, cur_title_tag = curTorrentRssProviderStr.split( '|') cur_url = config.clean_url(cur_url) new_provider = TorrentRssProvider(cur_name, cur_url, cur_cookies, cur_title_tag) cur_id = new_provider.get_id() # if it already exists then update it if cur_id in torrent_rss_provider_dict: torrent_rss_provider_dict[cur_id].name = cur_name torrent_rss_provider_dict[cur_id].url = cur_url torrent_rss_provider_dict[cur_id].cookies = cur_cookies torrent_rss_provider_dict[ cur_id].curTitleTAG = cur_title_tag else: app.torrentRssProviderList.append(new_provider) finished_names.append(cur_id) # delete anything that is missing for cur_provider in app.torrentRssProviderList: if cur_provider.get_id() not in finished_names: app.torrentRssProviderList.remove(cur_provider) # Update the torrentrss provider list app.TORRENTRSS_PROVIDERS = [ provider.name for provider in 
app.torrentRssProviderList ] disabled_list = [] # do the enable/disable for cur_providerStr in provider_str_list: cur_provider, cur_enabled = cur_providerStr.split(':') cur_enabled = try_int(cur_enabled) cur_prov_obj = [ x for x in providers.sorted_provider_list() if x.get_id() == cur_provider and hasattr(x, 'enabled') ] if cur_prov_obj: cur_prov_obj[0].enabled = bool(cur_enabled) if cur_enabled: provider_list.append(cur_provider) else: disabled_list.append(cur_provider) if cur_provider in newznab_provider_dict: newznab_provider_dict[cur_provider].enabled = bool(cur_enabled) elif cur_provider in torrent_rss_provider_dict: torrent_rss_provider_dict[cur_provider].enabled = bool( cur_enabled) provider_list.extend(disabled_list) # dynamically load provider settings for cur_torrent_provider in [ prov for prov in providers.sorted_provider_list() if prov.provider_type == GenericProvider.TORRENT ]: if hasattr(cur_torrent_provider, 'custom_url'): try: cur_torrent_provider.custom_url = str( kwargs['{id}_custom_url'.format( id=cur_torrent_provider.get_id())]).strip() except (AttributeError, KeyError): cur_torrent_provider.custom_url = None # these exceptions are actually catching unselected checkboxes if hasattr(cur_torrent_provider, 'minseed'): try: cur_torrent_provider.minseed = int( str(kwargs['{id}_minseed'.format( id=cur_torrent_provider.get_id())]).strip()) except (AttributeError, KeyError): cur_torrent_provider.minseed = 0 # these exceptions are actually catching unselected checkboxes if hasattr(cur_torrent_provider, 'minleech'): try: cur_torrent_provider.minleech = int( str(kwargs['{id}_minleech'.format( id=cur_torrent_provider.get_id())]).strip()) except (AttributeError, KeyError): cur_torrent_provider.minleech = 0 # these exceptions are actually catching unselected checkboxes if hasattr(cur_torrent_provider, 'ratio'): try: ratio = float( str(kwargs['{id}_ratio'.format( id=cur_torrent_provider.get_id())]).strip()) cur_torrent_provider.ratio = (ratio, -1)[ratio < 0] 
                except (AttributeError, KeyError, ValueError):
                    # Unselected/absent form field: fall back to "no ratio".
                    cur_torrent_provider.ratio = None  # these exceptions are actually catching unselected checkboxes

            # Every optional per-provider setting below follows the same pattern:
            # read '<provider_id>_<setting>' from the submitted form kwargs and,
            # when the field is absent (e.g. an unticked checkbox), fall back to
            # a safe default (None, 0 or a sensible string).
            if hasattr(cur_torrent_provider, 'digest'):
                try:
                    cur_torrent_provider.digest = str(
                        kwargs['{id}_digest'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.digest = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'hash'):
                try:
                    cur_torrent_provider.hash = str(kwargs['{id}_hash'.format(
                        id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.hash = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'api_key'):
                try:
                    cur_torrent_provider.api_key = str(
                        kwargs['{id}_api_key'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.api_key = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'username'):
                try:
                    cur_torrent_provider.username = str(
                        kwargs['{id}_username'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.username = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'password'):
                try:
                    cur_torrent_provider.password = str(
                        kwargs['{id}_password'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.password = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'passkey'):
                try:
                    cur_torrent_provider.passkey = str(
                        kwargs['{id}_passkey'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.passkey = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'pin'):
                try:
                    cur_torrent_provider.pin = \
                        str(kwargs['{id}_pin'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_torrent_provider.pin = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'confirmed'):
                try:
                    cur_torrent_provider.confirmed = config.checkbox_to_value(
                        kwargs['{id}_confirmed'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.confirmed = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'ranked'):
                try:
                    cur_torrent_provider.ranked = config.checkbox_to_value(
                        kwargs['{id}_ranked'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.ranked = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'engrelease'):
                try:
                    cur_torrent_provider.engrelease = config.checkbox_to_value(
                        kwargs['{id}_engrelease'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.engrelease = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'onlyspasearch'):
                try:
                    cur_torrent_provider.onlyspasearch = config.checkbox_to_value(
                        kwargs['{id}_onlyspasearch'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.onlyspasearch = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'sorting'):
                try:
                    cur_torrent_provider.sorting = str(
                        kwargs['{id}_sorting'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    # Default sort order when nothing was submitted.
                    cur_torrent_provider.sorting = 'seeders'  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'freeleech'):
                try:
                    cur_torrent_provider.freeleech = config.checkbox_to_value(
                        kwargs['{id}_freeleech'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.freeleech = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'search_mode'):
                try:
                    cur_torrent_provider.search_mode = str(
                        kwargs['{id}_search_mode'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    # Default search mode: episode-only searches.
                    cur_torrent_provider.search_mode = 'eponly'  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'search_fallback'):
                try:
                    cur_torrent_provider.search_fallback = config.checkbox_to_value(
                        kwargs['{id}_search_fallback'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.search_fallback = 0  # these exceptions are catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'enable_daily'):
                try:
                    cur_torrent_provider.enable_daily = config.checkbox_to_value(
                        kwargs['{id}_enable_daily'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.enable_daily = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'enable_manualsearch'):
                try:
                    cur_torrent_provider.enable_manualsearch = config.checkbox_to_value(
                        kwargs['{id}_enable_manualsearch'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.enable_manualsearch = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'enable_backlog'):
                try:
                    cur_torrent_provider.enable_backlog = config.checkbox_to_value(
                        kwargs['{id}_enable_backlog'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.enable_backlog = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'cat'):
                try:
                    cur_torrent_provider.cat = int(
                        str(kwargs['{id}_cat'.format(
                            id=cur_torrent_provider.get_id())]).strip())
                except (AttributeError, KeyError):
                    cur_torrent_provider.cat = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_torrent_provider, 'subtitle'):
                try:
                    cur_torrent_provider.subtitle = config.checkbox_to_value(
                        kwargs['{id}_subtitle'.format(
                            id=cur_torrent_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_torrent_provider.subtitle = 0  # these exceptions are actually catching unselected checkboxes

            if cur_torrent_provider.enable_cookies:
                try:
                    cur_torrent_provider.cookies = str(
                        kwargs['{id}_cookies'.format(
                            id=cur_torrent_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    # I don't want to configure a default value here, as it can also be
                    # configured initially as a custom rss torrent provider
                    pass

        # Apply the same form-driven settings to the NZB providers.
        for cur_nzb_provider in [
            prov for prov in providers.sorted_provider_list()
            if prov.provider_type == GenericProvider.NZB
        ]:
            # We don't want to overwrite the api key, as that's not available
            # in the second tab for newznab providers.
            if hasattr(cur_nzb_provider, 'api_key') and not isinstance(
                    cur_nzb_provider, NewznabProvider):
                try:
                    cur_nzb_provider.api_key = str(
                        kwargs['{id}_api_key'.format(
                            id=cur_nzb_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_nzb_provider.api_key = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_nzb_provider, 'username'):
                try:
                    cur_nzb_provider.username = str(
                        kwargs['{id}_username'.format(
                            id=cur_nzb_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_nzb_provider.username = None  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_nzb_provider, 'search_mode'):
                try:
                    cur_nzb_provider.search_mode = str(
                        kwargs['{id}_search_mode'.format(
                            id=cur_nzb_provider.get_id())]).strip()
                except (AttributeError, KeyError):
                    cur_nzb_provider.search_mode = 'eponly'  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_nzb_provider, 'search_fallback'):
                try:
                    cur_nzb_provider.search_fallback = config.checkbox_to_value(
                        kwargs['{id}_search_fallback'.format(
                            id=cur_nzb_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_nzb_provider.search_fallback = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_nzb_provider, 'enable_daily'):
                try:
                    cur_nzb_provider.enable_daily = config.checkbox_to_value(
                        kwargs['{id}_enable_daily'.format(
                            id=cur_nzb_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_nzb_provider.enable_daily = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_nzb_provider, 'enable_manualsearch'):
                try:
                    cur_nzb_provider.enable_manualsearch = config.checkbox_to_value(
                        kwargs['{id}_enable_manualsearch'.format(
                            id=cur_nzb_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_nzb_provider.enable_manualsearch = 0  # these exceptions are actually catching unselected checkboxes

            if hasattr(cur_nzb_provider, 'enable_backlog'):
                try:
                    cur_nzb_provider.enable_backlog = config.checkbox_to_value(
                        kwargs['{id}_enable_backlog'.format(
                            id=cur_nzb_provider.get_id())])
                except (AttributeError, KeyError):
                    cur_nzb_provider.enable_backlog = 0  # these exceptions are actually catching unselected checkboxes

        # app.NEWZNAB_DATA = '!!!'.join([x.config_string() for x in app.newznabProviderList])
        app.PROVIDER_ORDER = provider_list

        # Persist all of the settings changed above to the configuration file.
        app.instance.save_config()

        if results:
            # Surface any per-setting validation errors in the log and the UI.
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br>\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved',
                                     os.path.join(app.CONFIG_FILE))

        return self.redirect('/config/providers/')
            # Quality flag is not part of common.ANY: report it and skip this result.
            print("Quality not in common.ANY, %r %s" % (quality, size))
            continue

    # Hand the generated test method back to the caller, which attaches it to
    # SearchTest below.
    return do_test


# TODO: py.test parameters
if __name__ == '__main__':
    print("""
    ==================
    STARTING - Search TESTS
    ==================
    ######################################################################
    """)

    # create the test methods
    # One test method is generated per (force-search flag, scenario, torrent
    # provider) combination and attached to the SearchTest case.
    for forceSearch in (True, False):
        for name, data in iteritems(TESTS):
            filename = name.replace(' ', '_')
            for provider in providers.sorted_provider_list():
                if provider.provider_type == GenericProvider.TORRENT:
                    # Manual (forced) searches get a distinct test-name prefix.
                    if forceSearch:
                        test_name = 'test_manual_%s_%s_%s' % (filename, data["tvdbid"], provider.name)
                    else:
                        test_name = 'test_%s_%s_%s' % (filename, data["tvdbid"], provider.name)
                    test = generator(data, name, provider)
                    setattr(SearchTest, test_name, test)

    # Collect the generated tests and run them with verbose output.
    SUITE = unittest.TestLoader().loadTestsFromTestCase(SearchTest)
    unittest.TextTestRunner(verbosity=2).run(SUITE)