def search_providers(series_obj, episodes, forced_search=False, down_cur_quality=False,
                     manual_search=False, manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    final_results = []
    manual_search_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
            u' Please check your settings', 'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        # Tag the current thread with the provider being searched, for log readability.
        threading.currentThread().name = original_thread_name + u' :: [' + cur_provider.name + u']'

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search for episode when manually searching when in sponly
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        # Search loop: at most two passes (initial mode + optional fallback mode).
        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}', series_obj.name)

            try:
                search_results = cur_provider.find_search_results(
                    series_obj, episodes, search_mode, forced_search,
                    down_cur_quality, manual_search, manual_search_type)
            except AuthException as error:
                log.error(u'Authentication error: {0}', ex(error))
                break

            if search_results:
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    if cur_ep in found_results[cur_provider.name]:
                        found_results[cur_provider.name][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[cur_provider.name][cur_ep] = search_results[cur_ep]

                    # Sort the list by seeders if possible
                    if cur_provider.provider_type == u'torrent' or getattr(cur_provider, u'torznab', None):
                        found_results[cur_provider.name][cur_ep].sort(
                            key=lambda d: int(d.seeders), reverse=True)

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fallback when doing manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                # Fixed message typo ('initiate' -> 'initiated') for consistency
                # with the episode-search fallback message above.
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with episodes that we were looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [episode_obj.episode for episode_obj in episodes] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results and
                        cur_provider.cache.update_cache_manual_search(search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[cur_provider.name]:
            best_season_result = pick_best_result(found_results[cur_provider.name][SEASON_RESULT])

        highest_quality_overall = 0
        for cur_episode in found_results[cur_provider.name]:
            for cur_result in found_results[cur_provider.name][cur_episode]:
                if cur_result.quality > highest_quality_overall:
                    highest_quality_overall = cur_result.quality
        log.debug(u'The highest quality of any match is {0}',
                  Quality.qualityStrings[highest_quality_overall])

        # see if every episode is wanted
        if best_season_result:
            searched_seasons = {str(x.season) for x in episodes}

            # get the quality of the season nzb
            season_quality = best_season_result.quality
            log.debug(u'The quality of the season {0} is {1}',
                      best_season_result.provider.provider_type,
                      Quality.qualityStrings[season_quality])

            main_db_con = db.DBConnection()
            selection = main_db_con.select(
                'SELECT episode '
                'FROM tv_episodes '
                'WHERE indexer = ?'
                ' AND showid = ?'
                ' AND ( season IN ( {0} ) )'.format(','.join(searched_seasons)),
                [series_obj.indexer, series_obj.series_id])

            all_eps = [int(x[b'episode']) for x in selection]
            log.debug(u'Episode list: {0}', all_eps)

            all_wanted = True
            any_wanted = False
            for cur_ep_num in all_eps:
                for season in {x.season for x in episodes}:
                    if not series_obj.want_episode(season, cur_ep_num, season_quality, down_cur_quality):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then
            # just download this and be done with it (unless single episodes are preferred)
            if all_wanted and best_season_result.quality == highest_quality_overall:
                log.info(u'All episodes in this season are needed, downloading {0} {1}',
                         best_season_result.provider.provider_type,
                         best_season_result.name)
                ep_objs = []
                for cur_ep_num in all_eps:
                    for season in {x.season for x in episodes}:
                        ep_objs.append(series_obj.get_episode(season, cur_ep_num))
                best_season_result.episodes = ep_objs

                # Remove provider from thread name before return results
                threading.currentThread().name = original_thread_name

                return [best_season_result]

            elif not any_wanted:
                log.debug(u'No episodes in this season are needed at this quality, ignoring {0} {1}',
                          best_season_result.provider.provider_type,
                          best_season_result.name)
            else:
                # Some NZB providers (e.g. Jackett) can also download torrents, but torrents cannot be split like NZB
                if (best_season_result.provider.provider_type == GenericProvider.NZB and
                        not best_season_result.url.endswith(GenericProvider.TORRENT)):
                    log.debug(u'Breaking apart the NZB and adding the individual ones to our results')

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzb_splitter.split_result(best_season_result)
                    for cur_result in individual_results:
                        # NOTE(review): ep_number is left unbound if cur_result.episodes is
                        # empty; split_result presumably never yields such results — confirm.
                        if len(cur_result.episodes) == 1:
                            ep_number = cur_result.episodes[0].episode
                        elif len(cur_result.episodes) > 1:
                            ep_number = MULTI_EP_RESULT

                        if ep_number in found_results[cur_provider.name]:
                            found_results[cur_provider.name][ep_number].append(cur_result)
                        else:
                            found_results[cur_provider.name][ep_number] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent,
                    # creating multi-ep result for it.
                    log.info(u'Adding multi-ep result for full-season torrent.'
                             u' Undesired episodes can be skipped in torrent client if desired!')
                    ep_objs = []
                    for cur_ep_num in all_eps:
                        for season in {x.season for x in episodes}:
                            ep_objs.append(series_obj.get_episode(season, cur_ep_num))
                    best_season_result.episodes = ep_objs

                    if MULTI_EP_RESULT in found_results[cur_provider.name]:
                        found_results[cur_provider.name][MULTI_EP_RESULT].append(best_season_result)
                    else:
                        found_results[cur_provider.name][MULTI_EP_RESULT] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[cur_provider.name]:
            for _multi_result in found_results[cur_provider.name][MULTI_EP_RESULT]:
                log.debug(u'Seeing if we want to bother with multi-episode result {0}', _multi_result.name)

                # Filter result by ignore/required/whitelist/blacklist/quality, etc
                multi_result = pick_best_result(_multi_result)
                if not multi_result:
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    # if we have results for the episode
                    if ep_obj.episode in found_results[cur_provider.name] and \
                            len(found_results[cur_provider.name][ep_obj.episode]) > 0:
                        not_needed_eps.append(ep_obj.episode)
                    else:
                        needed_eps.append(ep_obj.episode)

                log.debug(u'Single-ep check result is needed_eps: {0}, not_needed_eps: {1}',
                          needed_eps, not_needed_eps)

                if not needed_eps:
                    log.debug(u'All of these episodes were covered by single episode results,'
                              u' ignoring this multi-episode result')
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    if ep_obj.episode in multi_results:
                        multi_not_needed_eps.append(ep_obj.episode)
                    else:
                        multi_needed_eps.append(ep_obj.episode)

                log.debug(u'Multi-ep check result is multi_needed_eps: {0}, multi_not_needed_eps: {1}',
                          multi_needed_eps, multi_not_needed_eps)

                if not multi_needed_eps:
                    log.debug(u'All of these episodes were covered by another multi-episode nzb, '
                              u'ignoring this multi-ep result')
                    continue

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result
                    if ep_obj.episode in found_results[cur_provider.name]:
                        log.debug(u'A needed multi-episode result overlaps with a single-episode result for episode {0},'
                                  u' removing the single-episode results from the list',
                                  ep_obj.episode,
                                  )
                        del found_results[cur_provider.name][ep_obj.episode]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[cur_provider.name]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if not found_results[cur_provider.name][cur_ep]:
                continue

            # if all results were rejected move on to the next episode
            best_result = pick_best_result(found_results[cur_provider.name][cur_ep])
            if not best_result:
                continue

            # add result if its not a duplicate and
            # NOTE(review): final_results.pop(i) mutates the list while enumerate() is
            # iterating it, which can skip the element after each pop — confirm intended.
            found = False
            for i, result in enumerate(final_results):
                for best_resultEp in best_result.episodes:
                    if best_resultEp in result.episodes:
                        if result.quality < best_result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                # Skip the result if search delay is enabled for the provider.
                if not delay_search(best_result):
                    final_results += [best_result]

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # If results in manual search return True, else False
        return any(manual_search_results)
    else:
        return final_results
def run(self):
    """
    Add the show identified by ``self.indexer``/``self.indexer_id`` to the library.

    Loads the series and its episodes from the indexer, pulls IMDb info, writes
    metadata, and pushes progress steps to connected Web UIs over the WebSocket.
    Failures of individual phases are wrapped in ``SaveSeriesException``.
    """
    ShowQueueItem.run(self)
    log.info('Starting to add show by {0}',
             ('show_dir: {0}'.format(self.show_dir)
              if self.show_dir else
              'Indexer Id: {0}'.format(self.indexer_id)))

    show_slug = indexer_id_to_slug(self.indexer, self.indexer_id)
    series = Series.from_identifier(SeriesIdentifier.from_slug(show_slug))

    # Accumulated list of progress steps, re-sent in full with every update.
    step = []

    # Small helper, to reduce code for messaging
    def message_step(new_step):
        step.append(new_step)
        ws.Message('QueueItemShowAdd', dict(step=step, **self.to_json)).push()

    try:
        try:
            # Push an update to any open Web UIs through the WebSocket
            message_step('load show from {indexer}'.format(
                indexer=indexerApi(self.indexer).name))

            api = series.identifier.get_indexer_api(self.options)

            if getattr(api[self.indexer_id], 'seriesname', None) is None:
                log.error(
                    'Show in {path} has no name on {indexer}, probably searched with the wrong language.',
                    {'path': self.show_dir,
                     'indexer': indexerApi(self.indexer).name})

                ui.notifications.error(
                    'Unable to add show',
                    'Show in {path} has no name on {indexer}, probably the wrong language.'
                    ' Delete .nfo and manually add the correct language.'.format(
                        path=self.show_dir, indexer=indexerApi(self.indexer).name))
                self._finish_early()
                # NOTE(review): the '{0!r}' placeholder in this message is never
                # formatted with a value — confirm whether an argument is missing.
                raise SaveSeriesException(
                    'Indexer is missing a showname in this language: {0!r}')

            series.load_from_indexer(tvapi=api)
            message_step('load info from imdb')
            series.load_imdb_info()
        except IndexerException as error:
            log.warning('Unable to load series from indexer: {0!r}'.format(error))
            raise SaveSeriesException('Unable to load series from indexer: {0!r}'.format(error))

        message_step('check if show is already added')

        try:
            message_step('configure show options')
            series.configure(self)
        except KeyError as error:
            log.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}',
                {'series_name': series.name, 'error': error})
            ui.notifications.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'.format(
                    series_name=series.name, error=error))
            raise SaveSeriesException(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'.format(
                    series_name=series.name, error=error))
        except Exception as error:
            log.error('Error trying to configure show: {0}', error)
            log.debug(traceback.format_exc())
            raise

        app.showList.append(series)
        series.save_to_db()

        try:
            message_step('load episodes from {indexer}'.format(
                indexer=indexerApi(self.indexer).name))
            series.load_episodes_from_indexer(tvapi=api)
            # If we provide a default_status_after through the apiv2 series route options object.
            # set it after we've added the episodes.
            self.default_ep_status = self.options['default_status_after'] or app.STATUS_DEFAULT_AFTER
        except IndexerException as error:
            log.warning('Unable to load series episodes from indexer: {0!r}'.format(error))
            raise SaveSeriesException(
                'Unable to load series episodes from indexer: {0!r}'.format(error))

        message_step('create metadata in show folder')
        series.write_metadata()
        series.update_metadata()
        series.populate_cache()
        build_name_cache(series)  # update internal name cache
        series.flush_episodes()
        series.sync_trakt()

        message_step('add scene numbering')
        series.add_scene_numbering()

    except SaveSeriesException as error:
        log.warning('Unable to add series: {0!r}'.format(error))
        self.success = False
        self._finish_early()
        log.debug(traceback.format_exc())

    # NOTE(review): execution falls through here even after SaveSeriesException was
    # handled above, so self.success is overwritten with True and a backlog search
    # may still be triggered — confirm this is intended.
    default_status = self.options['default_status'] or app.STATUS_DEFAULT
    if statusStrings[default_status] == 'Wanted':
        message_step('trigger backlog search')
        app.backlog_search_scheduler.action.search_backlog([series])

    self.success = True

    ws.Message('showAdded', series.to_json(detailed=False)).push()  # Send ws update to client
    message_step('finished')
    self.finish()
def run(self):
    """
    Add a new show to the library (legacy implementation).

    Validates the indexer id, creates the show directory if needed, builds a
    ``Series`` object from indexer data, loads IMDb info and episodes, writes
    metadata, and optionally kicks off Trakt sync, XEM refresh and a backlog
    search. Aborts via ``self._finishEarly()`` on any unrecoverable error.

    NOTE(review): this code uses ``e.message``, which only exists on Python 2
    exception objects — confirm the target runtime.
    """
    ShowQueueItem.run(self)

    logger.log(u"Starting to add show {0}".format(
        "by ShowDir: {0}".format(self.showDir)
        if self.showDir else
        u"by Indexer Id: {0}".format(self.indexer_id)))

    # make sure the Indexer IDs are valid
    try:
        l_indexer_api_params = indexerApi(self.indexer).api_params.copy()
        if self.lang:
            l_indexer_api_params['language'] = self.lang

        logger.log(u"" + str(indexerApi(self.indexer).name) + ": " + repr(l_indexer_api_params))

        indexer_api = indexerApi(self.indexer).indexer(**l_indexer_api_params)
        s = indexer_api[self.indexer_id]

        # Let's try to create the show Dir if it's not provided. This way we force the show dir
        # to build build using the Indexers provided series name
        if not self.showDir and self.root_dir:
            show_name = get_showname_from_indexer(self.indexer, self.indexer_id, self.lang)
            if show_name:
                self.showDir = os.path.join(self.root_dir, sanitize_filename(show_name))
                dir_exists = make_dir(self.showDir)
                if not dir_exists:
                    logger.log(u"Unable to create the folder {0}, can't add the show".format(self.showDir))
                    return

                chmod_as_parent(self.showDir)
            else:
                logger.log(u"Unable to get a show {0}, can't add the show".format(self.showDir))
                return

        # this usually only happens if they have an NFO in their show dir which gave us a Indexer ID that
        # has no proper english version of the show
        if getattr(s, 'seriesname', None) is None:
            logger.log(u"Show in {0} has no name on {1}, probably searched with the wrong language.".format(
                self.showDir, indexerApi(self.indexer).name), logger.ERROR)
            ui.notifications.error(
                'Unable to add show',
                'Show in {0} has no name on {1}, probably the wrong language. \
Delete .nfo and manually add the correct language.'
                .format(self.showDir, indexerApi(self.indexer).name))
            self._finishEarly()
            return

        # if the show has no episodes/seasons
        if not s:
            logger.log(u"Show " + str(s['seriesname']) + u" is on " +
                       str(indexerApi(self.indexer).name) +
                       u" but contains no season/episode data.")
            ui.notifications.error(
                "Unable to add show",
                "Show {0} is on {1} but contains no season/episode data.".format(
                    s['seriesname'], indexerApi(self.indexer).name))
            self._finishEarly()
            return

        # Check if we can already find this show in our current showList.
        try:
            check_existing_shows(s, self.indexer)
        except IndexerShowAllreadyInLibrary as e:
            logger.log(
                u"Could not add the show %s, as it already is in your library."
                u" Error: %s" % (s['seriesname'], e.message), logger.WARNING)
            ui.notifications.error('Unable to add show',
                                   'reason: {0}'.format(e.message))
            self._finishEarly()
            # Clean up leftover if the newly created directory is empty.
            delete_empty_folders(self.showDir)
            return

    # TODO: Add more specific indexer exceptions, that should provide the user with some accurate feedback.
    except IndexerShowIncomplete as e:
        logger.log(
            u"%s Error while loading information from indexer %s. "
            u"Error: %s" % (self.indexer_id, indexerApi(self.indexer).name, e.message),
            logger.WARNING)
        ui.notifications.error(
            "Unable to add show",
            "Unable to look up the show in {0} on {1} using ID {2} "
            "Reason: {3}".format(self.showDir, indexerApi(self.indexer).name,
                                 self.indexer_id, e.message))
        self._finishEarly()
        return
    except IndexerShowNotFoundInLanguage as e:
        logger.log(
            u'{id}: Data retrieved from {indexer} was incomplete. The indexer does not provide '
            u'show information in the searched language {language}. Aborting: {error_msg}'
            .format(id=self.indexer_id, indexer=indexerApi(self.indexer).name,
                    language=e.language, error_msg=e.message), logger.WARNING)
        ui.notifications.error(
            'Error adding show!',
            'Unable to add show {indexer_id} on {indexer} with this language: {language}'
            .format(indexer_id=self.indexer_id,
                    indexer=indexerApi(self.indexer).name,
                    language=e.language))
        self._finishEarly()
        return
    except Exception as e:
        logger.log(
            u"%s Error while loading information from indexer %s. "
            u"Error: %r" % (self.indexer_id, indexerApi(self.indexer).name, e.message),
            logger.ERROR)
        ui.notifications.error(
            "Unable to add show",
            "Unable to look up the show in {0} on {1} using ID {2}, not using the NFO. "
            "Delete .nfo and try adding manually again.".format(
                self.showDir, indexerApi(self.indexer).name, self.indexer_id))
        self._finishEarly()
        return

    try:
        newShow = Series(self.indexer, self.indexer_id, self.lang)
        newShow.load_from_indexer(indexer_api)

        self.show = newShow

        # set up initial values
        self.show.location = self.showDir
        self.show.subtitles = self.subtitles if self.subtitles is not None else app.SUBTITLES_DEFAULT
        self.show.quality = self.quality if self.quality else app.QUALITY_DEFAULT
        self.show.flatten_folders = self.flatten_folders if self.flatten_folders is not None \
            else app.FLATTEN_FOLDERS_DEFAULT
        self.show.anime = self.anime if self.anime is not None else app.ANIME_DEFAULT
        self.show.scene = self.scene if self.scene is not None else app.SCENE_DEFAULT
        self.show.paused = self.paused if self.paused is not None else False

        # set up default new/missing episode status
        logger.log(
            u"Setting all previously aired episodes to the specified status: {status}"
            .format(status=statusStrings[self.default_status]))
        self.show.default_ep_status = self.default_status

        if self.show.anime:
            self.show.release_groups = BlackAndWhiteList(self.show.indexerid)
            if self.blacklist:
                self.show.release_groups.set_black_keywords(self.blacklist)
            if self.whitelist:
                self.show.release_groups.set_white_keywords(self.whitelist)

        # # be smartish about this
        # if self.show.genre and "talk show" in self.show.genre.lower():
        #     self.show.air_by_date = 1
        # if self.show.genre and "documentary" in self.show.genre.lower():
        #     self.show.air_by_date = 0
        # if self.show.classification and "sports" in self.show.classification.lower():
        #     self.show.sports = 1

    except IndexerException as e:
        logger.log(
            u"Unable to add show due to an error with " +
            indexerApi(self.indexer).name + ": " + e.message, logger.ERROR)
        if self.show:
            ui.notifications.error(
                "Unable to add " + str(self.show.name) + " due to an error with " +
                indexerApi(self.indexer).name + "")
        else:
            ui.notifications.error(
                "Unable to add show due to an error with " +
                indexerApi(self.indexer).name + "")
        self._finishEarly()
        return

    except MultipleShowObjectsException:
        logger.log(
            u"The show in " + self.showDir + " is already in your show list, skipping",
            logger.WARNING)
        ui.notifications.error(
            'Show skipped',
            "The show in " + self.showDir + " is already in your show list")
        self._finishEarly()
        return

    except Exception as e:
        logger.log(u"Error trying to add show: " + e.message, logger.ERROR)
        logger.log(traceback.format_exc(), logger.DEBUG)
        self._finishEarly()
        raise

    logger.log(u"Retrieving show info from IMDb", logger.DEBUG)
    try:
        self.show.load_imdb_info()
    except ImdbAPIError as e:
        logger.log(u"Something wrong on IMDb api: " + e.message, logger.INFO)
    except Exception as e:
        logger.log(u"Error loading IMDb info: " + e.message, logger.ERROR)

    try:
        self.show.save_to_db()
    except Exception as e:
        logger.log(u"Error saving the show to the database: " + e.message, logger.ERROR)
        logger.log(traceback.format_exc(), logger.DEBUG)
        self._finishEarly()
        raise

    # add it to the show list
    app.showList.append(self.show)

    try:
        self.show.load_episodes_from_indexer(tvapi=indexer_api)
    except Exception as e:
        logger.log(
            u"Error with " + indexerApi(self.show.indexer).name +
            ", not creating episode list: " + e.message, logger.ERROR)
        logger.log(traceback.format_exc(), logger.DEBUG)

    # update internal name cache
    name_cache.build_name_cache(self.show)

    try:
        self.show.load_episodes_from_dir()
    except Exception as e:
        logger.log(u"Error searching dir for episodes: " + e.message, logger.ERROR)
        logger.log(traceback.format_exc(), logger.DEBUG)

    # if they set default ep status to WANTED then run the backlog to search for episodes
    # FIXME: This needs to be a backlog queue item!!!
    if self.show.default_ep_status == WANTED:
        logger.log(u"Launching backlog for this show since its episodes are WANTED")
        app.backlog_search_scheduler.action.search_backlog([self.show])

    self.show.write_metadata()
    self.show.update_metadata()
    self.show.populate_cache()

    self.show.flush_episodes()

    if app.USE_TRAKT:
        # if there are specific episodes that need to be added by trakt
        app.trakt_checker_scheduler.action.manage_new_show(self.show)

        # add show to trakt.tv library
        if app.TRAKT_SYNC:
            app.trakt_checker_scheduler.action.add_show_trakt_library(self.show)

        if app.TRAKT_SYNC_WATCHLIST:
            logger.log(u"update watchlist")
            notifiers.trakt_notifier.update_watchlist(show_obj=self.show)

    # Load XEM data to DB for show
    scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer, force=True)

    # check if show has XEM mapping so we can determine if searches
    # should go by scene numbering or indexer numbering.
    if not self.scene and scene_numbering.get_xem_numbering_for_show(
            self.show.indexerid, self.show.indexer):
        self.show.scene = 1

    # After initial add, set to default_status_after.
    self.show.default_ep_status = self.default_status_after

    self.finish()
def search_providers(series_obj, episodes, forced_search=False, down_cur_quality=False,
                     manual_search=False, manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    manual_search_results = []
    multi_results = []
    single_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
            u' Please check your settings', 'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        # Tag the current thread with the provider being searched, for log readability.
        threading.currentThread().name = '{original_thread_name} :: [{provider}]'.format(
            original_thread_name=original_thread_name, provider=cur_provider.name)

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search for episode when manually searching when in sponly
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        # Search loop: at most two passes (initial mode + optional fallback mode).
        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}', series_obj.name)

            try:
                search_results = []
                cache_search_results = []
                cache_multi = []
                cache_single = []

                if not manual_search:
                    cache_search_results = cur_provider.search_results_in_cache(episodes)
                    if cache_search_results:
                        # From our provider multi_episode and single_episode results, collect candidates.
                        cache_found_results = list_results_for_provider(
                            cache_search_results, found_results, cur_provider)
                        # We're passing the empty lists, because we don't want to include previous candidates
                        cache_multi, cache_single = collect_candidates(
                            cache_found_results, cur_provider, [], [],
                            series_obj, down_cur_quality)

                # For now we only search if we didn't get any results back from cache,
                # but we might wanna check if there was something useful in cache.
                if not (cache_multi or cache_single):
                    log.debug(u'Could not find any candidates in cache, searching provider.')
                    search_results = cur_provider.find_search_results(
                        series_obj, episodes, search_mode, forced_search,
                        down_cur_quality, manual_search, manual_search_type)
                    # Update the list found_results
                    found_results = list_results_for_provider(
                        search_results, found_results, cur_provider)
                else:
                    # NOTE: cache_found_results is only bound when cache_search_results
                    # was truthy, which is guaranteed here because cache_multi/cache_single
                    # can only be non-empty in that case.
                    found_results = cache_found_results

            except AuthException as error:
                log.error(u'Authentication error: {0!r}', error)
                break

            if search_results or cache_search_results:
                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fallback when doing manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with episodes that we where looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [
                    episode_obj.episode for episode_obj in episodes
                ] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results and
                        cur_provider.cache.update_cache_manual_search(
                            search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        # Accumulate this provider's candidates into the running multi/single lists.
        multi_results, single_results = collect_candidates(
            found_results, cur_provider, multi_results,
            single_results, series_obj, down_cur_quality)

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # If results in manual search return True, else False
        return any(manual_search_results)
    else:
        return combine_results(multi_results, single_results)
def run(self):
    """Add a new show to the library from an indexer.

    Looks the show up on the configured indexer, creates its directory if
    needed, builds the Series object with the queued options, loads episodes
    from the indexer and from disk, and kicks off backlog searches for
    episodes defaulted to WANTED.  Bails out early (via ``return`` /
    ``self._finishEarly()``) on any unrecoverable lookup or setup failure.
    """
    ShowQueueItem.run(self)
    log.info('Starting to add show by {0}',
             ('show_dir: {0}'.format(self.show_dir)
              if self.show_dir else
              'Indexer Id: {0}'.format(self.indexer_id)))

    # make sure the Indexer IDs are valid
    try:
        l_indexer_api_params = indexerApi(self.indexer).api_params.copy()
        if self.lang:
            l_indexer_api_params['language'] = self.lang

        log.info('{indexer_name}: {indexer_params!r}', {
            'indexer_name': indexerApi(self.indexer).name,
            'indexer_params': l_indexer_api_params
        })

        indexer_api = indexerApi(self.indexer).indexer(**l_indexer_api_params)
        # s is the raw indexer record for this series id.
        s = indexer_api[self.indexer_id]

        # Let's try to create the show Dir if it's not provided. This way we force the show dir
        # to build using the Indexers provided series name
        if not self.show_dir and self.root_dir:
            show_name = get_showname_from_indexer(self.indexer, self.indexer_id, self.lang)
            if show_name:
                self.show_dir = os.path.join(self.root_dir, sanitize_filename(show_name))
                dir_exists = make_dir(self.show_dir)
                if not dir_exists:
                    log.info("Unable to create the folder {0}, can't add the show", self.show_dir)
                    return

                chmod_as_parent(self.show_dir)
            else:
                log.info("Unable to get a show {0}, can't add the show", self.show_dir)
                return

        # this usually only happens if they have an NFO in their show dir which gave us a Indexer ID that
        # has no proper english version of the show
        if getattr(s, 'seriesname', None) is None:
            log.error(
                'Show in {path} has no name on {indexer}, probably searched with the wrong language.', {
                    'path': self.show_dir,
                    'indexer': indexerApi(self.indexer).name
                })
            ui.notifications.error(
                'Unable to add show',
                'Show in {path} has no name on {indexer}, probably the wrong language.'
                ' Delete .nfo and manually add the correct language.'.
                format(path=self.show_dir,
                       indexer=indexerApi(self.indexer).name))
            self._finishEarly()
            return

        # Check if we can already find this show in our current showList.
        try:
            check_existing_shows(s, self.indexer)
        except IndexerShowAlreadyInLibrary as error:
            log.warning(
                'Could not add the show {series}, as it already is in your library.'
                ' Error: {error}', {
                    'series': s['seriesname'],
                    'error': error
                })
            ui.notifications.error('Unable to add show',
                                   'reason: {0}'.format(error))
            self._finishEarly()
            # Clean up leftover if the newly created directory is empty.
            delete_empty_folders(self.show_dir)
            return

    # TODO: Add more specific indexer exceptions, that should provide the user with some accurate feedback.
    except IndexerShowNotFound as error:
        log.warning(
            '{id}: Unable to look up the show in {path} using id {id} on {indexer}.'
            ' Delete metadata files from the folder and try adding it again.\n'
            'With error: {error}', {
                'id': self.indexer_id,
                'path': self.show_dir,
                'indexer': indexerApi(self.indexer).name,
                'error': error
            })
        ui.notifications.error(
            'Unable to add show',
            'Unable to look up the show in {path} using id {id} on {indexer}.'
            ' Delete metadata files from the folder and try adding it again.'
            .format(path=self.show_dir,
                    id=self.indexer_id,
                    indexer=indexerApi(self.indexer).name))
        self._finishEarly()
        return
    except IndexerShowNotFoundInLanguage as error:
        log.warning(
            '{id}: Data retrieved from {indexer} was incomplete. The indexer does not provide'
            ' show information in the searched language {language}. Aborting: {error_msg}', {
                'id': self.indexer_id,
                'indexer': indexerApi(self.indexer).name,
                'language': error.language,
                'error_msg': error
            })
        ui.notifications.error(
            'Error adding show!',
            'Unable to add show {indexer_id} on {indexer} with this language: {language}'
            .format(indexer_id=self.indexer_id,
                    indexer=indexerApi(self.indexer).name,
                    language=error.language))
        self._finishEarly()
        return
    except Exception as error:
        log.error(
            '{id}: Error while loading information from indexer {indexer}.'
            ' Error: {error!r}', {
                'id': self.indexer_id,
                'indexer': indexerApi(self.indexer).name,
                'error': error
            })
        ui.notifications.error(
            'Unable to add show',
            'Unable to look up the show in {path} on {indexer} using ID {id}.'
            .format(path=self.show_dir,
                    indexer=indexerApi(self.indexer).name,
                    id=self.indexer_id))
        self._finishEarly()
        return

    try:
        newShow = Series(self.indexer, self.indexer_id, self.lang)
        newShow.load_from_indexer(indexer_api)

        self.show = newShow

        # set up initial values
        self.show.location = self.show_dir
        # Queued options win; otherwise fall back to the app-wide defaults.
        self.show.subtitles = self.subtitles if self.subtitles is not None else app.SUBTITLES_DEFAULT
        self.show.quality = self.quality if self.quality else app.QUALITY_DEFAULT
        self.show.season_folders = self.season_folders if self.season_folders is not None \
            else app.SEASON_FOLDERS_DEFAULT
        self.show.anime = self.anime if self.anime is not None else app.ANIME_DEFAULT
        self.show.scene = self.scene if self.scene is not None else app.SCENE_DEFAULT
        self.show.paused = self.paused if self.paused is not None else False

        # set up default new/missing episode status
        log.info(
            'Setting all previously aired episodes to the specified status: {status}',
            {'status': statusStrings[self.default_status]})
        self.show.default_ep_status = self.default_status

        if self.show.anime:
            # Anime shows track release-group black/white lists.
            self.show.release_groups = BlackAndWhiteList(self.show)
            if self.blacklist:
                self.show.release_groups.set_black_keywords(self.blacklist)
            if self.whitelist:
                self.show.release_groups.set_white_keywords(self.whitelist)

    except IndexerException as error:
        log.error(
            'Unable to add show due to an error with {indexer}: {error}', {
                'indexer': indexerApi(self.indexer).name,
                'error': error
            })
        ui.notifications.error(
            'Unable to add {series_name} due to an error with {indexer_name}'
            .format(series_name=self.show.name if self.show else 'show',
                    indexer_name=indexerApi(self.indexer).name))
        self._finishEarly()
        return

    except MultipleShowObjectsException:
        log.warning(
            'The show in {show_dir} is already in your show list, skipping',
            {'show_dir': self.show_dir})
        ui.notifications.error(
            'Show skipped',
            'The show in {show_dir} is already in your show list'.format(
                show_dir=self.show_dir))
        self._finishEarly()
        return

    except Exception as error:
        log.error('Error trying to add show: {0}', error)
        log.debug(traceback.format_exc())
        self._finishEarly()
        raise

    log.debug('Retrieving show info from IMDb')
    try:
        self.show.load_imdb_info()
    except ImdbAPIError as error:
        # IMDb enrichment is best-effort; failures do not abort the add.
        log.info('Something wrong on IMDb api: {0}', error)
    except RequestException as error:
        log.warning('Error loading IMDb info: {0}', error)

    try:
        log.debug('{id}: Saving new show to database',
                  {'id': self.show.series_id})
        self.show.save_to_db()
    except Exception as error:
        log.error('Error saving the show to the database: {0}', error)
        log.debug(traceback.format_exc())
        self._finishEarly()
        raise

    # add it to the show list
    app.showList.append(self.show)

    try:
        self.show.load_episodes_from_indexer(tvapi=indexer_api)
    except Exception as error:
        log.error(
            'Error with {indexer}, not creating episode list: {error}', {
                'indexer': indexerApi(self.show.indexer).name,
                'error': error
            })
        log.debug(traceback.format_exc())

    # update internal name cache
    name_cache.build_name_cache(self.show)

    try:
        self.show.load_episodes_from_dir()
    except Exception as error:
        log.error('Error searching dir for episodes: {0}', error)
        log.debug(traceback.format_exc())

    # if they set default ep status to WANTED then run the backlog to search for episodes
    if self.show.default_ep_status == WANTED:
        log.info(
            'Launching backlog for this show since its episodes are WANTED'
        )
        wanted_segments = self.show.get_wanted_segments()
        for season, segment in viewitems(wanted_segments):
            cur_backlog_queue_item = BacklogQueueItem(self.show, segment)
            app.forced_search_queue_scheduler.action.add_item(
                cur_backlog_queue_item)
            log.info('Sending forced backlog for {show} season {season}'
                     ' because some episodes were set to wanted'.format(
                         show=self.show.name, season=season))

    self.show.write_metadata()
    self.show.update_metadata()
    self.show.populate_cache()

    self.show.flush_episodes()

    if app.USE_TRAKT:
        # if there are specific episodes that need to be added by trakt
        app.trakt_checker_scheduler.action.manage_new_show(self.show)

        # add show to trakt.tv library
        if app.TRAKT_SYNC:
            app.trakt_checker_scheduler.action.add_show_trakt_library(
                self.show)

        if app.TRAKT_SYNC_WATCHLIST:
            log.info('update watchlist')
            notifiers.trakt_notifier.update_watchlist(show_obj=self.show)

    # Load XEM data to DB for show
    scene_numbering.xem_refresh(self.show, force=True)

    # check if show has XEM mapping so we can determine if searches
    # should go by scene numbering or indexer numbering. Warn the user.
    if not self.scene and scene_numbering.get_xem_numbering_for_show(
            self.show):
        log.warning(
            '{id}: while adding the show {title} we noticed thexem.de has an episode mapping available'
            '\nyou might want to consider enabling the scene option for this show.', {
                'id': self.show.series_id,
                'title': self.show.name
            })
        ui.notifications.message(
            'consider enabling scene for this show',
            'for show {title} you might want to consider enabling the scene option'
            .format(title=self.show.name))

    # After initial add, set to default_status_after.
    self.show.default_ep_status = self.default_status_after

    try:
        log.debug('{id}: Saving new show info to database',
                  {'id': self.show.series_id})
        self.show.save_to_db()
    except Exception as error:
        log.warning(
            '{id}: Error saving new show info to database: {error_msg}', {
                'id': self.show.series_id,
                'error_msg': error
            })
        log.error(traceback.format_exc())

    # Send ws update to client
    ws.Message('showAdded', self.show.to_json(detailed=False)).push()

    self.finish()
def search_providers(series_obj, episodes, forced_search=False, down_cur_quality=False,
                     manual_search=False, manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    manual_search_results = []
    multi_results = []
    single_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    # Remember the worker thread's name so it can be restored after tagging
    # it with each provider's name (done purely for log readability).
    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
                    u' Please check your settings', 'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        threading.currentThread().name = '{original_thread_name} :: [{provider}]'.format(
            original_thread_name=original_thread_name, provider=cur_provider.name
        )

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search for episode when manually searching when in sponly
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        # Search loop: runs at most twice per provider — once in the initial
        # mode and once in the fallback mode (sponly <-> eponly).
        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}', series_obj.name)

            try:
                search_results = []
                cache_search_results = []
                cache_multi = []
                cache_single = []

                if not manual_search:
                    cache_search_results = cur_provider.search_results_in_cache(episodes)
                    if cache_search_results:
                        # From our provider multi_episode and single_episode results, collect candidates.
                        cache_found_results = list_results_for_provider(cache_search_results, found_results, cur_provider)
                        # We're passing the empty lists, because we don't want to include previous candidates
                        cache_multi, cache_single = collect_candidates(cache_found_results, cur_provider, [], [],
                                                                       series_obj, down_cur_quality)

                # For now we only search if we didn't get any results back from cache,
                # but we might wanna check if there was something useful in cache.
                if not (cache_multi or cache_single):
                    log.debug(u'Could not find any candidates in cache, searching provider.')
                    search_results = cur_provider.find_search_results(series_obj, episodes, search_mode, forced_search,
                                                                      down_cur_quality, manual_search, manual_search_type)
                    # Update the list found_results
                    found_results = list_results_for_provider(search_results, found_results, cur_provider)
                else:
                    # Cache produced candidates, so keep the cache-derived results.
                    # NOTE: cache_found_results is only bound on the cache hit path,
                    # which is the only way cache_multi/cache_single can be truthy.
                    found_results = cache_found_results
            except AuthException as error:
                log.error(u'Authentication error: {0!r}', error)
                break

            if search_results or cache_search_results:
                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fallback when doing manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with episodes that we where looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [episode_obj.episode for episode_obj in episodes] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results and
                        cur_provider.cache.update_cache_manual_search(searched_episode in search_results and
                                                                      search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        multi_results, single_results = collect_candidates(found_results, cur_provider, multi_results,
                                                           single_results, series_obj, down_cur_quality)

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # If results in manual search return True, else False
        return any(manual_search_results)
    else:
        return combine_results(multi_results, single_results)
def search_providers(series_obj, episodes, forced_search=False, down_cur_quality=False,
                     manual_search=False, manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    manual_search_results = []
    multi_results = []
    single_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    # Remember the worker thread's name so it can be restored after tagging
    # it with each provider's name (done purely for log readability).
    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
            u' Please check your settings', 'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        threading.currentThread(
        ).name = original_thread_name + u' :: [' + cur_provider.name + u']'

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search for episode when manually searching when in sponly
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        # Search loop: runs at most twice per provider — once in the initial
        # mode and once in the fallback mode (sponly <-> eponly).
        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}',
                         series_obj.name)

            try:
                search_results = cur_provider.find_search_results(
                    series_obj, episodes, search_mode, forced_search,
                    down_cur_quality, manual_search, manual_search_type)
            except AuthException as error:
                log.error(u'Authentication error: {0}', ex(error))
                break

            if search_results:
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    if cur_ep in found_results[cur_provider.name]:
                        found_results[cur_provider.
                                      name][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[
                            cur_provider.name][cur_ep] = search_results[cur_ep]

                    # Sort the list by seeders if possible
                    if cur_provider.provider_type == u'torrent' or getattr(
                            cur_provider, u'torznab', None):
                        found_results[cur_provider.name][cur_ep].sort(
                            key=lambda d: int(d.seeders), reverse=True)

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fallback when doing manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with episodes that we where looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [
                    episode_obj.episode for episode_obj in episodes
                ] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results
                        and cur_provider.cache.update_cache_manual_search(
                            search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        # Collect candidates for multi-episode or season results
        candidates = (candidate for result, candidate in iteritems(
            found_results[cur_provider.name])
                      if result in (SEASON_RESULT, MULTI_EP_RESULT))
        candidates = list(itertools.chain(*candidates))
        if candidates:
            multi_results += collect_multi_candidates(candidates, series_obj,
                                                      episodes,
                                                      down_cur_quality)

        # Collect candidates for single-episode results
        single_results = collect_single_candidates(
            found_results[cur_provider.name], single_results)

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # If results in manual search return True, else False
        return any(manual_search_results)
    else:
        return combine_results(multi_results, single_results)
def run(self):
    """Run QueueItemChangeIndexer queue item.

    Removes the show identified by ``self.old_slug`` (including its Trakt
    collection entries when Trakt is enabled), then re-adds it from the new
    indexer identified by ``self.new_slug``: loads show and episode data,
    applies the stored options, writes metadata, and optionally triggers a
    backlog search.  Progress is streamed to connected Web UIs through
    WebSocket ``QueueItemShow`` messages.
    """
    step = []

    # Small helper, to reduce code for messaging
    def message_step(new_step):
        step.append(new_step)
        ws.Message(
            'QueueItemShow',
            dict(step=step,
                 oldShow=self.old_show.to_json() if self.old_show else {},
                 newShow=self.new_show.to_json() if self.new_show else {},
                 **self.to_json)).push()

    ShowQueueItem.run(self)

    def get_show_from_slug(slug):
        """Resolve a series slug to a Series object, or raise if the slug is invalid."""
        identifier = SeriesIdentifier.from_slug(slug)
        if not identifier:
            raise ChangeIndexerException(
                f'Could not create identifier with slug {slug}')

        show = Series.find_by_identifier(identifier)
        return show

    try:
        # Create reference to old show, before starting the remove it.
        self.old_show = get_show_from_slug(self.old_slug)

        # Store needed options.
        self._store_options()

        # Start of removing the old show
        log.info('{id}: Removing {show}', {
            'id': self.old_show.series_id,
            'show': self.old_show.name
        })
        message_step(f'Removing old show {self.old_show.name}')

        # Need to first remove the episodes from the Trakt collection, because we need the list of
        # Episodes from the db to know which eps to remove.
        if app.USE_TRAKT:
            message_step('Removing episodes from trakt collection')
            try:
                app.trakt_checker_scheduler.action.remove_show_trakt_library(
                    self.old_show)
            except TraktException as error:
                log.warning(
                    '{id}: Unable to delete show {show} from Trakt.'
                    ' Please remove manually otherwise it will be added again.'
                    ' Error: {error_msg}', {
                        'id': self.old_show.series_id,
                        'show': self.old_show.name,
                        'error_msg': error
                    })
            except Exception as error:
                # Fixed: placeholder was '{error' (missing closing brace),
                # which broke the formatted log message.
                log.exception(
                    'Exception occurred while trying to delete show {show}, error: {error}', {
                        'show': self.old_show.name,
                        'error': error
                    })

        self.old_show.delete_show(full=False)
        # Send showRemoved to frontend, so we can remove it from localStorage.
        ws.Message('showRemoved', self.old_show.to_json(
            detailed=False)).push()  # Send ws update to client

        # Double check to see if the show really has been removed, else bail.
        if get_show_from_slug(self.old_slug):
            raise ChangeIndexerException(
                f'Could not create identifier with slug {self.old_slug}')

        # Start adding the new show
        log.info('Starting to add show by {0}',
                 ('show_dir: {0}'.format(self.show_dir)
                  if self.show_dir else
                  'New slug: {0}'.format(self.new_slug)))

        self.new_show = Series.from_identifier(
            SeriesIdentifier.from_slug(self.new_slug))

        try:
            # Push an update to any open Web UIs through the WebSocket
            message_step('load show from {indexer}'.format(
                indexer=indexerApi(self.new_show.indexer).name))

            api = self.new_show.identifier.get_indexer_api(self.options)

            if getattr(api[self.new_show.series_id], 'seriesname', None) is None:
                log.error(
                    'Show in {path} has no name on {indexer}, probably searched with the wrong language.',
                    {
                        'path': self.show_dir,
                        'indexer': indexerApi(self.new_show.indexer).name
                    })

                ui.notifications.error(
                    'Unable to add show',
                    'Show in {path} has no name on {indexer}, probably the wrong language.'
                    ' Delete .nfo and manually add the correct language.'.
                    format(path=self.show_dir,
                           indexer=indexerApi(self.new_show.indexer).name))
                self._finish_early()
                # NOTE(review): the '{0!r}' placeholder below is never filled in
                # (no .format call) — the raised message contains the literal
                # placeholder. Left as-is; the intended argument is unclear.
                raise SaveSeriesException(
                    'Indexer is missing a showname in this language: {0!r}'
                )

            self.new_show.load_from_indexer(tvapi=api)
            message_step('load info from imdb')
            self.new_show.load_imdb_info()
        except IndexerException as error:
            log.warning(
                'Unable to load series from indexer: {0!r}'.format(error))
            raise SaveSeriesException(
                'Unable to load series from indexer: {0!r}'.format(error))

        try:
            message_step('configure show options')
            self.new_show.configure(self)
        except KeyError as error:
            log.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}',
                {
                    'series_name': self.new_show.name,
                    'error': error
                })
            ui.notifications.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'
                .format(series_name=self.new_show.name, error=error))
            raise SaveSeriesException(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'
                .format(series_name=self.new_show.name, error=error))

        except Exception as error:
            log.error('Error trying to configure show: {0}', error)
            log.debug(traceback.format_exc())
            raise

        app.showList.append(self.new_show)
        self.new_show.save_to_db()

        try:
            message_step('load episodes from {indexer}'.format(
                indexer=indexerApi(self.new_show.indexer).name))
            self.new_show.load_episodes_from_indexer(tvapi=api)
            # If we provide a default_status_after through the apiv2 series route options object.
            # set it after we've added the episodes.
            self.new_show.default_ep_status = self.options[
                'default_status_after'] or app.STATUS_DEFAULT_AFTER

        except IndexerException as error:
            log.warning(
                'Unable to load series episodes from indexer: {0!r}'.
                format(error))
            raise SaveSeriesException(
                'Unable to load series episodes from indexer: {0!r}'.
                format(error))

        message_step('create metadata in show folder')
        self.new_show.write_metadata()
        self.new_show.update_metadata()
        self.new_show.populate_cache()
        build_name_cache(self.new_show)  # update internal name cache
        self.new_show.flush_episodes()
        self.new_show.sync_trakt()

        message_step('add scene numbering')
        self.new_show.add_scene_numbering()

        if self.show_dir:
            # If a show dir was passed, this was added as an existing show.
            # For new shows we shouldn't have any files on disk.
            message_step('refresh episodes from disk')
            try:
                app.show_queue_scheduler.action.refreshShow(self.new_show)
            except CantRefreshShowException as error:
                log.warning(
                    'Unable to rescan episodes from disk: {0!r}'.format(
                        error))

    except (ChangeIndexerException, SaveSeriesException) as error:
        log.warning('Unable to add series: {0!r}'.format(error))
        self.success = False
        self._finish_early()
        log.debug(traceback.format_exc())
        # NOTE(review): there is no return here, so execution falls through to
        # the code below even on failure, and self.success is set back to True.
        # Preserved as-is to avoid a behavior change — confirm intent upstream.

    default_status = self.options['default_status'] or app.STATUS_DEFAULT
    if statusStrings[default_status] == 'Wanted':
        message_step('trigger backlog search')
        app.backlog_search_scheduler.action.search_backlog([self.new_show])

    self.success = True

    ws.Message('showAdded', self.new_show.to_json(
        detailed=False)).push()  # Send ws update to client
    message_step('finished')
    self.finish()