def run(self):
    """Run the refresh queue item."""
    ShowQueueItem.run(self)

    log.info(
        '{id}: Performing refresh on {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )

    ws.Message('QueueItemShow', self.to_json).push()

    try:
        self.show.refresh_dir()
        if self.force:
            self.show.update_metadata()
        self.show.write_metadata()
        self.show.populate_cache()

        # Load XEM data to DB for show
        scene_numbering.xem_refresh(self.show, force=True)

        self.success = True
    except IndexerException as error:
        log.warning(
            '{id}: Unable to contact {indexer}. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
    except Exception as error:
        log.error(
            '{id}: Error while refreshing show {show}. Error: {error_msg}',
            {'id': self.show.series_id, 'show': self.show.name, 'error_msg': error}
        )

    self.finish()

    ws.Message('QueueItemShow', self.to_json).push()
def run(self):
    """Run the remove-show queue item."""
    ShowQueueItem.run(self)

    ws.Message('QueueItemShow', self.to_json).push()

    log.info(
        '{id}: Removing {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )

    # Need to first remove the episodes from the Trakt collection, because we need the list of
    # episodes from the db to know which eps to remove.
    if app.USE_TRAKT:
        try:
            app.trakt_checker_scheduler.action.remove_show_trakt_library(self.show)
        except TraktException as error:
            log.warning(
                '{id}: Unable to delete show {show} from Trakt.'
                ' Please remove manually otherwise it will be added again.'
                ' Error: {error_msg}',
                {'id': self.show.series_id, 'show': self.show.name, 'error_msg': error}
            )
        except Exception as error:
            log.exception(
                'Exception occurred while trying to delete show {show}, error: {error}',
                {'show': self.show.name, 'error': error}
            )

    self.show.delete_show(full=self.full)

    self.finish()

    # Send showRemoved to frontend, so we can remove it from localStorage.
    ws.Message('QueueItemShow', self.show.to_json(detailed=False)).push()
def run(self): """Run postprocess queueitem thread.""" generic_queue.QueueItem.run(self) self.started = True try: log.info('Beginning postprocessing for path {path} and resource {resource}', { 'path': self.path, 'resource': self.resource_name }) # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() if not self.path and self.resource_name: # We don't have a path, but do have a resource name. If this is a failed download. # Let's use the TV_DOWNLOAD_DIR as path combined with the resource_name. self.path = app.TV_DOWNLOAD_DIR if self.path: process_results = self.process_path() if process_results._output: self.to_json.update({'output': process_results._output}) log.info('Completed Postproccessing') # Use success as a flag for a finished PP. PP it self can be succeeded or failed. self.success = True # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() # TODO: Remove the catch all exception. except Exception: self.success = False log.debug(traceback.format_exc())
def run(self): """Run recommended show update thread.""" generic_queue.QueueItem.run(self) self.started = True # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() try: # Update recommended shows from trakt, imdb and anidb # recommended shows are dogpilled into cache/recommended.dbm log.info(u'Started caching recommended shows') self._get_trakt_shows() self._get_imdb_shows() self._get_anidb_shows() self._get_anilist_shows() log.info(u'Finished caching recommended shows') self.success = True except Exception as error: self.success = False log.exception('RecommendedShowQueueItem Exception, error: {error}', {'error': error}) self.success = bool(self.success) # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() self.finish()
def run(self): """Run manual snatch job.""" generic_queue.QueueItem.run(self) self.started = True result = self.search_result try: log.info('Beginning to snatch release: {name}', {'name': result.name}) # Push an update to any open Web UIs through the WebSocket msg = ws.Message('QueueItemUpdate', self.to_json) msg.push() if result: if result.seeders not in (-1, None) and result.leechers not in (-1, None): log.info( 'Downloading {name} with {seeders} seeders and {leechers} leechers' ' and size {size} from {provider}, through a {search_type} search', { 'name': result.name, 'seeders': result.seeders, 'leechers': result.leechers, 'size': pretty_file_size(result.size), 'provider': result.provider.name, 'search_type': result.search_type } ) else: log.info( 'Downloading {name} with size: {size} from {provider}, through a {search_type} search', { 'name': result.name, 'size': pretty_file_size(result.size), 'provider': result.provider.name, 'search_type': result.search_type } ) self.success = snatch_episode(result) else: log.info('Unable to snatch release: {name}', {'name': result.name}) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) except Exception: self.success = False log.exception('Snatch failed! For result: {name}', {'name': result.name}) ui.notifications.message('Error while snatching selected result', 'Unable to snatch the result for <i>{name}</i>'.format(name=result.name)) if self.success is None: self.success = False # Push an update to any open Web UIs through the WebSocket msg = ws.Message('QueueItemUpdate', self.to_json) msg.push() self.finish()
def run(self): """Run manual search thread.""" generic_queue.QueueItem.run(self) self.started = True try: log.info( 'Beginning {search_type} {season_pack}search for: {ep}', { 'search_type': 'manual', 'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')], 'ep': self.segment[0].pretty_name() } ) # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() search_result = search_providers(self.show, self.segment, forced_search=True, down_cur_quality=True, manual_search=True, manual_search_type=self.manual_search_type) if search_result: self.results = search_result self.success = True if self.manual_search_type == 'season': ui.notifications.message('We have found season packs for {show_name}' .format(show_name=self.show.name), 'These should become visible in the manual select page.') else: ui.notifications.message('We have found results for {ep}' .format(ep=self.segment[0].pretty_name()), 'These should become visible in the manual select page.') else: ui.notifications.message('No results were found') log.info( 'Unable to find {search_type} {season_pack}results for: {ep}', { 'search_type': 'manual', 'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')], 'ep': self.segment[0].pretty_name() } ) # TODO: Remove catch all exception. except Exception: self.success = False log.debug(traceback.format_exc()) # Keep a list with the 100 last executed searches fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE) if self.success is None: self.success = False # Push an update to any open Web UIs through the WebSocket msg = ws.Message('QueueItemUpdate', self.to_json) msg.push() self.finish()
def run(self, force=False): """Start the Download Handler Thread.""" if self.amActive: log.debug( 'Download handler is still running, not starting it again') return self.amActive = True # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self._to_json).push() try: if app.USE_TORRENTS and app.TORRENT_METHOD != 'blackhole': torrent_client = torrent.get_client_class(app.TORRENT_METHOD)() self._update_status(torrent_client) self._check_postprocess(torrent_client) self._check_torrent_ratio(torrent_client) self._clean(torrent_client) except NotImplementedError: log.warning( 'Feature not currently implemented for this torrent client({torrent_client})', torrent_client=app.TORRENT_METHOD) except (RequestException, DownloadClientConnectionException) as error: log.warning( 'Unable to connect to {torrent_client}. Error: {error}', torrent_client=app.TORRENT_METHOD, error=error) except Exception as error: log.exception( 'Exception while checking torrent status. with error: {error}', {'error': error}) try: if app.USE_NZBS and app.NZB_METHOD != 'blackhole': nzb_client = sab if app.NZB_METHOD == 'sabnzbd' else nzbget self._update_status(nzb_client) self._check_postprocess(nzb_client) self._clean(nzb_client) except NotImplementedError: log.warning( 'Feature not currently implemented for this torrent client({torrent_client})', torrent_client=app.TORRENT_METHOD) except (RequestException, DownloadClientConnectionException) as error: log.warning( 'Unable to connect to {torrent_client}. Error: {error}', torrent_client=app.TORRENT_METHOD, error=error) except Exception as error: log.exception( 'Exception while checking torrent status. with error: {error}', {'error': error}) self.amActive = False # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self._to_json).push()
def run(self):
    """Run the subtitle download queue item."""
    ShowQueueItem.run(self)

    ws.Message('QueueItemShow', self.to_json).push()

    log.info(
        '{id}: Downloading subtitles for {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )

    self.show.download_subtitles()
    self.finish()

    ws.Message('QueueItemShow', self.to_json).push()
def run(self, force=False): """Run the backlog.""" try: if force: self.forced = True # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self._to_json).push() self.search_backlog() ws.Message('QueueItemUpdate', self._to_json).push() except Exception: self.amActive = False raise
def run(self): """Run postprocess queueitem thread.""" generic_queue.QueueItem.run(self) self.started = True try: log.info('Beginning postprocessing for path {path}', {'path': self.path}) # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() path = self.path or app.TV_DOWNLOAD_DIR process_method = self.process_method or app.PROCESS_METHOD process_results = ProcessResult(path, process_method, failed=self.failed) process_results.process(resource_name=self.resource_name, force=self.force, is_priority=self.is_priority, delete_on=self.delete_on, proc_type=self.proc_type, ignore_subs=self.ignore_subs) # A user might want to use advanced post-processing, but opt-out of failed download handling. if process_results.failed and app.USE_FAILED_DOWNLOADS: process_results.process_failed(path) # In case we have an info_hash or (nzbid), update the history table with the pp results. if self.info_hash: self.update_history(process_results) log.info('Completed Postproccessing') if process_results._output: self.to_json.update({'output': process_results._output}) # Use success as a flag for a finished PP. PP it self can be succeeded or failed. self.success = True # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() # TODO: Remove the catch all exception. except Exception: self.success = False log.debug(traceback.format_exc())
def patch(self, identifier, *args, **kwargs):
    """Patch general configuration."""
    if not identifier:
        return self._bad_request('Config identifier not specified')

    if identifier != 'main':
        return self._not_found('Config not found')

    data = json_decode(self.request.body)
    accepted = {}
    ignored = {}

    for key, value in iter_nested_items(data):
        patch_field = self.patches.get(key)
        if patch_field and patch_field.patch(app, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    if ignored:
        log.warning('Config patch ignored {items!r}', {'items': ignored})

    # Make sure to update the config file after everything is updated
    app.instance.save_config()

    # Push an update to any open Web UIs through the WebSocket
    msg = ws.Message('configUpdated', {
        'section': identifier,
        'config': DataGenerator.get_data(identifier)
    })
    msg.push()

    self._ok(data=accepted)
def message_step(new_step):
    step.append(new_step)
    ws.Message(
        'QueueItemShow',
        dict(step=step,
             oldShow=self.old_show.to_json() if self.old_show else {},
             newShow=self.new_show.to_json() if self.new_show else {},
             **self.to_json)
    ).push()
def run(self): """Run proper search thread.""" generic_queue.QueueItem.run(self) self.started = True try: log.info('Beginning proper search for new episodes') # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() # If force we should ignore existing processed propers self.ignore_processed_propers = False if self.force: self.ignore_processed_propers = True log.debug("Ignoring already processed propers as it's a forced search") log.info('Using proper search days: {search_days}', {'search_days': app.PROPERS_SEARCH_DAYS}) propers = self._get_proper_results() if propers: self._download_propers(propers) self._set_last_proper_search(datetime.datetime.today().toordinal()) run_at = '' if app.proper_finder_scheduler.start_time is None: run_in = app.proper_finder_scheduler.lastRun + \ app.proper_finder_scheduler.cycleTime - datetime.datetime.now() hours, remainder = divmod(run_in.seconds, 3600) minutes, seconds = divmod(remainder, 60) run_at = ', next check in approx. {0}'.format( '{0}h, {1}m'.format(hours, minutes) if 0 < hours else '{0}m, {1}s'.format(minutes, seconds)) log.info('Completed the search for new propers{run_at}', {'run_at': run_at}) # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() # TODO: Remove the catch all exception. except Exception: self.success = False log.debug(traceback.format_exc())
def run(self):
    """Run the rename queue item."""
    ShowQueueItem.run(self)

    ws.Message('QueueItemShow', self.to_json).push()

    log.info(
        'Performing rename on {series_name}',
        {'series_name': self.show.name}
    )

    try:
        # validate_location is accessed for its side effect: it raises
        # ShowDirectoryNotFoundException when the show folder is missing.
        self.show.validate_location
    except ShowDirectoryNotFoundException:
        log.warning(
            "Can't perform rename on {series_name} when the show dir is missing.",
            {'series_name': self.show.name}
        )
        return

    ep_obj_rename_list = []
    ep_obj_list = self.show.get_all_episodes(has_location=True)

    for cur_ep_obj in ep_obj_list:
        # Only want to rename if we have a location
        if cur_ep_obj.location:
            if cur_ep_obj.related_episodes:
                # do we already have one of the multi-episodes in the rename list?
                have_already = False
                for cur_related_ep in cur_ep_obj.related_episodes + [cur_ep_obj]:
                    if cur_related_ep in ep_obj_rename_list:
                        have_already = True
                        break
                if not have_already:
                    ep_obj_rename_list.append(cur_ep_obj)
            else:
                ep_obj_rename_list.append(cur_ep_obj)

    for cur_ep_obj in ep_obj_rename_list:
        cur_ep_obj.rename()

    self.finish()

    ws.Message('QueueItemShow', self.to_json).push()
def add_result_to_cache(self, cache):
    """Cache the item if needed."""
    if self.add_cache_entry:
        # FIXME: Added repr parsing, as that prevents the logger from throwing an exception.
        # This can happen when there are unicode decoded chars in the release name.
        log.debug('Adding item from search to cache: {release_name!r}', release_name=self.name)

        # Push an update to any open Web UIs through the WebSocket
        ws.Message('addManualSearchResult', self.to_json()).push()

        return cache.add_cache_entry(self, parsed_result=self.parsed_result)
def error(self, title, message=''):
    """
    Add an error notification to the queue.

    title: The title of the notification
    message: The message portion of the notification
    """
    new_notification = Notification(title, message, ERROR)
    msg = ws.Message('notification', new_notification.data)
    msg.push()
def message(self, title, message=''):
    """
    Add a regular notification to the queue.

    title: The title of the notification
    message: The message portion of the notification
    """
    # self._messages.append(Notification(title, message, MESSAGE))
    new_notification = Notification(title, message, MESSAGE)
    msg = ws.Message('notification', new_notification.data)
    msg.push()
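# Hedged usage sketch (not from the source): both helpers above construct a
# Notification and push it straight over the WebSocket, so callers can
# fire-and-forget, e.g.:
#
#     ui.notifications.message('Backup complete', 'Saved to the backup folder')
#     ui.notifications.error('Backup failed', 'Disk is full')
#
# The payload shape is whatever Notification.data produces; ERROR and MESSAGE
# are assumed to be severity constants defined alongside Notification.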
def test_message_class(p):
    # Given
    event = p['event']
    data = p['data']
    expected = {'event': event, 'data': data}

    # When
    msg = ws.Message(event, data)

    # Then
    assert expected == msg.content
    assert json.dumps(expected) == msg.json()
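# For context, a minimal sketch of the contract the test above pins down. This
# is an assumption derived from the assertions, not the real implementation of
# ws.Message; the push() transport is deliberately left as a no-op.
class MessageSketch(object):
    """Hypothetical stand-in for ws.Message, kept to the tested surface."""

    def __init__(self, event, data):
        # The test asserts content is a dict carrying the event name and payload.
        self.content = {'event': event, 'data': data}

    def json(self):
        # The test asserts json() returns the serialized content.
        return json.dumps(self.content)

    def push(self):
        # Production code broadcasts to connected WebSocket clients; no-op here.
        pass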
def http_patch(self, identifier, *args, **kwargs):
    """Patch general configuration."""
    if not identifier:
        return self._bad_request('Config identifier not specified')

    if identifier != 'main':
        return self._not_found('Config not found')

    data = json_decode(self.request.body)
    accepted = {}
    ignored = {}

    # Remove the metadata providers from the nested items.
    # It's ugly, but I don't see a better solution for it right now.
    if data.get('metadata'):
        metadata_providers = data['metadata'].pop('metadataProviders')
        if metadata_providers:
            patch_metadata_providers = MetadataStructureField(app, 'metadata_provider_dict')
            if patch_metadata_providers and patch_metadata_providers.patch(app, metadata_providers):
                set_nested_value(accepted, 'metadata.metadataProviders', metadata_providers)
            else:
                set_nested_value(ignored, 'metadata.metadataProviders', metadata_providers)

    for key, value in iter_nested_items(data):
        patch_field = self.patches.get(key)
        if patch_field and patch_field.patch(app, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    if ignored:
        log.warning('Config patch ignored {items!r}', {'items': ignored})

    # Make sure to update the config file after everything is updated
    app.instance.save_config()

    # Push an update to any open Web UIs through the WebSocket
    msg = ws.Message('configUpdated', {
        'section': identifier,
        'config': DataGenerator.get_data(identifier)
    })
    msg.push()

    return self._ok(data=accepted)
def run(self):
    """Run the full show update queue item."""
    ShowQueueItem.run(self)

    ws.Message('QueueItemShow', self.to_json).push()

    log.debug(
        '{id}: Beginning update of {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )

    log.debug(
        '{id}: Retrieving show info from {indexer}',
        {'id': self.show.series_id, 'indexer': indexerApi(self.show.indexer).name}
    )
    try:
        # Let's make sure we refresh the indexer_api object attached to the show object.
        self.show.create_indexer()
        self.show.load_from_indexer()
    except IndexerError as error:
        log.warning(
            '{id}: Unable to contact {indexer}. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        return
    except IndexerAttributeNotFound as error:
        log.warning(
            '{id}: Data retrieved from {indexer} was incomplete. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        return
    except IndexerShowNotFoundInLanguage as error:
        log.warning(
            '{id}: Data retrieved from {indexer} was incomplete. The indexer does not provide'
            ' show information in the searched language {language}. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'language': error.language,
             'error_msg': error}
        )
        ui.notifications.error(
            'Error changing language of show!',
            'Unable to change language for show {show_name}'
            ' on {indexer} to language: {language}'.format(
                show_name=self.show.name,
                indexer=indexerApi(self.show.indexer).name,
                language=error.language)
        )
        return

    log.debug(
        '{id}: Retrieving show info from IMDb',
        {'id': self.show.series_id}
    )
    try:
        self.show.load_imdb_info()
    except ImdbAPIError as error:
        log.info(
            '{id}: Something went wrong with the IMDb api: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
    except RequestException as error:
        log.warning(
            '{id}: Error loading IMDb info: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )

    # have to save show before reading episodes from db
    try:
        log.debug(
            '{id}: Saving new IMDb show info to database',
            {'id': self.show.series_id}
        )
        self.show.save_to_db()
    except Exception as error:
        log.warning(
            '{id}: Error saving new IMDb show info to database: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
        log.error(traceback.format_exc())

    # get episode list from DB
    try:
        episodes_from_db = self.show.load_episodes_from_db()
    except IndexerException as error:
        log.warning(
            '{id}: Unable to contact {indexer}. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        return

    # get episode list from the indexer
    try:
        episodes_from_indexer = self.show.load_episodes_from_indexer()
    except IndexerException as error:
        log.warning(
            '{id}: Unable to get info from {indexer}. The show info will not be refreshed.'
            ' Error: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        episodes_from_indexer = None

    if episodes_from_indexer is None:
        log.warning(
            '{id}: No data returned from {indexer} during full show update.'
            ' Unable to update this show',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name}
        )
    else:
        # for each ep we found on the Indexer delete it from the DB list
        for cur_season in episodes_from_indexer:
            for cur_episode in episodes_from_indexer[cur_season]:
                if cur_season in episodes_from_db and cur_episode in episodes_from_db[cur_season]:
                    del episodes_from_db[cur_season][cur_episode]

        # remaining episodes in the DB list are not on the indexer, just delete them from the DB
        for cur_season in episodes_from_db:
            for cur_episode in episodes_from_db[cur_season]:
                log.debug(
                    '{id}: Permanently deleting episode {show} {ep} from the database',
                    {'id': self.show.series_id, 'show': self.show.name,
                     'ep': episode_num(cur_season, cur_episode)}
                )
                # Create the ep object only because I'm going to delete it
                ep_obj = self.show.get_episode(cur_season, cur_episode)
                try:
                    ep_obj.delete_episode()
                except EpisodeDeletedException:
                    log.debug(
                        '{id}: Episode {show} {ep} successfully deleted from the database',
                        {'id': self.show.series_id, 'show': self.show.name,
                         'ep': episode_num(cur_season, cur_episode)}
                    )

    # Save only after all changes were applied
    try:
        log.debug(
            '{id}: Saving all updated show info to database',
            {'id': self.show.series_id}
        )
        self.show.save_to_db()
    except Exception as error:
        log.warning(
            '{id}: Error saving all updated show info to database: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
        log.error(traceback.format_exc())

    # Replace the images in cache
    log.info(
        '{id}: Replacing images for show {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )
    replace_images(self.show)

    log.debug(
        '{id}: Finished update of {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )

    # Refresh show needs to be forced since current execution locks the queue
    app.show_queue_scheduler.action.refreshShow(self.show, True)
    self.finish()

    ws.Message('QueueItemShow', self.to_json).push()
def run(self): """Run QueueItemChangeIndexer queue item.""" step = [] # Small helper, to reduce code for messaging def message_step(new_step): step.append(new_step) ws.Message( 'QueueItemShow', dict(step=step, oldShow=self.old_show.to_json() if self.old_show else {}, newShow=self.new_show.to_json() if self.new_show else {}, **self.to_json)).push() ShowQueueItem.run(self) def get_show_from_slug(slug): identifier = SeriesIdentifier.from_slug(slug) if not identifier: raise ChangeIndexerException( f'Could not create identifier with slug {slug}') show = Series.find_by_identifier(identifier) return show try: # Create reference to old show, before starting the remove it. self.old_show = get_show_from_slug(self.old_slug) # Store needed options. self._store_options() # Start of removing the old show log.info('{id}: Removing {show}', { 'id': self.old_show.series_id, 'show': self.old_show.name }) message_step(f'Removing old show {self.old_show.name}') # Need to first remove the episodes from the Trakt collection, because we need the list of # Episodes from the db to know which eps to remove. if app.USE_TRAKT: message_step('Removing episodes from trakt collection') try: app.trakt_checker_scheduler.action.remove_show_trakt_library( self.old_show) except TraktException as error: log.warning( '{id}: Unable to delete show {show} from Trakt.' ' Please remove manually otherwise it will be added again.' ' Error: {error_msg}', { 'id': self.old_show.series_id, 'show': self.old_show.name, 'error_msg': error }) except Exception as error: log.exception( 'Exception occurred while trying to delete show {show}, error: {error', { 'show': self.old_show.name, 'error': error }) self.old_show.delete_show(full=False) # Send showRemoved to frontend, so we can remove it from localStorage. ws.Message('showRemoved', self.old_show.to_json( detailed=False)).push() # Send ws update to client # Double check to see if the show really has been removed, else bail. if get_show_from_slug(self.old_slug): raise ChangeIndexerException( f'Could not create identifier with slug {self.old_slug}') # Start adding the new show log.info('Starting to add show by {0}', ('show_dir: {0}'.format(self.show_dir) if self.show_dir else 'New slug: {0}'.format(self.new_slug))) self.new_show = Series.from_identifier( SeriesIdentifier.from_slug(self.new_slug)) try: # Push an update to any open Web UIs through the WebSocket message_step('load show from {indexer}'.format( indexer=indexerApi(self.new_show.indexer).name)) api = self.new_show.identifier.get_indexer_api(self.options) if getattr(api[self.new_show.series_id], 'seriesname', None) is None: log.error( 'Show in {path} has no name on {indexer}, probably searched with the wrong language.', { 'path': self.show_dir, 'indexer': indexerApi(self.new_show.indexer).name }) ui.notifications.error( 'Unable to add show', 'Show in {path} has no name on {indexer}, probably the wrong language.' ' Delete .nfo and manually add the correct language.'. 
format(path=self.show_dir, indexer=indexerApi(self.new_show.indexer).name)) self._finish_early() raise SaveSeriesException( 'Indexer is missing a showname in this language: {0!r}' ) self.new_show.load_from_indexer(tvapi=api) message_step('load info from imdb') self.new_show.load_imdb_info() except IndexerException as error: log.warning( 'Unable to load series from indexer: {0!r}'.format(error)) raise SaveSeriesException( 'Unable to load series from indexer: {0!r}'.format(error)) try: message_step('configure show options') self.new_show.configure(self) except KeyError as error: log.error( 'Unable to add show {series_name} due to an error with one of the provided options: {error}', { 'series_name': self.new_show.name, 'error': error }) ui.notifications.error( 'Unable to add show {series_name} due to an error with one of the provided options: {error}' .format(series_name=self.new_show.name, error=error)) raise SaveSeriesException( 'Unable to add show {series_name} due to an error with one of the provided options: {error}' .format(series_name=self.new_show.name, error=error)) except Exception as error: log.error('Error trying to configure show: {0}', error) log.debug(traceback.format_exc()) raise app.showList.append(self.new_show) self.new_show.save_to_db() try: message_step('load episodes from {indexer}'.format( indexer=indexerApi(self.new_show.indexer).name)) self.new_show.load_episodes_from_indexer(tvapi=api) # If we provide a default_status_after through the apiv2 series route options object. # set it after we've added the episodes. self.new_show.default_ep_status = self.options[ 'default_status_after'] or app.STATUS_DEFAULT_AFTER except IndexerException as error: log.warning( 'Unable to load series episodes from indexer: {0!r}'. format(error)) raise SaveSeriesException( 'Unable to load series episodes from indexer: {0!r}'. format(error)) message_step('create metadata in show folder') self.new_show.write_metadata() self.new_show.update_metadata() self.new_show.populate_cache() build_name_cache(self.new_show) # update internal name cache self.new_show.flush_episodes() self.new_show.sync_trakt() message_step('add scene numbering') self.new_show.add_scene_numbering() if self.show_dir: # If a show dir was passed, this was added as an existing show. # For new shows we shouldn't have any files on disk. message_step('refresh episodes from disk') try: app.show_queue_scheduler.action.refreshShow(self.new_show) except CantRefreshShowException as error: log.warning( 'Unable to rescan episodes from disk: {0!r}'.format( error)) except (ChangeIndexerException, SaveSeriesException) as error: log.warning('Unable to add series: {0!r}'.format(error)) self.success = False self._finish_early() log.debug(traceback.format_exc()) default_status = self.options['default_status'] or app.STATUS_DEFAULT if statusStrings[default_status] == 'Wanted': message_step('trigger backlog search') app.backlog_search_scheduler.action.search_backlog([self.new_show]) self.success = True ws.Message('showAdded', self.new_show.to_json( detailed=False)).push() # Send ws update to client message_step('finished') self.finish()
def message_step(new_step):
    step.append(new_step)
    ws.Message('QueueItemShowAdd', dict(step=step, **self.to_json)).push()
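# Illustration (an assumption, not part of the queue item): because the helper
# above closes over `step`, consecutive calls accumulate, so after
# message_step('load show from tvdb') and message_step('load info from imdb')
# the pushed payload is roughly:
#
#     {'step': ['load show from tvdb', 'load info from imdb'],
#      ...}  # plus whatever keys self.to_json already carries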
def run(self):
    """Run the add-show queue item."""
    ShowQueueItem.run(self)

    log.info(
        'Starting to add show by {0}',
        ('show_dir: {0}'.format(self.show_dir)
         if self.show_dir
         else 'Indexer Id: {0}'.format(self.indexer_id))
    )

    show_slug = indexer_id_to_slug(self.indexer, self.indexer_id)
    series = Series.from_identifier(SeriesIdentifier.from_slug(show_slug))

    step = []

    # Small helper, to reduce code for messaging
    def message_step(new_step):
        step.append(new_step)
        ws.Message('QueueItemShowAdd', dict(step=step, **self.to_json)).push()

    try:
        try:
            # Push an update to any open Web UIs through the WebSocket
            message_step('load show from {indexer}'.format(
                indexer=indexerApi(self.indexer).name))

            api = series.identifier.get_indexer_api(self.options)

            if getattr(api[self.indexer_id], 'seriesname', None) is None:
                log.error(
                    'Show in {path} has no name on {indexer}, probably searched with the wrong language.',
                    {'path': self.show_dir,
                     'indexer': indexerApi(self.indexer).name}
                )
                ui.notifications.error(
                    'Unable to add show',
                    'Show in {path} has no name on {indexer}, probably the wrong language.'
                    ' Delete .nfo and manually add the correct language.'.format(
                        path=self.show_dir,
                        indexer=indexerApi(self.indexer).name)
                )
                self._finish_early()
                raise SaveSeriesException('Indexer is missing a showname in this language: {0!r}')

            series.load_from_indexer(tvapi=api)

            message_step('load info from imdb')
            series.load_imdb_info()
        except IndexerException as error:
            log.warning('Unable to load series from indexer: {0!r}'.format(error))
            raise SaveSeriesException('Unable to load series from indexer: {0!r}'.format(error))

        message_step('check if show is already added')

        try:
            message_step('configure show options')
            series.configure(self)
        except KeyError as error:
            log.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}',
                {'series_name': series.name, 'error': error}
            )
            ui.notifications.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'.format(
                    series_name=series.name, error=error))
            raise SaveSeriesException(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'.format(
                    series_name=series.name, error=error))
        except Exception as error:
            log.error('Error trying to configure show: {0}', error)
            log.debug(traceback.format_exc())
            raise

        app.showList.append(series)
        series.save_to_db()

        try:
            message_step('load episodes from {indexer}'.format(
                indexer=indexerApi(self.indexer).name))
            series.load_episodes_from_indexer(tvapi=api)
            # If we provide a default_status_after through the apiv2 series route options object,
            # set it after we've added the episodes.
            self.default_ep_status = (self.options['default_status_after']
                                      or app.STATUS_DEFAULT_AFTER)
        except IndexerException as error:
            log.warning('Unable to load series episodes from indexer: {0!r}'.format(error))
            raise SaveSeriesException(
                'Unable to load series episodes from indexer: {0!r}'.format(error))

        message_step('create metadata in show folder')
        series.write_metadata()
        series.update_metadata()
        series.populate_cache()
        build_name_cache(series)  # update internal name cache
        series.flush_episodes()
        series.sync_trakt()

        message_step('add scene numbering')
        series.add_scene_numbering()

    except SaveSeriesException as error:
        log.warning('Unable to add series: {0!r}'.format(error))
        self.success = False
        self._finish_early()
        log.debug(traceback.format_exc())

    default_status = self.options['default_status'] or app.STATUS_DEFAULT
    if statusStrings[default_status] == 'Wanted':
        message_step('trigger backlog search')
        app.backlog_search_scheduler.action.search_backlog([series])

    self.success = True

    # Send ws update to client
    ws.Message('showAdded', series.to_json(detailed=False)).push()
    message_step('finished')
    self.finish()
def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
    """Check for needed subtitles for users' shows.

    :param force: True if a force search needs to be executed
    :type force: bool
    """
    if self.amActive:
        logger.debug(u'Subtitle finder is still running, not starting it again')
        return

    if not app.USE_SUBTITLES:
        logger.warning(u'Subtitle search is disabled. Please enable it')
        return

    if not enabled_service_list():
        logger.warning(u'Not enough services selected. At least 1 service is required to'
                       u' search subtitles in the background')
        return

    self.amActive = True

    # Push an update to any open Web UIs through the WebSocket
    ws.Message('QueueItemUpdate', self._to_json).push()

    def dhm(td):
        """Create the string for subtitles delay."""
        days_delay = td.days
        hours_delay = td.seconds // 60 ** 2
        minutes_delay = (td.seconds // 60) % 60
        ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \
              (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \
              (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0]
        if days_delay == 1:
            ret = ret.replace('days', 'day')
        if hours_delay == 1:
            ret = ret.replace('hours', 'hour')
        if minutes_delay == 1:
            ret = ret.replace('minutes', 'minute')
        return ret.rstrip(', ')

    if app.POSTPONE_IF_NO_SUBS:
        self.subtitles_download_in_pp()

    logger.info(u'Checking for missed subtitles')

    main_db_con = db.DBConnection()
    # Shows with air date <= 30 days, have a limit of 100 results
    # Shows with air date > 30 days, have a limit of 200 results
    sql_args = [{'age_comparison': '<=', 'limit': 100},
                {'age_comparison': '>', 'limit': 200}]
    sql_like_languages = ('%' + ','.join(sorted(wanted_languages())) + '%'
                          if app.SUBTITLES_MULTI else '%und%')
    sql_results = []
    for args in sql_args:
        sql_results += main_db_con.select(
            'SELECT '
            's.show_name, '
            'e.indexer, '
            'e.showid, '
            'e.season, '
            'e.episode, '
            'e.release_name, '
            'e.status, '
            'e.subtitles, '
            'e.subtitles_searchcount AS searchcount, '
            'e.subtitles_lastsearch AS lastsearch, '
            'e.location, (? - e.airdate) as age '
            'FROM '
            'tv_episodes AS e '
            'INNER JOIN tv_shows AS s '
            'ON (e.showid = s.indexer_id AND e.indexer = s.indexer) '
            'WHERE '
            's.subtitles = 1 '
            'AND s.paused = 0 '
            'AND e.status = ? '
            'AND e.season > 0 '
            "AND e.location != '' "
            'AND age {} 30 '
            'AND e.subtitles NOT LIKE ? '
            'ORDER BY '
            'lastsearch ASC '
            'LIMIT {}'.format(args['age_comparison'], args['limit']),
            [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages]
        )

    if not sql_results:
        logger.info('No subtitles to download')
        self.amActive = False
        return

    for ep_to_sub in sql_results:
        # give the CPU a break
        time.sleep(cpu_presets[app.CPU_PRESET])

        ep_num = episode_num(ep_to_sub['season'], ep_to_sub['episode']) or \
            episode_num(ep_to_sub['season'], ep_to_sub['episode'], numbering='absolute')
        subtitle_path = _encode(ep_to_sub['location'])
        if not os.path.isfile(subtitle_path):
            logger.debug('Episode file does not exist, cannot download subtitles for %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        if app.SUBTITLES_STOP_AT_FIRST and ep_to_sub['subtitles']:
            logger.debug('Episode already has one subtitle, skipping %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        if not needs_subtitles(ep_to_sub['subtitles']):
            logger.debug('Episode already has all needed subtitles, skipping %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        try:
            lastsearched = datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat)
        except ValueError:
            lastsearched = datetime.datetime.min

        if not force:
            now = datetime.datetime.now()
            days = int(ep_to_sub['age'])
            delay_time = datetime.timedelta(hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24)
            delay = lastsearched + delay_time - now

            # Search every hour until 10 days pass
            # After 10 days, search every 8 hours, after 30 days search once a month
            # Will always try an episode regardless of age for 3 times
            # The time resolution is minute
            # Only delay if it's bigger than one minute, to avoid wrongly skipping the search slot.
            if delay.total_seconds() > 60 and int(ep_to_sub['searchcount']) > 2:
                logger.debug('Subtitle search for %s %s delayed for %s',
                             ep_to_sub['show_name'], ep_num, dhm(delay))
                continue

        show_object = Show.find_by_id(app.showList, ep_to_sub['indexer'], ep_to_sub['showid'])
        if not show_object:
            logger.debug('Show with ID %s not found in the database', ep_to_sub['showid'])
            continue

        episode_object = show_object.get_episode(ep_to_sub['season'], ep_to_sub['episode'])
        if isinstance(episode_object, str):
            logger.debug('%s %s not found in the database', ep_to_sub['show_name'], ep_num)
            continue

        episode_object.download_subtitles()

    logger.info('Finished checking for missed subtitles')
    self.amActive = False

    # Push an update to any open Web UIs through the WebSocket
    ws.Message('QueueItemUpdate', self._to_json).push()
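# Worked example for the dhm() helper above (illustrative):
#
#     dhm(datetime.timedelta(days=1, hours=2, minutes=5))
#     # -> '1 day, 2 hours, 5 minutes'
#
# Zero-valued units are dropped entirely, and a unit name is singularized
# when its value is exactly 1.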
def run(self):
    """Run the add-show queue item."""
    ShowQueueItem.run(self)

    log.info(
        'Starting to add show by {0}',
        ('show_dir: {0}'.format(self.show_dir)
         if self.show_dir
         else 'Indexer Id: {0}'.format(self.indexer_id))
    )

    # make sure the Indexer IDs are valid
    try:
        l_indexer_api_params = indexerApi(self.indexer).api_params.copy()
        if self.lang:
            l_indexer_api_params['language'] = self.lang

        log.info(
            '{indexer_name}: {indexer_params!r}',
            {'indexer_name': indexerApi(self.indexer).name,
             'indexer_params': l_indexer_api_params}
        )

        indexer_api = indexerApi(self.indexer).indexer(**l_indexer_api_params)
        s = indexer_api[self.indexer_id]

        # Let's try to create the show dir if it's not provided. This way we force the show dir
        # to be built using the Indexer's provided series name
        if not self.show_dir and self.root_dir:
            show_name = get_showname_from_indexer(self.indexer, self.indexer_id, self.lang)
            if show_name:
                self.show_dir = os.path.join(self.root_dir, sanitize_filename(show_name))
                dir_exists = make_dir(self.show_dir)
                if not dir_exists:
                    log.info("Unable to create the folder {0}, can't add the show", self.show_dir)
                    return

                chmod_as_parent(self.show_dir)
            else:
                log.info("Unable to get a show {0}, can't add the show", self.show_dir)
                return

        # this usually only happens if they have an NFO in their show dir which gave us an Indexer ID
        # that has no proper english version of the show
        if getattr(s, 'seriesname', None) is None:
            log.error(
                'Show in {path} has no name on {indexer}, probably searched with the wrong language.',
                {'path': self.show_dir, 'indexer': indexerApi(self.indexer).name}
            )
            ui.notifications.error(
                'Unable to add show',
                'Show in {path} has no name on {indexer}, probably the wrong language.'
                ' Delete .nfo and manually add the correct language.'.format(
                    path=self.show_dir, indexer=indexerApi(self.indexer).name)
            )
            self._finishEarly()
            return

        # Check if we can already find this show in our current showList.
        try:
            check_existing_shows(s, self.indexer)
        except IndexerShowAlreadyInLibrary as error:
            log.warning(
                'Could not add the show {series}, as it already is in your library.'
                ' Error: {error}',
                {'series': s['seriesname'], 'error': error}
            )
            ui.notifications.error('Unable to add show', 'reason: {0}'.format(error))
            self._finishEarly()

            # Clean up leftover if the newly created directory is empty.
            delete_empty_folders(self.show_dir)
            return

    # TODO: Add more specific indexer exceptions, that should provide the user with some accurate feedback.
    except IndexerShowNotFound as error:
        log.warning(
            '{id}: Unable to look up the show in {path} using id {id} on {indexer}.'
            ' Delete metadata files from the folder and try adding it again.\n'
            'With error: {error}',
            {'id': self.indexer_id, 'path': self.show_dir,
             'indexer': indexerApi(self.indexer).name, 'error': error}
        )
        ui.notifications.error(
            'Unable to add show',
            'Unable to look up the show in {path} using id {id} on {indexer}.'
            ' Delete metadata files from the folder and try adding it again.'.format(
                path=self.show_dir, id=self.indexer_id,
                indexer=indexerApi(self.indexer).name)
        )
        self._finishEarly()
        return
    except IndexerShowNotFoundInLanguage as error:
        log.warning(
            '{id}: Data retrieved from {indexer} was incomplete. The indexer does not provide'
            ' show information in the searched language {language}. Aborting: {error_msg}',
            {'id': self.indexer_id, 'indexer': indexerApi(self.indexer).name,
             'language': error.language, 'error_msg': error}
        )
        ui.notifications.error(
            'Error adding show!',
            'Unable to add show {indexer_id} on {indexer} with this language: {language}'.format(
                indexer_id=self.indexer_id,
                indexer=indexerApi(self.indexer).name,
                language=error.language)
        )
        self._finishEarly()
        return
    except Exception as error:
        log.error(
            '{id}: Error while loading information from indexer {indexer}. Error: {error!r}',
            {'id': self.indexer_id, 'indexer': indexerApi(self.indexer).name, 'error': error}
        )
        ui.notifications.error(
            'Unable to add show',
            'Unable to look up the show in {path} on {indexer} using ID {id}.'.format(
                path=self.show_dir, indexer=indexerApi(self.indexer).name,
                id=self.indexer_id)
        )
        self._finishEarly()
        return

    try:
        newShow = Series(self.indexer, self.indexer_id, self.lang)
        newShow.load_from_indexer(indexer_api)

        self.show = newShow

        # set up initial values
        self.show.location = self.show_dir
        self.show.subtitles = self.subtitles if self.subtitles is not None else app.SUBTITLES_DEFAULT
        self.show.quality = self.quality if self.quality else app.QUALITY_DEFAULT
        self.show.season_folders = self.season_folders if self.season_folders is not None \
            else app.SEASON_FOLDERS_DEFAULT
        self.show.anime = self.anime if self.anime is not None else app.ANIME_DEFAULT
        self.show.scene = self.scene if self.scene is not None else app.SCENE_DEFAULT
        self.show.paused = self.paused if self.paused is not None else False

        # set up default new/missing episode status
        log.info(
            'Setting all previously aired episodes to the specified status: {status}',
            {'status': statusStrings[self.default_status]}
        )
        self.show.default_ep_status = self.default_status

        if self.show.anime:
            self.show.release_groups = BlackAndWhiteList(self.show)
            if self.blacklist:
                self.show.release_groups.set_black_keywords(self.blacklist)
            if self.whitelist:
                self.show.release_groups.set_white_keywords(self.whitelist)

    except IndexerException as error:
        log.error(
            'Unable to add show due to an error with {indexer}: {error}',
            {'indexer': indexerApi(self.indexer).name, 'error': error}
        )
        ui.notifications.error(
            'Unable to add {series_name} due to an error with {indexer_name}'.format(
                series_name=self.show.name if self.show else 'show',
                indexer_name=indexerApi(self.indexer).name)
        )
        self._finishEarly()
        return
    except MultipleShowObjectsException:
        log.warning(
            'The show in {show_dir} is already in your show list, skipping',
            {'show_dir': self.show_dir}
        )
        ui.notifications.error(
            'Show skipped',
            'The show in {show_dir} is already in your show list'.format(show_dir=self.show_dir)
        )
        self._finishEarly()
        return
    except Exception as error:
        log.error('Error trying to add show: {0}', error)
        log.debug(traceback.format_exc())
        self._finishEarly()
        raise

    log.debug('Retrieving show info from IMDb')
    try:
        self.show.load_imdb_info()
    except ImdbAPIError as error:
        log.info('Something went wrong with the IMDb api: {0}', error)
    except RequestException as error:
        log.warning('Error loading IMDb info: {0}', error)

    try:
        log.debug('{id}: Saving new show to database', {'id': self.show.series_id})
        self.show.save_to_db()
    except Exception as error:
        log.error('Error saving the show to the database: {0}', error)
        log.debug(traceback.format_exc())
        self._finishEarly()
        raise

    # add it to the show list
    app.showList.append(self.show)

    try:
        self.show.load_episodes_from_indexer(tvapi=indexer_api)
    except Exception as error:
        log.error(
            'Error with {indexer}, not creating episode list: {error}',
            {'indexer': indexerApi(self.show.indexer).name, 'error': error}
        )
        log.debug(traceback.format_exc())

    # update internal name cache
    name_cache.build_name_cache(self.show)

    try:
        self.show.load_episodes_from_dir()
    except Exception as error:
        log.error('Error searching dir for episodes: {0}', error)
        log.debug(traceback.format_exc())

    # if they set default ep status to WANTED then run the backlog to search for episodes
    if self.show.default_ep_status == WANTED:
        log.info('Launching backlog for this show since its episodes are WANTED')
        wanted_segments = self.show.get_wanted_segments()
        for season, segment in viewitems(wanted_segments):
            cur_backlog_queue_item = BacklogQueueItem(self.show, segment)
            app.forced_search_queue_scheduler.action.add_item(cur_backlog_queue_item)
            log.info('Sending forced backlog for {show} season {season}'
                     ' because some episodes were set to wanted'.format(
                         show=self.show.name, season=season))

    self.show.write_metadata()
    self.show.update_metadata()
    self.show.populate_cache()

    self.show.flush_episodes()

    if app.USE_TRAKT:
        # if there are specific episodes that need to be added by trakt
        app.trakt_checker_scheduler.action.manage_new_show(self.show)

        # add show to trakt.tv library
        if app.TRAKT_SYNC:
            app.trakt_checker_scheduler.action.add_show_trakt_library(self.show)

        if app.TRAKT_SYNC_WATCHLIST:
            log.info('update watchlist')
            notifiers.trakt_notifier.update_watchlist(show_obj=self.show)

    # Load XEM data to DB for show
    scene_numbering.xem_refresh(self.show, force=True)

    # check if show has XEM mapping so we can determine if searches
    # should go by scene numbering or indexer numbering. Warn the user.
    if not self.scene and scene_numbering.get_xem_numbering_for_show(self.show):
        log.warning(
            '{id}: while adding the show {title} we noticed thexem.de has an episode mapping available'
            '\nyou might want to consider enabling the scene option for this show.',
            {'id': self.show.series_id, 'title': self.show.name}
        )
        ui.notifications.message(
            'consider enabling scene for this show',
            'for show {title} you might want to consider enabling the scene option'.format(
                title=self.show.name)
        )

    # After initial add, set to default_status_after.
    self.show.default_ep_status = self.default_status_after

    try:
        log.debug('{id}: Saving new show info to database', {'id': self.show.series_id})
        self.show.save_to_db()
    except Exception as error:
        log.warning(
            '{id}: Error saving new show info to database: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
        log.error(traceback.format_exc())

    # Send ws update to client
    ws.Message('showAdded', self.show.to_json(detailed=False)).push()

    self.finish()
def run(self): """Run failed thread.""" generic_queue.QueueItem.run(self) self.started = True # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() try: for ep_obj in self.segment: log.info('Marking episode as bad: {ep}', {'ep': ep_obj.pretty_name()}) failed_history.mark_failed(ep_obj) (release, provider) = failed_history.find_release(ep_obj) if release: failed_history.log_failed(release) history.log_failed(ep_obj, release, provider) failed_history.revert_episode(ep_obj) log.info('Beginning failed download search for: {ep}', {'ep': ep_obj.pretty_name()}) # If it is wanted, self.down_cur_quality doesnt matter # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to! search_result = search_providers(self.show, self.segment, True) if search_result: for result in search_result: # just use the first result for now if result.seeders not in ( -1, None) and result.leechers not in (-1, None): log.info( 'Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', { 'name': result.name, 'seeders': result.seeders, 'leechers': result.leechers, 'size': pretty_file_size(result.size), 'provider': result.provider.name, }) else: log.info( 'Downloading {name} with size: {size} from {provider}', { 'name': result.name, 'size': pretty_file_size(result.size), 'provider': result.provider.name, }) # Set the search_type for the result. result.search_type = SearchType.FAILED_SEARCH # Create the queue item snatch_queue_item = SnatchQueueItem( result.series, result.episodes, result) # Add the queue item to the queue app.manual_snatch_scheduler.action.add_item( snatch_queue_item) self.success = False while snatch_queue_item.success is False: if snatch_queue_item.started and snatch_queue_item.success: self.success = True time.sleep(1) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: log.info( 'No needed episodes found during failed search for: {name}', {'name': self.show.name}) # TODO: Replace the catch all exception with a more specific one. except Exception: self.success = False log.info(traceback.format_exc()) # Keep a list with the 100 last executed searches fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE) if self.success is None: self.success = False # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() self.finish()
def _log_history_item(action, ep_obj, resource=None, provider=None, proper_tags='',
                      manually_searched=False, info_hash=None, size=-1,
                      search_result=None, part_of_batch=False):
    """
    Insert a history item in DB.

    If search_result is passed, it will overwrite the other passed named parameters.

    :param action: action taken (snatch, download, etc)
    :param ep_obj: episode object
    :param resource: resource used
    :param provider: provider class used
    :param search_result: SearchResult object
    """
    log_date = datetime.datetime.today().strftime(History.date_format)
    provider_type = None
    client_status = None
    version = ep_obj.version

    if search_result:
        resource = search_result.name
        version = search_result.version
        proper_tags = '|'.join(search_result.proper_tags)
        manually_searched = search_result.manually_searched
        size = search_result.size

        provider_class = search_result.provider
        if provider_class is not None:
            provider = provider_class.name
            provider_type = provider_class.provider_type
        else:
            provider = 'unknown'
            provider_type = 'unknown'

        if (search_result.result_type == 'torrent' and search_result.hash) \
                or (search_result.result_type == 'nzb' and search_result.nzb_id):
            if search_result.result_type == 'torrent':
                info_hash = search_result.hash.lower()
            elif search_result.result_type == 'nzb':
                info_hash = search_result.nzb_id
            client_status = ClientStatusEnum.SNATCHED.value

    main_db_con = db.DBConnection()
    sql_result = main_db_con.action(
        'INSERT INTO history '
        '(action, date, indexer_id, showid, season, episode, quality, '
        'resource, provider, version, proper_tags, manually_searched, '
        'info_hash, size, provider_type, client_status, part_of_batch) '
        'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
        [action, log_date, ep_obj.series.indexer, ep_obj.series.series_id,
         ep_obj.season, ep_obj.episode, ep_obj.quality, resource, provider,
         version, proper_tags, manually_searched, info_hash, size,
         provider_type, client_status, part_of_batch]
    )

    # Update the history page in frontend.
    ws.Message('historyUpdate', create_history_item(
        main_db_con.select('SELECT * FROM history WHERE rowid = ?',
                           [sql_result.lastrowid])[0]
    )).push()
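# Hedged usage sketch (not from the source): callers typically either pass the
# individual fields, or hand over a full SearchResult, which then overrides
# them. The action constants and the `result`/`ep_obj` variables below are
# assumed context, not definitions from this module:
#
#     _log_history_item(SNATCHED, ep_obj, search_result=result)
#     _log_history_item(DOWNLOADED, ep_obj, resource='Show.S01E01.mkv',
#                       provider='provider-name')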
def run(self): """Run backlog search thread.""" generic_queue.QueueItem.run(self) self.started = True if not self.show.paused: try: log.info('Beginning backlog search for: {name}', {'name': self.show.name}) # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() search_result = search_providers(self.show, self.segment) if search_result: for result in search_result: # just use the first result for now if result.seeders not in ( -1, None) and result.leechers not in (-1, None): log.info( 'Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', { 'name': result.name, 'seeders': result.seeders, 'leechers': result.leechers, 'size': pretty_file_size(result.size), 'provider': result.provider.name, }) else: log.info( 'Downloading {name} with size: {size} from {provider}', { 'name': result.name, 'size': pretty_file_size(result.size), 'provider': result.provider.name, }) # Set the search_type for the result. result.search_type = SearchType.BACKLOG_SEARCH # Create the queue item snatch_queue_item = SnatchQueueItem( result.series, result.episodes, result) # Add the queue item to the queue app.manual_snatch_scheduler.action.add_item( snatch_queue_item) self.success = False while snatch_queue_item.success is False: if snatch_queue_item.started and snatch_queue_item.success: self.success = True time.sleep(1) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: log.info( 'No needed episodes found during backlog search for: {name}', {'name': self.show.name}) # TODO: Remove the catch all exception. except Exception: self.success = False log.debug(traceback.format_exc()) # Keep a list with the 100 last executed searches fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE) if self.success is None: self.success = False # Push an update to any open Web UIs through the WebSocket ws.Message('QueueItemUpdate', self.to_json).push() self.finish()
def patch(self, series_slug, path_param=None):
    """Patch series."""
    if not series_slug:
        return self._method_not_allowed('Patching multiple series is not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    indexer_id = data.get('id', {}).get(identifier.indexer.slug)
    if indexer_id is not None and indexer_id != identifier.id:
        return self._bad_request('Conflicting series identifier')

    accepted = {}
    ignored = {}

    patches = {
        'config.aliases': ListField(series, 'aliases'),
        'config.defaultEpisodeStatus': StringField(series, 'default_ep_status_name'),
        'config.dvdOrder': BooleanField(series, 'dvd_order'),
        'config.seasonFolders': BooleanField(series, 'season_folders'),
        'config.anime': BooleanField(series, 'anime'),
        'config.scene': BooleanField(series, 'scene'),
        'config.sports': BooleanField(series, 'sports'),
        'config.paused': BooleanField(series, 'paused'),
        'config.location': StringField(series, 'location'),
        'config.airByDate': BooleanField(series, 'air_by_date'),
        'config.subtitlesEnabled': BooleanField(series, 'subtitles'),
        'config.release.requiredWords': ListField(series, 'release_required_words'),
        'config.release.ignoredWords': ListField(series, 'release_ignored_words'),
        'config.release.blacklist': ListField(series, 'blacklist'),
        'config.release.whitelist': ListField(series, 'whitelist'),
        'config.release.requiredWordsExclude': BooleanField(series, 'release_required_exclude'),
        'config.release.ignoredWordsExclude': BooleanField(series, 'release_ignored_exclude'),
        'language': StringField(series, 'lang'),
        'config.qualities.allowed': ListField(series, 'qualities_allowed'),
        'config.qualities.preferred': ListField(series, 'qualities_preferred'),
        'config.qualities.combined': IntegerField(series, 'quality'),
        'config.airdateOffset': IntegerField(series, 'airdate_offset'),
        'config.showLists': ListField(Series, 'show_lists'),
        'config.templates': BooleanField(series, 'templates'),
        'config.searchTemplates': ListField(series, 'search_templates'),
    }

    for key, value in iter_nested_items(data):
        patch_field = patches.get(key)
        if patch_field and patch_field.patch(series, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    # Save patched attributes in db.
    series.save_to_db()

    if ignored:
        log.warning('Series patch ignored {items!r}', {'items': ignored})

    # Push an update to any open Web UIs through the WebSocket
    msg = ws.Message('showUpdated', series.to_json(detailed=False))
    msg.push()

    return self._ok(data=accepted)
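# Illustrative request for the handler above (the slug and values are
# assumptions; the key paths come from the `patches` table):
#
#     PATCH /api/v2/series/tvdb12345
#     {"config": {"paused": true, "qualities": {"allowed": [1, 2]}}}
#
# Keys that resolve through `patches` are applied and echoed back under
# `accepted`; anything else lands in `ignored` and is logged as a warning.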
def run(self):
    """Run the season-scoped show update queue item."""
    ShowQueueItem.run(self)

    ws.Message('QueueItemShow', self.to_json).push()

    log.info(
        '{id}: Beginning update of {show}{season}',
        {'id': self.show.series_id,
         'show': self.show.name,
         'season': ' with season(s) [{0}]'.format(
             ','.join(text_type(s) for s in self.seasons) if self.seasons else '')
         }
    )

    log.debug(
        '{id}: Retrieving show info from {indexer}',
        {'id': self.show.series_id, 'indexer': indexerApi(self.show.indexer).name}
    )
    try:
        # Let's make sure we refresh the indexer_api object attached to the show object.
        self.show.create_indexer()
        self.show.load_from_indexer()
    except IndexerError as error:
        log.warning(
            '{id}: Unable to contact {indexer}. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        return
    except IndexerAttributeNotFound as error:
        log.warning(
            '{id}: Data retrieved from {indexer} was incomplete.'
            ' Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        return

    log.debug(
        '{id}: Retrieving show info from IMDb',
        {'id': self.show.series_id}
    )
    try:
        self.show.load_imdb_info()
    except ImdbAPIError as error:
        log.info(
            '{id}: Something went wrong with the IMDb api: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
    except RequestException as error:
        log.warning(
            '{id}: Error loading IMDb info: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )

    # have to save show before reading episodes from db
    try:
        log.debug(
            '{id}: Saving new IMDb show info to database',
            {'id': self.show.series_id}
        )
        self.show.save_to_db()
    except Exception as error:
        log.warning(
            '{id}: Error saving new IMDb show info to database: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
        log.error(traceback.format_exc())

    # get episode list from DB
    try:
        episodes_from_db = self.show.load_episodes_from_db(self.seasons)
    except IndexerException as error:
        log.warning(
            '{id}: Unable to contact {indexer}. Aborting: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        return

    # get episode list from the indexer
    try:
        episodes_from_indexer = self.show.load_episodes_from_indexer(self.seasons)
    except IndexerException as error:
        log.warning(
            '{id}: Unable to get info from {indexer}. The show info will not be refreshed.'
            ' Error: {error_msg}',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name,
             'error_msg': error}
        )
        episodes_from_indexer = None

    if episodes_from_indexer is None:
        log.warning(
            '{id}: No data returned from {indexer} during season show update.'
            ' Unable to update this show',
            {'id': self.show.series_id,
             'indexer': indexerApi(self.show.indexer).name}
        )
    else:
        # for each ep we found on the Indexer delete it from the DB list
        for cur_season in episodes_from_indexer:
            for cur_episode in episodes_from_indexer[cur_season]:
                if cur_season in episodes_from_db and cur_episode in episodes_from_db[cur_season]:
                    del episodes_from_db[cur_season][cur_episode]

        # remaining episodes in the DB list are not on the indexer, just delete them from the DB
        for cur_season in episodes_from_db:
            for cur_episode in episodes_from_db[cur_season]:
                log.debug(
                    '{id}: Permanently deleting episode {show} {ep} from the database',
                    {'id': self.show.series_id, 'show': self.show.name,
                     'ep': episode_num(cur_season, cur_episode)}
                )
                # Create the ep object only because I'm going to delete it
                ep_obj = self.show.get_episode(cur_season, cur_episode)
                try:
                    ep_obj.delete_episode()
                except EpisodeDeletedException:
                    log.debug(
                        '{id}: Episode {show} {ep} successfully deleted from the database',
                        {'id': self.show.series_id, 'show': self.show.name,
                         'ep': episode_num(cur_season, cur_episode)}
                    )

    # Save only after all changes were applied
    try:
        log.debug(
            '{id}: Saving all updated show info to database',
            {'id': self.show.series_id}
        )
        self.show.save_to_db()
    except Exception as error:
        log.warning(
            '{id}: Error saving all updated show info to database: {error_msg}',
            {'id': self.show.series_id, 'error_msg': error}
        )
        log.error(traceback.format_exc())

    log.info(
        '{id}: Finished update of {show}',
        {'id': self.show.series_id, 'show': self.show.name}
    )

    self.finish()

    ws.Message('QueueItemShow', self.to_json).push()