def http_post(self, series_slug=None, path_param=None):
    """Add a new series.

    The series identifier must come from the request body, not the URL.

    :param series_slug: must be None — supplying a slug here is rejected.
    :param path_param: unused; present for route-signature compatibility.
    """
    if series_slug is not None:
        return self._bad_request('Series slug should not be specified')

    data = json_decode(self.request.body)
    if not data or 'id' not in data:
        return self._bad_request('Invalid series data')

    # Drop any imdb id: a series is added through exactly one real indexer.
    ids = {k: v for k, v in viewitems(data['id']) if k != 'imdb'}
    if len(ids) != 1:
        return self._bad_request('Only 1 indexer identifier should be specified')

    # Exactly one entry remains — unpack it directly instead of building
    # two throwaway lists.
    slug, indexer_id = next(iter(ids.items()))
    identifier = SeriesIdentifier.from_slug('{slug}{id}'.format(slug=slug, id=indexer_id))
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if series:
        # Fixed garbled message (was: 'Series already exist added').
        return self._conflict('Series already exists')

    series = Series.from_identifier(identifier)
    if not Series.save_series(series):
        return self._not_found('Series not found in the specified indexer')

    return self._created(series.to_json(), identifier=identifier.slug)
def http_delete(self, series_slug, episode_slug, **kwargs):
    """Delete the episode.

    :param series_slug: series slug, e.g. tvdb1234.
    :param episode_slug: episode slug, e.g. s01e01.
    """
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    try:
        episode.delete_episode()
    except EpisodeDeletedException:
        # NOTE: success is signalled by the exception — delete_episode()
        # raising EpisodeDeletedException means the episode was removed,
        # so respond 204. A normal return means the delete did NOT happen.
        return self._no_content()
    else:
        return self._conflict('Unable to delete episode')
def http_patch(self, series_slug, episode_slug=None, path_param=None):
    """Patch a single episode, or several episodes when no episode slug is given."""
    show_id = SeriesIdentifier.from_slug(series_slug)
    if not show_id:
        return self._bad_request('Invalid series slug')

    show = Series.find_by_identifier(show_id)
    if not show:
        return self._not_found('Series not found')

    payload = json_decode(self.request.body)

    if not episode_slug:
        # Multi-patch request
        return self._patch_multi(show, payload)

    number = EpisodeNumber.from_slug(episode_slug)
    if not number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(show, number)
    if not episode:
        return self._not_found('Episode not found')

    return self._ok(data=self._patch_episode(episode, payload))
def http_get(self, series_slug, identifier):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param identifier: sub-resource name; only 'backlogged' is supported.
    """
    show_id = SeriesIdentifier.from_slug(series_slug)
    if not show_id:
        return self._bad_request('Invalid series slug')

    show = Series.find_by_identifier(show_id)
    if not show:
        return self._not_found('Series not found')

    if identifier != 'backlogged':
        return self._bad_request('Invalid request')

    # TODO: revisit
    def to_int_list(raw):
        # Comma-separated quality codes -> list of ints; empty input -> [].
        return [int(part) for part in raw.split(',')] if raw else []

    allowed = to_int_list(self._parse(self.get_argument('allowed', default=None), str))
    preferred = to_int_list(self._parse(self.get_argument('preferred', default=None), str))

    new, existing = show.get_backlogged_episodes(
        allowed_qualities=allowed, preferred_qualities=preferred)
    return self._ok(data={'new': new, 'existing': existing})
def http_patch(self, series_slug, path_param=None):
    """Patch series."""
    if not series_slug:
        return self._method_not_allowed('Patching multiple series is not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # If the body repeats the id, it must agree with the URL's identifier.
    indexer_id = data.get('id', {}).get(identifier.indexer.slug)
    if indexer_id is not None and indexer_id != identifier.id:
        return self._bad_request('Conflicting series identifier')

    # Map of patchable json paths to their typed field adapters.
    patches = {
        'config.aliases': ListField(series, 'aliases'),
        'config.defaultEpisodeStatus': StringField(series, 'default_ep_status_name'),
        'config.dvdOrder': BooleanField(series, 'dvd_order'),
        'config.seasonFolders': BooleanField(series, 'season_folders'),
        'config.anime': BooleanField(series, 'anime'),
        'config.scene': BooleanField(series, 'scene'),
        'config.sports': BooleanField(series, 'sports'),
        'config.paused': BooleanField(series, 'paused'),
        'config.location': StringField(series, 'location'),
        'config.airByDate': BooleanField(series, 'air_by_date'),
        'config.subtitlesEnabled': BooleanField(series, 'subtitles'),
        'config.release.requiredWords': ListField(series, 'release_required_words'),
        'config.release.ignoredWords': ListField(series, 'release_ignore_words'),
        'config.release.blacklist': ListField(series, 'blacklist'),
        'config.release.whitelist': ListField(series, 'whitelist'),
        'config.release.requiredWordsExclude': BooleanField(series, 'rls_require_exclude'),
        'config.release.ignoredWordsExclude': BooleanField(series, 'rls_ignore_exclude'),
        'language': StringField(series, 'lang'),
        'config.qualities.allowed': ListField(series, 'qualities_allowed'),
        'config.qualities.preferred': ListField(series, 'qualities_preferred'),
        'config.qualities.combined': IntegerField(series, 'quality'),
        'config.airdateOffset': IntegerField(series, 'airdate_offset'),
    }

    accepted = {}
    ignored = {}
    for key, value in iter_nested_items(data):
        field = patches.get(key)
        if field and field.patch(series, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    # Save patched attributes in db.
    series.save_to_db()

    if ignored:
        log.warning('Series patch ignored {items!r}', {'items': ignored})

    return self._ok(data=accepted)
def http_get(self, series_slug, path_param=None):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param path_param: optional key to pluck from the serialized series.
    """
    arg_paused = self._parse_boolean(self.get_argument('paused', default=None))

    def matches_paused(current):
        # No filter requested, or the show's paused flag matches the filter.
        return arg_paused is None or current.paused == arg_paused

    if not series_slug:
        # List mode: paginate every show that passes the paused filter.
        detailed = self._parse_boolean(self.get_argument('detailed', default=False))
        fetch = self._parse_boolean(self.get_argument('fetch', default=False))
        rows = [
            show.to_json(detailed=detailed, fetch=fetch)
            for show in Series.find_series(predicate=matches_paused)
        ]
        return self._paginate(rows, sort='title')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(identifier, predicate=matches_paused)
    if not series:
        return self._not_found('Series not found')

    detailed = self._parse_boolean(self.get_argument('detailed', default=True))
    fetch = self._parse_boolean(self.get_argument('fetch', default=False))
    data = series.to_json(detailed=detailed, fetch=fetch)

    if path_param:
        if path_param not in data:
            return self._bad_request("Invalid path parameter '{0}'".format(path_param))
        data = data[path_param]

    return self._ok(data)
def http_delete(self, series_slug, path_param=None):
    """Delete the series."""
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    remove_files = self._parse_boolean(self.get_argument('remove-files', default=None))
    if series.delete(remove_files):
        return self._no_content()
    return self._conflict('Unable to delete series')
def http_get(self, series_slug, identifier, *args, **kwargs):
    """Get an asset."""
    show_id = SeriesIdentifier.from_slug(series_slug)
    if not show_id:
        return self._bad_request('Invalid series slug')

    show = Series.find_by_identifier(show_id)
    if not show:
        return self._not_found('Series not found')

    # Default to the banner asset when none is named.
    kind = identifier or 'banner'
    asset = show.get_asset(kind, fallback=False)
    if not asset:
        return self._not_found('Asset not found')

    media = asset.media
    if media:
        return self._ok(stream=media, content_type=asset.media_type)
    return self._not_found('{kind} not found'.format(kind=kind.capitalize()))
def delete(self, series_slug, path_param=None):
    """Delete the series."""
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    show_id = SeriesIdentifier.from_slug(series_slug)
    if not show_id:
        return self._bad_request('Invalid series identifier')

    show = Series.find_by_identifier(show_id)
    if not show:
        return self._not_found('Series not found')

    # Optional flag: also remove the show's files from disk.
    remove_files = self._parse_boolean(self.get_argument('remove-files', default=None))
    if not show.delete(remove_files):
        return self._conflict('Unable to delete series')

    return self._no_content()
def http_get(self, series_slug, identifier, *args, **kwargs):
    """Get an asset."""
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    # Fall back to the banner asset when no type was requested.
    asset_type = identifier if identifier else 'banner'
    asset = series.get_asset(asset_type, fallback=False)
    if not asset:
        return self._not_found('Asset not found')

    media = asset.media
    if not media:
        return self._not_found('{kind} not found'.format(kind=asset_type.capitalize()))

    return self._ok(stream=media, content_type=asset.media_type)
def http_get(self, series_slug, episode_slug, path_param):
    """Query episode information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param episode_slug: episode slug. E.g.: s01e01
    :param path_param: optional key to pluck from the serialized episode.
    """
    show_id = SeriesIdentifier.from_slug(series_slug)
    if not show_id:
        return self._bad_request('Invalid series slug')

    show = Series.find_by_identifier(show_id)
    if not show:
        return self._not_found('Series not found')

    if not episode_slug:
        # No specific episode requested: paginate all (optionally one season).
        detailed = self._parse_boolean(self.get_argument('detailed', default=False))
        season = self._parse(self.get_argument('season', None), int)
        episodes = show.get_all_episodes(season=season)
        return self._paginate(
            [ep.to_json(detailed=detailed) for ep in episodes], sort='airDate')

    number = EpisodeNumber.from_slug(episode_slug)
    if not number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(show, number)
    if not episode:
        return self._not_found('Episode not found')

    detailed = self._parse_boolean(self.get_argument('detailed', default=True))
    data = episode.to_json(detailed=detailed)
    if not path_param:
        return self._ok(data=data)

    if path_param == 'metadata':
        # Metadata is only readable when the episode file exists on disk.
        data = episode.metadata() if episode.is_location_valid() else {}
    elif path_param in data:
        data = data[path_param]
    else:
        return self._bad_request("Invalid path parameter '{0}'".format(path_param))
    return self._ok(data=data)
def emby_update(self):
    """Update emby's show library."""
    show_slug = self.get_argument('showslug', '')

    # A slug is optional; without one the whole library is updated.
    show = None
    if show_slug:
        identifier = SeriesIdentifier.from_slug(show_slug)
        if not identifier:
            return self._bad_request('Invalid show slug')

        show = Series.find_by_identifier(identifier)
        if not show:
            return self._not_found('Series not found')

    if notifiers.emby_notifier.update_library(show):
        ui.notifications.message(
            f'Library update command sent to Emby host: {app.EMBY_HOST}')
    else:
        ui.notifications.error(
            f'Unable to contact Emby host: {app.EMBY_HOST}')

    return self._created()
def getSeasonSceneExceptions(self, showslug=None):
    """Get show name scene exceptions per season.

    :param showslug: the show's slug (indexer name + id), e.g. tvdb1234.
    :return: a json with the scene exceptions per season and the XEM numbering.
    """
    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)

    season_exceptions = {
        season: list(names)
        for season, names in iteritems(get_all_scene_exceptions(series_obj))
    }
    xem_numbering = {
        tvdb_season_ep[0]: anidb_season_ep[0]
        for tvdb_season_ep, anidb_season_ep in iteritems(
            get_xem_numbering_for_show(series_obj, refresh_data=False))
    }

    return json.dumps({
        'seasonExceptions': season_exceptions,
        'xemNumbering': xem_numbering,
    })
def saveShowNotifyList(show=None, emails=None, prowlAPIs=None):
    """Persist the notification list (emails / prowl API keys) for one show."""
    series_identifier = SeriesIdentifier.from_slug(show)
    series_obj = Series.find_by_identifier(series_identifier)
    if not series_obj:
        return 'show missing'

    # Create a new dict, to force the "dirty" flag on the Series object.
    entries = {'emails': '', 'prowlAPIs': ''}
    if series_obj.notify_list:
        entries.update(series_obj.notify_list)

    if emails is not None:
        entries['emails'] = emails
    if prowlAPIs is not None:
        entries['prowlAPIs'] = prowlAPIs

    series_obj.notify_list = entries
    series_obj.save_to_db()
    return 'OK'
def http_post(self, series_slug):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    """
    show_id = SeriesIdentifier.from_slug(series_slug)
    if not show_id:
        return self._bad_request('Invalid series slug')

    show = Series.find_by_identifier(show_id)
    if not show:
        return self._not_found('Series not found')

    payload = json_decode(self.request.body)
    # The body must contain exactly one key: a truthy 'type'.
    if not payload or not all([payload.get('type')]) or len(payload) != 1:
        return self._bad_request('Invalid request body')

    if payload['type'] != 'ARCHIVE_EPISODES':
        return self._bad_request('Invalid operation')

    if show.set_all_episodes_archived(final_status_only=True):
        return self._created()
    return self._no_content()
def post(self, series_slug):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    body = json_decode(self.request.body)
    # Require a single-key body whose 'type' value is truthy.
    valid_body = bool(body) and all([body.get('type')]) and len(body) == 1
    if not valid_body:
        return self._bad_request('Invalid request body')

    if body['type'] == 'ARCHIVE_EPISODES':
        archived = series.set_all_episodes_archived(final_status_only=True)
        return self._created() if archived else self._no_content()

    return self._bad_request('Invalid operation')
def updateShow(self, showslug=None):
    # @TODO: Replace with status=update or status=updating from PATCH /api/v2/show/{id}
    """Force a refresh of one show from its indexer, then redirect to its page."""
    if showslug is None:
        return self._genericMessage('Error', 'Invalid show ID')

    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)
    if series_obj is None:
        return self._genericMessage('Error', 'Unable to find the specified show')

    # force the update
    try:
        app.show_queue_scheduler.action.updateShow(series_obj)
    except CantUpdateShowException as error:
        ui.notifications.error('Unable to update this show.', ex(error))

    # just give it some time
    time.sleep(cpu_presets[app.CPU_PRESET])

    return self.redirect(
        '/home/displayShow?showslug={series_obj.slug}'.format(series_obj=series_obj))
def setSceneNumbering(self, showslug=None, for_season=None, for_episode=None,
                      for_absolute=None, scene_season=None, scene_episode=None,
                      scene_absolute=None):
    """Set a custom scene-numbering mapping for one episode of a show.

    :param showslug: show slug, e.g. tvdb1234.
    :param for_season: indexer season being remapped (non-anime).
    :param for_episode: indexer episode being remapped (non-anime).
    :param for_absolute: indexer absolute number being remapped (anime).
    :param scene_season: target scene season, or None to clear.
    :param scene_episode: target scene episode, or None to clear.
    :param scene_absolute: target scene absolute number, or None to clear.
    :return: a JSON string with 'success' plus the resulting scene numbers.
    """
    # sanitize: treat 'null'/'' coming from the client as "not provided"
    for_season = None if for_season in ['null', ''] else for_season
    for_episode = None if for_episode in ['null', ''] else for_episode
    for_absolute = None if for_absolute in ['null', ''] else for_absolute
    scene_season = None if scene_season in ['null', ''] else scene_season
    scene_episode = None if scene_episode in ['null', ''] else scene_episode
    scene_absolute = None if scene_absolute in ['null', ''] else scene_absolute

    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)

    if not series_obj:
        return json.dumps({
            'success': False,
            # BUG FIX: this previously formatted series_obj.slug, but
            # series_obj is None in this branch (AttributeError). Report the
            # slug the client asked for instead.
            'errorMessage': 'Could not find show {show_slug} to set scene numbering'.format(
                show_slug=showslug),
        })

    # Check if this is an anime, because we can't set the Scene numbering for anime shows
    if series_obj.is_anime and for_absolute is None:
        return json.dumps({
            'success': False,
            'errorMessage': "You can't use the Scene numbering for anime shows. "
                            'Use the Scene Absolute field, to configure a diverging episode number.',
            'sceneSeason': None,
            'sceneAbsolute': None,
        })
    elif not series_obj.is_anime and (for_season is None or for_episode is None):
        return json.dumps({
            'success': False,
            'errorMessage': "You can't use the Scene Absolute for non-anime shows. "
                            'Use the scene field, to configure a diverging episode number.',
            'sceneSeason': None,
            'sceneAbsolute': None,
        })
    elif series_obj.is_anime:
        result = {
            'success': True,
            'forAbsolute': for_absolute,
        }
    else:
        result = {
            'success': True,
            'forSeason': for_season,
            'forEpisode': for_episode,
        }

    # retrieve the episode object and fail if we can't get one
    if series_obj.is_anime:
        ep_obj = series_obj.get_episode(absolute_number=for_absolute)
    else:
        ep_obj = series_obj.get_episode(for_season, for_episode)

    if not ep_obj:
        result.update({
            'success': False,
            # NOTE(review): ep_obj is falsy here, so this sets a falsy
            # errorMessage — preserved as-is; confirm whether a descriptive
            # message was intended.
            'errorMessage': ep_obj,
        })
    elif series_obj.is_anime:
        logger.log(
            u'Set absolute scene numbering for {show} from {absolute} to {scene_absolute}'.format(
                show=series_obj.slug, absolute=for_absolute, scene_absolute=scene_absolute),
            logger.DEBUG)

        for_absolute = int(for_absolute)
        if scene_absolute is not None:
            scene_absolute = int(scene_absolute)

        set_scene_numbering(series_obj, absolute_number=for_absolute,
                            scene_absolute=scene_absolute)
    else:
        logger.log(
            u'setEpisodeSceneNumbering for {show} from {season}x{episode} to {scene_season}x{scene_episode}'.format(
                show=series_obj.indexerid, season=for_season, episode=for_episode,
                scene_season=scene_season, scene_episode=scene_episode),
            logger.DEBUG)

        for_season = int(for_season)
        for_episode = int(for_episode)
        if scene_season is not None:
            scene_season = int(scene_season)
        if scene_episode is not None:
            scene_episode = int(scene_episode)

        set_scene_numbering(series_obj, season=for_season, episode=for_episode,
                            scene_season=scene_season, scene_episode=scene_episode)

    # Read back the effective scene numbers so the client can display them.
    if series_obj.is_anime:
        sn = get_scene_absolute_numbering(series_obj, for_absolute)
        result['sceneAbsolute'] = sn
    else:
        sn = get_scene_numbering(series_obj, for_season, for_episode)
        result['sceneSeason'], result['sceneEpisode'] = sn

    return json.dumps(result)
def run(self):
    """Run the add-show queue item: fetch from the indexer, configure, persist."""
    ShowQueueItem.run(self)
    log.info('Starting to add show by {0}',
             ('show_dir: {0}'.format(self.show_dir)
              if self.show_dir
              else 'Indexer Id: {0}'.format(self.indexer_id)))

    show_slug = indexer_id_to_slug(self.indexer, self.indexer_id)
    series = Series.from_identifier(SeriesIdentifier.from_slug(show_slug))

    step = []

    # Small helper, to reduce code for messaging
    def message_step(new_step):
        step.append(new_step)
        ws.Message('QueueItemShowAdd', dict(step=step, **self.to_json)).push()

    try:
        try:
            # Push an update to any open Web UIs through the WebSocket
            message_step('load show from {indexer}'.format(
                indexer=indexerApi(self.indexer).name))

            api = series.identifier.get_indexer_api(self.options)

            if getattr(api[self.indexer_id], 'seriesname', None) is None:
                log.error(
                    'Show in {path} has no name on {indexer}, probably searched with the wrong language.',
                    {'path': self.show_dir,
                     'indexer': indexerApi(self.indexer).name})

                ui.notifications.error(
                    'Unable to add show',
                    'Show in {path} has no name on {indexer}, probably the wrong language.'
                    ' Delete .nfo and manually add the correct language.'.format(
                        path=self.show_dir, indexer=indexerApi(self.indexer).name))
                self._finish_early()
                # NOTE(review): this format string is never filled in — the
                # literal '{0!r}' is raised as-is; confirm whether a
                # .format(...) call was intended here.
                raise SaveSeriesException(
                    'Indexer is missing a showname in this language: {0!r}'
                )

            series.load_from_indexer(tvapi=api)

            message_step('load info from imdb')
            series.load_imdb_info()
        except IndexerException as error:
            log.warning(
                'Unable to load series from indexer: {0!r}'.format(error))
            raise SaveSeriesException(
                'Unable to load series from indexer: {0!r}'.format(error))

        message_step('check if show is already added')

        try:
            message_step('configure show options')
            series.configure(self)
        except KeyError as error:
            log.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}',
                {'series_name': series.name, 'error': error})
            ui.notifications.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'
                .format(series_name=series.name, error=error))
            raise SaveSeriesException(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'
                .format(series_name=series.name, error=error))
        except Exception as error:
            log.error('Error trying to configure show: {0}', error)
            log.debug(traceback.format_exc())
            raise

        app.showList.append(series)
        series.save_to_db()

        try:
            message_step('load episodes from {indexer}'.format(
                indexer=indexerApi(self.indexer).name))
            series.load_episodes_from_indexer(tvapi=api)

            # If we provide a default_status_after through the apiv2 series route options object.
            # set it after we've added the episodes.
            self.default_ep_status = self.options[
                'default_status_after'] or app.STATUS_DEFAULT_AFTER
        except IndexerException as error:
            log.warning(
                'Unable to load series episodes from indexer: {0!r}'.
                format(error))
            raise SaveSeriesException(
                'Unable to load series episodes from indexer: {0!r}'.
                format(error))

        message_step('create metadata in show folder')
        series.write_metadata()
        series.update_metadata()
        series.populate_cache()
        build_name_cache(series)  # update internal name cache
        series.flush_episodes()
        series.sync_trakt()

        message_step('add scene numbering')
        series.add_scene_numbering()

    except SaveSeriesException as error:
        log.warning('Unable to add series: {0!r}'.format(error))
        self.success = False
        self._finish_early()
        log.debug(traceback.format_exc())

    # NOTE(review): execution continues here even after the failure branch
    # above — success is unconditionally set True below; confirm whether an
    # early return after _finish_early() was intended.
    default_status = self.options['default_status'] or app.STATUS_DEFAULT
    if statusStrings[default_status] == 'Wanted':
        message_step('trigger backlog search')
        app.backlog_search_scheduler.action.search_backlog([series])

    self.success = True

    ws.Message('showAdded', series.to_json(detailed=False)).push()  # Send ws update to client
    message_step('finished')
    self.finish()
def _search_backlog(self, data=None):
    """Queue a backlog search for results for the provided episodes or season.

    :param data:
    :return:

    :example:
        Start a backlog search for show slug tvdb1234 with episodes s01e01, s01e02, s03e03.
        route: `apiv2/search/backlog`
        {
            showSlug: "tvdb1234",
            episodes: [
                "s01e01",
                "s01e02",
                "s03e03",
            ]
        }
    """
    if not data:
        # No payload at all: kick off a full backlog search.
        if app.backlog_search_scheduler.forceRun():
            return self._accepted('Full backlog search started')
        return self._bad_request('Triggering a full backlog search failed')

    show_slug = data.get('showSlug')
    if not show_slug:
        return self._bad_request('You need to provide a show slug')

    if not data.get('episodes') and not data.get('season'):
        return self._bad_request(
            'For a backlog search you need to provide a list of episodes or seasons'
        )

    identifier = SeriesIdentifier.from_slug(show_slug)
    if not identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    episode_segments = self._get_episode_segments(series, data)

    # If a season is passed, we transform it to a list of episode objects. And merge it with the episode_segment.
    # This because the backlog search has its own logic for searching per episode or season packs. And falling back
    # between them, if configured.
    for season_slug in data.get('season') or []:
        season_number = int(season_slug[1:])
        for episode in series.get_all_episodes(season_number):
            if episode not in episode_segments[season_number]:
                episode_segments[season_number].append(episode)

    if not episode_segments:
        return self._not_found(
            'Could not find any episode for show {show}. Did you provide the correct format?'
            .format(show=series.name))

    for segment in itervalues(episode_segments):
        app.forced_search_queue_scheduler.action.add_item(
            BacklogQueueItem(series, segment))

    return self._accepted('Backlog search for {0} started'.format(show_slug))
def addShowByID(self, showslug=None, show_name=None, which_series=None,
                indexer_lang=None, root_dir=None, default_status=None,
                quality_preset=None, any_qualities=None, best_qualities=None,
                season_folders=None, subtitles=None, full_show_path=None,
                other_shows=None, skip_show=None, provided_indexer=None,
                anime=None, scene=None, blacklist=None, whitelist=None,
                default_status_after=None, configure_show_options=False):
    """
    Add's a new show with provided show options by indexer_id.
    Currently only TVDB and IMDB id's supported.
    """
    identifier = SeriesIdentifier.from_slug(showslug)
    # BUG FIX: guard against an unparsable slug — previously identifier.id
    # raised AttributeError when from_slug returned None.
    if not identifier:
        return json_response(
            result=False,
            message='Invalid show slug: {slug}'.format(slug=showslug)
        )

    series_id = identifier.id
    indexername = identifier.indexer.slug

    # Non-TVDB ids must be translated to a TVDB id before adding.
    if identifier.indexer.slug != 'tvdb':
        series_id = helpers.get_tvdb_from_id(identifier.id, indexername.upper())
        if not series_id:
            log.info('Unable to find tvdb ID to add {name}', {'name': show_name})
            ui.notifications.error(
                'Unable to add {0}'.format(show_name),
                'Could not add {0}. We were unable to locate the tvdb id at this time.'.format(show_name)
            )
            return json_response(
                result=False,
                message='Unable to find tvdb ID to add {show}'.format(show=show_name)
            )

    if Series.find_by_identifier(identifier):
        return json_response(
            result=False,
            message='Show already exists'
        )

    # Sanitize the parameter allowed_qualities and preferred_qualities.
    # As these would normally be passed as lists
    if any_qualities:
        any_qualities = any_qualities.split(',')
    else:
        any_qualities = []

    if best_qualities:
        best_qualities = best_qualities.split(',')
    else:
        best_qualities = []

    # If configure_show_options is enabled let's use the provided settings
    configure_show_options = config.checkbox_to_value(configure_show_options)
    if configure_show_options:
        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        season_folders = config.checkbox_to_value(season_folders)
        subtitles = config.checkbox_to_value(subtitles)

        if whitelist:
            whitelist = short_group_names(whitelist)
        if blacklist:
            blacklist = short_group_names(blacklist)

        if not any_qualities:
            any_qualities = []

        if not best_qualities or try_int(quality_preset, None):
            best_qualities = []

        if not isinstance(any_qualities, list):
            any_qualities = [any_qualities]

        if not isinstance(best_qualities, list):
            best_qualities = [best_qualities]

        quality = {'allowed': any_qualities, 'preferred': best_qualities}

        location = root_dir
    else:
        # Fall back to the application-wide defaults.
        default_status = app.STATUS_DEFAULT
        allowed, preferred = Quality.split_quality(int(app.QUALITY_DEFAULT))
        quality = {'allowed': allowed, 'preferred': preferred}
        season_folders = app.SEASON_FOLDERS_DEFAULT
        subtitles = app.SUBTITLES_DEFAULT
        anime = app.ANIME_DEFAULT
        scene = app.SCENE_DEFAULT
        default_status_after = app.STATUS_DEFAULT_AFTER

        if app.ROOT_DIRS:
            root_dirs = app.ROOT_DIRS
            # ROOT_DIRS[0] is the index of the default root dir.
            location = root_dirs[int(root_dirs[0]) + 1]
        else:
            location = None

    if not location:
        log.warning('There was an error creating the show, no root directory setting found')
        return json_response(
            result=False,
            message='No root directories set up, please go back and add one.'
        )

    show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
    show_dir = None

    # add the show
    app.show_queue_scheduler.action.addShow(
        INDEXER_TVDBV2, int(series_id), show_dir,
        default_status=int(default_status), quality=quality,
        season_folders=season_folders, lang=indexer_lang,
        subtitles=subtitles, anime=anime, scene=scene, paused=None,
        blacklist=blacklist, whitelist=whitelist,
        default_status_after=int(default_status_after), root_dir=location)

    ui.notifications.message('Show added', 'Adding the specified show {0}'.format(show_name))

    # done adding show
    return json_response(
        message='Adding the specified show {0}'.format(show_name),
        redirect='home'
    )
def resource_update_episode_status(self):
    """
    Mass update episodes statuses for multiple shows at once.

    example: Pass the following structure:
        status: 3,
        shows: [
            {
                'slug': 'tvdb1234',
                'episodes': ['s01e01', 's02e03', 's10e10']
            },
        ]
    """
    data = json_decode(self.request.body)

    status = data.get('status')
    shows = data.get('shows', [])

    if status not in statusStrings:
        return self._bad_request('You need to provide a valid status code')

    ep_sql_l = []
    for show in shows:
        # Loop through the shows. Each show should have an array of episode slugs
        series_identifier = SeriesIdentifier.from_slug(show.get('slug'))
        if not series_identifier:
            log.warning('Could not create a show identifier with slug {slug}',
                        {'slug': show.get('slug')})
            continue

        series = Series.find_by_identifier(series_identifier)
        if not series:
            log.warning('Could not match to a show in the library with slug {slug}',
                        {'slug': show.get('slug')})
            continue

        episodes = []
        for episode_slug in show.get('episodes', []):
            episode_number = EpisodeNumber.from_slug(episode_slug)
            if not episode_number:
                log.warning('Bad episode number from slug {slug}', {'slug': episode_slug})
                continue

            episode = Episode.find_by_series_and_episode(series, episode_number)
            if not episode:
                log.warning('Episode not found with slug {slug}', {'slug': episode_slug})
                # BUG FIX: previously fell through and called
                # mass_update_episode_status on a missing (None) episode.
                continue

            ep_sql = episode.mass_update_episode_status(status)
            if ep_sql:
                ep_sql_l.append(ep_sql)

            # Keep an array of episodes for the trakt sync
            episodes.append(episode)

        if episodes:
            series.sync_trakt_episodes(status, episodes)

    if ep_sql_l:
        main_db_con = db.DBConnection()
        main_db_con.mass_action(ep_sql_l)

    return self._ok(data={'count': len(ep_sql_l)})
def manualSearchSubtitles(self, showslug=None, season=None, episode=None,
                          release_id=None, picked_id=None):
    """Manually search subtitles, or download a previously picked one.

    Mode is inferred from picked_id: present -> download, absent -> search.
    Returns a JSON string with result/release/subtitles/description.
    """
    mode = 'downloading' if picked_id else 'searching'
    description = ''
    logger.log('Starting to manual {mode} subtitles'.format(mode=mode))
    try:
        if release_id:
            # Release ID is sent when using postpone
            release = app.RELEASES_IN_PP[int(release_id)]
            indexer_name = release['indexername']
            series_id = release['seriesid']
            season = release['season']
            episode = release['episode']
            filepath = release['release']
            identifier = SeriesIdentifier.from_id(
                indexer_name_to_id(indexer_name), series_id)
        else:
            filepath = None
            identifier = SeriesIdentifier.from_slug(showslug)

        series_obj = Series.find_by_identifier(identifier)
        ep_obj = series_obj.get_episode(season, episode)
        video_path = filepath or ep_obj.location
        release_name = ep_obj.release_name or os.path.basename(video_path)
    except IndexError:
        # int(release_id) not present in RELEASES_IN_PP anymore.
        ui.notifications.message('Outdated list', 'Please refresh page and try again')
        logger.log('Outdated list. Please refresh page and try again',
                   logger.WARNING)
        return json.dumps({
            'result': 'failure',
            'description': 'Outdated list. Please refresh page and try again'
        })
    except (ValueError, TypeError) as e:
        ui.notifications.message('Error', 'Please check logs')
        logger.log(
            'Error while manual {mode} subtitles. Error: {error_msg}'.
            format(mode=mode, error_msg=e), logger.ERROR)
        return json.dumps({
            'result': 'failure',
            'description': 'Error while manual {mode} subtitles. Error: {error_msg}'.
            format(mode=mode, error_msg=e)
        })

    if not os.path.isfile(video_path):
        ui.notifications.message(
            ep_obj.series.name, "Video file no longer exists. Can't search for subtitles")
        logger.log(
            'Video file no longer exists: {video_file}'.format(
                video_file=video_path), logger.DEBUG)
        return json.dumps({
            'result': 'failure',
            'description': 'Video file no longer exists: {video_file}'.format(
                video_file=video_path)
        })

    if mode == 'searching':
        logger.log(
            'Manual searching subtitles for: {0}'.format(release_name))
        found_subtitles = subtitles.list_subtitles(tv_episode=ep_obj,
                                                   video_path=video_path)
        if found_subtitles:
            ui.notifications.message(
                ep_obj.series.name,
                'Found {} subtitles'.format(len(found_subtitles)))
        else:
            ui.notifications.message(ep_obj.series.name, 'No subtitle found')
        if found_subtitles:
            result = 'success'
        else:
            result = 'failure'
            description = 'No subtitles found'
        subtitles_result = found_subtitles
    else:
        logger.log(
            'Manual downloading subtitles for: {0}'.format(release_name))
        new_manual_subtitle = subtitles.save_subtitle(
            tv_episode=ep_obj, subtitle_id=picked_id, video_path=video_path)
        if new_manual_subtitle:
            ui.notifications.message(
                ep_obj.series.name, 'Subtitle downloaded: {0}'.format(
                    ','.join(new_manual_subtitle)))
        else:
            ui.notifications.message(
                ep_obj.series.name,
                'Failed to download subtitle for {0}'.format(release_name))
        if new_manual_subtitle:
            result = 'success'
        else:
            result = 'failure'
            description = 'Failed to download subtitle for {0}'.format(
                release_name)
        subtitles_result = new_manual_subtitle

    return json.dumps({
        'result': result,
        'release': release_name,
        'subtitles': subtitles_result,
        'description': description
    })
def post(self, series_slug):
    """Perform an operation on a series.

    Supported body ``type`` values: ARCHIVE_EPISODES, TEST_RENAME,
    RENAME_EPISODES, UPDATE_KODI.

    :param series_slug: series slug. E.g.: tvdb1234
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)
    if not data or not all([data.get('type')]):
        return self._bad_request('Invalid request body')

    if data['type'] == 'ARCHIVE_EPISODES':
        # Only archive episodes that already carry a final status.
        if series.set_all_episodes_archived(final_status_only=True):
            return self._created()
        return self._no_content()

    if data['type'] == 'TEST_RENAME':
        try:
            # Property access validates the show dir; raises when missing.
            series.validate_location  # @UnusedVariable
        except ShowDirectoryNotFoundException:
            return self._bad_request(
                "Can't rename episodes when the show dir is missing.")

        ep_obj_list = series.get_all_episodes(has_location=True)
        ep_obj_list = [x for x in ep_obj_list if x.location]
        ep_obj_rename_list = []
        for ep_obj in ep_obj_list:
            # Skip episodes already covered through a related (multi) episode.
            has_already = False
            for check in ep_obj.related_episodes + [ep_obj]:
                if check in ep_obj_rename_list:
                    has_already = True
                    break
            if not has_already:
                ep_obj_rename_list.append(ep_obj)

        if ep_obj_rename_list:
            ep_obj_rename_list.reverse()

        # Always return the (possibly empty) preview list. Returning only when
        # non-empty made the request fall through to the final
        # "Invalid operation" 400 for shows with nothing to rename.
        return self._ok(data=[{
            **ep_obj.to_json(detailed=True),
            **{'selected': False}
        } for ep_obj in ep_obj_rename_list])

    if data['type'] == 'RENAME_EPISODES':
        episodes = data.get('episodes', [])
        if not episodes:
            return self._bad_request('You must provide at least one episode')

        try:
            series.validate_location  # @UnusedVariable
        except ShowDirectoryNotFoundException:
            return self._bad_request(
                "Can't rename episodes when the show dir is missing.")

        main_db_con = db.DBConnection()
        for episode_slug in episodes:
            episode_number = EpisodeNumber.from_slug(episode_slug)
            if not episode_number:
                continue

            episode = Episode.find_by_series_and_episode(series, episode_number)
            if not episode:
                continue

            # this is probably the worst possible way to deal with double eps
            # but I've kinda painted myself into a corner here with this stupid database
            # (dropped the legacy no-op "AND 5=5" tautology from the WHERE clause)
            ep_result = main_db_con.select(
                'SELECT location '
                'FROM tv_episodes '
                'WHERE indexer = ? AND showid = ? AND season = ? AND episode = ?',
                [series.indexer, series.series_id, episode.season, episode.episode])
            if not ep_result:
                log.warning('Unable to find an episode for {episode}, skipping',
                            {'episode': episode})
                continue

            # Episodes sharing the same file are renamed together.
            related_eps_result = main_db_con.select(
                'SELECT season, episode '
                'FROM tv_episodes '
                'WHERE location = ? AND episode != ?',
                [ep_result[0]['location'], episode.episode])

            root_ep_obj = episode
            root_ep_obj.related_episodes = []

            for cur_related_ep in related_eps_result:
                related_ep_obj = series.get_episode(
                    cur_related_ep['season'], cur_related_ep['episode'])
                if related_ep_obj not in root_ep_obj.related_episodes:
                    root_ep_obj.related_episodes.append(related_ep_obj)

            root_ep_obj.rename()

        return self._created()

    # This might also be moved to /notifications/kodi/update?showslug=..
    if data['type'] == 'UPDATE_KODI':
        series_name = quote_plus(series.name.encode('utf-8'))

        if app.KODI_UPDATE_ONLYFIRST:
            host = app.KODI_HOST[0].strip()
        else:
            host = ', '.join(app.KODI_HOST)

        if notifiers.kodi_notifier.update_library(series_name=series_name):
            ui.notifications.message(
                f'Library update command sent to KODI host(s): {host}')
        else:
            ui.notifications.error(
                f'Unable to contact one or more KODI host(s): {host}')
        return self._created()

    return self._bad_request('Invalid operation')
def get(self, series_slug, episode_slug, path_param): """Query episode's history information. :param series_slug: series slug. E.g.: tvdb1234 :param episode_slug: episode slug. E.g.: s01e01 :param path_param: """ series_identifier = SeriesIdentifier.from_slug(series_slug) if not series_identifier: return self._bad_request('Invalid series slug') series = Series.find_by_identifier(series_identifier) if not series: return self._not_found('Series not found') if not episode_slug: return self._bad_request('Invalid episode slug') episode_number = EpisodeNumber.from_slug(episode_slug) if not episode_number: return self._not_found('Invalid episode number') episode = Episode.find_by_series_and_episode(series, episode_number) if not episode: return self._not_found('Episode not found') sql_base = """ SELECT rowid, date, action, quality, provider, version, resource, size, proper_tags, indexer_id, showid, season, episode, manually_searched FROM history WHERE showid = ? AND indexer_id = ? AND season = ? AND episode = ? """ params = [ series.series_id, series.indexer, episode.season, episode.episode ] sql_base += ' ORDER BY date DESC' results = db.DBConnection().select(sql_base, params) def data_generator(): """Read history data and normalize key/value pairs.""" for item in results: d = {} d['id'] = item['rowid'] d['series'] = SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug d['status'] = item['action'] d['actionDate'] = item['date'] d['resource'] = basename(item['resource']) d['size'] = item['size'] d['properTags'] = item['proper_tags'] d['statusName'] = statusStrings.get(item['action']) d['season'] = item['season'] d['episode'] = item['episode'] d['manuallySearched'] = bool(item['manually_searched']) d['provider'] = item['provider'] yield d if not results: return self._not_found( 'History data not found for show {show} and episode {episode}'. format(show=series.identifier.slug, episode=episode.slug)) return self._ok(data=list(data_generator()))
def create_history_item(history_row, compact=False):
    """
    Create a history object, using the data from a history db row item.

    Calculate additional data, where needed.

    :param history_row: a main.db history row (mutable mapping; an
        'episodeTitle' key is added to it as a side effect).
    :param compact: A boolean indicating if this is used for a compact layout.
        NOTE(review): currently unused in this body — kept for interface
        compatibility; confirm against callers.
    :returns: A dict with history information.
    """
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider
    from medusa.tv.series import Series, SeriesIdentifier

    provider = {}
    release_group = None
    release_name = None
    file_name = None
    subtitle_language = None
    client_status = None
    # Fixed: show_slug was assigned None twice (duplicate statement removed).
    show_slug = None
    show_title = 'Missing Show'

    if history_row['action'] in (SNATCHED, FAILED):
        provider_id = GenericProvider.make_id(history_row['provider'])
        provider_class = get_provider_class(provider_id)

        if provider_class:
            provider.update({
                'id': provider_class.get_id(),
                'name': provider_class.name,
                'imageName': provider_class.image_name()
            })
        else:
            # Provider no longer installed/enabled; fall back to the raw name.
            provider.update({
                'id': provider_id,
                'name': history_row['provider'],
                'imageName': f'{provider_id}.png'
            })
        release_name = history_row['resource']

    if history_row['action'] == DOWNLOADED:
        release_group = history_row['provider']
        file_name = history_row['resource']

    if history_row['action'] == SUBTITLED:
        subtitle_language = history_row['resource']
        provider['name'] = history_row['provider']

    if history_row['client_status'] is not None:
        status = ClientStatus(status=history_row['client_status'])
        client_status = {
            'status': [s.value for s in status],
            'string': status.status_to_array_string()
        }

    if history_row['indexer_id'] and history_row['showid']:
        identifier = SeriesIdentifier.from_id(history_row['indexer_id'],
                                              history_row['showid'])
        show_slug = identifier.slug
        show = Series.find_by_identifier(identifier)
        if show:
            show_title = show.title

    history_row['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
        show_title, history_row['season'], history_row['episode'])

    return {
        'series': show_slug,
        'status': history_row['action'],
        'statusName': statusStrings.get(history_row['action']),
        'actionDate': history_row['date'],
        'quality': history_row['quality'],
        'resource': basename(history_row['resource']),
        'size': history_row['size'],
        'properTags': history_row['proper_tags'],
        'season': history_row['season'],
        'episode': history_row['episode'],
        'episodeTitle': history_row['episodeTitle'],
        'manuallySearched': bool(history_row['manually_searched']),
        'infoHash': history_row['info_hash'],
        'provider': provider,
        'releaseName': release_name,
        'releaseGroup': release_group,
        'fileName': file_name,
        'subtitleLanguage': subtitle_language,
        'showSlug': show_slug,
        'showTitle': show_title,
        'providerType': history_row['provider_type'],
        'clientStatus': client_status,
        'partOfBatch': bool(history_row['part_of_batch'])
    }
def run(self):
    """Run QueueItemChangeIndexer queue item.

    Removes the show under its old indexer, then re-adds it under the new
    one, streaming progress steps to the UI over the WebSocket.
    """
    step = []

    # Small helper, to reduce code for messaging
    def message_step(new_step):
        step.append(new_step)
        ws.Message('QueueItemShow',
                   dict(step=step,
                        oldShow=self.old_show.to_json() if self.old_show else {},
                        newShow=self.new_show.to_json() if self.new_show else {},
                        **self.to_json)).push()

    ShowQueueItem.run(self)

    def get_show_from_slug(slug):
        identifier = SeriesIdentifier.from_slug(slug)
        if not identifier:
            raise ChangeIndexerException(
                f'Could not create identifier with slug {slug}')
        show = Series.find_by_identifier(identifier)
        return show

    try:
        # Create reference to old show, before starting the remove it.
        self.old_show = get_show_from_slug(self.old_slug)

        # Store needed options.
        self._store_options()

        # Start of removing the old show
        log.info('{id}: Removing {show}', {
            'id': self.old_show.series_id,
            'show': self.old_show.name
        })
        message_step(f'Removing old show {self.old_show.name}')

        # Need to first remove the episodes from the Trakt collection, because we need the list of
        # Episodes from the db to know which eps to remove.
        if app.USE_TRAKT:
            message_step('Removing episodes from trakt collection')
            try:
                app.trakt_checker_scheduler.action.remove_show_trakt_library(
                    self.old_show)
            except TraktException as error:
                log.warning(
                    '{id}: Unable to delete show {show} from Trakt.'
                    ' Please remove manually otherwise it will be added again.'
                    ' Error: {error_msg}', {
                        'id': self.old_show.series_id,
                        'show': self.old_show.name,
                        'error_msg': error
                    })
            except Exception as error:
                # Fixed: format string was malformed ('{error' missing the
                # closing brace), which broke the log message.
                log.exception(
                    'Exception occurred while trying to delete show {show}, error: {error}',
                    {
                        'show': self.old_show.name,
                        'error': error
                    })

        self.old_show.delete_show(full=False)
        # Send showRemoved to frontend, so we can remove it from localStorage.
        ws.Message('showRemoved', self.old_show.to_json(
            detailed=False)).push()  # Send ws update to client

        # Double check to see if the show really has been removed, else bail.
        if get_show_from_slug(self.old_slug):
            raise ChangeIndexerException(
                f'Could not create identifier with slug {self.old_slug}')

        # Start adding the new show
        log.info('Starting to add show by {0}',
                 ('show_dir: {0}'.format(self.show_dir)
                  if self.show_dir else 'New slug: {0}'.format(self.new_slug)))

        self.new_show = Series.from_identifier(
            SeriesIdentifier.from_slug(self.new_slug))

        try:
            # Push an update to any open Web UIs through the WebSocket
            message_step('load show from {indexer}'.format(
                indexer=indexerApi(self.new_show.indexer).name))

            api = self.new_show.identifier.get_indexer_api(self.options)

            if getattr(api[self.new_show.series_id], 'seriesname', None) is None:
                log.error(
                    'Show in {path} has no name on {indexer}, probably searched with the wrong language.',
                    {
                        'path': self.show_dir,
                        'indexer': indexerApi(self.new_show.indexer).name
                    })

                ui.notifications.error(
                    'Unable to add show',
                    'Show in {path} has no name on {indexer}, probably the wrong language.'
                    ' Delete .nfo and manually add the correct language.'.
                    format(path=self.show_dir,
                           indexer=indexerApi(self.new_show.indexer).name))
                self._finish_early()
                # NOTE(review): the {0!r} placeholder is never filled in —
                # the message is raised verbatim; confirm intended argument.
                raise SaveSeriesException(
                    'Indexer is missing a showname in this language: {0!r}'
                )

            self.new_show.load_from_indexer(tvapi=api)

            message_step('load info from imdb')
            self.new_show.load_imdb_info()
        except IndexerException as error:
            log.warning(
                'Unable to load series from indexer: {0!r}'.format(error))
            raise SaveSeriesException(
                'Unable to load series from indexer: {0!r}'.format(error))

        try:
            message_step('configure show options')
            self.new_show.configure(self)
        except KeyError as error:
            log.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}',
                {
                    'series_name': self.new_show.name,
                    'error': error
                })
            ui.notifications.error(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'
                .format(series_name=self.new_show.name, error=error))
            raise SaveSeriesException(
                'Unable to add show {series_name} due to an error with one of the provided options: {error}'
                .format(series_name=self.new_show.name, error=error))
        except Exception as error:
            log.error('Error trying to configure show: {0}', error)
            log.debug(traceback.format_exc())
            raise

        app.showList.append(self.new_show)
        self.new_show.save_to_db()

        try:
            message_step('load episodes from {indexer}'.format(
                indexer=indexerApi(self.new_show.indexer).name))
            self.new_show.load_episodes_from_indexer(tvapi=api)
            # If we provide a default_status_after through the apiv2 series route options object.
            # set it after we've added the episodes.
            self.new_show.default_ep_status = self.options[
                'default_status_after'] or app.STATUS_DEFAULT_AFTER
        except IndexerException as error:
            log.warning(
                'Unable to load series episodes from indexer: {0!r}'.
                format(error))
            raise SaveSeriesException(
                'Unable to load series episodes from indexer: {0!r}'.
                format(error))

        message_step('create metadata in show folder')
        self.new_show.write_metadata()
        self.new_show.update_metadata()
        self.new_show.populate_cache()
        build_name_cache(self.new_show)  # update internal name cache
        self.new_show.flush_episodes()
        self.new_show.sync_trakt()

        message_step('add scene numbering')
        self.new_show.add_scene_numbering()

        if self.show_dir:
            # If a show dir was passed, this was added as an existing show.
            # For new shows we shouldn't have any files on disk.
            message_step('refresh episodes from disk')
            try:
                app.show_queue_scheduler.action.refreshShow(self.new_show)
            except CantRefreshShowException as error:
                log.warning(
                    'Unable to rescan episodes from disk: {0!r}'.format(
                        error))

    except (ChangeIndexerException, SaveSeriesException) as error:
        log.warning('Unable to add series: {0!r}'.format(error))
        self.success = False
        self._finish_early()
        log.debug(traceback.format_exc())
        # Fixed: bail out here. Falling through would dereference
        # self.new_show (possibly None) in the post-add steps below.
        return

    default_status = self.options['default_status'] or app.STATUS_DEFAULT
    if statusStrings[default_status] == 'Wanted':
        message_step('trigger backlog search')
        app.backlog_search_scheduler.action.search_backlog([self.new_show])

    self.success = True

    ws.Message('showAdded', self.new_show.to_json(
        detailed=False)).push()  # Send ws update to client
    message_step('finished')
    self.finish()
def data_generator():
    """Read and paginate history records.

    Yields one API-shaped dict per history row within the page window
    defined by the enclosing scope's ``arg_limit``/``arg_page``.
    """
    start = arg_limit * (arg_page - 1)

    for item in results[start:start + arg_limit]:
        provider = {}
        release_group = None
        release_name = None
        file_name = None
        subtitle_language = None
        client_status = None
        # Fixed: show_slug was assigned None twice (duplicate statement removed).
        show_slug = None
        show_title = 'Missing Show'

        if item['action'] in (SNATCHED, FAILED):
            provider_id = GenericProvider.make_id(item['provider'])
            provider_class = get_provider_class(provider_id)
            if provider_class:
                provider.update({
                    'id': provider_class.get_id(),
                    'name': provider_class.name,
                    'imageName': provider_class.image_name()
                })
            else:
                # Provider no longer installed/enabled; fall back to the raw name.
                provider.update({
                    'id': provider_id,
                    'name': item['provider'],
                    'imageName': f'{provider_id}.png'
                })
            release_name = item['resource']

        if item['action'] == DOWNLOADED:
            release_group = item['provider']
            file_name = item['resource']

        if item['action'] == SUBTITLED:
            subtitle_language = item['resource']
            provider['name'] = item['provider']

        if item['client_status'] is not None:
            status = ClientStatus(status=item['client_status'])
            client_status = {
                'status': [s.value for s in status],
                'string': status.status_to_array_string()
            }

        if item['indexer_id'] and item['showid']:
            identifier = SeriesIdentifier.from_id(
                item['indexer_id'], item['showid'])
            show_slug = identifier.slug
            show = Series.find_by_identifier(identifier)
            if show:
                show_title = show.title

        item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
            show_title, item['season'], item['episode'])

        yield {
            'id': item['rowid'],
            'series': show_slug,
            'status': item['action'],
            'statusName': statusStrings.get(item['action']),
            'actionDate': item['date'],
            'quality': item['quality'],
            'resource': basename(item['resource']),
            'size': item['size'],
            'properTags': item['proper_tags'],
            'season': item['season'],
            'episode': item['episode'],
            'episodeTitle': item['episodeTitle'],
            'manuallySearched': bool(item['manually_searched']),
            'infoHash': item['info_hash'],
            'provider': provider,
            'releaseName': release_name,
            'releaseGroup': release_group,
            'fileName': file_name,
            'subtitleLanguage': subtitle_language,
            'showSlug': show_slug,
            'showTitle': show_title,
            'providerType': item['provider_type'],
            'clientStatus': client_status,
            'partOfBatch': bool(item['part_of_batch'])
        }
def resource_existing_series(self):
    """Generate existing series folders data for adding existing shows.

    :returns: 200 with a list of {path, alreadyAdded, metadata} dicts, one
        per directory found under the selected root dirs.
    """
    if not app.ROOT_DIRS:
        return self._not_found('No configured root dirs')

    # app.ROOT_DIRS[0] is the default-dir index; the paths start at [1:].
    root_dirs = app.ROOT_DIRS[1:]
    root_dirs_indices = self.get_argument('rootDirs', '')

    if root_dirs_indices:
        root_dirs_indices = set(root_dirs_indices.split(','))
        try:
            root_dirs_indices = sorted(map(int, root_dirs_indices))
        except ValueError as error:
            log.warning('Unable to parse root dirs indices: {indices}. Error: {error}',
                        {'indices': root_dirs_indices, 'error': error})
            return self._bad_request('Invalid root dirs indices')

        root_dirs = [root_dirs[idx] for idx in root_dirs_indices]

    dir_list = []

    # Get a unique list of shows
    main_db_con = db.DBConnection()
    dir_results = main_db_con.select(
        'SELECT location '
        'FROM tv_shows'
    )
    root_dirs_tuple = tuple(root_dirs)
    dir_results = [
        series['location'] for series in dir_results
        if series['location'].startswith(root_dirs_tuple)
    ]

    for root_dir in root_dirs:
        try:
            file_list = os.listdir(root_dir)
        except Exception as error:
            log.info('Unable to list directory {path}: {err!r}',
                     {'path': root_dir, 'err': error})
            continue

        for cur_file in file_list:
            try:
                cur_path = os.path.normpath(os.path.join(root_dir, cur_file))
                if not os.path.isdir(cur_path):
                    continue
            except Exception as error:
                log.info('Unable to get current path {path} and {file}: {err!r}',
                         {'path': root_dir, 'file': cur_file, 'err': error})
                continue

            cur_dir = {
                'path': cur_path,
                'alreadyAdded': False,
                'metadata': {
                    'seriesId': None,
                    'seriesName': None,
                    'indexer': None
                }
            }

            # Check if the folder is already in the library
            cur_dir['alreadyAdded'] = next((True for path in dir_results if path == cur_path), False)

            if not cur_dir['alreadyAdded']:
                # Fixed: pre-initialize so an empty metadata_provider_dict can
                # no longer raise NameError at the SeriesIdentifier call below.
                series_id = series_name = indexer = None
                # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
                # is reset. This will prevent errors, when using multiple indexers and caching.
                for cur_provider in itervalues(app.metadata_provider_dict):
                    (series_id, series_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                    if all((series_id, series_name, indexer)):
                        cur_dir['metadata'] = {
                            'seriesId': try_int(series_id),
                            'seriesName': series_name,
                            'indexer': try_int(indexer)
                        }
                        break

                series_identifier = SeriesIdentifier(indexer, series_id)
                cur_dir['alreadyAdded'] = bool(Series.find_by_identifier(series_identifier))

            dir_list.append(cur_dir)

    return self._ok(data=dir_list)
def post(self):
    """Perform a mass update action.

    Expects a JSON body with ``shows`` (list of series slugs) and
    ``options`` (the settings to apply to each show).
    :returns: 201 with the number of per-show errors encountered.
    """
    required_options = (
        'paused', 'defaultEpisodeStatus', 'anime', 'sports', 'scene',
        'airByDate', 'seasonFolders', 'subtitles', 'qualities'
    )

    data = json_decode(self.request.body)
    shows = data.get('shows', [])
    options = data.get('options')
    errors = 0

    if not options:
        return self._bad_request('Options missing')

    missing_options = [option for option in required_options if option not in options]
    if missing_options:
        return self._bad_request(f"Missing options: {', '.join(missing_options)}")

    paused = options.get('paused')
    default_ep_status = options.get('defaultEpisodeStatus')
    if isinstance(default_ep_status, str):
        # Translate a status display name back to its numeric constant.
        default_ep_status = {v: k for k, v in statusStrings.items()}.get(default_ep_status)
    anime = options.get('anime')
    sports = options.get('sports')
    scene = options.get('scene')
    air_by_date = options.get('airByDate')
    dvd_order = options.get('dvdOrder')
    season_folders = options.get('seasonFolders')
    subtitles = options.get('subtitles')
    qualities = options.get('qualities')

    for show_slug in shows:
        identifier = SeriesIdentifier.from_slug(show_slug)
        show_obj = Series.find_by_identifier(identifier)

        if not show_obj:
            continue

        cur_root_dir = path.dirname(show_obj._location)
        cur_show_dir = path.basename(show_obj._location)

        # Fixed: default to the current location. Previously new_show_dir was
        # only assigned inside the rootDirs loop, so a show whose root dir was
        # not in the submitted list either raised NameError (first show) or
        # silently reused the previous show's directory.
        new_show_dir = show_obj._location
        # rootDirs is optional in the request body; tolerate its absence.
        for root_dir in options.get('rootDirs') or []:
            if cur_root_dir != root_dir['old']:
                continue

            if root_dir['old'] != root_dir['new']:
                new_show_dir = path.join(root_dir['new'], cur_show_dir)
                log.info('For show {show_name} changing dir from {old_location} to {new_location}', {
                    'show_name': show_obj.name,
                    'old_location': show_obj._location,
                    'new_location': new_show_dir})
            else:
                new_show_dir = show_obj._location

        # None means "option not provided": keep the show's current value.
        new_paused = show_obj.paused if paused is None else paused
        new_default_ep_status = show_obj.default_ep_status if default_ep_status is None else default_ep_status
        new_anime = show_obj.anime if anime is None else anime
        new_sports = show_obj.sports if sports is None else sports
        new_scene = show_obj.scene if scene is None else scene
        new_air_by_date = show_obj.air_by_date if air_by_date is None else air_by_date
        new_dvd_order = show_obj.dvd_order if dvd_order is None else dvd_order
        new_season_folders = show_obj.season_folders if season_folders is None else season_folders
        new_subtitles = show_obj.subtitles if subtitles is None else subtitles

        # If both are false (two empty arrays), use the shows current value.
        if not qualities['allowed'] and not qualities['preferred']:
            new_quality_allowed, new_quality_preferred = show_obj.current_qualities
        else:
            new_quality_allowed, new_quality_preferred = qualities['allowed'], qualities['preferred']

        # If user set quality_preset remove all preferred_qualities
        if Quality.combine_qualities(new_quality_allowed, new_quality_preferred) in qualityPresets:
            new_quality_preferred = []

        errors += self.mass_edit_show(
            show_obj, location=new_show_dir,
            allowed_qualities=new_quality_allowed, preferred_qualities=new_quality_preferred,
            season_folders=new_season_folders, paused=new_paused,
            air_by_date=new_air_by_date, sports=new_sports, dvd_order=new_dvd_order,
            subtitles=new_subtitles, anime=new_anime, scene=new_scene,
            default_ep_status=new_default_ep_status,
        )

    return self._created(data={'errors': errors})
def resource_existing_series(self):
    """Generate existing series folders data for adding existing shows.

    :returns: 200 with a list of {path, alreadyAdded, metadata} dicts, one
        per directory found under the selected root dirs.
    """
    if not app.ROOT_DIRS:
        return self._not_found('No configured root dirs')

    # app.ROOT_DIRS[0] is the default-dir index; the paths start at [1:].
    root_dirs = app.ROOT_DIRS[1:]
    root_dirs_indices = self.get_argument('rootDirs', '')

    if root_dirs_indices:
        root_dirs_indices = set(root_dirs_indices.split(','))
        try:
            root_dirs_indices = sorted(map(int, root_dirs_indices))
        except ValueError as error:
            log.warning(
                'Unable to parse root dirs indices: {indices}. Error: {error}',
                {
                    'indices': root_dirs_indices,
                    'error': error
                })
            return self._bad_request('Invalid root dirs indices')

        root_dirs = [root_dirs[idx] for idx in root_dirs_indices]

    dir_list = []

    # Get a unique list of shows
    main_db_con = db.DBConnection()
    dir_results = main_db_con.select('SELECT location '
                                     'FROM tv_shows')
    root_dirs_tuple = tuple(root_dirs)
    dir_results = [
        series['location'] for series in dir_results
        if series['location'].startswith(root_dirs_tuple)
    ]

    for root_dir in root_dirs:
        try:
            file_list = os.listdir(root_dir)
        except Exception as error:
            log.info('Unable to list directory {path}: {err!r}', {
                'path': root_dir,
                'err': error
            })
            continue

        for cur_file in file_list:
            try:
                cur_path = os.path.normpath(
                    os.path.join(root_dir, cur_file))
                if not os.path.isdir(cur_path):
                    continue
            except Exception as error:
                log.info(
                    'Unable to get current path {path} and {file}: {err!r}',
                    {
                        'path': root_dir,
                        'file': cur_file,
                        'err': error
                    })
                continue

            cur_dir = {
                'path': cur_path,
                'alreadyAdded': False,
                'metadata': {
                    'seriesId': None,
                    'seriesName': None,
                    'indexer': None
                }
            }

            # Check if the folder is already in the library
            cur_dir['alreadyAdded'] = next(
                (True for path in dir_results if path == cur_path), False)

            if not cur_dir['alreadyAdded']:
                # Fixed: pre-initialize so an empty metadata_provider_dict can
                # no longer raise NameError at the SeriesIdentifier call below.
                series_id = series_name = indexer = None
                # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
                # is reset. This will prevent errors, when using multiple indexers and caching.
                for cur_provider in itervalues(app.metadata_provider_dict):
                    (series_id, series_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                    if all((series_id, series_name, indexer)):
                        cur_dir['metadata'] = {
                            'seriesId': try_int(series_id),
                            'seriesName': series_name,
                            'indexer': try_int(indexer)
                        }
                        break

                series_identifier = SeriesIdentifier(indexer, series_id)
                cur_dir['alreadyAdded'] = bool(
                    Series.find_by_identifier(series_identifier))

            dir_list.append(cur_dir)

    return self._ok(data=dir_list)
def resource_existing_series(self):
    """Generate existing series folders data for adding existing shows.

    :returns: 200 with a list of {path, alreadyAdded, existingInfo} dicts,
        or 404 when no root dirs are available.
    """
    root_dirs = json_decode(self.get_argument('root-dir', '[]'))

    if not root_dirs:
        if app.ROOT_DIRS:
            root_dirs = app.ROOT_DIRS[1:]
        else:
            # Fixed: the 404 response was computed but not returned, so the
            # handler fell through and answered 200 with an empty list.
            return self._not_found('No configured root dirs')

    # Put the default root-dir first
    try:
        default_index = int(app.ROOT_DIRS[0])
        default_root_dir = root_dirs[default_index]
        root_dirs.remove(default_root_dir)
        root_dirs.insert(0, default_root_dir)
    except (IndexError, ValueError):
        pass

    dir_list = []

    # Get a unique list of shows
    main_db_con = db.DBConnection()
    dir_results = main_db_con.select(
        b'SELECT location '
        b'FROM tv_shows'
    )
    root_dirs_tuple = tuple(root_dirs)
    dir_results = [
        series[b'location'] for series in dir_results
        if series[b'location'].startswith(root_dirs_tuple)
    ]

    for root_dir in root_dirs:
        try:
            file_list = os.listdir(root_dir)
        except Exception as error:
            log.info('Unable to list directory {path}: {err!r}',
                     {'path': root_dir, 'err': error})
            continue

        for cur_file in file_list:
            try:
                cur_path = os.path.normpath(os.path.join(root_dir, cur_file))
                if not os.path.isdir(cur_path):
                    continue
            except Exception as error:
                log.info('Unable to get current path {path} and {file}: {err!r}',
                         {'path': root_dir, 'file': cur_file, 'err': error})
                continue

            cur_dir = {
                'path': cur_path,
                'alreadyAdded': False,
                'existingInfo': {
                    'seriesId': None,
                    'seriesName': None,
                    'indexer': None
                }
            }

            # Check if the folder is already in the library
            cur_dir['alreadyAdded'] = next((True for path in dir_results if path == cur_path), False)

            if not cur_dir['alreadyAdded']:
                # Fixed: pre-initialize so an empty metadata_provider_dict can
                # no longer raise NameError at the SeriesIdentifier call below.
                series_id = series_name = indexer = None
                # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
                # is reset. This will prevent errors, when using multiple indexers and caching.
                for cur_provider in itervalues(app.metadata_provider_dict):
                    (series_id, series_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                    if all((series_id, series_name, indexer)):
                        cur_dir['existingInfo'] = {
                            'seriesId': try_int(series_id),
                            'seriesName': series_name,
                            'indexer': try_int(indexer)
                        }
                        break

                series_identifier = SeriesIdentifier(indexer, series_id)
                cur_dir['alreadyAdded'] = bool(Series.find_by_identifier(series_identifier))

            dir_list.append(cur_dir)

    return self._ok(data=dir_list)
def get(self, series_slug, episode_slug, path_param):
    """Query episode's history information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param episode_slug: episode slug. E.g.: s01e01
    :param path_param:
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    if not episode_slug:
        return self._bad_request('Invalid episode slug')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._not_found('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    sql_base = """
        SELECT rowid, date, action, quality,
               provider, version, resource, size, proper_tags,
               indexer_id, showid, season, episode,
               manually_searched, info_hash
        FROM history
        WHERE showid = ? AND indexer_id = ? AND season = ? AND episode = ?
    """
    params = [
        series.series_id, series.indexer, episode.season, episode.episode
    ]

    sql_base += ' ORDER BY date DESC'
    results = db.DBConnection().select(sql_base, params)

    def data_generator():
        """Read history data and normalize key/value pairs."""
        for item in results:
            provider = {}
            release_group = None
            release_name = None
            file_name = None
            subtitle_language = None

            if item['action'] in (SNATCHED, FAILED):
                provider.update({
                    'id': GenericProvider.make_id(item['provider']),
                    'name': item['provider']
                })
                release_name = item['resource']

            if item['action'] == DOWNLOADED:
                release_group = item['provider']
                file_name = item['resource']

            # Fixed: this condition was duplicated; the second copy only
            # re-assigned subtitle_language redundantly and has been removed.
            if item['action'] == SUBTITLED:
                subtitle_language = item['resource']
                provider.update({
                    'id': item['provider'],
                    'name': item['provider']
                })

            yield {
                'id': item['rowid'],
                'series': SeriesIdentifier.from_id(item['indexer_id'],
                                                   item['showid']).slug,
                'status': item['action'],
                'statusName': statusStrings.get(item['action']),
                'actionDate': item['date'],
                'quality': item['quality'],
                'resource': basename(item['resource']),
                'size': item['size'],
                'properTags': item['proper_tags'],
                'season': item['season'],
                'episode': item['episode'],
                'manuallySearched': bool(item['manually_searched']),
                'infoHash': item['info_hash'],
                'provider': provider,
                # NOTE(review): snake_case key differs from the 'releaseName'
                # used by sibling endpoints — kept as-is for API compatibility.
                'release_name': release_name,
                'releaseGroup': release_group,
                'fileName': file_name,
                'subtitleLanguage': subtitle_language
            }

    if not results:
        return self._not_found(
            'History data not found for show {show} and episode {episode}'.
            format(show=series.identifier.slug, episode=episode.slug))

    return self._ok(data=list(data_generator()))
def data_generator_compact():
    """
    Read and paginate history records.

    Results are provided grouped per showid+season+episode.
    The results are flattened into a structure of [{'actionDate': .., 'showSlug':.., 'rows':Array(history_items)},]
    """
    # Page window over the grouped results (one group per show+season+episode).
    start = arg_limit * (arg_page - 1)

    for compact_item in list(results.values())[start:start + arg_limit]:
        return_item = {'rows': []}
        for item in compact_item:
            # Per-row provider/release details, depending on the history action.
            provider = {}
            release_group = None
            release_name = None
            file_name = None
            subtitle_language = None

            if item['action'] in (SNATCHED, FAILED):
                provider_id = GenericProvider.make_id(item['provider'])
                provider_class = get_provider_class(provider_id)
                if provider_class:
                    provider.update({
                        'id': provider_class.get_id(),
                        'name': provider_class.name,
                        'imageName': provider_class.image_name()
                    })
                else:
                    # Provider no longer installed/enabled; fall back to the raw name.
                    provider.update({
                        'id': provider_id,
                        'name': item['provider'],
                        'imageName': f'{provider_id}.png'
                    })
                release_name = item['resource']

            if item['action'] == DOWNLOADED:
                release_group = item['provider']
                file_name = item['resource']

            if item['action'] == SUBTITLED:
                subtitle_language = item['resource']
                provider['name'] = item['provider']

            # Resolve the owning show (slug/title) when the row has one.
            item['showSlug'] = None
            item['showTitle'] = 'Missing Show'
            if item['indexer_id'] and item['showid']:
                identifier = SeriesIdentifier.from_id(
                    item['indexer_id'], item['showid'])
                item['showSlug'] = identifier.slug
                show = Series.find_by_identifier(identifier)
                if show:
                    item['showTitle'] = show.title

            # Group-level fields end up holding the values of the group's last row.
            return_item['actionDate'] = item['date']
            # NOTE(review): reads lowercase 'showslug' although 'showSlug' is
            # set just above — presumably 'showslug' is a column of the grouped
            # query that built `results`; verify against the caller.
            return_item['showSlug'] = item['showslug']
            return_item[
                'episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
                    item['showTitle'], item['season'], item['episode'])
            return_item['quality'] = item['quality']

            return_item['rows'].append({
                'actionDate': item['date'],
                'id': item['rowid'],
                'series': item['showSlug'],
                'status': item['action'],
                'statusName': statusStrings.get(item['action']),
                'quality': item['quality'],
                'resource': basename(item['resource']),
                'size': item['size'],
                'properTags': item['proper_tags'],
                'season': item['season'],
                'episode': item['episode'],
                'manuallySearched': bool(item['manually_searched']),
                'infoHash': item['info_hash'],
                'provider': provider,
                'release_name': release_name,
                'releaseGroup': release_group,
                'fileName': file_name,
                'subtitleLanguage': subtitle_language,
                'showSlug': item['showslug'],
                'showTitle': item['showTitle']
            })
        yield return_item
def patch(self, series_slug, path_param=None):
    """Patch series.

    :param series_slug: series slug identifying a single series (required).
    :param path_param: unused trailing path parameter.
    :returns: an API response; on success the accepted patched fields.
    """
    if not series_slug:
        return self._method_not_allowed(
            'Patching multiple series is not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # Reject payloads whose embedded indexer id contradicts the URL slug.
    indexer_id = data.get('id', {}).get(identifier.indexer.slug)
    if indexer_id is not None and indexer_id != identifier.id:
        return self._bad_request('Conflicting series identifier')

    accepted = {}
    ignored = {}

    # Map of patchable dotted keys to typed field adapters on this series.
    patches = {
        'config.aliases': ListField(series, 'aliases'),
        'config.defaultEpisodeStatus': StringField(series, 'default_ep_status_name'),
        'config.dvdOrder': BooleanField(series, 'dvd_order'),
        'config.seasonFolders': BooleanField(series, 'season_folders'),
        'config.anime': BooleanField(series, 'anime'),
        'config.scene': BooleanField(series, 'scene'),
        'config.sports': BooleanField(series, 'sports'),
        'config.paused': BooleanField(series, 'paused'),
        'config.location': StringField(series, 'location'),
        'config.airByDate': BooleanField(series, 'air_by_date'),
        'config.subtitlesEnabled': BooleanField(series, 'subtitles'),
        'config.release.requiredWords': ListField(series, 'release_required_words'),
        'config.release.ignoredWords': ListField(series, 'release_ignored_words'),
        'config.release.blacklist': ListField(series, 'blacklist'),
        'config.release.whitelist': ListField(series, 'whitelist'),
        'config.release.requiredWordsExclude': BooleanField(series, 'release_required_exclude'),
        'config.release.ignoredWordsExclude': BooleanField(series, 'release_ignored_exclude'),
        'language': StringField(series, 'lang'),
        'config.qualities.allowed': ListField(series, 'qualities_allowed'),
        'config.qualities.preferred': ListField(series, 'qualities_preferred'),
        'config.qualities.combined': IntegerField(series, 'quality'),
        'config.airdateOffset': IntegerField(series, 'airdate_offset'),
        # BUG FIX: was ListField(Series, 'show_lists') — bound to the class
        # instead of this instance, unlike every other field here.
        'config.showLists': ListField(series, 'show_lists'),
        'config.templates': BooleanField(series, 'templates'),
        'config.searchTemplates': ListField(series, 'search_templates'),
    }

    for key, value in iter_nested_items(data):
        patch_field = patches.get(key)
        if patch_field and patch_field.patch(series, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    # Save patched attributes in db.
    series.save_to_db()

    if ignored:
        log.warning('Series patch ignored {items!r}', {'items': ignored})

    # Push an update to any open Web UIs through the WebSocket
    msg = ws.Message('showUpdated', series.to_json(detailed=False))
    msg.push()

    return self._ok(data=accepted)
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    Collect upcoming (and recently missed) episodes from the database.

    Three queries are combined: episodes airing within the next week,
    each remaining show's next airing episode, and recently missed
    episodes (WANTED/UNAIRED within the missed range).

    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes (or a category->list map when ``group``)
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    # Ordinal day boundaries for the "missed/today/soon/later" buckets.
    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
    # Episode statuses that should never appear as "coming".
    status_list = [
        DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, ARCHIVED,
        IGNORED
    ]

    db = DBConnection()
    fields_to_select = ', '.join([
        'airdate', 'airs', 'e.description as description', 'episode',
        'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network', 'paused',
        's.quality', 'runtime', 'season', 'show_name', 'showid', 's.status'
    ])
    # Query 1: episodes airing between today (inclusive) and next week.
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, next_week] + status_list)

    # Shows already covered by query 1; excluded from query 2 below.
    done_shows_list = [int(result['showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(
        ['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]))

    # FIXME: This inner join is not multi indexer friendly.
    # Query 2: for every other show, the single next episode airing on or
    # after next week (earliest airdate via the correlated subquery).
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] +
        [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER])

    # Query 3: recently missed episodes (still WANTED/UNAIRED) within the
    # configured missed range; the NOT IN clause is kept for symmetry.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, recently, WANTED, UNAIRED] + status_list)

    # Enrich each row with its slug, localized air time and externals.
    # NOTE(review): assumes every row resolves to a known series —
    # `show` would be None otherwise and `.externals` would raise; confirm.
    for index, item in enumerate(results):
        identifier = SeriesIdentifier.from_id(int(item['indexer']),
                                              item['indexer_id'])
        show = Series.find_by_identifier(identifier)
        item['series_slug'] = identifier.slug
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))
        results[index]['externals'] = show.externals

    results.sort(key=ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        # Normalize the air-time string for display.
        result['airs'] = str(result['airs']).replace('am', ' AM').replace(
            'pm', ' PM').replace('  ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        # Bucket by air date relative to today / next week.
        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['qualityValue'] = result['quality']
        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(
            result['localtime'],
            t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        # Monday - Sunday (0 - 6)
        result['weekday'] = date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'],
                                               d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def post(self, series_slug=None, path_param=None):
    """Add a new series.

    :param series_slug: must be None — a new series has no slug yet.
    :param path_param: unused trailing path parameter.
    :returns: an API response; on success the queued add-show item.
    """
    if series_slug is not None:
        return self._bad_request('Series slug should not be specified')

    data = json_decode(self.request.body)
    if not data or 'id' not in data:
        return self._bad_request('Invalid series data')

    # BUG FIX: ignore the informational imdb id (same as the sibling
    # handler) so exactly one *indexer* identifier must remain.
    ids = {k: v for k, v in viewitems(data['id']) if k != 'imdb'}
    if len(ids) != 1:
        return self._bad_request(
            'Only 1 indexer identifier should be specified')

    identifier = SeriesIdentifier.from_slug('{slug}{id}'.format(
        slug=list(ids)[0], id=list(itervalues(ids))[0]))
    if not identifier:
        return self._bad_request('Invalid series identifier')

    if Series.find_by_identifier(identifier):
        return self._conflict('Series already exist added')

    data_options = data.get('options', {})
    # Empty/missing 'release' yields None black/whitelists, as before.
    release = data_options.get('release')

    try:
        options = {
            'default_status': data_options.get('status'),
            'quality': data_options.get('quality', {
                'preferred': [],
                'allowed': []
            }),
            'season_folders': data_options.get('seasonFolders'),
            'lang': data_options.get('language'),
            'subtitles': data_options.get('subtitles'),
            'anime': data_options.get('anime'),
            'scene': data_options.get('scene'),
            'paused': data_options.get('paused'),
            'blacklist': release.get('blacklist', []) if release else None,
            'whitelist': release.get('whitelist', []) if release else None,
            'default_status_after': None,
            'root_dir': data_options.get('rootDir'),
            'show_lists': data_options.get('showLists')
        }
        queue_item_obj = app.show_queue_scheduler.action.addShow(
            identifier.indexer.id, identifier.id,
            data_options.get('showDir'), **options)
    except SaveSeriesException as error:
        return self._not_found(error)

    # NOTE(review): `to_json` is accessed without parentheses — presumably
    # a property on the queue item; confirm on its class.
    return self._created(data=queue_item_obj.to_json)
def post(self):
    """Perform a mass update action."""
    payload = json_decode(self.request.body)

    update = payload.get('update', [])
    rescan = payload.get('rescan', [])
    rename = payload.get('rename', [])
    subtitle = payload.get('subtitle', [])
    delete = payload.get('delete', [])
    remove = payload.get('remove', [])
    image = payload.get('image', [])

    # Per-show error messages plus a counter per action kind.
    result = {
        'shows': defaultdict(list),
        'totals': {
            'update': 0,
            'rescan': 0,
            'rename': 0,
            'subtitle': 0,
            'delete': 0,
            'remove': 0,
            'image': 0
        }
    }

    every_slug = set(
        update + rescan + rename + subtitle + delete + remove + image)
    for slug in every_slug:
        series_obj = Series.find_by_identifier(
            SeriesIdentifier.from_slug(slug))
        if not series_obj:
            result['shows'][slug].append(
                'Unable to locate show: {show}'.format(show=slug))
            continue

        if slug in delete or slug in remove:
            # Full delete also removes media files; plain remove does not.
            app.show_queue_scheduler.action.removeShow(
                series_obj, slug in delete)
            if slug in delete:
                result['totals']['delete'] += 1
            if slug in remove:
                result['totals']['remove'] += 1
            # A deleted/removed show gets no further actions.
            continue

        if slug in update:
            try:
                app.show_queue_scheduler.action.updateShow(series_obj)
                result['totals']['update'] += 1
            except CantUpdateShowException as msg:
                result['shows'][slug].append(
                    'Unable to update show: {error}'.format(error=msg))
        elif slug in rescan:
            # Refreshing is skipped for shows that were just updated.
            try:
                app.show_queue_scheduler.action.refreshShow(series_obj)
                result['totals']['rescan'] += 1
            except CantRefreshShowException as msg:
                result['shows'][slug].append(
                    'Unable to refresh show {show.name}: {error}'.format(
                        show=series_obj, error=msg)
                )

        if slug in rename:
            app.show_queue_scheduler.action.renameShowEpisodes(series_obj)
            result['totals']['rename'] += 1

        if slug in subtitle:
            app.show_queue_scheduler.action.download_subtitles(series_obj)
            result['totals']['subtitle'] += 1

        if slug in image:
            image_cache.replace_images(series_obj)
            result['totals']['image'] += 1

    if result['shows']:
        ui.notifications.error(
            'Errors encountered',
            '<br />\n'.join(chain(*result['shows'].values())))

    return self._created(data=result)