def http_delete(self, series_slug, episode_slug, **kwargs):
    """Delete the episode.

    :param series_slug: series slug, e.g. tvdb1234
    :param episode_slug: episode slug, e.g. s01e01
    """
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    try:
        episode.delete_episode()
    except EpisodeDeletedException:
        # delete_episode signals successful deletion by raising
        # EpisodeDeletedException, so the exception path is the happy path.
        return self._no_content()
    else:
        return self._conflict('Unable to delete episode')
def http_patch(self, series_slug, episode_slug=None, path_param=None):
    """Patch one episode, or many episodes when no episode slug is given."""
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # Multi-patch request
    if not episode_slug:
        return self._patch_multi(series, data)

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    accepted = self._patch_episode(episode, data)
    return self._ok(data=accepted)
def _search_manual(self, data):
    """Queue a manual search for results for the provided episodes or season.

    :param data: request body containing showSlug plus episodes and/or season
    :return: an http response
    """
    if not data.get('showSlug'):
        return self._bad_request('For a manual search you need to provide a show slug')

    if not data.get('episodes') and not data.get('season'):
        return self._bad_request('For a manual search you need to provide a list of episodes or seasons')

    identifier = SeriesIdentifier.from_slug(data['showSlug'])
    if not identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    episode_segments = self._get_episode_segments(series, data)
    season_segments = self._get_season_segments(series, data)

    # Queue one manual-search item per segment, for both search types.
    for segments in (
        {'segment': episode_segments, 'manual_search_type': 'episode'},
        {'segment': season_segments, 'manual_search_type': 'season'},
    ):
        for segment in itervalues(segments['segment']):
            cur_manual_search_queue_item = ManualSearchQueueItem(
                series, segment, manual_search_type=segments['manual_search_type'])
            app.forced_search_queue_scheduler.action.add_item(cur_manual_search_queue_item)

    if not episode_segments and not season_segments:
        return self._not_found(
            'Could not find any episode for show {show}. Did you provide the correct format?'
            .format(show=series.name))

    return self._accepted('Manual search for {0} started'.format(data['showSlug']))
def get(self, series_slug, identifier):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param identifier: sub-resource selector; only 'backlogged' is supported
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    if identifier == 'backlogged':
        # TODO: revisit
        # Quality filters arrive as comma-separated integer lists.
        allowed_qualities = self._parse(self.get_argument('allowed', default=None), str)
        allowed_qualities = list(map(int, allowed_qualities.split(','))) if allowed_qualities else []
        preferred_qualities = self._parse(self.get_argument('preferred', default=None), str)
        preferred_qualities = list(map(int, preferred_qualities.split(','))) if preferred_qualities else []

        new, existing = series.get_backlogged_episodes(
            allowed_qualities=allowed_qualities,
            preferred_qualities=preferred_qualities)
        data = {'new': new, 'existing': existing}
        return self._ok(data=data)

    return self._bad_request('Invalid request')
def get_show_from_slug(slug):
    """Resolve a Series object from a show slug.

    :param slug: show slug, e.g. tvdb1234
    :raises ChangeIndexerException: when the slug cannot be parsed into an identifier.
    :return: the matched show, or None when it is not in the library.
    """
    identifier = SeriesIdentifier.from_slug(slug)
    if not identifier:
        raise ChangeIndexerException(f'Could not create identifier with slug {slug}')

    return Series.find_by_identifier(identifier)
def delete(self, series_slug, episode_slug, **kwargs):
    """Delete the episode.

    :param series_slug: series slug, e.g. tvdb1234
    :param episode_slug: episode slug, e.g. s01e01
    """
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    try:
        episode.delete_episode()
    except EpisodeDeletedException:
        # Successful deletion is signalled via EpisodeDeletedException.
        return self._no_content()
    else:
        return self._conflict('Unable to delete episode')
def http_post(self, series_slug=None, path_param=None):
    """Add a new series."""
    if series_slug is not None:
        return self._bad_request('Series slug should not be specified')

    data = json_decode(self.request.body)
    if not data or 'id' not in data:
        return self._bad_request('Invalid series data')

    # Exactly one non-imdb indexer id must be supplied.
    ids = {k: v for k, v in viewitems(data['id']) if k != 'imdb'}
    if len(ids) != 1:
        return self._bad_request('Only 1 indexer identifier should be specified')

    identifier = SeriesIdentifier.from_slug(
        '{slug}{id}'.format(slug=list(ids)[0], id=list(itervalues(ids))[0]))
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if series:
        return self._conflict('Series already exist added')

    series = Series.from_identifier(identifier)
    if not Series.save_series(series):
        return self._not_found('Series not found in the specified indexer')

    return self._created(series.to_json(), identifier=identifier.slug)
def patch(self, series_slug, episode_slug=None, path_param=None):
    """Patch an episode, or multiple episodes when the slug is omitted."""
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # Multi-patch request
    if not episode_slug:
        return self._patch_multi(series, data)

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    accepted = self._patch_episode(episode, data)
    return self._ok(data=accepted)
def post(self, series_slug=None, path_param=None):
    """Add a new series."""
    if series_slug is not None:
        return self._bad_request('Series slug should not be specified')

    data = json_decode(self.request.body)
    if not data or 'id' not in data:
        return self._bad_request('Invalid series data')

    # Only a single non-imdb indexer id is accepted.
    ids = {k: v for k, v in viewitems(data['id']) if k != 'imdb'}
    if len(ids) != 1:
        return self._bad_request('Only 1 indexer identifier should be specified')

    identifier = SeriesIdentifier.from_slug(
        '{slug}{id}'.format(slug=list(ids)[0], id=list(itervalues(ids))[0]))
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if series:
        return self._conflict('Series already exist added')

    series = Series.from_identifier(identifier)
    if not Series.save_series(series):
        return self._not_found('Series not found in the specified indexer')

    return self._created(series.to_json(), identifier=identifier.slug)
def get(self, series_slug, path_param=None):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param path_param: optional key into the serialized series to return
    """
    arg_paused = self._parse_boolean(self.get_argument('paused', default=None))

    def filter_series(current):
        # Keep every show unless the caller filtered on paused state.
        return arg_paused is None or current.paused == arg_paused

    if not series_slug:
        # No slug: list all (filtered) series, paginated.
        detailed = self._parse_boolean(self.get_argument('detailed', default=False))
        data = [s.to_json(detailed=detailed) for s in Series.find_series(predicate=filter_series)]
        return self._paginate(data, sort='title')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(identifier, predicate=filter_series)
    if not series:
        return self._not_found('Series not found')

    detailed = self._parse_boolean(self.get_argument('detailed', default=True))

    data = series.to_json(detailed=detailed)
    if path_param:
        if path_param not in data:
            # FIX: message previously read "Invalid path parameter'{0}'"
            # (missing space), inconsistent with the sibling handler.
            return self._bad_request("Invalid path parameter '{0}'".format(path_param))
        data = data[path_param]

    return self._ok(data)
def http_get(self, series_slug, identifier):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param identifier: sub-resource selector; only 'backlogged' is supported
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    if identifier == 'backlogged':
        # TODO: revisit
        # Both quality filters are optional comma-separated integer lists.
        allowed_qualities = self._parse(self.get_argument('allowed', default=None), str)
        allowed_qualities = list(map(int, allowed_qualities.split(','))) if allowed_qualities else []
        preferred_qualities = self._parse(self.get_argument('preferred', default=None), str)
        preferred_qualities = list(map(int, preferred_qualities.split(','))) if preferred_qualities else []

        new, existing = series.get_backlogged_episodes(
            allowed_qualities=allowed_qualities,
            preferred_qualities=preferred_qualities)
        data = {'new': new, 'existing': existing}
        return self._ok(data=data)

    return self._bad_request('Invalid request')
def http_patch(self, series_slug, path_param=None):
    """Patch series.

    Applies the whitelisted fields from the request body to the series,
    returning the accepted changes and logging the ignored ones.
    """
    if not series_slug:
        return self._method_not_allowed('Patching multiple series is not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # An id in the body may not contradict the identifier in the URL.
    indexer_id = data.get('id', {}).get(identifier.indexer.slug)
    if indexer_id is not None and indexer_id != identifier.id:
        return self._bad_request('Conflicting series identifier')

    accepted = {}
    ignored = {}

    # Whitelist of patchable fields: request key -> series attribute.
    patches = {
        'config.aliases': ListField(series, 'aliases'),
        'config.defaultEpisodeStatus': StringField(series, 'default_ep_status_name'),
        'config.dvdOrder': BooleanField(series, 'dvd_order'),
        'config.seasonFolders': BooleanField(series, 'season_folders'),
        'config.anime': BooleanField(series, 'anime'),
        'config.scene': BooleanField(series, 'scene'),
        'config.sports': BooleanField(series, 'sports'),
        'config.paused': BooleanField(series, 'paused'),
        'config.location': StringField(series, 'location'),
        'config.airByDate': BooleanField(series, 'air_by_date'),
        'config.subtitlesEnabled': BooleanField(series, 'subtitles'),
        'config.release.requiredWords': ListField(series, 'release_required_words'),
        'config.release.ignoredWords': ListField(series, 'release_ignore_words'),
        'config.release.blacklist': ListField(series, 'blacklist'),
        'config.release.whitelist': ListField(series, 'whitelist'),
        'config.release.requiredWordsExclude': BooleanField(series, 'rls_require_exclude'),
        'config.release.ignoredWordsExclude': BooleanField(series, 'rls_ignore_exclude'),
        'language': StringField(series, 'lang'),
        'config.qualities.allowed': ListField(series, 'qualities_allowed'),
        'config.qualities.preferred': ListField(series, 'qualities_preferred'),
        'config.qualities.combined': IntegerField(series, 'quality'),
        'config.airdateOffset': IntegerField(series, 'airdate_offset'),
    }

    for key, value in iter_nested_items(data):
        patch_field = patches.get(key)
        if patch_field and patch_field.patch(series, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    # Save patched attributes in db.
    series.save_to_db()

    if ignored:
        log.warning('Series patch ignored {items!r}', {'items': ignored})

    return self._ok(data=accepted)
def patch(self, series_slug, path_param=None):
    """Patch series.

    Applies the whitelisted fields from the request body to the series,
    returning the accepted changes and logging the ignored ones.
    """
    if not series_slug:
        return self._method_not_allowed('Patching multiple series is not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # An id in the body may not contradict the identifier in the URL.
    indexer_id = data.get('id', {}).get(identifier.indexer.slug)
    if indexer_id is not None and indexer_id != identifier.id:
        return self._bad_request('Conflicting series identifier')

    accepted = {}
    ignored = {}

    # Whitelist of patchable fields: request key -> series attribute.
    patches = {
        'config.aliases': ListField(series, 'aliases'),
        'config.defaultEpisodeStatus': StringField(series, 'default_ep_status_name'),
        'config.dvdOrder': BooleanField(series, 'dvd_order'),
        'config.seasonFolders': BooleanField(series, 'season_folders'),
        'config.anime': BooleanField(series, 'anime'),
        'config.scene': BooleanField(series, 'scene'),
        'config.sports': BooleanField(series, 'sports'),
        'config.paused': BooleanField(series, 'paused'),
        'config.location': StringField(series, '_location'),
        'config.airByDate': BooleanField(series, 'air_by_date'),
        'config.subtitlesEnabled': BooleanField(series, 'subtitles'),
        'config.release.requiredWords': ListField(series, 'release_required_words'),
        'config.release.ignoredWords': ListField(series, 'release_ignore_words'),
        'config.release.blacklist': ListField(series, 'blacklist'),
        'config.release.whitelist': ListField(series, 'whitelist'),
        'language': StringField(series, 'lang'),
        'config.qualities.allowed': ListField(series, 'qualities_allowed'),
        'config.qualities.preferred': ListField(series, 'qualities_preferred'),
        'config.qualities.combined': IntegerField(series, 'quality'),
    }

    for key, value in iter_nested_items(data):
        patch_field = patches.get(key)
        if patch_field and patch_field.patch(series, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    # Save patched attributes in db.
    series.save_to_db()

    if ignored:
        log.warning('Series patch ignored {items!r}', {'items': ignored})

    # FIX: the response was previously not returned (`self._ok(...)` without
    # `return`), unlike every other handler in this module.
    return self._ok(data=accepted)
def resource_search_missing_subtitles(self):
    """Search missing subtitles for multiple episodes at once.

    example: Pass the following structure:
        language: 'all', // Or a three letter language code.
        shows: [
            {
                'slug': 'tvdb1234',
                'episodes': ['s01e01', 's02e03', 's10e10']
            },
        ]
    """
    data = json_decode(self.request.body)

    language = data.get('language', 'all')
    shows = data.get('shows', [])

    if language != 'all' and language not in subtitle_code_filter():
        return self._bad_request('You need to provide a valid subtitle code')

    for show in shows:
        # Loop through the shows. Each show should have an array of episode slugs
        series_identifier = SeriesIdentifier.from_slug(show.get('slug'))
        if not series_identifier:
            log.warning('Could not create a show identifier with slug {slug}',
                        {'slug': show.get('slug')})
            continue

        series = Series.find_by_identifier(series_identifier)
        if not series:
            log.warning('Could not match to a show in the library with slug {slug}',
                        {'slug': show.get('slug')})
            continue

        for episode_slug in show.get('episodes', []):
            episode_number = EpisodeNumber.from_slug(episode_slug)
            if not episode_number:
                log.warning('Bad episode number from slug {slug}', {'slug': episode_slug})
                continue

            episode = Episode.find_by_series_and_episode(series, episode_number)
            if not episode:
                log.warning('Episode not found with slug {slug}', {'slug': episode_slug})
                # FIX: previously fell through and called download_subtitles
                # on None, raising AttributeError.
                continue

            episode.download_subtitles(lang=language if language != 'all' else None)

    return self._ok()
def _search_failed(self, data):
    """Queue a failed search.

    :param data: request body with showSlug and a list of episode slugs
    :return: an http response
    """
    statuses = {}

    if not data.get('showSlug'):
        return self._bad_request('For a failed search you need to provide a show slug')

    if not data.get('episodes'):
        return self._bad_request('For a failed search you need to provide a list of episodes')

    identifier = SeriesIdentifier.from_slug(data['showSlug'])
    if not identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    # Group the resolvable episodes by season; record a status per bad slug.
    season_segments = defaultdict(list)
    for episode_slug in data['episodes']:
        episode_number = EpisodeNumber.from_slug(episode_slug)
        if not episode_number:
            statuses[episode_slug] = {'status': 400}
            continue

        episode = Episode.find_by_series_and_episode(series, episode_number)
        if not episode:
            statuses[episode_slug] = {'status': 404}
            continue

        season_segments[episode.season].append(episode)

    if not season_segments:
        return self._not_found(
            'Could not find any episode for show {show}. Did you provide the correct format?'
            .format(show=series.name))

    for segment in itervalues(season_segments):
        cur_failed_queue_item = FailedQueueItem(series, segment)
        app.forced_search_queue_scheduler.action.add_item(cur_failed_queue_item)

    return self._accepted('Failed search for {0} started'.format(data['showSlug']))
def searchEpisodeSubtitles(self, showslug=None, season=None, episode=None, lang=None):
    """Manually (re-)download subtitles for a single episode.

    :param showslug: show slug, e.g. tvdb1234
    :param season: season number
    :param episode: episode number
    :param lang: optional three-letter subtitle language code
    :return: a json string with result, subtitles, languages and description
    """
    # retrieve the episode object and fail if we can't get one
    series_obj = Series.find_by_identifier(SeriesIdentifier.from_slug(showslug))
    ep_obj = series_obj.get_episode(season, episode)
    if not ep_obj:
        return json.dumps({
            'result': 'failure',
        })

    try:
        if lang:
            logger.log('Manual re-downloading subtitles for {show} with language {lang}'
                       .format(show=ep_obj.series.name, lang=lang))
        # FIX: always perform the download. Previously the assignment was only
        # reachable when `lang` was set, so `new_subtitles` was undefined
        # (NameError) for requests without a language.
        new_subtitles = ep_obj.download_subtitles(lang=lang)
    except Exception as error:
        return json.dumps({
            'result': 'failure',
            'description': 'Error while downloading subtitles: {error}'.format(error=error)
        })

    if new_subtitles:
        new_languages = [subtitles.name_from_code(code) for code in new_subtitles]
        description = 'New subtitles downloaded: {languages}'.format(
            languages=', '.join(new_languages))
        result = 'success'
    else:
        new_languages = []
        description = 'No subtitles downloaded'
        result = 'failure'

    ui.notifications.message(ep_obj.series.name, description)
    return json.dumps({
        'result': result,
        'subtitles': ep_obj.subtitles,
        'languages': new_languages,
        'description': description
    })
def get(self, series_slug, identifier, *args, **kwargs):
    """Get an asset for a series (defaults to the banner)."""
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    asset_type = identifier or 'banner'
    asset = series.get_asset(asset_type)
    if not asset:
        return self._not_found('Asset not found')

    self._ok(stream=asset.media, content_type=asset.media_type)
def get(self, series_slug, episode_slug, path_param):
    """Query episode information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param episode_slug: episode slug. E.g.: s01e01
    :param path_param: optional key into the serialized episode to return
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    if not episode_slug:
        # No episode given: paginate over (optionally one season of) episodes.
        detailed = self._parse_boolean(self.get_argument('detailed', default=False))
        season = self._parse(self.get_argument('season', None), int)
        data = [e.to_json(detailed=detailed) for e in series.get_all_episodes(season=season)]
        return self._paginate(data, sort='airDate')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    detailed = self._parse_boolean(self.get_argument('detailed', default=True))
    data = episode.to_json(detailed=detailed)

    if path_param:
        if path_param == 'metadata':
            # Metadata is only available when the file location is valid.
            data = episode.metadata() if episode.is_location_valid() else {}
        elif path_param in data:
            data = data[path_param]
        else:
            return self._bad_request("Invalid path parameter '{0}'".format(path_param))

    return self._ok(data=data)
def get(self, identifier):
    """Collect ran, running and queued searches for a specific show.

    :param identifier: the show slug
    """
    if not identifier:
        return self._bad_request('You need to add the show slug to the route')

    series = SeriesIdentifier.from_slug(identifier)
    if not series:
        return self._bad_request('Invalid series slug')

    series_obj = Series.find_by_identifier(series)
    if not series_obj:
        return self._not_found('Series not found')

    return {'results': collect_episodes_from_search_thread(series_obj)}
def delete(self, series_slug, path_param=None):
    """Delete the series, optionally removing its files."""
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    remove_files = self._parse_boolean(self.get_argument('remove-files', default=None))
    if not series.delete(remove_files):
        return self._conflict('Unable to delete series')

    return self._no_content()
def http_delete(self, series_slug, path_param=None):
    """Delete the series, optionally removing its files."""
    if not series_slug:
        return self._method_not_allowed('Deleting multiple series are not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    remove_files = self._parse_boolean(self.get_argument('remove-files', default=None))
    if not series.delete(remove_files):
        return self._conflict('Unable to delete series')

    return self._no_content()
def patch(self, series_slug, path_param=None):
    """Patch series.

    Applies the whitelisted fields from the request body to the series.
    """
    if not series_slug:
        return self._method_not_allowed('Patching multiple series is not allowed')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series identifier')

    series = Series.find_by_identifier(identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)

    # An id in the body may not contradict the identifier in the URL.
    indexer_id = data.get('id', {}).get(identifier.indexer.slug)
    if indexer_id is not None and indexer_id != identifier.id:
        return self._bad_request('Conflicting series identifier')

    accepted = {}
    ignored = {}

    # Whitelist of patchable fields: request key -> series attribute.
    patches = {
        'config.dvdOrder': BooleanField(series, 'dvd_order'),
        'config.flattenFolders': BooleanField(series, 'flatten_folders'),
        'config.scene': BooleanField(series, 'scene'),
        'config.paused': BooleanField(series, 'paused'),
        'config.location': StringField(series, '_location'),
        'config.airByDate': BooleanField(series, 'air_by_date'),
        'config.subtitlesEnabled': BooleanField(series, 'subtitles')
    }

    for key, value in iter_nested_items(data):
        patch_field = patches.get(key)
        if patch_field and patch_field.patch(series, value):
            set_nested_value(accepted, key, value)
        else:
            set_nested_value(ignored, key, value)

    # Save patched attributes in db.
    series.save_to_db()

    if ignored:
        log.warning('Series patch ignored %r', ignored)

    self._ok(data=accepted)
def subtitleShow(self, showslug=None):
    """Queue a subtitle search for the whole show, then redirect to its page."""
    if showslug is None:
        return self._genericMessage('Error', 'Invalid show ID')

    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)
    if series_obj is None:
        return self._genericMessage('Error', 'Unable to find the specified show')

    # search and download subtitles
    app.show_queue_scheduler.action.download_subtitles(series_obj)

    time.sleep(cpu_presets[app.CPU_PRESET])

    return self.redirect(
        '/home/displayShow?showslug={series_obj.slug}'.format(series_obj=series_obj))
def http_get(self, series_slug, identifier, *args, **kwargs):
    """Get an asset for a series (defaults to the banner), without fallback."""
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    asset_type = identifier or 'banner'
    asset = series.get_asset(asset_type, fallback=False)
    if not asset:
        return self._not_found('Asset not found')

    media = asset.media
    if not media:
        return self._not_found('{kind} not found'.format(kind=asset_type.capitalize()))

    return self._ok(stream=media, content_type=asset.media_type)
def http_get(self, series_slug, identifier, *args, **kwargs):
    """Stream a series asset (banner by default); 404 when missing."""
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    asset_type = identifier or 'banner'
    asset = series.get_asset(asset_type, fallback=False)
    if not asset:
        return self._not_found('Asset not found')

    media = asset.media
    if not media:
        return self._not_found('{kind} not found'.format(kind=asset_type.capitalize()))

    return self._ok(stream=media, content_type=asset.media_type)
def http_get(self, series_slug, episode_slug, path_param):
    """Query episode information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param episode_slug: episode slug. E.g.: s01e01
    :param path_param: optional key into the serialized episode to return
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    if not episode_slug:
        # No episode given: paginate over (optionally one season of) episodes.
        detailed = self._parse_boolean(self.get_argument('detailed', default=False))
        season = self._parse(self.get_argument('season', None), int)
        data = [e.to_json(detailed=detailed) for e in series.get_all_episodes(season=season)]
        return self._paginate(data, sort='airDate')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    detailed = self._parse_boolean(self.get_argument('detailed', default=True))
    data = episode.to_json(detailed=detailed)

    if path_param:
        if path_param == 'metadata':
            # Metadata is only available when the file location is valid.
            data = episode.metadata() if episode.is_location_valid() else {}
        elif path_param in data:
            data = data[path_param]
        else:
            return self._bad_request("Invalid path parameter '{0}'".format(path_param))

    return self._ok(data=data)
def emby_update(self):
    """Update emby's show library (one show when a slug is given, else all)."""
    show_slug = self.get_argument('showslug', '')
    show = None

    if show_slug:
        show_identifier = SeriesIdentifier.from_slug(show_slug)
        if not show_identifier:
            return self._bad_request('Invalid show slug')

        show = Series.find_by_identifier(show_identifier)
        if not show:
            return self._not_found('Series not found')

    if notifiers.emby_notifier.update_library(show):
        ui.notifications.message(f'Library update command sent to Emby host: {app.EMBY_HOST}')
    else:
        ui.notifications.error(f'Unable to contact Emby host: {app.EMBY_HOST}')

    return self._created()
def getSeasonSceneExceptions(self, showslug=None):
    """Get show name scene exceptions per season.

    :param showslug: the show's slug, e.g. tvdb1234
    :return: A json with the scene exceptions per season and the xem numbering.
    """
    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)

    return json.dumps({
        'seasonExceptions': {
            season: list(exception_name)
            for season, exception_name in iteritems(get_all_scene_exceptions(series_obj))
        },
        # Map tvdb season numbers to anidb season numbers (no refresh).
        'xemNumbering': {
            tvdb_season_ep[0]: anidb_season_ep[0]
            for (tvdb_season_ep, anidb_season_ep) in iteritems(
                get_xem_numbering_for_show(series_obj, refresh_data=False))
        }
    })
def post(self, series_slug):
    """Run an operation on the series.

    :param series_slug: series slug. E.g.: tvdb1234
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)
    # The body must contain exactly one key: a non-empty 'type'.
    if not data or not all([data.get('type')]) or len(data) != 1:
        return self._bad_request('Invalid request body')

    if data['type'] == 'ARCHIVE_EPISODES':
        if series.set_all_episodes_archived(final_status_only=True):
            return self._created()
        return self._no_content()

    return self._bad_request('Invalid operation')
def saveShowNotifyList(show=None, emails=None, prowlAPIs=None):
    """Persist per-show notification settings (emails / prowl API keys).

    :param show: show slug, e.g. tvdb1234
    :param emails: optional new email list value
    :param prowlAPIs: optional new prowl API keys value
    :return: 'OK' on success, 'show missing' when the show is not found.
    """
    series_identifier = SeriesIdentifier.from_slug(show)
    series_obj = Series.find_by_identifier(series_identifier)

    # Create a new dict, to force the "dirty" flag on the Series object.
    entries = {'emails': '', 'prowlAPIs': ''}

    if not series_obj:
        return 'show missing'

    if series_obj.notify_list:
        entries.update(series_obj.notify_list)

    if emails is not None:
        entries['emails'] = emails

    if prowlAPIs is not None:
        entries['prowlAPIs'] = prowlAPIs

    series_obj.notify_list = entries
    series_obj.save_to_db()

    return 'OK'
def http_post(self, series_slug):
    """Run an operation on the series.

    :param series_slug: series slug. E.g.: tvdb1234
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    data = json_decode(self.request.body)
    # The body must contain exactly one key: a non-empty 'type'.
    if not data or not all([data.get('type')]) or len(data) != 1:
        return self._bad_request('Invalid request body')

    if data['type'] == 'ARCHIVE_EPISODES':
        if series.set_all_episodes_archived(final_status_only=True):
            return self._created()
        return self._no_content()

    return self._bad_request('Invalid operation')
def http_get(self, series_slug, path_param=None):
    """Query series information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param path_param: optional key into the serialized series to return
    """
    arg_paused = self._parse_boolean(self.get_argument('paused', default=None))

    def filter_series(current):
        # Keep every show unless the caller filtered on paused state.
        return arg_paused is None or current.paused == arg_paused

    if not series_slug:
        # No slug: list all (filtered) series, paginated.
        detailed = self._parse_boolean(self.get_argument('detailed', default=False))
        fetch = self._parse_boolean(self.get_argument('fetch', default=False))
        data = [
            s.to_json(detailed=detailed, fetch=fetch)
            for s in Series.find_series(predicate=filter_series)
        ]
        return self._paginate(data, sort='title')

    identifier = SeriesIdentifier.from_slug(series_slug)
    if not identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(identifier, predicate=filter_series)
    if not series:
        return self._not_found('Series not found')

    detailed = self._parse_boolean(self.get_argument('detailed', default=True))
    fetch = self._parse_boolean(self.get_argument('fetch', default=False))

    data = series.to_json(detailed=detailed, fetch=fetch)
    if path_param:
        if path_param not in data:
            return self._bad_request("Invalid path parameter '{0}'".format(path_param))
        data = data[path_param]

    return self._ok(data)
def updateShow(self, showslug=None):
    """Force an update of the show, then redirect back to displayShow."""
    # @TODO: Replace with status=update or status=updating from PATCH /api/v2/show/{id}
    if showslug is None:
        return self._genericMessage('Error', 'Invalid show ID')

    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)
    if series_obj is None:
        return self._genericMessage('Error', 'Unable to find the specified show')

    # force the update
    try:
        app.show_queue_scheduler.action.updateShow(series_obj)
    except CantUpdateShowException as e:
        ui.notifications.error('Unable to update this show.', ex(e))

    # just give it some time
    time.sleep(cpu_presets[app.CPU_PRESET])

    return self.redirect(
        '/home/displayShow?showslug={series_obj.slug}'.format(series_obj=series_obj))
def post(self, series_slug=None, path_param=None):
    """Add a new series, queuing it with the supplied options."""
    if series_slug is not None:
        return self._bad_request('Series slug should not be specified')

    data = json_decode(self.request.body)
    if not data or 'id' not in data:
        return self._bad_request('Invalid series data')

    ids = {k: v for k, v in viewitems(data['id'])}
    if len(ids) != 1:
        return self._bad_request('Only 1 indexer identifier should be specified')

    identifier = SeriesIdentifier.from_slug(
        '{slug}{id}'.format(slug=list(ids)[0], id=list(itervalues(ids))[0]))
    if not identifier:
        return self._bad_request('Invalid series identifier')

    if Series.find_by_identifier(identifier):
        return self._conflict('Series already exist added')

    data_options = data.get('options', {})

    try:
        # Translate the camelCase request options into addShow kwargs.
        options = {
            'default_status': data_options.get('status'),
            'quality': data_options.get('quality', {'preferred': [], 'allowed': []}),
            'season_folders': data_options.get('seasonFolders'),
            'lang': data_options.get('language'),
            'subtitles': data_options.get('subtitles'),
            'anime': data_options.get('anime'),
            'scene': data_options.get('scene'),
            'paused': data_options.get('paused'),
            'blacklist': data_options['release'].get('blacklist', [])
                         if data_options.get('release') else None,
            'whitelist': data_options['release'].get('whitelist', [])
                         if data_options.get('release') else None,
            'default_status_after': None,
            'root_dir': data_options.get('rootDir'),
            'show_lists': data_options.get('showLists')
        }

        queue_item_obj = app.show_queue_scheduler.action.addShow(
            identifier.indexer.id, identifier.id, data_options.get('showDir'), **options)
    except SaveSeriesException as error:
        return self._not_found(error)

    return self._created(data=queue_item_obj.to_json)
def create_history_item(history_row, compact=False):
    """
    Create a history object, using the data from a history db row item.

    Calculate additional data, where needed.

    :param history_row: a main.db history row.
    :param compact: A boolean indicating if this is used for a compact layout.
        NOTE(review): currently unused in this body — confirm whether callers rely on it.
    :returns: A dict with history information.
    """
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider
    from medusa.tv.series import Series, SeriesIdentifier

    provider = {}
    release_group = None
    release_name = None
    file_name = None
    subtitle_language = None
    client_status = None
    # Fixed: show_slug was initialized twice; once is enough.
    show_slug = None
    show_title = 'Missing Show'

    if history_row['action'] in (SNATCHED, FAILED):
        provider_id = GenericProvider.make_id(history_row['provider'])
        provider_class = get_provider_class(provider_id)

        if provider_class:
            provider.update({
                'id': provider_class.get_id(),
                'name': provider_class.name,
                'imageName': provider_class.image_name()
            })
        else:
            # Provider no longer exists; fall back to the stored name.
            provider.update({
                'id': provider_id,
                'name': history_row['provider'],
                'imageName': f'{provider_id}.png'
            })
        release_name = history_row['resource']

    if history_row['action'] == DOWNLOADED:
        release_group = history_row['provider']
        file_name = history_row['resource']

    if history_row['action'] == SUBTITLED:
        subtitle_language = history_row['resource']
        provider['name'] = history_row['provider']

    if history_row['client_status'] is not None:
        status = ClientStatus(status=history_row['client_status'])
        client_status = {
            'status': [s.value for s in status],
            'string': status.status_to_array_string()
        }

    if history_row['indexer_id'] and history_row['showid']:
        identifier = SeriesIdentifier.from_id(history_row['indexer_id'], history_row['showid'])
        show_slug = identifier.slug
        show = Series.find_by_identifier(identifier)
        if show:
            show_title = show.title

    history_row['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
        show_title, history_row['season'], history_row['episode'])

    return {
        'series': show_slug,
        'status': history_row['action'],
        'statusName': statusStrings.get(history_row['action']),
        'actionDate': history_row['date'],
        'quality': history_row['quality'],
        'resource': basename(history_row['resource']),
        'size': history_row['size'],
        'properTags': history_row['proper_tags'],
        'season': history_row['season'],
        'episode': history_row['episode'],
        'episodeTitle': history_row['episodeTitle'],
        'manuallySearched': bool(history_row['manually_searched']),
        'infoHash': history_row['info_hash'],
        'provider': provider,
        'releaseName': release_name,
        'releaseGroup': release_group,
        'fileName': file_name,
        'subtitleLanguage': subtitle_language,
        'showSlug': show_slug,
        'showTitle': show_title,
        'providerType': history_row['provider_type'],
        'clientStatus': client_status,
        'partOfBatch': bool(history_row['part_of_batch'])
    }
def get(self, series_slug, episode_slug, path_param):
    """Query episode's history information.

    :param series_slug: series slug. E.g.: tvdb1234
    :param episode_slug: episode slug. E.g.: s01e01
    :param path_param:
    """
    series_identifier = SeriesIdentifier.from_slug(series_slug)
    if not series_identifier:
        return self._bad_request('Invalid series slug')

    series = Series.find_by_identifier(series_identifier)
    if not series:
        return self._not_found('Series not found')

    if not episode_slug:
        return self._bad_request('Invalid episode slug')

    episode_number = EpisodeNumber.from_slug(episode_slug)
    if not episode_number:
        # Consistency fix: a malformed slug is a client error (400), matching
        # the other episode handlers, rather than a 404.
        return self._bad_request('Invalid episode number')

    episode = Episode.find_by_series_and_episode(series, episode_number)
    if not episode:
        return self._not_found('Episode not found')

    sql_base = """
        SELECT rowid, date, action, quality, provider, version,
               resource, size, proper_tags, indexer_id, showid,
               season, episode, manually_searched, info_hash
        FROM history
        WHERE showid = ?
        AND indexer_id = ?
        AND season = ?
        AND episode = ?
    """
    params = [series.series_id, series.indexer, episode.season, episode.episode]
    sql_base += ' ORDER BY date DESC'
    results = db.DBConnection().select(sql_base, params)

    def data_generator():
        """Read history data and normalize key/value pairs."""
        for item in results:
            provider = {}
            release_group = None
            release_name = None
            file_name = None
            subtitle_language = None

            if item['action'] in (SNATCHED, FAILED):
                provider.update({
                    'id': GenericProvider.make_id(item['provider']),
                    'name': item['provider']
                })
                release_name = item['resource']

            if item['action'] == DOWNLOADED:
                release_group = item['provider']
                file_name = item['resource']

            # Fixed: SUBTITLED was handled by two consecutive, partially
            # duplicated if-blocks; merged into a single one.
            if item['action'] == SUBTITLED:
                subtitle_language = item['resource']
                provider.update({
                    'id': item['provider'],
                    'name': item['provider']
                })

            yield {
                'id': item['rowid'],
                'series': SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug,
                'status': item['action'],
                'statusName': statusStrings.get(item['action']),
                'actionDate': item['date'],
                'quality': item['quality'],
                'resource': basename(item['resource']),
                'size': item['size'],
                'properTags': item['proper_tags'],
                'season': item['season'],
                'episode': item['episode'],
                'manuallySearched': bool(item['manually_searched']),
                'infoHash': item['info_hash'],
                'provider': provider,
                # NOTE(review): snake_case key is inconsistent with the
                # camelCase keys used elsewhere ('releaseName'); kept as-is
                # because API consumers may depend on it.
                'release_name': release_name,
                'releaseGroup': release_group,
                'fileName': file_name,
                'subtitleLanguage': subtitle_language
            }

    if not results:
        return self._not_found(
            'History data not found for show {show} and episode {episode}'.format(
                show=series.identifier.slug, episode=episode.slug))

    return self._ok(data=list(data_generator()))
def resource_update_episode_status(self):
    """
    Mass update episodes statuses for multiple shows at once.

    example: Pass the following structure:
        status: 3,
        shows: [
            {
                'slug': 'tvdb1234',
                'episodes': ['s01e01', 's02e03', 's10e10']
            },
        ]
    """
    data = json_decode(self.request.body)

    status = data.get('status')
    shows = data.get('shows', [])

    if status not in statusStrings:
        return self._bad_request('You need to provide a valid status code')

    ep_sql_l = []
    for show in shows:
        # Loop through the shows. Each show should have an array of episode slugs
        series_identifier = SeriesIdentifier.from_slug(show.get('slug'))
        if not series_identifier:
            log.warning('Could not create a show identifier with slug {slug}',
                        {'slug': show.get('slug')})
            continue

        series = Series.find_by_identifier(series_identifier)
        if not series:
            log.warning('Could not match to a show in the library with slug {slug}',
                        {'slug': show.get('slug')})
            continue

        episodes = []
        for episode_slug in show.get('episodes', []):
            episode_number = EpisodeNumber.from_slug(episode_slug)
            if not episode_number:
                log.warning('Bad episode number from slug {slug}', {'slug': episode_slug})
                continue

            episode = Episode.find_by_series_and_episode(series, episode_number)
            if not episode:
                log.warning('Episode not found with slug {slug}', {'slug': episode_slug})
                # Fixed: previously fell through and called
                # mass_update_episode_status on None, raising AttributeError.
                continue

            ep_sql = episode.mass_update_episode_status(status)
            if ep_sql:
                ep_sql_l.append(ep_sql)
                # Keep an array of episodes for the trakt sync
                episodes.append(episode)

        if episodes:
            series.sync_trakt_episodes(status, episodes)

    if ep_sql_l:
        main_db_con = db.DBConnection()
        main_db_con.mass_action(ep_sql_l)

    return self._ok(data={'count': len(ep_sql_l)})
def resource_existing_series(self):
    """Generate existing series folders data for adding existing shows."""
    if not app.ROOT_DIRS:
        return self._not_found('No configured root dirs')

    # First element of ROOT_DIRS is the default-dir index, not a path.
    root_dirs = app.ROOT_DIRS[1:]
    root_dirs_indices = self.get_argument('rootDirs', '')

    if root_dirs_indices:
        root_dirs_indices = set(root_dirs_indices.split(','))

        try:
            root_dirs_indices = sorted(map(int, root_dirs_indices))
        except ValueError as error:
            log.warning('Unable to parse root dirs indices: {indices}. Error: {error}',
                        {'indices': root_dirs_indices, 'error': error})
            return self._bad_request('Invalid root dirs indices')

        root_dirs = [root_dirs[idx] for idx in root_dirs_indices]

    dir_list = []

    # Get a unique list of shows
    main_db_con = db.DBConnection()
    dir_results = main_db_con.select('SELECT location '
                                     'FROM tv_shows')
    root_dirs_tuple = tuple(root_dirs)
    dir_results = [
        series['location'] for series in dir_results
        if series['location'].startswith(root_dirs_tuple)
    ]

    for root_dir in root_dirs:
        try:
            file_list = os.listdir(root_dir)
        except Exception as error:
            # Best-effort scan: an unreadable root dir is skipped, not fatal.
            log.info('Unable to list directory {path}: {err!r}',
                     {'path': root_dir, 'err': error})
            continue

        for cur_file in file_list:
            try:
                cur_path = os.path.normpath(os.path.join(root_dir, cur_file))
                if not os.path.isdir(cur_path):
                    continue
            except Exception as error:
                log.info('Unable to get current path {path} and {file}: {err!r}',
                         {'path': root_dir, 'file': cur_file, 'err': error})
                continue

            cur_dir = {
                'path': cur_path,
                'alreadyAdded': False,
                'metadata': {
                    'seriesId': None,
                    'seriesName': None,
                    'indexer': None
                }
            }

            # Check if the folder is already in the library
            cur_dir['alreadyAdded'] = next((True for path in dir_results if path == cur_path), False)

            if not cur_dir['alreadyAdded']:
                # Fixed: initialize before the loop so an empty
                # metadata_provider_dict can no longer leave these names
                # unbound (NameError) when building the identifier below.
                series_id = series_name = indexer = None
                # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
                # is reset. This will prevent errors, when using multiple indexers and caching.
                for cur_provider in itervalues(app.metadata_provider_dict):
                    (series_id, series_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                    if all((series_id, series_name, indexer)):
                        cur_dir['metadata'] = {
                            'seriesId': try_int(series_id),
                            'seriesName': series_name,
                            'indexer': try_int(indexer)
                        }
                        break

                series_identifier = SeriesIdentifier(indexer, series_id)
                cur_dir['alreadyAdded'] = bool(Series.find_by_identifier(series_identifier))

            dir_list.append(cur_dir)

    return self._ok(data=dir_list)
def manualSearchSubtitles(self, showslug=None, season=None, episode=None, release_id=None, picked_id=None):
    """Manually search for, or download, subtitles for a single episode.

    :param showslug: series slug; used when no release_id is given.
    :param season: season number of the episode.
    :param episode: episode number.
    :param release_id: id of a postponed release in app.RELEASES_IN_PP.
    :param picked_id: subtitle id to download; when set, mode is 'downloading'.
    :returns: a JSON string with 'result', 'release', 'subtitles' and 'description'.
    """
    mode = 'downloading' if picked_id else 'searching'
    description = ''
    logger.log('Starting to manual {mode} subtitles'.format(mode=mode))

    try:
        if release_id:
            # Release ID is sent when using postpone
            release = app.RELEASES_IN_PP[int(release_id)]
            indexer_name = release['indexername']
            series_id = release['seriesid']
            season = release['season']
            episode = release['episode']
            filepath = release['release']
            identifier = SeriesIdentifier.from_id(indexer_name_to_id(indexer_name), series_id)
        else:
            filepath = None
            identifier = SeriesIdentifier.from_slug(showslug)

        series_obj = Series.find_by_identifier(identifier)
        ep_obj = series_obj.get_episode(season, episode)
        video_path = filepath or ep_obj.location
        release_name = ep_obj.release_name or os.path.basename(video_path)
    except IndexError:
        ui.notifications.message('Outdated list', 'Please refresh page and try again')
        logger.log('Outdated list. Please refresh page and try again', logger.WARNING)
        return json.dumps({
            'result': 'failure',
            'description': 'Outdated list. Please refresh page and try again'
        })
    # Robustness fix: also catch AttributeError, raised above when the show
    # can not be found (Series.find_by_identifier returns None).
    except (AttributeError, ValueError, TypeError) as e:
        ui.notifications.message('Error', 'Please check logs')
        logger.log(
            'Error while manual {mode} subtitles. Error: {error_msg}'.format(
                mode=mode, error_msg=e), logger.ERROR)
        return json.dumps({
            'result': 'failure',
            'description': 'Error while manual {mode} subtitles. Error: {error_msg}'.format(
                mode=mode, error_msg=e)
        })

    if not os.path.isfile(video_path):
        ui.notifications.message(ep_obj.series.name, "Video file no longer exists. Can't search for subtitles")
        logger.log('Video file no longer exists: {video_file}'.format(video_file=video_path),
                   logger.DEBUG)
        return json.dumps({
            'result': 'failure',
            'description': 'Video file no longer exists: {video_file}'.format(video_file=video_path)
        })

    if mode == 'searching':
        logger.log('Manual searching subtitles for: {0}'.format(release_name))
        found_subtitles = subtitles.list_subtitles(tv_episode=ep_obj, video_path=video_path)
        if found_subtitles:
            ui.notifications.message(ep_obj.series.name,
                                     'Found {} subtitles'.format(len(found_subtitles)))
        else:
            ui.notifications.message(ep_obj.series.name, 'No subtitle found')

        if found_subtitles:
            result = 'success'
        else:
            result = 'failure'
            description = 'No subtitles found'
        subtitles_result = found_subtitles
    else:
        logger.log('Manual downloading subtitles for: {0}'.format(release_name))
        new_manual_subtitle = subtitles.save_subtitle(
            tv_episode=ep_obj, subtitle_id=picked_id, video_path=video_path)
        if new_manual_subtitle:
            ui.notifications.message(
                ep_obj.series.name,
                'Subtitle downloaded: {0}'.format(','.join(new_manual_subtitle)))
        else:
            ui.notifications.message(
                ep_obj.series.name,
                'Failed to download subtitle for {0}'.format(release_name))

        if new_manual_subtitle:
            result = 'success'
        else:
            result = 'failure'
            description = 'Failed to download subtitle for {0}'.format(release_name)
        subtitles_result = new_manual_subtitle

    return json.dumps({
        'result': result,
        'release': release_name,
        'subtitles': subtitles_result,
        'description': description
    })
def setSceneNumbering(self, showslug=None, for_season=None, for_episode=None, for_absolute=None,
                      scene_season=None, scene_episode=None, scene_absolute=None):
    """Set a custom scene-numbering (or scene-absolute) mapping for an episode.

    :returns: a JSON string describing success and the resulting scene numbering.
    """
    # sanitize: the frontend may send the literal string 'null' or '' for missing values
    for_season = None if for_season in ['null', ''] else for_season
    for_episode = None if for_episode in ['null', ''] else for_episode
    for_absolute = None if for_absolute in ['null', ''] else for_absolute
    scene_season = None if scene_season in ['null', ''] else scene_season
    scene_episode = None if scene_episode in ['null', ''] else scene_episode
    scene_absolute = None if scene_absolute in ['null', ''] else scene_absolute

    identifier = SeriesIdentifier.from_slug(showslug)
    series_obj = Series.find_by_identifier(identifier)

    if not series_obj:
        return json.dumps({
            'success': False,
            # Fixed: formatted with series_obj.slug, which raised
            # AttributeError because series_obj is None on this path;
            # use the slug supplied by the request instead.
            'errorMessage': 'Could not find show {show_slug} to set scene numbering'.format(
                show_slug=showslug),
        })

    # Check if this is an anime, because we can't set the Scene numbering for anime shows
    if series_obj.is_anime and for_absolute is None:
        return json.dumps({
            'success': False,
            'errorMessage': "You can't use the Scene numbering for anime shows. "
                            'Use the Scene Absolute field, to configure a diverging episode number.',
            'sceneSeason': None,
            'sceneAbsolute': None,
        })
    elif not series_obj.is_anime and (for_season is None or for_episode is None):
        return json.dumps({
            'success': False,
            'errorMessage': "You can't use the Scene Absolute for non-anime shows. "
                            'Use the scene field, to configure a diverging episode number.',
            'sceneSeason': None,
            'sceneAbsolute': None,
        })
    elif series_obj.is_anime:
        result = {
            'success': True,
            'forAbsolute': for_absolute,
        }
    else:
        result = {
            'success': True,
            'forSeason': for_season,
            'forEpisode': for_episode,
        }

    # retrieve the episode object and fail if we can't get one
    if series_obj.is_anime:
        ep_obj = series_obj.get_episode(absolute_number=for_absolute)
    else:
        ep_obj = series_obj.get_episode(for_season, for_episode)

    if not ep_obj:
        result.update({
            'success': False,
            # Fixed: previously assigned the falsy episode object itself as
            # the error message; provide a readable message instead.
            'errorMessage': 'Could not retrieve episode to set scene numbering',
        })
    elif series_obj.is_anime:
        logger.log(
            u'Set absolute scene numbering for {show} from {absolute} to {scene_absolute}'.format(
                show=series_obj.slug, absolute=for_absolute, scene_absolute=scene_absolute),
            logger.DEBUG)

        for_absolute = int(for_absolute)
        if scene_absolute is not None:
            scene_absolute = int(scene_absolute)

        set_scene_numbering(series_obj, absolute_number=for_absolute,
                            scene_absolute=scene_absolute)
    else:
        logger.log(
            u'setEpisodeSceneNumbering for {show} from {season}x{episode} to {scene_season}x{scene_episode}'.format(
                show=series_obj.indexerid, season=for_season, episode=for_episode,
                scene_season=scene_season, scene_episode=scene_episode),
            logger.DEBUG)

        for_season = int(for_season)
        for_episode = int(for_episode)
        if scene_season is not None:
            scene_season = int(scene_season)
        if scene_episode is not None:
            scene_episode = int(scene_episode)

        set_scene_numbering(series_obj, season=for_season, episode=for_episode,
                            scene_season=scene_season, scene_episode=scene_episode)

    # Echo back the effective scene numbering after the update.
    if series_obj.is_anime:
        sn = get_scene_absolute_numbering(series_obj, for_absolute)
        result['sceneAbsolute'] = sn
    else:
        sn = get_scene_numbering(series_obj, for_season, for_episode)
        result['sceneSeason'], result['sceneEpisode'] = sn

    return json.dumps(result)
def resource_existing_series(self):
    """Generate existing series folders data for adding existing shows."""
    if not app.ROOT_DIRS:
        return self._not_found('No configured root dirs')

    # First element of ROOT_DIRS is the default-dir index, not a path.
    root_dirs = app.ROOT_DIRS[1:]
    root_dirs_indices = self.get_argument('rootDirs', '')

    if root_dirs_indices:
        root_dirs_indices = set(root_dirs_indices.split(','))

        try:
            root_dirs_indices = sorted(map(int, root_dirs_indices))
        except ValueError as error:
            log.warning('Unable to parse root dirs indices: {indices}. Error: {error}',
                        {'indices': root_dirs_indices, 'error': error})
            return self._bad_request('Invalid root dirs indices')

        root_dirs = [root_dirs[idx] for idx in root_dirs_indices]

    dir_list = []

    # Get a unique list of shows
    main_db_con = db.DBConnection()
    dir_results = main_db_con.select(
        'SELECT location '
        'FROM tv_shows'
    )
    root_dirs_tuple = tuple(root_dirs)
    dir_results = [
        series['location'] for series in dir_results
        if series['location'].startswith(root_dirs_tuple)
    ]

    for root_dir in root_dirs:
        try:
            file_list = os.listdir(root_dir)
        except Exception as error:
            # Best-effort scan: an unreadable root dir is skipped, not fatal.
            log.info('Unable to list directory {path}: {err!r}',
                     {'path': root_dir, 'err': error})
            continue

        for cur_file in file_list:
            try:
                cur_path = os.path.normpath(os.path.join(root_dir, cur_file))
                if not os.path.isdir(cur_path):
                    continue
            except Exception as error:
                log.info('Unable to get current path {path} and {file}: {err!r}',
                         {'path': root_dir, 'file': cur_file, 'err': error})
                continue

            cur_dir = {
                'path': cur_path,
                'alreadyAdded': False,
                'metadata': {
                    'seriesId': None,
                    'seriesName': None,
                    'indexer': None
                }
            }

            # Check if the folder is already in the library
            cur_dir['alreadyAdded'] = next((True for path in dir_results if path == cur_path), False)

            if not cur_dir['alreadyAdded']:
                # Fixed: initialize before the loop so an empty
                # metadata_provider_dict can no longer leave these names
                # unbound (NameError) when building the identifier below.
                series_id = series_name = indexer = None
                # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
                # is reset. This will prevent errors, when using multiple indexers and caching.
                for cur_provider in itervalues(app.metadata_provider_dict):
                    (series_id, series_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                    if all((series_id, series_name, indexer)):
                        cur_dir['metadata'] = {
                            'seriesId': try_int(series_id),
                            'seriesName': series_name,
                            'indexer': try_int(indexer)
                        }
                        break

                series_identifier = SeriesIdentifier(indexer, series_id)
                cur_dir['alreadyAdded'] = bool(Series.find_by_identifier(series_identifier))

            dir_list.append(cur_dir)

    return self._ok(data=dir_list)