def resource_get_subtitle_missed(self):
    """Return a list of episodes which are missing a specific subtitle language.

    The requested language is read from the ``language`` query argument.
    Passing ``all`` matches episodes that are missing any wanted language.

    :returns: an API response mapping show slug to show/episode information.
    """
    # BUG FIX: the original used `self.get_argument('language' '')` —
    # implicit string concatenation produced the name 'language' with NO
    # default, making the argument required. The comma restores the
    # intended '' default.
    language = self.get_argument('language', '').strip()

    main_db_con = db.DBConnection()
    results = main_db_con.select(
        'SELECT show_name, tv_shows.show_id, tv_shows.indexer, '
        'tv_shows.indexer_id AS indexer_id, tv_episodes.subtitles subtitles, '
        'tv_episodes.episode AS episode, tv_episodes.season AS season, '
        'tv_episodes.name AS name '
        'FROM tv_episodes, tv_shows '
        'WHERE tv_shows.subtitles = 1 '
        'AND tv_episodes.status = ? '
        'AND tv_episodes.season != 0 '
        "AND tv_episodes.location != '' "
        'AND tv_episodes.showid = tv_shows.indexer_id '
        'AND tv_episodes.indexer = tv_shows.indexer '
        'ORDER BY show_name',
        [DOWNLOADED])

    subtitle_status = {}
    for cur_status_result in results:
        cur_indexer = int(cur_status_result['indexer'])
        cur_series_id = int(cur_status_result['indexer_id'])
        show_slug = SeriesIdentifier.from_id(cur_indexer, cur_series_id).slug

        subtitles = cur_status_result['subtitles'].split(',')
        if language == 'all':
            # Skip episodes that already have every wanted language.
            if not frozenset(wanted_languages()).difference(subtitles):
                continue
        elif language in subtitles:
            # The specific language is already present; not missing.
            continue

        if show_slug not in subtitle_status:
            subtitle_status[show_slug] = {
                'selected': True,
                'slug': show_slug,
                'name': cur_status_result['show_name'],
                'episodes': [],
                'showEpisodes': False
            }

        subtitle_status[show_slug]['episodes'].append({
            'episode': cur_status_result['episode'],
            'season': cur_status_result['season'],
            'selected': True,
            'slug': str(RelativeNumber(cur_status_result['season'],
                                       cur_status_result['episode'])),
            'name': cur_status_result['name'],
            'subtitles': subtitles
        })

    return self._ok(data=subtitle_status)
def resource_get_episode_status(self):
    """Return a list of episodes with a specific status.

    The status is read from the ``status`` query argument.  A SNATCHED
    status is expanded to also match SNATCHED_PROPER and SNATCHED_BEST.

    :returns: an API response mapping show slug to show/episode information.
    """
    # BUG FIX: the original used `self.get_argument('status' '')` —
    # implicit string concatenation passed only the name 'status' and no
    # default value. The comma restores the intended '' default.
    status = self.get_argument('status', '').strip()

    status_list = [int(status)]
    if status_list:
        if status_list[0] == SNATCHED:
            # Treat all snatched variants as one logical status.
            status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
    else:
        status_list = []

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        'SELECT show_name, tv_shows.indexer, tv_shows.show_id, tv_shows.indexer_id AS indexer_id, '
        'tv_episodes.season AS season, tv_episodes.episode AS episode, tv_episodes.name as name '
        'FROM tv_episodes, tv_shows '
        'WHERE season != 0 '
        'AND tv_episodes.showid = tv_shows.indexer_id '
        'AND tv_episodes.indexer = tv_shows.indexer '
        'AND tv_episodes.status IN ({statuses}) '.format(
            statuses=','.join(['?'] * len(status_list))),
        status_list)

    episode_status = {}
    for cur_status_result in status_results:
        cur_indexer = int(cur_status_result['indexer'])
        cur_series_id = int(cur_status_result['indexer_id'])
        show_slug = SeriesIdentifier.from_id(cur_indexer, cur_series_id).slug

        if show_slug not in episode_status:
            episode_status[show_slug] = {
                'selected': True,
                'slug': show_slug,
                'name': cur_status_result['show_name'],
                'episodes': [],
                'showEpisodes': False
            }

        episode_status[show_slug]['episodes'].append({
            'episode': cur_status_result['episode'],
            'season': cur_status_result['season'],
            'selected': True,
            'slug': str(RelativeNumber(cur_status_result['season'],
                                       cur_status_result['episode'])),
            'name': cur_status_result['name']
        })

    return self._ok(data={'episodeStatus': episode_status})
def data_generator():
    """Read and paginate history records.

    Yields one dict per history row in the current page, enriching
    SNATCHED/FAILED rows with provider info, DOWNLOADED rows with
    release-group/file info, and SUBTITLED rows with the language.
    """
    start = arg_limit * (arg_page - 1)

    for item in results[start:start + arg_limit]:
        provider = {}
        release_group = None
        release_name = None
        file_name = None
        subtitle_language = None

        if item['action'] in (SNATCHED, FAILED):
            provider.update({
                'id': GenericProvider.make_id(item['provider']),
                'name': item['provider']
            })
            release_name = item['resource']

        if item['action'] == DOWNLOADED:
            # For downloads the 'provider' column stores the release group.
            release_group = item['provider']
            file_name = item['resource']

        # BUG FIX: this SUBTITLED check appeared twice in the original;
        # the duplicate (identical) block was removed.
        if item['action'] == SUBTITLED:
            subtitle_language = item['resource']

        yield {
            'id': item['rowid'],
            'series': SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug,
            'status': item['action'],
            'statusName': statusStrings.get(item['action']),
            'actionDate': item['date'],
            'quality': item['quality'],
            'resource': basename(item['resource']),
            'size': item['size'],
            'properTags': item['proper_tags'],
            'season': item['season'],
            'episode': item['episode'],
            'manuallySearched': bool(item['manually_searched']),
            'infoHash': item['info_hash'],
            'provider': provider,
            'release_name': release_name,
            'releaseGroup': release_group,
            'fileName': file_name,
            'subtitleLanguage': subtitle_language
        }
def data_generator():
    """Read history data and normalize key/value pairs."""
    # Emit each history row as a single dict literal instead of
    # assigning keys one at a time.
    for row in results:
        yield {
            'id': row['rowid'],
            'series': SeriesIdentifier.from_id(row['indexer_id'], row['showid']).slug,
            'status': row['action'],
            'actionDate': row['date'],
            'resource': basename(row['resource']),
            'size': row['size'],
            'properTags': row['proper_tags'],
            'statusName': statusStrings.get(row['action']),
            'season': row['season'],
            'episode': row['episode'],
            'manuallySearched': bool(row['manually_searched']),
            'provider': row['provider'],
        }
def data_generator():
    """Read and paginate history records."""
    # Select only the rows belonging to the requested page,
    # then emit each as one dict literal.
    offset = arg_limit * (arg_page - 1)
    page = results[offset:offset + arg_limit]
    for row in page:
        yield {
            'id': row['rowid'],
            'series': SeriesIdentifier.from_id(row['indexer_id'], row['showid']).slug,
            'status': row['action'],
            'actionDate': row['date'],
            'resource': basename(row['resource']),
            'size': row['size'],
            'properTags': row['proper_tags'],
            'statusName': statusStrings.get(row['action']),
            'season': row['season'],
            'episode': row['episode'],
            'manuallySearched': bool(row['manually_searched']),
            'provider': row['provider'],
        }
def create_history_item(history_row, compact=False):
    """
    Create a history object, using the data from a history db row item.

    Calculate additional data, where needed.

    :param history_row: a main.db history row.
    :param compact: A boolean indicating if this is used for a compact layout.
    :returns: A dict with history information.
    """
    from medusa.providers import get_provider_class
    from medusa.providers.generic_provider import GenericProvider
    from medusa.tv.series import Series, SeriesIdentifier

    provider = {}
    release_group = None
    release_name = None
    file_name = None
    subtitle_language = None
    client_status = None
    # BUG FIX: the original assigned `show_slug = None` twice (before and
    # after `client_status`); a single initialization is sufficient.
    show_slug = None
    show_title = 'Missing Show'

    if history_row['action'] in (SNATCHED, FAILED):
        provider_id = GenericProvider.make_id(history_row['provider'])
        provider_class = get_provider_class(provider_id)

        if provider_class:
            provider.update({
                'id': provider_class.get_id(),
                'name': provider_class.name,
                'imageName': provider_class.image_name()
            })
        else:
            # Provider no longer configured; synthesize display info.
            provider.update({
                'id': provider_id,
                'name': history_row['provider'],
                'imageName': f'{provider_id}.png'
            })
        release_name = history_row['resource']

    if history_row['action'] == DOWNLOADED:
        # For downloads the 'provider' column stores the release group.
        release_group = history_row['provider']
        file_name = history_row['resource']

    if history_row['action'] == SUBTITLED:
        subtitle_language = history_row['resource']
        provider['name'] = history_row['provider']

    if history_row['client_status'] is not None:
        status = ClientStatus(status=history_row['client_status'])
        client_status = {
            'status': [s.value for s in status],
            'string': status.status_to_array_string()
        }

    if history_row['indexer_id'] and history_row['showid']:
        identifier = SeriesIdentifier.from_id(history_row['indexer_id'], history_row['showid'])
        show_slug = identifier.slug
        show = Series.find_by_identifier(identifier)
        if show:
            show_title = show.title

    # NOTE: mutates the passed row so the composed title is reusable below.
    history_row['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
        show_title, history_row['season'], history_row['episode'])

    return {
        'series': show_slug,
        'status': history_row['action'],
        'statusName': statusStrings.get(history_row['action']),
        'actionDate': history_row['date'],
        'quality': history_row['quality'],
        'resource': basename(history_row['resource']),
        'size': history_row['size'],
        'properTags': history_row['proper_tags'],
        'season': history_row['season'],
        'episode': history_row['episode'],
        'episodeTitle': history_row['episodeTitle'],
        'manuallySearched': bool(history_row['manually_searched']),
        'infoHash': history_row['info_hash'],
        'provider': provider,
        'releaseName': release_name,
        'releaseGroup': release_group,
        'fileName': file_name,
        'subtitleLanguage': subtitle_language,
        'showSlug': show_slug,
        'showTitle': show_title,
        'providerType': history_row['provider_type'],
        'clientStatus': client_status,
        'partOfBatch': bool(history_row['part_of_batch'])
    }
def manualSearchSubtitles(self, showslug=None, season=None, episode=None, release_id=None, picked_id=None):
    """Search for, or download, a subtitle for a single episode on demand.

    :param showslug: slug identifying the series (ignored when release_id is given).
    :param season: season number of the episode.
    :param episode: episode number.
    :param release_id: index into app.RELEASES_IN_PP; sent when using postpone.
    :param picked_id: id of a previously listed subtitle to download; when set,
        the handler downloads instead of searching.
    :returns: a JSON string with 'result', 'release', 'subtitles' and 'description'.
    """
    # picked_id switches the handler between its two modes.
    mode = 'downloading' if picked_id else 'searching'
    description = ''
    logger.log('Starting to manual {mode} subtitles'.format(mode=mode))
    try:
        if release_id:
            # Release ID is sent when using postpone
            release = app.RELEASES_IN_PP[int(release_id)]
            indexer_name = release['indexername']
            series_id = release['seriesid']
            season = release['season']
            episode = release['episode']
            filepath = release['release']
            identifier = SeriesIdentifier.from_id(
                indexer_name_to_id(indexer_name), series_id)
        else:
            filepath = None
            identifier = SeriesIdentifier.from_slug(showslug)

        series_obj = Series.find_by_identifier(identifier)
        ep_obj = series_obj.get_episode(season, episode)
        # Prefer the postponed release's file; fall back to the episode location.
        video_path = filepath or ep_obj.location
        release_name = ep_obj.release_name or os.path.basename(video_path)
    except IndexError:
        # release_id no longer present in RELEASES_IN_PP — stale page.
        ui.notifications.message('Outdated list', 'Please refresh page and try again')
        logger.log('Outdated list. Please refresh page and try again', logger.WARNING)
        return json.dumps({
            'result': 'failure',
            'description': 'Outdated list. Please refresh page and try again'
        })
    except (ValueError, TypeError) as e:
        ui.notifications.message('Error', 'Please check logs')
        logger.log(
            'Error while manual {mode} subtitles. Error: {error_msg}'.
            format(mode=mode, error_msg=e), logger.ERROR)
        return json.dumps({
            'result': 'failure',
            'description':
            'Error while manual {mode} subtitles. Error: {error_msg}'.
            format(mode=mode, error_msg=e)
        })

    if not os.path.isfile(video_path):
        ui.notifications.message(
            ep_obj.series.name,
            "Video file no longer exists. Can't search for subtitles")
        logger.log(
            'Video file no longer exists: {video_file}'.format(
                video_file=video_path), logger.DEBUG)
        return json.dumps({
            'result': 'failure',
            'description': 'Video file no longer exists: {video_file}'.format(
                video_file=video_path)
        })

    if mode == 'searching':
        # Search mode: list available subtitles for the episode's video file.
        logger.log(
            'Manual searching subtitles for: {0}'.format(release_name))
        found_subtitles = subtitles.list_subtitles(tv_episode=ep_obj, video_path=video_path)
        if found_subtitles:
            ui.notifications.message(
                ep_obj.series.name,
                'Found {} subtitles'.format(len(found_subtitles)))
        else:
            ui.notifications.message(ep_obj.series.name, 'No subtitle found')
        if found_subtitles:
            result = 'success'
        else:
            result = 'failure'
            description = 'No subtitles found'
        subtitles_result = found_subtitles
    else:
        # Download mode: save the subtitle the user picked earlier.
        logger.log(
            'Manual downloading subtitles for: {0}'.format(release_name))
        new_manual_subtitle = subtitles.save_subtitle(
            tv_episode=ep_obj, subtitle_id=picked_id, video_path=video_path)
        if new_manual_subtitle:
            ui.notifications.message(
                ep_obj.series.name,
                'Subtitle downloaded: {0}'.format(
                    ','.join(new_manual_subtitle)))
        else:
            ui.notifications.message(
                ep_obj.series.name,
                'Failed to download subtitle for {0}'.format(release_name))
        if new_manual_subtitle:
            result = 'success'
        else:
            result = 'failure'
            description = 'Failed to download subtitle for {0}'.format(
                release_name)
        subtitles_result = new_manual_subtitle

    return json.dumps({
        'result': result,
        'release': release_name,
        'subtitles': subtitles_result,
        'description': description
    })
def get(self, identifier, path_param):
    """Query scene_exception information."""
    cache_db_con = db.DBConnection('cache.db')

    # Base projection; WHERE clauses are appended below from
    # parallel column/value lists.
    query = (b'SELECT '
             b' exception_id, '
             b' indexer, '
             b' indexer_id, '
             b' show_name, '
             b' season, '
             b' custom '
             b'FROM scene_exceptions ')
    columns = []
    values = []

    if identifier is not None:
        columns.append(b'exception_id')
        values.append(identifier)
    else:
        series_slug = self.get_query_argument('series', None)
        series_identifier = SeriesIdentifier.from_slug(series_slug)
        if series_slug and not series_identifier:
            return self._bad_request('Invalid series')

        season = self._parse(self.get_query_argument('season', None))
        exception_type = self.get_query_argument('type', None)
        if exception_type and exception_type not in ('local', ):
            return self._bad_request('Invalid type')

        if series_identifier:
            columns.extend([b'indexer', b'indexer_id'])
            values.extend([series_identifier.indexer.id, series_identifier.id])
        if season is not None:
            columns.append(b'season')
            values.append(season)
        if exception_type == 'local':
            columns.append(b'custom')
            values.append(1)

    if columns:
        query += b' WHERE ' + b' AND '.join(col + b' = ? ' for col in columns)

    rows = cache_db_con.select(query, values)

    data = []
    for row in rows:
        entry = NonEmptyDict()
        entry['id'] = row[0]
        entry['series'] = SeriesIdentifier.from_id(row[1], row[2]).slug
        entry['name'] = row[3]
        # A negative season means "applies to the whole show".
        entry['season'] = row[4] if row[4] >= 0 else None
        entry['type'] = 'local' if row[5] else None
        data.append(entry)

    if not identifier:
        return self._paginate(data, sort='id')

    if not data:
        return self._not_found('Alias not found')

    data = data[0]
    if path_param:
        if path_param not in data:
            return self._bad_request('Invalid path parameter')
        data = data[path_param]

    return self._ok(data=data)
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    Collect episodes airing soon, airing next per show, or recently missed.

    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    # Ordinal date boundaries for the missed / today / soon / later buckets.
    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()

    # Composite-quality statuses that mean the episode is already handled.
    qualities_list = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED]

    db = DBConnection()
    fields_to_select = ', '.join(
        ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network', 'paused', 'quality', 'runtime', 'season', 'show_name', 'showid', 's.status']
    )
    # Query 1: episodes airing within the next 7 days.
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, next_week] + qualities_list
    )

    # Shows already covered by query 1 (note the bytes key — rows here are
    # keyed with b'showid').
    done_shows_list = [int(result[b'showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER))

    # FIXME: This inner join is not multi indexer friendly.
    # Query 2: for every remaining show, pick its next airing episode.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
    )

    # Query 3: recently missed episodes still WANTED/UNAIRED.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, recently, WANTED, UNAIRED] + qualities_list
    )

    # Convert rows to plain dicts and attach slug + localized air time.
    results = [dict(result) for result in results]
    for index, item in enumerate(results):
        item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    # NOTE(review): the comparator is passed positionally, which is a
    # Python-2-style cmp sort; on Python 3 this would need `key=` — confirm
    # which interpreter this legacy path targets.
    results.sort(ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        # Bucket by air date relative to today / next week.
        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def http_get(self, identifier, path_param):
    """Query scene_exception information."""
    cache_db_con = db.DBConnection('cache.db')

    # Base projection; WHERE clauses are built from parallel
    # column/value lists below.
    query = ('SELECT '
             ' exception_id, '
             ' indexer, '
             ' indexer_id, '
             ' show_name, '
             ' season, '
             ' custom '
             'FROM scene_exceptions ')
    columns = []
    values = []

    if identifier is not None:
        columns.append('exception_id')
        values.append(identifier)
    else:
        series_slug = self.get_query_argument('series', None)
        series_identifier = SeriesIdentifier.from_slug(series_slug)
        if series_slug and not series_identifier:
            return self._bad_request('Invalid series')

        season = self._parse(self.get_query_argument('season', None))
        exception_type = self.get_query_argument('type', None)
        if exception_type and exception_type not in ('local', ):
            return self._bad_request('Invalid type')

        if series_identifier:
            columns.extend(['indexer', 'indexer_id'])
            values.extend([series_identifier.indexer.id, series_identifier.id])
        if season is not None:
            columns.append('season')
            values.append(season)
        if exception_type == 'local':
            columns.append('custom')
            values.append(1)

    if columns:
        query += ' WHERE ' + ' AND '.join(col + ' = ? ' for col in columns)

    rows = cache_db_con.select(query, values)

    # A negative season means "applies to the whole show".
    data = [
        {
            'id': row['exception_id'],
            'series': SeriesIdentifier.from_id(row['indexer'], row['indexer_id']).slug,
            'name': row['show_name'],
            'season': row['season'] if row['season'] >= 0 else None,
            'type': 'local' if row['custom'] else None,
        }
        for row in rows
    ]

    if not identifier:
        return self._paginate(data, sort='id')

    if not data:
        return self._not_found('Alias not found')

    data = data[0]
    if path_param:
        if path_param not in data:
            return self._bad_request('Invalid path parameter')
        data = data[path_param]

    return self._ok(data=data)
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    Collect episodes airing soon, airing next per show, or recently missed.

    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    # Ordinal date boundaries for the missed / today / soon / later buckets.
    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()

    # Statuses that mean the episode is already handled and can be excluded.
    status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, ARCHIVED, IGNORED]

    db = DBConnection()
    fields_to_select = ', '.join(
        ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network', 'paused', 's.quality', 'runtime', 'season', 'show_name', 'showid', 's.status']
    )
    # Query 1: episodes airing within the next 7 days.
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, next_week] + status_list
    )

    # Shows already covered by query 1.
    done_shows_list = [int(result['showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]))

    # FIXME: This inner join is not multi indexer friendly.
    # Query 2: for every remaining show, pick its next airing episode.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]
    )

    # Query 3: recently missed episodes still WANTED/UNAIRED.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, recently, WANTED, UNAIRED] + status_list
    )

    # Attach slug + localized air time (mutates rows in place).
    for index, item in enumerate(results):
        item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    results.sort(key=ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        # Bucket by air date relative to today / next week.
        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def data_generator_compact():
    """
    Read and paginate history records.

    Results are provided grouped per showid+season+episode.
    The results are flattened into a structure of
    [{'actionDate': .., 'showSlug':.., 'rows':Array(history_items)},]
    """
    start = arg_limit * (arg_page - 1)

    for compact_item in list(results.values())[start:start + arg_limit]:
        return_item = {'rows': []}
        for item in compact_item:
            provider = {}
            release_group = None
            release_name = None
            file_name = None
            subtitle_language = None

            if item['action'] in (SNATCHED, FAILED):
                provider_id = GenericProvider.make_id(item['provider'])
                provider_class = get_provider_class(provider_id)
                if provider_class:
                    provider.update({
                        'id': provider_class.get_id(),
                        'name': provider_class.name,
                        'imageName': provider_class.image_name()
                    })
                else:
                    # Provider no longer configured; synthesize display info.
                    provider.update({
                        'id': provider_id,
                        'name': item['provider'],
                        'imageName': f'{provider_id}.png'
                    })
                release_name = item['resource']

            if item['action'] == DOWNLOADED:
                # For downloads the 'provider' column stores the release group.
                release_group = item['provider']
                file_name = item['resource']

            if item['action'] == SUBTITLED:
                subtitle_language = item['resource']
                provider['name'] = item['provider']

            item['showSlug'] = None
            item['showTitle'] = 'Missing Show'
            if item['indexer_id'] and item['showid']:
                identifier = SeriesIdentifier.from_id(
                    item['indexer_id'], item['showid'])
                item['showSlug'] = identifier.slug
                show = Series.find_by_identifier(identifier)
                if show:
                    item['showTitle'] = show.title

            return_item['actionDate'] = item['date']
            # BUG FIX: the original read item['showslug'] (lower-case 's'),
            # but the key populated above is 'showSlug' — the lower-case key
            # would raise/miss. Read the key that is actually set.
            return_item['showSlug'] = item['showSlug']
            return_item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
                item['showTitle'], item['season'], item['episode'])
            return_item['quality'] = item['quality']

            return_item['rows'].append({
                'actionDate': item['date'],
                'id': item['rowid'],
                'series': item['showSlug'],
                'status': item['action'],
                'statusName': statusStrings.get(item['action']),
                'quality': item['quality'],
                'resource': basename(item['resource']),
                'size': item['size'],
                'properTags': item['proper_tags'],
                'season': item['season'],
                'episode': item['episode'],
                'manuallySearched': bool(item['manually_searched']),
                'infoHash': item['info_hash'],
                'provider': provider,
                'release_name': release_name,
                'releaseGroup': release_group,
                'fileName': file_name,
                'subtitleLanguage': subtitle_language,
                # BUG FIX: same 'showslug' -> 'showSlug' key-case typo.
                'showSlug': item['showSlug'],
                'showTitle': item['showTitle']
            })

        yield return_item
def data_generator():
    """Read and paginate history records.

    Yields one enriched dict per history row in the requested page,
    resolving provider metadata, client (torrent/nzb) status and the
    owning show's slug/title.
    """
    start = arg_limit * (arg_page - 1)

    for item in results[start:start + arg_limit]:
        provider = {}
        release_group = None
        release_name = None
        file_name = None
        subtitle_language = None
        client_status = None
        # BUG FIX: the original assigned `show_slug = None` twice; a single
        # initialization is sufficient.
        show_slug = None
        show_title = 'Missing Show'

        if item['action'] in (SNATCHED, FAILED):
            provider_id = GenericProvider.make_id(item['provider'])
            provider_class = get_provider_class(provider_id)
            if provider_class:
                provider.update({
                    'id': provider_class.get_id(),
                    'name': provider_class.name,
                    'imageName': provider_class.image_name()
                })
            else:
                # Provider no longer configured; synthesize display info.
                provider.update({
                    'id': provider_id,
                    'name': item['provider'],
                    'imageName': f'{provider_id}.png'
                })
            release_name = item['resource']

        if item['action'] == DOWNLOADED:
            # For downloads the 'provider' column stores the release group.
            release_group = item['provider']
            file_name = item['resource']

        if item['action'] == SUBTITLED:
            subtitle_language = item['resource']
            provider['name'] = item['provider']

        if item['client_status'] is not None:
            status = ClientStatus(status=item['client_status'])
            client_status = {
                'status': [s.value for s in status],
                'string': status.status_to_array_string()
            }

        if item['indexer_id'] and item['showid']:
            identifier = SeriesIdentifier.from_id(
                item['indexer_id'], item['showid'])
            show_slug = identifier.slug
            show = Series.find_by_identifier(identifier)
            if show:
                show_title = show.title

        # NOTE: mutates the row so the composed title is reusable below.
        item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
            show_title, item['season'], item['episode'])

        yield {
            'id': item['rowid'],
            'series': show_slug,
            'status': item['action'],
            'statusName': statusStrings.get(item['action']),
            'actionDate': item['date'],
            'quality': item['quality'],
            'resource': basename(item['resource']),
            'size': item['size'],
            'properTags': item['proper_tags'],
            'season': item['season'],
            'episode': item['episode'],
            'episodeTitle': item['episodeTitle'],
            'manuallySearched': bool(item['manually_searched']),
            'infoHash': item['info_hash'],
            'provider': provider,
            'releaseName': release_name,
            'releaseGroup': release_group,
            'fileName': file_name,
            'subtitleLanguage': subtitle_language,
            'showSlug': show_slug,
            'showTitle': show_title,
            'providerType': item['provider_type'],
            'clientStatus': client_status,
            'partOfBatch': bool(item['part_of_batch'])
        }