def calendar(self):
    """ Provides a subscribable URL for iCal subscriptions """
    logger.log('Receiving iCal request from {ip}'.format(
        ip=self.request.remote_ip))

    # Create an iCal string
    ical = 'BEGIN:VCALENDAR\r\n'
    ical += 'VERSION:2.0\r\n'
    ical += 'X-WR-CALNAME:Medusa\r\n'
    ical += 'X-WR-CALDESC:Medusa\r\n'
    ical += 'PRODID://Medusa Upcoming Episodes//\r\n'

    future_weeks = try_int(self.get_argument('future', 52), 52)
    past_weeks = try_int(self.get_argument('past', 52), 52)

    # Limit dates
    past_date = (datetime.date.today() + datetime.timedelta(weeks=-past_weeks)).toordinal()
    future_date = (datetime.date.today() + datetime.timedelta(weeks=future_weeks)).toordinal()

    # Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
    main_db_con = db.DBConnection()
    calendar_shows = main_db_con.select(
        b'SELECT show_name, indexer_id, network, airs, runtime '
        b'FROM tv_shows '
        b'WHERE ( status = ? OR status = ? ) AND paused != 1',
        ('Continuing', 'Returning Series'))

    for show in calendar_shows:
        # Get all episodes of this show airing between today and next month
        episode_list = main_db_con.select(
            b'SELECT indexerid, name, season, episode, description, airdate '
            b'FROM tv_episodes '
            b'WHERE airdate >= ? AND airdate < ? AND showid = ?',
            (past_date, future_date, int(show[b'indexer_id'])))

        utc = tz.gettz('GMT')

        for episode in episode_list:
            air_date_time = network_timezones.parse_date_time(
                episode[b'airdate'], show[b'airs'], show[b'network']).astimezone(utc)
            air_date_time_end = air_date_time + datetime.timedelta(
                minutes=try_int(show[b'runtime'], 60))

            # Create event for episode
            ical += 'BEGIN:VEVENT\r\n'
            ical += 'DTSTART:{date}\r\n'.format(
                date=air_date_time.strftime('%Y%m%dT%H%M%SZ'))
            ical += 'DTEND:{date}\r\n'.format(
                date=air_date_time_end.strftime('%Y%m%dT%H%M%SZ'))
            if app.CALENDAR_ICONS:
                icon_url = '{base_url}/images/ico/favicon-16.png'.format(
                    base_url=app.BASE_PYMEDUSA_URL)
                ical += 'X-GOOGLE-CALENDAR-CONTENT-ICON:{icon_url}\r\n'.format(
                    icon_url=icon_url)
                ical += 'X-GOOGLE-CALENDAR-CONTENT-DISPLAY:CHIP\r\n'
            ical += 'SUMMARY: {show} - {season}x{episode} - {title}\r\n'.format(
                show=show[b'show_name'],
                season=episode[b'season'],
                episode=episode[b'episode'],
                title=episode[b'name'],
            )
            ical += 'UID:Medusa-{date}-{show}-E{episode}S{season}\r\n'.format(
                date=datetime.date.today().isoformat(),
                show=show[b'show_name'].replace(' ', '-'),
                episode=episode[b'episode'],
                season=episode[b'season'],
            )
            ical += 'DESCRIPTION: {date} on {network}'.format(
                date=show[b'airs'] or '(Unknown airs)',
                network=show[b'network'] or 'Unknown network',
            )
            if episode[b'description']:
                ical += ' \\n\\n {description}\r\n'.format(
                    description=episode[b'description'].splitlines()[0])
            else:
                ical += '\r\n'
            ical += 'END:VEVENT\r\n'

    # Ending the iCal
    ical += 'END:VCALENDAR'
    return ical
def backlogOverview(self):
    t = PageTemplate(rh=self, filename='manage_backlogOverview.mako')

    show_counts = {}
    show_cats = {}
    show_sql_results = {}

    backlog_periods = {
        'all': None,
        'one_day': datetime.timedelta(days=1),
        'three_days': datetime.timedelta(days=3),
        'one_week': datetime.timedelta(days=7),
        'one_month': datetime.timedelta(days=30),
    }
    backlog_period = backlog_periods.get(app.BACKLOG_PERIOD)

    backlog_status = {
        'all': [Overview.QUAL, Overview.WANTED],
        'quality': [Overview.QUAL],
        'wanted': [Overview.WANTED]
    }
    selected_backlog_status = backlog_status.get(app.BACKLOG_STATUS)

    main_db_con = db.DBConnection()
    for cur_show in app.showList:
        if cur_show.paused:
            continue

        ep_counts = {
            Overview.WANTED: 0,
            Overview.QUAL: 0,
        }
        ep_cats = {}

        sql_results = main_db_con.select(
            b"""
            SELECT e.status, e.quality, e.season,
                   e.episode, e.name, e.airdate, e.manually_searched
            FROM tv_episodes as e
            WHERE e.season IS NOT NULL
            AND e.indexer = ? AND e.showid = ?
            ORDER BY e.season DESC, e.episode DESC
            """,
            [cur_show.indexer, cur_show.series_id])

        filtered_episodes = []
        backlogged_episodes = [dict(row) for row in sql_results]
        for cur_result in backlogged_episodes:
            cur_ep_cat = cur_show.get_overview(
                cur_result[b'status'], cur_result[b'quality'],
                backlog_mode=True,
                manually_searched=cur_result[b'manually_searched'])
            if cur_ep_cat:
                if cur_ep_cat in selected_backlog_status and cur_result[b'airdate'] != 1:
                    air_date = datetime.datetime.fromordinal(cur_result[b'airdate'])
                    if air_date.year >= 1970 or cur_show.network:
                        air_date = sbdatetime.sbdatetime.convert_to_setting(
                            network_timezones.parse_date_time(
                                cur_result[b'airdate'], cur_show.airs, cur_show.network))
                        if backlog_period and air_date < datetime.datetime.now(app_timezone) - backlog_period:
                            continue
                else:
                    air_date = None
                episode_string = u'{ep}'.format(
                    ep=(episode_num(cur_result[b'season'], cur_result[b'episode']) or
                        episode_num(cur_result[b'season'], cur_result[b'episode'],
                                    numbering='absolute')))
                ep_cats[episode_string] = cur_ep_cat
                ep_counts[cur_ep_cat] += 1
                cur_result[b'airdate'] = air_date
                cur_result[b'episode_string'] = episode_string
                filtered_episodes.append(cur_result)

        show_counts[(cur_show.indexer, cur_show.series_id)] = ep_counts
        show_cats[(cur_show.indexer, cur_show.series_id)] = ep_cats
        show_sql_results[(cur_show.indexer, cur_show.series_id)] = filtered_episodes

    return t.render(showCounts=show_counts, showCats=show_cats,
                    showSQLResults=show_sql_results, controller='manage',
                    action='backlogOverview')
def resource_get_episode_backlog(self):
    """Collect backlog search information for each show."""
    status = self.get_argument('status', '').strip()
    period = self.get_argument('period', '').strip()

    available_backlog_status = {
        'all': [Overview.QUAL, Overview.WANTED],
        'quality': [Overview.QUAL],
        'wanted': [Overview.WANTED]
    }

    available_backlog_periods = {
        'all': None,
        'one_day': datetime.timedelta(days=1),
        'three_days': datetime.timedelta(days=3),
        'one_week': datetime.timedelta(days=7),
        'one_month': datetime.timedelta(days=30),
    }

    if status not in available_backlog_status:
        return self._bad_request(
            "allowed status values are: 'all', 'quality' and 'wanted'")

    if period not in available_backlog_periods:
        return self._bad_request(
            "allowed period values are: 'all', 'one_day', 'three_days', 'one_week' and 'one_month'")

    backlog_status = available_backlog_status.get(status)
    backlog_period = available_backlog_periods.get(period)

    main_db_con = db.DBConnection()

    results = []
    for cur_show in app.showList:
        if cur_show.paused:
            continue

        ep_counts = {
            'wanted': 0,
            'allowed': 0,
        }
        ep_cats = {}

        sql_results = main_db_con.select(
            """
            SELECT e.status, e.quality, e.season,
                   e.episode, e.name, e.airdate, e.manually_searched
            FROM tv_episodes as e
            WHERE e.season IS NOT NULL
            AND e.indexer = ? AND e.showid = ?
            ORDER BY e.season DESC, e.episode DESC
            """,
            [cur_show.indexer, cur_show.series_id])

        filtered_episodes = []
        for cur_result in sql_results:
            cur_ep_cat = cur_show.get_overview(
                cur_result['status'], cur_result['quality'],
                backlog_mode=True,
                manually_searched=cur_result['manually_searched'])

            if cur_ep_cat:
                if cur_ep_cat in backlog_status and cur_result['airdate'] != 1:
                    air_date = datetime.datetime.fromordinal(cur_result['airdate'])
                    if air_date.year >= 1970 or cur_show.network:
                        air_date = sbdatetime.convert_to_setting(
                            network_timezones.parse_date_time(
                                cur_result['airdate'], cur_show.airs, cur_show.network))
                        if backlog_period and air_date < datetime.datetime.now(app_timezone) - backlog_period:
                            continue
                else:
                    air_date = None

                episode_string = u'{ep}'.format(
                    ep=(episode_num(cur_result['season'], cur_result['episode']) or
                        episode_num(cur_result['season'], cur_result['episode'],
                                    numbering='absolute')))

                cur_ep_cat_string = Overview.overviewStrings[cur_ep_cat]
                ep_cats[episode_string] = cur_ep_cat_string
                ep_counts[cur_ep_cat_string] += 1

                cur_result['airdate'] = air_date.isoformat('T')
                cur_result['manuallySearched'] = cur_result['manually_searched']
                del cur_result['manually_searched']
                cur_result['statusString'] = statusStrings[cur_result['status']]
                cur_result['qualityString'] = Quality.qualityStrings[cur_result['quality']]
                cur_result['slug'] = episode_string
                filtered_episodes.append(cur_result)

        if filtered_episodes:
            results.append({
                'slug': cur_show.identifier.slug,
                'name': cur_show.name,
                'quality': cur_show.quality,
                'episodeCount': ep_counts,
                'category': ep_cats,
                'episodes': filtered_episodes
            })

    return self._ok(data=results)
def backlogOverview(self):
    t = PageTemplate(rh=self, filename='manage_backlogOverview.mako')

    show_counts = {}
    show_cats = {}
    show_sql_results = {}

    backlog_periods = {
        'all': None,
        'one_day': datetime.timedelta(days=1),
        'three_days': datetime.timedelta(days=3),
        'one_week': datetime.timedelta(days=7),
        'one_month': datetime.timedelta(days=30),
    }
    backlog_period = backlog_periods.get(app.BACKLOG_PERIOD)

    backlog_status = {
        'all': [Overview.QUAL, Overview.WANTED],
        'quality': [Overview.QUAL],
        'wanted': [Overview.WANTED]
    }
    selected_backlog_status = backlog_status.get(app.BACKLOG_STATUS)

    main_db_con = db.DBConnection()
    for cur_show in app.showList:
        if cur_show.paused:
            continue

        ep_counts = {
            Overview.WANTED: 0,
            Overview.QUAL: 0,
        }
        ep_cats = {}

        sql_results = main_db_con.select(
            """
            SELECT e.status, e.quality, e.season,
                   e.episode, e.name, e.airdate, e.manually_searched
            FROM tv_episodes as e
            WHERE e.season IS NOT NULL
            AND e.indexer = ? AND e.showid = ?
            ORDER BY e.season DESC, e.episode DESC
            """,
            [cur_show.indexer, cur_show.series_id]
        )

        filtered_episodes = []
        for cur_result in sql_results:
            cur_ep_cat = cur_show.get_overview(cur_result['status'], cur_result['quality'],
                                               backlog_mode=True,
                                               manually_searched=cur_result['manually_searched'])
            if cur_ep_cat:
                if cur_ep_cat in selected_backlog_status and cur_result['airdate'] != 1:
                    air_date = datetime.datetime.fromordinal(cur_result['airdate'])
                    if air_date.year >= 1970 or cur_show.network:
                        air_date = sbdatetime.sbdatetime.convert_to_setting(
                            network_timezones.parse_date_time(cur_result['airdate'],
                                                              cur_show.airs, cur_show.network))
                        if backlog_period and air_date < datetime.datetime.now(app_timezone) - backlog_period:
                            continue
                else:
                    air_date = None
                episode_string = u'{ep}'.format(
                    ep=(episode_num(cur_result['season'], cur_result['episode']) or
                        episode_num(cur_result['season'], cur_result['episode'],
                                    numbering='absolute')))
                ep_cats[episode_string] = cur_ep_cat
                ep_counts[cur_ep_cat] += 1
                cur_result['airdate'] = air_date
                cur_result['episode_string'] = episode_string
                filtered_episodes.append(cur_result)

        show_counts[(cur_show.indexer, cur_show.series_id)] = ep_counts
        show_cats[(cur_show.indexer, cur_show.series_id)] = ep_cats
        show_sql_results[(cur_show.indexer, cur_show.series_id)] = filtered_episodes

    return t.render(
        showCounts=show_counts, showCats=show_cats,
        showSQLResults=show_sql_results, controller='manage',
        action='backlogOverview')
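# Illustrative sketch (not part of Medusa): both backlog views above drop any
# episode whose localized air date falls outside the configured backlog period.
# in_backlog_period() isolates that cutoff test with a naive datetime; the
# helper name is made up and the timezone handling of the real code is omitted.
import datetime


def in_backlog_period(air_date, backlog_period):
    """Return True when air_date is recent enough for the selected period."""
    if backlog_period is None:  # the 'all' period keeps every episode
        return True
    return air_date >= datetime.datetime.now() - backlog_period


# Example: with backlog_period = datetime.timedelta(days=7), an episode that
# aired ten days ago is filtered out of the overview, one from yesterday stays.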
def run(self, force=False):
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True

    # Let's keep track of the exact time the scheduler kicked in,
    # as we need to compare to this time for each provider.
    scheduler_start_time = int(time())

    if not network_dict:
        update_network_dict()

    # The tvshows airdate_offset field is used to configure a search offset for specific shows.
    # This way we can search/accept results early or late, depending on the value.
    main_db_con = DBConnection()
    min_offset_show = main_db_con.select(
        'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
        'FROM tv_shows '
        'WHERE paused = 0 AND airdate_offset < 0'
    )
    additional_search_offset = 0
    if min_offset_show and min_offset_show[0]['offsets'] > 0:
        additional_search_offset = int(ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
        log.debug('Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                  ' offset configured.', {'min_offset_show': min_offset_show[0]['min_offset']})

    cur_time = datetime.now(app_timezone)

    cur_date = (
        date.today() + timedelta(days=1 if network_dict else 2)
        + timedelta(days=additional_search_offset)
    ).toordinal()

    episodes_from_db = main_db_con.select(
        'SELECT indexer, showid, airdate, season, episode '
        'FROM tv_episodes '
        'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date]
    )

    new_releases = []
    series_obj = None

    for db_episode in episodes_from_db:
        indexer_id = db_episode['indexer']
        series_id = db_episode['showid']
        try:
            if not series_obj or series_id != series_obj.indexerid:
                series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not series_obj or series_obj.paused:
                continue

        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}',
                     {'id': series_id})
            continue

        cur_ep = series_obj.get_episode(db_episode['season'], db_episode['episode'])

        if series_obj.airs and series_obj.network:
            # This is how you assure it is always converted to local time
            show_air_time = parse_date_time(db_episode['airdate'], series_obj.airs, series_obj.network)
            end_time = show_air_time.astimezone(app_timezone) + timedelta(minutes=try_int(series_obj.runtime, 60))

            if series_obj.airdate_offset != 0:
                log.debug(
                    '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                    {'show': series_obj.name, 'episode': cur_ep.pretty_name(),
                     'offset': series_obj.airdate_offset})

            # filter out any episodes that haven't finished airing yet
            if end_time + timedelta(hours=series_obj.airdate_offset) > cur_time:
                continue

        with cur_ep.lock:
            cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': cur_ep.pretty_name(),
                    'status': common.statusStrings[cur_ep.status],
                    'special': '(specials are not supported)' if not cur_ep.season else '',
                }
            )
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)

    # queue a daily search
    app.search_queue_scheduler.action.add_item(
        DailySearchQueueItem(scheduler_start_time, force=force)
    )

    self.amActive = False
def per_show_stats():
    """Generate per-show library statistics."""
    pre_today = [SKIPPED, WANTED, FAILED]
    snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
    downloaded = [DOWNLOADED, ARCHIVED]

    def query_in(items):
        return '({0})'.format(','.join(map(str, items)))

    query = dedent("""\
        SELECT tv_eps.indexer AS indexerId, tv_eps.showid AS seriesId,
          SUM(
            season > 0 AND
            episode > 0 AND
            airdate > 1 AND
            tv_eps.status IN {status_quality}
          ) AS epSnatched,
          SUM(
            season > 0 AND
            episode > 0 AND
            airdate > 1 AND
            tv_eps.status IN {status_download}
          ) AS epDownloaded,
          SUM(
            season > 0 AND
            episode > 0 AND
            airdate > 1 AND (
              (airdate <= {today} AND tv_eps.status IN {status_pre_today}) OR
              tv_eps.status IN {status_both}
            )
          ) AS epTotal,
          (SELECT airdate FROM tv_episodes
           WHERE tv_episodes.showid=tv_eps.showid AND
                 tv_episodes.indexer=tv_eps.indexer AND
                 airdate >= {today} AND
                 (tv_eps.status = {unaired} OR tv_eps.status = {wanted})
           ORDER BY airdate ASC LIMIT 1
          ) AS epAirsNext,
          (SELECT airdate FROM tv_episodes
           WHERE tv_episodes.showid=tv_eps.showid AND
                 tv_episodes.indexer=tv_eps.indexer AND
                 airdate > 1 AND
                 tv_eps.status <> {unaired}
           ORDER BY airdate DESC LIMIT 1
          ) AS epAirsPrev,
          SUM(file_size) AS seriesSize,
          tv_shows.airs as airs,
          tv_shows.network as network
        FROM tv_episodes tv_eps, tv_shows
        WHERE tv_eps.showid = tv_shows.indexer_id AND tv_eps.indexer = tv_shows.indexer
        GROUP BY tv_eps.showid, tv_eps.indexer;
        """).format(
        status_quality=query_in(snatched),
        status_download=query_in(downloaded),
        status_both=query_in(snatched + downloaded),
        today=date.today().toordinal(),
        status_pre_today=query_in(pre_today),
        skipped=SKIPPED,
        wanted=WANTED,
        unaired=UNAIRED,
    )

    main_db_con = db.DBConnection()
    sql_result = main_db_con.select(query)

    stats_data = {}
    stats_data['stats'] = []
    stats_data['maxDownloadCount'] = 1000
    for cur_result in sql_result:
        stats_data['stats'].append(cur_result)
        if cur_result['epTotal'] > stats_data['maxDownloadCount']:
            stats_data['maxDownloadCount'] = cur_result['epTotal']

        if cur_result['epAirsNext']:
            cur_result['epAirsNext'] = parse_date_time(
                cur_result['epAirsNext'], cur_result['airs'], cur_result['network'])

        if cur_result['epAirsPrev']:
            cur_result['epAirsPrev'] = parse_date_time(
                cur_result['epAirsPrev'], cur_result['airs'], cur_result['network'])

    stats_data['maxDownloadCount'] *= 100
    return stats_data
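# Illustrative sketch (not part of Medusa): the stats query above relies on a
# helper like query_in() to splice lists of status constants into literal
# 'IN (...)' fragments before the query is formatted. The numeric values below
# are placeholders for illustration, not Medusa's real status codes.
def _query_in_sketch(items):
    """Format a list of integers as the body of a SQL IN clause."""
    return '({0})'.format(','.join(map(str, items)))


# Example: _query_in_sketch([2, 9, 12]) returns '(2,9,12)', so a template line
# such as 'tv_eps.status IN {status_download}' renders as
# 'tv_eps.status IN (2,9,12)'.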
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
    qualities_list = (Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST +
                      Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED])

    db = DBConnection()
    fields_to_select = ', '.join(
        ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
         'indexer_id', 'name', 'network', 'paused', 'quality', 'runtime', 'season', 'show_name',
         'showid', 's.status']
    )
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, next_week] + qualities_list
    )

    done_shows_list = [int(result[b'showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED +
                                        Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER))

    # FIXME: This inner join is not multi indexer friendly.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED +
        Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
    )

    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, recently, WANTED, UNAIRED] + qualities_list
    )

    results = [dict(result) for result in results]

    for index, item in enumerate(results):
        item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    results.sort(ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
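# Illustrative sketch (not part of Medusa): the grouping loop above buckets
# each episode by comparing its ordinal airdate against today and one week out.
# categorize_airdate() repeats that decision in isolation; the helper name is
# made up for illustration.
from datetime import date, timedelta


def categorize_airdate(airdate_ordinal):
    """Return the schedule bucket ('missed', 'today', 'soon' or 'later') for an ordinal airdate."""
    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    if airdate_ordinal < today:
        return 'missed'
    if airdate_ordinal == today:
        return 'today'
    if airdate_ordinal >= next_week:
        return 'later'
    return 'soon'


# Example: an episode airing tomorrow lands in 'soon', one that aired
# yesterday in 'missed', and one exactly seven days out in 'later'.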
def run(self, force=False):  # pylint:disable=too-many-branches
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True

    if not network_dict:
        update_network_dict()

    cur_time = datetime.now(app_timezone)
    cur_date = (date.today() + timedelta(days=1 if network_dict else 2)).toordinal()

    main_db_con = DBConnection()
    episodes_from_db = main_db_con.select(
        b'SELECT indexer, showid, airdate, season, episode '
        b'FROM tv_episodes '
        b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date])

    new_releases = []
    series_obj = None

    for db_episode in episodes_from_db:
        indexer_id = db_episode[b'indexer']
        series_id = db_episode[b'showid']
        try:
            if not series_obj or series_id != series_obj.indexerid:
                series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not series_obj or series_obj.paused:
                continue

        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}',
                     {'id': series_id})
            continue

        if series_obj.airs and series_obj.network:
            # This is how you assure it is always converted to local time
            show_air_time = parse_date_time(db_episode[b'airdate'],
                                            series_obj.airs,
                                            series_obj.network)
            end_time = show_air_time.astimezone(app_timezone) + timedelta(
                minutes=try_int(series_obj.runtime, 60))

            # filter out any episodes that haven't finished airing yet
            if end_time > cur_time:
                continue

        cur_ep = series_obj.get_episode(db_episode[b'season'], db_episode[b'episode'])
        with cur_ep.lock:
            cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': cur_ep.pretty_name(),
                    'status': common.statusStrings[cur_ep.status],
                    'special': '(specials are not supported)' if not cur_ep.season else '',
                })
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)

    # queue episode for daily search
    app.search_queue_scheduler.action.add_item(
        DailySearchQueueItem(force=force))

    self.amActive = False
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
    status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, ARCHIVED, IGNORED]

    db = DBConnection()
    fields_to_select = ', '.join(
        ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
         'indexer_id', 'name', 'network', 'paused', 's.quality', 'runtime', 'season', 'show_name',
         'showid', 's.status']
    )
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, next_week] + status_list
    )

    done_shows_list = [int(result['showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]))

    # FIXME: This inner join is not multi indexer friendly.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]
    )

    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, recently, WANTED, UNAIRED] + status_list
    )

    for index, item in enumerate(results):
        item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    results.sort(key=ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def run(self, force=False):
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True

    # Let's keep track of the exact time the scheduler kicked in,
    # as we need to compare to this time for each provider.
    scheduler_start_time = int(time())

    if not network_dict:
        update_network_dict()

    # The tvshows airdate_offset field is used to configure a search offset for specific shows.
    # This way we can search/accept results early or late, depending on the value.
    main_db_con = DBConnection()
    min_offset_show = main_db_con.select(
        'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
        'FROM tv_shows '
        'WHERE paused = 0 AND airdate_offset < 0')

    additional_search_offset = 0
    if min_offset_show and min_offset_show[0]['offsets'] > 0:
        additional_search_offset = int(
            ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
        log.debug(
            'Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
            ' offset configured.',
            {'min_offset_show': min_offset_show[0]['min_offset']})

    cur_time = datetime.now(app_timezone)

    cur_date = (date.today() + timedelta(days=1 if network_dict else 2) +
                timedelta(days=additional_search_offset)).toordinal()

    episodes_from_db = main_db_con.select(
        'SELECT indexer, showid, airdate, season, episode '
        'FROM tv_episodes '
        'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date])

    new_releases = []
    series_obj = None

    for db_episode in episodes_from_db:
        indexer_id = db_episode['indexer']
        series_id = db_episode['showid']
        try:
            if not series_obj or series_id != series_obj.indexerid:
                series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not series_obj or series_obj.paused:
                continue

        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}',
                     {'id': series_id})
            continue

        cur_ep = series_obj.get_episode(db_episode['season'], db_episode['episode'])

        if series_obj.airs and series_obj.network:
            # This is how you assure it is always converted to local time
            show_air_time = parse_date_time(db_episode['airdate'],
                                            series_obj.airs,
                                            series_obj.network)
            end_time = show_air_time.astimezone(app_timezone) + timedelta(
                minutes=try_int(series_obj.runtime, 60))

            if series_obj.airdate_offset != 0:
                log.debug(
                    '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours', {
                        'show': series_obj.name,
                        'episode': cur_ep.pretty_name(),
                        'offset': series_obj.airdate_offset
                    })

            # filter out any episodes that haven't finished airing yet
            if end_time + timedelta(hours=series_obj.airdate_offset) > cur_time:
                continue

        with cur_ep.lock:
            cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': cur_ep.pretty_name(),
                    'status': common.statusStrings[cur_ep.status],
                    'special': '(specials are not supported)' if not cur_ep.season else '',
                })
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)

    # queue a daily search
    app.search_queue_scheduler.action.add_item(
        DailySearchQueueItem(scheduler_start_time, force=force))

    self.amActive = False
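# Illustrative sketch (not part of Medusa): the window arithmetic used by the
# daily searcher above. A negative per-show airdate_offset (in hours) widens
# the airdate range pulled from the database by whole days; the helper name is
# made up for illustration.
from math import ceil


def extra_window_days(min_offset_hours):
    """Days to add to the search window for the most negative airdate offset."""
    if min_offset_hours >= 0:
        return 0
    return int(ceil(abs(min_offset_hours) / 24.0))


# Example: a show configured with airdate_offset = -30 (accept results up to
# 30 hours early) extends the window by extra_window_days(-30) == 2 days.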
def calendar(self):
    """ Provides a subscribable URL for iCal subscriptions """
    logger.log('Receiving iCal request from {ip}'.format(ip=self.request.remote_ip))

    # Create an iCal string
    ical = 'BEGIN:VCALENDAR\r\n'
    ical += 'VERSION:2.0\r\n'
    ical += 'X-WR-CALNAME:Medusa\r\n'
    ical += 'X-WR-CALDESC:Medusa\r\n'
    ical += 'PRODID://Medusa Upcoming Episodes//\r\n'

    future_weeks = try_int(self.get_argument('future', 52), 52)
    past_weeks = try_int(self.get_argument('past', 52), 52)

    # Limit dates
    past_date = (datetime.date.today() + datetime.timedelta(weeks=-past_weeks)).toordinal()
    future_date = (datetime.date.today() + datetime.timedelta(weeks=future_weeks)).toordinal()

    # Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
    main_db_con = db.DBConnection()
    calendar_shows = main_db_con.select(
        'SELECT show_name, indexer_id, network, airs, runtime '
        'FROM tv_shows '
        'WHERE ( status = ? OR status = ? ) AND paused != 1',
        ('Continuing', 'Returning Series')
    )

    for show in calendar_shows:
        # Get all episodes of this show airing between today and next month
        episode_list = main_db_con.select(
            'SELECT indexerid, name, season, episode, description, airdate '
            'FROM tv_episodes '
            'WHERE airdate >= ? AND airdate < ? AND showid = ?',
            (past_date, future_date, int(show['indexer_id']))
        )

        utc = tz.gettz('GMT')

        for episode in episode_list:
            air_date_time = network_timezones.parse_date_time(
                episode['airdate'], show['airs'], show['network']).astimezone(utc)
            air_date_time_end = air_date_time + datetime.timedelta(
                minutes=try_int(show['runtime'], 60))

            # Create event for episode
            ical += 'BEGIN:VEVENT\r\n'
            ical += 'DTSTART:{date}\r\n'.format(date=air_date_time.strftime('%Y%m%dT%H%M%SZ'))
            ical += 'DTEND:{date}\r\n'.format(date=air_date_time_end.strftime('%Y%m%dT%H%M%SZ'))
            if app.CALENDAR_ICONS:
                icon_url = '{base_url}/images/ico/favicon-16.png'.format(base_url=app.BASE_PYMEDUSA_URL)
                ical += 'X-GOOGLE-CALENDAR-CONTENT-ICON:{icon_url}\r\n'.format(icon_url=icon_url)
                ical += 'X-GOOGLE-CALENDAR-CONTENT-DISPLAY:CHIP\r\n'
            ical += 'SUMMARY: {show} - {season}x{episode} - {title}\r\n'.format(
                show=show['show_name'],
                season=episode['season'],
                episode=episode['episode'],
                title=episode['name'],
            )
            ical += 'UID:Medusa-{date}-{show}-E{episode}S{season}\r\n'.format(
                date=datetime.date.today().isoformat(),
                show=show['show_name'].replace(' ', '-'),
                episode=episode['episode'],
                season=episode['season'],
            )
            ical += 'DESCRIPTION: {date} on {network}'.format(
                date=show['airs'] or '(Unknown airs)',
                network=show['network'] or 'Unknown network',
            )
            if episode['description']:
                ical += ' \\n\\n {description}\r\n'.format(
                    description=episode['description'].splitlines()[0])
            else:
                ical += '\r\n'
            ical += 'END:VEVENT\r\n'

    # Ending the iCal
    ical += 'END:VCALENDAR'
    return ical
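# Illustrative sketch (not part of Medusa): the VEVENT lines above render the
# parsed air time in the iCalendar UTC "basic" format. This standalone helper
# shows the same conversion with dateutil, assuming an aware datetime as input;
# the helper name is made up for illustration.
import datetime

from dateutil import tz


def to_ical_utc(aware_dt):
    """Render an aware datetime as an iCalendar UTC timestamp, e.g. 20240101T210000Z."""
    return aware_dt.astimezone(tz.gettz('GMT')).strftime('%Y%m%dT%H%M%SZ')


# Example: an 8 PM US/Eastern air time on 2024-01-01 becomes a DTSTART of
# 20240102T010000Z, i.e. 1 AM UTC the next day (Eastern standard time is UTC-5).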
def get(self):
    """Query the schedule of coming episodes."""
    sort = self.get_argument('sort', default='desc')
    categories = self.get_arguments('category[]') or ['missed']
    paused = self.get_argument('paused', default=False)

    grouped_coming_episodes = ComingEpisodes.get_coming_episodes(categories, sort, True, paused)
    data = {section: [] for section in grouped_coming_episodes}

    for section, coming_episodes in grouped_coming_episodes.items():
        for coming_episode in coming_episodes:
            airs_time = ' '.join(coming_episode['airs'].split(' ')[-2:])
            airdate_ordinal = datetime.strptime(
                coming_episode['airdate'], '%Y-%m-%d').date().toordinal()
            # parse_date_time expects (airdate, airs, network) to localize the air time.
            show_air_time = parse_date_time(airdate_ordinal, airs_time, coming_episode['network'])

            data[section].append({
                'airdate': coming_episode['airdate'],
                'airs': coming_episode['airs'],
                'localAirTime': show_air_time.replace(microsecond=0).isoformat(),
                'epName': coming_episode['name'],
                'epPlot': coming_episode['description'],
                'season': coming_episode['season'],
                'episode': coming_episode['episode'],
                'episodeSlug': 's{season:02d}e{episode:02d}'.format(
                    season=coming_episode['season'], episode=coming_episode['episode']),
                'indexerId': coming_episode['indexer_id'],
                'indexer': coming_episode['indexer'],
                'network': coming_episode['network'],
                'paused': coming_episode['paused'],
                'quality': coming_episode['qualityValue'],
                'showSlug': coming_episode['series_slug'],
                'showName': coming_episode['show_name'],
                'showStatus': coming_episode['status'],
                'tvdbid': coming_episode['tvdbid'],
                'weekday': coming_episode['weekday'],
                'runtime': coming_episode['runtime'],
                'externals': coming_episode['externals']
            })

    return self._ok(data)
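# Illustrative sketch (not part of Medusa): get() above recovers the bare air
# time by keeping the last two whitespace-separated tokens of the 'airs'
# string. This shows the same extraction on typical values; the helper name is
# made up for illustration.
def extract_airs_time(airs):
    """Return the time portion of an 'airs' string such as 'Thursday 8:00 PM'."""
    return ' '.join(airs.split(' ')[-2:])


# Examples: 'Thursday 8:00 PM' -> '8:00 PM'; 'Daily 12:00 AM' -> '12:00 AM'.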