def overall_stats():
    db = DBConnection()
    shows = app.showList
    today = date.today().toordinal()

    downloaded_status = [DOWNLOADED, ARCHIVED]
    snatched_status = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
    total_status = [SKIPPED, WANTED]

    results = db.select(
        'SELECT airdate, status, quality '
        'FROM tv_episodes '
        'WHERE season > 0 '
        'AND episode > 0 '
        'AND airdate > 1'
    )

    stats = {
        'episodes': {
            'downloaded': 0,
            'snatched': 0,
            'total': 0,
        },
        'shows': {
            'active': len([show for show in shows
                           if show.paused == 0 and show.status == 'Continuing']),
            'total': len(shows),
        },
    }

    for result in results:
        if result['status'] in downloaded_status:
            stats['episodes']['downloaded'] += 1
            stats['episodes']['total'] += 1
        elif result['status'] in snatched_status:
            stats['episodes']['snatched'] += 1
            stats['episodes']['total'] += 1
        elif result['airdate'] <= today and result['status'] in total_status:
            stats['episodes']['total'] += 1

    return stats
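# A minimal usage sketch (illustrative, not from the source): assuming `app`
# is initialized with a populated showList and the database is reachable,
# the nested dict returned by overall_stats() can be consumed like this:
#
#   stats = overall_stats()
#   print('{0}/{1} episodes downloaded, {2}/{3} shows active'.format(
#       stats['episodes']['downloaded'], stats['episodes']['total'],
#       stats['shows']['active'], stats['shows']['total']))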
def find_search_results(self, show, episodes, search_mode, forced_search=False,
                        download_current_quality=False, manual_search=False,
                        manual_search_type='episode'):
    """Search episodes based on param."""
    self._check_auth()
    self.show = show

    results = {}
    items_list = []

    for episode in episodes:
        if not manual_search:
            cache_result = self.cache.search_cache(
                episode, forced_search=forced_search,
                down_cur_quality=download_current_quality)
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)
                continue

        search_strings = []
        season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'
        if season_search:
            search_strings = self._get_season_search_strings(episode)
        elif search_mode == 'eponly':
            search_strings = self._get_episode_search_strings(episode)

        for search_string in search_strings:
            # Find results from the provider
            items_list += self.search(search_string, ep_obj=episode, manual_search=manual_search)

        # In season search, we can't loop in episodes lists as we only need one episode to get the season string
        if search_mode == 'sponly':
            break

    if len(results) == len(episodes):
        return results

    if items_list:
        # categorize the items into lists by quality
        items = defaultdict(list)
        for item in items_list:
            items[self.get_quality(item, anime=show.is_anime)].append(item)

        # temporarily remove the list of items with unknown quality
        unknown_items = items.pop(Quality.UNKNOWN, [])

        # make a generator to sort the remaining items by descending quality
        items_list = (items[quality] for quality in sorted(items, reverse=True))

        # unpack all of the quality lists into a single sorted list
        items_list = list(chain(*items_list))

        # extend the list with the unknown qualities, now sorted at the bottom of the list
        items_list.extend(unknown_items)

    cl = []

    # Move through each item and parse it into a quality
    search_results = []
    for item in items_list:
        # Make sure we start with a TorrentSearchResult, NZBDataSearchResult or NZBSearchResult search result obj.
        search_result = self.get_result()
        search_results.append(search_result)
        search_result.item = item
        search_result.download_current_quality = download_current_quality
        # FIXME: Should be changed to search_result.search_type
        search_result.forced_search = forced_search

        (search_result.name, search_result.url) = self._get_title_and_url(item)
        (search_result.seeders, search_result.leechers) = self._get_result_info(item)
        search_result.size = self._get_size(item)
        search_result.pubdate = self._get_pubdate(item)
        search_result.result_wanted = True

        try:
            search_result.parsed_result = NameParser(
                parse_method=('normal', 'anime')[show.is_anime]
            ).parse(search_result.name)
        except (InvalidNameException, InvalidShowException) as error:
            log.debug('Error during parsing of release name: {release_name}, with error: {error}',
                      {'release_name': search_result.name, 'error': error})
            search_result.add_cache_entry = False
            search_result.result_wanted = False
            continue

        # I don't know why i'm doing this. Maybe remove it later on all together, now i've added the parsed_result
        # to the search_result.
        search_result.show = search_result.parsed_result.show
        search_result.quality = search_result.parsed_result.quality
        search_result.release_group = search_result.parsed_result.release_group
        search_result.version = search_result.parsed_result.version
        search_result.actual_season = search_result.parsed_result.season_number
        search_result.actual_episodes = search_result.parsed_result.episode_numbers

        if not manual_search:
            if not (search_result.show.air_by_date or search_result.show.sports):
                if search_mode == 'sponly':
                    if search_result.parsed_result.episode_numbers:
                        log.debug('This is supposed to be a season pack search but the result {0} is not a valid '
                                  'season pack, skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                    elif not [ep for ep in episodes if search_result.parsed_result.season_number ==
                              (ep.season, ep.scene_season)[ep.series.is_scene]]:
                        log.debug('This season result {0} is for a season we are not searching for, '
                                  'skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                else:
                    # I'm going to split these up for better readability
                    # Check if at least got a season parsed.
                    if search_result.parsed_result.season_number is None:
                        log.debug("The result {0} doesn't seem to have a valid season that we are currently trying to "
                                  "snatch, skipping it", search_result.name)
                        search_result.result_wanted = False
                        continue

                    # Check if we at least got some episode numbers parsed.
                    if not search_result.parsed_result.episode_numbers:
                        log.debug("The result {0} doesn't seem to match an episode that we are currently trying to "
                                  "snatch, skipping it", search_result.name)
                        search_result.result_wanted = False
                        continue

                    # Compare the episodes and season from the result with what was searched.
                    if not [searched_episode for searched_episode in episodes
                            if searched_episode.season == search_result.parsed_result.season_number and
                            (searched_episode.episode, searched_episode.scene_episode)[searched_episode.series.is_scene]
                            in search_result.parsed_result.episode_numbers]:
                        log.debug("The result {0} doesn't seem to match an episode that we are currently trying to "
                                  "snatch, skipping it", search_result.name)
                        search_result.result_wanted = False
                        continue

                # We've performed some checks to decided if we want to continue with this result.
                # If we've hit this, that means this is not an air_by_date and not a sports show. And it seems to be
                # a valid result. Let's store the parsed season and episode number and continue.
                search_result.actual_season = search_result.parsed_result.season_number
                search_result.actual_episodes = search_result.parsed_result.episode_numbers
            else:
                # air_by_date or sportshow.
                search_result.same_day_special = False

                if not search_result.parsed_result.is_air_by_date:
                    log.debug("This is supposed to be a date search but the result {0} didn't parse as one, "
                              "skipping it", search_result.name)
                    search_result.result_wanted = False
                    continue
                else:
                    # Use a query against the tv_episodes table, to match the parsed air_date against.
                    air_date = search_result.parsed_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [search_result.show.indexerid, air_date])

                    if len(sql_results) == 2:
                        if int(sql_results[0][b'season']) == 0 and int(sql_results[1][b'season']) != 0:
                            search_result.actual_season = int(sql_results[1][b'season'])
                            search_result.actual_episodes = [int(sql_results[1][b'episode'])]
                            search_result.same_day_special = True
                        elif int(sql_results[1][b'season']) == 0 and int(sql_results[0][b'season']) != 0:
                            search_result.actual_season = int(sql_results[0][b'season'])
                            search_result.actual_episodes = [int(sql_results[0][b'episode'])]
                            search_result.same_day_special = True
                    elif len(sql_results) != 1:
                        log.warning("Tried to look up the date for the episode {0} but the database didn't return proper "
                                    "results, skipping it", search_result.name)
                        search_result.result_wanted = False
                        continue

                    # @TODO: Need to verify and test this.
                    if search_result.result_wanted and not search_result.same_day_special:
                        search_result.actual_season = int(sql_results[0][b'season'])
                        search_result.actual_episodes = [int(sql_results[0][b'episode'])]

    # Iterate again over the search results, and see if there is anything we want.
    for search_result in search_results:
        # Try to cache the item if we want to.
        cache_result = search_result.add_result_to_cache(self.cache)
        if cache_result is not None:
            cl.append(cache_result)

        if not search_result.result_wanted:
            log.debug("We aren't interested in this result: {0} with url: {1}",
                      search_result.name, search_result.url)
            continue

        log.debug('Found result {0} at {1}', search_result.name, search_result.url)

        episode_object = search_result.create_episode_object()
        # result = self.get_result(episode_object, search_result)
        search_result.finish_search_result(self)

        if not episode_object:
            episode_number = SEASON_RESULT
            log.debug('Found season pack result {0} at {1}', search_result.name, search_result.url)
        elif len(episode_object) == 1:
            episode_number = episode_object[0].episode
            log.debug('Found single episode result {0} at {1}', search_result.name, search_result.url)
        else:
            episode_number = MULTI_EP_RESULT
            log.debug('Found multi-episode ({0}) result {1} at {2}',
                      ', '.join(map(str, search_result.parsed_result.episode_numbers)),
                      search_result.name, search_result.url)

        if episode_number not in results:
            results[episode_number] = [search_result]
        else:
            results[episode_number].append(search_result)

    if cl:
        # Access to a protected member of a client class
        db = self.cache._get_db()
        db.mass_action(cl)

    return results
class History(object):
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        from medusa.db import DBConnection
        self.db = DBConnection()

    def clear(self):
        """
        Clear all the history
        """
        self.db.action('DELETE '
                       'FROM history '
                       'WHERE 1 = 1')

    def get(self, limit=100, action=None):
        """
        :param limit: The maximum number of elements to return
        :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else
                       or no value will return everything (up to ``limit``)
        :return: The last ``limit`` elements of type ``action`` in the history
        """
        # TODO: Make this a generator instead
        # TODO: Split compact and detailed into separate methods
        # TODO: Add a date limit as well
        # TODO: Clean up history.mako

        actions = History._get_actions(action)
        limit = max(try_int(limit), 0)

        common_sql = 'SELECT show_name, h.indexer_id, showid, season, episode, h.quality, ' \
                     'action, provider, resource, date, h.proper_tags, h.manually_searched ' \
                     'FROM history h, tv_shows s ' \
                     'WHERE h.showid = s.indexer_id AND h.indexer_id = s.indexer '
        filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
        order_sql = 'ORDER BY date DESC '

        if actions:
            sql_results = self.db.select(common_sql + filter_sql + order_sql, actions)
        else:
            sql_results = self.db.select(common_sql + order_sql)

        detailed = []
        compact = dict()

        # TODO: Convert to a defaultdict and compact items as needed
        # TODO: Convert to using operators to combine items
        for row in sql_results:
            row = History.Item(*row)
            if not limit or len(detailed) < limit:
                detailed.append(row)
            if row.index in compact:
                compact[row.index].actions.append(row.cur_action)
            elif not limit or len(compact) < limit:
                compact[row.index] = row.compacted()

        results = namedtuple('results', ['detailed', 'compact'])
        return results(detailed, compact.values())

    def trim(self, days=30):
        """
        Remove expired elements from history

        :param days: number of days to keep
        """
        date = datetime.today() - timedelta(days)
        self.db.action('DELETE '
                       'FROM history '
                       'WHERE date < ?',
                       [date.strftime(History.date_format)])

    @staticmethod
    def _get_actions(action):
        action = action.lower() if isinstance(action, (str, text_type)) else ''
        result = None

        if action == 'downloaded':
            result = Quality.DOWNLOADED
        elif action == 'snatched':
            result = Quality.SNATCHED

        return result or []

    action_fields = ('action', 'provider', 'resource', 'date', 'proper_tags', 'manually_searched')
    # A specific action from history
    Action = namedtuple('Action', action_fields)
    Action.width = len(action_fields)

    index_fields = ('indexer_id', 'show_id', 'season', 'episode', 'quality')
    # An index for an item or compact item from history
    Index = namedtuple('Index', index_fields)
    Index.width = len(index_fields)

    compact_fields = ('show_name', 'index', 'actions')
    # Related items compacted with a list of actions from history
    CompactItem = namedtuple('CompactItem', compact_fields)

    item_fields = tuple(  # make it a tuple so its immutable
        ['show_name'] + list(index_fields) + list(action_fields))

    class Item(namedtuple('Item', item_fields)):
        # TODO: Allow items to be added to a compact item
        """
        An individual row item from history
        """
        # prevent creation of a __dict__ when subclassing
        # from a class that uses __slots__
        __slots__ = ()

        @property
        def index(self):
            """
            Create a look-up index for the item
            """
            return History.Index(
                self.indexer_id,
                self.show_id,
                self.season,
                self.episode,
                self.quality,
            )

        @property
        def cur_action(self):
            """
            Create the current action from action_fields
            """
            return History.Action(
                self.action,
                self.provider,
                self.resource,
                self.date,
                self.proper_tags,
                self.manually_searched,
            )

        def compacted(self):
            """
            Create a CompactItem

            :returns: the current item in compact form
            """
            result = History.CompactItem(
                self.show_name,
                self.index,
                [self.cur_action],  # actions
            )
            return result

        def __add__(self, other):
            """
            Combines two history items with the same index

            :param other: The other item to add
            :returns: a compact item with elements from both items
            :raises AssertionError: if indexes do not match
            """
            # Index comparison and validation is done by __radd__
            return self.compacted() + other

        def __radd__(self, other):
            """
            Adds a history item to a compact item

            :param other: The compact item to append
            :returns: the updated compact item
            :raises AssertionError: if indexes do not match
            """
            if self.index == other.index:
                other.actions.append(self.cur_action)
                return other
            else:
                raise AssertionError('cannot add items with different indexes')
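# A hedged usage sketch (not part of the module): assuming the surrounding
# Medusa app and its database are initialized, fetch the last 10 'downloaded'
# rows. `get()` returns a namedtuple of (detailed, compact), where detailed
# holds History.Item rows and compact holds CompactItem groupings.
#
#   history = History()
#   last = history.get(limit=10, action='downloaded')
#   for item in last.detailed:
#       print(item.show_name, item.season, item.episode)
#   history.trim(days=30)  # drop rows older than 30 days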
def find_search_results(self, series, episodes, search_mode, forced_search=False,
                        download_current_quality=False, manual_search=False,
                        manual_search_type='episode'):
    """
    Search episodes based on param.

    Search the provider using http queries.

    :param series: Series object
    :param episodes: List of Episode objects
    :param search_mode: 'eponly' or 'sponly'
    :param forced_search: Flag if the search was triggered by a forced search
    :param download_current_quality: Flag if we want to include an already downloaded quality in the new search
    :param manual_search: Flag if the search was triggered by a manual search
    :param manual_search_type: How the manual search was started: For example an 'episode' or 'season'

    :return: A dict of search results, ordered by episode number.
    """
    self._check_auth()
    self.series = series

    season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'
    results = []

    for episode in episodes:
        search_strings = []
        if season_search:
            search_strings = self._get_season_search_strings(episode)
        elif search_mode == 'eponly':
            search_strings = self._get_episode_search_strings(episode)

        for search_string in search_strings:
            # Find results from the provider
            items = self.search(search_string, ep_obj=episode, manual_search=manual_search)
            for item in items:
                result = self.get_result(series=series, item=item)
                if result not in results:
                    result.quality = Quality.quality_from_name(result.name, series.is_anime)
                    results.append(result)

        # In season search, we can't loop in episodes lists as we
        # only need one episode to get the season string
        if search_mode == 'sponly':
            break

    log.debug('Found {0} unique search results', len(results))

    # sort qualities in descending order
    results.sort(key=operator.attrgetter('quality'), reverse=True)

    # Move through each item and parse with NameParser()
    for search_result in results:
        if forced_search:
            search_result.search_type = FORCED_SEARCH
        search_result.download_current_quality = download_current_quality
        search_result.result_wanted = True

        try:
            search_result.parsed_result = NameParser(
                parse_method=('normal', 'anime')[series.is_anime]
            ).parse(search_result.name)
        except (InvalidNameException, InvalidShowException) as error:
            log.debug('Error during parsing of release name: {release_name}, with error: {error}',
                      {'release_name': search_result.name, 'error': error})
            search_result.add_cache_entry = False
            search_result.result_wanted = False
            continue

        # I don't know why i'm doing this. Maybe remove it later on all together, now i've added the parsed_result
        # to the search_result.
        search_result.series = search_result.parsed_result.series
        search_result.quality = search_result.parsed_result.quality
        search_result.release_group = search_result.parsed_result.release_group
        search_result.version = search_result.parsed_result.version
        search_result.actual_season = search_result.parsed_result.season_number
        search_result.actual_episodes = search_result.parsed_result.episode_numbers

        if not manual_search:
            if not (search_result.series.air_by_date or search_result.series.sports):
                if search_mode == 'sponly':
                    if search_result.parsed_result.episode_numbers:
                        log.debug('This is supposed to be a season pack search but the result {0} is not a valid '
                                  'season pack, skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                    elif not [ep for ep in episodes if search_result.parsed_result.season_number ==
                              (ep.season, ep.scene_season)[ep.series.is_scene]]:
                        log.debug('This season result {0} is for a season we are not searching for, '
                                  'skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                else:
                    # I'm going to split these up for better readability
                    # Check if at least got a season parsed.
                    if search_result.parsed_result.season_number is None:
                        log.debug("The result {0} doesn't seem to have a valid season that we are currently trying to "
                                  'snatch, skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue

                    # Check if we at least got some episode numbers parsed.
                    if not search_result.parsed_result.episode_numbers:
                        log.debug("The result {0} doesn't seem to match an episode that we are currently trying to "
                                  'snatch, skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue

                    # Compare the episodes and season from the result with what was searched.
                    wanted_ep = False
                    for searched_ep in episodes:
                        if searched_ep.series.is_scene and searched_ep.scene_episode:
                            season = searched_ep.scene_season
                            episode = searched_ep.scene_episode
                        else:
                            season = searched_ep.season
                            episode = searched_ep.episode

                        if (season == search_result.parsed_result.season_number
                                and episode in search_result.parsed_result.episode_numbers):
                            wanted_ep = True
                            break

                    if not wanted_ep:
                        log.debug("The result {0} doesn't seem to match an episode that we are currently trying to "
                                  'snatch, skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue

                # We've performed some checks to decided if we want to continue with this result.
                # If we've hit this, that means this is not an air_by_date and not a sports show. And it seems to be
                # a valid result. Let's store the parsed season and episode number and continue.
                search_result.actual_season = search_result.parsed_result.season_number
                search_result.actual_episodes = search_result.parsed_result.episode_numbers
            else:
                # air_by_date or sportshow.
                search_result.same_day_special = False

                if not search_result.parsed_result.is_air_by_date:
                    log.debug("This is supposed to be a date search but the result {0} didn't parse as one, "
                              'skipping it', search_result.name)
                    search_result.result_wanted = False
                    continue
                else:
                    # Use a query against the tv_episodes table, to match the parsed air_date against.
                    air_date = search_result.parsed_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND airdate = ?',
                        [search_result.series.indexer, search_result.series.series_id, air_date])

                    if len(sql_results) == 2:
                        if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                            search_result.actual_season = int(sql_results[1]['season'])
                            search_result.actual_episodes = [int(sql_results[1]['episode'])]
                            search_result.same_day_special = True
                        elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                            search_result.actual_season = int(sql_results[0]['season'])
                            search_result.actual_episodes = [int(sql_results[0]['episode'])]
                            search_result.same_day_special = True
                    elif len(sql_results) != 1:
                        log.warning("Tried to look up the date for the episode {0} but the database didn't return proper "
                                    'results, skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue

                    # @TODO: Need to verify and test this.
                    if search_result.result_wanted and not search_result.same_day_special:
                        search_result.actual_season = int(sql_results[0]['season'])
                        search_result.actual_episodes = [int(sql_results[0]['episode'])]

    final_results = {}
    cl = []
    # Iterate again over the search results, and see if there is anything we want.
    for search_result in results:
        # Try to cache the item if we want to.
        cache_result = search_result.add_result_to_cache(self.cache)
        if cache_result is not None:
            cl.append(cache_result)

        if not search_result.result_wanted:
            log.debug("We aren't interested in this result: {0} with url: {1}",
                      search_result.name, search_result.url)
            continue

        log.debug('Found result {0} at {1}', search_result.name, search_result.url)

        search_result.update_search_result()

        if search_result.episode_number == SEASON_RESULT:
            log.debug('Found season pack result {0} at {1}', search_result.name, search_result.url)
        elif search_result.episode_number == MULTI_EP_RESULT:
            log.debug('Found multi-episode ({0}) result {1} at {2}',
                      ', '.join(map(str, search_result.parsed_result.episode_numbers)),
                      search_result.name, search_result.url)
        else:
            log.debug('Found single episode result {0} at {1}', search_result.name, search_result.url)

        if search_result.episode_number not in final_results:
            final_results[search_result.episode_number] = [search_result]
        else:
            final_results[search_result.episode_number].append(search_result)

    if cl:
        # Access to a protected member of a client class
        db = self.cache._get_db()
        db.mass_action(cl)

    return final_results
def run(self, force=False):
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True
    # Let's keep track of the exact time the scheduler kicked in,
    # as we need to compare to this time for each provider.
    scheduler_start_time = int(time())

    if not network_dict:
        update_network_dict()

    # The tvshows airdate_offset field is used to configure a search offset for specific shows.
    # This way we can search/accept results early or late, depending on the value.
    main_db_con = DBConnection()
    min_offset_show = main_db_con.select(
        'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
        'FROM tv_shows '
        'WHERE paused = 0 AND airdate_offset < 0'
    )
    additional_search_offset = 0
    if min_offset_show and min_offset_show[0]['offsets'] > 0:
        additional_search_offset = int(ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
        log.debug('Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                  ' offset configured.', {'min_offset_show': min_offset_show[0]['min_offset']})

    cur_time = datetime.now(app_timezone)

    cur_date = (
        date.today() + timedelta(days=1 if network_dict else 2) +
        timedelta(days=additional_search_offset)
    ).toordinal()

    episodes_from_db = main_db_con.select(
        'SELECT indexer, showid, airdate, season, episode '
        'FROM tv_episodes '
        'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date]
    )

    new_releases = []
    series_obj = None

    for db_episode in episodes_from_db:
        indexer_id = db_episode['indexer']
        series_id = db_episode['showid']
        try:
            if not series_obj or series_id != series_obj.indexerid:
                series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not series_obj or series_obj.paused:
                continue

        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}',
                     {'id': series_id})
            continue

        cur_ep = series_obj.get_episode(db_episode['season'], db_episode['episode'])

        if series_obj.airs and series_obj.network:
            # This is how you assure it is always converted to local time
            show_air_time = parse_date_time(db_episode['airdate'], series_obj.airs, series_obj.network)
            end_time = show_air_time.astimezone(app_timezone) + timedelta(minutes=try_int(series_obj.runtime, 60))

            if series_obj.airdate_offset != 0:
                log.debug(
                    '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                    {'show': series_obj.name,
                     'episode': cur_ep.pretty_name(),
                     'offset': series_obj.airdate_offset})

            # filter out any episodes that haven't finished airing yet
            if end_time + timedelta(hours=series_obj.airdate_offset) > cur_time:
                continue

        with cur_ep.lock:
            cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': cur_ep.pretty_name(),
                    'status': common.statusStrings[cur_ep.status],
                    'special': '(specials are not supported)' if not cur_ep.season else '',
                }
            )
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)

    # queue a daily search
    app.search_queue_scheduler.action.add_item(
        DailySearchQueueItem(scheduler_start_time, force=force)
    )

    self.amActive = False
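# Worked example of the airdate-offset window above (illustrative only): with
# network_dict available the base window is today + 1 day; a show configured
# with airdate_offset = -30 (accept results up to 30 hours early) makes
# additional_search_offset = ceil(30 / 24.0) = 2, so episodes airing up to
# today + 3 days are selected from the database. The per-episode check
# `end_time + timedelta(hours=series_obj.airdate_offset) > cur_time` then
# still skips anything that has not effectively finished airing.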
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
    qualities_list = (Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST +
                      Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED])

    db = DBConnection()
    fields_to_select = ', '.join(
        ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
         'indexer_id', 'name', 'network', 'paused', 'quality', 'runtime', 'season', 'show_name',
         'showid', 's.status']
    )
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, next_week] + qualities_list
    )

    done_shows_list = [int(result[b'showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED +
                                        Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER))

    # FIXME: This inner join is not multi indexer friendly.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED +
        Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
    )

    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
        [today, recently, WANTED, UNAIRED] + qualities_list
    )

    results = [dict(result) for result in results]

    for index, item in enumerate(results):
        item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    results.sort(ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
def run(self, force=False):  # pylint:disable=too-many-branches
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True

    if not network_dict:
        update_network_dict()

    cur_time = datetime.now(app_timezone)
    cur_date = (date.today() + timedelta(days=1 if network_dict else 2)).toordinal()

    main_db_con = DBConnection()
    episodes_from_db = main_db_con.select(
        b'SELECT indexer, showid, airdate, season, episode '
        b'FROM tv_episodes '
        b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date])

    new_releases = []
    series_obj = None

    for db_episode in episodes_from_db:
        indexer_id = db_episode[b'indexer']
        series_id = db_episode[b'showid']
        try:
            if not series_obj or series_id != series_obj.indexerid:
                series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not series_obj or series_obj.paused:
                continue

        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}',
                     {'id': series_id})
            continue

        if series_obj.airs and series_obj.network:
            # This is how you assure it is always converted to local time
            show_air_time = parse_date_time(db_episode[b'airdate'], series_obj.airs, series_obj.network)
            end_time = show_air_time.astimezone(app_timezone) + timedelta(
                minutes=try_int(series_obj.runtime, 60))

            # filter out any episodes that haven't finished airing yet
            if end_time > cur_time:
                continue

        cur_ep = series_obj.get_episode(db_episode[b'season'], db_episode[b'episode'])
        with cur_ep.lock:
            cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': cur_ep.pretty_name(),
                    'status': common.statusStrings[cur_ep.status],
                    'special': '(specials are not supported)' if not cur_ep.season else '',
                })
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)

    # queue episode for daily search
    app.search_queue_scheduler.action.add_item(DailySearchQueueItem(force=force))

    self.amActive = False
def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
    """
    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()
    recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
    status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER,
                   ARCHIVED, IGNORED]

    db = DBConnection()
    fields_to_select = ', '.join(
        ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
         'indexer_id', 'name', 'network', 'paused', 's.quality', 'runtime', 'season', 'show_name',
         'showid', 's.status']
    )
    results = db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND airdate >= ? '
        'AND airdate < ? '
        'AND s.indexer = e.indexer '
        'AND s.indexer_id = e.showid '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, next_week] + status_list
    )

    done_shows_list = [int(result['showid']) for result in results]
    placeholder = ','.join(['?'] * len(done_shows_list))
    placeholder2 = ','.join(['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]))

    # FIXME: This inner join is not multi indexer friendly.
    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND showid NOT IN (' + placeholder + ') '
        'AND s.indexer_id = e.showid '
        'AND airdate = (SELECT airdate '
        'FROM tv_episodes inner_e '
        'WHERE inner_e.season != 0 '
        'AND inner_e.showid = e.showid '
        'AND inner_e.indexer = e.indexer '
        'AND inner_e.airdate >= ? '
        'ORDER BY inner_e.airdate ASC LIMIT 1) '
        'AND e.status NOT IN (' + placeholder2 + ')',
        done_shows_list + [next_week] + [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]
    )

    results += db.select(
        'SELECT %s ' % fields_to_select +
        'FROM tv_episodes e, tv_shows s '
        'WHERE season != 0 '
        'AND s.indexer_id = e.showid '
        'AND airdate < ? '
        'AND airdate >= ? '
        'AND e.status IN (?,?) '
        'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
        [today, recently, WANTED, UNAIRED] + status_list
    )

    for index, item in enumerate(results):
        item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))

    results.sort(key=ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        if result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
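# A hedged usage sketch (not from the source): assuming get_coming_episodes is
# exposed as a staticmethod of ComingEpisodes, that 'date' is a valid key in
# ComingEpisodes.sorts, and the app/db are initialized, fetch the schedule
# grouped by the categories built above ('missed', 'today', 'soon', 'later'):
#
#   grouped = ComingEpisodes.get_coming_episodes(
#       categories=['today', 'soon'], sort='date', group=True, paused=False)
#   for episode in grouped['today']:
#       print(episode['show_name'], episode['airs'])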
def find_search_results(self, series, episodes, search_mode, forced_search=False,
                        download_current_quality=False, manual_search=False,
                        manual_search_type='episode'):
    """
    Search episodes based on param.

    Search the provider using http queries.

    :param series: Series object
    :param episodes: List of Episode objects
    :param search_mode: 'eponly' or 'sponly'
    :param forced_search: Flag if the search was triggered by a forced search
    :param download_current_quality: Flag if we want to include an already downloaded quality in the new search
    :param manual_search: Flag if the search was triggered by a manual search
    :param manual_search_type: How the manual search was started: For example an 'episode' or 'season'

    :return: A dict of search results, ordered by episode number.
    """
    self._check_auth()
    self.series = series

    results = {}
    items_list = []
    season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'

    for episode in episodes:
        search_strings = []
        if season_search:
            search_strings = self._get_season_search_strings(episode)
        elif search_mode == 'eponly':
            search_strings = self._get_episode_search_strings(episode)

        for search_string in search_strings:
            # Find results from the provider
            items_list += self.search(
                search_string, ep_obj=episode, manual_search=manual_search
            )

        # In season search, we can't loop in episodes lists as we
        # only need one episode to get the season string
        if search_mode == 'sponly':
            break

    # Remove duplicate items
    unique_items = self.remove_duplicate_mappings(items_list)
    log.debug('Found {0} unique items', len(unique_items))

    # categorize the items into lists by quality
    categorized_items = defaultdict(list)
    for item in unique_items:
        quality = self.get_quality(item, anime=series.is_anime)
        categorized_items[quality].append(item)

    # sort qualities in descending order
    sorted_qualities = sorted(categorized_items, reverse=True)
    log.debug('Found qualities: {0}', sorted_qualities)

    # chain items sorted by quality
    sorted_items = chain.from_iterable(
        categorized_items[quality] for quality in sorted_qualities
    )

    # unpack all of the quality lists into a single sorted list
    items_list = list(sorted_items)

    # Move through each item and parse it into a quality
    search_results = []
    for item in items_list:
        # Make sure we start with a TorrentSearchResult, NZBDataSearchResult or NZBSearchResult search result obj.
        search_result = self.get_result()
        search_results.append(search_result)
        search_result.item = item
        search_result.download_current_quality = download_current_quality
        # FIXME: Should be changed to search_result.search_type
        search_result.forced_search = forced_search

        (search_result.name, search_result.url) = self._get_title_and_url(item)
        (search_result.seeders, search_result.leechers) = self._get_result_info(item)
        search_result.size = self._get_size(item)
        search_result.pubdate = self._get_pubdate(item)
        search_result.result_wanted = True

        try:
            search_result.parsed_result = NameParser(
                parse_method=('normal', 'anime')[series.is_anime]
            ).parse(search_result.name)
        except (InvalidNameException, InvalidShowException) as error:
            log.debug('Error during parsing of release name: {release_name}, with error: {error}',
                      {'release_name': search_result.name, 'error': error})
            search_result.add_cache_entry = False
            search_result.result_wanted = False
            continue

        # I don't know why i'm doing this. Maybe remove it later on all together, now i've added the parsed_result
        # to the search_result.
        search_result.series = search_result.parsed_result.series
        search_result.quality = search_result.parsed_result.quality
        search_result.release_group = search_result.parsed_result.release_group
        search_result.version = search_result.parsed_result.version
        search_result.actual_season = search_result.parsed_result.season_number
        search_result.actual_episodes = search_result.parsed_result.episode_numbers

        if not manual_search:
            if not (search_result.series.air_by_date or search_result.series.sports):
                if search_mode == 'sponly':
                    if search_result.parsed_result.episode_numbers:
                        log.debug(
                            'This is supposed to be a season pack search but the result {0} is not a valid '
                            'season pack, skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue
                    elif not [ep for ep in episodes
                              if search_result.parsed_result.season_number ==
                              (ep.season, ep.scene_season)[ep.series.is_scene]]:
                        log.debug(
                            'This season result {0} is for a season we are not searching for, '
                            'skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue
                else:
                    # I'm going to split these up for better readability
                    # Check if at least got a season parsed.
                    if search_result.parsed_result.season_number is None:
                        log.debug(
                            "The result {0} doesn't seem to have a valid season that we are currently trying to "
                            'snatch, skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue

                    # Check if we at least got some episode numbers parsed.
                    if not search_result.parsed_result.episode_numbers:
                        log.debug(
                            "The result {0} doesn't seem to match an episode that we are currently trying to "
                            'snatch, skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue

                    # Compare the episodes and season from the result with what was searched.
                    if not [searched_episode for searched_episode in episodes
                            if searched_episode.season == search_result.parsed_result.season_number and
                            (searched_episode.episode, searched_episode.scene_episode)[searched_episode.series.is_scene]
                            in search_result.parsed_result.episode_numbers]:
                        log.debug(
                            "The result {0} doesn't seem to match an episode that we are currently trying to "
                            'snatch, skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue

                # We've performed some checks to decided if we want to continue with this result.
                # If we've hit this, that means this is not an air_by_date and not a sports show. And it seems to be
                # a valid result. Let's store the parsed season and episode number and continue.
                search_result.actual_season = search_result.parsed_result.season_number
                search_result.actual_episodes = search_result.parsed_result.episode_numbers
            else:
                # air_by_date or sportshow.
                search_result.same_day_special = False

                if not search_result.parsed_result.is_air_by_date:
                    log.debug(
                        "This is supposed to be a date search but the result {0} didn't parse as one, "
                        'skipping it', search_result.name
                    )
                    search_result.result_wanted = False
                    continue
                else:
                    # Use a query against the tv_episodes table, to match the parsed air_date against.
                    air_date = search_result.parsed_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND airdate = ?',
                        [search_result.series.indexer, search_result.series.series_id, air_date]
                    )

                    if len(sql_results) == 2:
                        if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                            search_result.actual_season = int(sql_results[1]['season'])
                            search_result.actual_episodes = [int(sql_results[1]['episode'])]
                            search_result.same_day_special = True
                        elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                            search_result.actual_season = int(sql_results[0]['season'])
                            search_result.actual_episodes = [int(sql_results[0]['episode'])]
                            search_result.same_day_special = True
                    elif len(sql_results) != 1:
                        log.warning(
                            "Tried to look up the date for the episode {0} but the database didn't return proper "
                            'results, skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue

                    # @TODO: Need to verify and test this.
                    if search_result.result_wanted and not search_result.same_day_special:
                        search_result.actual_season = int(sql_results[0]['season'])
                        search_result.actual_episodes = [int(sql_results[0]['episode'])]

    cl = []
    # Iterate again over the search results, and see if there is anything we want.
    for search_result in search_results:
        # Try to cache the item if we want to.
        cache_result = search_result.add_result_to_cache(self.cache)
        if cache_result is not None:
            cl.append(cache_result)

        if not search_result.result_wanted:
            log.debug("We aren't interested in this result: {0} with url: {1}",
                      search_result.name, search_result.url)
            continue

        log.debug('Found result {0} at {1}', search_result.name, search_result.url)

        search_result.create_episode_object()
        # result = self.get_result(episode_object, search_result)
        search_result.finish_search_result(self)

        if not search_result.actual_episodes:
            episode_number = SEASON_RESULT
            log.debug('Found season pack result {0} at {1}', search_result.name, search_result.url)
        elif len(search_result.actual_episodes) == 1:
            episode_number = search_result.actual_episode
            log.debug('Found single episode result {0} at {1}', search_result.name, search_result.url)
        else:
            episode_number = MULTI_EP_RESULT
            log.debug('Found multi-episode ({0}) result {1} at {2}',
                      ', '.join(map(str, search_result.parsed_result.episode_numbers)),
                      search_result.name, search_result.url)

        if episode_number not in results:
            results[episode_number] = [search_result]
        else:
            results[episode_number].append(search_result)

    if cl:
        # Access to a protected member of a client class
        db = self.cache._get_db()
        db.mass_action(cl)

    return results
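# A hedged sketch (not from the source) of consuming find_search_results: the
# returned dict maps an episode number -- or the SEASON_RESULT /
# MULTI_EP_RESULT sentinels -- to a list of search results, so a caller might
# pick the best-quality result per key like this (provider/series/episodes
# are assumed to exist):
#
#   results = provider.find_search_results(series, episodes, search_mode='eponly')
#   for episode_number, candidates in results.items():
#       best = max(candidates, key=lambda result: result.quality)
#       print(episode_number, best.name, best.url)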