def find_propers(self, search_date=None):
    results = []
    db = DBConnection()

    placeholders = ", ".join(["?"] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST))
    sql_results = db.select(
        f"SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate "
        f"FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) "
        f"WHERE e.airdate >= ? AND e.status IN ({placeholders}) AND e.is_proper = 0",
        [search_date.toordinal(), *Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST],
    )

    for result in sql_results or []:
        show = Show.find(settings.showList, int(result["showid"]))
        if show:
            episode = show.getEpisode(result["season"], result["episode"])

            for term in self.proper_strings:
                search_strings = self.get_episode_search_strings(episode, add_string=term)

                for search_string in search_strings:
                    for item in self.search(search_string):
                        title, url = self._get_title_and_url(item)
                        results.append(Proper(title, url, datetime.today(), show))

    return results
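# Illustrative sketch (not part of the original module): one way a caller might
# invoke find_propers() on a provider instance. `provider` and the two-day
# window are assumptions for the example.
def example_find_recent_propers(provider):
    from datetime import datetime, timedelta  # imported locally so the sketch stands alone

    # find_propers() calls search_date.toordinal(), so pass an explicit date
    # rather than relying on the None default.
    search_date = datetime.today() - timedelta(days=2)
    return provider.find_propers(search_date=search_date)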
def overall_stats():
    db = DBConnection()
    shows = settings.showList
    today = date.today().toordinal()

    downloaded_status = Quality.DOWNLOADED + Quality.ARCHIVED
    snatched_status = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
    total_status = [SKIPPED, WANTED]

    results = db.select(
        'SELECT airdate, status '
        'FROM tv_episodes '
        'WHERE season > 0 '
        'AND episode > 0 '
        'AND airdate > 1'
    )

    stats = {
        'episodes': {
            'downloaded': 0,
            'snatched': 0,
            'total': 0,
        },
        'shows': {
            'active': len([show for show in shows if show.paused == 0 and show.status == 'Continuing']),
            'total': len(shows),
        },
    }

    for result in results:
        if result['status'] in downloaded_status:
            stats['episodes']['downloaded'] += 1
            stats['episodes']['total'] += 1
        elif result['status'] in snatched_status:
            stats['episodes']['snatched'] += 1
            stats['episodes']['total'] += 1
        elif result['airdate'] <= today and result['status'] in total_status:
            stats['episodes']['total'] += 1

    return stats
class History(object):
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        self.db = DBConnection()

    def remove(self, toRemove):
        """
        Removes the selected history
        :param toRemove: Contains the properties of the log entries to remove
        """
        query = ''

        for item in toRemove:
            query = query + ' OR ' if query != '' else ''
            query = query + '(date IN ({0}) AND showid = {1} ' \
                            'AND season = {2} AND episode = {3})'.format(
                                ','.join(item['dates']), item['show_id'],
                                item['season'], item['episode'])

        self.db.action('DELETE FROM history WHERE ' + query)

    def clear(self):
        """
        Clear all the history
        """
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE 1 = 1'
        )

    def get(self, limit=100, action=None):
        """
        :param limit: The maximum number of elements to return
        :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else
                       or no value will return everything (up to ``limit``)
        :return: The last ``limit`` elements of type ``action`` in the history
        """
        actions = History._get_actions(action)
        limit = History._get_limit(limit)

        common_sql = 'SELECT action, date, episode, provider, h.quality, resource, season, show_name, showid ' \
                     'FROM history h, tv_shows s ' \
                     'WHERE h.showid = s.indexer_id '
        filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
        order_sql = 'ORDER BY date DESC '

        if limit == 0:
            if actions:
                results = self.db.select(common_sql + filter_sql + order_sql, actions)
            else:
                results = self.db.select(common_sql + order_sql)
        else:
            if actions:
                results = self.db.select(common_sql + filter_sql + order_sql + 'LIMIT ?', actions + [limit])
            else:
                results = self.db.select(common_sql + order_sql + 'LIMIT ?', [limit])

        data = []

        for result in results:
            data.append({
                'action': result['action'],
                'date': result['date'],
                'episode': result['episode'],
                'provider': result['provider'],
                'quality': result['quality'],
                'resource': result['resource'],
                'season': result['season'],
                'show_id': result['showid'],
                'show_name': result['show_name'],
            })

        return data

    def trim(self):
        """
        Remove all elements older than 30 days from the history
        """
        # self.db.action("DELETE FROM history WHERE date < datetime('now', '-30 days')")
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE date < ?',
            [(datetime.today() - timedelta(days=30)).strftime(History.date_format)]
        )

    @staticmethod
    def _get_actions(action):
        action = action.lower() if isinstance(action, str) else ''

        if action == 'downloaded':
            return Quality.DOWNLOADED

        if action == 'snatched':
            return Quality.SNATCHED

        return []

    @staticmethod
    def _get_limit(limit):
        limit = try_int(limit, 0)

        return max(limit, 0)
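# Illustrative sketch (not part of the original module): typical use of the
# History helper above. The payload shape passed to remove() mirrors what
# remove() interpolates into its SQL; the identifiers and date string below
# are hypothetical example values.
def example_history_maintenance():
    history = History()

    # Last ten 'downloaded' entries, newest first (see History.get()).
    recent_downloads = history.get(limit=10, action='downloaded')

    # Drop a specific entry; 'dates' values must match the stored
    # '%Y%m%d%H%M%S' strings (History.date_format).
    history.remove([
        {'dates': ['20240101203000'], 'show_id': 12345, 'season': 1, 'episode': 2},
    ])

    # Purge everything older than 30 days.
    history.trim()

    return recent_downloads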
def find_search_results(self, show, episodes, search_mode, manual_search=False, download_current_quality=False):
    self._check_auth()
    self.show = show

    results = {}
    items_list = []
    searched_scene_season = None

    for episode in episodes:
        cache_result = self.cache.search_cache(episode, manual_search=manual_search,
                                               down_cur_quality=download_current_quality)
        if cache_result:
            if episode.episode not in results:
                results[episode.episode] = cache_result
            else:
                results[episode.episode].extend(cache_result)

            continue

        if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
            continue

        search_strings = []
        searched_scene_season = episode.scene_season

        if len(episodes) > 1 and search_mode == 'sponly':
            search_strings = self.get_season_search_strings(episode)
        elif search_mode == 'eponly':
            search_strings = self.get_episode_search_strings(episode)

        for search_string in search_strings:
            items_list += self.search(search_string, ep_obj=episode)

    if len(results) == len(episodes):
        return results

    if items_list:
        items = {}
        unknown_items = []

        for item in items_list:
            quality = self.get_quality(item, anime=show.is_anime)

            if quality == Quality.UNKNOWN:
                unknown_items.append(item)
            elif quality == Quality.NONE:
                pass  # Skipping an HEVC when HEVC is not allowed by settings
            else:
                if quality not in items:
                    items[quality] = []
                items[quality].append(item)

        items_list = list(chain(*[v for (k_, v) in sorted(items.items(), reverse=True)]))
        items_list += unknown_items

    cl = []

    for item in items_list:
        title, url = self._get_title_and_url(item)
        seeders, leechers = self._get_seeders_and_leechers(item)
        size = self._get_size(item)

        try:
            parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title)
        except (InvalidNameException, InvalidShowException) as error:
            logger.debug("{0}".format(error))
            continue

        show_object = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version
        add_cache_entry = False

        if not (show_object.air_by_date or show_object.sports):
            if search_mode == 'sponly':
                if parse_result.episode_numbers:
                    logger.debug(
                        'This is supposed to be a season pack search but the result {0} is not a valid season pack, '
                        'skipping it'.format(title))
                    add_cache_entry = True
                elif not [ep for ep in episodes
                          if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]]:
                    logger.debug(
                        'This season result {0} is for a season we are not searching for, skipping it'.format(title))
                    add_cache_entry = True
            else:
                if not all([
                    parse_result.season_number is not None,
                    parse_result.episode_numbers,
                    [ep for ep in episodes
                     if (ep.season, ep.scene_season)[ep.show.is_scene] ==
                     (parse_result.season_number, parse_result.scene_season)[ep.show.is_scene]
                     and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers]
                ]) and not all([
                    # fallback for anime on absolute numbering
                    parse_result.is_anime,
                    parse_result.ab_episode_numbers is not None,
                    [ep for ep in episodes
                     if ep.show.is_anime and ep.absolute_number in parse_result.ab_episode_numbers]
                ]):
                    logger.info(
                        'The result {0} doesn\'t seem to match an episode that we are currently trying to snatch, '
                        'skipping it'.format(title))
                    add_cache_entry = True

            if not add_cache_entry:
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            same_day_special = False

            if not parse_result.is_air_by_date:
                logger.debug(
                    'This is supposed to be a date search but the result {0} didn\'t parse as one, '
                    'skipping it'.format(title))
                add_cache_entry = True
            else:
                air_date = parse_result.air_date.toordinal()
                db = DBConnection()
                sql_results = db.select(
                    'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                    [show_object.indexerid, air_date])

                if len(sql_results) == 2:
                    if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                        actual_season = int(sql_results[1]['season'])
                        actual_episodes = [int(sql_results[1]['episode'])]
                        same_day_special = True
                    elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                        actual_season = int(sql_results[0]['season'])
                        actual_episodes = [int(sql_results[0]['episode'])]
                        same_day_special = True
                elif len(sql_results) != 1:
                    logger.warning(
                        'Tried to look up the date for the episode {0} but the database didn\'t give proper results, '
                        'skipping it'.format(title))
                    add_cache_entry = True

            if not add_cache_entry and not same_day_special:
                actual_season = int(sql_results[0]['season'])
                actual_episodes = [int(sql_results[0]['episode'])]

        if add_cache_entry:
            logger.debug('Adding item from search to cache: {0}'.format(title))
            ci = self.cache._add_cache_entry(title, url, size, seeders, leechers, parse_result=parse_result)

            if ci is not None:
                cl.append(ci)

            continue

        episode_wanted = True

        for episode_number in actual_episodes:
            if not show_object.wantEpisode(actual_season, episode_number, quality, manual_search,
                                           download_current_quality):
                episode_wanted = False
                break

        if not episode_wanted:
            logger.debug(_('Ignoring result ') + f'{title}.')
            continue

        logger.debug(_('Found result {title} at {url}'.format(title=title, url=url)))

        episode_object = []
        for current_episode in actual_episodes:
            episode_object.append(show_object.getEpisode(actual_season, current_episode))

        result = self.get_result(episode_object)
        result.show = show_object
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        if len(episode_object) == 1:
            episode_number = episode_object[0].episode
            logger.debug('Single episode result.')
        elif len(episode_object) > 1:
            episode_number = MULTI_EP_RESULT
            logger.debug(
                'Separating multi-episode result to check for later - result contains episodes: {0}'.format(
                    parse_result.episode_numbers))
        elif len(episode_object) == 0:
            episode_number = SEASON_RESULT
            logger.debug('Separating full season result to check for later')

        if episode_number not in results:
            results[episode_number] = [result]
        else:
            results[episode_number].append(result)

    if cl:
        # Access to a protected member of a client class
        cache_db = self.cache._get_db()
        cache_db.mass_upsert('results', cl)

    return results
def get_coming_episodes(categories, sort, group, paused=settings.COMING_EPS_DISPLAY_PAUSED):
    """
    :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
    :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
    :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
    :param paused: ``True`` to include paused shows, ``False`` otherwise
    :return: The list of coming episodes
    """
    categories = ComingEpisodes._get_categories(categories)
    sort = ComingEpisodes._get_sort(sort)

    today = date.today().toordinal()
    recently = (date.today() - timedelta(days=settings.COMING_EPS_MISSED_RANGE)).toordinal()
    next_week = (date.today() + timedelta(days=7)).toordinal()

    db = DBConnection(row_type='dict')
    fields_to_select = ', '.join([
        'airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id',
        'e.location', 'name', 'network', 'paused', 'quality', 'runtime', 'season', 'show_name', 'showid',
        'e.status as epstatus', 's.status'
    ])

    status_list = [WANTED, UNAIRED] + SNATCHED

    sql_l = []
    for show_obj in settings.showList:
        next_air_date = show_obj.nextEpisode()
        sql_l.append([
            'SELECT DISTINCT {0} '.format(fields_to_select) +
            'FROM tv_episodes e, tv_shows s '
            'WHERE showid = ? '
            'AND airdate <= ? '
            'AND airdate >= ? '
            'AND s.indexer_id = e.showid '
            'AND e.status IN (' + ','.join(['?'] * len(status_list)) + ')',
            [show_obj.indexerid, next_air_date or today, recently] + status_list
        ])

    results = []
    for sql_i in sql_l:
        if results:
            results += db.select(*sql_i)
        else:
            results = db.select(*sql_i)

    for index, item in enumerate(results):
        results[index]['localtime'] = sbdatetime.convert_to_setting(
            parse_date_time(item['airdate'], item['airs'], item['network']))
        results[index]['snatchedsort'] = int(not results[index]['epstatus'] in SNATCHED)

    results.sort(key=ComingEpisodes.sorts[sort])

    if not group:
        return results

    grouped_results = ComingEpisodes._get_categories_map(categories)

    for result in results:
        if result['paused'] and not paused:
            continue

        result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        result['airdate'] = result['localtime'].toordinal()

        if result['epstatus'] in SNATCHED:
            if result['location']:
                continue
            else:
                category = 'snatched'
        elif result['airdate'] < today:
            category = 'missed'
        elif result['airdate'] >= next_week:
            category = 'later'
        elif result['airdate'] == today:
            category = 'today'
        else:
            category = 'soon'

        if len(categories) > 0 and category not in categories:
            continue

        if not result['network']:
            result['network'] = ''

        result['quality'] = get_quality_string(result['quality'])
        result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
        result['weekday'] = 1 + result['localtime'].weekday()
        result['tvdbid'] = result['indexer_id']
        result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
        result['localtime'] = result['localtime'].toordinal()

        grouped_results[category].append(result)

    return grouped_results
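# Illustrative sketch (not part of the original module): grouping upcoming
# episodes by category. Assumes get_coming_episodes() is exposed on the
# ComingEpisodes class (its use of ComingEpisodes helpers suggests so), that
# the category names match the branches above, and that 'date' is a valid key
# of ComingEpisodes.sorts; all of these are assumptions for the example.
def example_upcoming_by_category():
    grouped = ComingEpisodes.get_coming_episodes(
        categories=['missed', 'today', 'soon', 'later'],
        sort='date',
        group=True,
    )
    # Summarize how many episodes fall into each requested category.
    return {category: len(episodes) for category, episodes in grouped.items()}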
class History(object): date_format = "%Y%m%d%H%M%S" def __init__(self): self.db = DBConnection() def remove(self, toRemove): """ Removes the selected history :param toRemove: Contains the properties of the log entries to remove """ query = "" for item in toRemove: query = query + " OR " if query != "" else "" query = query + "(date IN ({0}) AND showid = {1} " "AND season = {2} AND episode = {3})".format( ",".join(item["dates"]), item["show_id"], item["season"], item["episode"] ) self.db.action("DELETE FROM history WHERE " + query) def clear(self): """ Clear all the history """ self.db.action("DELETE " "FROM history " "WHERE 1 = 1") def get(self, limit=100, action=None): """ :param limit: The maximum number of elements to return :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else or no value will return everything (up to ``limit``) :return: The last ``limit`` elements of type ``action`` in the history """ actions = History._get_actions(action) limit = History._get_limit(limit) common_sql = ( "SELECT action, date, episode, provider, h.quality, resource, season, show_name, showid " "FROM history h, tv_shows s " "WHERE h.showid = s.indexer_id " ) filter_sql = "AND action in (" + ",".join(["?"] * len(actions)) + ") " order_sql = "ORDER BY date DESC " if limit == 0: if actions: results = self.db.select(common_sql + filter_sql + order_sql, actions) else: results = self.db.select(common_sql + order_sql) else: if actions: results = self.db.select(common_sql + filter_sql + order_sql + "LIMIT ?", actions + [limit]) else: results = self.db.select(common_sql + order_sql + "LIMIT ?", [limit]) data = [] for result in results: data.append( { "action": result["action"], "date": result["date"], "episode": result["episode"], "provider": result["provider"], "quality": result["quality"], "resource": result["resource"], "season": result["season"], "show_id": result["showid"], "show_name": result["show_name"], } ) return data def trim(self): """ Remove all elements older than 30 days from the history """ # self.db.action("DELETE FROM history WHERE date < datetime('now', '-30 days')") self.db.action("DELETE " "FROM history " "WHERE date < ?", [(datetime.today() - timedelta(days=30)).strftime(History.date_format)]) @staticmethod def _get_actions(action): action = action.lower() if isinstance(action, str) else "" if action == "downloaded": return Quality.DOWNLOADED if action == "snatched": return Quality.SNATCHED return [] @staticmethod def _get_limit(limit): limit = try_int(limit, 0) return max(limit, 0)