def config_system(monkeypatch):
    """Build the expected 'system' config section data, with memory usage mocked."""
    def memory_usage_mock(*args, **kwargs):
        return '124.86 MB'
    monkeypatch.setattr(helpers, 'memory_usage', memory_usage_mock)

    section_data = {}

    section_data['memoryUsage'] = memory_usage_mock()
    section_data['schedulers'] = [{'key': scheduler[0], 'name': scheduler[1]}
                                  for scheduler in all_schedulers]
    section_data['showQueue'] = []
    section_data['postProcessQueue'] = []
    section_data['diskSpace'] = {
        'rootDir': [],
        'tvDownloadDir': {
            'freeSpace': False,
            'location': None,
            'type': 'TV Download Directory'
        }
    }
    section_data['branch'] = app.BRANCH
    section_data['commitHash'] = app.CUR_COMMIT_HASH
    section_data['release'] = app.APP_VERSION
    section_data['sslVersion'] = app.OPENSSL_VERSION
    section_data['pythonVersion'] = sys.version
    section_data['databaseVersion'] = {}
    section_data['databaseVersion']['major'] = app.MAJOR_DB_VERSION
    section_data['databaseVersion']['minor'] = app.MINOR_DB_VERSION
    section_data['os'] = platform.platform()
    section_data['pid'] = app.PID
    section_data['locale'] = '.'.join([text_type(loc or 'Unknown') for loc in app.LOCALE])
    section_data['localUser'] = app.OS_USER or 'Unknown'
    section_data['timezone'] = app_timezone.tzname(datetime.datetime.now())
    section_data['programDir'] = app.PROG_DIR
    section_data['dataDir'] = app.DATA_DIR
    section_data['configFile'] = app.CONFIG_FILE
    section_data['dbPath'] = db.DBConnection().path
    section_data['cacheDir'] = app.CACHE_DIR
    section_data['logDir'] = app.LOG_DIR
    section_data['appArgs'] = app.MY_ARGS
    section_data['webRoot'] = app.WEB_ROOT
    section_data['runsInDocker'] = bool(app.RUNS_IN_DOCKER)
    section_data['newestVersionMessage'] = app.NEWEST_VERSION_STRING
    section_data['ffprobeVersion'] = 'ffprobe not available'
    section_data['gitRemoteBranches'] = app.GIT_REMOTE_BRANCHES
    section_data['cpuPresets'] = cpu_presets
    section_data['news'] = {}
    section_data['news']['lastRead'] = app.NEWS_LAST_READ
    section_data['news']['latest'] = app.NEWS_LATEST
    section_data['news']['unread'] = app.NEWS_UNREAD

    return section_data


def retrieve_exceptions(force=False, exception_type=None):
    """
    Look up the exceptions from all sources.

    Parses the exceptions into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.

    :param force: If enabled this will force the refresh of scene exceptions using the medusa exceptions,
        xem exceptions and anidb exceptions.
    :param exception_type: Only refresh a specific exception_type.
        Options are: 'custom_exceptions', 'xem', 'anidb'
    """
    custom_exceptions = _get_custom_exceptions(force) if exception_type in ['custom_exceptions', None] \
        else defaultdict(dict)
    xem_exceptions = _get_xem_exceptions(force) if exception_type in ['xem', None] \
        else defaultdict(dict)
    anidb_exceptions = _get_anidb_exceptions(force) if exception_type in ['anidb', None] \
        else defaultdict(dict)

    # Combined scene exceptions from all sources
    combined_exceptions = combine_exceptions(
        # Custom scene exceptions
        custom_exceptions,
        # XEM scene exceptions
        xem_exceptions,
        # AniDB scene exceptions
        anidb_exceptions,
    )

    queries = []
    main_db_con = db.DBConnection()

    # TODO: See if this can be optimized
    for indexer in combined_exceptions:
        for series_id in combined_exceptions[indexer]:
            sql_ex = main_db_con.select(
                'SELECT title, indexer '
                'FROM scene_exceptions '
                'WHERE indexer = ? AND '
                'series_id = ?',
                [indexer, series_id])
            existing_exceptions = [x['title'] for x in sql_ex]

            for exception_dict in combined_exceptions[indexer][series_id]:
                for scene_exception, season in iteritems(exception_dict):
                    if scene_exception not in existing_exceptions:
                        queries.append([
                            'INSERT OR IGNORE INTO scene_exceptions '
                            '(indexer, series_id, title, season, custom) '
                            'VALUES (?,?,?,?,?)',
                            [indexer, series_id, scene_exception, season, False]
                        ])

    if queries:
        main_db_con.mass_action(queries)
        logger.info('Updated scene exceptions.')


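# Illustrative sketch (not part of the module): one plausible shape for the
# combine_exceptions() helper used above, assumed here to merge per-source
# mappings of {indexer: {series_id: [exception_dict, ...]}} into a single
# dict. The real helper may differ; duplicates are filtered downstream
# against existing_exceptions anyway.
from collections import defaultdict


def _combine_exceptions_sketch(*sources):
    combined = defaultdict(lambda: defaultdict(list))
    for source in sources:
        for indexer, series in source.items():
            for series_id, exceptions in series.items():
                # Later sources simply append after earlier ones.
                combined[indexer][series_id].extend(exceptions)
    return combined

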
def add_episode_watchlist(self):
    """Add episodes to the Trakt watchlist."""
    if not (app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT):
        return

    main_db_con = db.DBConnection()
    statuses = [SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED]
    sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                    'FROM tv_episodes AS e, tv_shows AS s ' \
                    'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid AND s.paused = 0 ' \
                    'AND e.status in ({0})'.format(','.join(['?'] * len(statuses)))
    sql_result = main_db_con.select(sql_selection, statuses)

    if not sql_result:
        return

    episodes = []
    shows = {}
    for cur_episode in sql_result:
        # Check if TRAKT supports that indexer
        if not get_trakt_indexer(cur_episode['indexer']):
            continue

        show_id = cur_episode['indexer'], cur_episode['indexer_id']
        episode = cur_episode['season'], cur_episode['episode']

        if show_id not in shows:
            shows[show_id] = []

        shows[show_id].append(episode)

    media_object_shows = []
    for show_id in shows:
        episodes = []
        show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
        for season, episode in shows[show_id]:
            # Skip episodes that are already on the list
            if self._check_list(
                indexer=show_obj.indexer, indexer_id=show_obj.series_id,
                season=season, episode=episode, list_type='Collection'
            ):
                continue

            log.info("Adding episode '{show}' {ep} to Trakt watchlist", {
                'show': show_obj.name,
                'ep': episode_num(season, episode)
            })
            episodes.append(show_obj.get_episode(season, episode))

        media_object_shows.append(create_episode_structure(show_obj, episodes))

    try:
        sync.add_to_watchlist({'shows': media_object_shows})
        self._get_episode_watchlist()
    except (TraktException, RequestException) as error:
        log.info('Unable to add episode to Trakt watchlist. Error: {error!r}', {
            'error': error
        })


def add_episode_trakt_collection(self):
    """Add all existing episodes to Trakt collections.

    For episodes that have a media file (location)
    """
    if not (app.TRAKT_SYNC and app.USE_TRAKT):
        return

    main_db_con = db.DBConnection()
    statuses = [DOWNLOADED, ARCHIVED]
    sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                    'FROM tv_episodes AS e, tv_shows AS s ' \
                    'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \
                    "AND e.status in ({0}) AND e.location <> ''".format(','.join(['?'] * len(statuses)))
    sql_result = main_db_con.select(sql_selection, statuses)

    if not sql_result:
        return

    episodes = []
    shows = {}
    for cur_episode in sql_result:
        # Check if TRAKT supports that indexer
        if not get_trakt_indexer(cur_episode['indexer']):
            continue

        show_id = cur_episode['indexer'], cur_episode['indexer_id']
        episode = cur_episode['season'], cur_episode['episode']

        if show_id not in shows:
            shows[show_id] = []

        shows[show_id].append(episode)

    media_object_shows = []
    for show_id in shows:
        episodes = []
        show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
        for season, episode in shows[show_id]:
            # Skip episodes that are already in the collection
            if self._check_list(
                indexer=show_obj.indexer, indexer_id=show_obj.series_id,
                season=season, episode=episode, list_type='Collection'
            ):
                continue

            log.info("Adding episode '{show}' {ep} to Trakt collection", {
                'show': show_obj.name,
                'ep': episode_num(season, episode)
            })
            episodes.append(show_obj.get_episode(season, episode))

        media_object_shows.append(create_episode_structure(show_obj, episodes))

    try:
        sync.add_to_collection({'shows': media_object_shows})
        self._get_show_collection()
    except TraktException as error:
        log.info('Unable to add episodes to Trakt collection. Error: {error!r}', {'error': error})


def find_release(ep_obj):
    """
    Find releases in history by show ID and season.

    Return None for release if multiple found or no release found.
    """
    release = None
    provider = None

    # Clear old snatches for this release if any exist
    failed_db_con = db.DBConnection('failed.db')
    failed_db_con.action(
        'DELETE FROM history '
        'WHERE showid = {0}'
        ' AND season = {1}'
        ' AND episode = {2}'
        ' AND date < ( SELECT max(date)'
        '              FROM history'
        '              WHERE showid = {0}'
        '              AND season = {1}'
        '              AND episode = {2}'
        ' )'.format(ep_obj.series.indexerid, ep_obj.season, ep_obj.episode)
    )

    # Search for release in snatch history
    results = failed_db_con.select(
        'SELECT release, provider, date '
        'FROM history '
        'WHERE showid=?'
        ' AND season=?'
        ' AND episode=?',
        [ep_obj.series.indexerid, ep_obj.season, ep_obj.episode]
    )

    for result in results:
        release = str(result['release'])
        provider = str(result['provider'])
        date = result['date']

        # Clear any incomplete snatch records for this release if any exist
        failed_db_con.action(
            'DELETE FROM history '
            'WHERE release=?'
            ' AND date!=?',
            [release, date]
        )

        # Found a previously failed release
        logger.log(u'Failed release found for {show} {ep}: {release}'.format(
            show=ep_obj.series.name,
            ep=episode_num(ep_obj.season, ep_obj.episode),
            release=result['release']), logger.DEBUG)
        return release, provider

    # Release was not found
    logger.log(u'No releases found for {show} {ep}'.format(
        show=ep_obj.series.name,
        ep=episode_num(ep_obj.season, ep_obj.episode)), logger.DEBUG)
    return release, provider


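# Standalone sketch of the "keep only the newest row" DELETE used in
# find_release() above, runnable against an in-memory SQLite table.
# Schema and values are illustrative only.
import sqlite3

_con = sqlite3.connect(':memory:')
_con.execute('CREATE TABLE history (showid INT, season INT, episode INT, date INT)')
_con.executemany('INSERT INTO history VALUES (1, 1, 1, ?)', [(1,), (2,), (3,)])
_con.execute(
    'DELETE FROM history '
    'WHERE showid = 1 AND season = 1 AND episode = 1 '
    'AND date < (SELECT max(date) FROM history '
    '            WHERE showid = 1 AND season = 1 AND episode = 1)'
)
# Only the newest snatch (date = 3) survives.
assert [row[3] for row in _con.execute('SELECT * FROM history')] == [3]

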
def subtitleMissed(self, whichSubs=None):
    t = PageTemplate(rh=self, filename='manage_subtitleMissed.mako')
    if not whichSubs:
        return t.render(whichSubs=whichSubs, show_names=None, ep_counts=None,
                        sorted_show_ids=None,
                        controller='manage', action='subtitleMissed')

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        'SELECT show_name, tv_shows.show_id, tv_shows.indexer, '
        'tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles '
        'FROM tv_episodes, tv_shows '
        'WHERE tv_shows.subtitles = 1 '
        'AND tv_episodes.status = ? '
        'AND tv_episodes.season != 0 '
        "AND tv_episodes.location != '' "
        'AND tv_episodes.showid = tv_shows.indexer_id '
        'AND tv_episodes.indexer = tv_shows.indexer '
        'ORDER BY show_name',
        [DOWNLOADED])

    ep_counts = {}
    show_names = {}
    sorted_show_ids = []
    for cur_status_result in status_results:
        if whichSubs == 'all':
            if not frozenset(subtitles.wanted_languages()).difference(
                    cur_status_result['subtitles'].split(',')):
                continue
        elif whichSubs in cur_status_result['subtitles']:
            continue

        # FIXME: This will cause multi-indexer results where series_id overlaps for different indexers.
        # Fix by using tv_shows.show_id instead.
        cur_indexer_id = int(cur_status_result['indexer'])
        cur_series_id = int(cur_status_result['indexer_id'])
        if (cur_indexer_id, cur_series_id) not in ep_counts:
            ep_counts[(cur_indexer_id, cur_series_id)] = 1
        else:
            ep_counts[(cur_indexer_id, cur_series_id)] += 1

        show_names[(cur_indexer_id, cur_series_id)] = cur_status_result['show_name']
        if (cur_indexer_id, cur_series_id) not in sorted_show_ids:
            sorted_show_ids.append((cur_indexer_id, cur_series_id))

    return t.render(whichSubs=whichSubs, show_names=show_names, ep_counts=ep_counts,
                    sorted_show_ids=sorted_show_ids,
                    title='Missing Subtitles', header='Missing Subtitles',
                    controller='manage', action='subtitleMissed')


def update_network_dict():
    """Update timezone information from Medusa repositories."""
    logger.log('Started updating network timezones', logger.DEBUG)
    url = '{base_url}/sb_network_timezones/network_timezones.txt'.format(base_url=app.BASE_PYMEDUSA_URL)
    response = session.get(url)
    if not response or not response.text:
        logger.log('Updating network timezones failed, this can happen from time to time. URL: %s' % url,
                   logger.INFO)
        load_network_dict()
        return

    remote_networks = {}
    try:
        for line in response.text.splitlines():
            (key, val) = line.strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            remote_networks[key] = val
    except (IOError, OSError) as error:
        logger.log('Unable to build the network dictionary. Aborting update. Error: {error}'.format(error=error),
                   logger.WARNING)
        return

    # Don't continue with an empty dict: `existing` would be False for every
    # network below, which would delete them all.
    if not remote_networks:
        logger.log(u'Unable to update network timezones as fetched network dict is empty', logger.WARNING)
        return

    cache_db_con = db.DBConnection('cache.db')
    sql_result = cache_db_con.select('SELECT network_name, timezone FROM network_timezones;')
    network_list = {
        row['network_name']: row['timezone']
        for row in sql_result
    }

    queries = []
    for network, timezone in iteritems(remote_networks):
        existing = network in network_list
        if not existing:
            queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);',
                            [network, timezone]])
        elif network_list[network] != timezone:
            queries.append(['UPDATE OR IGNORE network_timezones SET timezone = ? WHERE network_name = ?;',
                            [timezone, network]])

        if existing:
            # The network is still present remotely, so take it off the "to remove" list
            del network_list[network]

    if network_list:
        # Delete all networks that are no longer in the remote network list
        purged = [x for x in network_list]
        queries.append(['DELETE FROM network_timezones WHERE network_name IN (%s);' % ','.join(['?'] * len(purged)),
                        purged])

    if queries:
        cache_db_con.mass_action(queries)
        load_network_dict()
        logger.log(u'Finished updating network timezones', logger.DEBUG)


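# Standalone sketch of the diff-and-sync pattern update_network_dict() applies:
# given the cached rows and the freshly fetched mapping, compute the inserts,
# updates and deletes. Names and values are illustrative only.
def _diff_networks_sketch(cached, remote):
    inserts = {k: v for k, v in remote.items() if k not in cached}
    updates = {k: v for k, v in remote.items() if k in cached and cached[k] != v}
    deletes = [k for k in cached if k not in remote]
    return inserts, updates, deletes


# _diff_networks_sketch({'AMC': 'US/Eastern', 'Old': 'GMT'},
#                       {'AMC': 'US/Pacific', 'BBC': 'GMT'})
# -> ({'BBC': 'GMT'}, {'AMC': 'US/Pacific'}, ['Old'])

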
def saveNameCacheToDb():
    """Commit cache to database file."""
    cache_db_con = db.DBConnection('cache.db')

    for name, series in iteritems(name_cache):
        indexer_id, series_id = series
        cache_db_con.action(
            'INSERT OR REPLACE INTO scene_names (indexer_id, name, indexer) VALUES (?, ?, ?)',
            [series_id, name, indexer_id])


def _download_propers(self, proper_list):
    """
    Download proper (snatch it).

    :param proper_list: list of proper candidates to snatch
    """
    for candidate in proper_list:
        history_limit = datetime.datetime.today() - datetime.timedelta(days=30)

        main_db_con = db.DBConnection()
        history_results = main_db_con.select(
            b'SELECT resource, proper_tags FROM history '
            b'WHERE showid = ? '
            b'AND season = ? '
            b'AND episode IN ({episodes}) '
            b'AND quality = ? '
            b'AND date >= ? '
            b'AND action IN (?, ?, ?, ?)'.format(
                episodes=','.join(
                    text_type(ep) for ep in candidate.actual_episodes
                ),
            ),
            [candidate.indexerid, candidate.actual_season, candidate.quality,
             history_limit.strftime(History.date_format),
             DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST])

        proper_tags_len = len(candidate.proper_tags)
        proper_name = self._canonical_name(candidate.name, clear_extension=True)
        proper_name_ext = self._canonical_name(candidate.name)

        for result in history_results:
            proper_tags = result[b'proper_tags']
            if proper_tags and len(proper_tags.split('|')) >= proper_tags_len:
                log.debug(
                    'Current release has the same or more proper tags,'
                    ' skipping new proper {result!r}',
                    {'result': candidate.name},
                )
                break

            # Make sure that none of the existing history downloads are the same
            # proper we're trying to download; if the result already exists in
            # history we need to skip it.
            if proper_name == self._canonical_name(
                result[b'resource'], clear_extension=True
            ) or proper_name_ext == self._canonical_name(result[b'resource']):
                log.debug(
                    'This proper {result!r} is already in history, skipping it',
                    {'result': candidate.name},
                )
                break
        else:
            candidate.create_episode_object()

            # snatch it
            snatch_episode(candidate)


def set_last_refresh(source):
    """
    Update last cache update time for shows in list.

    :param source: scene exception source refreshed (e.g. xem)
    """
    cache_db_con = db.DBConnection('cache.db')
    cache_db_con.upsert(
        'scene_exceptions_refresh',
        {'last_refreshed': int(time.time())},
        {'list': source}
    )


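# Companion sketch (assumed, not from the module): how a caller might use the
# `last_refreshed` timestamp stored by set_last_refresh() to decide whether a
# source is stale. MAX_REFRESH_AGE is an illustrative constant.
import time

MAX_REFRESH_AGE = 24 * 60 * 60  # one day, in seconds


def _should_refresh_sketch(last_refreshed):
    # True once more than MAX_REFRESH_AGE seconds have passed since the
    # recorded refresh.
    return int(time.time()) - last_refreshed > MAX_REFRESH_AGE

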
def _del_all_keywords(self, table):
    """Remove all keywords for current show.

    :param table: SQL table to remove keywords from
    """
    main_db_con = db.DBConnection()
    main_db_con.action(
        b'DELETE FROM [{table}] '
        b'WHERE show_id = ?'.format(table=table),
        [self.show_id])


def remove_episode_watchlist(self):
    """Remove episode from Trakt watchlist."""
    if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT:
        main_db_con = db.DBConnection()
        statuses = [DOWNLOADED, ARCHIVED]
        sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \
                        b'FROM tv_episodes AS e, tv_shows AS s ' \
                        b'WHERE e.indexer = s.indexer ' \
                        b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(','.join(['?'] * len(statuses)))
        sql_result = main_db_con.select(sql_selection, statuses)
        episodes = [dict(i) for i in sql_result]

        if episodes:
            trakt_data = []

            for cur_episode in episodes:
                # Check if TRAKT supports that indexer
                if not get_trakt_indexer(cur_episode[b'indexer']):
                    continue

                if self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'showid'],
                                    season=cur_episode[b'season'], episode=cur_episode[b'episode']):
                    log.info("Removing episode '{show}' {ep} from Trakt watchlist", {
                        'show': cur_episode[b'show_name'],
                        'ep': episode_num(cur_episode[b'season'], cur_episode[b'episode'])
                    })
                    title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear'])
                    trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], title,
                                       cur_episode[b'startyear'], cur_episode[b'season'],
                                       cur_episode[b'episode']))

            if trakt_data:
                try:
                    data = self.trakt_bulk_data_generate(trakt_data)
                    self._request('sync/watchlist/remove', data, method='POST')
                    self._get_episode_watchlist()
                except (TraktException, AuthException, TokenExpiredException) as e:
                    log.info('Unable to remove episodes from Trakt watchlist. Error: {error}', {
                        'error': e.message
                    })


def episodeStatuses(self, whichStatus=None):
    if whichStatus:
        status_list = [int(whichStatus)]
        if status_list[0] == SNATCHED:
            status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
    else:
        status_list = []

    t = PageTemplate(rh=self, filename='manage_episodeStatuses.mako')

    # if we have no status then this is as far as we need to go
    if not status_list:
        return t.render(show_names=None, whichStatus=whichStatus,
                        ep_counts=None, sorted_show_ids=None,
                        controller='manage', action='episodeStatuses')

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        b'SELECT show_name, tv_shows.indexer, tv_shows.show_id, tv_shows.indexer_id AS indexer_id '
        b'FROM tv_episodes, tv_shows '
        b'WHERE season != 0 '
        b'AND tv_episodes.showid = tv_shows.indexer_id '
        b'AND tv_episodes.indexer = tv_shows.indexer '
        b'AND tv_episodes.status IN ({statuses}) '
        b'ORDER BY show_name'.format(statuses=','.join(['?'] * len(status_list))),
        status_list)

    ep_counts = {}
    show_names = {}
    sorted_show_ids = []
    for cur_status_result in status_results:
        cur_indexer = int(cur_status_result[b'indexer'])
        cur_series_id = int(cur_status_result[b'indexer_id'])
        if (cur_indexer, cur_series_id) not in ep_counts:
            ep_counts[(cur_indexer, cur_series_id)] = 1
        else:
            ep_counts[(cur_indexer, cur_series_id)] += 1

        show_names[(cur_indexer, cur_series_id)] = cur_status_result[b'show_name']
        if (cur_indexer, cur_series_id) not in sorted_show_ids:
            sorted_show_ids.append((cur_indexer, cur_series_id))

    return t.render(title='Episode Overview', header='Episode Overview',
                    whichStatus=whichStatus, show_names=show_names,
                    ep_counts=ep_counts, sorted_show_ids=sorted_show_ids,
                    controller='manage', action='episodeStatuses')


def add_episode_trakt_collection(self):
    """Add all existing episodes to Trakt collections.

    For episodes that have a media file (location)
    """
    if app.TRAKT_SYNC and app.USE_TRAKT:
        main_db_con = db.DBConnection()
        statuses = [DOWNLOADED, ARCHIVED]
        sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                        'FROM tv_episodes AS e, tv_shows AS s ' \
                        'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \
                        "AND e.status in ({0}) AND e.location <> ''".format(','.join(['?'] * len(statuses)))
        sql_result = main_db_con.select(sql_selection, statuses)

        if sql_result:
            trakt_data = []

            for cur_episode in sql_result:
                # Check if TRAKT supports that indexer
                if not get_trakt_indexer(cur_episode['indexer']):
                    continue

                if not self._check_list(
                        indexer=cur_episode['indexer'], indexer_id=cur_episode['indexer_id'],
                        season=cur_episode['season'], episode=cur_episode['episode'],
                        list_type='Collection'):
                    log.info("Adding episode '{show}' {ep} to Trakt collection", {
                        'show': cur_episode['show_name'],
                        'ep': episode_num(cur_episode['season'], cur_episode['episode'])
                    })
                    title = get_title_without_year(cur_episode['show_name'], cur_episode['startyear'])
                    trakt_data.append((cur_episode['indexer_id'], cur_episode['indexer'], title,
                                       cur_episode['startyear'], cur_episode['season'],
                                       cur_episode['episode']))

            if trakt_data:
                try:
                    data = self.trakt_bulk_data_generate(trakt_data)
                    self._request('sync/collection', data, method='POST')
                    self._get_show_collection()
                except (TraktException, AuthException, TokenExpiredException) as error:
                    log.info('Unable to add episodes to Trakt collection. Error: {error!r}',
                             {'error': error})


def get_categories():
    """Compile a structure with the sources and their available sub-categories."""
    recommended_db_con = db.DBConnection('recommended.db')
    results = recommended_db_con.select(
        'SELECT source, subcat FROM shows GROUP BY source, subcat')

    categories = defaultdict(list)
    for result in results:
        categories[result['source']].append(result['subcat'])

    return categories


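# Example of the mapping returned above (source and sub-category values are
# illustrative, not actual database contents):
# get_categories() -> defaultdict(<class 'list'>,
#                                 {'imdb': ['popular'],
#                                  'trakt': ['trending', 'popular']})

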
def resource_get_episode_status(self):
    """Return a list of episodes with a specific status."""
    status = self.get_argument('status', '').strip()

    status_list = [int(status)]
    if status_list:
        if status_list[0] == SNATCHED:
            status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
    else:
        status_list = []

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        'SELECT show_name, tv_shows.indexer, tv_shows.show_id, tv_shows.indexer_id AS indexer_id, '
        'tv_episodes.season AS season, tv_episodes.episode AS episode, tv_episodes.name as name '
        'FROM tv_episodes, tv_shows '
        'WHERE season != 0 '
        'AND tv_episodes.showid = tv_shows.indexer_id '
        'AND tv_episodes.indexer = tv_shows.indexer '
        'AND tv_episodes.status IN ({statuses}) '.format(
            statuses=','.join(['?'] * len(status_list))),
        status_list)

    episode_status = {}
    for cur_status_result in status_results:
        cur_indexer = int(cur_status_result['indexer'])
        cur_series_id = int(cur_status_result['indexer_id'])
        show_slug = SeriesIdentifier.from_id(cur_indexer, cur_series_id).slug

        if show_slug not in episode_status:
            episode_status[show_slug] = {
                'selected': True,
                'slug': show_slug,
                'name': cur_status_result['show_name'],
                'episodes': [],
                'showEpisodes': False
            }

        episode_status[show_slug]['episodes'].append({
            'episode': cur_status_result['episode'],
            'season': cur_status_result['season'],
            'selected': True,
            'slug': str(RelativeNumber(cur_status_result['season'], cur_status_result['episode'])),
            'name': cur_status_result['name']
        })

    return self._ok(data={'episodeStatus': episode_status})


def subtitleMissed(self, whichSubs=None):
    t = PageTemplate(rh=self, filename='manage_subtitleMissed.mako')
    if not whichSubs:
        return t.render(whichSubs=whichSubs, title='Missing Subtitles',
                        header='Missing Subtitles', topmenu='manage',
                        show_names=None, ep_counts=None, sorted_show_ids=None,
                        controller='manage', action='subtitleMissed')

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        b'SELECT show_name, tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles '
        b'FROM tv_episodes, tv_shows '
        b'WHERE tv_shows.subtitles = 1 '
        b"AND tv_episodes.status LIKE '%4' "
        b'AND tv_episodes.season != 0 '
        b"AND tv_episodes.location != '' "
        b'AND tv_episodes.showid = tv_shows.indexer_id '
        b'ORDER BY show_name')

    ep_counts = {}
    show_names = {}
    sorted_show_ids = []
    for cur_status_result in status_results:
        if whichSubs == 'all':
            if not frozenset(subtitles.wanted_languages()).difference(
                    cur_status_result[b'subtitles'].split(',')):
                continue
        elif whichSubs in cur_status_result[b'subtitles']:
            continue

        cur_indexer_id = int(cur_status_result[b'indexer_id'])
        if cur_indexer_id not in ep_counts:
            ep_counts[cur_indexer_id] = 1
        else:
            ep_counts[cur_indexer_id] += 1

        show_names[cur_indexer_id] = cur_status_result[b'show_name']
        if cur_indexer_id not in sorted_show_ids:
            sorted_show_ids.append(cur_indexer_id)

    return t.render(whichSubs=whichSubs, show_names=show_names, ep_counts=ep_counts,
                    sorted_show_ids=sorted_show_ids, title='Missing Subtitles',
                    header='Missing Subtitles', topmenu='manage',
                    controller='manage', action='subtitleMissed')


def add_episode_watchlist(self):
    """Add episode to Trakt watchlist."""
    if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT:
        main_db_con = db.DBConnection()
        status = Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + [WANTED]
        selection_status = [b'?' for _ in status]
        sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \
                        b'FROM tv_episodes AS e, tv_shows AS s ' \
                        b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid AND s.paused = 0 ' \
                        b'AND e.status in ({0})'.format(b','.join(selection_status))
        sql_result = main_db_con.select(sql_selection, status)
        episodes = [dict(i) for i in sql_result]

        if episodes:
            trakt_data = []

            for cur_episode in episodes:
                # Check if TRAKT supports that indexer
                if not get_trakt_indexer(cur_episode[b'indexer']):
                    continue

                if not self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'showid'],
                                        season=cur_episode[b'season'], episode=cur_episode[b'episode']):
                    log.info("Adding episode '{show}' {ep} to Trakt watchlist", {
                        'show': cur_episode[b'show_name'],
                        'ep': episode_num(cur_episode[b'season'], cur_episode[b'episode'])
                    })
                    title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear'])
                    trakt_data.append((cur_episode[b'showid'], cur_episode[b'indexer'], title,
                                       cur_episode[b'startyear'], cur_episode[b'season'],
                                       cur_episode[b'episode']))

            if trakt_data:
                try:
                    data = self.trakt_bulk_data_generate(trakt_data)
                    self._request('sync/watchlist', data, method='POST')
                    self._get_episode_watchlist()
                except (TraktException, AuthException, TokenExpiredException) as e:
                    log.info('Unable to add episode to Trakt watchlist. Error: {error}', {
                        'error': e.message
                    })


def get(self, series_slug, path_param):
    """
    Get history records.

    History records can be specified using a show slug.
    """
    sql_base = """
        SELECT rowid, date, action, quality,
               provider, version, proper_tags, manually_searched,
               resource, size, indexer_id, showid, season, episode
        FROM history
    """
    params = []

    arg_page = self._get_page()
    arg_limit = self._get_limit(default=50)

    if series_slug is not None:
        series_identifier = SeriesIdentifier.from_slug(series_slug)
        if not series_identifier:
            return self._bad_request('Invalid series')

        sql_base += ' WHERE indexer_id = ? AND showid = ?'
        params += [series_identifier.indexer.id, series_identifier.id]

    sql_base += ' ORDER BY date DESC'
    results = db.DBConnection().select(sql_base, params)

    def data_generator():
        """Read and paginate history records."""
        start = arg_limit * (arg_page - 1)

        for item in results[start:start + arg_limit]:
            d = {}
            d['id'] = item['rowid']
            d['series'] = SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug
            d['status'] = item['action']
            d['actionDate'] = item['date']
            d['resource'] = basename(item['resource'])
            d['size'] = item['size']
            d['properTags'] = item['proper_tags']
            d['statusName'] = statusStrings.get(item['action'])
            d['season'] = item['season']
            d['episode'] = item['episode']
            d['manuallySearched'] = bool(item['manually_searched'])
            d['provider'] = item['provider']

            yield d

    if not results:
        return self._not_found('History data not found')

    return self._paginate(data_generator=data_generator)


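# Standalone sketch of the offset/limit pagination data_generator() performs:
# pages are 1-based, so page N covers rows [(N - 1) * limit, N * limit).
# Names are illustrative only.
def _paginate_sketch(rows, page, limit):
    start = limit * (page - 1)
    for row in rows[start:start + limit]:
        yield row


# list(_paginate_sketch(list(range(10)), page=2, limit=3)) -> [3, 4, 5]

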
def _migrate_v1(self):
    """
    Reads in the old naming settings from your config and generates a new config template from them.
    """
    app.NAMING_PATTERN = self._name_to_pattern()
    log.info(u"Based on your old settings I'm setting your new naming pattern to: {pattern}",
             {'pattern': app.NAMING_PATTERN})

    app.NAMING_CUSTOM_ABD = bool(check_setting_int(self.config_obj, 'General', 'naming_dates', 0))

    if app.NAMING_CUSTOM_ABD:
        app.NAMING_ABD_PATTERN = self._name_to_pattern(True)
        log.info(u'Adding a custom air-by-date naming pattern to your config: {pattern}',
                 {'pattern': app.NAMING_ABD_PATTERN})
    else:
        app.NAMING_ABD_PATTERN = naming.name_abd_presets[0]

    app.NAMING_MULTI_EP = int(check_setting_int(self.config_obj, 'General', 'naming_multi_ep_type', 1))

    # see if any of their shows used season folders
    main_db_con = db.DBConnection()
    season_folder_shows = main_db_con.select(b'SELECT indexer_id FROM tv_shows WHERE flatten_folders = 0 LIMIT 1')

    # if any shows had season folders on then prepend season folder to the pattern
    if season_folder_shows:
        old_season_format = check_setting_str(self.config_obj, 'General', 'season_folders_format', 'Season %02d')
        if old_season_format:
            try:
                new_season_format = old_season_format % 9
                new_season_format = str(new_season_format).replace('09', '%0S')
                new_season_format = new_season_format.replace('9', '%S')

                log.info(
                    u'Changed season folder format from {old_season_format} to {new_season_format}, '
                    u'prepending it to your naming config',
                    {'old_season_format': old_season_format, 'new_season_format': new_season_format}
                )
                app.NAMING_PATTERN = new_season_format + os.sep + app.NAMING_PATTERN
            except (TypeError, ValueError):
                log.error(u"Can't change {old_season_format} to new season format",
                          {'old_season_format': old_season_format})

    # if no shows had it on then don't flatten any shows and don't put season folders in the config
    else:
        log.info(u"No shows were using season folders before so I'm disabling flattening on all shows")

        # don't flatten any shows at all
        main_db_con.action(b'UPDATE tv_shows SET flatten_folders = 0')

    app.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()


def _del_all_keywords(self, table):
    """Remove all keywords for current show.

    :param table: SQL table to remove keywords from
    """
    main_db_con = db.DBConnection()
    main_db_con.action(
        'DELETE FROM [{table}] '
        'WHERE show_id = ? AND indexer_id = ?'.format(table=table),
        [self.series_obj.series_id, self.series_obj.indexer]
    )


def _get_last_proper_search():
    """Find the last proper search in the DB."""
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select(b'SELECT last_proper_search FROM info')

    try:
        last_proper_search = datetime.date.fromordinal(int(sql_results[0][b'last_proper_search']))
    except Exception:
        return datetime.date.fromordinal(1)

    return last_proper_search


def getDBcompare(self):
    """
    Compare the current DB version with the new branch version.

    :return: 'upgrade', 'equal', or 'downgrade'
    """
    try:
        self.updater.need_update()
        cur_hash = str(self.updater.get_newest_commit_hash())
        assert len(cur_hash) == 40, 'Commit hash wrong length: {length} hash: {hash}'.format(
            length=len(cur_hash), hash=cur_hash)

        check_url = 'http://cdn.rawgit.com/{org}/{repo}/{commit}/medusa/databases/main_db.py'.format(
            org=app.GIT_ORG, repo=app.GIT_REPO, commit=cur_hash)
        response = self.session.get(check_url)

        # Get remote DB version
        match_max_db = re.search(r'MAX_DB_VERSION\s*=\s*(?P<version>\d{2,3})', response.text)
        new_branch_major_db_version = int(match_max_db.group('version')) if match_max_db else None
        match_minor_db = re.search(r'CURRENT_MINOR_DB_VERSION\s*=\s*(?P<version>\d{1,2})', response.text)
        new_branch_min_db_version = int(match_minor_db.group('version')) if match_minor_db else None

        # Check local DB version
        main_db_con = db.DBConnection()
        cur_branch_major_db_version, cur_branch_minor_db_version = main_db_con.checkDBVersion()

        if any([cur_branch_major_db_version is None, cur_branch_minor_db_version is None,
                new_branch_major_db_version is None, new_branch_min_db_version is None]):
            return 'Could not compare database versions, aborting'

        if new_branch_major_db_version > cur_branch_major_db_version:
            return 'upgrade'
        elif new_branch_major_db_version == cur_branch_major_db_version:
            if new_branch_min_db_version < cur_branch_minor_db_version:
                return 'downgrade'
            elif new_branch_min_db_version > cur_branch_minor_db_version:
                return 'upgrade'
            return 'equal'
        else:
            return 'downgrade'
    except Exception as e:
        return repr(e)


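# Standalone sketch of the remote-version scrape in getDBcompare(): pull the
# DB version constants out of a module's source text with a regex. The sample
# text is illustrative only.
import re

_sample = 'MAX_DB_VERSION = 44\nCURRENT_MINOR_DB_VERSION = 11\n'
_match = re.search(r'MAX_DB_VERSION\s*=\s*(?P<version>\d{2,3})', _sample)
assert (int(_match.group('version')) if _match else None) == 44

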
def log_success(release):
    """Log release as success on failed.db."""
    release = prepare_failed_name(release)

    failed_db_con = db.DBConnection('failed.db')
    failed_db_con.action(
        'DELETE '
        'FROM history '
        'WHERE release=?',
        [release]
    )


def test_scene_ex_reset_name_cache(self):
    # clear the exceptions
    test_cache_db_con = db.DBConnection('cache.db')
    test_cache_db_con.action("DELETE FROM scene_exceptions")

    # put something in the cache
    name_cache.addNameToCache('Cached Name', 0)

    # updating should not clear the cache this time since our exceptions didn't change
    scene_exceptions.retrieve_exceptions()
    self.assertEqual(name_cache.retrieveNameFromCache('Cached Name'), 0)


def _add_keywords(self, table, values):
    """Add keywords into database for current show.

    :param table: SQL table to add keywords to
    :param values: Values to be inserted in table
    """
    main_db_con = db.DBConnection()
    for value in values:
        main_db_con.action(
            b'INSERT INTO [{table}] (show_id, keyword) '
            b'VALUES (?,?)'.format(table=table),
            [self.show_id, value])


def _download_propers(self, proper_list):
    """
    Download proper (snatch it).

    :param proper_list: list of proper candidates to snatch
    """
    for cur_proper in proper_list:
        history_limit = datetime.datetime.today() - datetime.timedelta(days=30)

        main_db_con = db.DBConnection()
        history_results = main_db_con.select(
            b'SELECT resource FROM history '
            b'WHERE showid = ? '
            b'AND season = ? '
            b'AND episode = ? '
            b'AND quality = ? '
            b'AND date >= ? '
            b"AND (action LIKE '%02' OR action LIKE '%04' OR action LIKE '%09' OR action LIKE '%12')",
            [cur_proper.indexerid, cur_proper.actual_season, cur_proper.actual_episode,
             cur_proper.quality, history_limit.strftime(History.date_format)])

        # make sure that none of the existing history downloads are the same proper we're trying to download
        # if the result exists in history already we need to skip it
        clean_proper_name = self._canonical_name(cur_proper.name, clear_extension=True)
        if any(clean_proper_name == self._canonical_name(cur_result[b'resource'], clear_extension=True)
               for cur_result in history_results):
            log.debug('This proper {result!r} is already in history, skipping it',
                      {'result': cur_proper.name})
            continue
        else:
            # make sure that none of the existing history downloads are the same proper we're trying to download
            clean_proper_name = self._canonical_name(cur_proper.name)
            if any(clean_proper_name == self._canonical_name(cur_result[b'resource'])
                   for cur_result in history_results):
                log.debug('This proper {result!r} is already in history, skipping it',
                          {'result': cur_proper.name})
                continue

            cur_proper.create_episode_object()

            # snatch it
            snatch_episode(cur_proper)
            time.sleep(cpu_presets[app.CPU_PRESET])


def wanted_episodes(series_obj, from_date):
    """
    Get a list of episodes that we want to download.

    :param series_obj: Series these episodes are from
    :param from_date: Search from a certain date
    :return: list of wanted episodes
    """
    wanted = []
    allowed_qualities, preferred_qualities = series_obj.current_qualities
    all_qualities = list(set(allowed_qualities + preferred_qualities))

    log.debug(u'Seeing if we need anything from {0}', series_obj.name)
    con = db.DBConnection()

    sql_results = con.select(
        'SELECT status, quality, season, episode, manually_searched '
        'FROM tv_episodes '
        'WHERE indexer = ? '
        ' AND showid = ?'
        ' AND season > 0'
        ' AND airdate > ?',
        [series_obj.indexer, series_obj.series_id, from_date.toordinal()]
    )

    # check through the list of statuses to see if we want any
    for episode in sql_results:
        cur_status, cur_quality = int(episode['status'] or UNSET), int(episode['quality'] or Quality.NA)
        should_search, should_search_reason = Quality.should_search(
            cur_status, cur_quality, series_obj, episode['manually_searched']
        )
        if not should_search:
            continue

        log.debug(
            u'Searching for {show} {ep}. Reason: {reason}', {
                u'show': series_obj.name,
                u'ep': episode_num(episode['season'], episode['episode']),
                u'reason': should_search_reason,
            }
        )

        ep_obj = series_obj.get_episode(episode['season'], episode['episode'])
        ep_obj.wanted_quality = [
            quality
            for quality in all_qualities
            if Quality.is_higher_quality(
                cur_quality, quality, allowed_qualities, preferred_qualities
            )
        ]
        wanted.append(ep_obj)

    return wanted


def remove_episode_trakt_collection(self, filter_show=None):
    """Remove episode from trakt collection.

    For episodes that no longer have a media file (location)

    :param filter_show: optional. Only remove episodes from trakt collection for given shows
    """
    if app.TRAKT_SYNC_REMOVE and app.TRAKT_SYNC and app.USE_TRAKT:
        params = []
        main_db_con = db.DBConnection()
        selection_status = ['?' for _ in Quality.DOWNLOADED + Quality.ARCHIVED]
        sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, ' \
                        b'e.season, e.episode, e.status ' \
                        b'FROM tv_episodes AS e, tv_shows AS s WHERE e.indexer = s.indexer AND ' \
                        b's.indexer_id = e.showid and e.location = "" ' \
                        b'AND e.status in ({0})'.format(','.join(selection_status))
        if filter_show:
            sql_selection += b' AND s.indexer_id = ? AND e.indexer = ?'
            params = [filter_show.series_id, filter_show.indexer]

        sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED + params)
        episodes = [dict(e) for e in sql_result]

        if episodes:
            trakt_data = []

            for cur_episode in episodes:
                # Check if TRAKT supports that indexer
                if not get_trakt_indexer(cur_episode[b'indexer']):
                    continue

                if self._check_list(indexer=cur_episode[b'indexer'], indexer_id=cur_episode[b'indexer_id'],
                                    season=cur_episode[b'season'], episode=cur_episode[b'episode'],
                                    list_type='Collection'):
                    log.info("Removing episode '{show}' {ep} from Trakt collection", {
                        'show': cur_episode[b'show_name'],
                        'ep': episode_num(cur_episode[b'season'], cur_episode[b'episode'])
                    })
                    title = get_title_without_year(cur_episode[b'show_name'], cur_episode[b'startyear'])
                    trakt_data.append((cur_episode[b'indexer_id'], cur_episode[b'indexer'], title,
                                       cur_episode[b'startyear'], cur_episode[b'season'],
                                       cur_episode[b'episode']))

            if trakt_data:
                try:
                    data = self.trakt_bulk_data_generate(trakt_data)
                    self._request('sync/collection/remove', data, method='POST')
                    self._get_show_collection()
                except (TraktException, AuthException, TokenExpiredException) as e:
                    log.info('Unable to remove episodes from Trakt collection. Error: {error}', {
                        'error': e.message
                    })


def trim_history(days=30, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0):
    """Trim old results from failed history."""
    today = datetime.today()
    age = timedelta(days, seconds, microseconds, milliseconds, minutes, hours, weeks)

    failed_db_con = db.DBConnection('failed.db')
    failed_db_con.action(
        'DELETE FROM history '
        'WHERE date < ?',
        [(today - age).strftime(History.date_format)]
    )


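# Standalone sketch of the cutoff computation in trim_history(): anything
# dated before (today - age) is purged. History.date_format is assumed to be
# a '%Y%m%d%H%M%S'-style stamp; the format string here is illustrative.
from datetime import datetime, timedelta


def _cutoff_sketch(days=30):
    return (datetime.today() - timedelta(days=days)).strftime('%Y%m%d%H%M%S')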