def sync_trakt_shows(self):
    """Sync the Trakt shows watchlist into the local show list.

    Each watchlisted show is looked up locally by every configured
    indexer id, then by the IMDB and Trakt external ids. Shows that are
    not found are added using the default Trakt indexer.
    """
    if not self.show_watchlist:
        log.info('No shows found in your Trakt watchlist. Nothing to sync')
        return

    trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)
    for watchlisted_show in self.show_watchlist:
        trakt_show = watchlisted_show['show']

        # Disambiguate shows whose Trakt slug ends with the year.
        if trakt_show['year'] and trakt_show['ids']['slug'].endswith(str(trakt_show['year'])):
            show_name = '{title} ({year})'.format(title=trakt_show['title'], year=trakt_show['year'])
        else:
            show_name = trakt_show['title']

        show = None
        # Bug fix: `indexer` could be unbound (empty indexerConfig) or stale
        # when used after the loop; initialize and guard below.
        indexer = None
        for i in indexerConfig:
            trakt_indexer = get_trakt_indexer(i)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            indexer = indexerConfig[i]['id']
            show = Show.find(app.showList, indexer_id, indexer)
            if show:
                break

        if not show:
            # If can't find with available indexers try IMDB
            trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find(app.showList, indexer_id, EXTERNAL_IMDB)
        if not show:
            # If can't find with available indexers try TRAKT
            trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find(app.showList, indexer_id, EXTERNAL_TRAKT)

        if show:
            continue  # already in the library

        indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
        if int(app.TRAKT_METHOD_ADD) != 2:
            self.add_show(trakt_default_indexer, indexer_id, show_name, SKIPPED)
        else:
            self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED)

        # Method 1: also mark the pilot of the freshly added show as wanted.
        if int(app.TRAKT_METHOD_ADD) == 1 and indexer:
            new_show = Show.find(app.showList, indexer_id, indexer)
            if new_show:
                set_episode_to_wanted(new_show, 1, 1)
            else:
                # Bug fix: the two literals used to concatenate without a
                # separating space ("...show.Pilot will...").
                log.warning('Unable to find the new added show. '
                            'Pilot will be set to wanted in the next Trakt run')
                self.todoWanted.append(indexer_id)
    log.debug('Synced shows with Trakt watchlist')
def sync_trakt_shows(self):
    """Sync the Trakt shows watchlist into the local show list.

    Each watchlisted show is looked up locally by every configured
    indexer id, then by the IMDB and Trakt external ids. Shows that are
    not found are added using the default Trakt indexer.
    """
    if not self.show_watchlist:
        log.info('No shows found in your Trakt watchlist. Nothing to sync')
        return

    trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)
    for watchlisted_show in self.show_watchlist:
        trakt_show = watchlisted_show['show']

        # Disambiguate shows whose Trakt slug ends with the year.
        if trakt_show['year'] and trakt_show['ids']['slug'].endswith(str(trakt_show['year'])):
            show_name = '{title} ({year})'.format(title=trakt_show['title'], year=trakt_show['year'])
        else:
            show_name = trakt_show['title']

        show = None
        indexer = None  # stays None when no configured indexer matched
        for i in indexerConfig:
            trakt_indexer = get_trakt_indexer(i)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            indexer = indexerConfig[i]['id']
            show = Show.find_by_id(app.showList, indexer, indexer_id)
            if show:
                break

        if not show:
            # If can't find with available indexers try IMDB
            trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
        if not show:
            # If can't find with available indexers try TRAKT
            trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find_by_id(app.showList, EXTERNAL_TRAKT, indexer_id)

        if show:
            continue  # already in the library

        indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
        if int(app.TRAKT_METHOD_ADD) != 2:
            self.add_show(trakt_default_indexer, indexer_id, show_name, SKIPPED)
        else:
            self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED)

        # Method 1: also mark the pilot of the freshly added show as wanted.
        if int(app.TRAKT_METHOD_ADD) == 1 and indexer:
            new_show = Show.find_by_id(app.showList, indexer, indexer_id)
            if new_show:
                set_episode_to_wanted(new_show, 1, 1)
            else:
                # Bug fix: the two literals used to concatenate without a
                # separating space ("...show.Pilot will...").
                log.warning('Unable to find the new added show. '
                            'Pilot will be set to wanted in the next Trakt run')
                self.todoWanted.append(indexer_id)
    log.debug('Synced shows with Trakt watchlist')
def sync_trakt_episodes(self):
    """Sync the Trakt episodes watchlist into the local show list."""
    if not self.episode_watchlist:
        log.info('No episodes found in your Trakt watchlist. Nothing to sync')
        return

    added_shows = []
    trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

    for watchlist_item in self.episode_watchlist:
        trakt_show = watchlist_item.show
        trakt_episode = watchlist_item.episode
        trakt_season = watchlist_item.season

        show = None
        for i in indexerConfig:
            trakt_indexer = get_trakt_indexer(i)
            if not trakt_indexer:
                continue  # indexer has no Trakt id mapping
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            indexer = indexerConfig[i]['id']
            show = Show.find_by_id(app.showList, indexer, indexer_id)
            if show:
                break

        if not show:
            # If can't find with available indexers try IMDB
            trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
        if not show:
            # If can't find with available indexers try TRAKT
            trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find_by_id(app.showList, EXTERNAL_TRAKT, indexer_id)

        if not show:
            # If can't find show, add it with the default trakt indexer
            trakt_indexer = get_trakt_indexer(trakt_default_indexer)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            # Only add show if we didn't add it before
            if indexer_id not in added_shows:
                self.add_show(trakt_default_indexer, indexer_id, trakt_show['title'], SKIPPED)
                added_shows.append(indexer_id)
        elif trakt_season != 0 and not show.paused:
            # Idiom fix: was `not trakt_season == 0`. Specials (season 0)
            # are skipped, as are paused shows.
            set_episode_to_wanted(show, trakt_season, trakt_episode)

    log.debug('Synced episodes with Trakt watchlist')
def get_episodes(search_thread, searchstatus):
    """Build a summary dict for each episode in *search_thread*'s segment."""
    episode_summaries = []
    # Legacy lookup: Show.find is called with only the indexer id here.
    show_obj = Show.find(app.showList, int(search_thread.show.indexerid))
    if not show_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return episode_summaries

    segment = search_thread.segment
    if not isinstance(segment, list):
        segment = [segment]
        search_thread.segment = segment

    for segment_ep in segment:
        ep = show_obj.get_episode(segment_ep.season, segment_ep.episode)
        episode_summaries.append({
            'show': show_obj.indexerid,
            'episode': ep.episode,
            'episodeindexid': ep.indexerid,
            'season': ep.season,
            'searchstatus': searchstatus,
            'status': statusStrings[ep.status],
            'quality': get_quality_class(ep),
            'overview': Overview.overviewStrings[show_obj.get_overview(
                ep.status, manually_searched=ep.manually_searched)],
        })
    return episode_summaries
def test_validate_indexer_id(self):
    """Check Show._validate_indexer_id for indexer 1 on a two-show list."""
    app.QUALITY_DEFAULT = Quality.FULLHDTV
    app.showList = []
    show123 = TestTVShow(1, 123)
    show789 = TestTVShow(1, 789)
    app.showList = [show123, show789]

    series_id_list = [None, '', u'789', 123]
    results_list = [(None, None), (None, None), (None, show789), (None, show123)]

    self.assertEqual(
        len(series_id_list), len(results_list),
        'Number of parameters (%d) and results (%d) does not match'
        % (len(series_id_list), len(results_list))
    )
    for series_id, expected in zip(series_id_list, results_list):
        self.assertEqual(Show._validate_indexer_id(1, series_id), expected)
def get_episode(series_id, season=None, episode=None, absolute=None, indexer=None):
    """
    Get a specific episode object based on show, season and episode number.

    :param series_id: Series id (corrected: was documented as ``show``)
    :param season: Season number
    :param episode: Episode number
    :param absolute: Optional if the episode number is a scene absolute number
    :param indexer: Optional indexer id.
    :return: episode object, or an error string when lookup fails
    """
    if series_id is None:
        return 'Invalid show parameters'

    series_obj = Show.find_by_id(app.showList, indexer, series_id)

    if series_obj is None:
        return 'Invalid show parameters'

    if absolute:
        ep_obj = series_obj.get_episode(absolute_number=absolute)
    elif season and episode:
        # NOTE(review): season/episode 0 are falsy and fall through to the
        # 'Invalid parameters' branch — presumably specials are handled
        # elsewhere; confirm.
        ep_obj = series_obj.get_episode(season, episode)
    else:
        return 'Invalid parameters'

    if ep_obj is None:
        return 'Unable to retrieve episode'

    return ep_obj
def find_propers(self, proper_candidates):
    """Search this provider for proper releases of the given candidates."""
    proper_results = []
    for candidate in proper_candidates:
        series_obj = Show.find_by_id(app.showList, candidate['indexer'], candidate['showid'])
        if not series_obj:
            continue

        self.series = series_obj
        episode_obj = series_obj.get_episode(candidate['season'], candidate['episode'])

        for term in self.proper_strings:
            search_strings = self._get_episode_search_strings(episode_obj, add_string=term)
            for item in self.search(search_strings[0], ep_obj=episode_obj):
                search_result = self.get_result()
                proper_results.append(search_result)

                search_result.name, search_result.url = self._get_title_and_url(item)
                search_result.seeders, search_result.leechers = self._get_result_info(item)
                search_result.size = self._get_size(item)
                search_result.pubdate = self._get_pubdate(item)
                # This will be retrieved from the parser
                search_result.proper_tags = ''
                search_result.search_type = PROPER_SEARCH
                search_result.date = datetime.today()
                search_result.series = series_obj
    return proper_results
def add_show(indexer, indexer_id, show_name, status):
    """Add a new show with default settings."""
    if Show.find(app.showList, int(indexer_id)):
        return  # already in the library

    root_dirs = app.ROOT_DIRS
    location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None
    if not location:
        log.warning(
            "Error creating show '{show}' folder. No default root directory",
            {'show': show_name})
        return

    log.info(
        "Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}", {
            'show': show_name,
            'indexer_name': indexerConfig[indexer]['identifier'],
            'id': indexer_id
        })
    app.show_queue_scheduler.action.addShow(
        indexer, indexer_id, None,
        default_status=status,
        quality=int(app.QUALITY_DEFAULT),
        flatten_folders=int(app.FLATTEN_FOLDERS_DEFAULT),
        paused=app.TRAKT_START_PAUSED,
        default_status_after=status,
        root_dir=location)
def get_episodes(search_thread, searchstatus):
    """Build a summary dict for each episode in *search_thread*'s segment."""
    summaries = []
    # The show may have been deleted very recently; look it up again.
    series_obj = Show.find_by_id(app.showList, search_thread.show.indexer,
                                 search_thread.show.series_id)
    if not series_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return summaries

    segment = search_thread.segment
    if not isinstance(segment, list):
        segment = [segment]
        search_thread.segment = segment

    for segment_ep in segment:
        ep = series_obj.get_episode(segment_ep.season, segment_ep.episode)
        summaries.append({
            'indexer_id': series_obj.indexer,
            'series_id': series_obj.series_id,
            'episode': ep.episode,
            'episodeindexerid': ep.indexerid,
            'season': ep.season,
            'searchstatus': searchstatus,
            'status': statusStrings[ep.status],
            'quality_name': Quality.qualityStrings[ep.quality],
            'quality_style': get_quality_class(ep),
            'overview': Overview.overviewStrings[series_obj.get_overview(
                ep.status, ep.quality, manually_searched=ep.manually_searched)],
        })
    return summaries
def find_propers(self, proper_candidates):
    """Search this provider for proper releases of the given candidates."""
    found = []
    for candidate in proper_candidates:
        # Rows use bytes keys (legacy sqlite row handling in this version).
        if candidate[b'showid']:
            show_obj = Show.find(app.showList, int(candidate[b'showid']))
        else:
            show_obj = None
        if not show_obj:
            continue

        self.show = show_obj
        episode_obj = show_obj.get_episode(candidate[b'season'], candidate[b'episode'])

        for term in self.proper_strings:
            search_strings = self._get_episode_search_strings(episode_obj, add_string=term)
            for item in self.search(search_strings[0], ep_obj=episode_obj):
                search_result = self.get_result()
                found.append(search_result)

                search_result.name, search_result.url = self._get_title_and_url(item)
                search_result.seeders, search_result.leechers = self._get_result_info(item)
                search_result.size = self._get_size(item)
                search_result.pubdate = self._get_pubdate(item)
                # This will be retrieved from the parser
                search_result.proper_tags = ''
                search_result.search_type = PROPER_SEARCH
                search_result.date = datetime.today()
                search_result.show = show_obj
    return found
def test_validate_indexer_id(self):
    """Check Show._validate_indexer_id for indexer 1 on a two-show list."""
    app.QUALITY_DEFAULT = Quality.FULLHDTV
    app.showList = []
    show123 = TestTVShow(1, 123)
    show789 = TestTVShow(1, 789)
    app.showList = [show123, show789]

    series_id_list = [None, '', u'789', 123]
    results_list = [(None, None), (None, None), (None, show789), (None, show123)]
    self.assertEqual(
        len(series_id_list), len(results_list),
        'Number of parameters (%d) and results (%d) does not match'
        % (len(series_id_list), len(results_list)))

    for position, series_id in enumerate(series_id_list):
        self.assertEqual(Show._validate_indexer_id(1, series_id), results_list[position])
def backlogShow(self, indexer_id):
    """Queue a backlog search for one show, then redirect to the overview."""
    target_show = Show.find(app.showList, int(indexer_id))
    if target_show:
        app.backlog_search_scheduler.action.search_backlog([target_show])
    return self.redirect('/manage/backlogOverview/')
def get_episode(show, season=None, episode=None, absolute=None):
    """
    Get a specific episode object based on show, season and episode number.

    :param show: Show indexer id (corrected: was documented as "Season number")
    :param season: Season number
    :param episode: Episode number
    :param absolute: Optional if the episode number is a scene absolute number
    :return: episode object, or an error string when lookup fails
    """
    if show is None:
        return 'Invalid show parameters'

    show_obj = Show.find(app.showList, int(show))

    if show_obj is None:
        return 'Invalid show parameters'

    if absolute:
        ep_obj = show_obj.get_episode(absolute_number=absolute)
    elif season and episode:
        # NOTE(review): season/episode 0 are falsy and fall through to the
        # 'Invalid parameters' branch — confirm specials are handled elsewhere.
        ep_obj = show_obj.get_episode(season, episode)
    else:
        return 'Invalid parameters'

    if ep_obj is None:
        return 'Unable to retrieve episode'

    return ep_obj
def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=True):
    """Return the scene (season, episode) numbering for an episode.

    Prefers an explicit local scene mapping; otherwise falls back to the
    XEM mapping when *fallback_to_xem* is set, and finally to the numbering
    passed in, so the return values are always set.

    :param indexer_id: int show id
    :param indexer: int indexer id
    :param season: int
    :param episode: int
    :param fallback_to_xem: bool
        If set (the default), check xem for matches if there is no local
        scene numbering
    :return: (int, int) a tuple with (season, episode)
    """
    if indexer_id is None or season is None or episode is None:
        return season, episode

    show_obj = Show.find(app.showList, int(indexer_id))
    if show_obj and not show_obj.is_scene:
        # Scene numbering is disabled for this show: keep input numbering.
        return season, episode

    local_numbering = find_scene_numbering(int(indexer_id), int(indexer), season, episode)
    if local_numbering:
        return local_numbering

    if fallback_to_xem:
        xem_numbering = find_xem_numbering(int(indexer_id), int(indexer), season, episode)
        if xem_numbering:
            return xem_numbering

    return season, episode
def remove_episode_watchlist(self):
    """Remove downloaded/archived episodes from Trakt.

    NOTE(review): despite the method name and the log wording, this calls
    ``sync.remove_from_collection`` and filters with ``list_type='Collection'``
    — confirm whether the watchlist or the collection is intended.
    """
    if not (app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT):
        return

    main_db_con = db.DBConnection()
    statuses = [DOWNLOADED, ARCHIVED]
    sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                    'FROM tv_episodes AS e, tv_shows AS s ' \
                    'WHERE e.indexer = s.indexer ' \
                    'AND s.indexer_id = e.showid AND e.status in ({0})'.format(','.join(['?'] * len(statuses)))
    sql_result = main_db_con.select(sql_selection, statuses)
    if not sql_result:
        return

    # Group (season, episode) pairs per (indexer, series id).
    # Fix: removed a dead top-level `episodes = []`; it was always
    # reassigned in the loop below before any use.
    shows = {}
    for cur_episode in sql_result:
        # Check if TRAKT supports that indexer
        if not get_trakt_indexer(cur_episode['indexer']):
            continue
        show_id = cur_episode['indexer'], cur_episode['indexer_id']
        episode = cur_episode['season'], cur_episode['episode']
        if show_id not in shows:
            shows[show_id] = []
        shows[show_id].append(episode)

    media_object_shows = []
    for show_id in shows:
        episodes = []
        show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
        for season, episode in shows[show_id]:
            if not self._check_list(indexer=show_obj.indexer, indexer_id=show_obj.series_id,
                                    season=season, episode=episode,
                                    list_type='Collection'):
                continue
            log.info("Removing episode '{show}' {ep} from Trakt watchlist", {
                'show': show_obj.name,
                'ep': episode_num(season, episode)
            })
            episodes.append(show_obj.get_episode(season, episode))
        media_object_shows.append(create_episode_structure(show_obj, episodes))

    try:
        sync.remove_from_collection({'shows': media_object_shows})
        self._get_episode_watchlist()
    except TraktException as error:
        log.info('Unable to remove episodes from Trakt watchlist. Error: {error!r}',
                 {'error': error})
def test_validate_indexer_id(self):
    """Exercise Show._validate_indexer_id with valid and invalid id shapes."""
    app.QUALITY_DEFAULT = Quality.FULLHDTV
    app.showList = []
    show123 = TestTVShow(0, 123)
    show456 = TestTVShow(0, 456)
    show789 = TestTVShow(0, 789)
    app.showList = [show123, show456, show789]

    invalid_show_id = ('Invalid show ID', None)
    indexer_id_list = [
        None, '', u'',
        '123', u'123', '456', u'456', '789', u'789',
        123, 456, 789,
        ['123', '456'], [u'123', u'456'], [123, 456],
    ]
    results_list = [
        invalid_show_id, invalid_show_id, invalid_show_id,
        (None, show123), (None, show123), (None, show456),
        (None, show456), (None, show789), (None, show789),
        (None, show123), (None, show456), (None, show789),
        invalid_show_id, invalid_show_id, invalid_show_id,
    ]
    self.assertEqual(
        len(indexer_id_list), len(results_list),
        'Number of parameters (%d) and results (%d) does not match'
        % (len(indexer_id_list), len(results_list))
    )

    for indexer_id, expected in zip(indexer_id_list, results_list):
        self.assertEqual(Show._validate_indexer_id(indexer_id), expected)  # pylint: disable=protected-access
def get_episodes(search_thread, searchstatus):
    """Build a summary dict for each episode in *search_thread*'s segment."""
    summaries = []
    # The show may have been deleted very recently; look it up again.
    series_obj = Show.find_by_id(app.showList, search_thread.show.indexer,
                                 search_thread.show.series_id)
    if not series_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return summaries

    segment = search_thread.segment
    if not isinstance(segment, list):
        segment = [segment]
        search_thread.segment = segment

    for segment_ep in segment:
        ep = series_obj.get_episode(segment_ep.season, segment_ep.episode)
        summaries.append({
            'indexer_id': series_obj.indexer,
            'series_id': series_obj.series_id,
            'episode': ep.episode,
            'episodeindexerid': ep.indexerid,
            'season': ep.season,
            'searchstatus': searchstatus,
            'status': statusStrings[ep.status],
            'quality': get_quality_class(ep),
            'overview': Overview.overviewStrings[series_obj.get_overview(
                ep.status, manually_searched=ep.manually_searched)],
        })
    return summaries
def find_propers(self, proper_candidates):
    """Search this provider for proper releases of the given candidates."""
    proper_results = []
    for candidate in proper_candidates:
        series_obj = Show.find_by_id(app.showList, candidate['indexer'], candidate['showid'])
        if not series_obj:
            continue

        self.series = series_obj
        episode_obj = series_obj.get_episode(candidate['season'], candidate['episode'])

        for term in self.proper_strings:
            search_strings = self._get_episode_search_strings(episode_obj, add_string=term)
            for item in self.search(search_strings[0], ep_obj=episode_obj):
                search_result = self.get_result(series=series_obj, item=item)
                if search_result in proper_results:
                    continue  # skip duplicate results
                search_result.search_type = PROPER_SEARCH
                proper_results.append(search_result)
    return proper_results
def add_show(indexer, indexer_id, show_name, status):
    """Add a new show with default settings and wait for it to appear.

    NOTE(review): the pre-existence check always looks the show up under
    EXTERNAL_IMDB regardless of *indexer* — confirm this is intended.
    """
    if Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id):
        return

    root_dirs = app.ROOT_DIRS
    location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None
    if not location:
        log.warning(
            "Error creating show '{show}' folder. No default root directory",
            {'show': show_name})
        return

    log.info(
        "Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}", {
            'show': show_name,
            'indexer_name': indexerConfig[indexer]['identifier'],
            'id': indexer_id
        })

    allowed, preferred = Quality.split_quality(int(app.QUALITY_DEFAULT))
    quality = {'allowed': allowed, 'preferred': preferred}

    app.show_queue_scheduler.action.addShow(
        indexer, indexer_id, None,
        default_status=status,
        quality=quality,
        season_folders=int(app.SEASON_FOLDERS_DEFAULT),
        paused=app.TRAKT_START_PAUSED,
        default_status_after=status,
        root_dir=location)

    # Poll (blocking) up to three times for the queued add to complete.
    for _ in range(3):
        if Show.find_by_id(app.showList, indexer, indexer_id):
            return
        # Wait before show gets added and refreshed
        time.sleep(60)
    # Bug fix: the closing quote was misplaced in the original message
    # ("Error creating show '{show}. Please check logs' ").
    log.warning("Error creating show '{show}'. Please check logs",
                {'show': show_name})
def backlogShow(self, indexername, seriesid):
    """Queue a backlog search for one show, then redirect to the overview."""
    resolved_indexer = indexer_name_to_id(indexername)
    target_show = Show.find_by_id(app.showList, resolved_indexer, seriesid)
    if target_show:
        app.backlog_search_scheduler.action.search_backlog([target_show])
    return self.redirect('/manage/backlogOverview/')
def sync_trakt_episodes(self):
    """Sync the Trakt episodes watchlist into the local show list."""
    if not self.episode_watchlist:
        log.info('No episodes found in your Trakt watchlist. Nothing to sync')
        return

    added_shows = []
    trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

    for watchlist_item in self.episode_watchlist:
        trakt_show = watchlist_item['show']
        trakt_episode = watchlist_item['episode'].get('number', -1)
        trakt_season = watchlist_item['episode'].get('season', -1)

        show = None
        for i in indexerConfig:
            trakt_indexer = get_trakt_indexer(i)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            indexer = indexerConfig[i]['id']
            show = Show.find_by_id(app.showList, indexer, indexer_id)
            if show:
                break

        if not show:
            # If can't find with available indexers try IMDB
            trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
        if not show:
            # If can't find with available indexers try TRAKT
            trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find_by_id(app.showList, EXTERNAL_TRAKT, indexer_id)

        if not show:
            # If can't find show add with default trakt indexer
            indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
            # Only add show if we didn't add it before
            if indexer_id not in added_shows:
                self.add_show(trakt_default_indexer, indexer_id, trakt_show['title'], SKIPPED)
                added_shows.append(indexer_id)
        elif trakt_season != 0 and not show.paused:
            # Idiom fix: was `not trakt_season == 0`. Specials (season 0)
            # are skipped, as are paused shows.
            set_episode_to_wanted(show, trakt_season, trakt_episode)

    log.debug('Synced episodes with Trakt watchlist')
def add_episode_trakt_collection(self):
    """Add all existing episodes to Trakt collections.

    For episodes that have a media file (location)
    """
    if not (app.TRAKT_SYNC and app.USE_TRAKT):
        return

    main_db_con = db.DBConnection()
    statuses = [DOWNLOADED, ARCHIVED]
    sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                    'FROM tv_episodes AS e, tv_shows AS s ' \
                    'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \
                    "AND e.status in ({0}) AND e.location <> ''".format(','.join(['?'] * len(statuses)))
    sql_result = main_db_con.select(sql_selection, statuses)
    if not sql_result:
        return

    # Group (season, episode) pairs per (indexer, series id).
    # Fix: removed a dead top-level `episodes = []`; it was always
    # reassigned in the loop below before any use.
    shows = {}
    for cur_episode in sql_result:
        # Check if TRAKT supports that indexer
        if not get_trakt_indexer(cur_episode['indexer']):
            continue
        show_id = cur_episode['indexer'], cur_episode['indexer_id']
        episode = cur_episode['season'], cur_episode['episode']
        if show_id not in shows:
            shows[show_id] = []
        shows[show_id].append(episode)

    media_object_shows = []
    for show_id in shows:
        episodes = []
        show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
        for season, episode in shows[show_id]:
            if not self._check_list(
                indexer=show_obj.indexer, indexer_id=show_obj.series_id,
                season=season, episode=episode, list_type='Collection'
            ):
                continue
            log.info("Adding episode '{show}' {ep} to Trakt collection", {
                'show': show_obj.name,
                'ep': episode_num(season, episode)
            })
            episodes.append(show_obj.get_episode(season, episode))
        media_object_shows.append(create_episode_structure(show_obj, episodes))

    try:
        sync.add_to_collection({'shows': media_object_shows})
        self._get_show_collection()
    except (TraktException, RequestException) as error:
        log.info('Unable to add episodes to Trakt collection. Error: {error!r}',
                 {'error': error})
def test_find(self):
    """Behavior of Show.find over show lists, including duplicate entries."""
    app.QUALITY_DEFAULT = Quality.FULLHDTV
    app.showList = []
    show123 = TestTVShow(0, 123)
    show456 = TestTVShow(0, 456)
    show789 = TestTVShow(0, 789)
    shows = [show123, show456, show789]
    shows_duplicate = shows + shows

    test_cases = {
        (False, None): None,
        (False, ''): None,
        (False, '123'): None,
        (False, 123): None,
        (False, 12.3): None,
        (True, None): None,
        (True, ''): None,
        (True, '123'): None,
        (True, 123): show123,
        (True, 12.3): None,
        (True, 456): show456,
        (True, 789): show789,
    }
    unicode_test_cases = {
        (False, u''): None,
        (False, u'123'): None,
        (True, u''): None,
        (True, u'123'): None,
    }

    for tests in (test_cases, unicode_test_cases):
        for (use_shows, indexer_id), expected in iteritems(tests):
            haystack = shows if use_shows else None
            self.assertEqual(Show.find(haystack, indexer_id), expected)

    # A list containing the same show twice must raise.
    with self.assertRaises(MultipleShowObjectsException):
        Show.find(shows_duplicate, 456)
def get_episodes(search_thread, searchstatus):
    """Build a summary dict for each episode in *search_thread*'s segment."""
    summaries = []
    # The show may have been deleted very recently; look it up again.
    series_obj = Show.find_by_id(app.showList, search_thread.show.indexer,
                                 search_thread.show.series_id)
    if not series_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return summaries

    segment = search_thread.segment
    if not isinstance(segment, list):
        segment = [segment]
        search_thread.segment = segment

    for segment_ep in segment:
        ep = series_obj.get_episode(segment_ep.season, segment_ep.episode)
        summaries.append({
            'indexer_id': series_obj.indexer,
            'series_id': series_obj.series_id,
            'episode': ep.episode,
            'episodeindexerid': ep.indexerid,
            'season': ep.season,
            'searchstatus': searchstatus,
            'status': statusStrings[ep.status],
            # TODO: `quality_name` and `quality_style` should both be removed
            # when converting forced/manual episode search to Vue (use QualityPill component directly)
            'quality_name': Quality.qualityStrings[ep.quality],
            'quality_style': Quality.quality_keys.get(ep.quality) or Quality.quality_keys[Quality.UNKNOWN],
            'overview': Overview.overviewStrings[series_obj.get_overview(
                ep.status, ep.quality, manually_searched=ep.manually_searched)],
            'queuetime': search_thread.queue_time.isoformat(),
            'starttime': search_thread.start_time.isoformat() if search_thread.start_time else None,
        })
    return summaries
def add_show(indexer, indexer_id, show_name, status):
    """Add a new show with default settings and wait for it to appear.

    NOTE(review): the pre-existence check always looks the show up under
    EXTERNAL_IMDB regardless of *indexer* — confirm this is intended.
    """
    if Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id):
        return

    root_dirs = app.ROOT_DIRS
    location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None
    if not location:
        log.warning("Error creating show '{show}' folder. No default root directory", {
            'show': show_name
        })
        return

    log.info("Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}", {
        'show': show_name,
        'indexer_name': indexerConfig[indexer]['identifier'],
        'id': indexer_id
    })
    app.show_queue_scheduler.action.addShow(indexer, indexer_id, None,
                                            default_status=status,
                                            quality=int(app.QUALITY_DEFAULT),
                                            season_folders=int(app.SEASON_FOLDERS_DEFAULT),
                                            paused=app.TRAKT_START_PAUSED,
                                            default_status_after=status,
                                            root_dir=location)

    # Poll (blocking) up to three times for the queued add to complete.
    for _ in range(3):
        if Show.find_by_id(app.showList, indexer, indexer_id):
            return
        # Wait before show gets added and refreshed
        time.sleep(60)
    # Bug fix: the closing quote was misplaced in the original message
    # ("Error creating show '{show}. Please check logs' ").
    log.warning("Error creating show '{show}'. Please check logs", {
        'show': show_name
    })
def togglePause(self, showslug=None):
    """Toggle a show's paused state, notify the user, and redirect."""
    # @TODO: Replace with PUT to update the state var /api/v2/show/{id}
    identifier = SeriesIdentifier.from_slug(showslug)
    error, series_obj = Show.pause(identifier.indexer.slug, identifier.id)

    if error is not None:
        return self._genericMessage('Error', error)

    new_state = 'paused' if series_obj.paused else 'resumed'
    ui.notifications.message('{show} has been {state}'.format(
        show=series_obj.name, state=new_state))

    return self.redirect('/home/displayShow?showslug={series_obj.slug}'.format(series_obj=series_obj))
def set_scene_numbering( indexer_id, indexer, season=None, episode=None, # pylint:disable=too-many-arguments absolute_number=None, sceneSeason=None, sceneEpisode=None, sceneAbsolute=None): """ Set scene numbering for a season/episode. To clear the scene numbering, leave both sceneSeason and sceneEpisode as None. """ if indexer_id is None: return indexer_id = int(indexer_id) indexer = int(indexer) main_db_con = db.DBConnection() # Season/episode can be 0 so can't check "if season" if season is not None and episode is not None and absolute_number is None: main_db_con.action( "INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)", [indexer, indexer_id, season, episode]) main_db_con.action( "UPDATE scene_numbering SET scene_season = ?, scene_episode = ? WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", [sceneSeason, sceneEpisode, indexer, indexer_id, season, episode]) # absolute_number can be 0 so can't check "if absolute_number" else: main_db_con.action( "INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, absolute_number) VALUES (?,?,?)", [indexer, indexer_id, absolute_number]) main_db_con.action( "UPDATE scene_numbering SET scene_absolute_number = ? WHERE indexer = ? and indexer_id = ? and absolute_number = ?", [sceneAbsolute, indexer, indexer_id, absolute_number]) # Reload data from DB so that cache and db are in sync show = Show.find(app.showList, indexer_id) show.flush_episodes() show.erase_cached_parse()
def refreshShow(self, showslug=None):
    """Refresh a show from disk and redirect back to its detail page."""
    # @TODO: Replace with status=refresh from PATCH /api/v2/show/{id}
    identifier = SeriesIdentifier.from_slug(showslug)
    error, series_obj = Show.refresh(identifier.indexer.slug, identifier.id)

    if error is not None and series_obj is None:
        # The show itself failed validation.
        return self._genericMessage('Error', error)

    if error is not None:
        # The show exists but the refresh could not be queued/performed.
        ui.notifications.error('Unable to refresh this show.', error)
        time.sleep(cpu_presets[app.CPU_PRESET])

    return self.redirect('/home/displayShow?showslug={series_obj.slug}'.format(series_obj=series_obj))
def downloadSubtitleMissed(self, *args, **kwargs):
    """Queue subtitle downloads for the episodes selected in the form.

    kwargs keys look like '<indexer_id>-<series_id>-<what>' with value 'on'
    for checked boxes; *what* is either 'all' or an 's<season>e<episode>' tag.
    """
    to_download = {}

    # make a list of all shows and their associated args
    for arg in kwargs:
        indexer_id, series_id, what = arg.split('-')
        # we don't care about unchecked checkboxes
        if kwargs[arg] != 'on':
            continue
        if (indexer_id, series_id) not in to_download:
            to_download[(indexer_id, series_id)] = []
        to_download[(indexer_id, series_id)].append(what)

    for cur_indexer_id, cur_series_id in to_download:
        # get a list of all the eps we want to download subtitles if they just said 'all'
        if 'all' in to_download[(cur_indexer_id, cur_series_id)]:
            main_db_con = db.DBConnection()
            all_eps_results = main_db_con.select(
                b'SELECT season, episode '
                b'FROM tv_episodes '
                b'WHERE status = ? '
                b'AND season != 0 '
                b'AND indexer = ? '
                b'AND showid = ? '
                b"AND location != ''",
                [DOWNLOADED, cur_indexer_id, cur_series_id])
            to_download[(cur_indexer_id, cur_series_id)] = [
                's' + str(x[b'season']) + 'e' + str(x[b'episode'])
                for x in all_eps_results
            ]

        # Perf fix: the show lookup is loop-invariant; hoisted out of the
        # per-episode loop (previously done once per selected episode).
        series_obj = Show.find_by_id(app.showList, cur_indexer_id, cur_series_id)
        for ep_result in to_download[(cur_indexer_id, cur_series_id)]:
            # NOTE(review): season/episode stay strings here — presumably
            # get_episode coerces them; confirm.
            season, episode = ep_result.lstrip('s').split('e')
            series_obj.get_episode(season, episode).download_subtitles()

    return self.redirect('/manage/subtitleMissed/')
def _get_episodes_info(self, tvdb_id, episodes, season=None):
    """Add full episode information for existing episodes."""
    series = Show.find_by_id(app.showList, 1, tvdb_id)
    if not series:
        return episodes

    on_disk = series.get_all_episodes(season=season, has_location=True)
    if not on_disk:
        return episodes

    for idx, remote_ep in enumerate(episodes):
        # Be as conservative as possible: only query the API when the
        # episode exists on disk and still needs episode metadata.
        wanted = any(local_ep.indexerid == remote_ep.id and needs_metadata(local_ep)
                     for local_ep in on_disk)
        if wanted:
            detailed = self.config['session'].episodes_api.episodes_id_get(
                remote_ep.id, accept_language=self.config['language']
            )
            episodes[idx] = detailed.data
    return episodes
def _get_episodes_info(self, tvdb_id, episodes, season=None):
    """Add full episode information for existing episodes."""
    series = Show.find_by_id(app.showList, 1, tvdb_id)
    if not series:
        return episodes

    local_eps = series.get_all_episodes(season=season, has_location=True)
    if not local_eps:
        return episodes

    def _needs_full_info(remote_ep):
        # Only query when the episode exists on disk and needs metadata.
        return any(candidate.indexerid == remote_ep.id and needs_metadata(candidate)
                   for candidate in local_eps)

    for position, remote_ep in enumerate(episodes):
        if _needs_full_info(remote_ep):
            response = self.config['session'].episodes_api.episodes_id_get(
                remote_ep.id, accept_language=self.config['language'])
            episodes[position] = response.data
    return episodes
def downloadSubtitleMissed(self, *args, **kwargs):
    """Download missing subtitles for episodes ticked in the manage view.

    Checkbox kwargs are named ``'<indexer_id>-<series_id>-<what>'`` with
    value ``'on'``; ``what`` is an episode slug such as ``'s1e5'`` or the
    literal ``'all'``.
    """
    wanted = {}

    # Group the checked checkboxes by (indexer_id, series_id).
    for name, value in kwargs.items():
        indexer_id, series_id, episode_slug = name.split('-')

        # Unchecked checkboxes are ignored.
        if value != 'on':
            continue

        wanted.setdefault((indexer_id, series_id), []).append(episode_slug)

    for key in wanted:
        indexer_id, series_id = key

        # 'all' expands to every downloaded regular-season episode with a file.
        if 'all' in wanted[key]:
            main_db_con = db.DBConnection()
            rows = main_db_con.select(
                'SELECT season, episode '
                'FROM tv_episodes '
                'WHERE status = ? '
                'AND season != 0 '
                'AND indexer = ? '
                'AND showid = ? '
                "AND location != ''",
                [DOWNLOADED, indexer_id, series_id]
            )
            wanted[key] = ['s' + str(row['season']) + 'e' + str(row['episode'])
                           for row in rows]

        for slug in wanted[key]:
            season, episode = slug.lstrip('s').split('e')
            series_obj = Show.find_by_id(app.showList, indexer_id, series_id)
            series_obj.get_episode(season, episode).download_subtitles()

    return self.redirect('/manage/subtitleMissed/')
def downloadSubtitleMissed(self, *args, **kwargs): to_download = {} # make a list of all shows and their associated args for arg in kwargs: indexer_id, what = arg.split('-') # we don't care about unchecked checkboxes if kwargs[arg] != 'on': continue if indexer_id not in to_download: to_download[indexer_id] = [] to_download[indexer_id].append(what) for cur_indexer_id in to_download: # get a list of all the eps we want to download subtitles if they just said 'all' if 'all' in to_download[cur_indexer_id]: main_db_con = db.DBConnection() all_eps_results = main_db_con.select( b'SELECT season, episode ' b'FROM tv_episodes ' b'WHERE status LIKE \'%4\' ' b'AND season != 0 ' b'AND showid = ? ' b'AND location != \'\'', [cur_indexer_id]) to_download[cur_indexer_id] = [ str(x[b'season']) + 'x' + str(x[b'episode']) for x in all_eps_results ] for epResult in to_download[cur_indexer_id]: season, episode = epResult.split('x') show = Show.find(app.showList, int(cur_indexer_id)) show.get_episode(season, episode).download_subtitles() return self.redirect('/manage/subtitleMissed/')
def _postprocess(self, path, info_hash, resource_name, failed=False, client_type=None):
    """Queue a postprocess action for the history rows matching *info_hash*."""
    # Use the info hash to collect the segment of episodes it covers.
    rows = self.main_db_con.select(
        'SELECT * FROM history WHERE info_hash = ?', [info_hash]
    )

    segment = []
    for row in rows:
        series = Show.find_by_id(app.showList, row['indexer_id'], row['showid'])
        if not series:
            # Show is no longer available in the library; skip its rows.
            continue
        segment.append(series.get_episode(row['season'], row['episode']))

    app.post_processor_queue_scheduler.action.add_item(
        PostProcessQueueItem(
            path,
            info_hash,
            resource_name=resource_name,
            failed=failed,
            episodes=segment,
            client_type=client_type,
        )
    )
def deleteShow(self, showslug=None, full=0):
    """Delete the show identified by *showslug*.

    :param showslug: slug encoding both the indexer and the series id
    :param full: truthy to also remove the show's media files
    """
    # @TODO: Replace with DELETE to delete the show resource /api/v2/show/{id}
    if showslug:
        identifier = SeriesIdentifier.from_slug(showslug)
        error, series_obj = Show.delete(identifier.indexer.slug, identifier.id, full)

        if error is not None:
            return self._genericMessage('Error', error)

        ui.notifications.message(
            '{show} has been {state} {details}'.format(
                show=series_obj.name,
                state='trashed' if app.TRASH_REMOVE_SHOW else 'deleted',
                details='(with all related media)' if full else '(media untouched)',
            ))

        # Brief pause before follow-up work, scaled by the CPU preset.
        time.sleep(cpu_presets[app.CPU_PRESET])

        # Remove show from 'RECENT SHOWS' in 'Shows' menu
        # NOTE(review): no redirect/return after cleanup is visible here --
        # confirm this chunk is not truncated at the extraction boundary.
        app.SHOWS_RECENT = [
            show for show in app.SHOWS_RECENT
            if show['showSlug'] != showslug
        ]
def overall_stats():
    """Return aggregate statistics for the whole show library.

    Thin pass-through to ``Show.overall_stats``.
    """
    stats = Show.overall_stats()
    return stats
def addShowByID(self, indexername=None, seriesid=None, show_name=None, which_series=None,
                indexer_lang=None, root_dir=None, default_status=None, quality_preset=None,
                any_qualities=None, best_qualities=None, season_folders=None, subtitles=None,
                full_show_path=None, other_shows=None, skip_show=None, provided_indexer=None,
                anime=None, scene=None, blacklist=None, whitelist=None, default_status_after=None,
                configure_show_options=False):
    """
    Add a new show with the provided show options by indexer id.

    Currently only TVDB and IMDB ids are supported; a non-TVDB id is first
    mapped to a TVDB id before the show is queued for adding.
    """
    series_id = seriesid
    if indexername != 'tvdb':
        # Map the foreign id (e.g. IMDB) to a TVDB id before adding.
        series_id = helpers.get_tvdb_from_id(seriesid, indexername.upper())
        if not series_id:
            logger.log(u'Unable to to find tvdb ID to add %s' % show_name)
            ui.notifications.error(
                'Unable to add %s' % show_name,
                'Could not add %s. We were unable to locate the tvdb id at this time.' % show_name)
            return

    # Bail out silently if the show is already in the library.
    if Show.find_by_id(app.showList, INDEXER_TVDBV2, series_id):
        return

    # Sanitize the parameter allowed_qualities and preferred_qualities. As these would normally be passed as lists
    if any_qualities:
        any_qualities = any_qualities.split(',')
    else:
        any_qualities = []

    if best_qualities:
        best_qualities = best_qualities.split(',')
    else:
        best_qualities = []

    # If configure_show_options is enabled let's use the provided settings
    configure_show_options = config.checkbox_to_value(configure_show_options)

    if configure_show_options:
        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        season_folders = config.checkbox_to_value(season_folders)
        subtitles = config.checkbox_to_value(subtitles)

        if whitelist:
            whitelist = short_group_names(whitelist)
        if blacklist:
            blacklist = short_group_names(blacklist)

        if not any_qualities:
            any_qualities = []

        if not best_qualities or try_int(quality_preset, None):
            best_qualities = []

        if not isinstance(any_qualities, list):
            any_qualities = [any_qualities]

        if not isinstance(best_qualities, list):
            best_qualities = [best_qualities]

        quality = Quality.combine_qualities(
            [int(q) for q in any_qualities], [int(q) for q in best_qualities])

        location = root_dir
    else:
        # No per-show options supplied: fall back to the app-wide defaults.
        default_status = app.STATUS_DEFAULT
        quality = app.QUALITY_DEFAULT
        season_folders = app.SEASON_FOLDERS_DEFAULT
        subtitles = app.SUBTITLES_DEFAULT
        anime = app.ANIME_DEFAULT
        scene = app.SCENE_DEFAULT
        default_status_after = app.STATUS_DEFAULT_AFTER

        if app.ROOT_DIRS:
            root_dirs = app.ROOT_DIRS
            # ROOT_DIRS[0] presumably holds the default root-dir index,
            # with the paths following -- hence the +1 offset.
            location = root_dirs[int(root_dirs[0]) + 1]
        else:
            location = None

    if not location:
        logger.log(u'There was an error creating the show, '
                   u'no root directory setting found', logger.WARNING)
        return 'No root directories setup, please go back and add one.'

    show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
    show_dir = None

    # add the show
    app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2, int(series_id), show_dir,
                                            int(default_status), quality, season_folders,
                                            indexer_lang, subtitles, anime, scene, None,
                                            blacklist, whitelist, int(default_status_after),
                                            root_dir=location)

    ui.notifications.message('Show added', 'Adding the specified show {0}'.format(show_name))

    # done adding show
    return self.redirect('/home/')
def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
    """Check for needed subtitles for users' shows.

    :param force: True if a force search needs to be executed
    :type force: bool
    """
    # Guard clauses: never run twice concurrently, and only when enabled.
    if self.amActive:
        logger.debug(u'Subtitle finder is still running, not starting it again')
        return

    if not app.USE_SUBTITLES:
        logger.warning(u'Subtitle search is disabled. Please enabled it')
        return

    if not enabled_service_list():
        logger.warning(u'Not enough services selected. At least 1 service is required to search subtitles in the '
                       u'background')
        return

    self.amActive = True

    def dhm(td):
        """Create the string for subtitles delay."""
        days_delay = td.days
        hours_delay = td.seconds // 60**2
        minutes_delay = (td.seconds // 60) % 60
        # Each component is emitted only when non-zero.
        ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \
            (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \
            (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0]
        if days_delay == 1:
            ret = ret.replace('days', 'day')
        if hours_delay == 1:
            ret = ret.replace('hours', 'hour')
        if minutes_delay == 1:
            ret = ret.replace('minutes', 'minute')
        return ret.rstrip(', ')

    if app.POSTPONE_IF_NO_SUBS:
        self.subtitles_download_in_pp()

    logger.info(u'Checking for missed subtitles')

    database = db.DBConnection()
    # Shows with air date <= 30 days, have a limit of 100 results
    # Shows with air date > 30 days, have a limit of 200 results
    sql_args = [{'age_comparison': '<=', 'limit': 100},
                {'age_comparison': '>', 'limit': 200}]
    sql_like_languages = '%' + ','.join(sorted(wanted_languages())) + '%' if app.SUBTITLES_MULTI else '%und%'
    sql_results = []
    for args in sql_args:
        # The two placeholders filled by .format are the age comparison
        # operator and the LIMIT; row values are bound via parameters.
        sql_results += database.select(
            'SELECT '
            's.show_name, '
            'e.indexer,'
            'e.showid, '
            'e.season, '
            'e.episode,'
            'e.release_name, '
            'e.status, '
            'e.subtitles, '
            'e.subtitles_searchcount AS searchcount, '
            'e.subtitles_lastsearch AS lastsearch, '
            'e.location, (? - e.airdate) as age '
            'FROM '
            'tv_episodes AS e '
            'INNER JOIN tv_shows AS s '
            'ON (e.showid = s.indexer_id AND e.indexer = s.indexer) '
            'WHERE '
            's.subtitles = 1 '
            'AND s.paused = 0 '
            'AND e.status = ? '
            'AND e.season > 0 '
            "AND e.location != '' "
            'AND age {} 30 '
            'AND e.subtitles NOT LIKE ? '
            'ORDER BY '
            'lastsearch ASC '
            'LIMIT {}'.format(args['age_comparison'], args['limit']),
            [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages])

    if not sql_results:
        logger.info('No subtitles to download')
        self.amActive = False
        return

    # NOTE(review): rows are accessed with bytes keys while the SQL above is
    # str -- looks Python-2-era; confirm the DB layer's row key type before
    # touching this.
    for ep_to_sub in sql_results:
        # give the CPU a break
        time.sleep(cpu_presets[app.CPU_PRESET])

        ep_num = episode_num(ep_to_sub[b'season'], ep_to_sub[b'episode']) or \
            episode_num(ep_to_sub[b'season'], ep_to_sub[b'episode'], numbering='absolute')
        subtitle_path = _encode(ep_to_sub[b'location'], fallback='utf-8')
        if not os.path.isfile(subtitle_path):
            logger.debug('Episode file does not exist, cannot download subtitles for %s %s',
                         ep_to_sub[b'show_name'], ep_num)
            continue

        if app.SUBTITLES_STOP_AT_FIRST and ep_to_sub[b'subtitles']:
            logger.debug('Episode already has one subtitle, skipping %s %s',
                         ep_to_sub[b'show_name'], ep_num)
            continue

        if not needs_subtitles(ep_to_sub[b'subtitles']):
            logger.debug('Episode already has all needed subtitles, skipping %s %s',
                         ep_to_sub[b'show_name'], ep_num)
            continue

        try:
            lastsearched = datetime.datetime.strptime(ep_to_sub[b'lastsearch'], dateTimeFormat)
        except ValueError:
            # Unparsable timestamp: treat as "never searched".
            lastsearched = datetime.datetime.min

        if not force:
            now = datetime.datetime.now()
            days = int(ep_to_sub[b'age'])
            delay_time = datetime.timedelta(hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24)
            delay = lastsearched + delay_time - now

            # Search every hour until 10 days pass
            # After 10 days, search every 8 hours, after 30 days search once a month
            # Will always try an episode regardless of age for 3 times
            # The time resolution is minute
            # Only delay is the it's bigger than one minute and avoid wrongly skipping the search slot.
            if delay.total_seconds() > 60 and int(ep_to_sub[b'searchcount']) > 2:
                logger.debug('Subtitle search for %s %s delayed for %s',
                             ep_to_sub[b'show_name'], ep_num, dhm(delay))
                continue

        show_object = Show.find_by_id(app.showList, ep_to_sub[b'indexer'], ep_to_sub[b'showid'])
        if not show_object:
            logger.debug('Show with ID %s not found in the database', ep_to_sub[b'showid'])
            continue

        episode_object = show_object.get_episode(ep_to_sub[b'season'], ep_to_sub[b'episode'])
        if isinstance(episode_object, str):
            # get_episode returned an error string instead of an episode.
            logger.debug('%s %s not found in the database', ep_to_sub[b'show_name'], ep_num)
            continue

        episode_object.download_subtitles()

    logger.info('Finished checking for missed subtitles')
    self.amActive = False
def addShowByID(self, indexername=None, seriesid=None, show_name=None, which_series=None,
                indexer_lang=None, root_dir=None, default_status=None, quality_preset=None,
                any_qualities=None, best_qualities=None, season_folders=None, subtitles=None,
                full_show_path=None, other_shows=None, skip_show=None, provided_indexer=None,
                anime=None, scene=None, blacklist=None, whitelist=None, default_status_after=None,
                configure_show_options=False):
    """
    Add a new show with the provided show options by indexer id.

    Currently only TVDB and IMDB ids are supported; a non-TVDB id is first
    mapped to a TVDB id before the show is queued for adding.
    """
    series_id = seriesid
    if indexername != 'tvdb':
        # Map the foreign id (e.g. IMDB) to a TVDB id before adding.
        series_id = helpers.get_tvdb_from_id(seriesid, indexername.upper())
        if not series_id:
            log.info('Unable to find tvdb ID to add {name}', {'name': show_name})
            ui.notifications.error(
                'Unable to add {0}'.format(show_name),
                'Could not add {0}. We were unable to locate the tvdb id at this time.'.format(show_name)
            )
            return json_response(
                result=False,
                message='Unable to find tvdb ID to add {show}'.format(show=show_name)
            )

    if Show.find_by_id(app.showList, INDEXER_TVDBV2, series_id):
        return json_response(
            result=False,
            message='Show already exists'
        )

    # Sanitize the parameter allowed_qualities and preferred_qualities. As these would normally be passed as lists
    if any_qualities:
        any_qualities = any_qualities.split(',')
    else:
        any_qualities = []

    if best_qualities:
        best_qualities = best_qualities.split(',')
    else:
        best_qualities = []

    # If configure_show_options is enabled let's use the provided settings
    configure_show_options = config.checkbox_to_value(configure_show_options)

    if configure_show_options:
        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        season_folders = config.checkbox_to_value(season_folders)
        subtitles = config.checkbox_to_value(subtitles)

        if whitelist:
            whitelist = short_group_names(whitelist)
        if blacklist:
            blacklist = short_group_names(blacklist)

        if not any_qualities:
            any_qualities = []

        if not best_qualities or try_int(quality_preset, None):
            best_qualities = []

        if not isinstance(any_qualities, list):
            any_qualities = [any_qualities]

        if not isinstance(best_qualities, list):
            best_qualities = [best_qualities]

        quality = Quality.combine_qualities([int(q) for q in any_qualities],
                                            [int(q) for q in best_qualities])

        location = root_dir
    else:
        # No per-show options supplied: fall back to the app-wide defaults.
        default_status = app.STATUS_DEFAULT
        quality = app.QUALITY_DEFAULT
        season_folders = app.SEASON_FOLDERS_DEFAULT
        subtitles = app.SUBTITLES_DEFAULT
        anime = app.ANIME_DEFAULT
        scene = app.SCENE_DEFAULT
        default_status_after = app.STATUS_DEFAULT_AFTER

        if app.ROOT_DIRS:
            root_dirs = app.ROOT_DIRS
            # ROOT_DIRS[0] presumably holds the default root-dir index,
            # with the paths following -- hence the +1 offset.
            location = root_dirs[int(root_dirs[0]) + 1]
        else:
            location = None

    if not location:
        log.warning('There was an error creating the show, no root directory setting found')
        return json_response(
            result=False,
            message='No root directories set up, please go back and add one.'
        )

    show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
    show_dir = None

    # add the show
    app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2, int(series_id), show_dir,
                                            int(default_status), quality, season_folders,
                                            indexer_lang, subtitles, anime, scene, None,
                                            blacklist, whitelist, int(default_status_after),
                                            root_dir=location)

    ui.notifications.message('Show added', 'Adding the specified show {0}'.format(show_name))

    # done adding show
    return json_response(
        message='Adding the specified show {0}'.format(show_name),
        redirect='home'
    )
def find_episodes(self, episodes):
    """
    Search cache for episodes.

    NOTE: This is currently only used by the Backlog/Forced Search, as we
    determine the candidates there.

    The following checks are performed on the cache results:
    * Filter out non-anime results on Anime only providers
    * Check if the series is still in our library

    :param episodes: Single or list of episode object(s); falsy to return
        every cached row for this provider
    :return: defaultdict mapping an episode number (or SEASON_RESULT /
        MULTI_EP_RESULT sentinel) to a list of SearchResult objects.
    """
    cache_results = defaultdict(list)
    results = []

    cache_db_con = self._get_db()
    if not episodes:
        sql_results = cache_db_con.select(
            'SELECT * FROM [{name}]'.format(name=self.provider_id))
    elif not isinstance(episodes, list):
        # Single episode: one parameterized lookup.
        sql_results = cache_db_con.select(
            'SELECT * FROM [{name}] '
            'WHERE indexer = ? AND '
            'indexerid = ? AND '
            'season = ? AND '
            'episodes LIKE ?'.format(name=self.provider_id),
            [episodes.series.indexer, episodes.series.series_id, episodes.season,
             '%|{0}|%'.format(episodes.episode)]
        )
    else:
        # Batch: build one query per episode and run them in a single
        # mass_action, then flatten the per-query row lists.
        for ep_obj in episodes:
            results.append([
                'SELECT * FROM [{name}] '
                'WHERE indexer = ? AND '
                'indexerid = ? AND '
                'season = ? AND '
                'episodes LIKE ?'.format(
                    name=self.provider_id
                ),
                [ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season,
                 '%|{0}|%'.format(ep_obj.episode)]]
            )

        if results:
            # Only execute the query if we have results
            sql_results = cache_db_con.mass_action(results, fetchall=True)
            sql_results = list(itertools.chain(*sql_results))
        else:
            sql_results = []
            log.debug(
                '{id}: No cached results in {provider} for series {show_name!r} episode {ep}', {
                    'id': episodes[0].series.series_id,
                    'provider': self.provider.name,
                    'show_name': episodes[0].series.name,
                    'ep': episode_num(episodes[0].season, episodes[0].episode),
                }
            )

    # for each cache entry
    for cur_result in sql_results:
        if cur_result['indexer'] is None:
            log.debug('Ignoring result: {0}, missing indexer. This is probably a result added'
                      ' prior to medusa version 0.2.0', cur_result['name'])
            continue

        search_result = self.provider.get_result()

        # get the show, or ignore if it's not one of our shows
        series_obj = Show.find_by_id(app.showList, int(cur_result['indexer']),
                                     int(cur_result['indexerid']))
        if not series_obj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not series_obj.is_anime:
            log.debug('{0} is not an anime, skipping', series_obj.name)
            continue

        # build a result object
        search_result.quality = int(cur_result['quality'])
        search_result.release_group = cur_result['release_group']
        search_result.version = cur_result['version']
        search_result.name = cur_result['name']
        search_result.url = cur_result['url']
        search_result.actual_season = int(cur_result['season'])

        # TODO: Add support for season results
        sql_episodes = cur_result['episodes'].strip('|')
        # Season result: an empty episodes field means the whole season.
        if not sql_episodes:
            ep_objs = series_obj.get_all_episodes(search_result.actual_season)
            if not ep_objs:
                # We couldn't get any episodes for this season, which is odd, skip the result.
                log.debug("We couldn't get any episodes for season {0} of {1}, skipping",
                          search_result.actual_season, search_result.name)
                continue
            actual_episodes = [ep.episode for ep in ep_objs]
            episode_number = SEASON_RESULT
        # Multi or single episode result
        else:
            actual_episodes = [int(ep) for ep in sql_episodes.split('|')]
            ep_objs = [series_obj.get_episode(search_result.actual_season, ep)
                       for ep in actual_episodes]
            if len(actual_episodes) == 1:
                episode_number = actual_episodes[0]
            else:
                episode_number = MULTI_EP_RESULT

        search_result.episodes = ep_objs
        search_result.actual_episodes = actual_episodes

        # Map the remaining attributes
        search_result.series = series_obj
        search_result.seeders = cur_result['seeders']
        search_result.leechers = cur_result['leechers']
        search_result.size = cur_result['size']
        search_result.pubdate = cur_result['pubdate']
        search_result.proper_tags = cur_result['proper_tags'].split('|') if cur_result['proper_tags'] else ''
        search_result.content = None

        # add it to the list
        cache_results[episode_number].append(search_result)

    # datetime stamp this search so cache gets cleared
    self.searched = time()

    return cache_results
def find_needed_episodes(self, episode, forced_search=False, down_cur_quality=False):
    """Search the provider cache for results matching the given episode(s).

    :param episode: a single episode object, a list of them, or a falsy
        value to return every cached row for this provider
    :param forced_search: propagated to the want_episode check and results
    :param down_cur_quality: whether re-downloading the current quality is allowed
    :return: dict mapping episode objects to lists of search results
    """
    needed_eps = {}
    results = []

    cache_db_con = self._get_db()
    if not episode:
        # Fix: the statements and LIKE patterns were bytes literals; bytes
        # has no .format() on Python 3 (AttributeError) and sqlite3 only
        # accepts str SQL, so all b'...' prefixes were dropped.
        sql_results = cache_db_con.select(
            'SELECT * FROM [{name}]'.format(name=self.provider_id))
    elif not isinstance(episode, list):
        sql_results = cache_db_con.select(
            'SELECT * FROM [{name}] '
            'WHERE indexerid = ? AND'
            ' season = ? AND'
            ' episodes LIKE ?'.format(name=self.provider_id),
            [episode.series.indexerid, episode.season,
             '%|{0}|%'.format(episode.episode)])
    else:
        # Batch: one query per episode, restricted to its wanted qualities,
        # executed together and flattened.
        for ep_obj in episode:
            results.append([
                'SELECT * FROM [{name}] '
                'WHERE indexerid = ? AND'
                ' season = ? AND'
                ' episodes LIKE ? AND '
                ' quality IN ({qualities})'.format(
                    name=self.provider_id,
                    qualities=','.join((str(x) for x in ep_obj.wanted_quality))),
                [ep_obj.series.indexerid, ep_obj.season,
                 '%|{0}|%'.format(ep_obj.episode)]])

        if results:
            # Only execute the query if we have results
            sql_results = cache_db_con.mass_action(results, fetchall=True)
            sql_results = list(itertools.chain(*sql_results))
        else:
            sql_results = []
            log.debug(
                '{id}: No cached results in {provider} for series {show_name!r} episode {ep}', {
                    'id': episode[0].series.indexerid,
                    'provider': self.provider.name,
                    'show_name': episode[0].series.name,
                    'ep': episode_num(episode[0].season, episode[0].episode),
                })

    # for each cache entry
    for cur_result in sql_results:
        search_result = self.provider.get_result()

        # ignored/required words, and non-tv junk
        if not naming.filter_bad_releases(cur_result['name']):
            continue

        # get the show, or ignore if it's not one of our shows
        # NOTE(review): this cache table has no indexer column in the WHERE
        # clauses above, so the deprecated single-id Show.find is kept.
        show_obj = Show.find(app.showList, int(cur_result['indexerid']))
        if not show_obj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not show_obj.is_anime:
            log.debug('{0} is not an anime, skipping', show_obj.name)
            continue

        # get season and ep data (ignoring multi-eps for now)
        search_result.season = int(cur_result['season'])
        if search_result.season == -1:
            continue

        cur_ep = cur_result['episodes'].split('|')[1]
        if not cur_ep:
            continue

        cur_ep = int(cur_ep)

        search_result.quality = int(cur_result['quality'])
        search_result.release_group = cur_result['release_group']
        search_result.version = cur_result['version']

        # if the show says we want that episode then add it to the list
        if not show_obj.want_episode(search_result.season, cur_ep, search_result.quality,
                                     forced_search, down_cur_quality):
            log.debug('Ignoring {0}', cur_result['name'])
            continue

        search_result.episodes = [show_obj.get_episode(search_result.season, cur_ep)]
        search_result.actual_episodes = [search_result.episodes[0].episode]
        search_result.actual_season = search_result.season

        # build a result object
        search_result.name = cur_result['name']
        search_result.url = cur_result['url']

        log.debug(
            '{id}: Using cached results from {provider} for series {show_name!r} episode {ep}', {
                'id': search_result.episodes[0].series.indexerid,
                'provider': self.provider.name,
                'show_name': search_result.episodes[0].series.name,
                'ep': episode_num(search_result.episodes[0].season,
                                  search_result.episodes[0].episode),
            })

        # Map the remaining attributes
        search_result.show = show_obj
        search_result.seeders = cur_result['seeders']
        search_result.leechers = cur_result['leechers']
        search_result.size = cur_result['size']
        search_result.pubdate = cur_result['pubdate']
        search_result.proper_tags = cur_result['proper_tags'].split('|') if cur_result['proper_tags'] else ''
        search_result.content = None

        # FIXME: Should be changed to search_result.search_type
        search_result.forced_search = forced_search
        search_result.download_current_quality = down_cur_quality

        episode_object = search_result.episodes[0]
        # add it to the list
        if episode_object not in needed_eps:
            needed_eps[episode_object] = [search_result]
        else:
            needed_eps[episode_object].append(search_result)

    # datetime stamp this search so cache gets cleared
    self.searched = time()

    return needed_eps
def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
    """Check for needed subtitles for users' shows.

    :param force: True if a force search needs to be executed
    :type force: bool
    """
    # Guard clauses: never run twice concurrently, and only when enabled.
    if self.amActive:
        logger.debug(u'Subtitle finder is still running, not starting it again')
        return

    if not app.USE_SUBTITLES:
        logger.warning(u'Subtitle search is disabled. Please enabled it')
        return

    if not enabled_service_list():
        logger.warning(u'Not enough services selected. At least 1 service is required to search subtitles in the '
                       u'background')
        return

    self.amActive = True

    def dhm(td):
        """Create the string for subtitles delay."""
        days_delay = td.days
        hours_delay = td.seconds // 60 ** 2
        minutes_delay = (td.seconds // 60) % 60
        # Each component is emitted only when non-zero.
        ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \
            (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \
            (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0]
        if days_delay == 1:
            ret = ret.replace('days', 'day')
        if hours_delay == 1:
            ret = ret.replace('hours', 'hour')
        if minutes_delay == 1:
            ret = ret.replace('minutes', 'minute')
        return ret.rstrip(', ')

    if app.POSTPONE_IF_NO_SUBS:
        self.subtitles_download_in_pp()

    logger.info(u'Checking for missed subtitles')

    main_db_con = db.DBConnection()
    # Shows with air date <= 30 days, have a limit of 100 results
    # Shows with air date > 30 days, have a limit of 200 results
    sql_args = [{'age_comparison': '<=', 'limit': 100},
                {'age_comparison': '>', 'limit': 200}]
    sql_like_languages = '%' + ','.join(sorted(wanted_languages())) + '%' if app.SUBTITLES_MULTI else '%und%'
    sql_results = []
    for args in sql_args:
        # The two placeholders filled by .format are the age comparison
        # operator and the LIMIT; row values are bound via parameters.
        sql_results += main_db_con.select(
            'SELECT '
            's.show_name, '
            'e.indexer,'
            'e.showid, '
            'e.season, '
            'e.episode,'
            'e.release_name, '
            'e.status, '
            'e.subtitles, '
            'e.subtitles_searchcount AS searchcount, '
            'e.subtitles_lastsearch AS lastsearch, '
            'e.location, (? - e.airdate) as age '
            'FROM '
            'tv_episodes AS e '
            'INNER JOIN tv_shows AS s '
            'ON (e.showid = s.indexer_id AND e.indexer = s.indexer) '
            'WHERE '
            's.subtitles = 1 '
            'AND s.paused = 0 '
            'AND e.status = ? '
            'AND e.season > 0 '
            "AND e.location != '' "
            'AND age {} 30 '
            'AND e.subtitles NOT LIKE ? '
            'ORDER BY '
            'lastsearch ASC '
            'LIMIT {}'.format(args['age_comparison'], args['limit']),
            [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages]
        )

    if not sql_results:
        logger.info('No subtitles to download')
        self.amActive = False
        return

    for ep_to_sub in sql_results:
        # give the CPU a break
        time.sleep(cpu_presets[app.CPU_PRESET])

        ep_num = episode_num(ep_to_sub['season'], ep_to_sub['episode']) or \
            episode_num(ep_to_sub['season'], ep_to_sub['episode'], numbering='absolute')
        subtitle_path = _encode(ep_to_sub['location'])
        if not os.path.isfile(subtitle_path):
            logger.debug('Episode file does not exist, cannot download subtitles for %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        if app.SUBTITLES_STOP_AT_FIRST and ep_to_sub['subtitles']:
            logger.debug('Episode already has one subtitle, skipping %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        if not needs_subtitles(ep_to_sub['subtitles']):
            logger.debug('Episode already has all needed subtitles, skipping %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        try:
            lastsearched = datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat)
        except ValueError:
            # Unparsable timestamp: treat as "never searched".
            lastsearched = datetime.datetime.min

        if not force:
            now = datetime.datetime.now()
            days = int(ep_to_sub['age'])
            delay_time = datetime.timedelta(hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24)
            delay = lastsearched + delay_time - now

            # Search every hour until 10 days pass
            # After 10 days, search every 8 hours, after 30 days search once a month
            # Will always try an episode regardless of age for 3 times
            # The time resolution is minute
            # Only delay is the it's bigger than one minute and avoid wrongly skipping the search slot.
            if delay.total_seconds() > 60 and int(ep_to_sub['searchcount']) > 2:
                logger.debug('Subtitle search for %s %s delayed for %s',
                             ep_to_sub['show_name'], ep_num, dhm(delay))
                continue

        show_object = Show.find_by_id(app.showList, ep_to_sub['indexer'], ep_to_sub['showid'])
        if not show_object:
            logger.debug('Show with ID %s not found in the database', ep_to_sub['showid'])
            continue

        episode_object = show_object.get_episode(ep_to_sub['season'], ep_to_sub['episode'])
        if isinstance(episode_object, str):
            # get_episode returned an error string instead of an episode.
            logger.debug('%s %s not found in the database', ep_to_sub['show_name'], ep_num)
            continue

        episode_object.download_subtitles()

    logger.info('Finished checking for missed subtitles')
    self.amActive = False
def run(self, force=False):
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True
    # Let's keep track of the exact time the scheduler kicked in,
    # as we need to compare to this time for each provider.
    scheduler_start_time = int(time())

    if not network_dict:
        update_network_dict()

    # The tvshows airdate_offset field is used to configure a search offset for specific shows.
    # This way we can search/accept results early or late, depending on the value.
    main_db_con = DBConnection()
    min_offset_show = main_db_con.select(
        'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
        'FROM tv_shows '
        'WHERE paused = 0 AND airdate_offset < 0'
    )
    additional_search_offset = 0
    if min_offset_show and min_offset_show[0]['offsets'] > 0:
        # Widen the airdate window by the largest negative offset (in days).
        additional_search_offset = int(ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
        log.debug('Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                  ' offset configured.', {'min_offset_show': min_offset_show[0]['min_offset']})

    cur_time = datetime.now(app_timezone)

    cur_date = (
        date.today() + timedelta(days=1 if network_dict else 2) +
        timedelta(days=additional_search_offset)
    ).toordinal()

    episodes_from_db = main_db_con.select(
        'SELECT indexer, showid, airdate, season, episode '
        'FROM tv_episodes '
        'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cur_date]
    )

    new_releases = []
    series_obj = None

    for db_episode in episodes_from_db:
        indexer_id = db_episode['indexer']
        series_id = db_episode['showid']
        try:
            # Rows are ordered per show often enough that the previous
            # lookup is reused when the series id repeats.
            if not series_obj or series_id != series_obj.indexerid:
                series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

            # for when there is orphaned series in the database but not loaded into our show list
            if not series_obj or series_obj.paused:
                continue

        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}', {'id': series_id})
            continue

        cur_ep = series_obj.get_episode(db_episode['season'], db_episode['episode'])

        if series_obj.airs and series_obj.network:
            # This is how you assure it is always converted to local time
            show_air_time = parse_date_time(db_episode['airdate'], series_obj.airs, series_obj.network)
            end_time = show_air_time.astimezone(app_timezone) + timedelta(
                minutes=try_int(series_obj.runtime, 60))

            if series_obj.airdate_offset != 0:
                log.debug(
                    '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                    {'show': series_obj.name, 'episode': cur_ep.pretty_name(),
                     'offset': series_obj.airdate_offset})

            # filter out any episodes that haven't finished airing yet
            if end_time + timedelta(hours=series_obj.airdate_offset) > cur_time:
                continue

        with cur_ep.lock:
            # Specials (season 0) are skipped instead of taking the show default.
            cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': cur_ep.pretty_name(),
                    'status': common.statusStrings[cur_ep.status],
                    'special': '(specials are not supported)' if not cur_ep.season else '',
                }
            )
            new_releases.append(cur_ep.get_sql())

    if new_releases:
        main_db_con = DBConnection()
        main_db_con.mass_action(new_releases)

    # queue a daily search
    app.search_queue_scheduler.action.add_item(
        DailySearchQueueItem(scheduler_start_time, force=force)
    )

    self.amActive = False