def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs):
    status_list = [int(oldStatus)]
    if status_list[0] == SNATCHED:
        status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST

    to_change = {}

    # make a list of all shows and their associated args
    for arg in kwargs:
        indexer_id, what = arg.split("-")

        # we don't care about unchecked checkboxes
        if kwargs[arg] != "on":
            continue

        if indexer_id not in to_change:
            to_change[indexer_id] = []

        to_change[indexer_id].append(what)

    main_db_con = db.DBConnection()
    for cur_indexer_id in to_change:
        # get a list of all the eps we want to change if they just said "all"
        if "all" in to_change[cur_indexer_id]:
            all_eps_results = main_db_con.select(
                "SELECT season, episode FROM tv_episodes WHERE status IN ({0}) AND season != 0 AND showid = ?".format(",".join(["?"] * len(status_list))),
                status_list + [cur_indexer_id],
            )
            all_eps = [str(x["season"]) + "x" + str(x["episode"]) for x in all_eps_results]
            to_change[cur_indexer_id] = all_eps

        self.setStatus(cur_indexer_id, "|".join(to_change[cur_indexer_id]), newStatus, direct=True)

    return self.redirect("/manage/episodeStatuses/")
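# A sketch of the checkbox payload this handler expects, with hypothetical
# values: each key is "<indexer_id>-<season>x<episode>" (or "<indexer_id>-all"),
# and only entries whose value is "on" are processed.
#
#   kwargs = {
#       "12345-1x1": "on",   # change S01E01 of show 12345
#       "12345-all": "on",   # or change every matching episode of that show
#       "67890-2x3": "off",  # ignored: checkbox was unchecked
#   }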
def showSubtitleMissed(indexer_id, whichSubs):
    main_db_con = db.DBConnection()
    cur_show_results = main_db_con.select(
        "SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? {0} AND (status LIKE '%4' OR status LIKE '%6') AND location != ''".format(
            ("AND season != 0 ", "")[settings.SUBTITLES_INCLUDE_SPECIALS]
        ),
        [int(indexer_id)],
    )

    result = {}
    for cur_result in cur_show_results:
        if whichSubs == "all":
            if not frozenset(subtitle_module.wanted_languages()).difference(cur_result["subtitles"].split(",")):
                continue
        elif whichSubs in cur_result["subtitles"]:
            continue

        cur_season = int(cur_result["season"])
        cur_episode = int(cur_result["episode"])

        if cur_season not in result:
            result[cur_season] = {}

        if cur_episode not in result[cur_season]:
            result[cur_season][cur_episode] = {}

        result[cur_season][cur_episode]["name"] = cur_result["name"]
        result[cur_season][cur_episode]["subtitles"] = cur_result["subtitles"]

    return json.dumps(result)
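# A note on the status filter above: episode statuses in this schema are
# composite values (status + 100 * quality), so "LIKE '%4'" and "LIKE '%6'"
# match anything ending in DOWNLOADED (4) or ARCHIVED (6). A sketch of the
# returned JSON, with hypothetical values:
#
#   {"1": {"3": {"name": "Some Episode", "subtitles": "en,es"}}}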
def test_all_possible_show_names(self):
    """
    Test all possible show names
    """
    # common.sceneExceptions[-1] = ['Exception Test']
    test_cache_db_con = db.DBConnection("cache.db")
    test_cache_db_con.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)", [-1, "Exception Test", -1])
    common.countryList["Full Country Name"] = "FCN"

    self._test_all_possible_show_names("Show Name", expected=["Show Name"])
    self._test_all_possible_show_names("Show Name", -1, expected=["Show Name", "Exception Test"])
    self._test_all_possible_show_names("Show Name FCN", expected=["Show Name FCN", "Show Name (Full Country Name)"])
    self._test_all_possible_show_names("Show Name (FCN)", expected=["Show Name (FCN)", "Show Name (Full Country Name)"])
    self._test_all_possible_show_names("Show Name Full Country Name", expected=["Show Name Full Country Name", "Show Name (FCN)"])
    self._test_all_possible_show_names("Show Name (Full Country Name)", expected=["Show Name (Full Country Name)", "Show Name (FCN)"])
def failedDownloads(self, limit=100, toRemove=None):
    failed_db_con = db.DBConnection("failed.db")

    if limit == "0":
        sql_results = failed_db_con.select("SELECT * FROM failed")
    else:
        sql_results = failed_db_con.select("SELECT * FROM failed LIMIT ?", [limit])

    toRemove = toRemove.split("|") if toRemove else []
    for release in toRemove:
        failed_db_con.action("DELETE FROM failed WHERE failed.release = ?", [release])

    if toRemove:
        return self.redirect("/manage/failedDownloads/")

    t = PageTemplate(rh=self, filename="manage_failedDownloads.mako")
    return t.render(
        limit=limit,
        failedResults=sql_results,
        title=_("Failed Downloads"),
        header=_("Failed Downloads"),
        topmenu="manage",
        controller="manage",
        action="failedDownloads",
    )
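# A usage sketch with hypothetical values: `limit` arrives from the query
# string as a string ("0" means unlimited), and `toRemove` is a pipe-separated
# list of release names to purge before redirecting.
#
#   self.failedDownloads(limit="100", toRemove="Show.S01E01.720p-GRP|Show.S01E02.720p-GRP")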
def setup_test_db():
    """
    Set up the test databases.
    """
    # Upgrade the db to the latest version.
    db.upgrade_database(db.DBConnection(), main.InitialSchema)

    # fix up any db problems
    db.sanity_check_database(db.DBConnection(), main.MainSanityCheck)

    # and for cache.db too
    db.upgrade_database(db.DBConnection("cache.db"), cache.InitialSchema)

    # and for failed.db too
    db.upgrade_database(db.DBConnection("failed.db"), failed.InitialSchema)
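# A minimal usage sketch, assuming a unittest-style harness: run this once
# before any test opens a DBConnection so that all three databases exist at
# the latest schema version.
#
#   class DBTestCase(unittest.TestCase):
#       @classmethod
#       def setUpClass(cls):
#           setup_test_db()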
def _generate_recipients(show):
    addrs = []
    main_db_con = db.DBConnection()

    # Grab the global recipients
    if settings.EMAIL_LIST:
        for addr in settings.EMAIL_LIST.split(','):
            if len(addr.strip()) > 0:
                addrs.append(addr)

    # Grab the per-show-notification recipients
    if show is not None:
        for s in show:
            for subs in main_db_con.select('SELECT notify_list FROM tv_shows WHERE show_name = ?', (s,)):
                if subs['notify_list']:
                    if subs['notify_list'][0] == '{':
                        entries = dict(ast.literal_eval(subs['notify_list']))
                        for addr in entries['emails'].split(','):
                            if len(addr.strip()) > 0:
                                addrs.append(addr)
                    else:  # Legacy
                        for addr in subs['notify_list'].split(','):
                            if len(addr.strip()) > 0:
                                addrs.append(addr)

    addrs = set(addrs)
    logger.debug('Notification recipients: {0}'.format(addrs))
    return addrs
def _generate_recipients(show=None):
    apis = []
    mydb = db.DBConnection(row_type="dict")

    # Grab the global recipient(s)
    if settings.PROWL_API:
        for api in settings.PROWL_API.split(","):
            if api.strip():
                apis.append(api.strip())

    # Grab the per-show-notification recipients
    if show is not None:
        for value in show:
            for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (value,)):
                # only the dict-style (current) format can carry prowl keys;
                # the legacy plain address list is email-only
                if subs["notify_list"] and subs["notify_list"][0] == "{":
                    entries = dict(ast.literal_eval(subs["notify_list"]))
                    for api in entries["prowlAPIs"].split(","):
                        if api.strip():
                            apis.append(api.strip())

    apis = set(apis)
    return apis
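# A sketch of the two notify_list storage formats handled above and in the
# email variant, with hypothetical values: the legacy format is a bare
# comma-separated address list, while the current format is a dict literal
# parsed with ast.literal_eval:
#
#   "a@example.com,b@example.com"                               # legacy
#   "{'emails': 'a@example.com', 'prowlAPIs': 'key1,key2'}"     # current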
def downloadSubtitleMissed(self, *args, **kwargs):
    to_download = {}

    # make a list of all shows and their associated args
    for arg in kwargs:
        indexer_id, what = arg.split("-")

        # we don't care about unchecked checkboxes
        if kwargs[arg] != "on":
            continue

        if indexer_id not in to_download:
            to_download[indexer_id] = []

        to_download[indexer_id].append(what)

    for cur_indexer_id in to_download:
        # get a list of all the eps we want to download subtitles for if they just said "all"
        if "all" in to_download[cur_indexer_id]:
            main_db_con = db.DBConnection()
            all_eps_results = main_db_con.select(
                "SELECT season, episode FROM tv_episodes WHERE (status LIKE '%4' OR status LIKE '%6') {0} AND showid = ? AND location != ''".format(
                    ("AND season != 0 ", "")[settings.SUBTITLES_INCLUDE_SPECIALS]
                ),
                [cur_indexer_id],
            )
            to_download[cur_indexer_id] = [str(x["season"]) + "x" + str(x["episode"]) for x in all_eps_results]

        for epResult in to_download[cur_indexer_id]:
            season, episode = epResult.split("x")

            show = Show.find(settings.showList, int(cur_indexer_id))
            show.getEpisode(season, episode).download_subtitles()

    return self.redirect("/manage/subtitleMissed/")
def compare_db_version(self):
    try:
        self.need_update()
        cur_hash = self.get_newest_commit_hash()
        assert len(cur_hash) == 40, "Commit hash wrong length: {0} hash: {1}".format(len(cur_hash), cur_hash)

        check_url = "https://raw.githubusercontent.com/{0}/{1}/{2}/sickchill/oldbeard/databases/main.py"
        for attempt in (cur_hash, "master"):
            response = helpers.getURL(check_url.format(settings.GIT_ORG, settings.GIT_REPO, attempt), session=self.session, returns='text')
            if response:
                break

        assert response, "Empty response from {0}".format(check_url)

        match = re.search(r"MAX_DB_VERSION\s=\s(?P<version>\d{2,3})", response)
        destination_db_version = int(match.group('version'))

        main_db_con = db.DBConnection()
        current_db_version = main_db_con.get_db_version()

        if destination_db_version > current_db_version:
            return 'upgrade'
        elif destination_db_version == current_db_version:
            return 'equal'
        else:
            return 'downgrade'
    except Exception as e:
        return repr(e)
def subtitleMissed(self, whichSubs=None):
    t = PageTemplate(rh=self, filename="manage_subtitleMissed.mako")
    if not whichSubs:
        return t.render(
            whichSubs=whichSubs,
            title=_("Episode Overview"),
            header=_("Episode Overview"),
            topmenu="manage",
            show_names=None,
            ep_counts=None,
            sorted_show_ids=None,
            controller="manage",
            action="subtitleMissed",
        )

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        "SELECT show_name, tv_shows.indexer_id AS indexer_id, tv_episodes.subtitles subtitles "
        "FROM tv_episodes, tv_shows "
        "WHERE tv_shows.subtitles = 1 AND (tv_episodes.status LIKE '%4' OR tv_episodes.status LIKE '%6') AND tv_episodes.season != 0 "
        "AND tv_episodes.location != '' AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name"
    )

    ep_counts = {}
    show_names = {}
    sorted_show_ids = []
    for cur_status_result in status_results:
        if whichSubs == "all":
            if not frozenset(subtitle_module.wanted_languages()).difference(cur_status_result["subtitles"].split(",")):
                continue
        elif whichSubs in cur_status_result["subtitles"]:
            continue

        cur_indexer_id = int(cur_status_result["indexer_id"])
        if cur_indexer_id not in ep_counts:
            ep_counts[cur_indexer_id] = 1
        else:
            ep_counts[cur_indexer_id] += 1

        show_names[cur_indexer_id] = cur_status_result["show_name"]
        if cur_indexer_id not in sorted_show_ids:
            sorted_show_ids.append(cur_indexer_id)

    return t.render(
        whichSubs=whichSubs,
        show_names=show_names,
        ep_counts=ep_counts,
        sorted_show_ids=sorted_show_ids,
        title=_("Missing Subtitles"),
        header=_("Missing Subtitles"),
        topmenu="manage",
        controller="manage",
        action="subtitleMissed",
    )
def episodeStatuses(self, whichStatus=None):
    if whichStatus:
        status_list = [int(whichStatus)]
        if status_list[0] == SNATCHED:
            status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
    else:
        status_list = []

    t = PageTemplate(rh=self, filename="manage_episodeStatuses.mako")

    # if we have no status then this is as far as we need to go
    if not status_list:
        return t.render(
            title=_("Episode Overview"),
            header=_("Episode Overview"),
            topmenu="manage",
            show_names=None,
            whichStatus=whichStatus,
            ep_counts=None,
            sorted_show_ids=None,
            controller="manage",
            action="episodeStatuses",
        )

    main_db_con = db.DBConnection()
    status_results = main_db_con.select(
        "SELECT show_name, tv_shows.indexer_id AS indexer_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN ({0}) AND season != 0 AND "
        "tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name".format(",".join(["?"] * len(status_list))),
        status_list,
    )

    ep_counts = {}
    show_names = {}
    sorted_show_ids = []
    for cur_status_result in status_results:
        cur_indexer_id = int(cur_status_result["indexer_id"])
        if cur_indexer_id not in ep_counts:
            ep_counts[cur_indexer_id] = 1
        else:
            ep_counts[cur_indexer_id] += 1

        show_names[cur_indexer_id] = cur_status_result["show_name"]
        if cur_indexer_id not in sorted_show_ids:
            sorted_show_ids.append(cur_indexer_id)

    return t.render(
        title=_("Episode Overview"),
        header=_("Episode Overview"),
        topmenu="manage",
        whichStatus=whichStatus,
        show_names=show_names,
        ep_counts=ep_counts,
        sorted_show_ids=sorted_show_ids,
        controller="manage",
        action="episodeStatuses",
    )
def test_scene_ex_reset_name_cache(self):
    """
    Test scene exceptions reset name cache
    """
    # clear the exceptions
    test_cache_db_con = db.DBConnection('cache.db')
    test_cache_db_con.action("DELETE FROM scene_exceptions")

    # put something in the cache
    name_cache.addNameToCache('Cached Name', 0)

    # updating should not clear the cache this time since our exceptions didn't change
    scene_exceptions.retrieve_exceptions()
    self.assertEqual(name_cache.retrieveNameFromCache('Cached Name'), 0)
def test_scene_ex_reset_name_cache(self):
    """
    Test scene exceptions reset name cache
    """
    # clear the exceptions
    test_cache_db_con = db.DBConnection("cache.db")
    test_cache_db_con.action("DELETE FROM scene_exceptions")

    # put something in the cache
    name_cache.add_name("Cached Name", 0)

    # updating should not clear the cache this time since our exceptions didn't change
    scene_exceptions.retrieve_exceptions()
    assert name_cache.get_id_from_name("Cached Name") == 0
def _is_season_pack(name):
    try:
        parse_result = NameParser(tryIndexers=True).parse(name)
    except (InvalidNameException, InvalidShowException) as error:
        logger.debug("{0}".format(error))
        return False

    main_db_con = db.DBConnection()
    sql_selection = "SELECT COUNT(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
    if int(episodes[0]['count']) == len(parse_result.episode_numbers):
        return True
    # return False explicitly instead of falling through to an implicit None
    return False
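# A usage sketch with a hypothetical release name: True only when the parsed
# episode count equals the number of episodes the database has for that season.
#
#   _is_season_pack("Show.Name.S02E01E02.720p.HDTV.x264-GROUP")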
def load_shows_from_db():
    """
    Populates the showList with shows from the database
    """
    logger.debug("Loading initial show list")

    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT indexer, indexer_id, location FROM tv_shows;")

    settings.showList = []
    for sql_show in sql_results:
        try:
            cur_show = TVShow(sql_show["indexer"], sql_show["indexer_id"])
            cur_show.nextEpisode()
            settings.showList.append(cur_show)
        except Exception as error:
            logger.exception("There was an error creating the show in {0}: Error {1}".format(sql_show["location"], error))
            logger.debug(traceback.format_exc())
def compare_db_version(self):
    try:
        self.need_update()
        newest_version = self.get_newest_commit_hash()
        if isinstance(newest_version, str):
            if len(newest_version) != 40:
                raise UpdaterException(f"Commit hash wrong length: {len(newest_version)} hash: {newest_version}")
        else:
            newest_version = f"v{newest_version.major}.{newest_version.minor:02d}.{newest_version.micro:02d}-{newest_version.post}"

        response = helpers.getURL(
            f"https://raw.githubusercontent.com/{settings.GIT_ORG}/{settings.GIT_REPO}/{newest_version}/sickchill/oldbeard/databases/main.py",
            session=self.session,
            returns="text",
        )
        if not response:
            response = helpers.getURL(
                f"https://raw.githubusercontent.com/{settings.GIT_ORG}/{settings.GIT_REPO}/master/sickchill/oldbeard/databases/main.py",
                session=self.session,
                returns="text",
            )

        if not response:
            raise UpdaterException(f"Empty response from GitHub for {newest_version}")

        match = re.search(r"MAX_DB_VERSION\s=\s(?P<version>\d{2,3})", response)
        destination_db_version = int(match.group("version"))

        main_db_con = db.DBConnection()
        current_db_version = main_db_con.get_db_version()

        if destination_db_version > current_db_version:
            return "upgrade"
        elif destination_db_version == current_db_version:
            return "equal"
        else:
            return "downgrade"
    except Exception as e:
        return repr(e)
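# The regex above assumes the remote main.py declares its schema ceiling on a
# line shaped like the following (44 is a hypothetical value):
#
#   MAX_DB_VERSION = 44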
def showEpisodeStatuses(indexer_id, whichStatus):
    status_list = [int(whichStatus)]
    if status_list[0] == SNATCHED:
        status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST

    main_db_con = db.DBConnection()
    cur_show_results = main_db_con.select(
        "SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN ({0})".format(','.join(['?'] * len(status_list))),
        [int(indexer_id)] + status_list,
    )

    result = {}
    for cur_result in cur_show_results:
        cur_season = int(cur_result["season"])
        cur_episode = int(cur_result["episode"])

        if cur_season not in result:
            result[cur_season] = {}

        result[cur_season][cur_episode] = cur_result["name"]

    return json.dumps(result)
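# A sketch of the JSON this returns, keyed season -> episode -> episode name;
# json.dumps turns the integer keys into strings (hypothetical values):
#
#   {"1": {"1": "Pilot", "2": "Second Episode"}, "2": {"5": "Some Episode"}}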
def setUpClass(cls):
    num_legacy_shows = 3
    num_shows = 3
    num_episodes_per_show = 5
    cls.mydb = db.DBConnection()
    cls.legacy_shows = []
    cls.shows = []

    # Per-show-notifications were originally added for email notifications only. To add
    # this feature to other notifiers, it was necessary to alter the way text is stored in
    # one of the DB columns. Therefore, to test properly, we must create some shows that
    # store emails in the old method (legacy method) and then other shows that will use
    # the new method.
    for show_counter in range(100, 100 + num_legacy_shows):
        show = TVShow(1, show_counter)
        show.name = "Show " + str(show_counter)
        show.episodes = []
        for episode_counter in range(0, num_episodes_per_show):
            episode = TVEpisode(show, test.SEASON, episode_counter)
            episode.name = "Episode " + str(episode_counter + 1)
            episode.quality = "SDTV"
            show.episodes.append(episode)
        show.saveToDB()
        cls.legacy_shows.append(show)

    for show_counter in range(200, 200 + num_shows):
        show = TVShow(1, show_counter)
        show.name = "Show " + str(show_counter)
        show.episodes = []
        for episode_counter in range(0, num_episodes_per_show):
            episode = TVEpisode(show, test.SEASON, episode_counter)
            episode.name = "Episode " + str(episode_counter + 1)
            episode.quality = "SDTV"
            show.episodes.append(episode)
        show.saveToDB()
        cls.shows.append(show)
def backlogOverview(self):
    t = PageTemplate(rh=self, filename="manage_backlogOverview.mako")

    showCounts = {}
    showCats = {}
    showSQLResults = {}

    main_db_con = db.DBConnection()
    for curShow in settings.showList:

        epCounts = {
            Overview.SKIPPED: 0,
            Overview.WANTED: 0,
            Overview.QUAL: 0,
            Overview.GOOD: 0,
            Overview.UNAIRED: 0,
            Overview.SNATCHED: 0,
            Overview.SNATCHED_PROPER: 0,
            Overview.SNATCHED_BEST: 0,
        }
        epCats = {}

        sql_results = main_db_con.select(
            "SELECT status, season, episode, name, airdate FROM tv_episodes WHERE tv_episodes.season IS NOT NULL "
            "AND tv_episodes.showid IN (SELECT tv_shows.indexer_id FROM tv_shows WHERE tv_shows.indexer_id = ? "
            "AND paused = 0) ORDER BY tv_episodes.season DESC, tv_episodes.episode DESC",
            [curShow.indexerid],
        )

        for curResult in sql_results:
            curEpCat = curShow.getOverview(curResult["status"], backlog=settings.BACKLOG_MISSING_ONLY)
            if curEpCat:
                epCats["{ep}".format(ep=episode_num(curResult["season"], curResult["episode"]))] = curEpCat
                epCounts[curEpCat] += 1

        showCounts[curShow.indexerid] = epCounts
        showCats[curShow.indexerid] = epCats
        showSQLResults[curShow.indexerid] = sql_results

    def showQualSnatched(show):
        return Quality.splitQuality(show.quality)[1]

    totalWanted = totalQual = totalQualSnatched = 0
    backLogShows = sorted(
        [
            x
            for x in settings.showList
            if (
                showCounts[x.indexerid][Overview.QUAL]
                or showCounts[x.indexerid][Overview.WANTED]
                or (0, showCounts[x.indexerid][Overview.SNATCHED])[len(showQualSnatched(x)) > 0]
            )
        ],
        key=lambda x: x.sort_name,
    )

    for curShow in backLogShows:
        totalWanted += showCounts[curShow.indexerid][Overview.WANTED]
        totalQual += showCounts[curShow.indexerid][Overview.QUAL]
        if showQualSnatched(curShow):
            totalQualSnatched += showCounts[curShow.indexerid][Overview.SNATCHED]

    return t.render(
        showCounts=showCounts,
        showCats=showCats,
        totalQual=totalQual,
        showQualSnatched=showQualSnatched,
        totalWanted=totalWanted,
        totalQualSnatched=totalQualSnatched,
        backLogShows=backLogShows,
        showSQLResults=showSQLResults,
        controller="manage",
        action="backlogOverview",
        title=_("Backlog Overview"),
        header=_("Backlog Overview"),
        topmenu="manage",
    )
def _parse_string(self, name, skip_scene_detection=False):
    if not name:
        return

    matches = []
    best_result = None
    for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes:
        match = cur_regex.match(name)
        if not match:
            continue

        result = ParseResult(name)
        result.which_regex = [cur_regex_name]
        result.score = 0 - cur_regex_num

        named_groups = list(match.groupdict())

        if "series_name" in named_groups:
            result.series_name = match.group("series_name")
            if result.series_name:
                result.series_name = self.clean_series_name(result.series_name)
                result.score += 1

        if "series_num" in named_groups and match.group("series_num"):
            result.score += 1

        if "season_num" in named_groups:
            tmp_season = int(match.group("season_num"))
            if cur_regex_name == "bare" and tmp_season in (19, 20):
                continue
            if cur_regex_name == "fov" and tmp_season > 500:
                continue

            result.season_number = tmp_season
            result.score += 1

        if "ep_num" in named_groups:
            ep_num = self._convert_number(match.group("ep_num"))
            if "extra_ep_num" in named_groups and match.group("extra_ep_num"):
                tmp_episodes = list(range(ep_num, self._convert_number(match.group("extra_ep_num")) + 1))
                if len(tmp_episodes) > 4:
                    continue
            else:
                tmp_episodes = [ep_num]

            result.episode_numbers = tmp_episodes
            result.score += 3

        if "ep_ab_num" in named_groups:
            ep_ab_num = self._convert_number(match.group("ep_ab_num"))
            if "extra_ab_ep_num" in named_groups and match.group("extra_ab_ep_num"):
                result.ab_episode_numbers = list(range(ep_ab_num, self._convert_number(match.group("extra_ab_ep_num")) + 1))
                result.score += 1
            else:
                result.ab_episode_numbers = [ep_ab_num]
            result.score += 1

        if "air_date" in named_groups:
            air_date = match.group("air_date")
            try:
                # Workaround for shows that get interpreted as 'air_date' incorrectly.
                # Shows so far are 11.22.63 and 9-1-1
                excluded_shows = ["112263", "911"]
                assert re.sub(r"[^\d]*", "", air_date) not in excluded_shows

                # noinspection PyUnresolvedReferences
                check = dateutil.parser.parse(air_date, fuzzy_with_tokens=True)[0].date()

                # Make sure a 20th century date isn't returned as a 21st century date
                # 1 Year into the future (No releases should be coming out a year ahead of time, that's just insane)
                if check > check.today() and (check - check.today()).days // 365 > 1:
                    check = check.replace(year=check.year - 100)

                result.air_date = check
                result.score += 1
            except Exception as error:
                logger.debug(error)
                continue

        if "extra_info" in named_groups:
            tmp_extra_info = match.group("extra_info")

            # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
            if tmp_extra_info and cur_regex_name == "season_only" and re.search(r"([. _-]|^)(special|extra)s?\w*([. _-]|$)", tmp_extra_info, re.I):
                continue
            result.extra_info = tmp_extra_info
            result.score += 1

        if "release_group" in named_groups:
            result.release_group = match.group("release_group")
            result.score += 1

        if "version" in named_groups:
            # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
            version = match.group("version")
            if version:
                result.version = version
            else:
                result.version = 1
        else:
            result.version = -1

        matches.append(result)

    # only get matches with series_name
    # TODO: This makes tests fail when checking filenames that do not include the show name (refresh, force update, etc)
    # matches = [x for x in matches if x.series_name]

    if matches:
        # pick best match with highest score based on placement
        best_result = max(sorted(matches, reverse=True, key=attrgetter("which_regex")), key=attrgetter("score"))

        show = None
        if best_result and best_result.series_name and not self.naming_pattern:
            # try and create a show object for this result
            show = helpers.get_show(best_result.series_name, self.tryIndexers)

        # confirm passed in show object indexer id matches result show object indexer id
        if show:
            if self.showObj and show.indexerid != self.showObj.indexerid:
                show = None
            best_result.show = show
        elif self.showObj and not show:
            best_result.show = self.showObj

        # Only allow anime matches if resolved show or specified show is anime
        best_result = self.check_anime_preferred(best_result, matches)

        # if this is a naming pattern test or result doesn't have a show object then return best result
        if not best_result.show or self.naming_pattern:
            return best_result

        # get quality
        best_result.quality = common.Quality.nameQuality(name, best_result.show.is_anime)

        new_episode_numbers = []
        new_season_numbers = []
        new_absolute_numbers = []

        # if we have an air-by-date show then get the real season/episode numbers
        if best_result.is_air_by_date:
            airdate = best_result.air_date.toordinal()
            main_db_con = db.DBConnection()
            sql_result = main_db_con.select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
                [best_result.show.indexerid, best_result.show.indexer, airdate],
            )

            season_number = None
            episode_numbers = []

            if sql_result:
                season_number = int(sql_result[0][0])
                episode_numbers = [int(sql_result[0][1])]

            if season_number is None or not episode_numbers:
                try:
                    epObj = sickchill.indexer.episode(best_result.show, firstAired=best_result.air_date)
                    season_number = epObj["airedSeason"]
                    episode_numbers = [epObj["airedEpisode"]]
                except Exception:
                    logger.warning(f"Unable to find episode with date {best_result.air_date} for show {best_result.show.name}, skipping")
                    episode_numbers = []

            for epNo in episode_numbers:
                s = season_number
                e = epNo

                if best_result.show.is_scene:
                    (s, e) = scene_numbering.get_indexer_numbering(best_result.show.indexerid, best_result.show.indexer, season_number, epNo)
                new_episode_numbers.append(e)
                new_season_numbers.append(s)

        elif best_result.show.is_anime and best_result.ab_episode_numbers:
            best_result.scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[1]
            for epAbsNo in best_result.ab_episode_numbers:
                a = epAbsNo

                if best_result.show.is_scene and not skip_scene_detection:
                    a = scene_numbering.get_indexer_absolute_numbering(
                        best_result.show.indexerid, best_result.show.indexer, epAbsNo, True, best_result.scene_season
                    )

                (s, e) = helpers.get_all_episodes_from_absolute_number(best_result.show, [a])

                new_absolute_numbers.append(a)
                new_episode_numbers.extend(e)
                new_season_numbers.append(s)

        elif best_result.season_number and best_result.episode_numbers:
            for epNo in best_result.episode_numbers:
                s = best_result.season_number
                e = epNo

                if best_result.show.is_scene and not skip_scene_detection:
                    (s, e) = scene_numbering.get_indexer_numbering(
                        best_result.show.indexerid, best_result.show.indexer, best_result.season_number, epNo
                    )
                if best_result.show.is_anime:
                    a = helpers.get_absolute_number_from_season_and_episode(best_result.show, s, e)
                    if a:
                        new_absolute_numbers.append(a)

                new_episode_numbers.append(e)
                new_season_numbers.append(s)

        # need to do a quick sanity check here. It's possible that we now have episodes
        # from more than one season (by tvdb numbering), and this is just too much
        # for oldbeard, so we'd need to flag it.
        new_season_numbers = list(set(new_season_numbers))  # remove duplicates
        if len(new_season_numbers) > 1:
            raise InvalidNameException(
                f"Scene numbering results in episodes from seasons {new_season_numbers}, "
                f"(i.e. more than one) and sickchill does not support this. Sorry."
            )

        # I guess it's possible that we'd have duplicate episodes too, so lets
        # eliminate them
        new_episode_numbers = sorted(set(new_episode_numbers))

        # maybe even duplicate absolute numbers so why not do them as well
        new_absolute_numbers = list(set(new_absolute_numbers))
        new_absolute_numbers.sort()

        if new_absolute_numbers:
            best_result.ab_episode_numbers = new_absolute_numbers

        if new_season_numbers and new_episode_numbers:
            best_result.episode_numbers = new_episode_numbers
            best_result.season_number = new_season_numbers[0]

        if best_result.show.is_scene and not skip_scene_detection:
            logger.debug(f"Converted parsed result {best_result.original_name} into {best_result}")

    # CPU sleep
    time.sleep(0.02)

    return best_result
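# A minimal usage sketch with a hypothetical release name: parse() is the
# public entry point that calls _parse_string() and raises
# InvalidNameException / InvalidShowException when the name or show cannot be
# resolved.
#
#   result = NameParser(tryIndexers=True).parse("Show.Name.S02E03.720p.HDTV.x264-GROUP")
#   print(result.season_number, result.episode_numbers, result.release_group)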
def massAddTable(self, rootDir=None):
    t = PageTemplate(rh=self, filename="home_massAddTable.mako")

    if not rootDir:
        return _("No folders selected.")
    elif not isinstance(rootDir, list):
        root_dirs = [rootDir]
    else:
        root_dirs = rootDir

    root_dirs = [unquote_plus(xhtml_unescape(x)) for x in root_dirs]

    if settings.ROOT_DIRS:
        default_index = int(settings.ROOT_DIRS.split("|")[0])
    else:
        default_index = 0

    if len(root_dirs) > default_index:
        tmp = root_dirs[default_index]
        if tmp in root_dirs:
            root_dirs.remove(tmp)
            root_dirs.insert(0, tmp)

    dir_list = []

    main_db_con = db.DBConnection()
    for root_dir in root_dirs:
        # noinspection PyBroadException
        try:
            file_list = os.listdir(root_dir)
        except Exception:
            continue

        for cur_file in file_list:
            # noinspection PyBroadException
            try:
                cur_path = os.path.normpath(os.path.join(root_dir, cur_file))
                if not os.path.isdir(cur_path):
                    continue
                # ignore Synology folders
                if cur_file.lower() in ["#recycle", "@eadir"]:
                    continue
            except Exception:
                continue

            cur_dir = {
                "dir": cur_path,
                "existing_info": (None, None, None),
                "display_dir": "<b>" + os.path.dirname(cur_path) + os.sep + "</b>" + os.path.basename(cur_path),
            }

            # see if the folder is in KODI already
            dirResults = main_db_con.select("SELECT indexer_id FROM tv_shows WHERE location = ? LIMIT 1", [cur_path])

            if dirResults:
                cur_dir["added_already"] = True
            else:
                cur_dir["added_already"] = False

            dir_list.append(cur_dir)

            indexer_id = show_name = indexer = None
            for cur_provider in settings.metadata_provider_dict.values():
                if not (indexer_id and show_name):
                    (indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                    if all((indexer_id, show_name, indexer)):
                        break

            if all((indexer_id, show_name, indexer)):
                cur_dir["existing_info"] = (indexer_id, show_name, indexer)

            if indexer_id and Show.find(settings.showList, indexer_id):
                cur_dir["added_already"] = True

    return t.render(dirList=dir_list)
def run(self, force=False):
    if self.amActive:
        return

    self.amActive = True

    try:
        logger.info('ShowUpdater for tvdb Api V3 starting')

        cache_db_con = db.DBConnection('cache.db')
        for index, provider in sickchill.indexer:
            database_result = cache_db_con.select('SELECT `time` FROM lastUpdate WHERE provider = ?', [provider.name])
            last_update = int(database_result[0][0]) if database_result else 0

            network_timezones.update_network_dict()

            update_timestamp = int(time.time())
            updated_shows = []

            if last_update:
                logger.info('Last update: {}'.format(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(last_update))))

                current_check = update_timestamp
                while current_check >= last_update:
                    try:
                        TvdbData = sickchill.indexer[1].updates(fromTime=current_check - self.seven_days, toTime=current_check)
                        TvdbData.series()
                        updated_shows.extend([d['id'] for d in TvdbData.series])
                    except Exception as error:
                        logger.info(str(error))

                    current_check -= self.seven_days - 1
            else:
                logger.info(_('No last update time from the cache, so we do a full update for all shows'))

            pi_list = []
            for cur_show in settings.showList:
                try:
                    cur_show.nextEpisode()

                    skip_update = False
                    # Skip ended shows until interval is met
                    if cur_show.status == 'Ended' and settings.ENDED_SHOWS_UPDATE_INTERVAL != 0:  # 0 is always
                        if settings.ENDED_SHOWS_UPDATE_INTERVAL == -1:  # Never
                            skip_update = True
                        if (datetime.datetime.today() - datetime.datetime.fromordinal(cur_show.last_update_indexer or 1)).days < \
                                settings.ENDED_SHOWS_UPDATE_INTERVAL:
                            skip_update = True

                    # Just update all of the shows for now until they fix the updates api
                    # When last_update is not set from the cache or the show was in the tvdb updated list we update the show
                    if not last_update or (cur_show.indexerid in updated_shows and not skip_update):
                        pi_list.append(settings.showQueueScheduler.action.update_show(cur_show, True))
                    else:
                        pi_list.append(settings.showQueueScheduler.action.refresh_show(cur_show, force))
                except (CantUpdateShowException, CantRefreshShowException) as error:
                    logger.info(_('Automatic update failed: {0}').format(str(error)))

            ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))

            if database_result:
                cache_db_con.action('UPDATE lastUpdate SET `time` = ? WHERE provider = ?', [str(update_timestamp), provider.name])
            else:
                cache_db_con.action('INSERT INTO lastUpdate (time, provider) VALUES (?, ?)', [str(update_timestamp), provider.name])
    except Exception as error:
        logger.exception(str(error))

    self.amActive = False