def set_custom_exceptions(tvdb_id, show_names):
    """
    Set custom exception list for a show.

    tvdb_id: TVDB id of the show to update.
    show_names: a list of show names; None (or empty) means "remove all
                custom exceptions for this show".

    Synchronizes the custom_exceptions table with show_names: names not yet
    stored are inserted, names no longer listed are deleted.  When anything
    changed, the scene name cache is cleared since it may now be stale.
    """
    # Guard against a None argument so the loop and the membership tests
    # below don't blow up; None is treated the same as an empty list.
    if not show_names:
        show_names = []

    myDB = db.DBConnection()
    _check_for_schema()

    changed_exceptions = False

    # get a list of the existing exceptions for this ID
    existing_exceptions = [x["show_name"] for x in
                           myDB.select("SELECT * FROM custom_exceptions WHERE tvdb_id = ?", [tvdb_id])]

    for show_name in show_names:
        # if this exception isn't already in the DB then add it
        if show_name not in existing_exceptions:
            myDB.action("INSERT INTO custom_exceptions (tvdb_id, show_name) VALUES (?,?)",
                        [tvdb_id, show_name])
            changed_exceptions = True

    # also need to delete anything we have in the db which is not now in show_names
    for show_name in existing_exceptions:
        if show_name not in show_names:
            myDB.action('DELETE FROM custom_exceptions where tvdb_id = ? and show_name = ?',
                        [tvdb_id, show_name])
            changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        name_cache.clearCache()
def execute(self): ShowQueueItem.execute(self) logger.log(u"Starting to add show " + self.showDir) try: # make sure the tvdb ids are valid try: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if self.lang: ltvdb_api_parms["language"] = self.lang logger.log(u"TVDB: " + repr(ltvdb_api_parms)) t = tvdb_api.Tvdb(**ltvdb_api_parms) s = t[self.tvdb_id] # this usually only happens if they have an NFO in their show dir which gave us a TVDB ID that has no # proper english version of the show if not s or not s["seriesname"]: ui.notifications.error( "Unable to add show", "Show in " + self.showDir + " has no name on TVDB, probably the wrong language. Delete .nfo and add manually in the correct language.", ) self._finishEarly() return except tvdb_exceptions.tvdb_exception, e: logger.log(u"Error contacting TVDB: " + ex(e), logger.ERROR) ui.notifications.error( "Unable to add show", "Unable to look up the show in " + self.showDir + " on TVDB, not using the NFO. Delete .nfo and add manually in the correct language.", ) self._finishEarly() return # clear the name cache name_cache.clearCache() newShow = TVShow(self.tvdb_id, self.lang) newShow.loadFromTVDB() self.show = newShow # set up initial values self.show.location = self.showDir self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT self.show.flatten_folders = ( self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT ) self.show.paused = False # be smartish about this if self.show.genre and "talk show" in self.show.genre.lower(): self.show.air_by_date = 1
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them
    into the scene_exceptions table in cache.db. Also clears the scene name cache.

    Only names not already present in the table are inserted; nothing is
    deleted.  Returns None on a connection failure.
    """
    # NOTE(review): name looks like a typo of "build_name_set" -- defined
    # elsewhere in this module; presumably refreshes the in-memory name set.
    buil_name_set()

    exception_dict = {}

    # exceptions are stored on github pages
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    logger.log(u"Check scene exceptions update")
    url_data = helpers.getURL(url)

    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
        return
    else:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in url_data.splitlines():
            cur_line = cur_line.decode('utf-8')
            tvdb_id, sep, aliases = cur_line.partition(':')  #@UnusedVariable

            # a line without a ':' yields an empty 'aliases' -- skip it
            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account
            alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list

        myDB = db.DBConnection("cache.db")

        changed_exceptions = False

        # write all the exceptions we got off the net into the database
        for cur_tvdb_id in exception_dict:

            # get a list of the existing exceptions for this ID
            existing_exceptions = [x["show_name"] for x in myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]

            for cur_exception in exception_dict[cur_tvdb_id]:
                # if this exception isn't already in the DB then add it
                if cur_exception not in existing_exceptions:
                    myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)", [cur_tvdb_id, cur_exception])
                    changed_exceptions = True

        # since this could invalidate the results of the cache we clear it out after updating
        if changed_exceptions:
            logger.log(u"Updated scene exceptions")
            name_cache.clearCache()
        else:
            logger.log(u"No scene exceptions update needed")
def retrieve_exceptions():
    """
    Reads scene exceptions from the local Used_Files/exceptions.txt file,
    merges them with the XEM exceptions, and adds any missing entries to the
    scene_exceptions table in cache.db. Also clears the scene name cache.

    Returns False (leaving the database untouched) when the merged exception
    list is empty, which is assumed to indicate a server problem.
    """
    exception_dict = {}

    # exceptions were originally stored on github pages; kept for reference
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    excepfile = os.path.join(os.path.join(sickbeard.PROG_DIR, 'Used_Files'), 'exceptions.txt')

    logger.log(u"Check scene exceptions file to update db")

    # Use a context manager so the file handle is closed on every path --
    # the original only closed it at the very end, leaking the handle on
    # each of the early returns below.
    with open(excepfile, "r") as f:
        data = f.read()

    if data is None:
        # Defensive check kept from the original; file.read() never actually
        # returns None, so this branch is effectively dead code.
        logger.log(u"Check scene exceptions update failed. Unable to get file: " + excepfile, logger.ERROR)
        return
    else:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in data.splitlines():
            try:
                cur_line = cur_line.decode('utf-8')
            except:
                # not valid utf-8 -- fall back to latin-1
                cur_line = cur_line.decode('latin-1')
            tvdb_id, sep, aliases = cur_line.partition(':')  #@UnusedVariable

            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account; the file
            # carries no season info so every entry gets season -1 (any season)
            alias_list = [{re.sub(r'\\(.)', r'\1', x): -1}
                          for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list

        # merge in the XEM exceptions; on a tvdb_id collision the file's
        # entry wins (its items come last in the concatenation)
        xem_exceptions = _xem_excpetions_fetcher()
        exception_dict = dict(xem_exceptions.items() + exception_dict.items())

        if not len(exception_dict):
            logger.log("Retreived exception list is totally empty. Assuming remote server error not flushing local and stoping now")
            return False

        myDB = db.DBConnection("cache.db")

        # write all the exceptions into the database; existing rows are kept
        # and only missing ones are added
        for cur_tvdb_id in exception_dict:

            # get a list of the existing exceptions for this ID
            existing_exceptions = [x["show_name"] for x in myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]

            for cur_exception_dict in exception_dict[cur_tvdb_id]:
                # each entry is a single-item {show_name: season} dict
                cur_exception, curSeason = cur_exception_dict.items()[0]

                # if this exception isn't already in the DB then add it
                if cur_exception not in existing_exceptions:
                    myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name, season) VALUES (?,?,?)",
                                [cur_tvdb_id, cur_exception, curSeason])

        # cached name lookups may now be stale
        name_cache.clearCache()
def retrieve_exceptions():
    """
    Fetch the scene exception list from github, parse it, and merge any new
    entries into the scene_exceptions table in cache.db.  Clears the scene
    name cache when the table changed.
    """
    # exceptions live on github pages
    url = 'http://xbianonpi.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    logger.log(u"Check scene exceptions update")
    url_data = helpers.getURL(url)

    if url_data is None:
        # no data back means we could not reach github
        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
        return

    exception_dict = {}

    # one exception set per line: tvdb_id: 'name 1', 'name 2', ...
    for raw_line in url_data.splitlines():
        decoded = raw_line.decode('utf-8')
        show_id, _sep, alias_part = decoded.partition(':')

        if not alias_part:
            continue

        # pull out the quoted names, un-escaping \' as we go
        names = [re.sub(r'\\(.)', r'\1', m)
                 for m in re.findall(r"'(.*?)(?<!\\)',?", alias_part)]
        exception_dict[int(show_id)] = names

    myDB = db.DBConnection("cache.db")
    changed_exceptions = False

    # push everything we parsed into the database, skipping known rows
    for show_id in exception_dict:
        known = [row["show_name"] for row in
                 myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [show_id])]

        for name in exception_dict[show_id]:
            if name in known:
                continue
            myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)", [show_id, name])
            changed_exceptions = True

    # a change may invalidate cached name lookups
    if changed_exceptions:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")
def execute(self): ShowQueueItem.execute(self) logger.log(u"Starting to add show " + self.showDir) try: # make sure the tvdb ids are valid try: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if self.lang: ltvdb_api_parms['language'] = self.lang logger.log(u"TVDB: " + repr(ltvdb_api_parms)) t = tvdb_api.Tvdb(**ltvdb_api_parms) s = t[self.tvdb_id] # this usually only happens if they have an NFO in their show dir which gave us a TVDB ID that has no # proper english version of the show if not s or not s['seriesname']: ui.notifications.error( "Unable to add show", "Show in " + self.showDir + " has no name on TVDB, probably the wrong language. Delete .nfo and add manually in the correct language." ) self._finishEarly() return except tvdb_exceptions.tvdb_exception, e: logger.log( u"Error contacting TVDB: " + e.message.decode(sickbeard.SYS_ENCODING), logger.ERROR) ui.notifications.error( "Unable to add show", "Unable to look up the show in " + self.showDir + " on TVDB, not using the NFO. Delete .nfo and add manually in the correct language." ) self._finishEarly() return # clear the name cache name_cache.clearCache() newShow = TVShow(self.tvdb_id, self.lang) newShow.loadFromTVDB() self.show = newShow # set up initial values self.show.location = self.showDir self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT self.show.seasonfolders = self.season_folders if self.season_folders != None else sickbeard.SEASON_FOLDERS_DEFAULT self.show.paused = False # be smartish about this if self.show.genre and "talk show" in self.show.genre.lower(): self.show.air_by_date = 1
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them
    into the scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    exception_dict = {}

    # exceptions are stored on github pages
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    # close the connection even if parsing blows up -- the original never
    # called close() and leaked the socket
    open_url = urllib.urlopen(url)
    try:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in open_url.readlines():
            cur_line = cur_line.decode('utf-8')
            tvdb_id, sep, aliases = cur_line.partition(':')  #@UnusedVariable

            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account
            alias_list = [re.sub(r'\\(.)', r'\1', x)
                          for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list
    finally:
        open_url.close()

    myDB = db.DBConnection("cache.db")

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    for cur_tvdb_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [x["show_name"] for x in
                               myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]

        for cur_exception in exception_dict[cur_tvdb_id]:
            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:
                myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                            [cur_tvdb_id, cur_exception])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        name_cache.clearCache()
def retrieve_exceptions():
    """
    Download the scene exception list from the show-api server (JSON),
    sync it into the scene_exceptions table in cache.db and into the
    in-memory _dyn_cache, then clear the scene name cache if anything
    changed.
    """
    global _dyn_cache

    # Moved the exceptions onto our show-api server (to allow for future source merging)
    url = 'http://show-api.tvtumbler.com/api/exceptions'

    logger.log(u"Check scene exceptions update")
    url_data = helpers.getURL(url)

    if url_data is None:
        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
        return

    exception_dict = json.loads(url_data)

    myDB = db.DBConnection("cache.db")
    changed_exceptions = False

    # sync every show's list into the database
    for cur_tvdb_id in exception_dict:

        # keep the in-memory cache in step with what we just downloaded
        _dyn_cache[str(cur_tvdb_id)] = exception_dict[cur_tvdb_id]

        # names already stored for this show
        existing_exceptions = [row["show_name"] for row in
                               myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]

        # add anything new
        for cur_exception in exception_dict[cur_tvdb_id]:
            if cur_exception not in existing_exceptions:
                logger.log(u'Adding exception %s: %s' % (cur_tvdb_id, cur_exception), logger.DEBUG)
                myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                            [cur_tvdb_id, cur_exception])
                changed_exceptions = True

        # drop anything that disappeared from the server's list
        for cur_exception in existing_exceptions:
            if cur_exception not in exception_dict[cur_tvdb_id]:
                logger.log(u'Removing exception %s: %s' % (cur_tvdb_id, cur_exception), logger.DEBUG)
                myDB.action("DELETE FROM scene_exceptions WHERE tvdb_id = ? AND show_name = ?",
                            [cur_tvdb_id, cur_exception])
                changed_exceptions = True

    # a change may invalidate cached name lookups
    if changed_exceptions:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")
def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Replace every stored scene exception for the given indexer_id with the
    supplied list, then flush the scene name cache.
    """
    myDB = db.DBConnection("cache.db")

    # wipe the old rows first, then re-insert everything we were given
    myDB.action("DELETE FROM scene_exceptions WHERE indexer_id=?", [indexer_id])
    for exception_name in scene_exceptions:
        myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name) VALUES (?,?)",
                    [indexer_id, exception_name])

    # cached name lookups may now be stale
    name_cache.clearCache()
def update_scene_exceptions(tvdb_id, scene_exceptions):
    """
    Rewrite the scene exception list for a show: delete every stored row
    for tvdb_id and insert the names in scene_exceptions, then flush the
    scene name cache.
    """
    myDB = db.DBConnection("cache.db")

    # out with the old rows...
    myDB.action('DELETE FROM scene_exceptions WHERE tvdb_id=?', [tvdb_id])

    # ...and in with the new list
    for name in scene_exceptions:
        myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)", [tvdb_id, name])

    # cached lookups may now be stale
    name_cache.clearCache()
def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Replace the custom scene exceptions for the given indexer_id: wipe the
    existing custom rows, then store every supplied name once for the
    generic season (-1) and once for each known scene season.
    """
    myDB = db.DBConnection('cache.db')

    # only custom (user-entered) rows are replaced; provider rows stay
    myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])

    logger.log(u"Updating scene exceptions", logger.MESSAGE)

    season_list = [-1] + get_scene_seasons(indexer_id)
    for season in season_list:
        for exception_name in scene_exceptions:
            myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
                        [indexer_id, exception_name, season, 1])

    # cached name lookups may now be stale
    name_cache.clearCache()
def retrieve_exceptions(localOnly=False): """ Looks up the exceptions on github, parses them into a dict, and inserts them into the scene_exceptions table in cache.db. Also clears the scene name cache. """ # exceptions are stored on github pages url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt' url2 = 'http://lad1337.github.com/sb_tvdb_scene_exceptions/anime_exceptions.txt' exception_dict = {} if not localOnly: exception_dict = _retrieve_exceptions_fetcher(url) exception_dict.update(_retrieve_exceptions_fetcher(url2)) # server anime exceptions local_exceptions = _retrieve_anidb_mainnames() for local_ex in local_exceptions: # anidb xml anime exceptions if local_ex in exception_dict: exception_dict[local_ex] = exception_dict[local_ex] + local_exceptions[local_ex] else: exception_dict[local_ex] = local_exceptions[local_ex] xem_exceptions = _xem_excpetions_fetcher() for xem_ex in xem_exceptions: # anidb xml anime exceptions if xem_ex in exception_dict: exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex] else: exception_dict[xem_ex] = xem_exceptions[xem_ex] if not len(exception_dict): logger.log("retrived exception list is totally empty. Assuming remote server error not flushing local and stoping now") return False myDB = db.DBConnection("cache.db") myDB.action("DELETE FROM scene_exceptions") # flush current list # write all the exceptions we got off the net into the database for cur_tvdb_id in exception_dict: for cur_exception_dict in exception_dict[cur_tvdb_id]: cur_exception, curSeason = cur_exception_dict.items()[0] myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name, season) VALUES (?,?,?)", [cur_tvdb_id, cur_exception, curSeason]) name_cache.clearCache() global excpetionCache global seasonExcpetionCache excpetionCache = {} seasonExcpetionCache = {} buil_name_set()
def update_scene_exceptions(tvdb_id, scene_exceptions):
    """
    Replace every stored scene exception for tvdb_id with the given list,
    carrying over the season value of the old rows (falling back to -1,
    i.e. "any season", when there were no rows to copy it from).
    """
    myDB = db.DBConnection("cache.db")

    # remember the season of the existing rows before wiping them
    rows = myDB.select("SELECT season FROM scene_exceptions WHERE tvdb_id=?", [tvdb_id])
    season_to_keep = rows[0][0] if rows else -1

    myDB.action('DELETE FROM scene_exceptions WHERE tvdb_id=?', [tvdb_id])

    for exception_name in scene_exceptions:
        myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name, season) VALUES (?,?,?)",
                    [tvdb_id, exception_name, season_to_keep])

    # cached name lookups may now be stale
    name_cache.clearCache()
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them
    into the scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    exception_dict = {}

    # exceptions are stored on github pages
    url = 'http://cytec.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    # make sure the connection gets closed even if parsing fails -- the
    # original never closed it and leaked the socket
    open_url = urllib.urlopen(url)
    try:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in open_url.readlines():
            tvdb_id, sep, aliases = cur_line.partition(':')  #@UnusedVariable

            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account
            alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list
    finally:
        open_url.close()

    myDB = db.DBConnection("cache.db")

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    for cur_tvdb_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [x["show_name"] for x in
                               myDB.select("SELECT * FROM scene_exceptions WHERE tvdb_id = ?", [cur_tvdb_id])]

        for cur_exception in exception_dict[cur_tvdb_id]:
            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:
                myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                            [cur_tvdb_id, cur_exception])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        name_cache.clearCache()
def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Replace the custom scene exceptions for indexer_id and refresh the
    in-memory exceptionIndexerCache as the new names are written.
    """
    global exceptionIndexerCache

    myDB = db.DBConnection('cache.db')

    # only user-entered (custom=1) rows are replaced
    myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])

    logger.log(u"Updating internal scene name cache", logger.MESSAGE)

    for season in [-1] + sickbeard.scene_exceptions.get_scene_seasons(indexer_id):
        for exception_name in scene_exceptions:
            # keep the fast name -> id lookup in sync with the db
            exceptionIndexerCache[helpers.full_sanitizeSceneName(exception_name)] = indexer_id
            myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
                        [indexer_id, exception_name, season, 1])

    # cached name lookups may now be stale
    name_cache.clearCache()
def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Rewrite the custom scene exceptions for a show and mirror the new
    names into the in-memory exceptionIndexerCache.
    """
    global exceptionIndexerCache

    myDB = db.DBConnection("cache.db")

    # drop only the user-entered rows; provider-supplied rows survive
    myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])

    logger.log(u"Updating internal scene name cache", logger.MESSAGE)

    seasons = [-1] + sickbeard.scene_exceptions.get_scene_seasons(indexer_id)
    for cur_season in seasons:
        for cur_name in scene_exceptions:
            sanitized = helpers.full_sanitizeSceneName(cur_name)
            exceptionIndexerCache[sanitized] = indexer_id
            myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
                        [indexer_id, cur_name, cur_season, 1])

    name_cache.clearCache()
def run(self, force=False): self.amActive = True # clear internal name cache name_cache.clearCache() # get and update scene exceptions lists scene_exceptions.retrieve_exceptions() # build internal name cache for searches and parsing name_cache.buildNameCache() # refresh network timezones network_timezones.update_network_dict() # sure, why not? if sickbeard.USE_FAILED_DOWNLOADS: failed_history.trimHistory() self.amActive = False
def update_scene_exceptions(tvdb_id, scene_exceptions):
    """
    Rewrite the scene exception list for a show.

    The season value of any pre-existing rows is carried over to the new
    rows; with no pre-existing rows the generic season -1 is used.
    """
    myDB = db.DBConnection("cache.db")

    existing = myDB.select("SELECT season FROM scene_exceptions WHERE tvdb_id=?", [tvdb_id])
    if existing:
        season = existing[0][0]
    else:
        season = -1

    # out with the old...
    myDB.action('DELETE FROM scene_exceptions WHERE tvdb_id=?', [tvdb_id])

    # ...in with the new, all tagged with the remembered season
    for name in scene_exceptions:
        myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name, season) VALUES (?,?,?)",
                    [tvdb_id, name, season])

    name_cache.clearCache()
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, flushes the
    scene_exceptions table in cache.db and rewrites it from the parsed data.
    Also clears the scene name cache.
    """
    exception_dict = {}

    # exceptions are stored on github pages
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    # close the connection even if a parse error occurs below -- the
    # original never closed it
    open_url = urllib.urlopen(url)
    try:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in open_url.readlines():
            tvdb_id, sep, aliases = cur_line.partition(':')  #@UnusedVariable

            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account
            alias_list = [re.sub(r'\\(.)', r'\1', x)
                          for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list
    finally:
        open_url.close()

    myDB = db.DBConnection("cache.db")
    myDB.action("DELETE FROM scene_exceptions WHERE 1=1")

    # write all the exceptions we got off the net into the database
    for cur_tvdb_id in exception_dict:
        for cur_exception in exception_dict[cur_tvdb_id]:
            myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                        [cur_tvdb_id, cur_exception])

    # since this could invalidate the results of the cache we clear it out after updating
    name_cache.clearCache()
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, flushes the
    scene_exceptions table in cache.db and rewrites it from the parsed data.
    Also clears the scene name cache.
    """
    exception_dict = {}

    # exceptions are stored on github pages
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    # ensure the connection is closed on every path -- the original leaked
    # the handle by never calling close()
    open_url = urllib.urlopen(url)
    try:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in open_url.readlines():
            tvdb_id, sep, aliases = cur_line.partition(':')

            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account
            alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list
    finally:
        open_url.close()

    myDB = db.DBConnection("cache.db")
    myDB.action("DELETE FROM scene_exceptions WHERE 1=1")

    # write all the exceptions we got off the net into the database
    for cur_tvdb_id in exception_dict:
        for cur_exception in exception_dict[cur_tvdb_id]:
            myDB.action("INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                        [cur_tvdb_id, cur_exception])

    # since this could invalidate the results of the cache we clear it out after updating
    name_cache.clearCache()
def set_custom_exceptions(tvdb_id, show_names):
    """
    Set custom exception list for a show. 'show_names' is a list of show names
    (None is treated the same as an empty list).

    Synchronizes the custom_exceptions table with show_names -- inserting
    new names and deleting removed ones -- clears the scene name cache when
    anything changed, and finally stores the new list in the in-memory
    _dyn_cache.
    """
    global _dyn_cache

    # treat None as "no custom exceptions"
    if not show_names:
        show_names = []

    myDB = db.DBConnection()
    _check_for_schema()

    changed_exceptions = False

    # get a list of the existing exceptions for this ID
    existing_exceptions = [x["show_name"] for x in myDB.select("SELECT * FROM custom_exceptions WHERE tvdb_id = ?", [tvdb_id])]

    for show_name in show_names:
        # if this exception isn't already in the DB then add it
        if show_name not in existing_exceptions:
            myDB.action("INSERT INTO custom_exceptions (tvdb_id, show_name) VALUES (?,?)", [tvdb_id, show_name])
            changed_exceptions = True

    # also need to delete anything we have in the db which is not now in show_names
    for show_name in existing_exceptions:
        if show_name not in show_names:
            myDB.action('DELETE FROM custom_exceptions where tvdb_id = ? and show_name = ?', [tvdb_id, show_name])
            changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        name_cache.clearCache()

    # put the new list into the dynamic cache
    _dyn_cache[str(tvdb_id)] = show_names
def execute(self): ShowQueueItem.execute(self) logger.log(u"Starting to add show " + self.showDir) try: # make sure the tvdb ids are valid try: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if self.lang: ltvdb_api_parms['language'] = self.lang logger.log(u"TVDB: " + repr(ltvdb_api_parms)) t = tvdb_api.Tvdb(**ltvdb_api_parms) s = t[self.tvdb_id] # this usually only happens if they have an NFO in their show dir which gave us a TVDB ID that has no proper english version of the show if not s['seriesname']: logger.log( u"Show in " + self.showDir + " has no name on TVDB, probably the wrong language used to search with.", logger.ERROR) ui.notifications.error( "Unable to add show", "Show in " + self.showDir + " has no name on TVDB, probably the wrong language. Delete .nfo and add manually in the correct language." ) self._finishEarly() return # if the show has no episodes/seasons if not s: logger.log( u"Show " + str(s['seriesname']) + " is on TVDB but contains no season/episode data.", logger.ERROR) ui.notifications.error( "Unable to add show", "Show " + str(s['seriesname']) + " is on TVDB but contains no season/episode data.") self._finishEarly() return except tvdb_exceptions.tvdb_exception, e: logger.log(u"Error contacting TVDB: " + ex(e), logger.ERROR) ui.notifications.error( "Unable to add show", "Unable to look up the show in " + self.showDir + " on TVDB, not using the NFO. Delete .nfo and add manually in the correct language." 
) self._finishEarly() return # clear the name cache name_cache.clearCache() newShow = TVShow(self.tvdb_id, self.lang) newShow.loadFromTVDB() self.show = newShow # set up initial values self.show.location = self.showDir self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT self.show.stay_ahead = self.stay_ahead if self.stay_ahead != None else sickbeard.STAY_AHEAD_DEFAULT self.show.paused = 0 self.show.added_date = int(time.time()) # be smartish about this if self.show.genre and "talk show" in self.show.genre.lower(): self.show.air_by_date = 1
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and syncs them
    into the scene_exceptions table in cache.db. Also clears the scene name cache.

    Rows are tagged with provider='sb_tvdb_scene_exceptions', so rows from
    other providers are never touched.  Both additions and removals are
    applied, batched through mass_action.  Returns False when github could
    not be reached, True otherwise.
    """
    provider = 'sb_tvdb_scene_exceptions'
    remote_exception_dict = {}
    local_exception_dict = {}
    query_list = []

    # remote exceptions are stored on github pages
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    logger.log(u"Check scene exceptions update")

    # get remote exceptions
    url_data = helpers.getURL(url)

    if not url_data:
        # when url_data is None, trouble connecting to github
        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
        return False
    else:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in url_data.splitlines():
            cur_line = cur_line.decode('utf-8')
            tvdb_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable

            if not aliases:
                continue

            cur_tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account
            alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            remote_exception_dict[cur_tvdb_id] = alias_list

        # get local exceptions (only our provider's rows)
        myDB = db.DBConnection("cache.db", row_type="dict")
        sql_result = myDB.select("SELECT tvdb_id, show_name FROM scene_exceptions WHERE provider=?;", [provider])
        for cur_result in sql_result:
            cur_tvdb_id = cur_result["tvdb_id"]
            if cur_tvdb_id not in local_exception_dict:
                local_exception_dict[cur_tvdb_id] = []
            local_exception_dict[cur_tvdb_id].append(cur_result["show_name"])

        # check remote against local for added exceptions; note this also
        # seeds empty lists into local_exception_dict so the removal pass
        # below can index it safely
        for cur_tvdb_id in remote_exception_dict:
            if cur_tvdb_id not in local_exception_dict:
                local_exception_dict[cur_tvdb_id] = []
            for cur_exception_name in remote_exception_dict[cur_tvdb_id]:
                if cur_exception_name not in local_exception_dict[cur_tvdb_id]:
                    query_list.append(["INSERT INTO scene_exceptions (tvdb_id,show_name,provider) VALUES (?,?,?);", [cur_tvdb_id, cur_exception_name, provider]])

        # check local against remote for removed exceptions
        for cur_tvdb_id in local_exception_dict:
            if cur_tvdb_id not in remote_exception_dict:
                # show vanished from the remote list entirely
                query_list.append(["DELETE FROM scene_exceptions WHERE tvdb_id=? AND provider=?;", [cur_tvdb_id, provider]])
            else:
                for cur_exception_name in local_exception_dict[cur_tvdb_id]:
                    if cur_exception_name not in remote_exception_dict[cur_tvdb_id]:
                        query_list.append(["DELETE FROM scene_exceptions WHERE tvdb_id= ? AND show_name=? AND provider=?;", [cur_tvdb_id, cur_exception_name, provider]])

        if query_list:
            logger.log(u"Updating scene exceptions")
            # apply all inserts/deletes in one transaction
            myDB.mass_action(query_list, logTransaction=True)
            logger.log(u"Clear name cache")
            name_cache.clearCache()
            logger.log(u"Performing a vacuum on database: " + myDB.filename)
            myDB.action("VACUUM")
        else:
            logger.log(u"No scene exceptions update needed")

    return True
def retrieve_exceptions():
    """
    Reads scene exceptions from the local Used_Files/exceptions.txt file,
    merges them with the XEM exceptions, flushes the scene_exceptions table
    in cache.db and rewrites it from the merged list. Also clears the scene
    name cache.

    Returns False (leaving the table untouched) when the merged list is
    empty, which is assumed to indicate a remote/server problem.
    """
    exception_dict = {}

    # exceptions were originally stored on github pages; kept for reference
    url = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'

    excepfile = os.path.join(os.path.join(sickbeard.PROG_DIR, 'Used_Files'), 'exceptions.txt')

    logger.log(u"Check scene exceptions file to update db")

    # use a context manager so the handle is closed on every path -- the
    # original leaked it whenever one of the early returns was taken
    with open(excepfile, "r") as f:
        data = f.read()

    if data is None:
        # defensive check kept from the original; file.read() never actually
        # returns None, so this branch is effectively dead code
        logger.log(
            u"Check scene exceptions update failed. Unable to get file: " + excepfile,
            logger.ERROR)
        return
    else:
        # each exception is on one line with the format tvdb_id: 'show name 1', 'show name 2', etc
        for cur_line in data.splitlines():
            try:
                cur_line = cur_line.decode('utf-8')
            except:
                # not valid utf-8 -- fall back to latin-1
                cur_line = cur_line.decode('latin-1')
            tvdb_id, sep, aliases = cur_line.partition(':')  #@UnusedVariable

            if not aliases:
                continue

            tvdb_id = int(tvdb_id)

            # regex out the list of shows, taking \' into account; the file
            # has no season info so -1 (any season) is recorded
            alias_list = [{re.sub(r'\\(.)', r'\1', x): -1}
                          for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[tvdb_id] = alias_list

        # merge in the XEM exceptions; on a tvdb_id collision the file's
        # entry wins (its items come last in the concatenation)
        xem_exceptions = _xem_excpetions_fetcher()
        exception_dict = dict(xem_exceptions.items() + exception_dict.items())

        if not len(exception_dict):
            logger.log(
                "Retreived exception list is totally empty. Assuming remote server error not flushing local and stoping now"
            )
            return False

        myDB = db.DBConnection("cache.db")
        myDB.action("DELETE FROM scene_exceptions")  # flush current list

        # write all the exceptions we got off the net into the database
        for cur_tvdb_id in exception_dict:
            for cur_exception_dict in exception_dict[cur_tvdb_id]:
                # each entry is a single-item {show_name: season} dict
                cur_exception, curSeason = cur_exception_dict.items()[0]
                myDB.action(
                    "INSERT INTO scene_exceptions (tvdb_id, show_name, season) VALUES (?,?,?)",
                    [cur_tvdb_id, cur_exception, curSeason])

        # cached name lookups may now be stale
        name_cache.clearCache()
class QueueItemAdd(ShowQueueItem):
    """
    Show-queue item that adds a brand-new show: validates the show against the
    indexer, creates the TVShow object, and seeds its initial settings.
    """

    def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles,
                 anime, scene):
        # raw add-show parameters; for the per-show options, None means
        # "fall back to the corresponding sickbeard *_DEFAULT setting"
        # (applied later in execute())
        self.indexer = indexer
        self.indexer_id = indexer_id
        self.showDir = showDir
        self.default_status = default_status
        self.quality = quality
        self.flatten_folders = flatten_folders
        self.lang = lang
        self.subtitles = subtitles
        self.anime = anime
        self.scene = scene

        self.show = None

        # this will initialize self.show to None
        ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show)

    def _getName(self):
        """
        Returns the show name if there is a show object created, if not returns
        the dir that the show is being added to.
        """
        if self.show == None:
            return self.showDir
        return self.show.name

    show_name = property(_getName)

    def _isLoading(self):
        """
        Returns True if we've gotten far enough to have a show object, or False
        if we still only know the folder name.
        """
        if self.show == None:
            return True
        return False

    isLoading = property(_isLoading)

    def execute(self):
        """
        Look the show up on the indexer, bail out early (via _finishEarly) on
        any problem, then build the TVShow object and apply initial settings.
        """
        ShowQueueItem.execute(self)

        logger.log(u"Starting to add show " + self.showDir)

        # make sure the Indexer IDs are valid
        try:
            lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
            if self.lang:
                lINDEXER_API_PARMS['language'] = self.lang

            logger.log(u"" + str(sickbeard.indexerApi(self.indexer).name) + ": " + repr(lINDEXER_API_PARMS))

            t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
            s = t[self.indexer_id]

            # this usually only happens if they have an NFO in their show dir
            # which gave us a Indexer ID that has no proper english version of the show
            if getattr(s, 'seriesname', None) is None:
                logger.log(u"Show in " + self.showDir + " has no name on " + str(sickbeard.indexerApi(self.indexer).name) + ", probably the wrong language used to search with.", logger.ERROR)
                ui.notifications.error("Unable to add show",
                                       "Show in " + self.showDir + " has no name on " + str(sickbeard.indexerApi(self.indexer).name) + ", probably the wrong language. Delete .nfo and add manually in the correct language.")
                self._finishEarly()
                return

            # if the show has no episodes/seasons
            if not s:
                logger.log(u"Show " + str(s['seriesname']) + " is on " + str(sickbeard.indexerApi(self.indexer).name) + " but contains no season/episode data.", logger.ERROR)
                ui.notifications.error("Unable to add show",
                                       "Show " + str(s['seriesname']) + " is on " + str(sickbeard.indexerApi(self.indexer).name) + " but contains no season/episode data.")
                self._finishEarly()
                return
        except Exception, e:
            # any failure talking to the indexer aborts the add
            logger.log(u"Unable to find show ID:" + str(self.indexer_id) + " on Indexer: " + str(sickbeard.indexerApi(self.indexer).name), logger.ERROR)
            ui.notifications.error("Unable to add show",
                                   "Unable to look up the show in " + self.showDir + " on " + str(sickbeard.indexerApi(self.indexer).name) + " using ID " + str(self.indexer_id) + ", not using the NFO. Delete .nfo and try adding manually again.")
            self._finishEarly()
            return

        try:
            # clear the name cache
            name_cache.clearCache()

            newShow = TVShow(self.indexer, self.indexer_id, self.lang)
            newShow.loadFromIndexer()

            self.show = newShow

            # set up initial values; None-valued options fall back to the
            # global sickbeard defaults
            self.show.location = self.showDir
            self.show.subtitles = self.subtitles if self.subtitles != None else sickbeard.SUBTITLES_DEFAULT
            self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
            self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT
            self.show.anime = self.anime if self.anime != None else sickbeard.ANIME_DEFAULT
            self.show.scene = self.scene if self.scene != None else sickbeard.SCENE_DEFAULT
            self.show.paused = False

            # be smartish about this: guess air-by-date/sports from the
            # indexer's genre/classification strings
            if self.show.genre and "talk show" in self.show.genre.lower():
                self.show.air_by_date = 1
            if self.show.genre and "documentary" in self.show.genre.lower():
                self.show.air_by_date = 0
            if self.show.classification and "sports" in self.show.classification.lower():
                self.show.sports = 1

        except sickbeard.indexer_exception, e:
            logger.log(
                u"Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
                logger.ERROR)
            if self.show:
                ui.notifications.error(
                    "Unable to add " + str(self.show.name) + " due to an error with " + sickbeard.indexerApi(self.indexer).name + "")
            else:
                ui.notifications.error(
                    "Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + "")
            self._finishEarly()
            return
def retrieve_exceptions():
    """
    Fetch the scene-name exception list from the show-api server and sync it
    into the scene_exceptions table in cache.db, keeping the in-memory
    _dyn_cache up to date. Clears the scene name cache when anything changed.
    """
    global _dyn_cache

    # Moved the exceptions onto our show-api server (to allow for future source merging)
    url = 'http://show-api.tvtumbler.com/api/exceptions'

    logger.log(u"Check scene exceptions update")

    url_data = helpers.getURL(url)
    if url_data is None:
        # could not reach the server - keep whatever we already have stored
        logger.log(
            u"Check scene exceptions update failed. Unable to get URL: " + url,
            logger.ERROR)
        return

    exception_dict = json.loads(url_data)

    myDB = db.DBConnection("cache.db")
    dirty = False

    for cur_tvdb_id in exception_dict:
        remote_names = exception_dict[cur_tvdb_id]

        # refresh the in-memory cache entry for this show
        _dyn_cache[str(cur_tvdb_id)] = remote_names

        # names we currently have stored for this show
        stored_names = [
            row["show_name"]
            for row in myDB.select(
                "SELECT * FROM scene_exceptions WHERE tvdb_id = ?",
                [cur_tvdb_id])
        ]

        # insert anything the server has that we don't
        for cur_name in remote_names:
            if cur_name in stored_names:
                continue
            logger.log(u'Adding exception %s: %s' % (cur_tvdb_id, cur_name),
                       logger.DEBUG)
            myDB.action(
                "INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                [cur_tvdb_id, cur_name])
            dirty = True

        # drop anything we have that the server no longer lists
        for cur_name in stored_names:
            if cur_name in remote_names:
                continue
            logger.log(u'Removing exception %s: %s' % (cur_tvdb_id, cur_name),
                       logger.DEBUG)
            myDB.action(
                "DELETE FROM scene_exceptions WHERE tvdb_id = ? AND show_name = ?",
                [cur_tvdb_id, cur_name])
            dirty = True

    # any DB change invalidates cached name lookups
    if dirty:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them
    into the scene_exceptions table in cache.db. Also clears the scene name cache.

    Merges three sources: the per-indexer github lists, XEM, and the local
    AniDB main-name list, then inserts any names not already in the table.
    """
    global exceptionCache, exceptionSeasonCache

    exception_dict = {}
    # reset the module-level caches; buildIndexerCache() at the end repopulates
    exceptionCache = {}
    exceptionSeasonCache = {}

    # exceptions are stored on github pages, one list per indexer
    for indexer in sickbeard.indexerApi().indexers:
        logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")

        url = sickbeard.indexerApi(indexer).config['scene_url']

        url_data = helpers.getURL(url)
        if url_data is None:
            # When urlData is None, trouble connecting to github
            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
            continue
        else:
            # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
            for cur_line in url_data.splitlines():
                cur_line = cur_line.decode('utf-8')
                indexer_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable
                if not aliases:
                    continue
                indexer_id = int(indexer_id)

                # regex out the list of shows, taking \' into account;
                # each name maps to season -1 (i.e. applies to any season)
                # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

                exception_dict[indexer_id] = alias_list

        # merge XEM exceptions for this indexer, appending to any names
        # already collected for the same show id
        logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
        xem_exceptions = _xem_excpetions_fetcher(indexer)
        for xem_ex in xem_exceptions:
            if xem_ex in exception_dict:
                exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
            else:
                exception_dict[xem_ex] = xem_exceptions[xem_ex]

    # merge the AniDB main-name exceptions (anidb xml anime exceptions)
    # NOTE(review): assumed to run once after the indexer loop since it does
    # not reference `indexer` - confirm against the original formatting
    logger.log(u"Checking for scene exception updates for AniDB")
    local_exceptions = _retrieve_anidb_mainnames()
    for local_ex in local_exceptions:
        if local_ex in exception_dict:
            exception_dict[local_ex] = exception_dict[local_ex] + local_exceptions[local_ex]
        else:
            exception_dict[local_ex] = local_exceptions[local_ex]

    myDB = db.DBConnection("cache.db")

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    for cur_indexer_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [x["show_name"] for x in
                               myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])]

        for cur_exception_dict in exception_dict[cur_indexer_id]:
            # each entry is a one-item dict of {show_name: season}
            cur_exception, curSeason = cur_exception_dict.items()[0]

            # if this exception isn't already in the DB then add it
            # NOTE(review): names removed upstream are never deleted here -
            # looks like a deliberately additive sync, but verify
            if cur_exception not in existing_exceptions:
                myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                            [cur_indexer_id, cur_exception, curSeason])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")

    # build indexer scene name cache
    buildIndexerCache()
def retrieve_exceptions():
    """
    Download the per-indexer scene-name exception lists from github, parse
    them, and insert any exceptions missing from the scene_exceptions table in
    cache.db. Clears the scene name cache when anything was added.
    """
    fetched = {}

    # exceptions are stored on github pages, one file per indexer
    for cur_indexer in sickbeard.indexerApi().indexers:
        logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(cur_indexer).name + "")

        exceptions_url = sickbeard.indexerApi(cur_indexer).config["scene_url"]

        page = helpers.getURL(exceptions_url)
        if page is None:
            # trouble connecting to github; move on to the next indexer
            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + exceptions_url, logger.ERROR)
            continue

        # each exception is on one line: indexer_id: 'name 1', 'name 2', ...
        for raw_line in page.splitlines():
            raw_line = raw_line.decode("utf-8")
            show_id, sep, alias_part = raw_line.partition(":")  # @UnusedVariable
            if not alias_part:
                continue
            show_id = int(show_id)
            # pull out the quoted names, un-escaping \' as we go
            fetched[show_id] = [re.sub(r"\\(.)", r"\1", m)
                                for m in re.findall(r"'(.*?)(?<!\\)',?", alias_part)]

    myDB = db.DBConnection("cache.db")
    dirty = False

    # insert every fetched exception the database doesn't have yet
    for show_id in fetched:
        known = set(
            row["show_name"]
            for row in myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [show_id])
        )
        for name in fetched[show_id]:
            if name in known:
                continue
            myDB.action(
                "INSERT INTO scene_exceptions (indexer_id, show_name) VALUES (?,?)",
                [show_id, name])
            dirty = True

    # any insert invalidates cached name lookups
    if dirty:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")
def retrieve_exceptions():
    """
    Sync the sb_tvdb_scene_exceptions list from github into the
    scene_exceptions table in cache.db: queue INSERTs for new names, DELETEs
    for names or shows no longer listed remotely, then apply them in one
    transaction and clear the name cache.

    Returns False when the remote list could not be fetched, True otherwise.
    """
    provider = 'sb_tvdb_scene_exceptions'

    # remote exceptions are stored on github pages
    url = 'https://github.com/riksmith/sb_tvdb_scene_exceptions/raw/gh-pages/exceptions.txt'

    logger.log(u"Check scene exceptions update")

    # fetch the remote list; bail out (keeping local data) if unreachable
    url_data = helpers.getURL(url)
    if not url_data:
        logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
        return False

    # parse: each line is  tvdb_id: 'show name 1', 'show name 2', ...
    remote = {}
    for raw_line in url_data.splitlines():
        raw_line = raw_line.decode('utf-8')
        id_part, sep, alias_part = raw_line.partition(':')  # @UnusedVariable
        if not alias_part:
            continue
        # pull out the quoted names, un-escaping \' as we go
        remote[int(id_part)] = [re.sub(r'\\(.)', r'\1', m)
                                for m in re.findall(r"'(.*?)(?<!\\)',?", alias_part)]

    # load what we currently store for this provider, grouped by show id
    myDB = db.DBConnection("cache.db", row_type="dict")
    local = {}
    for row in myDB.select("SELECT tvdb_id, show_name FROM scene_exceptions WHERE provider=?;", [provider]):
        local.setdefault(row["tvdb_id"], []).append(row["show_name"])

    pending = []

    # remote vs local: queue INSERTs for names we don't have yet
    for show_id in remote:
        if show_id not in local:
            local[show_id] = []
        for name in remote[show_id]:
            if name not in local[show_id]:
                pending.append(["INSERT INTO scene_exceptions (tvdb_id,show_name,provider) VALUES (?,?,?);",
                                [show_id, name, provider]])

    # local vs remote: queue DELETEs for shows/names gone from the remote list
    for show_id in local:
        if show_id not in remote:
            pending.append(["DELETE FROM scene_exceptions WHERE tvdb_id=? AND provider=?;",
                            [show_id, provider]])
        else:
            for name in local[show_id]:
                if name not in remote[show_id]:
                    pending.append(["DELETE FROM scene_exceptions WHERE tvdb_id= ? AND show_name=? AND provider=?;",
                                    [show_id, name, provider]])

    if pending:
        logger.log(u"Updating scene exceptions")
        myDB.mass_action(pending, logTransaction=True)
        logger.log(u"Clear name cache")
        name_cache.clearCache()
        logger.log(u"Performing a vacuum on database: " + myDB.filename)
        myDB.action("VACUUM")
    else:
        logger.log(u"No scene exceptions update needed")

    return True