def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """

    try:
        parse_result = NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # if there's no info after the season info then assume it's fine
    if not parse_result.extra_info:
        return True

    # reject as soon as one blacklist pattern matches the extra info
    for bad_word in resultFilters:
        if re.search('(^|[\W_])' + bad_word + '($|[\W_])', parse_result.extra_info, re.I):
            logger.log(u"Invalid scene release: " + name + " contains " + bad_word + ", ignoring it", logger.DEBUG)
            return False

    return True
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """

    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # BUGFIX: previously this extended the module-level resultFilters list in
    # place, so the configured ignore words were appended again on every call
    # and accumulated duplicates for the life of the process. Build the
    # combined list locally instead. Also renamed the loop variable that
    # shadowed the `filter` builtin.
    word_filters = list(resultFilters)
    if sickbeard.IGNORE_WORDS:
        word_filters.extend(sickbeard.IGNORE_WORDS.split(','))

    # if any of the bad strings are in the name then say no
    compiled = [re.compile('(^|[\W_])%s($|[\W_])' % word.strip(), re.I)
                for word in word_filters]
    for regfilter in compiled:
        if regfilter.search(name):
            logger.log(u"Invalid scene release: " + name + " contains pattern: " + regfilter.pattern + ", ignoring it", logger.DEBUG)
            return False

    return True
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """
    Turn a release name into a cache INSERT statement for this provider.

    name: release name
    url: download URL
    parse_result: optional pre-parsed result; when None the name is parsed here
    indexer_id: optional indexer id used to resolve the show object

    Returns [sql, params] for the caller to execute, or None when the name
    cannot be resolved to a show/episode.
    """

    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj, convert=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
            return None

        if not parse_result or not parse_result.series_name:
            return None

    # if we made it this far then lets add the parsed result to cache for usager later on
    season = episodes = None

    if parse_result.is_air_by_date or parse_result.is_sports:
        airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()

        myDB = db.DBConnection()
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
            [parse_result.show.indexerid, parse_result.show.indexer, airdate])
        # BUGFIX: was `sql_results > 0`, a list-vs-int comparison that is
        # always truthy on Python 2 and then crashed indexing an empty set.
        if len(sql_results) > 0:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        # BUGFIX: compare against None so a legitimate season 0 (specials)
        # is not silently rewritten to season 1 by truthiness.
        season = parse_result.season_number if parse_result.season_number is not None else 1
        episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8', 'replace')

        # get release group
        release_group = parse_result.release_group

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group) VALUES (?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group]]
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        parse_result = NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    extra = parse_result.extra_info
    # no info after the season part -> nothing to filter on, accept it
    if not extra:
        return True

    # say no as soon as any bad string appears in the extra info
    for bad in resultFilters:
        if re.search('(^|[\W_])'+bad+'($|[\W_])', extra, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+bad+", ignoring it", logger.DEBUG)
            return False

    return True
def filterBadReleases(name):
    """Return True when the release name passes the resultFilters blacklist,
    False when it cannot be parsed or matches a filter."""
    try:
        parse_result = NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    extra_info = parse_result.extra_info
    # if there's no info after the season info then assume it's fine
    if not extra_info:
        return True

    # reject on the first blacklist pattern that matches
    for bad_word in resultFilters:
        if re.search('(^|[\W_])' + bad_word + '($|[\W_])', extra_info, re.I):
            logger.log(u"Invalid scene release: " + name + " contains " + bad_word + ", ignoring it", logger.DEBUG)
            return False
    return True
def filterBadReleases(name, lang="en"):
    """
    Filter a release name against the global blacklist, honouring per-language
    exceptions (filterExceptions) and per-language required words
    (requiredFilters).

    name: the release name to check
    lang: language code of the show (default "en")

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    # exclude language exceptions from filters
    # (work on a copy so the module-level resultFilters stays untouched)
    lResultFilters = [f for f in resultFilters]
    if lang in filterExceptions and filterExceptions[lang]:
        for exception in filterExceptions[lang]:
            if exception in lResultFilters:
                lResultFilters.remove(exception)

    # if there's no info after the season info then assume it's fine, unless there is a whitelist for this language
    if not parse_result.extra_info and not lang in requiredFilters:
        return True

    # if any of the bad strings are in the name then say no
    for x in lResultFilters:
        if re.search('(^|[\W_])'+x+'($|[\W_])', parse_result.extra_info, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+x+", ignoring it", logger.DEBUG)
            return False

    # if whitelist exists for language, allow only releases containing a whitelisted word
    if lang in requiredFilters and requiredFilters[lang]:
        for x in requiredFilters[lang]:
            if re.search('(^|[\W_])'+x+'($|[\W_])', parse_result.extra_info, re.I):
                return True
        logger.log(u"Invalid scene release: "+name+" doesn't contain any of the words "+str(requiredFilters[lang])+", ignoring it", logger.DEBUG)
        return False

    return True
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # BUGFIX: the old code extended the module-level resultFilters in place,
    # appending the ignore words again on every call (unbounded duplicate
    # growth). Combine into a local list instead; also stop shadowing the
    # `filter` builtin.
    word_filters = list(resultFilters)
    if sickbeard.IGNORE_WORDS:
        word_filters.extend(sickbeard.IGNORE_WORDS.split(','))

    # if any of the bad strings are in the name then say no
    compiled = [re.compile('(^|[\W_])%s($|[\W_])' % word.strip(), re.I) for word in word_filters]
    for regfilter in compiled:
        if regfilter.search(name):
            logger.log(u"Invalid scene release: " + name + " contains pattern: " + regfilter.pattern + ", ignoring it", logger.DEBUG)
            return False

    return True
def filterBadReleases(name, show):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check
    show: show object; show.lang selects language-specific filtering

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    #if language not english, search for mandatory, else add other languages to ignore list
    # NOTE(review): both branches mutate the module-level resultFilters list in
    # place, so the filter set persists across calls for different shows --
    # confirm this cross-call behaviour is intended.
    if show.lang != "en":
        mandatory = [(langCodes[show.lang])]
        if langCodes[show.lang] in resultFilters:
            resultFilters.remove(langCodes[show.lang])
        logger.log(u"Language for \""+show.name+"\" is "+show.lang+" so im looking for \""+langCodes[show.lang]+"\" in release names", logger.DEBUG)
    elif show.lang == "en":
        # English show: every known foreign-language tag becomes a filter word
        for key in langCodes:
            if not langCodes[key] in resultFilters:
                resultFilters.append(langCodes[key])
        mandatory = []
        logger.log(u"Language for \""+show.name+"\" is "+show.lang, logger.DEBUG)

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    # if any of the bad strings are in the name then say no
    for x in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        if re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+x+", ignoring it", logger.DEBUG)
            return False

    # if every of the mandatory words are in there, say yes
    if mandatory:
        for x in mandatory:
            if not re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
                logger.log(u"Mandatory string not found: "+name+" doesnt contains "+x+", ignoring it", logger.DEBUG)
                return False

    return True
def _addCacheEntry(self, name, url, quality=None):
    """
    Parse a release name and, when it maps to a known show/episode, INSERT it
    into this provider's cache table as a side effect.

    name: release name to cache
    url: download URL for the release
    quality: known quality, or None to derive it from the name

    Returns None in all paths.
    """
    cacheDB = self._getDB()
    season = None
    episodes = None

    # if we don't have complete info then parse the filename to get it
    try:
        myParser = NameParser(0)
        parse_result = myParser.parse(name).convert()
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
        return None

    if not parse_result.show:
        logger.log(u"Couldn't find a show in our databases matching " + name + ", unable to cache it", logger.DEBUG)
        return None

    try:
        myDB = db.DBConnection()
        if parse_result.show.air_by_date:
            airdate = parse_result.sports_event_date.toordinal() if parse_result.show.sports else parse_result.air_date.toordinal()
            sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                                      [parse_result.show.indexerid, airdate])
            # BUGFIX: was `sql_results > 0` -- a list-vs-int comparison that is
            # always truthy on Python 2 and crashed on an empty result set.
            if len(sql_results) > 0:
                season = int(sql_results[0]["season"])
                episodes = [int(sql_results[0]["episode"])]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a seperated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            if quality is None:
                quality = Quality.sceneQuality(name)

            if not isinstance(name, unicode):
                name = unicode(name, 'utf-8')

            cacheDB.action(
                "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality])
    except Exception:
        # BUGFIX: was a bare `except:` that also swallowed SystemExit and
        # KeyboardInterrupt and hid DB errors completely; narrow it and log.
        logger.log(u"Error while adding to cache: " + name, logger.DEBUG)
        return
def _addCacheEntry(self, name, url, quality=None):
    """
    Parse a release name and, when it maps to a known show/episode, INSERT it
    into this provider's cache table as a side effect.

    name: release name to cache
    url: download URL for the release
    quality: known quality, or None to derive it from the name

    Returns None in all paths.
    """
    cacheDB = self._getDB()
    season = None
    episodes = None

    # if we don't have complete info then parse the filename to get it
    try:
        myParser = NameParser()
        parse_result = myParser.parse(name).convert()
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
        return None

    if not parse_result.show:
        logger.log(u"Couldn't find a show in our databases matching " + name + ", unable to cache it", logger.DEBUG)
        return None

    try:
        myDB = db.DBConnection()
        if parse_result.show.air_by_date:
            airdate = parse_result.sports_event_date.toordinal() if parse_result.show.sports else parse_result.air_date.toordinal()
            sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                                      [parse_result.show.indexerid, airdate])
            # BUGFIX: was `sql_results > 0` -- list-vs-int, always truthy on
            # Python 2, then IndexError on an empty result set.
            if len(sql_results) > 0:
                season = int(sql_results[0]["season"])
                episodes = [int(sql_results[0]["episode"])]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a seperated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            if quality is None:
                quality = Quality.sceneQuality(name)

            if not isinstance(name, unicode):
                name = unicode(name, 'utf-8')

            cacheDB.action(
                "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality])
    except Exception:
        # BUGFIX: replaced the bare `except:` (which swallowed SystemExit /
        # KeyboardInterrupt and hid all DB errors) with a logged handler.
        logger.log(u"Error while adding to cache: " + name, logger.DEBUG)
        return
def filterBadReleases(name, show):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check
    show: show object; show.lang selects language-specific filtering

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    #if language not english, search for mandatory, else add german to ignore list
    # NOTE(review): both branches mutate the module-level resultFilters in
    # place, so changes persist across calls for other shows -- confirm this
    # cross-call behaviour is intended.
    if show.lang != "en":
        mandatory = [(langCodes[show.lang])]
        if langCodes[show.lang] in resultFilters:
            resultFilters.remove(langCodes[show.lang])
        logger.log(u"Language for \""+show.name+"\" is "+show.lang+" so im looking for \""+langCodes[show.lang]+"\" in release names", logger.DEBUG)
    elif show.lang == "en":
        if not "german" in resultFilters:
            resultFilters.append("german")
        mandatory = []
        logger.log(u"Language for \""+show.name+"\" is "+show.lang, logger.DEBUG)

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    # if any of the bad strings are in the name then say no
    for x in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        if re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+x+", ignoring it", logger.DEBUG)
            return False

    # if every of the mandatory words are in there, say yes
    if mandatory:
        for x in mandatory:
            if not re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
                logger.log(u"Mandatory string not found: "+name+" doesnt contains "+x+", ignoring it", logger.DEBUG)
                return False

    return True
def _addCacheEntry(self, name, url, quality=None):
    """
    Turn a release name into a cache INSERT statement for this provider.

    name: release name
    url: download URL
    quality: known quality, or None to derive it from the name

    Returns [sql, params] for the caller to execute, or None when the name
    cannot be resolved to a show/episode.
    """
    try:
        myParser = NameParser(convert=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None
    except InvalidShowException:
        logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
        return None

    if not parse_result or not parse_result.series_name:
        return None

    season = episodes = None
    if parse_result.air_by_date or parse_result.sports:
        airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_event_date.toordinal()

        myDB = db.DBConnection()
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
            [parse_result.show.indexerid, parse_result.show.indexer, airdate])
        # BUGFIX: was `sql_results > 0`, a list-vs-int comparison that is
        # always truthy on Python 2 and crashed indexing an empty result set.
        if len(sql_results) > 0:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        # PEP 8 idiom: identity test against None instead of `!= None`
        season = parse_result.season_number if parse_result.season_number is not None else 1
        episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        if quality is None:
            quality = Quality.sceneQuality(name, parse_result.is_anime)

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8')

        # get release group
        release_group = parse_result.release_group

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group) VALUES (?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group]]
def filterBadReleases(name, showLang=u"en"):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check
    showLang: show language code; English shows additionally reject dubs

    Returns: True if the release name is OK, False if it's bad.
    """
    additionalFilters = []

    # English shows should never match dubbed releases
    if showLang == u"en":
        additionalFilters.append("dub(bed)?")

    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        check_string = name

    # if any of the bad strings are in the name then say no
    # NOTE(review): "ztreyfgut" looks like a sentinel word that never occurs in
    # real release names, so the split() below yields one harmless no-op filter
    # when no ignore words are configured -- TODO confirm.
    if sickbeard.IGNORE_WORDS == "":
        ignore_words = "ztreyfgut"
    else:
        ignore_words = sickbeard.IGNORE_WORDS

    for x in resultFilters + ignore_words.split(',') + additionalFilters:
        # never filter on the tag naming the show's own language
        if x == showLanguages.get(showLang):
            continue
        if re.search('(^|[\W_])' + x + '($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: " + name + " contains " + x + ", ignoring it", logger.DEBUG)
            return False

    return True
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """
    Turn a release name into a provider_cache INSERT statement.

    name: release name
    url: download URL
    parse_result: optional pre-parsed result; when None the name is parsed here
    indexer_id: optional indexer id used to resolve the show object

    Returns [sql, params] for the caller to execute, or None when the name
    cannot be resolved.
    """

    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj, convert=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename ' + name + ' into a valid episode', logger.DEBUG)
            return None
        except InvalidShowException:
            logger.log(u'Unable to parse the filename ' + name + ' into a valid show', logger.DEBUG)
            return None

        if not parse_result or not parse_result.series_name:
            return None

    # if we made it this far then lets add the parsed result to cache for usager later on
    # BUGFIX: compare against None so a legitimate season 0 (specials) is not
    # silently rewritten to season 1 by truthiness.
    season = parse_result.season_number if parse_result.season_number is not None else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = '|' + '|'.join(map(str, episodes)) + '|'

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8', 'replace')

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        logger.log(u'Added RSS item: [' + name + '] to cache: [' + self.providerID + ']', logger.DEBUG)

        return [
            'INSERT OR IGNORE INTO provider_cache (provider, name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?,?)',
            [self.providerID, name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """
    Turn a release name into a cache INSERT statement for this provider's
    per-provider table.

    name: release name
    url: download URL
    parse_result: optional pre-parsed result; when None the name is parsed here
    indexer_id: optional indexer id used to resolve the show object

    Returns [sql, params] for the caller to execute, or None when the name
    cannot be resolved.
    """

    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj, convert=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
            return None

        if not parse_result or not parse_result.series_name:
            return None

    # if we made it this far then lets add the parsed result to cache for usager later on
    # BUGFIX: compare against None so a legitimate season 0 (specials) is not
    # silently rewritten to season 1 by truthiness.
    season = parse_result.season_number if parse_result.season_number is not None else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8', 'replace')

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def _addCacheEntry(self, name, url, quality=None):
    """
    Turn a release name into a cache INSERT statement for this provider.

    name: release name
    url: download URL
    quality: known quality, or None to derive it from the name

    Returns [sql, params] for the caller to execute, or None when the name
    cannot be resolved to a show/episode.
    """
    try:
        myParser = NameParser(convert=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None

    if not parse_result or not parse_result.series_name:
        return None

    if not parse_result.show:
        logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
        return None

    season = episodes = None
    if parse_result.air_by_date or parse_result.sports:
        myDB = db.DBConnection()

        # BUGFIX: the old `parse_result.air_date.toordinal() or ...` raised
        # AttributeError whenever air_date was None (sports results), because
        # .toordinal() was called before the `or` could fall through.
        airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_event_date.toordinal()

        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
            [parse_result.show.indexerid, parse_result.show.indexer, airdate])
        # BUGFIX: was `sql_results > 0`, a list-vs-int comparison that is
        # always truthy on Python 2 and crashed indexing an empty result set.
        if len(sql_results) > 0:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        # PEP 8 idiom: identity test against None instead of `!= None`
        season = parse_result.season_number if parse_result.season_number is not None else 1
        episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        if quality is None:
            quality = Quality.sceneQuality(name, parse_result.is_anime)

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8')

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        return [
            "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality]]
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """
    Turn a release name into a cache INSERT statement for this provider.

    name: release name
    url: download URL
    parse_result: optional pre-parsed result; when None the name is parsed here
    indexer_id: optional indexer id used to resolve the show object

    Returns [sql, params] for the caller to execute, or None when the name
    cannot be resolved.
    """

    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
            return None

        if not parse_result or not parse_result.series_name:
            return None

    # if we made it this far then lets add the parsed result to cache for usager later on
    # BUGFIX: compare against None so a legitimate season 0 (specials) is not
    # silently rewritten to season 1 by truthiness.
    season = parse_result.season_number if parse_result.season_number is not None else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        name = ss(name)

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def filterBadReleases(name,showLang=u"en"):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check
    showLang: show language code; English shows additionally reject dubs

    Returns: True if the release name is OK, False if it's bad.
    """
    additionalFilters = []

    # English shows should never match dubbed releases
    if showLang == u"en":
        additionalFilters.append("dub(bed)?")

    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        check_string = name

    # if any of the bad strings are in the name then say no
    # NOTE(review): "ztreyfgut" appears to be a never-matching sentinel so the
    # split() below produces one harmless filter word when no ignore words are
    # configured -- TODO confirm.
    if sickbeard.IGNORE_WORDS == "":
        ignore_words="ztreyfgut"
    else:
        ignore_words=sickbeard.IGNORE_WORDS

    for x in resultFilters + ignore_words.split(',') + additionalFilters:
        # never filter on the tag naming the show's own language
        if x == showLanguages.get(showLang):
            continue
        if re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+x+", ignoring it", logger.DEBUG)
            return False

    return True
def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality, extra_no_name, version, is_anime=False):
    """
    Work out the proper-level (and related metadata) of the previously
    snatched release for the given episodes.

    Returns a 5-tuple: (level, is_internal, codec, extra_no_name, rel_name).
    When old_status is not a snatched status the level comes straight from
    the passed-in extra_no_name/version; otherwise the most recent matching
    snatch is looked up in history and re-parsed.
    """
    level = 0
    is_internal = False
    codec = ''
    rel_name = None
    if old_status not in SNATCHED_ANY:
        # not re-downloading a snatch: derive level from the caller's info
        level = Quality.get_proper_level(extra_no_name, version, is_anime)
    elif show_obj:
        my_db = db.DBConnection()
        np = NameParser(False, showObj=show_obj)
        for episode in episodes:
            # most recent snatch action for this episode
            # (the %s expands to an OR over all snatched action codes)
            result = my_db.select(
                'SELECT resource FROM history'
                ' WHERE showid = ?'
                ' AND season = ? AND episode = ? AND '
                '(%s) ORDER BY date DESC LIMIT 1'
                % (' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY)),
                [indexerid, season, episode])
            if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
                continue
            # only consider the old release if its scene quality matches the
            # quality of the new candidate
            nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
            if nq != new_quality:
                continue
            try:
                p = np.parse(result[0]['resource'])
            except (StandardError, Exception):
                continue
            # derive level/codec/internal flag from the old release's name
            level = Quality.get_proper_level(p.extra_info_no_name(), p.version, show_obj.is_anime)
            extra_no_name = p.extra_info_no_name()
            rel_name = result[0]['resource']
            is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I)
            codec = _get_codec(p.extra_info_no_name())
            break
    return level, is_internal, codec, extra_no_name, rel_name
def filterBadReleases(name, parse=True):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check
    parse: when True, reject names that cannot be parsed at all

    Returns: True if the release name is OK, False if it's bad.
    """

    try:
        if parse:
            NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return False
    except InvalidShowException:
        logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
        return False

    resultFilters = ['sub(bed|ed|pack|s)', '(dk|fin|heb|kor|nor|nordic|pl|swe)sub(bed|ed|s)?',
                     '(dir|sample|sub|nfo)fix', 'sample', '(dvd)?extras', 'dub(bed)?']

    # if any of the bad strings are in the name then say no
    if sickbeard.IGNORE_WORDS:
        resultFilters.extend(sickbeard.IGNORE_WORDS.split(','))

    for word in resultFilters:
        bad_pattern = re.compile('(^|[\W_])%s($|[\W_])' % re.escape(word.strip()), re.I)
        if bad_pattern.search(name):
            logger.log(u"Invalid scene release: " + name + " contained: " + bad_pattern.pattern + ", ignoring it", logger.DEBUG)
            return False

    # if any of the good strings aren't in the name then say no
    if sickbeard.REQUIRE_WORDS:
        for word in sickbeard.REQUIRE_WORDS.split(','):
            good_pattern = re.compile('(^|[\W_])%s($|[\W_])' % re.escape(word.strip()), re.I)
            if not good_pattern.search(name):
                logger.log(u"Invalid scene release: " + name + " didn't contain: " + good_pattern.pattern + ", ignoring it", logger.DEBUG)
                return False

    return True
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        parse_result = NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # use the extra info and the scene group to filter against
    pieces = []
    if parse_result.extra_info:
        pieces.append(parse_result.extra_info)
    if parse_result.release_group:
        pieces.append(parse_result.release_group)
    check_string = '-'.join(pieces)

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    # if any of the bad strings are in the name then say no
    for ignore_word in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        ignore_word = ignore_word.strip()
        if not ignore_word:
            continue
        if re.search('(^|[\W_])' + ignore_word + '($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: " + name + " contains " + ignore_word + ", ignoring it", logger.DEBUG)
            return False

    return True
def filterByRequiredWordsReleases(name, requiredWords):
    """
    Keep only releases whose extra-info/group string contains at least one of
    the comma-separated requiredWords (everything passes when no words are
    required or when there is nothing to check against).

    name: the release name to check
    requiredWords: comma-separated whitelist, or "" for no requirement

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        parse_result = NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        check_string = check_string + '-' + parse_result.release_group if check_string else parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    logger.log(u"filtered requiredWords", logger.DEBUG)
    if requiredWords != "":
        # accept on the first whitelisted word found
        for word in requiredWords.split(','):
            if re.search('(^|[\W_])'+word+'($|[\W_])', check_string, re.I):
                logger.log(u"This scene release: "+name+" contains "+word+", add it", logger.DEBUG)
                return True
        logger.log(u"Invalid scene release: "+name+" not contains, ignore it", logger.DEBUG)
        return False

    return True
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing them
    to the resultFilters contents, then requires every mandatory word to appear.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    # if any of the bad strings are in the name then say no
    for x in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        # strip and skip empties: when IGNORE_WORDS is unset, split(',')
        # yields '' and '(^|[\W_])($|[\W_])' can spuriously match, rejecting
        # good releases
        x = x.strip()
        if not x:
            continue
        if re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+x+", ignoring it", logger.DEBUG)
            return False

    # if every one of the mandatory words is in there, say yes
    # NOTE(review): the original referenced an undefined name `mandatory`
    # (NameError whenever this point was reached); it is now derived from
    # sickbeard.REQUIRE_WORDS for consistency with the other word filters
    # in this file -- confirm this matches the intended source of the list.
    mandatory = [w.strip() for w in sickbeard.REQUIRE_WORDS.split(',') if w.strip()] if sickbeard.REQUIRE_WORDS else []
    for x in mandatory:
        if not re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
            logger.log(u"Mandatory string not found: "+name+" doesnt contains "+x+", ignoring it", logger.DEBUG)
            return False

    return True
def isGoodResult(name, show, log=True):
    """
    Use an automatically-created regex to make sure the result actually is the
    show it claims to be.

    name: the release name returned by a provider
    show: the show object the result is supposed to belong to
    log: when True, emit DEBUG logs for every regex attempt

    Returns: True if the name matches one of the show's known names, False otherwise.
    """
    # every known alias for the show, both sanitized-scene form and raw form
    # (py2: map() returns a list here, so '+' concatenation works)
    all_show_names = allPossibleShowNames(show, season="all")
    showNames = map(sanitizeSceneName, all_show_names) + all_show_names
    if show.anime:
        # anime results are additionally filtered by the show's release-group
        # black/white list before any name matching is attempted
        np = NameParser(show)
        parse_result = np.parse(name)
        bwl = BlackAndWhiteList(show.tvdbid)
        # precedence: (whitelist set AND group not in it) OR (blacklist set AND group in it)
        if bwl.whiteList and parse_result.release_group not in bwl.whiteList or bwl.blackList and parse_result.release_group in bwl.blackList:
            return False
    for curName in set(showNames):
        if show.anime:
            # turn the escaped spaces/dots/dashes of the name into a flexible
            # \W+ separator, then expect "[group] name ep# [crc]" layout
            escaped_name = re.sub('\\\\[\\s.-]', '\W+', re.escape(curName))
            if show.startyear:
                escaped_name += "(?:\W+" + str(show.startyear) + ")?"
            curRegex = '^\[.+\]\W+' + escaped_name + '?\W+\d+(\W+\[.+\])?'
        else:
            escaped_name = re.sub('\\\\[\\s.-]', '\W+', re.escape(curName))
            if show.startyear:
                escaped_name += "(?:\W+" + str(show.startyear) + ")?"
            # the name must be followed by a recognizable episode marker:
            # SxxEyy, 1x02, a date, "part/pt N", "Season N", or "ENN"
            curRegex = '^' + escaped_name + '\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(\d|[ivx]))|Season\W+\d+\W+|E\d+\W+)'
        if log:
            logger.log(u"Checking if show " + name + " matches " + curRegex, logger.DEBUG)
        match = re.search(curRegex, name, re.I)
        if match:
            logger.log(u"Matched " + curRegex + " to " + name, logger.DEBUG)
            return True
    if log:
        logger.log(u"Provider gave result " + name + " but that doesn't seem like a valid result for " + show.name + " so I'm ignoring it")
    return False
def filterBadReleases(name):
    """Return True when the release's extra info matches none of resultFilters.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad or unparseable.
    """
    try:
        parse_result = NameParser().parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
        return False

    extra = parse_result.extra_info
    # no info after the season info -> nothing to filter on, assume it's fine
    if not extra:
        return True

    # reject the release on the first bad word found at a word boundary
    for bad_word in resultFilters:
        if re.search('(^|[\W_])'+bad_word+'($|[\W_])', extra, re.I):
            logger.log(u"Invalid scene release: "+name+" contains "+bad_word+", ignoring it", logger.DEBUG)
            return False

    return True
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing them
    to the resultFilters contents.

    Unlike earlier versions, the whole release name is checked (not just the
    extra info / scene group), so a parse is only needed to validate the name.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # if any of the bad strings are in the name then say no
    for ignore_word in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        ignore_word = ignore_word.strip()
        if ignore_word:
            if re.search('(^|[\W_])' + ignore_word + '($|[\W_])', name, re.I):
                logger.log(u"Invalid scene release: " + name + " contains " + ignore_word + ", ignoring it", logger.DEBUG)
                return False

    return True
def pass_wordlist_checks(name, parse=True, indexer_lookup=True):
    """
    Filters out non-english and just all-around stupid releases by comparing
    the word list contents at boundaries or the end of name.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    if parse:
        err_msg = u'Unable to parse the filename %s into a valid ' % name
        try:
            NameParser(indexer_lookup=indexer_lookup).parse(name)
        except InvalidNameException:
            logger.log(err_msg + 'episode', logger.DEBUG)
            return False
        except InvalidShowException:
            logger.log(err_msg + 'show', logger.DEBUG)
            return False

    # built-in junk patterns that are always filtered
    word_list = [
        'sub(bed|ed|pack|s)',
        '(dk|fin|heb|kor|nor|nordic|pl|swe)sub(bed|ed|s)?',
        '(dir|sample|sub|nfo)fix',
        'sample',
        '(dvd)?extras',
        'dub(bed)?',
    ]

    # user-configured ignore words are prepended as one comma-joined string
    if sickbeard.IGNORE_WORDS:
        word_list = ','.join([sickbeard.IGNORE_WORDS] + word_list)

    # reject when any ignore word matches
    hit = contains_any(name, word_list)
    if hit is not None and hit:
        logger.log(u'Ignored: %s for containing ignore word' % name, logger.DEBUG)
        return False

    # reject when none of the required words match
    miss = not_contains_any(name, sickbeard.REQUIRE_WORDS)
    if miss is not None and miss:
        logger.log(u'Ignored: %s for not containing required word match' % name, logger.DEBUG)
        return False

    return True
def filterBadReleases(name, parse=True):
    """
    Filters out non-english and just all-around stupid releases by comparing them
    to the resultFilters contents.

    name: the release name to check
    parse: when True, validate the name with NameParser first

    Returns: True if the release name is OK, False if it's bad.
    """
    if parse:
        try:
            NameParser().parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return False
        except InvalidShowException:
            # a name that doesn't match a known show is not rejected here
            pass

    # reject when the name contains any built-in or user-configured ignore word
    bad_words = list(resultFilters)
    if sickbeard.IGNORE_WORDS:
        bad_words += sickbeard.IGNORE_WORDS.split(',')

    matched = containsAtLeastOneWord(name, bad_words)
    if matched:
        logger.log(u"Invalid scene release: " + name + " contains " + matched + ", ignoring it", logger.DEBUG)
        return False

    # reject when required words are configured but none of them appear
    if sickbeard.REQUIRE_WORDS and not containsAtLeastOneWord(name, sickbeard.REQUIRE_WORDS):
        logger.log(u"Invalid scene release: " + name + " doesn't contain any of " + sickbeard.REQUIRE_WORDS + ", ignoring it", logger.DEBUG)
        return False

    return True
stripNS(curChild, ns) return element def splitResult(result): try: urlData = helpers.getURL(result.url) except urllib2.URLError, e: logger.log(u"Unable to load url "+result.url+", can't download season NZB", logger.ERROR) return False # parse the season ep name try: np = NameParser(False) parse_result = np.parse(result.name) except InvalidNameException: logger.log(u"Unable to parse the filename "+result.name+" into a valid episode", logger.WARNING) return False # bust it up season = parse_result.season_number if parse_result.season_number != None else 1 separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season) resultList = [] for newNZB in separateNZBs: logger.log(u"Split out "+newNZB+" from "+result.name, logger.DEBUG)
def _getProperList():
    """
    Search all active providers for PROPER/REPACK releases from the last two
    days and return the ones that should actually be downloaded.

    Returns: a list of Proper objects (deduplicated per episode, newest first)
             that match an episode we already have at the same quality.
    """
    propers = {}
    # only look at propers released in the last two days
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of the
    origThreadName = threading.currentThread().name
    providers = [ x for x in sickbeard.providers.sortedProviderList() if x.is_active() ]
    for curProvider in providers:
        # tag the thread name with the provider so log lines are attributable
        threading.currentThread( ).name = origThreadName + ' :: [' + curProvider.name + ']'
        logger.log(u'Searching for any new PROPER releases from ' + curProvider.name)
        try:
            curPropers = curProvider.find_propers(search_date)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            # always restore the original thread name
            threading.currentThread().name = origThreadName

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            name = _genericName(x.name)
            if not name in propers:
                logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []
    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid episode', logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid show', logger.DEBUG)
            continue

        if not parse_result.series_name:
            continue

        # full-season propers can't be mapped to a single episode; skip them
        if not parse_result.episode_numbers:
            logger.log(u'Ignoring ' + curProper.name + ' because it\'s for a full season rather than specific episode', logger.DEBUG)
            continue

        logger.log(u'Successful match! Result ' + parse_result.original_name + ' matched to show ' + parse_result.show.name, logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.season = parse_result.season_number if parse_result.season_number != None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not curProper.release_group and curProper.version == -1:
                logger.log(u'Proper ' + curProper.name + ' doesn\'t have a release group and version, ignoring it', logger.DEBUG)
                continue

        if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
            logger.log(u'Proper ' + curProper.name + ' isn\'t a valid scene release that we want, ignoring it', logger.DEBUG)
            continue

        # per-show ignore/require word filters
        if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name, parse_result.show.rls_ignore_words):
            logger.log(u'Ignoring ' + curProper.name + ' based on ignored words filter: ' + parse_result.show.rls_ignore_words, logger.MESSAGE)
            continue

        if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name, parse_result.show.rls_require_words):
            logger.log(u'Ignoring ' + curProper.name + ' based on required words filter: ' + parse_result.show.rls_require_words, logger.MESSAGE)
            continue

        # check if we actually want this proper (if it's the right quality)
        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?', [curProper.indexerid, curProper.season, curProper.episode])
        if not sqlResults:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]['status']))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            myDB = db.DBConnection()
            sqlResults = myDB.select('SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?', [curProper.indexerid, curProper.season, curProper.episode])

            oldVersion = int(sqlResults[0]['version'])
            oldRelease_group = (sqlResults[0]['release_group'])

            # a proper is only wanted when its version is strictly newer...
            if oldVersion > -1 and oldVersion < curProper.version:
                logger.log('Found new anime v' + str(curProper.version) + ' to replace existing v' + str(oldVersion))
            else:
                continue

            # ...and comes from the same release group as the existing file
            if oldRelease_group != curProper.release_group:
                logger.log('Skipping proper from release group: ' + curProper.release_group + ', does not match existing release group: ' + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            logger.log(u'Found a proper that we need: ' + str(curProper.name))
            finalPropers.append(curProper)

    return finalPropers
class ProperFinder(): def __init__(self): self.amActive = False self.updateInterval = datetime.timedelta(hours=1) check_propers_interval = {'15m': 15, '45m': 45, '90m': 90, '4h': 4*60, 'daily': 24*60} for curInterval in ('15m', '45m', '90m', '4h', 'daily'): if sickbeard.CHECK_PROPERS_INTERVAL == curInterval: self.updateInterval = datetime.timedelta(minutes = check_propers_interval[curInterval]) def run(self, force=False): if not sickbeard.DOWNLOAD_PROPERS: return # look for propers every night at 1 AM updateTime = datetime.time(hour=1) logger.log(u"Checking proper time", logger.DEBUG) hourDiff = datetime.datetime.today().time().hour - updateTime.hour dayDiff = (datetime.date.today() - self._get_lastProperSearch()).days if sickbeard.CHECK_PROPERS_INTERVAL == "daily" and not force: # if it's less than an interval after the update time then do an update if not (hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600 or dayDiff >= 1): return logger.log(u"Beginning the search for new propers") self.amActive = True propers = self._getProperList() if propers: self._downloadPropers(propers) self._set_lastProperSearch(datetime.datetime.today().toordinal()) msg = u"Completed the search for new propers, next check " if sickbeard.CHECK_PROPERS_INTERVAL == "daily": logger.log(u"%sat 1am tomorrow" % msg) else: logger.log(u"%sin ~%s" % (msg, sickbeard.CHECK_PROPERS_INTERVAL)) self.amActive = False def _getProperList(self): propers = {} # for each provider get a list of the origThreadName = threading.currentThread().name providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()] for curProvider in providers: threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]" search_date = datetime.datetime.today() - datetime.timedelta(days=2) logger.log(u"Searching for any new PROPER releases from " + curProvider.name) try: curPropers = curProvider.findPropers(search_date) except exceptions.AuthException, e: logger.log(u"Authentication 
error: " + ex(e), logger.ERROR) continue # if they haven't been added by a different provider than add the proper to the list for x in curPropers: name = self._genericName(x.name) if not name in propers: logger.log(u"Found new proper: " + x.name, logger.DEBUG) x.provider = curProvider propers[name] = x # reset thread name back to original threading.currentThread().name = origThreadName # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: in_cache = False try: myParser = NameParser(False) parse_result = myParser.parse(curProper.name) except InvalidNameException: logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG) continue if not parse_result.series_name: continue cacheResult = sickbeard.name_cache.retrieveNameFromCache(parse_result.series_name) if cacheResult: in_cache = True curProper.indexerid = int(cacheResult) elif cacheResult == 0: return None if not curProper.indexerid: showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: curProper.indexerid = int(showResult[0]) if not curProper.indexerid: for curShow in sickbeard.showList: if show_name_helpers.isGoodResult(curProper.name, curShow, False): curProper.indexerid = curShow.indexerid break showObj = None if curProper.indexerid: showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid) if not showObj: sickbeard.name_cache.addNameToCache(parse_result.series_name, 0) continue if not in_cache: sickbeard.name_cache.addNameToCache(parse_result.series_name, curProper.indexerid) # scene numbering -> indexer numbering parse_result = parse_result.convert(showObj) if not parse_result.episode_numbers: logger.log( u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG) continue # populate our Proper instance if parse_result.air_by_date or 
parse_result.sports: curProper.season = -1 curProper.episode = parse_result.air_date or parse_result.sports_event_date else: curProper.season = parse_result.season_number if parse_result.season_number != None else 1 curProper.episode = parse_result.episode_numbers[0] curProper.quality = Quality.nameQuality(curProper.name) # for each show in our list for curShow in sickbeard.showList: genericName = self._genericName(parse_result.series_name) # get the scene name masks sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow)) # for each scene name mask for curSceneName in sceneNames: # if it matches if genericName == self._genericName(curSceneName): logger.log( u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG) # set the indexerid in the db to the show's indexerid curProper.indexerid = curShow.indexerid # set the indexer in the db to the show's indexer curProper.indexer = curShow.indexer # since we found it, break out break # if we found something in the inner for loop break out of this one if curProper.indexerid != -1: break if not show_name_helpers.filterBadReleases(curProper.name): logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG) continue showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid) if not showObj: logger.log(u"Unable to find the show with indexerID " + str(curProper.indexerid), logger.ERROR) continue if showObj.rls_ignore_words and search.filter_release_name(curProper.name, showObj.rls_ignore_words): logger.log(u"Ignoring " + curProper.name + " based on ignored words filter: " + showObj.rls_ignore_words, logger.MESSAGE) continue if showObj.rls_require_words and not search.filter_release_name(curProper.name, showObj.rls_require_words): logger.log(u"Ignoring " + curProper.name + " based on required words filter: " + showObj.rls_require_words, logger.MESSAGE) continue # if we have an air-by-date show then 
get the real season/episode numbers if (parse_result.air_by_date or parse_result.sports_event_date) and curProper.indexerid: logger.log( u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode", logger.DEBUG) airdate = curProper.episode.toordinal() myDB = db.DBConnection() sql_result = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?", [curProper.indexerid, curProper.indexer, airdate]) if sql_result: curProper.season = int(sql_result[0][0]) curProper.episodes = [int(sql_result[0][1])] else: logger.log(u"Unable to find episode with date " + str( curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) continue # check if we actually want this proper (if it's the right quality) sqlResults = db.DBConnection().select( "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode]) if not sqlResults: continue oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality: continue # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map( operator.attrgetter('indexerid', 'season', 'episode'), finalPropers): logger.log(u"Found a proper that we need: " + str(curProper.name)) finalPropers.append(curProper) return finalPropers
def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]): myDB = self._getDB() parse_result = None # if we don't have complete info then parse the filename to get it for curName in [name] + extraNames: try: myParser = NameParser() parse_result = myParser.parse(curName) except InvalidNameException: logger.log( u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG) continue if not parse_result: logger.log( u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return False if not parse_result.series_name: logger.log( u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return False # if we need tvdb_id or tvrage_id then search the DB for them if not tvdb_id or not tvrage_id: # if we have only the tvdb_id, use the database if tvdb_id: showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id) if showObj: tvrage_id = showObj.tvrid tvdb_lang = showObj.lang else: logger.log( u"We were given a TVDB id " + str(tvdb_id) + " but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG) tvrage_id = 0 # if we have only a tvrage_id then use the database elif tvrage_id: showObj = helpers.findCertainTVRageShow( sickbeard.showList, tvrage_id) if showObj: tvdb_id = showObj.tvdbid tvdb_lang = showObj.lang else: logger.log( u"We were given a TVRage id " + str(tvrage_id) + " but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG) tvdb_id = 0 # if they're both empty then fill out as much info as possible by searching the show name else: showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: logger.log( parse_result.series_name + " was found to be show " + showResult[1] + " (" + str(showResult[0]) + ") in our DB.", logger.DEBUG) tvdb_id = showResult[0] else: logger.log( u"Couldn't figure out a show name straight from the DB, trying a regex search instead", 
logger.DEBUG) for curShow in sickbeard.showList: if sceneHelpers.isGoodResult(name, curShow, False): logger.log( u"Successfully matched " + name + " to " + curShow.name + " with regex", logger.DEBUG) tvdb_id = curShow.tvdbid tvdb_lang = curShow.lang break if tvdb_id: showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id) if not showObj: logger.log( u"This should never have happened, post a bug about this!", logger.ERROR) raise Exception("BAD STUFF HAPPENED") tvrage_id = showObj.tvrid tvdb_lang = showObj.lang if not season: season = parse_result.season_number if parse_result.season_number != None else 1 if not episodes: episodes = parse_result.episode_numbers # if we have an air-by-date show then get the real season/episode numbers if parse_result.air_by_date and tvdb_id: try: # There's gotta be a better way of doing this but we don't wanna # change the language value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if not (tvdb_lang == "" or tvdb_lang == "en" or tvdb_lang == None): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(**ltvdb_api_parms) epObj = t[tvdb_id].airedOn(parse_result.air_date)[0] season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound, e: logger.log( u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) return False
def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]): myDB = self._getDB() parse_result = None # if we don't have complete info then parse the filename to get it for curName in [name] + extraNames: try: myParser = NameParser() parse_result = myParser.parse(curName) except InvalidNameException: logger.log( u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG) continue if not parse_result: logger.log( u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return False if not parse_result.series_name: logger.log( u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return False tvdb_lang = None # if we need tvdb_id or tvrage_id then search the DB for them if not tvdb_id or not tvrage_id: # if we have only the tvdb_id, use the database if tvdb_id: showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id) if showObj: tvrage_id = showObj.tvrid tvdb_lang = showObj.lang else: logger.log( u"We were given a TVDB id " + str(tvdb_id) + " but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG) tvrage_id = 0 # if we have only a tvrage_id then use the database elif tvrage_id: showObj = helpers.findCertainTVRageShow( sickbeard.showList, tvrage_id) if showObj: tvdb_id = showObj.tvdbid tvdb_lang = showObj.lang else: logger.log( u"We were given a TVRage id " + str(tvrage_id) + " but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG) tvdb_id = 0 # if they're both empty then fill out as much info as possible by searching the show name else: # check the name cache and see if we already know what show this is logger.log( u"Checking the cache to see if we already know the tvdb id of " + parse_result.series_name, logger.DEBUG) tvdb_id = name_cache.retrieveNameFromCache( parse_result.series_name) # remember if the cache lookup worked or not so we know whether we should bother 
updating it later if tvdb_id == None: logger.log( u"No cache results returned, continuing on with the search", logger.DEBUG) from_cache = False else: logger.log( u"Cache lookup found " + repr(tvdb_id) + ", using that", logger.DEBUG) from_cache = True # if the cache failed, try looking up the show name in the database if tvdb_id == None: logger.log( u"Trying to look the show up in the show database", logger.DEBUG) showResult = helpers.searchDBForShow( parse_result.series_name) if showResult: logger.log( parse_result.series_name + " was found to be show " + showResult[1] + " (" + str(showResult[0]) + ") in our DB.", logger.DEBUG) tvdb_id = showResult[0] # if the DB lookup fails then do a comprehensive regex search if tvdb_id == None: logger.log( u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG) for curShow in sickbeard.showList: if show_name_helpers.isGoodResult( name, curShow, False): logger.log( u"Successfully matched " + name + " to " + curShow.name + " with regex", logger.DEBUG) tvdb_id = curShow.tvdbid tvdb_lang = curShow.lang break # if tvdb_id was anything but None (0 or a number) then if not from_cache: name_cache.addNameToCache(parse_result.series_name, tvdb_id) # if we came out with tvdb_id = None it means we couldn't figure it out at all, just use 0 for that if tvdb_id == None: tvdb_id = 0 # if we found the show then retrieve the show object if tvdb_id: showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id) if showObj: tvrage_id = showObj.tvrid tvdb_lang = showObj.lang # if we weren't provided with season/episode information then get it from the name that we parsed if not season: season = parse_result.season_number if parse_result.season_number != None else 1 if not episodes: episodes = parse_result.episode_numbers # if we have an air-by-date show then get the real season/episode numbers if parse_result.air_by_date and tvdb_id: try: # There's gotta be a better way of doing this but we don't wanna 
# change the language value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if not (tvdb_lang == "" or tvdb_lang == "en" or tvdb_lang == None): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(**ltvdb_api_parms) epObj = t[tvdb_id].airedOn(parse_result.air_date)[0] season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound: logger.log( u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) return False except tvdb_exceptions.tvdb_error, e: logger.log(u"Unable to contact TVDB: " + ex(e), logger.WARNING) return False
def _addCacheEntry(self, name, url, season=None, episodes=None, indexer_id=0, quality=None, extraNames=[]):
    # NOTE(review): extraNames=[] is a mutable default argument; it is only read
    # here ([name] + extraNames) so it is harmless, but it should be None-defaulted.
    """
    Parse a release name and store it in this provider's cache table.

    name: primary release name to parse
    url: download URL for the release
    season, episodes: known season/episode info; looked up from the parsed
        name (and, for air-by-date/sports shows, from the indexer) when absent
    indexer_id: indexer id of the show if already known, 0 to search for it
    quality: known quality (unused here if None)
    extraNames: alternative release names to try parsing if `name` fails

    Returns None on any failure to identify the release.
    """
    myDB = self._getDB()
    parse_result = None

    # if we don't have complete info then parse the filename to get it
    # NOTE(review): this loop does not break on success, so the LAST name that
    # parses wins; a name that fails to parse leaves the previous result intact.
    for curName in [name] + extraNames:
        try:
            myParser = NameParser()
            parse_result = myParser.parse(curName, True)
        except InvalidNameException:
            logger.log(
                u"Unable to parse the filename " + curName + " into a valid episode",
                logger.DEBUG)
            continue

    if not parse_result:
        logger.log(
            u"Giving up because I'm unable to parse this name: " + name,
            logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(
            u"No series name retrieved from " + name + ", unable to cache it",
            logger.DEBUG)
        return None

    indexer_lang = None

    if indexer_id:
        # if we have only the indexer_id, use the database
        showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
        if showObj:
            self.indexer = int(showObj.indexer)
            indexer_lang = showObj.lang
        else:
            # id doesn't match any show we track; fall through to the name search
            logger.log(
                u"We were given a Indexer ID " + str(indexer_id) +
                " but it doesn't match a show we have in our list, so leaving indexer_id empty",
                logger.DEBUG)
            indexer_id = 0

    # if no indexerID then fill out as much info as possible by searching the show name
    if not indexer_id:
        # check the name cache and see if we already know what show this is
        logger.log(
            u"Checking the cache to see if we already know the Indexer ID of " +
            parse_result.series_name, logger.DEBUG)
        indexer_id = name_cache.retrieveNameFromCache(parse_result.series_name)

        # remember if the cache lookup worked or not so we know whether we should bother
        # updating it later
        if indexer_id == None:
            logger.log(
                u"No cache results returned, continuing on with the search",
                logger.DEBUG)
            from_cache = False
        else:
            logger.log(
                u"Cache lookup found " + repr(indexer_id) + ", using that",
                logger.DEBUG)
            from_cache = True

        # if the cache failed, try looking up the show name in the database
        if indexer_id == None:
            logger.log(u"Trying to look the show up in the show database",
                       logger.DEBUG)
            showResult = helpers.searchDBForShow(parse_result.series_name)
            if showResult:
                logger.log(
                    u"" + parse_result.series_name + " was found to be show " +
                    showResult[2] + " (" + str(showResult[1]) + ") in our DB.",
                    logger.DEBUG)
                indexer_id = showResult[1]

        # if the DB lookup fails then do a comprehensive regex search
        if indexer_id == None:
            logger.log(
                u"Couldn't figure out a show name straight from the DB, trying a regex search instead",
                logger.DEBUG)
            for curShow in sickbeard.showList:
                if show_name_helpers.isGoodResult(name, curShow, False):
                    logger.log(
                        u"Successfully matched " + name + " to " + curShow.name + " with regex",
                        logger.DEBUG)
                    indexer_id = curShow.indexerid
                    indexer_lang = curShow.lang
                    break

        # if indexer_id was anything but None (0 or a number) then
        # record the result so future lookups hit the name cache
        if not from_cache:
            name_cache.addNameToCache(parse_result.series_name, indexer_id)

        # if we came out with indexer_id = None it means we couldn't figure it out at all,
        # just use 0 for that
        if indexer_id == None:
            indexer_id = 0

        # if we found the show then retrieve the show object
        if indexer_id:
            try:
                showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
            except (MultipleShowObjectsException):
                # ambiguous id -> treat as not found
                showObj = None
            if showObj:
                self.indexer = int(showObj.indexer)
                indexer_lang = showObj.lang

    # if we weren't provided with season/episode information then get it from the name
    # that we parsed
    if not season:
        season = parse_result.season_number if parse_result.season_number != None else 1

    if not episodes:
        episodes = parse_result.episode_numbers

    # if we have an air-by-date show then get the real season/episode numbers
    if (parse_result.air_by_date or parse_result.sports) and indexer_id:
        try:
            lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()

            # only override the language when it's set and not the default English
            if not (indexer_lang == "" or indexer_lang == "en" or indexer_lang == None):
                lINDEXER_API_PARMS['language'] = indexer_lang

            t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

            epObj = None
            if parse_result.air_by_date:
                epObj = t[indexer_id].airedOn(parse_result.air_date)[0]
            elif parse_result.sports:
                epObj = t[indexer_id].airedOn(parse_result.sports_date)[0]

            if epObj is None:
                return None

            season = int(epObj["seasonnumber"])
            episodes = [int(epObj["episodenumber"])]
        except sickbeard.indexer_episodenotfound:
            logger.log(
                u"Unable to find episode with date " + str(parse_result.air_date) +
                " for show " + parse_result.series_name + ", skipping",
                logger.WARNING)
            return None
        except sickbeard.indexer_error, e:
            logger.log(
                u"Unable to contact " + sickbeard.indexerApi(self.indexer).name +
                ": " + ex(e), logger.WARNING)
            return None
name = self._genericName(x.name) if not name in propers: logger.log(u"Found new proper: " + x.name, logger.DEBUG) x.provider = curProvider propers[name] = x # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: try: myParser = NameParser(False, showObj=curProper.show) parse_result = myParser.parse(curProper.name) except InvalidNameException: logger.log( u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG) continue except InvalidShowException: logger.log( u"Unable to parse the filename " + curProper.name + " into a valid show", logger.DEBUG) continue if not parse_result.series_name: continue
def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]):
    # NOTE(review): extraNames=[] is a mutable default argument; only read here,
    # but should be None-defaulted.
    """
    Parse a release name and cache it, resolving TVDB/TVRage ids as needed.

    Whichever of tvdb_id/tvrage_id is missing is looked up from the other via
    the show list; if both are missing the show name is searched in the DB and
    then matched by regex against every tracked show.

    Returns False on any failure to identify the release.
    """
    myDB = self._getDB()
    parse_result = None

    # if we don't have complete info then parse the filename to get it
    # NOTE(review): no break on success, so the LAST parsable name wins.
    for curName in [name] + extraNames:
        try:
            myParser = NameParser()
            parse_result = myParser.parse(curName)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+curName+" into a valid episode", logger.DEBUG)
            continue

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: "+name, logger.DEBUG)
        return False

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from "+name+", unable to cache it", logger.DEBUG)
        return False

    # if we need tvdb_id or tvrage_id then search the DB for them
    if not tvdb_id or not tvrage_id:

        # if we have only the tvdb_id, use the database
        if tvdb_id:
            showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
            if showObj:
                tvrage_id = showObj.tvrid
            else:
                logger.log(u"We were given a TVDB id "+str(tvdb_id)+" but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG)
                tvrage_id = 0

        # if we have only a tvrage_id then use the database
        elif tvrage_id:
            showObj = helpers.findCertainTVRageShow(sickbeard.showList, tvrage_id)
            if showObj:
                tvdb_id = showObj.tvdbid
            else:
                logger.log(u"We were given a TVRage id "+str(tvrage_id)+" but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG)
                tvdb_id = 0

        # if they're both empty then fill out as much info as possible by searching the show name
        else:
            showResult = helpers.searchDBForShow(parse_result.series_name)
            if showResult:
                logger.log(parse_result.series_name+" was found to be show "+showResult[1]+" ("+str(showResult[0])+") in our DB.", logger.DEBUG)
                tvdb_id = showResult[0]
            else:
                # DB lookup failed; fall back to a regex match over every tracked show
                logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG)
                for curShow in sickbeard.showList:
                    if sceneHelpers.isGoodResult(name, curShow, False):
                        logger.log(u"Successfully matched "+name+" to "+curShow.name+" with regex", logger.DEBUG)
                        tvdb_id = curShow.tvdbid
                        break

            # we just derived tvdb_id from our own show list/DB, so a miss here
            # indicates internal inconsistency
            if tvdb_id:
                showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
                if not showObj:
                    logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                    raise Exception("BAD STUFF HAPPENED")
                tvrage_id = showObj.tvrid

    # if we weren't provided with season/episode information then get it from the name
    # that we parsed
    if not season:
        season = parse_result.season_number if parse_result.season_number != None else 1
    if not episodes:
        episodes = parse_result.episode_numbers

    # if we have an air-by-date show then get the real season/episode numbers
    if parse_result.air_by_date and tvdb_id:
        try:
            t = tvdb_api.Tvdb(**sickbeard.TVDB_API_PARMS)
            epObj = t[tvdb_id].airedOn(parse_result.air_date)[0]
            season = int(epObj["seasonnumber"])
            episodes = [int(epObj["episodenumber"])]
        except tvdb_exceptions.tvdb_episodenotfound, e:
            logger.log(u"Unable to find episode with date "+str(parse_result.air_date)+" for show "+parse_result.series_name+", skipping", logger.WARNING)
            return False
def _addCacheEntry(self, name, url):
    """
    Parse a release name, resolve its show via the name cache or DB, convert
    scene numbering, and insert one cache row per (season, episodes) group.

    name: release name to parse and cache
    url: download URL stored alongside the entry

    Returns None on any failure to identify/convert the release.
    """
    cacheDB = self._getDB()
    parse_result = None
    from_cache = False
    indexer_id = None

    # if we don't have complete info then parse the filename to get it
    # (the while loop only exists so the lookup branches can `break` out early;
    # it never iterates more than once)
    while(True):
        try:
            myParser = NameParser()
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None

        if not parse_result:
            logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
            return None

        if not parse_result.series_name:
            logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
            return None

        logger.log(
            u"Checking the cache for show:" + str(parse_result.series_name),
            logger.DEBUG)

        # remember if the cache lookup worked or not so we know whether we should bother
        # updating it later
        cache_id = name_cache.retrieveNameFromCache(parse_result.series_name)
        if cache_id:
            # BUGFIX: previously logged repr(indexer_id), which was still None here
            logger.log(u"Cache lookup found Indexer ID:" + repr(cache_id) +
                       ", using that for " + parse_result.series_name, logger.DEBUG)
            from_cache = True
            indexer_id = cache_id
            break

        # if the cache failed, try looking up the show name in the database
        logger.log(
            u"Checking the database for show:" + str(parse_result.series_name),
            logger.DEBUG)

        showResult = helpers.searchDBForShow(parse_result.series_name)
        if showResult:
            logger.log(
                u"Database lookup found Indexer ID:" + str(showResult[1]) +
                ", using that for " + parse_result.series_name, logger.DEBUG)
            indexer_id = showResult[1]
            break

        # if we didn't find a Indexer ID return None
        if not indexer_id:
            return None

    # if the show isn't in our database then return None
    try:
        showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
    except Exception:
        # narrowed from a bare except: still best-effort, but no longer
        # swallows KeyboardInterrupt/SystemExit
        return None

    if not showObj:
        return None

    # if we weren't provided with season/episode information then get it from the name
    # that we parsed
    season = None
    episodes = None
    myDB = db.DBConnection()
    if parse_result.air_by_date:
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
            [showObj.indexerid, parse_result.air_date.toordinal()])
        # BUGFIX: was `if sql_results > 0:` — in Python 2 a list always compares
        # greater than an int, so an empty result set fell through to
        # sql_results[0] and raised IndexError
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    elif parse_result.sports:
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
            [showObj.indexerid, parse_result.sports_date.toordinal()])
        # BUGFIX: same empty-result check as above
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    if not (season and episodes):
        return None

    # convert scene numbered releases before storing to cache
    convertedEps = {}
    for curEp in episodes:
        epObj = showObj.getEpisode(season, curEp, sceneConvert=True)
        if not epObj:
            return None
        if not epObj.season in convertedEps:
            convertedEps[epObj.season] = []
        convertedEps[epObj.season].append(epObj.episode)

    # get the current timestamp
    curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

    # get quality of release
    quality = Quality.sceneQuality(name)

    if not isinstance(name, unicode):
        name = unicode(name, 'utf-8')

    # one row per converted season, with episodes packed as "|e1|e2|...|"
    for season, episodes in convertedEps.items():
        episodeText = "|" + "|".join(map(str, episodes)) + "|"
        cacheDB.action(
            "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
            [name, season, episodeText, indexer_id, url, curTimestamp, quality])
def splitResult(result):
    """
    Split a whole-season NZB search result into one result per episode.

    :param result: search result object covering a full season NZB
    :return: False upon failure, a list of per-episode result objects otherwise
    """
    raw_nzb_data = helpers.getURL(result.url, session=requests.Session(), needBytes=True)
    if raw_nzb_data is None:
        logging.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season release name to learn which season this NZB covers
    try:
        season_parse = NameParser(False, showObj=result.show).parse(result.name)
    except InvalidNameException:
        logging.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        logging.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # a missing season number means season 1
    season = 1 if season_parse.season_number is None else season_parse.season_number

    nzb_chunks, xmlns = getSeasonNZBs(result.name, raw_nzb_data, season)

    split_results = []

    for chunk_name in nzb_chunks:
        logging.debug("Split out " + chunk_name + " from " + result.name)

        # parse each split-out NZB's name
        try:
            chunk_parse = NameParser(False, showObj=result.show).parse(chunk_name)
        except InvalidNameException:
            logging.debug("Unable to parse the filename " + chunk_name + " into a valid episode")
            return False
        except InvalidShowException:
            logging.debug("Unable to parse the filename " + chunk_name + " into a valid show")
            return False

        # sanity check: the chunk must belong to the season we split
        # (again treating a missing season number as season 1)
        chunk_season = 1 if chunk_parse.season_number is None else chunk_parse.season_number
        if chunk_season != season:
            logging.warning(
                "Found " + chunk_name + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it")
            continue
        elif len(chunk_parse.episode_numbers) == 0:
            logging.warning(
                "Found " + chunk_name + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        # skip the chunk if any of its episodes isn't wanted at this quality
        keep_chunk = True
        for episode_number in chunk_parse.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, episode_number, result.quality):
                logging.info("Ignoring result " + chunk_name +
                             " because we don't want an episode that is " +
                             Quality.qualityStrings[result.quality])
                keep_chunk = False
                break

        if not keep_chunk:
            continue

        # collect the episode objects this chunk covers
        episode_objects = [result.extraInfo[0].getEpisode(season, ep_num)
                           for ep_num in chunk_parse.episode_numbers]

        # build the per-episode search result
        chunk_result = classes.NZBDataSearchResult(episode_objects)
        chunk_result.name = chunk_name
        chunk_result.provider = result.provider
        chunk_result.quality = result.quality
        chunk_result.extraInfo = [createNZBString(nzb_chunks[chunk_name], xmlns)]

        split_results.append(chunk_result)

    return split_results
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    """
    Search all active providers for PROPER/REPACK releases and verify them.

    aired_since_shows: search cut-off date passed to each provider
    recent_shows: indexer ids of recently-aired regular shows
    recent_anime: indexer ids of recently-aired anime shows

    Returns a list of verified Proper objects (deduped per episode) whose
    quality matches an episode we already downloaded/snatched.
    """
    propers = {}

    # for each provider get a list of propers
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                     anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
        np = NameParser(False, try_scene_exceptions=True)
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    # NOTE(review): parses x.title but logs/stores x.name —
                    # presumably equivalent for these result objects; confirm
                    parse_result = np.parse(x.title)
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except Exception:
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by date
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    verified_propers = []
    for cur_proper in sorted_propers:
        # BUGFIX: re-parse THIS proper's name. Previously this loop reused the
        # stale `parse_result` left over from the last iteration of the search
        # loop above, so every proper inherited the show/season/episode of
        # whatever release happened to be parsed last (and NameError if no
        # provider returned anything).
        try:
            parse_result = NameParser(False, try_scene_exceptions=True).parse(cur_proper.name)
        except Exception:
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                   [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep
        # (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            my_db = db.DBConnection()
            sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])
            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue
            if cur_proper.release_group != old_release_group:
                logger.log(u'Skipping proper from release group: %s, does not match existing release group: %s' %
                           (cur_proper.release_group, old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for
        # that particular episode then add it to our list of propers
        if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
            logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers
def _getProperList():
    """
    Search all active providers for PROPER/REPACK releases from the last two
    days, filter them against word lists and existing episode quality, and
    return the deduped list of Proper objects worth downloading.
    """
    propers = {}

    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of propers
    origThreadName = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for curProvider in providers:
        # tag the thread name with the provider so log lines are attributable
        threading.currentThread().name = origThreadName + ' :: [' + curProvider.name + ']'

        logger.log(u'Searching for any new PROPER releases from ' + curProvider.name)

        try:
            curPropers = curProvider.find_propers(search_date)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            threading.currentThread().name = origThreadName

        # if they haven't been added by a different provider then add the proper to the list
        for x in curPropers:
            name = _genericName(x.name)
            if not name in propers:
                logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date (newest first)
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []
    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid episode', logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid show', logger.DEBUG)
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            # season-pack propers can't be matched to a single episode row
            logger.log(
                u'Ignoring ' + curProper.name + ' because it\'s for a full season rather than specific episode',
                logger.DEBUG)
            continue

        logger.log(
            u'Successful match! Result ' + parse_result.original_name + ' matched to show ' + parse_result.show.name,
            logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.season = parse_result.season_number if parse_result.season_number != None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not curProper.release_group and curProper.version == -1:
                logger.log(u'Proper ' + curProper.name + ' doesn\'t have a release group and version, ignoring it',
                           logger.DEBUG)
                continue

        if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
            logger.log(u'Proper ' + curProper.name + ' isn\'t a valid scene release that we want, ignoring it',
                       logger.DEBUG)
            continue

        # per-show ignore/require word filters
        if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name,
                                                                             parse_result.show.rls_ignore_words):
            logger.log(
                u'Ignoring ' + curProper.name + ' based on ignored words filter: ' + parse_result.show.rls_ignore_words,
                logger.MESSAGE)
            continue

        if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name,
                                                                                  parse_result.show.rls_require_words):
            logger.log(
                u'Ignoring ' + curProper.name + ' based on required words filter: ' + parse_result.show.rls_require_words,
                logger.MESSAGE)
            continue

        # check if we actually want this proper (if it's the right quality)
        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                 [curProper.indexerid, curProper.season, curProper.episode])
        if not sqlResults:
            continue

        # only keep the proper if we have already retrieved the same quality ep
        # (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]['status']))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            myDB = db.DBConnection()
            sqlResults = myDB.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [curProper.indexerid, curProper.season, curProper.episode])
            oldVersion = int(sqlResults[0]['version'])
            oldRelease_group = (sqlResults[0]['release_group'])

            if oldVersion > -1 and oldVersion < curProper.version:
                logger.log('Found new anime v' + str(curProper.version) + ' to replace existing v' + str(oldVersion))
            else:
                continue

            if oldRelease_group != curProper.release_group:
                logger.log('Skipping proper from release group: ' + curProper.release_group +
                           ', does not match existing release group: ' + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for
        # that particular episode then add it to our list of propers
        if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            logger.log(u'Found a proper that we need: ' + str(curProper.name))
            finalPropers.append(curProper)

    return finalPropers
name = self._genericName(x.name) if not name in propers: logger.log(u"Found new proper: " + x.name, logger.DEBUG) x.provider = curProvider propers[name] = x # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: try: myParser = NameParser(False) parse_result = myParser.parse(curProper.name) except InvalidNameException: logger.log( u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG) continue except InvalidShowException: logger.log( u"Unable to parse the filename " + curProper.name + " into a valid show", logger.DEBUG) continue if not parse_result.series_name: continue
import tvdb_api import tmdb import re from name_parser.parser import NameParser, InvalidNameException filename = "" np = NameParser(True) tvdb = tvdb_api.Tvdb() parsed = np.parse(filename) series_name = string.capwords(parsed.series_name.lower()) show = tvdb[series_name] print show
class ProperFinder():
    """
    Scheduled task that looks for PROPER releases of episodes we already have
    and queues the ones worth replacing.

    NOTE(review): run() calls self._downloadPropers and _getProperList calls
    self._genericName, neither of which is visible in this span — presumably
    defined on this class elsewhere; verify before moving this code.
    """

    def __init__(self):
        # minimum gap between proper searches
        self.updateInterval = datetime.timedelta(hours=1)

    def run(self):
        """Run one scheduled check; only searches during the 1 AM window."""
        if not sickbeard.DOWNLOAD_PROPERS:
            return

        # look for propers every night at 1 AM
        updateTime = datetime.time(hour=1)

        logger.log(u"Checking proper time", logger.DEBUG)

        hourDiff = datetime.datetime.today().time().hour - updateTime.hour

        # if it's less than an interval after the update time then do an update
        if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600:
            logger.log(u"Beginning the search for new propers")
        else:
            return

        propers = self._getProperList()

        self._downloadPropers(propers)

    def _getProperList(self):
        """
        Query every active provider for recent propers, match them to tracked
        shows by scene-name comparison, and return the list worth fetching.
        """
        propers = {}

        # for each provider get a list of the propers
        for curProvider in providers.sortedProviderList():

            if not curProvider.isActive():
                continue

            search_date = datetime.datetime.today() - datetime.timedelta(
                days=2)

            logger.log(u"Searching for any new PROPER releases from " +
                       curProvider.name)
            try:
                curPropers = curProvider.findPropers(search_date)
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                continue

            # if they haven't been added by a different provider then add the proper
            # to the list
            for x in curPropers:
                name = self._genericName(x.name)

                if not name in propers:
                    logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x

        # take the list of unique propers and get it sorted by date (newest first)
        sortedPropers = sorted(propers.values(),
                               key=operator.attrgetter('date'),
                               reverse=True)

        finalPropers = []

        for curProper in sortedPropers:

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + curProper.name +
                    " into a valid episode", logger.DEBUG)
                continue

            if not parse_result.episode_numbers:
                # season packs can't be matched to a single episode
                logger.log(
                    u"Ignoring " + curProper.name +
                    " because it's for a full season rather than specific episode",
                    logger.DEBUG)
                continue

            # populate our Proper instance
            # air-by-date shows use season == -1 as a marker; the real
            # season/episode is resolved from TVDB further down
            if parse_result.air_by_date:
                curProper.season = -1
                curProper.episode = parse_result.air_date
            else:
                curProper.season = parse_result.season_number if parse_result.season_number != None else 1
                curProper.episode = parse_result.episode_numbers[0]

            curProper.quality = Quality.nameQuality(curProper.name)

            # for each show in our list
            for curShow in sickbeard.showList:

                if not parse_result.series_name:
                    continue

                genericName = self._genericName(parse_result.series_name)

                # get the scene name masks
                sceneNames = set(
                    show_name_helpers.makeSceneShowSearchStrings(curShow))

                # for each scene name mask
                for curSceneName in sceneNames:

                    # if it matches
                    if genericName == self._genericName(curSceneName):
                        logger.log(
                            u"Successful match! Result " +
                            parse_result.series_name + " matched to show " +
                            curShow.name, logger.DEBUG)

                        # set the tvdbid in the db to the show's tvdbid
                        curProper.tvdbid = curShow.tvdbid

                        # since we found it, break out
                        break

                # if we found something in the inner for loop break out of this one
                if curProper.tvdbid != -1:
                    break

            if curProper.tvdbid == -1:
                continue

            if not show_name_helpers.filterBadReleases(curProper.name):
                logger.log(
                    u"Proper " + curProper.name +
                    " isn't a valid scene release that we want, igoring it",
                    logger.DEBUG)
                continue

            # if we have an air-by-date show then get the real season/episode numbers
            if curProper.season == -1 and curProper.tvdbid:
                showObj = helpers.findCertainShow(sickbeard.showList,
                                                  curProper.tvdbid)
                if not showObj:
                    # tvdbid came from our own show list, so a miss is an
                    # internal inconsistency
                    logger.log(
                        u"This should never have happened, post a bug about this!",
                        logger.ERROR)
                    raise Exception("BAD STUFF HAPPENED")

                tvdb_lang = showObj.lang
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                if tvdb_lang and not tvdb_lang == 'en':
                    ltvdb_api_parms['language'] = tvdb_lang

                try:
                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
                    epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                    curProper.season = int(epObj["seasonnumber"])
                    # NOTE(review): assigns curProper.episodes (plural) while
                    # everything below reads curProper.episode (singular), which
                    # still holds the air date here — looks like a latent bug;
                    # confirm against the Proper class before changing
                    curProper.episodes = [int(epObj["episodenumber"])]
                except tvdb_exceptions.tvdb_episodenotfound:
                    logger.log(
                        u"Unable to find episode with date " +
                        str(curProper.episode) + " for show " +
                        parse_result.series_name + ", skipping",
                        logger.WARNING)
                    continue

            # check if we actually want this proper (if it's the right quality)
            sqlResults = db.DBConnection().select(
                "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [curProper.tvdbid, curProper.season, curProper.episode])
            if not sqlResults:
                continue
            oldStatus, oldQuality = Quality.splitCompositeStatus(
                int(sqlResults[0]["status"]))

            # only keep the proper if we have already retrieved the same quality ep
            # (don't get better/worse ones)
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
                continue

            # if the show is in our list and there hasn't been a proper already added
            # for that particular episode then add it to our list of propers
            if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season,
                                           curProper.episode) not in map(
                                               operator.attrgetter(
                                                   'tvdbid', 'season',
                                                   'episode'), finalPropers):
                logger.log(u"Found a proper that we need: " + str(curProper.name))
                finalPropers.append(curProper)

        return finalPropers
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
    """
    Search active providers for PROPER/REPACK releases and return the ones we
    actually want to snatch.

    aired_since_shows: search cutoff date passed to each provider's find_propers
    recent_shows: sequence of (indexer, indexerid) pairs for recently aired shows
                  (membership-tested against the parsed result's show)
    recent_anime: sequence of (indexer, indexerid) pairs for recent anime; when
                  empty, anime-only providers are skipped
    proper_list: optional pre-fetched mapping of provider id -> list of propers;
                 when given, providers are not searched again

    Returns: list of verified proper objects (de-duplicated per episode, best
    proper_level kept).
    """
    propers = {}

    # for each provider get a list of the propers it can see
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        if None is not proper_list:
            # results were supplied by the caller; skip providers with none
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            # tag the thread name with the provider while searching
            threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

            logger.log(u'Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the proper to the list
        count = 0
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show, indexer_lookup=False)
                    parse_result = np.parse(x.name)
                    # only keep results that parse to an episode of a recently aired show
                    if parse_result.series_name and parse_result.episode_numbers and \
                            (parse_result.show.indexer, parse_result.show.indexerid) in recent_shows + recent_anime:
                        cur_size = getattr(x, 'size', None)
                        # skip releases we've already failed on
                        if failed_history.has_failed(x.name, cur_size, cur_provider.name):
                            continue
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
                                                                              parse_result.version,
                                                                              parse_result.is_anime,
                                                                              check_is_repack=True)
                        x.is_internal = parse_result.extra_info_no_name() and \
                            re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
                        x.codec = _get_codec(parse_result.extra_info_no_name())
                        propers[name] = x
                        count += 1
                except (InvalidNameException, InvalidShowException):
                    continue
                except (StandardError, Exception):
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by proper level, then date
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('properlevel', 'date'), reverse=True)
    verified_propers = set()
    for cur_proper in sorted_propers:

        np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
        try:
            parse_result = np.parse(cur_proper.name)
        except (StandardError, Exception):
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.extra_info = parse_result.extra_info
        cur_proper.extra_info_no_name = parse_result.extra_info_no_name
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
        cur_proper.is_anime = parse_result.is_anime

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        # apply the show's own ignore/require word lists (match anywhere in the name)
        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND indexer = ? ' +
            'AND season = ? AND episode = ?',
            [cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        # don't take proper of the same level we already downloaded
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(cur_proper.extra_info_no_name(),
                                                                                 cur_proper.version,
                                                                                 cur_proper.is_anime,
                                                                                 check_is_repack=True)
        old_release_group = sql_results[0]['release_group']

        # check if we want this release: same quality as current, current has correct status
        # restrict other release group releases to proper's
        if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                or cur_proper.quality != old_quality \
                or (cur_proper.is_repack and cur_proper.release_group != old_release_group):
            continue

        # re-parse the release we already have, to compare its proper metadata
        np = NameParser(False, try_scene_exceptions=True, showObj=parse_result.show, indexer_lookup=False)
        try:
            extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
        except (StandardError, Exception):
            extra_info = None

        old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
            get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
                                 parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
                                 cur_proper.version, cur_proper.is_anime)

        # fall back to the stored release name when no old name was found
        old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
        if cur_proper.proper_level < old_proper_level:
            continue
        elif cur_proper.proper_level == old_proper_level:
            # equal level: only accept a codec upgrade (x264 over xvid) or an
            # external release over an internal one
            if '264' == cur_proper.codec and 'xvid' == old_codec:
                pass
            elif old_is_internal and not cur_proper.is_internal:
                pass
            else:
                continue

        log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]' \
                       % (cur_proper.release_group, old_release_group, cur_proper.name)

        # treat SDTV with a web(-dl/rip) marker in the old release name as web too
        is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                  (old_quality == Quality.SDTV and
                   re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I)))

        if is_web:
            old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
            new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(), cur_proper.name)
            if old_webdl_type != new_webdl_type:
                logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
                           % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                continue

        # for webldls, prevent propers from different groups
        if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
            logger.log(log_same_grp, logger.DEBUG)
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            old_version = int(sql_results[0]['version'])
            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1:
            if (cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode) not in map(
                    operator.attrgetter('indexerid', 'indexer', 'season', 'episode'), verified_propers):
                logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
                verified_propers.add(cur_proper)
            else:
                # a proper for this episode exists already; replace any with a lower proper level
                rp = set()
                for vp in verified_propers:
                    if vp.indexer == cur_proper.indexer and vp.indexerid == cur_proper.indexerid and \
                            vp.season == cur_proper.season and vp.episode == cur_proper.episode and \
                            vp.proper_level < cur_proper.proper_level:
                        rp.add(vp)
                if rp:
                    verified_propers = verified_propers - rp
                    logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
                    verified_propers.add(cur_proper)

    return list(verified_propers)
def _addCacheEntry(self, name, url, season=None, episodes=None, indexer_id=0, quality=None, extraNames=None):
    """
    Resolve a release name to a show (indexer id) plus season/episode numbers
    so it can be cached.

    name: primary release name to parse
    url: release url (kept for the caller; not used by the lookups here)
    season/episodes: optional known values; when absent they come from parsing
    indexer_id: optional known indexer id; 0 means "look it up"
    quality: optional known quality (unused in the visible portion)
    extraNames: optional alternative names to try parsing as well
                (was a mutable default `[]`; now None to avoid the shared
                mutable-default pitfall)

    Returns None on any failure; otherwise falls through after resolving
    season/episodes (NOTE(review): no explicit final return is visible here —
    presumably the insert/return follows elsewhere; confirm against callers).
    """
    myDB = self._getDB()
    parse_result = None

    # if we don't have complete info then parse the filename to get it
    for curName in [name] + (extraNames or []):
        try:
            myParser = NameParser()
            parse_result = myParser.parse(curName)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG)
            continue

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
        return None

    indexer_lang = None

    if indexer_id:
        # if we have only the indexer_id, use the database
        showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
        if showObj:
            self.indexer = int(showObj.indexer)
            indexer_lang = showObj.lang
        else:
            logger.log(u"We were given a Indexer ID " + str(indexer_id) + " but it doesn't match a show we have in our list, so leaving indexer_id empty", logger.DEBUG)
            indexer_id = 0

    # if no indexerID then fill out as much info as possible by searching the show name
    if not indexer_id:
        # remember if the cache lookup worked or not so we know whether we should bother updating it later
        from_cache = False

        # check the name cache and see if we already know what show this is
        logger.log(u"Checking the cache for Indexer ID of " + parse_result.series_name, logger.DEBUG)

        indexer_id = name_cache.retrieveNameFromCache(parse_result.series_name)
        if indexer_id:
            logger.log(u"Cache lookup found " + repr(indexer_id) + ", using that", logger.DEBUG)
            from_cache = True

        # if the cache failed, try looking up the show name in the database
        if not indexer_id:
            logger.log(u"Checking the database for Indexer ID of " + str(parse_result.series_name), logger.DEBUG)
            showResult = helpers.searchDBForShow(parse_result.series_name)
            if showResult:
                logger.log(
                    u"" + parse_result.series_name + " was found to be show " + showResult[2] + " (" + str(showResult[1]) + ") in our DB.",
                    logger.DEBUG)
                indexer_id = showResult[1]

        # if the database failed, try looking up the show name from scene exceptions list
        if not indexer_id:
            logger.log(u"Checking the scene exceptions list for Indexer ID of " + parse_result.series_name, logger.DEBUG)
            sceneResult = sickbeard.scene_exceptions.get_scene_exception_by_name(parse_result.series_name)
            if sceneResult:
                logger.log(
                    u"" + str(parse_result.series_name) + " was found in scene exceptions list with Indexer ID: " + str(sceneResult),
                    logger.DEBUG)
                indexer_id = sceneResult

        # if the DB lookup fails then do a comprehensive regex search
        if not indexer_id:
            logger.log(u"Checking the shows list for Indexer ID of " + str(parse_result.series_name), logger.DEBUG)
            for curShow in sickbeard.showList:
                if show_name_helpers.isGoodResult(name, curShow, False):
                    logger.log(u"Successfully matched " + name + " to " + curShow.name + " from shows list", logger.DEBUG)
                    indexer_id = curShow.indexerid
                    indexer_lang = curShow.lang
                    break

        # as a last resort, ask the indexers themselves
        if not indexer_id:
            logger.log(u"Checking Indexers for Indexer ID of " + parse_result.series_name, logger.DEBUG)

            # check indexers (was a bare `except:` — narrowed to Exception so
            # KeyboardInterrupt/SystemExit are no longer swallowed)
            try:
                indexerResult = helpers.searchIndexerForShowID(parse_result.series_name)
            except Exception:
                indexerResult = None

            if indexerResult:
                logger.log(
                    u"" + str(parse_result.series_name) + " was found on " + str(sickbeard.indexerApi(indexerResult[0]).name) + " with Indexer ID: " + str(indexerResult[1]),
                    logger.DEBUG)
                indexer_id = indexerResult[1]

        # if indexer_id was anything but None (0 or a number) then cache it
        if not from_cache:
            name_cache.addNameToCache(parse_result.series_name, indexer_id)

    # if we came out with indexer_id = None it means we couldn't figure it out at all, just use 0 for that
    if indexer_id is None:
        indexer_id = 0

    # if we found the show then retrieve the show object
    if indexer_id:
        try:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
        except MultipleShowObjectsException:
            showObj = None
        if showObj:
            self.indexer = int(showObj.indexer)
            indexer_lang = showObj.lang

    # if we weren't provided with season/episode information then get it from the name that we parsed
    if not season:
        season = parse_result.season_number if parse_result.season_number is not None else 1
    if not episodes:
        episodes = parse_result.episode_numbers

    # if we have an air-by-date show then get the real season/episode numbers
    if (parse_result.air_by_date or parse_result.sports) and indexer_id:
        try:
            lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
            if indexer_lang not in ("", "en", None):
                lINDEXER_API_PARMS['language'] = indexer_lang
            t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

            epObj = None
            if parse_result.air_by_date:
                epObj = t[indexer_id].airedOn(parse_result.air_date)[0]
            elif parse_result.sports:
                epObj = t[indexer_id].airedOn(parse_result.sports_date)[0]

            if epObj is None:
                return None

            season = int(epObj["seasonnumber"])
            episodes = [int(epObj["episodenumber"])]
        except sickbeard.indexer_episodenotfound:
            logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
            return None
        except sickbeard.indexer_error as e:
            # was py2-only `except ..., e` comma syntax; `as` matches the rest of the file
            logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e), logger.WARNING)
            return None
def splitResult(result):
    """
    Split a season-pack NZB search result into per-episode NZB results.

    result: a search result whose url points at a season NZB and whose
            extraInfo[0] is the show object (provides wantEpisode/getEpisode)

    Returns: a list of NZBDataSearchResult objects (possibly empty), or
    False when the NZB can't be downloaded or a name can't be parsed.
    """
    urlData = helpers.getURL(result.url)
    if urlData is None:
        logger.log(u"Unable to load url " + result.url + ", can't download season NZB", logger.ERROR)
        return False

    # parse the season ep name
    try:
        np = NameParser(False)
        parse_result = np.parse(result.name).convert()
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + result.name + " into a valid episode", logger.WARNING)
        return False

    # bust it up
    season = parse_result.season_number if parse_result.season_number != None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:

        logger.log(u"Split out " + newNZB + " from " + result.name, logger.DEBUG)

        # parse the name
        try:
            np = NameParser(False)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            # NOTE: one unparseable split name aborts the whole split (returns False)
            logger.log(u"Unable to parse the filename " + newNZB + " into a valid episode", logger.WARNING)
            return False

        # make sure the result is sane: the piece must belong to the pack's season
        if (parse_result.season_number != None and parse_result.season_number != season) or (
                parse_result.season_number == None and season != 1):
            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it", logger.WARNING)
            continue
        elif len(parse_result.episode_numbers) == 0:
            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it", logger.WARNING)
            continue

        # skip the piece if any of its episodes is unwanted at this quality
        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " + Quality.qualityStrings[result.quality], logger.DEBUG)
                wantEp = False
                break
        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].getEpisode(season, curEp))

        # make a result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
def filterBadReleases(name, language):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents plus the configured global and
    per-language ignore/mandatory word lists.

    name: the release name to check
    language: the language section whose ignore/mandatory words apply

    Returns: True if the release name is OK, False if it's bad.
    """
    logger.log(u"-----------------------------------------")
    logger.log(u"filterBadReleases::Name :" + name)
    logger.log(u"filterBadReleases::Language :" + language)
    logger.log(u"-----------------------------------------")

    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    logger.log(u"---> " + check_string)
    if not check_string:
        return True

    # global ignore words (the built-in resultFilters always apply as well)
    g_i_w = sickbeard.getLanguageSetting(sickbeard.CFG, "Languages", "general", "ignore_words", "")
    if g_i_w is not None:
        logger.log(u"==> global ignore:" + g_i_w)
        for word in resultFilters + g_i_w.split(','):
            # assumes ignoreWordFilter returns a bool — TODO confirm
            if word.strip() and not ignoreWordFilter(check_string, word):
                return False

    # per-language ignore words
    l_i_w = sickbeard.getLanguageSetting(sickbeard.CFG, "Languages", language, "ignore_words", "")
    if l_i_w is not None:
        # fixed log typo: was "lagnuage"
        logger.log(u"==> language ignore:" + l_i_w)
        for word in resultFilters + l_i_w.split(','):
            if word.strip() and not ignoreWordFilter(check_string, word):
                return False

    # per-language mandatory words: each one must be present
    m_w = sickbeard.getLanguageSetting(sickbeard.CFG, "Languages", language, "mandatory", "")
    if m_w is not None:
        logger.log(u"==> language mandatory:" + m_w)
        for word in m_w.split(','):
            # assumes mandatoryFilter returns a bool — TODO confirm
            if word.strip() and not mandatoryFilter(check_string, word):
                return False

    return True
def _addCacheEntry(self, name, url, quality=None):
    """
    Parse a release name, resolve it to a known show, and build the SQL for
    inserting it into this provider's RSS cache table.

    name: the release name to cache
    url: the release's download url
    quality: optional known quality; derived from the name when None

    Returns: [sql, params] for the cache INSERT, or None when the name can't
    be parsed/matched or no season+episodes could be determined.
    """
    indexerid = None
    in_cache = False

    # if we don't have complete info then parse the filename to get it
    try:
        myParser = NameParser()
        parse_result = myParser.parse(name).convert()
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
        return None

    # try the name cache first, then the database
    cacheResult = sickbeard.name_cache.retrieveNameFromCache(parse_result.series_name)
    if cacheResult:
        in_cache = True
        indexerid = int(cacheResult)

    if not indexerid:
        showResult = helpers.searchDBForShow(parse_result.series_name)
        if showResult:
            indexerid = int(showResult[0])

    showObj = None
    if indexerid:
        showObj = helpers.findCertainShow(sickbeard.showList, indexerid)

    if not showObj:
        logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
        return None

    season = episodes = None
    if parse_result.air_by_date or parse_result.sports:
        myDB = db.DBConnection()

        # BUG FIX: was `air_date.toordinal() or sports_event_date.toordinal()`,
        # which raises AttributeError when air_date is None (sports results)
        airdate = parse_result.air_date.toordinal() if parse_result.air_date \
            else parse_result.sports_event_date.toordinal()
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
            [indexerid, showObj.indexer, airdate])
        # BUG FIX: was `if sql_results > 0:` — in py2 a list always compares
        # greater than an int, so an empty result set crashed on sql_results[0]
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        season = parse_result.season_number if parse_result.season_number is not None else 1
        episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        if quality is None:
            quality = Quality.sceneQuality(name)

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8')

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        # remember the name -> id mapping for next time
        if not in_cache:
            sickbeard.name_cache.addNameToCache(parse_result.series_name, indexerid)

        return [
            "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
            [name, season, episodeText, indexerid, url, curTimestamp, quality]]
def _getProperList(self):
    """
    Search all active providers for PROPER releases, match them to our shows
    and return the ones that replace an episode we already have at the same
    quality.

    Returns: list of proper objects we want to snatch.
    """
    propers = {}

    # for each provider get a list of the propers
    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        date = datetime.datetime.today() - datetime.timedelta(days=2)

        logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
        curPropers = curProvider.findPropers(date)

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            name = self._genericName(x.name)

            if name not in propers:
                logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []

    for curProper in sortedPropers:

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
            continue

        if not parse_result.episode_numbers:
            logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
            continue

        # populate our Proper instance
        if parse_result.air_by_date:
            # BUG FIX: was `curProper.season == -1`, a no-op comparison that
            # left the air-by-date branch below (season == -1) unreachable
            curProper.season = -1
            curProper.episode = parse_result.air_date
        else:
            curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
            curProper.episode = parse_result.episode_numbers[0]

        curProper.quality = Quality.nameQuality(curProper.name)

        # for each show in our list
        for curShow in sickbeard.showList:

            # guard: parsing can yield no series name; _genericName on None would crash
            if not parse_result.series_name:
                continue

            genericName = self._genericName(parse_result.series_name)

            # get the scene name masks
            sceneNames = set(sceneHelpers.makeSceneShowSearchStrings(curShow))

            # for each scene name mask
            for curSceneName in sceneNames:

                # if it matches
                if genericName == self._genericName(curSceneName):
                    logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)

                    # set the tvdbid in the db to the show's tvdbid
                    curProper.tvdbid = curShow.tvdbid

                    # since we found it, break out
                    break

            # if we found something in the inner for loop break out of this one
            if curProper.tvdbid != -1:
                break

        if curProper.tvdbid == -1:
            continue

        if not sceneHelpers.filterBadReleases(curProper.name):
            logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG)
            continue

        # if we have an air-by-date show then get the real season/episode numbers
        if curProper.season == -1 and curProper.tvdbid:
            showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
            if not showObj:
                logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                raise Exception("BAD STUFF HAPPENED")

            tvdb_lang = showObj.lang
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if tvdb_lang and not tvdb_lang == 'en':
                ltvdb_api_parms['language'] = tvdb_lang

            try:
                t = tvdb_api.Tvdb(**ltvdb_api_parms)
                epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                # BUG FIX: the lookup results were assigned to throwaway locals
                # `season`/`episodes` and discarded; store them on the proper
                curProper.season = int(epObj["seasonnumber"])
                curProper.episodes = [int(epObj["episodenumber"])]
            except tvdb_exceptions.tvdb_episodenotfound:
                logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = db.DBConnection().select(
            "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
            [curProper.tvdbid, curProper.season, curProper.episode])
        if not sqlResults:
            continue
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(
                operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
            logger.log(u"Found a proper that we need: " + str(curProper.name))
            finalPropers.append(curProper)

    # BUG FIX: the list was built but never returned, so callers got None
    return finalPropers
def splitResult(result):
    """
    Break a season-pack NZB result apart into individual per-episode results.

    result: a search result whose url points at a season NZB and whose
            extraInfo[0] is the show object (provides wantEpisode/getEpisode)

    Returns: a list of NZBDataSearchResult objects (possibly empty), or
    False when the NZB can't be fetched or a name fails to parse.
    """
    urlData = helpers.getURL(result.url)
    if urlData is None:
        logger.log(u"Unable to load url " + result.url + ", can't download season NZB", logger.ERROR)
        return False

    # parse the season ep name
    try:
        parse_result = NameParser(False).parse(result.name, True)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + result.name + " into a valid episode", logger.WARNING)
        return False

    # bust it up
    season = 1 if parse_result.season_number is None else parse_result.season_number

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []
    for newNZB in separateNZBs:
        logger.log(u"Split out " + newNZB + " from " + result.name, logger.DEBUG)

        # parse the name
        try:
            parse_result = NameParser(False).parse(newNZB, True)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + newNZB + " into a valid episode", logger.WARNING)
            return False

        # make sure the result is sane
        wrong_season = (parse_result.season_number is not None and parse_result.season_number != season) \
            or (parse_result.season_number is None and season != 1)
        if wrong_season:
            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it", logger.WARNING)
            continue
        if not parse_result.episode_numbers:
            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it", logger.WARNING)
            continue

        # drop the piece if any of its episodes is unwanted at this quality
        unwanted = False
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " + Quality.qualityStrings[result.quality], logger.DEBUG)
                unwanted = True
                break
        if unwanted:
            continue

        # get all the associated episode objects
        epObjList = [result.extraInfo[0].getEpisode(season, curEp)
                     for curEp in parse_result.episode_numbers]

        # make a result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]
        resultList.append(curResult)

    return resultList
def findNeededEpisodes(self, episode=None, manualSearch=False):
    """
    Look through this provider's cache table for results matching episodes we
    want.

    episode: optional episode object; when given, only cached rows for that
             episode are considered, otherwise the whole cache is scanned
    manualSearch: passed through to showObj.wantEpisode

    Returns: dict mapping episode objects to lists of result objects.
    """
    neededEps = {}
    if episode:
        neededEps[episode] = []

    myDB = self._getDB()
    if not episode:
        sqlResults = myDB.select("SELECT * FROM " + self.providerID)
    else:
        sqlResults = myDB.select(
            "SELECT * FROM " + self.providerID + " WHERE tvdbid = ? AND season = ? AND episodes LIKE ?",
            [episode.show.tvdbid, episode.scene_season, "%|" + str(episode.scene_episode) + "|%"])

    # for each cache entry
    for curResult in sqlResults:

        # get the show object, or if it's not one of our shows then ignore it
        showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["tvdbid"]))
        if not showObj:
            continue

        try:
            fp = NameParser()
            parse_result = fp.parse(curResult["name"])
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + curResult["name"] + " into a valid episode", logger.WARNING)
            # BUG FIX: without this continue the loop fell through and used an
            # undefined (or stale) parse_result below
            continue

        # language must match the show's audio language
        # NOTE(review): compares plural `audio_langs` to singular `audio_lang` — confirm attribute semantics
        if not parse_result.audio_langs == showObj.audio_lang:
            continue

        # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
        if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"], showObj.audio_lang):
            continue

        # get season and ep data (ignoring multi-eps for now)
        curSeason = int(curResult["season"])
        if curSeason == -1:
            continue
        curEp = curResult["episodes"].split("|")[1]
        if not curEp:
            continue
        curEp = int(curEp)
        curQuality = int(curResult["quality"])

        # if the show says we want that episode then add it to the list
        if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
            logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " + Quality.qualityStrings[curQuality], logger.DEBUG)
        else:
            if episode:
                epObj = episode
            else:
                epObj = showObj.getEpisode(curSeason, curEp)

            # build a result object
            title = curResult["name"]
            url = curResult["url"]

            logger.log(u"Found result " + title + " at " + url)

            result = self.provider.getResult([epObj])
            result.url = url
            result.name = title
            result.quality = curQuality
            result.audio_lang = str(showObj.audio_lang)

            # add it to the list
            if epObj not in neededEps:
                neededEps[epObj] = [result]
            else:
                neededEps[epObj].append(result)

    return neededEps
def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]):
    # Parse a release name (trying extraNames as fallbacks), resolve the show's
    # tvdb/tvrage ids and season/episode numbers, on the way to caching it.
    # Returns False on any failure to identify the release.
    # NOTE(review): mutable default `extraNames=[]` is shared across calls; it is
    # only read here (never mutated), so it is harmless but fragile.
    # NOTE(review): the visible body ends after the air-by-date lookup — the
    # actual cache-insert step appears to be truncated in this chunk; confirm
    # against the full file before editing.
    myDB = self._getDB()

    parse_result = None

    # if we don't have complete info then parse the filename to get it
    # (first successful parse wins; on total failure parse_result stays None)
    for curName in [name] + extraNames:
        try:
            myParser = NameParser()
            parse_result = myParser.parse(curName)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+curName+" into a valid episode", logger.DEBUG)
            continue

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: "+name, logger.DEBUG)
        return False

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from "+name+", unable to cache it", logger.DEBUG)
        return False

    tvdb_lang = None

    # if we need tvdb_id or tvrage_id then search the DB for them
    if not tvdb_id or not tvrage_id:

        # if we have only the tvdb_id, use the database
        if tvdb_id:
            showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
            if showObj:
                tvrage_id = showObj.tvrid
                tvdb_lang = showObj.lang
            else:
                logger.log(u"We were given a TVDB id "+str(tvdb_id)+" but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG)
                tvrage_id = 0

        # if we have only a tvrage_id then use the database
        elif tvrage_id:
            showObj = helpers.findCertainTVRageShow(sickbeard.showList, tvrage_id)
            if showObj:
                tvdb_id = showObj.tvdbid
                tvdb_lang = showObj.lang
            else:
                logger.log(u"We were given a TVRage id "+str(tvrage_id)+" but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG)
                tvdb_id = 0

        # if they're both empty then fill out as much info as possible by searching the show name
        else:

            # check the name cache and see if we already know what show this is
            logger.log(u"Checking the cache to see if we already know the tvdb id of "+parse_result.series_name, logger.DEBUG)
            tvdb_id = name_cache.retrieveNameFromCache(parse_result.series_name)

            # remember if the cache lookup worked or not so we know whether we should bother updating it later
            if tvdb_id == None:
                logger.log(u"No cache results returned, continuing on with the search", logger.DEBUG)
                from_cache = False
            else:
                logger.log(u"Cache lookup found "+repr(tvdb_id)+", using that", logger.DEBUG)
                from_cache = True

            # if the cache failed, try looking up the show name in the database
            if tvdb_id == None:
                logger.log(u"Trying to look the show up in the show database", logger.DEBUG)
                showResult = helpers.searchDBForShow(parse_result.series_name)
                if showResult:
                    logger.log(parse_result.series_name+" was found to be show "+showResult[1]+" ("+str(showResult[0])+") in our DB.", logger.DEBUG)
                    tvdb_id = showResult[0]

            # if the DB lookup fails then do a comprehensive regex search
            if tvdb_id == None:
                logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG)
                for curShow in sickbeard.showList:
                    if show_name_helpers.isGoodResult(name, curShow, False):
                        logger.log(u"Successfully matched "+name+" to "+curShow.name+" with regex", logger.DEBUG)
                        tvdb_id = curShow.tvdbid
                        tvdb_lang = curShow.lang
                        break

            # if tvdb_id was anything but None (0 or a number) then
            # remember it in the name cache for next time
            if not from_cache:
                name_cache.addNameToCache(parse_result.series_name, tvdb_id)

            # if we came out with tvdb_id = None it means we couldn't figure it out at all, just use 0 for that
            if tvdb_id == None:
                tvdb_id = 0

            # if we found the show then retrieve the show object
            if tvdb_id:
                showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
                if showObj:
                    tvrage_id = showObj.tvrid
                    tvdb_lang = showObj.lang

    # if we weren't provided with season/episode information then get it from the name that we parsed
    if not season:
        season = parse_result.season_number if parse_result.season_number != None else 1
    if not episodes:
        episodes = parse_result.episode_numbers

    # if we have an air-by-date show then get the real season/episode numbers
    # by asking TVDB which episode aired on that date
    if parse_result.air_by_date and tvdb_id:
        try:
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if not (tvdb_lang == "" or tvdb_lang == "en" or tvdb_lang == None):
                ltvdb_api_parms['language'] = tvdb_lang

            t = tvdb_api.Tvdb(**ltvdb_api_parms)
            epObj = t[tvdb_id].airedOn(parse_result.air_date)[0]
            season = int(epObj["seasonnumber"])
            episodes = [int(epObj["episodenumber"])]
        except tvdb_exceptions.tvdb_episodenotfound:
            logger.log(u"Unable to find episode with date "+str(parse_result.air_date)+" for show "+parse_result.series_name+", skipping", logger.WARNING)
            return False
        except tvdb_exceptions.tvdb_error, e:
            logger.log(u"Unable to contact TVDB: "+ex(e), logger.WARNING)
            return False
def findNeededEpisodes(self, episode, manualSearch=False):
    """Return still-wanted cached results for one episode or a list of episodes.

    episode: a single episode object, or a list of episode objects (the list
             form additionally filters on each episode's wantedQuality).
    manualSearch: passed through to showObj.wantEpisode().
    Returns: dict mapping episode objects to lists of provider results.
    """
    neededEps = {}
    cl = []

    myDB = self.get_db()
    if type(episode) != list:
        # single-episode lookup against this provider's cache rows
        sqlResults = myDB.select(
            'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?',
            [self.providerID, episode.show.indexerid, episode.season,
             '%|' + str(episode.episode) + '|%'])
    else:
        # batch form: one query per episode, restricted to its wanted qualities,
        # executed together and flattened into a single row list
        for epObj in episode:
            cl.append([
                'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?'
                + ' AND episodes LIKE ? AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
                [self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
        sqlResults = myDB.mass_action(cl)
        if sqlResults:
            sqlResults = list(itertools.chain(*sqlResults))

    if not sqlResults:
        self.setLastSearch()
        return neededEps

    # for each cache entry
    for curResult in sqlResults:

        # skip non-tv crap
        if not show_name_helpers.pass_wordlist_checks(curResult['name'], parse=False, indexer_lookup=False):
            continue

        # get the show object, or if it's not one of our shows then ignore it
        showObj = helpers.findCertainShow(sickbeard.showList, int(curResult['indexerid']))
        if not showObj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not showObj.is_anime:
            logger.log(u'' + str(showObj.name) + ' is not an anime, skipping', logger.DEBUG)
            continue

        # get season and ep data (ignoring multi-eps for now)
        curSeason = int(curResult['season'])
        if curSeason == -1:
            continue
        curEp = curResult['episodes'].split('|')[1]
        if not curEp:
            continue
        curEp = int(curEp)
        curQuality = int(curResult['quality'])
        curReleaseGroup = curResult['release_group']
        curVersion = curResult['version']

        # if the show says we want that episode then add it to the list
        if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
            logger.log(u'Skipping ' + curResult['name'] + ' because we don\'t want an episode that\'s '
                       + Quality.qualityStrings[curQuality], logger.DEBUG)
            continue

        epObj = showObj.getEpisode(curSeason, curEp)

        # build a result object
        title = curResult['name']
        url = curResult['url']

        logger.log(u'Found result ' + title + ' at ' + url)

        result = self.provider.get_result([epObj], url)
        if None is result:
            continue
        result.show = showObj
        result.name = title
        result.quality = curQuality
        result.release_group = curReleaseGroup
        result.version = curVersion
        result.content = None

        # best-effort re-parse of the title to derive proper/repack level;
        # parse failure falls back to neutral values rather than skipping
        np = NameParser(False, showObj=showObj)
        try:
            parsed_result = np.parse(title)
            extra_info_no_name = parsed_result.extra_info_no_name()
            version = parsed_result.version
            is_anime = parsed_result.is_anime
        except (StandardError, Exception):
            extra_info_no_name = None
            version = -1
            is_anime = False
        result.is_repack, result.properlevel = Quality.get_proper_level(
            extra_info_no_name, version, is_anime, check_is_repack=True)

        # add it to the list
        if epObj not in neededEps:
            neededEps[epObj] = [result]
        else:
            neededEps[epObj].append(result)

    # datetime stamp this search so cache gets cleared
    self.setLastSearch()

    return neededEps
# if they haven't been added by a different provider than add the proper to the list for x in curPropers: name = self._genericName(x.name) if not name in propers: logger.log(u"Found new proper: " + x.name, logger.DEBUG) x.provider = curProvider propers[name] = x # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: try: myParser = NameParser(False) parse_result = myParser.parse(curProper.name) except InvalidNameException: logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG) continue except InvalidShowException: logger.log(u"Unable to parse the filename " + curProper.name + " into a valid show", logger.DEBUG) continue if not parse_result.series_name: continue if not parse_result.episode_numbers: logger.log( u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
def findNeededEpisodes(self, episode=None, manualSearch=False):
    """Return cached results that we still want, grouped by episode.

    episode: optional episode object; when given, only cache rows matching
             its show/scene-season/scene-episode are considered and the
             result dict is seeded with an empty list for it.
    manualSearch: passed through to showObj.wantEpisode().
    Returns: dict mapping episode objects to lists of provider results.
    """
    neededEps = {}
    if episode:
        neededEps[episode] = []

    myDB = self._getDB()
    if not episode:
        sqlResults = myDB.select("SELECT * FROM " + self.providerID)
    else:
        sqlResults = myDB.select(
            "SELECT * FROM " + self.providerID + " WHERE tvdbid = ? AND season = ? AND episodes LIKE ?",
            [
                episode.show.tvdbid, episode.scene_season,
                "%|" + str(episode.scene_episode) + "|%"
            ])

    # for each cache entry
    for curResult in sqlResults:

        # get the show object, or if it's not one of our shows then ignore it
        showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["tvdbid"]))
        if not showObj:
            continue

        try:
            fp = NameParser()
            parse_result = fp.parse(curResult["name"])
        except InvalidNameException:
            logger.log(
                u"Unable to parse the filename " + curResult["name"] +
                " into a valid episode", logger.WARNING)
            # BUGFIX: skip this cache row on parse failure. Previously control
            # fell through and the next line read parse_result, which was
            # either unbound (first iteration) or stale from a previous row.
            continue

        if not parse_result.audio_langs == showObj.audio_lang:
            continue

        # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
        if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(
                curResult["name"], showObj.audio_lang):
            continue

        # get season and ep data (ignoring multi-eps for now)
        curSeason = int(curResult["season"])
        if curSeason == -1:
            continue
        curEp = curResult["episodes"].split("|")[1]
        if not curEp:
            continue
        curEp = int(curEp)
        curQuality = int(curResult["quality"])

        # if the show says we want that episode then add it to the list
        if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
            logger.log(
                u"Skipping " + curResult["name"] +
                " because we don't want an episode that's " +
                Quality.qualityStrings[curQuality], logger.DEBUG)
        else:
            if episode:
                epObj = episode
            else:
                epObj = showObj.getEpisode(curSeason, curEp)

            # build a result object
            title = curResult["name"]
            url = curResult["url"]

            logger.log(u"Found result " + title + " at " + url)

            result = self.provider.getResult([epObj])
            result.url = url
            result.name = title
            result.quality = curQuality
            result.audio_lang = str(showObj.audio_lang)

            # add it to the list
            if epObj not in neededEps:
                neededEps[epObj] = [result]
            else:
                neededEps[epObj].append(result)

    return neededEps
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    """Search active providers for PROPER/REPACK releases and verify them.

    aired_since_shows: search date passed to each provider's find_propers().
    recent_shows: indexer ids of recently-aired (non-anime) shows.
    recent_anime: indexer ids of recently-aired anime shows.
    Returns: list of verified proper objects worth snatching.
    """
    propers = {}

    # for each provider get a list of the propers
    orig_thread_name = threading.currentThread().name
    providers = [
        x for x in sickbeard.providers.sortedProviderList() if x.is_active()
    ]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread(
        ).name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(
                search_date=aired_since_shows,
                shows=recent_shows,
                anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(
                u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e),
                logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            # always restore the thread name, even on error paths
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the proper to the list
        count = 0
        np = NameParser(False, try_scene_exceptions=True)
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    parse_result = np.parse(x.title)
                    # only keep propers for shows we actually track as recent
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except Exception:
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by date (newest first)
    sorted_propers = sorted(propers.values(),
                            key=operator.attrgetter('date'),
                            reverse=True)
    verified_propers = []

    np = NameParser(False, try_scene_exceptions=True)
    for cur_proper in sorted_propers:

        # BUGFIX: re-parse this specific proper's name. Previously this loop
        # reused the `parse_result` left over from the LAST iteration of the
        # provider-search loop above, stamping every proper with the wrong
        # show/season/episode (or raising NameError when nothing was parsed).
        try:
            parse_result = np.parse(cur_proper.name)
        except (InvalidNameException, InvalidShowException):
            logger.log(
                u'Unable to parse the filename ' + cur_proper.name + ' into a valid episode',
                logger.DEBUG)
            continue
        if not parse_result.series_name or not parse_result.episode_numbers:
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(
                    u'Proper %s doesn\'t have a release group and version, ignoring it'
                    % cur_proper.name, logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
            logger.log(
                u'Proper %s isn\'t a valid scene release that we want, ignoring it'
                % cur_proper.name, logger.DEBUG)
            continue

        # honour the show's per-show ignore/require word lists
        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(
            cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(
            cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(
                u'Ignored: %s for not containing any required word match'
                % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(
            int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            my_db = db.DBConnection()
            sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode])

            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])

            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s'
                           % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(
                    u'Skipping proper from release group: %s, does not match existing release group: %s'
                    % (cur_proper.release_group, old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1 and (
                cur_proper.indexerid, cur_proper.season,
                cur_proper.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'),
                    verified_propers):
            logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers
def findNeededEpisodes(self, episode, manualSearch=False, downCurQuality=False):
    """Return still-wanted cached results for one episode or a list of episodes.

    episode: a single episode object, or a list of episode objects (the list
             form additionally filters rows on each episode's wantedQuality).
    manualSearch: passed through to showObj.wantEpisode().
    downCurQuality: passed through to showObj.wantEpisode().
    Returns: dict mapping episode objects to lists of provider results.
    """
    neededEps = {}
    cl = []

    myDB = self._getDB()
    if type(episode) != list:
        sqlResults = myDB.select(
            "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
            [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"],
        )
    else:
        # batch form: one query per episode, restricted to its wanted
        # qualities, executed together and flattened into a single row list
        for epObj in episode:
            cl.append(
                [
                    "SELECT * FROM ["
                    + self.providerID
                    + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ("
                    + ",".join([str(x) for x in epObj.wantedQuality])
                    + ")",
                    [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"],
                ]
            )

        sqlResults = myDB.mass_action(cl, fetchall=True)
        sqlResults = list(itertools.chain(*sqlResults))

    # for each cache entry
    for curResult in sqlResults:

        # get the show object, or if it's not one of our shows then ignore it
        showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
        if not showObj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not showObj.is_anime:
            logger.log(u"" + str(showObj.name) + " is not an anime, skiping", logger.DEBUG)
            continue

        # get season and ep data (ignoring multi-eps for now)
        curSeason = int(curResult["season"])
        if curSeason == -1:
            continue

        curEp = curResult["episodes"].split("|")[1]
        if not curEp:
            continue

        curEp = int(curEp)

        curQuality = int(curResult["quality"])
        curReleaseGroup = curResult["release_group"]
        curVersion = curResult["version"]

        # if the show says we want that episode then add it to the list
        if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch, downCurQuality):
            logger.log(
                u"Skipping " + curResult["name"] + " because we don't want an episode that's "
                + Quality.qualityStrings[curQuality],
                logger.DEBUG,
            )
            continue

        try:
            fp = NameParser()
            parse_result = fp.parse(curResult["name"])
        except InvalidNameException:
            logger.log(
                u"Unable to parse the filename " + curResult["name"] + " into a valid episode", logger.WARNING
            )
            # BUGFIX: skip this cache row on parse failure. Previously control
            # fell through and the next line read parse_result, which was
            # either unbound (first iteration) or stale from a previous row.
            continue

        if not parse_result.audio_langs == showObj.audio_lang:
            continue

        epObj = showObj.getEpisode(curSeason, curEp)

        # build a result object
        title = curResult["name"]
        url = curResult["url"]

        logger.log(u"Found result " + title + " at " + url)

        result = self.provider.getResult([epObj])
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = curQuality
        result.audio_lang = str(showObj.audio_lang)
        result.release_group = curReleaseGroup
        result.version = curVersion
        result.content = None

        # add it to the list
        if epObj not in neededEps:
            neededEps[epObj] = [result]
        else:
            neededEps[epObj].append(result)

    # datetime stamp this search so cache gets cleared
    self.setLastSearch()

    return neededEps