def logSubtitle(showid, season, episode, status, subtitleResult):
    """Record a subtitle download in the history table."""
    # keep the quality of the current status, but log the action as SUBTITLED
    _, quality = Quality.splitCompositeStatus(status)
    composite_action = Quality.compositeStatus(SUBTITLED, quality)
    _logHistoryItem(composite_action, showid, season, episode, quality,
                    subtitleResult.path, subtitleResult.service)
def revert_episode(ep_obj):
    """Restore the episodes of a failed download to their original state"""
    rows = db_select(
        'SELECT * FROM history t WHERE t.showid=? AND t.season=?',
        [ep_obj.show.indexerid, ep_obj.season])
    # index the failed-download history rows by episode number
    by_episode = dict((row['episode'], row) for row in rows)

    try:
        logger.log('Reverting episode %sx%s: [%s]' % (ep_obj.season, ep_obj.episode, ep_obj.name))
        with ep_obj.lock:
            history_row = by_episode.get(ep_obj.episode)
            if history_row is None:
                # nothing recorded for this episode: fall back to WANTED
                status_revert = WANTED
                logger.log('Episode not found in failed.db history. Setting it to WANTED', logger.WARNING)
            else:
                status_revert = history_row['old_status']
                status, quality = Quality.splitCompositeStatus(status_revert)
                logger.log('Found in failed.db history with status: %s quality: %s' % (
                    statusStrings[status], Quality.qualityStrings[quality]))

            ep_obj.status = status_revert
            ep_obj.saveToDB()

    except EpisodeNotFoundException as e:
        logger.log('Unable to create episode, please set its status manually: %s' % ex(e), logger.WARNING)
def logFailed(epObj, release, provider=None):
    """Write a FAILED entry to download history for the given episode."""
    _, quality = Quality.splitCompositeStatus(epObj.status)
    _logHistoryItem(Quality.compositeStatus(FAILED, quality),
                    int(epObj.show.indexerid), int(epObj.season),
                    int(epObj.episode), quality, release, provider)
def refine_video(video, episode):
    """Enrich a subliminal video object with data from the episode.

    Uses a guess from the original release name first, then SickRage show
    metadata. Existing video attributes are kept unless the show has
    ``subtitles_sr_metadata`` enabled, in which case metadata wins.
    """
    # try to enrich video object using information in original filename
    if episode.release_name:
        guess_ep = Episode.fromguess(None, guessit(episode.release_name))
        for name in vars(guess_ep):
            if getattr(guess_ep, name) and not getattr(video, name):
                setattr(video, name, getattr(guess_ep, name))

    # Use sickbeard metadata
    metadata_mapping = {
        'episode': 'episode',
        'release_group': 'release_group',
        'season': 'season',
        'series': 'show.name',
        'series_imdb_id': 'show.imdbid',
        'size': 'file_size',
        'title': 'name',
        'year': 'show.startyear'
    }

    def get_attr_value(obj, name):
        """Resolve a dotted attribute path (e.g. 'show.name') on obj.

        BUGFIX: the previous version restarted the lookup from ``obj``
        whenever an intermediate value was falsy, which could return an
        attribute of the wrong object. The chain is now walked strictly
        and aborts with None as soon as a link is missing.
        """
        value = obj
        for attr in name.split('.'):
            if value is None:
                return None
            value = getattr(value, attr, None)
        return value

    for name in metadata_mapping:
        # evaluate the mapped attribute once instead of twice per key
        value = get_attr_value(episode, metadata_mapping[name])
        # set when the video attribute is missing, or always when the show
        # prefers SickRage metadata over guessed values
        if value and (not getattr(video, name) or episode.show.subtitles_sr_metadata):
            setattr(video, name, value)

    # Set quality from metadata
    _, quality = Quality.splitCompositeStatus(episode.status)
    if not video.format or episode.show.subtitles_sr_metadata:
        if quality & Quality.ANYHDTV:
            video.format = Quality.combinedQualityStrings.get(Quality.ANYHDTV)
        elif quality & Quality.ANYWEBDL:
            video.format = Quality.combinedQualityStrings.get(Quality.ANYWEBDL)
        elif quality & Quality.ANYBLURAY:
            video.format = Quality.combinedQualityStrings.get(Quality.ANYBLURAY)

    if not video.resolution or episode.show.subtitles_sr_metadata:
        if quality & (Quality.HDTV | Quality.HDWEBDL | Quality.HDBLURAY):
            video.resolution = '720p'
        elif quality & Quality.RAWHDTV:
            video.resolution = '1080i'
        elif quality & (Quality.FULLHDTV | Quality.FULLHDWEBDL | Quality.FULLHDBLURAY):
            video.resolution = '1080p'
        elif quality & (Quality.UHD_4K_TV | Quality.UHD_4K_WEBDL | Quality.UHD_4K_BLURAY):
            video.resolution = '4K'
        elif quality & (Quality.UHD_8K_TV | Quality.UHD_8K_WEBDL | Quality.UHD_8K_BLURAY):
            video.resolution = '8K'
def logFailed(tvdbid, season, episode, status, release, provider=None):
    """Record a failed download in history for the given show/episode."""
    _, quality = Quality.splitCompositeStatus(status)
    failed_action = Quality.compositeStatus(FAILED, quality)
    _logHistoryItem(failed_action, int(tvdbid), int(season), int(episode),
                    quality, release, provider)
def refine_video(video, episode):
    """Enrich a subliminal video object with data from the episode.

    Uses a guess from the original release name first, then SickRage show
    metadata. Existing video attributes are kept unless the show has
    ``subtitles_sr_metadata`` enabled, in which case metadata wins.
    """
    # try to enrich video object using information in original filename
    if episode.release_name:
        guess_ep = Episode.fromguess(None, guessit(episode.release_name))
        for name in vars(guess_ep):
            if getattr(guess_ep, name) and not getattr(video, name):
                setattr(video, name, getattr(guess_ep, name))

    # Use sickbeard metadata
    metadata_mapping = {
        "episode": "episode",
        "release_group": "release_group",
        "season": "season",
        "series": "show.name",
        "series_imdb_id": "show.imdbid",
        "size": "file_size",
        "title": "name",
        "year": "show.startyear",
    }

    def get_attr_value(obj, name):
        """Resolve a dotted attribute path (e.g. 'show.name') on obj.

        BUGFIX: the previous version restarted the lookup from ``obj``
        whenever an intermediate value was falsy, which could return an
        attribute of the wrong object. The chain is now walked strictly
        and aborts with None as soon as a link is missing.
        """
        value = obj
        for attr in name.split("."):
            if value is None:
                return None
            value = getattr(value, attr, None)
        return value

    for name in metadata_mapping:
        # evaluate the mapped attribute once instead of twice per key
        value = get_attr_value(episode, metadata_mapping[name])
        # set when the video attribute is missing, or always when the show
        # prefers SickRage metadata over guessed values
        if value and (not getattr(video, name) or episode.show.subtitles_sr_metadata):
            setattr(video, name, value)

    # Set quality from metadata
    _, quality = Quality.splitCompositeStatus(episode.status)
    if not video.format or episode.show.subtitles_sr_metadata:
        if quality & Quality.ANYHDTV:
            video.format = Quality.combinedQualityStrings.get(Quality.ANYHDTV)
        elif quality & Quality.ANYWEBDL:
            video.format = Quality.combinedQualityStrings.get(Quality.ANYWEBDL)
        elif quality & Quality.ANYBLURAY:
            video.format = Quality.combinedQualityStrings.get(Quality.ANYBLURAY)

    if not video.resolution or episode.show.subtitles_sr_metadata:
        if quality & (Quality.HDTV | Quality.HDWEBDL | Quality.HDBLURAY):
            video.resolution = "720p"
        elif quality & Quality.RAWHDTV:
            video.resolution = "1080i"
        elif quality & (Quality.FULLHDTV | Quality.FULLHDWEBDL | Quality.FULLHDBLURAY):
            video.resolution = "1080p"
        elif quality & (Quality.UHD_4K_TV | Quality.UHD_4K_WEBDL | Quality.UHD_4K_BLURAY):
            video.resolution = "4K"
        elif quality & (Quality.UHD_8K_TV | Quality.UHD_8K_WEBDL | Quality.UHD_8K_BLURAY):
            video.resolution = "8K"
def set_episode_failed(ep_obj):
    """Flag an episode as FAILED while keeping its current quality."""
    try:
        with ep_obj.lock:
            _, quality = Quality.splitCompositeStatus(ep_obj.status)
            ep_obj.status = Quality.compositeStatus(FAILED, quality)
            ep_obj.saveToDB()
    except EpisodeNotFoundException as e:
        logger.log('Unable to get episode, please set its status manually: %s' % ex(e), logger.WARNING)
def markFailed(epObj):
    """Mark an episode as FAILED, preserving its current quality.

    :param epObj: Episode object to update
    """
    try:
        with epObj.lock:
            quality = Quality.splitCompositeStatus(epObj.status)[1]
            epObj.status = Quality.compositeStatus(FAILED, quality)
            epObj.saveToDB()
    # use `as` except syntax (py2+py3 compatible, consistent with the
    # other markFailed variants in this file); also dropped the unused
    # log_str local the original never read or returned
    except EpisodeNotFoundException as e:
        logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)
def getQualityClass(ep_obj):
    """ Find the quality class for the episode """
    _, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
    # fall back to the UNKNOWN css class when the quality has no mapping
    return Quality.cssClassStrings.get(ep_quality, Quality.cssClassStrings[Quality.UNKNOWN])
def markFailed(show_obj, season, episode=None):
    # Mark a specific episode of a show as FAILED, keeping its quality.
    # NOTE(review): this chunk looks truncated — the `if episode:` branch has
    # no matching `else` (whole-season handling) and log_str is accumulated
    # but never returned; confirm against the full version of this function.
    # NOTE(review): uses Python-2-only `except X, e` syntax.
    log_str = u""
    if episode:
        try:
            ep_obj = show_obj.getEpisode(season, episode)
            with ep_obj.lock:
                quality = Quality.splitCompositeStatus(ep_obj.status)[1]
                ep_obj.status = Quality.compositeStatus(FAILED, quality)
                ep_obj.saveToDB()
        except exceptions.EpisodeNotFoundException, e:
            log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
def logFailed(epObj, release, provider=None):
    """
    Log a failed download

    :param epObj: Episode object
    :param release: Release group
    :param provider: Provider used for snatch
    """
    _, quality = Quality.splitCompositeStatus(epObj.status)
    _logHistoryItem(Quality.compositeStatus(FAILED, quality),
                    int(epObj.show.indexerid), int(epObj.season),
                    int(epObj.episode), quality, release, provider)
def logSubtitle(showid, season, episode, status, subtitleResult):
    """
    Log download of subtitle

    :param showid: Showid of download
    :param season: Show season
    :param episode: Show episode
    :param status: Status of download
    :param subtitleResult: Result object
    """
    _, quality = Quality.splitCompositeStatus(status)
    _logHistoryItem(Quality.compositeStatus(SUBTITLED, quality),
                    showid, season, episode, quality,
                    subtitleResult.language.opensubtitles,
                    subtitleResult.provider_name)
def markFailed(epObj):
    """
    Mark an episode as failed

    :param epObj: Episode object to mark as failed
    :return: empty string
    """
    log_str = u""
    try:
        with epObj.lock:
            quality = Quality.splitCompositeStatus(epObj.status)[1]
            epObj.status = Quality.compositeStatus(FAILED, quality)
            epObj.saveToDB()
    # use `as` except syntax (py2+py3 compatible, consistent with the
    # py3 variant of this function elsewhere in the file)
    except EpisodeNotFoundException as e:
        logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)
    # actually return the empty string the docstring promises
    # (original fell off the end and returned None)
    return log_str
def markFailed(epObj):
    """
    Mark an episode as failed

    :param epObj: Episode object to mark as failed
    :return: empty string
    """
    result = ""
    try:
        with epObj.lock:
            _, quality = Quality.splitCompositeStatus(epObj.status)
            epObj.status = Quality.compositeStatus(FAILED, quality)
            epObj.saveToDB()
    except EpisodeNotFoundException as e:
        logging.warning("Unable to get episode, please set its status manually: {}".format(ex(e)))
    return result
class ProperFinder():
    """Periodically searches enabled providers for PROPER/REPACK releases of
    episodes we already have and hands them off for download.

    NOTE(review): uses Python-2-only ``except X, e`` syntax, unlike other
    chunks of this file which use ``as e``.
    """

    def __init__(self):
        # window length after the scheduled hour during which run() will fire
        self.updateInterval = datetime.timedelta(hours=1)

    def run(self):
        """Scheduler entry point: search for new propers and download them."""
        if not sickbeard.DOWNLOAD_PROPERS:
            return

        # look for propers every night at 1 AM
        updateTime = datetime.time(hour=1)

        logger.log(u"Checking proper time", logger.DEBUG)

        hourDiff = datetime.datetime.today().time().hour - updateTime.hour

        # if it's less than an interval after the update time then do an update
        if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600:
            logger.log(u"Beginning the search for new propers")
        else:
            return

        propers = self._getProperList()

        self._downloadPropers(propers)

    def _getProperList(self):
        """Collect proper candidates from all active providers and filter them
        down to the ones matching episodes already snatched/downloaded at the
        same quality. Returns the filtered list, newest first.
        """
        propers = {}

        # for each provider get a list of the propers
        for curProvider in providers.sortedProviderList():

            if not curProvider.isActive():
                continue

            # only consider releases from the last two days
            search_date = datetime.datetime.today() - datetime.timedelta(days=2)

            logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
            try:
                curPropers = curProvider.findPropers(search_date)
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                continue

            # if they haven't been added by a different provider than add the proper to the list
            for x in curPropers:
                name = self._genericName(x.name)

                if not name in propers:
                    logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x

        # take the list of unique propers and get it sorted by date, newest first
        sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

        finalPropers = []

        for curProper in sortedPropers:

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name, True)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
                continue

            if not parse_result.episode_numbers:
                logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
                continue

            # populate our Proper instance
            if parse_result.air_by_date:
                # air-by-date shows get a sentinel season; resolved via TVDB below
                curProper.season = -1
                curProper.episode = parse_result.air_date
            else:
                curProper.season = parse_result.season_number if parse_result.season_number != None else 1
                curProper.episode = parse_result.episode_numbers[0]
            curProper.quality = Quality.nameQuality(curProper.name)

            # for each show in our list
            for curShow in sickbeard.showList:

                if not parse_result.series_name:
                    continue

                genericName = self._genericName(parse_result.series_name)

                # get the scene name masks
                sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow))

                # for each scene name mask
                for curSceneName in sceneNames:

                    # if it matches
                    if genericName == self._genericName(curSceneName):
                        logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)

                        # set the tvdbid in the db to the show's tvdbid
                        curProper.tvdbid = curShow.tvdbid

                        # since we found it, break out
                        break

                # if we found something in the inner for loop break out of this one
                # NOTE(review): assumes proper results start with tvdbid == -1 — confirm
                if curProper.tvdbid != -1:
                    break

            if curProper.tvdbid == -1:
                continue

            if not show_name_helpers.filterBadReleases(curProper.name):
                logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG)
                continue

            # if we have an air-by-date show then get the real season/episode numbers
            if curProper.season == -1 and curProper.tvdbid:
                showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
                if not showObj:
                    logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                    raise Exception("BAD STUFF HAPPENED")

                tvdb_lang = showObj.lang
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                if tvdb_lang and not tvdb_lang == 'en':
                    ltvdb_api_parms['language'] = tvdb_lang

                try:
                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
                    epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                    curProper.season = int(epObj["seasonnumber"])
                    curProper.episodes = [int(epObj["episodenumber"])]
                except tvdb_exceptions.tvdb_episodenotfound:
                    logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                    continue

            # check if we actually want this proper (if it's the right quality)
            sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.tvdbid, curProper.season, curProper.episode])
            if not sqlResults:
                continue
            oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
                continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
                logger.log(u"Found a proper that we need: " + str(curProper.name))
                finalPropers.append(curProper)

        return finalPropers
def _getProperList(self):
    """Collect proper candidates from all active providers and filter them to
    the ones matching episodes already snatched/downloaded at the same quality.

    NOTE(review): unlike the sibling variant, provider errors from
    findPropers() are not caught here and will propagate to the caller.
    """
    propers = {}

    # for each provider get a list of the propers
    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        # only consider releases from the last two days
        date = datetime.datetime.today() - datetime.timedelta(days=2)

        logger.log(u"Searching for any new PROPER releases from "+curProvider.name)
        curPropers = curProvider.findPropers(date)

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            name = self._genericName(x.name)

            if not name in propers:
                logger.log(u"Found new proper: "+x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []

    for curProper in sortedPropers:

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+curProper.name+" into a valid episode", logger.DEBUG)
            continue

        if not parse_result.episode_numbers:
            logger.log(u"Ignoring "+curProper.name+" because it's for a full season rather than specific episode", logger.DEBUG)
            continue

        # populate our Proper instance
        if parse_result.air_by_date:
            # air-by-date shows get a sentinel season; resolved via TVDB below
            curProper.season = -1
            curProper.episode = parse_result.air_date
        else:
            curProper.season = parse_result.season_number if parse_result.season_number != None else 1
            curProper.episode = parse_result.episode_numbers[0]
        curProper.quality = Quality.nameQuality(curProper.name)

        # for each show in our list
        for curShow in sickbeard.showList:

            genericName = self._genericName(parse_result.series_name)

            # get the scene name masks
            sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow))

            # for each scene name mask
            for curSceneName in sceneNames:

                # if it matches
                if genericName == self._genericName(curSceneName):
                    logger.log(u"Successful match! Result "+parse_result.series_name+" matched to show "+curShow.name, logger.DEBUG)

                    # set the tvdbid in the db to the show's tvdbid
                    curProper.tvdbid = curShow.tvdbid

                    # since we found it, break out
                    break

            # if we found something in the inner for loop break out of this one
            # NOTE(review): assumes proper results start with tvdbid == -1 — confirm
            if curProper.tvdbid != -1:
                break

        if curProper.tvdbid == -1:
            continue

        if not show_name_helpers.filterBadReleases(curProper.name):
            logger.log(u"Proper "+curProper.name+" isn't a valid scene release that we want, igoring it", logger.DEBUG)
            continue

        # if we have an air-by-date show then get the real season/episode numbers
        if curProper.season == -1 and curProper.tvdbid:
            showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
            if not showObj:
                logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                raise Exception("BAD STUFF HAPPENED")

            tvdb_lang = showObj.lang
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if tvdb_lang and not tvdb_lang == 'en':
                ltvdb_api_parms['language'] = tvdb_lang

            try:
                t = tvdb_api.Tvdb(**ltvdb_api_parms)
                epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                curProper.season = int(epObj["seasonnumber"])
                curProper.episodes = [int(epObj["episodenumber"])]
            except tvdb_exceptions.tvdb_episodenotfound:
                logger.log(u"Unable to find episode with date "+str(curProper.episode)+" for show "+parse_result.series_name+", skipping", logger.WARNING)
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.tvdbid, curProper.season, curProper.episode])
        if not sqlResults:
            continue
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
            logger.log(u"Found a proper that we need: "+str(curProper.name))
            finalPropers.append(curProper)

    return finalPropers
# only get anime proper if it has release group and version if bestResult.show.is_anime: if not bestResult.release_group and bestResult.version == -1: logger.log(u"Proper " + bestResult.name + " doesn't have a release group and version, ignoring it", logger.DEBUG) continue # check if we actually want this proper (if it's the right quality) myDB = db.DBConnection() sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [bestResult.indexerid, bestResult.season, bestResult.episode]) if not sqlResults: continue # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality: continue # check if we actually want this proper (if it's the right release group and a higher version) if bestResult.show.is_anime: myDB = db.DBConnection() sqlResults = myDB.select( "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [bestResult.indexerid, bestResult.season, bestResult.episode]) oldVersion = int(sqlResults[0]["version"]) oldRelease_group = (sqlResults[0]["release_group"]) if oldVersion > -1 and oldVersion < bestResult.version: logger.log("Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
def _getProperList(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """
    Walk providers for propers
    """
    propers = {}

    # only consider releases from the last two days
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of the propers
    original_thread_name = threading.currentThread().name
    providers = enabled_providers('backlog')
    for cur_provider in providers:
        # tag the thread name so log lines identify the provider being searched
        threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name)

        logger.log(u"Searching for any new PROPER releases from {provider}".format
                   (provider=cur_provider.name))

        try:
            cur_propers = cur_provider.find_propers(search_date)
        except AuthException as e:
            logger.log(u"Authentication error: {error}".format
                       (error=ex(e)), logger.DEBUG)
            continue
        except (SocketTimeout) as e:
            logger.log(u"Socket time out while searching for propers in {provider}, skipping: {error}".format
                       (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except (requests_exceptions.HTTPError, requests_exceptions.TooManyRedirects) as e:
            logger.log(u"HTTP error while searching for propers in {provider}, skipping: {error}".format
                       (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except requests_exceptions.ConnectionError as e:
            logger.log(u"Connection error while searching for propers in {provider}, skipping: {error}".format
                       (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except requests_exceptions.Timeout as e:
            logger.log(u"Connection timed out while searching for propers in {provider}, skipping: {error}".format
                       (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except requests_exceptions.ContentDecodingError as e:
            logger.log(u"Content-Encoding was gzip, but content was not compressed while searching for propers in {provider}, skipping: {error}".format
                       (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except Exception as e:
            # NOTE(review): `u'ECONNRESET' in e` tests membership on the
            # exception object itself, not substring matching on its message —
            # probably meant `in ex(e)`; confirm before relying on this branch.
            if u'ECONNRESET' in e or (hasattr(e, 'errno') and e.errno == errno.ECONNRESET):
                logger.log(u"Connection reset by peer while searching for propers in {provider}, skipping: {error}".format
                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            else:
                logger.log(u"Unknown exception while searching for propers in {provider}, skipping: {error}".format
                           (provider=cur_provider.name, error=ex(e)), logger.DEBUG)
                logger.log(traceback.format_exc(), logger.DEBUG)
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for proper in cur_propers:
            guess = guessit(proper.name)
            if not guess.get('proper_count'):
                logger.log(u'Skipping non-proper: {name}'.format(name=proper.name))
                continue

            name = self._genericName(proper.name, remove=False)
            if name not in propers:
                logger.log(u'Found new proper result: {name}'.format
                           (name=proper.name), logger.DEBUG)
                proper.provider = cur_provider
                propers[name] = proper

    # restore the thread name after the provider loop
    threading.currentThread().name = original_thread_name

    # take the list of unique propers and get it sorted by date, newest first
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    final_propers = []
    for cur_proper in sorted_propers:
        try:
            parse_result = NameParser(False).parse(cur_proper.name)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log(u'{}'.format(error), logger.DEBUG)
            continue

        if not parse_result.series_name:
            logger.log(u"Ignoring invalid show: {name}".format
                       (name=cur_proper.name), logger.DEBUG)
            continue

        if not parse_result.episode_numbers:
            logger.log(u"Ignoring full season instead of episode: {name}".format
                       (name=cur_proper.name), logger.DEBUG)
            continue

        logger.log(u'Successful match! Matched {} to show {}'.format
                   (parse_result.original_name, parse_result.show.name), logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.show = parse_result.show
        cur_proper.season = parse_result.season_number if parse_result.season_number is not None else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
        cur_proper.content = None

        # filter release
        best_result = pickBestResult(cur_proper, parse_result.show)
        if not best_result:
            logger.log(u'Rejected proper due to release filters: {name}'.format
                       (name=cur_proper.name))
            continue

        # only get anime proper if it has release group and version
        if best_result.show.is_anime:
            if not best_result.release_group and best_result.version == -1:
                logger.log(u"Ignoring proper without release group and version: {name}".format
                           (name=best_result.name))
                continue

        # check if we actually want this proper (if it's the right quality)
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                         [best_result.indexerid, best_result.season, best_result.episode])
        if not sql_results:
            logger.log(u"Ignoring proper with incorrect quality: {name}".format
                       (name=best_result.name))
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED) or old_quality != best_result.quality:
            logger.log(u"Ignoring proper because quality is different or episode is already archived: {name}".format
                       (name=best_result.name))
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if best_result.show.is_anime:
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [best_result.indexerid, best_result.season, best_result.episode])

            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])

            if -1 < old_version < best_result.version:
                logger.log(u'Found new anime version {new} to replace existing version {old}: {name}'.format
                           (old=old_version, new=best_result.version, name=best_result.name))
            else:
                logger.log(u'Ignoring proper with the same or lower version: {name}'.format
                           (name=best_result.name))
                continue

            if old_release_group != best_result.release_group:
                logger.log(u"Ignoring proper from release group {new} instead of current group {old}".format
                           (new=best_result.release_group, old=old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if best_result.indexerid != -1 and (best_result.indexerid, best_result.season, best_result.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), final_propers):
            logger.log(u'Found a desired proper: {name}'.format(name=best_result.name))
            final_propers.append(best_result)

    return final_propers
ep_obj = show_obj.getEpisode(season, episode) with ep_obj.lock: quality = Quality.splitCompositeStatus(ep_obj.status)[1] ep_obj.status = Quality.compositeStatus(FAILED, quality) ep_obj.saveToDB() except exceptions.EpisodeNotFoundException, e: log_str += _log_helper( u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING) else: # Whole season for ep_obj in show_obj.getAllEpisodes(season): with ep_obj.lock: quality = Quality.splitCompositeStatus(ep_obj.status)[1] ep_obj.status = Quality.compositeStatus(FAILED, quality) ep_obj.saveToDB() return log_str def logSnatch(searchResult): myDB = db.DBConnection("failed.db") logDate = datetime.datetime.today().strftime(dateFormat) release = prepareFailedName(searchResult.name) providerClass = searchResult.provider if providerClass is not None: provider = providerClass.name
def _get_proper_results(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """
    Retrieve a list of recently aired episodes, and search for these episodes in the different providers.

    Queries the DB for episodes aired in the last 2 days whose status is in the
    DOWNLOADED/SNATCHED/SNATCHED_BEST quality sets, asks every enabled backlog
    provider for PROPER releases for them, then filters the results down to the
    propers we actually want (proper-tagged, parsable, same quality as the
    existing episode, and — for anime — a higher version from the same group).

    Returns:
        list: filtered, de-duplicated proper results (empty when nothing aired).
    """
    propers = {}

    # Remember the thread name so it can be restored after per-provider renames.
    original_thread_name = threading.currentThread().name
    providers = enabled_providers('backlog')

    # Get the recently aired (last 2 days) shows from db
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)
    main_db_con = db.DBConnection()
    # Composite-status values we consider "already have it" for proper purposes.
    search_qualities = list(set(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST))
    # One '?' placeholder per quality value for the IN (...) clause.
    search_q_params = ','.join('?' for _ in search_qualities)
    recently_aired = main_db_con.select(
        b'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
        b' FROM tv_episodes AS e'
        b' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
        b' WHERE e.airdate >= ?'
        b' AND e.status IN ({0})'.format(search_q_params),
        [search_date.toordinal()] + search_qualities
    )

    if not recently_aired:
        logger.log('No recently aired new episodes, nothing to search for')
        return []

    # Loop through the providers, and search for releases
    for cur_provider in providers:
        # Tag the thread name with the provider so log lines are attributable.
        threading.currentThread().name = '{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        logger.log('Searching for any new PROPER releases from {provider}'.format(provider=cur_provider.name))

        # Each network failure mode is logged at DEBUG and the provider skipped,
        # so one broken provider cannot abort the whole proper search.
        try:
            cur_propers = cur_provider.find_propers(recently_aired)
        except AuthException as e:
            logger.log('Authentication error: {error}'.format(error=ex(e)), logger.DEBUG)
            continue
        except (SocketTimeout) as e:
            logger.log('Socket time out while searching for propers in {provider}, skipping: {error}'.format(
                provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except (requests_exceptions.HTTPError, requests_exceptions.TooManyRedirects) as e:
            logger.log('HTTP error while searching for propers in {provider}, skipping: {error}'.format(
                provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except requests_exceptions.ConnectionError as e:
            logger.log('Connection error while searching for propers in {provider}, skipping: {error}'.format(
                provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except requests_exceptions.Timeout as e:
            logger.log('Connection timed out while searching for propers in {provider}, skipping: {error}'.format(
                provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except requests_exceptions.ContentDecodingError as e:
            logger.log('Content-Encoding was gzip, but content was not compressed while searching for propers in {provider}, skipping: {error}'.format(
                provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            continue
        except Exception as e:
            # NOTE(review): `'ECONNRESET' in e` does a membership test on the
            # exception object itself (its args on py2); it probably should be
            # `in ex(e)` — confirm before changing.
            if 'ECONNRESET' in e or (hasattr(e, 'errno') and e.errno == errno.ECONNRESET):
                logger.log('Connection reset by peer while searching for propers in {provider}, skipping: {error}'.format(
                    provider=cur_provider.name, error=ex(e)), logger.DEBUG)
            else:
                logger.log('Unknown exception while searching for propers in {provider}, skipping: {error}'.format(
                    provider=cur_provider.name, error=ex(e)), logger.DEBUG)
                logger.log(traceback.format_exc(), logger.DEBUG)
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for proper in cur_propers:
            name = self._genericName(proper.name, remove=False)
            if name not in propers:
                logger.log('Found new possible proper result: {name}'.format(name=proper.name), logger.DEBUG)
                proper.provider = cur_provider
                propers[name] = proper

    threading.currentThread().name = original_thread_name

    # take the list of unique propers and get it sorted by date (newest first)
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    final_propers = []

    # Keep only last 100 items of processed propers:
    self.processed_propers = self.processed_propers[-100:]

    for cur_proper in sorted_propers:
        if cur_proper.name in self.processed_propers:
            logger.log(u'Proper already processed. Skipping: {0}'.format(cur_proper.name), logger.DEBUG)
            continue

        try:
            parse_result = NameParser(False).parse(cur_proper.name)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log('{0}'.format(error), logger.DEBUG)
            continue

        # Only releases explicitly tagged as proper/repack/etc. are considered.
        if not parse_result.proper_tags:
            logger.log('Skipping non-proper: {name}'.format(name=cur_proper.name))
            continue

        logger.log('Proper tags for {proper}: {tags}'.format(
            proper=cur_proper.name, tags=parse_result.proper_tags), logger.DEBUG)

        if not parse_result.series_name:
            logger.log('Ignoring invalid show: {name}'.format(name=cur_proper.name), logger.DEBUG)
            self.processed_propers.append(cur_proper.name)
            continue

        if not parse_result.episode_numbers:
            logger.log('Ignoring full season instead of episode: {name}'.format(name=cur_proper.name), logger.DEBUG)
            self.processed_propers.append(cur_proper.name)
            continue

        logger.log('Successful match! Matched {original_name} to show {new_name}'.format(
            original_name=parse_result.original_name, new_name=parse_result.show.name), logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.show = parse_result.show
        cur_proper.season = parse_result.season_number if parse_result.season_number is not None else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
        cur_proper.content = None
        cur_proper.proper_tags = parse_result.proper_tags

        # filter release through the show's release filters
        best_result = pickBestResult(cur_proper, parse_result.show)
        if not best_result:
            logger.log('Rejected proper due to release filters: {name}'.format(name=cur_proper.name))
            self.processed_propers.append(cur_proper.name)
            continue

        # only get anime proper if it has release group and version
        if best_result.show.is_anime:
            if not best_result.release_group and best_result.version == -1:
                logger.log('Ignoring proper without release group and version: {name}'.format(name=best_result.name))
                self.processed_propers.append(cur_proper.name)
                continue

        # check if we actually want this proper (if it's the right quality)
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select(
            b'SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
            [best_result.indexerid, best_result.season, best_result.episode])
        if not sql_results:
            logger.log('Ignoring proper with incorrect quality: {name}'.format(name=best_result.name))
            self.processed_propers.append(cur_proper.name)
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0][b'status']))
        if old_status not in (DOWNLOADED, SNATCHED) or old_quality != best_result.quality:
            logger.log('Ignoring proper because quality is different or episode is already archived: {name}'.format(
                name=best_result.name))
            self.processed_propers.append(cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if best_result.show.is_anime:
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                b'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [best_result.indexerid, best_result.season, best_result.episode])

            old_version = int(sql_results[0][b'version'])
            old_release_group = (sql_results[0][b'release_group'])

            # Accept only a strictly newer version of an already-versioned release.
            if -1 < old_version < best_result.version:
                logger.log('Found new anime version {new} to replace existing version {old}: {name}'.format(
                    old=old_version, new=best_result.version, name=best_result.name))
            else:
                logger.log('Ignoring proper with the same or lower version: {name}'.format(name=best_result.name))
                self.processed_propers.append(cur_proper.name)
                continue

            if old_release_group != best_result.release_group:
                logger.log('Ignoring proper from release group {new} instead of current group {old}'.format(
                    new=best_result.release_group, old=old_release_group))
                self.processed_propers.append(cur_proper.name)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if best_result.indexerid != -1 and (best_result.indexerid, best_result.season, best_result.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), final_propers):
            logger.log('Found a desired proper: {name}'.format(name=best_result.name))
            final_propers.append(best_result)

        self.processed_propers.append(cur_proper.name)

    return final_propers
def makeSceneSeasonSearchString (show, segment, extraSearchType=None):
    """Build the provider search strings for one season (or date segment) of a show.

    Depending on the show type, the per-season terms are either the date segment
    itself (air-by-date shows), the wanted absolute episode numbers (absolute
    numbering), or an "SXX"-style season token.  Each term is then combined with
    every scene show name.  ``extraSearchType == "nzbmatrix"`` produces that
    provider's special quoted query syntax instead of the plain "name.term" form.

    Returns a list of search strings.
    """
    my_db = db.DBConnection()

    if show.air_by_date:
        # Air-by-date shows are searched by the date segment itself.
        num_seasons = 0
        season_terms = [segment]
    elif show.absolute_numbering:
        num_seasons = 0
        ep_rows = my_db.select(
            "SELECT absolute_episode, status FROM tv_episodes WHERE showid = ? and season = ?",
            [show.tvdbid, segment])

        # Highest quality this show is configured to want (0 when none set).
        best_qualities = Quality.splitQuality(show.quality)
        highest_best = max(best_qualities) if best_qualities else 0

        # Collect the absolute episode numbers still needed in this 'season':
        # wanted episodes, or ones we have at a lower quality than configured.
        season_terms = []
        for row in ep_rows:
            cur_status, cur_quality = Quality.splitCompositeStatus(int(row["status"]))
            needs_better = cur_status in (DOWNLOADED, SNATCHED) and cur_quality < highest_best
            if needs_better or cur_status == WANTED:
                season_terms.append("%d" % row["absolute_episode"])
    else:
        count_rows = my_db.select(
            "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
            [show.tvdbid])
        num_seasons = int(count_rows[0][0])
        season_terms = ["S%02d" % segment]

        # nzbmatrix allows more than one search per request, so include SxEE-style terms too.
        if extraSearchType == "nzbmatrix":
            season_terms.append("%ix" % segment)

    results = []

    # Combine every scene show name with the season terms.
    for cur_name in set(makeSceneShowSearchStrings(show)):
        if not extraSearchType:
            # Most providers work the same way: single-season shows are searched
            # by name alone, otherwise one "name.term" query per season term.
            if num_seasons == 1:
                results.append(cur_name)
            else:
                results.extend(cur_name + "." + term for term in season_terms)
        elif extraSearchType == "nzbmatrix":
            # nzbmatrix is special: build its quoted query syntax.
            if num_seasons == 1:
                results.append('"' + cur_name + '"')
            elif num_seasons == 0:
                if show.absolute_numbering:
                    results.append('.'.join(
                        '(+"' + cur_name + '"+"' + term + '")' for term in season_terms))
                else:
                    results.append('"' + cur_name + ' ' + str(segment).replace('-', ' ') + '"')
            else:
                wildcard_terms = [term + '*' for term in season_terms]
                if show.air_by_date:
                    wildcard_terms = ['"' + term + '"' for term in wildcard_terms]
                results.append('+"' + cur_name + '" +(' + ','.join(wildcard_terms) + ')')

    return results
def _getProperList():
    """Search all active providers for PROPER releases of recently aired episodes.

    Collects propers from every active provider, de-duplicates them by generic
    name, then keeps only those that parse to a known show/episode, pass the
    show's word filters, match the quality we already have, and (for anime)
    carry a higher version from the same release group.

    Returns:
        list: the filtered propers, newest first.
    """
    propers = {}

    # Look back two days for propers.
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of propers
    origThreadName = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for curProvider in providers:
        # Tag the thread name with the provider for attributable logging.
        threading.currentThread().name = origThreadName + ' :: [' + curProvider.name + ']'

        logger.log(u'Searching for any new PROPER releases from ' + curProvider.name)

        try:
            curPropers = curProvider.find_propers(search_date)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            # Restore the thread name every iteration (runs even on `continue`).
            threading.currentThread().name = origThreadName

        # if they haven't been added by a different provider then add the proper to the list
        for x in curPropers:
            name = _genericName(x.name)
            if not name in propers:
                logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []
    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid episode', logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid show', logger.DEBUG)
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            logger.log(
                u'Ignoring ' + curProper.name + ' because it\'s for a full season rather than specific episode',
                logger.DEBUG)
            continue

        logger.log(
            u'Successful match! Result ' + parse_result.original_name + ' matched to show ' + parse_result.show.name,
            logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.season = parse_result.season_number if parse_result.season_number != None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not curProper.release_group and curProper.version == -1:
                logger.log(u'Proper ' + curProper.name + ' doesn\'t have a release group and version, ignoring it',
                           logger.DEBUG)
                continue

        if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
            logger.log(u'Proper ' + curProper.name + ' isn\'t a valid scene release that we want, ignoring it',
                       logger.DEBUG)
            continue

        # Apply the show's ignored/required word filters.
        if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name, parse_result.show.rls_ignore_words):
            logger.log(
                u'Ignoring ' + curProper.name + ' based on ignored words filter: ' + parse_result.show.rls_ignore_words,
                logger.MESSAGE)
            continue

        if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name, parse_result.show.rls_require_words):
            logger.log(
                u'Ignoring ' + curProper.name + ' based on required words filter: ' + parse_result.show.rls_require_words,
                logger.MESSAGE)
            continue

        # check if we actually want this proper (if it's the right quality)
        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                 [curProper.indexerid, curProper.season, curProper.episode])
        if not sqlResults:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]['status']))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            myDB = db.DBConnection()
            sqlResults = myDB.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [curProper.indexerid, curProper.season, curProper.episode])

            oldVersion = int(sqlResults[0]['version'])
            oldRelease_group = (sqlResults[0]['release_group'])

            # Only accept a strictly newer version of an already-versioned release.
            if oldVersion > -1 and oldVersion < curProper.version:
                logger.log('Found new anime v' + str(curProper.version) + ' to replace existing v' + str(oldVersion))
            else:
                continue

            if oldRelease_group != curProper.release_group:
                logger.log('Skipping proper from release group: ' + curProper.release_group + ', does not match existing release group: ' + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            logger.log(u'Found a proper that we need: ' + str(curProper.name))
            finalPropers.append(curProper)

    return finalPropers
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    """Search active providers for PROPER releases and verify each candidate.

    Args:
        aired_since_shows: search date passed through to each provider's
            ``find_propers``.
        recent_shows: indexer ids of recently-aired regular shows.
        recent_anime: indexer ids of recently-aired anime shows.

    Returns:
        list: verified propers, newest first, one per (show, season, episode).
    """
    propers = {}

    # for each provider get a list of propers
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        # Tag the thread name with the provider for attributable logging.
        threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                      anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            # Restore the thread name every iteration (runs even on `continue`).
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
        np = NameParser(False, try_scene_exceptions=True)
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    parse_result = np.parse(x.title)
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except Exception:
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by date, newest first
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    verified_propers = []
    for cur_proper in sorted_propers:

        # BUG FIX: this loop previously reused the stale `parse_result` left
        # over from the collection loop above, so every proper inherited the
        # show/season/episode of whatever name happened to be parsed last (and
        # the name could even be unbound when no provider returned results).
        # Re-parse this proper's own name instead.
        try:
            parse_result = NameParser(False, try_scene_exceptions=True).parse(cur_proper.name)
        except Exception:
            continue
        if not (parse_result.series_name and parse_result.episode_numbers):
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        # Apply the show's ignored/required word filters.
        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                   [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            my_db = db.DBConnection()
            sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode])

            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])

            # Only accept a strictly newer version of an already-versioned release.
            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(u'Skipping proper from release group: %s, does not match existing release group: %s' %
                           (cur_proper.release_group, old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
            logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers
def _getProperList(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """
    Walk providers for propers.

    Searches every active provider for releases from the last 2 days, keeps
    only names that actually contain a proper/repack/real tag, then verifies
    each candidate against the show's release filters and the quality/version
    of the episode already in the DB.

    Returns:
        list: the propers we want, newest first.
    """
    propers = {}

    # Look back two days for propers.
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of propers
    origThreadName = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active()]
    for curProvider in providers:
        # Tag the thread name with the provider for attributable logging.
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

        logger.log(u"Searching for any new PROPER releases from " + curProvider.name)

        try:
            curPropers = curProvider.find_propers(search_date)
        except AuthException as e:
            logger.log(u"Authentication error: " + ex(e), logger.DEBUG)
            continue
        except Exception as e:
            logger.log(u"Exception while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue

        # if they haven't been added by a different provider then add the proper to the list
        for x in curPropers:
            # Defensive check: drop anything without a proper/repack/real tag.
            if not re.search(r'\b(proper|repack|real)\b', x.name, re.I):
                logger.log(u'find_propers returned a non-proper, we have caught and skipped it.', logger.DEBUG)
                continue

            name = self._genericName(x.name)
            if name not in propers:
                logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

        threading.currentThread().name = origThreadName

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []
    for curProper in sortedPropers:
        try:
            parse_result = NameParser(False).parse(curProper.name)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log(u"{0}".format(error), logger.DEBUG)
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            logger.log(
                u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
                logger.DEBUG)
            continue

        logger.log(
            u"Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name,
            logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release through the show's release filters
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            logger.log(u"Proper " + curProper.name + " were rejected by our release filters.", logger.DEBUG)
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                logger.log(u"Proper " + bestResult.name + " doesn't have a release group and version, ignoring it",
                           logger.DEBUG)
                continue

        # check if we actually want this proper (if it's the right quality)
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                                         [bestResult.indexerid, bestResult.season, bestResult.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sql_results[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])

            oldVersion = int(sql_results[0]["version"])
            oldRelease_group = (sql_results[0]["release_group"])

            # Only accept a strictly newer version of an already-versioned release.
            if -1 < oldVersion < bestResult.version:
                logger.log(u"Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                logger.log(u"Skipping proper from release group: " + bestResult.release_group + ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in {(p.indexerid, p.season, p.episode) for p in finalPropers}:
            logger.log(u"Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def refine_video(video, episode):
    """Enrich a subliminal Video object with data from the episode's release name
    and from the sickbeard episode metadata.

    Attributes already present on ``video`` are kept unless the show has
    ``subtitles_sr_metadata`` set, in which case sickbeard metadata wins.
    Finally ``video.format`` and ``video.resolution`` are derived from the
    episode's composite status quality.
    """
    # try to enrich video object using information in original filename
    if episode.release_name:
        guessed = Episode.fromguess(None, guessit(episode.release_name))
        for attr_name in vars(guessed):
            guessed_value = getattr(guessed, attr_name)
            if guessed_value and not getattr(video, attr_name):
                setattr(video, attr_name, guessed_value)

    # Use sickbeard metadata: video attribute -> dotted path on the episode.
    metadata_mapping = {
        'episode': 'episode',
        'release_group': 'release_group',
        'season': 'season',
        'series': 'show.name',
        'series_imdb_id': 'show.imdbid',
        'size': 'file_size',
        'title': 'name',
        'year': 'show.startyear'
    }

    def get_attr_value(obj, name):
        # Walk a dotted attribute path; a falsy intermediate restarts the
        # lookup from the root object (preserves the original behaviour).
        value = None
        for attr in name.split('.'):
            value = getattr(value if value else obj, attr, None)
        return value

    for field in metadata_mapping:
        meta_value = get_attr_value(episode, metadata_mapping[field])
        # Fill empty fields; overwrite non-empty ones only when the show opts
        # in to sickbeard metadata (equivalent to the original if/elif pair).
        if meta_value and (not getattr(video, field) or episode.show.subtitles_sr_metadata):
            setattr(video, field, meta_value)

    # Set quality from metadata
    _, quality = Quality.splitCompositeStatus(episode.status)

    if not video.format or episode.show.subtitles_sr_metadata:
        for source_mask in (Quality.ANYHDTV, Quality.ANYWEBDL, Quality.ANYBLURAY):
            if quality & source_mask:
                video.format = Quality.combinedQualityStrings.get(source_mask)
                break

    if not video.resolution or episode.show.subtitles_sr_metadata:
        resolution_table = (
            (Quality.HDTV | Quality.HDWEBDL | Quality.HDBLURAY, '720p'),
            (Quality.RAWHDTV, '1080i'),
            (Quality.FULLHDTV | Quality.FULLHDWEBDL | Quality.FULLHDBLURAY, '1080p'),
            (Quality.UHD_4K_TV | Quality.UHD_4K_WEBDL | Quality.UHD_4K_BLURAY, '4K'),
            (Quality.UHD_8K_TV | Quality.UHD_8K_WEBDL | Quality.UHD_8K_BLURAY, '8K'),
        )
        for res_mask, label in resolution_table:
            if quality & res_mask:
                video.resolution = label
                break
if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name, parse_result.show.rls_require_words): logger.log( u"Ignoring " + curProper.name + " based on required words filter: " + parse_result.show.rls_require_words, logger.INFO) continue # check if we actually want this proper (if it's the right quality) myDB = db.DBConnection() sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode]) if not sqlResults: continue # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality: continue # check if we actually want this proper (if it's the right release group and a higher version) if parse_result.is_anime: myDB = db.DBConnection() sqlResults = myDB.select( "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode]) oldVersion = int(sqlResults[0]["version"]) oldRelease_group = (sqlResults[0]["release_group"]) if oldVersion > -1 and oldVersion < curProper.version: logger.log("Found new anime v" + str(curProper.version) + " to replace existing v" + str(oldVersion))
if episode: try: ep_obj = show_obj.getEpisode(season, episode) with ep_obj.lock: quality = Quality.splitCompositeStatus(ep_obj.status)[1] ep_obj.status = Quality.compositeStatus(FAILED, quality) ep_obj.saveToDB() except exceptions.EpisodeNotFoundException, e: log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING) else: # Whole season for ep_obj in show_obj.getAllEpisodes(season): with ep_obj.lock: quality = Quality.splitCompositeStatus(ep_obj.status)[1] ep_obj.status = Quality.compositeStatus(FAILED, quality) ep_obj.saveToDB() return log_str def logSnatch(searchResult): myDB = db.DBConnection("failed.db") logDate = datetime.datetime.today().strftime(dateFormat) release = prepareFailedName(searchResult.name) providerClass = searchResult.provider if providerClass is not None: provider = providerClass.name else:
def choose_search_mode(self, episodes, ep_obj, hits_per_page=100):
    """Decide whether to run one season search or per-episode searches.

    Estimates how many provider result pages per-episode searches would need
    for the (scene) season of `ep_obj`, compares that against the cost of a
    whole-season search, and collects the qualities still needed for
    whichever mode is chosen.

    :param episodes: episode objects under consideration for searching
    :param ep_obj: reference episode whose (scene) season drives the estimate
    :param hits_per_page: provider page size used for the page estimates
    :return: tuple of (season_search flag, neededQualities instance,
             maximum number of results to fetch for the chosen mode)
    """
    # episodes that belong to the same (scene) season as ep_obj
    searches = [e for e in episodes
                if (not ep_obj.show.is_scene and e.season == ep_obj.season)
                or (ep_obj.show.is_scene and e.scene_season == ep_obj.scene_season)]

    needed = neededQualities()
    needed.check_needed_types(ep_obj.show)
    for s in searches:
        if needed.all_qualities_needed:
            break
        if not s.show.is_anime and not s.show.is_sports:
            if not getattr(s, 'wantedQuality', None):
                # this should not happen, the creation is missing for the search in this case
                logger.log('wantedQuality property was missing for search, creating it', logger.WARNING)
                ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                s.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
            needed.check_needed_qualities(s.wantedQuality)

    if not hasattr(ep_obj, 'eps_aired_in_season'):
        # this should not happen, the creation is missing for the search in this case
        logger.log('eps_aired_in_season property was missing for search, creating it', logger.WARNING)
        ep_count, ep_count_scene = get_aired_in_season(ep_obj.show)
        ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
        ep_obj.eps_aired_in_scene_season = ep_count_scene.get(ep_obj.scene_season, 0) if ep_obj.show.is_scene \
            else ep_obj.eps_aired_in_season

    # how many results per episode we expect to want / allow as an upper bound
    per_ep, limit_per_ep = 0, 0
    if needed.need_sd and not needed.need_hd:
        per_ep, limit_per_ep = 10, 25
    if needed.need_hd:
        if not needed.need_sd:
            per_ep, limit_per_ep = 30, 90
        else:
            per_ep, limit_per_ep = 40, 120
    if needed.need_uhd or (needed.need_hd and not self.cats.get(NewznabConstants.CAT_UHD)):
        per_ep += 4
        limit_per_ep += 10
    if ep_obj.show.is_anime or ep_obj.show.is_sports or ep_obj.show.air_by_date:
        rel_per_ep, limit_per_ep = 5, 10
    else:
        rel_per_ep = per_ep

    # BUGFIX: the multipliers were previously applied only inside the 'else'
    # arm of the conditional (eps_aired_in_season * rel_per_ep), so scene
    # shows computed their page estimates from the raw episode count.
    # Select the episode count once, then apply the multiplier in both cases.
    eps_aired = ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else ep_obj.eps_aired_in_season
    rel = max(1, int(ceil(eps_aired * rel_per_ep / hits_per_page)))
    rel_limit = max(1, int(ceil(eps_aired * limit_per_ep / hits_per_page)))

    # season search pays off when per-episode paging would exceed it
    season_search = rel < (len(searches) * 100 // hits_per_page)
    if not season_search:
        # per-episode mode: needed qualities come from ep_obj alone
        needed = neededQualities()
        needed.check_needed_types(ep_obj.show)
        if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
            if not getattr(ep_obj, 'wantedQuality', None):
                ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                ep_obj.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
            needed.check_needed_qualities(ep_obj.wantedQuality)
    else:
        # season mode: fold in the needed qualities of every episode
        if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
            for ep in episodes:
                if not getattr(ep, 'wantedQuality', None):
                    ep_status, ep_quality = Quality.splitCompositeStatus(ep.status)
                    ep.wantedQuality = get_wanted_qualities(ep, ep_status, ep_quality, unaired=True)
                needed.check_needed_qualities(ep.wantedQuality)
    return (season_search, needed,
            (hits_per_page * 100 // hits_per_page * 2,
             hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])
def _getProperList(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """Walk all active providers for new PROPER/REPACK releases.

    Queries every active provider for propers found in the last two days,
    de-duplicates results across providers, parses each release name, and
    keeps only those matching a known show/episode at the exact quality
    already retrieved (DOWNLOADED or SNATCHED). Anime propers additionally
    require a matching release group and a strictly higher version.

    :return: list of proper results that should be snatched
    """
    propers = {}

    # look back two days for fresh propers
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of the
    origThreadName = threading.currentThread().name
    providers = [
        x for x in sickbeard.providers.sortedProviderList(
            sickbeard.RANDOMIZE_PROVIDERS) if x.is_active()
    ]
    for curProvider in providers:
        # tag the thread name with the provider so log lines are attributable
        threading.currentThread(
        ).name = origThreadName + " :: [" + curProvider.name + "]"

        logger.log(u"Searching for any new PROPER releases from " +
                   curProvider.name)

        try:
            curPropers = curProvider.find_propers(search_date)
        except AuthException as e:
            logger.log(u"Authentication error: " + ex(e), logger.DEBUG)
            continue
        except (SocketTimeout, TypeError) as e:
            logger.log(
                u"Connection timed out (sockets) while searching propers in "
                + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except (requests_exceptions.HTTPError,
                requests_exceptions.TooManyRedirects) as e:
            logger.log(
                u"HTTP error while searching propers in " + curProvider.name +
                ", skipping: " + ex(e), logger.DEBUG)
            continue
        except requests_exceptions.ConnectionError as e:
            logger.log(
                u"Connection error while searching propers in " +
                curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except requests_exceptions.Timeout as e:
            logger.log(
                u"Connection timed out while searching propers in " +
                curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except requests_exceptions.ContentDecodingError as e:
            # BUGFIX: this handler previously omitted 'as e' while still
            # calling ex(e) below, which raised a NameError (or logged a
            # stale exception) instead of reporting the decoding failure.
            logger.log(
                u"Content-Encoding was gzip, but content was not compressed while searching propers in "
                + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except Exception as e:
            if hasattr(e, 'errno') and e.errno == errno.ECONNRESET:
                logger.log(
                    u"Connection reseted by peer accessing {0}".format(
                        curProvider.name), logger.DEBUG)
            else:
                logger.log(
                    u"Unknown exception while searching propers in " +
                    curProvider.name + ", skipping: " + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            # guard against providers returning non-proper results
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)',
                             x.name, re.I):
                logger.log(
                    u'find_propers returned a non-proper, we have caught and skipped it.',
                    logger.DEBUG)
                continue

            name = self._genericName(x.name)
            if name not in propers:
                logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

        threading.currentThread().name = origThreadName

    # take the list of unique propers and get it sorted by
    sortedPropers = sorted(propers.values(),
                           key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:
        try:
            parse_result = NameParser(False).parse(curProper.name)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log(u"{0}".format(error), logger.DEBUG)
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            logger.log(
                u"Ignoring " + curProper.name +
                " because it's for a full season rather than specific episode",
                logger.DEBUG)
            continue

        logger.log(
            u"Successful match! Result " + parse_result.original_name +
            " matched to show " + parse_result.show.name, logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name,
                                                parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            logger.log(
                u"Proper " + curProper.name +
                " were rejected by our release filters.", logger.DEBUG)
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                logger.log(
                    u"Proper " + bestResult.name +
                    " doesn't have a release group and version, ignoring it",
                    logger.DEBUG)
                continue

        # check if we actually want this proper (if it's the right quality)
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select(
            "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
            [bestResult.indexerid, bestResult.season, bestResult.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(
            int(sql_results[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])

            oldVersion = int(sql_results[0]["version"])
            oldRelease_group = (sql_results[0]["release_group"])

            if -1 < oldVersion < bestResult.version:
                logger.log(u"Found new anime v" + str(bestResult.version) +
                           " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                logger.log(u"Skipping proper from release group: " +
                           bestResult.release_group +
                           ", does not match existing release group: " +
                           oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (
                bestResult.indexerid, bestResult.season,
                bestResult.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'),
                    finalPropers):
            logger.log(u"Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers