def _generate_sample_ep(multi=None, abd=False, anime=False):
    """Build a fake TVEpisode (S02E03) used to preview naming patterns.

    multi: when not None, attach two related episodes to simulate a
        multi-episode release.
    abd: use an air-by-date style release name.
    anime: use an anime style release name (absolute numbering).
    """
    # make a fake episode object
    ep = TVEpisode(2, 3, "Ep Name", 99, anime)
    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
    ep._airdate = datetime.date(2011, 3, 9)
    if abd:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
    elif anime:
        ep._release_name = '[RLSGROUP]Show.Name.100.[HDTV]'
    else:
        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'

    # fix: compare against None with "is not", not "!=" (PEP 8)
    if multi is not None:
        ep._name = "Ep Name (1)"
        if anime:
            ep._release_name = '[RLSGROUP]Show.Name.100-101-102.[HDTV]'
        else:
            ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'

        secondEp = TVEpisode(2, 4, "Ep Name (2)", 100, anime)
        secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        secondEp._release_name = ep._release_name

        thirdEp = TVEpisode(2, 5, "Ep Name (3)", 101, anime)
        thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        thirdEp._release_name = ep._release_name

        ep.relatedEps.append(secondEp)
        ep.relatedEps.append(thirdEp)

    return ep
def _generate_sample_ep(multi=None, abd=False, sports=False):
    """Build a fake TVEpisode (S02E03) used to preview naming patterns.

    multi: when not None, attach two related episodes to simulate a
        multi-episode release.
    abd: air-by-date style release name (sets show.air_by_date).
    sports: sports style release name (sets show.sports).
    """
    # make a fake episode object
    ep = TVEpisode(2, 3, "Ep Name")
    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
    ep._airdate = datetime.date(2011, 3, 9)
    if abd:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
        ep.show.air_by_date = 1
    elif sports:
        ep._release_name = 'Show.Name.100.Fighter.vs.Fighter.HDTV.XviD-RLSGROUP'
        ep.show.sports = 1
    else:
        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'

    # fix: compare against None with "is not", not "!=" (PEP 8)
    if multi is not None:
        ep._name = "Ep Name (1)"
        ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'

        secondEp = TVEpisode(2, 4, "Ep Name (2)")
        secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        secondEp._release_name = ep._release_name

        thirdEp = TVEpisode(2, 5, "Ep Name (3)")
        thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        thirdEp._release_name = ep._release_name

        ep.relatedEps.append(secondEp)
        ep.relatedEps.append(thirdEp)

    return ep
def _generate_sample_ep(multi=None, abd=False, sports=False):
    """Build a fake TVEpisode (S02E03) used to preview naming patterns.

    multi: when not None, attach two related episodes to simulate a
        multi-episode release.
    abd: air-by-date style release name (sets show.air_by_date).
    sports: sports style release name (sets show.sports).
    """
    # make a fake episode object
    ep = TVEpisode(2, 3, "Ep Name")
    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
    ep._airdate = datetime.date(2011, 3, 9)
    if abd:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
        ep.show.air_by_date = 1
    elif sports:
        ep._release_name = 'Show.Name.2011.Mar.09.HDTV.XviD-RLSGROUP'
        ep.show.sports = 1
    else:
        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'

    # fix: compare against None with "is not", not "!=" (PEP 8)
    if multi is not None:
        ep._name = "Ep Name (1)"
        ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'

        secondEp = TVEpisode(2, 4, "Ep Name (2)")
        secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        secondEp._release_name = ep._release_name

        thirdEp = TVEpisode(2, 5, "Ep Name (3)")
        thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        thirdEp._release_name = ep._release_name

        ep.relatedEps.append(secondEp)
        ep.relatedEps.append(thirdEp)

    return ep
def get_qualities(layer):
    """Parse the per-frame quality CSV for *layer* and return one Quality per segment.

    The file's own "Avg:" row is cross-checked against the accumulated values
    as a sanity check; a mismatch raises an Exception.
    """
    path = os.path.join('data', FOLDER_NAME, 'quality', FILENAME.format(layer))
    psnrs = []
    ssims = []
    with open(path) as f:
        # skip the two header lines before the tab-separated data
        next(f)
        next(f)
        for row in csv.reader(f, delimiter='\t'):
            if row[0] == "Avg:":
                # verify the file's reported averages against what we summed
                if not math.isclose(float(row[PSNR]), avg(psnrs), abs_tol=0.01):
                    raise Exception(
                        "Wrong PSNR average for layer {}".format(layer))
                if not math.isclose(float(row[SSIM]), avg(ssims), abs_tol=1e-05):
                    raise Exception(
                        "Wrong SSIM average for layer {}".format(layer))
            elif row[0] not in IGNORE_LINE_NAMES:
                psnrs.append(float(row[PSNR]))
                ssims.append(float(row[SSIM]))

    # group the per-frame values into fixed-size segments
    qualities = []
    for start in range(0, len(psnrs), FRAMES_IN_SEG):
        stop = start + FRAMES_IN_SEG
        qualities.append(Quality(psnrs[start:stop], ssims[start:stop]))
    return qualities
def isFinalResult(result):
    """
    Checks if the given result is good enough quality that we can stop searching for other ones.

    If the result is the highest quality in both the any/best quality lists then this function
    returns True, if not then it's False
    """

    logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)

    show_obj = result.episodes[0].show

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # if there is a redownload that's higher than this then we definitely need to keep looking
    if best_qualities and result.quality < max(best_qualities):
        return False

    # if it does not match the show's black and white list it's no good
    # (bug fix: the check was inverted -- it rejected results that WERE valid
    # for the release-group list instead of those that were not)
    elif show_obj.is_anime and not show_obj.release_groups.is_valid(result):
        return False

    # if there's no redownload that's higher (above) and this is the highest initial download then we're good
    elif any_qualities and result.quality in any_qualities:
        return True

    elif best_qualities and result.quality == max(best_qualities):
        return True

    # if we got here than it's either not on the lists, they're empty, or it's lower than the highest required
    else:
        return False
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            dlResult = nzbget.sendNZB(result)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # magnet links carry no file content to fetch
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    # fix: identity comparison for the False singleton instead of "== False" (PEP 8)
    if dlResult is False:
        return False

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Snatch a found result: hand it to a download client or save it to disk.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # recently-aired episodes are bumped to high priority
        recent = datetime.timedelta(days=7)
        for episode in result.episodes:
            if datetime.date.today() - episode.airdate <= recent:
                result.priority = 1

    if result.resultType in ("nzb", "nzbdata"):
        # NZBs can be sent straight to SAB/NZBGet or saved to disk
        nzb_senders = {
            "blackhole": _downloadResult,
            "sabnzbd": sab.sendNZB,
            "nzbget": nzbget.sendNZB,
        }
        send = nzb_senders.get(sickbeard.NZB_METHOD)
        if send is None:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            downloaded = False
        else:
            downloaded = send(result)
    elif result.resultType == "torrent":
        if sickbeard.TORRENT_METHOD == "blackhole":
            # torrents are saved to disk when blackhole mode
            downloaded = _downloadResult(result)
        else:
            # magnet links carry no file content to fetch
            if result.url.startswith('magnet'):
                result.content = None
            else:
                result.content = result.provider.getURL(result.url)
            torrent_client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            downloaded = torrent_client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        downloaded = False

    if downloaded is False:
        return False

    history.logSnatch(result)

    for episode in result.episodes:
        # don't notify when we re-download an episode
        if episode.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(episode.prettyName())
        with episode.lock:
            episode.status = Quality.compositeStatus(endStatus, result.quality)
            episode.saveToDB()

    return True
def __init__(self, season, episode, name):
    """Minimal stand-in episode object used by the naming tests."""
    self.relatedEps = []
    # identity of the fake episode
    self._season = season
    self._episode = episode
    self._name = name
    # fixed fixture data shared by all tests
    self._airdate = datetime.date(2010, 3, 9)
    self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)
    self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
    self.show = TVShow()
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    # NZBs can be sent straight to SAB or saved to disk
    if hasattr(result, "resultType"):
        if result.resultType in ("nzb", "nzbdata"):
            if sickbeard.NZB_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            elif sickbeard.NZB_METHOD == "sabnzbd":
                dlResult = sab.sendNZB(result)
            elif sickbeard.NZB_METHOD == "nzbget":
                dlResult = nzbget.sendNZB(result)
            else:
                logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
                dlResult = False

        # TORRENTs can be sent to clients or saved to disk
        elif result.resultType in ("torrent", "torrentdata"):
            # torrents are saved to disk when blackhole mode
            if sickbeard.TORRENT_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            else:
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                # raw torrent data was stashed in extraInfo by the provider
                if hasattr(result, "extraInfo") and result.resultType == "torrentdata":
                    result.content = result.extraInfo[0]
                dlResult = client.sendTORRENT(result)
        else:
            logger.log(u"Unknown result type, unable to download it", logger.ERROR)
            dlResult = False

        # fix: identity comparison for the False singleton instead of "== False" (PEP 8)
        if dlResult is False:
            return False

        history.logSnatch(result)

        # don't notify when we re-download an episode
        for curEpObj in result.episodes:
            with curEpObj.lock:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
                curEpObj.audio_langs = result.audio_lang
                curEpObj.saveToDB()

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj.prettyName())

        return True
    else:
        return False
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    # NZBs can be sent straight to SAB or saved to disk
    if hasattr(result, 'resultType'):
        if result.resultType in ("nzb", "nzbdata"):
            if sickbeard.NZB_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            elif sickbeard.NZB_METHOD == "sabnzbd":
                dlResult = sab.sendNZB(result)
            elif sickbeard.NZB_METHOD == "nzbget":
                dlResult = nzbget.sendNZB(result)
            else:
                logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
                dlResult = False

        # TORRENTs can be sent to clients or saved to disk
        elif result.resultType in ("torrent", "torrentdata"):
            # torrents are saved to disk when blackhole mode
            if sickbeard.TORRENT_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            else:
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                # raw torrent data was stashed in extraInfo by the provider
                if hasattr(result, 'extraInfo') and result.resultType == "torrentdata":
                    result.content = result.extraInfo[0]
                dlResult = client.sendTORRENT(result)
        else:
            logger.log(u"Unknown result type, unable to download it", logger.ERROR)
            dlResult = False

        # fix: identity comparison for the False singleton instead of "== False" (PEP 8)
        if dlResult is False:
            return False

        history.logSnatch(result)

        # don't notify when we re-download an episode
        for curEpObj in result.episodes:
            with curEpObj.lock:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
                curEpObj.audio_langs = result.audio_lang
                curEpObj.saveToDB()

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj.prettyName())

        return True
    else:
        return False
def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_type=None):
    """Build a fake TVEpisode (S02E03) used to preview naming patterns.

    multi: when not None, attach related episode(s) to simulate a
        multi-episode release.
    abd: emulate an air-by-date show.
    sports: emulate a sports show.
    anime/anime_type: emulate an anime show; anime_type 3 keeps SxxExx naming.
    """
    # make a fake episode object
    ep = TVEpisode(2, 3, 3, 'Ep Name')
    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
    ep._airdate = datetime.date(2011, 3, 9)

    # anime_type 3 means "use the standard SxxExx naming" even for anime
    sxxexx_naming = not anime or 3 == anime_type

    if abd:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
        ep.show.air_by_date = 1
    elif sports:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
        ep.show.sports = 1
    elif sxxexx_naming:
        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
    else:
        ep._release_name = 'Show.Name.003.HDTV.XviD-RLSGROUP'
        ep.show.anime = 1

    if multi is not None:
        ep._name = 'Ep Name (1)'

        second_ep = TVEpisode(2, 4, 4, 'Ep Name (2)')
        second_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)

        if sxxexx_naming:
            multi_release = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'
        else:
            multi_release = 'Show.Name.003-004.HDTV.XviD-RLSGROUP'
        ep._release_name = multi_release
        second_ep._release_name = multi_release
        ep.relatedEps.append(second_ep)

        if sxxexx_naming:
            third_ep = TVEpisode(2, 5, 5, 'Ep Name (3)')
            third_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
            third_ep._release_name = multi_release
            ep.relatedEps.append(third_ep)
        else:
            ep.show.anime = 1

    return ep
def is_first_best_match(ep_status, result):
    """
    Checks if the given result is a best quality match and if we want to
    archive the episode on first match.
    """
    logger.log(u'Checking if the first best quality match should be archived for episode %s' % result.name,
               logger.DEBUG)

    show_obj = result.episodes[0].show

    cur_status, cur_quality = Quality.splitCompositeStatus(ep_status)
    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # archiving only applies when the show wants a single upgrade and has
    # "best" qualities configured at all
    if not best_qualities or not show_obj.upgrade_once:
        return False

    # the result must actually be one of the best qualities
    if result.quality not in best_qualities:
        return False

    # archive when the episode is already snatched/downloaded, or when this
    # quality is not an "initial" quality anyway
    return (cur_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)
            or result.quality not in any_qualities)
def generate_sample_ep(multi=None, abd=False, sports=False, anime=False, anime_type=None):
    """Build a fake TVEpisode (S02E03) used to preview naming patterns.

    multi: when not None, attach related episode(s) to simulate a
        multi-episode release.
    abd: emulate an air-by-date show.
    sports: emulate a sports show.
    anime/anime_type: emulate an anime show; anime_type 3 keeps SxxExx naming.
    """
    # make a fake episode object
    ep = TVEpisode(2, 3, 3, 'Ep Name')
    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
    ep._airdate = datetime.date(2011, 3, 9)

    # anime_type 3 means "use the standard SxxExx naming" even for anime
    sxxexx_naming = not anime or 3 == anime_type

    if abd:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
        ep.show.air_by_date = 1
    elif sports:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
        ep.show.sports = 1
    elif sxxexx_naming:
        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
    else:
        ep._release_name = 'Show.Name.003.HDTV.XviD-RLSGROUP'
        ep.show.anime = 1

    if multi is not None:
        ep._name = 'Ep Name (1)'

        second_ep = TVEpisode(2, 4, 4, 'Ep Name (2)')
        second_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        # note: the second episode keeps the single-episode release name here
        second_ep._release_name = ep._release_name
        ep.relatedEps.append(second_ep)

        if sxxexx_naming:
            ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'
            third_ep = TVEpisode(2, 5, 5, 'Ep Name (3)')
            third_ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
            third_ep._release_name = ep._release_name
            ep.relatedEps.append(third_ep)
        else:
            ep._release_name = 'Show.Name.003-004.HDTV.XviD-RLSGROUP'
            ep.show.anime = 1

    return ep
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
            # fix: this diagnostic dump of the result was logged at ERROR level;
            # it is informational only, so log it at DEBUG instead
            logger.log(u"nzb parameters: {0}".format(result.__dict__), logger.DEBUG)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            dlResult = nzbget.sendNZB(result)
        else:
            logger.log(
                u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD,
                logger.ERROR)
            dlResult = False

    # torrents are always saved to disk
    elif result.resultType == "torrent":
        dlResult = _downloadResult(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    # fix: identity comparison for the False singleton instead of "== False" (PEP 8)
    if dlResult is False:
        return False

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj.prettyName())

    return True
def _generate_sample_ep(multi=None, abd=False, ae=False, sn=False):
    """Build a fake TVEpisode (S02E03, absolute 73) used to preview naming patterns.

    multi: when not None, attach two related episodes (multi-episode release).
    abd: air-by-date style release name.
    ae: anime style release name (absolute numbering; sets show.anime).
    sn: season-name style release name.
    """
    # make a fake episode object
    ep = TVEpisode(2, 3, "Ep Name", 73)
    ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
    ep._airdate = datetime.date(2011, 3, 9)
    if abd:
        ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
    elif ae:
        ep._release_name = 'Show.Name.073.HDTV.XviD-RLSGROUP'
        ep.show.anime = 1
    elif sn:
        ep._release_name = 'Show.Name.Season.Name.03.HDTV.XviD-RLSGROUP'
    else:
        ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'

    # fix: compare against None with "is not", not "!=" (PEP 8)
    if multi is not None:
        ep._name = "Ep Name (1)"

        if ae:
            ep._release_name = 'Show.Name.073-074-075.HDTV.XviD-RLSGROUP'
        elif sn:
            ep._release_name = 'Show.Name.Season.Name.E03E04E05.HDTV.XviD-RLSGROUP'
        else:
            ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'

        secondEp = TVEpisode(2, 4, "Ep Name (2)", 74)
        secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        secondEp._release_name = ep._release_name

        thirdEp = TVEpisode(2, 5, "Ep Name (3)", 75)
        thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
        thirdEp._release_name = ep._release_name

        ep.relatedEps.append(secondEp)
        ep.relatedEps.append(thirdEp)

    return ep
def __init__(self, season, episode, absolute_number, name):
    """Minimal stand-in episode with both indexer and scene numbering."""
    self.relatedEps = []
    # identity of the fake episode
    self._season = season
    self._episode = episode
    self._absolute_number = absolute_number
    self._name = name
    # scene numbering mirrors the indexer numbering for this fixture
    self.scene_season = season
    self.scene_episode = episode
    self.scene_absolute_number = absolute_number
    # fixed fixture data shared by all tests
    self._airdate = datetime.date(2010, 3, 9)
    self._status = Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)
    self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
    self._is_proper = True
    self._version = 2
    self.show = TVShow()
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            dlResult = nzbget.sendNZB(result)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # torrents are always saved to disk
    elif result.resultType == "torrent":
        dlResult = _downloadResult(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    # fix: identity comparison for the False singleton instead of "== False" (PEP 8)
    if dlResult is False:
        return False

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)
    # record the snatch so failed-download handling can track it later
    failed_history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj.prettyName())

    return True
def isFirstBestMatch(result):
    """
    Checks if the given result is a best quality match and if we want to
    archive the episode on first match.
    """

    # fix: log message previously read "for for" (duplicated word)
    logger.log(u"Checking if we should archive our first best quality match for episode " + result.name,
               logger.DEBUG)

    show_obj = result.episodes[0].show

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # if there is a redownload that's a match to one of our best qualities and
    # we want to archive the episode then we are done
    if best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities:
        return True

    return False
def pickBestResult(results, show):
    """
    Find the best result out of a list of search results for a show

    :param results: list of result objects
    :param show: Shows we check for
    :return: best result object
    """
    candidates = results if isinstance(results, list) else [results]

    logger.log(u"Picking the best result out of " + str([x.name for x in candidates]), logger.DEBUG)

    best = None

    # find the best result for the current episode
    for candidate in candidates:
        # only consider results that belong to the requested show
        if show and candidate.show is not show:
            continue

        # build the black And white list
        if show.is_anime and not show.release_groups.is_valid(candidate):
            continue

        logger.log("Quality of " + candidate.name + " is " + Quality.qualityStrings[candidate.quality])

        anyQualities, bestQualities = Quality.splitQuality(show.quality)

        if candidate.quality not in anyQualities + bestQualities:
            logger.log(candidate.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        if show.rls_ignore_words and show_name_helpers.containsAtLeastOneWord(candidate.name,
                                                                              candidate.show.rls_ignore_words):
            logger.log(u"Ignoring " + candidate.name + " based on ignored words filter: " + show.rls_ignore_words,
                       logger.INFO)
            continue

        if show.rls_require_words and not show_name_helpers.containsAtLeastOneWord(candidate.name,
                                                                                   candidate.show.rls_require_words):
            logger.log(u"Ignoring " + candidate.name + " based on required words filter: " + show.rls_require_words,
                       logger.INFO)
            continue

        if not show_name_helpers.filterBadReleases(candidate.name, parse=False):
            logger.log(u"Ignoring " + candidate.name + " because its not a valid scene release that we want, ignoring it",
                       logger.INFO)
            continue

        if hasattr(candidate, 'size'):
            if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(candidate.name, candidate.size,
                                                                           candidate.provider.name):
                logger.log(candidate.name + u" has previously failed, rejecting it")
                continue

        # keep the best candidate seen so far
        if not best:
            best = candidate
        elif candidate.quality in bestQualities and (best.quality < candidate.quality or
                                                     best.quality not in bestQualities):
            best = candidate
        elif candidate.quality in anyQualities and best.quality not in bestQualities and \
                best.quality < candidate.quality:
            best = candidate
        elif best.quality == candidate.quality:
            # equal quality: prefer proper/repack, non-internal, and x264 over xvid
            lower_name = candidate.name.lower()
            if "proper" in lower_name or "repack" in lower_name:
                best = candidate
            elif "internal" in best.name.lower() and "internal" not in lower_name:
                best = candidate
            elif "xvid" in best.name.lower() and "x264" in lower_name:
                logger.log(u"Preferring " + candidate.name + " (x264 over xvid)")
                best = candidate

    if best:
        logger.log(u"Picked " + best.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return best
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    """
    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently (or has no real airdate) make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7) or \
                    datetime.date.fromordinal(1) >= cur_ep.airdate:
                result.priority = 1

    # proper/repack releases get the SNATCHED_PROPER status
    if 0 < result.properlevel:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            dl_result = nzbget.send_nzb(result)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD,
                       logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        # lazily resolve the real download URL if the provider supplied a resolver
        if not result.url.startswith('magnet') and None is not result.get_data_func:
            result.url = result.get_data_func(result.url)
            result.get_data_func = None  # consume only once
            if not result.url:
                return False
        # "magnet-" prefixed URLs are appended to a blackhole file list
        # instead of being sent to a torrent client
        if not result.content and result.url.startswith('magnet-'):
            if sickbeard.TORRENT_DIR:
                filepath = ek.ek(os.path.join, sickbeard.TORRENT_DIR, 'files.txt')
                try:
                    with open(filepath, 'a') as fh:
                        result.url = result.url[7:]
                        fh.write('"%s"\t"%s"\n' % (result.url, sickbeard.TV_DOWNLOAD_DIR))
                    dl_result = True
                except IOError:
                    logger.log(u'Failed to write to %s' % filepath, logger.ERROR)
                    return False
            else:
                logger.log(u'Need to set a torrent blackhole folder', logger.ERROR)
                return False
        # torrents are saved to disk when blackhole mode
        elif 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if result.provider.should_skip() or not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url,
                               logger.ERROR)
                    return False
            # Snatches torrent with client
            dl_result = clients.get_client_instance(sickbeard.TORRENT_METHOD)().send_torrent(result)

            if getattr(result, 'cache_file', None):
                helpers.remove_file_failed(result.cache_file)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.add_snatched(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(cur_ep_obj.status, result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

            update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
except exceptions.AuthException, e: logger.log(u"Authentication error: " + ex(e), logger.ERROR) continue except Exception, e: logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) continue didSearch = True if not didSearch: logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR) finalResults = [] anyQualities, bestQualities = Quality.splitQuality(show.quality) # pick the best season NZB bestSeasonNZB = None if SEASON_RESULT in foundResults: bestSeasonNZB = pickBestResult(foundResults[SEASON_RESULT], anyQualities + bestQualities) highest_quality_overall = 0 for cur_season in foundResults: for cur_result in foundResults[cur_season]: if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG) # see if every episode is wanted if bestSeasonNZB:
def isFinalResult(result):
    """
    Checks if the given result is good enough quality that we can stop searching for other ones.

    If the result is the highest quality in both the any/best quality lists then this function
    returns True, if not then it's False
    """

    logger.log(
        u"Checking if we should keep searching after we've found " + result.name,
        logger.DEBUG)

    show_obj = result.episodes[0].show

    # gather every download link already recorded for this show's episodes
    my_db = db.DBConnection()
    links = []
    episode_rows = my_db.select("SELECT episode_id from tv_episodes where showid=?",
                                [show_obj.tvdbid])
    for episode_row in episode_rows:
        link_rows = my_db.select("SELECT link from episode_links where episode_id =?",
                                 [episode_row[0]])
        for link_row in link_rows:
            links.append(link_row[0])

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # work out which link/url attribute identifies this result
    if hasattr(result, 'item'):
        if hasattr(result.item, 'nzburl'):
            eplink = result.item.nzburl
        elif hasattr(result.item, 'url'):
            eplink = result.item.url
        elif hasattr(result, 'nzburl'):
            eplink = result.nzburl
        elif hasattr(result, 'url'):
            eplink = result.url
        else:
            eplink = ""
    elif hasattr(result, 'nzburl'):
        eplink = result.nzburl
    elif hasattr(result, 'url'):
        eplink = result.url
    else:
        eplink = ""

    # if episode link seems to have been already downloaded continue searching:
    if eplink in links:
        logger.log(
            eplink + " was already downloaded so let's continue searching assuming the download failed",
            logger.DEBUG)
        return False

    # if there is a redownload that's higher than this then we definitely need to keep looking
    if best_qualities and result.quality < max(best_qualities):
        return False

    # if there's no redownload that's higher (above) and this is the highest initial download then we're good
    if any_qualities and result.quality == max(any_qualities):
        return True

    if best_qualities and result.quality == max(best_qualities):
        # if this is the best redownload but we have a higher initial download then keep looking
        if any_qualities and result.quality < max(any_qualities):
            return False
        # if this is the best redownload and we don't have a higher initial download then we're done
        return True

    # if we got here than it's either not on the lists, they're empty, or it's lower than the highest required
    return False
def pickBestResult(results, show):
    """
    Pick the single best SearchResult for one episode.

    results: a SearchResult or a list of SearchResults to choose from.
    show: the TVShow object the results must belong to.

    Returns: the best surviving result, or None when every candidate was
    rejected (wrong show, banned group, unwanted quality, word filters,
    previously failed, or unfetchable torrent content).
    """
    results = results if isinstance(results, list) else [results]

    logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

    bestResult = None

    # find the best result for the current episode
    for cur_result in results:
        if show and cur_result.show is not show:
            continue

        # build the black And white list
        if show.is_anime:
            if not show.release_groups.is_valid(cur_result):
                continue

        logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])

        anyQualities, bestQualities = Quality.splitQuality(show.quality)

        if cur_result.quality not in anyQualities + bestQualities:
            logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        if show.rls_ignore_words and show_name_helpers.containsAtLeastOneWord(cur_result.name,
                                                                             cur_result.show.rls_ignore_words):
            logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
                       logger.INFO)
            continue

        if show.rls_require_words and not show_name_helpers.containsAtLeastOneWord(cur_result.name,
                                                                                   cur_result.show.rls_require_words):
            logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
                       logger.INFO)
            continue

        if not show_name_helpers.filterBadReleases(cur_result.name, parse=False):
            logger.log(u"Ignoring " + cur_result.name +
                       " because its not a valid scene release that we want, ignoring it", logger.INFO)
            continue

        if hasattr(cur_result, 'size'):
            if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
                                                                           cur_result.provider.name):
                logger.log(cur_result.name + u" has previously failed, rejecting it")
                continue

        # Download the torrent file contents only if it has passed all other checks!
        # Must be done before setting bestResult
        if cur_result.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
            if len(cur_result.url) and not cur_result.url.startswith('magnet'):
                cur_result.content = cur_result.provider.getURL(cur_result.url)
                if not cur_result.content:
                    continue

        # BUGFIX: the original compared the result OBJECT against the list of
        # quality ints (`bestResult not in bestQualities`), which is always
        # True once bestResult is set, so any best-list result replaced the
        # current pick regardless of quality. Compare its quality instead.
        if cur_result.quality in bestQualities and (
                not bestResult or bestResult.quality < cur_result.quality or
                bestResult.quality not in bestQualities):
            bestResult = cur_result
        elif cur_result.quality in anyQualities and (
                not bestResult or bestResult.quality not in bestQualities) and (
                not bestResult or bestResult.quality < cur_result.quality):
            bestResult = cur_result
        elif bestResult and bestResult.quality == cur_result.quality:
            # tie-breakers at equal quality: proper/repack beats plain,
            # non-internal beats internal, x264 beats xvid
            if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
                bestResult = cur_result
            elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
                bestResult = cur_result
            elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
                logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
                bestResult = cur_result

    if bestResult:
        logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return bestResult
break elif not curProvider.search_fallback or searchCount == 2: break if search_mode == 'sponly': logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...") search_mode = 'eponly' else: logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...") search_mode = 'sponly' # skip to next provider if we have no results to process if not len(foundResults[curProvider.name]): continue anyQualities, bestQualities = Quality.splitQuality(show.quality) # pick the best season NZB bestSeasonResult = None if SEASON_RESULT in foundResults[curProvider.name]: bestSeasonResult = pickBestResult( foundResults[curProvider.name][SEASON_RESULT], show, anyQualities + bestQualities) highest_quality_overall = 0 for cur_episode in foundResults[curProvider.name]: for cur_result in foundResults[curProvider.name][cur_episode]: if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality logger.log( u"The highest quality of any match is " +
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
               once it's snatched.

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # PROPER/REPACK releases get their own snatched status
    # (raw string and `is not None` replace the old '\.'-in-plain-string
    # pattern and the `!= None` comparison; behavior is unchanged)
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    if result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # Sets per provider seed ratio
            result.ratio = result.provider.seedRatio()
            # magnet links carry no file content to fetch
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            sql_l.append(curEpObj.get_sql())
            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    if sql_l:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    return True
from common import Segment from common import Quality from roger_allocator import get_best_received_segment from roger_allocator import average_quals from roger_allocator import Allocator import math received_times = [[10, 100], [11, 110], [12, 120], [13, 130]] segments = [ [ Segment(0, 0, 30, Quality([.1], [.01])), Segment(1, 0, 130, Quality([.11], [.011])) ], [ Segment(0, 1, 40, Quality([.15], [.015])), Segment(1, 1, 140, Quality([.151], [.0151])) ], [ Segment(0, 2, 50, Quality([.2], [.02])), Segment(1, 2, 150, Quality([.21], [.021])) ], [ Segment(0, 3, 60, Quality([.35], [.035])), Segment(1, 3, 160, Quality([.351], [.0351])) ] ] def test_get_best_received_segment(): assert (get_best_received_segment(
def searchProviders(show, episodes, manual_search=False):
    """
    Search every active backlog provider for the given episodes of a show.

    show: TVShow object to search for.
    episodes: list of episode objects results are wanted for.
    manual_search: passed straight through to each provider's search call.

    Returns a list of the best SearchResult objects found (possibly empty).
    When a whole-season result covers every wanted episode at the top
    quality, returns that single season result as a one-element list.
    """
    # foundResults maps provider name -> {episode number (or the
    # SEASON_RESULT / MULTI_EP_RESULT sentinels): [results]}
    foundResults = {}
    finalResults = []

    didSearch = False

    origThreadName = threading.currentThread().name

    # only providers enabled for backlog searching
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
    for providerNum, curProvider in enumerate(providers):
        if curProvider.anime_only and not show.is_anime:
            logger.log(u"" + str(show.name) + " is not an anime, skipping", logger.DEBUG)
            continue

        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

        foundResults[curProvider.name] = {}

        searchCount = 0
        search_mode = curProvider.search_mode

        # at most two passes: the provider's preferred mode first, then the
        # other mode (episode <-> season pack) as a fallback if allowed
        while(True):
            searchCount += 1

            if search_mode == 'eponly':
                logger.log(u"Performing episode search for " + show.name)
            else:
                logger.log(u"Performing season pack search for " + show.name)

            try:
                curProvider.cache._clearCache()
                searchResults = curProvider.find_search_results(show, episodes, search_mode, manual_search)
            except exceptions.AuthException as e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                # always restore the thread name set above
                threading.currentThread().name = origThreadName

            didSearch = True

            if len(searchResults):
                # make a list of all the results for this provider
                for curEp in searchResults:
                    # skip non-tv crap
                    searchResults[curEp] = filter(
                        lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show,
                        searchResults[curEp])

                    # NOTE(review): this membership test checks episode keys
                    # against the provider-name-keyed OUTER dict; it looks
                    # like it was meant to be
                    # `curEp in foundResults[curProvider.name]` -- confirm
                    # before changing
                    if curEp in foundResults:
                        foundResults[curProvider.name][curEp] += searchResults[curEp]
                    else:
                        foundResults[curProvider.name][curEp] = searchResults[curEp]

                break
            elif not curProvider.search_fallback or searchCount == 2:
                break

            # flip to the other search mode for the second pass
            if search_mode == 'sponly':
                logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
                search_mode = 'eponly'
            else:
                logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
                search_mode = 'sponly'

        # skip to next provider if we have no results to process
        if not len(foundResults[curProvider.name]):
            continue

        anyQualities, bestQualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        bestSeasonResult = None
        if SEASON_RESULT in foundResults[curProvider.name]:
            # NOTE(review): three-argument call; the two-argument
            # pickBestResult(results, show) variants elsewhere in this file
            # would not accept the quality list -- confirm the active
            # signature matches
            bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
                                              anyQualities + bestQualities)

        highest_quality_overall = 0
        for cur_episode in foundResults[curProvider.name]:
            for cur_result in foundResults[curProvider.name][cur_episode]:
                if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
                    highest_quality_overall = cur_result.quality
        logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if bestSeasonResult:
            searchedSeasons = []
            searchedSeasons = [str(x.season) for x in episodes]

            # get the quality of the season nzb
            seasonQual = bestSeasonResult.quality
            logger.log(u"The quality of the season " + bestSeasonResult.provider.providerType + " is " +
                       Quality.qualityStrings[seasonQual], logger.DEBUG)

            myDB = db.DBConnection()
            # every episode number recorded for the searched seasons
            allEps = [int(x["episode"]) for x in
                      myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( " +
                                  ','.join(searchedSeasons) + " ) )", [show.indexerid])]
            logger.log(u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = %s AND season in %s]" %
                       (show.indexerid, ','.join(searchedSeasons)))
            logger.log(u"Episode list: " + str(allEps), logger.DEBUG)

            allWanted = True
            anyWanted = False
            for curEpNum in allEps:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, curEpNum, seasonQual):
                        allWanted = False
                    else:
                        anyWanted = True

            # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
            if allWanted and bestSeasonResult.quality == highest_quality_overall:
                logger.log(u"Every episode in this season is needed, downloading the whole " +
                           bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
                epObjs = []
                for curEpNum in allEps:
                    # NOTE(review): `season` here is the leftover value from
                    # the wanted-check loop above -- confirm this is intended
                    # for multi-season searches
                    epObjs.append(show.getEpisode(season, curEpNum))
                bestSeasonResult.episodes = epObjs
                return [bestSeasonResult]

            elif not anyWanted:
                logger.log(u"No episodes from this season are wanted at this quality, ignoring the result of " +
                           bestSeasonResult.name, logger.DEBUG)

            else:
                if bestSeasonResult.provider.providerType == GenericProvider.NZB:
                    logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individualResults = nzbSplitter.splitResult(bestSeasonResult)
                    individualResults = filter(
                        lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show,
                        individualResults)

                    for curResult in individualResults:
                        # NOTE(review): epNum stays unbound if a split result
                        # has zero episodes -- confirm splitResult never
                        # produces that
                        if len(curResult.episodes) == 1:
                            epNum = curResult.episodes[0].episode
                        elif len(curResult.episodes) > 1:
                            epNum = MULTI_EP_RESULT

                        if epNum in foundResults[curProvider.name]:
                            foundResults[curProvider.name][epNum].append(curResult)
                        else:
                            foundResults[curProvider.name][epNum] = [curResult]

                # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
                    logger.log(u"Adding multi episode result for full season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
                    epObjs = []
                    for curEpNum in allEps:
                        epObjs.append(show.getEpisode(season, curEpNum))
                    bestSeasonResult.episodes = epObjs

                    epNum = MULTI_EP_RESULT
                    if epNum in foundResults[curProvider.name]:
                        foundResults[curProvider.name][epNum].append(bestSeasonResult)
                    else:
                        foundResults[curProvider.name][epNum] = [bestSeasonResult]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multiResults = {}
        if MULTI_EP_RESULT in foundResults[curProvider.name]:
            for multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:

                logger.log(u"Seeing if we want to bother with multi episode result " + multiResult.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
                                                                               multiResult.provider.name):
                    logger.log(multiResult.name + u" has previously failed, rejecting this multi episode result")
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                neededEps = []
                notNeededEps = []
                for epObj in multiResult.episodes:
                    epNum = epObj.episode
                    # if we have results for the episode
                    if epNum in foundResults[curProvider.name] and len(foundResults[curProvider.name][epNum]) > 0:
                        neededEps.append(epNum)
                    else:
                        notNeededEps.append(epNum)

                logger.log(u"Single episode check result is needed episodes: " + str(neededEps) +
                           ", not needed episodes: " + str(notNeededEps), logger.DEBUG)

                if not notNeededEps:
                    logger.log(u"All of these episodes were covered by single episode results, ignoring this multi episode result",
                               logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multiNeededEps = []
                multiNotNeededEps = []
                for epObj in multiResult.episodes:
                    epNum = epObj.episode
                    if epNum in multiResults:
                        multiNotNeededEps.append(epNum)
                    else:
                        multiNeededEps.append(epNum)

                logger.log(u"Multi episode check result is multi needed episodes: " + str(multiNeededEps) +
                           ", multi not needed episodes: " + str(multiNotNeededEps), logger.DEBUG)

                if not multiNeededEps:
                    logger.log(u"All of these episodes were covered by another multi episode nzb, ignoring this multi episode result",
                               logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for epObj in multiResult.episodes:
                    multiResults[epObj.episode] = multiResult

                # don't bother with the single result if we're going to get it with a multi result
                for epObj in multiResult.episodes:
                    epNum = epObj.episode
                    if epNum in foundResults[curProvider.name]:
                        logger.log(u"A needed multi episode result overlaps with a single episode result for episode #" +
                                   str(epNum) + ", removing the single episode results from the list", logger.DEBUG)
                        del foundResults[curProvider.name][epNum]

        # of all the single ep results narrow it down to the best one for each episode
        finalResults += set(multiResults.values())
        for curEp in foundResults[curProvider.name]:
            if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if len(foundResults[curProvider.name][curEp]) == 0:
                continue

            bestResult = pickBestResult(foundResults[curProvider.name][curEp], show)

            # if all results were rejected move on to the next episode
            if not bestResult:
                continue

            # filter out possible bad torrents from providers
            if bestResult.resultType == "torrent" and sickbeard.TORRENT_METHOD != "blackhole":
                bestResult.content = None
                if not bestResult.url.startswith('magnet'):
                    bestResult.content = bestResult.provider.get_url(bestResult.url)
                    if not bestResult.content:
                        continue

            # add the result if it's not a duplicate; an already-kept result
            # covering the same episode is dropped when its quality is lower
            found = False
            for i, result in enumerate(finalResults):
                for bestResultEp in bestResult.episodes:
                    if bestResultEp in result.episodes:
                        if result.quality < bestResult.quality:
                            finalResults.pop(i)
                        else:
                            found = True
            if not found:
                finalResults += [bestResult]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wantedEpCount = 0
        for wantedEp in episodes:
            for result in finalResults:
                if wantedEp in result.episodes and isFinalResult(result):
                    wantedEpCount += 1

        # make sure we search every provider for results unless we found everything we wanted
        if wantedEpCount == len(episodes):
            break

    if not didSearch:
        logger.log(u"No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.",
                   logger.ERROR)

    return finalResults
def isFinalResult(result):
    """
    Decide whether searching can stop once this result has been found.

    Returns True only when the result's link has not been downloaded before
    and its quality tops the relevant any/best quality lists of the show.
    """
    logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)

    show_obj = result.episodes[0].show
    myDB = db.DBConnection()

    # every episode id recorded for this show
    episode_ids = [row[0] for row in
                   myDB.select("SELECT episode_id from tv_episodes where showid=?", [show_obj.tvdbid])]

    # every link already downloaded for those episodes
    links = []
    for episode_id in episode_ids:
        for row in myDB.select("SELECT link from episode_links where episode_id =?", [episode_id]):
            links.append(row[0])

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # locate the link that identifies this result, wherever the provider put it
    eplink = ""
    item = result.item if hasattr(result, 'item') else None
    if item is not None and hasattr(item, 'nzburl'):
        eplink = item.nzburl
    elif item is not None and hasattr(item, 'url'):
        eplink = item.url
    elif hasattr(result, 'nzburl'):
        eplink = result.nzburl
    elif hasattr(result, 'url'):
        eplink = result.url

    # a link seen before means the earlier download failed; keep looking
    if eplink in links:
        logger.log(eplink + " was already downloaded so let's continue searching assuming the download failed",
                   logger.DEBUG)
        return False

    # something strictly better could still be redownloaded
    if best_qualities and result.quality < max(best_qualities):
        return False

    # highest initial quality with nothing better above it
    if any_qualities and result.quality == max(any_qualities):
        return True

    # best redownload: good enough unless a higher initial download exists
    if best_qualities and result.quality == max(best_qualities):
        return not (any_qualities and result.quality < max(any_qualities))

    # not on the lists, lists empty, or below the highest wanted quality
    return False
def pickBestResult(results, show):
    """
    Pick the single best SearchResult for one episode.

    results: a SearchResult or a list of SearchResults to choose from.
    show: the TVShow object the results must belong to.

    Returns: the best surviving result, or None when every candidate was
    rejected (wrong show, banned group, unwanted quality, word filters,
    previously failed, or failed URL validation).
    """
    results = results if isinstance(results, list) else [results]

    logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

    bestResult = None

    # find the best result for the current episode
    for cur_result in results:
        if show and cur_result.show is not show:
            continue

        # build the black And white list
        if show.is_anime:
            if not show.release_groups.is_valid(cur_result):
                continue

        logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])

        anyQualities, bestQualities = Quality.splitQuality(show.quality)

        if cur_result.quality not in anyQualities + bestQualities:
            logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        if show.rls_ignore_words and show_name_helpers.containsAtLeastOneWord(cur_result.name,
                                                                             cur_result.show.rls_ignore_words):
            logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
                       logger.INFO)
            continue

        if show.rls_require_words and not show_name_helpers.containsAtLeastOneWord(cur_result.name,
                                                                                   cur_result.show.rls_require_words):
            logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
                       logger.INFO)
            continue

        if not show_name_helpers.filterBadReleases(cur_result.name, parse=False):
            logger.log(u"Ignoring " + cur_result.name +
                       " because its not a valid scene release that we want, ignoring it", logger.INFO)
            continue

        if hasattr(cur_result, 'size'):
            if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
                                                                           cur_result.provider.name):
                logger.log(cur_result.name + u" has previously failed, rejecting it")
                continue

        # Only request HEAD instead of downloading content here, and only after all other checks but before bestresult!
        # Otherwise we are spamming providers even when searching with cache only. We can validate now, and download later
        if len(cur_result.url) and cur_result.provider:
            cur_result.url = cur_result.provider.headURL(cur_result)
            if not len(cur_result.url):
                logger.log('Skipping %s, URL check failed. Bad result from provider.' % cur_result.name, logger.INFO)
                continue

        # BUGFIX: the original compared the result OBJECT against the list of
        # quality ints (`bestResult not in bestQualities`), which is always
        # True once bestResult is set, so any best-list result replaced the
        # current pick regardless of quality. Compare its quality instead.
        if cur_result.quality in bestQualities and (
                not bestResult or bestResult.quality < cur_result.quality or
                bestResult.quality not in bestQualities):
            bestResult = cur_result
        elif cur_result.quality in anyQualities and (
                not bestResult or bestResult.quality not in bestQualities) and (
                not bestResult or bestResult.quality < cur_result.quality):
            bestResult = cur_result
        elif bestResult and bestResult.quality == cur_result.quality:
            # tie-breakers at equal quality: proper/repack beats plain,
            # non-internal beats internal, x264 beats xvid
            if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
                bestResult = cur_result
            elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
                bestResult = cur_result
            elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
                logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
                bestResult = cur_result

    if bestResult:
        logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return bestResult
break elif not curProvider.search_fallback or searchCount == 2: break if search_mode == 'sponly': logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...") search_mode = 'eponly' else: logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...") search_mode = 'sponly' # skip to next provider if we have no results to process if not len(foundResults[curProvider.name]): continue anyQualities, bestQualities = Quality.splitQuality(show.quality) # pick the best season NZB bestSeasonResult = None if SEASON_RESULT in foundResults[curProvider.name]: bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show, anyQualities + bestQualities) highest_quality_overall = 0 for cur_episode in foundResults[curProvider.name]: for cur_result in foundResults[curProvider.name][cur_episode]: if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
def pickBestResult(results, show):
    """
    Pick the single best SearchResult for one episode.

    results: a SearchResult or a list of SearchResults to choose from.
    show: the TVShow object the results must belong to.

    Returns: the best surviving result, or None when every candidate was
    rejected (wrong show, banned group, unwanted quality, word filters,
    previously failed, or failed URL validation).
    """
    results = results if isinstance(results, list) else [results]

    logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

    bestResult = None

    # find the best result for the current episode
    for cur_result in results:
        if show and cur_result.show is not show:
            continue

        # build the black And white list
        if show.is_anime:
            if not show.release_groups.is_valid(cur_result):
                continue

        logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])

        anyQualities, bestQualities = Quality.splitQuality(show.quality)

        if cur_result.quality not in anyQualities + bestQualities:
            logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        if show.rls_ignore_words and show_name_helpers.containsAtLeastOneWord(cur_result.name,
                                                                             cur_result.show.rls_ignore_words):
            logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
                       logger.INFO)
            continue

        if show.rls_require_words and not show_name_helpers.containsAtLeastOneWord(cur_result.name,
                                                                                   cur_result.show.rls_require_words):
            logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
                       logger.INFO)
            continue

        if not show_name_helpers.filterBadReleases(cur_result.name, parse=False):
            logger.log(u"Ignoring " + cur_result.name +
                       " because its not a valid scene release that we want, ignoring it", logger.INFO)
            continue

        if hasattr(cur_result, 'size'):
            if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
                                                                           cur_result.provider.name):
                logger.log(cur_result.name + u" has previously failed, rejecting it")
                continue

        # Only request HEAD instead of downloading content here, and only after all other checks but before bestresult!
        # Otherwise we are spamming providers even when searching with cache only. We can validate now, and download later
        if len(cur_result.url) and cur_result.provider:
            cur_result.url = cur_result.provider.headURL(cur_result)
            if not len(cur_result.url):
                # log the rejection instead of skipping silently, consistent
                # with the other headURL-validating implementation
                logger.log('Skipping %s, URL check failed. Bad result from provider.' % cur_result.name, logger.INFO)
                continue

        # BUGFIX: the original compared the result OBJECT against the list of
        # quality ints (`bestResult not in bestQualities`), which is always
        # True once bestResult is set, so any best-list result replaced the
        # current pick regardless of quality. Compare its quality instead.
        if cur_result.quality in bestQualities and (
                not bestResult or bestResult.quality < cur_result.quality or
                bestResult.quality not in bestQualities):
            bestResult = cur_result
        elif cur_result.quality in anyQualities and (
                not bestResult or bestResult.quality not in bestQualities) and (
                not bestResult or bestResult.quality < cur_result.quality):
            bestResult = cur_result
        elif bestResult and bestResult.quality == cur_result.quality:
            # tie-breakers at equal quality: proper/repack beats plain,
            # non-internal beats internal, x264 beats xvid
            if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
                bestResult = cur_result
            elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
                bestResult = cur_result
            elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
                logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
                bestResult = cur_result

    if bestResult:
        logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return bestResult
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
               once it's snatched.

    Returns a bool representing success.
    """
    # NZBs can be sent straight to downloader or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            dlResult = nzbget.sendNZB(result)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    elif result.resultType == "torrent":
        # this is required for providers that use torrent cache (more than one possibility)
        # like Torrentz. Maybe convert result.url to an array in the future.
        if result.url.count(";") > 0:
            allUrls = result.url.split(";", 3)
            for url in allUrls:
                try:
                    # probe the mirror for reachability.
                    # BUGFIX: close the probe response so the connection is
                    # not leaked (the original never closed it)
                    response = urllib2.urlopen(url)
                    response.close()
                    result.url = url
                    break
                except Exception:
                    # best-effort: an unreachable mirror just means we try the next one
                    continue

        # torrents are always saved to disk
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        # torrents are sending to torrent client
        elif sickbeard.TORRENT_METHOD == "utorrent":
            dlResult = utorrent.sendTORRENT(result)
        elif sickbeard.TORRENT_METHOD == "transmission":
            dlResult = transmission.sendTORRENT(result)
        elif sickbeard.TORRENT_METHOD == "downloadstation":
            dlResult = downloadstation.sendDownload(result)
        elif sickbeard.TORRENT_METHOD == "deluge":
            dlResult = deluge.sendTORRENT(result)

    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    # treat any falsy sender return (False or None) as failure; the original
    # `dlResult == False` let a None slip through as success
    if not dlResult:
        return False

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()
        if not curEpObj.show.skip_notices and curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj.prettyName())

    return True
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object
                once it's snatched.

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for ep in result.episodes:
            if datetime.date.today() - ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # proper/repack releases carry their own snatched status
    if re.search('(^|[. _-])(proper|repack)([. _-]|$)', result.name, re.I) is not None:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if sickbeard.NZB_METHOD == 'blackhole':
            download_success = _download_result(result)
        elif sickbeard.NZB_METHOD == 'sabnzbd':
            download_success = sab.send_nzb(result)
        elif sickbeard.NZB_METHOD == 'nzbget':
            is_proper = (end_status == SNATCHED_PROPER)
            download_success = nzbget.send_nzb(result, is_proper)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
            download_success = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == 'torrent':
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == 'blackhole':
            download_success = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.get_client_instance(sickbeard.TORRENT_METHOD)()
            download_success = client.send_torrent(result)

    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        download_success = False

    if not download_success:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for ep_obj in result.episodes:
        with ep_obj.lock:
            if is_first_best_match(result):
                ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                ep_obj.status = Quality.compositeStatus(end_status, result.quality)
            sql_item = ep_obj.get_sql()
            if sql_item is not None:
                sql_l.append(sql_item)

        if ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
            update_imdb_data = update_imdb_data and ep_obj.show.load_imdb_info()

    if sql_l:
        db_conn = db.DBConnection()
        db_conn.mass_action(sql_l)

    return True
logger.log( u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) continue didSearch = True if not didSearch: logger.log( u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", logger.ERROR) finalResults = [] anyQualities, bestQualities = Quality.splitQuality(show.quality) # pick the best season NZB bestSeasonNZB = None if SEASON_RESULT in foundResults: bestSeasonNZB = pickBestResult(foundResults[SEASON_RESULT], anyQualities + bestQualities, show=show) highest_quality_overall = 0 for cur_season in foundResults: for cur_result in foundResults[cur_season]: if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality logger.log( u"The highest quality of any match is " +
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False,
                     old_status=None, scheduled=False):
    """
    Query every enabled backlog provider for the given episodes and narrow the
    candidates down to the best result per episode.

    :param show: TVShow object being searched for
    :param episodes: list of TVEpisode objects wanted
    :param manual_search: True when triggered by a user rather than a scheduler
    :param torrent_only: restrict the provider list to torrent providers
    :param try_other_searches: passed through to each provider's find_search_results
    :param old_status: optional prior composite status of the first episode, used to
                       decide whether only "best" qualities should be considered
    :param scheduled: True when run from the scheduled backlog (providers can opt out)
    :return: list of SearchResult objects (possibly a single whole-season result)
    """
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    # when the episode already has a meaningful status (not wanted/failed/etc.),
    # restrict later picking to the "best" quality list only
    use_quality_list = None
    if any([episodes]):
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT) and
                     (not scheduled or x.enable_scheduled_backlog)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        # tag the thread name with the provider for log readability; restored in finally
        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = getattr(cur_provider, 'search_mode', 'eponly')

        # run at most two passes: the provider's preferred mode, then the fallback mode
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False, indexer_lookup=False) and ep_item.show == show,
                        search_results[cur_ep])

                    # NOTE(review): this tests the top-level dict (keyed by provider id)
                    # rather than found_results[provider_id] (keyed by episode number),
                    # so the append branch looks unreachable — verify intent
                    if cur_ep in found_results:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not getattr(cur_provider, 'search_fallback', False) or 2 == search_count:
                break

            # flip eponly <-> sponly for the second pass
            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (
                Quality.qualityStrings[season_qual], best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT season, episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [(int(x['season']), int(x['episode'])) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                if not show.wantEpisode(ep_num[0], ep_num[1], season_qual):
                    all_wanted = False
                else:
                    any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' % (
                    best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False, indexer_lookup=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
                                                                                multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' % (
                    needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: ' +
                           '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps),
                           logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                                   '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
                                           filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                # deferred URL resolution: some providers hand back a callable instead of a final URL
                if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
                    best_result.url = best_result.get_data_func(best_result.url)
                    best_result.get_data_func = None  # consume only once
                    if not best_result.url:
                        continue

                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    # fetch the .torrent into the cache dir so it can be inspected/re-used
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        # walk the bencoded metadata looking for the real 'name' field so the
                        # release name can be re-validated against the word lists
                        name = None
                        try:
                            hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            while x < len(td):
                                y = x + v
                                name = 'name' == td[x: y]
                                w = re.findall('((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y: y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x: x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name, indexer_lookup=False):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent providers in Media Providers/Options are allowed for active searching',
                   logger.WARNING)
    elif not search_done:
        logger.log('Failed active search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
               once it's snatched (upgraded to SNATCHED_PROPER for proper/repack releases).

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # proper/repack releases get a distinct status so they can replace an earlier snatch
    if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # magnet links carry no payload; otherwise fetch the .torrent file content
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    # record the snatch so a later failure of this release can be recognised
    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    # FIX: notify unconditionally; this was previously inside an 'else' of the
    # USE_FAILED_DOWNLOADS check, silently suppressing the snatch notification
    # whenever failed-download handling was enabled (sibling implementations
    # of this function always notify)
    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """
    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            dlResult = nzbget.sendNZB(result)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    elif result.resultType == "torrent":
        # this is required for providers that use torrent cache (more than one possibility)
        # like Torrentz. Maybe convert result.url to an array in the future.
        if result.url.count(";") > 0:
            allUrls = result.url.split(";", 3)
            # probe each candidate mirror and keep the first one that answers
            for url in allUrls:
                try:
                    urllib2.urlopen(url)
                    result.url = url
                    break
                except Exception:
                    continue

        # torrents are always saved to disk
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        # torrents are sending to torrent client
        elif sickbeard.TORRENT_METHOD == "utorrent":
            dlResult = utorrent.sendTORRENT(result)
        elif sickbeard.TORRENT_METHOD == "transmission":
            dlResult = transmission.sendTORRENT(result)
        elif sickbeard.TORRENT_METHOD == "downloadstation":
            dlResult = downloadstation.sendDownload(result)
        elif sickbeard.TORRENT_METHOD == "deluge":
            dlResult = deluge.sendTORRENT(result)
        else:
            # FIX: an unrecognised TORRENT_METHOD previously left dlResult unbound,
            # raising NameError at the failure check below
            logger.log(u"Unknown torrent method specified in config: " + sickbeard.TORRENT_METHOD, logger.ERROR)
            dlResult = False
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    # FIX: truthiness check instead of '== False' so a None return from any
    # send method is also treated as failure (matches the sibling implementations)
    if not dlResult:
        return False

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj.prettyName())

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
               once it's snatched (upgraded to SNATCHED_PROPER for proper/repack releases).

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # proper/repack releases get a distinct status so they can replace an earlier snatch
    if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # make sure we have the torrent file content
            if not result.content:
                if not result.url.startswith('magnet'):
                    result.content = result.provider.getURL(result.url)
                    if not result.content:
                        logger.log(u"Torrent content failed to download from " + result.url, logger.ERROR)
                        # FIX: bail out instead of falling through and handing an
                        # empty result to the torrent client (sibling implementations
                        # return False here)
                        return False

            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    # record the snatch so a later failure of this release can be recognised
    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            # collect per-episode DB updates and flush them in one mass_action below
            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
               once it's snatched (upgraded to SNATCHED_PROPER for proper/repack releases).

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # proper/repack releases get a distinct status so they can replace an earlier snatch
    if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # a magnet link needs no content; anything else must already carry the payload
            if result.content or result.url.startswith('magnet'):
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                dlResult = client.sendTORRENT(result)
            else:
                logger.log(u"Torrent file content is empty", logger.ERROR)
                dlResult = False
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    # record the snatch so a later failure of this release can be recognised
    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            # collect per-episode DB updates and flush them in one mass_action below
            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(
                curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN') + " from " + result.provider.name)
            trakt_data.append((curEpObj.season, curEpObj.episode))

    # optionally mirror the snatched episodes to the user's Trakt watchlist
    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)
    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST:
        logger.log(
            u"Add episodes, showid: indexerid " + str(result.show.indexerid) + ", Title " +
            str(result.show.name) + " to Traktv Watchlist", logger.DEBUG)
        if data:
            notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data, update="add")

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # optionally refresh show metadata after a snatch, but only for shows that
    # are still airing and not already queued for an update
    if sickbeard.UPDATE_SHOWS_ON_SNATCH and not sickbeard.showQueueScheduler.action.isBeingUpdated(
            result.show) and result.show.status == "Continuing":
        try:
            sickbeard.showQueueScheduler.action.updateShow(result.show, True)
        except exceptions.CantUpdateException as e:
            logger.log("Unable to update show: {0}".format(str(e)), logger.DEBUG)

    return True
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False):
    """
    Query every enabled backlog provider for the given episodes and narrow the
    candidates down to the best result per episode.

    :param show: TVShow object being searched for
    :param episodes: list of TVEpisode objects wanted
    :param manual_search: True when triggered by a user rather than a scheduler
    :param torrent_only: restrict the provider list to torrent providers
    :param try_other_searches: passed through to each provider's find_search_results
    :return: list of SearchResult objects (possibly a single whole-season result)
    """
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    provider_list = [
        x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and (
            not torrent_only or x.providerType == GenericProvider.TORRENT)
    ]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        # tag the thread name with the provider for log readability; restored in finally
        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # run at most two passes: the provider's preferred mode, then the fallback mode
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(
                    show, episodes, search_mode, manual_search, try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join([
                        '%s %s candidate%s' % (len(v),
                                               (('multiep', 'season')[SEASON_RESULT == k],
                                                'episode')['ep' in search_mode],
                                               helpers.maybe_plural(len(v)))
                        for (k, v) in search_results.iteritems()
                    ]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False) and ep_item.show == show,
                        search_results[cur_ep])

                    # NOTE(review): this tests the top-level dict (keyed by provider id)
                    # rather than found_results[provider_id] (keyed by episode number),
                    # so the append branch looks unreachable — verify intent
                    if cur_ep in found_results:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # flip eponly <-> sponly for the second pass
            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(
                found_results[provider_id][SEASON_RESULT], show, any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(
            u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
            logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(
                u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual],
                                                         best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            # NOTE(review): episode numbers are checked against every searched season
            # (a cross product), since the query above drops the season column —
            # with multiple seasons this can mis-evaluate wantedness; the later
            # revision of this function pairs (season, episode) instead
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(
                    u'Every episode in this season is needed, downloading the whole %s %s' %
                    (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(
                    u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                    best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
                    logger.log(
                        u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                        u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result %s' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(
                        multi_result.name, multi_result.size, multi_result.provider.name):
                    logger.log(u'%s has previously failed, rejecting this multi episode result' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(
                    u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
                    (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(
                        u'All of these episodes were covered by single episode results, ' +
                        'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(
                    u'Multi episode check result is... multi needed episodes: ' +
                    '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps),
                    logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(
                        u'All of these episodes were covered by another multi episode nzb, ' +
                        'ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(
                            u'A needed multi episode result overlaps with a single episode result for episode ' +
                            '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    td = best_result.provider.get_url(best_result.url)
                    if not td:
                        continue
                    if getattr(best_result.provider, 'chk_td', None):
                        # walk the bencoded metadata looking for the real 'name' field so the
                        # release name can be re-validated against the word lists
                        name = None
                        try:
                            hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            # bounded scan: examine at most 12 keys of the dictionary
                            for item in range(0, 12):
                                y = x + v
                                name = 'name' == td[x:y]
                                w = re.findall('((?:i\d+e|d|l)?(\d+):)', td[y:y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x:x + v]
                                    break
                        except:
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name):
                                logger.log(u'Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log(
            'No NZB/Torrent sources enabled in Search Provider options to do backlog searches',
            logger.WARNING)
    elif not search_done:
        logger.log(
            'Failed backlog search of %s enabled provider%s. More info in debug log.' %
            (len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)

    return final_results
dlResult = downloadstation.sendDownload(result) elif sickbeard.TORRENT_METHOD == "deluge": dlResult = deluge.sendTORRENT(result) else: logger.log(u"Unknown result type, unable to download it", logger.ERROR) dlResult = False if dlResult == False: return False history.logSnatch(result) # don't notify when we re-download an episode for curEpObj in result.episodes: with curEpObj.lock: curEpObj.status = Quality.compositeStatus(endStatus, result.quality) curEpObj.saveToDB() if curEpObj.status not in Quality.DOWNLOADED: notifiers.notify_snatch(curEpObj.prettyName()) return True def searchForNeededEpisodes(): logger.log(u"Searching all providers for any needed episodes") foundResults = {} didSearch = False
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    Dispatches the result to the configured NZB or torrent method, then records the
    snatch in history, updates each episode's status in the DB and fires notifications.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    Returns a bool representing success.
    """
    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # a proper/repack release name upgrades the target status
    if None is not re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I):
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            is_proper = True if SNATCHED_PROPER == end_status else False
            dl_result = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        # torrents are saved to disk when blackhole mode
        if 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content (magnet links carry no file payload)
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dl_result = client.sendTORRENT(result)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        # record snatch so the failed-download handler can match it later
        failed_history.logSnatch(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
            # get_sql() may return None when there is nothing to persist
            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

        # refresh IMDb info once per snatch; short-circuits after the first failure
        update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        # batch the per-episode status writes into one DB transaction
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    Handles NZB dispatch (blackhole/SABnzbd/NZBGet), lazy torrent-data fetching via
    ``result.get_data_func``, magnet blackhole files, and torrent client hand-off,
    then logs history, updates episode statuses and fires notifications.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    Returns a bool representing success.
    """
    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # properlevel is set by the parser; any positive level means proper/repack
    if 0 < result.properlevel:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            dl_result = nzbget.send_nzb(result)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        # some providers resolve the real download URL lazily at snatch time
        if not result.url.startswith('magnet') and None is not result.get_data_func:
            result.url = result.get_data_func(result.url)
            result.get_data_func = None  # consume only once
        if not result.url:
            return False
        # 'magnet-' prefixed URLs are queued to a text file for blackhole pickup
        if not result.content and result.url.startswith('magnet-'):
            if sickbeard.TORRENT_DIR:
                filepath = ek.ek(os.path.join, sickbeard.TORRENT_DIR, 'files.txt')
                try:
                    with open(filepath, 'a') as fh:
                        # strip the 'magnet-' marker before writing the real magnet URI
                        result.url = result.url[7:]
                        fh.write('"%s"\t"%s"\n' % (result.url, sickbeard.TV_DOWNLOAD_DIR))
                    dl_result = True
                except IOError:
                    logger.log(u'Failed to write to %s' % filepath, logger.ERROR)
                    return False
            else:
                logger.log(u'Need to set a torrent blackhole folder', logger.ERROR)
                return False
        # torrents are saved to disk when blackhole mode
        elif 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content (magnet links carry no file payload)
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if result.provider.should_skip() or not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.get_client_instance(sickbeard.TORRENT_METHOD)()
            dl_result = client.send_torrent(result)

            # clean up any cached copy of the torrent once the client has it
            if getattr(result, 'cache_file', None):
                helpers.remove_file_failed(result.cache_file)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        # record snatch so the failed-download handler can match it later
        failed_history.add_snatched(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(cur_ep_obj.status, result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
            # get_sql() may return None when there is nothing to persist
            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

        # refresh IMDb info once per snatch; short-circuits after the first failure
        update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        # batch the per-episode status writes into one DB transaction
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that has been found.

    Dispatches the result to the configured NZB or torrent method, then logs the snatch,
    updates each episode's status/audio language in the DB, fires notifications, syncs
    the Trakt watchlist and optionally queues a show update.

    :param result: SearchResult instance to be snatched
    :param endStatus: the episode status that should be used for the episode object once it's snatched
    :return: bool representing success
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # a proper/repack release name upgrades the target status
    # (raw string + identity comparison instead of the original `!= None`)
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # magnet links carry no file payload, so content may legitimately be empty
            if result.content or result.url.startswith('magnet'):
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                dlResult = client.sendTORRENT(result)
            else:
                logger.log(u"Torrent file content is empty", logger.ERROR)
                dlResult = False
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        # record snatch so the failed-download handler can match it later
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.audio_langs = result.audio_lang
            # get_sql() may return None when there is nothing to persist;
            # guard so mass_action never receives a None entry
            item = curEpObj.get_sql()
            if item is not None:
                sql_l.append(item)

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN') + " from " + result.provider.name)
            trakt_data.append((curEpObj.season, curEpObj.episode))

    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)

    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST:
        logger.log(u"Add episodes, showid: indexerid " + str(result.show.indexerid) + ", Title " + str(result.show.name) + " to Traktv Watchlist", logger.DEBUG)
        if data:
            notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data, update="add")

    if len(sql_l) > 0:
        # batch the per-episode status writes into one DB transaction
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # optionally refresh show metadata after a snatch on a continuing show
    if sickbeard.UPDATE_SHOWS_ON_SNATCH and not sickbeard.showQueueScheduler.action.isBeingUpdated(result.show) and result.show.status == "Continuing":
        try:
            sickbeard.showQueueScheduler.action.updateShow(result.show, True)
        except exceptions.CantUpdateException as e:
            logger.log("Unable to update show: {0}".format(str(e)), logger.DEBUG)

    return True
def search_providers(show, episodes, manual_search=False):
    """
    Search all enabled backlog providers for the given episodes of a show.

    Runs episode and/or season-pack searches per provider (with fallback between the
    two modes), arbitrates season packs vs multi-episode vs single-episode results,
    and returns the best de-duplicated list of SearchResult objects.

    show: TVShow object to search for.
    episodes: list of TVEpisode objects that are wanted.
    manual_search: passed through to each provider's find_search_results.
    Returns a list of SearchResult objects (possibly empty).
    """
    found_results = {}
    final_results = []

    # flips to True once at least one provider search completed without raising
    search_done = False

    orig_thread_name = threading.currentThread().name

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        # tag the thread name with the provider for log readability
        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # up to two passes: the provider's preferred mode, then the other mode as fallback
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search)
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                # always restore the original thread name, even on error
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    # NOTE(review): relies on filter() returning a list (Python 2 semantics)
                    search_results[cur_ep] = filter(
                        lambda item: show_name_helpers.pass_wordlist_checks(item.name, parse=False) and item.show == show, search_results[cur_ep])

                    # NOTE(review): membership is tested on found_results (top level dict keyed by
                    # provider id), not found_results[provider_id] — looks like it should be the
                    # latter; confirm before changing, the else branch masks it in practice
                    if cur_ep in found_results:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # flip between 'eponly' and 'sponly' for the fallback pass
            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show, any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall], logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual], best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            # values are interpolated from internal ints, not user input
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                           (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' + best_season_result.name, logger.DEBUG)

            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(r.name, parse=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result %s' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multi_result.name, multi_result.size, multi_result.provider.name):
                    logger.log(u'%s has previously failed, rejecting this multi episode result' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' % (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: %s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode #%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
                best_result.content = None
                if not best_result.url.startswith('magnet'):
                    best_result.content = best_result.provider.get_url(best_result.url)
                    if not best_result.content:
                        continue

            # add result if its not a duplicate and
            found = False
            # NOTE(review): pops from final_results while enumerating it — index i can
            # skip/shift after a pop; confirm intent before restructuring
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not search_done:
        logger.log(u'No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.', logger.ERROR)

    return final_results