def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.

    Returns a bool representing success.
    """
    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            dlResult = nzbget.sendNZB(result)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # magnet links carry no payload to fetch up front
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    # FIX: was "dlResult == False", which treats any falsy-but-not-False
    # failure value (e.g. None returned by a client) as success.
    if not dlResult:
        return False

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Snatch (download or queue) a search result that has been found.

    result: SearchResult instance to be snatched.
    endStatus: episode status to assign to each episode once the snatch
        succeeds.

    Returns True on success, False otherwise.
    """
    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # anything that aired within the last week is bumped to high priority
        today = datetime.date.today()
        recent_cutoff = datetime.timedelta(days=7)
        if any(today - ep.airdate <= recent_cutoff for ep in result.episodes):
            result.priority = 1

    if result.resultType in ("nzb", "nzbdata"):
        # NZBs can be sent straight to SAB/NZBGet or saved to disk
        nzb_senders = {
            "blackhole": _downloadResult,
            "sabnzbd": sab.sendNZB,
            "nzbget": nzbget.sendNZB,
        }
        sender = nzb_senders.get(sickbeard.NZB_METHOD)
        if sender is None:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False
        else:
            dlResult = sender(result)
    elif result.resultType == "torrent":
        if sickbeard.TORRENT_METHOD == "blackhole":
            # torrents are saved to disk when blackhole mode
            dlResult = _downloadResult(result)
        else:
            # magnet links carry no payload to fetch up front
            if result.url.startswith('magnet'):
                result.content = None
            else:
                result.content = result.provider.getURL(result.url)
            torrent_client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = torrent_client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if dlResult is False:
        return False

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj.prettyName())
        with curEpObj.lock:
            curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.

    Returns a bool representing success.
    """
    # Results built elsewhere may lack resultType; bail out rather than crash.
    if hasattr(result, "resultType"):
        # NZBs can be sent straight to SAB or saved to disk
        if result.resultType in ("nzb", "nzbdata"):
            if sickbeard.NZB_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            elif sickbeard.NZB_METHOD == "sabnzbd":
                dlResult = sab.sendNZB(result)
            elif sickbeard.NZB_METHOD == "nzbget":
                dlResult = nzbget.sendNZB(result)
            else:
                logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
                dlResult = False

        # TORRENTs can be sent to clients or saved to disk
        elif result.resultType in ("torrent", "torrentdata"):
            # torrents are saved to disk when blackhole mode
            if sickbeard.TORRENT_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            else:
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                if hasattr(result, "extraInfo") and result.resultType == "torrentdata":
                    # raw torrent payload was fetched earlier and stashed here
                    result.content = result.extraInfo[0]
                dlResult = client.sendTORRENT(result)
        else:
            logger.log(u"Unknown result type, unable to download it", logger.ERROR)
            dlResult = False

        # FIX: was "dlResult == False", which treats a falsy-but-not-False
        # failure value (e.g. None from a client) as success.
        if not dlResult:
            return False

        history.logSnatch(result)

        # don't notify when we re-download an episode
        for curEpObj in result.episodes:
            with curEpObj.lock:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
                curEpObj.audio_langs = result.audio_lang
                curEpObj.saveToDB()

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj.prettyName())

        return True
    else:
        return False
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.

    Returns a bool representing success.
    """
    # Guard against results that were never given a resultType attribute.
    if hasattr(result, 'resultType'):
        # NZBs can be sent straight to SAB or saved to disk
        if result.resultType in ("nzb", "nzbdata"):
            if sickbeard.NZB_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            elif sickbeard.NZB_METHOD == "sabnzbd":
                dlResult = sab.sendNZB(result)
            elif sickbeard.NZB_METHOD == "nzbget":
                dlResult = nzbget.sendNZB(result)
            else:
                logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
                dlResult = False

        # TORRENTs can be sent to clients or saved to disk
        elif result.resultType in ("torrent", "torrentdata"):
            # torrents are saved to disk when blackhole mode
            if sickbeard.TORRENT_METHOD == "blackhole":
                dlResult = _downloadResult(result)
            else:
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                if hasattr(result, 'extraInfo') and result.resultType == "torrentdata":
                    # raw torrent payload was fetched earlier and stashed here
                    result.content = result.extraInfo[0]
                dlResult = client.sendTORRENT(result)
        else:
            logger.log(u"Unknown result type, unable to download it", logger.ERROR)
            dlResult = False

        # FIX: was "dlResult == False", which treats a falsy-but-not-False
        # failure value (e.g. None from a client) as success.
        if not dlResult:
            return False

        history.logSnatch(result)

        # don't notify when we re-download an episode
        for curEpObj in result.episodes:
            with curEpObj.lock:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
                curEpObj.audio_langs = result.audio_lang
                curEpObj.saveToDB()

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj.prettyName())

        return True
    else:
        return False
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # FIX: raw string (the '\.' escape raises DeprecationWarning in a normal
    # string on Python 3.6+) and "is not None" instead of "!= None" (PEP 8).
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            # FIX: "True if X else False" is redundant; X is already a bool
            is_proper = endStatus == SNATCHED_PROPER
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # make sure we have the torrent file content
            if not result.content:
                if not result.url.startswith('magnet'):
                    result.content = result.provider.getURL(result.url)
                    if not result.content:
                        # NOTE(review): on a failed download the result is still
                        # handed to the client below — presumably the client
                        # re-fetches from the URL; confirm before tightening.
                        logger.log(u"Torrent content failed to download from " + result.url, logger.ERROR)

            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    return True
def snatchEpisode(result, endStatus=SNATCHED):  # pylint: disable=too-many-branches, too-many-statements
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    :param result: SearchResult instance to be snatched.
    :param endStatus: the episode status that should be used for the episode
        object once it's snatched.
    :return: boolean, True on success
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    # some providers tag torrents as generic results; reclassify by URL shape
    if result.url.startswith('magnet') or result.url.endswith('torrent'):
        result.resultType = 'torrent'

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            # FIX: "True if X else False" is redundant; X is already a bool
            is_proper = endStatus == SNATCHED_PROPER
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # Torrents can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # magnet links need no payload; otherwise fetch the .torrent first
            if not result.content and not result.url.startswith('magnet'):
                if result.provider.login():
                    result.content = result.provider.get_url(result.url, returns='content')

            if result.content or result.url.startswith('magnet'):
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                dlResult = client.sendTORRENT(result)
            else:
                logger.log(u"Torrent file content is empty", logger.WARNING)
                dlResult = False
    else:
        logger.log(u"Unknown result type, unable to download it ({0!r})".format(result.resultType), logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            try:
                notifiers.notify_snatch("{0} from {1}".format(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'),
                                                              result.provider.name))  # pylint: disable=protected-access
            except Exception:
                # Without this, when notification fail, it crashes the snatch
                # thread and SR will keep snatching until notification is sent
                logger.log(u"Failed to send snatch notification", logger.DEBUG)

            trakt_data.append((curEpObj.season, curEpObj.episode))

    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)

    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST:
        logger.log(u"Add episodes, showid: indexerid " + str(result.show.indexerid) + ", Title " +
                   str(result.show.name) + " to Traktv Watchlist", logger.DEBUG)
        if data:
            notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data, update="add")

    if sql_l:
        main_db_con = db.DBConnection()
        main_db_con.mass_action(sql_l)

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # FIX: raw string (the '\.' escape raises DeprecationWarning in a normal
    # string on Python 3.6+) and "is not None" instead of "!= None" (PEP 8).
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            # FIX: "True if X else False" is redundant; X is already a bool
            is_proper = endStatus == SNATCHED_PROPER
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # magnet links carry no payload to fetch up front
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)
    else:
        ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.saveToDB()

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    return True
def findNeededEpisodes(self, episodes, manualSearch=False):
    """
    Look up cached provider results for the given episodes.

    episodes: list of TVEpisode objects to look up in this provider's cache
        table.
    manualSearch: passed through to wantEpisode() quality checks.

    Returns a dict mapping each episode object to a list of SearchResults.
    """
    neededEps = {}

    # FIX: the DB handle is loop-invariant; open it once instead of once per
    # episode.
    myDB = self._getDB()
    for epObj in episodes:
        sqlResults = myDB.select(
            "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
            [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"])

        # for each cache entry
        for curResult in sqlResults:

            # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
            if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
                continue

            # get the show object, or if it's not one of our shows then ignore it
            try:
                showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
            except MultipleShowObjectsException:
                showObj = None

            if not showObj:
                continue

            # get season and ep data (ignoring multi-eps for now)
            curSeason = int(curResult["season"])
            if curSeason == -1:
                continue
            curEp = curResult["episodes"].split("|")[1]
            if not curEp:
                continue
            curEp = int(curEp)
            curQuality = int(curResult["quality"])
            curReleaseGroup = curResult["release_group"]
            curVersion = curResult["version"]

            # if the show says we want that episode then add it to the list
            if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
                logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
                           Quality.qualityStrings[curQuality], logger.DEBUG)
                continue

            # build a result object
            title = curResult["name"]
            url = curResult["url"]

            logger.log(u"Found result " + title + " at " + url)

            result = self.provider.getResult([epObj])
            result.show = showObj
            result.url = url
            result.name = title
            result.quality = curQuality
            result.release_group = curReleaseGroup
            result.version = curVersion
            result.content = None

            # validate torrent file if not magnet link to avoid invalid torrent links
            if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT:
                if sickbeard.TORRENT_METHOD != "blackhole":
                    client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                    result = client._get_torrent_hash(result)
                    if not result.hash:
                        logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
                        continue

            # add it to the list
            if epObj not in neededEps:
                neededEps[epObj] = [result]
            else:
                neededEps[epObj].append(result)

    # datetime stamp this search so cache gets cleared
    self.setLastSearch()

    return neededEps
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found (torrent-only variant).

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.

    Returns a bool representing success.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # FIX: raw string (the '\.' escape raises DeprecationWarning in a normal
    # string on Python 3.6+) and "is not None" instead of "!= None" (PEP 8).
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    if result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # Sets per provider seed ratio
            result.ratio = result.provider.seedRatio()
            # magnet links carry no payload to fetch up front
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    if sql_l:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    return True
def findNeededEpisodes(self, episodes, manualSearch=False):
    """
    Return cached provider results that match the requested episodes.

    episodes: list of TVEpisode objects to look up in this provider's cache
        table.
    manualSearch: forwarded to wantEpisode() so manual searches can relax the
        quality check.

    Returns a dict mapping each episode object to a list of SearchResults.
    """
    neededEps = {}

    # FIX: hoisted out of the loop — the DB handle does not depend on the
    # episode, so there is no reason to reopen it on every iteration.
    myDB = self._getDB()
    for epObj in episodes:
        sqlResults = myDB.select(
            "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
            [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"])

        # for each cache entry
        for curResult in sqlResults:

            # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
            if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
                continue

            # get the show object, or if it's not one of our shows then ignore it
            try:
                showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
            except MultipleShowObjectsException:
                showObj = None

            if not showObj:
                continue

            # get season and ep data (ignoring multi-eps for now)
            curSeason = int(curResult["season"])
            if curSeason == -1:
                continue
            curEp = curResult["episodes"].split("|")[1]
            if not curEp:
                continue
            curEp = int(curEp)
            curQuality = int(curResult["quality"])
            curReleaseGroup = curResult["release_group"]
            curVersion = curResult["version"]

            # if the show says we want that episode then add it to the list
            if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
                logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
                           Quality.qualityStrings[curQuality], logger.DEBUG)
                continue

            # build a result object
            title = curResult["name"]
            url = curResult["url"]

            logger.log(u"Found result " + title + " at " + url)

            result = self.provider.getResult([epObj])
            result.show = showObj
            result.url = url
            result.name = title
            result.quality = curQuality
            result.release_group = curReleaseGroup
            result.version = curVersion
            result.content = None

            # validate torrent file if not magnet link to avoid invalid torrent links
            if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT:
                if sickbeard.TORRENT_METHOD != "blackhole":
                    client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                    result = client._get_torrent_hash(result)
                    if not result.hash:
                        logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
                        continue

            # add it to the list
            if epObj not in neededEps:
                neededEps[epObj] = [result]
            else:
                neededEps[epObj].append(result)

    # datetime stamp this search so cache gets cleared
    self.setLastSearch()

    return neededEps
def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
    """
    Search this provider (cache first, then live) for the given episodes.

    show: TVShow object the episodes belong to.
    season: scene season number being searched.
    episodes: list of TVEpisode objects wanted.
    search_mode: 'sponly' for season-pack searches, otherwise per-episode.
    manualSearch: forwarded to cache lookups and wantEpisode().

    Returns a dict keyed by episode number (or MULTI_EP_RESULT /
    SEASON_RESULT sentinels) mapping to lists of result objects.
    """
    self._checkAuth()
    self.show = show

    results = {}
    itemList = []

    searched_scene_season = None
    for epObj in episodes:
        # check cache for results
        cacheResult = self.cache.searchCache([epObj], manualSearch)
        if len(cacheResult):
            results.update({epObj.episode: cacheResult[epObj]})
            continue

        # skip if season already searched
        if len(episodes) > 1 and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        if len(episodes) > 1:
            # get season search results
            for curString in self._get_season_search_strings(epObj):
                itemList += self._doSearch(curString, search_mode, len(episodes))
        else:
            # get single episode search results
            for curString in self._get_episode_search_strings(epObj):
                itemList += self._doSearch(curString, 'eponly', len(episodes))

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality (best quality first, unknown quality last)
    if len(itemList):
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
        itemList += itemsUnknown if itemsUnknown else []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False, convert=True)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        # when True the result is cached for later instead of used now
        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            if search_mode == 'sponly' and len(parse_result.episode_numbers):
                logger.log(
                    u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and (
                        parse_result.season_number and parse_result.season_number != season) or (
                        not parse_result.season_number and season != 1):
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and (
                        parse_result.season_number != season or not [ep for ep in episodes if
                                                                     ep.scene_episode in parse_result.episode_numbers]):
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = season
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports shows: map the parsed air date back to a
            # season/episode pair via the local DB
            if not (parse_result.is_air_by_date or parse_result.is_sports):
                logger.log(
                    u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
                myDB = db.DBConnection()
                sql_results = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    logger.log(
                        u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                        logger.WARNING)
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch):
                wantEp = False
                break

        if not wantEp:
            logger.log(
                u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
                    quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.content = None
        result.version = version

        # bucket the result by episode count (single / multi-ep / full season)
        if len(epObj) == 1:
            epNum = epObj[0].episode
            logger.log(u"Single episode result.", logger.DEBUG)
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
                parse_result.episode_numbers), logger.DEBUG)
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            logger.log(u"Separating full season result to check for later", logger.DEBUG)

        # validate torrent file if not magnet link to avoid invalid torrent links
        if self.providerType == self.TORRENT:
            if sickbeard.TORRENT_METHOD != "blackhole":
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                result = client._get_torrent_hash(result)
                if not result.hash:
                    logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
                    continue

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        myDB = self.cache._getDB()
        myDB.mass_action(cl)

    return results
def process(self):
    """
    Post-process a given file.

    Pipeline: validate the path, look up show/season/episode/quality/version
    info, decide whether an existing file may be replaced, delete the old
    file(s), update episode records, move/copy/link the new file into the
    show directory, then fire metadata, history and notifier updates.

    Returns True on success, False when processing is skipped (e.g. the
    existing file should be kept), and raises
    exceptions.PostProcessingFailed on hard errors.
    """
    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    # directories are handled by the caller, not per-file processing
    if ek.ek(os.path.isdir, self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    # skip files whose path contains a blacklisted substring
    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # reset the anidb episode object
    self.anidbEpisode = None

    # try to find the file info
    (show, season, episodes, quality, version) = self._find_info()
    if not show:
        self._log(
            u"This show isn't in your list, you need to add it to SB before post-processing an episode",
            logger.WARNING)
        raise exceptions.PostProcessingFailed()
    elif season == None or not episodes:
        self._log(
            u"Not enough information to determine what episode this is",
            logger.DEBUG)
        self._log(u"Quitting post-processing", logger.DEBUG)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(show, season, episodes)

    # get the quality of the episode we're processing
    if quality:
        self._log(
            u"Snatch history had a quality in it, using that: " +
            common.Quality.qualityStrings[quality], logger.DEBUG)
        new_ep_quality = quality
    else:
        new_ep_quality = self._get_quality(ep_obj)

    logger.log(
        u"Quality of the episode we're processing: " + str(new_ep_quality),
        logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # get the version of the episode we're processing
    if version:
        self._log(
            u"Snatch history had a version in it, using that: v" + str(version),
            logger.DEBUG)
        new_ep_version = version
    else:
        # -1 appears to mean "no version information" -- TODO confirm against _find_info callers
        new_ep_version = -1

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:
        # if there's an existing file that we don't want to replace stop here
        if existing_file_status == PostProcessor.EXISTS_LARGER:
            if self.is_proper:
                self._log(
                    u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace",
                    logger.DEBUG)
                return True
            else:
                self._log(
                    u"File exists and new file is smaller, marking it unsafe to replace",
                    logger.DEBUG)
                return False
        elif existing_file_status == PostProcessor.EXISTS_SAME:
            self._log(
                u"File exists and new file is same size, marking it unsafe to replace",
                logger.DEBUG)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(
            u"This download is marked a priority download so I'm going to replace an existing file if I find one",
            logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(
                    ek.ek(os.path.dirname, cur_ep.location),
                    keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed(
                "Unable to delete the existing files")

        # set the status of the episodes
        # for curEp in [ep_obj] + ep_obj.relatedEps:
        #     curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(
            os.path.isdir,
            ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)
            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed(
                "Unable to create the show directory: " + ep_obj.show._location)

        # get metadata for the show (but not episode because it hasn't been fully processed)
        ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            if self.release_name:
                self._log("Found release name " + self.release_name, logger.DEBUG)
                cur_ep.release_name = self.release_name
            else:
                cur_ep.release_name = ""

            # once the download completed, remove the torrent from the client and clear the stored hash
            if cur_ep.torrent_hash != '':
                client = clients.getClientIstance(
                    sickbeard.TORRENT_METHOD)()
                torrent_removed = client.remove_torrent_downloaded(
                    cur_ep.torrent_hash)
                if torrent_removed:
                    logger.log("Torrent removed correctly", logger.DEBUG)
                else:
                    self._log(
                        u"Error removing torrent from client, ids: " + cur_ep.torrent_hash,
                        logger.ERROR)
                cur_ep.torrent_hash = ''

            # episodes snatched as "best" are archived; everything else becomes DOWNLOADED
            if ep_obj.status in common.Quality.SNATCHED_BEST:
                cur_ep.status = common.Quality.compositeStatus(
                    common.ARCHIVED, new_ep_quality)
            else:
                cur_ep.status = common.Quality.compositeStatus(
                    common.DOWNLOADED, new_ep_quality)

            # reset subtitle tracking so subtitles get searched again for the new file
            cur_ep.subtitles = []
            cur_ep.subtitles_searchcount = 0
            cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
            cur_ep.is_proper = self.is_proper
            cur_ep.version = new_ep_version

            if self.release_group:
                cur_ep.release_group = self.release_group
            else:
                cur_ep.release_group = ""

            sql_l.append(cur_ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # Just want to keep this consistent for failed handling right now
    releaseName = show_name_helpers.determineReleaseName(
        self.folder_path, self.nzb_name)
    if releaseName is not None:
        failed_history.logSuccess(releaseName)
    else:
        self._log(u"Couldn't find release in snatch history", logger.WARNING)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(
            u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    if not helpers.make_dirs(dest_path):
        raise exceptions.PostProcessingFailed(
            u"Unable to create destination folder: " + dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    # add to anidb
    if ep_obj.show.is_anime and sickbeard.ANIDB_USE_MYLIST:
        self._add_to_anidb_mylist(self.file_path)

    try:
        # move the episode and associated files to the show dir
        if self.process_method == "copy":
            self._copy(self.file_path, dest_path, new_base_name,
                       sickbeard.MOVE_ASSOCIATED_FILES,
                       sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "move":
            self._move(self.file_path, dest_path, new_base_name,
                       sickbeard.MOVE_ASSOCIATED_FILES,
                       sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "hardlink":
            self._hardlink(
                self.file_path, dest_path, new_base_name,
                sickbeard.MOVE_ASSOCIATED_FILES,
                sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "symlink":
            self._moveAndSymlink(
                self.file_path, dest_path, new_base_name,
                sickbeard.MOVE_ASSOCIATED_FILES,
                sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        else:
            logger.log(
                u"Unknown process method: " + str(self.process_method),
                logger.ERROR)
            raise exceptions.PostProcessingFailed(
                "Unable to move the files to their new home")
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed(
            "Unable to move the files to their new home")

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                cur_ep.downloadSubtitles(force=True)

    # put the new location in the database
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            sql_l.append(cur_ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # set file modify stamp to show airdate
    if sickbeard.AIRDATE_EPISODES:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.airdateModifyStamp()

    # generate nfo/tbn
    ep_obj.createMetaFiles()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality,
                        self.release_group, new_ep_version)

    # send notifications
    notifiers.notify_download(
        ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
def findSearchResults(self, show, season, episodes, search_mode,
                      manualSearch=False):
    """
    Search this provider for the given episodes and return usable results.

    Checks the provider cache first, runs season-pack or single-episode
    searches as appropriate, sorts candidates by quality, filters out
    results that do not parse or that we do not want, and groups the
    survivors by episode number.

    :param show: show object being searched for
    :param season: season number being searched
    :param episodes: list of episode objects to find results for
    :param search_mode: search strategy flag (e.g. 'sponly'/'eponly')
    :param manualSearch: True when the search was user-initiated
    :return: dict mapping episode number (or MULTI_EP_RESULT /
        SEASON_RESULT sentinels) to a list of result objects
    """

    self._checkAuth()
    self.show = show

    results = {}
    itemList = []

    searched_scene_season = None
    for epObj in episodes:
        # check cache for results
        cacheResult = self.cache.searchCache([epObj], manualSearch)
        if len(cacheResult):
            results.update({epObj.episode: cacheResult[epObj]})
            continue

        # skip if season already searched
        if len(episodes) > 1 and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        if len(episodes) > 1:
            # get season search results
            for curString in self._get_season_search_strings(epObj):
                itemList += self._doSearch(curString, search_mode, len(episodes))
        else:
            # get single episode search results
            for curString in self._get_episode_search_strings(epObj):
                itemList += self._doSearch(curString, 'eponly', len(episodes))

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(itemList):
        # bucket items by quality, then flatten buckets from best quality down;
        # unknown-quality items go to the end of the list
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        itemList = list(
            itertools.chain(
                *[v for (k, v) in sorted(items.items(), reverse=True)]))
        itemList += itemsUnknown if itemsUnknown else []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False, convert=True)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(
                u"Unable to parse the filename " + title + " into a valid episode",
                logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(
                u"Unable to parse the filename " + title + " into a valid show",
                logger.DEBUG)
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        # addCacheEntry == True means "not usable now, but cache it for later"
        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            if search_mode == 'sponly' and len(parse_result.episode_numbers):
                logger.log(
                    u"This is supposed to be a season pack search but the result " +
                    title + " is not a valid season pack, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and (
                        parse_result.season_number and
                        parse_result.season_number != season) or (
                        not parse_result.season_number and season != 1):
                    logger.log(
                        u"The result " + title +
                        " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and (
                        parse_result.season_number != season or not [
                            ep for ep in episodes
                            if ep.scene_episode in parse_result.episode_numbers
                        ]):
                    logger.log(
                        u"The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = season
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports shows: map the parsed air date back to a
            # season/episode pair through the database
            if not (parse_result.is_air_by_date or parse_result.is_sports):
                logger.log(
                    u"This is supposed to be a date search but the result " +
                    title + " didn't parse as one, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal(
                ) if parse_result.air_date else parse_result.sports_air_date.toordinal(
                )
                myDB = db.DBConnection()
                sql_results = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    logger.log(
                        u"Tried to look up the date for the episode " + title +
                        " but the database didn't give proper results, skipping it",
                        logger.WARNING)
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch):
                wantEp = False
                break

        if not wantEp:
            logger.log(
                u"Ignoring result " + title +
                " because we don't want an episode that is " +
                Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.content = None
        result.version = version

        # pick the results-dict key: a single episode number, or a sentinel
        # for multi-episode and full-season results
        if len(epObj) == 1:
            epNum = epObj[0].episode
            logger.log(u"Single episode result.", logger.DEBUG)
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(
                u"Separating multi-episode result to check for later - result contains episodes: " +
                str(parse_result.episode_numbers), logger.DEBUG)
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            logger.log(u"Separating full season result to check for later",
                       logger.DEBUG)

        # validate torrent file if not magnet link to avoid invalid torrent links
        if self.providerType == self.TORRENT:
            if sickbeard.TORRENT_METHOD != "blackhole":
                client = clients.getClientIstance(
                    sickbeard.TORRENT_METHOD)()
                result = client._get_torrent_hash(result)
                if not result.hash:
                    logger.log(
                        u'Unable to get torrent hash for ' + title + ', skipping it',
                        logger.DEBUG)
                    continue

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        myDB = self.cache._getDB()
        myDB.mass_action(cl)

    return results
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    :param result: SearchResult instance to be snatched.
    :param endStatus: the episode status that should be used for the episode
        object once it's snatched.
    :return: boolean, True on success
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(
                    days=7):
                result.priority = 1

    # proper/repack releases get the dedicated snatched-proper status
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name,
                 re.I) is not None:
        endStatus = SNATCHED_PROPER

    # reclassify results whose URL is clearly a torrent/magnet link
    if result.url.startswith('magnet') or result.url.endswith('torrent'):
        result.resultType = 'torrent'

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(
                u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD,
                logger.ERROR)
            dlResult = False

    # Torrents can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # fetch the torrent file unless it's a magnet link
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url, need_bytes=True)

            if result.content or result.url.startswith('magnet'):
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                dlResult = client.sendTORRENT(result)
            else:
                logger.log(u"Torrent file content is empty", logger.WARNING)
                dlResult = False
    else:
        logger.log(
            u"Unknown result type, unable to download it (%r)" % result.resultType,
            logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(
                    SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(
                    endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

            if curEpObj.status not in Quality.DOWNLOADED:
                try:
                    notifiers.notify_snatch(
                        curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN') +
                        " from " + result.provider.name)
                # NOTE: narrowed from a bare except -- a bare except also
                # swallows SystemExit/KeyboardInterrupt. Exception is enough:
                # without this, when notification fails, it crashes the snatch
                # thread and SR will keep snatching until notification is sent
                except Exception:
                    logger.log(u"Failed to send snatch notification",
                               logger.DEBUG)

                trakt_data.append((curEpObj.season, curEpObj.episode))

    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)

    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST:
        logger.log(
            u"Add episodes, showid: indexerid " + str(result.show.indexerid) +
            ", Title " + str(result.show.name) + " to Traktv Watchlist",
            logger.DEBUG)
        if data:
            notifiers.trakt_notifier.update_watchlist(result.show,
                                                      data_episode=data,
                                                      update="add")

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    return True
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.
    """
    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(
                    days=7):
                result.priority = 1

    # proper/repack releases get the dedicated snatched-proper status.
    # NOTE: pattern made a raw string -- '\.' in a plain string literal is an
    # invalid escape sequence (warning in modern Python).
    if None is not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)',
                             result.name, re.I):
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            is_proper = True if SNATCHED_PROPER == end_status else False
            dl_result = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(
                u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD,
                logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        # torrents are saved to disk when blackhole mode
        if 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if not result.content:
                    logger.log(
                        u'Torrent content failed to download from %s' % result.url,
                        logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dl_result = client.sendTORRENT(result)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(result):
                cur_ep_obj.status = Quality.compositeStatus(
                    SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(
                    end_status, result.quality)
            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(
                cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

        # refresh IMDb info once per snatch; stop trying after a failure
        update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # proper/repack releases get the dedicated snatched-proper status.
    # NOTE: pattern made a raw string ('\.' in a plain string is an invalid
    # escape) and '!= None' replaced with 'is not None' (PEP 8).
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD,
                       logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            if result.content or result.url.startswith('magnet'):
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                dlResult = client.sendTORRENT(result)
            else:
                logger.log(u"Torrent file content is empty", logger.ERROR)
                dlResult = False
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)
            curEpObj.audio_langs = result.audio_lang
            sql_l.append(curEpObj.get_sql())

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(
                    curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN') +
                    " from " + result.provider.name)
                trakt_data.append((curEpObj.season, curEpObj.episode))

    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)

    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST:
        logger.log(u"Add episodes, showid: indexerid " + str(result.show.indexerid) +
                   ", Title " + str(result.show.name) + " to Traktv Watchlist",
                   logger.DEBUG)
        if data:
            notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data,
                                                      update="add")

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # optionally kick off a show update after a successful snatch
    if sickbeard.UPDATE_SHOWS_ON_SNATCH and not sickbeard.showQueueScheduler.action.isBeingUpdated(result.show) and result.show.status == "Continuing":
        try:
            sickbeard.showQueueScheduler.action.updateShow(result.show, True)
        except exceptions.CantUpdateException as e:
            logger.log("Unable to update show: {0}".format(str(e)), logger.DEBUG)

    return True
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object
        once it's snatched.
    """
    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1

    # proper/repack releases get the dedicated snatched-proper status.
    # NOTE: pattern made a raw string -- '\.' in a plain string literal is an
    # invalid escape sequence (warning in modern Python).
    if None is not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I):
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            is_proper = True if SNATCHED_PROPER == end_status else False
            dl_result = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD,
                       logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        # torrents are saved to disk when blackhole mode
        if 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url,
                               logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dl_result = client.sendTORRENT(result)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

        # refresh IMDb info once per snatch; stop trying after a failure
        update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True