def execute(self):
    generic_queue.QueueItem.execute(self)

    episodes = []

    # mark each failed release as bad and revert the episode before retrying
    for i, epObj in enumerate(self.episodes):
        (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, epObj.season, epObj.episode)
            failed_history.logFailed(release)
            history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)
            failed_history.revertEpisode(self.show, epObj.season, epObj.episode)

        episodes.append(epObj)

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

    try:
        logger.log(
            "Beginning failed download search for episodes from Season [" + str(self.episodes[0].season) + "]")

        searchResult = search.searchProviders(self.show, self.episodes[0].season, self.episodes, False, True)
        if searchResult:
            self.success = SearchQueue().snatch_item(searchResult)
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)
def execute(self):
    generic_queue.QueueItem.execute(self)

    for season, episode in self.segment.iteritems():
        epObj = self.show.getEpisode(season, episode)

        (release, provider) = failed_history.findRelease(self.show, season, episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, season, episode)
            failed_history.logFailed(release)
            history.logFailed(self.show.indexerid, season, episode, epObj.status, release, provider)
            failed_history.revertEpisode(self.show, season, episode)

    for season, episode in self.segment.iteritems():
        epObj = self.show.getEpisode(season, episode)

        if self.show.air_by_date:
            results = search.findSeason(self.show, str(epObj.airdate)[:7])
        else:
            results = search.findSeason(self.show, season)

        # download whatever we find
        for curResult in results:
            self.success = search.snatchEpisode(curResult)
            time.sleep(5)

    self.finish()
def execute(self):
    generic_queue.QueueItem.execute(self)

    results = []

    for season, episode in self.segment.iteritems():
        (release, provider) = failed_history.findRelease(self.show, season, episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, season, episode)
            failed_history.logFailed(release)
            failed_history.revertEpisode(self.show, season, episode)

        epObj = self.show.getEpisode(season, episode)
        result = search.findEpisode(epObj)
        if result:
            results.append(result)

    # download whatever we find
    for curResult in results:
        self.success = search.snatchEpisode(curResult)
        time.sleep(5)

    self.finish()
def run(self):
    generic_queue.QueueItem.run(self)

    try:
        logger.log(u"Marking episode as bad: [" + self.segment.prettyName() + "]")

        failed_history.markFailed(self.segment)

        (release, provider) = failed_history.findRelease(self.segment)
        if release:
            failed_history.logFailed(release)
            history.logFailed(self.segment, release, provider)

        failed_history.revertEpisode(self.segment)
        logger.log("Beginning failed download search for [" + self.segment.prettyName() + "]")

        searchResult = search.searchProviders(self.show, [self.segment], True)

        if searchResult:
            for result in searchResult:
                # just use the first result for now
                logger.log(u"Downloading " + result.name + " from " + result.provider.name)
                search.snatchEpisode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
        else:
            logger.log(u"No valid episode found to retry for [" + self.segment.prettyName() + "]")
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    if self.success is None:
        self.success = False

    self.finish()
def execute(self):
    generic_queue.QueueItem.execute(self)

    for season, episodes in self.segment.items():
        for epObj in episodes:
            logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            logger.log("Beginning failed download search for [" + epObj.prettyName() + "]")

            try:
                searchResult = search.searchProviders(self.show, season, [epObj], True)

                # reset thread back to original name
                threading.currentThread().name = self.thread_name

                if searchResult:
                    for result in searchResult:
                        # just use the first result for now
                        logger.log(u"Downloading " + result.name + " from " + result.provider.name)
                        search.snatchEpisode(result)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
                else:
                    logger.log(u"No valid episode found to retry for [" + epObj.prettyName() + "]")
            except Exception:
                logger.log(traceback.format_exc(), logger.DEBUG)
def execute(self):
    generic_queue.QueueItem.execute(self)

    failed_episodes = []

    for season in self.segment:
        epObj = self.segment[season]

        (release, provider) = failed_history.findRelease(epObj)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(epObj)
            failed_history.logFailed(release)
            history.logFailed(epObj, release, provider)
            failed_history.revertEpisode(epObj)
            failed_episodes.append(epObj)

            logger.log(
                "Beginning failed download search for [" + epObj.prettyName() + "]")

    if len(failed_episodes):
        try:
            searchResult = search.searchProviders(self, self.show, failed_episodes[0].season, failed_episodes, False, True)
            if searchResult:
                SearchQueue().snatch_item(searchResult)
            else:
                logger.log(u"No results returned from providers to retry for failed downloads!")
        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)
def execute(self):
    generic_queue.QueueItem.execute(self)

    episodes = []

    for i, epObj in enumerate(self.episodes):
        # convert indexer numbering to scene numbering for searches
        (self.episodes[i].scene_season, self.episodes[i].scene_episode) = sickbeard.scene_numbering.get_scene_numbering(
            self.show.indexerid, self.show.indexer, epObj.season, epObj.episode)

        logger.log(
            "Beginning failed download search for " + epObj.prettyName() + ' as ' + epObj.prettySceneName())

        (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, epObj.season, epObj.episode)
            failed_history.logFailed(release)
            history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)
            failed_history.revertEpisode(self.show, epObj.season, epObj.episode)

        episodes.append(epObj)

    # get search results
    results = search.searchProviders(self.show, episodes[0].season, episodes)

    # download whatever we find
    for curResult in results:
        self.success = search.snatchEpisode(curResult)
        time.sleep(5)

    self.finish()
def execute(self):
    generic_queue.QueueItem.execute(self)

    episodes = []

    for epObj in self.episodes:
        (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, epObj.season, epObj.episode)
            failed_history.logFailed(release)
            history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)
            failed_history.revertEpisode(self.show, epObj.season, epObj.episode)

        episodes.append(epObj)

    # get search results
    results = search.searchProviders(self.show, episodes[0].season, episodes)

    # download whatever we find
    for curResult in results:
        self.success = search.snatchEpisode(curResult)
        time.sleep(5)

    self.finish()
def execute(self):
    generic_queue.QueueItem.execute(self)

    if self.ep_obj:
        failed_history.revertEpisodes(self.show, self.ep_obj.season, [self.ep_obj.episode])
        failed_history.logFailed(self.ep_obj.release_name)

        foundEpisode = search.findEpisode(self.ep_obj, manualSearch=True)
        result = False

        if not foundEpisode:
            ui.notifications.message('No downloads were found',
                                     "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
            logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
        else:
            # just use the first result for now
            logger.log(u"Downloading episode from " + foundEpisode.url)
            result = search.snatchEpisode(foundEpisode)
            providerModule = foundEpisode.provider
            if not result:
                ui.notifications.error('Error while attempting to snatch ' + foundEpisode.name + ', check your logs')
            elif providerModule is None:
                ui.notifications.error('Provider is configured incorrectly, unable to download')

        self.success = result
    else:
        results = []
        myDB = db.DBConnection()

        if not self.show.air_by_date:
            sqlResults = myDB.select(
                "SELECT episode, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND status IN (" + ",".join([str(x) for x in common.Quality.FAILED]) + ")",
                [self.show.tvdbid, self.segment])
        else:
            segment_year, segment_month = map(int, self.segment.split('-'))
            min_date = datetime.date(segment_year, segment_month, 1)

            # it's easier to just hard code this than to worry about rolling the year over or making a month length map
            if segment_month == 12:
                max_date = datetime.date(segment_year, 12, 31)
            else:
                max_date = datetime.date(segment_year, segment_month + 1, 1) - datetime.timedelta(days=1)

            sqlResults = myDB.select(
                "SELECT episode, release_name FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ? AND status IN (" + ",".join([str(x) for x in common.Quality.FAILED]) + ")",
                [self.show.tvdbid, min_date.toordinal(), max_date.toordinal()])

        for result in sqlResults:
            failed_history.revertEpisodes(self.show, self.segment, [result["episode"]])
            failed_history.logFailed(result["release_name"])

        results = search.findSeason(self.show, self.segment)

        # download whatever we find
        for curResult in results:
            search.snatchEpisode(curResult)
            time.sleep(5)

    self.finish()
def run(self):
    """
    Run failed thread
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for epObj in self.segment:
            logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            logger.log(u"Beginning failed download search for: [" + epObj.prettyName() + "]")

        # If it is wanted, self.downCurQuality doesn't matter
        # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
        search_result = search.searchProviders(self.show, self.segment, True, False, False)

        if search_result:
            for result in search_result:
                # just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    logger.log(u"Downloading {0} with {1} seeders and {2} leechers from {3}".format(
                        result.name, result.seeders, result.leechers, result.provider.name))
                else:
                    logger.log(u"Downloading {0} from {1}".format(result.name, result.provider.name))

                self.success = search.snatchEpisode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
        else:
            logger.log(u"No needed episodes found during failed search for: [" + self.show.name + "]")
    except Exception:
        self.success = False
        logger.log(traceback.format_exc(), logger.DEBUG)

    # Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()
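# The fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) call above caps how much
# search history is kept around. The helper itself is not part of this section; the sketch
# below is a minimal assumption of what it could look like, based only on how it is called
# here (a module-level list plus a size cap).
def fifo(queue_history, item, max_size=100):
    # drop the oldest entry once the cap is reached, then record the newest search
    if len(queue_history) >= max_size:
        queue_history.pop(0)
    queue_history.append(item)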
def run(self):
    super(FailedQueueItem, self).run()
    self.started = True

    try:
        for epObj in self.segment:
            logger.log("Marking episode as bad: [" + epObj.pretty_name() + "]")

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            logger.log("Beginning failed download search for: [" + epObj.pretty_name() + "]")

        # If it is wanted, self.downCurQuality doesn't matter
        # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
        searchResult = search.searchProviders(self.show, self.segment, True, False)

        if searchResult:
            for result in searchResult:
                # just use the first result for now
                logger.log("Downloading " + result.name + " from " + result.provider.name)
                search.snatchEpisode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
        else:
            pass
            # logger.log(u"No valid episode found to retry for: [" + self.segment.pretty_name() + "]")
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    # Keep a list with the 100 last executed searches
    fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    super(FailedQueueItem, self).finish()
    self.finish()
def process(self):
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + self.dir_name + ")")

    releaseName = self._get_release_name()
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    parser = NameParser(False)
    fixed_name = show_name_helpers.trimRelease(releaseName)
    try:
        parsed = parser.parse(fixed_name)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + fixed_name, logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

    show_id = self._get_show_id(parsed.series_name)
    if show_id is None:
        self._log(u"Warning: couldn't find show ID", logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    self._log(u"Found show_id: " + str(show_id), logger.DEBUG)

    self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
    if self._show_obj is None:
        self._log(
            u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)",
            logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    self._log(u"Marking release as bad: " + releaseName)
    failed_history.logFailed(releaseName)
    self._revert_episode_statuses(parsed.season_number, parsed.episode_numbers)

    cur_backlog_queue_item = search_queue.BacklogQueueItem(self._show_obj, parsed.season_number)
    sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)

    return True
def run(self):
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for epObj in self.segment:
            logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            logger.log(u"Beginning failed download search for: [" + epObj.prettyName() + "]")

        # If it is wanted, self.downCurQuality doesn't matter
        # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
        searchResult = search.searchProviders(self.show, self.segment, True, False)

        if searchResult:
            for result in searchResult:
                # just use the first result for now
                logger.log(u"Downloading " + result.name + " from " + result.provider.name)
                search.snatchEpisode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
        else:
            pass
            # logger.log(u"No valid episode found to retry for: [" + self.segment.prettyName() + "]")
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    # Keep a list with the 100 last executed searches
    fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for epObj in self.segment:
            logger.log(u'Marking episode as bad: [' + epObj.prettyName() + ']')

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            logger.log('Beginning failed download search for: [' + epObj.prettyName() + ']')

        searchResult = search.searchProviders(self.show, self.segment, True)

        if searchResult:
            for result in searchResult:
                # just use the first result for now
                logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
                search.snatchEpisode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
        else:
            pass
            # logger.log(u"No valid episode found to retry for: [" + self.segment.prettyName() + "]")
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)
    finally:
        # Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
def process(self):
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    parser = NameParser(False)
    try:
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

    show_id = self._get_show_id(parsed.series_name)
    if show_id is None:
        self._log(u"Warning: couldn't find show ID", logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    self._log(u"Found show_id: " + str(show_id), logger.DEBUG)

    self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
    if self._show_obj is None:
        self._log(
            u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)",
            logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    # Revert before fail, as fail alters the history
    self._log(u"Reverting episodes...")
    self.log += failed_history.revertEpisodes(self._show_obj, parsed.season_number, parsed.episode_numbers)

    self._log(u"Marking release as bad: " + releaseName)
    self.log += failed_history.logFailed(releaseName)

    cur_backlog_queue_item = search_queue.BacklogQueueItem(self._show_obj, parsed.season_number)
    sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)

    return True
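# Both process() variants above are meant to be driven from post-processing once a download is
# reported as failed. The wrapper below is a hedged sketch of such a call site, not code from
# this section: only the self.dir_name / self.nzb_name attributes and the FailedProcessingFailed
# exception come from the code above; the module path, class name, and wrapper are assumptions.
def _handle_failed_download(dir_name, nzb_name):
    try:
        processor = failedProcessor.FailedProcessor(dir_name, nzb_name)
        return processor.process()
    except exceptions.FailedProcessingFailed:
        # process() raises when it cannot determine a release name or resolve the show
        return False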
def run(self):
    generic_queue.QueueItem.run(self)

    try:
        for season, episodes in self.segment.items():
            for epObj in episodes:
                logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")

                failed_history.markFailed(epObj)

                (release, provider) = failed_history.findRelease(epObj)
                if release:
                    failed_history.logFailed(release)
                    history.logFailed(epObj, release, provider)

                failed_history.revertEpisode(epObj)
                logger.log("Beginning failed download search for [" + epObj.prettyName() + "]")

                searchResult = search.searchProviders(self.show, season, [epObj], True)

                if searchResult:
                    for result in searchResult:
                        # just use the first result for now
                        logger.log(u"Downloading " + result.name + " from " + result.provider.name)
                        search.snatchEpisode(result)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
                else:
                    logger.log(u"No valid episode found to retry for [" + epObj.prettyName() + "]")
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for epObj in self.segment:
            logger.log(u'Marking episode as bad: [%s]' % epObj.prettyName())

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            logger.log(u'Beginning failed download search for: [%s]' % epObj.prettyName())

        search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                search.snatch_episode(result)

                helpers.cpu_sleep()
        else:
            pass
            # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)
    finally:
        # Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
def process(self, curProvider):
    episodes = []

    for i, epObj in enumerate(self.episodes):
        time.sleep(0.01)

        if epObj.show.air_by_date:
            logger.log("Beginning manual search for " + epObj.prettyABDName())
        else:
            logger.log(
                "Beginning failed download search for " + epObj.prettyName())

        (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, epObj.season, epObj.episode)
            failed_history.logFailed(release)
            history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)
            failed_history.revertEpisode(self.show, epObj.season, epObj.episode)

        episodes.append(epObj)

    return search.searchProviders(self.show, self.episodes[0].season, self.episodes, curProvider, False, False)
def execute(self):
    generic_queue.QueueItem.execute(self)

    results = False
    didSearch = False

    episodes = []

    for i, epObj in enumerate(self.episodes):
        time.sleep(1)

        logger.log(
            "Beginning failed download search for " + epObj.prettyName())

        (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
        if release:
            logger.log(u"Marking release as bad: " + release)
            failed_history.markFailed(self.show, epObj.season, epObj.episode)
            failed_history.logFailed(release)
            history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)
            failed_history.revertEpisode(self.show, epObj.season, epObj.episode)

        episodes.append(epObj)

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

    try:
        with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
            for provider in providers:
                didSearch = True
                executor.submit(
                    search.searchProviders, self, self.show, self.episodes[0].season, self.episodes,
                    provider, False, True).add_done_callback(snatch_results)

            executor.shutdown(wait=True)
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)
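# The executor-based variant above hands each provider search to a snatch_results callback via
# add_done_callback(). That callback is not defined anywhere in this section; the sketch below
# is an assumption of the minimal shape it could take (only the name comes from the code above).
def snatch_results(completed_future):
    # each future resolves to whatever search.searchProviders() returned for one provider
    for result in completed_future.result() or []:
        search.snatchEpisode(result)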
def execute(self):
    generic_queue.QueueItem.execute(self)

    if self.ep_obj:
        try:
            ep_release_name = failed_history.findRelease(
                self.show.tvdbid, self.ep_obj.season, self.ep_obj.episode)
            failed_history.revertEpisodes(self.show, self.ep_obj.season, [self.ep_obj.episode])
            failed_history.logFailed(ep_release_name)
        except Exception:
            pass

        foundEpisode = search.findEpisode(self.ep_obj, manualSearch=True)
        result = False

        if not foundEpisode:
            ui.notifications.message(
                'No downloads were found',
                "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
            logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
        else:
            # just use the first result for now
            logger.log(u"Downloading episode from " + foundEpisode.url)
            result = search.snatchEpisode(foundEpisode)
            providerModule = foundEpisode.provider
            if not result:
                ui.notifications.error(
                    'Error while attempting to snatch ' + foundEpisode.name + ', check your logs')
            elif providerModule is None:
                ui.notifications.error(
                    'Provider is configured incorrectly, unable to download')

        self.success = result
    else:
        results = []
        myDB = db.DBConnection()

        if not self.show.air_by_date:
            sqlResults = myDB.select(
                "SELECT episode, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND status IN (" + ",".join([str(x) for x in common.Quality.FAILED]) + ")",
                [self.show.tvdbid, self.segment])
        else:
            segment_year, segment_month = map(int, self.segment.split('-'))
            min_date = datetime.date(segment_year, segment_month, 1)

            # it's easier to just hard code this than to worry about rolling the year over or making a month length map
            if segment_month == 12:
                max_date = datetime.date(segment_year, 12, 31)
            else:
                max_date = datetime.date(segment_year, segment_month + 1, 1) - datetime.timedelta(days=1)

            sqlResults = myDB.select(
                "SELECT episode, release_name FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ? AND status IN (" + ",".join([str(x) for x in common.Quality.FAILED]) + ")",
                [self.show.tvdbid, min_date.toordinal(), max_date.toordinal()])

        for result in sqlResults:
            failed_history.revertEpisodes(self.show, self.segment, [result["episode"]])
            # log the release name stored with the failed episode rather than an undefined
            # ep_release_name, which only exists in the single-episode branch above
            failed_history.logFailed(result["release_name"])

        results = search.findSeason(self.show, self.segment)

        # download whatever we find
        for curResult in results:
            search.snatchEpisode(curResult)
            time.sleep(5)

    self.finish()
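# The DB-backed variants above build the first and last day of an air-by-date segment month by
# hand, special-casing December. The sketch below shows the same arithmetic using the standard
# library's calendar.monthrange(); it is an illustrative alternative, not code used by these
# functions.
import calendar
import datetime

def month_bounds(segment):
    # segment is a 'YYYY-MM' string, as used by the air_by_date branches above
    year, month = map(int, segment.split('-'))
    min_date = datetime.date(year, month, 1)
    # monthrange() returns (weekday of the first day, number of days in the month)
    max_date = datetime.date(year, month, calendar.monthrange(year, month)[1])
    return min_date, max_date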