def run(self):
    """Execute the queued backlog search for this show segment.

    For a manually triggered (non-standard) backlog, refresh the wanted/aired
    bookkeeping first; then query the providers, snatch each usable result,
    record the snatched episodes, and always log completion and finish.
    """
    generic_queue.QueueItem.run(self)
    had_exception = False
    try:
        # Only a manual backlog needs wanted/aired set here; the scheduled
        # (standard) backlog has this prepared before queueing.
        if not self.standard_backlog:
            ep_count, ep_count_scene = get_aired_in_season(self.show)
            for ep_obj in self.segment:
                set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)

        logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
        found = search.search_providers(
            self.show, self.segment, False,
            try_other_searches=(not self.standard_backlog or not self.limited_backlog),
            scheduled=self.standard_backlog)

        if not found:
            logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show.name)
        else:
            for result in found:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                if search.snatch_episode(result):
                    for ep in result.episodes:
                        self.snatched_eps.add(
                            (ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
                helpers.cpu_sleep()
    except (StandardError, Exception):
        had_exception = True
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        suffix = 'with a debug error ' if had_exception else ''
        logger.log('Completed backlog search %sfor: [%s]' % (suffix, self.show.name))
        self.finish()
def run(self):
    """Retry failed downloads for the segment.

    Each episode is recorded as failed in the failed-download history and its
    status reverted, then the providers are searched again and any replacement
    results are snatched. Always records the search in the manual-search FIFO
    and finishes the queue item.
    """
    generic_queue.QueueItem.run(self)
    self.started = True
    try:
        ep_count, ep_count_scene = get_aired_in_season(self.show)

        for ep_obj in self.segment:
            logger.log(u'Marking episode as bad: [%s]' % ep_obj.prettyName())

            # Record the failure and roll the episode status back before retrying.
            failed_history.set_episode_failed(ep_obj)
            release, provider = failed_history.find_release(ep_obj)
            failed_history.revert_episode(ep_obj)
            if release:
                failed_history.add_failed(release)
                history.log_failed(ep_obj, release, provider)

            logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())
            set_wanted_aired(ep_obj, True, ep_count, ep_count_scene, manual=True)

        results = search.search_providers(self.show, self.segment, True, try_other_searches=True)
        for result in results or []:
            # just use the first result for now
            logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
            if search.snatch_episode(result):
                for ep in result.episodes:
                    self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
            helpers.cpu_sleep()
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())
        if self.success is None:
            self.success = False
        self.finish()
def run(self):
    """Perform a user-requested search for a single episode.

    Searches all providers, snatches the first result when one is found,
    otherwise notifies the UI; always records the search in the
    manual-search FIFO and finishes the queue item.
    """
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
        self.started = True

        ep_count, ep_count_scene = get_aired_in_season(self.show)
        set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)

        found = search.search_providers(self.show, [self.segment], True, try_other_searches=True)
        if not found:
            # Nothing usable came back from any provider; surface that to the user.
            ui.notifications.message(
                'No downloads found',
                u'Could not find a download for <i>%s</i>' % self.segment.prettyName())
            logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
        else:
            best = found[0]
            # just use the first result for now
            logger.log(u'Downloading %s from %s' % (best.name, best.provider.name))
            self.success = search.snatch_episode(best)
            for ep in best.episodes:
                self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
            helpers.cpu_sleep()
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())
        if self.success is None:
            self.success = False
        self.finish()
def run(self):
    """Retry failed downloads: mark each segment episode as failed, revert
    its status, then search the providers again and snatch replacement
    results. Always records the search in the manual-search FIFO and
    finishes the queue item.
    """
    generic_queue.QueueItem.run(self)
    self.started = True
    try:
        ep_count, ep_count_scene = get_aired_in_season(self.show)
        for ep_obj in self.segment:

            logger.log(u'Marking episode as bad: [%s]' % ep_obj.prettyName())

            # Record the failure, note which release/provider failed, and
            # roll the episode status back so it can be searched again.
            failed_history.set_episode_failed(ep_obj)
            (release, provider) = failed_history.find_release(ep_obj)
            failed_history.revert_episode(ep_obj)
            if release:
                failed_history.add_failed(release)
                history.log_failed(ep_obj, release, provider)

            logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())

            set_wanted_aired(ep_obj, True, ep_count, ep_count_scene, manual=True)

        search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                if search.snatch_episode(result):
                    for ep in result.episodes:
                        self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                helpers.cpu_sleep()
        else:
            # Deliberately silent when nothing is found (logging disabled below).
            pass
            # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())

        if self.success is None:
            self.success = False

        self.finish()
def run(self):
    """Execute the queued backlog search: set wanted/aired state for manual
    (non-standard) backlogs, search the providers, snatch each usable
    result, then always log completion and finish the queue item.
    """
    generic_queue.QueueItem.run(self)
    is_error = False
    try:
        # Only a manual backlog needs wanted/aired set here; the scheduled
        # (standard) backlog path does not.
        if not self.standard_backlog:
            ep_count, ep_count_scene = get_aired_in_season(self.show)
            for ep_obj in self.segment:
                set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)

        logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
        search_result = search.search_providers(
            self.show, self.segment, False,
            try_other_searches=(not self.standard_backlog or not self.limited_backlog),
            scheduled=self.standard_backlog)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                if search.snatch_episode(result):
                    for ep in result.episodes:
                        self.snatched_eps.add(
                            (ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                helpers.cpu_sleep()
        else:
            logger.log(
                u'No needed episodes found during backlog search for: [%s]' % self.show.name)
    except (StandardError, Exception):
        is_error = True
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Tuple indexing selects the error suffix when is_error is True.
        logger.log('Completed backlog search %sfor: [%s]' %
                   (('', 'with a debug error ')[is_error], self.show.name))
        self.finish()
def run(self):
    """Perform a user-requested search for a single episode: search all
    providers, snatch the first result when one is found, otherwise notify
    the UI. Always records the search in the manual-search FIFO and
    finishes the queue item.
    """
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
        self.started = True

        ep_count, ep_count_scene = get_aired_in_season(self.show)
        set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)

        search_result = search.search_providers(self.show, [self.segment], True, try_other_searches=True)

        if search_result:
            # just use the first result for now
            logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
            self.success = search.snatch_episode(search_result[0])
            # Snatched episodes are recorded even if the snatch call returned falsy;
            # self.success carries the actual snatch outcome.
            for ep in search_result[0].episodes:
                self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

            helpers.cpu_sleep()
        else:
            ui.notifications.message('No downloads found',
                                     u'Could not find a download for <i>%s</i>' % self.segment.prettyName())

            logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())

        if self.success is None:
            self.success = False

        self.finish()
def choose_search_mode(self, episodes, ep_obj, hits_per_page=100):
    """Decide whether to run one season search or per-episode searches.

    Returns a tuple ``(season_search, needed, budget)`` where
    ``season_search`` is True when a single season query is estimated to be
    cheaper than individual episode queries, ``needed`` is the
    neededQualities collected for the chosen mode, and ``budget`` is the
    allowed result-count limit for the search.
    """
    # Restrict to episodes in the same (scene) season as the reference episode.
    searches = [e for e in episodes
                if (not ep_obj.show.is_scene and e.season == ep_obj.season) or
                (ep_obj.show.is_scene and e.scene_season == ep_obj.scene_season)]

    needed = neededQualities()
    needed.check_needed_types(ep_obj.show)
    for s in searches:
        if needed.all_qualities_needed:
            break
        if not s.show.is_anime and not s.show.is_sports:
            if not getattr(s, 'wantedQuality', None):
                # this should not happen, the creation is missing for the search in this case
                logger.log('wantedQuality property was missing for search, creating it', logger.WARNING)
                ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                s.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
            needed.check_needed_qualities(s.wantedQuality)

    if not hasattr(ep_obj, 'eps_aired_in_season'):
        # this should not happen, the creation is missing for the search in this case
        logger.log('eps_aired_in_season property was missing for search, creating it', logger.WARNING)
        ep_count, ep_count_scene = get_aired_in_season(ep_obj.show)
        ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
        ep_obj.eps_aired_in_scene_season = ep_count_scene.get(ep_obj.scene_season, 0) if ep_obj.show.is_scene else \
            ep_obj.eps_aired_in_season

    # Estimated releases per episode (and a hard cap), tuned by needed quality tiers.
    per_ep, limit_per_ep = 0, 0
    if needed.need_sd and not needed.need_hd:
        per_ep, limit_per_ep = 10, 25
    if needed.need_hd:
        if not needed.need_sd:
            per_ep, limit_per_ep = 30, 90
        else:
            per_ep, limit_per_ep = 40, 120
    if needed.need_uhd or (needed.need_hd and not self.cats.get(NewznabConstants.CAT_UHD)):
        per_ep += 4
        limit_per_ep += 10
    if ep_obj.show.is_anime or ep_obj.show.is_sports or ep_obj.show.air_by_date:
        rel_per_ep, limit_per_ep = 5, 10
    else:
        rel_per_ep = per_ep

    # NOTE(review): operator precedence applies '* rel_per_ep' (and
    # '* limit_per_ep' below) only to the else branch, i.e. non-scene shows;
    # scene shows divide the raw episode count by hits_per_page. Possibly
    # intended as (count) * multiplier for both branches - confirm upstream.
    rel = max(1, int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
                           ep_obj.eps_aired_in_season * rel_per_ep) / hits_per_page)))
    rel_limit = max(1, int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
                                 ep_obj.eps_aired_in_season * limit_per_ep) / hits_per_page)))
    # Season search wins when the estimated page count undercuts per-episode paging.
    season_search = rel < (len(searches) * 100 // hits_per_page)

    if not season_search:
        # Per-episode mode: recompute needed qualities for just the reference episode.
        needed = neededQualities()
        needed.check_needed_types(ep_obj.show)
        if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
            if not getattr(ep_obj, 'wantedQuality', None):
                ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                ep_obj.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
            needed.check_needed_qualities(ep_obj.wantedQuality)
    else:
        # Season mode: accumulate needed qualities across every episode passed in.
        if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
            for ep in episodes:
                if not getattr(ep, 'wantedQuality', None):
                    ep_status, ep_quality = Quality.splitCompositeStatus(ep.status)
                    ep.wantedQuality = get_wanted_qualities(ep, ep_status, ep_quality, unaired=True)
                needed.check_needed_qualities(ep.wantedQuality)

    return (season_search, needed,
            (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])