def run(self):
    """Execute a backlog search for this show's segment and snatch any results found."""
    generic_queue.QueueItem.run(self)
    had_error = False
    try:
        logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
        # widen the provider set unless this is both a standard and a limited backlog run
        other_searches = not self.standard_backlog or not self.limited_backlog
        found = search.search_providers(
            self.show, self.segment, False, try_other_searches=other_searches)
        if not found:
            logger.log(
                u'No needed episodes found during backlog search for: [%s]' % self.show.name)
        else:
            for cur_result in found:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (cur_result.name, cur_result.provider.name))
                search.snatch_episode(cur_result)
                helpers.cpu_sleep()
    except (StandardError, Exception):
        had_error = True
        logger.log(traceback.format_exc(), logger.DEBUG)
    finally:
        logger.log('Completed backlog search %sfor: [%s]'
                   % (('', 'with a debug error ')[had_error], self.show.name))
        self.finish()
def _download_propers(proper_list):
    """Snatch each proper in proper_list whose original release exists in snatch history.

    A proper is skipped when: no matching snatch exists within the last 30 days (the
    original quality would be unknown), the show cannot be found, or an identical
    proper name is already present in history.
    """
    for cur_proper in proper_list:
        # only consider history entries from the last 30 days
        history_limit = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before
        my_db = db.DBConnection()
        history_results = my_db.select(
            'SELECT resource FROM history ' +
            'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
            'AND action IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
             history_limit.strftime(history.dateFormat)])

        # if we didn't download this episode in the first place we don't know what quality to use for the proper = skip
        if 0 == len(history_results):
            logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
            continue
        else:
            # get the show object
            show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
            if None is show_obj:
                logger.log(u'Unable to find the show with indexerid ' + str(
                    cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
                continue

            # make sure that none of the existing history downloads are the same proper we're trying to download
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
            is_same = False
            for result in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(result['resource'])):
                    is_same = True
                    break
            if is_same:
                logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
                continue

            ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)

            # make the result object
            result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
            if None is result:
                continue
            result.name = cur_proper.name
            result.quality = cur_proper.quality
            result.version = cur_proper.version

            # snatch it
            search.snatch_episode(result, SNATCHED_PROPER)
def _download_propers(proper_list):
    """Snatch each proper in proper_list whose original release exists in snatch history.

    A proper is skipped when: no matching snatch exists within the last 30 days (the
    original quality would be unknown), the show cannot be found, or an identical
    proper name is already present in history.
    """
    for cur_proper in proper_list:
        # only consider history entries from the last 30 days
        history_limit = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before
        my_db = db.DBConnection()
        history_results = my_db.select(
            'SELECT resource FROM history ' +
            'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
            'AND action IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
             history_limit.strftime(history.dateFormat)])

        # if we didn't download this episode in the first place we don't know what quality to use for the proper = skip
        if 0 == len(history_results):
            logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
            continue
        else:
            # get the show object
            show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
            if None is show_obj:
                logger.log(u'Unable to find the show with indexerid ' + str(
                    cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
                continue

            # make sure that none of the existing history downloads are the same proper we're trying to download
            # BUGFIX: is_anime is accessed as a plain attribute/property everywhere else in this
            # file (e.g. curShow.is_anime, parsed_show.is_anime); calling it as is_anime() would
            # raise TypeError at runtime
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
            is_same = False
            for result in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(result['resource'])):
                    is_same = True
                    break
            if is_same:
                logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
                continue

            ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)

            # make the result object
            result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
            if None is result:
                continue
            result.name = cur_proper.name
            result.quality = cur_proper.quality
            result.version = cur_proper.version

            # snatch it
            search.snatch_episode(result, SNATCHED_PROPER)
def run(self):
    """Mark each episode in the segment as failed, then retry the download search.

    For every episode: record the failure in failed history, revert its status,
    log the failed release/provider when known, and re-flag it as wanted before
    a provider search over the whole segment.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        ep_count, ep_count_scene = get_aired_in_season(self.show)
        for ep_obj in self.segment:

            logger.log(u'Marking episode as bad: [%s]' % ep_obj.prettyName())

            failed_history.set_episode_failed(ep_obj)
            (release, provider) = failed_history.find_release(ep_obj)
            failed_history.revert_episode(ep_obj)
            if release:
                failed_history.add_failed(release)
                history.log_failed(ep_obj, release, provider)

            logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())

            set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)

        # search the whole segment at once, not per episode
        search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                search.snatch_episode(result)

                helpers.cpu_sleep()
        else:
            pass
            # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
    except (StandardError, Exception):
        # NOTE(review): StandardError is Python 2 only and is already covered by Exception
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
def run(self):
    """Run a manual provider search for the single episode held in self.segment."""
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
        self.started = True

        results = search.search_providers(self.show, [self.segment], True, try_other_searches=True)
        if not results:
            # surface the miss in the UI as well as the log
            ui.notifications.message('No downloads found',
                                     u'Could not find a download for <i>%s</i>' % self.segment.prettyName())
            logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
        else:
            # just use the first result for now
            best = results[0]
            logger.log(u'Downloading %s from %s' % (best.name, best.provider.name))
            self.success = search.snatch_episode(best)
            helpers.cpu_sleep()

    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    finally:
        # Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
        if None is self.success:
            self.success = False
        self.finish()
def run(self):
    """Execute a backlog search; record every successfully snatched episode id.

    For manual (non-standard) backlogs, episodes are first re-flagged as wanted.
    Snatched (indexer, indexerid, season, episode) tuples accumulate in
    self.snatched_eps.
    """
    generic_queue.QueueItem.run(self)
    is_error = False
    try:
        # manual backlog runs re-evaluate wanted state per episode first
        if not self.standard_backlog:
            ep_count, ep_count_scene = get_aired_in_season(self.show)
            for ep_obj in self.segment:
                set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)

        logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
        search_result = search.search_providers(
            self.show, self.segment, False,
            try_other_searches=(not self.standard_backlog or not self.limited_backlog),
            scheduled=self.standard_backlog)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                if search.snatch_episode(result):
                    for ep in result.episodes:
                        self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                helpers.cpu_sleep()
        else:
            logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show.name)
    except (StandardError, Exception):
        is_error = True
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        logger.log('Completed backlog search %sfor: [%s]'
                   % (('', 'with a debug error ')[is_error], self.show.name))
        self.finish()
def run(self):
    """Run a manual provider search for the single episode held in self.segment."""
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
        self.started = True

        results = search.search_providers(self.show, [self.segment], True)
        if not results:
            # surface the miss in the UI as well as the log
            ui.notifications.message('No downloads found',
                                     u'Could not find a download for <i>%s</i>' % self.segment.prettyName())
            logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
        else:
            # just use the first result for now
            best = results[0]
            logger.log(u'Downloading %s from %s' % (best.name, best.provider.name))
            self.success = search.snatch_episode(best)

            # give the CPU a break
            time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    finally:
        # Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
        if None is self.success:
            self.success = False
        self.finish()
def run(self):
    """Mark each episode in the segment as failed, then retry the download search.

    For every episode: record the failure, log the failed release/provider when
    known, and revert its status before a provider search over the whole segment.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for epObj in self.segment:

            logger.log(u'Marking episode as bad: [%s]' % epObj.prettyName())

            failed_history.markFailed(epObj)

            (release, provider) = failed_history.findRelease(epObj)
            if release:
                failed_history.logFailed(release)
                history.logFailed(epObj, release, provider)

            failed_history.revertEpisode(epObj)
            # BUGFIX: format string previously had no %s conversion ('[]'), so the
            # % operator raised TypeError ('not all arguments converted'); sibling
            # variants of this method use '[%s]'
            logger.log(u'Beginning failed download search for: [%s]' % epObj.prettyName())

        # search the whole segment at once, not per episode
        search_result = search.search_providers(self.show, self.segment, True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                search.snatch_episode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
        else:
            pass
            # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)
    finally:
        # Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
def run(self):
    """Search providers for the backlog segment and snatch every result found."""
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning backlog search for: [%s]' % self.show.name)

        found = search.search_providers(self.show, self.segment, False)
        if not found:
            logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show.name)
        else:
            for cur_result in found:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (cur_result.name, cur_result.provider.name))
                search.snatch_episode(cur_result)

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

    except Exception:
        logger.log(traceback.format_exc(), logger.DEBUG)

    finally:
        self.finish()
def run(self):
    """Run a manual search for self.segment; track the snatched episode id.

    Re-flags the episode as wanted first, then snatches the first provider result
    and records its (indexer, indexerid, season, episode) in self.snatched_eps.
    """
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
        self.started = True

        ep_count, ep_count_scene = get_aired_in_season(self.show)
        set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)

        search_result = search.search_providers(self.show, [self.segment], True, try_other_searches=True)

        if search_result:
            # just use the first result for now
            logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
            self.success = search.snatch_episode(search_result[0])
            for ep in search_result[0].episodes:
                self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

            helpers.cpu_sleep()
        else:
            ui.notifications.message('No downloads found',
                                     u'Could not find a download for <i>%s</i>' % self.segment.prettyName())

            logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())

        if self.success is None:
            self.success = False

        self.finish()
def run(self):
    """Run the recent search: gather wanted episodes from all shows and snatch them.

    Updates missing-episode state and provider caches first; paused shows are
    skipped. self.success ends True when nothing was needed, else reflects the
    last snatch attempt.
    """
    generic_queue.QueueItem.run(self)

    try:
        self._change_missing_episodes()

        self.update_providers()

        show_list = sickbeard.showList
        # ordinal 1 == no lower date bound; consider everything ever aired
        from_date = datetime.date.fromordinal(1)
        for curShow in show_list:
            if curShow.paused:
                continue

            self.episodes.extend(wanted_episodes(curShow, from_date))

        if not self.episodes:
            logger.log(u'No search of cache for episodes required')
            self.success = True
        else:
            num_shows = len(set([ep.show.name for ep in self.episodes]))
            logger.log(u'Found %d needed episode%s spanning %d show%s'
                       % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                          num_shows, helpers.maybe_plural(num_shows)))

            try:
                logger.log(u'Beginning recent search for episodes')
                found_results = search.search_for_needed_episodes(self.episodes)

                if not len(found_results):
                    logger.log(u'No needed episodes found')
                else:
                    for result in found_results:
                        # just use the first result for now
                        logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                        self.success = search.snatch_episode(result)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

            except Exception:
                logger.log(traceback.format_exc(), logger.DEBUG)

            if None is self.success:
                self.success = False

    finally:
        self.finish()
def run(self):
    """Mark each episode in the segment as failed, retry the search, track snatches.

    For every episode: record the failure, revert its status, log the failed
    release/provider when known, and re-flag it as wanted. Snatched episode ids
    accumulate in self.snatched_eps.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        ep_count, ep_count_scene = get_aired_in_season(self.show)
        for ep_obj in self.segment:

            logger.log(u'Marking episode as bad: [%s]' % ep_obj.prettyName())

            failed_history.set_episode_failed(ep_obj)
            (release, provider) = failed_history.find_release(ep_obj)
            failed_history.revert_episode(ep_obj)
            if release:
                failed_history.add_failed(release)
                history.log_failed(ep_obj, release, provider)

            logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())

            set_wanted_aired(ep_obj, True, ep_count, ep_count_scene, manual=True)

        # search the whole segment at once, not per episode
        search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                if search.snatch_episode(result):
                    for ep in result.episodes:
                        self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                helpers.cpu_sleep()
        else:
            pass
            # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())

        if self.success is None:
            self.success = False

        self.finish()
def run(self):
    """Execute a backlog search; record every successfully snatched episode id.

    For manual (non-standard) backlogs, episodes are first re-flagged as wanted.
    Snatched (indexer, indexerid, season, episode) tuples accumulate in
    self.snatched_eps.
    """
    generic_queue.QueueItem.run(self)
    is_error = False
    try:
        # manual backlog runs re-evaluate wanted state per episode first
        if not self.standard_backlog:
            ep_count, ep_count_scene = get_aired_in_season(self.show)
            for ep_obj in self.segment:
                set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)

        logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
        search_result = search.search_providers(
            self.show, self.segment, False,
            try_other_searches=(not self.standard_backlog or not self.limited_backlog),
            scheduled=self.standard_backlog)

        if search_result:
            for result in search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                if search.snatch_episode(result):
                    for ep in result.episodes:
                        self.snatched_eps.add(
                            (ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                helpers.cpu_sleep()
        else:
            logger.log(
                u'No needed episodes found during backlog search for: [%s]' % self.show.name)
    except (StandardError, Exception):
        is_error = True
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        logger.log('Completed backlog search %sfor: [%s]'
                   % (('', 'with a debug error ')[is_error], self.show.name))
        self.finish()
def run(self):
    """Run a manual search for self.segment; track the snatched episode id.

    Re-flags the episode as wanted first, then snatches the first provider result
    and records its (indexer, indexerid, season, episode) in self.snatched_eps.
    """
    generic_queue.QueueItem.run(self)
    try:
        logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
        self.started = True

        ep_count, ep_count_scene = get_aired_in_season(self.show)
        set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)

        search_result = search.search_providers(self.show, [self.segment], True, try_other_searches=True)

        if search_result:
            # just use the first result for now
            logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
            self.success = search.snatch_episode(search_result[0])
            for ep in search_result[0].episodes:
                self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

            helpers.cpu_sleep()
        else:
            ui.notifications.message('No downloads found',
                                     u'Could not find a download for <i>%s</i>' % self.segment.prettyName())

            logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())
    except (StandardError, Exception):
        logger.log(traceback.format_exc(), logger.ERROR)
    finally:
        # Keep a list with the last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self.base_info())

        if self.success is None:
            self.success = False

        self.finish()
def run(self):
    """Run the recent search using a neededQualities summary of all wanted episodes.

    Builds up `needed` (types/qualities still required across shows) to limit which
    provider caches get refreshed, optionally folds in proper-search needs, then
    snatches whatever the cache search finds.
    """
    generic_queue.QueueItem.run(self)

    try:
        self._change_missing_episodes()

        show_list = sickbeard.showList
        # ordinal 1 == no lower date bound; consider everything ever aired
        from_date = datetime.date.fromordinal(1)
        needed = common.neededQualities()
        for curShow in show_list:
            if curShow.paused:
                continue

            wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)

            if wanted_eps:
                if not needed.all_needed:
                    if not needed.all_types_needed:
                        needed.check_needed_types(curShow)
                    if not needed.all_qualities_needed:
                        for w in wanted_eps:
                            if needed.all_qualities_needed:
                                break
                            # anime/sports qualities are handled by check_needed_types above
                            if not w.show.is_anime and not w.show.is_sports:
                                needed.check_needed_qualities(w.wantedQuality)

            self.episodes.extend(wanted_eps)

        if sickbeard.DOWNLOAD_PROPERS:
            properFinder.get_needed_qualites(needed)

        self.update_providers(needed=needed)
        self._check_for_propers(needed)

        if not self.episodes:
            logger.log(u'No search of cache for episodes required')
            self.success = True
        else:
            num_shows = len(set([ep.show.name for ep in self.episodes]))
            logger.log(u'Found %d needed episode%s spanning %d show%s'
                       % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                          num_shows, helpers.maybe_plural(num_shows)))

            try:
                logger.log(u'Beginning recent search for episodes')
                found_results = search.search_for_needed_episodes(self.episodes)

                if not len(found_results):
                    logger.log(u'No needed episodes found')
                else:
                    for result in found_results:
                        # just use the first result for now
                        logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                        self.success = search.snatch_episode(result)

                        helpers.cpu_sleep()

            except (StandardError, Exception):
                logger.log(traceback.format_exc(), logger.ERROR)

            if None is self.success:
                self.success = False

    finally:
        self.finish()
def run(self):
    """Run the recent search, tracking which content types/qualities are still needed.

    Scans all unpaused shows for wanted episodes, derives need_* flags (anime,
    sports, SD/HD/UHD) to limit which provider caches are refreshed, then snatches
    whatever the cache search finds.
    """
    generic_queue.QueueItem.run(self)

    try:
        self._change_missing_episodes()

        show_list = sickbeard.showList
        # ordinal 1 == no lower date bound; consider everything ever aired
        from_date = datetime.date.fromordinal(1)
        need_anime = need_sports = need_sd = need_hd = need_uhd = False
        max_sd = Quality.SDDVD
        hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL,
                        Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY]
        max_hd = Quality.FULLHDBLURAY
        for curShow in show_list:
            if curShow.paused:
                continue

            wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)

            if wanted_eps:
                if not need_anime and curShow.is_anime:
                    need_anime = True
                if not need_sports and curShow.is_sports:
                    need_sports = True
                # stop scanning episodes once every quality tier is already needed
                if not need_sd or not need_hd or not need_uhd:
                    for w in wanted_eps:
                        if need_sd and need_hd and need_uhd:
                            break
                        if not w.show.is_anime and not w.show.is_sports:
                            if Quality.UNKNOWN in w.wantedQuality:
                                # unknown quality could be anything; need all tiers
                                need_sd = need_hd = need_uhd = True
                            else:
                                if not need_sd and max_sd >= min(w.wantedQuality):
                                    need_sd = True
                                if not need_hd and any(i in hd_qualities for i in w.wantedQuality):
                                    need_hd = True
                                if not need_uhd and max_hd < max(w.wantedQuality):
                                    need_uhd = True

            self.episodes.extend(wanted_eps)

        self.update_providers(need_anime=need_anime, need_sports=need_sports,
                              need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd)

        if not self.episodes:
            logger.log(u'No search of cache for episodes required')
            self.success = True
        else:
            num_shows = len(set([ep.show.name for ep in self.episodes]))
            logger.log(u'Found %d needed episode%s spanning %d show%s'
                       % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                          num_shows, helpers.maybe_plural(num_shows)))

            try:
                logger.log(u'Beginning recent search for episodes')
                found_results = search.search_for_needed_episodes(self.episodes)

                if not len(found_results):
                    logger.log(u'No needed episodes found')
                else:
                    for result in found_results:
                        # just use the first result for now
                        logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                        self.success = search.snatch_episode(result)

                        helpers.cpu_sleep()

            except Exception:
                logger.log(traceback.format_exc(), logger.DEBUG)

            if None is self.success:
                self.success = False

    finally:
        self.finish()
def run(self):
    """Run the recent search using a neededQualities summary; track snatched ids.

    Like the neededQualities variant above, but records each successfully snatched
    (indexer, indexerid, season, episode) tuple in self.snatched_eps.
    """
    generic_queue.QueueItem.run(self)

    try:
        self._change_missing_episodes()

        show_list = sickbeard.showList
        # ordinal 1 == no lower date bound; consider everything ever aired
        from_date = datetime.date.fromordinal(1)
        needed = common.neededQualities()
        for curShow in show_list:
            if curShow.paused:
                continue

            wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)

            if wanted_eps:
                if not needed.all_needed:
                    if not needed.all_types_needed:
                        needed.check_needed_types(curShow)
                    if not needed.all_qualities_needed:
                        for w in wanted_eps:
                            if needed.all_qualities_needed:
                                break
                            # anime/sports qualities are handled by check_needed_types above
                            if not w.show.is_anime and not w.show.is_sports:
                                needed.check_needed_qualities(w.wantedQuality)

            self.episodes.extend(wanted_eps)

        if sickbeard.DOWNLOAD_PROPERS:
            properFinder.get_needed_qualites(needed)

        self.update_providers(needed=needed)
        self._check_for_propers(needed)

        if not self.episodes:
            logger.log(u'No search of cache for episodes required')
            self.success = True
        else:
            num_shows = len(set([ep.show.name for ep in self.episodes]))
            logger.log(u'Found %d needed episode%s spanning %d show%s'
                       % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                          num_shows, helpers.maybe_plural(num_shows)))

            try:
                logger.log(u'Beginning recent search for episodes')
                found_results = search.search_for_needed_episodes(self.episodes)

                if not len(found_results):
                    logger.log(u'No needed episodes found')
                else:
                    for result in found_results:
                        # just use the first result for now
                        logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                        self.success = search.snatch_episode(result)
                        if self.success:
                            for ep in result.episodes:
                                self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                        helpers.cpu_sleep()

            except (StandardError, Exception):
                logger.log(traceback.format_exc(), logger.ERROR)

            if None is self.success:
                self.success = False

    finally:
        self.finish()
def _download_propers(proper_list):
    """Verify, filter and snatch propers, retrying alternatives when a snatch fails.

    Repeats rounds of verification/snatching until a round produces no verified
    propers. Within a round, at most one proper per episode id is kept (the one
    with the highest proper_level); provider scene/nuke rules can reject a
    candidate before snatching.
    """
    verified_propers = True
    consumed_proper = []
    downloaded_epid = set()

    # episode identity used for de-duplication across rounds
    _epid = operator.attrgetter('indexerid', 'indexer', 'season', 'episode')

    while verified_propers:
        verified_propers = set()

        # get verified list; sort the list of unique Propers for highest proper_level, newest first
        for cur_proper in sorted(
                filter(lambda p: p not in consumed_proper,
                       # allows Proper to fail or be rejected and another to be tried (with a different name)
                       filter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
                key=operator.attrgetter('properlevel', 'date'), reverse=True):

            epid = _epid(cur_proper)

            # if the show is in our list and there hasn't been a Proper already added for that particular episode
            # then add it to our list of Propers
            if epid not in map(_epid, verified_propers):
                logger.log('Proper may be useful [%s]' % cur_proper.name)
                verified_propers.add(cur_proper)
            else:
                # use Proper with the highest level
                remove_propers = set()
                # NOTE(review): bare map()/filter() for side effects only executes on
                # Python 2 (this file also uses basestring/StandardError); under
                # Python 3 these are lazy and would be a no-op — confirm before porting
                map(lambda vp: remove_propers.add(vp),
                    filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level),
                           verified_propers))

                if remove_propers:
                    verified_propers -= remove_propers
                    logger.log('A more useful Proper [%s]' % cur_proper.name)
                    verified_propers.add(cur_proper)

        for cur_proper in list(verified_propers):
            consumed_proper += [cur_proper]

            # scene release checking
            scene_only = getattr(cur_proper.provider, 'scene_only', False)
            scene_rej_nuked = getattr(cur_proper.provider, 'scene_rej_nuked', False)
            if any([scene_only, scene_rej_nuked]) and not cur_proper.parsed_show.is_anime:
                scene_or_contain = getattr(cur_proper.provider, 'scene_or_contain', '')
                scene_contains = False
                if scene_only and scene_or_contain:
                    re_extras = dict(re_prefix='.*', re_suffix='.*')
                    r = show_name_helpers.contains_any(cur_proper.name, scene_or_contain, **re_extras)
                    if None is not r and r:
                        scene_contains = True

                if scene_contains and not scene_rej_nuked:
                    reject = False
                else:
                    reject, url = search.can_reject(cur_proper.name)
                    if reject:
                        # a string reject value carries the nuke reason
                        if isinstance(reject, basestring):
                            if scene_rej_nuked:
                                logger.log('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                            else:
                                logger.log('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                                reject = False
                        elif scene_contains:
                            reject = False
                        else:
                            logger.log('Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)

                if reject:
                    continue

            # make the result object
            ep_obj = cur_proper.parsed_show.getEpisode(cur_proper.season, cur_proper.episode)
            result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
            if None is result:
                continue
            result.name = cur_proper.name
            result.quality = cur_proper.quality
            result.version = cur_proper.version
            result.properlevel = cur_proper.proper_level
            result.is_repack = cur_proper.is_repack
            result.puid = cur_proper.puid

            # snatch it
            if search.snatch_episode(result, SNATCHED_PROPER):
                downloaded_epid.add(_epid(cur_proper))