def _recent_history(aired_since_shows, aired_since_anime):
    """Find shows with snatched/downloaded/failed history since the given cutoffs.

    :param aired_since_shows: datetime cutoff for regular shows
    :param aired_since_anime: datetime cutoff for anime shows
    :return: tuple (recent_shows, recent_anime) of unique (indexer, indexer_id)
        pairs, split by the show's is_anime flag
    """
    recent_shows, recent_anime = [], []
    my_db = db.DBConnection()
    # single query using the older of the two cutoffs; rows are split into the
    # two result lists afterwards based on each show's is_anime flag
    sql_results = my_db.select(
        'SELECT DISTINCT s.indexer, s.indexer_id FROM history as h' +
        ' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) +
        ' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, FAILED]]))
    for sqlshow in sql_results:
        try:
            show = helpers.find_show_by_id(
                sickbeard.showList, {int(sqlshow['indexer']): int(sqlshow['indexer_id'])})
        except MultipleShowObjectsException:
            # ambiguous id mapping; skip this history row
            continue
        if not show:
            continue
        show_key = (sqlshow['indexer'], sqlshow['indexer_id'])
        # route into the matching list, keeping entries unique
        target = recent_anime if show.is_anime else recent_shows
        if show_key not in target:
            target.append(show_key)
    return recent_shows, recent_anime
def _check_for_propers(needed):
    """Scan the provider cache for Proper/Repack/Real releases and queue a proper search.

    :param needed: needed-qualities object; its need_anime flag widens the
        pattern to also match anime v2-v9 releases
    """
    # feature gate: proper downloading can be disabled globally
    if not sickbeard.DOWNLOAD_PROPERS:
        return
    propers = {}
    my_db = db.DBConnection('cache.db')
    sql_results = my_db.select('SELECT * FROM provider_cache')
    # NOTE(review): alternation binds loosely here -- '\b' only anchors the
    # leading 'repack' and the trailing 'real...' branch; 'proper' can match
    # inside a longer word. Confirm whether '\b(repack|proper|...)\b' was intended.
    re_p = r'\brepack|proper|real%s\b' % ('', '|v[2-9]')[needed.need_anime]
    proper_regex = re.compile(re_p, flags=re.I)
    for s in sql_results:
        if proper_regex.search(s['name']):
            try:
                show = helpers.find_show_by_id(
                    sickbeard.showList, {INDEXER_TVDB: int(s['indexerid'])})
            except (StandardError, Exception):
                # unknown/ambiguous show id -- skip this cached result
                continue
            if show:
                # group matched releases by provider name
                propers.setdefault(s['provider'], []).append(
                    Proper(s['name'], s['url'],
                           datetime.datetime.fromtimestamp(s['time']), show,
                           parsed_show=show))
    if propers:
        logger.log(
            'Found Proper/Repack/Real in recent search, sending data to properfinder'
        )
        propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem(
            propers=propers)
        sickbeard.searchQueueScheduler.action.add_item(
            propersearch_queue_item)
def _check_for_propers(needed):
    """Look through cached provider results for Proper/Repack/Real releases.

    Any matches are grouped per provider and handed to the proper-search queue.

    :param needed: needed-qualities object; need_anime widens the match pattern
    """
    if not sickbeard.DOWNLOAD_PROPERS:
        return
    my_db = db.DBConnection('cache.db')
    cache_rows = my_db.select('SELECT * FROM provider_cache')
    # anime searches also accept v2..v9 re-releases
    pattern = r'\brepack|proper|real%s\b' % ('', '|v[2-9]')[needed.need_anime]
    rc_proper = re.compile(pattern, flags=re.I)
    propers = {}
    for cur_row in cache_rows:
        if not rc_proper.search(cur_row['name']):
            continue
        try:
            show_obj = helpers.find_show_by_id(
                sickbeard.showList, {INDEXER_TVDB: int(cur_row['indexerid'])})
        except (StandardError, Exception):
            continue
        if not show_obj:
            continue
        # bucket matches per provider name
        propers.setdefault(cur_row['provider'], []).append(
            Proper(cur_row['name'], cur_row['url'],
                   datetime.datetime.fromtimestamp(cur_row['time']),
                   show_obj, parsed_show=show_obj))
    if not propers:
        return
    logger.log('Found Proper/Repack/Real in recent search, sending data to properfinder')
    propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem(propers=propers)
    sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)
def test_find_show_by_id(self):
    """Run every case in find_tests through find_show_by_id and check the outcome."""
    result = None  # type: Optional[TVShow]
    for cur_test in find_tests:
        success = True
        try:
            result = find_show_by_id(**cur_test['para'])
        except MultipleShowObjectsException:
            success = False
        expected = cur_test['result']
        msg = 'error finding show (%s) with para: %s' % (
            cur_test.get('description'), cur_test['para'])
        # a case may either assert whether the lookup raised (success flag)
        # or assert the ids of the show that was found
        if isinstance(expected, dict) and None is not expected.get('success', None):
            self.assertEqual(success, expected.get('success', None), msg=msg)
        else:
            found_ids = result and {'tvid': result.tvid, 'prodid': result.prodid}
            self.assertEqual(found_ids, expected, msg=msg)
def get_show(self, item, **kwargs):
    """Resolve a provider result item to a show object via newznab id attributes.

    :param item: provider result item handed to the cache id parser
    :return: the matched show object, or None when no ids are present,
        the lookup is ambiguous, or the name space is not newznab
    """
    if 'name_space' not in kwargs or 'newznab' not in kwargs['name_space']:
        return None
    ids = self.cache.parse_ids(item, kwargs['name_space'])
    if not ids:
        return None
    try:
        return helpers.find_show_by_id(sickbeard.showList, id_dict=ids, no_mapped_ids=False)
    except MultipleShowObjectsException:
        # more than one show matched the given ids
        return None
def get_needed_qualites(needed=None):
    """Collect the qualities still wanted for recently snatched/downloaded episodes.

    :param needed: optional neededQualities accumulator; a fresh one is created
        when the argument is not a neededQualities instance
    :return: the (possibly updated) neededQualities object
    """
    if not isinstance(needed, neededQualities):
        needed = neededQualities()
    # nothing to do when propers are off or everything is already wanted
    if not sickbeard.DOWNLOAD_PROPERS or needed.all_needed:
        return needed

    # look-back windows: regular shows get backlog days + 2, anime a fixed 14
    age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
    aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
    aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)

    my_db = db.DBConnection()
    # one query over the wider of the two windows
    sql_results = my_db.select(
        'SELECT DISTINCT s.indexer, s.indexer_id, e.season, e.episode FROM history as h' +
        ' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) +
        ' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, FAILED]]))

    for cur_result in sql_results:
        # stop early once every quality/type is already marked needed
        if needed.all_needed:
            break
        try:
            show_obj = helpers.find_show_by_id(
                sickbeard.showList, {int(cur_result['indexer']): int(cur_result['indexer_id'])})
        except MultipleShowObjectsException:
            continue
        if not show_obj:
            continue
        needed.check_needed_types(show_obj)
        if needed.all_show_qualities_needed(show_obj) or needed.all_qualities_needed:
            continue
        ep_obj = show_obj.getEpisode(season=cur_result['season'], episode=cur_result['episode'])
        if not ep_obj:
            continue
        ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
        if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
            needed.check_needed_qualities([ep_quality])
    return needed
def add_cache_entry(self, name, url, parse_result=None, indexer_id=0, id_dict=None):
    """Parse a release name and return the SQL to insert it into provider_cache.

    :param name: release name to parse/cache
    :param url: release url
    :param parse_result: pre-parsed result; when None, name is parsed here
    :param indexer_id: optional indexer id used to scope the name parser
    :param id_dict: optional id mapping used to scope the name parser
    :return: [sql, params] list for the caller to execute, or None when the
        name cannot be parsed or yields no season/episodes
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        show_obj = None
        if indexer_id:
            try:
                show_obj = helpers.findCertainShow(sickbeard.showList, indexer_id)
            except MultipleShowObjectsException:
                return
        if id_dict:
            try:
                show_obj = helpers.find_show_by_id(sickbeard.showList, id_dict=id_dict, no_mapped_ids=False)
            except MultipleShowObjectsException:
                return
        try:
            np = NameParser(showObj=show_obj, convert=True, indexer_lookup=False)
            parse_result = np.parse(name)
        except InvalidNameException:
            logger.log('Unable to parse the filename %s into a valid episode' % name, logger.DEBUG)
            return
        except InvalidShowException:
            return
    if not parse_result or not parse_result.series_name:
        return
    # if we made it this far then lets add the parsed result to cache for usage later on
    # season-less results (e.g. air-by-date) default to season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers
    if season and episodes:
        # store episodes as a separated string
        episode_text = '|%s|' % '|'.join(map(str, episodes))
        # get the current timestamp
        cur_timestamp = int(time.mktime(datetime.datetime.today().timetuple()))
        # get quality of release
        quality = parse_result.quality
        # py2: normalise byte strings to unicode before storing
        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8', 'replace')
        # get release group
        release_group = parse_result.release_group
        # get version
        version = parse_result.version
        logger.log('Add to cache: [%s]' % name, logger.DEBUG)
        return [
            'INSERT OR IGNORE INTO provider_cache'
            ' (provider, name, season, episodes, indexerid, url, time, quality, release_group, version)'
            ' VALUES (?,?,?,?,?,?,?,?,?,?)',
            [self.providerID, name, season, episode_text, parse_result.show.indexerid, url,
             cur_timestamp, quality, release_group, version]]
def add_cache_entry(self, name, url, parse_result=None, indexer_id=0, id_dict=None):
    """Parse a release name and build the SQL to insert it into provider_cache.

    :param name: release name to parse/cache
    :param url: release url
    :param parse_result: pre-parsed result; when None, name is parsed here
    :param indexer_id: optional indexer id used to scope the name parser
    :param id_dict: optional id mapping used to scope the name parser
    :return: [sql, params] list for the caller to execute, or None when the
        name cannot be parsed or yields no season/episodes
    """
    if not parse_result:
        # resolve a show object first so the parser can be scoped to it
        show_obj = None
        if indexer_id:
            try:
                show_obj = helpers.findCertainShow(sickbeard.showList, indexer_id)
            except MultipleShowObjectsException:
                return
        if id_dict:
            try:
                show_obj = helpers.find_show_by_id(sickbeard.showList, id_dict=id_dict, no_mapped_ids=False)
            except MultipleShowObjectsException:
                return
        try:
            parse_result = NameParser(showObj=show_obj, convert=True, indexer_lookup=False).parse(name)
        except InvalidNameException:
            logger.log('Unable to parse the filename %s into a valid episode' % name, logger.DEBUG)
            return
        except InvalidShowException:
            return

    if not parse_result or not parse_result.series_name:
        return

    # season-less results (e.g. air-by-date) default to season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers
    if not (season and episodes):
        return

    # episodes are stored pipe-delimited, e.g. '|1|2|'
    episode_text = '|%s|' % '|'.join(map(str, episodes))
    cur_timestamp = int(time.mktime(datetime.datetime.today().timetuple()))
    quality = parse_result.quality
    # py2: normalise byte strings to unicode before storing
    if not isinstance(name, unicode):
        name = unicode(name, 'utf-8', 'replace')
    release_group = parse_result.release_group
    version = parse_result.version

    logger.log('Add to cache: [%s]' % name, logger.DEBUG)
    return ['INSERT OR IGNORE INTO provider_cache'
            ' (provider, name, season, episodes, indexerid, url, time, quality, release_group, version)'
            ' VALUES (?,?,?,?,?,?,?,?,?,?)',
            [self.providerID, name, season, episode_text, parse_result.show.indexerid, url,
             cur_timestamp, quality, release_group, version]]
def search_backlog(self, which_shows=None, force_type=NORMAL_BACKLOG, force=False):
    """Run a backlog search, either for specific shows or the whole show list.

    Standard (scheduled) runs are split into parts persisted in the
    backlogparts table so successive runs continue where the last stopped.

    :param which_shows: optional list of shows to search; when given, this is
        not treated as a standard scheduled backlog
    :param force_type: NORMAL_BACKLOG or a forced type; non-normal marks the
        queued items as forced for standard runs
    :param force: when True, skip the 23-hour rate limit check
    """
    if self.amActive and not which_shows:
        logger.log(u'Backlog is still running, not starting it again', logger.DEBUG)
        return
    # standard_backlog means: scheduled run over the full show list
    if which_shows:
        show_list = which_shows
        standard_backlog = False
    else:
        show_list = sickbeard.showList
        standard_backlog = True
    now = datetime.datetime.now()
    any_torrent_enabled = continued_backlog = False
    # rate limit: within 23h of the last run only proceed when an active
    # torrent provider with backlog enabled exists
    if not force and standard_backlog and (
            datetime.datetime.now() - datetime.datetime.fromtimestamp(
                self._get_last_runtime())) < datetime.timedelta(hours=23):
        any_torrent_enabled = any([
            x for x in sickbeard.providers.sortedProviderList()
            if x.is_active() and x.enable_backlog and x.providerType == GenericProvider.TORRENT])
        if not any_torrent_enabled:
            logger.log('Last scheduled Backlog run was within the last day, skipping this run.', logger.DEBUG)
            return
    self._get_last_backlog()
    self.amActive = True
    self.amPaused = False
    cur_date = datetime.date.today().toordinal()
    # from_date of ordinal 1 == unrestricted (full) backlog window
    from_date = datetime.date.fromordinal(1)
    limited_from_date = datetime.date.today() - datetime.timedelta(days=sickbeard.BACKLOG_DAYS)
    limited_backlog = False
    if standard_backlog and (any_torrent_enabled or sickbeard.BACKLOG_NOFULL):
        logger.log(u'Running limited backlog for episodes missed during the last %s day(s)'
                   % str(sickbeard.BACKLOG_DAYS))
        from_date = limited_from_date
        limited_backlog = True
    runparts = []
    if standard_backlog and not any_torrent_enabled and sickbeard.BACKLOG_NOFULL:
        logger.log(u'Skipping automated full backlog search because it is disabled in search settings')
    my_db = db.DBConnection('cache.db')
    if standard_backlog and not any_torrent_enabled and not sickbeard.BACKLOG_NOFULL:
        # continue a previously split full backlog: pick up the lowest
        # remaining part, then delete it from the table
        sql_result = my_db.select('SELECT * FROM backlogparts WHERE part in (SELECT MIN(part) FROM backlogparts)')
        if sql_result:
            sl = []
            part_nr = int(sql_result[0]['part'])
            for s in sql_result:
                show_obj = find_show_by_id(sickbeard.showList, {int(s['indexer']): int(s['indexerid'])})
                if show_obj:
                    sl.append(show_obj)
                    runparts.append([int(s['indexerid']), int(s['indexer'])])
            show_list = sl
            continued_backlog = True
            my_db.action('DELETE FROM backlogparts WHERE part = ?', [part_nr])
    forced = standard_backlog and force_type != NORMAL_BACKLOG
    # gather wanted episodes (as per-show dicts) for every unpaused show
    wanted_list = []
    for curShow in show_list:
        if not curShow.paused:
            w = wanted_episodes(curShow, from_date, make_dict=True,
                                unaired=(sickbeard.SEARCH_UNAIRED and not sickbeard.UNAIRED_RECENT_SEARCH_ONLY))
            if w:
                wanted_list.append(w)
    parts = []
    if standard_backlog and not any_torrent_enabled and not continued_backlog and not sickbeard.BACKLOG_NOFULL:
        # split a fresh full backlog into roughly equal parts so each
        # scheduled run processes one part
        fullbacklogparts = sum([len(w) for w in wanted_list if w]) // sickbeard.BACKLOG_FREQUENCY
        h_part = []
        counter = 0
        for w in wanted_list:
            f = False
            for season, segment in w.iteritems():
                counter += 1
                if not f:
                    # record each show once per part as [indexerid, indexer]
                    h_part.append([segment[0].show.indexerid, segment[0].show.indexer])
                    f = True
            if counter > fullbacklogparts:
                counter = 0
                parts.append(h_part)
                h_part = []
        if h_part:
            parts.append(h_part)

    def in_showlist(show, showlist):
        # True when show's (indexerid, indexer) pair appears in showlist
        return 0 < len([item for item in showlist
                        if item[1] == show.indexer and item[0] == show.indexerid])

    if not runparts and parts:
        # fresh split: this run handles the first part only
        runparts = parts[0]
        wanted_list = [w for w in wanted_list
                       if w and in_showlist(w.itervalues().next()[0].show, runparts)]
    # shows outside the current part still get a limited (recent) search
    limited_wanted_list = []
    if standard_backlog and not any_torrent_enabled and runparts:
        for curShow in sickbeard.showList:
            if not curShow.paused and not in_showlist(curShow, runparts):
                w = wanted_episodes(curShow, limited_from_date, make_dict=True,
                                    unaired=(sickbeard.SEARCH_UNAIRED and not sickbeard.UNAIRED_RECENT_SEARCH_ONLY))
                if w:
                    limited_wanted_list.append(w)
    self.add_backlog_item(wanted_list, standard_backlog, limited_backlog, forced, any_torrent_enabled)
    if standard_backlog and not any_torrent_enabled and limited_wanted_list:
        self.add_backlog_item(limited_wanted_list, standard_backlog, True, forced, any_torrent_enabled)
    if standard_backlog and not sickbeard.BACKLOG_NOFULL and not any_torrent_enabled and not continued_backlog:
        # persist the remaining parts (1..n-1 handled now, rest stored);
        # wipe the table first when there is more than one part
        cl = ([], [['DELETE FROM backlogparts']])[len(parts) > 1]
        for i, l in enumerate(parts):
            if 0 == i:
                continue
            for m in l:
                cl.append(['INSERT INTO backlogparts (part, indexerid, indexer) VALUES (?,?,?)',
                           [i + 1, m[0], m[1]]])
        if 0 < len(cl):
            my_db.mass_action(cl)
    # don't consider this an actual backlog search if we only did recent eps
    # or if we only did certain shows
    if from_date == datetime.date.fromordinal(1) and standard_backlog:
        self._set_last_backlog(cur_date)
        self._get_last_backlog()
    if standard_backlog and not any_torrent_enabled:
        self._set_last_runtime(now)
    self.amActive = False
    self._reset_progress_indicator()