def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality, extra_no_name,
                         version, is_anime=False):
    """Determine the proper level (and related metadata) of the release previously snatched for the given episodes.

    If the old status is not a snatched one, the level is computed directly from the caller-supplied
    extra_no_name/version.  Otherwise the most recent matching snatch is looked up in the history table
    and parsed to derive the level, codec and internal flag.

    :param show_obj: show object (or None); used for parsing and anime flag
    :param indexer: indexer id (unused in the lookup itself; kept for interface parity with callers)
    :param indexerid: show id used in the history query
    :param season: season number used in the history query
    :param episodes: iterable of episode numbers to check (first usable hit wins)
    :param old_status: status of the existing episode (one of the Quality composite statuses)
    :param new_quality: quality the candidate proper was parsed at; history hits of other qualities are skipped
    :param extra_no_name: extra info string of the candidate; may be replaced by the history release's value
    :param version: anime release version of the candidate
    :param is_anime: anime flag used only in the non-snatched shortcut path
    :return: tuple (level, is_internal, codec, extra_no_name, rel_name)
    """
    level = 0
    is_internal = False
    codec = ''
    rel_name = None
    if old_status not in SNATCHED_ANY:
        # episode was not snatched: level derives from the candidate's own parsed info
        level = Quality.get_proper_level(extra_no_name, version, is_anime)
    elif show_obj:
        my_db = db.DBConnection()
        np = NameParser(False, showObj=show_obj)
        for episode in episodes:
            # newest snatch record for this episode; the action LIKE clauses match any snatched status code
            result = my_db.select(
                'SELECT resource FROM history'
                ' WHERE showid = ?'
                ' AND season = ? AND episode = ? AND '
                '(%s) ORDER BY date DESC LIMIT 1' % (' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY)),
                [indexerid, season, episode])
            # skip rows without a usable release name
            if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
                continue
            nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
            if nq != new_quality:
                # only compare against a previous release of the same quality
                continue
            try:
                p = np.parse(result[0]['resource'])
            except (StandardError, Exception):
                continue
            # derive level/codec/internal flag from the previously snatched release name
            level = Quality.get_proper_level(p.extra_info_no_name(), p.version, show_obj.is_anime)
            extra_no_name = p.extra_info_no_name()
            rel_name = result[0]['resource']
            is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I)
            codec = _get_codec(p.extra_info_no_name())
            break
    return level, is_internal, codec, extra_no_name, rel_name
def history_snatched_proper_fix():
    """One-off migration: re-tag history rows whose release name looks like a proper/repack/real.

    Rows recorded with plain SNATCHED are rewritten to SNATCHED_PROPER (keeping quality) when the
    release name parses to a positive proper level.  Guarded by the 'history_snatch_proper' DB flag
    so the scan runs only once.
    """
    my_db = db.DBConnection()
    if not my_db.has_flag('history_snatch_proper'):
        logger.log('Updating history items with status Snatched Proper in a background process...')
        # candidate rows: snatched action code and a PROPER/REPACK/REAL marker in the resource name
        sql_result = my_db.select('SELECT rowid, resource, quality, showid'
                                  ' FROM history'
                                  ' WHERE action LIKE "%%%02d"' % SNATCHED +
                                  ' AND (UPPER(resource) LIKE "%PROPER%"'
                                  ' OR UPPER(resource) LIKE "%REPACK%"'
                                  ' OR UPPER(resource) LIKE "%REAL%")')
        if sql_result:
            cl = []
            for r in sql_result:
                show_obj = None
                try:
                    show_obj = helpers.findCertainShow(sickbeard.showList, int(r['showid']))
                except (StandardError, Exception):
                    # show lookup is best-effort; parsing can proceed without a show object
                    pass
                np = NameParser(False, showObj=show_obj, testing=True)
                try:
                    pr = np.parse(r['resource'])
                except (StandardError, Exception):
                    continue
                # only rows whose name actually parses to a proper level get re-tagged
                if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
                    cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
                               [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])), r['rowid']]])
            if cl:
                my_db.mass_action(cl)
        # NOTE(review): flag and completion log sit outside the `if sql_result` so the migration is
        # marked done even when no rows matched — placement inferred from intent; confirm upstream.
        logger.log('Completed the history table update with status Snatched Proper.')
        my_db.add_flag('history_snatch_proper')
def history_snatched_proper_fix():
    """One-off migration: re-tag snatched history rows that are really propers.

    History rows recorded with plain SNATCHED whose release name contains a PROPER/REPACK/REAL
    marker and parses to a positive proper level are rewritten to SNATCHED_PROPER at the same
    quality.  The 'history_snatch_proper' DB flag ensures the scan runs only once.
    """
    conn = db.DBConnection()
    if conn.has_flag('history_snatch_proper'):
        # migration already performed
        return
    logger.log('Updating history items with status Snatched Proper in a background process...')
    # candidate rows: snatched action code plus a proper-style marker in the resource name
    history_rows = conn.select('SELECT rowid, resource, quality, showid'
                               ' FROM history'
                               ' WHERE action LIKE "%%%02d"' % SNATCHED +
                               ' AND (UPPER(resource) LIKE "%PROPER%"'
                               ' OR UPPER(resource) LIKE "%REPACK%"'
                               ' OR UPPER(resource) LIKE "%REAL%")')
    updates = []
    for cur_row in history_rows or []:
        cur_show = None
        try:
            cur_show = helpers.findCertainShow(sickbeard.showList, int(cur_row['showid']))
        except (StandardError, Exception):
            # best-effort show lookup; parsing works without it
            pass
        parser = NameParser(False, showObj=cur_show, testing=True)
        try:
            parsed = parser.parse(cur_row['resource'])
        except (StandardError, Exception):
            continue
        # re-tag only names that actually parse to a proper level
        if 0 < Quality.get_proper_level(parsed.extra_info_no_name(), parsed.version, parsed.is_anime):
            updates.append(['UPDATE history SET action = ? WHERE rowid = ?',
                            [Quality.compositeStatus(SNATCHED_PROPER, int(cur_row['quality'])), cur_row['rowid']]])
    if updates:
        conn.mass_action(updates)
    logger.log('Completed the history table update with status Snatched Proper.')
    conn.add_flag('history_snatch_proper')
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
    """Collect and verify candidate PROPER releases from all active providers.

    Pass 1 queries each provider (or consumes a pre-fetched ``proper_list``) and keeps uniquely-named
    results that parse to a recently-aired show.  Pass 2 verifies each candidate against the DB:
    quality must match the existing episode, proper level must beat (or usefully equal) the previous
    release, web-dl source/group rules apply, and anime needs a newer version from the same group.

    :param aired_since_shows: date passed to providers to bound the search window
    :param recent_shows: list of (indexer, indexerid) tuples of recently-aired non-anime shows
    :param recent_anime: list of (indexer, indexerid) tuples of recently-aired anime shows
    :param proper_list: optional dict of provider_id -> pre-fetched results; skips live searching
    :return: list of verified proper objects
    """
    propers = {}

    # for each provider get a list of proper candidates
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        if None is not proper_list:
            # pre-fetched results supplied by the caller; no live search
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

            logger.log(u'Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                # always restore the thread name, even on error paths
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show, indexer_lookup=False)
                    parse_result = np.parse(x.name)
                    # keep only parsable results belonging to a recently-aired show
                    if parse_result.series_name and parse_result.episode_numbers and \
                            (parse_result.show.indexer, parse_result.show.indexerid) in recent_shows + recent_anime:
                        cur_size = getattr(x, 'size', None)
                        if failed_history.has_failed(x.name, cur_size, cur_provider.name):
                            continue
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
                                                                              parse_result.version,
                                                                              parse_result.is_anime,
                                                                              check_is_repack=True)
                        x.is_internal = parse_result.extra_info_no_name() and \
                            re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
                        x.codec = _get_codec(parse_result.extra_info_no_name())
                        propers[name] = x
                        count += 1
                except (InvalidNameException, InvalidShowException):
                    continue
                except (StandardError, Exception):
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by proper level then date, best first
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('properlevel', 'date'), reverse=True)
    verified_propers = set()
    for cur_proper in sorted_propers:

        np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
        try:
            parse_result = np.parse(cur_proper.name)
        except (StandardError, Exception):
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.extra_info = parse_result.extra_info
        cur_proper.extra_info_no_name = parse_result.extra_info_no_name
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
        cur_proper.is_anime = parse_result.is_anime

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND indexer = ? ' +
            'AND season = ? AND episode = ?',
            [cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        # don't take proper of the same level we already downloaded
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(
            cur_proper.extra_info_no_name(), cur_proper.version, cur_proper.is_anime, check_is_repack=True)

        old_release_group = sql_results[0]['release_group']
        # check if we want this release: same quality as current, current has correct status
        # restrict other release group releases to proper's
        if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                or cur_proper.quality != old_quality \
                or (cur_proper.is_repack and cur_proper.release_group != old_release_group):
            continue

        np = NameParser(False, try_scene_exceptions=True, showObj=parse_result.show, indexer_lookup=False)
        try:
            extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
        except (StandardError, Exception):
            extra_info = None

        old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
            get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
                                 parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
                                 cur_proper.version, cur_proper.is_anime)

        # fall back to the DB release name when history had none
        old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
        if cur_proper.proper_level < old_proper_level:
            continue
        elif cur_proper.proper_level == old_proper_level:
            # equal level is accepted only as a codec upgrade or internal -> non-internal swap
            if '264' == cur_proper.codec and 'xvid' == old_codec:
                pass
            elif old_is_internal and not cur_proper.is_internal:
                pass
            else:
                continue

        log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
                       % (cur_proper.release_group, old_release_group, cur_proper.name)

        is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                  (old_quality == Quality.SDTV and
                   re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I)))

        if is_web:
            # web-dl propers must come from the same web source as the existing release
            old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
            new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(), cur_proper.name)
            if old_webdl_type != new_webdl_type:
                logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
                           % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                continue

        # for webdls, prevent propers from different groups
        if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
            logger.log(log_same_grp, logger.DEBUG)
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            old_version = int(sql_results[0]['version'])
            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular
        # episode then add it to our list of propers
        if cur_proper.indexerid != -1:
            if (cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode) not in map(
                    operator.attrgetter('indexerid', 'indexer', 'season', 'episode'), verified_propers):
                logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
                verified_propers.add(cur_proper)
            else:
                # a proper for this episode already verified; replace any lower-level entries with this one
                rp = set()
                for vp in verified_propers:
                    if vp.indexer == cur_proper.indexer and vp.indexerid == cur_proper.indexerid and \
                            vp.season == cur_proper.season and vp.episode == cur_proper.episode and \
                            vp.proper_level < cur_proper.proper_level:
                        rp.add(vp)
                if rp:
                    verified_propers = verified_propers - rp
                    logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
                    verified_propers.add(cur_proper)

    return list(verified_propers)
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
    """Collect and verify candidate PROPER releases from all active providers.

    Each provider is queried (or a pre-fetched ``proper_list`` is consumed) and every uniquely-named
    candidate is verified in a single pass: it must parse to a recently-aired show, pass word-list
    checks, match the existing episode's quality and status, beat (or usefully equal) the previous
    release's proper level, satisfy web-dl source/group rules, and — for anime — carry a newer
    version from the same release group.  Candidates already present in recent history are dropped.

    :param aired_since_shows: date passed to providers to bound the search window
    :param recent_shows: list of (indexer, indexerid) tuples of recently-aired non-anime shows
    :param recent_anime: list of (indexer, indexerid) tuples of recently-aired anime shows
    :param proper_list: optional dict of provider_id -> pre-fetched results; skips live searching
    :return: list of verified proper objects (dict values)
    """
    propers = {}
    my_db = db.DBConnection()
    # for each provider get a list of arbitrary Propers
    orig_thread_name = threading.currentThread().name
    # list comprehension instead of filter(lambda...) for consistency with the sibling implementation
    providers = [p for p in sickbeard.providers.sortedProviderList() if p.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        if None is not proper_list:
            # pre-fetched results supplied by the caller; no live search
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)

            logger.log('Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log('Authentication error: %s' % ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                # always restore the thread name, even on error paths
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the Proper to the list
        for cur_proper in found_propers:
            name = _generic_name(cur_proper.name)
            if name in propers:
                continue

            try:
                np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show,
                                indexer_lookup=False)
                parse_result = np.parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException, Exception):
                continue

            # get the show object
            cur_proper.parsed_show = (cur_proper.parsed_show
                                      or helpers.findCertainShow(sickbeard.showList, parse_result.show.indexerid))
            if None is cur_proper.parsed_show:
                logger.log('Skip download; cannot find show with indexerid [%s]' % cur_proper.indexerid,
                           logger.ERROR)
                continue

            cur_proper.indexer = cur_proper.parsed_show.indexer
            cur_proper.indexerid = cur_proper.parsed_show.indexerid

            # keep only parsable results belonging to a recently-aired show
            if not (-1 != cur_proper.indexerid and parse_result.series_name and parse_result.episode_numbers
                    and (cur_proper.indexer, cur_proper.indexerid) in recent_shows + recent_anime):
                continue

            # only get anime Proper if it has release group and version
            if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
                logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
                logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG)
                continue

            re_x = dict(re_prefix='.*', re_suffix='.*')
            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_ignore_words,
                                                    **re_x)
            if None is not result and result:
                logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG)
                continue

            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_require_words,
                                                    **re_x)
            if None is not result and not result:
                logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            cur_size = getattr(cur_proper, 'size', None)
            if failed_history.has_failed(cur_proper.name, cur_size, cur_provider.name):
                continue

            cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
            cur_proper.episode = parse_result.episode_numbers[0]

            # check if we actually want this Proper (if it's the right quality)
            sql_results = my_db.select(
                'SELECT release_group, status, version, release_name'
                ' FROM tv_episodes'
                ' WHERE showid = ? AND indexer = ? AND season = ? AND episode = ?'
                ' LIMIT 1',
                [cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
            if not sql_results:
                continue

            # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
            # check if we want this release: same quality as current, current has correct status
            # restrict other release group releases to Proper's
            old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
            cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
            cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
                parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime,
                check_is_repack=True)
            cur_proper.proper_level = cur_proper.properlevel  # local non global value

            old_release_group = sql_results[0]['release_group']
            try:
                same_release_group = parse_result.release_group.lower() == old_release_group.lower()
            except (StandardError, Exception):
                # either group may be None; fall back to strict comparison
                same_release_group = parse_result.release_group == old_release_group
            if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                    or cur_proper.quality != old_quality \
                    or (cur_proper.is_repack and not same_release_group):
                continue

            np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show,
                            indexer_lookup=False)
            try:
                extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
            except (StandardError, Exception):
                extra_info = None

            # don't take Proper of the same level we already downloaded
            old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
                get_old_proper_level(cur_proper.parsed_show, cur_proper.indexer, cur_proper.indexerid,
                                     cur_proper.season, parse_result.episode_numbers, old_status,
                                     cur_proper.quality, extra_info, parse_result.version, parse_result.is_anime)

            cur_proper.codec = _get_codec(parse_result.extra_info_no_name())
            if cur_proper.proper_level < old_proper_level:
                continue

            cur_proper.is_internal = (parse_result.extra_info_no_name()
                                      and re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I))
            if cur_proper.proper_level == old_proper_level:
                # BUGFIX: the `continue` was unconditional, making both acceptance arms dead code and
                # rejecting every equal-level Proper.  Equal level is accepted only as a codec upgrade
                # (x264 over xvid) or an internal -> non-internal swap, as in get_old_proper_level's callers.
                if (('264' == cur_proper.codec and 'xvid' == old_codec)
                        or (old_is_internal and not cur_proper.is_internal)):
                    pass
                else:
                    continue

            is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                      (old_quality == Quality.SDTV and
                       re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I)))

            if is_web:
                # web-dl Propers must come from the same web source as the existing release
                old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
                old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
                new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
                if old_webdl_type != new_webdl_type:
                    logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
                               % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                    continue

            # for webdls, prevent Propers from different groups
            log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
                           % (parse_result.release_group, old_release_group, cur_proper.name)
            if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

            # check if we actually want this Proper (if it's the right release group and a higher version)
            if parse_result.is_anime:
                old_version = int(sql_results[0]['version'])
                if not (-1 < old_version < parse_result.version):
                    continue
                if not same_release_group:
                    logger.log(log_same_grp, logger.DEBUG)
                    continue
                found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
            else:
                found_msg = 'Found Proper [%s]' % cur_proper.name

            # make sure the episode has been downloaded before
            history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
            history_results = my_db.select(
                'SELECT resource FROM history'
                ' WHERE showid = ?'
                ' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
                ' AND (%s)' % ' OR '.join('action LIKE "%%%02d"' % x
                                          for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]),
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
                 history_limit.strftime(history.dateFormat)])

            # skip if the episode has never downloaded, because a previous quality is required to match the Proper
            if not len(history_results):
                logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            # make sure that none of the existing history downloads are the same Proper as the download candidate
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(
                cur_proper.name, cur_proper.parsed_show.is_anime))
            is_same = False
            for hitem in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
                        ek.ek(os.path.basename, hitem['resource']))):
                    is_same = True
                    break
            if is_same:
                logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
                continue

            logger.log(found_msg, logger.DEBUG)

            # finish populating the Proper instance
            # cur_proper.show = cur_proper.parsed_show.indexerid
            cur_proper.provider = cur_provider
            cur_proper.extra_info = parse_result.extra_info
            cur_proper.extra_info_no_name = parse_result.extra_info_no_name
            cur_proper.release_group = parse_result.release_group
            cur_proper.is_anime = parse_result.is_anime
            cur_proper.version = parse_result.version

            propers[name] = cur_proper

        cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)

    return propers.values()
def findNeededEpisodes(self, episode, manualSearch=False):
    """Search this provider's cache table for results matching wanted episode(s).

    :param episode: a single episode object, or a list of episode objects (list form also
                    filters cached rows by each episode's wantedQuality)
    :param manualSearch: passed through to showObj.wantEpisode to relax automatic-search rules
    :return: dict mapping episode object -> list of result objects built from cache rows
    """
    neededEps = {}
    cl = []

    myDB = self.get_db()
    if type(episode) != list:
        # single episode: one direct lookup against the cache
        sqlResults = myDB.select(
            'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?',
            [self.providerID, episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%'])
    else:
        # batch: one query per episode (with quality filter), run together, results flattened
        for epObj in episode:
            cl.append([
                'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?' +
                ' AND episodes LIKE ? AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
                [self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
        sqlResults = myDB.mass_action(cl)
        if sqlResults:
            sqlResults = list(itertools.chain(*sqlResults))

    if not sqlResults:
        self.setLastSearch()
        return neededEps

    # for each cache entry
    for curResult in sqlResults:

        # skip non-tv crap
        if not show_name_helpers.pass_wordlist_checks(curResult['name'], parse=False, indexer_lookup=False):
            continue

        # get the show object, or if it's not one of our shows then ignore it
        showObj = helpers.findCertainShow(sickbeard.showList, int(curResult['indexerid']))
        if not showObj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not showObj.is_anime:
            logger.log(u'' + str(showObj.name) + ' is not an anime, skipping', logger.DEBUG)
            continue

        # get season and ep data (ignoring multi-eps for now)
        curSeason = int(curResult['season'])
        if curSeason == -1:
            continue
        # episodes column is pipe-delimited; take the first episode number
        curEp = curResult['episodes'].split('|')[1]
        if not curEp:
            continue
        curEp = int(curEp)
        curQuality = int(curResult['quality'])
        curReleaseGroup = curResult['release_group']
        curVersion = curResult['version']

        # if the show says we want that episode then add it to the list
        if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
            logger.log(u'Skipping ' + curResult['name'] + ' because we don\'t want an episode that\'s ' +
                       Quality.qualityStrings[curQuality], logger.DEBUG)
            continue

        epObj = showObj.getEpisode(curSeason, curEp)

        # build a result object
        title = curResult['name']
        url = curResult['url']

        logger.log(u'Found result ' + title + ' at ' + url)

        result = self.provider.get_result([epObj], url)
        if None is result:
            continue
        result.show = showObj
        result.name = title
        result.quality = curQuality
        result.release_group = curReleaseGroup
        result.version = curVersion
        result.content = None
        np = NameParser(False, showObj=showObj)
        try:
            parsed_result = np.parse(title)
            extra_info_no_name = parsed_result.extra_info_no_name()
            version = parsed_result.version
            is_anime = parsed_result.is_anime
        except (StandardError, Exception):
            # unparsable title: fall back to neutral proper-level inputs
            extra_info_no_name = None
            version = -1
            is_anime = False
        result.is_repack, result.properlevel = Quality.get_proper_level(extra_info_no_name, version, is_anime,
                                                                        check_is_repack=True)

        # add it to the list
        if epObj not in neededEps:
            neededEps[epObj] = [result]
        else:
            neededEps[epObj].append(result)

    # datetime stamp this search so cache gets cleared
    self.setLastSearch()

    return neededEps
def findNeededEpisodes(self, episode, manualSearch=False):
    """Search this provider's cache table for results matching wanted episode(s).

    :param episode: a single episode object, or a list of episode objects (list form also
                    filters cached rows by each episode's wantedQuality)
    :param manualSearch: passed through to wantEpisode to relax automatic-search rules
    :return: dict mapping episode object -> list of result objects built from cache rows
    """
    needed = {}

    my_db = self.get_db()
    if list is not type(episode):
        # single episode: one direct lookup against the cache
        sql_results = my_db.select(
            'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?',
            [self.providerID, episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%'])
    else:
        # batch mode: one query per episode (with quality filter), run together, results flattened
        queries = []
        for cur_ep_obj in episode:
            queries.append([
                'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?' +
                ' AND episodes LIKE ? AND quality IN (' +
                ','.join([str(q) for q in cur_ep_obj.wantedQuality]) + ')',
                [self.providerID, cur_ep_obj.show.indexerid, cur_ep_obj.season,
                 '%|' + str(cur_ep_obj.episode) + '|%']])
        sql_results = my_db.mass_action(queries)
        if sql_results:
            sql_results = list(itertools.chain(*sql_results))

    if not sql_results:
        # still stamp the search so the cache refresh logic proceeds
        self.setLastSearch()
        return needed

    for cur_result in sql_results:

        # skip non-tv crap
        if not show_name_helpers.pass_wordlist_checks(cur_result['name'], parse=False, indexer_lookup=False):
            continue

        # ignore results for shows not in our list
        show_obj = helpers.findCertainShow(sickbeard.showList, int(cur_result['indexerid']))
        if not show_obj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not show_obj.is_anime:
            logger.log(u'' + str(show_obj.name) + ' is not an anime, skipping', logger.DEBUG)
            continue

        # season/episode data (ignoring multi-eps for now); episodes column is pipe-delimited
        cur_season = int(cur_result['season'])
        if -1 == cur_season:
            continue
        ep_text = cur_result['episodes'].split('|')[1]
        if not ep_text:
            continue
        cur_ep = int(ep_text)
        cur_quality = int(cur_result['quality'])
        cur_release_group = cur_result['release_group']
        cur_version = cur_result['version']

        # only keep episodes the show actually wants at this quality
        if not show_obj.wantEpisode(cur_season, cur_ep, cur_quality, manualSearch):
            logger.log(u'Skipping ' + cur_result['name'] + ' because we don\'t want an episode that\'s ' +
                       Quality.qualityStrings[cur_quality], logger.DEBUG)
            continue

        ep_obj = show_obj.getEpisode(cur_season, cur_ep)

        # build a result object
        title = cur_result['name']
        url = cur_result['url']

        logger.log(u'Found result ' + title + ' at ' + url)

        result = self.provider.get_result([ep_obj], url)
        if None is result:
            continue
        result.show = show_obj
        result.name = title
        result.quality = cur_quality
        result.release_group = cur_release_group
        result.version = cur_version
        result.content = None

        parser = NameParser(False, showObj=show_obj)
        try:
            parsed = parser.parse(title)
            extra_no_name = parsed.extra_info_no_name()
            rel_version = parsed.version
            rel_is_anime = parsed.is_anime
        except (StandardError, Exception):
            # unparsable title: fall back to neutral proper-level inputs
            extra_no_name, rel_version, rel_is_anime = None, -1, False
        result.is_repack, result.properlevel = Quality.get_proper_level(
            extra_no_name, rel_version, rel_is_anime, check_is_repack=True)

        # collect per episode
        needed.setdefault(ep_obj, []).append(result)

    # datetime stamp this search so cache gets cleared
    self.setLastSearch()

    return needed