def pass_show_wordlist_checks(name, show):
    """Return True when `name` passes the show's ignore/require word filters.

    A hit on any of the show's ignore words, or a miss on all of its required
    words, rejects the name (logged) and returns False.
    """
    wildcard = {'re_prefix': '.*', 're_suffix': '.*'}

    ignore_hit = show_name_helpers.contains_any(name, show.rls_ignore_words, **wildcard)
    if ignore_hit is not None and ignore_hit:
        logger.log(u'Ignored: %s for containing ignore word' % name)
        return False

    require_hit = show_name_helpers.contains_any(name, show.rls_require_words, **wildcard)
    if require_hit is not None and not require_hit:
        logger.log(u'Ignored: %s for not containing any required word match' % name)
        return False

    return True
def pick_best_result(results, show, quality_list=None):
    """Pick the single best search result for an episode.

    :param results: list of search result objects to choose between
    :param show: show object the results belong to (anime white-listing and
        ignore/require word filters are read from it)
    :param quality_list: optional iterable of allowed quality values; results
        outside it are rejected
    :return: the chosen result object, or None if every result was rejected
    """
    logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)

    # find the best result for the current episode
    best_result = None
    for cur_result in results:
        logger.log(u'Quality is %s for %s' % (Quality.qualityStrings[cur_result.quality], cur_result.name))

        # anime shows can restrict results to white-listed release groups
        if show.is_anime and not show.release_groups.is_valid(cur_result):
            continue

        if quality_list and cur_result.quality not in quality_list:
            logger.log(u'%s is an unwanted quality, rejecting it' % cur_result.name, logger.DEBUG)
            continue

        # use the shared helper rather than duplicating the ignore/require
        # word-list logic inline (keeps this in sync with pass_show_wordlist_checks)
        if not pass_show_wordlist_checks(cur_result.name, show):
            continue

        cur_size = getattr(cur_result, 'size', None)
        if sickbeard.USE_FAILED_DOWNLOADS and None is not cur_size and failed_history.hasFailed(
                cur_result.name, cur_size, cur_result.provider.name):
            logger.log(u'%s has previously failed, rejecting it' % cur_result.name)
            continue

        # prefer higher known quality; break quality ties with proper/repack,
        # anime version tags, non-internal, and x264-over-xvid heuristics
        if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN:
            best_result = cur_result
        elif best_result.quality == cur_result.quality:
            if re.search('(?i)(proper|repack)', cur_result.name) or \
                    show.is_anime and re.search('(?i)(v1|v2|v3|v4|v5)', cur_result.name):
                best_result = cur_result
            elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
                best_result = cur_result
            elif 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower():
                logger.log(u'Preferring %s (x264 over xvid)' % cur_result.name)
                best_result = cur_result

    if best_result:
        logger.log(u'Picked %s as the best' % best_result.name, logger.DEBUG)
    else:
        logger.log(u'No result picked.', logger.DEBUG)

    return best_result
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    """Search all active providers for proper releases and verify them.

    :param aired_since_shows: date to search propers from
    :param recent_shows: list of indexer ids of recently aired shows
    :param recent_anime: list of indexer ids of recently aired anime shows
    :return: list of verified proper objects ready to be snatched
    """
    propers = {}

    # for each provider get a list of proper release candidates
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                      anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
        np = NameParser(False, try_scene_exceptions=True)
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    parse_result = np.parse(x.title)
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except Exception:
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and sort it, newest first
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    verified_propers = []
    np = NameParser(False, try_scene_exceptions=True)
    for cur_proper in sorted_propers:
        # BUGFIX: re-parse this proper's own name. The previous code reused
        # `parse_result` left over from the provider search loop above, which
        # belonged to whichever release happened to be parsed last — not to
        # cur_proper — so season/episode/show data could come from the wrong release.
        try:
            parse_result = np.parse(cur_proper.name)
        except Exception:
            continue

        # set the indexerid/indexer in the db to the show's
        cur_proper.indexerid = parse_result.show.indexerid
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                   [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            my_db = db.DBConnection()
            sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])

            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(u'Skipping proper from release group: %s, does not match existing release group: %s' %
                           (cur_proper.release_group, old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
            logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers
def pick_best_result(results, show, quality_list=None, filter_rls=False):
    """Pick the single best search result, optionally applying scene/nuke filters.

    :param results: list of search result objects (assumed to share one provider
        when filter_rls is set — only results[0].provider is inspected)
    :param show: show object (anime white-listing, ignore/require words)
    :param quality_list: optional iterable of allowed quality values
    :param filter_rls: falsy to skip scene filtering; otherwise a string task
        name — when it contains 'RECENT' the looser recent-search scene rules
        apply (NOTE(review): a plain True here would raise inside the try and
        simply disable filtering via the except — presumably intentional)
    :return: best result, a scene-filter fallback result, or None
    """
    logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)

    # find the best result for the current episode
    best_result = None
    # best candidate that failed only the scene/nuke filter; used if nothing passes
    best_fallback_result = None
    # provider scene/nuke filter switches, all off unless filter_rls supplies them
    scene_only = scene_or_contain = scene_loose = scene_loose_active = scene_rej_nuked = scene_nuked_active = False
    if filter_rls:
        try:
            provider = getattr(results[0], 'provider', None)
            scene_only = getattr(provider, 'scene_only', False)
            scene_or_contain = getattr(provider, 'scene_or_contain', '')
            recent_task = 'RECENT' in filter_rls
            scene_loose = getattr(provider, 'scene_loose', False) and recent_task
            scene_loose_active = getattr(provider, 'scene_loose_active', False) and not recent_task
            scene_rej_nuked = getattr(provider, 'scene_rej_nuked', False)
            scene_nuked_active = getattr(provider, 'scene_nuked_active', False) and not recent_task
        except (StandardError, Exception):
            # any problem reading provider flags disables scene filtering entirely
            filter_rls = False

    addendum = ''
    for cur_result in results:
        # anime shows can restrict results to white-listed release groups
        if show.is_anime and not show.release_groups.is_valid(cur_result):
            continue

        if quality_list and cur_result.quality not in quality_list:
            logger.log(u'Rejecting unwanted quality %s for [%s]' % (
                Quality.qualityStrings[cur_result.quality], cur_result.name), logger.DEBUG)
            continue

        # ignore/require word filters
        if not pass_show_wordlist_checks(cur_result.name, show):
            continue

        cur_size = getattr(cur_result, 'size', None)
        if sickbeard.USE_FAILED_DOWNLOADS and None is not cur_size and failed_history.has_failed(
                cur_result.name, cur_size, cur_result.provider.name):
            logger.log(u'Rejecting previously failed [%s]' % cur_result.name)
            continue

        # scene release / nuke filtering (skipped entirely for anime shows)
        if filter_rls and any([scene_only, scene_loose, scene_loose_active, scene_rej_nuked, scene_nuked_active]):
            if show.is_anime:
                addendum = u'anime (skipping scene/nuke filter) '
            else:
                # does the title match the provider's 'or contain' pattern?
                scene_contains = False
                if scene_only and scene_or_contain:
                    re_extras = dict(re_prefix='.*', re_suffix='.*')
                    r = show_name_helpers.contains_any(cur_result.name, scene_or_contain, **re_extras)
                    if None is not r and r:
                        scene_contains = True

                if scene_contains and not scene_rej_nuked:
                    logger.log(u'Considering title match to \'or contain\' [%s]' % cur_result.name, logger.DEBUG)
                    reject = False
                else:
                    # reject is a string (nuke reason), True (not scene listed), or False
                    reject, url = can_reject(cur_result.name)
                    if reject:
                        if isinstance(reject, basestring):
                            # nuked release: drop, keep as fallback, or accept per flags
                            if scene_rej_nuked and not scene_nuked_active:
                                logger.log(u'Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                            elif scene_nuked_active:
                                best_fallback_result = best_candidate(best_fallback_result, cur_result)
                            else:
                                logger.log(u'Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                                reject = False
                        elif scene_contains or any([scene_loose, scene_loose_active]):
                            # not scene listed, but loose rules keep it as a fallback
                            best_fallback_result = best_candidate(best_fallback_result, cur_result)
                        else:
                            logger.log(u'Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)

                if reject:
                    continue

        best_result = best_candidate(best_result, cur_result)

    if best_result and scene_only and not show.is_anime:
        addendum = u'scene release filtered '
    elif not best_result and best_fallback_result:
        # nothing passed the scene filter; fall back to the best non-scene candidate
        addendum = u'non scene release filtered '
        best_result = best_fallback_result

    if best_result:
        logger.log(u'Picked as the best %s[%s]' % (addendum, best_result.name), logger.DEBUG)
    else:
        logger.log(u'No result picked.', logger.DEBUG)

    return best_result
def _download_propers(proper_list):
    """Snatch the Propers from proper_list, highest proper level first.

    Repeats until a pass makes no progress: each round verifies the remaining
    candidates (one per episode, highest proper_level wins), applies the
    provider's scene/nuke filter, then attempts to snatch each survivor.
    Already-consumed or already-downloaded episodes are excluded from later
    rounds, so a failed/rejected Proper lets an alternative name be tried.

    :param proper_list: list of Proper candidate objects
    """
    verified_propers = True
    consumed_proper = []
    downloaded_epid = set()

    _epid = operator.attrgetter('indexerid', 'indexer', 'season', 'episode')

    while verified_propers:
        verified_propers = set()

        # get verified list; sort the list of unique Propers for highest proper_level, newest first
        candidates = [p for p in proper_list
                      # allows Proper to fail or be rejected and another to be tried (with a different name)
                      if _epid(p) not in downloaded_epid and p not in consumed_proper]
        for cur_proper in sorted(candidates, key=operator.attrgetter('properlevel', 'date'), reverse=True):

            epid = _epid(cur_proper)

            # if the show is in our list and there hasn't been a Proper already added for that particular episode
            # then add it to our list of Propers
            if epid not in {_epid(vp) for vp in verified_propers}:
                logger.log('Proper may be useful [%s]' % cur_proper.name)
                verified_propers.add(cur_proper)
            else:
                # use Proper with the highest level.
                # (was a side-effecting map(lambda ...) — a plain comprehension is
                # equivalent here and also safe under Python 3's lazy map)
                remove_propers = {p for p in verified_propers
                                  if epid == _epid(p) and cur_proper.proper_level > p.proper_level}

                if remove_propers:
                    verified_propers -= remove_propers
                    logger.log('A more useful Proper [%s]' % cur_proper.name)
                    verified_propers.add(cur_proper)

        for cur_proper in list(verified_propers):
            consumed_proper += [cur_proper]

            # scene release checking
            scene_only = getattr(cur_proper.provider, 'scene_only', False)
            scene_rej_nuked = getattr(cur_proper.provider, 'scene_rej_nuked', False)
            if any([scene_only, scene_rej_nuked]) and not cur_proper.parsed_show.is_anime:
                scene_or_contain = getattr(cur_proper.provider, 'scene_or_contain', '')
                scene_contains = False
                if scene_only and scene_or_contain:
                    re_extras = dict(re_prefix='.*', re_suffix='.*')
                    r = show_name_helpers.contains_any(cur_proper.name, scene_or_contain, **re_extras)
                    if None is not r and r:
                        scene_contains = True

                if scene_contains and not scene_rej_nuked:
                    reject = False
                else:
                    reject, url = search.can_reject(cur_proper.name)
                    if reject:
                        if isinstance(reject, basestring):
                            # a string reject value carries the nuke reason
                            if scene_rej_nuked:
                                logger.log('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                            else:
                                logger.log('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                                reject = False
                        elif scene_contains:
                            reject = False
                        else:
                            logger.log('Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)

                if reject:
                    continue

            # make the result object
            ep_obj = cur_proper.parsed_show.getEpisode(cur_proper.season, cur_proper.episode)
            result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
            if None is result:
                continue
            result.name = cur_proper.name
            result.quality = cur_proper.quality
            result.version = cur_proper.version
            result.properlevel = cur_proper.proper_level
            result.is_repack = cur_proper.is_repack
            result.puid = cur_proper.puid

            # snatch it
            if search.snatch_episode(result, SNATCHED_PROPER):
                downloaded_epid.add(_epid(cur_proper))
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
    """Collect and verify Proper/Repack release candidates from providers.

    :param aired_since_shows: date to search Propers from
    :param recent_shows: list of (indexer, indexerid) tuples of recent shows
    :param recent_anime: list of (indexer, indexerid) tuples of recent anime
    :param proper_list: optional dict of provider_id -> prefetched Propers;
        when given, provider searching is skipped and these are verified instead
    :return: list of verified Proper objects, deduplicated by generic name
    """
    propers = {}

    my_db = db.DBConnection()
    # for each provider get a list of arbitrary Propers
    orig_thread_name = threading.currentThread().name
    providers = filter(lambda p: p.is_active(), sickbeard.providers.sortedProviderList())
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        if None is not proper_list:
            # prefetched mode: look up this provider's candidates, skip searching
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)

            logger.log('Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log('Authentication error: %s' % ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the Proper to the list
        for cur_proper in found_propers:
            name = _generic_name(cur_proper.name)
            if name in propers:
                continue

            try:
                np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show,
                                indexer_lookup=False)
                parse_result = np.parse(cur_proper.name)
            # NOTE(review): Exception in this tuple makes the catch fully broad —
            # any parse failure silently skips the candidate
            except (InvalidNameException, InvalidShowException, Exception):
                continue

            # get the show object
            cur_proper.parsed_show = (cur_proper.parsed_show
                                      or helpers.findCertainShow(sickbeard.showList, parse_result.show.indexerid))
            if None is cur_proper.parsed_show:
                logger.log('Skip download; cannot find show with indexerid [%s]' % cur_proper.indexerid,
                           logger.ERROR)
                continue

            cur_proper.indexer = cur_proper.parsed_show.indexer
            cur_proper.indexerid = cur_proper.parsed_show.indexerid

            # only Propers for shows that recently aired are of interest
            if not (-1 != cur_proper.indexerid and parse_result.series_name and parse_result.episode_numbers
                    and (cur_proper.indexer, cur_proper.indexerid) in recent_shows + recent_anime):
                continue

            # only get anime Proper if it has release group and version
            if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
                logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
                logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG)
                continue

            # show-level ignore/require word filters
            re_x = dict(re_prefix='.*', re_suffix='.*')
            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_ignore_words,
                                                    **re_x)
            if None is not result and result:
                logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG)
                continue

            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_require_words,
                                                    **re_x)
            if None is not result and not result:
                logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            cur_size = getattr(cur_proper, 'size', None)
            if failed_history.has_failed(cur_proper.name, cur_size, cur_provider.name):
                continue

            cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
            cur_proper.episode = parse_result.episode_numbers[0]

            # check if we actually want this Proper (if it's the right quality)
            sql_results = my_db.select(
                'SELECT release_group, status, version, release_name'
                ' FROM tv_episodes'
                ' WHERE showid = ? AND indexer = ? AND season = ? AND episode = ?'
                ' LIMIT 1',
                [cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
            if not sql_results:
                continue

            # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
            # check if we want this release: same quality as current, current has correct status
            # restrict other release group releases to Proper's
            old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
            cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
            cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
                parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime,
                check_is_repack=True)
            cur_proper.proper_level = cur_proper.properlevel    # local non global value
            old_release_group = sql_results[0]['release_group']
            try:
                same_release_group = parse_result.release_group.lower() == old_release_group.lower()
            except (StandardError, Exception):
                # either group may be None; fall back to a plain comparison
                same_release_group = parse_result.release_group == old_release_group
            if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                    or cur_proper.quality != old_quality \
                    or (cur_proper.is_repack and not same_release_group):
                continue

            np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
            try:
                extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
            except (StandardError, Exception):
                extra_info = None
            # don't take Proper of the same level we already downloaded
            old_proper_level, old_extra_no_name, old_name = \
                get_old_proper_level(cur_proper.parsed_show, cur_proper.indexer, cur_proper.indexerid,
                                     cur_proper.season, parse_result.episode_numbers,
                                     old_status, cur_proper.quality, extra_info,
                                     parse_result.version, parse_result.is_anime)
            if cur_proper.proper_level <= old_proper_level:
                continue

            # treat SDTV with a web-dl style release name as a webdl too
            is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                      (old_quality == Quality.SDTV and
                       isinstance(sql_results[0]['release_name'], basestring) and
                       re.search(r'\Wweb.?(dl|rip|.([hx]\W?26[45]|hevc))\W', sql_results[0]['release_name'], re.I)))

            if is_web:
                old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
                old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
                new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
                if old_webdl_type != new_webdl_type:
                    logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
                               % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                    continue

            # for webdls, prevent Propers from different groups
            log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
                           % (parse_result.release_group, old_release_group, cur_proper.name)
            if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

            # check if we actually want this Proper (if it's the right release group and a higher version)
            if parse_result.is_anime:
                old_version = int(sql_results[0]['version'])
                if not (-1 < old_version < parse_result.version):
                    continue
                if not same_release_group:
                    logger.log(log_same_grp, logger.DEBUG)
                    continue
                found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
            else:
                found_msg = 'Found Proper [%s]' % cur_proper.name

            # make sure the episode has been downloaded before
            history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
            history_results = my_db.select(
                'SELECT resource FROM history'
                ' WHERE showid = ?'
                ' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
                ' AND (%s)' % ' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]),
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
                 history_limit.strftime(history.dateFormat)])

            # skip if the episode has never downloaded, because a previous quality is required to match the Proper
            if not len(history_results):
                logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            # make sure that none of the existing history downloads are the same Proper as the download candidate
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(
                cur_proper.name, cur_proper.parsed_show.is_anime))
            is_same = False
            for hitem in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
                        ek.ek(os.path.basename, hitem['resource']))):
                    is_same = True
                    break
            if is_same:
                logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
                continue

            logger.log(found_msg, logger.DEBUG)

            # finish populating the Proper instance
            # cur_proper.show = cur_proper.parsed_show.indexerid
            cur_proper.provider = cur_provider
            cur_proper.extra_info = parse_result.extra_info
            cur_proper.extra_info_no_name = parse_result.extra_info_no_name
            cur_proper.release_group = parse_result.release_group
            cur_proper.is_anime = parse_result.is_anime
            cur_proper.version = parse_result.version

            propers[name] = cur_proper

        cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)

    return propers.values()
def call_contains_any(name, csv_words):
    """Check `name` against `csv_words` with wildcard padding.

    Returns False when the helper yields None (no word list), otherwise the
    helper's match value.
    """
    match = show_name_helpers.contains_any(name, csv_words, re_prefix='.*', re_suffix='.*')
    if match is None:
        return False
    return match
def pick_best_result(results, show, quality_list=None, filter_rls=False):
    """Pick the single best search result, optionally applying scene/nuke filters.

    :param results: list of search result objects to choose between
    :param show: show object (anime white-listing, ignore/require words)
    :param quality_list: optional iterable of allowed quality values
    :param filter_rls: falsy to skip scene filtering, else a task-name string;
        'RECENT' in the name selects the looser recent-search scene rules
    :return: the chosen result, a scene-filter fallback, or None
    """
    logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)

    top_pick = None        # best candidate that passed every filter
    fallback_pick = None   # best candidate that failed only the scene filter

    # provider scene/nuke switches; all stay off unless filter_rls supplies them
    scene_only = scene_or_contain = scene_loose = scene_loose_active = scene_rej_nuked = scene_nuked_active = False
    if filter_rls:
        try:
            first_provider = getattr(results[0], 'provider', None)
            scene_only = getattr(first_provider, 'scene_only', False)
            scene_or_contain = getattr(first_provider, 'scene_or_contain', '')
            is_recent = 'RECENT' in filter_rls
            scene_loose = getattr(first_provider, 'scene_loose', False) and is_recent
            scene_loose_active = getattr(first_provider, 'scene_loose_active', False) and not is_recent
            scene_rej_nuked = getattr(first_provider, 'scene_rej_nuked', False)
            scene_nuked_active = getattr(first_provider, 'scene_nuked_active', False) and not is_recent
        except (StandardError, Exception):
            # any trouble reading provider flags disables scene filtering
            filter_rls = False

    note = ''
    for candidate in results:
        if show.is_anime and not show.release_groups.is_valid(candidate):
            continue

        if quality_list and candidate.quality not in quality_list:
            logger.log(u'Rejecting unwanted quality %s for [%s]' % (
                Quality.qualityStrings[candidate.quality], candidate.name), logger.DEBUG)
            continue

        if not pass_show_wordlist_checks(candidate.name, show):
            continue

        candidate_size = getattr(candidate, 'size', None)
        if sickbeard.USE_FAILED_DOWNLOADS and None is not candidate_size and failed_history.has_failed(
                candidate.name, candidate_size, candidate.provider.name):
            logger.log(u'Rejecting previously failed [%s]' % candidate.name)
            continue

        if filter_rls and (scene_only or scene_loose or scene_loose_active
                           or scene_rej_nuked or scene_nuked_active):
            if show.is_anime:
                note = u'anime (skipping scene/nuke filter) '
            else:
                # does the title match the provider's 'or contain' pattern?
                title_match = False
                if scene_only and scene_or_contain:
                    hit = show_name_helpers.contains_any(
                        candidate.name, scene_or_contain, re_prefix='.*', re_suffix='.*')
                    title_match = bool(hit)

                if title_match and not scene_rej_nuked:
                    logger.log(u'Considering title match to \'or contain\' [%s]' % candidate.name, logger.DEBUG)
                    rejected = False
                else:
                    rejected, nuke_url = can_reject(candidate.name)
                    if rejected:
                        if isinstance(rejected, basestring):
                            # a string reject value carries the nuke reason
                            if scene_rej_nuked and not scene_nuked_active:
                                logger.log(u'Rejecting nuked release. Nuke reason [%s] source [%s]'
                                           % (rejected, nuke_url), logger.DEBUG)
                            elif scene_nuked_active:
                                fallback_pick = best_candidate(fallback_pick, candidate)
                            else:
                                logger.log(u'Considering nuked release. Nuke reason [%s] source [%s]'
                                           % (rejected, nuke_url), logger.DEBUG)
                                rejected = False
                        elif title_match or scene_loose or scene_loose_active:
                            fallback_pick = best_candidate(fallback_pick, candidate)
                        else:
                            logger.log(u'Rejecting as not scene release listed at any [%s]' % nuke_url, logger.DEBUG)

                if rejected:
                    continue

        top_pick = best_candidate(top_pick, candidate)

    if top_pick and scene_only and not show.is_anime:
        note = u'scene release filtered '
    elif not top_pick and fallback_pick:
        note = u'non scene release filtered '
        top_pick = fallback_pick

    if top_pick:
        logger.log(u'Picked as the best %s[%s]' % (note, top_pick.name), logger.DEBUG)
    else:
        logger.log(u'No result picked.', logger.DEBUG)

    return top_pick