def test_fov_repeat_file_names(self):
    """Parse the 'fov_repeat' sample names as .avi file names with a default parser."""
    parser = NameParser()
    self._test_names(parser, 'fov_repeat', lambda name: name + '.avi')
def test_standard_names(self):
    """Parse the 'standard' sample names with file-name parsing enabled."""
    parser = NameParser(True)
    self._test_names(parser, 'standard')
def test_fov_repeat_names(self):
    """Parse the 'fov_repeat' sample names with file-name parsing disabled."""
    parser = NameParser(False)
    self._test_names(parser, 'fov_repeat')
def test_no_season_multi_ep_file_names(self):
    """Parse the 'no_season_multi_ep' samples as .avi files against the test show."""
    parser = NameParser(showObj=self.show, validate_show=False)
    self._test_names(parser, 'no_season_multi_ep', lambda name: name + '.avi')
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    sample_ep = generate_sample_ep(multi, abd, sports, anime_type)

    # Render the pattern into a candidate file name (and dir when not file-only).
    candidate = sample_ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    candidate_dir = sample_ep.formatted_dir(pattern, multi)
    if not file_only:
        candidate = os.path.join(candidate_dir, candidate)

    if not candidate:
        sickrage.app.log.debug("Unable to create a name out of " + pattern)
        return False

    sickrage.app.log.debug("Trying to parse " + candidate)

    parser = NameParser(True, show_id=sample_ep.show.indexer_id, naming_pattern=True)
    try:
        parsed = parser.parse(candidate)
    except Exception:
        sickrage.app.log.debug("Unable to parse " + candidate + ", not valid")
        return False

    sickrage.app.log.debug("Parsed " + candidate + " into " + str(parsed))

    if abd or sports:
        # Air-by-date / sports patterns are validated on the air date alone.
        if parsed.air_date != sample_ep.airdate:
            sickrage.app.log.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
        return True

    if anime_type != 3:
        # Anime patterns validate on absolute numbering (when any was parsed).
        expected = [x.absolute_number for x in [sample_ep] + sample_ep.related_episodes]
        if len(parsed.ab_episode_numbers) and parsed.ab_episode_numbers != expected:
            sickrage.app.log.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
        return True

    # Standard patterns validate on season and episode numbers.
    if parsed.season_number != sample_ep.season:
        sickrage.app.log.debug("Season number incorrect in parsed episode, pattern isn't valid")
        return False
    if parsed.episode_numbers != [x.episode for x in [sample_ep] + sample_ep.related_episodes]:
        sickrage.app.log.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
        return False
    return True
def test_no_season_multi_ep_names(self):
    """Parse the 'no_season_multi_ep' samples against the test show (no file parsing)."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'no_season_multi_ep')
def test_fov_repeat_file_names(self):
    """Parse the 'fov_repeat' samples as .avi files against the test show."""
    parser = NameParser(showObj=self.show, validate_show=False)
    self._test_names(parser, 'fov_repeat', lambda name: name + '.avi')
def split_nzb_result(result):
    """
    Split a season-pack NZB search result into separate per-episode results.

    :param result: search result object (provides url, name, series id/provider, quality)
    :return: False upon failure, a list of per-episode NZB result objects otherwise
    """
    # Download the season NZB payload; any network failure aborts the split.
    try:
        url_data = WebSession().get(result.url, needBytes=True).text
    except Exception:
        sickrage.app.log.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season ep name
    try:
        parse_result = NameParser(False, series_id=result.series_id, series_provider_id=result.series_provider_id).parse(result.name)
    except InvalidNameException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up
    # A missing season number is treated as season 1.
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separate_nzbs, xmlns = getSeasonNZBs(result.name, url_data, season)

    result_list = []

    for newNZB in separate_nzbs:
        sickrage.app.log.debug("Split out {} from {}".format(newNZB, result.name))

        # parse the name
        try:
            parse_result = NameParser(False, series_id=result.series_id, series_provider_id=result.series_provider_id).parse(newNZB)
        except InvalidNameException:
            sickrage.app.log.debug("Unable to parse the filename {} into a valid episode".format(newNZB))
            return False
        except InvalidShowException:
            sickrage.app.log.debug("Unable to parse the filename {} into a valid show".format(newNZB))
            return False

        # make sure the result is sane: the split entry must belong to the same
        # season we extracted (None is only acceptable when season defaulted to 1)
        if (parse_result.season_number is not None and parse_result.season_number != season) or (parse_result.season_number is None and season != 1):
            sickrage.app.log.warning("Found {} inside {} but it doesn't seem to belong to the same season, ignoring it".format(newNZB, result.name))
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.app.log.warning("Found {} inside {} but it doesn't seem to be a valid episode NZB, ignoring it".format(newNZB, result.name))
            continue

        # Skip the entry entirely if any contained episode is unwanted at this quality.
        want_ep = True
        for epNo in parse_result.episode_numbers:
            show_object = find_show(parse_result.series_id, parse_result.series_provider_id)
            if not show_object.want_episode(parse_result.season_number, epNo, result.quality):
                sickrage.app.log.info("Ignoring result {} because we don't want an episode that is {}".format(newNZB, result.quality.display_name))
                want_ep = False
                break

        if not want_ep:
            continue

        # make a result, copying provider/quality from the original season result
        cur_result = NZBDataSearchProviderResult(season, parse_result.episode_numbers)
        cur_result.name = newNZB
        cur_result.provider = result.provider
        cur_result.quality = result.quality
        cur_result.extraInfo = [createNZBString(separate_nzbs[newNZB], xmlns)]

        result_list.append(cur_result)

    return result_list
def find_search_results(self, show_id, season, episode, search_mode, manualSearch=False, downCurQuality=False, cacheOnly=False, session=None):
    """
    Search this provider for an episode, checking the cache first.

    :param show_id: Show identifier to search for
    :param season: Season number
    :param episode: Episode number
    :param search_mode: 'sponly' for season-pack searches, 'eponly' for single episodes
    :param manualSearch: True when triggered manually by the user
    :param downCurQuality: True to allow re-downloading the current quality
    :param cacheOnly: True to return only cached results without a live search
    :param session: Database session passed through to show lookups
    :return: Dict mapping episode number (or MULTI_EP_RESULT/SEASON_RESULT) to result lists
    """
    provider_results = {}
    item_list = []

    # NOTE(review): _check_auth is referenced without a call here — confirm it
    # is a property; a bound method would always be truthy.
    if not self._check_auth:
        return provider_results

    show_object = find_show(show_id, session=session)
    episode_object = show_object.get_episode(season, episode)

    # search cache for episode result
    provider_results = self.cache.search_cache(show_id, season, episode, manualSearch, downCurQuality)

    # check if this is a cache only search
    if cacheOnly:
        return provider_results

    search_strings = []
    if search_mode == 'sponly':
        # get season search results
        search_strings = self._get_season_search_strings(show_id, season, episode)
    elif search_mode == 'eponly':
        # get single episode search results
        search_strings = self._get_episode_search_strings(show_id, season, episode)

    for curString in search_strings:
        try:
            item_list += self.search(curString, show_id=show_id, season=season, episode=episode)
        except SAXParseException:
            # malformed provider feed — skip this search string
            continue

    # sort list by quality
    if item_list:
        # categorize the items into lists by quality
        items = defaultdict(list)
        for item in item_list:
            items[self.get_quality(item, anime=show_object.is_anime)].append(item)

        # temporarily remove the list of items with unknown quality
        unknown_items = items.pop(Quality.UNKNOWN, [])

        # make a generator to sort the remaining items by descending quality
        items_list = (items[quality] for quality in sorted(items, reverse=True))

        # unpack all of the quality lists into a single sorted list
        items_list = list(itertools.chain(*items_list))

        # extend the list with the unknown qualities, now sorted at the bottom of the list
        items_list.extend(unknown_items)

        # BUGFIX: actually use the quality-sorted list; previously items_list
        # was built and then discarded while the loop below iterated the
        # unsorted item_list.
        item_list = items_list

    # filter results
    for item in item_list:
        provider_result = self.getResult()
        provider_result.name, provider_result.url = self._get_title_and_url(item)

        # ignore invalid urls
        if not validate_url(provider_result.url) and not provider_result.url.startswith('magnet'):
            continue

        try:
            parse_result = NameParser(show_id=show_id).parse(provider_result.name)
        except (InvalidNameException, InvalidShowException) as e:
            sickrage.app.log.debug("{}".format(e))
            continue

        provider_result.show_id = parse_result.indexer_id
        provider_result.quality = parse_result.quality
        provider_result.release_group = parse_result.release_group
        provider_result.version = parse_result.version
        provider_result.size = self._get_size(item)
        provider_result.seeders, provider_result.leechers = self._get_result_stats(item)

        sickrage.app.log.debug("Adding item from search to cache: {}".format(provider_result.name))
        self.cache.add_cache_entry(provider_result.name, provider_result.url, provider_result.seeders, provider_result.leechers, provider_result.size)

        if not provider_result.show_id:
            continue

        provider_result_show_obj = find_show(provider_result.show_id, session=session)
        if not provider_result_show_obj:
            continue

        # date-based shows must parse as air-by-date results
        if not parse_result.is_air_by_date and (provider_result_show_obj.air_by_date or provider_result_show_obj.sports):
            sickrage.app.log.debug("This is supposed to be a date search but the result {} didn't parse as one, skipping it".format(provider_result.name))
            continue

        if search_mode == 'sponly':
            if len(parse_result.episode_numbers):
                sickrage.app.log.debug("This is supposed to be a season pack search but the result {} is not "
                                       "a valid season pack, skipping it".format(provider_result.name))
                continue
            elif parse_result.season_number != (episode_object.season, episode_object.scene_season)[show_object.is_scene]:
                sickrage.app.log.debug("This season result {} is for a season we are not searching for, skipping it".format(provider_result.name))
                continue
        else:
            # single-episode search: season and episode must both match
            # (scene numbering when the show uses it)
            if not all([parse_result.season_number is not None,
                        parse_result.episode_numbers,
                        parse_result.season_number == (episode_object.season, episode_object.scene_season)[show_object.is_scene],
                        (episode_object.episode, episode_object.scene_episode)[show_object.is_scene] in parse_result.episode_numbers]):
                sickrage.app.log.debug("The result {} doesn't seem to be a valid episode "
                                       "that we are trying to snatch, ignoring".format(provider_result.name))
                continue

        provider_result.season = int(parse_result.season_number)
        provider_result.episodes = list(map(int, parse_result.episode_numbers))

        # make sure we want the episode (iterate a copy since we mutate the list)
        for episode_number in provider_result.episodes.copy():
            if not provider_result_show_obj.want_episode(provider_result.season, episode_number, provider_result.quality, manualSearch, downCurQuality):
                sickrage.app.log.info("RESULT:[{}] QUALITY:[{}] IGNORED!".format(provider_result.name, Quality.qualityStrings[provider_result.quality]))
                if episode_number in provider_result.episodes:
                    provider_result.episodes.remove(episode_number)

        # detects if season pack and if not checks if we wanted any of the episodes
        if len(provider_result.episodes) != len(parse_result.episode_numbers):
            continue

        sickrage.app.log.debug(
            "FOUND RESULT:[{}] QUALITY:[{}] URL:[{}]".format(provider_result.name, Quality.qualityStrings[provider_result.quality], provider_result.url)
        )

        if len(provider_result.episodes) == 1:
            episode_number = provider_result.episodes[0]
            sickrage.app.log.debug("Single episode result.")
        elif len(provider_result.episodes) > 1:
            episode_number = MULTI_EP_RESULT
            sickrage.app.log.debug("Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers))
        else:
            episode_number = SEASON_RESULT
            sickrage.app.log.debug("Separating full season result to check for later")

        # CONSISTENCY FIX: normalize the key once; previously membership was
        # tested on episode_number but stored under int(episode_number).
        key = int(episode_number)
        if key not in provider_results:
            provider_results[key] = [provider_result]
        else:
            provider_results[key] += [provider_result]

    return provider_results
def test_season_only_file_names(self):
    """Parse the 'season_only' sample names as .avi file names with a default parser."""
    parser = NameParser()
    self._test_names(parser, 'season_only', lambda name: name + '.avi')
def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
    """
    Search this provider (cache first, then live) for the given episodes.

    :param show: Show object being searched for
    :param episodes: List of episode objects to search for
    :param search_mode: 'sponly' for season-pack searches, 'eponly' for single episodes
    :param manualSearch: True when triggered manually by the user
    :param downCurQuality: True to allow re-downloading the current quality
    :return: Dict mapping episode number (or MULTI_EP_RESULT/SEASON_RESULT) to
             result lists; None when the provider auth check fails
    """
    # NOTE(review): _checkAuth is referenced without a call here — confirm it
    # is a property; a bound method would always be truthy.
    if not self._checkAuth:
        return

    self.show = show

    results = {}
    itemList = []

    searched_scene_season = None
    for epObj in episodes:
        # search cache for episode result
        cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality)
        if cacheResult:
            if epObj.episode not in results:
                results[epObj.episode] = cacheResult
            else:
                results[epObj.episode].extend(cacheResult)

            # found result, search next episode
            continue

        # skip if season already searched
        if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        search_strings = []
        if len(episodes) > 1 and search_mode == 'sponly':
            # get season search results
            search_strings = self._get_season_search_strings(epObj)
        elif search_mode == 'eponly':
            # get single episode search results
            search_strings = self._get_episode_search_strings(epObj)

        # 'rid' search strings get one shot; fall back to string queries only
        # if the rid query returned nothing
        first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
        if first:
            sickrage.srCore.srLogger.debug('First search_string has rid')

        for curString in search_strings:
            itemList += self.search(curString, search_mode, len(episodes), epObj=epObj)
            if first:
                first = False
                if itemList:
                    sickrage.srCore.srLogger.debug(
                        'First search_string had rid, and returned results, skipping query by string')
                    break
                else:
                    sickrage.srCore.srLogger.debug(
                        'First search_string had rid, but returned no results, searching with string query')

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(itemList):
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        # best quality first, unknown quality appended at the end
        itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
        itemList += itemsUnknown or []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + title + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + title + " into a valid show")
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        # addCacheEntry marks results that don't match this search; they are
        # cached for later instead of being snatched now
        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            if search_mode == 'sponly':
                if len(parse_result.episode_numbers):
                    sickrage.srCore.srLogger.debug(
                        "This is supposed to be a season pack search but the result " + title +
                        " is not a valid season pack, skipping it")
                    addCacheEntry = True
                if len(parse_result.episode_numbers) and (
                        parse_result.season_number not in set([ep.season for ep in episodes])
                        or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    sickrage.srCore.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                    addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and parse_result.season_number and not [
                        ep for ep in episodes
                        if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    sickrage.srCore.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid season that we are trying to snatch, ignoring")
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and not [
                        ep for ep in episodes
                        if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    sickrage.srCore.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports shows: resolve season/episode from the air date
            if not parse_result.is_air_by_date:
                sickrage.srCore.srLogger.debug(
                    "This is supposed to be a date search but the result " + title +
                    " didn't parse as one, skipping it")
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal()
                sql_results = main_db.MainDB().select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    sickrage.srCore.srLogger.warning(
                        "Tried to look up the date for the episode " + title +
                        " but the database didn't give proper results, skipping it")
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            sickrage.srCore.srLogger.debug("Adding item from search to cache: " + title)
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality):
                wantEp = False
                break

        if not wantEp:
            sickrage.srCore.srLogger.info(
                "RESULT:[{}] QUALITY:[{}] IGNORED!".format(title, Quality.qualityStrings[quality]))
            continue

        sickrage.srCore.srLogger.debug("FOUND RESULT:[{}] URL:[{}]".format(title, url))

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        if len(epObj) == 1:
            epNum = epObj[0].episode
            sickrage.srCore.srLogger.debug("Single episode result.")
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            sickrage.srCore.srLogger.debug(
                "Separating multi-episode result to check for later - result contains episodes: " + str(
                    parse_result.episode_numbers))
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            sickrage.srCore.srLogger.debug("Separating full season result to check for later")

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        self.cache._getDB().mass_action(cl)
        del cl  # cleanup

    return results
def test_no_season_multi_ep_file_names(self):
    """Parse the 'no_season_multi_ep' samples as .avi file names with a default parser."""
    parser = NameParser()
    self._test_names(parser, 'no_season_multi_ep', lambda name: name + '.avi')
def test_no_season_general_file_names(self):
    """Parse the 'no_season_general' samples as .avi file names with a default parser."""
    parser = NameParser()
    self._test_names(parser, 'no_season_general', lambda name: name + '.avi')
def test_stupid_file_names(self):
    """Parse the 'stupid' sample names as .avi file names with a default parser."""
    parser = NameParser()
    self._test_names(parser, 'stupid', lambda name: name + '.avi')
def test_stupid_names(self):
    """Parse the 'stupid' sample names against the test show (no file parsing)."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'stupid')
def addCacheEntry(self, name, url, seeders, leechers, size):
    """
    Add a search result to the provider cache, skipping duplicates and invalid urls.

    :param name: Release name to parse and cache
    :param url: Result url (http(s) or magnet)
    :param seeders: Seeder count reported by the provider
    :param leechers: Leecher count reported by the provider
    :param size: Release size in bytes
    """
    # check for existing entry in cache (same url already stored)
    if len([
            x for x in sickrage.app.cache_db.get_many(
                'providers', self.providerID) if x['url'] == url
    ]):
        return

    # ignore invalid and private IP address urls
    # (magnet links are exempt from url validation)
    if not validate_url(url):
        if not url.startswith('magnet'):
            return
    elif is_ip_private(url.split(r'//')[-1].split(r'/')[0]):
        return

    try:
        # parse release name
        parse_result = NameParser(validate_show=sickrage.app.config.
                                  enable_rss_cache_valid_shows).parse(name)
        if parse_result.series_name and parse_result.quality != Quality.UNKNOWN:
            # a falsy season number (None or 0) defaults to season 1
            season = parse_result.season_number if parse_result.season_number else 1
            episodes = parse_result.episode_numbers

            if season and episodes:
                # store episodes as a seperated string, e.g. "|1|2|3|"
                episodeText = "|" + "|".join(map(str, episodes)) + "|"

                # get quality of release
                quality = parse_result.quality

                # get release group
                release_group = parse_result.release_group

                # get version
                version = parse_result.version

                dbData = {
                    '_t': 'providers',
                    'provider': self.providerID,
                    'name': name,
                    'season': season,
                    'episodes': episodeText,
                    'indexerid': parse_result.indexerid,
                    'url': url,
                    # record insert time as a unix timestamp
                    'time': int(time.mktime(
                        datetime.datetime.today().timetuple())),
                    'quality': quality,
                    'release_group': release_group,
                    'version': version,
                    'seeders': seeders,
                    'leechers': leechers,
                    'size': size
                }

                # add to internal database
                sickrage.app.cache_db.insert(dbData)

                # add to external provider cache database (best-effort;
                # failures are deliberately ignored)
                if sickrage.app.config.enable_api_providers_cache and not self.provider.private:
                    try:
                        ProviderCacheAPI().add(dbData)
                    except Exception:
                        pass

                sickrage.app.log.debug(
                    "SEARCH RESULT:[%s] ADDED TO CACHE!", name)
    except (InvalidShowException, InvalidNameException):
        # unparseable names are silently dropped from the cache
        pass
def test_no_season_general_names(self):
    """Parse the 'no_season_general' samples against the test show (no file parsing)."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'no_season_general')
def splitNZBResult(result):
    """
    Split result into seperate episodes

    :param result: search result object
    :return: False upon failure, a list of episode objects otherwise
    """
    # Download the season NZB payload; a None response aborts the split.
    urlData = sickrage.srCore.srWebSession.get(result.url, needBytes=True)
    if urlData is None:
        sickrage.srCore.srLogger.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season ep name
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        sickrage.srCore.srLogger.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.srCore.srLogger.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up
    # A missing season number is treated as season 1.
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:
        sickrage.srCore.srLogger.debug("Split out " + newNZB + " from " + result.name)

        # parse the name
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + newNZB + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + newNZB + " into a valid show")
            return False

        # make sure the result is sane: the split entry must belong to the same
        # season we extracted (None only acceptable when season defaulted to 1)
        if (parse_result.season_number is not None and parse_result.season_number != season) or (
                parse_result.season_number is None and season != 1):
            sickrage.srCore.srLogger.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it")
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.srCore.srLogger.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        # Skip the entry entirely if any contained episode is unwanted at this
        # quality. NOTE(review): extraInfo[0] appears to hold the show object
        # here (wantEpisode/getEpisode are called on it) — confirm.
        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
                sickrage.srCore.srLogger.info(
                    "Ignoring result " + newNZB + " because we don't want an episode that is " +
                    Quality.qualityStrings[result.quality])
                wantEp = False
                break

        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].getEpisode(season, curEp))

        # make a result, copying provider/quality from the original season result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
def test_season_only_names(self):
    """Parse the 'season_only' samples against the test show (no file parsing)."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'season_only')
def validateDir(path, dirName, nzbNameOriginal, failed, result):
    """
    Check if directory is valid for processing

    :param path: Path to use
    :param dirName: Directory to check
    :param nzbNameOriginal: Original NZB name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """
    # System/thumbnail folders that are never processable.
    IGNORED_FOLDERS = ['.AppleDouble', '.@__thumb', '@eaDir']
    folder_name = os.path.basename(dirName)
    if folder_name in IGNORED_FOLDERS:
        return False

    result.output += logHelper("Processing folder " + dirName, sickrage.srCore.srLogger.DEBUG)

    # Marker prefixes left by the download client indicate failure states.
    if folder_name.startswith('_FAILED_'):
        result.output += logHelper(
            "The directory name indicates it failed to extract.",
            sickrage.srCore.srLogger.DEBUG)
        failed = True
    elif folder_name.startswith('_UNDERSIZED_'):
        result.output += logHelper(
            "The directory name indicates that it was previously rejected for being undersized.",
            sickrage.srCore.srLogger.DEBUG)
        failed = True
    elif folder_name.upper().startswith('_UNPACK'):
        result.output += logHelper(
            "The directory name indicates that this release is in the process of being unpacked.",
            sickrage.srCore.srLogger.DEBUG)
        result.missedfiles.append(dirName + " : Being unpacked")
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal, result)
        result.missedfiles.append(dirName + " : Failed download")
        return False

    if is_hidden_folder(os.path.join(path, dirName)):
        result.output += logHelper("Ignoring hidden folder: " + dirName, sickrage.srCore.srLogger.DEBUG)
        result.missedfiles.append(dirName + " : Hidden folder")
        return False

    # make sure the dir isn't inside a show dir
    for dbData in [
            x['doc'] for x in MainDB().db.all('tv_shows', with_doc=True)
    ]:
        if dirName.lower().startswith(os.path.realpath(dbData["location"]).lower() + os.sep) or \
                dirName.lower() == os.path.realpath(dbData["location"]).lower():
            result.output += logHelper(
                "Cannot process an episode that's already been moved to its show dir, skipping " + dirName,
                sickrage.srCore.srLogger.WARNING)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for _, processdir, fileList in os.walk(os.path.join(path, dirName), topdown=False):
        allDirs += processdir
        allFiles += fileList

    videoFiles = [x for x in allFiles if isMediaFile(x)]
    allDirs.append(dirName)

    # check if the dir have at least one tv video file
    # (the first parseable video/dir/rar name makes the dir processable)
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    for proc_dir in allDirs:
        try:
            NameParser().parse(proc_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickrage.srCore.srConfig.UNPACK:
        # Search for packed release
        packedFiles = [x for x in allFiles if isRarFile(x)]

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    result.output += logHelper(
        dirName + " : No processable items found in folder",
        sickrage.srCore.srLogger.DEBUG)
    return False
def test_stupid_file_names(self):
    """Parse the 'stupid' samples as .avi files against the test show."""
    parser = NameParser(showObj=self.show, validate_show=False)
    self._test_names(parser, 'stupid', lambda name: name + '.avi')
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in (also used as a candidate release name)
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: True if file is already postprocessed, False if not
    :return:
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move:
    # a release already recorded under this dir name was handled before.
    if [
            x for x in MainDB().db.all('tv_episodes', with_doc=True)
            if x['doc']['release_name'] == dirName
    ]:
        return True

    # Same check against the file name without its extension.
    # BUGFIX: release_name is stored as a string, so compare against the bare
    # string — the original compared against a one-element list, which could
    # never match.
    if [
            x for x in MainDB().db.all('tv_episodes', with_doc=True)
            if x['doc']['release_name'] == videofile.rpartition('.')[0]
    ]:
        return True

    # Needed if we have downloaded the same episode @ different quality
    # But we need to make sure we check the history of the episode we're going to PP, and not others
    # NOTE(review): NameParser's first positional arg is normally a flag, not a
    # path — passing dirName here only acts as a truthy value; confirm intent.
    np = NameParser(dirName, tryIndexers=True)
    try:
        parse_result = np.parse(dirName)
    except Exception:  # narrowed from a bare except; a failed parse just means "no parse info"
        parse_result = False

    for h in [
            h['doc'] for h in MainDB().db.all('history', with_doc=True)
            if h['doc']['resource'].endswith(videofile)
    ]:
        for e in [
                e['doc'] for e in MainDB().db.get_many(
                    'tv_episodes', h['showid'], with_doc=True)
                if h['season'] == e['doc']['season']
                and h['episode'] == e['doc']['episode']
                and e['doc']['status'] in Quality.DOWNLOADED
        ]:
            # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
            if parse_result and (parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
                # BUGFIX: the original mangled this into a chained comparison
                # (e['season'] == int(season and e['episode']) == ...); compare
                # show, season and first episode number independently.
                if e['showid'] == int(parse_result.show.indexerid) \
                        and e['season'] == int(parse_result.season_number) \
                        and e['episode'] == int(parse_result.episode_numbers[0]):
                    return True
            else:
                return True

    return False
def test_season_only_file_names(self):
    """Parse the 'season_only' samples as .avi files against the test show."""
    parser = NameParser(showObj=self.show, validate_show=False)
    self._test_names(parser, 'season_only', lambda name: name + '.avi')
def test_anime_sxxexx_file_names(self):
    """Parse the anime 'anime_SxxExx' samples as .avi files against the test show."""
    parser = NameParser(showObj=self.show, validate_show=False)
    self._test_names(parser, 'anime_SxxExx', lambda name: name + '.avi')
def _getProperList(self):
    """
    Walk providers for propers

    Collects PROPER/REPACK releases from all enabled providers, parses and
    filters them, and returns the list of propers we actually want.
    """
    propers = {}

    # only look at releases from the last two days
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    origThreadName = threading.currentThread().getName()

    # for each provider get a list of the propers it can find
    for providerID, providerObj in sickrage.srCore.providersDict.sort(
            randomize=sickrage.srCore.srConfig.RANDOMIZE_PROVIDERS).items():
        # check provider type and provider is enabled
        if not sickrage.srCore.srConfig.USE_NZBS and providerObj.type in [
                NZBProvider.type, NewznabProvider.type
        ]:
            continue
        elif not sickrage.srCore.srConfig.USE_TORRENTS and providerObj.type in [
                TorrentProvider.type, TorrentRssProvider.type
        ]:
            continue
        elif not providerObj.isEnabled:
            continue

        # tag the thread name with the provider being searched
        threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

        sickrage.srCore.srLogger.info(
            "Searching for any new PROPER releases from " + providerObj.name)

        try:
            curPropers = providerObj.find_propers(search_date)
        except AuthException as e:
            sickrage.srCore.srLogger.debug(
                "Authentication error: {}".format(e.message))
            continue
        except Exception as e:
            sickrage.srCore.srLogger.debug(
                "Error while searching " + providerObj.name + ", skipping: {}".format(e.message))
            sickrage.srCore.srLogger.debug(traceback.format_exc())
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            # only keep names that actually contain a proper/repack token
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                sickrage.srCore.srLogger.debug(
                    'findPropers returned a non-proper, we have caught and skipped it.')
                continue

            name = self._genericName(x.name)
            if not name in propers:
                sickrage.srCore.srLogger.debug("Found new proper: " + x.name)
                x.provider = providerObj
                propers[name] = x

    # restore the original thread name
    threading.currentThread().setName(origThreadName)

    # take the list of unique propers and get it sorted by date (newest first)
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug(
                "Unable to parse the filename " + curProper.name + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srCore.srLogger.debug(
                "Unable to parse the filename " + curProper.name + " into a valid show")
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            sickrage.srCore.srLogger.debug(
                "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
            continue

        sickrage.srCore.srLogger.debug(
            "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            sickrage.srCore.srLogger.debug(
                "Proper " + curProper.name + " were rejected by our release filters.")
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                sickrage.srCore.srLogger.debug(
                    "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                continue

        # check if we actually want this proper (if it's the right quality)
        # NOTE(review): mainDB is called here (mainDB()) but accessed as an
        # attribute (mainDB.db) in the anime block below — one of the two is
        # likely wrong; confirm which spelling the rest of the codebase uses.
        dbData = [
            x['doc'] for x in sickrage.srCore.mainDB().db.get_many(
                'tv_episodes', bestResult.indexerid, with_doc=True)
            if x['doc']['season'] == bestResult.season
            and x['doc']['episode'] == bestResult.episode
        ]

        if not dbData:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(
            int(dbData[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            dbData = [
                x['doc'] for x in sickrage.srCore.mainDB.db.get_many(
                    'tv_episodes', bestResult.indexerid, with_doc=True)
                if x['doc']['season'] == bestResult.season
                and x['doc']['episode'] == bestResult.episode
            ]

            oldVersion = int(dbData[0]["version"])
            oldRelease_group = (dbData[0]["release_group"])

            # only accept a strictly newer version of a previously-versioned release
            if -1 < oldVersion < bestResult.version:
                sickrage.srCore.srLogger.info("Found new anime v" + str(bestResult.version) +
                                              " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                sickrage.srCore.srLogger.info(
                    "Skipping proper from release group: " + bestResult.release_group +
                    ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (
                bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            sickrage.srCore.srLogger.info("Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def test_standard_names(self):
    """Parse the 'standard' samples against the test show with file-name parsing enabled."""
    parser = NameParser(True, showObj=self.show, validate_show=False)
    self._test_names(parser, 'standard')
def test_standard_repeat_names(self):
    """Parse the 'standard_repeat' sample names with file-name parsing disabled."""
    parser = NameParser(False)
    self._test_names(parser, 'standard_repeat')
def test_fov_repeat_names(self):
    """Parse the 'fov_repeat' samples against the test show (no file parsing)."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'fov_repeat')
def test_stupid_names(self):
    """Parse the 'stupid' sample names with file-name parsing disabled."""
    parser = NameParser(False)
    self._test_names(parser, 'stupid')
def test_season_only_names(self):
    """Parse the 'season_only' sample names with file-name parsing disabled."""
    parser = NameParser(False)
    self._test_names(parser, 'season_only')