def _test_unicode(self, name, result):
    """Parse a unicode release name and compare the extra_info field against the expected result."""
    parser = NameParser(True, showObj=self.show)
    parsed = parser.parse(name)

    # round-tripping through str()/repr() shouldn't raise an exception on unicode names
    repr(str(parsed))

    self.assertEqual(parsed.extra_info, result.extra_info)
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Build an INSERT statement (plus args) caching a parsed release, or None if it can't be parsed."""
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

        try:
            parse_result = NameParser(showObj=showObj).parse(name)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + name + " into a valid episode")
            return None
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + name + " into a valid show")
            return None

    if not parse_result or not parse_result.series_name:
        return None

    # if we made it this far then lets add the parsed result to cache for usage later on
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a pipe-separated string, e.g. "|1|2|"
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # current timestamp for the cache row
        curTimestamp = int(time.mktime(datetime.today().timetuple()))

        # release metadata taken from the parse result
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        sickrage.srLogger.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality,
             release_group, version]]
def _test_name(name): np = NameParser(True) try: parse_result = np.parse(name) except (InvalidNameException, InvalidShowException): return True if VERBOSE: print 'Actual: ', parse_result.which_regex, parse_result return False
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: True if file is already postprocessed, False if not
    :return:
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    if main_db.MainDB().select(
            "SELECT * FROM tv_episodes WHERE release_name = ?", [dirName]):
        return True
    else:
        if main_db.MainDB().select(
                "SELECT * FROM tv_episodes WHERE release_name = ?",
                [videofile.rpartition('.')[0]]):
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName, tryIndexers=True)
        try:
            parse_result = np.parse(dirName)
        # FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt;
        # only parser failures are expected here.
        except (InvalidNameException, InvalidShowException):
            parse_result = False

        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"  # This part is always the same
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"

        # FIX: bind parsed values as parameters instead of concatenating them into the SQL text.
        params = []

        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (parse_result.show.indexerid and
                             parse_result.episode_numbers and parse_result.season_number):
            search_sql += " and tv_episodes.showid = ? and tv_episodes.season = ? and tv_episodes.episode = ?"
            params += [parse_result.show.indexerid,
                       parse_result.season_number,
                       parse_result.episode_numbers[0]]

        search_sql += " and tv_episodes.status IN (" + ",".join(
            [str(x) for x in Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        params.append('%' + videofile)

        if main_db.MainDB().select(search_sql, params):
            return True

    return False
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    # render a sample filename/path from the pattern, then try to parse it back
    new_name = ep.formatted_filename(pattern, multi, anime_type) + ".ext"
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = os.path.join(new_path, new_name)

    if not new_name:
        sickrage.srLogger.debug("Unable to create a name out of " + pattern)
        return False

    sickrage.srLogger.debug("Trying to parse " + new_name)

    parser = NameParser(True, showObj=ep.show, naming_pattern=True)
    try:
        parsed = parser.parse(new_name)
    except Exception:
        sickrage.srLogger.debug("Unable to parse " + new_name + ", not valid")
        return False

    sickrage.srLogger.debug("Parsed " + new_name + " into " + str(parsed))

    if abd or sports:
        # air-by-date / sports patterns are validated by air date only
        if parsed.air_date != ep.airdate:
            sickrage.srLogger.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        # anime patterns must round-trip the absolute episode numbers
        expected_abs = [x.absolute_number for x in [ep] + ep.relatedEps]
        if len(parsed.ab_episode_numbers) and parsed.ab_episode_numbers != expected_abs:
            sickrage.srLogger.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
    else:
        # regular patterns must round-trip season and episode numbers
        if parsed.season_number != ep.season:
            sickrage.srLogger.debug("Season number incorrect in parsed episode, pattern isn't valid")
            return False
        if parsed.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            sickrage.srLogger.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
            return False

    return True
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: True if file is already postprocessed, False if not
    :return:
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    if main_db.MainDB().select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName]):
        return True
    else:
        if main_db.MainDB().select("SELECT * FROM tv_episodes WHERE release_name = ?",
                                   [videofile.rpartition(".")[0]]):
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName, tryIndexers=True)
        try:
            parse_result = np.parse(dirName)
        # FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt;
        # only parser failures are expected here.
        except (InvalidNameException, InvalidShowException):
            parse_result = False

        # This part is always the same
        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"

        # FIX: bind parsed values as parameters instead of concatenating them into the SQL text.
        params = []

        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (
                parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number
        ):
            search_sql += " and tv_episodes.showid = ? and tv_episodes.season = ? and tv_episodes.episode = ?"
            params += [parse_result.show.indexerid,
                       parse_result.season_number,
                       parse_result.episode_numbers[0]]

        search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        params.append("%" + videofile)

        if main_db.MainDB().select(search_sql, params):
            return True

    return False
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Return an [sql, args] pair that caches the parsed release for this provider, or None."""
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + name + " into a valid episode")
            return None
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + name + " into a valid show")
            return None

    if not parse_result or not parse_result.series_name:
        return None

    # if we made it this far then lets add the parsed result to cache for usage later on
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a pipe-separated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.today().timetuple()))

        # get quality, release group and version of release
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        sickrage.srLogger.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality,
             release_group, version]]
def _is_season_pack(name):
    """
    Return True if `name` parses to a release covering every episode of its season.

    :param name: release name to check
    :return: True if the parsed episode count equals the season's episode count, False otherwise
    """
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        sickrage.srLogger.debug("Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        sickrage.srLogger.debug("Unable to parse the filename %s into a valid show" % name)
        return False

    sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = main_db.MainDB().select(sql_selection,
                                       [parse_result.show.indexerid, parse_result.season_number])

    # FIX: previously fell off the end and implicitly returned None when the counts
    # differed; return an explicit bool either way (None was falsy but misleading).
    return int(episodes[0][b'count']) == len(parse_result.episode_numbers)
def process(self):
    """
    Do the actual work

    :return: True
    """
    self._log("Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_names.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log("Warning: unable to find a valid release name.", sickrage.srLogger.WARNING)
        raise FailedPostProcessingFailedException()

    try:
        parser = NameParser(False)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log("Error: release name is invalid: " + releaseName, sickrage.srLogger.DEBUG)
        raise FailedPostProcessingFailedException()
    except InvalidShowException:
        self._log("Error: unable to parse release name " + releaseName + " into a valid show",
                  sickrage.srLogger.DEBUG)
        raise FailedPostProcessingFailedException()

    # dump what the parser extracted, one field per line
    sickrage.srLogger.debug("name_parser info: ")
    sickrage.srLogger.debug(" - " + str(parsed.series_name))
    sickrage.srLogger.debug(" - " + str(parsed.season_number))
    sickrage.srLogger.debug(" - " + str(parsed.episode_numbers))
    sickrage.srLogger.debug(" - " + str(parsed.extra_info))
    sickrage.srLogger.debug(" - " + str(parsed.release_group))
    sickrage.srLogger.debug(" - " + str(parsed.air_date))

    # queue a failed-download search for each parsed episode
    for episode in parsed.episode_numbers:
        sickrage.srCore.SEARCHQUEUE.add_item(
            FailedQueueItem(parsed.show,
                            [parsed.show.getEpisode(parsed.season_number, episode)]))

    return True
def _is_season_pack(name):
    """
    Return True if `name` parses to a release covering every episode of its season.

    :param name: release name to check
    :return: True if the parsed episode count equals the season's episode count, False otherwise
    """
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        sickrage.srLogger.debug(
            "Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        sickrage.srLogger.debug(
            "Unable to parse the filename %s into a valid show" % name)
        return False

    sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = main_db.MainDB().select(
        sql_selection, [parse_result.show.indexerid, parse_result.season_number])

    # FIX: previously fell off the end and implicitly returned None when the counts
    # differed; return an explicit bool either way (None was falsy but misleading).
    return int(episodes[0][b'count']) == len(parse_result.episode_numbers)
def _test_combo(self, name, result, which_regexes): if VERBOSE: print print 'Testing', name np = NameParser(True) try: test_result = np.parse(name) except InvalidShowException: return False if DEBUG: print test_result, test_result.which_regex print result, which_regexes self.assertEqual(test_result, result) for cur_regex in which_regexes: self.assertTrue(cur_regex in test_result.which_regex) self.assertEqual(len(which_regexes), len(test_result.which_regex))
def process(self):
    """
    Do the actual work

    :return: True
    """
    self._log("Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_names.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log("Warning: unable to find a valid release name.", sickrage.srLogger.WARNING)
        raise FailedPostProcessingFailedException()

    try:
        parser = NameParser(False)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log("Error: release name is invalid: " + releaseName, sickrage.srLogger.DEBUG)
        raise FailedPostProcessingFailedException()
    except InvalidShowException:
        self._log("Error: unable to parse release name " + releaseName + " into a valid show",
                  sickrage.srLogger.DEBUG)
        raise FailedPostProcessingFailedException()

    # dump what the parser extracted, one field per line
    sickrage.srLogger.debug("name_parser info: ")
    for field in (parsed.series_name, parsed.season_number, parsed.episode_numbers,
                  parsed.extra_info, parsed.release_group, parsed.air_date):
        sickrage.srLogger.debug(" - " + str(field))

    # queue a failed-download search for each parsed episode
    for episode in parsed.episode_numbers:
        segment = parsed.show.getEpisode(parsed.season_number, episode)
        cur_failed_queue_item = FailedQueueItem(parsed.show, [segment])
        sickrage.srCore.SEARCHQUEUE.add_item(cur_failed_queue_item)

    return True
def _getProperList(self):
    """
    Walk providers for propers
    """
    propers = {}

    # only look back two days for new PROPER/REPACK releases
    search_date = datetime.today() - timedelta(days=2)

    origThreadName = threading.currentThread().getName()

    # for each active provider, collect candidate propers
    for providerID, providerObj in {k: v for k, v in sortedProviderDict(
            sickrage.srConfig.RANDOMIZE_PROVIDERS).items() if v.isActive}.items():
        threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

        sickrage.srLogger.info("Searching for any new PROPER releases from " + providerObj.name)

        try:
            curPropers = providerObj.findPropers(search_date)
        except AuthException as e:
            sickrage.srLogger.debug("Authentication error: {}".format(e.message))
            continue
        except Exception as e:
            sickrage.srLogger.debug("Error while searching " + providerObj.name + ", skipping: {}".format(e.message))
            sickrage.srLogger.debug(traceback.format_exc())
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                sickrage.srLogger.debug('findPropers returned a non-proper, we have caught and skipped it.')
                continue

            name = self._genericName(x.name)
            if not name in propers:
                sickrage.srLogger.debug("Found new proper: " + x.name)
                x.provider = providerObj
                propers[name] = x

    threading.currentThread().setName(origThreadName)

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + curProper.name + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + curProper.name + " into a valid show")
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            sickrage.srLogger.debug(
                "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
            continue

        sickrage.srLogger.debug(
            "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            sickrage.srLogger.debug("Proper " + curProper.name + " were rejected by our release filters.")
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                sickrage.srLogger.debug(
                    "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = main_db.MainDB().select(
            "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
            [bestResult.indexerid, bestResult.season, bestResult.episode])
        if not sqlResults:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0][b"status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            sqlResults = main_db.MainDB().select(
                "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])

            oldVersion = int(sqlResults[0][b"version"])
            oldRelease_group = (sqlResults[0][b"release_group"])

            if oldVersion > -1 and oldVersion < bestResult.version:
                sickrage.srLogger.info("Found new anime v" + str(bestResult.version) +
                                       " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                sickrage.srLogger.info(
                    "Skipping proper from release group: " + bestResult.release_group +
                    ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            sickrage.srLogger.info("Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
    """Search this provider (and its cache) for the given episodes and return results keyed by episode number."""
    # NOTE(review): `_checkAuth` is referenced without calling it — if it is a plain
    # method this is always truthy and the guard never triggers; confirm it's a property.
    if not self._checkAuth:
        return

    self.show = show

    results = {}
    itemList = []

    searched_scene_season = None
    for epObj in episodes:
        # search cache for episode result
        cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality)
        if cacheResult:
            if epObj.episode not in results:
                results[epObj.episode] = cacheResult
            else:
                results[epObj.episode].extend(cacheResult)

            # found result, search next episode
            continue

        # skip if season already searched
        if len(episodes) > 1 and search_mode == "sponly" and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        search_strings = []
        if len(episodes) > 1 and search_mode == "sponly":
            # get season search results
            search_strings = self._get_season_search_strings(epObj)
        elif search_mode == "eponly":
            # get single episode search results
            search_strings = self._get_episode_search_strings(epObj)

        first = search_strings and isinstance(search_strings[0], dict) and "rid" in search_strings[0]
        if first:
            sickrage.srLogger.debug("First search_string has rid")

        for curString in search_strings:
            itemList += self._doSearch(curString, search_mode, len(episodes), epObj=epObj)
            if first:
                first = False
                if itemList:
                    sickrage.srLogger.debug(
                        "First search_string had rid, and returned results, skipping query by string")
                    break
                else:
                    sickrage.srLogger.debug(
                        "First search_string had rid, but returned no results, searching with string query")

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(itemList):
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        itemList = list(itertools.chain(*[v for (k, v) in sorted(items.iteritems(), reverse=True)]))
        itemList += itemsUnknown if itemsUnknown else []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + title + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + title + " into a valid show")
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            if search_mode == "sponly":
                if len(parse_result.episode_numbers):
                    sickrage.srLogger.debug(
                        "This is supposed to be a season pack search but the result " + title +
                        " is not a valid season pack, skipping it")
                    addCacheEntry = True
                if len(parse_result.episode_numbers) and (
                        parse_result.season_number not in set([ep.season for ep in episodes]) or
                        not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    sickrage.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                    addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and parse_result.season_number and not [
                        ep for ep in episodes if
                        ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    sickrage.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid season that we are trying to snatch, ignoring")
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and not [
                        ep for ep in episodes if
                        ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    sickrage.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            if not parse_result.is_air_by_date:
                sickrage.srLogger.debug(
                    "This is supposed to be a date search but the result " + title +
                    " didn't parse as one, skipping it")
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal()
                sql_results = main_db.MainDB().select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    sickrage.srLogger.warning(
                        "Tried to look up the date for the episode " + title +
                        " but the database didn't give proper results, skipping it")
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0][b"season"])
                actual_episodes = [int(sql_results[0][b"episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            sickrage.srLogger.debug("Adding item from search to cache: " + title)
            # pylint: disable=W0212
            # Access to a protected member of a client class
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality):
                wantEp = False
                break

        if not wantEp:
            sickrage.srLogger.info(
                "Ignoring result " + title + " because we don't want an episode that is " +
                Quality.qualityStrings[quality])
            continue

        sickrage.srLogger.debug("Found result " + title + " at " + url)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        if len(epObj) == 1:
            epNum = epObj[0].episode
            sickrage.srLogger.debug("Single episode result.")
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            sickrage.srLogger.debug(
                "Separating multi-episode result to check for later - result contains episodes: " +
                str(parse_result.episode_numbers))
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            sickrage.srLogger.debug("Separating full season result to check for later")

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        self.cache._getDB().mass_action(cl)
        del cl  # cleanup

    return results
def _getProperList(self):
    """
    Walk providers for propers
    """
    propers = {}

    # only look back two days for new PROPER/REPACK releases
    search_date = datetime.today() - timedelta(days=2)

    origThreadName = threading.currentThread().getName()

    # for each active provider, collect candidate propers
    for providerID, providerObj in {k: v for k, v in sortedProviderDict(
            sickrage.srConfig.RANDOMIZE_PROVIDERS).items() if v.isActive}.items():
        threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

        sickrage.srLogger.info("Searching for any new PROPER releases from " + providerObj.name)

        try:
            curPropers = providerObj.findPropers(search_date)
        except AuthException as e:
            sickrage.srLogger.debug("Authentication error: {}".format(e.message))
            continue
        except Exception as e:
            sickrage.srLogger.debug(
                "Error while searching " + providerObj.name + ", skipping: {}".format(e.message))
            sickrage.srLogger.debug(traceback.format_exc())
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                sickrage.srLogger.debug('findPropers returned a non-proper, we have caught and skipped it.')
                continue

            name = self._genericName(x.name)
            if not name in propers:
                sickrage.srLogger.debug("Found new proper: " + x.name)
                x.provider = providerObj
                propers[name] = x

    threading.currentThread().setName(origThreadName)

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + curProper.name + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + curProper.name + " into a valid show")
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            sickrage.srLogger.debug(
                "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
            continue

        sickrage.srLogger.debug(
            "Successful match! Result " + parse_result.original_name +
            " matched to show " + parse_result.show.name)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            sickrage.srLogger.debug("Proper " + curProper.name + " were rejected by our release filters.")
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                sickrage.srLogger.debug(
                    "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = main_db.MainDB().select(
            "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
            [bestResult.indexerid, bestResult.season, bestResult.episode])
        if not sqlResults:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0][b"status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            sqlResults = main_db.MainDB().select(
                "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])

            oldVersion = int(sqlResults[0][b"version"])
            oldRelease_group = (sqlResults[0][b"release_group"])

            if oldVersion > -1 and oldVersion < bestResult.version:
                sickrage.srLogger.info(
                    "Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                sickrage.srLogger.info(
                    "Skipping proper from release group: " + bestResult.release_group +
                    ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            sickrage.srLogger.info("Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def splitNZBResult(result):
    """Break a season-pack NZB search result into per-episode results.

    Downloads the NZB behind ``result.url``, determines the season from the
    release name, splits the NZB into its member episode NZBs, and wraps each
    wanted member in its own search result.

    :param result: season search result whose ``url`` points at an NZB
    :return: list of NZBDataSearchResult objects on success, False when the
        NZB cannot be fetched or a name cannot be parsed
    """
    # Without the raw season NZB there is nothing to split.
    nzb_data = getURL(result.url, session=requests.Session(), needBytes=True)
    if nzb_data is None:
        sickrage.srLogger.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # Work out which season this pack covers from its release name.
    try:
        parser = NameParser(False, showObj=result.show)
        parsed = parser.parse(result.name)
    except InvalidNameException:
        sickrage.srLogger.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.srLogger.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # A pack with no season number is treated as season 1.
    season = parsed.season_number if parsed.season_number is not None else 1

    split_nzbs, xmlns = getSeasonNZBs(result.name, nzb_data, season)

    results = []
    for nzb_name in split_nzbs:
        sickrage.srLogger.debug("Split out " + nzb_name + " from " + result.name)

        # Each member must itself parse into a valid episode of this show.
        try:
            parser = NameParser(False, showObj=result.show)
            parsed = parser.parse(nzb_name)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + nzb_name + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + nzb_name + " into a valid show")
            return False

        # Sanity checks: the member must belong to the pack's season and
        # actually carry episode numbers.
        if (parsed.season_number is not None and parsed.season_number != season) or (
                parsed.season_number is None and season != 1):
            sickrage.srLogger.warning(
                "Found " + nzb_name + " inside " + result.name +
                " but it doesn't seem to belong to the same season, ignoring it")
            continue

        if len(parsed.episode_numbers) == 0:
            sickrage.srLogger.warning(
                "Found " + nzb_name + " inside " + result.name +
                " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        # Skip the member as soon as any of its episodes is unwanted at this
        # quality; the for/else only runs when every episode is wanted.
        for ep_num in parsed.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, ep_num, result.quality):
                sickrage.srLogger.info(
                    "Ignoring result " + nzb_name + " because we don't want an episode that is " +
                    Quality.qualityStrings[result.quality])
                break
        else:
            # All episodes wanted: collect their episode objects and build a
            # standalone per-episode result carrying the extracted NZB data.
            ep_objs = [result.extraInfo[0].getEpisode(season, ep_num)
                       for ep_num in parsed.episode_numbers]

            split_result = classes.NZBDataSearchResult(ep_objs)
            split_result.name = nzb_name
            split_result.provider = result.provider
            split_result.quality = result.quality
            split_result.extraInfo = [createNZBString(split_nzbs[nzb_name], xmlns)]

            results.append(split_result)

    return results
def splitNZBResult(result):
    """
    Split result into seperate episodes

    :param result: search result object
    :return: False upon failure, a list of episode objects otherwise
    """
    # NOTE(review): this definition is byte-for-byte identical to an earlier
    # splitNZBResult in this module; being defined later, it silently shadows
    # the first one. One of the two copies should be removed.

    # fetch the raw season NZB; bail out if it cannot be downloaded
    urlData = getURL(result.url, session=requests.Session(), needBytes=True)
    if urlData is None:
        sickrage.srLogger.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season ep name
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        sickrage.srLogger.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.srLogger.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up
    # a pack without a season number is treated as season 1
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:
        sickrage.srLogger.debug("Split out " + newNZB + " from " + result.name)

        # parse the name
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            sickrage.srLogger.debug("Unable to parse the filename " + newNZB + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.srLogger.debug("Unable to parse the filename " + newNZB + " into a valid show")
            return False

        # make sure the result is sane: the member must belong to the same
        # season as the pack and must carry at least one episode number
        if (parse_result.season_number is not None and parse_result.season_number != season) or (
                parse_result.season_number is None and season != 1):
            sickrage.srLogger.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it")
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.srLogger.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        # drop the member if any of its episodes is unwanted at this quality
        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
                sickrage.srLogger.info("Ignoring result " + newNZB + " because we don't want an episode that is " +
                                       Quality.qualityStrings[result.quality])
                wantEp = False
                break

        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].getEpisode(season, curEp))

        # make a result carrying the extracted per-episode NZB data
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """Check that a naming pattern round-trips through the name parser.

    Renders *pattern* against a generated sample episode, parses the rendered
    name back, and compares the parsed fields against the sample.

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    sample_ep = generate_sample_ep(multi, abd, sports, anime_type)

    candidate = sample_ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    dir_part = sample_ep.formatted_dir(pattern, multi)
    if not file_only:
        candidate = os.path.join(dir_part, candidate)

    if not candidate:
        sickrage.srLogger.debug("Unable to create a name out of " + pattern)
        return False

    sickrage.srLogger.debug("Trying to parse " + candidate)

    try:
        # naming_pattern=True relaxes the parser for pattern-generated names
        parsed = NameParser(True, showObj=sample_ep.show, naming_pattern=True).parse(candidate)
    except Exception:
        sickrage.srLogger.debug("Unable to parse " + candidate + ", not valid")
        return False

    sickrage.srLogger.debug("Parsed " + candidate + " into " + str(parsed))

    if abd or sports:
        # Air-by-date / sports patterns are judged on the air date alone.
        if parsed.air_date != sample_ep.airdate:
            sickrage.srLogger.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        # Anime-style patterns must round-trip the absolute numbering.
        expected_absolute = [x.absolute_number for x in [sample_ep] + sample_ep.relatedEps]
        if parsed.ab_episode_numbers and parsed.ab_episode_numbers != expected_absolute:
            sickrage.srLogger.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
    else:
        # Otherwise both season and episode numbers must round-trip.
        if parsed.season_number != sample_ep.season:
            sickrage.srLogger.debug("Season number incorrect in parsed episode, pattern isn't valid")
            return False
        if parsed.episode_numbers != [x.episode for x in [sample_ep] + sample_ep.relatedEps]:
            sickrage.srLogger.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
            return False

    return True