def _test_unicode(self, name, result):
    """Parse *name* and check its extra_info against the expected *result*.

    Also round-trips the parse result through str()/repr() to make sure
    unicode release names do not raise encoding errors.
    """
    parser = NameParser(True, showObj=self.show)
    parsed = parser.parse(name)

    # this shouldn't raise an exception
    repr(str(parsed))
    self.assertEqual(parsed.extra_info, result.extra_info)
def addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Add release *name* at *url* to this provider's results cache.

    :param name: Release name from the provider feed
    :param url: Download URL of the release
    :param parse_result: Optional pre-parsed NameParser result; parsed here when absent
    :param indexer_id: Optional indexer id used to resolve the show object
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
            if not parse_result:
                return
        except (InvalidShowException, InvalidNameException):
            # unparsable feed items are skipped, not fatal
            sickrage.srCore.srLogger.debug("RSS ITEM:[{}] IGNORED!".format(name))
            return

    if not parse_result.series_name:
        return

    # if we made it this far then lets add the parsed result to cache for usager later on
    # NOTE(review): season-less results are cached as season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        # only insert when this URL is not already cached for this provider
        if not len([x for x in sickrage.srCore.cacheDB.db.get_many('providers', self.providerID, with_doc=True)
                    if x['doc']['url'] == url]):
            sickrage.srCore.cacheDB.db.insert({
                '_t': 'providers',
                'provider': self.providerID,
                'name': name,
                'season': season,
                'episodes': episodeText,
                'indexerid': parse_result.show.indexerid,
                'url': url,
                'time': curTimestamp,
                'quality': quality,
                'release_group': release_group,
                'version': version
            })

            sickrage.srCore.srLogger.debug("RSS ITEM:[%s] ADDED!", name)
def _test_name(name):
    """Return True when *name* is correctly rejected by the parser.

    Used for the "should fail" sample list: raising Invalid*Exception is
    the expected outcome.  A successful parse is a test failure and
    returns False (printing the parse result when VERBOSE is set).
    """
    np = NameParser(True)
    try:
        parse_result = np.parse(name)
    except (InvalidNameException, InvalidShowException):
        return True

    if VERBOSE:
        # FIX: was a Python 2 `print` statement (SyntaxError on Python 3);
        # print() with the same arguments produces identical output
        print('Actual: ', parse_result.which_regex, parse_result)
    return False
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    # build a sample episode from the pattern and try to parse the generated name back
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = formatted_filename(ep.show, ep, pattern, multi, anime_type) + '.ext'
    new_path = formatted_dir(ep.show, ep, pattern, multi)
    if not file_only:
        new_name = os.path.join(new_path, new_name)

    if not new_name:
        sickrage.LOGGER.debug("Unable to create a name out of " + pattern)
        return False

    sickrage.LOGGER.debug("Trying to parse " + new_name)

    # naming_pattern=True: the name is synthetic, so relax show validation
    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception:
        sickrage.LOGGER.debug("Unable to parse " + new_name + ", not valid")
        return False

    sickrage.LOGGER.debug("Parsed " + new_name + " into " + str(result))

    # air-by-date / sports shows round-trip on the air date only
    if abd or sports:
        if result.air_date != ep.airdate:
            sickrage.LOGGER.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        # absolute-numbered anime: absolute episode numbers must round-trip
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [x.absolute_number for x in
                                                                            [ep] + ep.relatedEps]:
            sickrage.LOGGER.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
    else:
        # standard numbering: season and full episode list must round-trip
        if result.season_number != ep.season:
            sickrage.LOGGER.debug("Season number incorrect in parsed episode, pattern isn't valid")
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            sickrage.LOGGER.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
            return False

    return True
def _is_season_pack(name):
    """Return True when *name* parses to a release covering a full season.

    A release counts as a season pack when the number of episodes it names
    equals the number of episodes the database holds for that show/season.

    :param name: Release name to check
    :return: True if a season pack, False otherwise
    """
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        sickrage.srCore.srLogger.debug("Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        sickrage.srCore.srLogger.debug("Unable to parse the filename %s into a valid show" % name)
        return False

    # count the episodes the db knows for this show/season and compare
    if len([x for x in sickrage.srCore.mainDB.db.get_many('tv_episodes', parse_result.show.indexerid, with_doc=True)
            if x['doc']['season'] == parse_result.season_number]) == len(parse_result.episode_numbers):
        return True

    # FIX: explicit False instead of an implicit None fall-through, so the
    # function always returns a boolean like its error paths do
    return False
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: True if file is already postprocessed, False if not
    :return:
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    if main_db.MainDB().select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName]):
        # result.output += logHelper(u"You're trying to post process a dir that's already been processed, skipping", LOGGER.DEBUG)
        return True

    if main_db.MainDB().select("SELECT * FROM tv_episodes WHERE release_name = ?",
                               [videofile.rpartition('.')[0]]):
        # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", LOGGER.DEBUG)
        return True

    # Needed if we have downloaded the same episode @ different quality
    # But we need to make sure we check the history of the episode we're going to PP, and not others
    np = NameParser(dirName, tryIndexers=True)
    try:
        parse_result = np.parse(dirName)
    except Exception:  # FIX: was a bare except; any parse failure falls back to the generic query
        parse_result = False

    # This part is always the same
    search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
    search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"

    params = []

    # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
    if parse_result and (
            parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
        # FIX: bind values as parameters instead of concatenating them into
        # the SQL text (safer and avoids quoting mistakes); str() keeps the
        # original quoted-literal comparison semantics
        search_sql += " and tv_episodes.showid = ? and tv_episodes.season = ? and tv_episodes.episode = ?"
        params += [str(parse_result.show.indexerid),
                   str(parse_result.season_number),
                   str(parse_result.episode_numbers[0])]

    search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in Quality.DOWNLOADED]) + ")"
    search_sql += " and history.resource LIKE ?"
    params += ['%' + videofile]

    if main_db.MainDB().select(search_sql, params):
        # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", LOGGER.DEBUG)
        return True

    return False
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Build the SQL needed to cache release *name*/*url* for this provider.

    :param name: Release name from the provider feed
    :param url: Download URL of the release
    :param parse_result: Optional pre-parsed NameParser result; parsed here when absent
    :param indexer_id: Optional indexer id used to look up the show
    :return: [sql, params] pair suitable for a bulk insert, or None when the
        name cannot be parsed into a usable episode
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = findCertainShow(sickrage.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            sickrage.LOGGER.debug("Unable to parse the filename " + name + " into a valid episode")
            return None
        except InvalidShowException:
            sickrage.LOGGER.debug("Unable to parse the filename " + name + " into a valid show")
            return None

    if not parse_result or not parse_result.series_name:
        return None

    # if we made it this far then lets add the parsed result to cache for usager later on
    # NOTE(review): season-less results are cached as season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        sickrage.LOGGER.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group,
             version]]
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: True if file is already postprocessed, False if not
    :return:
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    if [x for x in sickrage.app.main_db.all('tv_episodes')
            if x['release_name'] and (x['release_name'] in dirName or x['release_name'] in videofile)]:
        return True

    # Needed if we have downloaded the same episode @ different quality
    # But we need to make sure we check the history of the episode we're going to PP, and not others
    np = NameParser(dirName)
    try:
        parse_result = np.parse(dirName)
    except Exception:  # FIX: was a bare except; fall back to the generic history check
        parse_result = False

    for h in (h for h in sickrage.app.main_db.all('history') if h['resource'].endswith(videofile)):
        for e in (e for e in sickrage.app.main_db.get_many('tv_episodes', h['showid'])
                  if h['season'] == e['season'] and h['episode'] == e['episode']
                  and e['status'] in Quality.DOWNLOADED):

            # If we find a showid, a season number, and one or more episode numbers then we need to use those in the
            # query
            if parse_result and (
                    parse_result.indexerid and parse_result.episode_numbers and parse_result.season_number):
                # FIX: the original evaluated int(parse_result.season_number and e['episode'])
                # due to a misplaced parenthesis, short-circuiting the season and
                # episode comparisons; compare each field separately
                if e['showid'] == int(parse_result.indexerid) and \
                        e['season'] == int(parse_result.season_number) and \
                        e['episode'] == int(parse_result.episode_numbers[0]):
                    return True
            else:
                return True

    # Checks for processed file marker
    if os.path.isfile(os.path.join(dirName, videofile + '.sr_processed')):
        return True

    return False
def _test_combo(self, name, result, which_regexes):
    """Parse *name* and verify both the parse result and the regex names used."""
    if VERBOSE:
        print()
        print('Testing', name)

    parsed = NameParser(True, validate_show=False).parse(name)

    if DEBUG:
        print(parsed, parsed.which_regex)
        print(result, which_regexes)

    self.assertEqual(parsed, result)

    # every expected regex must be present, and no extras
    for expected_regex in which_regexes:
        self.assertTrue(expected_regex in parsed.which_regex)
    self.assertEqual(len(which_regexes), len(parsed.which_regex))
def release_group(show, name):
    """Return the release group parsed out of *name*, or '' if unavailable."""
    if not name:
        return ""

    # strip the extension and any known non-release-group suffixes first
    cleaned = remove_non_release_groups(remove_extension(name))

    try:
        parsed = NameParser(cleaned, showObj=show, naming_pattern=True).parse(cleaned)
    except (InvalidNameException, InvalidShowException) as e:
        sickrage.LOGGER.debug("Unable to get parse release_group: {}".format(e))
        return ''

    return parsed.release_group if parsed.release_group else ''
def _is_season_pack(name):
    """Return True when *name* parses to a release covering a full season.

    A release counts as a season pack when the number of episodes it names
    equals the episode count the database holds for that show/season.

    :param name: Release name to check
    :return: True if a season pack, False otherwise
    """
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        sickrage.LOGGER.debug("Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        sickrage.LOGGER.debug("Unable to parse the filename %s into a valid show" % name)
        return False

    # compare the db's episode count for this season with the release's
    sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = main_db.MainDB().select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
    if int(episodes[0][b"count"]) == len(parse_result.episode_numbers):
        return True

    # FIX: explicit False instead of an implicit None fall-through, so the
    # function always returns a boolean like its error paths do
    return False
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: True if file is already postprocessed, False if not
    :return:
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    if [x for x in sickrage.srCore.mainDB.db.all('tv_episodes', with_doc=True)
            if x['doc']['release_name'] == dirName]:
        return True
    else:
        # FIX: the original compared release_name against a one-element list
        # ([videofile.rpartition('.')[0]]), which can never equal a string;
        # compare against the bare basename instead
        if [x for x in sickrage.srCore.mainDB.db.all('tv_episodes', with_doc=True)
                if x['doc']['release_name'] == videofile.rpartition('.')[0]]:
            return True

    # Needed if we have downloaded the same episode @ different quality
    # But we need to make sure we check the history of the episode we're going to PP, and not others
    np = NameParser(dirName, tryIndexers=True)
    try:
        parse_result = np.parse(dirName)
    except Exception:  # FIX: was a bare except; fall back to the generic history check
        parse_result = False

    for h in [h['doc'] for h in sickrage.srCore.mainDB.db.all('history', with_doc=True)
              if h['doc']['resource'].endswith(videofile)]:
        for e in [e['doc'] for e in sickrage.srCore.mainDB.db.get_many('tv_episodes', h['showid'], with_doc=True)
                  if h['season'] == e['doc']['season'] and h['episode'] == e['doc']['episode']
                  and e['doc']['status'] in Quality.DOWNLOADED]:

            # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
            if parse_result and (
                    parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
                # FIX: the original evaluated int(parse_result.season_number and e['episode'])
                # due to a misplaced parenthesis, short-circuiting the season and
                # episode comparisons; compare each field separately
                if e['showid'] == int(parse_result.show.indexerid) and \
                        e['season'] == int(parse_result.season_number) and \
                        e['episode'] == int(parse_result.episode_numbers[0]):
                    return True
            else:
                return True

    return False
def _test_combo(self, name, result, which_regexes):
    """Parse *name*, tolerating an unknown show, and verify the parse result
    and the regex names used.

    Returns False when the show cannot be resolved (InvalidShowException);
    otherwise asserts the parse result matches *result* exactly.
    """
    if VERBOSE:
        # FIX: were Python 2 `print` statements (SyntaxError on Python 3);
        # print() with the same arguments produces identical output
        print()
        print('Testing', name)

    np = NameParser(True)

    try:
        test_result = np.parse(name)
    except InvalidShowException:
        return False

    if DEBUG:
        print(test_result, test_result.which_regex)
        print(result, which_regexes)

    self.assertEqual(test_result, result)
    for cur_regex in which_regexes:
        self.assertTrue(cur_regex in test_result.which_regex)
    self.assertEqual(len(which_regexes), len(test_result.which_regex))
def process(self):
    """
    Do the actual work

    :return: True
    :raises FailedPostProcessingFailedException: when no release name can be
        determined or the release name cannot be parsed
    """
    self._log("Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    # derive a release name from the directory / nzb names
    releaseName = show_names.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log("Warning: unable to find a valid release name.", sickrage.LOGGER.WARNING)
        raise FailedPostProcessingFailedException()

    try:
        parser = NameParser(False)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log("Error: release name is invalid: " + releaseName, sickrage.LOGGER.DEBUG)
        raise FailedPostProcessingFailedException()
    except InvalidShowException:
        self._log("Error: unable to parse release name " + releaseName + " into a valid show",
                  sickrage.LOGGER.DEBUG)
        raise FailedPostProcessingFailedException()

    sickrage.LOGGER.debug("name_parser info: ")
    sickrage.LOGGER.debug(" - " + str(parsed.series_name))
    sickrage.LOGGER.debug(" - " + str(parsed.season_number))
    sickrage.LOGGER.debug(" - " + str(parsed.episode_numbers))
    sickrage.LOGGER.debug(" - " + str(parsed.extra_info))
    sickrage.LOGGER.debug(" - " + str(parsed.release_group))
    sickrage.LOGGER.debug(" - " + str(parsed.air_date))

    # queue a failed-download search for every episode in the release
    for episode in parsed.episode_numbers:
        segment = parsed.show.getEpisode(parsed.season_number, episode)

        cur_failed_queue_item = FailedQueueItem(parsed.show, [segment])
        sickrage.SEARCHQUEUE.add_item(cur_failed_queue_item)

    return True
def load_episodes_from_dir(self):
    """Scan the show directory and create episode objects from media files.

    For files whose names parse cleanly and carry a release group, the file
    name is stored as the episode's release name; subtitles are refreshed
    when subtitle support is enabled for the show and globally.
    """
    if not os.path.isdir(self.location):
        sickrage.app.log.debug(
            str(self.indexer_id) + ": Show dir doesn't exist, not loading episodes from disk")
        return

    sickrage.app.log.debug(
        str(self.indexer_id) + ": Loading all episodes from the show directory " + self.location)

    # get file list
    media_files = list_media_files(self.location)

    # create TVEpisodes from each media file (if possible)
    for mediaFile in media_files:
        curEpisode = None

        sickrage.app.log.debug(
            str(self.indexer_id) + ": Creating episode from " + mediaFile)

        try:
            curEpisode = self.make_ep_from_file(
                os.path.join(self.location, mediaFile))
        except (ShowNotFoundException, EpisodeNotFoundException) as e:
            sickrage.app.log.warning(
                "Episode " + mediaFile + " returned an exception: {}".format(e))
        except EpisodeDeletedException:
            sickrage.app.log.debug(
                "The episode deleted itself when I tried making an object for it")

        # skip to next episode?
        if not curEpisode:
            continue

        # see if we should save the release name in the db
        ep_file_name = os.path.basename(curEpisode.location)
        ep_file_name = os.path.splitext(ep_file_name)[0]

        try:
            parse_result = NameParser(
                False, show_id=self.indexer_id).parse(ep_file_name)
        except (InvalidNameException, InvalidShowException):
            parse_result = None

        # a space-less name with a release group looks like a real release name
        if ' ' not in ep_file_name and parse_result and parse_result.release_group:
            sickrage.app.log.debug("Name " + ep_file_name + " gave release group of " +
                                   parse_result.release_group + ", seems valid")
            curEpisode.release_name = ep_file_name
            # persist the change via the episode's ORM session
            object_session(self).commit()

        # store the reference in the show
        if self.subtitles and sickrage.app.config.use_subtitles:
            try:
                curEpisode.refresh_subtitles()
            except Exception:
                sickrage.app.log.error("%s: Could not refresh subtitles" % self.indexer_id)
                sickrage.app.log.debug(traceback.format_exc())
def test_season_only_file_names(self):
    """Run the 'season_only' sample names through the parser as .avi files."""
    self._test_names(NameParser(), 'season_only', lambda x: x + '.avi')
def _getProperList(self):
    """
    Walk providers for propers

    :return: list of Proper results that should replace an existing episode
    """
    propers = {}

    # only look at releases from the last two days
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    origThreadName = threading.currentThread().getName()

    # for each provider get a list of the
    for providerID, providerObj in sickrage.srCore.providersDict.sort(
            randomize=sickrage.srCore.srConfig.RANDOMIZE_PROVIDERS).items():
        # check provider type and provider is enabled
        if not sickrage.srCore.srConfig.USE_NZBS and providerObj.type in [NZBProvider.type, NewznabProvider.type]:
            continue
        elif not sickrage.srCore.srConfig.USE_TORRENTS and providerObj.type in [TorrentProvider.type,
                                                                               TorrentRssProvider.type]:
            continue
        elif not providerObj.isEnabled:
            continue

        threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

        sickrage.srCore.srLogger.info("Searching for any new PROPER releases from " + providerObj.name)

        try:
            curPropers = providerObj.find_propers(search_date)
        except AuthException as e:
            sickrage.srCore.srLogger.warning("Authentication error: {}".format(e.message))
            continue
        except Exception as e:
            sickrage.srCore.srLogger.debug(
                "Error while searching " + providerObj.name + ", skipping: {}".format(e.message))
            sickrage.srCore.srLogger.debug(traceback.format_exc())
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                sickrage.srCore.srLogger.debug(
                    'findPropers returned a non-proper, we have caught and skipped it.')
                continue

            name = self._genericName(x.name)
            if not name in propers:
                sickrage.srCore.srLogger.debug("Found new proper: " + x.name)
                x.provider = providerObj
                propers[name] = x

        threading.currentThread().setName(origThreadName)

    # take the list of unique propers and get it sorted by
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug(
                "Unable to parse the filename " + curProper.name + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srCore.srLogger.debug(
                "Unable to parse the filename " + curProper.name + " into a valid show")
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            sickrage.srCore.srLogger.debug(
                "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
            continue

        sickrage.srCore.srLogger.debug(
            "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            sickrage.srCore.srLogger.debug(
                "Proper " + curProper.name + " were rejected by our release filters.")
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                sickrage.srCore.srLogger.debug(
                    "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                continue

        # check if we actually want this proper (if it's the right quality)
        # FIX: mainDB is an attribute, not a callable -- the original used
        # sickrage.srCore.mainDB().db here (a TypeError at runtime), while the
        # anime branch below already used sickrage.srCore.mainDB.db
        dbData = [x['doc'] for x in sickrage.srCore.mainDB.db.get_many(
                      'tv_episodes', bestResult.indexerid, with_doc=True)
                  if x['doc']['season'] == bestResult.season
                  and x['doc']['episode'] == bestResult.episode]

        if not dbData:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(dbData[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            dbData = [x['doc'] for x in sickrage.srCore.mainDB.db.get_many(
                          'tv_episodes', bestResult.indexerid, with_doc=True)
                      if x['doc']['season'] == bestResult.season
                      and x['doc']['episode'] == bestResult.episode]

            oldVersion = int(dbData[0]["version"])
            oldRelease_group = (dbData[0]["release_group"])

            if -1 < oldVersion < bestResult.version:
                sickrage.srCore.srLogger.info(
                    "Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                sickrage.srCore.srLogger.info(
                    "Skipping proper from release group: " + bestResult.release_group +
                    ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (
                bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            sickrage.srCore.srLogger.info("Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def test_fov_repeat_file_names(self):
    """Run the 'fov_repeat' sample names through the parser as .avi files."""
    self._test_names(NameParser(), 'fov_repeat', lambda x: x + '.avi')
def test_no_season_general_file_names(self):
    """Run the 'no_season_general' sample names through the parser as .avi files."""
    self._test_names(NameParser(), 'no_season_general', lambda x: x + '.avi')
def test_stupid_names(self):
    """Run the 'stupid' sample names through a non-file parser."""
    self._test_names(NameParser(False), 'stupid')
def test_no_season_multi_ep_names(self):
    """Run the 'no_season_multi_ep' sample names through a non-file parser."""
    self._test_names(NameParser(False), 'no_season_multi_ep')
def _getProperList(self):
    """
    Walk providers for propers

    :return: list of Proper results that should replace an existing episode
    """
    propers = {}

    # only look at releases from the last two days
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    origThreadName = threading.currentThread().getName()

    # for each provider get a list of the
    for providerID, providerObj in sickrage.srCore.providersDict.sort(
            randomize=sickrage.srCore.srConfig.RANDOMIZE_PROVIDERS).items():
        # check provider type and provider is enabled
        if not sickrage.srCore.srConfig.USE_NZBS and providerObj.type in [NZBProvider.type, NewznabProvider.type]:
            continue
        elif not sickrage.srCore.srConfig.USE_TORRENTS and providerObj.type in [TorrentProvider.type,
                                                                                TorrentRssProvider.type]:
            continue
        elif not providerObj.isEnabled:
            continue

        threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

        sickrage.srCore.srLogger.info("Searching for any new PROPER releases from " + providerObj.name)

        try:
            curPropers = providerObj.findPropers(search_date)
        except AuthException as e:
            sickrage.srCore.srLogger.debug("Authentication error: {}".format(e.message))
            continue
        except Exception as e:
            sickrage.srCore.srLogger.debug(
                "Error while searching " + providerObj.name + ", skipping: {}".format(e.message))
            sickrage.srCore.srLogger.debug(traceback.format_exc())
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                sickrage.srCore.srLogger.debug('findPropers returned a non-proper, we have caught and skipped it.')
                continue

            name = self._genericName(x.name)
            if not name in propers:
                sickrage.srCore.srLogger.debug("Found new proper: " + x.name)
                x.provider = providerObj
                propers[name] = x

        threading.currentThread().setName(origThreadName)

    # take the list of unique propers and get it sorted by
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug(
                "Unable to parse the filename " + curProper.name + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + curProper.name + " into a valid show")
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            sickrage.srCore.srLogger.debug(
                "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
            continue

        sickrage.srCore.srLogger.debug(
            "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            sickrage.srCore.srLogger.debug("Proper " + curProper.name + " were rejected by our release filters.")
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                sickrage.srCore.srLogger.debug(
                    "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = main_db.MainDB().select(
            "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
            [bestResult.indexerid, bestResult.season, bestResult.episode])
        if not sqlResults:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            sqlResults = main_db.MainDB().select(
                "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])

            oldVersion = int(sqlResults[0]["version"])
            oldRelease_group = (sqlResults[0]["release_group"])

            if oldVersion > -1 and oldVersion < bestResult.version:
                sickrage.srCore.srLogger.info(
                    "Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                sickrage.srCore.srLogger.info(
                    "Skipping proper from release group: " + bestResult.release_group +
                    ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season,
                                           bestResult.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            sickrage.srCore.srLogger.info("Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Insert release *name*/*url* into the provider results cache.

    Skips silently when the URL is already cached for this provider or the
    name cannot be parsed into a show/episode.
    """
    # check for existing entry in cache
    if len([
        x for x in sickrage.srCore.cacheDB.db.get_many(
            'providers', self.providerID, with_doc=True)
        if x['doc']['url'] == url
    ]):
        return

    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

        try:
            parse_result = NameParser(showObj=showObj,
                                      tryIndexers=True,
                                      validate_show=False).parse(name)
        except (InvalidShowException, InvalidNameException):
            # unparsable feed items are simply not cached
            pass

    if parse_result and parse_result.series_name:
        # NOTE(review): season-less results are cached as season 1
        season = parse_result.season_number if parse_result.season_number else 1
        episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a seperated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get quality of release
            quality = parse_result.quality

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            # add to DB
            sickrage.srCore.cacheDB.db.insert({
                '_t': 'providers',
                'provider': self.providerID,
                'name': name,
                'season': season,
                'episodes': episodeText,
                'indexerid': parse_result.show.indexerid if parse_result.show else 0,
                'url': url,
                'time': int(time.mktime(datetime.datetime.today().timetuple())),
                'quality': quality,
                'release_group': release_group,
                'version': version
            })

            sickrage.srCore.srLogger.debug("RSS ITEM:[%s] ADDED!", name)
def test_season_only_file_names(self):
    """Run the 'season_only' sample names against a known show without show validation."""
    self._test_names(NameParser(showObj=self.show, validate_show=False),
                     'season_only', lambda x: x + '.avi')
def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
    """Search this provider (cache first, then live) for the given episodes.

    :param show: show object being searched for
    :param episodes: list of episode objects to find
    :param search_mode: 'sponly' for season-pack searches, 'eponly' for single episodes
    :param manualSearch: True when this search was triggered by the user
    :param downCurQuality: whether re-downloading the current quality is allowed
    :return: dict mapping episode number (or MULTI_EP_RESULT / SEASON_RESULT
        sentinels) to lists of result objects
    """
    if not self._checkAuth:
        return

    self.show = show

    results = {}
    itemList = []
    searched_scene_season = None

    for epObj in episodes:
        # search cache for episode result
        cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality)
        if cacheResult:
            if epObj.episode not in results:
                results[epObj.episode] = cacheResult
            else:
                results[epObj.episode].extend(cacheResult)

            # found result, search next episode
            continue

        # skip if season already searched
        if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        search_strings = []
        if len(episodes) > 1 and search_mode == 'sponly':
            # get season search results
            search_strings = self._get_season_search_strings(epObj)
        elif search_mode == 'eponly':
            # get single episode search results
            search_strings = self._get_episode_search_strings(epObj)

        # an 'rid' (indexer id) search string is tried first; the string query is
        # only used as a fallback if the rid search returned nothing
        first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
        if first:
            sickrage.srCore.srLogger.debug('First search_string has rid')

        for curString in search_strings:
            itemList += self.search(curString, search_mode, len(episodes), epObj=epObj)
            if first:
                first = False
                if itemList:
                    sickrage.srCore.srLogger.debug(
                        'First search_string had rid, and returned results, skipping query by string')
                    break
                else:
                    sickrage.srCore.srLogger.debug(
                        'First search_string had rid, but returned no results, searching with string query')

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(itemList):
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        # best quality first, unknown-quality items appended last
        itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
        itemList += itemsUnknown or []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + title + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " + title + " into a valid show")
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            if search_mode == 'sponly':
                if len(parse_result.episode_numbers):
                    sickrage.srCore.srLogger.debug(
                        "This is supposed to be a season pack search but the result " + title +
                        " is not a valid season pack, skipping it")
                    addCacheEntry = True
                if len(parse_result.episode_numbers) and (
                        parse_result.season_number not in set([ep.season for ep in episodes]) or
                        not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    sickrage.srCore.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                    addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and parse_result.season_number and not [
                        ep for ep in episodes if
                        ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    sickrage.srCore.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid season that we are trying to snatch, ignoring")
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and not [
                        ep for ep in episodes if
                        ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    sickrage.srCore.srLogger.debug(
                        "The result " + title +
                        " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports shows: map the parsed air date back to season/episode via the DB
            if not parse_result.is_air_by_date:
                sickrage.srCore.srLogger.debug(
                    "This is supposed to be a date search but the result " + title +
                    " didn't parse as one, skipping it")
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal()
                sql_results = main_db.MainDB().select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    sickrage.srCore.srLogger.warning(
                        "Tried to look up the date for the episode " + title +
                        " but the database didn't give proper results, skipping it")
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            sickrage.srCore.srLogger.debug("Adding item from search to cache: " + title)
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality):
                wantEp = False
                break

        if not wantEp:
            sickrage.srCore.srLogger.info(
                "RESULT:[{}] QUALITY:[{}] IGNORED!".format(title, Quality.qualityStrings[quality]))
            continue

        sickrage.srCore.srLogger.debug("FOUND RESULT:[{}] URL:[{}]".format(title, url))

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        # bucket the result under its episode number, or a sentinel for
        # multi-episode / full-season results
        if len(epObj) == 1:
            epNum = epObj[0].episode
            sickrage.srCore.srLogger.debug("Single episode result.")
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            sickrage.srCore.srLogger.debug(
                "Separating multi-episode result to check for later - result contains episodes: " +
                str(parse_result.episode_numbers))
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            sickrage.srCore.srLogger.debug("Separating full season result to check for later")

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        self.cache._getDB().mass_action(cl)
        del cl  # cleanup

    return results
def test_no_season_multi_ep_file_names(self):
    """Check that 'no_season_multi_ep' fixture names parse when given an .avi extension."""
    def as_avi(name):
        return name + '.avi'

    self._test_names(NameParser(showObj=self.show, validate_show=False), 'no_season_multi_ep', as_avi)
def test_stupid_file_names(self):
    """Check that 'stupid' fixture names parse when given an .avi extension."""
    def as_avi(name):
        return name + '.avi'

    self._test_names(NameParser(showObj=self.show, validate_show=False), 'stupid', as_avi)
def test_fov_repeat_file_names(self):
    """Check that 'fov_repeat' fixture names parse when given an .avi extension."""
    def as_avi(name):
        return name + '.avi'

    self._test_names(NameParser(showObj=self.show, validate_show=False), 'fov_repeat', as_avi)
def splitNZBResult(result):
    """
    Split a season-pack NZB result into separate per-episode results.

    :param result: search result object for a full-season NZB
    :return: False upon failure, a list of NZBDataSearchResult objects otherwise
    """
    urlData = sickrage.app.wsession.get(result.url, needBytes=True)
    if urlData is None:
        sickrage.app.log.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season ep name
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up; releases without an explicit season number are assumed to be season 1
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:
        sickrage.app.log.debug("Split out " + newNZB + " from " + result.name)

        # parse the name
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            sickrage.app.log.debug("Unable to parse the filename " + newNZB + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.app.log.debug("Unable to parse the filename " + newNZB + " into a valid show")
            return False

        # make sure the result is sane: the split-out NZB must belong to the pack's season
        if (parse_result.season_number is not None and parse_result.season_number != season) or (
                parse_result.season_number is None and season != 1):
            sickrage.app.log.warning(
                "Found " + newNZB + " inside " + result.name +
                " but it doesn't seem to belong to the same season, ignoring it"
            )
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.app.log.warning(
                "Found " + newNZB + " inside " + result.name +
                " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        # skip the whole NZB if any contained episode is unwanted at this quality
        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
                sickrage.app.log.info(
                    "Ignoring result " + newNZB +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[result.quality])
                wantEp = False
                break

        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].getEpisode(season, curEp))

        # make a result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
def split_nzb_result(result, session=None):
    """
    Split a season-pack NZB result into separate per-episode results.

    :param result: search result object for a full-season NZB
    :param session: optional database session passed through to ``find_show``
    :return: False upon failure, a list of NZBDataSearchResult objects otherwise
    """
    # Fetch the season NZB. Check the response object *before* reading .text:
    # a failed fetch yields no response to read from, and Response.text itself
    # is never None, so the previous `url_data is None` guard could never fire.
    response = WebSession().get(result.url, needBytes=True)
    if response is None:
        sickrage.app.log.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    url_data = response.text

    # parse the season ep name
    try:
        parse_result = NameParser(False, show_id=result.show_id).parse(result.name)
    except InvalidNameException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up; releases without an explicit season number are assumed to be season 1
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separate_nzbs, xmlns = getSeasonNZBs(result.name, url_data, season)

    result_list = []

    for newNZB in separate_nzbs:
        sickrage.app.log.debug("Split out {} from {}".format(newNZB, result.name))

        # parse the name
        try:
            parse_result = NameParser(False, show_id=result.show_id).parse(newNZB)
        except InvalidNameException:
            sickrage.app.log.debug("Unable to parse the filename {} into a valid episode".format(newNZB))
            return False
        except InvalidShowException:
            sickrage.app.log.debug("Unable to parse the filename {} into a valid show".format(newNZB))
            return False

        # make sure the result is sane: the split-out NZB must belong to the pack's season
        if (parse_result.season_number is not None and parse_result.season_number != season) or (
                parse_result.season_number is None and season != 1):
            sickrage.app.log.warning(
                "Found {} inside {} but it doesn't seem to belong to the same season, ignoring it"
                .format(newNZB, result.name))
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.app.log.warning(
                "Found {} inside {} but it doesn't seem to be a valid episode NZB, ignoring it"
                .format(newNZB, result.name))
            continue

        # look the show up once per NZB; it is loop-invariant across episodes
        show_object = find_show(parse_result.indexer_id, session=session)

        # skip the whole NZB if any contained episode is unwanted at this quality
        want_ep = True
        for epNo in parse_result.episode_numbers:
            # use the defaulted `season` here (not parse_result.season_number,
            # which may be None for season-1 releases) to match the sanity
            # check above and the sibling implementations of this function
            if not show_object.want_episode(season, epNo, result.quality):
                sickrage.app.log.info(
                    "Ignoring result {} because we don't want an episode that is {}"
                    .format(newNZB, Quality.qualityStrings[result.quality]))
                want_ep = False
                break

        if not want_ep:
            continue

        # make a result
        cur_result = NZBDataSearchResult(season, parse_result.episode_numbers)
        cur_result.name = newNZB
        cur_result.provider = result.provider
        cur_result.quality = result.quality
        cur_result.extraInfo = [createNZBString(separate_nzbs[newNZB], xmlns)]

        result_list.append(cur_result)

    return result_list
def test_season_only_names(self):
    """Check that 'season_only' fixture names parse without a file extension."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'season_only')
def _getProperList(self):
    """
    Walk providers for propers (PROPER/REPACK re-releases of recently aired,
    already-snatched/downloaded episodes).

    :return: list of proper result objects we should snatch
    """
    propers = {}

    # only consider episodes that aired within the last two days
    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    origThreadName = threading.currentThread().getName()

    recently_aired = []
    for show in sickrage.app.showlist:
        self._lastProperSearch = self._get_lastProperSearch(show.indexerid)

        for episode in sickrage.app.main_db.get_many('tv_episodes', show.indexerid):
            if episode['airdate'] >= str(search_date.toordinal()):
                if episode['status'] in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST:
                    recently_aired += [episode]

        self._set_lastProperSearch(show.indexerid, datetime.datetime.today().toordinal())

    if not recently_aired:
        sickrage.app.log.info('No recently aired episodes, nothing to search for')
        return []

    # for each provider get a list of propers
    for providerID, providerObj in sickrage.app.search_providers.sort(
            randomize=sickrage.app.config.randomize_providers).items():
        # check provider type and provider is enabled
        if not sickrage.app.config.use_nzbs and providerObj.type in [NZBProvider.type, NewznabProvider.type]:
            continue
        elif not sickrage.app.config.use_torrents and providerObj.type in [TorrentProvider.type,
                                                                           TorrentRssProvider.type]:
            continue
        elif not providerObj.isEnabled:
            continue

        threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

        sickrage.app.log.info("Searching for any new PROPER releases from " + providerObj.name)

        try:
            curPropers = providerObj.find_propers(recently_aired)
        except AuthException as e:
            sickrage.app.log.warning("Authentication error: {}".format(e))
            continue
        except Exception as e:
            # one broken provider must not abort the whole proper search
            sickrage.app.log.debug(
                "Error while searching " + providerObj.name + ", skipping: {}".format(e))
            sickrage.app.log.debug(traceback.format_exc())
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            if not re.search(r'(^|[. _-])(proper|repack)([. _-]|$)', x.name, re.I):
                sickrage.app.log.debug('findPropers returned a non-proper, we have caught and skipped it.')
                continue

            name = self._genericName(x.name)
            if name not in propers:
                sickrage.app.log.debug("Found new proper: " + x.name)
                x.provider = providerObj
                propers[name] = x

    threading.currentThread().setName(origThreadName)

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

    finalPropers = []
    for curProper in sortedPropers:
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            sickrage.app.log.debug(
                "Unable to parse the filename " + curProper.name + " into a valid episode")
            continue
        except InvalidShowException:
            sickrage.app.log.debug("Unable to parse the filename " + curProper.name + " into a valid show")
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            sickrage.app.log.debug(
                "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
            continue

        sickrage.app.log.debug(
            "Successful match! Result " + parse_result.original_name + " matched to show " +
            parse_result.show.name)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            sickrage.app.log.debug("Proper " + curProper.name + " were rejected by our release filters.")
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                sickrage.app.log.debug(
                    "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                continue

        # check if we actually want this proper (if it's the right quality)
        # FIX: `main_db` is an attribute, not a callable — the previous
        # `sickrage.app.main_db()` call diverged from the two other uses of
        # `sickrage.app.main_db.get_many` in this very function
        dbData = [x for x in sickrage.app.main_db.get_many('tv_episodes', bestResult.indexerid)
                  if x['season'] == bestResult.season and x['episode'] == bestResult.episode]
        if not dbData:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(dbData[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            dbData = [x for x in sickrage.app.main_db.get_many('tv_episodes', bestResult.indexerid)
                      if x['season'] == bestResult.season and x['episode'] == bestResult.episode]

            oldVersion = int(dbData[0]["version"])
            oldRelease_group = (dbData[0]["release_group"])

            if -1 < oldVersion < bestResult.version:
                sickrage.app.log.info(
                    "Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                sickrage.app.log.info(
                    "Skipping proper from release group: " + bestResult.release_group +
                    ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            sickrage.app.log.info("Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def test_no_season_general_names(self):
    """Check that 'no_season_general' fixture names parse without a file extension."""
    self._test_names(NameParser(False), 'no_season_general')
def add_cache_entry(self, name, url, seeders, leechers, size):
    """Add a search result to this provider's cache database.

    :param name: release name
    :param url: download URL or magnet URI; used as the uniqueness key
    :param seeders: torrent seeder count
    :param leechers: torrent leecher count
    :param size: release size in bytes (-1 when unknown)
    """
    session = sickrage.app.cache_db.session()

    # check for existing entry in cache (dedupe on the URL)
    if session.query(CacheDB.Provider).filter_by(url=url).count():
        return

    # ignore invalid and private IP address urls
    if not validate_url(url):
        if not url.startswith('magnet'):
            return
        elif is_ip_private(url.split(r'//')[-1].split(r'/')[0]):
            return

    try:
        # parse release name
        parse_result = NameParser(validate_show=True).parse(name)
        if parse_result.series_name and parse_result.quality != Quality.UNKNOWN:
            # releases without an explicit season number are assumed to be season 1
            season = parse_result.season_number if parse_result.season_number else 1
            episodes = parse_result.episode_numbers

            if season and episodes:
                # store episodes as a "|"-separated string, e.g. "|1|2|"
                episodeText = "|" + "|".join(map(str, episodes)) + "|"

                # get quality of release
                quality = parse_result.quality

                # get release group
                release_group = parse_result.release_group

                # get version
                version = parse_result.version

                dbData = {
                    'provider': self.providerID,
                    'name': name,
                    'season': season,
                    'episodes': episodeText,
                    'series_id': parse_result.indexer_id,
                    'url': url,
                    'time': int(time.mktime(datetime.datetime.today().timetuple())),
                    'quality': quality,
                    'release_group': release_group,
                    'version': version,
                    'seeders': try_int(seeders),
                    'leechers': try_int(leechers),
                    'size': try_int(size, -1)
                }

                # add to internal database
                try:
                    session.add(CacheDB.Provider(**dbData))
                    session.commit()
                    sickrage.app.log.debug("SEARCH RESULT:[{}] ADDED TO CACHE!".format(name))
                except IntegrityError:
                    # duplicate row raced us in; roll the session back so it is
                    # usable again — a failed flush leaves it in an invalid state
                    session.rollback()

                # add to external provider cache database (best-effort)
                if sickrage.app.config.enable_api_providers_cache and not self.provider.private:
                    try:
                        sickrage.app.io_loop.run_in_executor(
                            None,
                            functools.partial(sickrage.app.api.provider_cache.add, data=dbData))
                    except Exception as e:
                        pass
    except (InvalidShowException, InvalidNameException):
        # unparsable releases are simply not cached
        pass
def test_season_only_names(self):
    """Check that 'season_only' fixture names parse without a file extension."""
    self._test_names(NameParser(False), 'season_only')
def addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Add an RSS feed item to this provider's cache database.

    :param name: release name from the feed item
    :param url: download URL of the item; used as the uniqueness key
    :param parse_result: optional pre-parsed NameParser result; when omitted,
        ``name`` is parsed here
    :param indexer_id: optional indexer id used to look up the show for parsing
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
            if not parse_result:
                return
        except (InvalidShowException, InvalidNameException):
            sickrage.srCore.srLogger.debug(
                "RSS ITEM:[{}] IGNORED!".format(name))
            return

    if not parse_result.series_name:
        return

    # if we made it this far then lets add the parsed result to cache for usage later on
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a "|"-separated string, e.g. "|1|2|"
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(
            time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        if not len([
            x for x in sickrage.srCore.cacheDB.db.get_many(
                'providers', self.providerID, with_doc=True)
            if x['doc']['url'] == url
        ]):
            sickrage.srCore.cacheDB.db.insert({
                '_t': 'providers',
                'provider': self.providerID,
                'name': name,
                'season': season,
                'episodes': episodeText,
                # FIX: guard against parse results with no matched show — the
                # sibling implementation of this method applies the same
                # fallback instead of raising AttributeError here
                'indexerid': parse_result.show.indexerid if parse_result.show else 0,
                'url': url,
                'time': curTimestamp,
                'quality': quality,
                'release_group': release_group,
                'version': version
            })

            sickrage.srCore.srLogger.debug("RSS ITEM:[%s] ADDED!", name)
def test_stupid_file_names(self):
    """Check that 'stupid' fixture names parse when given an .avi extension."""
    def as_avi(name):
        return name + '.avi'

    self._test_names(NameParser(), 'stupid', as_avi)
def addCacheEntry(self, name, url, seeders, leechers, size, files, parse_result=None, indexer_id=0):
    """Add a search result item to this provider's cache database.

    :param name: release name
    :param url: download URL of the item; used as the uniqueness key
    :param seeders: torrent seeder count
    :param leechers: torrent leecher count
    :param size: release size in bytes
    :param files: file listing for the release; stored JSON-encoded
    :param parse_result: optional pre-parsed NameParser result; when omitted,
        ``name`` is parsed here
    :param indexer_id: optional indexer id used to look up the show for parsing
    """
    # check for existing entry in cache (dedupe on the download URL)
    if len([
        x for x in sickrage.app.cache_db.db.get_many(
            'providers', self.providerID, with_doc=True)
        if x['doc']['url'] == url
    ]):
        return

    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        try:
            # when the "valid shows only" cache option is on we require the
            # parsed show to validate instead of querying the indexers
            parse_result = NameParser(
                showObj=findCertainShow(sickrage.app.showlist, indexer_id),
                tryIndexers=not sickrage.app.config.
                enable_rss_cache_valid_shows,
                validate_show=sickrage.app.config.
                enable_rss_cache_valid_shows).parse(name)
        except (InvalidShowException, InvalidNameException):
            # unparsable items fall through and are skipped by the checks below
            pass

    if parse_result and parse_result.series_name:
        # releases without an explicit season number are assumed to be season 1
        season = parse_result.season_number if parse_result.season_number else 1
        episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a "|"-separated string, e.g. "|1|2|"
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get quality of release
            quality = parse_result.quality

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            dbData = {
                '_t': 'providers',
                'provider': self.providerID,
                'name': name,
                'season': season,
                'episodes': episodeText,
                'indexerid': parse_result.indexerid,
                'url': url,
                'time': int(time.mktime(datetime.datetime.today().timetuple())),
                'quality': quality,
                'release_group': release_group,
                'version': version,
                'seeders': seeders,
                'leechers': leechers,
                'size': size,
                'files': json.dumps(files)
            }

            # add to internal database
            sickrage.app.cache_db.db.insert(dbData)

            # add to external database (best-effort; failures deliberately ignored)
            if sickrage.app.config.enable_api_providers_cache and not self.provider.private:
                try:
                    sickrage.app.api.add_cache_result(dbData)
                except Exception:
                    pass

            sickrage.app.log.debug("SEARCH RESULT:[%s] ADDED TO CACHE!", name)
def test_no_season_multi_ep_file_names(self):
    """Check that 'no_season_multi_ep' fixture names parse when given an .avi extension."""
    def as_avi(name):
        return name + '.avi'

    self._test_names(NameParser(), 'no_season_multi_ep', as_avi)
def validateDir(path, dirName, nzbNameOriginal, failed, result):
    """
    Check if directory is valid for processing

    :param path: Path to use
    :param dirName: Directory to check
    :param nzbNameOriginal: Original NZB name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    IGNORED_FOLDERS = ['.AppleDouble', '.@__thumb', '@eaDir']
    folder_name = os.path.basename(dirName)
    if folder_name in IGNORED_FOLDERS:
        return False

    result.output += logHelper("Processing folder " + dirName, sickrage.srCore.srLogger.DEBUG)

    # marker prefixes left behind by earlier processing stages
    if folder_name.startswith('_FAILED_'):
        result.output += logHelper("The directory name indicates it failed to extract.",
                                   sickrage.srCore.srLogger.DEBUG)
        failed = True
    elif folder_name.startswith('_UNDERSIZED_'):
        result.output += logHelper("The directory name indicates that it was previously rejected for being undersized.",
                                   sickrage.srCore.srLogger.DEBUG)
        failed = True
    elif folder_name.upper().startswith('_UNPACK'):
        result.output += logHelper(
            "The directory name indicates that this release is in the process of being unpacked.",
            sickrage.srCore.srLogger.DEBUG)
        result.missedfiles.append(dirName + " : Being unpacked")
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal, result)
        result.missedfiles.append(dirName + " : Failed download")
        return False

    if is_hidden_folder(os.path.join(path, dirName)):
        result.output += logHelper("Ignoring hidden folder: " + dirName, sickrage.srCore.srLogger.DEBUG)
        result.missedfiles.append(dirName + " : Hidden folder")
        return False

    # make sure the dir isn't inside a show dir
    for dbData in [x['doc'] for x in sickrage.srCore.mainDB.db.all('tv_shows', with_doc=True)]:
        if dirName.lower().startswith(os.path.realpath(dbData["location"]).lower() + os.sep) or \
                dirName.lower() == os.path.realpath(dbData["location"]).lower():
            result.output += logHelper(
                "Cannot process an episode that's already been moved to its show dir, skipping " + dirName,
                sickrage.srCore.srLogger.WARNING)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for _, processdir, fileList in os.walk(os.path.join(path, dirName), topdown=False):
        allDirs += processdir
        allFiles += fileList

    videoFiles = [x for x in allFiles if isMediaFile(x)]
    allDirs.append(dirName)

    # check if the dir have at least one tv video file; the first name that
    # parses as an episode makes the whole directory processable
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    # fall back to trying the directory names themselves
    for proc_dir in allDirs:
        try:
            NameParser().parse(proc_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickrage.srCore.srConfig.UNPACK:
        # Search for packed release
        packedFiles = [x for x in allFiles if isRarFile(x)]

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    result.output += logHelper(dirName + " : No processable items found in folder",
                               sickrage.srCore.srLogger.DEBUG)
    return False
def test_standard_names(self):
    """Check that 'standard' fixture names parse without a file extension."""
    self._test_names(NameParser(True), 'standard')
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """

    # build a sample episode, render it with the pattern, then re-parse the
    # rendered name and check we get the same numbering back
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = os.path.join(new_path, new_name)

    if not new_name:
        sickrage.srCore.srLogger.debug("Unable to create a name out of " + pattern)
        return False

    sickrage.srCore.srLogger.debug("Trying to parse " + new_name)

    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception:
        sickrage.srCore.srLogger.debug("Unable to parse " + new_name + ", not valid")
        return False

    sickrage.srCore.srLogger.debug("Parsed " + new_name + " into " + str(result))

    if abd or sports:
        if result.air_date != ep.airdate:
            sickrage.srCore.srLogger.debug(
                "Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        # anime (other than type 3) is matched on absolute episode numbers
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [
            x.absolute_number for x in [ep] + ep.relatedEps
        ]:
            sickrage.srCore.srLogger.debug(
                "Absolute numbering incorrect in parsed episode, pattern isn't valid"
            )
            return False
    else:
        if result.season_number != ep.season:
            sickrage.srCore.srLogger.debug(
                "Season number incorrect in parsed episode, pattern isn't valid"
            )
            return False

        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            sickrage.srCore.srLogger.debug(
                "Episode numbering incorrect in parsed episode, pattern isn't valid"
            )
            return False

    return True
def test_standard_repeat_names(self):
    """Check that 'standard_repeat' fixture names parse without a file extension."""
    self._test_names(NameParser(False), 'standard_repeat')
def make_ep_from_file(self, filename):
    """Create (or update) episode objects for a media file found on disk.

    Parses season/episode numbers from ``filename``, creates any missing
    TVEpisode rows, associates the file with them, and updates status/quality
    when the file replaces a previous one.

    :param filename: full path to the media file
    :return: the root TVEpisode (first episode parsed from the file), with any
        further episodes attached as related episodes, or None on failure
    """
    if not os.path.isfile(filename):
        sickrage.app.log.info(
            str(self.indexer_id) + ": That isn't even a real file dude... " + filename)
        return None

    sickrage.app.log.debug(
        str(self.indexer_id) + ": Creating episode object from " + filename)

    try:
        parse_result = NameParser(validate_show=False).parse(
            filename, skip_scene_detection=True)
    except InvalidNameException:
        sickrage.app.log.debug("Unable to parse the filename " +
                               filename + " into a valid episode")
        return None
    except InvalidShowException:
        sickrage.app.log.debug("Unable to parse the filename " +
                               filename + " into a valid show")
        return None

    if not len(parse_result.episode_numbers):
        sickrage.app.log.info("parse_result: " + str(parse_result))
        sickrage.app.log.warning("No episode number found in " +
                                 filename + ", ignoring it")
        return None

    # for now lets assume that any episode in the show dir belongs to that show
    season = parse_result.season_number if parse_result.season_number is not None else 1

    root_ep = None
    for curEpNum in parse_result.episode_numbers:
        episode = int(curEpNum)

        sickrage.app.log.debug("%s: %s parsed to %s S%02dE%02d" %
                               (self.indexer_id, filename, self.name,
                                season or 0, episode or 0))

        check_quality_again = False

        try:
            episode_obj = self.get_episode(season, episode)
        except EpisodeNotFoundException:
            # episode row doesn't exist yet - create it now
            object_session(self).add(
                TVEpisode(
                    **{
                        'showid': self.indexer_id,
                        'indexer': self.indexer,
                        'season': season,
                        'episode': episode,
                        'location': filename
                    }))
            object_session(self).commit()
            episode_obj = self.get_episode(season, episode)

        # if there is a new file associated with this ep then re-check the quality
        if episode_obj.location and os.path.normpath(
                episode_obj.location) != os.path.normpath(filename):
            sickrage.app.log.debug(
                "The old episode had a different file associated with it, I will re-check "
                "the quality based on the new filename " + filename)
            check_quality_again = True

        # if the sizes are the same then it's probably the same file
        old_size = episode_obj.file_size
        episode_obj.location = filename
        same_file = old_size and episode_obj.file_size == old_size

        episode_obj.checkForMetaFiles()

        # first parsed episode becomes the root; the rest hang off it
        if root_ep is None:
            root_ep = episode_obj
        else:
            if episode_obj not in root_ep.related_episodes:
                root_ep.related_episodes.append(episode_obj)

        # if it's a new file then
        if not same_file:
            episode_obj.release_name = ''

        # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's the
        # same file
        if check_quality_again and not same_file:
            new_quality = Quality.name_quality(filename, self.is_anime)
            sickrage.app.log.debug("Since this file has been renamed")
            episode_obj.status = Quality.composite_status(
                DOWNLOADED, new_quality)

        # check for status/quality changes as long as it's a new file
        elif not same_file and is_media_file(
                filename
        ) and episode_obj.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [
            IGNORED
        ]:
            old_status, old_quality = Quality.split_composite_status(
                episode_obj.status)
            new_quality = Quality.name_quality(filename, self.is_anime)

            new_status = None

            # if it was snatched and now exists then set the status correctly
            if old_status == SNATCHED and old_quality <= new_quality:
                sickrage.app.log.debug(
                    "STATUS: this ep used to be snatched with quality " +
                    Quality.qualityStrings[old_quality] +
                    " but a file exists with quality " +
                    Quality.qualityStrings[new_quality] +
                    " so I'm setting the status to DOWNLOADED")
                new_status = DOWNLOADED

            # if it was snatched proper and we found a higher quality one then allow the status change
            elif old_status == SNATCHED_PROPER and old_quality < new_quality:
                sickrage.app.log.debug(
                    "STATUS: this ep used to be snatched proper with quality " +
                    Quality.qualityStrings[old_quality] +
                    " but a file exists with quality " +
                    Quality.qualityStrings[new_quality] +
                    " so I'm setting the status to DOWNLOADED")
                new_status = DOWNLOADED

            elif old_status not in (SNATCHED, SNATCHED_PROPER):
                new_status = DOWNLOADED

            if new_status is not None:
                sickrage.app.log.debug(
                    "STATUS: we have an associated file, so setting the status from " +
                    str(episode_obj.status) + " to DOWNLOADED/" + str(
                        Quality.status_from_name(filename, anime=self.is_anime)))
                episode_obj.status = Quality.composite_status(
                    new_status, new_quality)

    # creating metafiles on the root should be good enough
    if root_ep:
        root_ep.create_meta_files()

    object_session(self).commit()

    return root_ep
def test_fov_repeat_names(self):
    """Exercise the 'fov_repeat' fixture set with a non-file-mode NameParser."""
    parser = NameParser(False)
    self._test_names(parser, 'fov_repeat')
def splitNZBResult(result):
    """
    Split a whole-season NZB search result into separate per-episode results.

    Downloads the NZB data from ``result.url``, parses the release name to
    determine the season, splits the NZB XML into one NZB per episode, and
    builds an ``NZBDataSearchResult`` for every episode the show wants.

    :param result: search result object for a full-season NZB
    :return: False upon failure, a list of episode result objects otherwise
    """
    urlData = WebSession().get(result.url, needBytes=True)
    if urlData is None:
        sickrage.app.log.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season ep name to discover which season this NZB covers
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up; no season number in the name means season 1 by convention
    season = parse_result.season_number if parse_result.season_number is not None else 1

    # split the raw NZB XML into {release_name: element} plus its XML namespace
    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:
        sickrage.app.log.debug("Split out " + newNZB + " from " + result.name)

        # parse the name of each split-out NZB; any failure aborts the whole
        # split (returns False) rather than skipping just that entry
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            sickrage.app.log.debug("Unable to parse the filename " + newNZB + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.app.log.debug("Unable to parse the filename " + newNZB + " into a valid show")
            return False

        # make sure the result is sane: it must belong to the same season
        # (a missing season number is only acceptable when season == 1)
        if (parse_result.season_number is not None and parse_result.season_number != season) or (
                parse_result.season_number is None and season != 1):
            sickrage.app.log.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it")
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.app.log.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        # skip the NZB entirely if any of its episodes is unwanted at this quality
        # NOTE(review): extraInfo[0] appears to hold the show object here — confirm
        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].want_episode(season, epNo,
                                                    result.quality):
                sickrage.app.log.info(
                    "Ignoring result " + newNZB + " because we don't want an episode that is " +
                    Quality.qualityStrings[result.quality])
                wantEp = False
                break

        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].get_episode(season, curEp))

        # make a result carrying the provider/quality of the parent season result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
def test_no_season_multi_ep_names(self):
    """Exercise the 'no_season_multi_ep' fixtures without show validation."""
    parser = NameParser(False, showObj=self.show, validate_show=False)
    self._test_names(parser, 'no_season_multi_ep')