def _analyze_name(self, name, resource=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    name: A string which we want to analyze to determine show info from (unicode)

    Returns a (show, season, [episodes], quality) tuple. show and season may be
    None and episodes may be [] if none were found.
    NOTE(review): the original docstring claimed "(indexer_id, season,
    [episodes])", but the code below returns the parsed show object plus a
    fourth quality element.
    """
    logger.log(u'Analyzing name ' + repr(name))

    # default "nothing found" result: (show, season, episodes, quality)
    to_return = (None, None, [], None)

    if not name:
        return to_return

    # parse the name to break it into show name, season, and episode
    np = NameParser(resource, try_scene_exceptions=True, convert=True)
    parse_result = np.parse(name)

    # NOTE(review): Python 2 str.decode with 'xmlcharrefreplace' — that error
    # handler is encode-only, so this raises TypeError if the bytes are not
    # valid UTF-8; confirm inputs are always UTF-8 here.
    self._log(u'Parsed %s<br />.. from %s'
              % (str(parse_result).decode('utf-8', 'xmlcharrefreplace'), name), logger.DEBUG)

    if parse_result.is_air_by_date:
        # air-by-date shows: season is the sentinel -1 and the air date stands
        # in for the episode number list
        season = -1
        episodes = [parse_result.air_date]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    # show object
    show = parse_result.show
    to_return = (show, season, episodes, parse_result.quality)

    self._finalize(parse_result)
    return to_return
def history_snatched_proper_fix():
    """
    One-off maintenance task: re-tag history rows that were snatched as a
    PROPER/REPACK/REAL release with the SNATCHED_PROPER status.

    Guarded by the 'history_snatch_proper' DB flag so it only ever runs once.
    """
    my_db = db.DBConnection()
    # skip entirely if this migration already ran
    if not my_db.has_flag('history_snatch_proper'):
        logger.log('Updating history items with status Snatched Proper in a background process...')
        # `action` is a composite status int; LIKE "%%<2-digit code>" matches
        # rows whose action ends with the SNATCHED status code
        sql_result = my_db.select('SELECT rowid, resource, quality, showid'
                                  ' FROM history'
                                  ' WHERE action LIKE "%%%02d"' % SNATCHED +
                                  ' AND (UPPER(resource) LIKE "%PROPER%"'
                                  ' OR UPPER(resource) LIKE "%REPACK%"'
                                  ' OR UPPER(resource) LIKE "%REAL%")')
        if sql_result:
            cl = []  # batched UPDATE statements for a single mass_action call
            for r in sql_result:
                show_obj = None
                try:
                    show_obj = helpers.findCertainShow(sickbeard.showList, int(r['showid']))
                except (StandardError, Exception):
                    # best effort: parse without a show object if lookup fails
                    pass
                np = NameParser(False, showObj=show_obj, testing=True)
                try:
                    pr = np.parse(r['resource'])
                except (StandardError, Exception):
                    # unparsable resource name: leave this history row untouched
                    continue
                # proper level > 0 means the release really is a proper/repack/real
                if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
                    cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
                               [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])), r['rowid']]])
            if cl:
                my_db.mass_action(cl)
        logger.log('Completed the history table update with status Snatched Proper.')
        # set the flag so this never runs again
        my_db.add_flag('history_snatch_proper')
def test_formating(self):
    """Smoke test: parse a scene release name and print the converted result."""
    release = "UFC.172.26th.April.2014.HDTV.x264.720p-Sir.Paul[rartv]"
    # parse the name to break it into show name, season, and episode
    # Bug fix: the original passed the Python 2 builtin `file` as NameParser's
    # first (boolean file_name) positional argument; it only "worked" because
    # a type object is truthy, and it is a NameError on Python 3. Pass an
    # explicit True instead (identical runtime behavior).
    np = NameParser(True)
    parse_result = np.parse(release).convert()
    print(parse_result)
def _is_season_pack(self, name):
    """
    Return True if `name` parses to a release covering every episode of the
    parsed season (i.e. a full season pack), False otherwise.

    :param name: release name to analyze
    :return: bool
    """
    try:
        myParser = NameParser(tryIndexers=True, trySceneExceptions=True, convert=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logger.log(
            u"Unable to parse the filename " + str(name) + " into a valid episode", logger.DEBUG)
        return False
    except InvalidShowException:
        logger.log(
            u"Unable to parse the filename " + str(name) + " into a valid show", logger.DEBUG)
        return False

    # count how many episodes the DB knows for this show/season
    myDB = db.DBConnection()
    sql_selection = "select count(*) as count from tv_episodes where showid = ? and season = ?"
    episodes = myDB.select(
        sql_selection, [parse_result.show.indexerid, parse_result.season_number])

    if int(episodes[0]['count']) == len(parse_result.episode_numbers):
        return True
    # Bug fix: the original fell off the end here and implicitly returned
    # None; return an explicit False so callers always get a bool.
    return False
def _parseItem(self, item):
    """
    Take one RSS feed item, parse its title into episode info, resolve the
    show on TVDB, and add the result to the provider cache.
    """
    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
    # search in every language so foreign-titled shows resolve too
    ltvdb_api_parms['search_all_languages'] = True

    (title, url) = self.provider._get_title_and_url(item)

    if not title or not url:
        logger.log(u"The XML returned from the Ethor RSS feed is incomplete, this result is unusable", logger.ERROR)
        return

    # parse the release title into show name / season / episode
    try:
        myParser = NameParser()
        parse_result = myParser.parse(title)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.DEBUG)
        return

    try:
        # look the parsed series name up on TVDB (network call)
        t = tvdb_api.Tvdb(**ltvdb_api_parms)
        showObj = t[parse_result.series_name]
    except tvdb_exceptions.tvdb_error:
        logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR)
        return

    logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)

    self._addCacheEntry(name=title, url=url, tvdb_id=showObj['id'])
def validate_name(pattern, multi=None, file_only=False, abd=False):
    """
    Check that a naming pattern round-trips: build a sample episode name from
    `pattern`, parse it back, and compare the parsed fields with the sample.

    :param pattern: naming pattern to validate
    :param multi: multi-episode naming style (None for a single episode)
    :param file_only: validate the filename alone, without the directory part
    :param abd: validate as an air-by-date episode
    :return: True if the pattern parses back correctly, False otherwise
    """
    sample_ep = _generate_sample_ep(multi, abd)
    name_parser = NameParser(True)

    candidate = sample_ep.formatted_filename(pattern, multi) + '.ext'
    sample_dir = sample_ep.formatted_dir(pattern, multi)
    if not file_only:
        candidate = ek.ek(os.path.join, sample_dir, candidate)

    logger.log(u"Trying to parse " + candidate, logger.DEBUG)
    try:
        parsed = name_parser.parse(candidate)
    except InvalidNameException:
        logger.log(u"Unable to parse " + candidate + ", not valid", logger.DEBUG)
        return False
    logger.log(candidate + " vs " + str(parsed), logger.DEBUG)

    # air-by-date patterns are judged on the air date alone
    if abd:
        return parsed.air_date == sample_ep.airdate

    # regular patterns must recover both season and the full episode list
    expected_episodes = [x.episode for x in [sample_ep] + sample_ep.relatedEps]
    return (parsed.season_number == sample_ep.season
            and parsed.episode_numbers == expected_episodes)
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not

    NOTE(review): as visible here the function only ever returns False (or
    falls off the end returning None after a successful parse) — the
    success-path comparisons of `result` against `ep` appear to be missing.
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

    # naming_pattern relaxes parsing for pattern validation
    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception, e:  # Python 2 syntax; `e` is bound but unused
        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
        return False
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None if no quality can be determined.

    :param title: original torrent title
    :param torrent_id: site torrent id used to fetch the file list
    :param ep_number: expected number of episodes in the season
    """
    # Bug fix: the original list had a missing comma ('ts' 'ogv'), which
    # Python silently concatenated into a single bogus extension 'tsogv',
    # so neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)

    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
        # Bug fix: the original only logged here and fell through with an
        # empty list; with no file list there is nothing to inspect.
        return None

    videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

    # Filtering SingleEpisode/MultiSeason Torrent
    if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
        logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(
            len(videoFiles)), logger.DEBUG)
        logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                   logger.DEBUG)
        return None

    # the title itself may already carry a recognizable scene quality
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title

    # otherwise inspect individual video files until one yields a quality
    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(
            quality)

    return title
def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False):
    """
    Build a sample episode name from `pattern` and check that the name parser
    can parse it back.

    :param pattern: naming pattern to validate
    :param multi: multi-episode naming style (None for a single episode)
    :param file_only: validate the filename alone, without the directory part
    :param abd: validate as an air-by-date episode
    :param sports: validate as a sports episode

    NOTE(review): as visible here the function only returns False on a parse
    failure and falls off the end (returning None) after a successful parse —
    the comparisons of `result` against the sample episode appear truncated.
    """
    ep = _generate_sample_ep(multi, abd, sports)

    parser = NameParser(True)

    new_name = ep.formatted_filename(pattern, multi) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek.ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

    try:
        result = parser.parse(new_name)
    except InvalidNameException, e:  # Python 2 syntax; `e` is bound but unused
        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
        return False
def test_parsing_scene_release(self):
    """Smoke-test scene-release parsing: parse one known release twice (raw
    and converted) and print both results."""
    self.loadFromDB()

    scene_release = 'Pawn Stars S08E41 Field Trip HDTV x264-tNe'

    # parse the file name
    raw_result = ''
    converted_result = ''
    try:
        scene_parser = NameParser(False, 1)
        raw_result = scene_parser.parse(scene_release)
        converted_result = scene_parser.parse(scene_release).convert()
    except InvalidNameException:
        print(u"Unable to parse the filename " + scene_release + " into a valid episode")

    print(raw_result)
    print(converted_result)
def process(self):
    """
    Handle a failed download: recover the release name, parse it into a
    show/episode list, and queue a FailedQueueItem per episode so a new
    search is triggered.

    :return: True on success
    :raises exceptions.FailedProcessingFailed: when no release name can be
        determined or the name cannot be parsed.
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    try:
        parser = NameParser(False, showObj=self.show, convert=True)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.DEBUG)
        raise exceptions.FailedProcessingFailed()
    except InvalidShowException:
        self._log(u"Error: unable to parse release name " + releaseName + " into a valid show", logger.DEBUG)
        raise exceptions.FailedProcessingFailed()

    # dump what the parser extracted, for troubleshooting
    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

    # queue one failed-episode search per episode in the release
    for episode in parsed.episode_numbers:
        segment = parsed.show.getEpisode(parsed.season_number, episode)

        cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, [segment])
        sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

    return True
def findEpisode(self, episode, manualSearch=False):
    """
    Search this provider for a specific episode.

    Checks the provider cache first; only hits the provider itself for a
    manual search that found nothing in the cache. Parsed results are
    filtered against the wanted episode and quality before being returned.

    :param episode: episode object to search for
    :param manualSearch: True when the user triggered the search by hand
    :return: list of result objects
    """
    self._checkAuth()

    logger.log(u"Searching "+self.name+" for " + episode.prettyName())

    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: "+str(results), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    itemList = []
    for cur_search_string in self._get_episode_search_strings(episode):
        itemList += self._doSearch(cur_search_string)

    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser()
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
            continue

        # make sure the parsed result really is the episode we want
        if episode.show.air_by_date:
            if parse_result.air_date != episode.airdate:
                logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
            logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG)
            continue

        quality = self.getQuality(item)

        # respect the show's quality settings
        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        result = self.getResult([episode])
        result.url = url
        result.name = title
        result.quality = quality
        result.provider = self
        result.content = None

        results.append(result)

    return results
def _find_season_quality(self, title, torrent_link, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found inspecting torrent file list
    """
    quality = Quality.UNKNOWN
    file_name = None

    data = self.get_url(torrent_link)
    if not data:
        return None

    try:
        with BS4Parser(data, features=['html5lib', 'permissive']) as soup:
            file_table = soup.find('table', attrs={'class': 'torrentFileList'})

            if not file_table:
                return None

            files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
            # NOTE(review): mediaExtensions is not defined in this function —
            # presumably a module-level list of video extensions; confirm.
            video_files = filter(lambda i: i.rpartition('.')[2].lower() in mediaExtensions, files)

            # Filtering SingleEpisode/MultiSeason Torrent
            if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
                logger.log(u'Result %s lists %s episodes with %s episodes retrieved in torrent'
                           % (title, ep_number, len(video_files)), logger.DEBUG)
                logger.log(u'Result %s seem to be a single episode or multi-season torrent, skipping result...'
                           % title, logger.DEBUG)
                return None

            # the title itself may already carry a recognizable scene quality
            if Quality.UNKNOWN != Quality.sceneQuality(title):
                return title

            # otherwise inspect individual files until one yields a quality
            for file_name in video_files:
                quality = Quality.sceneQuality(os.path.basename(file_name))
                if Quality.UNKNOWN != quality:
                    break

            # fall back to an assumed quality from the last inspected file
            if None is not file_name and Quality.UNKNOWN == quality:
                quality = Quality.assumeQuality(os.path.basename(file_name))

            if Quality.UNKNOWN == quality:
                logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
                return None

            try:
                my_parser = NameParser(showObj=self.show)
                parse_result = my_parser.parse(file_name)
            except (InvalidNameException, InvalidShowException):
                return None

            logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

            if parse_result.series_name and parse_result.season_number:
                title = parse_result.series_name + ' S%02d %s' % (int(parse_result.season_number),
                                                                  self._reverse_quality(quality))
            return title

    except Exception:
        # broad catch: a scrape failure is logged, and None is returned implicitly
        logger.log(u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: result accumulator (unused in this body)
    :return: True if file is already postprocessed, False if not
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    myDB = db.DBConnection()
    sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
    if sqlResult:
        # result.output += logHelper(u"You're trying to post process a dir that's already been processed, skipping", logging.DEBUG)
        return True

    else:
        sqlResult = myDB.select(
            "SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
        if sqlResult:
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", logging.DEBUG)
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        # NOTE(review): NameParser's first positional argument is the boolean
        # file_name flag; passing dirName (a non-empty string) merely acts as
        # True here — confirm intent.
        np = NameParser(dirName, tryIndexers=True)
        try:
            # if it fails to find any info (because we're doing an unparsable
            # folder, like the TV root dir) it will throw an exception, which
            # we want to ignore
            parse_result = np.parse(dirName)
        except Exception:
            # expected for unparsable dirs; keep parse_result falsy so the
            # episode-specific filter below is skipped
            parse_result = False

        # This part is always the same
        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"

        params = []
        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
            # Bug fix: these values were previously concatenated straight into
            # the SQL text; bind them as parameters instead (same matching
            # semantics — SQLite applies the INTEGER column affinity).
            search_sql += " and tv_episodes.showid = ? and tv_episodes.season = ? and tv_episodes.episode = ?"
            params += [str(parse_result.show.indexerid), str(parse_result.season_number),
                       str(parse_result.episode_numbers[0])]

        # DOWNLOADED is an internally generated list of ints, safe to inline
        search_sql += " and tv_episodes.status IN (" + ",".join(
            [str(x) for x in common.Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        params.append('%' + videofile)

        sqlResult = myDB.select(search_sql, params)
        if sqlResult:
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", logging.DEBUG)
            return True

    return False
def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False):
    """
    Generate a sample episode for `pattern` and verify the name parser
    recovers the same air date / season / episode numbers from it.

    :param pattern: naming pattern to validate
    :param multi: multi-episode naming style (None for a single episode)
    :param file_only: validate the filename alone, without the directory part
    :param abd: validate as an air-by-date episode
    :param sports: validate as a sports episode
    :return: True if the pattern round-trips, False otherwise
    """
    ep = _generate_sample_ep(multi, abd, sports)

    # regex mode 1 selects the sports-specific name regexes
    regexMode = 0
    if sports:
        regexMode = 1

    parser = NameParser(True, regexMode)

    new_name = ep.formatted_filename(pattern, multi) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek.ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

    try:
        result = parser.parse(new_name)
    except InvalidNameException:
        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
        return False

    logger.log("The name " + new_name + " parsed into " + str(result), logger.DEBUG)

    # compare the parsed fields against the sample episode
    if abd:
        if result.air_date != ep.airdate:
            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    elif sports:
        if result.sports_date != ep.airdate:
            logger.log(u"Sports air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    else:
        if result.season_number != ep.season:
            logger.log(u"Season incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            logger.log(u"Episode incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False

    return True
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None if no quality can be determined.

    :param title: original torrent title
    :param torrent_id: site torrent id used to fetch the file list
    :param ep_number: expected number of episodes in the season
    """
    # Bug fix: the original list had a missing comma ('ts' 'ogv'), which
    # Python silently concatenated into a single bogus extension 'tsogv',
    # so neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)

    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
        # Bug fix: the original only logged here and fell through with an
        # empty list; with no file list there is nothing to inspect.
        return None

    videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

    # Filtering SingleEpisode/MultiSeason Torrent
    if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
        logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(
            len(videoFiles)), logger.DEBUG)
        logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                   logger.DEBUG)
        return None

    # the title itself may already carry a recognizable scene quality
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title

    # otherwise inspect individual video files until one yields a quality
    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser(showObj=self.show)
        parse_result = myParser.parse(fileName)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(
            quality)

    return title
def test_parsing_scene_release(self):
    """Parse one known scene release both raw and scene-converted, printing
    each outcome (empty string when parsing fails)."""
    self.loadFromDB()

    scene_release = 'Pawn Stars S08E41 Field Trip HDTV x264-tNe'

    # parse the file name
    plain_result = ''
    scene_result = ''
    try:
        name_parser = NameParser(False, 1)
        plain_result = name_parser.parse(scene_release)
        scene_result = name_parser.parse(scene_release).convert()
    except InvalidNameException:
        print(u"Unable to parse the filename " + scene_release + " into a valid episode")

    print(plain_result)
    print(scene_result)
def process(self):
    """
    Handle a failed download: recover the release name, parse it, resolve the
    show, mark the episodes as failed in history, and queue a new search.

    :return: True on success
    :raises exceptions.FailedProcessingFailed: when the release name cannot be
        determined/parsed or the show cannot be resolved.
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    parser = NameParser(False)
    try:
        parsed = parser.parse(releaseName, True)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    # dump what the parser extracted, for troubleshooting
    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

    show_id = self._get_show_id(parsed.series_name)
    if show_id is None:
        self._log(u"Warning: couldn't find show ID", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    self._log(u"Found show_id: " + str(show_id), logger.DEBUG)

    self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
    if self._show_obj is None:
        self._log(u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    # Revert before fail, as fail alters the history
    # self._log(u"Reverting episodes...")
    # self.log += failed_history.revertEpisodes(self._show_obj, parsed.season_number, parsed.episode_numbers)
    # self._log(u"Marking release as bad: " + releaseName)
    # self.log += failed_history.logFailed(releaseName)

    self.log += failed_history.markFailed(self._show_obj, parsed.season_number, parsed.episode_numbers)
    # self._log(u"Marking release as Failed: " + releaseName)
    # self.log += failed_history.logFailed(releaseName)

    # NOTE(review): elsewhere in this codebase FailedQueueItem is constructed
    # with a list of episode segments as its second argument; here it receives
    # the season number — confirm which signature this FailedQueueItem expects.
    cur_failed_queue_item = search_queue.FailedQueueItem(self._show_obj, parsed.season_number)
    sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

    return True
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: Force checking when already checking (currently unused)
    :param result: result accumulator (unused in this body)
    :return: True if file is already postprocessed, False if not
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method <> move
    myDB = db.DBConnection()
    sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
    if sqlResult:
        # result.output += logHelper(u"You're trying to post process a dir that's already been processed, skipping", logger.DEBUG)
        return True

    else:
        sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition(".")[0]])
        if sqlResult:
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        # NOTE(review): NameParser's first positional argument is the boolean
        # file_name flag; passing dirName (a non-empty string) merely acts as
        # True here — confirm intent.
        np = NameParser(dirName, tryIndexers=True)
        try:
            # if it fails to find any info (because we're doing an unparsable
            # folder, like the TV root dir) it will throw an exception, which
            # we want to ignore
            parse_result = np.parse(dirName)
        except Exception:
            # expected for unparsable dirs; keep parse_result falsy so the
            # episode-specific filter below is skipped
            parse_result = False

        # This part is always the same
        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"

        params = []
        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (
            parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number
        ):
            # Bug fix: these values were previously concatenated straight into
            # the SQL text; bind them as parameters instead (same matching
            # semantics — SQLite applies the INTEGER column affinity).
            search_sql += " and tv_episodes.showid = ? and tv_episodes.season = ? and tv_episodes.episode = ?"
            params += [str(parse_result.show.indexerid), str(parse_result.season_number),
                       str(parse_result.episode_numbers[0])]

        # DOWNLOADED is an internally generated list of ints, safe to inline
        search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        params.append("%" + videofile)

        sqlResult = myDB.select(search_sql, params)
        if sqlResult:
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
            return True

    return False
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    Generate a sample episode for `pattern` and verify the name parser
    recovers the matching air date / absolute numbers / season+episodes.

    :param pattern: naming pattern to validate
    :param multi: multi-episode naming style (None for a single episode)
    :param anime_type: anime numbering style (3 = season/episode numbering)
    :param file_only: validate the filename alone, without the directory part
    :param abd: validate as an air-by-date episode
    :param sports: validate as a sports episode
    :return: True if the pattern round-trips, False otherwise
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek.ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

    # naming_pattern relaxes parsing for pattern validation
    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception as e:  # `e` is bound but unused
        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
        return False

    logger.log("The name " + new_name + " parsed into " + str(result), logger.DEBUG)

    # compare the parsed fields against the sample episode
    if abd or sports:
        if result.air_date != ep.airdate:
            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    elif anime_type != 3:
        # anime types other than 3 use absolute episode numbering
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [
            x.absolute_number for x in [ep] + ep.relatedEps
        ]:
            logger.log(u"Absolute numbering incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    else:
        if result.season_number != ep.season:
            logger.log(u"Season number incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            logger.log(u"Episode numbering incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False

    return True
def _find_season_quality(self, title, torrent_id, show):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None if no quality can be determined.

    :param title: original torrent title
    :param torrent_id: site torrent id used to fetch the file list
    :param show: show object (used for anime-aware quality detection)
    """
    # Bug fix: the original list had a missing comma ('ts' 'ogv'), which
    # Python silently concatenated into a single bogus extension 'tsogv',
    # so neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)

    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
        # Bug fix: bail out once the file list is known to be empty rather
        # than continuing with nothing to inspect.
        return None

    # inspect video files until one yields a recognizable quality
    for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
        quality = Quality.nameQuality(os.path.basename(fileName), show.anime)
        if quality != Quality.UNKNOWN:
            break

    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser(show=show)
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(
        u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(
            parse_result.season_number) + ' ' + self._reverseQuality(quality)

    return title
def test_formating(self):
    """Smoke test: parse a full Windows file path and print the scene-converted result."""
    self.loadFromDB()
    release = "d:\\Downloads\\newdownload\\2.Broke.Girls.S03E10.And.the.First.Day.of.School.720p.WEB-DL.DD5.1.H.264-BS.mkv"
    # parse the name to break it into show name, season, and episode
    # Bug fix: the original passed the Python 2 builtin `file` as NameParser's
    # first (boolean file_name) positional argument; it only "worked" because
    # a type object is truthy, and it is a NameError on Python 3. Use an
    # explicit True (identical runtime behavior) since the input is a path.
    np = NameParser(True)
    parse_result = np.parse(release).convert()
    print(parse_result)
def _find_season_quality(self, title, torrent_link, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None if no quality can be determined.

    :param title: original torrent title
    :param torrent_link: detail page URL whose file table is scraped
    :param ep_number: expected number of episodes in the season
    """
    # Bug fix: the original list had a missing comma ('ts' 'ogv'), which
    # Python silently concatenated into a single bogus extension 'tsogv',
    # so neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})

        if not file_table:
            return None

        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                       logger.DEBUG)
            return None

        # inspect video files until one yields a recognizable quality
        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

        return title

    except Exception:
        # the original used Python 2 `except Exception, e` but never read `e`;
        # the traceback is reported via traceback.format_exc() instead
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
def _find_season_quality(self, title, torrent_link, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None if no quality can be determined.

    :param title: original torrent title
    :param torrent_link: detail page URL whose file table is scraped
    :param ep_number: expected number of episodes in the season
    """
    # Bug fix: the original list had a missing comma ('ts' 'ogv'), which
    # Python silently concatenated into a single bogus extension 'tsogv',
    # so neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})

        if not file_table:
            return None

        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                       logger.DEBUG)
            return None

        # inspect video files until one yields a recognizable quality
        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

        return title

    # `except Exception, e` rewritten with the `as` form, valid on
    # Python 2.6+ and Python 3 alike (same behavior)
    except Exception as e:
        logger.log(u"Failed parsing " + self.name + (" Exceptions: " + str(e) if e else ''), logger.ERROR)
def test_formating(self):
    """Parse a known release file name and print the converted result."""
    self.loadFromDB()
    release = "d:\\Downloads\\newdownload\\2.Broke.Girls.S03E10.And.the.First.Day.of.School.720p.WEB-DL.DD5.1.H.264-BS.mkv"
    # parse the name to break it into show name, season, and episode
    # BUGFIX: previously passed the Python 2 builtin `file` as the
    # file-name-mode flag, which only worked because the type object is
    # truthy and is a NameError on Python 3
    np = NameParser(True)
    parse_result = np.parse(release).convert()
    print(parse_result)
def findEpisode (self, episode, manualSearch=False):
    """Search this provider for a single episode.

    episode: episode object to search for
    manualSearch: True when the user explicitly requested the search

    Returns a list of result objects (possibly empty).  Cache hits short-
    circuit the provider search; a non-manual search also returns early
    even when the cache is empty.
    """
    self._checkAuth()

    logger.log(u"Searching "+self.name+" for " + episode.prettyName(True))

    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: "+str(results), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    itemList = []
    for cur_search_string in self._get_episode_search_strings(episode):
        itemList += self._doSearch(cur_search_string, show=episode.show)

    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser()
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
            continue

        if episode.show.air_by_date:
            # air-by-date shows match on the air date, not season/episode
            if parse_result.air_date != episode.airdate:
                logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
            logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG)
            continue

        quality = self.getQuality(item)

        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        result = self.getResult([episode])
        result.url = url
        result.name = title
        result.quality = quality

        results.append(result)

    return results
def _findTorrentHash(url):
    """Resolve a torrent URL or magnet link to the client's torrent hash.

    Extracts a release name from *url*, then polls the torrent client's
    list for up to 15 seconds, matching first by URL (torrent[19]) and
    then by parsed show/season/episode of the torrent name (torrent[2]).

    Returns the hash (torrent[0]), or False when the name can't be
    extracted/parsed or no match is found in time.
    """
    for i in range(0, 15):
        try:
            if url.startswith('magnet'):
                token_re = "&dn=([^<>]+)&tr="
                match = re.search(token_re, url)
                # trim at the first tracker param and normalise separators
                name = match.group(1)[0:match.group(1).find('&tr=')].replace('_', '.').replace('+', '.')
            else:
                real_name = url[url.rfind('/') + 1:url.rfind('.torrent')]
                # NOTE(review): the end index below is computed from `url`, not
                # `real_name`; since it is past the end of `real_name`, Python
                # clamps the slice, so this is effectively
                # real_name[real_name.rfind('=') + 1:] -- confirm intent
                real_name = real_name[real_name.rfind('=') + 1:url.rfind('.torrent')]
                name = real_name.replace('_', '.').replace('+', '.')
        # BUGFIX: was a bare `except:`; narrowed so SystemExit and
        # KeyboardInterrupt still propagate
        except Exception:
            logger.log("Unable to retrieve episode name from " + url, logger.WARNING)
            return False

        try:
            myParser = NameParser()
            parse_result = myParser.parse(name)
        except Exception:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
            return False

        success, torrent_list = _action('&list=1', sickbeard.TORRENT_HOST,
                                        sickbeard.TORRENT_USERNAME, sickbeard.TORRENT_PASSWORD)
        if not success:
            continue

        # Don't fail when a torrent name can't be parsed to a name
        for torrent in torrent_list['torrents']:
            try:
                # Try to match URL first
                if len(torrent) >= 20 and url == torrent[19]:
                    return torrent[0]
                if len(torrent) < 3:
                    continue
                # If that fails try to parse the name of the torrent
                torrent_result = myParser.parse(torrent[2])
                if (torrent_result.series_name == parse_result.series_name
                        and torrent_result.season_number == parse_result.season_number
                        and torrent_result.episode_numbers == parse_result.episode_numbers):
                    return torrent[0]
            except InvalidNameException:
                pass

        time.sleep(1)

    logger.log(u"I will not be able to set label or paused to this torrent: " + url)
    return False
def _find_season_quality(self, title, torrent_id):
    """Rewrite the title of a Season Torrent with the quality found by
    inspecting the torrent file list.

    Returns the rewritten title, or None when no quality can be determined.
    """
    # BUGFIX: missing comma between "ts" and "ogv" made the bogus
    # extension "tsogv" via implicit string concatenation
    mediaExtensions = ["avi", "mkv", "wmv", "divx", "vob", "dvr-ms",
                       "wtv", "ts", "ogv", "rar", "zip"]

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + "ajax_details_filelist.php?id=" + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall("<td.+>(.*?)</td>", data)

    if not filesList:
        # no early return: the loops below simply find nothing and we
        # bail out via the UNKNOWN-quality path
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

    for fileName in filesList:
        if fileName.rpartition(".")[2].lower() in mediaExtensions:
            quality = Quality.nameQuality(fileName)
            if quality != Quality.UNKNOWN:
                break

    # fall back to an assumed quality based on the last file inspected
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = (
            parse_result.series_name
            + " S%02d" % int(parse_result.season_number)
            + " "
            + self._reverseQuality(quality)
        )

    return title
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found inspecting torrent file list
    """
    # NOTE(review): returns False here but None on the other failure
    # paths; callers that test truthiness are unaffected
    if not self.url:
        return False

    quality = Quality.UNKNOWN
    file_name = None

    data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
    if self.should_skip() or not data:
        return None

    files_list = re.findall('<td.+>(.*?)</td>', data)

    if not files_list:
        # logs but does not return; the filter below just yields nothing
        logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)

    # `mediaExtensions` is not defined here -- presumably a module-level
    # list of video extensions; verify at the import site.  Also assumes
    # Python 2 where filter() returns a list (len() below).
    video_files = filter(lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list)

    # Filtering SingleEpisode/MultiSeason Torrent
    if ep_number > len(video_files) or float(ep_number * 1.1) < len(video_files):
        logger.log(u'Result %s has episode %s and total episodes retrieved in torrent are %s'
                   % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
        logger.log(u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
                   % title, logger.DEBUG)
        return None

    # if the title itself already carries a recognisable quality, keep it
    if Quality.UNKNOWN != Quality.sceneQuality(title):
        return title

    for file_name in video_files:
        quality = Quality.sceneQuality(os.path.basename(file_name))
        if Quality.UNKNOWN != quality:
            break

    # fall back to an assumed quality based on the last file inspected
    if None is not file_name and Quality.UNKNOWN == quality:
        quality = Quality.assumeQuality(os.path.basename(file_name))

    if Quality.UNKNOWN == quality:
        logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
        return None

    try:
        my_parser = NameParser(showObj=self.show, indexer_lookup=False)
        parse_result = my_parser.parse(file_name)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = '%s S%02d %s' % (parse_result.series_name,
                                 int(parse_result.season_number),
                                 self._reverse_quality(quality))
    return title
def _get_language(self, title=None, item=None):
    """Return the audio language(s) parsed from *title*; 'en' on failure."""
    if not title:
        return 'en'

    try:
        parsed = NameParser().parse(title)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
        return 'en'

    return parsed.audio_langs
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    # render a sample episode through the pattern, then try to parse the
    # rendered name back and compare against the sample
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek(os.path.join, new_path, new_name)

    if not new_name:
        logging.debug("Unable to create a name out of " + pattern)
        return False

    logging.debug("Trying to parse " + new_name)

    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception:
        logging.debug("Unable to parse " + new_name + ", not valid")
        return False

    logging.debug("The name " + new_name + " parsed into " + str(result))

    if abd or sports:
        if result.air_date != ep.airdate:
            logging.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        # absolute (anime) numbering must round-trip when present
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [x.absolute_number for x in [ep] + ep.relatedEps]:
            logging.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
    else:
        # season/episode checks only run when anime_type == 3
        if result.season_number != ep.season:
            logging.debug("Season number incorrect in parsed episode, pattern isn't valid")
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            logging.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
            return False

    return True
def _get_language(self, title=None, item=None):
    """Parse *title* and return its audio language(s), defaulting to 'en'."""
    if not title:
        return 'en'

    try:
        parsed = NameParser().parse(title)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
        return 'en'

    return parsed.audio_langs
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Parse *name* (unless a parse_result is supplied) and build the SQL
    to cache this RSS item in the provider's cache table.

    Returns a [sql, params] pair, or None when the name can't be parsed
    into a known show/episode.
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logging.debug("Unable to parse the filename " + name + " into a valid episode")
            return None
        except InvalidShowException:
            logging.debug("Unable to parse the filename " + name + " into a valid show")
            return None

    if not parse_result or not parse_result.series_name:
        return None

    # if we made it this far then lets add the parsed result to cache for usage later on
    # NOTE(review): a falsy season number (0/None) is stored as season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a separated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        name = ss(name)

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        logging.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Parse *name* (unless a parse_result is supplied) and build the SQL
    to cache this RSS item in the provider's cache table.

    Returns a [sql, params] pair, or None when the name can't be parsed
    into a known show/episode.
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logging.debug("Unable to parse the filename " + name + " into a valid episode")
            return None
        except InvalidShowException:
            logging.debug("Unable to parse the filename " + name + " into a valid show")
            return None

    if not parse_result or not parse_result.series_name:
        return None

    # if we made it this far then lets add the parsed result to cache for usage later on
    # NOTE(review): a falsy season number (0/None) is stored as season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a separated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        name = ss(name)

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        logging.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def test_parsing_scene_release(self):
    """Smoke-test NameParser against a normal scene release and a sports release."""
    self.loadFromDB()

    # fixed typo'd locals (scene_parsse_results*)
    scene_parse_result = ''
    scene_parse_converted = ''
    scene_release = 'Pawn Stars S08E41 Field Trip HDTV x264-tNe'
    try:
        myParser = NameParser(False, 1)
        scene_parse_result = myParser.parse(scene_release)
        scene_parse_converted = myParser.parse(scene_release).convert()
    except InvalidNameException:
        print(u"Unable to parse the filename " + scene_release + " into a valid episode")

    print(scene_parse_result)
    print(scene_parse_converted)

    sports_release = 'UFC.168.Weidman.vs.Silva.II.28th.Dec.2013.HDTV.x264-Sir.Paul'
    try:
        myParser = NameParser(False, 2)
        parse_result = myParser.parse(sports_release)
        sickbeard.show_name_helpers.allPossibleShowNames(parse_result.series_name)
        show = get_show_by_name(parse_result.series_name)
        if show:
            # BUGFIX: was `test.db.DBConnection()` where `test` was the list
            # returned by allPossibleShowNames -> AttributeError; use the
            # db module directly
            sql_results = db.DBConnection().select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                [show.indexerid, parse_result.sports_event_date.toordinal()])

            actual_season = int(sql_results[0]["season"])
            actual_episodes = [int(sql_results[0]["episode"])]
            print(actual_season)
            print(actual_episodes)
    except InvalidNameException:
        # BUGFIX: error message previously named the wrong release
        print(u"Unable to parse the filename " + sports_release + " into a valid episode")

    print(scene_parse_result)
def process(self):
    """Handle a failed download: parse the release name, find the show,
    mark the episodes failed in history and queue a failed-search item.

    Returns True on success; raises exceptions.FailedProcessingFailed on
    any unrecoverable step.
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    parser = NameParser(False)
    try:
        parsed = parser.parse(releaseName, True)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

    show_id = self._get_show_id(parsed.series_name)
    if show_id is None:
        self._log(u"Warning: couldn't find show ID", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    self._log(u"Found show_id: " + str(show_id), logger.DEBUG)

    self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
    if self._show_obj is None:
        self._log(u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    # # Revert before fail, as fail alters the history
    # self._log(u"Reverting episodes...")
    # self.log += failed_history.revertEpisodes(self._show_obj, parsed.season_number, parsed.episode_numbers)
    # self._log(u"Marking release as bad: " + releaseName)
    # self.log += failed_history.logFailed(releaseName)

    self.log += failed_history.markFailed(self._show_obj, parsed.season_number, parsed.episode_numbers)
    # self._log(u"Marking release as Failed: " + releaseName)
    # self.log += failed_history.logFailed(releaseName)

    cur_failed_queue_item = search_queue.FailedQueueItem(self._show_obj, parsed.season_number)
    sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

    return True
def process(self):
    """Handle a failed download: parse the release name, resolve the show,
    convert scene numbering and queue a failed-search item per segment.

    Returns True on success; raises exceptions.FailedProcessingFailed on
    any unrecoverable step.
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(
        self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.",
                  logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    parser = NameParser(False)
    try:
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName,
                  logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
    logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)

    self._show_obj = sickbeard.helpers.get_show_by_name(parsed.series_name)
    if self._show_obj is None:
        self._log(
            u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)",
            logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    # scene -> indexer numbering
    parsed = parsed.convert(self._show_obj)

    # collect the episode objects for the failed season
    segment = {parsed.season_number: []}

    for episode in parsed.episode_numbers:
        epObj = self._show_obj.getEpisode(parsed.season_number, episode)
        segment[parsed.season_number].append(epObj)

    cur_failed_queue_item = search_queue.FailedQueueItem(
        self._show_obj, segment)
    sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

    return True
def parse_wrapper(self, show=None, toParse='', showList=None, tvdbActiveLookUp=False):
    """Return a (parse_result, show) tuple for *toParse*.

    Raises InvalidNameException when the name cannot be parsed.  To get
    the tvdbid, the tvdb api might be used if tvdbActiveLookUp is True.
    showList falls back to sickbeard.showList when omitted or empty.
    """
    # TODO: refactor ABD into its own mode ... if done remove simple check in parse()
    # BUGFIX: default was a mutable argument (showList=[]); use None and
    # fall back the same way (empty list also triggered the fallback before)
    if not showList:
        showList = sickbeard.showList

    try:
        myParser = NameParser()
        parse_result = myParser.parse(toParse)
    except InvalidNameException:
        raise InvalidNameException(u"Unable to parse: " + toParse)
    else:
        show = self.get_show_by_name(parse_result.series_name, showList, toParse, tvdbActiveLookUp)

    return (parse_result, show)
def parse_wrapper(self, show=None, toParse='', showList=[], tvdbActiveLookUp=False): """Retruns a parse result or a InvalidNameException it will try to take the correct regex for the show if given if not given it will try Anime first then Normal if name is parsed as anime it will lookup the tvdbid and check if we have it as an anime only if both is true we will consider it an anime to get the tvdbid the tvdbapi might be used if tvdbActiveLookUp is True """ # TODO: refactor ABD into its own mode ... if done remove simple check in parse() if len(showList) == 0: showList = sickbeard.showList if show and show.is_anime: modeList = [NameParser.ANIME_REGEX, NameParser.NORMAL_REGEX] elif show and not show.is_anime: modeList = [NameParser.NORMAL_REGEX] else: # just try both ... time consuming modeList = [NameParser.ANIME_REGEX, NameParser.NORMAL_REGEX] for mode in modeList: try: myParser = NameParser(regexMode=mode) parse_result = myParser.parse(toParse) except InvalidNameException: self._log( u"Could not parse '" + toParse + "' in regex mode: " + str(mode), logger.DEBUG) else: found_show = self.get_show_by_name(parse_result.series_name, showList, tvdbActiveLookUp) if found_show and found_show.is_anime: if mode == NameParser.ANIME_REGEX or mode == NameParser.NORMAL_REGEX: break elif show and show.is_anime and hasattr(show, 'test_obj'): if mode == NameParser.ANIME_REGEX or mode == NameParser.NORMAL_REGEX: break else: if mode == NameParser.NORMAL_REGEX: break else: raise InvalidNameException(u"Unable to parse " + toParse) self._parse_mode = mode return (parse_result, found_show)
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    name: A string which we want to analyze to determine show info from (unicode)

    Returns a (indexer_id, indexer, season, [episodes], quality) tuple.
    The ids may be None and episodes may be [] if none were found.
    """
    logger.log(u"Analyzing name " + repr(name))

    empty_result = (None, None, None, [], None)
    if not name:
        return empty_result

    # break the name down into show name, season and episode
    parse_result = NameParser(file).parse(name)
    self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'),
              logger.DEBUG)

    if parse_result.air_by_date:
        season, episodes = -1, [parse_result.air_date]
    elif parse_result.sports:
        season, episodes = -1, [parse_result.sports_event_date]
    else:
        season, episodes = parse_result.season_number, parse_result.episode_numbers

    result = empty_result
    show_obj = helpers.get_show_by_name(parse_result.series_name)
    if show_obj:
        result = (show_obj.indexerid, show_obj.indexer, season, episodes, None)

    self._finalize(parse_result)
    return result
def _find_season_quality(self, title, torrent_id):
    """Return the modified title of a Season Torrent with the quality found
    by inspecting the torrent file list; None when it can't be determined."""
    # BUGFIX: the original list was missing a comma between 'ts' and 'ogv',
    # so implicit string concatenation produced the bogus extension 'tsogv'
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)

    if not filesList:
        # no early return: the filter below simply yields nothing and we
        # bail out via the UNKNOWN-quality path
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

    for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
        quality = Quality.nameQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    # fall back to an assumed quality based on the last file inspected
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

    return title
def process(self):
    """
    Do the actual work

    :return: True

    Parses the failed release, then queues one FailedQueueItem per parsed
    episode.  Raises FailedPostProcessingFailedException when the release
    name is missing or unparseable.
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(
        self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.",
                  logger.WARNING)
        raise FailedPostProcessingFailedException()

    try:
        parser = NameParser(False)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName,
                  logger.DEBUG)
        raise FailedPostProcessingFailedException()
    except InvalidShowException:
        self._log(
            u"Error: unable to parse release name " + releaseName + " into a valid show", logger.DEBUG)
        raise FailedPostProcessingFailedException()

    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

    # queue a separate failed-search item for each parsed episode
    for episode in parsed.episode_numbers:
        segment = parsed.show.getEpisode(parsed.season_number, episode)

        cur_failed_queue_item = search_queue.FailedQueueItem(
            parsed.show, [segment])
        sickbeard.searchQueueScheduler.action.add_item(
            cur_failed_queue_item)

    return True
def _is_season_pack(self, name):
    """Return True when *name* parses to a release covering every episode
    of its season (per the tv_episodes table), else False."""
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logging.debug("Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        logging.debug("Unable to parse the filename %s into a valid show" % name)
        return False

    myDB = db.DBConnection()
    sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = myDB.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
    if int(episodes[0][b'count']) == len(parse_result.episode_numbers):
        return True
    # BUGFIX: previously fell through and returned None; make the negative
    # case explicit (callers using truthiness are unaffected)
    return False
def _is_season_pack(name):
    """Return True when *name* parses to a release covering every episode
    of its season (per the tv_episodes table), else False."""
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename %s into a valid episode" % name, logger.DEBUG)
        return False
    except InvalidShowException:
        logger.log(u"Unable to parse the filename %s into a valid show" % name, logger.DEBUG)
        return False

    myDB = db.DBConnection()
    sql_selection = "select count(*) as count from tv_episodes where showid = ? and season = ?"
    episodes = myDB.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
    if int(episodes[0]['count']) == len(parse_result.episode_numbers):
        return True
    # BUGFIX: previously fell through and returned None; make the negative
    # case explicit (callers using truthiness are unaffected)
    return False
def test_formating(self):
    """Exercise NameParser on an air-by-date title and print the scene
    search strings built for the episode (manual/visual test)."""
    self.loadFromDB()
    show = sickbeard.helpers.findCertainShow(sickbeard.showList, 24749)
    ep = show.getEpisode(21, 17)
    ep.airdate = datetime.date.today()

    # parse the file name
    pattern = u'%SN - %A-D - %EN'
    title = 'Show.Name.9th.Mar.2010.HDTV.XviD-RLSGROUP'
    try:
        myParser = NameParser(False, 1)
        parse_result = myParser.parse(title, True)
    except InvalidNameException:
        print(u"Unable to parse the filename " + ep.name + " into a valid episode")

    # NOTE(review): parse_result is only bound inside the try; the print
    # below raises NameError if parsing failed
    print parse_result

    search_string = {'Episode': []}
    episode = ep.airdate
    # NOTE(review): this expression's result is discarded
    str(episode).replace('-', '|')
    ep_string = sanitizeSceneName(show.name) + ' ' + \
        str(episode).replace('-', '|') + '|' + \
        episode.strftime('%b')
    search_string['Episode'].append(ep_string)

    scene_ep_string = sanitizeSceneName(show.name) + ' ' + \
        sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep.scene_season,
                                              'episodenumber': ep.scene_episode} + '|' + \
        sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep.scene_season,
                                              'episodenumber': ep.scene_episode} + '|' + \
        sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep.scene_season,
                                              'episodenumber': ep.scene_episode} + ' %s category:tv' % ''

    scene_season_string = show.name + ' S%02d' % int(
        ep.scene_season) + ' -S%02d' % int(
        ep.scene_season) + 'E' + ' category:tv'

    # 1) ShowName SXX -SXXE
    print(u'Searching "%s" for "%s" as "%s"'
          % (show.name, ep.prettyName(), ep.scene_prettyName()))
    print('Scene episode search strings: %s' % (scene_ep_string))

    print('Scene season search strings: %s' % (scene_season_string))
def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False):
    """Check whether a naming *pattern* round-trips through the parser.

    Renders a sample episode with the pattern, re-parses the produced
    name and verifies season/episode (or air date) survive.  Returns True
    when the pattern is usable, False otherwise.
    """
    ep = _generate_sample_ep(multi, abd, sports)

    # sports names need their own regex mode
    regexMode = 0
    if sports:
        regexMode = 1

    parser = NameParser(True, regexMode)

    new_name = ep.formatted_filename(pattern, multi) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek.ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

    try:
        result = parser.parse(new_name)
    except InvalidNameException:
        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
        return False

    logger.log("The name " + new_name + " parsed into " + str(result), logger.DEBUG)

    if abd:
        if result.air_date != ep.airdate:
            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    elif sports:
        if result.sports_date != ep.airdate:
            logger.log(u"Sports air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    else:
        if result.season_number != ep.season:
            logger.log(u"Season incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            logger.log(u"Episode incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False

    return True
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    name: A string which we want to analyze to determine show info from (unicode)

    Returns a (indexer_id, indexer, season, [episodes], quality) tuple.
    The ids may be None and episodes may be [] if none were found.
    """
    logger.log(u"Analyzing name " + repr(name))

    empty_result = (None, None, None, [], None)
    if not name:
        return empty_result

    # break the name down into show name, season and episode
    parse_result = NameParser(file).parse(name)
    self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'),
              logger.DEBUG)

    if parse_result.air_by_date:
        season, episodes = -1, [parse_result.air_date]
    elif parse_result.sports:
        season, episodes = -1, [parse_result.sports_event_date]
    else:
        season, episodes = parse_result.season_number, parse_result.episode_numbers

    result = empty_result
    show_obj = helpers.get_show_by_name(parse_result.series_name)
    if show_obj:
        result = (show_obj.indexerid, show_obj.indexer, season, episodes, None)

    self._finalize(parse_result)
    return result
def process(self):
    """Handle a failed download: parse the release, locate the show and
    queue a FailedQueueItem for the affected episodes.

    Returns True on success; raises exceptions.FailedProcessingFailed on
    any unrecoverable step.
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    try:
        parser = NameParser(False, convert=True)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    except InvalidShowException:
        self._log(u"Error: unable to parse release name " + releaseName + " into a valid show", logger.WARNING)
        # BUGFIX: previously raised the exception CLASS (missing
        # parentheses); raise an instance like the other failure paths
        raise exceptions.FailedProcessingFailed()

    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
    logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)

    if parsed.show is None:
        self._log(
            u"Could not create show object. Either the show hasn't been added to SickRage, or it's still loading (if SB was restarted recently)",
            logger.WARNING,
        )
        raise exceptions.FailedProcessingFailed()

    # collect the episode objects for the failed season
    segment = {parsed.season_number: []}
    for episode in parsed.episode_numbers:
        epObj = parsed.show.getEpisode(parsed.season_number, episode)
        segment[parsed.season_number].append(epObj)

    cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
    sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

    return True
def test_formating(self):
    """Exercise NameParser on a sports-style title and print the scene
    search strings built for the episode (manual/visual test)."""
    self.loadFromDB()
    show = sickbeard.helpers.findCertainShow(sickbeard.showList, 111051)
    show.loadEpisodesFromDB()
    ep = show.getEpisode(8, 56, sceneConvert=True)
    ep.airdate = datetime.date.today()

    # parse the file name
    pattern = u'%SN - %A-D - %EN'
    title = 'UFC.166.Velasquez.v.Dos Santos.III.19th.Oct.2013.HDTV.x264-Sir.Paul'
    try:
        myParser = NameParser(False, 1)
        parse_result = myParser.parse(title)
    except InvalidNameException:
        print(u"Unable to parse the filename " + ep.name + " into a valid episode")

    # NOTE(review): parse_result is only bound inside the try; the print
    # below raises NameError if parsing failed
    print parse_result

    search_string = {'Episode': []}
    episode = ep.airdate
    # NOTE(review): this expression's result is discarded
    str(episode).replace('-', '|')
    ep_string = sanitizeSceneName(show.name) + ' ' + \
        str(episode).replace('-', '|') + '|' + \
        episode.strftime('%b')
    search_string['Episode'].append(ep_string)

    scene_ep_string = sanitizeSceneName(show.name) + ' ' + \
        sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep.scene_season,
                                              'episodenumber': ep.scene_episode} + '|' + \
        sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep.scene_season,
                                              'episodenumber': ep.scene_episode} + '|' + \
        sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep.scene_season,
                                              'episodenumber': ep.scene_episode} + ' %s category:tv' % ''

    scene_season_string = show.name + ' S%02d' % int(ep.scene_season) + ' -S%02d' % int(ep.scene_season) + 'E' + ' category:tv'

    # 1) ShowName SXX -SXXE
    print(u'Searching "%s" for "%s" as "%s"'
          % (show.name, ep.prettyName(), ep.scene_prettyName()))
    print('Scene episode search strings: %s' % (scene_ep_string))

    print('Scene season search strings: %s' % (scene_season_string))
def history_snatched_proper_fix():
    """
    One-off maintenance pass over the history table: rows recorded with
    status Snatched whose resource names look like proper/repack/real
    releases are re-flagged as SNATCHED_PROPER. Guarded by the
    'history_snatch_proper' DB flag so it only ever runs once.
    """
    connection = db.DBConnection()
    if connection.has_flag('history_snatch_proper'):
        return  # migration already done

    logger.log('Updating history items with status Snatched Proper in a background process...')
    rows = connection.select('SELECT rowid, resource, quality, showid'
                             ' FROM history'
                             ' WHERE action LIKE "%%%02d"' % SNATCHED +
                             ' AND (UPPER(resource) LIKE "%PROPER%"'
                             ' OR UPPER(resource) LIKE "%REPACK%"'
                             ' OR UPPER(resource) LIKE "%REAL%")')

    updates = []
    for row in rows or []:
        show_obj = None
        try:
            show_obj = helpers.findCertainShow(sickbeard.showList, int(row['showid']))
        except (StandardError, Exception):
            pass

        parser = NameParser(False, showObj=show_obj, testing=True)
        try:
            parsed = parser.parse(row['resource'])
        except (StandardError, Exception):
            continue

        # only rows that actually parse to a proper level > 0 get re-flagged
        if 0 < Quality.get_proper_level(parsed.extra_info_no_name(), parsed.version, parsed.is_anime):
            updates.append(['UPDATE history SET action = ? WHERE rowid = ?',
                            [Quality.compositeStatus(SNATCHED_PROPER, int(row['quality'])), row['rowid']]])

    if updates:
        connection.mass_action(updates)

    logger.log('Completed the history table update with status Snatched Proper.')
    connection.add_flag('history_snatch_proper')
def _analyze_name(self, name, file=True):
    """
    Try to work out show, season, and episode information from a name.

    name: the string to analyze for show info (unicode)

    Returns a (show, season, [episodes], quality) tuple. show and season may
    be None and episodes may be [] when nothing could be determined.
    """
    logger.log(u"Analyzing name " + repr(name))

    empty = (None, None, [], None)
    if not name:
        return empty

    # break the name down into show name, season, and episode
    parser = NameParser(file, useIndexers=True, convert=True)
    result = parser.parse(name)
    self._log(u"Parsed " + name + " into " + str(result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)

    # couldn't find this in our show list
    if not result.show:
        return empty

    if result.air_by_date:
        season, episodes = -1, [result.air_date]
    elif result.sports:
        season, episodes = -1, [result.sports_event_date]
    else:
        season, episodes = result.season_number, result.episode_numbers

    analyzed = (result.show, season, episodes, result.quality)
    self._finalize(result)
    return analyzed
def _analyze_name(self, name, file=True):
    """
    Try to work out show, season, and episode information from a name.

    name: the string to analyze for show info (unicode)

    Returns a (show, season, [episodes], quality, version) tuple. show and
    season may be None and episodes may be [] when nothing could be
    determined; the final element is always None here.
    """
    logger.log(u"Analyzing name " + repr(name))

    blank = (None, None, [], None, None)
    if not name:
        return blank

    # strip the extension and any trailing non-release-group junk first
    name = helpers.remove_non_release_groups(helpers.remove_extension(name))

    # break the name down into show name, season, and episode
    parser = NameParser(file, tryIndexers=True, trySceneExceptions=True, convert=True)
    result = parser.parse(name)

    if result.is_air_by_date:
        season, episodes = -1, [result.air_date]
    else:
        season, episodes = result.season_number, result.episode_numbers

    analyzed = (result.show, season, episodes, result.quality, None)
    self._finalize(result)
    return analyzed
def _is_season_pack(self, name):
    """
    Decide whether `name` refers to a full-season pack, i.e. whether the
    episodes parsed from the name cover every episode the database knows
    for that show/season.

    name: release name to check (string)

    Returns True when the parsed episode count equals the DB episode count
    for that season, False otherwise (including unparseable names).
    """
    try:
        myParser = NameParser(tryIndexers=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logging.debug("Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        logging.debug("Unable to parse the filename %s into a valid show" % name)
        return False

    myDB = db.DBConnection()
    sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = myDB.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])

    # Fix: previously fell off the end (implicitly returning None) when the
    # counts did not match; return an explicit bool instead. Both None and
    # False are falsy, so callers are unaffected.
    return int(episodes[0][b'count']) == len(parse_result.episode_numbers)
def execute(self):
    """
    Schema migration: add `file_size` and `release_name` columns to
    tv_episodes and backfill them.

    - file_size is populated from the file on disk where available.
    - release_name is recovered first from matching snatch/download pairs in
      the history table (validated via NameParser), then from episodes whose
      on-disk filename is an obvious scene name.

    Finishes by bumping the DB version.

    NOTE(review): two SQL literals below were severed mid-string in the
    mangled source (across the "AND location != ''" and
    "WHERE episode_id = ?" fragments); they are rejoined here into the
    single-line statements the surrounding code clearly intends.
    """
    backupDatabase(10)

    if not self.hasColumn("tv_episodes", "file_size"):
        self.addColumn("tv_episodes", "file_size")
    if not self.hasColumn("tv_episodes", "release_name"):
        self.addColumn("tv_episodes", "release_name", "TEXT", "")

    ep_results = self.connection.select("SELECT episode_id, location, file_size FROM tv_episodes")

    logger.log(u"Adding file size to all episodes in DB, please be patient")
    for cur_ep in ep_results:
        if not cur_ep["location"]:
            continue

        # if there is no size yet then populate it for us
        if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]):
            cur_size = ek.ek(os.path.getsize, cur_ep["location"])
            self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
                                   [cur_size, int(cur_ep["episode_id"])])

    # check each snatch to see if we can use it to get a release name from
    history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")

    logger.log(u"Adding release name to all episodes still in history")
    for cur_result in history_results:
        # find the associated download, if there isn't one then ignore it
        download_results = self.connection.select(
            "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
            [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
        if not download_results:
            logger.log(u"Found a snatch in the history for " + cur_result["resource"] +
                       " but couldn't find the associated download, skipping it", logger.DEBUG)
            continue

        nzb_name = cur_result["resource"]
        file_name = ek.ek(os.path.basename, download_results[0]["resource"])

        # take the extension off the filename, it's not needed
        if '.' in file_name:
            file_name = file_name.rpartition('.')[0]

        # find the associated episode on disk
        ep_results = self.connection.select(
            "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
            [cur_result["showid"], cur_result["season"], cur_result["episode"]])
        if not ep_results:
            logger.log(u"The episode " + nzb_name +
                       " was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
            continue

        # get the status/quality of the existing ep and make sure it's what we expect
        ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
        if ep_status != common.DOWNLOADED:
            continue
        if ep_quality != int(cur_result["quality"]):
            continue

        # make sure this is actually a real release name and not a season pack or something
        for cur_name in (nzb_name, file_name):
            logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
            try:
                np = NameParser(False)
                parse_result = np.parse(cur_name)
            except InvalidNameException:
                continue

            # `is not None` instead of `!= None` (PEP 8); behavior unchanged
            if parse_result.series_name and parse_result.season_number is not None \
                    and parse_result.episode_numbers and parse_result.release_group:
                # if all is well by this point we'll just put the release name into the database
                self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                                       [cur_name, ep_results[0]["episode_id"]])
                break

    # fall back to obvious scene filenames for episodes still missing a release name
    empty_results = self.connection.select("SELECT episode_id, location FROM tv_episodes WHERE release_name = ''")

    logger.log(u"Adding release name to all episodes with obvious scene filenames")
    for cur_result in empty_results:
        ep_file_name = ek.ek(os.path.basename, cur_result["location"])
        ep_file_name = os.path.splitext(ep_file_name)[0]

        # only want to find real scene names here so anything with a space in it is out
        if ' ' in ep_file_name:
            continue

        try:
            np = NameParser(False)
            parse_result = np.parse(ep_file_name)
        except InvalidNameException:
            continue

        if not parse_result.release_group:
            continue

        logger.log(u"Name " + ep_file_name + " gave release group of " +
                   parse_result.release_group + ", seems valid", logger.DEBUG)
        self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                               [ep_file_name, cur_result["episode_id"]])

    self.incDBVersion()
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found by
    inspecting the torrent's file list, or None when the torrent does not
    look like a usable season pack (missing data, wrong episode count, or
    no determinable quality).

    title: the result title to annotate
    torrent_id: provider torrent id used to fetch the file list page
    ep_number: expected number of episodes in the season
    """
    # Fix: 'ts' and 'ogv' were missing a separating comma, so Python's
    # implicit string concatenation produced the bogus extension 'tsogv'
    # and neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv',
                       'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.url + 'torrent/' + str(torrent_id)
    data = self.getURL(fileURL)
    if not data:
        return None

    try:
        with BS4Parser(data, features=["html5lib", "permissive"]) as soup:
            files_tbody = soup.find('div', attrs={'class': 'description-files'}).find('tbody')
            if not files_tbody:
                return None

            files = []
            rows = files_tbody.find_all('tr')
            for row in rows:
                files.append(row.find_all('td')[1].text)

            videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

            # Filtering SingleEpisode/MultiSeason Torrent
            if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
                logger.log(u"Result " + title + " have " + str(ep_number) +
                           " episode and episodes retrived in torrent are " + str(len(videoFiles)),
                           logger.DEBUG)
                logger.log(u"Result " + title +
                           " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                           logger.DEBUG)
                return None

            if Quality.sceneQuality(title) != Quality.UNKNOWN:
                return title

            for fileName in videoFiles:
                quality = Quality.sceneQuality(os.path.basename(fileName))
                if quality != Quality.UNKNOWN:
                    break

            if fileName is not None and quality == Quality.UNKNOWN:
                quality = Quality.assumeQuality(os.path.basename(fileName))

            if quality == Quality.UNKNOWN:
                logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
                return None

            try:
                myParser = NameParser(showObj=self.show)
                parse_result = myParser.parse(fileName)
            except (InvalidNameException, InvalidShowException):
                return None

            logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality],
                       logger.DEBUG)

            if parse_result.series_name and parse_result.season_number:
                title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + \
                        ' ' + self._reverseQuality(quality)

            return title

    # `as` form is valid on Python 2.6+ and 3 (was the 2-only `Exception, e`)
    except Exception as e:
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
                   logger.ERROR)