def history_snatched_proper_fix():
    """One-off background migration: find history rows whose snatched release
    name contains PROPER/REPACK/REAL and re-tag them as SNATCHED_PROPER.
    A db flag guarantees the migration only ever runs once.
    """
    my_db = db.DBConnection()
    # the flag is set at the end of a successful run, so re-entry is a no-op
    if not my_db.has_flag('history_snatch_proper'):
        logger.log('Updating history items with status Snatched Proper in a background process...')
        # the action column encodes status in its low two digits;
        # '%%%02d' % SNATCHED expands to a LIKE pattern such as "%02"
        sql_result = my_db.select('SELECT rowid, resource, quality, showid'
                                  ' FROM history'
                                  ' WHERE action LIKE "%%%02d"' % SNATCHED +
                                  ' AND (UPPER(resource) LIKE "%PROPER%"'
                                  ' OR UPPER(resource) LIKE "%REPACK%"'
                                  ' OR UPPER(resource) LIKE "%REAL%")')
        if sql_result:
            cl = []
            for r in sql_result:
                show_obj = None
                try:
                    show_obj = helpers.findCertainShow(sickbeard.showList, int(r['showid']))
                except (StandardError, Exception):
                    # best-effort: parsing still works without a show object
                    pass
                np = NameParser(False, showObj=show_obj, testing=True)
                try:
                    pr = np.parse(r['resource'])
                except (StandardError, Exception):
                    continue
                # proper level > 0 means the release really is a proper/repack/real
                if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
                    cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
                               [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
                                r['rowid']]])
            if cl:
                my_db.mass_action(cl)
        # NOTE(review): statement placement reconstructed from a collapsed
        # source line; flag is set whenever the scan ran, results or not
        logger.log('Completed the history table update with status Snatched Proper.')
        my_db.add_flag('history_snatch_proper')
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """ See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    # build a sample episode name from the pattern and try to parse it back
    ep = generate_sample_ep(multi, abd, sports, anime_type)
    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = ek(os.path.join, new_path, new_name)
    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False
    logger.log(u"Trying to parse " + new_name, logger.DEBUG)
    parser = NameParser(True, showObj=ep.show, naming_pattern=True)
    try:
        result = parser.parse(new_name)
    except Exception, e:
        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
        return False
    # NOTE(review): the success-path comparisons (season/episode/air-date
    # checks and the final `return True`) appear truncated in this copy of
    # the function -- confirm against the original file
def test_formating(self):
    """Smoke test: parse a sports-style release name and print the result."""
    release = "UFC.172.26th.April.2014.HDTV.x264.720p-Sir.Paul[rartv]"
    # parse the name to break it into show name, season, and episode.
    # Bug fix: the original passed the builtin `file` object as the
    # file_name flag; pass the intended explicit boolean instead
    # (builtin `file` is truthy, so behavior is unchanged).
    np = NameParser(True)
    parse_result = np.parse(release).convert()
    print(parse_result)
def _analyze_name(self, name, resource=True):
    """Work out show info from a release name.

    :param name: string to analyse (unicode)
    :param resource: passed through to NameParser as its file-name flag
    :return: (show, season, [episodes], quality) tuple; members may be
        None/[] when nothing could be determined
    """
    logger.log(u'Analyzing name ' + repr(name))
    empty_result = (None, None, [], None)
    if not name:
        return empty_result
    # break the name into show name, season and episode
    parser = NameParser(resource, try_scene_exceptions=True, convert=True)
    parsed = parser.parse(name)
    self._log(u'Parsed %s<br />.. from %s'
              % (str(parsed).decode('utf-8', 'xmlcharrefreplace'), name), logger.DEBUG)
    if parsed.is_air_by_date:
        # air-by-date shows carry the airdate in place of episode numbers
        season, episodes = -1, [parsed.air_date]
    else:
        season, episodes = parsed.season_number, parsed.episode_numbers
    found = (parsed.show, season, episodes, parsed.quality)
    self._finalize(parsed)
    return found
def validate_name(pattern, multi=None, file_only=False, abd=False):
    """Check a naming pattern round-trips through the name parser.

    Builds a sample episode name from the pattern, parses it back, and
    verifies the parsed season/episodes (or air date) match the sample.
    """
    sample_ep = _generate_sample_ep(multi, abd)
    name_parser = NameParser(True)
    candidate = sample_ep.formatted_filename(pattern, multi) + '.ext'
    folder = sample_ep.formatted_dir(pattern, multi)
    if not file_only:
        candidate = ek.ek(os.path.join, folder, candidate)
    logger.log(u"Trying to parse "+candidate, logger.DEBUG)
    try:
        parsed = name_parser.parse(candidate)
    except InvalidNameException:
        logger.log(u"Unable to parse "+candidate+", not valid", logger.DEBUG)
        return False
    logger.log(candidate + " vs " + str(parsed), logger.DEBUG)
    if abd:
        # air-by-date patterns are validated on the airdate alone
        return parsed.air_date == sample_ep.airdate
    if parsed.season_number != sample_ep.season:
        return False
    return parsed.episode_numbers == [x.episode for x in [sample_ep] + sample_ep.relatedEps]
def _find_season_quality(self, title, torrent_id, ep_number):
    """Return the title of a season torrent annotated with the quality
    deduced from the torrent's file list, or None when it cannot be
    determined (or the torrent doesn't look like a full single season).

    :param title: torrent title
    :param torrent_id: provider torrent id, used to fetch the file list
    :param ep_number: expected number of episodes in the season
    """
    # Bug fix: 'ts' and 'ogv' were missing a separating comma, so implicit
    # string concatenation produced the bogus extension 'tsogv' and both
    # real extensions went unrecognized.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip', 'mp4']
    quality = Quality.UNKNOWN
    fileName = None
    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None
    filesList = re.findall('<td.+>(.*?)</td>', data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
    videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)
    # Filtering SingleEpisode/MultiSeason Torrent: the file count must be
    # close to the expected episode count (10% slack for extras)
    if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
        logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG)
        logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG)
        return None
    # if the title already carries a recognizable quality, keep it as-is
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title
    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break
    # fall back to an assumed quality from the last inspected file
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))
    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
        return None
    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None
    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)
    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)
    return title
def _parseItem(self, item):
    """Parse one Ethor RSS item, resolve its show against TVDB, and add
    the result to the provider cache."""
    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
    ltvdb_api_parms['search_all_languages'] = True
    title, url = self.provider._get_title_and_url(item)
    if not (title and url):
        logger.log(u"The XML returned from the Ethor RSS feed is incomplete, this result is unusable", logger.ERROR)
        return
    try:
        parse_result = NameParser().parse(title)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.DEBUG)
        return
    # resolve the parsed series name to its TVDB record
    try:
        tvdb_instance = tvdb_api.Tvdb(**ltvdb_api_parms)
        showObj = tvdb_instance[parse_result.series_name]
    except tvdb_exceptions.tvdb_error:
        logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR)
        return
    logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
    self._addCacheEntry(name=title, url=url, tvdb_id=showObj['id'])
def process(self):
    """Handle a failed download: parse the release name and queue a new
    search for every episode it covered.

    :return: True on success
    :raises exceptions.FailedProcessingFailed: when no usable release
        name can be found, or it fails to parse
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")
    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    try:
        parsed = NameParser(False, showObj=self.show, convert=True).parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.DEBUG)
        raise exceptions.FailedProcessingFailed()
    except InvalidShowException:
        self._log(u"Error: unable to parse release name " + releaseName + " into a valid show", logger.DEBUG)
        raise exceptions.FailedProcessingFailed()
    # dump everything the parser extracted, for troubleshooting
    logger.log(u"name_parser info: ", logger.DEBUG)
    for detail in (parsed.series_name, parsed.season_number, parsed.episode_numbers,
                   parsed.extra_info, parsed.release_group, parsed.air_date):
        logger.log(u" - " + str(detail), logger.DEBUG)
    # queue one failed-episode search per episode in the release
    for episode in parsed.episode_numbers:
        segment = parsed.show.getEpisode(parsed.season_number, episode)
        sickbeard.searchQueueScheduler.action.add_item(
            search_queue.FailedQueueItem(parsed.show, [segment]))
    return True
def _find_season_quality(self, title, torrent_link, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list

    :param title: torrent title
    :param torrent_link: url of the torrent detail page to scrape
    :param ep_number: expected number of episodes in the season
    :return: annotated title, original title, or None when undetermined
    """
    quality = Quality.UNKNOWN
    file_name = None
    data = self.get_url(torrent_link)
    if not data:
        return None
    try:
        with BS4Parser(data, features=['html5lib', 'permissive']) as soup:
            file_table = soup.find('table', attrs={'class': 'torrentFileList'})
            if not file_table:
                return None
            files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
            # NOTE(review): relies on a module-level `mediaExtensions` not
            # visible in this chunk -- confirm it is defined at file scope
            video_files = filter(lambda i: i.rpartition('.')[2].lower() in mediaExtensions, files)
            # Filtering SingleEpisode/MultiSeason Torrent: file count must
            # be close to the expected episode count (10% slack)
            if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
                logger.log(u'Result %s lists %s episodes with %s episodes retrieved in torrent'
                           % (title, ep_number, len(video_files)), logger.DEBUG)
                logger.log(u'Result %s seem to be a single episode or multi-season torrent, skipping result...'
                           % title, logger.DEBUG)
                return None
            # a title that already carries a recognizable quality is kept as-is
            if Quality.UNKNOWN != Quality.sceneQuality(title):
                return title
            for file_name in video_files:
                quality = Quality.sceneQuality(os.path.basename(file_name))
                if Quality.UNKNOWN != quality:
                    break
            # fall back to an assumed quality from the last inspected file
            if None is not file_name and Quality.UNKNOWN == quality:
                quality = Quality.assumeQuality(os.path.basename(file_name))
            if Quality.UNKNOWN == quality:
                logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
                return None
            try:
                my_parser = NameParser(showObj=self.show)
                parse_result = my_parser.parse(file_name)
            except (InvalidNameException, InvalidShowException):
                return None
            logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)
            if parse_result.series_name and parse_result.season_number:
                title = parse_result.series_name + ' S%02d %s' % (int(parse_result.season_number),
                                                                  self._reverse_quality(quality))
            return title
    except Exception:
        # scrape/parse failures are logged and swallowed (returns None)
        logger.log(u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
def findEpisode(self, episode, manualSearch=False):
    """Search this provider for a single episode.

    Refreshes and consults the cache first; only hits the provider search
    when a manual search found nothing in the cache.

    :param episode: episode object to search for
    :param manualSearch: True when triggered by the user
    :return: list of result objects (possibly empty)
    """
    self._checkAuth()
    logger.log(u"Searching "+self.name+" for " + episode.prettyName())
    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: "+str(results), logger.DEBUG)
    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results
    itemList = []
    for cur_search_string in self._get_episode_search_strings(episode):
        itemList += self._doSearch(cur_search_string)
    for item in itemList:
        (title, url) = self._get_title_and_url(item)
        # parse the file name
        try:
            myParser = NameParser()
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
            continue
        if episode.show.air_by_date:
            # air-by-date shows are matched on airdate rather than SxxEyy
            if parse_result.air_date != episode.airdate:
                logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
            logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG)
            continue
        quality = self.getQuality(item)
        # discard results whose quality the show's settings don't want
        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
            continue
        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
        result = self.getResult([episode])
        result.url = url
        result.name = title
        result.quality = quality
        result.provider = self
        result.content = None
        results.append(result)
    return results
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: when True, skip all checks and report not-yet-processed
    :param result: result accumulator (only used by commented-out logging)
    :return: True if already post processed, False if not
    """
    if force:
        return False
    # Avoid processing the same dir again if we use a process method <> move
    myDB = db.DBConnection()
    sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
    if sqlResult:
        # result.output += logHelper(u"You're trying to post process a dir that's already been processed, skipping", logger.DEBUG)
        return True
    else:
        # second chance: match on the video file name without its extension
        sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition(".")[0]])
        if sqlResult:
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
            return True
        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        # NOTE(review): dirName is passed as NameParser's file_name flag
        # (a truthy non-boolean) -- looks unintended but harmless; confirm
        np = NameParser(dirName, tryIndexers=True)
        try:
            # if it fails to find any info (because we're doing an unparsable folder (like the TV root dir) it will throw an exception, which we want to ignore
            parse_result = np.parse(dirName)
        except Exception:
            # ignore the exception, because we kind of expected it, but create parse_result anyway so we can perform a check on it.
            parse_result = False
        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
        # This part is always the same
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (
            parse_result.show.indexerid
            and parse_result.episode_numbers
            and parse_result.season_number
        ):
            # values are internally-derived ints, so string concatenation
            # here is not an injection vector -- but keep it internal
            search_sql += (
                " and tv_episodes.showid = '"
                + str(parse_result.show.indexerid)
                + "' and tv_episodes.season = '"
                + str(parse_result.season_number)
                + "' and tv_episodes.episode = '"
                + str(parse_result.episode_numbers[0])
                + "'"
            )
        search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        sqlResult = myDB.select(search_sql, ["%" + videofile])
        if sqlResult:
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
            return True
    return False
def test_formating(self):
    """Smoke test: parse a full file path release name and print the result."""
    self.loadFromDB()
    release = "d:\\Downloads\\newdownload\\2.Broke.Girls.S03E10.And.the.First.Day.of.School.720p.WEB-DL.DD5.1.H.264-BS.mkv"
    # parse the name to break it into show name, season, and episode.
    # Bug fix: the original passed the builtin `file` object as the
    # file_name flag; pass the intended explicit boolean instead
    # (builtin `file` is truthy, so behavior is unchanged).
    np = NameParser(True)
    parse_result = np.parse(release).convert()
    print(parse_result)
def _find_season_quality(self, title, torrent_link, ep_number):
    """Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None when it cannot be determined.

    :param title: torrent title
    :param torrent_link: url of the torrent detail page to scrape
    :param ep_number: expected number of episodes in the season
    """
    # Bug fix: 'ts' and 'ogv' were missing a separating comma, so implicit
    # string concatenation produced the bogus extension 'tsogv'.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip', 'mp4']
    quality = Quality.UNKNOWN
    fileName = None
    data = self.getURL(torrent_link)
    if not data:
        return None
    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})
        if not file_table:
            return None
        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)
        # Filtering SingleEpisode/MultiSeason Torrent: file count must be
        # close to the expected episode count (10% slack for extras)
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG)
            return None
        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break
        # fall back to an assumed quality from the last inspected file
        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))
        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None
        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None
        logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG)
        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality)
        return title
    except Exception:
        # the unused exception binding was dropped; traceback carries the detail
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
def _find_season_quality(self, title, torrent_id, ep_number):
    """Annotate a season torrent title with the quality deduced from the
    torrent's file list; returns None when it cannot be determined."""
    if not self.url:
        return False
    quality = Quality.UNKNOWN
    file_name = None
    data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
    if self.should_skip() or not data:
        return None
    files_list = re.findall('<td.+>(.*?)</td>', data)
    if not files_list:
        logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)
    # keep only entries whose extension is a known media extension
    video_files = [entry for entry in files_list
                   if entry.rpartition('.')[2].lower() in mediaExtensions]
    # Filtering SingleEpisode/MultiSeason Torrent
    if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
        logger.log(u'Result %s has episode %s and total episodes retrieved in torrent are %s'
                   % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
        logger.log(u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
                   % title, logger.DEBUG)
        return None
    # a title that already carries a recognizable quality is kept unchanged
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title
    for file_name in video_files:
        quality = Quality.sceneQuality(os.path.basename(file_name))
        if quality != Quality.UNKNOWN:
            break
    if file_name is not None and quality == Quality.UNKNOWN:
        # last resort: assume a quality from the final inspected file name
        quality = Quality.assumeQuality(os.path.basename(file_name))
    if quality == Quality.UNKNOWN:
        logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
        return None
    try:
        parse_result = NameParser(showObj=self.show, indexer_lookup=False).parse(file_name)
    except (InvalidNameException, InvalidShowException):
        return None
    logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)
    if parse_result.series_name and parse_result.season_number:
        title = '%s S%02d %s' % (parse_result.series_name, int(parse_result.season_number),
                                 self._reverse_quality(quality))
    return title
def _find_season_quality(self, title, torrent_id):
    """Rewrite the title of a Season Torrent with the quality found
    inspecting the torrent file list; returns None when undetermined.

    :param title: torrent title
    :param torrent_id: provider torrent id, used to fetch the file list
    """
    # Bug fix: 'ts' and 'ogv' were missing a separating comma, so implicit
    # string concatenation produced the bogus extension 'tsogv'.
    mediaExtensions = ["avi", "mkv", "wmv", "divx", "vob", "dvr-ms", "wtv", "ts",
                       "ogv", "rar", "zip"]
    quality = Quality.UNKNOWN
    fileName = None
    fileURL = self.proxy._buildURL(self.url + "ajax_details_filelist.php?id=" + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None
    filesList = re.findall("<td.+>(.*?)</td>", data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
    for fileName in filesList:
        # reuse the single rpartition result instead of recomputing it
        sepFile = fileName.rpartition(".")
        if sepFile[2].lower() in mediaExtensions:
            quality = Quality.nameQuality(fileName)
            if quality != Quality.UNKNOWN:
                break
    # fall back to an assumed quality from the last inspected file
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))
    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for " + title, logger.DEBUG)
        return None
    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None
    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)
    if parse_result.series_name and parse_result.season_number:
        title = (
            parse_result.series_name
            + " S%02d" % int(parse_result.season_number)
            + " "
            + self._reverseQuality(quality)
        )
    return title
def _get_language(self, title=None, item=None):
    """Return the audio languages parsed from a release title; falls back
    to 'en' when no title is given or the title cannot be parsed."""
    if not title:
        return 'en'
    try:
        parse_result = NameParser().parse(title)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
        return 'en'
    return parse_result.audio_langs
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """ See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    sample = generate_sample_ep(multi, abd, sports, anime_type)
    name = sample.formatted_filename(pattern, multi, anime_type) + '.ext'
    dir_part = sample.formatted_dir(pattern, multi)
    if not file_only:
        name = ek(os.path.join, dir_part, name)
    if not name:
        logging.debug("Unable to create a name out of " + pattern)
        return False
    logging.debug("Trying to parse " + name)
    parser = NameParser(True, showObj=sample.show, naming_pattern=True)
    try:
        result = parser.parse(name)
    except Exception:
        logging.debug("Unable to parse " + name + ", not valid")
        return False
    logging.debug("The name " + name + " parsed into " + str(result))
    if abd or sports:
        # dated shows are validated on the airdate alone
        if result.air_date != sample.airdate:
            logging.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        # anime patterns are validated on absolute episode numbering
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [x.absolute_number for x in [sample] + sample.relatedEps]:
            logging.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
    else:
        if result.season_number != sample.season:
            logging.debug("Season number incorrect in parsed episode, pattern isn't valid")
            return False
        if result.episode_numbers != [x.episode for x in [sample] + sample.relatedEps]:
            logging.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
            return False
    return True
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Parse a release name (unless pre-parsed) and return the SQL needed
    to insert it into this provider's cache table.

    :param name: release name
    :param url: result url
    :param parse_result: optional pre-parsed result; parsed here when None
    :param indexer_id: optional indexer id used to pre-select the show
    :return: [sql, params] suitable for a mass db action, or None
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
        try:
            myParser = NameParser(showObj=showObj)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logging.debug("Unable to parse the filename " + name + " into a valid episode")
            return None
        except InvalidShowException:
            logging.debug("Unable to parse the filename " + name + " into a valid show")
            return None
    if not parse_result or not parse_result.series_name:
        return None
    # if we made it this far then lets add the parsed result to cache for usage later on
    # a missing season number defaults to season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers
    if season and episodes:
        # store episodes as a pipe-separated string, e.g. "|1|2|"
        episodeText = "|" + "|".join(map(str, episodes)) + "|"
        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
        # get quality of release
        quality = parse_result.quality
        name = ss(name)
        # get release group
        release_group = parse_result.release_group
        # get version
        version = parse_result.version
        logging.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")
        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def process(self):
    """Handle a failed download: locate the show from the release name and
    queue a failed-episode search for the affected season.

    :return: True on success
    :raises exceptions.FailedProcessingFailed: on any unrecoverable step
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")
    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    parser = NameParser(False)
    try:
        parsed = parser.parse(releaseName, True)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    # dump everything the parser extracted, for troubleshooting
    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
    show_id = self._get_show_id(parsed.series_name)
    if show_id is None:
        self._log(u"Warning: couldn't find show ID", logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    self._log(u"Found show_id: " + str(show_id), logger.DEBUG)
    self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
    if self._show_obj is None:
        # NOTE(review): this message was split across two garbled source
        # lines; rejoined into one literal here
        self._log(u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)", logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    # # Revert before fail, as fail alters the history
    # self._log(u"Reverting episodes...")
    # self.log += failed_history.revertEpisodes(self._show_obj, parsed.season_number, parsed.episode_numbers)
    # self._log(u"Marking release as bad: " + releaseName)
    # self.log += failed_history.logFailed(releaseName)
    self.log += failed_history.markFailed(self._show_obj, parsed.season_number, parsed.episode_numbers)
    # self._log(u"Marking release as Failed: " + releaseName)
    # self.log += failed_history.logFailed(releaseName)
    cur_failed_queue_item = search_queue.FailedQueueItem(self._show_obj, parsed.season_number)
    sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
    return True
def parse_wrapper(self, show=None, toParse='', showList=None, tvdbActiveLookUp=False):
    """Returns a parse result or a InvalidNameException to get the tvdbid
    the tvdbapi might be used if tvdbActiveLookUp is True

    :param show: ignored on entry; rebound to the matched show
    :param toParse: release name to parse
    :param showList: shows to match against; falls back to the global list
    :param tvdbActiveLookUp: when True the tvdb api may be queried
    :return: (parse_result, show) tuple
    :raises InvalidNameException: when toParse cannot be parsed
    """
    # TODO: refactor ABD into its own mode ... if done remove simple check in parse()
    # Bug fix: the default was a shared mutable list ([]); use a None
    # sentinel instead -- `not showList` covers both None and [].
    if not showList:
        showList = sickbeard.showList
    try:
        myParser = NameParser()
        parse_result = myParser.parse(toParse)
    except InvalidNameException:
        raise InvalidNameException(u"Unable to parse: " + toParse)
    else:
        show = self.get_show_by_name(parse_result.series_name, showList, toParse, tvdbActiveLookUp)
    return (parse_result, show)
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
    """Parse a release name (unless pre-parsed) and return the SQL needed
    to insert it into this provider's cache table.

    :param name: release name (must be unicode)
    :param url: result url
    :param parse_result: optional pre-parsed result; parsed here when None
    :param indexer_id: optional indexer id used to pre-select the show
    :return: [sql, params] suitable for a mass db action, or None
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create showObj from indexer_id if available
        showObj = None
        if indexer_id:
            showObj = Show.find(sickbeard.showList, indexer_id)
        try:
            parse_result = NameParser(showObj=showObj).parse(name)
            # Bug fix: removed `parse_result.qualitiy = provider.get_quality(item)`
            # -- `provider` and `item` are undefined in this scope (it raised
            # NameError whenever reached) and the attribute was misspelled.
        except (InvalidNameException, InvalidShowException) as error:
            logger.log(u"{0}".format(error), logger.DEBUG)
            return None
    if not parse_result or not parse_result.series_name:
        return None
    # if we made it this far then lets add the parsed result to cache for usage later on
    # a missing season number defaults to season 1
    season = parse_result.season_number if parse_result.season_number else 1
    episodes = parse_result.episode_numbers
    if season and episodes:
        # store episodes as a pipe-separated string, e.g. "|1|2|"
        episodeText = "|" + "|".join({str(episode) for episode in episodes if episode}) + "|"
        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
        # get quality of release
        quality = parse_result.quality
        assert isinstance(name, unicode)
        # get release group
        release_group = parse_result.release_group
        # get version
        version = parse_result.version
        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def test_parsing_scene_release(self):
    """Parse a known scene release both raw and converted to scene numbering."""
    self.loadFromDB()
    # parse the file name
    # fixed misspelled locals (was `scene_parsse_results*`) and converted
    # the bare py2 print statements to the print() form used elsewhere
    scene_parse_results1 = ''
    scene_parse_results2 = ''
    scene_release = 'Pawn Stars S08E41 Field Trip HDTV x264-tNe'
    try:
        myParser = NameParser(False, 1)
        scene_parse_results1 = myParser.parse(scene_release)
        scene_parse_results2 = myParser.parse(scene_release).convert()
    except InvalidNameException:
        print(u"Unable to parse the filename " + scene_release + " into a valid episode")
    print(scene_parse_results1)
    print(scene_parse_results2)
def _is_season_pack(self, name):
    """Return True when `name` parses to a release that covers every
    episode the local database knows for that season; otherwise falls
    through (implicit None, falsy) or returns False on a parse failure."""
    try:
        parse_result = NameParser(tryIndexers=True).parse(name)
    except InvalidNameException:
        logging.debug("Unable to parse the filename %s into a valid episode" % name)
        return False
    except InvalidShowException:
        logging.debug("Unable to parse the filename %s into a valid show" % name)
        return False
    # compare the episode count the DB has for this season against the
    # number of episodes the release name claims to contain
    main_db = db.DBConnection()
    sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
    episodes = main_db.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
    if int(episodes[0][b'count']) == len(parse_result.episode_numbers):
        return True
def _find_season_quality(self, title, torrent_id):
    """Return the modified title of a Season Torrent with the quality found
    inspecting the torrent file list, or None when it cannot be determined.

    :param title: torrent title
    :param torrent_id: provider torrent id, used to fetch the file list
    """
    # Bug fix: 'ts' and 'ogv' were missing a separating comma, so implicit
    # string concatenation produced the bogus extension 'tsogv'.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                       'ogv', 'rar', 'zip']
    quality = Quality.UNKNOWN
    fileName = None
    fileURL = self.proxy._buildURL(self.url+'ajax_details_filelist.php?id='+str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None
    filesList = re.findall('<td.+>(.*?)</td>', data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for "+title, logger.ERROR)
    for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
        quality = Quality.nameQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break
    # fall back to an assumed quality from the last inspected file
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))
    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for "+title, logger.DEBUG)
        return None
    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None
    logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG)
    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality)
    return title
def _is_season_pack(name):
    """Return True when `name` parses to a release spanning every episode
    the database knows for that season; False on parse failure, otherwise
    falls through (implicit None, falsy)."""
    try:
        parse_result = NameParser(tryIndexers=True).parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename %s into a valid episode" % name, logger.DEBUG)
        return False
    except InvalidShowException:
        logger.log(u"Unable to parse the filename %s into a valid show" % name, logger.DEBUG)
        return False
    # compare DB episode count for the season with the release's claim
    main_db = db.DBConnection()
    sql_selection = "select count(*) as count from tv_episodes where showid = ? and season = ?"
    episodes = main_db.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
    if int(episodes[0]['count']) == len(parse_result.episode_numbers):
        return True
def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False):
    """Check that a naming pattern produces a name the parser understands
    and that the parsed fields match the generated sample episode."""
    sample = _generate_sample_ep(multi, abd, sports)
    # sports names need the alternate regex mode
    regexMode = 1 if sports else 0
    parser = NameParser(True, regexMode)
    name = sample.formatted_filename(pattern, multi) + '.ext'
    dir_part = sample.formatted_dir(pattern, multi)
    if not file_only:
        name = ek.ek(os.path.join, dir_part, name)
    if not name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False
    logger.log(u"Trying to parse " + name, logger.DEBUG)
    try:
        result = parser.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse " + name + ", not valid", logger.DEBUG)
        return False
    logger.log("The name " + name + " parsed into " + str(result), logger.DEBUG)
    if abd:
        if result.air_date != sample.airdate:
            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    elif sports:
        if result.sports_date != sample.airdate:
            logger.log(u"Sports air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    else:
        if result.season_number != sample.season:
            logger.log(u"Season incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
        if result.episode_numbers != [x.episode for x in [sample] + sample.relatedEps]:
            logger.log(u"Episode incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
    return True
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    name: A string which we want to analyze to determine show info from (unicode)

    Returns a (indexer_id, indexer, season, [episodes], None) tuple. The two
    ids may be None and episodes may be [] if none were found.
    """
    logger.log(u"Analyzing name " + repr(name))

    indexer_id = None
    indexer = None

    if not name:
        return (indexer_id, indexer, None, [], None)

    # break the name down into show name, season and episode
    parse_result = NameParser(file).parse(name)
    self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)

    if parse_result.air_by_date:
        season, episodes = -1, [parse_result.air_date]
    elif parse_result.sports:
        season, episodes = -1, [parse_result.sports_event_date]
    else:
        season, episodes = parse_result.season_number, parse_result.episode_numbers

    found_show = helpers.get_show_by_name(parse_result.series_name)
    if found_show:
        indexer_id = found_show.indexerid
        indexer = found_show.indexer

    to_return = (indexer_id, indexer, season, episodes, None)
    self._finalize(parse_result)
    return to_return
def _findTorrentHash(url):
    """Find the client-side id of the torrent matching *url*.

    Extracts a dot-separated release name from the url (magnet ``dn=``
    parameter or the ``.torrent`` filename), parses it, then polls the
    torrent client's list for up to 15 attempts looking for a torrent with
    the same url or a name that parses to the same show/season/episodes.

    :param url: magnet link or .torrent url that was sent to the client
    :return: the torrent id from the client list, or False when the name
        cannot be determined/parsed or no matching torrent shows up
    """
    # Derive the release name from the url once — it is loop-invariant, so
    # there is no point redoing it on every retry as the old code did.
    try:
        if url.startswith('magnet'):
            token_re = "&dn=([^<>]+)&tr="
            match = re.search(token_re, url)
            # match is None when the magnet has no dn=..&tr= section
            name = match.group(1)[0:match.group(1).find('&tr=')].replace('_', '.').replace('+', '.')
        else:
            real_name = url[url.rfind('/') + 1:url.rfind('.torrent')]
            real_name = real_name[real_name.rfind('=') + 1:url.rfind('.torrent')]
            name = real_name.replace('_', '.').replace('+', '.')
    except AttributeError:
        # was a bare except; only a failed regex match (match is None)
        # can raise here, so catch just that
        logger.log("Unable to retrieve episode name from " + url, logger.WARNING)
        return False

    try:
        myParser = NameParser()
        parse_result = myParser.parse(name)
    except InvalidNameException:
        # was a bare except; the parser signals failure with this exception
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    for i in range(0, 15):
        success, torrent_list = _action('&list=1', sickbeard.TORRENT_HOST, sickbeard.TORRENT_USERNAME,
                                        sickbeard.TORRENT_PASSWORD)
        if not success:
            continue
        # Don't fail when a torrent name can't be parsed to a name
        for torrent in torrent_list['torrents']:
            try:
                # Try to match URL first
                if len(torrent) >= 20 and url == torrent[19]:
                    return torrent[0]
                if len(torrent) < 3:
                    continue
                # If that fails try to parse the name of the torrent
                torrent_result = myParser.parse(torrent[2])
                if torrent_result.series_name == parse_result.series_name and torrent_result.season_number == parse_result.season_number and torrent_result.episode_numbers == parse_result.episode_numbers:
                    return torrent[0]
            except InvalidNameException:
                pass
        time.sleep(1)

    logger.log(u"I will not be able to set label or paused to this torrent: " + url)
    return False
def process(self):
    """Handle a failed download: parse its release name and queue a new
    search for the affected episodes.

    :return: True when the failed episodes were queued for a new search
    :raises exceptions.FailedProcessingFailed: when no release name can be
        determined, the name doesn't parse, or the show is unknown
    """
    self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

    releaseName = show_name_helpers.determineReleaseName(self.dir_name, self.nzb_name)
    if releaseName is None:
        self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
        raise exceptions.FailedProcessingFailed()

    try:
        parser = NameParser(False, convert=True)
        parsed = parser.parse(releaseName)
    except InvalidNameException:
        self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
        raise exceptions.FailedProcessingFailed()
    except InvalidShowException:
        self._log(u"Error: unable to parse release name " + releaseName + " into a valid show", logger.WARNING)
        # bug fix: this branch raised the exception class itself (missing
        # call parentheses) instead of an instance like the other handlers
        raise exceptions.FailedProcessingFailed()

    # dump everything the parser extracted, for troubleshooting
    logger.log(u"name_parser info: ", logger.DEBUG)
    logger.log(u" - " + str(parsed.series_name), logger.DEBUG)
    logger.log(u" - " + str(parsed.season_number), logger.DEBUG)
    logger.log(u" - " + str(parsed.episode_numbers), logger.DEBUG)
    logger.log(u" - " + str(parsed.extra_info), logger.DEBUG)
    logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
    logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
    logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)

    if parsed.show is None:
        self._log(
            u"Could not create show object. Either the show hasn't been added to SickRage, or it's still loading (if SB was restarted recently)",
            logger.WARNING,
        )
        raise exceptions.FailedProcessingFailed()

    # group the episode objects per season and hand them to the failed-search queue
    segment = {parsed.season_number: []}
    for episode in parsed.episode_numbers:
        epObj = parsed.show.getEpisode(parsed.season_number, episode)
        segment[parsed.season_number].append(epObj)

    cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
    sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

    return True
def test_formating(self):
    """Exercise sports name parsing and search-string generation for a
    known show/episode, printing the intermediate values."""
    self.loadFromDB()
    show = sickbeard.helpers.findCertainShow(sickbeard.showList, 111051)
    show.loadEpisodesFromDB()
    ep = show.getEpisode(8, 56, sceneConvert=True)
    ep.airdate = datetime.date.today()

    # parse the file name
    pattern = u'%SN - %A-D - %EN'
    title = 'UFC.166.Velasquez.v.Dos Santos.III.19th.Oct.2013.HDTV.x264-Sir.Paul'
    try:
        myParser = NameParser(False, 1)
        parse_result = myParser.parse(title)
    except InvalidNameException:
        # bug fix: report the title that actually failed (ep.name was
        # unrelated) and stop here instead of falling through to use an
        # unbound parse_result below
        print(u"Unable to parse the filename " + title + " into a valid episode")
        return
    print(parse_result)

    search_string = {'Episode': []}
    episode = ep.airdate
    str(episode).replace('-', '|')
    ep_string = sanitizeSceneName(show.name) + ' ' + \
        str(episode).replace('-', '|') + '|' + \
        episode.strftime('%b')
    search_string['Episode'].append(ep_string)

    scene_ep_string = sanitizeSceneName(show.name) + ' ' + \
        sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep.scene_season, 'episodenumber': ep.scene_episode} + '|' + \
        sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep.scene_season, 'episodenumber': ep.scene_episode} + '|' + \
        sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep.scene_season, 'episodenumber': ep.scene_episode} + ' %s category:tv' % ''

    scene_season_string = show.name + ' S%02d' % int(ep.scene_season) + ' -S%02d' % int(ep.scene_season) + 'E' + ' category:tv'

    # 1) ShowName SXX -SXXE
    print(u'Searching "%s" for "%s" as "%s"' % (show.name, ep.prettyName(), ep.scene_prettyName()))
    print('Scene episode search strings: %s' % (scene_ep_string))
    print('Scene season search strings: %s' % (scene_season_string))
def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
    """Search this provider for the given episodes and collect results.

    :param show: show object being searched for
    :param season: season number we intend to snatch
    :param episodes: list of episode objects to search for
    :param search_mode: 'sponly' for season-pack searches, 'eponly' otherwise
    :param manualSearch: whether this was triggered manually by the user
    :return: dict mapping an episode number (or MULTI_EP_RESULT /
        SEASON_RESULT sentinel) to a list of result objects
    """
    self._checkAuth()
    self.show = show

    results = {}
    itemList = []
    searched_scene_season = None
    for epObj in episodes:
        # check cache for results
        cacheResult = self.cache.searchCache([epObj], manualSearch)
        if len(cacheResult):
            results.update({epObj.episode: cacheResult[epObj]})
            continue

        # skip if season already searched
        if len(episodes) > 1 and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        if len(episodes) > 1:
            # get season search results
            for curString in self._get_season_search_strings(epObj):
                itemList += self._doSearch(curString, search_mode, len(episodes))
        else:
            # get single episode search results
            for curString in self._get_episode_search_strings(epObj):
                itemList += self._doSearch(curString, 'eponly', len(episodes))

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality (highest quality bucket first, unknowns last)
    if len(itemList):
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
        itemList += itemsUnknown if itemsUnknown else []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False, convert=True)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            # episode-numbered show: validate season/episode numbers against
            # what we are actually searching for
            if search_mode == 'sponly' and len(parse_result.episode_numbers):
                logger.log(
                    u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and (
                        parse_result.season_number and parse_result.season_number != season) or (
                        not parse_result.season_number and season != 1):
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and (
                        parse_result.season_number != season or not [
                            ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = season
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports show: map the parsed date back to a
            # season/episode pair via the database
            if not (parse_result.is_air_by_date or parse_result.is_sports):
                logger.log(
                    u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
                myDB = db.DBConnection()
                sql_results = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    logger.log(
                        u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                        logger.WARNING)
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch):
                wantEp = False
                break

        if not wantEp:
            logger.log(
                u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality],
                logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.content = None
        result.version = version

        # classify the result for downstream snatching logic
        if len(epObj) == 1:
            epNum = epObj[0].episode
            logger.log(u"Single episode result.", logger.DEBUG)
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(
                u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers),
                logger.DEBUG)
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            logger.log(u"Separating full season result to check for later", logger.DEBUG)

        # validate torrent file if not magnet link to avoid invalid torrent links
        if self.providerType == self.TORRENT:
            if sickbeard.TORRENT_METHOD != "blackhole":
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                result = client._get_torrent_hash(result)
                if not result.hash:
                    logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
                    continue

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        myDB = self.cache._getDB()
        myDB.mass_action(cl)

    return results
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    sample = generate_sample_ep(multi, abd, sports, anime_type)

    name = sample.formatted_filename(pattern, multi, anime_type) + '.ext'
    path = sample.formatted_dir(pattern, multi)
    if not file_only:
        name = ek(os.path.join, path, name)

    if not name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + name, logger.DEBUG)

    parser = NameParser(True, showObj=sample.show, naming_pattern=True)
    try:
        parsed = parser.parse(name)
    except Exception:
        logger.log(u"Unable to parse " + name + ", not valid", logger.DEBUG)
        return False

    logger.log(u"The name " + name + " parsed into " + str(parsed), logger.DEBUG)

    # air-by-date and sports names only need the date to round-trip
    if abd or sports:
        if parsed.air_date != sample.airdate:
            logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
        return True

    # anime (except type 3) validates via absolute numbering
    if anime_type != 3:
        if len(parsed.ab_episode_numbers) and parsed.ab_episode_numbers != [
                x.absolute_number for x in [sample] + sample.relatedEps]:
            logger.log(u"Absolute numbering incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
            return False
        return True

    # standard season/episode validation
    if parsed.season_number != sample.season:
        logger.log(u"Season number incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
        return False
    if parsed.episode_numbers != [x.episode for x in [sample] + sample.relatedEps]:
        logger.log(u"Episode numbering incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
        return False
    return True
def filter_bad_releases(name, parse=True, show=None):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check
    parse: when True, first verify the name parses as a valid episode
    show: optional show whose per-show word lists override the globals

    Returns: True if the release name is OK, False if it's bad.
    """
    def split_words(csv_words):
        # split a comma-separated word list, dropping blanks
        return (w.strip() for w in csv_words.split(',') if w.strip())

    if parse:
        try:
            NameParser().parse(name)
        except InvalidNameException as error:
            logger.log("{0}".format(error), logger.DEBUG)
            return False
        except InvalidShowException:
            # an unknown show is tolerated here; only unparseable names fail
            pass

    # build the ignore list: per-show words take precedence over globals
    ignore_words = list(resultFilters)
    if show and show.rls_ignore_words:
        ignore_words.extend(show.rls_ignore_words.split(','))
    elif sickbeard.IGNORE_WORDS:
        ignore_words.extend(sickbeard.IGNORE_WORDS.split(','))

    # never ignore a word that is explicitly required
    if show and show.rls_require_words:
        ignore_words = list(set(ignore_words).difference(split_words(show.rls_require_words)))
    elif sickbeard.REQUIRE_WORDS and not (show and show.rls_ignore_words):
        # Only remove global require words from the list if we arent using show ignore words
        ignore_words = list(set(ignore_words).difference(split_words(sickbeard.REQUIRE_WORDS)))

    matched = containsAtLeastOneWord(name, ignore_words)
    if matched:
        logger.log("Release: " + name + " contains " + matched + ", ignoring it", logger.INFO)
        return False

    # build the require list the same way, minus any ignored words
    require_words = []
    if show and show.rls_require_words:
        require_words.extend(show.rls_require_words.split(','))
    elif sickbeard.REQUIRE_WORDS:
        require_words.extend(sickbeard.REQUIRE_WORDS.split(','))

    if show and show.rls_ignore_words:
        require_words = list(set(require_words).difference(split_words(show.rls_ignore_words)))
    elif sickbeard.IGNORE_WORDS and not (show and show.rls_require_words):
        # Only remove global ignore words from the list if we arent using show require words
        require_words = list(set(require_words).difference(split_words(sickbeard.IGNORE_WORDS)))

    if require_words and not containsAtLeastOneWord(name, require_words):
        logger.log("Release: " + name + " doesn't contain any of " + ', '.join(set(require_words)) +
                   ", ignoring it", logger.INFO)
        return False

    return True
def validateDir(path, dirName, nzbNameOriginal, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param path: Path to use
    :param dirName: Directory to check
    :param nzbNameOriginal: Original NZB name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    dirName = ss(dirName)

    # special folders created by macOS/NAS software — never processable
    IGNORED_FOLDERS = [u'.AppleDouble', u'.@__thumb', u'@eaDir']
    folder_name = ek(os.path.basename, dirName)
    if folder_name in IGNORED_FOLDERS:
        return False

    result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    # marker prefixes/suffixes added by downloaders to flag broken downloads
    if folder_name.upper().startswith(u'_FAILED_') or folder_name.upper().endswith(u'_FAILED_'):
        result.output += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif folder_name.upper().startswith(u'_UNDERSIZED_') or folder_name.upper().endswith(u'_UNDERSIZED_'):
        result.output += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
        failed = True
    elif folder_name.upper().startswith(u'_UNPACK') or folder_name.upper().endswith(u'_UNPACK'):
        result.output += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
        result.missedfiles.append(u"{0} : Being unpacked".format(dirName))
        return False

    if failed:
        process_failed(ek(os.path.join, path, dirName), nzbNameOriginal, result)
        result.missedfiles.append(u"{0} : Failed download".format(dirName))
        return False

    if helpers.is_hidden_folder(ek(os.path.join, path, dirName)):
        result.output += logHelper(u"Ignoring hidden folder: {0}".format(dirName), logger.DEBUG)
        result.missedfiles.append(u"{0} : Hidden folder".format(dirName))
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        if dirName.lower().startswith(ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or \
                dirName.lower() == ek(os.path.realpath, sqlShow["location"]).lower():
            result.output += logHelper(
                u"Cannot process an episode that's already been moved to its show dir, skipping " + dirName,
                logger.WARNING)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for root_, processdir, fileList in ek(os.walk, ek(os.path.join, path, dirName), topdown=False):
        allDirs += processdir
        allFiles += fileList

    videoFiles = [x for x in allFiles if helpers.isMediaFile(x)]
    allDirs.append(dirName)

    # check if the dir have at least one tv video file
    # (the first name that parses as a valid episode makes the dir valid)
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException) as error:
            result.output += logHelper(u"{0}".format(error), logger.DEBUG)

    # no valid video file names — fall back to trying the directory names
    for proc_dir in allDirs:
        try:
            NameParser().parse(proc_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException) as error:
            result.output += logHelper(u"{0}".format(error), logger.DEBUG)

    if sickbeard.UNPACK:
        # Search for packed release
        packedFiles = [x for x in allFiles if helpers.isRarFile(x)]

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException) as error:
                result.output += logHelper(u"{0}".format(error), logger.DEBUG)

    result.output += logHelper(u"{0} : No processable items found in folder".format(dirName), logger.DEBUG)
    return False
def findFrench(self, episode=None, manualSearch=False):
    """Search this provider for a French-language release of *episode*.

    :param episode: episode object to search for
    :param manualSearch: whether this was triggered manually by the user
    :return: list of result objects whose detected language is French and
        that match the episode's season/episode (or air date)
    """
    results = []
    self._checkAuth()

    logger.log(u"Searching " + self.name + " for " + episode.prettyName())

    itemList = []
    for cur_search_string in self._get_episode_search_strings(episode, 'french'):
        itemList += self._doSearch(cur_search_string, show=episode.show, french='french')

    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser()
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
            continue

        language = self._get_language(title, item)

        # verify the result actually matches the episode we want
        if episode.show.air_by_date:
            if parse_result.air_date != episode.airdate:
                logger.log("Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
                           logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
            logger.log("Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) +
                       ", skipping it", logger.DEBUG)
            continue

        quality = self.getQuality(item)

        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(
                u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality],
                logger.DEBUG)
            continue

        # keep only results whose detected language is French
        if not language == 'fr':
            logger.log(u"Ignoring result " + title + " because the language: " + showLanguages[language] +
                       " does not match the desired language: French")
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # build the result object
        result = self.getResult([episode])
        result.item = item
        if hasattr(item, 'getNZB'):
            result.extraInfo = [item.getNZB()]
        elif hasattr(item, 'extraInfo'):
            result.extraInfo = item.extraInfo
        result.url = url
        result.name = title
        result.quality = quality
        if hasattr(item, 'audio_langs'):
            result.audio_lang = ''.join(item.audio_langs)
        else:
            result.audio_lang = language

        results.append(result)

    return results
def find_search_results(self, show, episodes, search_mode, manual_search=False, download_current_quality=False):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
    """Search this provider for the given episodes and collect results.

    :param show: show object being searched for
    :param episodes: list of episode objects to search for
    :param search_mode: 'sponly' for season-pack searches, 'eponly' otherwise
    :param manual_search: whether this was triggered manually by the user
    :param download_current_quality: allow re-downloading the current quality
    :return: dict mapping an episode number (or MULTI_EP_RESULT /
        SEASON_RESULT sentinel) to a list of result objects
    """
    self._check_auth()
    self.show = show

    results = {}
    items_list = []
    searched_scene_season = None

    for episode in episodes:
        # serve from cache when possible
        cache_result = self.cache.searchCache(episode, manualSearch=manual_search,
                                              downCurQuality=download_current_quality)
        if cache_result:
            if episode.episode not in results:
                results[episode.episode] = cache_result
            else:
                results[episode.episode].extend(cache_result)
            continue

        # for season-pack mode, search each scene season only once
        if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
            continue

        search_strings = []
        searched_scene_season = episode.scene_season

        if len(episodes) > 1 and search_mode == 'sponly':
            search_strings = self._get_season_search_strings(episode)
        elif search_mode == 'eponly':
            search_strings = self._get_episode_search_strings(episode)

        # an 'rid' entry means we can query by indexer id first and only
        # fall back to string queries if that returns nothing
        first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
        if first:
            logger.log(u'First search_string has rid', logger.DEBUG)

        for search_string in search_strings:
            items_list += self.search(search_string, ep_obj=episode)
            if first:
                first = False
                if items_list:
                    logger.log(u'First search_string had rid, and returned results, skipping query by string',
                               logger.DEBUG)
                    break
                logger.log(u'First search_string had rid, but returned no results, searching with string query',
                           logger.DEBUG)

    # everything already satisfied from cache
    if len(results) == len(episodes):
        return results

    # sort items by quality (highest bucket first, unknowns last)
    if items_list:
        items = {}
        unknown_items = []

        for item in items_list:
            quality = self.get_quality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                unknown_items.append(item)
            else:
                if quality not in items:
                    items[quality] = []
                items[quality].append(item)

        items_list = list(chain(*[v for (_, v) in sorted(items.iteritems(), reverse=True)]))
        items_list += unknown_items

    cl = []
    for item in items_list:
        (title, url) = self._get_title_and_url(item)

        try:
            parser = NameParser(parse_method=('normal', 'anime')[show.is_anime])
            parse_result = parser.parse(title)
        except InvalidNameException:
            logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u'Unable to parse the filename %s into a valid show' % title, logger.DEBUG)
            continue

        show_object = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        add_cache_entry = False
        if not (show_object.air_by_date or show_object.sports):
            # episode-numbered show: validate against the episodes we want
            if search_mode == 'sponly':
                if len(parse_result.episode_numbers):
                    logger.log(
                        u'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title,
                        logger.DEBUG)
                    add_cache_entry = True
                if len(parse_result.episode_numbers) and (
                        parse_result.season_number not in set([ep.season for ep in episodes]) or not [
                            ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    logger.log(
                        u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title,
                        logger.DEBUG)
                    add_cache_entry = True
            else:
                if not len(parse_result.episode_numbers) and parse_result.season_number and not [
                        ep for ep in episodes
                        if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    logger.log(
                        u'The result %s doesn\'t seem to be a valid season that we are trying to snatch, ignoring' % title,
                        logger.DEBUG)
                    add_cache_entry = True
                elif len(parse_result.episode_numbers) and not [
                        ep for ep in episodes
                        if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    logger.log(
                        u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title,
                        logger.DEBUG)
                    add_cache_entry = True

            if not add_cache_entry:
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports show: resolve the parsed date to a
            # season/episode via the database
            same_day_special = False

            if not parse_result.is_air_by_date:
                logger.log(
                    u'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title,
                    logger.DEBUG)
                add_cache_entry = True
            else:
                air_date = parse_result.air_date.toordinal()
                db = DBConnection()
                sql_results = db.select(
                    'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                    [show_object.indexerid, air_date])

                # two rows on the same date means a special (season 0) plus
                # a regular episode aired that day — prefer the regular one
                if len(sql_results) == 2:
                    if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                        actual_season = int(sql_results[1]['season'])
                        actual_episodes = [int(sql_results[1]['episode'])]
                        same_day_special = True
                    elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                        actual_season = int(sql_results[0]['season'])
                        actual_episodes = [int(sql_results[0]['episode'])]
                        same_day_special = True
                elif len(sql_results) != 1:
                    logger.log(
                        u'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title,
                        logger.WARNING)
                    add_cache_entry = True

            if not add_cache_entry and not same_day_special:
                actual_season = int(sql_results[0]['season'])
                actual_episodes = [int(sql_results[0]['episode'])]

        # result didn't match the current search — cache it for later use
        if add_cache_entry:
            logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG)
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we actually want every episode in the result
        episode_wanted = True
        for episode_number in actual_episodes:
            if not show_object.wantEpisode(actual_season, episode_number, quality, manual_search,
                                           download_current_quality):
                episode_wanted = False
                break

        if not episode_wanted:
            logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' % (
                title, Quality.qualityStrings[quality]), logger.INFO)
            continue

        logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)

        # build the result object
        episode_object = []
        for current_episode in actual_episodes:
            episode_object.append(show_object.getEpisode(actual_season, current_episode))

        result = self.get_result(episode_object)
        result.show = show_object
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        # classify the result for downstream snatching logic
        if len(episode_object) == 1:
            episode_number = episode_object[0].episode
            logger.log(u'Single episode result.', logger.DEBUG)
        elif len(episode_object) > 1:
            episode_number = MULTI_EP_RESULT
            logger.log(u'Separating multi-episode result to check for later - result contains episodes: %s' % str(
                parse_result.episode_numbers), logger.DEBUG)
        elif len(episode_object) == 0:
            episode_number = SEASON_RESULT
            logger.log(u'Separating full season result to check for later', logger.DEBUG)

        if episode_number not in results:
            results[episode_number] = [result]
        else:
            results[episode_number].append(result)

    # flush cache entries collected above
    if len(cl) > 0:
        # pylint: disable=protected-access
        # Access to a protected member of a client class
        db = self.cache._getDB()
        db.mass_action(cl)

    return results
def _getProperList(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """
    Walk providers for propers.

    Queries every active provider for PROPER/REPACK releases from the last
    two days, deduplicates them, parses each into an episode and filters
    out the ones we don't need (wrong quality, not downloaded/snatched,
    anime version/release-group mismatches).

    :return: list of proper result objects we should snatch
    """
    propers = {}

    search_date = datetime.datetime.today() - datetime.timedelta(days=2)

    # for each provider get a list of the
    origThreadName = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active()]
    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

        logger.log(u"Searching for any new PROPER releases from " + curProvider.name)

        try:
            curPropers = curProvider.find_propers(search_date)
        except AuthException as e:
            logger.log(u"Authentication error: " + ex(e), logger.DEBUG)
            continue
        except (SocketTimeout, TypeError) as e:
            logger.log(u"Connection timed out (sockets) while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except (requests_exceptions.HTTPError, requests_exceptions.TooManyRedirects) as e:
            logger.log(u"HTTP error while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except requests_exceptions.ConnectionError as e:
            logger.log(u"Connection error while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except requests_exceptions.Timeout as e:
            logger.log(u"Connection timed out while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except requests_exceptions.ContentDecodingError as e:
            # bug fix: this handler used ex(e) in its message but never
            # bound the exception, raising a NameError (or logging a stale
            # `e`) when it fired; bind it like the other handlers
            logger.log(u"Content-Encoding was gzip, but content was not compressed while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.DEBUG)
            continue
        except Exception as e:
            if hasattr(e, 'errno') and e.errno == errno.ECONNRESET:
                logger.log(u"Connection reseted by peer accessing {}".format(curProvider.name), logger.DEBUG)
            else:
                logger.log(u"Unknown exception while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
            continue

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                logger.log(u'find_propers returned a non-proper, we have caught and skipped it.', logger.DEBUG)
                continue

            name = self._genericName(x.name)
            if name not in propers:
                logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

        threading.currentThread().name = origThreadName

    # take the list of unique propers and get it sorted by date, newest first
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:

        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + curProper.name + " into a valid show", logger.DEBUG)
            continue

        if not parse_result.series_name:
            continue

        if not parse_result.episode_numbers:
            logger.log(
                u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
                logger.DEBUG)
            continue

        logger.log(
            u"Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name,
            logger.DEBUG)

        # set the indexerid in the db to the show's indexerid
        curProper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        curProper.indexer = parse_result.show.indexer

        # populate our Proper instance
        curProper.show = parse_result.show
        curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
        curProper.episode = parse_result.episode_numbers[0]
        curProper.release_group = parse_result.release_group
        curProper.version = parse_result.version
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
        curProper.content = None

        # filter release
        bestResult = pickBestResult(curProper, parse_result.show)
        if not bestResult:
            logger.log(u"Proper " + curProper.name + " were rejected by our release filters.", logger.DEBUG)
            continue

        # only get anime proper if it has release group and version
        if bestResult.show.is_anime:
            if not bestResult.release_group and bestResult.version == -1:
                logger.log(u"Proper " + bestResult.name + " doesn't have a release group and version, ignoring it",
                           logger.DEBUG)
                continue

        # check if we actually want this proper (if it's the right quality)
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select(
            "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
            [bestResult.indexerid, bestResult.season, bestResult.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sql_results[0]["status"]))
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if bestResult.show.is_anime:
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])

            oldVersion = int(sql_results[0]["version"])
            oldRelease_group = (sql_results[0]["release_group"])

            if -1 < oldVersion < bestResult.version:
                logger.log(u"Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
            else:
                continue

            if oldRelease_group != bestResult.release_group:
                logger.log(u"Skipping proper from release group: " + bestResult.release_group +
                           ", does not match existing release group: " + oldRelease_group)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
            logger.log(u"Found a proper that we need: " + str(bestResult.name))
            finalPropers.append(bestResult)

    return finalPropers
def _already_postprocessed(self, dir_name, videofile, force):
    """Return True if this release was already post-processed.

    Checks, in order: the release directory name against tv_episodes.release_name,
    the video file name against the same column, and finally the snatch/download
    history for the same episode at an already-downloaded quality.

    :param dir_name: release directory name (str or unicode)
    :param videofile: video file name within the release directory
    :param force: when True and nothing has been processed yet this run, skip the check
    :return: True when already processed (and episode status is reset), else False
    """
    if force and not self.any_vid_processed:
        return False

    # Needed for accessing DB with a unicode dir_name
    if not isinstance(dir_name, unicode):
        dir_name = unicode(dir_name, 'utf_8')

    # Prefer parsing the video file name; fall back to the directory name.
    parse_result = None
    try:
        parse_result = NameParser(try_indexers=True, try_scene_exceptions=True,
                                  convert=True).parse(videofile, cache_result=False)
    except (InvalidNameException, InvalidShowException):
        # Does not parse, move on to directory check
        pass
    if None is parse_result:
        try:
            parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(
                dir_name, cache_result=False)
        except (InvalidNameException, InvalidShowException):
            # If the filename doesn't parse, then return false as last
            # resort. We can assume that unparseable filenames are not
            # processed in the past
            return False

    # Plain show name once something was processed this run, an html link otherwise.
    showlink = (' for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"'
                % (parse_result.show.indexerid, parse_result.show.name),
                parse_result.show.name)[self.any_vid_processed]

    ep_detail_sql = ''
    if parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number:
        # NOTE(review): a truthy check means season 0 (specials) never gets the
        # narrowed history lookup below - confirm whether that is intended.
        ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
            % (str(parse_result.show.indexerid),
               str(parse_result.season_number),
               str(parse_result.episode_numbers[0]))

    # Avoid processing the same directory again if we use a process method <> move
    my_db = db.DBConnection()
    sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name])
    if sql_result:
        self._log_helper(u'Found a release directory%s that has already been processed,<br />.. skipping: %s'
                         % (showlink, dir_name))
        reset_status(parse_result.show.indexerid, parse_result.season_number, parse_result.episode_numbers[0])
        return True
    else:
        # This is needed for video whose name differ from dir_name
        if not isinstance(videofile, unicode):
            videofile = unicode(videofile, 'utf_8')
        sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?',
                                  [videofile.rpartition('.')[0]])
        if sql_result:
            self._log_helper(u'Found a video, but that release%s was already processed,<br />.. skipping: %s'
                             % (showlink, videofile))
            reset_status(parse_result.show.indexerid, parse_result.season_number, parse_result.episode_numbers[0])
            return True

    # Needed if we have downloaded the same episode @ different quality
    search_sql = 'SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history'\
        + ' ON history.showid=tv_episodes.showid'\
        + ' WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode'\
        + ep_detail_sql\
        + ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\
        + ' and history.resource LIKE ?'
    sql_result = my_db.select(search_sql, [u'%' + videofile])
    if sql_result:
        self._log_helper(u'Found a video, but the episode%s is already processed,<br />.. skipping: %s'
                         % (showlink, videofile))
        reset_status(parse_result.show.indexerid, parse_result.season_number, parse_result.episode_numbers[0])
        return True

    return False
def _find_season_quality(self, title, torrent_link, ep_number):
    """Return the season torrent title annotated with the quality found by
    inspecting the torrent's file list.

    :param title: original torrent title
    :param torrent_link: url of the torrent detail page to scrape
    :param ep_number: expected number of episodes in the season
    :return: the (possibly rewritten) title, or None when the torrent should be skipped
    """
    # BUGFIX: a missing comma made 'ts' 'ogv' concatenate into the single
    # extension 'tsogv', so neither .ts nor .ogv files were ever matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv',
                       'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})
        if not file_table:
            return None

        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent: reject torrents whose video
        # count falls outside [ep_number, ep_number * 1.1].
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(
                u"Result " + title + " have " + str(ep_number) +
                " episode and episodes retrived in torrent are " + str(len(videoFiles)),
                logger.DEBUG)
            logger.log(
                u"Result " + title +
                " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                logger.DEBUG)
            return None

        # If the title itself already carries a recognisable quality, keep it as-is.
        if Quality.sceneQuality(title) != Quality.UNKNOWN:
            return title

        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        # Fall back to an assumed quality from the last file examined.
        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None

        logger.log(
            u"Season quality for " + title + " is " + Quality.qualityStrings[quality],
            logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) \
                + ' ' + self._reverseQuality(quality)
        return title

    except Exception:
        # Scrape/parse failure: log with traceback; implicitly returns None.
        logger.log(
            u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
            logger.ERROR)
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.

    name: A string which we want to analyze to determine show info from (unicode)

    Returns a (tvdb_id, season, [episodes]) tuple. The first two may be None and
    episodes may be [] if none were found.
    """
    logger.log(u"Analyzing name " + repr(name))

    to_return = (None, None, [])

    if not name:
        return to_return

    # parse the name to break it into show name, season, and episode
    np = NameParser(file)
    parse_result = np.parse(name)
    self._log("Parsed " + name + " into " + str(parse_result).decode('utf-8'), logger.DEBUG)

    if parse_result.air_by_date:
        # air-by-date releases use a sentinel season of -1 and the air date as "episode"
        season = -1
        episodes = [parse_result.air_date]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    to_return = (None, season, episodes)

    # do a scene reverse-lookup to get a list of all possible names
    name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)
    if not name_list:
        return (None, season, episodes)

    def _finalize(parse_result):
        # Record the release group and (via closure over `name`/`self`) remember
        # which of nzb/folder/file names produced a fully usable parse.
        self.release_group = parse_result.release_group
        # remember whether it's a proper
        if parse_result.extra_info:
            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)',
                                       parse_result.extra_info, re.I) != None
        # if the result is complete then remember that for later
        if parse_result.series_name and parse_result.season_number != None \
                and parse_result.episode_numbers and parse_result.release_group:
            test_name = os.path.basename(name)
            if test_name == self.nzb_name:
                self.good_results[self.NZB_NAME] = True
            elif test_name == self.folder_name:
                self.good_results[self.FOLDER_NAME] = True
            elif test_name == self.file_name:
                self.good_results[self.FILE_NAME] = True
            else:
                logger.log(u"Nothing was good, found " + repr(test_name) +
                           " and wanted either " + repr(self.nzb_name) + ", " +
                           repr(self.folder_name) + ", or " + repr(self.file_name))
        else:
            logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name",
                       logger.DEBUG)
            logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
            logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
            logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)

    # for each possible interpretation of that scene name
    for cur_name in name_list:
        self._log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
        scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
        if scene_id:
            self._log(u"Scene exception lookup got tvdb id " + str(scene_id) + u", using that", logger.DEBUG)
            _finalize(parse_result)
            return (scene_id, season, episodes)

    # see if we can find the name directly in the DB, if so use it
    for cur_name in name_list:
        self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
        db_result = helpers.searchDBForShow(cur_name)
        if db_result:
            self._log(u"Lookup successful, using tvdb id " + str(db_result[0]), logger.DEBUG)
            _finalize(parse_result)
            return (int(db_result[0]), season, episodes)

    # see if we can find the name with a TVDB lookup
    for cur_name in name_list:
        try:
            t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **sickbeard.TVDB_API_PARMS)
            self._log(u"Looking up name " + cur_name + u" on TVDB", logger.DEBUG)
            showObj = t[cur_name]
        except (tvdb_exceptions.tvdb_exception):
            # if none found, search on all languages
            try:
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
                ltvdb_api_parms['search_all_languages'] = True
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **ltvdb_api_parms)
                self._log(u"Looking up name " + cur_name + u" in all languages on TVDB", logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception, IOError):
                pass
            # NOTE(review): this `continue` runs even when the all-languages lookup
            # above succeeded, so that result is silently discarded - confirm intent.
            continue
        except (IOError):
            continue
        self._log(u"Lookup successful, using tvdb id " + str(showObj["id"]), logger.DEBUG)
        _finalize(parse_result)
        return (int(showObj["id"]), season, episodes)

    _finalize(parse_result)
    return to_return
def _find_season_quality(self, title, torrent_id, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

    quality = Quality.UNKNOWN
    file_name = None
    data = None
    has_signature = False
    details_url = '/ajax_details_filelist.php?id=%s' % torrent_id

    # Try each configured mirror until one serves a page with the TPB <title>.
    for idx, url in enumerate(self.urls['config_provider_home_uri']):
        data = self.get_url(url + details_url)
        # The signature is searched in a fixed slice to skip the doctype/head preamble.
        if data and re.search(r'<title>The\sPirate\sBay', data[33:200:]):
            has_signature = True
            break
        else:
            data = None

    if not has_signature:
        logger.log(
            u'Failed to identify a page from ThePirateBay at %s attempted urls (tpb blocked? general network issue or site dead)'
            % len(self.urls['config_provider_home_uri']), logger.ERROR)

    if not data:
        return None

    files_list = re.findall('<td.+>(.*?)</td>', data)

    if not files_list:
        # NOTE(review): an error is logged but execution continues with an empty
        # list (the size check below then rejects the torrent) - confirm a
        # `return None` was not intended here.
        logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)

    # mediaExtensions is presumably a module-level list of video extensions - TODO confirm.
    video_files = filter(lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list)

    # Filtering SingleEpisode/MultiSeason Torrent
    if ep_number > len(video_files) or float(ep_number * 1.1) < len(video_files):
        logger.log(
            u'Result %s has episode %s and total episodes retrieved in torrent are %s'
            % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
        logger.log(
            u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
            % title, logger.DEBUG)
        return None

    # Title already carries a recognisable quality: use it unchanged.
    if Quality.UNKNOWN != Quality.sceneQuality(title):
        return title

    for file_name in video_files:
        quality = Quality.sceneQuality(os.path.basename(file_name))
        if Quality.UNKNOWN != quality:
            break

    # Fall back to an assumed quality from the last file examined.
    if None is not file_name and Quality.UNKNOWN == quality:
        quality = Quality.assumeQuality(os.path.basename(file_name))

    if Quality.UNKNOWN == quality:
        logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
        return None

    try:
        my_parser = NameParser(showObj=self.show)
        parse_result = my_parser.parse(file_name)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(
        u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]),
        logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = '%s S%02d %s' % (parse_result.series_name,
                                 int(parse_result.season_number),
                                 self._reverse_quality(quality))
    return title
def getSearchResults(self, show, season, ep_objs, seasonSearch=False, manualSearch=False):
    """Search this provider for the given episodes and bucket results by episode.

    :param show: show object being searched
    :param season: season number being searched
    :param ep_objs: episode objects to build search strings from
    :param seasonSearch: when True use season search strings instead of episode ones
    :param manualSearch: passed through to wantEpisode
    :return: dict mapping episode number (or MULTI_EP_RESULT / SEASON_RESULT)
             to a list of result objects
    """
    itemList = []
    results = {}

    self._checkAuth()
    self.show = show

    # regex mode 2 relaxes parsing for sports shows
    regexMode = 0
    if show.sports:
        regexMode = 2

    for ep_obj in ep_objs:
        logger.log(u'Searching "%s" for "%s" as "%s"'
                   % (self.name, ep_obj.prettyName(), ep_obj.scene_prettyName()))
        if seasonSearch:
            for curString in self._get_season_search_strings(ep_obj):
                itemList += self._doSearch(curString)
        else:
            for curString in self._get_episode_search_strings(ep_obj):
                itemList += self._doSearch(curString)

    for item in itemList:
        (title, url) = self._get_title_and_url(item)
        quality = self.getQuality(item)

        # parse the file name
        try:
            myParser = NameParser(False, regexMode=regexMode)
            parse_result = myParser.parse(title).convert()
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode",
                       logger.WARNING)
            continue

        if not show.air_by_date:
            # this check is meaningless for non-season searches
            if (parse_result.season_number is not None and parse_result.season_number != season) \
                    or (parse_result.season_number is None and season != 1):
                logger.log(u"The result " + title + " doesn't seem to be a valid episode for season "
                           + str(season) + ", ignoring", logger.DEBUG)
                continue

            # we just use the existing info for normal searches
            actual_season = parse_result.season_number
            actual_episodes = parse_result.episode_numbers
        else:
            if show.air_by_date and not parse_result.air_by_date:
                logger.log(u"This is supposed to be an air-by-date search but the result " + title
                           + " didn't parse as one, skipping it", logger.DEBUG)
                continue

            # map the air date back to a season/episode pair via the DB
            myDB = db.DBConnection()
            if parse_result.air_by_date:
                sql_results = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [show.indexerid, parse_result.air_date.toordinal()])
                if len(sql_results) != 1:
                    logger.log(u"Tried to look up the date for the episode " + title
                               + " but the database didn't give proper results, skipping it",
                               logger.WARNING)
                    continue
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            epObj = show.getEpisode(actual_season, epNo)
            if not epObj or not show.wantEpisode(epObj.season, epObj.episode, quality,
                                                 manualSearch=manualSearch):
                wantEp = False
                break

        # NOTE(review): epObj is unbound here if actual_episodes is empty -
        # confirm callers can never produce an empty episode list.
        if not epObj:
            logger.log(u"Ignoring result " + title + " because episode scene info is invalid.")
            continue

        if not wantEp:
            logger.log(u"Ignoring result " + title + " because we don't want an episode that is "
                       + Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObjs = [epObj]
        result = self.getResult(epObjs)
        result.url = url
        result.name = title
        result.quality = quality
        result.provider = self
        result.content = None

        if len(epObjs) == 1:
            epNum = epObjs[0].episode
        elif len(epObjs) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(u"Separating multi-episode result to check for later - result contains episodes: "
                       + str(parse_result.episode_numbers), logger.DEBUG)
        elif len(epObjs) == 0:
            epNum = SEASON_RESULT
            result.extraInfo = [show]
            logger.log(u"Separating full season result to check for later", logger.DEBUG)

        if epNum in results:
            results[epNum].append(result)
        else:
            # BUGFIX: this previously rebound `results` to a brand new dict
            # (results = {epNum: [result]}), discarding every result gathered
            # so far; now it adds the bucket like the sibling providers do.
            results[epNum] = [result]

    return results
def findSeasonResults(self, show, season):
    """Search this provider for a whole season and bucket results by episode.

    :param show: show object being searched
    :param season: season number
    :return: dict mapping episode number (or MULTI_EP_RESULT / SEASON_RESULT)
             to a list of result objects
    """
    itemList = []
    results = {}

    for curString in self._get_season_search_strings(show, season):
        itemList += self._doSearch(curString)

    for item in itemList:
        (title, url) = self._get_title_and_url(item)
        quality = self.getQuality(item)

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode",
                       logger.WARNING)
            continue

        if not show.air_by_date:
            # this check is meaningless for non-season searches
            if (parse_result.season_number != None and parse_result.season_number != season) \
                    or (parse_result.season_number == None and season != 1):
                logger.log(u"The result " + title + " doesn't seem to be a valid episode for season "
                           + str(season) + ", ignoring", logger.DEBUG)
                continue

            # we just use the existing info for normal searches
            actual_season = season
            actual_episodes = parse_result.episode_numbers
        else:
            if not parse_result.air_by_date:
                logger.log(u"This is supposed to be an air-by-date search but the result " + title
                           + " didn't parse as one, skipping it", logger.DEBUG)
                continue

            # map the parsed air date back to a season/episode pair via the DB
            myDB = db.DBConnection()
            sql_results = myDB.select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                [show.tvdbid, parse_result.air_date.toordinal()])

            if len(sql_results) != 1:
                logger.log(u"Tried to look up the date for the episode " + title
                           + " but the database didn't give proper results, skipping it",
                           logger.WARNING)
                continue

            actual_season = int(sql_results[0]["season"])
            actual_episodes = [int(sql_results[0]["episode"])]

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not show.wantEpisode(actual_season, epNo, quality):
                wantEp = False
                break

        if not wantEp:
            logger.log(u"Ignoring result " + title + " because we don't want an episode that is "
                       + Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(show.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.url = url
        result.name = title
        result.quality = quality
        result.provider = self
        # only torrent providers with a non-magnet url fetch content up-front
        result.content = self.getURL(result.url) \
            if self.providerType == GenericProvider.TORRENT \
            and not result.url.startswith('magnet') else None

        if len(epObj) == 1:
            epNum = epObj[0].episode
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(u"Separating multi-episode result to check for later - result contains episodes: "
                       + str(parse_result.episode_numbers), logger.DEBUG)
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            result.extraInfo = [show]
            logger.log(u"Separating full season result to check for later", logger.DEBUG)

        if epNum in results:
            results[epNum].append(result)
        else:
            results[epNum] = [result]

    return results
def findEpisode(self, episode, manualSearch=False):
    """Search this provider for a single episode, using scene numbering.

    :param episode: episode object to search for
    :param manualSearch: when True, search the provider even if the cache was empty
    :return: list of result objects
    """
    self._checkAuth()

    # create a copy of the episode, using scene numbering
    episode_scene = copy.copy(episode)
    episode_scene.convertToSceneNumbering()

    logger.log(u'Searching "%s" for "%s" as "%s"'
               % (self.name, episode.prettyName(), episode_scene.prettyName()))

    self.cache.updateCache()
    results = self.cache.searchCache(episode_scene, manualSearch)
    logger.log(u"Cache results: " + str(results), logger.DEBUG)
    logger.log(u"manualSearch: " + str(manualSearch), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    itemList = []
    for cur_search_string in self._get_episode_search_strings(episode_scene):
        itemList += self._doSearch(cur_search_string, show=episode.show)

    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # skip urls the user has blacklisted
        if self.urlIsBlacklisted(url):
            logger.log(u'Ignoring %s as the url %s is blacklisted' % (title, url), logger.DEBUG)
            continue

        # parse the file name
        try:
            myParser = NameParser()
            parse_result = myParser.parse(title, fix_scene_numbering=True)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode",
                       logger.WARNING)
            continue

        if episode.show.air_by_date:
            if parse_result.air_date != episode.airdate:
                logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate)
                           + ", skipping it", logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season \
                or episode.episode not in parse_result.episode_numbers:
            logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x"
                       + str(episode.episode) + ", skipping it", logger.DEBUG)
            continue

        quality = self.getQuality(item)

        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(u"Ignoring result " + title + " because we don't want an episode that is "
                       + Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        result = self.getResult([episode])
        result.url = url
        result.name = title
        result.quality = quality

        results.append(result)

    return results
def findEpisode(self, episode, manualSearch=False):
    """Search the DailyTvTorrents API for a single episode.

    :param episode: episode object to search for
    :param manualSearch: when True, query the API even if the cache was empty
    :return: list of result objects
    """
    self._checkAuth()

    logger.log(u"Searching " + self.name + " for " + episode.prettyName())

    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: " + str(results), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    # create a copy of the episode, using scene numbering
    episode_scene = copy.copy(episode)
    episode_scene.convertToSceneNumbering()

    simple_show_name = self._get_simple_name_for_show(episode.show)
    if not simple_show_name:
        logger.log(u"Show %s not known to dtvt, not running any further search."
                   % (episode.show.name), logger.MESSAGE)
        return results

    query_params = {'show_name': simple_show_name}
    if episode.show.air_by_date:
        query_params['episode_num'] = str(episode.airdate)
    else:
        query_params['episode_num'] = 'S%02dE%02d' % (episode.season, episode.episode)

    api_result = self._api_call('1.0/torrent.getInfosAll', query_params)

    if api_result:
        for cur_result in api_result:
            # Example API result entry:
            #{
            # "name": "Futurama.S06E23.720p.HDTV.x264-IMMERSE",
            # "quality": "720",
            # "age": 47406999,
            # "data_size": 369900878,
            # "seeds": 2,
            # "leechers": 0,
            # "link": "http:\/\/www.dailytvtorrents.org\/dl\/9pa\/Futurama.S06E23.720p.HDTV.x264-IMMERSE.DailyTvTorrents.torrent"
            #}
            title = cur_result['name']
            url = cur_result['link']

            try:
                myParser = NameParser()
                parse_result = myParser.parse(title, True)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode",
                           logger.WARNING)
                continue

            if episode.show.air_by_date:
                if parse_result.air_date != episode.airdate:
                    logger.log("Episode " + title + " didn't air on " + str(episode.airdate)
                               + ", skipping it", logger.DEBUG)
                    continue
            elif parse_result.season_number != episode.season \
                    or episode.episode not in parse_result.episode_numbers:
                logger.log("Episode " + title + " isn't " + str(episode.season) + "x"
                           + str(episode.episode) + ", skipping it", logger.DEBUG)
                continue

            #quality = cur_result['quality'] - actually, we get a bit more info
            # from the torrent name, so let's use that instead.
            quality = Quality.nameQuality(title)

            if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
                logger.log(u"Ignoring result " + title + " because we don't want an episode that is "
                           + Quality.qualityStrings[quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            result = self.getResult([episode])
            result.url = url
            result.name = title
            result.quality = quality

            results.append(result)
    else:
        logger.log(u"No result from api call 1.0/torrent.getInfosAll", logger.WARNING)

    return results
def validateDir(path, dirName, nzbNameOriginal, failed):
    """Return True when the download folder is valid for post-processing.

    Rejects failed/undersized/still-unpacking folders, hidden folders, and
    folders already inside a show directory; then accepts the folder if any
    contained video, directory name, or (optionally) rar archive parses as
    a known show release.

    :param path: parent download directory
    :param dirName: folder being validated
    :param nzbNameOriginal: original NZB name, used when flagging failures
    :param failed: whether the download is already marked as failed
    """
    global process_result, returnStr

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    # Marker prefixes set by the downloader on the folder name.
    if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
        returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
        returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).upper().startswith('_UNPACK'):
        returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal)
        return False

    if helpers.is_hidden_folder(dirName):
        returnStr += logHelper(u"Ignoring hidden folder: " + dirName, logger.DEBUG)
        return False

    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")
    for sqlShow in sqlResults:
        if dirName.lower().startswith(ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) \
                or dirName.lower() == ek.ek(os.path.realpath, sqlShow["location"]).lower():
            returnStr += logHelper(
                u"You're trying to post process an episode that's already been moved to its show dir, skipping",
                logger.ERROR)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dirName),
                                                   topdown=False):
        allDirs += processDir
        allFiles += fileList

    videoFiles = filter(helpers.isMediaFile, allFiles)
    allDirs.append(dirName)

    # check if the dir have at least one tv video file
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    # FIX: loop variable renamed from `dir`, which shadowed the builtin
    for cur_dir in allDirs:
        try:
            NameParser().parse(cur_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickbeard.UNPACK:
        # Search for packed release
        packedFiles = filter(helpers.isRarFile, allFiles)
        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    return False
def searchProviders(self, show, season, episode=None, manualSearch=False):
    """Search this provider for wanted episodes of a season (or one episode).

    :param show: show object being searched
    :param season: season number
    :param episode: optional single episode number; None searches the whole season
    :param manualSearch: when True, keep searching even when the cache is empty
    :return: list of result objects (or the cache's results)
    """
    itemList = []
    results = {}

    logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))

    # gather all episodes for season and then pick out the wanted episodes and compare to determin if we want whole season or just a few episodes
    if episode is None:
        seasonEps = show.getAllEpisodes(season)
        wantedEps = [x for x in seasonEps
                     if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]
    else:
        wantedEps = [show.getEpisode(season, episode)]

    for ep_obj in wantedEps:
        # NOTE(review): `episode` is rebound here to the scene episode *number*,
        # yet later code accesses episode.show / episode.airdate / episode.episode
        # as if it were still an episode object - confirm this is the intent.
        season = ep_obj.scene_season
        episode = ep_obj.scene_episode

        self.cache.updateCache()
        results = self.cache.searchCache(episode, manualSearch)
        logger.log(u"Cache results: " + str(results), logger.DEBUG)

        # if we got some results then use them no matter what.
        # OR
        # return anyway unless we're doing a manual search
        # NOTE(review): this returns from inside the wantedEps loop, so only the
        # first wanted episode is ever considered here - confirm intent.
        if results or not manualSearch:
            return results

        itemList += self.getURL(self.search_url,
                                post_data=self._make_post_data_JSON(show=show,
                                                                    season=season,
                                                                    episode=episode),
                                json=True)

    for parsedJSON in itemList:
        if not parsedJSON:
            logger.log(u"No data returned from " + self.search_url, logger.ERROR)
            # NOTE(review): aborts the whole result set on the first empty page.
            return []

        if self._checkAuthFromData(parsedJSON):
            # from here on `results` is a list of result objects
            results = []
            if parsedJSON and 'data' in parsedJSON:
                items = parsedJSON['data']
            else:
                logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it",
                           logger.ERROR)
                items = []

            for item in items:
                (title, url) = self._get_title_and_url(item)

                # parse the file name
                try:
                    myParser = NameParser()
                    parse_result = myParser.parse(title)
                except InvalidNameException:
                    logger.log(u"Unable to parse the filename " + title + " into a valid episode",
                               logger.WARNING)
                    continue

                if episode.show.air_by_date or episode.sports:
                    if parse_result.air_date != episode.airdate:
                        logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate)
                                   + ", skipping it", logger.DEBUG)
                        continue
                elif parse_result.season_number != episode.season \
                        or episode.episode not in parse_result.episode_numbers:
                    logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x"
                               + str(episode.episode) + ", skipping it", logger.DEBUG)
                    continue

                quality = self.getQuality(item)

                if not episode.show.wantEpisode(episode.season, episode.episode, quality,
                                                manualSearch):
                    logger.log(u"Ignoring result " + title
                               + " because we don't want an episode that is "
                               + Quality.qualityStrings[quality], logger.DEBUG)
                    continue

                logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

                result = self.getResult([episode])
                result.url = url
                result.name = title
                result.quality = quality

                results.append(result)

    return results
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0): # check if we passed in a parsed result or should we try and create one if not parse_result: # create showObj from indexer_id if available showObj = None if indexer_id: showObj = Show.find(sickbeard.showList, indexer_id) try: myParser = NameParser(showObj=showObj) parse_result = myParser.parse(name) except InvalidNameException: logger.log( u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG) return None except InvalidShowException: logger.log( u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG) return None if not parse_result or not parse_result.series_name: return None # if we made it this far then lets add the parsed result to cache for usager later on season = parse_result.season_number if parse_result.season_number else 1 episodes = parse_result.episode_numbers if season and episodes: # store episodes as a seperated string episodeText = "|" + "|".join(map(str, episodes)) + "|" # get the current timestamp curTimestamp = int( time.mktime(datetime.datetime.today().timetuple())) # get quality of release quality = parse_result.quality name = ss(name) # get release group release_group = parse_result.release_group # get version version = parse_result.version logger.log( u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) return [ "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)", [ name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version ] ]
def _validate_dir(self, path, dir_name, nzb_name_original, failed):
    """Return True when the download folder is valid for post-processing.

    Mirrors validateDir: rejects failed/undersized/unpacking/hidden folders
    and folders already inside a show dir, then accepts the folder if any
    contained video, directory name, or rar archive parses as a release.

    :param path: parent download directory
    :param dir_name: folder being validated
    :param nzb_name_original: original NZB name, used when flagging failures
    :param failed: whether the download is already marked as failed
    """
    self._log_helper(u'Processing dir: ' + dir_name)

    # Marker prefixes set by the downloader on the folder name.
    if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'):
        self._log_helper(u'The directory name indicates it failed to extract.')
        failed = True
    elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'):
        self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.')
        failed = True
    elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'):
        self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.')
        return False

    if failed:
        self._process_failed(os.path.join(path, dir_name), nzb_name_original)
        return False

    if helpers.is_hidden_folder(dir_name):
        self._log_helper(u'Ignoring hidden folder: ' + dir_name)
        return False

    # make sure the directory isn't inside a show directory
    my_db = db.DBConnection()
    sql_results = my_db.select('SELECT * FROM tv_shows')
    for sqlShow in sql_results:
        if dir_name.lower().startswith(ek.ek(os.path.realpath, sqlShow['location']).lower() + os.sep)\
                or dir_name.lower() == ek.ek(os.path.realpath, sqlShow['location']).lower():
            self._log_helper(
                u'Found an episode that has already been moved to its show dir, skipping',
                logger.ERROR)
            return False

    # Get the videofile list for the next checks
    all_files = []
    all_dirs = []
    for process_path, process_dir, fileList in ek.ek(os.walk,
                                                     ek.ek(os.path.join, path, dir_name),
                                                     topdown=False):
        all_dirs += process_dir
        all_files += fileList

    video_files = filter(helpers.isMediaFile, all_files)
    all_dirs.append(dir_name)

    # check if the directory have at least one tv video file
    for video in video_files:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    # fall back to trying each directory name as a release name
    for directory in all_dirs:
        try:
            NameParser().parse(directory, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickbeard.UNPACK:
        # Search for packed release
        packed_files = filter(helpers.isRarFile, all_files)
        for packed in packed_files:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    return False
def findEpisode(self, episode, manualSearch=False): self._checkAuth() logger.log(u"Searching " + self.name + " for " + episode.prettyName()) self.cache.updateCache() results = self.cache.searchCache(episode, manualSearch) logger.log(u"Cache results: " + str(results), logger.DEBUG) # if we got some results then use them no matter what. # OR # return anyway unless we're doing a manual search if results or not manualSearch: return results itemList = [] for cur_search_string in self._get_episode_search_strings(episode): itemList += self._doSearch(cur_search_string, show=episode.show) for item in itemList: (title, url) = self._get_title_and_url(item) # parse the file name try: myParser = NameParser() parse_result = myParser.parse(title) except InvalidNameException: logger.log( u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING) continue if episode.show.air_by_date: if parse_result.air_date != episode.airdate: logger.log( "Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG) continue elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers: logger.log( "Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG) continue quality = self.getQuality(item) if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch): logger.log( u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG) continue logger.log(u"Found result " + title + " at " + url, logger.DEBUG) result = self.getResult([episode]) result.url = url result.name = title result.quality = quality result.provider = self result.content = self.getURL(result.url) \ if self.providerType == GenericProvider.TORRENT \ and not result.url.startswith('magnet') else None results.append(result) return results
def validate_dir(process_path, release_name, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param process_path: Directory to check
    :param release_name: Original NZB/Torrent name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    result.output += log_helper("Processing folder " + process_path, logger.DEBUG)

    # directory-name markers left behind by downloaders/extractors
    upper_name = ek(os.path.basename, process_path).upper()
    if upper_name.startswith('_FAILED_') or upper_name.endswith('_FAILED_'):
        result.output += log_helper(
            "The directory name indicates it failed to extract.",
            logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNDERSIZED_') or upper_name.endswith('_UNDERSIZED_'):
        result.output += log_helper(
            "The directory name indicates that it was previously rejected for being undersized.",
            logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNPACK') or upper_name.endswith('_UNPACK'):
        result.output += log_helper(
            "The directory name indicates that this release is in the process of being unpacked.",
            logger.DEBUG)
        result.missed_files.append("{0} : Being unpacked".format(process_path))
        return False

    if failed:
        process_failed(process_path, release_name, result)
        result.missed_files.append("{0} : Failed download".format(process_path))
        return False

    # skip hidden folders unless the folder IS the download dir itself
    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(process_path) != helpers.real_path(
            sickbeard.TV_DOWNLOAD_DIR) and helpers.is_hidden_folder(process_path):
        result.output += log_helper(
            "Ignoring hidden folder: {0}".format(process_path), logger.DEBUG)
        result.missed_files.append("{0} : Hidden folder".format(process_path))
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        # NOTE: rows are keyed with bytes keys (b"location") in this DB layer
        if process_path.lower().startswith(
                ek(os.path.realpath, sqlShow[b"location"]).lower() + os.sep) or \
                process_path.lower() == ek(os.path.realpath, sqlShow[b"location"]).lower():
            result.output += log_helper(
                "Cannot process an episode that's already been moved to its show dir, skipping " + process_path,
                logger.WARNING)
            return False

    # walk bottom-up looking for anything processable (media, optionally rars)
    for current_directory, directory_names, file_names in ek(
            os.walk, process_path, topdown=False,
            followlinks=sickbeard.PROCESSOR_FOLLOW_SYMLINKS):
        sync_files = filter(is_sync_file, file_names)
        # postpone while the client is still syncing this folder
        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
            result.output += log_helper(
                "Found temporary sync files: {0} in path: {1}".format(
                    sync_files, ek(os.path.join, process_path, sync_files[0])))
            result.output += log_helper(
                "Skipping post processing for folder: {0}".format(process_path))
            result.missed_files.append("{0} : Sync files found".format(
                ek(os.path.join, process_path, sync_files[0])))
            continue

        found_files = filter(helpers.is_media_file, file_names)
        if sickbeard.UNPACK == sickbeard.UNPACK_PROCESS_CONTENTS:
            found_files += filter(helpers.is_rar_file, file_names)

        # the folder name itself may parse as a release name
        if current_directory != sickbeard.TV_DOWNLOAD_DIR and found_files:
            found_files.append(ek(os.path.basename, current_directory))

        # the dir is valid as soon as one name parses as a known episode
        for found_file in found_files:
            try:
                NameParser().parse(found_file, cache_result=False)
            except (InvalidNameException, InvalidShowException) as e:
                pass
            else:
                return True

    result.output += log_helper(
        "{0} : No processable items found in folder".format(process_path),
        logger.DEBUG)
    return False
def split_result(obj):
    """
    Split obj into separate episodes.

    Downloads the season NZB, splits it into per-episode NZBs and builds a
    result object for each wanted episode.

    :param obj: to search for results
    :return: a list of episode objects or an empty list
    """
    url_data = helpers.getURL(obj.url, session=helpers.make_session(), returns='content')
    if url_data is None:
        logger.log(
            u"Unable to load url " + obj.url + ", can't download season NZB",
            logger.ERROR)
        return []

    # parse the season ep name
    try:
        parsed_obj = NameParser(False, showObj=obj.show).parse(obj.name)
    except (InvalidNameException, InvalidShowException) as error:
        logger.log(u"{}".format(error), logger.DEBUG)
        return []

    # bust it up
    season = 1 if parsed_obj.season_number is None else parsed_obj.season_number

    separate_nzbs, xmlns = get_season_nzbs(obj.name, url_data, season)

    result_list = []

    # TODO: Re-evaluate this whole section
    # If we have valid results and hit an exception, we ignore the results found so far.
    # Maybe we should return the results found or possibly continue with the next iteration of the loop
    # Also maybe turn this into a function and generate the results_list with a list comprehension instead
    for new_nzb in separate_nzbs:
        logger.log(u"Split out " + new_nzb + " from " + obj.name, logger.DEBUG)  # pylint: disable=no-member

        # parse the name
        try:
            parsed_obj = NameParser(False, showObj=obj.show).parse(new_nzb)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log(u"{}".format(error), logger.DEBUG)
            return []

        # make sure the result is sane
        if (parsed_obj.season_number != season) or (parsed_obj.season_number is None and season != 1):  # pylint: disable=no-member
            logger.log(
                u"Found " + new_nzb + " inside " + obj.name +
                " but it doesn't seem to belong to the same season, ignoring it",
                logger.WARNING)
            continue
        elif len(parsed_obj.episode_numbers) == 0:  # pylint: disable=no-member
            logger.log(
                u"Found " + new_nzb + " inside " + obj.name +
                " but it doesn't seem to be a valid episode NZB, ignoring it",
                logger.WARNING)
            continue

        # skip the split NZB if any of its episodes are unwanted
        # (obj.extraInfo[0] holds the show object here)
        want_ep = True
        for ep_num in parsed_obj.episode_numbers:
            if not obj.extraInfo[0].wantEpisode(season, ep_num, obj.quality):
                logger.log(u"Ignoring result: " + new_nzb, logger.DEBUG)
                want_ep = False
                break
        if not want_ep:
            continue

        # get all the associated episode objects
        ep_obj_list = [
            obj.extraInfo[0].getEpisode(season, ep)
            for ep in parsed_obj.episode_numbers
        ]

        # make a result
        cur_obj = classes.NZBDataSearchResult(ep_obj_list)
        cur_obj.name = new_nzb
        cur_obj.provider = obj.provider
        cur_obj.quality = obj.quality
        cur_obj.extraInfo = [create_nzb_string(separate_nzbs[new_nzb], xmlns)]

        result_list.append(cur_obj)

    return result_list
def _analyze_name(self, name, file=True):
    """
    Takes a name and tries to figure out a show, season, and episode from it.
    Returns a (tvdb_id, season, [episodes]) tuple. The first two may be None
    and episodes may be [] if none were found.
    """

    logger.log(u"Analyzing name " + repr(name))

    to_return = (None, None, [])

    if not name:
        return to_return

    # parse the name to break it into show name, season, and episode
    np = NameParser(file)
    parse_result = np.parse(name)
    self._log(
        "Parsed " + name + " into " + str(parse_result).decode('utf-8'),
        logger.DEBUG)

    # air-by-date shows are flagged with season -1 and the airdate as the "episode"
    if parse_result.air_by_date:
        season = -1
        episodes = [parse_result.air_date]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    to_return = (None, season, episodes)

    # do a scene reverse-lookup to get a list of all possible names
    name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)

    if not name_list:
        return (None, season, episodes)

    def _finalize(parse_result):
        # remember the release group and whether the name is a proper/repack
        self.release_group = parse_result.release_group
        if parse_result.extra_info:
            self.is_proper = re.search(
                '(^|[\. _-])(proper|repack)([\. _-]|$)',
                parse_result.extra_info, re.I) != None

    # for each possible interpretation of that scene name
    for cur_name in name_list:
        self._log(u"Checking scene exceptions for a match on " + cur_name,
                  logger.DEBUG)
        scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
        if scene_id:
            self._log(
                u"Scene exception lookup got tvdb id " + str(scene_id) +
                u", using that", logger.DEBUG)
            _finalize(parse_result)
            return (scene_id, season, episodes)

    # see if we can find the name directly in the DB, if so use it
    for cur_name in name_list:
        self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
        db_result = helpers.searchDBForShow(cur_name)
        if db_result:
            self._log(
                u"Lookup successful, using tvdb id " + str(db_result[0]),
                logger.DEBUG)
            _finalize(parse_result)
            return (int(db_result[0]), season, episodes)

    # see if we can find the name with a TVDB lookup
    for cur_name in name_list:
        try:
            t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                              **sickbeard.TVDB_API_PARMS)
            self._log(u"Looking up name " + cur_name + u" on TVDB", logger.DEBUG)
            showObj = t[cur_name]
        except (tvdb_exceptions.tvdb_exception), e:
            # if none found, search on all languages
            try:
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
                ltvdb_api_parms['search_all_languages'] = True
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                  **ltvdb_api_parms)
                self._log(
                    u"Looking up name " + cur_name +
                    u" in all languages on TVDB", logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception, IOError), e:
                pass
            # NOTE(review): the visible source ends here with this `continue`;
            # the original function appears truncated (no use of showObj and no
            # final `return to_return`) — confirm against upstream before relying
            # on the tail of this method.
            continue
def find_search_results(
        self, show, episodes, search_mode,  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
        manual_search=False, download_current_quality=False):
    """Search the provider for the given episodes and bucket the results.

    :param show: show object being searched
    :param episodes: list of episode objects to find
    :param search_mode: 'sponly' (season pack) or 'eponly' (single episode)
    :param manual_search: True when triggered by the user
    :param download_current_quality: allow re-downloading current quality
    :return: dict mapping episode number (or MULTI_EP_RESULT/SEASON_RESULT)
             to a list of result objects
    """
    self._check_auth()
    self.show = show

    results = {}
    items_list = []
    searched_scene_season = None

    for episode in episodes:
        # serve cached results per-episode when available
        cache_result = self.cache.search_cache(
            episode, manual_search=manual_search,
            down_cur_quality=download_current_quality)
        if cache_result:
            if episode.episode not in results:
                results[episode.episode] = cache_result
            else:
                results[episode.episode].extend(cache_result)
            continue

        # for season-pack mode, only search each scene season once
        if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
            continue

        search_strings = []
        searched_scene_season = episode.scene_season

        if len(episodes) > 1 and search_mode == 'sponly':
            search_strings = self._get_season_search_strings(episode)
        elif search_mode == 'eponly':
            search_strings = self._get_episode_search_strings(episode)

        for search_string in search_strings:
            items_list += self.search(search_string, ep_obj=episode)

    # everything satisfied from cache — done
    if len(results) == len(episodes):
        return results

    if items_list:
        # bucket items by quality (best first); unknown quality goes last
        items = {}
        unknown_items = []

        for item in items_list:
            quality = self.get_quality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                unknown_items.append(item)
            else:
                if quality not in items:
                    items[quality] = []
                items[quality].append(item)

        items_list = list(
            chain(*[
                v for (k_, v) in sorted(six.iteritems(items), reverse=True)
            ]))
        items_list += unknown_items

    cl = []

    for item in items_list:
        (title, url) = self._get_title_and_url(item)

        try:
            parse_result = NameParser(
                parse_method=('normal', 'anime')[show.is_anime]).parse(title)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log("{0}".format(error), logger.DEBUG)
            continue

        show_object = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version
        add_cache_entry = False

        if not (show_object.air_by_date or show_object.sports):
            if search_mode == 'sponly':
                # season pack results must not carry episode numbers,
                # and must be for a season we are actually searching
                if parse_result.episode_numbers:
                    logger.log(
                        'This is supposed to be a season pack search but the result {0} is not a valid season pack, skipping it'.format(title),
                        logger.DEBUG)
                    add_cache_entry = True
                elif not [
                    ep for ep in episodes
                    if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]
                ]:
                    logger.log(
                        'This season result {0} is for a season we are not searching for, skipping it'.format(title),
                        logger.DEBUG)
                    add_cache_entry = True
            else:
                # episode search: accept either a scene/indexer SxxEyy match
                # or (for anime) an absolute-number match
                if not all([
                    # pylint: disable=bad-continuation
                    parse_result.season_number is not None,
                    parse_result.episode_numbers,
                    [
                        ep for ep in episodes
                        if (ep.season, ep.scene_season)[ep.show.is_scene] ==
                        (parse_result.season_number, parse_result.scene_season)[ep.show.is_scene]
                        and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers
                    ]
                ]) and not all([
                    # fallback for anime on absolute numbering
                    parse_result.is_anime,
                    parse_result.ab_episode_numbers is not None,
                    [
                        ep for ep in episodes
                        if ep.show.is_anime and
                        ep.absolute_number in parse_result.ab_episode_numbers
                    ]
                ]):
                    logger.log(
                        'The result {0} doesn\'t seem to match an episode that we are currently trying to snatch, skipping it'.format(title),
                        logger.DEBUG)
                    add_cache_entry = True

            if not add_cache_entry:
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports: map the parsed airdate back to season/episode
            same_day_special = False

            if not parse_result.is_air_by_date:
                logger.log(
                    'This is supposed to be a date search but the result {0} didn\'t parse as one, skipping it'.format(title),
                    logger.DEBUG)
                add_cache_entry = True
            else:
                air_date = parse_result.air_date.toordinal()
                db = DBConnection()
                sql_results = db.select(
                    'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                    [show_object.indexerid, air_date])

                # two rows on the same date usually means a season-0 special
                # shares the airdate with a regular episode — prefer the regular one
                if len(sql_results) == 2:
                    if int(sql_results[0][b'season']) == 0 and int(sql_results[1][b'season']) != 0:
                        actual_season = int(sql_results[1][b'season'])
                        actual_episodes = [int(sql_results[1][b'episode'])]
                        same_day_special = True
                    elif int(sql_results[1][b'season']) == 0 and int(sql_results[0][b'season']) != 0:
                        actual_season = int(sql_results[0][b'season'])
                        actual_episodes = [int(sql_results[0][b'episode'])]
                        same_day_special = True
                elif len(sql_results) != 1:
                    logger.log(
                        'Tried to look up the date for the episode {0} but the database didn\'t give proper results, skipping it'.format(title),
                        logger.WARNING)
                    add_cache_entry = True

            if not add_cache_entry and not same_day_special:
                actual_season = int(sql_results[0][b'season'])
                actual_episodes = [int(sql_results[0][b'episode'])]

        if add_cache_entry:
            # result wasn't usable now, but cache it for later
            logger.log(
                'Adding item from search to cache: {0}'.format(title),
                logger.DEBUG)
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            ci = self.cache._add_cache_entry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # respect the show's quality settings for every episode in the result
        episode_wanted = True

        for episode_number in actual_episodes:
            if not show_object.wantEpisode(actual_season, episode_number,
                                           quality, manual_search,
                                           download_current_quality):
                episode_wanted = False
                break

        if not episode_wanted:
            logger.log('Ignoring result {0}.'.format(title), logger.DEBUG)
            continue

        logger.log('Found result {0} at {1}'.format(title, url), logger.DEBUG)

        episode_object = []
        for current_episode in actual_episodes:
            episode_object.append(
                show_object.getEpisode(actual_season, current_episode))

        result = self.get_result(episode_object)
        result.show = show_object
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        # classify the result: single episode, multi-episode, or full season
        if len(episode_object) == 1:
            episode_number = episode_object[0].episode
            logger.log('Single episode result.', logger.DEBUG)
        elif len(episode_object) > 1:
            episode_number = MULTI_EP_RESULT
            logger.log(
                'Separating multi-episode result to check for later - result contains episodes: {0}'.format(parse_result.episode_numbers),
                logger.DEBUG)
        elif len(episode_object) == 0:
            episode_number = SEASON_RESULT
            logger.log('Separating full season result to check for later',
                       logger.DEBUG)

        if episode_number not in results:
            results[episode_number] = [result]
        else:
            results[episode_number].append(result)

    if cl:
        # pylint: disable=protected-access
        # Access to a protected member of a client class
        cache_db = self.cache._get_db()
        cache_db.mass_action(cl)

    return results
def _add_torrent_uri(self, result):
    """Add a torrent to Transmission and mark unwanted files inside it.

    Adds the torrent (unless already downloading), then inspects its file
    list with a NameParser and flags files that don't match the wanted
    episodes as 'files-unwanted'.

    :param result: search result object with .url, .hash, .show, .episodes
    :return: True on success, False on any Transmission/parse error
    """
    is_not_downloading = False
    transmission_error = False
    if not self._torrent_is_downloading(result):
        arguments = {
            'filename': result.url,
            'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
            'download-dir': sickbeard.TORRENT_PATH
        }
        post_data = json.dumps({
            'arguments': arguments,
            'method': 'torrent-add',
        })
        self._request(method='post', data=post_data)

        if not self.response.json()['result'] == "success":
            self.remove_torrent_downloaded(result.hash)
            transmission_error = True
            return False
        is_not_downloading = True

    file_list = self._get_file_list_in_torrent(result)
    if not file_list:
        self.remove_torrent_downloaded(result.hash)
        transmission_error = True
        return False

    wantedFile = []
    unwantedFile = []
    for epObj in result.episodes:
        index = 0
        for name_file in file_list['arguments']['torrents'][0]['files']:
            try:
                # strip the torrent's top-level folder from the file name
                if '/' in name_file["name"]:
                    name_file["name"] = name_file["name"].split('/')[1]
                myParser = NameParser(showObj=result.show, convert=True)
                parse_result = myParser.parse(name_file["name"])
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + str(name_file["name"]) +
                    " into a valid episode", logger.DEBUG)
                self.remove_torrent_downloaded(result.hash)
                transmission_error = True
                return False
            except InvalidShowException:
                logger.log(
                    u"Unable to parse the filename " + str(name_file["name"]) +
                    " into a valid show", logger.DEBUG)
                self.remove_torrent_downloaded(result.hash)
                transmission_error = True
                return False

            if not parse_result or not parse_result.series_name:
                continue

            if epObj.episode in parse_result.episode_numbers and epObj.season == parse_result.season_number:
                wantedFile.append(index)
            else:
                unwantedFile.append(index)
            index += 1

    if is_not_downloading:
        if unwantedFile:
            arguments = {
                'ids': [result.hash],
                'files-unwanted': unwantedFile
            }
            post_data = json.dumps({
                'arguments': arguments,
                'method': 'torrent-set',
            })
            self._request(method='post', data=post_data)
            if not self.response.json()['result'] == "success":
                self.remove_torrent_downloaded(result.hash)
                transmission_error = True
                return False
        else:
            if wantedFile:
                arguments = {'ids': [result.hash], 'files-wanted': wantedFile}
                post_data = json.dumps({
                    'arguments': arguments,
                    'method': 'torrent-set',
                })
                self._request(method='post', data=post_data)
                if not self.response.json()['result'] == "success":
                    self.remove_torrent_downloaded(result.hash)
                    transmission_error = True
                    return False

    # NOTE(review): this retry block is unreachable dead code — every site
    # that sets transmission_error = True immediately returns False above,
    # so execution never reaches here with transmission_error truthy.
    # The original intent was presumably to re-add the torrent after an
    # error; confirm intent before removing or fixing.
    if transmission_error:
        self.remove_torrent_downloaded(result.hash)
        arguments = {
            'filename': result.url,
            'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
            'download-dir': sickbeard.TORRENT_PATH
        }
        post_data = json.dumps({
            'arguments': arguments,
            'method': 'torrent-add',
        })
        self._request(method='post', data=post_data)

        if not self.response.json()['result'] == "success":
            return False

    return True
def findEpisode(self, episode, manualSearch=False):
    """Search this JSON-API provider for a single episode.

    Checks the provider cache first; only queries the provider's JSON
    search endpoint when the cache is empty AND this is a manual search.

    :param episode: episode object to search for
    :param manualSearch: True when the user triggered the search by hand
    :return: list of result objects (may be the cached results or [])
    """
    logger.log(u"Searching " + self.name + " for " + episode.prettyName())

    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: " + str(results), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    data = self.getURL(self.search_url,
                       post_data=self._make_post_data_JSON(
                           show=episode.show, episode=episode))

    if not data:
        logger.log(u"No data returned from " + self.search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data",
                   logger.ERROR)
        return []

    if self._checkAuthFromData(parsedJSON):
        results = []

        if parsedJSON and 'data' in parsedJSON:
            items = parsedJSON['data']
        else:
            logger.log(
                u"Resulting JSON from " + self.name +
                " isn't correct, not parsing it", logger.ERROR)
            items = []

        for item in items:
            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                myParser = NameParser()
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid episode", logger.WARNING)
                continue

            # air-by-date shows match on airdate; everything else on SxxEyy
            if episode.show.air_by_date:
                if parse_result.air_date != episode.airdate:
                    logger.log(
                        u"Episode " + title + " didn't air on " +
                        str(episode.airdate) + ", skipping it", logger.DEBUG)
                    continue
            elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                logger.log(
                    u"Episode " + title + " isn't " + str(episode.season) +
                    "x" + str(episode.episode) + ", skipping it",
                    logger.DEBUG)
                continue

            quality = self.getQuality(item)

            # respect the show's quality settings
            if not episode.show.wantEpisode(episode.season, episode.episode,
                                            quality, manualSearch):
                logger.log(
                    u"Ignoring result " + title +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            result = self.getResult([episode])
            result.url = url
            result.name = title
            result.quality = quality

            results.append(result)

    # NOTE(review): placement reconstructed at function level — when auth
    # fails this returns the (empty) cache results; confirm against upstream.
    return results
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting torrent file list.

    :param title: original torrent title
    :param torrent_id: provider torrent id used to fetch the file list page
    :param ep_number: expected number of episodes in the season
    :return: rewritten title including 'Sxx <quality>', the original title
             when it already carries a scene quality, or None when the
             torrent is rejected or no quality could be determined
    """

    # FIX: the original list was missing a comma between 'ts' and 'ogv',
    # so the two strings concatenated to 'tsogv' and neither .ts nor .ogv
    # files were ever recognized as media.
    mediaExtensions = [
        'avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
        'ogv', 'rar', 'zip', 'mp4'
    ]

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url +
                                   'ajax_details_filelist.php?id=' +
                                   str(torrent_id))

    if self.proxy and self.proxy.isEnabled():
        self.headers.update({'referer': self.proxy.getProxyURL()})

    data = self.getURL(fileURL)
    if not data:
        return None

    # scrape the file names out of the HTML table cells
    filesList = re.findall('<td.+>(.*?)</td>', data)

    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title,
                   logger.ERROR)

    videoFiles = filter(
        lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

    # Filtering SingleEpisode/MultiSeason Torrent: a real season pack has
    # roughly one media file per episode (10% slack for extras)
    if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
        logger.log(
            u"Result " + title + " have " + str(ep_number) +
            " episode and episodes retrived in torrent are " +
            str(len(videoFiles)), logger.DEBUG)
        logger.log(
            u"Result " + title +
            " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
            logger.DEBUG)
        return None

    # if the title already names a quality, keep it as-is
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title

    # otherwise probe the contained files for a recognizable quality
    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    # fall back to an assumed quality from the last file examined
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title,
                   logger.DEBUG)
        return None

    try:
        myParser = NameParser(showObj=self.show)
        parse_result = myParser.parse(fileName)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(
        u"Season quality for " + title + " is " +
        Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(
            parse_result.season_number) + ' ' + self._reverseQuality(quality)

    return title
def execute(self):
    """Migration step 10: add file_size and release_name to tv_episodes.

    Backs up the DB, adds the two columns if missing, then back-fills:
    file_size from the files on disk, and release_name first from snatch
    history and then from obvious scene-style filenames.
    """
    backupDatabase(10)

    if not self.hasColumn("tv_episodes", "file_size"):
        self.addColumn("tv_episodes", "file_size")

    if not self.hasColumn("tv_episodes", "release_name"):
        self.addColumn("tv_episodes", "release_name", "TEXT", "")

    ep_results = self.connection.select(
        "SELECT episode_id, location, file_size FROM tv_episodes")

    logger.log(u"Adding file size to all episodes in DB, please be patient")
    for cur_ep in ep_results:
        if not cur_ep["location"]:
            continue

        # if there is no size yet then populate it for us
        if (not cur_ep["file_size"]
                or not int(cur_ep["file_size"])) and ek.ek(
                    os.path.isfile, cur_ep["location"]):
            cur_size = ek.ek(os.path.getsize, cur_ep["location"])
            self.connection.action(
                "UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
                [cur_size, int(cur_ep["episode_id"])])

    # check each snatch to see if we can use it to get a release name from
    history_results = self.connection.select(
        "SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")

    logger.log(u"Adding release name to all episodes still in history")
    for cur_result in history_results:
        # find the associated download, if there isn't one then ignore it
        download_results = self.connection.select(
            "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
            [
                cur_result["showid"], cur_result["season"],
                cur_result["episode"], cur_result["date"]
            ])
        if not download_results:
            logger.log(
                u"Found a snatch in the history for " +
                cur_result["resource"] +
                " but couldn't find the associated download, skipping it",
                logger.DEBUG)
            continue

        nzb_name = cur_result["resource"]
        file_name = ek.ek(os.path.basename,
                          download_results[0]["resource"])

        # take the extension off the filename, it's not needed
        if '.' in file_name:
            file_name = file_name.rpartition('.')[0]

        # find the associated episode on disk
        ep_results = self.connection.select(
            "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
            [
                cur_result["showid"], cur_result["season"],
                cur_result["episode"]
            ])
        if not ep_results:
            logger.log(
                u"The episode " + nzb_name +
                " was found in history but doesn't exist on disk anymore, skipping",
                logger.DEBUG)
            continue

        # get the status/quality of the existing ep and make sure it's what we expect
        ep_status, ep_quality = common.Quality.splitCompositeStatus(
            int(ep_results[0]["status"]))
        if ep_status != common.DOWNLOADED:
            continue
        if ep_quality != int(cur_result["quality"]):
            continue

        # make sure this is actually a real release name and not a season pack or something
        for cur_name in (nzb_name, file_name):
            logger.log(
                u"Checking if " + cur_name +
                " is actually a good release name", logger.DEBUG)
            try:
                np = NameParser(False)
                parse_result = np.parse(cur_name)
            except InvalidNameException:
                continue

            # FIX: idiomatic None test ('is not None' instead of '!= None')
            if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
                # if all is well by this point we'll just put the release name into the database
                self.connection.action(
                    "UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                    [cur_name, ep_results[0]["episode_id"]])
                break

    # check each snatch to see if we can use it to get a release name from
    empty_results = self.connection.select(
        "SELECT episode_id, location FROM tv_episodes WHERE release_name = ''"
    )

    logger.log(
        u"Adding release name to all episodes with obvious scene filenames"
    )
    for cur_result in empty_results:

        ep_file_name = ek.ek(os.path.basename, cur_result["location"])
        ep_file_name = os.path.splitext(ep_file_name)[0]

        # only want to find real scene names here so anything with a space in it is out
        if ' ' in ep_file_name:
            continue

        try:
            np = NameParser(False)
            parse_result = np.parse(ep_file_name)
        except InvalidNameException:
            continue

        if not parse_result.release_group:
            continue

        logger.log(
            u"Name " + ep_file_name + " gave release group of " +
            parse_result.release_group + ", seems valid", logger.DEBUG)
        self.connection.action(
            "UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
            [ep_file_name, cur_result["episode_id"]])

    self.incDBVersion()
def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
    """Search the provider for the given episodes and bucket the results.

    :param show: show object being searched
    :param episodes: list of episode objects to find
    :param search_mode: 'sponly' (season pack) or 'eponly' (single episode)
    :param manualSearch: True when triggered by the user
    :param downCurQuality: allow re-downloading current quality
    :return: dict mapping episode number (or MULTI_EP_RESULT/SEASON_RESULT)
             to a list of result objects
    """
    self._checkAuth()
    self.show = show

    results = {}
    itemList = []

    searched_scene_season = None
    for epObj in episodes:
        # search cache for episode result
        cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality)
        if cacheResult:
            if epObj.episode not in results:
                results[epObj.episode] = cacheResult
            else:
                results[epObj.episode].extend(cacheResult)

            # found result, search next episode
            continue

        # skip if season already searched
        if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == epObj.scene_season:
            continue

        # mark season searched for season pack searches so we can skip later on
        searched_scene_season = epObj.scene_season

        search_strings = []
        if len(episodes) > 1 and search_mode == 'sponly':
            # get season search results
            search_strings = self._get_season_search_strings(epObj)
        elif search_mode == 'eponly':
            # get single episode search results
            search_strings = self._get_episode_search_strings(epObj)

        # 'rid' (indexer id) searches take priority over plain string queries
        first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
        if first:
            logger.log(u'First search_string has rid', logger.DEBUG)

        for curString in search_strings:
            itemList += self._doSearch(curString, search_mode, len(episodes), epObj=epObj)
            if first:
                first = False
                if itemList:
                    logger.log(u'First search_string had rid, and returned results, skipping query by string', logger.DEBUG)
                    break
                else:
                    logger.log(u'First search_string had rid, but returned no results, searching with string query', logger.DEBUG)

    # if we found what we needed already from cache then return results and exit
    if len(results) == len(episodes):
        return results

    # sort list by quality
    if len(itemList):
        items = {}
        itemsUnknown = []
        for item in itemList:
            quality = self.getQuality(item, anime=show.is_anime)
            if quality == Quality.UNKNOWN:
                itemsUnknown += [item]
            else:
                if quality not in items:
                    items[quality] = [item]
                else:
                    items[quality].append(item)

        itemList = list(itertools.chain(*[v for (k, v) in sorted(items.iteritems(), reverse=True)]))
        itemList += itemsUnknown if itemsUnknown else []

    # filter results
    cl = []
    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
            continue
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
            continue

        showObj = parse_result.show
        quality = parse_result.quality
        release_group = parse_result.release_group
        version = parse_result.version

        addCacheEntry = False
        if not (showObj.air_by_date or showObj.sports):
            if search_mode == 'sponly':
                # season pack results must not carry episode numbers and must
                # cover a season/episode we are actually searching for
                if len(parse_result.episode_numbers):
                    logger.log(
                        u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                        logger.DEBUG)
                    addCacheEntry = True
                if len(parse_result.episode_numbers) and (parse_result.season_number not in set([ep.season for ep in episodes]) or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True
            else:
                if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in episodes if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True
                elif len(parse_result.episode_numbers) and not [ep for ep in episodes if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                    logger.log(
                        u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                        logger.DEBUG)
                    addCacheEntry = True

            if not addCacheEntry:
                # we just use the existing info for normal searches
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers
        else:
            # air-by-date / sports: map the parsed airdate back to season/episode
            if not parse_result.is_air_by_date:
                logger.log(
                    u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                    logger.DEBUG)
                addCacheEntry = True
            else:
                airdate = parse_result.air_date.toordinal()
                myDB = db.DBConnection()
                sql_results = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [showObj.indexerid, airdate])

                if len(sql_results) != 1:
                    logger.log(
                        u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                        logger.WARNING)
                    addCacheEntry = True

            if not addCacheEntry:
                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

        # add parsed result to cache for usage later on
        if addCacheEntry:
            logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
            # pylint: disable=W0212
            # Access to a protected member of a client class
            ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
            if ci is not None:
                cl.append(ci)
            continue

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality):
                wantEp = False
                break

        if not wantEp:
            logger.log(
                u"Ignoring result " + title + " because we don't want an episode that is " +
                Quality.qualityStrings[quality], logger.INFO)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(showObj.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        result.show = showObj
        result.url = url
        result.name = title
        result.quality = quality
        result.release_group = release_group
        result.version = version
        result.content = None
        result.size = self._get_size(item)

        # classify the result: single episode, multi-episode, or full season
        if len(epObj) == 1:
            epNum = epObj[0].episode
            logger.log(u"Single episode result.", logger.DEBUG)
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(
                u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers),
                logger.DEBUG)
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            logger.log(u"Separating full season result to check for later", logger.DEBUG)

        if epNum not in results:
            results[epNum] = [result]
        else:
            results[epNum].append(result)

    # check if we have items to add to cache
    if len(cl) > 0:
        # pylint: disable=W0212
        # Access to a protected member of a client class
        myDB = self.cache._getDB()
        myDB.mass_action(cl)

    return results
def execute(self):
    """Schema migration: add ``file_size`` and ``release_name`` to tv_episodes.

    Steps, in order:
      1. Back up the database, then add the two columns if missing.
      2. Populate ``file_size`` from the on-disk file for every episode
         that has a location but no stored size.
      3. For each snatch in history, find its matching download row and
         the episode on disk; if status/quality match expectations and the
         snatched/downloaded name parses as a proper single-episode release,
         store it as the episode's ``release_name``.
      4. For episodes still lacking a release name, fall back to the scene
         filename on disk (no spaces, parseable, has a release group).

    Returns the new schema version (after ``incDBVersion``).
    NOTE(review): assumes this runs inside the usual migration framework
    (``hasColumn``/``addColumn``/``connection`` provided by the base class).
    """
    # Always snapshot the DB before a schema change.
    db.backup_database('sickbeard.db', self.checkDBVersion())

    if not self.hasColumn('tv_episodes', 'file_size'):
        self.addColumn('tv_episodes', 'file_size')
    if not self.hasColumn('tv_episodes', 'release_name'):
        self.addColumn('tv_episodes', 'release_name', 'TEXT', '')

    ep_results = self.connection.select(
        'SELECT episode_id, location, file_size FROM tv_episodes')

    logger.log(
        u'Adding file size to all episodes in DB, please be patient')
    for cur_ep in ep_results:
        # Episodes with no recorded location cannot be stat'ed.
        if not cur_ep['location']:
            continue

        # if there is no size yet then populate it for us
        # (file_size may be NULL, '' or 0 — all treated as "missing")
        if (not cur_ep['file_size'] or not int(cur_ep['file_size'])) and ek.ek(
                os.path.isfile, cur_ep['location']):
            cur_size = ek.ek(os.path.getsize, cur_ep['location'])
            self.connection.action(
                'UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?',
                [cur_size, int(cur_ep['episode_id'])])

    # check each snatch to see if we can use it to get a release name from
    # history convention: provider == -1 marks a download row, anything
    # else is a snatch row — presumably set by the history writer; the
    # queries below rely on that pairing.
    history_results = self.connection.select(
        'SELECT * FROM history WHERE provider != -1 ORDER BY date ASC')

    logger.log(u'Adding release name to all episodes still in history')
    for cur_result in history_results:
        # find the associated download, if there isn't one then ignore it
        download_results = self.connection.select(
            'SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?',
            [
                cur_result['showid'], cur_result['season'],
                cur_result['episode'], cur_result['date']
            ])
        if not download_results:
            logger.log(
                u'Found a snatch in the history for ' + cur_result['resource']
                + ' but couldn\'t find the associated download, skipping it',
                logger.DEBUG)
            continue

        # Two candidate names: the snatched NZB name and the downloaded
        # file's basename.
        nzb_name = cur_result['resource']
        file_name = ek.ek(os.path.basename,
                          download_results[0]['resource'])

        # take the extension off the filename, it's not needed
        if '.' in file_name:
            file_name = file_name.rpartition('.')[0]

        # find the associated episode on disk
        # NOTE: this rebinds ep_results from the earlier file-size loop;
        # harmless here since that loop has finished.
        ep_results = self.connection.select(
            'SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ""',
            [
                cur_result['showid'], cur_result['season'],
                cur_result['episode']
            ])
        if not ep_results:
            logger.log(
                u'The episode ' + nzb_name
                + ' was found in history but doesn\'t exist on disk anymore, skipping',
                logger.DEBUG)
            continue

        # get the status/quality of the existing ep and make sure it's what we expect
        ep_status, ep_quality = common.Quality.splitCompositeStatus(
            int(ep_results[0]['status']))
        if ep_status != common.DOWNLOADED:
            continue

        # Only trust the name if the history quality matches what's on disk.
        if ep_quality != int(cur_result['quality']):
            continue

        # make sure this is actually a real release name and not a season pack or something
        for cur_name in (nzb_name, file_name):
            logger.log(
                u'Checking if ' + cur_name
                + ' is actually a good release name', logger.DEBUG)
            try:
                np = NameParser(False)
                parse_result = np.parse(cur_name)
            except (InvalidNameException, InvalidShowException):
                # Unparseable candidate — try the next name.
                continue

            # A "good" release name must parse to a specific show, season,
            # episode(s) and release group.
            if parse_result.series_name and parse_result.season_number is not None\
                    and parse_result.episode_numbers and parse_result.release_group:
                # if all is well by this point we'll just put the release name into the database
                self.connection.action(
                    'UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?',
                    [cur_name, ep_results[0]['episode_id']])
                break

    # check each snatch to see if we can use it to get a release name from
    # (fallback pass: derive a name from the scene-style filename on disk)
    empty_results = self.connection.select(
        'SELECT episode_id, location FROM tv_episodes WHERE release_name = ""'
    )

    logger.log(
        u'Adding release name to all episodes with obvious scene filenames'
    )
    for cur_result in empty_results:
        ep_file_name = ek.ek(os.path.basename, cur_result['location'])
        ep_file_name = os.path.splitext(ep_file_name)[0]

        # only want to find real scene names here so anything with a space in it is out
        if ' ' in ep_file_name:
            continue

        try:
            np = NameParser(False)
            parse_result = np.parse(ep_file_name)
        except (InvalidNameException, InvalidShowException):
            continue

        # Scene names always carry a release group; skip anything without one.
        if not parse_result.release_group:
            continue

        logger.log(
            u'Name ' + ep_file_name + ' gave release group of '
            + parse_result.release_group + ', seems valid', logger.DEBUG)
        self.connection.action(
            'UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?',
            [ep_file_name, cur_result['episode_id']])

    # Bump the schema version and report it back to the migration runner.
    self.incDBVersion()
    return self.checkDBVersion()