def _find_season_quality(self, title, torrent_id, ep_number):
    """Return the modified title of a Season Torrent with the quality found inspecting torrent file list.

    Fetches the torrent's ajax file list, keeps only video files, rejects
    torrents whose file count does not match the expected episode count
    (single-episode or multi-season packs), then derives the quality from
    the individual file names.

    :param title: original torrent title
    :param torrent_id: site id used to build the file-list URL
    :param ep_number: expected number of episodes in the season
    :return: rewritten title carrying the detected quality, or None
    """
    # BUG FIX: the original list contained `'ts' 'ogv'` (missing comma),
    # which Python's implicit string concatenation turns into the single
    # bogus extension 'tsogv', so neither .ts nor .ogv files matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv',
                       'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    # crude scrape of the table cells holding the file names
    filesList = re.findall('<td.+>(.*?)</td>', data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

    videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

    # Filtering SingleEpisode/MultiSeason Torrent:
    # accept only when the count lies within [ep_number, ep_number * 1.1]
    if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
        logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG)
        logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG)
        return None

    # a quality already present in the title wins outright
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title

    # probe the individual file names until one yields a quality
    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    # fall back to an assumed quality based on the last inspected file
    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    # rebuild the title as '<series> S<nn> <quality>'
    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

    return title
def _find_season_quality(self, title, torrent_link, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
    quality = Quality.UNKNOWN
    file_name = None

    # fetch the torrent's detail page; bail out silently on failure
    data = self.get_url(torrent_link)
    if not data:
        return None

    try:
        with BS4Parser(data, features=['html5lib', 'permissive']) as soup:
            file_table = soup.find('table', attrs={'class': 'torrentFileList'})
            if not file_table:
                return None

            # collect the listed file names and keep only video files
            # (mediaExtensions is presumably a module-level list — TODO confirm)
            files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
            video_files = filter(lambda i: i.rpartition('.')[2].lower() in mediaExtensions, files)

            # Filtering SingleEpisode/MultiSeason Torrent:
            # reject when the video file count is below the expected episode
            # count or more than 10% above it
            if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
                logger.log(u'Result %s lists %s episodes with %s episodes retrieved in torrent'
                           % (title, ep_number, len(video_files)), logger.DEBUG)
                logger.log(u'Result %s seem to be a single episode or multi-season torrent, skipping result...'
                           % title, logger.DEBUG)
                return None

            # a quality already present in the title wins outright
            if Quality.UNKNOWN != Quality.sceneQuality(title):
                return title

            # otherwise probe each file name until one yields a quality
            for file_name in video_files:
                quality = Quality.sceneQuality(os.path.basename(file_name))
                if Quality.UNKNOWN != quality:
                    break

            # last resort: assume a quality from the final file inspected
            if None is not file_name and Quality.UNKNOWN == quality:
                quality = Quality.assumeQuality(os.path.basename(file_name))

            if Quality.UNKNOWN == quality:
                logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
                return None

            try:
                my_parser = NameParser(showObj=self.show)
                parse_result = my_parser.parse(file_name)
            except (InvalidNameException, InvalidShowException):
                return None

            logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

            # rebuild the title as '<series> S<nn> <quality>'
            if parse_result.series_name and parse_result.season_number:
                title = parse_result.series_name + ' S%02d %s' % (int(parse_result.season_number),
                                                                  self._reverse_quality(quality))

            return title

    except Exception:
        logger.log(u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
def _find_season_quality(self, title, torrent_id, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
    if not self.url:
        return False

    quality = Quality.UNKNOWN
    file_name = None

    # fetch the ajax file-list fragment for this torrent id
    data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
    if self.should_skip() or not data:
        return None

    # crude scrape of the table cells containing the file names
    files_list = re.findall('<td.+>(.*?)</td>', data)
    if not files_list:
        logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)

    # keep only entries whose extension is a known media extension
    # (mediaExtensions is presumably defined at module level — TODO confirm)
    video_files = filter(lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list)

    # Filtering SingleEpisode/MultiSeason Torrent:
    # the file count must lie within [ep_number, ep_number * 1.1]
    if ep_number > len(video_files) or float(ep_number * 1.1) < len(video_files):
        logger.log(u'Result %s has episode %s and total episodes retrieved in torrent are %s'
                   % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
        logger.log(u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
                   % title, logger.DEBUG)
        return None

    # a quality already present in the title wins outright
    if Quality.UNKNOWN != Quality.sceneQuality(title):
        return title

    # probe the individual file names for a recognisable quality
    for file_name in video_files:
        quality = Quality.sceneQuality(os.path.basename(file_name))
        if Quality.UNKNOWN != quality:
            break

    # fall back to an assumed quality based on the last file inspected
    if None is not file_name and Quality.UNKNOWN == quality:
        quality = Quality.assumeQuality(os.path.basename(file_name))

    if Quality.UNKNOWN == quality:
        logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
        return None

    try:
        my_parser = NameParser(showObj=self.show, indexer_lookup=False)
        parse_result = my_parser.parse(file_name)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

    # rebuild the title as '<series> S<nn> <quality>'
    if parse_result.series_name and parse_result.season_number:
        title = '%s S%02d %s' % (parse_result.series_name, int(parse_result.season_number),
                                 self._reverse_quality(quality))

    return title
def getQuality(self, item, anime=False):
    """Map an explicit 'quality' field on the result item to a Quality
    constant; fall back to parsing the title when the field is absent or
    carries an unrecognised value."""
    known = {
        "480p": Quality.SDTV,
        "720p": Quality.HDWEBDL,
        "1080p": Quality.FULLHDWEBDL,
    }
    if 'quality' in item:
        tag = item.get('quality')
        if tag in known:
            return known[tag]
    return Quality.sceneQuality(item.get('title'), anime)
def getQuality(self, item, anime=False):
    """Derive the release quality from the item's file name.

    :param item: search result expected to expose a `filename` attribute
    :param anime: passed through to the scene-quality parser
    :return: a Quality constant; UNKNOWN when the name cannot be parsed
    """
    try:
        quality = Quality.sceneQuality(item.filename, anime)
    except Exception:
        # BUG FIX: was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; keep the best-effort fallback
        # but only for ordinary errors (e.g. missing/invalid filename)
        quality = Quality.UNKNOWN
    return quality
def _addCacheEntry(self, name, url, quality=None):
    """Parse a release name and insert a row into this provider's cache table.

    :param name: release name to parse
    :param url: download URL stored alongside the name
    :param quality: pre-determined quality, or None to parse it from the name
    :return: None (also on any parse/insert failure)
    """
    cacheDB = self._getDB()
    season = None
    episodes = None

    # if we don't have complete info then parse the filename to get it
    try:
        myParser = NameParser(0)
        parse_result = myParser.parse(name).convert()
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
        return None

    if not parse_result.show:
        logger.log(u"Couldn't find a show in our databases matching " + name + ", unable to cache it", logger.DEBUG)
        return None

    try:
        myDB = db.DBConnection()

        if parse_result.show.air_by_date:
            # sports shows keep their date in a different parse field
            airdate = parse_result.sports_event_date.toordinal() if parse_result.show.sports else parse_result.air_date.toordinal()
            sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                                      [parse_result.show.indexerid, airdate])
            # BUG FIX: was `if sql_results > 0:` — that compares a list to an
            # int (always True on py2) and raised IndexError on empty results
            if sql_results:
                season = int(sql_results[0]["season"])
                episodes = [int(sql_results[0]["episode"])]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a seperated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            if quality is None:
                quality = Quality.sceneQuality(name)

            if not isinstance(name, unicode):
                name = unicode(name, 'utf-8')

            cacheDB.action(
                "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality])
    except Exception:
        # best-effort insert; narrowed from a bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed
        return
def _find_season_quality(self, title, torrent_link, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
    # BUG FIX: the original list contained `'ts' 'ogv'` (missing comma),
    # which Python's implicit string concatenation turns into the single
    # bogus extension 'tsogv', so neither .ts nor .ogv files matched.
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv',
                       'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})
        if not file_table:
            return None

        # keep only the listed files whose extension is a media extension
        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG)
            return None

        # probe the individual file names until one yields a quality
        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        # fall back to an assumed quality based on the last inspected file
        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        # rebuild the title as '<series> S<nn> <quality>'
        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

        return title

    except Exception:
        # the bound exception variable was unused, so the py2-only
        # `except Exception, e` form was dropped
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
def getQuality(self, item):
    """Read the <torrent><fileName> node text from the item and map the
    file name to a quality."""
    torrent_el = item.getElementsByTagName('torrent')[0]
    name_el = torrent_el.getElementsByTagName('fileName')[0]
    return Quality.sceneQuality(get_xml_text(name_el))
def get_quality(self, item, anime=False):
    """Derive the release quality from whichever form the result item
    takes: a (title, ...) tuple, a feed dict, or an object with a title."""
    if isinstance(item, tuple):
        return Quality.sceneQuality(item[0], anime)
    if isinstance(item, dict):
        title, _ = self._get_title_and_url(item)
        return Quality.sceneQuality(title, anime)
    return Quality.sceneQuality(item.title, anime)
def _doSearch(self, search_params, show=None):
    """Search ThePirateBay (via the configured proxy) for every search
    string in search_params and return result tuples sorted by seeders.

    :param search_params: dict keyed by mode ('Season'/'Episode'), each a
        list of search strings
    :param show: unused here — presumably kept for interface parity, TODO confirm
    :return: list of (title, url, id, seeders, leechers) tuples
    """
    results = []
    items = {'Season': [], 'Episode': []}

    for mode in search_params.keys():
        for search_string in search_params[mode]:
            searchURL = self.proxy._buildURL(self.searchurl % (urllib.quote(unidecode(search_string))))
            logger.log(u"Search string: " + searchURL, logger.DEBUG)

            data = self.getURL(searchURL)
            if not data:
                continue

            re_title_url = self.proxy._buildRE(self.re_title_url)

            # Extracting torrent information from data returned by searchURL
            match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data))
            for torrent in match:
                title = torrent.group('title').replace('_', '.')  # Do not know why but SickBeard skip release with '_' in name
                url = torrent.group('url')
                id = int(torrent.group('id'))
                seeders = int(torrent.group('seeders'))
                leechers = int(torrent.group('leechers'))

                # Filter unseeded torrent
                if seeders == 0:
                    continue

                # Accept Torrent only from Good People for every Episode Search
                if sickbeard.THEPIRATEBAY_TRUSTED and re.search('(VIP|Trusted|Helper)', torrent.group(0)) == None:
                    logger.log(u"ThePirateBay Provider found result " + torrent.group('title') + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG)
                    continue

                # Try to find the real Quality for full season torrent analyzing files in torrent
                if mode == 'Season' and Quality.sceneQuality(title) == Quality.UNKNOWN:
                    # expected episode count = episode searches per show-name variant
                    ep_number = int(len(search_params['Episode']) / len(allPossibleShowNames(self.show)))
                    title = self._find_season_quality(title, id, ep_number)

                if not title:
                    continue

                item = title, url, id, seeders, leechers
                items[mode].append(item)

        # For each search mode sort all the items by seeders
        items[mode].sort(key=lambda tup: tup[3], reverse=True)
        results += items[mode]

    return results
def getQuality(self, item, anime=False):
    """Figure out the quality of the given RSS item node.

    item: An elementtree.ElementTree element representing the <item> tag of the RSS feed

    Returns a Quality value obtained from the node's data
    """
    title = self._get_title_and_url(item)[0]
    return Quality.sceneQuality(title, anime)
def getQuality(self, item): """ Figures out the quality of the given RSS item node item: An xml.dom.minidom.Node representing the <item> tag of the RSS feed Returns a Quality value obtained from the node's data """ (title, url) = self._get_title_and_url(item) #@UnusedVariable quality = Quality.sceneQuality(title) return quality
def _addCacheEntry(self, name, url, quality=None):
    """Parse a release name and build the SQL insert for this provider's cache.

    :param name: release name to parse
    :param url: download URL stored alongside the name
    :param quality: pre-determined quality, or None to parse it from the name
    :return: [sql, args] list suitable for mass_action, or None on failure
    """
    try:
        myParser = NameParser(convert=True)
        parse_result = myParser.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
        return None
    except InvalidShowException:
        logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
        return None

    if not parse_result or not parse_result.series_name:
        return None

    season = episodes = None
    if parse_result.air_by_date or parse_result.sports:
        # resolve the air date (or sports event date) to a real season/episode
        airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_event_date.toordinal()
        myDB = db.DBConnection()
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
            [parse_result.show.indexerid, parse_result.show.indexer, airdate])
        # BUG FIX: was `if sql_results > 0:` — that compares a list to an
        # int (always True on py2) and raised IndexError on empty results
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        season = parse_result.season_number if parse_result.season_number is not None else 1
        episodes = parse_result.episode_numbers

    if season and episodes:
        # store episodes as a seperated string
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        if quality is None:
            quality = Quality.sceneQuality(name, parse_result.is_anime)

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8')

        # get release group
        release_group = parse_result.release_group

        logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

        return [
            "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group) VALUES (?,?,?,?,?,?,?,?)",
            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group]]
def getQuality(self, item, anime=False):
    """Figure out the quality of the given RSS item node, then sanity-check
    HD results against the reported release size: files below a minimum
    believable size for their resolution/codec/runtime class are demoted
    to UNKNOWN.

    item: An elementtree.ElementTree element representing the <item> tag of the RSS feed

    Returns a Quality value obtained from the node's data
    """
    title, _ = self._get_title_and_url(item)
    quality = Quality.sceneQuality(title, anime)

    # only 720p/1080p results get the size check
    if quality in (Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY):
        height = "1080p"
    elif quality in (Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY):
        height = "720p"
    else:
        return quality

    rlsSize = self._get_size(item)
    if not rlsSize > -1:
        # size unavailable; nothing to check against
        return quality

    rlsCodec = Quality.sceneQualityFromName(title, quality)
    if rlsCodec == "":
        rlsCodec = "x264"  # default codec assumption

    # pick the minimum believable sizes (1080p, 720p) in bytes for this
    # codec family and runtime class
    long_show = self.show.runtime > 30
    if "265" in rlsCodec or rlsCodec == "hevc":
        floor_1080, floor_720 = (400000000, 200000000) if long_show else (200000000, 100000000)
    elif "264" in rlsCodec or rlsCodec == "avc":
        floor_1080, floor_720 = (1000000000, 800000000) if long_show else (700000000, 500000000)
    else:
        return quality

    if (height == "1080p" and rlsSize <= floor_1080) or (height == "720p" and rlsSize <= floor_720):
        quality = Quality.UNKNOWN
    return quality
def getQuality(self, item):
    """Derive the quality from the URL of the item's first enclosure."""
    enclosure_url = item.enclosures[0].href
    return Quality.sceneQuality(enclosure_url)
def getQuality(self, item, anime=False):
    """Derive the quality from the title stored as the first tuple element."""
    title = item[0]
    quality = Quality.sceneQuality(title, anime)
    return quality
def _find_season_quality(self, title, torrent_id, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
    if not self.url:
        return False

    quality = Quality.UNKNOWN
    file_name = None

    # fetch the ajax file-list fragment for this torrent id
    data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
    if not data:
        return None

    # crude scrape of the table cells containing the file names
    files_list = re.findall('<td.+>(.*?)</td>', data)
    if not files_list:
        logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)

    # keep only entries whose extension is a known media extension
    # (mediaExtensions is presumably defined at module level — TODO confirm)
    video_files = filter(
        lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list)

    # Filtering SingleEpisode/MultiSeason Torrent:
    # the file count must lie within [ep_number, ep_number * 1.1]
    if ep_number > len(video_files) or float(
            ep_number * 1.1) < len(video_files):
        logger.log(
            u'Result %s has episode %s and total episodes retrieved in torrent are %s'
            % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
        logger.log(
            u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
            % title, logger.DEBUG)
        return None

    # a quality already present in the title wins outright
    if Quality.UNKNOWN != Quality.sceneQuality(title):
        return title

    # probe the individual file names for a recognisable quality
    for file_name in video_files:
        quality = Quality.sceneQuality(os.path.basename(file_name))
        if Quality.UNKNOWN != quality:
            break

    # fall back to an assumed quality based on the last file inspected
    if None is not file_name and Quality.UNKNOWN == quality:
        quality = Quality.assumeQuality(os.path.basename(file_name))

    if Quality.UNKNOWN == quality:
        logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
        return None

    try:
        my_parser = NameParser(showObj=self.show)
        parse_result = my_parser.parse(file_name)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(
        u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]),
        logger.DEBUG)

    # rebuild the title as '<series> S<nn> <quality>'
    if parse_result.series_name and parse_result.season_number:
        title = '%s S%02d %s' % (parse_result.series_name,
                                 int(parse_result.season_number),
                                 self._reverse_quality(quality))

    return title
def getQuality(self, item):
    """Derive the quality from the title stored as the first tuple element."""
    return Quality.sceneQuality(item[0])
def get_quality(self, item, anime=False):
    """Derive the release quality from the item's title."""
    title = self._get_title_and_url(item)[0]
    return Quality.sceneQuality(title, anime)
def _addCacheEntry(self, name, url):
    """Parse a release name, resolve it to a known show and episode(s) via
    the name cache or the database, and insert one cache row per
    (season, episodes) group after scene-number conversion.

    :param name: release name from the provider feed
    :param url: download URL for the release
    :return: None (also on any failure to parse or resolve the name)
    """
    cacheDB = self._getDB()
    parse_result = None
    from_cache = False
    indexer_id = None

    # if we don't have complete info then parse the filename to get it
    # (single-pass loop used purely for its `break` exits)
    while True:
        try:
            myParser = NameParser()
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None

        if not parse_result:
            logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
            return None

        if not parse_result.series_name:
            logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
            return None

        # BUG FIX: message typo "cahe" -> "cache"
        logger.log(u"Checking the cache for show:" + str(parse_result.series_name), logger.DEBUG)

        # remember if the cache lookup worked or not so we know whether we should bother updating it later
        cache_id = name_cache.retrieveNameFromCache(parse_result.series_name)
        if cache_id:
            # BUG FIX: log the id that was actually found — the original
            # logged repr(indexer_id), which is still None at this point
            logger.log(u"Cache lookup found Indexer ID:" + repr(cache_id) + ", using that for " + parse_result.series_name, logger.DEBUG)
            from_cache = True
            indexer_id = cache_id
            break

        # if the cache failed, try looking up the show name in the database
        logger.log(u"Checking the database for show:" + str(parse_result.series_name), logger.DEBUG)

        showResult = helpers.searchDBForShow(parse_result.series_name)
        if showResult:
            logger.log(u"Database lookup found Indexer ID:" + str(showResult[1]) + ", using that for " + parse_result.series_name, logger.DEBUG)
            indexer_id = showResult[1]
        break

    # if we didn't find a Indexer ID return None
    if not indexer_id:
        return None

    # if the show isn't in our database then return None
    try:
        showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
    except Exception:
        # narrowed from a bare `except:` — best-effort lookup only
        return None
    if not showObj:
        return None

    # if we weren't provided with season/episode information then get it from the name that we parsed
    season = None
    episodes = None
    myDB = db.DBConnection()
    if parse_result.air_by_date:
        sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                                  [showObj.indexerid, parse_result.air_date.toordinal()])
        # BUG FIX: was `if sql_results > 0:` — that compares a list to an
        # int (always True on py2) and raised IndexError on empty results
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    elif parse_result.sports:
        sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                                  [showObj.indexerid, parse_result.sports_date.toordinal()])
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    if not (season and episodes):
        return None

    # convert scene numbered releases before storing to cache
    convertedEps = {}
    for curEp in episodes:
        epObj = showObj.getEpisode(season, curEp, sceneConvert=True)
        if not epObj:
            return None
        if not epObj.season in convertedEps:
            convertedEps[epObj.season] = []
        convertedEps[epObj.season].append(epObj.episode)

    # get the current timestamp
    curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

    # get quality of release
    quality = Quality.sceneQuality(name)

    if not isinstance(name, unicode):
        name = unicode(name, 'utf-8')

    # one cache row per converted season
    for season, episodes in convertedEps.items():
        episodeText = "|" + "|".join(map(str, episodes)) + "|"
        cacheDB.action(
            "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
            [name, season, episodeText, indexer_id, url, curTimestamp, quality])
def getQuality(self, item, anime=False):
    """Derive the quality from the item's <title> node text, with slashes
    replaced by spaces before parsing."""
    self.debug()  # NOTE(review): looks like a leftover debug hook — confirm it's intentional
    title_node = item.getElementsByTagName('title')[0]
    cleaned = helpers.get_xml_text(title_node).replace("/", " ")
    return Quality.sceneQuality(cleaned, anime)
def _getProperList(self):
    """Collect PROPER/REPACK releases from all active providers (last two
    days), match each to a show in our list, and return the ones whose
    episode we already snatched/downloaded at the same quality.

    :return: list of Proper objects worth fetching
    """
    propers = {}

    # for each provider get a list of the propers
    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        date = datetime.datetime.today() - datetime.timedelta(days=2)

        logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
        curPropers = curProvider.findPropers(date)

        # if they haven't been added by a different provider than add the proper to the list
        for x in curPropers:
            name = self._genericName(x.name)

            if not name in propers:
                logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date (newest first)
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(curProper.name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
            continue

        if not parse_result.episode_numbers:
            logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
            continue

        # populate our Proper instance
        if parse_result.air_by_date:
            # sentinel season -1 and the raw air date; resolved to real
            # numbers further below once the show is identified
            curProper.season = -1
            curProper.episode = parse_result.air_date
        else:
            curProper.season = parse_result.season_number if parse_result.season_number != None else 1
            curProper.episode = parse_result.episode_numbers[0]
        curProper.quality = Quality.sceneQuality(curProper.name)

        # for each show in our list
        for curShow in sickbeard.showList:

            if not parse_result.series_name:
                continue

            genericName = self._genericName(parse_result.series_name)

            # get the scene name masks
            sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow))

            # for each scene name mask
            for curSceneName in sceneNames:

                # if it matches
                if genericName == self._genericName(curSceneName):
                    logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)

                    # set the tvdbid in the db to the show's tvdbid
                    curProper.tvdbid = curShow.tvdbid

                    # since we found it, break out
                    break

            # if we found something in the inner for loop break out of this one
            if curProper.tvdbid != -1:
                break

        if curProper.tvdbid == -1:
            continue

        if not show_name_helpers.filterBadReleases(curProper.name):
            logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG)
            continue

        # if we have an air-by-date show then get the real season/episode numbers
        if curProper.season == -1 and curProper.tvdbid:
            showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
            if not showObj:
                logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                raise Exception("BAD STUFF HAPPENED")

            tvdb_lang = showObj.lang
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if tvdb_lang and not tvdb_lang == 'en':
                ltvdb_api_parms['language'] = tvdb_lang

            try:
                t = tvdb_api.Tvdb(**ltvdb_api_parms)
                epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                curProper.season = int(epObj["seasonnumber"])
                # NOTE(review): this assigns `episodes` (plural) while the
                # rest of the method reads `curProper.episode` — looks like
                # it may never take effect; confirm against Proper class
                curProper.episodes = [int(epObj["episodenumber"])]
            except tvdb_exceptions.tvdb_episodenotfound:
                logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.tvdbid, curProper.season, curProper.episode])
        if not sqlResults:
            continue
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
            logger.log(u"Found a proper that we need: " + str(curProper.name))
            finalPropers.append(curProper)

    return finalPropers
def _find_season_quality(self, title, torrent_id, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
    quality = Quality.UNKNOWN
    file_name = None
    data = None
    has_signature = False
    details_url = '/ajax_details_filelist.php?id=%s' % torrent_id

    # try each configured mirror until one returns a page that looks like TPB
    for idx, url in enumerate(self.urls['config_provider_home_uri']):
        data = self.get_url(url + details_url)
        # sanity check: the page <title> should identify The Pirate Bay
        if data and re.search(r'<title>The\sPirate\sBay', data[33:200:]):
            has_signature = True
            break
        else:
            data = None

    if not has_signature:
        logger.log(u'Failed to identify a page from ThePirateBay at %s attempted urls (tpb blocked? general network issue or site dead)' % len(self.urls['config_provider_home_uri']), logger.ERROR)

    if not data:
        return None

    # crude scrape of the table cells containing the file names
    files_list = re.findall('<td.+>(.*?)</td>', data)

    if not files_list:
        logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)

    # keep only entries whose extension is a known media extension
    # (mediaExtensions is presumably defined at module level — TODO confirm)
    video_files = filter(
        lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list)

    # Filtering SingleEpisode/MultiSeason Torrent:
    # the file count must lie within [ep_number, ep_number * 1.1]
    if ep_number > len(video_files) or float(
            ep_number * 1.1) < len(video_files):
        logger.log(
            u'Result %s has episode %s and total episodes retrieved in torrent are %s'
            % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
        logger.log(
            u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
            % title, logger.DEBUG)
        return None

    # a quality already present in the title wins outright
    if Quality.UNKNOWN != Quality.sceneQuality(title):
        return title

    # probe the individual file names for a recognisable quality
    for file_name in video_files:
        quality = Quality.sceneQuality(os.path.basename(file_name))
        if Quality.UNKNOWN != quality:
            break

    # fall back to an assumed quality based on the last file inspected
    if None is not file_name and Quality.UNKNOWN == quality:
        quality = Quality.assumeQuality(os.path.basename(file_name))

    if Quality.UNKNOWN == quality:
        logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
        return None

    try:
        my_parser = NameParser(showObj=self.show)
        parse_result = my_parser.parse(file_name)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(
        u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]),
        logger.DEBUG)

    # rebuild the title as '<series> S<nn> <quality>'
    if parse_result.series_name and parse_result.season_number:
        title = '%s S%02d %s' % (parse_result.series_name,
                                 int(parse_result.season_number),
                                 self._reverse_quality(quality))

    return title
season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound: logger.log(u"Unable to find episode with date "+str(parse_result.air_date)+" for show "+parse_result.series_name+", skipping", logger.WARNING) return False except tvdb_exceptions.tvdb_error, e: logger.log(u"Unable to contact TVDB: "+ex(e), logger.WARNING) return False episodeText = "|"+"|".join(map(str, episodes))+"|" # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) if not quality: quality = Quality.sceneQuality(name) myDB.action("INSERT INTO "+self.providerID+" (name, season, episodes, tvrid, tvdbid, url, time, quality) VALUES (?,?,?,?,?,?,?,?)", [name, season, episodeText, tvrage_id, tvdb_id, url, curTimestamp, quality]) def searchCache(self, episode, manualSearch=False): neededEps = self.findNeededEpisodes(episode, manualSearch) return neededEps[episode] def listPropers(self, date=None, delimiter="."): myDB = self._getDB() sql = "SELECT * FROM "+self.providerID+" WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"
class TVCache():
    """Per-provider RSS result cache (feedparser-based variant: consumes
    `data.entries`). Stores parsed release names in a provider-specific
    SQLite table and throttles feed refreshes via a `lastUpdate` table."""

    def __init__(self, provider):
        self.provider = provider
        self.providerID = self.provider.getID()
        # minimum minutes between feed refreshes (see shouldUpdate)
        self.minTime = 10

    def _getDB(self):
        # One connection per call; table name == providerID.
        return CacheDBConnection(self.providerID)

    def _clearCache(self):
        # Drop every cached row for this provider before re-populating.
        myDB = self._getDB()
        myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")

    def _getRSSData(self):
        # Stub: subclasses are expected to override and return feed data.
        data = None
        return data

    def _checkAuth(self, data):
        # Stub: subclasses override to validate provider credentials.
        return True

    def _checkItemAuth(self, title, url):
        # Stub: subclasses override to validate a single feed item.
        return True

    def updateCache(self):
        """Fetch the provider feed and rebuild the cache table from it.

        Returns [] in every path; the useful work is the DB side effect.
        """
        if not self.shouldUpdate():
            return

        if self._checkAuth(None):
            data = self._getRSSData()

            # as long as the http request worked we count this as an update
            if data:
                self.setLastUpdate()
            else:
                return []

            # now that we've loaded the current RSS feed lets delete the old cache
            logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
            self._clearCache()

            if self._checkAuth(data):
                items = data.entries
                cl = []
                for item in items:
                    # _parseItem returns an SQL (query, args) pair or None
                    ci = self._parseItem(item)
                    if ci is not None:
                        cl.append(ci)
                # batch all inserts into a single transaction
                if len(cl) > 0:
                    myDB = self._getDB()
                    myDB.mass_action(cl)
            else:
                raise AuthException(u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

        return []

    def _translateTitle(self, title):
        # Normalize feed titles to scene-style dotted names.
        return title.replace(' ', '.')

    def _translateLinkURL(self, url):
        # NOTE(review): this replace is a no-op as written — presumably it was
        # originally unescaping '&amp;' and was mangled somewhere; confirm
        # against upstream before changing.
        return url.replace('&', '&')

    def _parseItem(self, item):
        # Turn one feed entry into a cache insert; returns None if unusable.
        title = item.title
        url = item.link

        self._checkItemAuth(title, url)

        if title and url:
            title = self._translateTitle(title)
            url = self._translateLinkURL(url)

            logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
            return self._addCacheEntry(title, url)
        else:
            logger.log(
                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
                logger.DEBUG)
            return None

    def _getLastUpdate(self):
        # Read the last refresh time; clock-skewed (future) or missing
        # timestamps are coerced to epoch 0 so an update is forced.
        myDB = self._getDB()
        sqlResults = myDB.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID])

        if sqlResults:
            lastTime = int(sqlResults[0]["time"])
            if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
                lastTime = 0
        else:
            lastTime = 0

        return datetime.datetime.fromtimestamp(lastTime)

    def setLastUpdate(self, toDate=None):
        # Record a refresh time (defaults to now).
        if not toDate:
            toDate = datetime.datetime.today()

        myDB = self._getDB()
        myDB.upsert("lastUpdate",
                    {'time': int(time.mktime(toDate.timetuple()))},
                    {'provider': self.providerID})

    lastUpdate = property(_getLastUpdate)

    def shouldUpdate(self):
        # if we've updated recently then skip the update
        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
            logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
            return False

        return True

    def _addCacheEntry(self, name, url, season=None, episodes=None, indexer_id=0, quality=None, extraNames=[]):
        """Parse a release name, resolve it to an indexer id and insert it
        into the cache table. Returns None on any failure.

        NOTE(review): `extraNames=[]` is a mutable default argument — safe
        only because it is never mutated here.
        """
        myDB = self._getDB()
        parse_result = None

        # if we don't have complete info then parse the filename to get it
        # NOTE(review): the loop has no break on success, so the LAST
        # parseable name wins, not the first — confirm this is intended.
        for curName in [name] + extraNames:
            try:
                myParser = NameParser()
                parse_result = myParser.parse(curName)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG)
                continue

        if not parse_result:
            logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
            return None

        if not parse_result.series_name:
            logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
            return None

        indexer_lang = None

        if indexer_id:
            # if we have only the indexer_id, use the database
            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
            if showObj:
                self.indexer = int(showObj.indexer)
                indexer_lang = showObj.lang
            else:
                logger.log(u"We were given a Indexer ID " + str(indexer_id) + " but it doesn't match a show we have in our list, so leaving indexer_id empty", logger.DEBUG)
                indexer_id = 0

        # if no indexerID then fill out as much info as possible by searching the show name
        if not indexer_id:
            from_cache = False

            # check the name cache and see if we already know what show this is
            logger.log(u"Checking the cache for Indexer ID of " + parse_result.series_name, logger.DEBUG)

            # remember if the cache lookup worked or not so we know whether we should bother updating it later
            indexer_id = name_cache.retrieveNameFromCache(parse_result.series_name)
            if indexer_id:
                logger.log(u"Cache lookup found " + repr(indexer_id) + ", using that", logger.DEBUG)
                from_cache = True

            # if the cache failed, try looking up the show name in the database
            if not indexer_id:
                logger.log(u"Checking the database for Indexer ID of " + str(parse_result.series_name), logger.DEBUG)
                showResult = helpers.searchDBForShow(parse_result.series_name)
                if showResult:
                    logger.log(u"" + parse_result.series_name + " was found to be show " + showResult[2] + " (" + str(showResult[1]) + ") in our DB.", logger.DEBUG)
                    indexer_id = showResult[1]

            # if the database failed, try looking up the show name from scene exceptions list
            if not indexer_id:
                logger.log(u"Checking the scene exceptions list for Indexer ID of " + parse_result.series_name, logger.DEBUG)
                sceneResult = sickbeard.scene_exceptions.get_scene_exception_by_name(parse_result.series_name)
                if sceneResult:
                    logger.log(u"" + str(parse_result.series_name) + " was found in scene exceptions list with Indexer ID: " + str(sceneResult), logger.DEBUG)
                    indexer_id = sceneResult

            # if the DB lookup fails then do a comprehensive regex search
            if not indexer_id:
                logger.log(u"Checking the shows list for Indexer ID of " + str(parse_result.series_name), logger.DEBUG)
                for curShow in sickbeard.showList:
                    if show_name_helpers.isGoodResult(name, curShow, False):
                        logger.log(u"Successfully matched " + name + " to " + curShow.name + " from shows list", logger.DEBUG)
                        indexer_id = curShow.indexerid
                        indexer_lang = curShow.lang
                        break

            # if the database failed, try looking up the show name from scene exceptions list
            if not indexer_id:
                logger.log(u"Checking Indexers for Indexer ID of " + parse_result.series_name, logger.DEBUG)

                # check indexers
                # NOTE(review): bare except below swallows everything,
                # including KeyboardInterrupt.
                try:
                    indexerResult = helpers.searchIndexerForShowID(parse_result.series_name)
                except:
                    indexerResult = None

                if indexerResult:
                    logger.log(u"" + str(parse_result.series_name) + " was found on " + str(sickbeard.indexerApi(indexerResult[0]).name) + " with Indexer ID: " + str(indexerResult[1]), logger.DEBUG)
                    indexer_id = indexerResult[1]

            # if indexer_id was anything but None (0 or a number) then
            if not from_cache:
                name_cache.addNameToCache(parse_result.series_name, indexer_id)

        # if we came out with indexer_id = None it means we couldn't figure it out at all, just use 0 for that
        if indexer_id == None:
            indexer_id = 0

        # if we found the show then retrieve the show object
        if indexer_id:
            try:
                showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
            except (MultipleShowObjectsException):
                showObj = None
            if showObj:
                self.indexer = int(showObj.indexer)
                indexer_lang = showObj.lang

        # if we weren't provided with season/episode information then get it from the name that we parsed
        if not season:
            season = parse_result.season_number if parse_result.season_number != None else 1

        if not episodes:
            episodes = parse_result.episode_numbers

        # if we have an air-by-date show then get the real season/episode numbers
        if (parse_result.air_by_date or parse_result.sports) and indexer_id:
            try:
                lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()

                if not (indexer_lang == "" or indexer_lang == "en" or indexer_lang == None):
                    lINDEXER_API_PARMS['language'] = indexer_lang

                t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

                epObj = None
                if parse_result.air_by_date:
                    epObj = t[indexer_id].airedOn(parse_result.air_date)[0]
                elif parse_result.sports:
                    epObj = t[indexer_id].airedOn(parse_result.sports_date)[0]

                if epObj is None:
                    return None

                season = int(epObj["seasonnumber"])
                episodes = [int(epObj["episodenumber"])]
            except sickbeard.indexer_episodenotfound:
                logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                return None
            except sickbeard.indexer_error, e:
                logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e), logger.WARNING)
                return None

        # store episodes as a pipe-separated string, e.g. "|1|2|"
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        if not quality:
            quality = Quality.sceneQuality(name)

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8')

        myDB.action(
            "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
            [name, season, episodeText, indexer_id, url, curTimestamp, quality])
class TVCache():
    """Per-provider RSS result cache (XML/ElementTree variant: parses the raw
    feed with helpers.parse_xml and tracks both TVDB and TVRage ids)."""

    def __init__(self, provider):
        self.provider = provider
        self.providerID = self.provider.getID()
        # minimum minutes between feed refreshes (see shouldUpdate)
        self.minTime = 10

    def _getDB(self):
        # One connection per call; table name == providerID.
        return CacheDBConnection(self.providerID)

    def _clearCache(self):
        # Drop every cached row for this provider before re-populating.
        myDB = self._getDB()
        myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")

    def _getRSSData(self):
        # Stub: subclasses are expected to override and return raw feed text.
        data = None
        return data

    def _checkAuth(self, parsedXML):
        # Stub: subclasses override to validate provider credentials.
        return True

    def _checkItemAuth(self, title, url):
        # Stub: subclasses override to validate a single feed item.
        return True

    def updateCache(self):
        """Fetch the provider feed, parse it as RSS XML and rebuild the
        cache table. Returns [] in every path; the work is the DB side effect.
        """
        if not self.shouldUpdate():
            return

        if self._checkAuth(None):
            data = self._getRSSData()

            # as long as the http request worked we count this as an update
            if data:
                self.setLastUpdate()
            else:
                return []

            # now that we've loaded the current RSS feed lets delete the old cache
            logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
            self._clearCache()

            parsedXML = helpers.parse_xml(data)

            if parsedXML is None:
                logger.log(u"Error trying to load " + self.provider.name + " RSS feed", logger.ERROR)
                return []

            if self._checkAuth(parsedXML):
                # only the <rss> root is accepted; anything else is rejected
                if parsedXML.tag == 'rss':
                    items = parsedXML.findall('.//item')
                else:
                    logger.log(u"Resulting XML from " + self.provider.name + " isn't RSS, not parsing it", logger.ERROR)
                    return []

                ql = []
                for item in items:
                    # _parseItem returns an SQL (query, args) pair or None
                    ci = self._parseItem(item)
                    if ci is not None:
                        ql.append(ci)

                # batch all inserts into a single transaction
                myDB = self._getDB()
                myDB.mass_action(ql)
            else:
                raise AuthException(u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

        return []

    def _translateTitle(self, title):
        # Normalize feed titles to scene-style dotted names.
        return title.replace(' ', '.')

    def _translateLinkURL(self, url):
        # NOTE(review): this replace is a no-op as written — presumably it was
        # originally unescaping '&amp;' and was mangled somewhere; confirm
        # against upstream before changing.
        return url.replace('&', '&')

    def _parseItem(self, item):
        # Turn one <item> element into a cache insert; returns None if unusable.
        title = helpers.get_xml_text(item.find('title'))
        url = helpers.get_xml_text(item.find('link'))

        self._checkItemAuth(title, url)

        if title and url:
            title = self._translateTitle(title)
            url = self._translateLinkURL(url)

            logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
            return self._addCacheEntry(title, url)
        else:
            logger.log(
                u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
                logger.DEBUG)
            return None

    def _getLastUpdate(self):
        # Read the last refresh time; missing rows are coerced to epoch 0.
        myDB = self._getDB()
        sqlResults = myDB.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID])

        if sqlResults:
            lastTime = int(sqlResults[0]["time"])
        else:
            lastTime = 0

        return datetime.datetime.fromtimestamp(lastTime)

    def setLastUpdate(self, toDate=None):
        # Record a refresh time (defaults to now).
        if not toDate:
            toDate = datetime.datetime.today()

        myDB = self._getDB()
        myDB.upsert("lastUpdate",
                    {'time': int(time.mktime(toDate.timetuple()))},
                    {'provider': self.providerID})

    lastUpdate = property(_getLastUpdate)

    def shouldUpdate(self):
        # if we've updated recently then skip the update
        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
            logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
            return False

        return True

    def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]):
        """Parse a release name, resolve it to tvdb/tvrage ids and return the
        SQL (query, args) pair for caching it, or None on failure.

        NOTE(review): `extraNames=[]` is a mutable default argument — safe
        only because it is never mutated here. `from_cache` is only assigned
        in the both-ids-empty branch; the `if not from_cache` use below
        relies on that branch having run.
        """
        myDB = self._getDB()
        parse_result = None

        # if we don't have complete info then parse the filename to get it
        # NOTE(review): no break on success — the LAST parseable name wins.
        for curName in [name] + extraNames:
            try:
                myParser = NameParser()
                parse_result = myParser.parse(curName)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG)
                continue

        if not parse_result:
            logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
            return None

        if not parse_result.series_name:
            logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
            return None

        tvdb_lang = None

        # if we need tvdb_id or tvrage_id then search the DB for them
        if not tvdb_id or not tvrage_id:

            # if we have only the tvdb_id, use the database
            if tvdb_id:
                showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
                if showObj:
                    tvrage_id = showObj.tvrid
                    tvdb_lang = showObj.lang
                else:
                    logger.log(u"We were given a TVDB id " + str(tvdb_id) + " but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG)
                    tvrage_id = 0

            # if we have only a tvrage_id then use the database
            elif tvrage_id:
                showObj = helpers.findCertainTVRageShow(sickbeard.showList, tvrage_id)
                if showObj:
                    tvdb_id = showObj.tvdbid
                    tvdb_lang = showObj.lang
                else:
                    logger.log(u"We were given a TVRage id " + str(tvrage_id) + " but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG)
                    tvdb_id = 0

            # if they're both empty then fill out as much info as possible by searching the show name
            else:

                # check the name cache and see if we already know what show this is
                logger.log(u"Checking the cache to see if we already know the tvdb id of " + parse_result.series_name, logger.DEBUG)
                tvdb_id = name_cache.retrieveNameFromCache(parse_result.series_name)

                # remember if the cache lookup worked or not so we know whether we should bother updating it later
                if tvdb_id == None:
                    logger.log(u"No cache results returned, continuing on with the search", logger.DEBUG)
                    from_cache = False
                else:
                    logger.log(u"Cache lookup found " + repr(tvdb_id) + ", using that", logger.DEBUG)
                    from_cache = True

                # if the cache failed, try looking up the show name in the database
                if tvdb_id == None:
                    logger.log(u"Trying to look the show up in the show database", logger.DEBUG)
                    showResult = helpers.searchDBForShow(parse_result.series_name)
                    if showResult:
                        logger.log(parse_result.series_name + " was found to be show " + showResult[1] + " (" + str(showResult[0]) + ") in our DB.", logger.DEBUG)
                        tvdb_id = showResult[0]

                # if the DB lookup fails then do a comprehensive regex search
                if tvdb_id == None:
                    logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG)
                    for curShow in sickbeard.showList:
                        if show_name_helpers.isGoodResult(name, curShow, False):
                            logger.log(u"Successfully matched " + name + " to " + curShow.name + " with regex", logger.DEBUG)
                            tvdb_id = curShow.tvdbid
                            tvdb_lang = curShow.lang
                            break

                # if tvdb_id was anything but None (0 or a number) then
                if not from_cache:
                    name_cache.addNameToCache(parse_result.series_name, tvdb_id)

        # if we came out with tvdb_id = None it means we couldn't figure it out at all, just use 0 for that
        if tvdb_id == None:
            tvdb_id = 0

        # if we found the show then retrieve the show object
        if tvdb_id:
            showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
            if showObj:
                tvrage_id = showObj.tvrid
                tvdb_lang = showObj.lang

        # if we weren't provided with season/episode information then get it from the name that we parsed
        if not season:
            season = parse_result.season_number if parse_result.season_number != None else 1

        if not episodes:
            episodes = parse_result.episode_numbers

        # if we have an air-by-date show then get the real season/episode numbers
        if parse_result.air_by_date and tvdb_id:
            try:
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                if not (tvdb_lang == "" or tvdb_lang == "en" or tvdb_lang == None):
                    ltvdb_api_parms['language'] = tvdb_lang

                t = tvdb_api.Tvdb(**ltvdb_api_parms)

                epObj = t[tvdb_id].airedOn(parse_result.air_date)[0]
                season = int(epObj["seasonnumber"])
                episodes = [int(epObj["episodenumber"])]
            except tvdb_exceptions.tvdb_episodenotfound:
                logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                return None
            except tvdb_exceptions.tvdb_error, e:
                logger.log(u"Unable to contact TVDB: " + ex(e), logger.WARNING)
                return None

        # store episodes as a pipe-separated string, e.g. "|1|2|"
        episodeText = "|" + "|".join(map(str, episodes)) + "|"

        # get the current timestamp
        curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        if not quality:
            quality = Quality.sceneQuality(name)

        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8')

        # returned as a (query, args) pair for mass_action in updateCache
        return [
            "INSERT INTO [" + self.providerID + "] (name, season, episodes, tvrid, tvdbid, url, time, quality) VALUES (?,?,?,?,?,?,?,?)",
            [name, season, episodeText, tvrage_id, tvdb_id, url, curTimestamp, quality]]
def _addCacheEntry(self, name, url, quality=None): indexerid = None in_cache = False # if we don't have complete info then parse the filename to get it try: myParser = NameParser() parse_result = myParser.parse(name) except InvalidNameException: logger.log( u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG) return None if not parse_result: logger.log( u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return None if not parse_result.series_name: logger.log( u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return None cacheResult = sickbeard.name_cache.retrieveNameFromCache( parse_result.series_name) if cacheResult: in_cache = True indexerid = int(cacheResult) elif cacheResult == 0: return None if not indexerid: showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: indexerid = int(showResult[0]) if not indexerid: for curShow in sickbeard.showList: if show_name_helpers.isGoodResult(name, curShow, False): indexerid = curShow.indexerid break showObj = None if indexerid: showObj = helpers.findCertainShow(sickbeard.showList, indexerid) if not showObj: logger.log( u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG) sickbeard.name_cache.addNameToCache(parse_result.series_name, 0) return None # scene -> indexer numbering parse_result = parse_result.convert(showObj) season = episodes = None if parse_result.air_by_date or parse_result.sports: myDB = db.DBConnection() airdate = parse_result.air_date.toordinal( ) or parse_result.sports_event_date.toordinal() sql_results = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? 
AND airdate = ?", [indexerid, showObj.indexer, airdate]) if sql_results > 0: season = int(sql_results[0]["season"]) episodes = [int(sql_results[0]["episode"])] else: season = parse_result.season_number if parse_result.season_number != None else 1 episodes = parse_result.episode_numbers if season and episodes: # store episodes as a seperated string episodeText = "|" + "|".join(map(str, episodes)) + "|" # get the current timestamp curTimestamp = int( time.mktime(datetime.datetime.today().timetuple())) # get quality of release if quality is None: quality = Quality.sceneQuality(name) if not isinstance(name, unicode): name = unicode(name, 'utf-8') logger.log( u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) if not in_cache: sickbeard.name_cache.addNameToCache(parse_result.series_name, indexerid) return [ "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", [ name, season, episodeText, indexerid, url, curTimestamp, quality ] ]
    def _find_season_quality(self, title, torrent_link, ep_number):
        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
        quality = Quality.UNKNOWN
        file_name = None

        data = self.get_url(torrent_link)
        if not data:
            return None

        try:
            with BS4Parser(data, features=['html5lib', 'permissive']) as soup:
                file_table = soup.find('table', attrs={'class': 'torrentFileList'})

                if not file_table:
                    return None

                # file names listed on the torrent detail page
                files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
                # keep only entries whose extension is a known media extension
                # NOTE(review): mediaExtensions is not defined in this method —
                # presumably a module-level list; confirm it exists at import.
                video_files = filter(lambda i: i.rpartition('.')[2].lower() in mediaExtensions, files)

                # Filtering SingleEpisode/MultiSeason Torrent
                # accept only torrents holding between ep_number and ~110% of it
                if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
                    logger.log(u'Result %s lists %s episodes with %s episodes retrieved in torrent'
                               % (title, ep_number, len(video_files)), logger.DEBUG)
                    logger.log(u'Result %s seem to be a single episode or multi-season torrent, skipping result...'
                               % title, logger.DEBUG)
                    return None

                # if the title itself already parses to a quality, keep it as-is
                if Quality.UNKNOWN != Quality.sceneQuality(title):
                    return title

                # otherwise derive quality from the first file that yields one
                for file_name in video_files:
                    quality = Quality.sceneQuality(os.path.basename(file_name))
                    if Quality.UNKNOWN != quality:
                        break

                # fall back to an assumed quality based on the last file seen
                if None is not file_name and Quality.UNKNOWN == quality:
                    quality = Quality.assumeQuality(os.path.basename(file_name))

                if Quality.UNKNOWN == quality:
                    logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
                    return None

                try:
                    my_parser = NameParser(showObj=self.show)
                    parse_result = my_parser.parse(file_name)
                except (InvalidNameException, InvalidShowException):
                    return None

                logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

                # rebuild the title as "<Series> S<nn> <quality>"
                if parse_result.series_name and parse_result.season_number:
                    title = parse_result.series_name + ' S%02d %s' % (int(parse_result.season_number),
                                                                      self._reverse_quality(quality))
                return title

        except Exception:
            # broad catch: any scraping/parsing failure is logged, and the
            # method implicitly returns None
            logger.log(u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
def _addCacheEntry(self, name, url, quality=None): indexerid = None in_cache = False # if we don't have complete info then parse the filename to get it try: myParser = NameParser() parse_result = myParser.parse(name).convert() except InvalidNameException: logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG) return None if not parse_result: logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return None if not parse_result.series_name: logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return None cacheResult = sickbeard.name_cache.retrieveNameFromCache(parse_result.series_name) if cacheResult: in_cache = True indexerid = int(cacheResult) if not indexerid: showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: indexerid = int(showResult[0]) showObj = None if indexerid: showObj = helpers.findCertainShow(sickbeard.showList, indexerid) if not showObj: logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG) return None season = episodes = None if parse_result.air_by_date or parse_result.sports: myDB = db.DBConnection() airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal() sql_results = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? 
AND airdate = ?", [indexerid, showObj.indexer, airdate]) if sql_results > 0: season = int(sql_results[0]["season"]) episodes = [int(sql_results[0]["episode"])] else: season = parse_result.season_number if parse_result.season_number != None else 1 episodes = parse_result.episode_numbers if season and episodes: # store episodes as a seperated string episodeText = "|" + "|".join(map(str, episodes)) + "|" # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) # get quality of release if quality is None: quality = Quality.sceneQuality(name) if not isinstance(name, unicode): name = unicode(name, 'utf-8') logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) if not in_cache: sickbeard.name_cache.addNameToCache(parse_result.series_name, indexerid) return [ "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", [name, season, episodeText, indexerid, url, curTimestamp, quality]]
def _find_season_quality(self, title, torrent_id, ep_number): """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts' 'ogv', 'rar', 'zip', 'mp4'] quality = Quality.UNKNOWN fileName = None fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)) if self.proxy and self.proxy.isEnabled(): self.headers.update({'referer': self.proxy.getProxyURL()}) data = self.getURL(fileURL) if not data: return None filesList = re.findall('<td.+>(.*?)</td>', data) if not filesList: logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR) videoFiles = filter(lambda x: x.rpartition('.')[2].lower() in mediaExtensions, filesList) # Filtering SingleEpisode/MultiSeason Torrent if ep_number > len(videoFiles) or float(ep_number * 1.1) < len(videoFiles): logger.log(u'Result %s has episode %s and total episodes retrieved in torrent are %s' % (title, str(ep_number), str(len(videoFiles))), logger.DEBUG) logger.log(u'Result %s seems to be a single episode or multiseason torrent, skipping result...' 
% title, logger.DEBUG) return None if Quality.UNKNOWN != Quality.sceneQuality(title): return title for fileName in videoFiles: quality = Quality.sceneQuality(os.path.basename(fileName)) if Quality.UNKNOWN != quality: break if None is not fileName and Quality.UNKNOWN == quality: quality = Quality.assumeQuality(os.path.basename(fileName)) if Quality.UNKNOWN == quality: logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG) return None try: myParser = NameParser(showObj=self.show) parse_result = myParser.parse(fileName) except (InvalidNameException, InvalidShowException): return None logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG) if parse_result.series_name and parse_result.season_number: title = '%s S%02d %s' % (parse_result.series_name, int(parse_result.season_number), self._reverseQuality(quality)) return title
    def _find_season_quality(self, title, torrent_id, ep_number):
        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
        quality = Quality.UNKNOWN
        file_name = None
        data = None
        has_signature = False
        details_url = '/ajax_details_filelist.php?id=%s' % torrent_id

        # probe each configured mirror until one returns a page whose <title>
        # identifies it as The Pirate Bay (guards against blocked/parked hosts)
        for idx, url in enumerate(self.urls['config_provider_home_uri']):
            data = self.get_url(url + details_url)
            if data and re.search(r'<title>The\sPirate\sBay', data[33:200:]):
                has_signature = True
                break
            else:
                data = None

        if not has_signature:
            logger.log(u'Failed to identify a page from ThePirateBay at %s attempted urls (tpb blocked? general network issue or site dead)' % len(self.urls['config_provider_home_uri']), logger.ERROR)

        if not data:
            return None

        files_list = re.findall('<td.+>(.*?)</td>', data)

        if not files_list:
            # NOTE(review): no return here — execution continues with an empty
            # list; the length check below then rejects the result anyway for
            # ep_number > 0. Confirm intent before adding an early return.
            logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR)

        # NOTE(review): mediaExtensions is not defined in this method —
        # presumably a module-level list; confirm it exists at import.
        video_files = filter(lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list)

        # Filtering SingleEpisode/MultiSeason Torrent:
        # accept only torrents holding between ep_number and ~110% of it
        if ep_number > len(video_files) or float(ep_number * 1.1) < len(video_files):
            logger.log(u'Result %s has episode %s and total episodes retrieved in torrent are %s'
                       % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
            logger.log(u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
                       % title, logger.DEBUG)
            return None

        # if the title itself already parses to a quality, keep it as-is
        if Quality.UNKNOWN != Quality.sceneQuality(title):
            return title

        # otherwise derive quality from the first file that yields one
        for file_name in video_files:
            quality = Quality.sceneQuality(os.path.basename(file_name))
            if Quality.UNKNOWN != quality:
                break

        # fall back to an assumed quality based on the last file seen
        if None is not file_name and Quality.UNKNOWN == quality:
            quality = Quality.assumeQuality(os.path.basename(file_name))

        if Quality.UNKNOWN == quality:
            logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
            return None

        try:
            my_parser = NameParser(showObj=self.show)
            parse_result = my_parser.parse(file_name)
        except (InvalidNameException, InvalidShowException):
            return None

        logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

        # rebuild the title as "<Series> S<nn> <quality>"
        if parse_result.series_name and parse_result.season_number:
            title = '%s S%02d %s' % (parse_result.series_name,
                                     int(parse_result.season_number),
                                     self._reverse_quality(quality))
        return title
    def _getProperList(self):
        """Collect PROPER/REPACK releases from all active providers, match
        them to shows we track and return the list of Proper objects whose
        quality equals an episode we already downloaded/snatched."""
        propers = {}

        # for each provider get a list of the propers
        for curProvider in providers.sortedProviderList():

            if not curProvider.isActive():
                continue

            # only look back two days for new propers
            date = datetime.datetime.today() - datetime.timedelta(days=2)

            logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
            curPropers = curProvider.findPropers(date)

            # if they haven't been added by a different provider than add the proper to the list
            for x in curPropers:
                name = self._genericName(x.name)

                if not name in propers:
                    logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x

        # take the list of unique propers and get it sorted by date (newest first)
        sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)

        finalPropers = []

        for curProper in sortedPropers:

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
                continue

            if not parse_result.episode_numbers:
                logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
                continue

            # populate our Proper instance
            # air-by-date shows use season == -1 and the airdate as "episode"
            # until resolved via TVDB further down
            if parse_result.air_by_date:
                curProper.season = -1
                curProper.episode = parse_result.air_date
            else:
                curProper.season = parse_result.season_number if parse_result.season_number != None else 1
                curProper.episode = parse_result.episode_numbers[0]

            curProper.quality = Quality.sceneQuality(curProper.name)

            # for each show in our list
            for curShow in sickbeard.showList:

                if not parse_result.series_name:
                    continue

                genericName = self._genericName(parse_result.series_name)

                # get the scene name masks
                sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow))

                # for each scene name mask
                for curSceneName in sceneNames:

                    # if it matches
                    if genericName == self._genericName(curSceneName):
                        logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)

                        # set the tvdbid in the db to the show's tvdbid
                        curProper.tvdbid = curShow.tvdbid

                        # since we found it, break out
                        break

                # if we found something in the inner for loop break out of this one
                if curProper.tvdbid != -1:
                    break

            if curProper.tvdbid == -1:
                continue

            if not show_name_helpers.filterBadReleases(curProper.name):
                logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG)
                continue

            # if we have an air-by-date show then get the real season/episode numbers
            if curProper.season == -1 and curProper.tvdbid:

                showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
                if not showObj:
                    logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                    raise Exception("BAD STUFF HAPPENED")

                tvdb_lang = showObj.lang
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                if tvdb_lang and not tvdb_lang == 'en':
                    ltvdb_api_parms['language'] = tvdb_lang

                try:
                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
                    epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                    curProper.season = int(epObj["seasonnumber"])
                    # NOTE(review): assigns `episodes` (plural) here while the
                    # rest of the method reads `episode` — confirm upstream.
                    curProper.episodes = [int(epObj["episodenumber"])]
                except tvdb_exceptions.tvdb_episodenotfound:
                    logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                    continue

            # check if we actually want this proper (if it's the right quality)
            sqlResults = db.DBConnection().select(
                "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [curProper.tvdbid, curProper.season, curProper.episode])
            if not sqlResults:
                continue

            oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
                continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(
                    operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
                logger.log(u"Found a proper that we need: " + str(curProper.name))
                finalPropers.append(curProper)

        return finalPropers
def get_quality(self, item, anime=False):
    """Return the scene quality parsed from this result's title."""
    title = self._get_title_and_url(item)[0]
    return Quality.sceneQuality(title, anime)
def _addCacheEntry(self, name, url, quality=None):
    """Parse a release name and insert it into this provider's cache table.

    name -- release name to parse and cache
    url -- download URL stored alongside the entry
    quality -- optional pre-determined quality; parsed from the name when None

    Returns None in every path (the work is the INSERT side effect).
    """
    cacheDB = self._getDB()
    season = None
    episodes = None

    # if we don't have complete info then parse the filename to get it
    try:
        myParser = NameParser(0)
        parse_result = myParser.parse(name).convert()
    except InvalidNameException:
        logger.log(
            u"Unable to parse the filename " + name + " into a valid episode",
            logger.DEBUG)
        return None

    if not parse_result:
        logger.log(
            u"Giving up because I'm unable to parse this name: " + name,
            logger.DEBUG)
        return None

    if not parse_result.series_name:
        logger.log(
            u"No series name retrieved from " + name + ", unable to cache it",
            logger.DEBUG)
        return None

    if not parse_result.show:
        logger.log(
            u"Couldn't find a show in our databases matching " + name +
            ", unable to cache it", logger.DEBUG)
        return None

    try:
        myDB = db.DBConnection()
        if parse_result.show.air_by_date:
            # sports shows key their episodes on the event date instead of air date
            airdate = parse_result.sports_event_date.toordinal(
            ) if parse_result.show.sports else parse_result.air_date.toordinal()
            sql_results = myDB.select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                [parse_result.show.indexerid, airdate])
            # BUGFIX: was `if sql_results > 0:` which compares a list with an
            # int (always True in Python 2) and crashed on empty result sets
            if sql_results:
                season = int(sql_results[0]["season"])
                episodes = [int(sql_results[0]["episode"])]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a seperated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(
                time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            if quality is None:
                quality = Quality.sceneQuality(name)

            if not isinstance(name, unicode):
                name = unicode(name, 'utf-8')

            cacheDB.action(
                "INSERT INTO [" + self.providerID +
                "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                [
                    name, season, episodeText, parse_result.show.indexerid,
                    url, curTimestamp, quality
                ])
    except Exception:
        # best-effort cache insert: any DB failure is silently ignored,
        # matching the original behaviour (narrowed from a bare except)
        return
def getQuality(self, item, anime=False):
    """Return the quality parsed from the first element (title) of *item*."""
    title = item[0]
    return Quality.sceneQuality(title, anime)
def getQuality(self, item, anime=False):
    """Return the quality parsed from the result's file name."""
    return Quality.sceneQuality(item.filename, anime)
def getQuality(self, item, anime=False):
    """Return the quality parsed from the item's 'title' entry."""
    return Quality.sceneQuality(item.get('title'), anime)
def getQuality(self, item, anime=False):
    """Return the quality parsed from the item's XML <title> element.

    Note: *anime* is accepted for interface compatibility but not forwarded,
    matching the original behaviour.
    """
    self.debug()
    title_node = item.getElementsByTagName('title')[0]
    title = helpers.get_xml_text(title_node).replace("/", " ")
    return Quality.sceneQuality(title)
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting torrent file list
    """
    # BUGFIX: the original list fused 'ts' and 'ogv' into a single 'tsogv'
    # entry through implicit string concatenation (missing comma), so .ts
    # and .ogv files were never recognised as media files
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.url + 'torrent/' + str(torrent_id)
    data = self.getURL(fileURL)
    if not data:
        return None

    try:
        with BS4Parser(data, features=["html5lib", "permissive"]) as soup:
            files_tbody = soup.find('div', attrs={'class': 'description-files'}).find('tbody')
            if not files_tbody:
                return None

            # second cell of each row holds the file name
            files = []
            for row in files_tbody.find_all('tr'):
                files.append(row.find_all('td')[1].text)

            videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

            # Filtering SingleEpisode/MultiSeason Torrent: accept only torrents
            # holding between ep_number and ep_number * 1.1 video files
            if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
                logger.log(u"Result " + title + " have " + str(
                    ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)),
                    logger.DEBUG)
                logger.log(
                    u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                    logger.DEBUG)
                return None

            # title already carries a recognisable quality: nothing to do
            if Quality.sceneQuality(title) != Quality.UNKNOWN:
                return title

            for fileName in videoFiles:
                quality = Quality.sceneQuality(os.path.basename(fileName))
                if quality != Quality.UNKNOWN:
                    break

            if fileName is not None and quality == Quality.UNKNOWN:
                quality = Quality.assumeQuality(os.path.basename(fileName))

            if quality == Quality.UNKNOWN:
                logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
                return None

            try:
                myParser = NameParser(showObj=self.show)
                parse_result = myParser.parse(fileName)
            except (InvalidNameException, InvalidShowException):
                return None

            logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

            if parse_result.series_name and parse_result.season_number:
                title = parse_result.series_name + ' S%02d' % int(
                    parse_result.season_number) + ' ' + self._reverseQuality(quality)

            return title
    except Exception:
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
def getQuality(self, item):
    """Return the quality parsed from the first element (title) of *item*."""
    title = item[0]
    return Quality.sceneQuality(title)
def getQuality(self, item, anime=False):
    """Return the scene quality of the result title (first tuple element)."""
    return Quality.sceneQuality(item[0], anime)
def getQuality(self, item):
    """Return the quality parsed from this result's title.

    The URL half of the (title, url) tuple is not needed here; the original
    also pre-assigned a dead `quality = None` that was removed.
    """
    (title, _) = self._get_title_and_url(item)
    return Quality.sceneQuality(title)
def getQuality(self, item, anime=False):
    """Return the scene quality of the item's 'title' value."""
    title = item.get('title')
    return Quality.sceneQuality(title, anime)
def getQuality(self, item, anime=False):
    """Return the quality parsed from the item's title attribute.

    Note: *anime* is accepted for interface compatibility but not forwarded,
    matching the original behaviour.
    """
    self.debug()
    return Quality.sceneQuality(item.title)
def getQuality(self, item):
    """Return the quality parsed from the item's first enclosure URL."""
    enclosure = item.getElementsByTagName('enclosure')[0]
    return Quality.sceneQuality(enclosure.getAttribute('url'))
def _find_season_quality(self, title, torrent_id, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting torrent file list
    """
    # BUGFIX: the original list fused 'ts' and 'ogv' into a single 'tsogv'
    # entry through implicit string concatenation (missing comma), so .ts
    # and .ogv files were never recognised as media files
    mediaExtensions = [
        'avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv',
        'ts', 'ogv', 'rar', 'zip', 'mp4'
    ]

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)

    if not filesList:
        # disabled errormsg for now
        # logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
        return None

    videoFiles = filter(
        lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

    # Filtering SingleEpisode/MultiSeason Torrent: accept only torrents
    # holding between ep_number and ep_number * 1.1 video files
    if len(videoFiles) < ep_number or len(videoFiles) > float(
            ep_number * 1.1):
        logger.log(
            u"Result " + title + " have " + str(ep_number) +
            " episode and episodes retrived in torrent are " +
            str(len(videoFiles)), logger.DEBUG)
        logger.log(
            u"Result " + title +
            " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
            logger.DEBUG)
        return None

    # title already carries a recognisable quality: nothing to do
    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title

    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    # NOTE: unlike the sibling implementations, this variant deliberately
    # does not fall back to Quality.assumeQuality (it was commented out)
    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title,
                   logger.DEBUG)
        return None

    try:
        myParser = NameParser(showObj=self.show)
        parse_result = myParser.parse(fileName)
    except (InvalidNameException, InvalidShowException):
        return None

    logger.log(
        u"Season quality for " + title + " is " +
        Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(
            parse_result.season_number) + ' ' + self._reverseQuality(
                quality)

    return title
def getQuality(self, item):
    """Return the quality parsed from the first enclosure's href."""
    href = item.enclosures[0].href
    return Quality.sceneQuality(href)
def _addCacheEntry(self, name, url):
    """Parse a release name, resolve its show, and insert it into the cache.

    name -- release name to parse
    url -- download URL stored alongside the entry

    Returns None in every path (the work is the INSERT side effect).
    """
    cacheDB = self._getDB()
    parse_result = None
    from_cache = False
    indexer_id = None

    # if we don't have complete info then parse the filename to get it
    # (while-loop used as a labelled break: first hit on the name cache or
    # the database wins)
    while True:
        try:
            myParser = NameParser()
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode",
                       logger.DEBUG)
            return None

        if not parse_result:
            logger.log(u"Giving up because I'm unable to parse this name: " + name,
                       logger.DEBUG)
            return None

        if not parse_result.series_name:
            logger.log(u"No series name retrieved from " + name + ", unable to cache it",
                       logger.DEBUG)
            return None

        logger.log(
            u"Checking the cache for show:" + str(parse_result.series_name),
            logger.DEBUG)

        # remember if the cache lookup worked or not so we know whether we should bother updating it later
        cache_id = name_cache.retrieveNameFromCache(parse_result.series_name)
        if cache_id:
            # BUGFIX: original logged repr(indexer_id), which is still None
            # here -- log the id we actually found
            logger.log(u"Cache lookup found Indexer ID:" + repr(cache_id) +
                       ", using that for " + parse_result.series_name, logger.DEBUG)
            from_cache = True
            indexer_id = cache_id
            break

        # if the cache failed, try looking up the show name in the database
        logger.log(
            u"Checking the database for show:" + str(parse_result.series_name),
            logger.DEBUG)

        showResult = helpers.searchDBForShow(parse_result.series_name)
        if showResult:
            logger.log(
                u"Database lookup found Indexer ID:" + str(showResult[1]) +
                ", using that for " + parse_result.series_name, logger.DEBUG)
            indexer_id = showResult[1]

        break

    # if we didn't find a Indexer ID return None
    if not indexer_id:
        return None

    # if the show isn't in our database then return None
    try:
        showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
    except Exception:
        return None

    if not showObj:
        return None

    # if we weren't provided with season/episode information then get it from the name that we parsed
    season = None
    episodes = None
    myDB = db.DBConnection()

    if parse_result.air_by_date:
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
            [showObj.indexerid, parse_result.air_date.toordinal()])
        # BUGFIX: was `if sql_results > 0:` -- list-vs-int compare is always
        # True in Python 2 and crashed on empty result sets
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    elif parse_result.sports:
        sql_results = myDB.select(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
            [showObj.indexerid, parse_result.sports_date.toordinal()])
        if sql_results:
            season = int(sql_results[0]["season"])
            episodes = [int(sql_results[0]["episode"])]
    else:
        season = parse_result.season_number
        episodes = parse_result.episode_numbers

    if not (season and episodes):
        return None

    # convert scene numbered releases before storing to cache
    convertedEps = {}
    for curEp in episodes:
        epObj = showObj.getEpisode(season, curEp, sceneConvert=True)
        if not epObj:
            return None
        if not epObj.season in convertedEps:
            convertedEps[epObj.season] = []
        convertedEps[epObj.season].append(epObj.episode)

    # get the current timestamp
    curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

    # get quality of release
    quality = Quality.sceneQuality(name)

    if not isinstance(name, unicode):
        name = unicode(name, 'utf-8')

    for season, episodes in convertedEps.items():
        episodeText = "|" + "|".join(map(str, episodes)) + "|"
        cacheDB.action(
            "INSERT INTO [" + self.providerID +
            "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
            [name, season, episodeText, indexer_id, url, curTimestamp, quality])
def getQuality(self, item, anime=False):
    """Return the scene quality detected from the result's file name."""
    name = item.filename
    return Quality.sceneQuality(name, anime)
def _doSearch(self, search_params):
    """Search KAT for every search string in *search_params*.

    search_params -- dict keyed by mode ('Season', 'Episode', 'RSS'), each
                     holding a list of search strings

    Returns a flat list of (title, url, id, seeders, leechers) tuples,
    sorted by seeders (descending) within each mode.
    """
    results = []
    items = {'Season': [], 'Episode': [], 'RSS': []}

    for mode in search_params.keys():
        for search_string in search_params[mode]:
            if mode != 'RSS':
                searchURL = self.searchurl % (urllib.quote(unidecode(search_string)))
                logger.log(u"Search string: " + searchURL, logger.DEBUG)
            else:
                searchURL = self.url + 'tv/?field=time_add&sorder=desc'
                logger.log(u"KAT cache update URL: " + searchURL, logger.DEBUG)

            html = self.getURL(searchURL)
            if not html:
                continue

            try:
                soup = BeautifulSoup(html, features=["html5lib", "permissive"])

                torrent_table = soup.find('table', attrs={'class': 'data'})
                torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                if not torrent_rows:
                    # logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.ERROR)
                    continue

                # first row is the column header
                for tr in torrent_rows[1:]:
                    link = self.url + (tr.find('div', {'class': 'torrentname'}).find_all('a')[1])['href']
                    # renamed from `id` to avoid shadowing the builtin
                    torrent_id = tr.get('id')[-7:]
                    title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text
                    url = tr.find('a', 'imagnet')['href']
                    verified = True if tr.find('a', 'iverify') else False
                    seeders = int(tr.find_all('td')[-2].text)
                    leechers = int(tr.find_all('td')[-1].text)

                    if mode != 'RSS' and seeders == 0:
                        continue

                    if sickbeard.KAT_VERIFIED and not verified:
                        logger.log(u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
                        continue

                    # season searches need a quality in the title; derive it
                    # from the torrent's file list when it is missing
                    if mode == 'Season' and Quality.sceneQuality(title) == Quality.UNKNOWN:
                        ep_number = int(len(search_params['Episode']) / len(allPossibleShowNames(self.show)))
                        title = self._find_season_quality(title, link, ep_number)

                    if not title:
                        continue

                    item = title, url, torrent_id, seeders, leechers
                    items[mode].append(item)

            except Exception as e:
                logger.log(u"Failed to parsing " + self.name + (" Exceptions: " + str(e) if e else ''), logger.ERROR)

        # For each search mode sort all the items by seeders
        items[mode].sort(key=lambda tup: tup[3], reverse=True)
        results += items[mode]

    # BUGFIX: the original never returned its accumulated results
    return results
def getQuality(self, item, anime=False):
    """Return the quality parsed from the item's title attribute.

    Note: *anime* is accepted for interface compatibility but not forwarded,
    matching the original behaviour.
    """
    return Quality.sceneQuality(item.title)
def getQuality(self, item):
    """Return the quality detected from the item's enclosure URL."""
    enclosures = item.getElementsByTagName('enclosure')
    return Quality.sceneQuality(enclosures[0].getAttribute('url'))
def getQuality(self, item, anime=False):
    """Return the scene quality of the result title (first tuple element)."""
    title = item[0]
    return Quality.sceneQuality(title, anime)
def _find_season_quality(self, title, torrent_link, ep_number):
    """
    Return the modified title of a Season Torrent with the quality found
    inspecting torrent file list
    """
    # BUGFIX: the original list fused 'ts' and 'ogv' into a single 'tsogv'
    # entry through implicit string concatenation (missing comma), so .ts
    # and .ogv files were never recognised as media files
    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})

        if not file_table:
            return None

        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent: accept only torrents
        # holding between ep_number and ep_number * 1.1 video files
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " have " + str(
                ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)),
                logger.DEBUG)
            logger.log(
                u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
                logger.DEBUG)
            return None

        # title already carries a recognisable quality: nothing to do
        if Quality.sceneQuality(title) != Quality.UNKNOWN:
            return title

        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(
                parse_result.season_number) + ' ' + self._reverseQuality(quality)

        return title
    except Exception:
        logger.log(u"Failed parsing " +
                   self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)