def test_logSnatch(self):
    myDB = db.DBConnection("../sickbeard.db")
    # res = myDB.select("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;")
    # for r in res:
    #     print r["name"]
    # print "tables-----------------------"
    searchResult = SearchResults([Episode(12345, 2, 4), Episode(54321, 1, 3)])
    history.logSnatch(searchResult)

    # check if elements were added
    res = myDB.select(
        "SELECT COUNT(*) FROM history WHERE (action=? AND date=? AND showid=? AND season=? AND episode=? AND quality=? AND resource=? AND provider=?)",
        [Quality.compositeStatus(SNATCHED, 4), datetime.datetime.today().strftime(dateFormat), 12345, 2, 4, 4, "searchName", "unknown"])
    self.assertEqual(len(res), 1)

    res = myDB.select(
        "SELECT COUNT(*) FROM history WHERE (action=? AND date=? AND showid=? AND season=? AND episode=? AND quality=? AND resource=? AND provider=?)",
        [Quality.compositeStatus(SNATCHED, 4), datetime.datetime.today().strftime(dateFormat), 54321, 1, 3, 4, "searchName", "unknown"])
    self.assertEqual(len(res), 1)

    # delete just-added elements
    myDB.action(
        "DELETE FROM history WHERE (action=? AND date=? AND showid=? AND season=? AND episode=? AND quality=? AND resource=? AND provider=?)",
        [Quality.compositeStatus(SNATCHED, 4), datetime.datetime.today().strftime(dateFormat), 12345, 2, 4, 4, "searchName", "unknown"])
    myDB.action(
        "DELETE FROM history WHERE (action=? AND date=? AND showid=? AND season=? AND episode=? AND quality=? AND resource=? AND provider=?)",
        [Quality.compositeStatus(SNATCHED, 4), datetime.datetime.today().strftime(dateFormat), 54321, 1, 3, 4, "searchName", "unknown"])
def _find_season_quality(self, title, torrent_id, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

    videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

    # Filtering SingleEpisode/MultiSeason Torrent
    if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
        logger.log(u"Result " + title + " should have " + str(ep_number) + " episodes but the torrent contains " + str(len(videoFiles)) + " video files", logger.DEBUG)
        logger.log(u"Result " + title + " seems to be a single-episode or multi-season torrent, skipping result...", logger.DEBUG)
        return None

    if Quality.sceneQuality(title) != Quality.UNKNOWN:
        return title

    for fileName in videoFiles:
        quality = Quality.sceneQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

    return title
def history_snatched_proper_fix():
    my_db = db.DBConnection()
    if not my_db.has_flag('history_snatch_proper'):
        logger.log('Updating history items with status Snatched Proper in a background process...')
        sql_result = my_db.select('SELECT rowid, resource, quality, showid'
                                  ' FROM history'
                                  ' WHERE action LIKE "%%%02d"' % SNATCHED +
                                  ' AND (UPPER(resource) LIKE "%PROPER%"'
                                  ' OR UPPER(resource) LIKE "%REPACK%"'
                                  ' OR UPPER(resource) LIKE "%REAL%")')
        if sql_result:
            cl = []
            for r in sql_result:
                show_obj = None
                try:
                    show_obj = helpers.findCertainShow(sickbeard.showList, int(r['showid']))
                except (StandardError, Exception):
                    pass
                np = NameParser(False, showObj=show_obj, testing=True)
                try:
                    pr = np.parse(r['resource'])
                except (StandardError, Exception):
                    continue

                if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
                    cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
                               [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
                                r['rowid']]])

            if cl:
                my_db.mass_action(cl)
                logger.log('Completed the history table update with status Snatched Proper.')

        my_db.add_flag('history_snatch_proper')
def logSubtitle(showid, season, episode, status, subtitleResult):
    resource = subtitleResult.path
    provider = subtitleResult.service
    status, quality = Quality.splitCompositeStatus(status)
    action = Quality.compositeStatus(SUBTITLED, quality)

    _logHistoryItem(action, showid, season, episode, quality, resource, provider)
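# Illustrative only (not from the source): a minimal, hedged sketch of how the
# composite status used by the history helpers above packs and unpacks a
# (status, quality) pair. It assumes sickbeard.common is importable and that
# compositeStatus/splitCompositeStatus behave as they are used throughout this file.
from sickbeard.common import Quality, SNATCHED

def _composite_status_roundtrip_example():
    composite = Quality.compositeStatus(SNATCHED, Quality.HDTV)  # pack status + quality
    status, quality = Quality.splitCompositeStatus(composite)    # unpack them again
    assert (status, quality) == (SNATCHED, Quality.HDTV)
    return composite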
def _find_season_quality(self, title, torrent_link, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

    quality = Quality.UNKNOWN
    file_name = None

    data = self.get_url(torrent_link)
    if not data:
        return None

    try:
        with BS4Parser(data, features=['html5lib', 'permissive']) as soup:
            file_table = soup.find('table', attrs={'class': 'torrentFileList'})

            if not file_table:
                return None

            files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
            video_files = filter(lambda i: i.rpartition('.')[2].lower() in mediaExtensions, files)

            # Filtering SingleEpisode/MultiSeason Torrent
            if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
                logger.log(u'Result %s expects %s episodes but %s were retrieved from the torrent'
                           % (title, ep_number, len(video_files)), logger.DEBUG)
                logger.log(u'Result %s seems to be a single episode or multi-season torrent, skipping result...'
                           % title, logger.DEBUG)
                return None

            if Quality.UNKNOWN != Quality.sceneQuality(title):
                return title

            for file_name in video_files:
                quality = Quality.sceneQuality(os.path.basename(file_name))
                if Quality.UNKNOWN != quality:
                    break

            if None is not file_name and Quality.UNKNOWN == quality:
                quality = Quality.assumeQuality(os.path.basename(file_name))

            if Quality.UNKNOWN == quality:
                logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
                return None

            try:
                my_parser = NameParser(showObj=self.show)
                parse_result = my_parser.parse(file_name)
            except (InvalidNameException, InvalidShowException):
                return None

            logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

            if parse_result.series_name and parse_result.season_number:
                title = parse_result.series_name + ' S%02d %s' % (int(parse_result.season_number),
                                                                  self._reverse_quality(quality))
            return title

    except Exception:
        logger.log(u'Failed to parse quality for ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
def logFailed(epObj, release, provider=None):
    showid = int(epObj.show.indexerid)
    season = int(epObj.season)
    epNum = int(epObj.episode)
    status, quality = Quality.splitCompositeStatus(epObj.status)
    action = Quality.compositeStatus(FAILED, quality)

    _logHistoryItem(action, showid, season, epNum, quality, release, provider)
def logFailed(tvdbid, season, episode, status, release, provider=None):
    showid = int(tvdbid)
    season = int(season)
    epNum = int(episode)
    status, quality = Quality.splitCompositeStatus(status)
    action = Quality.compositeStatus(FAILED, quality)

    _logHistoryItem(action, showid, season, epNum, quality, release, provider)
def _find_season_quality(self, title, torrent_link, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})

        if not file_table:
            return None

        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " seems to be a single-episode or multi-season torrent, skipping result...", logger.DEBUG)
            return None

        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName)
        except InvalidNameException:
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

        return title

    except Exception, e:
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
def _find_season_quality(self, title, torrent_id):
    """ Rewrite the title of a Season Torrent with the quality found inspecting torrent file list """

    mediaExtensions = ["avi", "mkv", "wmv", "divx", "vob", "dvr-ms",
                       "wtv", "ts", "ogv", "rar", "zip"]

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + "ajax_details_filelist.php?id=" + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall("<td.+>(.*?)</td>", data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

    # for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
    #     quality = Quality.nameQuality(os.path.basename(fileName))
    #     if quality != Quality.UNKNOWN:
    #         break

    for fileName in filesList:
        if fileName.rpartition(".")[2].lower() in mediaExtensions:
            quality = Quality.nameQuality(fileName)
            if quality != Quality.UNKNOWN:
                break

    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = (parse_result.series_name
                 + " S%02d" % int(parse_result.season_number)
                 + " " + self._reverseQuality(quality))

    return title
def _find_season_quality(self, title, torrent_id, ep_number): """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ if not self.url: return False quality = Quality.UNKNOWN file_name = None data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id)) if self.should_skip() or not data: return None files_list = re.findall('<td.+>(.*?)</td>', data) if not files_list: logger.log(u'Unable to get the torrent file list for ' + title, logger.ERROR) video_files = filter(lambda x: x.rpartition('.')[2].lower() in mediaExtensions, files_list) # Filtering SingleEpisode/MultiSeason Torrent if ep_number > len(video_files) or float(ep_number * 1.1) < len(video_files): logger.log(u'Result %s has episode %s and total episodes retrieved in torrent are %s' % (title, str(ep_number), str(len(video_files))), logger.DEBUG) logger.log(u'Result %s seems to be a single episode or multiseason torrent, skipping result...' % title, logger.DEBUG) return None if Quality.UNKNOWN != Quality.sceneQuality(title): return title for file_name in video_files: quality = Quality.sceneQuality(os.path.basename(file_name)) if Quality.UNKNOWN != quality: break if None is not file_name and Quality.UNKNOWN == quality: quality = Quality.assumeQuality(os.path.basename(file_name)) if Quality.UNKNOWN == quality: logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG) return None try: my_parser = NameParser(showObj=self.show, indexer_lookup=False) parse_result = my_parser.parse(file_name) except (InvalidNameException, InvalidShowException): return None logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG) if parse_result.series_name and parse_result.season_number: title = '%s S%02d %s' % (parse_result.series_name, int(parse_result.season_number), self._reverse_quality(quality)) return title
def set_episode_failed(ep_obj):
    try:
        with ep_obj.lock:
            quality = Quality.splitCompositeStatus(ep_obj.status)[1]
            ep_obj.status = Quality.compositeStatus(FAILED, quality)
            ep_obj.saveToDB()
    except EpisodeNotFoundException as e:
        logger.log('Unable to get episode, please set its status manually: %s' % ex(e), logger.WARNING)
def markFailed(epObj):
    log_str = u""

    try:
        with epObj.lock:
            quality = Quality.splitCompositeStatus(epObj.status)[1]
            epObj.status = Quality.compositeStatus(FAILED, quality)
            epObj.saveToDB()
    except EpisodeNotFoundException, e:
        logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)
def getQuality(self, item, anime=False):
    if 'quality' in item:
        if item.get('quality') == "480p":
            return Quality.SDTV
        elif item.get('quality') == "720p":
            return Quality.HDWEBDL
        elif item.get('quality') == "1080p":
            return Quality.FULLHDWEBDL
        else:
            return Quality.sceneQuality(item.get('title'), anime)
    else:
        return Quality.sceneQuality(item.get('title'), anime)
def markFailed(show_obj, season, episode=None):
    log_str = u""

    if episode:
        try:
            ep_obj = show_obj.getEpisode(season, episode)

            with ep_obj.lock:
                quality = Quality.splitCompositeStatus(ep_obj.status)[1]
                ep_obj.status = Quality.compositeStatus(FAILED, quality)
                ep_obj.saveToDB()
        except exceptions.EpisodeNotFoundException, e:
            log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): # make a fake episode object ep = TVEpisode(2, 3, 3, "Ep Name") # pylint: disable=protected-access ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) ep._airdate = datetime.date(2011, 3, 9) if abd: ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP' ep.show.air_by_date = 1 elif sports: ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP' ep.show.sports = 1 else: if anime_type != 3: ep.show.anime = 1 ep._release_name = 'Show.Name.003.HDTV.XviD-RLSGROUP' else: ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP' if multi is not None: ep._name = "Ep Name (1)" if anime_type != 3: ep.show.anime = 1 ep._release_name = 'Show.Name.003-004.HDTV.XviD-RLSGROUP' secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) secondEp._release_name = ep._release_name ep.relatedEps.append(secondEp) else: ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP' secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) secondEp._release_name = ep._release_name thirdEp = TVEpisode(2, 5, 5, "Ep Name (3)") thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV) thirdEp._release_name = ep._release_name ep.relatedEps.append(secondEp) ep.relatedEps.append(thirdEp) return ep
def logFailed(epObj, release, provider=None):
    """
    Log a failed download

    :param epObj: Episode object
    :param release: Release group
    :param provider: Provider used for snatch
    """
    showid = int(epObj.show.indexerid)
    season = int(epObj.season)
    epNum = int(epObj.episode)
    status, quality = Quality.splitCompositeStatus(epObj.status)
    action = Quality.compositeStatus(FAILED, quality)

    _logHistoryItem(action, showid, season, epNum, quality, release, provider)
def isFinalResult(result):
    """
    Checks if the given result is good enough quality that we can stop searching for other ones.

    :param result: quality to check
    :return: True if the result is the highest quality in both the any/best quality lists else False
    """
    logger.log("Checking if we should keep searching after we've found " + result.name, logger.DEBUG)

    show_obj = result.episodes[0].show

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # if there is a re-download that's higher than this then we definitely need to keep looking
    if best_qualities and result.quality < max(best_qualities):
        return False

    # if it doesn't match the show's black and white list then it's no good
    elif show_obj.is_anime and not show_obj.release_groups.is_valid(result):
        return False

    # if there's no higher re-download (above) and this is the highest initial download then we're good
    elif any_qualities and result.quality in any_qualities:
        return True

    elif best_qualities and result.quality == max(best_qualities):
        return True

    # if we got here then it's either not on the lists, they're empty, or it's lower than the highest required
    else:
        return False
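# Illustrative only (not from the source): a hedged sketch of the any/best quality
# lists that isFinalResult() reasons about. It assumes sickbeard.common exposes
# combineQualities/splitQuality as they are used elsewhere in this codebase.
from sickbeard.common import Quality

def _split_quality_example():
    # a show configured to accept SDTV/HDTV initially, but to keep upgrading to HDWEBDL
    show_quality = Quality.combineQualities([Quality.SDTV, Quality.HDTV], [Quality.HDWEBDL])
    any_qualities, best_qualities = Quality.splitQuality(show_quality)
    # an HDTV result is acceptable, but max(best_qualities) == HDWEBDL, so searching continues
    return any_qualities, best_qualities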
def logSnatch(searchResult):
    """
    Log history of snatch

    :param searchResult: search result object
    """
    for curEpObj in searchResult.episodes:
        showid = int(curEpObj.show.indexerid)
        season = int(curEpObj.season)
        episode = int(curEpObj.episode)
        quality = searchResult.quality
        version = searchResult.version

        providerClass = searchResult.provider
        if providerClass != None:
            provider = providerClass.name
        else:
            provider = "unknown"

        action = Quality.compositeStatus(SNATCHED, searchResult.quality)
        resource = searchResult.name

        _logHistoryItem(action, showid, season, episode, quality, resource, provider, version)
def _parseItem(self, item): if item.findtext('title') != None and item.findtext('title') == "You must be logged in to view this feed": raise exceptions.AuthException("TVBinz authentication details are incorrect, check your config") if item.findtext('title') == None or item.findtext('link') == None: logger.log(u"The XML returned from the TVBinz RSS feed is incomplete, this result is unusable: "+str(item), logger.ERROR) return title = item.findtext('title') url = item.findtext('link').replace('&', '&') sInfo = item.find('{http://tvbinz.net/rss/tvb/}seriesInfo') if sInfo == None: logger.log(u"No series info, this is some kind of non-standard release, ignoring it", logger.DEBUG) return logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG) quality = Quality.nameQuality(title) if sInfo.findtext('{http://tvbinz.net/rss/tvb/}tvrID') == None: tvrid = 0 else: tvrid = int(sInfo.findtext('{http://tvbinz.net/rss/tvb/}tvrID')) # since TVBinz normalizes the scene names it's more reliable to parse the episodes out myself # than to rely on it, because it doesn't support multi-episode numbers in the feed self._addCacheEntry(title, url, tvrage_id=tvrid, quality=quality)
def refine(video, tv_episode=None, **kwargs): """Refine a video by using TVEpisode information. :param video: the video to refine. :type video: Episode :param tv_episode: the TVEpisode to be used. :type tv_episode: sickbeard.tv.TVEpisode :param kwargs: """ if video.series_tvdb_id and video.tvdb_id: logger.debug('No need to refine with TVEpisode') return if not tv_episode: logger.debug('No TVEpisode to be used to refine') return if not isinstance(video, Episode): logger.debug('Video {name} is not an episode. Skipping refiner...', name=video.name) return if tv_episode.show: logger.debug('Refining using TVShow information.') series, year, country = series_re.match(tv_episode.show.name).groups() enrich({'series': series, 'year': int(year) if year else None}, video) enrich(SHOW_MAPPING, video, tv_episode.show) logger.debug('Refining using TVEpisode information.') enrich(EPISODE_MAPPING, video, tv_episode) enrich({'release_group': tv_episode.release_group}, video, overwrite=False) guess = Quality.to_guessit(tv_episode.status) enrich({'resolution': guess['screen_size'], 'format': guess['format']}, video, overwrite=False)
def revert_episode(ep_obj):
    """Restore the episodes of a failed download to their original state"""
    sql_results = db_select('SELECT * FROM history t WHERE t.showid=? AND t.season=?',
                            [ep_obj.show.indexerid, ep_obj.season])

    history_eps = {r['episode']: r for r in sql_results}

    try:
        logger.log('Reverting episode %sx%s: [%s]' % (ep_obj.season, ep_obj.episode, ep_obj.name))
        with ep_obj.lock:
            if ep_obj.episode in history_eps:
                status_revert = history_eps[ep_obj.episode]['old_status']

                status, quality = Quality.splitCompositeStatus(status_revert)
                logger.log('Found in failed.db history with status: %s quality: %s' % (
                    statusStrings[status], Quality.qualityStrings[quality]))
            else:
                status_revert = WANTED
                logger.log('Episode not found in failed.db history. Setting it to WANTED', logger.WARNING)

            ep_obj.status = status_revert
            ep_obj.saveToDB()

    except EpisodeNotFoundException as e:
        logger.log('Unable to create episode, please set its status manually: %s' % ex(e), logger.WARNING)
def getQuality(self, item):
    filename = helpers.get_xml_text(
        item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
    quality = Quality.nameQuality(filename)

    return quality
def query(self, filepath, languages, keywords, series, season, episode):
    logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r'
                 % (series, season, episode, languages))
    self.init_cache()

    try:
        series = series.lower().replace('(', '').replace(')', '')
        series_id = self.get_series_id(series)
    except KeyError:
        logger.debug(u'Could not find series id for %s' % series)
        return []

    episode_id = self.get_episode_id(series, series_id, season, episode, Quality.nameQuality(filepath))
    if not episode_id:
        logger.debug(u'Could not find subtitle for series %s' % series)
        return []

    r = self.session.get(self.server_url + 'index.php?option=com_remository&Itemid=6&func=fileinfo&id=' + episode_id)
    soup = BeautifulSoup(r.content)

    sub_link = soup.find('div', attrs={'id': 'remositoryfileinfo'}).find(href=re.compile('func=download'))['href']
    sub_language = self.get_language('it')
    path = get_subtitle_path(filepath, sub_language, self.config.multi)
    subtitle = ResultSubtitle(path, sub_language, self.__class__.__name__.lower(), sub_link)

    return [subtitle]
def _doSearch(self, searchString, show=None, season=None, french=None):
    if not self.login_done:
        self._doLogin(sickbeard.SOTORRENT_USERNAME, sickbeard.SOTORRENT_PASSWORD)

    results = []
    search_url = "{0}/sphinx.php?{1}".format(self.url, searchString.replace('!', ''))

    req = self.opener.open(search_url)
    page = BeautifulSoup(req)

    torrent_table = page.find("table", {"id": "torrent_list"})

    if torrent_table:
        logger.log(u"So-torrent found shows!", logger.DEBUG)
        torrent_rows = torrent_table.findAll("tr", {"id": "infos_sphinx"})

        for row in torrent_rows:
            release = row.strong.string
            id_search = row.find("img", {"alt": "+"})
            id_torrent = id_search['id'].replace('expandoGif', '')
            download_url = "https://so-torrent.com/get.php?id={0}".format(id_search['id'].replace('expandoGif', ''))
            id_quality = Quality.nameQuality(release)

            if show and french == None:
                results.append(SOTORRENTSearchResult(self.opener, release, download_url, id_quality, str(show.audio_lang)))
            elif show and french:
                results.append(SOTORRENTSearchResult(self.opener, release, download_url, id_quality, 'fr'))
            else:
                results.append(SOTORRENTSearchResult(self.opener, release, download_url, id_quality))
    else:
        logger.log(u"No table found.", logger.DEBUG)
        self.login_done = False

    return results
def _doSearch(self, searchString, show=None, season=None, french=None): if not self.login_done: self._doLogin( sickbeard.T411_USERNAME, sickbeard.T411_PASSWORD ) results = [] searchUrl = self.url + '/torrents/search/?' + searchString.replace('!','') logger.log(u"Search string: " + searchUrl, logger.DEBUG) r = self.opener.open( searchUrl ) soup = BeautifulSoup( r, "html.parser" ) resultsTable = soup.find("table", { "class" : "results" }) if resultsTable: rows = resultsTable.find("tbody").findAll("tr") for row in rows: link = row.find("a", title=True) title = link['title'] id = row.find_all('td')[2].find_all('a')[0]['href'][1:].replace('torrents/nfo/?id=','') downloadURL = ('http://www.t411.io/torrents/download/?id=%s' % id) quality = Quality.nameQuality( title ) if quality==Quality.UNKNOWN and title: if '720p' not in title.lower() and '1080p' not in title.lower(): quality=Quality.SDTV if show and french==None: results.append( T411SearchResult( self.opener, link['title'], downloadURL, quality, str(show.audio_lang) ) ) elif show and french: results.append( T411SearchResult( self.opener, link['title'], downloadURL, quality, 'fr' ) ) else: results.append( T411SearchResult( self.opener, link['title'], downloadURL, quality ) ) return results
def _doSearch(self, searchString, show=None, season=None, french=None):
    if not self.login_done:
        self._doLogin(sickbeard.LIBERTALIA_USERNAME, sickbeard.LIBERTALIA_PASSWORD)

    results = []
    searchUrl = self.url + '/torrents.php?' + searchString.replace('!', '')
    logger.log(u"Search string: " + searchUrl, logger.DEBUG)

    r = self.opener.open(searchUrl)
    soup = BeautifulSoup(r)
    resultsTable = soup.find("table", {"class": "torrent_table"})
    if resultsTable:
        logger.log(u"LIBERTALIA found result table!", logger.DEBUG)
        rows = resultsTable.findAll("tr", {"class": "torrent_row new "})  # torrent_row new

        for row in rows:
            # bypass the first row because it only holds the title
            columns = row.find('td', {"class": "torrent_name"})
            logger.log(u"LIBERTALIA found rows!", logger.DEBUG)
            isvfclass = row.find('td', {"class": "sprite-vf"})
            isvostfrclass = row.find('td', {"class": "sprite-vostfr"})
            link = columns.find("a", href=re.compile("torrents"))

            if link:
                if isvostfrclass and str(show.audio_lang) == 'fr':
                    logger.log(u"LIBERTALIA found VOSTFR but *" + str(show.audio_lang) + "* was requested, skipping! " + link.text, logger.DEBUG)
                    # discard the link by searching for a href that cannot exist
                    link = columns.find("a", href=re.compile("nepastrouver"))

            if link:
                if isvfclass and str(show.audio_lang) != 'fr':
                    logger.log(u"LIBERTALIA found VF but *" + str(show.audio_lang) + "* was requested, skipping! " + link.text, logger.DEBUG)
                    link = columns.find("a", href=re.compile("nepastrouver"))

            if link:
                title = link.text
                recherched = searchUrl.split("&[PARAMSTR]=")[1]
                recherched = recherched.replace(".", "(.*)").replace(" ", "(.*)").replace("'", "(.*)")
                logger.log(u"LIBERTALIA TITLE: " + title, logger.DEBUG)
                logger.log(u"LIBERTALIA CHECK MATCH: " + recherched, logger.DEBUG)
                # downloadURL = self.url + "/" + row.find("a", href=re.compile("torrent_pass"))['href']
                if re.match(recherched, title, re.IGNORECASE):
                    downloadURL = row.find("a", href=re.compile("torrent_pass"))['href']
                    quality = Quality.nameQuality(title)
                    if quality == Quality.UNKNOWN and title:
                        if '720p' not in title.lower() and '1080p' not in title.lower():
                            quality = Quality.SDTV
                    if show and french == None:
                        results.append(LIBERTALIASearchResult(self.opener, title, downloadURL, quality, str(show.audio_lang)))
                    elif show and french:
                        results.append(LIBERTALIASearchResult(self.opener, title, downloadURL, quality, 'fr'))
                    else:
                        results.append(LIBERTALIASearchResult(self.opener, title, downloadURL, quality))
    else:
        logger.log(u"No table found, logging out", logger.DEBUG)
        self.login_done = False

    return results
def markFailed(epObj):
    """
    Mark an episode as failed

    :param epObj: Episode object to mark as failed
    :return: empty string
    """
    log_str = u""

    try:
        with epObj.lock:
            quality = Quality.splitCompositeStatus(epObj.status)[1]
            epObj.status = Quality.compositeStatus(FAILED, quality)
            epObj.saveToDB()
    except EpisodeNotFoundException, e:
        logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)
def logSubtitle(showid, season, episode, status, subtitleResult):
    """
    Log download of subtitle

    :param showid: Showid of download
    :param season: Show season
    :param episode: Show episode
    :param status: Status of download
    :param subtitleResult: Result object
    """
    resource = subtitleResult.language.opensubtitles
    provider = subtitleResult.provider_name
    status, quality = Quality.splitCompositeStatus(status)
    action = Quality.compositeStatus(SUBTITLED, quality)

    _logHistoryItem(action, showid, season, episode, quality, resource, provider)
def _find_season_quality(self, title, torrent_id):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip']

    quality = Quality.UNKNOWN
    fileName = None

    fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
    data = self.getURL(fileURL)
    if not data:
        return None

    filesList = re.findall('<td.+>(.*?)</td>', data)
    if not filesList:
        logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

    for fileName in filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList):
        quality = Quality.nameQuality(os.path.basename(fileName))
        if quality != Quality.UNKNOWN:
            break

    if fileName is not None and quality == Quality.UNKNOWN:
        quality = Quality.assumeQuality(os.path.basename(fileName))

    if quality == Quality.UNKNOWN:
        logger.log(u"No Season quality for " + title, logger.DEBUG)
        return None

    try:
        myParser = NameParser()
        parse_result = myParser.parse(fileName)
    except InvalidNameException:
        return None

    logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

    if parse_result.series_name and parse_result.season_number:
        title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

    return title
def getQuality(self, item, anime=False):
    try:
        quality = Quality.sceneQuality(item.filename, anime)
    except Exception:
        quality = Quality.UNKNOWN

    return quality
def _addCacheEntry(self, name, url, quality=None): cacheDB = self._getDB() season = None episodes = None # if we don't have complete info then parse the filename to get it try: myParser = NameParser(0) parse_result = myParser.parse(name).convert() except InvalidNameException: logger.log( u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG) return None if not parse_result: logger.log( u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return None if not parse_result.series_name: logger.log( u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return None if not parse_result.show: logger.log( u"Couldn't find a show in our databases matching " + name + ", unable to cache it", logger.DEBUG) return None try: myDB = db.DBConnection() if parse_result.show.air_by_date: airdate = parse_result.sports_event_date.toordinal( ) if parse_result.show.sports else parse_result.air_date.toordinal( ) sql_results = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [parse_result.show.indexerid, airdate]) if sql_results > 0: season = int(sql_results[0]["season"]) episodes = [int(sql_results[0]["episode"])] else: season = parse_result.season_number episodes = parse_result.episode_numbers if season and episodes: # store episodes as a seperated string episodeText = "|" + "|".join(map(str, episodes)) + "|" # get the current timestamp curTimestamp = int( time.mktime(datetime.datetime.today().timetuple())) # get quality of release if quality is None: quality = Quality.sceneQuality(name) if not isinstance(name, unicode): name = unicode(name, 'utf-8') cacheDB.action( "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", [ name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality ]) except: return
def pickBestResult(results, show): # pylint: disable=too-many-branches """ Find the best result out of a list of search results for a show :param results: list of result objects :param show: Shows we check for :return: best result object """ results = results if isinstance(results, list) else [results] logger.log( u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG) bestResult = None # find the best result for the current episode for cur_result in results: if show and cur_result.show is not show: continue # build the black And white list if show.is_anime: if not show.release_groups.is_valid(cur_result): continue logger.log(u"Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality]) anyQualities, bestQualities = Quality.splitQuality(show.quality) if cur_result.quality not in anyQualities + bestQualities: logger.log( cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG) continue if not show_name_helpers.filter_bad_releases( cur_result.name, parse=False, show=show): continue if hasattr(cur_result, 'size'): if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed( cur_result.name, cur_result.size, cur_result.provider.name): logger.log(cur_result.name + u" has previously failed, rejecting it") continue if not bestResult: bestResult = cur_result elif cur_result.quality in bestQualities and ( bestResult.quality < cur_result.quality or bestResult.quality not in bestQualities): bestResult = cur_result elif cur_result.quality in anyQualities and bestResult.quality not in bestQualities and bestResult.quality < cur_result.quality: bestResult = cur_result elif bestResult.quality == cur_result.quality: if "proper" in cur_result.name.lower( ) or "real" in cur_result.name.lower( ) or "repack" in cur_result.name.lower(): logger.log(u"Preferring " + cur_result.name + " (repack/proper/real over nuked)") bestResult = cur_result elif "internal" in bestResult.name.lower( ) and "internal" not in cur_result.name.lower(): logger.log(u"Preferring " + cur_result.name + " (normal instead of internal)") bestResult = cur_result elif "xvid" in bestResult.name.lower( ) and "x264" in cur_result.name.lower(): logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)") bestResult = cur_result if bestResult: logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG) else: logger.log(u"No result picked.", logger.DEBUG) return bestResult
def snatchEpisode(result, endStatus=SNATCHED): # pylint: disable=too-many-branches, too-many-statements """ Contains the internal logic necessary to actually "snatch" a result that has been found. :param result: SearchResult instance to be snatched. :param endStatus: the episode status that should be used for the episode object once it's snatched. :return: boolean, True on success """ if result is None: return False result.priority = 0 # -1 = low, 0 = normal, 1 = high if sickbeard.ALLOW_HIGH_PRIORITY: # if it aired recently make it high priority for curEp in result.episodes: if datetime.date.today() - curEp.airdate <= datetime.timedelta( days=7): result.priority = 1 endStatus = SNATCHED_PROPER if re.search(r'\b(proper|repack|real)\b', result.name, re.I) else endStatus if result.url.startswith('magnet') or result.url.endswith('torrent'): result.resultType = 'torrent' # NZBs can be sent straight to SAB or saved to disk if result.resultType in ("nzb", "nzbdata"): if sickbeard.NZB_METHOD == "blackhole": dlResult = _downloadResult(result) elif sickbeard.NZB_METHOD == "sabnzbd": dlResult = sab.sendNZB(result) elif sickbeard.NZB_METHOD == "nzbget": is_proper = True if endStatus == SNATCHED_PROPER else False dlResult = nzbget.sendNZB(result, is_proper) elif sickbeard.NZB_METHOD == "download_station": client = clients.getClientIstance(sickbeard.NZB_METHOD)( sickbeard.SYNOLOGY_DSM_HOST, sickbeard.SYNOLOGY_DSM_USERNAME, sickbeard.SYNOLOGY_DSM_PASSWORD) dlResult = client.sendNZB(result) else: logger.log( u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR) dlResult = False # Torrents can be sent to clients or saved to disk elif result.resultType == "torrent": # torrents are saved to disk when blackhole mode if sickbeard.TORRENT_METHOD == "blackhole": dlResult = _downloadResult(result) else: if not result.content and not result.url.startswith('magnet'): if result.provider.login(): result.content = result.provider.get_url(result.url, returns='content') if result.content or result.url.startswith('magnet'): client = clients.getClientIstance(sickbeard.TORRENT_METHOD)() dlResult = client.sendTORRENT(result) else: logger.log(u"Torrent file content is empty", logger.WARNING) dlResult = False else: logger.log( u"Unknown result type, unable to download it ({0!r})".format( result.resultType), logger.ERROR) dlResult = False if not dlResult: return False if sickbeard.USE_FAILED_DOWNLOADS: failed_history.logSnatch(result) ui.notifications.message('Episode snatched', result.name) history.logSnatch(result) # don't notify when we re-download an episode sql_l = [] trakt_data = [] for curEpObj in result.episodes: with curEpObj.lock: if isFirstBestMatch(result): curEpObj.status = Quality.compositeStatus( SNATCHED_BEST, result.quality) else: curEpObj.status = Quality.compositeStatus( endStatus, result.quality) sql_l.append(curEpObj.get_sql()) if curEpObj.status not in Quality.DOWNLOADED: try: notifiers.notify_snatch("{0} from {1}".format( curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'), result.provider.name)) # pylint: disable=protected-access except Exception: # Without this, when notification fail, it crashes the snatch thread and SR will # keep snatching until notification is sent logger.log(u"Failed to send snatch notification", logger.DEBUG) trakt_data.append((curEpObj.season, curEpObj.episode)) data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data) if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST: logger.log( u"Add episodes, showid: indexerid " + 
str(result.show.indexerid) + ", Title " + str(result.show.name) + " to Traktv Watchlist", logger.DEBUG) if data: notifiers.trakt_notifier.update_watchlist(result.show, data_episode=data, update="add") if sql_l: main_db_con = db.DBConnection() main_db_con.mass_action(sql_l) return True
def getQuality(self, item):
    (title, url) = item
    quality = Quality.nameQuality(title)

    return quality
def findNeededEpisodes(self, episode, manualSearch=False): neededEps = {} cl = [] myDB = self.get_db() if type(episode) != list: sqlResults = myDB.select( 'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?', [ self.providerID, episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%' ]) else: for epObj in episode: cl.append([ 'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?' + ' AND episodes LIKE ? AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')', [ self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%' ] ]) sqlResults = myDB.mass_action(cl) if sqlResults: sqlResults = list(itertools.chain(*sqlResults)) if not sqlResults: self.setLastSearch() return neededEps # for each cache entry for curResult in sqlResults: # skip non-tv crap if not show_name_helpers.pass_wordlist_checks( curResult['name'], parse=False, indexer_lookup=False): continue # get the show object, or if it's not one of our shows then ignore it showObj = helpers.findCertainShow(sickbeard.showList, int(curResult['indexerid'])) if not showObj: continue # skip if provider is anime only and show is not anime if self.provider.anime_only and not showObj.is_anime: logger.log( u'' + str(showObj.name) + ' is not an anime, skipping', logger.DEBUG) continue # get season and ep data (ignoring multi-eps for now) curSeason = int(curResult['season']) if curSeason == -1: continue curEp = curResult['episodes'].split('|')[1] if not curEp: continue curEp = int(curEp) curQuality = int(curResult['quality']) curReleaseGroup = curResult['release_group'] curVersion = curResult['version'] # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch): logger.log( u'Skipping ' + curResult['name'] + ' because we don\'t want an episode that\'s ' + Quality.qualityStrings[curQuality], logger.DEBUG) continue epObj = showObj.getEpisode(curSeason, curEp) # build a result object title = curResult['name'] url = curResult['url'] logger.log(u'Found result ' + title + ' at ' + url) result = self.provider.get_result([epObj], url) if None is result: continue result.show = showObj result.name = title result.quality = curQuality result.release_group = curReleaseGroup result.version = curVersion result.content = None np = NameParser(False, showObj=showObj) try: parsed_result = np.parse(title) extra_info_no_name = parsed_result.extra_info_no_name() version = parsed_result.version is_anime = parsed_result.is_anime except (StandardError, Exception): extra_info_no_name = None version = -1 is_anime = False result.is_repack, result.properlevel = Quality.get_proper_level( extra_info_no_name, version, is_anime, check_is_repack=True) # add it to the list if epObj not in neededEps: neededEps[epObj] = [result] else: neededEps[epObj].append(result) # datetime stamp this search so cache gets cleared self.setLastSearch() return neededEps
def _getProperList(self): propers = {} # for each provider get a list of the propers for curProvider in providers.sortedProviderList(): if not curProvider.isActive(): continue date = datetime.datetime.today() - datetime.timedelta(days=2) logger.log(u"Searching for any new PROPER releases from "+curProvider.name) curPropers = curProvider.findPropers(date) # if they haven't been added by a different provider than add the proper to the list for x in curPropers: name = self._genericName(x.name) if not name in propers: logger.log(u"Found new proper: "+x.name, logger.DEBUG) x.provider = curProvider propers[name] = x # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: # parse the file name cp = CompleteParser() cpr = cp.parse(curProper.name) parse_result = cpr.parse_result if not parse_result.episode_numbers and not parse_result.is_anime: logger.log(u"Ignoring "+curProper.name+" because it's for a full season rather than specific episode", logger.DEBUG) continue # populate our Proper instance if parse_result.air_by_date: curProper.season = -1 curProper.episode = parse_result.air_date else: curProper.season = parse_result.season_number if parse_result.season_number != None else 1 if parse_result.is_anime: logger.log(u"I am sorry '"+curProper.name+"' seams to be an anime proper seach is not yet suported", logger.DEBUG) continue curProper.episode = parse_result.ab_episode_numbers[0] else: curProper.episode = parse_result.episode_numbers[0] curProper.quality = Quality.nameQuality(curProper.name,parse_result.is_anime) # for each show in our list for curShow in sickbeard.showList: if not parse_result.series_name: continue genericName = self._genericName(parse_result.series_name) # get the scene name masks sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow)) # for each scene name mask for curSceneName in sceneNames: # if it matches if genericName == self._genericName(curSceneName): logger.log(u"Successful match! 
Result "+parse_result.series_name+" matched to show "+curShow.name, logger.DEBUG) # set the tvdbid in the db to the show's tvdbid curProper.tvdbid = curShow.tvdbid # since we found it, break out break # if we found something in the inner for loop break out of this one if curProper.tvdbid != -1: break if curProper.tvdbid == -1: continue if not show_name_helpers.filterBadReleases(curProper.name): logger.log(u"Proper "+curProper.name+" isn't a valid scene release that we want, igoring it", logger.DEBUG) continue # if we have an air-by-date show then get the real season/episode numbers if curProper.season == -1 and curProper.tvdbid: showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid) if not showObj: logger.log(u"This should never have happened, post a bug about this!", logger.ERROR) raise Exception("BAD STUFF HAPPENED") tvdb_lang = showObj.lang # There's gotta be a better way of doing this but we don't wanna # change the language value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if tvdb_lang and not tvdb_lang == 'en': ltvdb_api_parms['language'] = tvdb_lang try: t = tvdb_api.Tvdb(**ltvdb_api_parms) epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0] curProper.season = int(epObj["seasonnumber"]) curProper.episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound: logger.log(u"Unable to find episode with date "+str(curProper.episode)+" for show "+parse_result.series_name+", skipping", logger.WARNING) continue # check if we actually want this proper (if it's the right quality) sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.tvdbid, curProper.season, curProper.episode]) if not sqlResults: continue oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"])) # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality: continue # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers): logger.log(u"Found a proper that we need: "+str(curProper.name)) finalPropers.append(curProper) return finalPropers
def _find_season_quality(self, title, torrent_link, ep_number):
    """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

    mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms',
                       'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4']

    quality = Quality.UNKNOWN
    fileName = None

    data = self.getURL(torrent_link)
    if not data:
        return None

    try:
        soup = BeautifulSoup(data, features=["html5lib", "permissive"])
        file_table = soup.find('table', attrs={'class': 'torrentFileList'})

        if not file_table:
            return None

        files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

        # Filtering SingleEpisode/MultiSeason Torrent
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(u"Result " + title + " should have " + str(ep_number) + " episodes but the torrent contains " + str(len(videoFiles)) + " video files", logger.DEBUG)
            logger.log(u"Result " + title + " seems to be a single-episode or multi-season torrent, skipping result...", logger.DEBUG)
            return None

        if Quality.sceneQuality(title) != Quality.UNKNOWN:
            return title

        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN:
                break

        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser()
            parse_result = myParser.parse(fileName, True)
        except InvalidNameException:
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(quality)

        return title

    except Exception, e:
        logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound: logger.log(u"Unable to find episode with date "+str(parse_result.air_date)+" for show "+parse_result.series_name+", skipping", logger.WARNING) return False except tvdb_exceptions.tvdb_error, e: logger.log(u"Unable to contact TVDB: "+e.message.decode(sickbeard.SYS_ENCODING), logger.WARNING) return False episodeText = "|"+"|".join(map(str, episodes))+"|" # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) if not quality: quality = Quality.nameQuality(name) myDB.action("INSERT INTO "+self.providerID+" (name, season, episodes, tvrid, tvdbid, url, time, quality) VALUES (?,?,?,?,?,?,?,?)", [name, season, episodeText, tvrage_id, tvdb_id, url, curTimestamp, quality]) def searchCache(self, episode, manualSearch=False): neededEps = self.findNeededEpisodes(episode, manualSearch) return neededEps[episode] def listPropers(self, date=None, delimiter="."): myDB = self._getDB() sql = "SELECT * FROM "+self.providerID+" WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"
logger.log( u"Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name, logger.DEBUG) # set the indexerid in the db to the show's indexerid curProper.indexerid = parse_result.show.indexerid # set the indexer in the db to the show's indexer curProper.indexer = parse_result.show.indexer # populate our Proper instance curProper.season = parse_result.season_number if parse_result.season_number != None else 1 curProper.episode = parse_result.episode_numbers[0] curProper.release_group = parse_result.release_group curProper.version = parse_result.version curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime) # only get anime proper if it has release group and version if parse_result.is_anime: if not curProper.release_group and curProper.version == -1: logger.log( u"Proper " + curProper.name + " doesn't have a release group and version, ignoring it", logger.DEBUG) continue if not show_name_helpers.filterBadReleases(curProper.name, parse=False): logger.log( u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it",
def _addCacheEntry(self, name, url): cacheDB = self._getDB() parse_result = None from_cache = False indexer_id = None # if we don't have complete info then parse the filename to get it while(True): try: myParser = NameParser() parse_result = myParser.parse(name) except InvalidNameException: logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG) return None if not parse_result: logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return None if not parse_result.series_name: logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return None logger.log( u"Checking the cahe for show:" + str(parse_result.series_name), logger.DEBUG) # remember if the cache lookup worked or not so we know whether we should bother updating it later cache_id = name_cache.retrieveNameFromCache(parse_result.series_name) if cache_id: logger.log(u"Cache lookup found Indexer ID:" + repr(indexer_id) + ", using that for " + parse_result.series_name, logger.DEBUG) from_cache = True indexer_id = cache_id break # if the cache failed, try looking up the show name in the database logger.log( u"Checking the database for show:" + str(parse_result.series_name), logger.DEBUG) showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: logger.log( u"Database lookup found Indexer ID:" + str(showResult[1]) + ", using that for " + parse_result.series_name, logger.DEBUG) indexer_id = showResult[1] break # if we didn't find a Indexer ID return None if not indexer_id: return None # if the show isn't in out database then return None try:showObj = helpers.findCertainShow(sickbeard.showList, indexer_id) except:return None if not showObj: return None # if we weren't provided with season/episode information then get it from the name that we parsed season = None episodes = None myDB = db.DBConnection() if parse_result.air_by_date: sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [showObj.indexerid, parse_result.air_date.toordinal()]) if sql_results > 0: season = int(sql_results[0]["season"]) episodes = [int(sql_results[0]["episode"])] elif parse_result.sports: sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [showObj.indexerid, parse_result.sports_date.toordinal()]) if sql_results > 0: season = int(sql_results[0]["season"]) episodes = [int(sql_results[0]["episode"])] else: season = parse_result.season_number episodes = parse_result.episode_numbers if not (season and episodes): return None # convert scene numbered releases before storing to cache convertedEps = {} for curEp in episodes: epObj = showObj.getEpisode(season, curEp, sceneConvert=True) if not epObj: return None if not epObj.season in convertedEps: convertedEps[epObj.season] = [] convertedEps[epObj.season].append(epObj.episode) # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) # get quality of release quality = Quality.sceneQuality(name) if not isinstance(name, unicode): name = unicode(name, 'utf-8') for season, episodes in convertedEps.items(): episodeText = "|" + "|".join(map(str, episodes)) + "|" cacheDB.action( "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", [name, season, episodeText, indexer_id, url, curTimestamp, quality])
def getQuality(self, item, anime=False):
    quality = Quality.sceneQuality(item[0], anime)

    return quality
def getQuality(self, item):
    filename = item.filename
    quality = Quality.nameQuality(filename)

    return quality
def _doSearch(self, searchString, show=None, season=None, french=None): results = [] searchUrl = self.url + 'rdirect.php?type=search&' + searchString logger.log(u"Search URL: " + searchUrl, logger.DEBUG) data = self.getURL(searchUrl) if "bad key" in str(data).lower(): logger.log(u"GKS key invalid, check your config", logger.ERROR) return [] parsedXML = parseString(data) channel = parsedXML.getElementsByTagName('channel')[0] description = channel.getElementsByTagName('description')[0] description_text = helpers.get_xml_text(description).lower() if "user can't be found" in description_text: logger.log(u"GKS invalid digest, check your config", logger.ERROR) return [] elif "invalid hash" in description_text: logger.log(u"GKS invalid hash, check your config", logger.ERROR) return [] else: items = channel.getElementsByTagName('item') for item in items: title = helpers.get_xml_text( item.getElementsByTagName('title')[0]) if "aucun resultat" in title.lower(): logger.log(u"No results found in " + searchUrl, logger.DEBUG) return [] count = 1 if season: count = 0 if show: if show.audio_lang == 'fr': for frword in ['french', 'truefrench', 'multi']: if frword in title.lower(): count += 1 else: count += 1 else: count += 1 if count == 0: continue else: downloadURL = helpers.get_xml_text( item.getElementsByTagName('link')[0]) quality = Quality.nameQuality(title) if quality == Quality.UNKNOWN and title: if '720p' not in title.lower( ) and '1080p' not in title.lower(): quality = Quality.SDTV if show and french == None: results.append( GksSearchResult(self.opener, title, downloadURL, quality, str(show.audio_lang))) elif show and french: results.append( GksSearchResult(self.opener, title, downloadURL, quality, 'fr')) else: results.append( GksSearchResult(self.opener, title, downloadURL, quality)) return results
class TVCache(): def __init__(self, provider): self.provider = provider self.providerID = self.provider.getID() self.minTime = 10 def _getDB(self): return CacheDBConnection(self.providerID) def _clearCache(self): myDB = self._getDB() myDB.action("DELETE FROM " + self.providerID + " WHERE 1") def _getRSSData(self): data = None return data def _checkAuth(self, parsedXML): return True def _checkItemAuth(self, title, url): return True def updateCache(self): if not self.shouldUpdate(): return if self._checkAuth(None): data = self._getRSSData() # as long as the http request worked we count this as an update if data: self.setLastUpdate() else: return [] # now that we've loaded the current RSS feed lets delete the old cache logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") self._clearCache() parsedXML = helpers.parse_xml(data) if parsedXML is None: logger.log(u"Error trying to load " + self.provider.name + " RSS feed", logger.ERROR) return [] if self._checkAuth(parsedXML): if parsedXML.tag == 'rss': items = parsedXML.findall('.//item') else: logger.log(u"Resulting XML from " + self.provider.name + " isn't RSS, not parsing it", logger.ERROR) return [] for item in items: self._parseItem(item) else: raise AuthException(u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config") return [] def _translateTitle(self, title): return title.replace(' ', '.') def _translateLinkURL(self, url): return url.replace('&', '&') def _parseItem(self, item): title = helpers.get_xml_text(item.find('title')) url = helpers.get_xml_text(item.find('link')) self._checkItemAuth(title, url) if title and url: title = self._translateTitle(title) url = self._translateLinkURL(url) logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) self._addCacheEntry(title, url) else: logger.log(u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", logger.DEBUG) return def _getLastUpdate(self): myDB = self._getDB() sqlResults = myDB.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID]) if sqlResults: lastTime = int(sqlResults[0]["time"]) else: lastTime = 0 return datetime.datetime.fromtimestamp(lastTime) def setLastUpdate(self, toDate=None): if not toDate: toDate = datetime.datetime.today() myDB = self._getDB() myDB.upsert("lastUpdate", {'time': int(time.mktime(toDate.timetuple()))}, {'provider': self.providerID}) lastUpdate = property(_getLastUpdate) def shouldUpdate(self): # if we've updated recently then skip the update if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime): logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(datetime.timedelta(minutes=self.minTime)), logger.DEBUG) return False return True def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]): myDB = self._getDB() parse_result = None # if we don't have complete info then parse the filename to get it for curName in [name] + extraNames: try: myParser = NameParser() parse_result = myParser.parse(curName) except InvalidNameException: logger.log(u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG) continue if not parse_result: logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG) return False if not parse_result.series_name: logger.log(u"No series name retrieved from " + name + ", unable to cache it", 
logger.DEBUG) return False tvdb_lang = None # if we need tvdb_id or tvrage_id then search the DB for them if not tvdb_id or not tvrage_id: # if we have only the tvdb_id, use the database if tvdb_id: showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id) if showObj: tvrage_id = showObj.tvrid tvdb_lang = showObj.lang else: logger.log(u"We were given a TVDB id " + str(tvdb_id) + " but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG) tvrage_id = 0 # if we have only a tvrage_id then use the database elif tvrage_id: showObj = helpers.findCertainTVRageShow(sickbeard.showList, tvrage_id) if showObj: tvdb_id = showObj.tvdbid tvdb_lang = showObj.lang else: logger.log(u"We were given a TVRage id " + str(tvrage_id) + " but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG) tvdb_id = 0 # if they're both empty then fill out as much info as possible by searching the show name else: # check the name cache and see if we already know what show this is logger.log(u"Checking the cache to see if we already know the tvdb id of " + parse_result.series_name, logger.DEBUG) tvdb_id = name_cache.retrieveNameFromCache(parse_result.series_name) # remember if the cache lookup worked or not so we know whether we should bother updating it later if tvdb_id == None: logger.log(u"No cache results returned, continuing on with the search", logger.DEBUG) from_cache = False else: logger.log(u"Cache lookup found " + repr(tvdb_id) + ", using that", logger.DEBUG) from_cache = True # if the cache failed, try looking up the show name in the database if tvdb_id == None: logger.log(u"Trying to look the show up in the show database", logger.DEBUG) showResult = helpers.searchDBForShow(parse_result.series_name) if showResult: logger.log(parse_result.series_name + " was found to be show " + showResult[1] + " ("+str(showResult[0]) + ") in our DB.", logger.DEBUG) tvdb_id = showResult[0] # if the DB lookup fails then do a comprehensive regex search if tvdb_id == None: logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG) for curShow in sickbeard.showList: if show_name_helpers.isGoodResult(name, curShow, False): logger.log(u"Successfully matched " + name + " to " + curShow.name + " with regex", logger.DEBUG) tvdb_id = curShow.tvdbid tvdb_lang = curShow.lang break # if tvdb_id was anything but None (0 or a number) then if not from_cache: name_cache.addNameToCache(parse_result.series_name, tvdb_id) # if we came out with tvdb_id = None it means we couldn't figure it out at all, just use 0 for that if tvdb_id == None: tvdb_id = 0 # if we found the show then retrieve the show object if tvdb_id: showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id) if showObj: tvrage_id = showObj.tvrid tvdb_lang = showObj.lang # if we weren't provided with season/episode information then get it from the name that we parsed if not season: season = parse_result.season_number if parse_result.season_number != None else 1 if not episodes: episodes = parse_result.episode_numbers # if we have an air-by-date show then get the real season/episode numbers if parse_result.air_by_date and tvdb_id: try: # There's gotta be a better way of doing this but we don't wanna # change the language value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if not (tvdb_lang == "" or tvdb_lang == "en" or tvdb_lang == None): ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(**ltvdb_api_parms) epObj = 
t[tvdb_id].airedOn(parse_result.air_date)[0] season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound: logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) return False except tvdb_exceptions.tvdb_error, e: logger.log(u"Unable to contact TVDB: " + ex(e), logger.WARNING) return False episodeText = "|" + "|".join(map(str, episodes)) + "|" # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) if not quality: quality = Quality.nameQuality(name) myDB.action("INSERT INTO " + self.providerID + " (name, season, episodes, tvrid, tvdbid, url, time, quality) VALUES (?,?,?,?,?,?,?,?)", [name, season, episodeText, tvrage_id, tvdb_id, url, curTimestamp, quality])
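The INSERT above serializes the matched episode numbers into a single pipe-delimited string (episodeText). A minimal sketch of that encoding and the matching decode, using hypothetical helper names that are not part of the original module:

def encode_episodes(episodes):
    # [3, 4] -> "|3|4|"
    return "|" + "|".join(str(ep) for ep in episodes) + "|"

def decode_episodes(episode_text):
    # "|3|4|" -> [3, 4]
    return [int(part) for part in episode_text.split("|") if part]

assert encode_episodes([3, 4]) == "|3|4|"
assert decode_episodes("|3|4|") == [3, 4]

The surrounding pipes presumably make it easy to match a single episode later with a substring/LIKE pattern such as "%|3|%" without confusing episode 3 and episode 13.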
def get_quality(self, item, anime=False): (title, url_) = self._get_title_and_url(item) quality = Quality.scene_quality(title, anime) return quality
def getQuality(self, item): quality = Quality.nameQuality(item[0]) return quality
def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] for mode in search_params: items = [] logger.log(u'Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if search_string == '': continue search_string = six.text_type(search_string).replace('.', ' ') logger.log( u'Search string: {0}'.format( search_string.decode('utf-8')), logger.DEBUG) last_page = False for page in range(0, self.max_pages): if last_page: break logger.log('Processing page {0} of results'.format(page), logger.DEBUG) search_url = self.urls['search'].format( search_string, page) data = self.get_url(search_url, returns='text') if not data: logger.log(u'No data returned from provider', logger.DEBUG) continue try: with BS4Parser(data, 'html5lib') as html: table_header = html.find('tr', class_='bordo') torrent_table = table_header.find_parent( 'table') if table_header else None if not torrent_table: logger.log(u'Could not find table of torrents', logger.ERROR) continue torrent_rows = torrent_table('tr') # Continue only if one Release is found if len(torrent_rows) < 6 or len( torrent_rows[2]('td')) == 1: logger.log( u'Data returned from provider does not contain any torrents', logger.DEBUG) last_page = True continue if len(torrent_rows) < 45: last_page = True for result in torrent_rows[2:-3]: result_cols = result('td') if len(result_cols) == 1: # Ignore empty rows in the middle of the table continue try: info_link = result('td')[1].find( 'a')['href'] title = re.sub( ' +', ' ', info_link.rsplit('/', 1)[-1].replace( '_', ' ')) info_hash = result('td')[3].find( 'input', class_='downarrow')['value'].upper() download_url = self._magnet_from_result( info_hash, title) seeders = try_int(result('td')[5].text) leechers = try_int(result('td')[6].text) torrent_size = result('td')[2].string size = convert_size(torrent_size) or -1 except (AttributeError, IndexError, TypeError): continue filename_qt = self._reverseQuality( self._episodeQuality(result)) for text in self.hdtext: title1 = title title = title.replace(text, filename_qt) if title != title1: break if Quality.nameQuality( title) == Quality.UNKNOWN: title += filename_qt if not self._is_italian( title) and not self.subtitle: logger.log( u'Torrent is subtitled, skipping: {0}'. 
format(title), logger.DEBUG) continue if self.engrelease and not self._is_english( title): logger.log( u'Torrent isn\'t english audio/subtitled, skipping: {0}' .format(title), logger.DEBUG) continue search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] show_title = search_show ep_params = '' rindex = re.search(r'([Ss][\d{1,2}]+)', title) if rindex: show_title = title[:rindex.start()] ep_params = title[rindex.start():] if show_title.lower() != search_show.lower( ) and search_show.lower() in show_title.lower( ): new_title = search_show + ep_params title = new_title if not all([title, download_url]): continue if self._is_season_pack(title): title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) # Filter unseeded torrent if seeders < self.minseed or leechers < self.minleech: logger.log( u'Discarding torrent because it doesn\'t meet the minimum' u' seeders or leechers: {0} (S:{1} L:{2})' .format(title, seeders, leechers), logger.DEBUG) continue item = { 'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': info_hash } if mode != 'RSS': logger.log( u'Found result: {0} with {1} seeders and {2} leechers' .format(title, seeders, leechers), logger.DEBUG) items.append(item) except Exception as error: logger.log( u'Failed parsing provider. Error: {0}'.format( error), logger.ERROR) # For each search mode sort all the items by seeders if available items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results
def _getProperList(self): # pylint: disable=too-many-locals, too-many-branches, too-many-statements """ Walk providers for propers """ propers = {} search_date = datetime.datetime.today() - datetime.timedelta(days=2) # for each provider get a list of the origThreadName = threading.currentThread().name providers = [ x for x in sickbeard.providers.sortedProviderList( sickbeard.RANDOMIZE_PROVIDERS) if x.is_active() ] for curProvider in providers: threading.currentThread( ).name = origThreadName + " :: [" + curProvider.name + "]" logger.log("Searching for any new PROPER releases from " + curProvider.name) try: curPropers = curProvider.find_propers(search_date) except AuthException as e: logger.log("Authentication error: " + ex(e), logger.WARNING) continue except Exception as e: logger.log( "Exception while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) continue # if they haven't been added by a different provider than add the proper to the list for x in curPropers: if not re.search(r'\b(proper|repack|real)\b', x.name, re.I): logger.log( 'find_propers returned a non-proper, we have caught and skipped it.', logger.DEBUG) continue name = self._genericName(x.name) if name not in propers: logger.log("Found new proper: " + x.name, logger.DEBUG) x.provider = curProvider propers[name] = x threading.currentThread().name = origThreadName # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: try: parse_result = NameParser(False).parse(curProper.name) except (InvalidNameException, InvalidShowException) as error: logger.log("{0}".format(error), logger.DEBUG) continue if not parse_result.series_name: continue if not parse_result.episode_numbers: logger.log( "Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG) continue logger.log( "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name, logger.DEBUG) # set the indexerid in the db to the show's indexerid curProper.indexerid = parse_result.show.indexerid # set the indexer in the db to the show's indexer curProper.indexer = parse_result.show.indexer # populate our Proper instance curProper.show = parse_result.show curProper.season = parse_result.season_number if parse_result.season_number is not None else 1 curProper.episode = parse_result.episode_numbers[0] curProper.release_group = parse_result.release_group curProper.version = parse_result.version curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime) curProper.content = None # filter release bestResult = pickBestResult(curProper, parse_result.show) if not bestResult: logger.log( "Proper " + curProper.name + " were rejected by our release filters.", logger.DEBUG) continue # only get anime proper if it has release group and version if bestResult.show.is_anime and not bestResult.release_group and bestResult.version == -1: logger.log( "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it", logger.DEBUG) continue # check if we actually want this proper (if it's the right quality) main_db_con = db.DBConnection() sql_results = main_db_con.select( "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", [bestResult.indexerid, bestResult.season, bestResult.episode]) if not sql_results: continue # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) oldStatus, oldQuality = Quality.splitCompositeStatus( int(sql_results[0][b"status"])) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality: continue # check if we actually want this proper (if it's the right release group and a higher version) if bestResult.show.is_anime: main_db_con = db.DBConnection() sql_results = main_db_con.select( "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [ bestResult.indexerid, bestResult.season, bestResult.episode ]) oldVersion = int(sql_results[0][b"version"]) oldRelease_group = (sql_results[0][b"release_group"]) if -1 < oldVersion < bestResult.version: logger.log("Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion)) else: continue if oldRelease_group != bestResult.release_group: logger.log("Skipping proper from release group: " + bestResult.release_group + ", does not match existing release group: " + oldRelease_group) continue # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in { (p.indexerid, p.season, p.episode) for p in finalPropers }: logger.log("Found a proper that we need: " + str(bestResult.name)) finalPropers.append(bestResult) return finalPropers
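The quality check above reads tv_episodes.status through Quality.splitCompositeStatus, which unpacks a single stored integer into a snatch/download action and a quality. The sketch below mirrors that idea under the assumption that the composite value is packed as action + 100 * quality; the constants and helpers are placeholders for illustration, not the library's real implementation:

SNATCHED = 2   # placeholder action value for the sketch
HDTV = 4       # placeholder quality value for the sketch

def composite_status(action, quality):
    # pack action and quality into one integer (assumed scheme: action + 100 * quality)
    return action + 100 * quality

def split_composite_status(status):
    # unpack back into (action, quality)
    return status % 100, status // 100

assert split_composite_status(composite_status(SNATCHED, HDTV)) == (SNATCHED, HDTV)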
def massEditSubmit(self, paused=None, default_ep_status=None, anime=None, sports=None, scene=None, season_folders=None, quality_preset=None, subtitles=None, air_by_date=None, anyQualities=None, bestQualities=None, toEdit=None, *args, **kwargs): dir_map = {} for cur_arg in filter(lambda x: x.startswith('orig_root_dir_'), kwargs): dir_map[kwargs[cur_arg]] = ek( six.text_type, kwargs[cur_arg.replace('orig_root_dir_', 'new_root_dir_')], 'utf-8') showIDs = toEdit.split("|") errors = [] for curShow in showIDs: curErrors = [] show_obj = Show.find(sickbeard.showList, int(curShow or 0)) if not show_obj: continue # noinspection PyProtectedMember cur_root_dir = ek(os.path.dirname, show_obj._location) # noinspection PyProtectedMember cur_show_dir = ek(os.path.basename, show_obj._location) if cur_root_dir in dir_map and cur_root_dir != dir_map[ cur_root_dir]: new_show_dir = ek(os.path.join, dir_map[cur_root_dir], cur_show_dir) # noinspection PyProtectedMember logger.log("For show " + show_obj.name + " changing dir from " + show_obj._location + " to " + new_show_dir) else: # noinspection PyProtectedMember new_show_dir = show_obj._location new_paused = ('off', 'on')[(paused == 'enable', show_obj.paused)[paused == 'keep']] new_default_ep_status = ( default_ep_status, show_obj.default_ep_status)[default_ep_status == 'keep'] new_anime = ('off', 'on')[(anime == 'enable', show_obj.anime)[anime == 'keep']] new_sports = ('off', 'on')[(sports == 'enable', show_obj.sports)[sports == 'keep']] new_scene = ('off', 'on')[(scene == 'enable', show_obj.scene)[scene == 'keep']] new_air_by_date = ( 'off', 'on')[(air_by_date == 'enable', show_obj.air_by_date)[air_by_date == 'keep']] new_season_folders = ('off', 'on')[( season_folders == 'enable', show_obj.season_folders)[season_folders == 'keep']] new_subtitles = ('off', 'on')[(subtitles == 'enable', show_obj.subtitles)[subtitles == 'keep']] if quality_preset == 'keep': anyQualities, bestQualities = Quality.splitQuality( show_obj.quality) elif try_int(quality_preset, None): bestQualities = [] exceptions_list = [] curErrors += self.editShow(curShow, new_show_dir, anyQualities, bestQualities, exceptions_list, defaultEpStatus=new_default_ep_status, season_folders=new_season_folders, paused=new_paused, sports=new_sports, subtitles=new_subtitles, anime=new_anime, scene=new_scene, air_by_date=new_air_by_date, directCall=True) if curErrors: logger.log("Errors: " + str(curErrors), logger.ERROR) errors.append( '<b>{0}:</b>\n<ul>'.format(show_obj.name) + ' '.join( ['<li>{0}</li>'.format(error) for error in curErrors]) + "</ul>") if len(errors) > 0: ui.notifications.error( _('{num_errors:d} error{plural} while saving changes:').format( num_errors=len(errors), plural="" if len(errors) == 1 else "s"), " ".join(errors)) return self.redirect("/manage/")
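The nested tuple indexing used above for paused, anime, sports and the other three-state form fields is terse; it is equivalent to the explicit resolver sketched below (hypothetical helper, same truth table: 'keep' mirrors the show's current setting, anything other than 'enable' maps to 'off'):

def resolve_tristate(form_value, current_value):
    # 'keep' mirrors the show's current setting; otherwise only 'enable' turns it on.
    if form_value == 'keep':
        return 'on' if current_value else 'off'
    return 'on' if form_value == 'enable' else 'off'

assert resolve_tristate('keep', True) == 'on'
assert resolve_tristate('disable', True) == 'off'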
def refine_video(video, episode): # try to enrich video object using information in original filename if episode.release_name: guess_ep = Episode.fromguess(None, guessit(episode.release_name)) for name in vars(guess_ep): if getattr(guess_ep, name) and not getattr(video, name): setattr(video, name, getattr(guess_ep, name)) # Use sickbeard metadata metadata_mapping = { 'episode': 'episode', 'release_group': 'release_group', 'season': 'season', 'series': 'show.name', 'series_imdb_id': 'show.imdbid', 'size': 'file_size', 'title': 'name', 'year': 'show.startyear' } def get_attr_value(obj, name): value = None for attr in name.split('.'): if not value: value = getattr(obj, attr, None) else: value = getattr(value, attr, None) return value for name in metadata_mapping: if not getattr(video, name) and get_attr_value(episode, metadata_mapping[name]): setattr(video, name, get_attr_value(episode, metadata_mapping[name])) elif episode.show.subtitles_sr_metadata and get_attr_value( episode, metadata_mapping[name]): setattr(video, name, get_attr_value(episode, metadata_mapping[name])) # Set quality from metadata _, quality = Quality.splitCompositeStatus(episode.status) if not video.format or episode.show.subtitles_sr_metadata: if quality & Quality.ANYHDTV: video.format = Quality.combinedQualityStrings.get(Quality.ANYHDTV) elif quality & Quality.ANYWEBDL: video.format = Quality.combinedQualityStrings.get(Quality.ANYWEBDL) elif quality & Quality.ANYBLURAY: video.format = Quality.combinedQualityStrings.get( Quality.ANYBLURAY) if not video.resolution or episode.show.subtitles_sr_metadata: if quality & (Quality.HDTV | Quality.HDWEBDL | Quality.HDBLURAY): video.resolution = '720p' elif quality & Quality.RAWHDTV: video.resolution = '1080i' elif quality & (Quality.FULLHDTV | Quality.FULLHDWEBDL | Quality.FULLHDBLURAY): video.resolution = '1080p' elif quality & (Quality.UHD_4K_TV | Quality.UHD_4K_WEBDL | Quality.UHD_4K_BLURAY): video.resolution = '4K' elif quality & (Quality.UHD_8K_TV | Quality.UHD_8K_WEBDL | Quality.UHD_8K_BLURAY): video.resolution = '8K'
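refine_video resolves mapping targets such as 'show.name' by walking attributes one dot at a time. A compact standalone equivalent of that traversal (hypothetical helper; unlike the inline get_attr_value it does not restart from the root object when an intermediate value happens to be falsy):

from functools import reduce

def get_dotted_attr(obj, dotted_name, default=None):
    # "show.name" walks obj.show.name; returns default if any hop is missing
    try:
        return reduce(getattr, dotted_name.split("."), obj)
    except AttributeError:
        return default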
def _find_season_quality(self, title, torrent_link, ep_number): """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ quality = Quality.UNKNOWN file_name = None data = self.get_url(torrent_link) if not data: return None try: with BS4Parser(data, features=['html5lib', 'permissive']) as soup: file_table = soup.find('table', attrs={'class': 'torrentFileList'}) if not file_table: return None files = [ x.text for x in file_table.find_all( 'td', attrs={'class': 'torFileName'}) ] video_files = filter( lambda i: i.rpartition('.')[2].lower() in mediaExtensions, files) # Filtering SingleEpisode/MultiSeason Torrent if len(video_files) < ep_number or len(video_files) > float( ep_number * 1.1): logger.log( u'Result %s lists %s episodes with %s episodes retrieved in torrent' % (title, ep_number, len(video_files)), logger.DEBUG) logger.log( u'Result %s seem to be a single episode or multi-season torrent, skipping result...' % title, logger.DEBUG) return None if Quality.UNKNOWN != Quality.sceneQuality(title): return title for file_name in video_files: quality = Quality.sceneQuality(os.path.basename(file_name)) if Quality.UNKNOWN != quality: break if None is not file_name and Quality.UNKNOWN == quality: quality = Quality.assumeQuality( os.path.basename(file_name)) if Quality.UNKNOWN == quality: logger.log( u'Unable to obtain a Season Quality for ' + title, logger.DEBUG) return None try: my_parser = NameParser(showObj=self.show) parse_result = my_parser.parse(file_name) except (InvalidNameException, InvalidShowException): return None logger.log( u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG) if parse_result.series_name and parse_result.season_number: title = parse_result.series_name + ' S%02d %s' % ( int(parse_result.season_number), self._reverse_quality(quality)) return title except Exception: logger.log( u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
def addShowByID(self, indexer_id, show_name, indexer="TVDB", which_series=None, indexer_lang=None, root_dir=None, default_status=None, quality_preset=None, any_qualities=None, best_qualities=None, season_folders=None, subtitles=None, full_show_path=None, other_shows=None, skip_show=None, provided_indexer=None, anime=None, scene=None, blacklist=None, whitelist=None, default_status_after=None, default_season_folders=None, configure_show_options=None): if indexer != "TVDB": indexer_id = helpers.tvdbid_from_remote_id(indexer_id, indexer.upper()) if not indexer_id: logger.log( "Unable to to find tvdb ID to add {0}".format(show_name)) ui.notifications.error( "Unable to add {0}".format(show_name), "Could not add {0}. We were unable to locate the tvdb id at this time." .format(show_name)) return indexer_id = try_int(indexer_id) if indexer_id <= 0 or Show.find(sickbeard.showList, indexer_id): return # Sanitize the parameter anyQualities and bestQualities. As these would normally be passed as lists any_qualities = any_qualities.split(',') if any_qualities else [] best_qualities = best_qualities.split(',') if best_qualities else [] # If configure_show_options is enabled let's use the provided settings if config.checkbox_to_value(configure_show_options): # prepare the inputs for passing along scene = config.checkbox_to_value(scene) anime = config.checkbox_to_value(anime) season_folders = config.checkbox_to_value(season_folders) subtitles = config.checkbox_to_value(subtitles) if whitelist: whitelist = short_group_names(whitelist) if blacklist: blacklist = short_group_names(blacklist) if not any_qualities: any_qualities = [] if not best_qualities or try_int(quality_preset, None): best_qualities = [] if not isinstance(any_qualities, list): any_qualities = [any_qualities] if not isinstance(best_qualities, list): best_qualities = [best_qualities] quality = Quality.combineQualities( [int(q) for q in any_qualities], [int(q) for q in best_qualities]) location = root_dir else: default_status = sickbeard.STATUS_DEFAULT quality = sickbeard.QUALITY_DEFAULT season_folders = sickbeard.SEASON_FOLDERS_DEFAULT subtitles = sickbeard.SUBTITLES_DEFAULT anime = sickbeard.ANIME_DEFAULT scene = sickbeard.SCENE_DEFAULT default_status_after = sickbeard.STATUS_DEFAULT_AFTER if sickbeard.ROOT_DIRS: root_dirs = sickbeard.ROOT_DIRS.split('|') location = root_dirs[int(root_dirs[0]) + 1] else: location = None if not location: logger.log( "There was an error creating the show, no root directory setting found" ) return _("No root directories setup, please go back and add one.") show_name = sickchill.indexer[1].get_series_by_id( indexer_id, indexer_lang).seriesName show_dir = None if not show_name: ui.notifications.error(_('Unable to add show')) return self.redirect('/home/') # add the show sickbeard.showQueueScheduler.action.add_show( indexer=1, indexer_id=indexer_id, showDir=show_dir, default_status=default_status, quality=quality, season_folders=season_folders, lang=indexer_lang, subtitles=subtitles, subtitles_sr_metadata=None, anime=anime, scene=scene, paused=None, blacklist=blacklist, whitelist=whitelist, default_status_after=default_status_after, root_dir=location) ui.notifications.message( _('Show added'), _('Adding the specified show {show_name}').format( show_name=show_name)) # done adding show return self.redirect('/home/')
def findEpisode(self, episode, manualSearch=False): self._checkAuth() logger.log(u"Searching " + self.name + " for " + episode.prettyName()) self.cache.updateCache() results = self.cache.searchCache(episode, manualSearch) logger.log(u"Cache results: " + str(results), logger.DEBUG) # if we got some results then use them no matter what. # OR # return anyway unless we're doing a manual search if results or not manualSearch: return results # create a copy of the episode, using scene numbering episode_scene = copy.copy(episode) episode_scene.convertToSceneNumbering() simple_show_name = self._get_simple_name_for_show(episode.show) if not simple_show_name: logger.log( u"Show %s not known to dtvt, not running any further search." % (episode.show.name), logger.MESSAGE) return results query_params = {'show_name': simple_show_name} if episode.show.air_by_date: query_params['episode_num'] = str(episode.airdate) else: query_params['episode_num'] = 'S%02dE%02d' % (episode.season, episode.episode) api_result = self._api_call('1.0/torrent.getInfosAll', query_params) if api_result: for cur_result in api_result: #{ # "name": "Futurama.S06E23.720p.HDTV.x264-IMMERSE", # "quality": "720", # "age": 47406999, # "data_size": 369900878, # "seeds": 2, # "leechers": 0, # "link": "http:\/\/www.dailytvtorrents.org\/dl\/9pa\/Futurama.S06E23.720p.HDTV.x264-IMMERSE.DailyTvTorrents.torrent" #} title = cur_result['name'] url = cur_result['link'] try: myParser = NameParser() parse_result = myParser.parse(title, True) except InvalidNameException: logger.log( u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING) continue if episode.show.air_by_date: if parse_result.air_date != episode.airdate: logger.log( "Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG) continue elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers: logger.log( "Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG) continue #quality = cur_result['quality'] - actually, we get a bit more info # from the torrent name, so let's use that instead. quality = Quality.nameQuality(title) if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch): logger.log( u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG) continue logger.log(u"Found result " + title + " at " + url, logger.DEBUG) result = self.getResult([episode]) result.url = url result.name = title result.quality = quality results.append(result) else: logger.log(u"No result from api call 1.0/torrent.getInfosAll", logger.WARNING) return results
def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results self.categories = "cat=" + str(self.cat) for mode in search_params: items = [] logger.log("Search Mode: {0}".format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode == 'RSS': self.page = 2 last_page = 0 y = int(self.page) if search_string == '': continue search_string = str(search_string).replace('.', ' ') for x in range(0, y): z = x * 20 if last_page: break if mode != 'RSS': search_url = (self.urls['search_page'] + '&filter={2}').format( z, self.categories, search_string) else: search_url = self.urls['search_page'].format( z, self.categories) if mode != 'RSS': logger.log( "Search string: {0}".format( search_string.decode("utf-8")), logger.DEBUG) data = self.get_url(search_url, returns='text') if not data: logger.log("No data returned from provider", logger.DEBUG) continue try: with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', class_='copyright') torrent_rows = torrent_table( 'tr') if torrent_table else [] # Continue only if one Release is found if len(torrent_rows) < 3: logger.log( "Data returned from provider does not contain any torrents", logger.DEBUG) last_page = 1 continue if len(torrent_rows) < 42: last_page = 1 for result in torrent_table('tr')[2:]: try: link = result.find('td').find('a') title = link.string download_url = self.urls[ 'download'] % result('td')[8].find( 'a')['href'][-8:] leechers = result('td')[3]('td')[0].text leechers = int(leechers.strip('[]')) seeders = result('td')[3]('td')[1].text seeders = int(seeders.strip('[]')) torrent_size = result('td')[3]( 'td')[3].text.strip('[]') + " GB" size = convert_size(torrent_size) or -1 except (AttributeError, TypeError): continue filename_qt = self._reverseQuality( self._episodeQuality(result)) for text in self.hdtext: title1 = title title = title.replace(text, filename_qt) if title != title1: break if Quality.nameQuality( title) == Quality.UNKNOWN: title += filename_qt if not self._is_italian( result) and not self.subtitle: logger.log( "Torrent is subtitled, skipping: {0} ". format(title), logger.DEBUG) continue if self.engrelease and not self._is_english( result): logger.log( "Torrent isnt english audio/subtitled , skipping: {0} " .format(title), logger.DEBUG) continue search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] show_title = search_show rindex = re.search(r'([Ss][\d{1,2}]+)', title) if rindex: show_title = title[:rindex.start()] ep_params = title[rindex.start():] if show_title.lower() != search_show.lower( ) and search_show.lower() in show_title.lower( ): new_title = search_show + ep_params title = new_title if not all([title, download_url]): continue if self._is_season_pack(title): title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) # Filter unseeded torrent if seeders < self.minseed or leechers < self.minleech: if mode != 'RSS': logger.log( "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})" .format(title, seeders, leechers), logger.DEBUG) continue item = { 'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': '' } if mode != 'RSS': logger.log( "Found result: {0} with {1} seeders and {2} leechers" .format(title, seeders, leechers), logger.DEBUG) items.append(item) except Exception: logger.log( "Failed parsing provider. 
Traceback: {0}".format( traceback.format_exc()), logger.ERROR) # For each search mode sort all the items by seeders if available items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results
def getQuality(self, item, anime=False): title = item.get('title') quality = Quality.sceneQuality(title, anime) return quality
def getQuality(self, item, anime=False): title = helpers.get_xml_text( item.getElementsByTagName('title')[0]).replace("/", " ") quality = Quality.nameQuality(title, anime) return quality
def _find_season_quality(self, title, torrent_id, ep_number): """ Return the modified title of a Season Torrent with the quality found by inspecting the torrent file list """ mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4'] quality = Quality.UNKNOWN fileName = None fileURL = self.url + 'ajax_details_filelist.php?id=' + str(torrent_id) data = self.getURL(fileURL) if not data: return None filesList = re.findall('<td.+>(.*?)</td>', data) if not filesList: # disabled errormsg for now # logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR) return None videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList) # Filtering SingleEpisode/MultiSeason Torrent if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1): logger.log( u"Result " + title + " has " + str(ep_number) + " episodes but the torrent lists " + str( len(videoFiles)) + " video files", logger.DEBUG) logger.log(u"Result " + title + " seems to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG) return None if Quality.sceneQuality(title) != Quality.UNKNOWN: return title for fileName in videoFiles: quality = Quality.sceneQuality(os.path.basename(fileName)) if quality != Quality.UNKNOWN: break if fileName is not None and quality == Quality.UNKNOWN: quality = Quality.assumeQuality(os.path.basename(fileName)) if quality == Quality.UNKNOWN: logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG) return None try: myParser = NameParser(showObj=self.show) parse_result = myParser.parse(fileName) except (InvalidNameException, InvalidShowException): return None logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG) if parse_result.series_name and parse_result.season_number: title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality( quality) return title
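The season-pack detection above hinges on a file-count heuristic: the torrent's video files must number at least the expected episode count and at most roughly 110% of it. Pulled out as a hypothetical standalone predicate:

MEDIA_EXTENSIONS = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts', 'ogv', 'rar', 'zip', 'mp4']

def looks_like_season_pack(file_names, expected_episodes):
    # Keep only media files, then require the count to sit between the expected
    # episode count and about 110% of it (a few extras/samples are tolerated).
    video_files = [f for f in file_names if f.rpartition('.')[2].lower() in MEDIA_EXTENSIONS]
    return expected_episodes <= len(video_files) <= expected_episodes * 1.1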
def addNewShow(self, whichSeries=None, indexerLang=None, rootDir=None, defaultStatus=None, quality_preset=None, anyQualities=None, bestQualities=None, season_folders=None, subtitles=None, subtitles_sr_metadata=None, fullShowPath=None, other_shows=None, skipShow=None, providedIndexer=None, anime=None, scene=None, blacklist=None, whitelist=None, defaultStatusAfter=None): """ Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are provided then it forwards back to newShow, if not it goes to /home. """ if not indexerLang: indexerLang = sickbeard.INDEXER_DEFAULT_LANGUAGE # grab our list of other dirs if given if not other_shows: other_shows = [] elif not isinstance(other_shows, list): other_shows = [other_shows] def finishAddShow(): # if there are no extra shows then go home if not other_shows: return self.redirect('/home/') # peel off the next one next_show_dir = other_shows[0] rest_of_show_dirs = other_shows[1:] # go to add the next show return self.newShow(next_show_dir, rest_of_show_dirs) # if we're skipping then behave accordingly if skipShow: return finishAddShow() # sanity check on our inputs if (not rootDir and not fullShowPath) or not whichSeries: return _( "Missing params, no Indexer ID or folder: {show_to_add} and {root_dir}/{show_path}" ).format(show_to_add=whichSeries, root_dir=rootDir, show_path=fullShowPath) # figure out what show we're adding and where series_pieces = whichSeries.split('|') if (whichSeries and rootDir) or (whichSeries and fullShowPath and len(series_pieces) > 1): if len(series_pieces) < 6: logger.log( "Unable to add show due to show selection. Not anough arguments: {0}" .format((repr(series_pieces))), logger.ERROR) ui.notifications.error( _("Unknown error. Unable to add show due to problem with show selection." 
)) return self.redirect('/addShows/existingShows/') indexer = int(series_pieces[1]) indexer_id = int(series_pieces[3]) # Show name was sent in UTF-8 in the form show_name = xhtml_unescape(series_pieces[4]).decode('utf-8') else: # if no indexer was provided use the default indexer set in General settings if not providedIndexer: providedIndexer = sickbeard.INDEXER_DEFAULT indexer = int(providedIndexer) indexer_id = int(whichSeries) show_name = ek(os.path.basename, ek(os.path.normpath, xhtml_unescape(fullShowPath))) # use the whole path if it's given, or else append the show name to the root dir to get the full show path if fullShowPath: show_dir = ek(os.path.normpath, xhtml_unescape(fullShowPath)) else: show_dir = ek(os.path.join, rootDir, sanitize_filename(xhtml_unescape(show_name))) # blanket policy - if the dir exists you should have used "add existing show" numbnuts if ek(os.path.isdir, show_dir) and not fullShowPath: ui.notifications.error( _("Unable to add show"), _("Folder {show_dir} exists already").format( show_dir=show_dir)) return self.redirect('/addShows/existingShows/') # don't create show dir if config says not to if sickbeard.ADD_SHOWS_WO_DIR: logger.log("Skipping initial creation of " + show_dir + " due to config.ini setting") else: dir_exists = helpers.makeDir(show_dir) if not dir_exists: logger.log( "Unable to create the folder " + show_dir + ", can't add the show", logger.ERROR) ui.notifications.error( _("Unable to add show"), _("Unable to create the folder {show_dir}, can't add the show" ).format(show_dir=show_dir)) # Don't redirect to default page because user wants to see the new show return self.redirect("/home/") else: helpers.chmodAsParent(show_dir) # prepare the inputs for passing along scene = config.checkbox_to_value(scene) anime = config.checkbox_to_value(anime) season_folders = config.checkbox_to_value(season_folders) subtitles = config.checkbox_to_value(subtitles) subtitles_sr_metadata = config.checkbox_to_value(subtitles_sr_metadata) if whitelist: whitelist = short_group_names(whitelist) if blacklist: blacklist = short_group_names(blacklist) if not anyQualities: anyQualities = [] if not bestQualities or try_int(quality_preset, None): bestQualities = [] if not isinstance(anyQualities, list): anyQualities = [anyQualities] if not isinstance(bestQualities, list): bestQualities = [bestQualities] newQuality = Quality.combineQualities([int(q) for q in anyQualities], [int(q) for q in bestQualities]) # add the show sickbeard.showQueueScheduler.action.add_show( indexer, indexer_id, showDir=show_dir, default_status=int(defaultStatus), quality=newQuality, season_folders=season_folders, lang=indexerLang, subtitles=subtitles, subtitles_sr_metadata=subtitles_sr_metadata, anime=anime, scene=scene, paused=None, blacklist=blacklist, whitelist=whitelist, default_status_after=int(defaultStatusAfter), root_dir=None) ui.notifications.message( _('Show added'), _('Adding the specified show into {show_dir}').format( show_dir=show_dir)) return finishAddShow()
def getQuality(self, item): url = item.getElementsByTagName('enclosure')[0].getAttribute('url') quality = Quality.sceneQuality(url) return quality
class ProperFinder(): def __init__(self): self.updateInterval = datetime.timedelta(hours=1) def run(self): if not sickbeard.DOWNLOAD_PROPERS: return # look for propers every night at 1 AM updateTime = datetime.time(hour=1) logger.log(u"Checking proper time", logger.DEBUG) hourDiff = datetime.datetime.today().time().hour - updateTime.hour dayDiff = (datetime.date.today() - self._get_lastProperSearch()).days # if it's less than an interval after the update time then do an update if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600 or dayDiff >= 1: logger.log(u"Beginning the search for new propers") else: return propers = self._getProperList() self._downloadPropers(propers) self._set_lastProperSearch(datetime.datetime.today().toordinal()) def _getProperList(self): propers = {} # for each provider get a list of the propers for curProvider in providers.sortedProviderList(): if not curProvider.isActive(): continue search_date = datetime.datetime.today() - datetime.timedelta( days=2) logger.log(u"Searching for any new PROPER releases from " + curProvider.name) try: curPropers = curProvider.findPropers(search_date) except exceptions.AuthException, e: logger.log(u"Authentication error: " + ex(e), logger.ERROR) continue # if they haven't been added by a different provider than add the proper to the list for x in curPropers: name = self._genericName(x.name) if not name in propers: logger.log(u"Found new proper: " + x.name, logger.DEBUG) x.provider = curProvider propers[name] = x # take the list of unique propers and get it sorted by sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True) finalPropers = [] for curProper in sortedPropers: # parse the file name try: myParser = NameParser(False) parse_result = myParser.parse(curProper.name).convert() except InvalidNameException: logger.log( u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG) continue if not parse_result.episode_numbers: logger.log( u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG) continue # populate our Proper instance if parse_result.air_by_date: curProper.season = -1 curProper.episode = parse_result.air_date else: curProper.season = parse_result.season_number if parse_result.season_number != None else 1 curProper.episode = parse_result.episode_numbers[0] curProper.quality = Quality.nameQuality(curProper.name) # for each show in our list for curShow in sickbeard.showList: if not parse_result.series_name: continue genericName = self._genericName(parse_result.series_name) # get the scene name masks sceneNames = set( show_name_helpers.makeSceneShowSearchStrings(curShow)) # for each scene name mask for curSceneName in sceneNames: # if it matches if genericName == self._genericName(curSceneName): logger.log( u"Successful match! 
Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG) # set the indexerid in the db to the show's indexerid curProper.indexerid = curShow.indexerid # set the indexer in the db to the show's indexer curProper.indexer = curShow.indexer # since we found it, break out break # if we found something in the inner for loop break out of this one if curProper.indexerid != -1: break if curProper.indexerid == -1: continue if not show_name_helpers.filterBadReleases(curProper.name): logger.log( u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG) continue showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid) if not showObj: logger.log( u"Unable to find the show with indexerID " + str(curProper.indexerid), logger.ERROR) continue if showObj.rls_ignore_words and search.filter_release_name( curProper.name, showObj.rls_ignore_words): logger.log( u"Ignoring " + curProper.name + " based on ignored words filter: " + showObj.rls_ignore_words, logger.MESSAGE) continue if showObj.rls_require_words and not search.filter_release_name( curProper.name, showObj.rls_require_words): logger.log( u"Ignoring " + curProper.name + " based on required words filter: " + showObj.rls_require_words, logger.MESSAGE) continue # if we have an air-by-date show then get the real season/episode numbers if curProper.season == -1 and curProper.indexerid: indexer_lang = showObj.lang lINDEXER_API_PARMS = sickbeard.indexerApi( showObj.indexer).api_params.copy() if indexer_lang and not indexer_lang == 'en': lINDEXER_API_PARMS['language'] = indexer_lang try: t = sickbeard.indexerApi( showObj.indexer).indexer(**lINDEXER_API_PARMS) epObj = t[curProper.indexerid].airedOn( curProper.episode)[0] curProper.season = int(epObj["seasonnumber"]) curProper.episodes = [int(epObj["episodenumber"])] except sickbeard.indexer_episodenotfound: logger.log( u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING) continue else: # items stored in cache are scene numbered, convert before lookups epObj = showObj.getEpisode(curProper.season, curProper.episode) curProper.season = epObj.season curProper.episode = epObj.episode # check if we actually want this proper (if it's the right quality) sqlResults = db.DBConnection().select( "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode]) if not sqlResults: continue oldStatus, oldQuality = Quality.splitCompositeStatus( int(sqlResults[0]["status"])) # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones) if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality: continue # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers if curProper.indexerid != -1 and ( curProper.indexerid, curProper.season, curProper.episode) not in map( operator.attrgetter('indexerid', 'season', 'episode'), finalPropers): logger.log(u"Found a proper that we need: " + str(curProper.name)) finalPropers.append(curProper) return finalPropers
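The run() gate at the top of this class fires a proper search when the current hour falls inside the one-hour window after 1 AM, or when at least a full day has passed since the last recorded search; because and binds tighter than or, the condition reads as in the parenthesised sketch below (hypothetical helper and argument names):

import datetime

def should_search_propers(now, last_search_date, update_hour=1, update_interval=datetime.timedelta(hours=1)):
    # True when we are inside the window just after update_hour, or when at
    # least one full day has passed since the last recorded proper search.
    hour_diff = now.time().hour - update_hour
    day_diff = (now.date() - last_search_date).days
    within_window = 0 <= hour_diff < update_interval.seconds // 3600
    return within_window or day_diff >= 1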