def _makeURL(self, result):
    """Build candidate download URLs and a target filename for a result.

    Magnet links are expanded into a list of torrent-cache URLs built from
    the info-hash (base32 hashes are re-encoded to 40-char hex); any parse
    failure falls back to the original URL.

    Returns a (urls, filename) tuple; filename is u'' when the provider
    type is neither TORRENT nor NZB.
    """
    urls = []
    filename = u''

    if result.url.startswith('magnet'):
        try:
            torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()
            torrent_name = re.findall(r'dn=([^&]+)', result.url)[0]

            # 32-char hashes are base32; caches expect the 40-char hex form
            if len(torrent_hash) == 32:
                torrent_hash = b16encode(b32decode(torrent_hash)).upper()

            if not torrent_hash:
                logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
                return (urls, filename)

            urls = [
                'http://torcache.net/torrent/' + torrent_hash + '.torrent',
                'http://zoink.ch/torrent/' + torrent_name + '.torrent',
                'http://torrage.com/torrent/' + torrent_hash + '.torrent',
            ]
        except Exception:
            # unparseable magnet: fall back to the raw URL (a bare except
            # here would also have swallowed KeyboardInterrupt/SystemExit)
            urls = [result.url]
    else:
        urls = [result.url]

    if self.providerType == GenericProvider.TORRENT:
        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif self.providerType == GenericProvider.NZB:
        filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)

    return (urls, filename)
def _makeURL(self, result):
    """Return (urls, filename) download candidates for a search result.

    Magnet links are expanded through the provider's btCacheURLS templates;
    anything else is used as-is. filename is u'' when the provider type is
    unrecognised or the magnet could not be parsed.
    """
    candidate_urls = []
    save_name = u''

    if not result.url.startswith('magnet'):
        candidate_urls = [result.url]
    else:
        try:
            info_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()

            # the display name is optional in a magnet URI
            try:
                display_name = re.findall('dn=([^&]+)', result.url)[0]
            except Exception:
                display_name = 'NO_DOWNLOAD_NAME'

            # base32 hashes (32 chars) become 40-char hex for the caches
            if len(info_hash) == 32:
                info_hash = b16encode(b32decode(info_hash)).upper()

            if not info_hash:
                logger.log(u"Unable to extract torrent hash from magnet: " + ex(result.url), logger.ERROR)
                return candidate_urls, save_name

            candidate_urls = [tmpl.format(torrent_hash=info_hash, torrent_name=display_name)
                              for tmpl in self.btCacheURLS]
        except Exception:
            logger.log(u"Unable to extract torrent hash or name from magnet: " + ex(result.url), logger.ERROR)
            return candidate_urls, save_name

    if self.providerType == GenericProvider.TORRENT:
        save_name = ek(os.path.join, sickbeard.TORRENT_DIR,
                       helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif self.providerType == GenericProvider.NZB:
        save_name = ek(os.path.join, sickbeard.NZB_DIR,
                       helpers.sanitizeFileName(result.name) + '.' + self.providerType)

    return candidate_urls, save_name
def downloadResult(self, result):
    """
    Save the result to disk.

    Returns True when a file was downloaded and verified, False otherwise
    (the unknown-provider branch used to return None).
    """
    # check for auth
    if not self._doLogin():
        return False

    if self.providerType == GenericProvider.TORRENT:
        try:
            torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()
            torrent_name = re.findall(r'dn=([^&]+)', result.url)[0]

            # base32 (32 chars) -> 40-char hex expected by the caches
            if len(torrent_hash) == 32:
                torrent_hash = b16encode(b32decode(torrent_hash)).upper()

            if not torrent_hash:
                logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
                return False

            urls = [
                'http://torcache.net/torrent/' + torrent_hash + '.torrent',
                'http://zoink.ch/torrent/' + torrent_name + '.torrent',
                'http://torrage.com/torrent/' + torrent_hash + '.torrent',
            ]
        except Exception:
            # not a magnet (or unparseable) - try the link directly
            urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif self.providerType == GenericProvider.NZB:
        urls = [result.url]
        filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    else:
        # unknown provider type: nothing we can download
        return False

    for url in urls:
        logger.log(u"Downloading a result from " + self.name + " at " + url)
        if helpers.download_file(url, filename, session=self.session):
            if self._verify_download(filename):
                if self.providerType == GenericProvider.TORRENT:
                    logger.log(u"Saved magnet link to " + filename, logger.INFO)
                else:
                    logger.log(u"Saved result to " + filename, logger.INFO)
                return True
        else:
            logger.log(u"Could not download %s" % url, logger.WARNING)
            helpers._remove_file_failed(filename)

    if len(urls):
        logger.log(u"Failed to download any results", logger.WARNING)

    return False
def downloadResult(self, result):
    """
    Save the result to disk.

    Returns True on success, False otherwise (the unknown-provider branch
    used to return None).
    """
    # check for auth
    if not self._doLogin():
        return False

    if self.providerType == GenericProvider.TORRENT:
        try:
            torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()

            # base32 (32 chars) -> 40-char hex; caches use lowercase here
            if len(torrent_hash) == 32:
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()

            if not torrent_hash:
                logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
                return False

            urls = [
                'http://torcache.net/torrent/' + torrent_hash + '.torrent',
                'http://torrage.com/torrent/' + torrent_hash + '.torrent',
                'http://zoink.it/torrent/' + torrent_hash + '.torrent',
            ]
        except Exception:
            # not a magnet (or unparseable) - try the link directly
            urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif self.providerType == GenericProvider.NZB:
        urls = [result.url]
        filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    else:
        # unknown provider type: nothing we can download
        return False

    for url in urls:
        if helpers.download_file(url, filename, session=self.session):
            logger.log(u"Downloading a result from " + self.name + " at " + url)
            if self.providerType == GenericProvider.TORRENT:
                logger.log(u"Saved magnet link to " + filename, logger.MESSAGE)
            else:
                logger.log(u"Saved result to " + filename, logger.MESSAGE)

            #TODO This is not working on Android for some reason
            #if self._verify_download(filename):
            return True

    logger.log(u"Failed to download result", logger.ERROR)
    return False
def download_result(self, result):
    """
    Save the result to disk.

    Returns True on success, False otherwise (the unknown-provider branch
    used to return None).
    """
    # check for auth
    if not self._do_login():
        return False

    if GenericProvider.TORRENT == self.providerType:
        try:
            torrent_hash = re.findall(r'urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()

            # base32 (32 chars) -> 40-char hex; caches use lowercase here
            if 32 == len(torrent_hash):
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()

            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False

            urls = ['https://%s/%s.torrent' % (u, torrent_hash)
                    for u in ('torcache.net/torrent', 'torrage.com/torrent',
                              'getstrike.net/torrents/api/download')]
        except Exception:
            # not a magnet (or unparseable) - try the link directly
            urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif GenericProvider.NZB == self.providerType:
        urls = [result.url]
        filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    else:
        # unknown provider type: nothing we can download
        return False

    for url in urls:
        if helpers.download_file(url, filename, session=self.session):
            logger.log(u'Downloading a result from ' + self.name + ' at ' + url)
            if GenericProvider.TORRENT == self.providerType:
                logger.log(u'Saved magnet link to ' + filename, logger.MESSAGE)
            else:
                logger.log(u'Saved result to ' + filename, logger.MESSAGE)

            if self._verify_download(filename):
                return True
            elif ek.ek(os.path.isfile, filename):
                # verification failed: don't leave a broken file behind
                ek.ek(os.remove, filename)

    logger.log(u'Failed to download result', logger.ERROR)
    return False
def downloadResult(self, result):
    """Download a result and write it into the matching black-hole folder.

    Returns True on success, False when the fetch fails or the provider
    type is unknown.
    """
    logger.log(u"Downloading a result from " + self.name + " at " + result.url)

    data = self.getURL(result.url)
    if data is None:
        return False

    # pick the watch folder and write mode for this provider type
    if self.providerType == GenericProvider.NZB:
        saveDir = sickbeard.NZB_DIR
        writeMode = "w"
    elif self.providerType == GenericProvider.TORRENT:
        saveDir = sickbeard.TORRENT_DIR
        writeMode = "wb"
    else:
        return False

    fileName = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + "." + self.providerType)

    logger.log(u"Saving to " + fileName, logger.DEBUG)

    # close the handle even if write() raises (was leaked on error before)
    fileOut = open(fileName, writeMode)
    try:
        fileOut.write(data)
    finally:
        fileOut.close()

    return True
def addDefaultShow(self, indexer, indexer_id, name, status):
    """
    Adds a new show with the default settings
    """
    if not helpers.findCertainShow(sickbeard.showList, int(indexer_id)):
        logger.log(u"Adding show " + str(indexer_id))
        root_dirs = sickbeard.ROOT_DIRS.split('|')

        try:
            # root_dirs[0] holds the index of the default root dir
            location = root_dirs[int(root_dirs[0]) + 1]
        except (ValueError, IndexError):
            # missing/garbled ROOT_DIRS setting - handled below
            # (a bare except here would also have caught KeyboardInterrupt)
            location = None

        if location:
            showPath = ek(os.path.join, location, helpers.sanitizeFileName(name))
            dir_exists = helpers.makeDir(showPath)
            if not dir_exists:
                logger.log(u"Unable to create the folder %s , can't add the show" % showPath, logger.WARNING)
                return
            else:
                helpers.chmodAsParent(showPath)

            sickbeard.showQueueScheduler.action.addShow(int(indexer), int(indexer_id), showPath,
                                                        default_status=status,
                                                        quality=int(sickbeard.QUALITY_DEFAULT),
                                                        flatten_folders=int(sickbeard.FLATTEN_FOLDERS_DEFAULT),
                                                        paused=sickbeard.TRAKT_START_PAUSED,
                                                        default_status_after=status,
                                                        archive=sickbeard.ARCHIVE_DEFAULT)
        else:
            logger.log(u"There was an error creating the show, no root directory setting found", logger.WARNING)
            return
def downloadResult(self, result): logger.log(u"Downloading a result from " + self.name+" at " + result.url) data = self.getURL(result.url) if data == None: return False if self.providerType == GenericProvider.NZB: saveDir = sickbeard.NZB_DIR writeMode = 'w' elif self.providerType == GenericProvider.TORRENT: saveDir = sickbeard.TORRENT_DIR writeMode = 'wb' else: return False fileName = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' + self.providerType) logger.log(u"Saving to " + fileName, logger.DEBUG) try: fileOut = open(fileName, writeMode) fileOut.write(data) fileOut.close() except IOError, e: logger.log("Unable to save the NZB: "+str(e).decode('utf-8'), logger.ERROR) return False
def downloadResult(self, nzb): id = self.getIDFromURL(nzb.url) if not id: logger.log("Unable to get an ID from " + str(nzb.url) + ", can't download from Newzbin's API", logger.ERROR) return False logger.log("Downloading an NZB from newzbin with id " + id) fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name) + ".nzb") logger.log("Saving to " + fileName) urllib._urlopener = NewzbinDownloader() params = urllib.urlencode( {"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id} ) try: urllib.urlretrieve(self.url + "api/dnzb/", fileName, data=params) except exceptions.NewzbinAPIThrottled: logger.log("Done waiting for Newzbin API throttle limit, starting downloads again") self.downloadResult(nzb) except (urllib.ContentTooShortError, IOError), e: logger.log("Error downloading NZB: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) return False
def addDefaultShow(self, indexer, indexer_id, name, status):
    """
    Adds a new show with the default settings
    """
    if not helpers.findCertainShow(sickbeard.showList, int(indexer_id)):
        logger.log(u"Adding show " + str(indexer_id))
        root_dirs = sickbeard.ROOT_DIRS.split('|')

        try:
            # root_dirs[0] holds the index of the default root dir
            location = root_dirs[int(root_dirs[0]) + 1]
        except (ValueError, IndexError):
            # missing/garbled ROOT_DIRS setting - handled below
            # (a bare except here would also have caught KeyboardInterrupt)
            location = None

        if location:
            showPath = ek.ek(os.path.join, location, helpers.sanitizeFileName(name))
            dir_exists = helpers.makeDir(showPath)
            if not dir_exists:
                logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
                return
            else:
                helpers.chmodAsParent(showPath)

            sickbeard.showQueueScheduler.action.addShow(int(indexer), int(indexer_id), showPath, status,
                                                        int(sickbeard.QUALITY_DEFAULT),
                                                        int(sickbeard.FLATTEN_FOLDERS_DEFAULT))
        else:
            logger.log(u"There was an error creating the show, no root directory setting found", logger.ERROR)
            return
def downloadResult(self, result): """ Save the result to disk. """ logger.log(u"Downloading a result from " + self.name + " at " + result.url) data = self.getURL(result.url) if data is None: return False # use the appropriate watch folder if self.providerType == GenericProvider.NZB: saveDir = sickbeard.NZB_DIR writeMode = 'w' elif self.providerType == GenericProvider.TORRENT: saveDir = sickbeard.TORRENT_DIR writeMode = 'wb' else: return False # use the result name as the filename file_name = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' + self.providerType) logger.log(u"Saving to " + file_name, logger.DEBUG) try: with open(file_name, writeMode) as fileOut: fileOut.write(data) helpers.chmodAsParent(file_name) except EnvironmentError, e: logger.log("Unable to save the file: " + ex(e), logger.ERROR) return False
def _destination_file_name(self, new_name):
    """Return the post-processed file name.

    When episode renaming is enabled, the sanitized new name keeps the
    current file's extension; otherwise the current file name is returned
    unchanged.
    """
    if not sickbeard.RENAME_EPISODES:
        return self.file_name

    extension = self.file_name.rpartition('.')[-1]
    return '%s.%s' % (helpers.sanitizeFileName(new_name), extension)
def renameFile(curFile, newName): filePath = os.path.split(curFile) oldFile = os.path.splitext(filePath[1]) newFilename = ek.ek(os.path.join, filePath[0], helpers.sanitizeFileName(newName) + oldFile[1]) logger.log("Renaming from " + curFile + " to " + newFilename) try: ek.ek(os.rename, curFile, newFilename) except (OSError, IOError), e: logger.log("Failed renaming " + curFile + " to " + os.path.basename(newFilename) + ": " + str(e), logger.ERROR) return False
def renameFile(movedFilePath, newName): filePath = os.path.split(movedFilePath) oldFile = os.path.splitext(filePath[1]) renamedFilePathname = ek.ek(os.path.join, filePath[0], helpers.sanitizeFileName(newName) + oldFile[1]) logger.log(u"Renaming from " + movedFilePath + " to " + renamedFilePathname) try: ek.ek(os.rename, movedFilePath, renamedFilePathname) except (OSError, IOError), e: logger.log(u"Failed renaming " + movedFilePath + " to " + os.path.basename(renamedFilePathname) + ": " + str(e), logger.ERROR) return False
def downloadResult(self, result): try: logger.log(u"Downloading a result from " + self.name + " at " + result.url) torrentFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) #add self referer to get application/x-bittorrent from torcache.net data = self.getURL(result.url, [("Referer", result.url)]) if data == None: return False fileOut = open(torrentFileName, 'wb') logger.log(u"Saving to " + torrentFileName, logger.DEBUG) fileOut.write(data) fileOut.close() helpers.chmodAsParent(torrentFileName) return self._verify_download(torrentFileName) except Exception, e: logger.log("Unable to save the file: "+str(e).decode('utf-8'), logger.ERROR) return False
def downloadTorrent(torrent):
    """Fetch a torrent from EZTV@BT-Chat and save it to the torrent dir.

    Returns True on success, False when the fetch returns no data.
    """
    logger.log("Downloading a torrent from EZTV@BT-Chat at " + torrent.url)

    data = _getEZTVURL(torrent.url)
    if data is None:
        return False

    fileName = os.path.join(sickbeard.TORRENT_DIR, helpers.sanitizeFileName(torrent.name) + ".torrent")

    logger.log("Saving to " + fileName, logger.DEBUG)

    # close the handle even if write() raises (was leaked before)
    fileOut = open(fileName, "wb")
    try:
        fileOut.write(data)
    finally:
        fileOut.close()

    return True
def downloadNZB(nzb): logger.log("Downloading an NZB from newzbin at " + nzb.url) fileName = os.path.join(sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.fileName())) logger.log("Saving to " + fileName, logger.DEBUG) urllib._urlopener = NewzbinDownloader() params = urllib.urlencode({"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": nzb.extraInfo[0]}) try: urllib.urlretrieve("http://v3.newzbin.com/api/dnzb/", fileName, data=params) except exceptions.NewzbinAPIThrottled: logger.log("Done waiting for Newzbin API throttle limit, starting downloads again") downloadNZB(nzb) except (urllib.ContentTooShortError, IOError), e: logger.log("Error downloading NZB: " + str(sys.exc_info()) + " - " + str(e), logger.ERROR) return False
def test_encoding(self):
    """Sanitized show paths built from unicode names must stay unicode."""
    base_dir = 'C:\\Temp\\TV'
    sample_names = [u'Les Enfants De La T\xe9l\xe9', u'RT� One']

    sickbeard.SYS_ENCODING = None
    try:
        locale.setlocale(locale.LC_ALL, "")
        sickbeard.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        sickbeard.SYS_ENCODING = 'UTF-8'

    for name in sample_names:
        self.assertIsInstance(ek(os.path.join, base_dir, sanitizeFileName(name)), unicode)
def addDefaultShow(self, tvdbid, name, status):
    """
    Adds a new show with the default settings
    """
    showObj = helpers.findCertainShow(sickbeard.showList, int(tvdbid))
    if showObj is not None:
        # already in the show list, nothing to do
        return

    logger.log(u"Adding show " + tvdbid)
    root_dirs = sickbeard.ROOT_DIRS.split('|')
    # root_dirs[0] holds the index of the default root dir
    location = root_dirs[int(root_dirs[0]) + 1]

    showPath = ek.ek(os.path.join, location, helpers.sanitizeFileName(name))
    dir_exists = helpers.makeDir(showPath)
    if not dir_exists:
        logger.log(u"Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
        return
    else:
        helpers.chmodAsParent(showPath)

    sickbeard.showQueueScheduler.action.addShow(int(tvdbid), showPath, status,
                                                int(sickbeard.QUALITY_DEFAULT),
                                                int(sickbeard.FLATTEN_FOLDERS_DEFAULT), "fr",
                                                int(sickbeard.SUBTITLES_DEFAULT),
                                                sickbeard.AUDIO_SHOW_DEFAULT)
def downloadResult(self, result): """ Save the result to disk. """ #Hack for rtorrent user (it will not work for other torrent client) if sickbeard.TORRENT_METHOD == "blackhole" and result.url.startswith('magnet'): magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) magnetFileContent = 'd10:magnet-uri' + `len(result.url)` + ':' + result.url + 'e' try: fileOut = open(magnetFileName, 'wb') fileOut.write(magnetFileContent) fileOut.close() helpers.chmodAsParent(magnetFileName) except IOError, e: logger.log("Unable to save the file: "+ex(e), logger.ERROR) return False logger.log(u"Saved magnet link to "+magnetFileName+" ", logger.MESSAGE) return True
def downloadResult(self, result): """ Overridden to handle magnet links (using multiple fallbacks) """ logger.log(u"Downloading a result from " + self.name+" at " + result.url) if result.url and result.url.startswith('magnet:'): torrent_hash = self.getHashFromMagnet(result.url) if torrent_hash: urls = [url_fmt % torrent_hash for url_fmt in MAGNET_TO_TORRENT_URLS] else: logger.log(u"Failed to handle magnet url %s, skipping..." % torrent_hash, logger.DEBUG) return False else: urls = [result.url] # use the result name as the filename fileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) for url in urls: logger.log(u"Trying d/l url: " + url, logger.DEBUG) data = self.getURL(url) if data == None: logger.log(u"Got no data for " + url, logger.DEBUG) # fall through to next iteration else: try: fileOut = open(fileName, 'wb') fileOut.write(data) fileOut.close() helpers.chmodAsParent(fileName) except IOError, e: logger.log("Unable to save the file: "+ex(e), logger.ERROR) return False if self._verify_download(fileName): logger.log(u"Success with url: " + url, logger.DEBUG) return True else: logger.log(u"d/l url %s failed" % (url), logger.MESSAGE)
def _downloadResult(result): """ Downloads a result to the appropriate black hole folder. Returns a bool representing success. result: SearchResult instance to download. """ resProvider = result.provider newResult = False if resProvider == None: logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR) return False # nzbs with an URL can just be downloaded from the provider if result.resultType == "nzb": newResult = resProvider.downloadResult(result) # if it's an nzb data result elif result.resultType == "nzbdata": # get the final file path to the nzb fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(result.name) + ".nzb") logger.log(u"Saving NZB to " + fileName) newResult = True # save the data to disk try: fileOut = open(fileName, "w") fileOut.write(result.extraInfo[0]) fileOut.close() helpers.chmodAsParent(fileName) except IOError, e: logger.log(u"Error trying to save NZB to black hole: "+ex(e), logger.ERROR) newResult = False
def test_encoding(self):
    """Sanitized show paths built from unicode names must stay unicode."""
    rootDir = "C:\\Temp\\TV"
    strings = [u"Les Enfants De La T\xe9l\xe9", u"RT� One"]

    sickbeard.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        sickbeard.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ("ANSI_X3.4-1968", "US-ASCII", "ASCII"):
        sickbeard.SYS_ENCODING = "UTF-8"

    for s in strings:
        # let assertion errors propagate: the old `except Exception: ex(e)`
        # swallowed failures and made this test pass unconditionally
        show_dir = ek(os.path.join, rootDir, sanitizeFileName(s))
        self.assertTrue(isinstance(show_dir, unicode))
# for air-by-date shows use the year as the season folder if rootEp.show.is_air_by_date: seasonFolder = str(rootEp.airdate.year) else: seasonFolder = 'Season ' + str(rootEp.season) returnStr += logHelper(u"Season folders were " + str(rootEp.show.seasonfolders) + " which gave " + seasonFolder, logger.DEBUG) destDir = os.path.join(rootEp.show.location, seasonFolder) # movedFilePath is the full path to where we will move the file movedFilePath = os.path.join(destDir, biggestFileName) # renamedFilePath is the full path to the renamed file's eventual location if sickbeard.RENAME_EPISODES: renamedFilePath = os.path.join(destDir, helpers.sanitizeFileName(rootEp.prettyName())+biggestFileExt) else: renamedFilePath = movedFilePath returnStr += logHelper(u"The ultimate destination for " + fileName + " is " + renamedFilePath, logger.DEBUG) existingResult = _checkForExistingFile(renamedFilePath, fileName) # if there's no file with that exact filename then check for a different episode file (in case we're going to delete it) if existingResult == 0: existingResult = _checkForExistingFile(rootEp.location, fileName) if existingResult == -1: existingResult = -2 if existingResult == 1: existingResult = 2 returnStr += logHelper(u"Existing result: "+str(existingResult), logger.DEBUG)
def process(self):
    """
    Post-process a given file.

    Returns False when the file can't be identified or an existing file
    should not be replaced; raises PostProcessingFailed when renaming
    fails.
    """
    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season is None or not episodes:
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:
        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # if renaming is turned on then rename the episode (and associated files, if necessary)
    if sickbeard.RENAME_EPISODES:
        new_file_name = helpers.sanitizeFileName(ep_obj.prettyName())
        try:
            self._rename(self.file_path, new_file_name, sickbeard.MOVE_ASSOCIATED_FILES)
        except (OSError, IOError):
            # was `except OSError, IOError:` which caught only OSError and
            # rebound the name IOError to the caught exception
            raise exceptions.PostProcessingFailed("Unable to rename the files")

        # remember the new name of the file
        new_file_path = ek.ek(os.path.join, self.folder_path,
                              new_file_name + '.' + self.file_name.rpartition('.')[-1])
        self._log(u"After renaming the new file path is " + new_file_path, logger.DEBUG)
def download_result(self, result):
    """
    Save the result to disk.

    Torrent results are fetched from a list of public hash-cache mirrors
    (falling back to the raw URL), downloaded into the cache dir, verified,
    then moved to the configured black-hole dir. If every cache fails for a
    magnet, the magnet URI itself is written to a .magnet file as a last
    resort. Returns True when a file was saved, False otherwise (None for
    an unknown provider type).
    """
    # check for auth
    if not self._authorised():
        return False

    if GenericProvider.TORRENT == self.providerType:
        final_dir = sickbeard.TORRENT_DIR
        link_type = 'magnet'
        try:
            torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()
            # 32-char hashes are base32; re-encode to the 40-char hex form
            if 32 == len(torrent_hash):
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()

            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False

            # build one candidate URL per known torrent-cache mirror
            urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
                    for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
                              ('s', 'torrage.info'), ('', 'reflektor.karmorra.info'),
                              ('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
        except (StandardError, Exception):
            # not a magnet (or unparseable): treat as a direct torrent link
            link_type = 'torrent'
            urls = [result.url]

    elif GenericProvider.NZB == self.providerType:
        final_dir = sickbeard.NZB_DIR
        link_type = 'nzb'
        urls = [result.url]

    else:
        return

    # remember the current Referer header so it can be restored afterwards
    ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
    saved = False
    for url in urls:
        # download into the cache dir first; only verified files are moved
        cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
        base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
        cache_file = ek.ek(os.path.join, cache_dir, base_name)

        self.session.headers['Referer'] = url
        if helpers.download_file(url, cache_file, session=self.session):
            if self._verify_download(cache_file):
                logger.log(u'Downloaded %s result from %s' % (self.name, url))
                final_file = ek.ek(os.path.join, final_dir, base_name)
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except (OSError, Exception):
                    # move failed (e.g. cross-device); the cached copy stays
                    msg = 'copied cached file'
                logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                saved = True
                break

        # failed or unverified download: clean up the cache file
        remove_file_failed(cache_file)

    # restore (or remove) the Referer header we overwrote above
    if 'Referer' in self.session.headers:
        if ref_state:
            self.session.headers['Referer'] = ref_state
        else:
            del(self.session.headers['Referer'])

    if not saved and 'magnet' == link_type:
        logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)
        logger.log(u'Advice: in search settings, change from method blackhole to direct torrent client connect', logger.ERROR)
        # last resort: write the magnet URI itself to a .magnet file
        final_file = ek.ek(os.path.join, final_dir, '%s.%s' % (helpers.sanitizeFileName(result.name), link_type))
        try:
            with open(final_file, 'wb') as fp:
                fp.write(result.url)
                fp.flush()
                os.fsync(fp.fileno())
            logger.log(u'Saved magnet link to file as some clients (or plugins) support this, %s' % final_file)
        except (StandardError, Exception):
            pass
    elif not saved:
        logger.log(u'Server failed to return anything useful', logger.ERROR)

    return saved
# --- fragment from a torrent download routine: fetches the .torrent for a
# --- previously-extracted hash from torcache and saves it to the black hole
if not torrent_hash:
    logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
    return False

try:
    # verify=False: torcache's TLS cert was not trusted at the time
    r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False)
except Exception, e:
    logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
    return False

if not r.status_code == 200:
    return False

magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                       helpers.sanitizeFileName(result.name) + '.' + self.providerType)
magnetFileContent = r.content

try:
    with open(magnetFileName, 'wb') as fileOut:
        fileOut.write(magnetFileContent)

    helpers.chmodAsParent(magnetFileName)

except EnvironmentError, e:
    logger.log("Unable to save the file: " + ex(e), logger.ERROR)
    return False

logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
return True
# --- fragment from the post-processor: creates the destination folder,
# --- marks the episodes DOWNLOADED, then renames/moves the files
try:
    ek.ek(os.mkdir, dest_path)
    helpers.chmodAsParent(dest_path)
except (OSError, IOError):
    # was `except OSError, IOError:` which caught only OSError and
    # rebound the name IOError to the caught exception
    raise exceptions.PostProcessingFailed("Unable to create the episode's destination folder: " + dest_path)

# update the statuses before we rename so the quality goes into the name properly
for cur_ep in [ep_obj] + ep_obj.relatedEps:
    with cur_ep.lock:
        cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
        cur_ep.saveToDB()

# figure out the base name of the resulting episode file
if sickbeard.RENAME_EPISODES:
    orig_extension = self.file_name.rpartition('.')[-1]
    new_base_name = helpers.sanitizeFileName(ep_obj.prettyName())
    new_file_name = new_base_name + '.' + orig_extension
else:
    # if we're not renaming then there's no new base name, we'll just use the existing name
    new_base_name = None
    new_file_name = self.file_name

try:
    # move the episode and associated files to the show dir
    if sickbeard.KEEP_PROCESSED_DIR:
        self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
    else:
        self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
except (OSError, IOError):
    raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False,
                     old_status=None):
    """Search all enabled backlog providers for the given episodes of a show.

    :param show: show object to search for
    :param episodes: list of episode objects wanted
    :param manual_search: True when triggered by the user
    :param torrent_only: restrict the provider list to torrent providers
    :param try_other_searches: passed through to each provider's search
    :param old_status: optional prior composite status of the first episode
    :return: list of final (best per episode / season / multi-ep) results
    """
    found_results = {}
    final_results = []
    search_done = False

    orig_thread_name = threading.currentThread().name

    # decide whether quality filtering should use the "best" list only:
    # yes unless the old status is one of the plain wanted/failed/etc. states
    use_quality_list = None
    if any([episodes]):
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # run the provider's preferred mode first, optionally falling back
        # to the other mode (episode <-> season pack) once
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False) and ep_item.show == show, search_results[cur_ep])

                    # BUGFIX: membership was tested against found_results (which is
                    # keyed by provider id) instead of found_results[provider_id]
                    # (keyed by episode), so the extend branch could never run and
                    # later passes overwrote earlier results
                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (
                Quality.qualityStrings[season_qual], best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' % \
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' % (
                    best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent,
                    # creating multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(
                        multi_result.name, multi_result.size, multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' % (
                    needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: ' +
                           '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps),
                           logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode '
                                   + '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    # download the .torrent into the cache dir so it can be inspected
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        # scan the bencoded metadata for the real torrent name
                        name = None
                        try:
                            hdr = re.findall(r'(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            for item in range(0, 12):
                                y = x + v
                                name = 'name' == td[x:y]
                                w = re.findall(r'((?:i\d+e|d|l)?(\d+):)', td[y:y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x:x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if it's not a duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent sources enabled in Media Provider options to do backlog searches', logger.WARNING)
    elif not search_done:
        logger.log('Failed backlog search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results
def test_sanitizeFileName(self):
    """sanitizeFileName: path separators become dashes, quotes are dropped, leading/trailing dots trimmed."""
    cases = (
        ('a/b/c', 'a-b-c'),
        ('abc', 'abc'),
        ('a"b', 'ab'),
        ('.a.b..', 'a.b'),
    )
    for given, expected in cases:
        self.assertEqual(helpers.sanitizeFileName(given), expected)
# -- Fragment: magnet-to-torrent conversion (enclosing def starts before this excerpt) --
# Resolve a magnet link to a .torrent file via torcache and save it to the
# blackhole TORRENT_DIR. Returns True on success, False on any failure.
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
if not torrent_hash:
    # NOTE(review): unreachable in practice - the [0] above raises IndexError
    # before an empty hash could be seen here
    logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
    return False
try:
    r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent')
except Exception, e:
    logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
    return False
if not r.status_code == 200:
    return False
# target file: <TORRENT_DIR>/<sanitized result name>.<provider type>
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                       helpers.sanitizeFileName(result.name) + '.' + self.providerType)
magnetFileContent = r.content
try:
    fileOut = open(magnetFileName, 'wb')
    fileOut.write(magnetFileContent)
    fileOut.close()
    helpers.chmodAsParent(magnetFileName)
except IOError, e:
    logger.log("Unable to save the file: " + ex(e), logger.ERROR)
    return False
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
return True

# NOTE(review): the default is evaluated once at import time (stale/shared
# default argument); the body of this method continues beyond this excerpt
def findPropers(self, search_date=datetime.datetime.today()):
def findFrench(self, episode=None, manualSearch=False):
    """Search this provider for a French-language release of the given episode.

    :param episode: episode object to search for (must not be None - it is
        dereferenced immediately below)
    :param manualSearch: passed through to show.wantEpisode
    :return: list of result objects whose parsed name matches the episode,
        whose quality is wanted, and whose detected language is 'fr'
    """
    results = []
    self._checkAuth()
    logger.log(u"Searching "+self.name+" for " + episode.prettyName())

    itemList = []
    # query the provider with French-specific search strings
    for cur_search_string in self._get_episode_search_strings(episode,'french'):
        itemList += self._doSearch(cur_search_string, show=episode.show, french='french')

    for item in itemList:
        (title, url) = self._get_title_and_url(item)

        # parse the file name
        try:
            myParser = NameParser()
            parse_result = myParser.parse(helpers.sanitizeFileName(title))
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
            continue

        language = self._get_language(title,item)
        if episode.show.air_by_date:
            # air-by-date shows are matched on airdate rather than season/episode
            if parse_result.air_date != episode.airdate:
                logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
            logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG)
            continue

        quality = self.getQuality(item)
        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
            continue

        # keep only French-audio results
        if not language == 'fr':
            logger.log(u"Ignoring result "+title+" because the language: " + showLanguages[language] + " does not match the desired language: French")
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # build the result object
        result = self.getResult([episode])
        result.item = item
        if hasattr(item , 'getNZB'):
            result.extraInfo = [item.getNZB() ]
        elif hasattr(item , 'extraInfo'):
            result.extraInfo = item.extraInfo
        result.url = url
        result.name = title
        result.quality = quality
        if hasattr(item , 'audio_langs'):
            result.audio_lang=''.join(item.audio_langs)
        else:
            result.audio_lang=language
        results.append(result)

    return results
# -- Fragment: magnet-to-torrent conversion (enclosing def starts before this excerpt) --
# Resolve a magnet link to a .torrent file via torcache and save it to the
# blackhole TORRENT_DIR. Returns True on success, False on any failure.
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
if not torrent_hash:
    # NOTE(review): unreachable in practice - the [0] above raises IndexError
    # before an empty hash could be seen here
    logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
    return False
try:
    r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent')
except Exception, e:
    logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
    return False
if not r.status_code == 200:
    return False
# target file: <TORRENT_DIR>/<sanitized result name>.<provider type>
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                       helpers.sanitizeFileName(result.name) + '.' + self.providerType)
magnetFileContent = r.content
try:
    fileOut = open(magnetFileName, 'wb')
    fileOut.write(magnetFileContent)
    fileOut.close()
    helpers.chmodAsParent(magnetFileName)
except IOError, e:
    logger.log("Unable to save the file: " + ex(e), logger.ERROR)
    return False
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
return True

# NOTE(review): class body continues beyond this excerpt
class KATCache(tvcache.TVCache):
def downloadResult(self, result):
    """
    Save the result to disk.

    For torrent results the info hash is extracted from the (magnet) URL and
    resolved through public torrent cache services; NZB results are fetched
    directly. Returns True when a file was downloaded and verified,
    otherwise False.
    """

    # check for auth
    if not self._doLogin():
        return False

    if self.providerType == GenericProvider.TORRENT:
        try:
            torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()

            if len(torrent_hash) == 32:
                # 32 chars means a base32-encoded info hash -> convert to hex
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()

            if not torrent_hash:
                logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
                return False

            urls = ['http://torcache.net/torrent/' + torrent_hash + '.torrent',
                    'http://torrage.com/torrent/' + torrent_hash + '.torrent',
                    'http://zoink.it/torrent/' + torrent_hash + '.torrent']
        except Exception:
            # BUGFIX: narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; any parse failure
            # still falls back to the raw result url
            urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif self.providerType == GenericProvider.NZB:
        urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    else:
        # BUGFIX: was a bare `return` (None); keep the boolean contract
        return False

    for url in urls:
        if helpers.download_file(url, filename, session=self.session):
            logger.log(u"Downloading a result from " + self.name + " at " + url)

            if self.providerType == GenericProvider.TORRENT:
                logger.log(u"Saved magnet link to " + filename, logger.INFO)
            else:
                logger.log(u"Saved result to " + filename, logger.INFO)

            if self._verify_download(filename):
                return True

    logger.log(u"Failed to download result", logger.WARNING)
    return False
def download_result(self, result):
    """
    Save the result to disk.

    Torrent results are fetched via public cache services using the info
    hash from the result URL; NZB results are fetched directly. Returns
    True when a file was downloaded and verified, otherwise False.
    """
    # check for auth
    if not self._do_login():
        return False

    if GenericProvider.TORRENT == self.providerType:
        try:
            # BUGFIX: made case-insensitive; the pattern only matched lowercase
            # hex although the subsequent .upper() shows mixed case is expected
            torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()

            if 32 == len(torrent_hash):
                # 32 chars means a base32-encoded info hash -> convert to hex
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()

            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False

            urls = ['https://%s/%s.torrent' % (u, torrent_hash)
                    for u in ('torcache.net/torrent', 'torrage.com/torrent',
                              'getstrike.net/torrents/api/download')]
        except Exception:
            # BUGFIX: narrowed from a bare `except:`; any parse failure still
            # falls back to the raw result url
            urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)

    elif GenericProvider.NZB == self.providerType:
        urls = [result.url]

        filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                         helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    else:
        # BUGFIX: was a bare `return` (None); keep the boolean contract
        return False

    for url in urls:
        if helpers.download_file(url, filename, session=self.session):
            logger.log(u'Downloading a result from ' + self.name + ' at ' + url)

            if GenericProvider.TORRENT == self.providerType:
                logger.log(u'Saved magnet link to ' + filename, logger.MESSAGE)
            else:
                logger.log(u'Saved result to ' + filename, logger.MESSAGE)

            if self._verify_download(filename):
                return True
            elif ek.ek(os.path.isfile, filename):
                # remove the corrupt/incomplete download before trying the next url
                ek.ek(os.remove, filename)

    logger.log(u'Failed to download result', logger.ERROR)
    return False
# -- Fragment: magnet-to-torrent conversion (enclosing def starts before this excerpt) --
# Fetch the .torrent for an extracted info hash from torcache and save it to
# the blackhole TORRENT_DIR. Returns True on success, False on any failure.
if not torrent_hash:
    logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
    return False

try:
    r = requests.get("http://torcache.net/torrent/" + torrent_hash + ".torrent")
except Exception, e:
    logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
    return False

if not r.status_code == 200:
    return False

# target file: <TORRENT_DIR>/<sanitized result name>.<provider type>
magnetFileName = ek.ek(
    os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + "." + self.providerType
)
magnetFileContent = r.content

try:
    with open(magnetFileName, "wb") as fileOut:
        fileOut.write(magnetFileContent)

    helpers.chmodAsParent(magnetFileName)

except EnvironmentError, e:
    logger.log("Unable to save the file: " + ex(e), logger.ERROR)
    return False

logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
return True
def findSeasonResults(self, show, season):
    """Search this provider for all episodes of a season of the given show.

    :param show: show object to search for
    :param season: season number
    :return: dict mapping an episode number (or the MULTI_EP_RESULT /
        SEASON_RESULT sentinels) to a list of result objects
    """
    itemList = []
    results = {}

    # run every season search string through the provider
    for curString in self._get_season_search_strings(show, season):
        itemList += self._doSearch(curString, show=show, season=season)

    for item in itemList:
        (title, url) = self._get_title_and_url(item)
        quality = self.getQuality(item)

        # parse the file name
        try:
            myParser = NameParser(False)
            parse_result = myParser.parse(helpers.sanitizeFileName(title))
        except InvalidNameException:
            logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
            continue

        language = self._get_language(title,item)
        if not show.air_by_date:
            # this check is meaningless for non-season searches
            if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
                logger.log(u"The result "+title+" doesn't seem to be a valid episode for season "+str(season)+", ignoring")
                continue

            # we just use the existing info for normal searches
            actual_season = season
            actual_episodes = parse_result.episode_numbers
        else:
            if not parse_result.air_by_date:
                logger.log(u"This is supposed to be an air-by-date search but the result "+title+" didn't parse as one, skipping it", logger.DEBUG)
                continue

            # map the parsed airdate back to season/episode via the database
            myDB = db.DBConnection()
            sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.tvdbid, parse_result.air_date.toordinal()])

            if len(sql_results) != 1:
                logger.log(u"Tried to look up the date for the episode "+title+" but the database didn't give proper results, skipping it", logger.WARNING)
                continue

            actual_season = int(sql_results[0]["season"])
            actual_episodes = [int(sql_results[0]["episode"])]

        # make sure we want the episode
        wantEp = True
        for epNo in actual_episodes:
            if not show.wantEpisode(actual_season, epNo, quality):
                wantEp = False
                break
        if not wantEp:
            logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
            continue

        if not language == show.audio_lang:
            # NOTE(review): the log indexes showLanguages with
            # parse_result.audio_langs while the comparison above used
            # `language` - looks inconsistent, confirm which key is intended
            logger.log(u"Ignoring result "+title+" because the language: " + showLanguages[parse_result.audio_langs] + " does not match the desired language: " + showLanguages[show.audio_lang])
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        # make a result object
        epObj = []
        for curEp in actual_episodes:
            epObj.append(show.getEpisode(actual_season, curEp))

        result = self.getResult(epObj)
        if hasattr(item, 'getNZB'):
            result.extraInfo = [item.getNZB()]
        elif hasattr(item , 'extraInfo'):
            result.extraInfo = item.extraInfo
        result.url = url
        result.name = title
        result.quality = quality
        if hasattr(item , 'audio_langs'):
            result.audio_lang=''.join(item.audio_langs)
        else:
            result.audio_lang=language

        # bucket the result: a single episode number, MULTI_EP_RESULT, or
        # SEASON_RESULT (no episode numbers parsed at all)
        if len(epObj) == 1:
            epNum = epObj[0].episode
        elif len(epObj) > 1:
            epNum = MULTI_EP_RESULT
            logger.log(u"Separating multi-episode result to check for later - result contains episodes: "+str(parse_result.episode_numbers), logger.DEBUG)
        elif len(epObj) == 0:
            epNum = SEASON_RESULT
            if result.extraInfo:
                result.extraInfo.append( show )
            else:
                result.extraInfo = [show]
            logger.log(u"Separating full season result to check for later", logger.DEBUG)

        if epNum in results:
            results[epNum].append(result)
        else:
            results[epNum] = [result]

    return results
def download_result(self, result):
    """
    Save the result to disk.

    Torrent results are resolved via public torrent cache services using the
    info hash from the result URL; NZB results are fetched directly. The file
    is first downloaded into the cache dir, verified, and only then moved to
    the final blackhole dir. Returns True when a verified file was saved,
    otherwise False.
    """
    # check for auth
    if not self._authorised():
        return False

    if GenericProvider.TORRENT == self.providerType:
        final_dir = sickbeard.TORRENT_DIR
        link_type = 'magnet'
        try:
            torrent_hash = re.findall('(?i)urn:btih:([0-9a-f]{32,40})', result.url)[0].upper()

            if 32 == len(torrent_hash):
                # 32 chars means a base32-encoded info hash -> convert to hex
                torrent_hash = b16encode(b32decode(torrent_hash)).lower()

            if not torrent_hash:
                logger.log('Unable to extract torrent hash from link: ' + ex(result.url), logger.ERROR)
                return False

            urls = ['http%s://%s/%s.torrent' % (u + (torrent_hash,))
                    for u in (('s', 'torcache.net/torrent'), ('', 'thetorrent.org/torrent'),
                              ('s', 'itorrents.org/torrent'))]
        except Exception:
            # BUGFIX: narrowed from a bare `except:`; any parse failure still
            # falls back to fetching the raw result url as a plain torrent
            link_type = 'torrent'
            urls = [result.url]

    elif GenericProvider.NZB == self.providerType:
        final_dir = sickbeard.NZB_DIR
        link_type = 'nzb'
        urls = [result.url]

    else:
        # BUGFIX: was a bare `return` (None); keep the boolean contract
        return False

    # remember any existing Referer header so it can be restored afterwards
    ref_state = 'Referer' in self.session.headers and self.session.headers['Referer']
    saved = False
    for url in urls:
        cache_dir = sickbeard.CACHE_DIR or helpers._getTempDir()
        base_name = '%s.%s' % (helpers.sanitizeFileName(result.name), self.providerType)
        cache_file = ek.ek(os.path.join, cache_dir, base_name)

        self.session.headers['Referer'] = url
        if helpers.download_file(url, cache_file, session=self.session):

            if self._verify_download(cache_file):
                logger.log(u'Downloaded %s result from %s' % (self.name, url))
                final_file = ek.ek(os.path.join, final_dir, base_name)
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except Exception:
                    # BUGFIX: narrowed from a bare `except:`
                    msg = 'copied cached file'
                logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                saved = True
                break

        # clean up the failed/unverified download before trying the next url
        remove_file_failed(cache_file)

    # restore the original Referer header state
    if 'Referer' in self.session.headers:
        if ref_state:
            self.session.headers['Referer'] = ref_state
        else:
            del (self.session.headers['Referer'])

    if not saved:
        logger.log(u'All torrent cache servers failed to return a downloadable result', logger.ERROR)

    return saved
def downloadResult(self, result):
    """
    Overridden to handle magnet links (using multiple fallbacks), and now libtorrent downloads also.
    """
    logger.log(u"Downloading a result from " + self.name + " at " + result.url)

    if sickbeard.USE_LIBTORRENT:
        # libtorrent can download torrent files from urls, but it's probably safer for us
        # to do it first so that we can report errors immediately.
        if result.url and (result.url.startswith('http://') or result.url.startswith('https://')):
            torrent = self.getURL(result.url)

            # and now that we have it, we can check the torrent file too!
            if not self.is_valid_torrent_data(torrent):
                logger.log(u'The torrent retrieved from "%s" is not a valid torrent file.' % (result.url), logger.ERROR)
                self.blacklistUrl(result.url)
                return False
        else:
            torrent = result.url

        if torrent:
            return downloader.download_from_torrent(torrent=torrent, filename=result.name, episodes=result.episodes, originalTorrentUrl=result.url, blacklistOrigUrlOnFailure=True)
        else:
            logger.log(u'Failed to retrieve torrent from "%s"' % (result.url), logger.ERROR)
            return False
    else:
        # Ye olde way, using blackhole ...
        if result.url and result.url.startswith('magnet:'):
            torrent_hash = self.getHashFromMagnet(result.url)
            if torrent_hash:
                urls = [url_fmt % torrent_hash for url_fmt in MAGNET_TO_TORRENT_URLS]
            else:
                # NOTE(review): this logs torrent_hash (falsy in this branch)
                # rather than result.url - confirm which was intended
                logger.log(u"Failed to handle magnet url %s, skipping..." % torrent_hash, logger.DEBUG)
                self.blacklistUrl(result.url)
                return False
        else:
            urls = [result.url]

        # use the result name as the filename
        fileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)

        for url in urls:
            logger.log(u"Trying d/l url: " + url, logger.DEBUG)
            data = self.getURL(url)
            if data == None:
                logger.log(u"Got no data for " + url, logger.DEBUG)
                # fall through to next iteration
            elif not self.is_valid_torrent_data(data):
                logger.log(u"d/l url %s failed, not a valid torrent file" % (url), logger.MESSAGE)
                self.blacklistUrl(url)
            else:
                try:
                    fileOut = open(fileName, 'wb')
                    fileOut.write(data)
                    fileOut.close()
                    helpers.chmodAsParent(fileName)
                except IOError, e:
                    logger.log("Unable to save the file: "+ex(e), logger.ERROR)
                    return False
                logger.log(u"Success with url: " + url, logger.DEBUG)
                return True
        else:  # NOTE(review): for/else - the all-urls-failed body continues beyond this excerpt
# save the data to disk try: fileOut = open(fileName, "w") fileOut.write(result.extraInfo[0]) fileOut.close() helpers.chmodAsParent(fileName) except IOError, e: logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR) newResult = False elif result.resultType == "torrentdata": # get the final file path to the nzb fileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + ".torrent") logger.log(u"Saving Torrent to " + fileName) newResult = True # save the data to disk try: fileOut = open(fileName, "wb") fileOut.write(result.extraInfo[0]) fileOut.close() helpers.chmodAsParent(fileName) except IOError, e: logger.log(u"Error trying to save Torrent to black hole: " + ex(e), logger.ERROR) newResult = False
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season == None or not episodes:
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # if renaming is turned on then rename the episode (and associated files, if necessary)
    if sickbeard.RENAME_EPISODES:
        new_file_name = helpers.sanitizeFileName(ep_obj.prettyName())
        try:
            self._rename(self.file_path, new_file_name, sickbeard.MOVE_ASSOCIATED_FILES)
        except (OSError, IOError):
            # BUGFIX: was `except OSError, IOError:` which in Python 2 catches
            # only OSError and rebinds it to the name IOError; a tuple catches
            # both exception types as intended
            raise exceptions.PostProcessingFailed("Unable to rename the files")

        # remember the new name of the file
        new_file_path = ek.ek(os.path.join, self.folder_path, new_file_name + '.' + self.file_name.rpartition('.')[-1])
        self._log(u"After renaming the new file path is " + new_file_path, logger.DEBUG)
    # NOTE(review): the method body continues beyond this excerpt
logger.ERROR) return False try: r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent') except Exception, e: logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR) return False if not r.status_code == 200: return False magnetFileName = ek.ek( os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) magnetFileContent = r.content try: with open(magnetFileName, 'wb') as fileOut: fileOut.write(magnetFileContent) helpers.chmodAsParent(magnetFileName) except EnvironmentError, e: logger.log("Unable to save the file: " + ex(e), logger.ERROR) return False logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE) return True