def AddTorrentSkipHashCheck(self, logger, torrentPath, downloadPath):
    """Add a torrent to rTorrent with fast-resume data so the hash check is skipped.

    A temporary copy of the torrent (named "fast resume <name>") is created
    next to the original, fast-resume metadata pointing at downloadPath is
    injected into the copy, and that copy is handed to AddTorrent.  The copy
    is always deleted afterwards, whether or not adding succeeded.

    Parameters:
        logger: logger used for progress messages.
        torrentPath: path of the source .torrent file.
        downloadPath: directory where the already-downloaded data lives.

    Returns the info hash returned by AddTorrent.
    Raises PtpUploaderException if the fast-resume copy's path already exists.
    """
    logger.info("Adding torrent '%s' without hash checking to rTorrent to '%s'." % (torrentPath, downloadPath))

    sourceDirectory, sourceFilename = os.path.split(torrentPath)
    sourceFilename = "fast resume " + sourceFilename
    destinationTorrentPath = os.path.join(sourceDirectory, sourceFilename)
    if os.path.exists(destinationTorrentPath):
        raise PtpUploaderException("Can't create fast resume torrent because path '%s' already exists." % destinationTorrentPath)

    shutil.copyfile(torrentPath, destinationTorrentPath)

    infoHash = ""
    try:
        # Fix: the fast-resume injection now happens inside the try block.
        # Previously a failure in bread/add_fast_resume/bwrite (after the copy
        # was made) leaked the "fast resume" file, which made every retry fail
        # the exists() check above with "path already exists".
        metainfo = bencode.bread(destinationTorrentPath)
        metafile.add_fast_resume(metainfo, downloadPath.encode('utf-8'))
        bencode.bwrite(destinationTorrentPath, metainfo)
        infoHash = self.AddTorrent(logger, destinationTorrentPath, downloadPath)
    finally:
        # We always remove the fast resume torrent regardless of result of adding the torrent to rTorrent.
        # This ensures that even if adding to rTorrent fails, then resuming the job will work.
        os.remove(destinationTorrentPath)

    return infoHash
def AddTorrentSkipHashCheck(self, logger, torrentPath, downloadPath):
    """Add a torrent to rTorrent with fast-resume data so the hash check is skipped.

    A temporary copy of the torrent (named "fast resume <name>") is created
    next to the original, fast-resume metadata pointing at downloadPath is
    injected into the copy, and that copy is handed to AddTorrent.  The copy
    is always deleted afterwards, whether or not adding succeeded.

    Parameters:
        logger: logger used for progress messages.
        torrentPath: path of the source .torrent file.
        downloadPath: directory where the already-downloaded data lives.

    Returns the info hash returned by AddTorrent.
    Raises PtpUploaderException if the fast-resume copy's path already exists.
    """
    logger.info( "Adding torrent '%s' without hash checking to rTorrent to '%s'." % ( torrentPath, downloadPath ) )

    sourceDirectory, sourceFilename = os.path.split( torrentPath )
    sourceFilename = "fast resume " + sourceFilename
    destinationTorrentPath = os.path.join( sourceDirectory, sourceFilename )
    if os.path.exists( destinationTorrentPath ):
        raise PtpUploaderException( "Can't create fast resume torrent because path '%s' already exists." % destinationTorrentPath )

    shutil.copyfile( torrentPath, destinationTorrentPath )

    infoHash = ""
    try:
        # Fix: the fast-resume injection now happens inside the try block.
        # Previously a failure in bread/add_fast_resume/bwrite (after the copy
        # was made) leaked the "fast resume" file, which made every retry fail
        # the exists() check above with "path already exists".
        metainfo = bencode.bread( destinationTorrentPath )
        metafile.add_fast_resume( metainfo, downloadPath.encode( 'utf-8' ) )
        bencode.bwrite( destinationTorrentPath, metainfo )
        infoHash = self.AddTorrent( logger, destinationTorrentPath, downloadPath )
    finally:
        # We always remove the fast resume torrent regardless of result of adding the torrent to rTorrent.
        # This ensures that even if adding to rTorrent fails, then resuming the job will work.
        os.remove( destinationTorrentPath )

    return infoHash
def CleanTorrentFile(self, logger, torrentPath):
    """Rewrite the torrent file at torrentPath in place with non-essential metadata stripped.

    Reads the metainfo, runs metafile.clean_meta over it (leaving the info
    dictionary untouched, so the info hash is preserved), and writes the
    cleaned metainfo back to the same path.  Progress is reported via logger.
    """
    logger.info("Cleaning torrent file '%s'." % torrentPath)
    meta = bencode.bread(torrentPath)
    metafile.clean_meta(meta, including_info=False, logger=logger.info)
    bencode.bwrite(torrentPath, meta)
def CleanTorrentFile(self, logger, torrentPath):
    """Rewrite the torrent file at torrentPath in place with non-essential metadata stripped.

    Reads the metainfo, runs metafile.clean_meta over it (leaving the info
    dictionary untouched, so the info hash is preserved), and writes the
    cleaned metainfo back to the same path.  Progress is reported via logger.
    """
    logger.info( "Cleaning torrent file '%s'." % torrentPath )
    cleanedMetainfo = bencode.bread( torrentPath )
    metafile.clean_meta( cleanedMetainfo, including_info = False, logger = logger.info )
    bencode.bwrite( torrentPath, cleanedMetainfo )