def updateBundles(self):
    """Rebuild the in-memory bundle cache from the configured directories.

    Drops cached bundles whose id is no longer configured, then rebuilds every
    configured bundle as an in-memory gzipped tarball. Each entry of
    self.__bundles ends up as (md5Hash, tarGzBytes), or (None, None) when the
    bundle's glob patterns match no files.
    """
    dirsToBundle = self.__getDirsToBundle()
    # Delete bundles that don't have to be updated.
    # Iterate over a snapshot of the keys: deleting from the dict while
    # iterating it directly raises "dictionary changed size during iteration".
    for bId in list(self.__bundles):
        if bId not in dirsToBundle:
            gLogger.info("Deleting old bundle %s" % bId)
            del self.__bundles[bId]
    for bId in dirsToBundle:
        bundlePaths = dirsToBundle[bId]
        gLogger.info("Updating %s bundle %s" % (bId, bundlePaths))
        buffer_ = cStringIO.StringIO()
        # Sort so the tarball layout (and therefore the md5) is deterministic
        filesToBundle = sorted(File.getGlobbedFiles(bundlePaths))
        if filesToBundle:
            commonPath = File.getCommonPath(filesToBundle)
            commonEnd = len(commonPath)
            gLogger.info("Bundle will have %s files with common path %s" % (len(filesToBundle), commonPath))
            # 'dummy' name is ignored: the archive is written into buffer_
            tarBuffer = tarfile.open('dummy', "w:gz", buffer_)
            for filePath in filesToBundle:
                # Store each file relative to the common prefix
                tarBuffer.add(filePath, filePath[commonEnd:])
            tarBuffer.close()
            zippedData = buffer_.getvalue()
            buffer_.close()
            hash_ = File.getMD5ForFiles(filesToBundle)
            gLogger.info("Bundled %s : %s bytes (%s)" % (bId, len(zippedData), hash_))
            self.__bundles[bId] = (hash_, zippedData)
        else:
            # No files matched: keep a placeholder so the bundle id stays known
            self.__bundles[bId] = (None, None)
def createTarball(tarballPath, directoryToTar, additionalDirectoriesToTar=None):
    """Create a gzipped tarball of one or more directories plus an .md5 file.

    :param tarballPath: output archive path; must end in ".tar.gz" or ".gz"
        so the companion md5 file name can be derived from it
    :param directoryToTar: directory archived recursively under its basename
    :param additionalDirectoriesToTar: optional extra directory path (string)
        or list of paths; entries that are not directories are silently skipped
    :return: S_OK() on success, S_ERROR(...) if the md5 file name cannot be
        derived from tarballPath
    """
    tf = tarfile.open(tarballPath, "w:gz")
    try:
        tf.add(directoryToTar, os.path.basename(os.path.abspath(directoryToTar)), recursive=True)
        # Accept a single path as well as a list of paths
        if type(additionalDirectoriesToTar) in (types.StringType, types.UnicodeType):
            additionalDirectoriesToTar = [additionalDirectoriesToTar]
        if additionalDirectoriesToTar:
            for dirToTar in additionalDirectoriesToTar:
                if os.path.isdir(dirToTar):
                    tf.add(dirToTar, os.path.basename(os.path.abspath(dirToTar)), recursive=True)
    finally:
        # Always close the archive, even if adding a directory raised
        tf.close()
    md5FilePath = False
    for suffix in (".tar.gz", ".gz"):
        if tarballPath.endswith(suffix):
            md5FilePath = "%s.md5" % tarballPath[:-len(suffix)]
            break
    if not md5FilePath:
        return S_ERROR("Could not generate md5 filename")
    md5str = File.getMD5ForFiles([tarballPath])
    fd = open(md5FilePath, "w")
    try:
        fd.write(md5str)
    finally:
        # Always release the file handle, even if the write raised
        fd.close()
    return S_OK()
def updateBundles( self ):
  """Rebuild the in-memory bundle cache from the configured directories.

  Drops cached bundles whose id is no longer configured, then rebuilds every
  configured bundle as an in-memory gzipped tarball. Each entry of
  self.__bundles ends up as ( md5Hash, tarGzBytes ), or ( None, None ) when
  the bundle's glob patterns match no files.
  """
  dirsToBundle = self.__getDirsToBundle()
  #Delete bundles that don't have to be updated.
  #Iterate over a snapshot of the keys: deleting from the dict while
  #iterating it directly raises "dictionary changed size during iteration".
  for bId in list( self.__bundles ):
    if bId not in dirsToBundle:
      gLogger.info( "Deleting old bundle %s" % bId )
      del( self.__bundles[ bId ] )
  for bId in dirsToBundle:
    bundlePaths = dirsToBundle[ bId ]
    gLogger.info( "Updating %s bundle %s" % ( bId, bundlePaths ) )
    buffer_ = cStringIO.StringIO()
    #Sort so the tarball layout (and therefore the md5) is deterministic
    filesToBundle = sorted( File.getGlobbedFiles( bundlePaths ) )
    if filesToBundle:
      commonPath = File.getCommonPath( filesToBundle )
      commonEnd = len( commonPath )
      gLogger.info( "Bundle will have %s files with common path %s" % ( len( filesToBundle ), commonPath ) )
      #'dummy' name is ignored: the archive is written into buffer_
      tarBuffer = tarfile.open( 'dummy', "w:gz", buffer_ )
      for filePath in filesToBundle:
        #Store each file relative to the common prefix
        tarBuffer.add( filePath, filePath[ commonEnd: ] )
      tarBuffer.close()
      zippedData = buffer_.getvalue()
      buffer_.close()
      hash_ = File.getMD5ForFiles( filesToBundle )
      gLogger.info( "Bundled %s : %s bytes (%s)" % ( bId, len( zippedData ), hash_ ) )
      self.__bundles[ bId ] = ( hash_, zippedData )
    else:
      #No files matched: keep a placeholder so the bundle id stays known
      self.__bundles[ bId ] = ( None, None )
def createTarball( tarballPath, directoryToTar, additionalDirectoriesToTar = None ):
  """Create a gzipped tarball of one or more directories plus an .md5 file.

  :param tarballPath: output archive path; must end in ".tar.gz" or ".gz"
                      so the companion md5 file name can be derived from it
  :param directoryToTar: directory archived recursively under its basename
  :param additionalDirectoriesToTar: optional extra directory path (string)
                                     or list of paths; entries that are not
                                     directories are silently skipped
  :return: S_OK() on success, S_ERROR(...) if the md5 file name cannot be
           derived from tarballPath
  """
  tf = tarfile.open( tarballPath, "w:gz" )
  try:
    tf.add( directoryToTar, os.path.basename( os.path.abspath( directoryToTar ) ), recursive = True )
    #Accept a single path as well as a list of paths
    if type( additionalDirectoriesToTar ) in ( types.StringType, types.UnicodeType ):
      additionalDirectoriesToTar = [ additionalDirectoriesToTar ]
    if additionalDirectoriesToTar:
      for dirToTar in additionalDirectoriesToTar:
        if os.path.isdir( dirToTar ):
          tf.add( dirToTar, os.path.basename( os.path.abspath( dirToTar ) ), recursive = True )
  finally:
    #Always close the archive, even if adding a directory raised
    tf.close()
  md5FilePath = False
  for suffix in ( ".tar.gz", ".gz" ):
    if tarballPath.endswith( suffix ):
      md5FilePath = "%s.md5" % tarballPath[ :-len( suffix ) ]
      break
  if not md5FilePath:
    return S_ERROR( "Could not generate md5 filename" )
  md5str = File.getMD5ForFiles( [ tarballPath ] )
  fd = open( md5FilePath, "w" )
  try:
    fd.write( md5str )
  finally:
    #Always release the file handle, even if the write raised
    fd.close()
  return S_OK()