def do(self):
    """Run ldconfig -n over each shared-library subtree in the destdir.

    Compares the directory contents before and after so that files
    added or removed by ldconfig (symlink maintenance) are reported.
    When the destdir's own ldconfig cannot be used, also checks whether
    the package owning the system ldconfig is in the buildRequires.
    """
    if hasattr(self.recipe, '_getCapsulePathsForFile'):
        if self.recipe.getType() == recipe.RECIPE_TYPE_CAPSULE:
            # capsule recipes ship pre-built payloads; nothing to do
            return
    macros = self.macros
    # Copy: extending self.invariantsubtrees directly would mutate the
    # shared class-level list and grow it on every invocation.
    subtrees = list(self.invariantsubtrees)
    if self.subtrees:
        subtrees.extend(self.subtrees)
    for path in subtrees:
        path = util.normpath(path % macros)
        fullpath = '/'.join((self.macros.destdir, path))
        if not os.path.exists(fullpath):
            continue
        # this state can only be reached if SharedLibrary is called with
        # bad arguments... see CNP-45
        mode = os.stat(fullpath)[stat.ST_MODE]
        if not stat.S_ISDIR(mode):
            self.error('The subtrees= argument takes directories only;'
                       ' %s is not a directory', path)
            continue
        oldfiles = set(os.listdir(fullpath))
        bootStrapLdConfig = True
        ldConfigPath = '%(destdir)s%(essentialsbindir)s/ldconfig' % macros
        if (not os.path.exists(ldConfigPath)) or \
                self.recipe.isCrossCompiling():
            bootStrapLdConfig = False
            ldConfigPath = '%(essentialsbindir)s/ldconfig' % macros
            if not os.path.exists(ldConfigPath):
                # no usable ldconfig anywhere; warn rather than letting
                # util.execute fail on a missing program
                self.warn('ldconfig not found')
                continue
        util.execute('%s -n %s' % (ldConfigPath, fullpath))
        if not bootStrapLdConfig:
            # determine which trove owns the system ldconfig so that
            # buildRequires reporting names the right package
            db = database.Database(self.recipe.cfg.root,
                                   self.recipe.cfg.dbPath)
            ldConfigTroveName = [
                x.getName() for x in db.iterTrovesByPath(ldConfigPath)]
            if ldConfigTroveName:
                ldConfigTroveName = ldConfigTroveName[0]
            else:
                ldConfigTroveName = 'glibc:runtime'
            try:
                if ldConfigTroveName in \
                        self.recipe._getTransitiveBuildRequiresNames():
                    self.recipe.reportExcessBuildRequires(ldConfigTroveName)
                else:
                    self.recipe.reportMissingBuildRequires(ldConfigTroveName)
            except AttributeError:
                # older Conary that does not have
                # reportExcessBuildRequires or even the older
                # reportMissingBuildRequires or
                # _getTransitiveBuildRequiresNames
                pass
        newfiles = set(os.listdir(fullpath))
        addedfiles = newfiles - oldfiles
        removedfiles = oldfiles - newfiles
        if addedfiles:
            self.info('ldconfig added the following new files in %s: %s',
                      path, ', '.join(sorted(list(addedfiles))))
        if removedfiles:
            self.warn('ldconfig removed files in %s: %s', path,
                      ', '.join(sorted(list(removedfiles))))
def archiveChroot(self, chrootPath, newPath):
    """Move a chroot from the working area into the archive directory.

    Returns the archive-relative path of the relocated chroot.
    """
    src = os.path.realpath(self.baseDir + '/' + chrootPath)
    dest = os.path.realpath(self.archiveDir + '/' + newPath)
    # refuse any path that resolves outside the expected parents
    assert(os.path.dirname(src) == self.baseDir)
    assert(os.path.dirname(dest) == self.archiveDir)
    util.mkdirChain(self.archiveDir)
    util.execute('/bin/mv %s %s' % (src, dest))
    self.queue.deleteChroot(src)
    return 'archive/' + os.path.basename(dest)
def archiveChroot(self, chrootPath, newPath):
    """Relocate a chroot into the archive area and drop it from the queue."""
    resolvedSrc = os.path.realpath(self.baseDir + "/" + chrootPath)
    resolvedDst = os.path.realpath(self.archiveDir + "/" + newPath)
    # sanity: both resolved paths must sit directly in their parents
    assert os.path.dirname(resolvedSrc) == self.baseDir
    assert os.path.dirname(resolvedDst) == self.archiveDir
    util.mkdirChain(self.archiveDir)
    util.execute("/bin/mv %s %s" % (resolvedSrc, resolvedDst))
    self.queue.deleteChroot(resolvedSrc)
    return "archive/" + os.path.basename(resolvedDst)
def replaceBuildPath(self, path):
    """Rewrite %(builddir)s references inside a text file at path to
    point at %(testdir)s/%(name)s-%(version)s instead."""
    # Now remove references to builddir, but
    # we don't want to mess with binaries
    # XXX probably need a better check for binary status
    m = magic.magic(path, basedir='/')
    extension = path.split('.')[-1]
    if m and m.name != 'ltwrapper':
        # any recognized file magic other than a libtool wrapper script
        # is treated as binary and left untouched
        return
    if extension in ('pyo', 'pyc'):
        # add as needed
        return
    # the sed expression has its macros %%-escaped, the literal path is
    # substituted first, then the whole command is expanded via macros
    util.execute(("sed -i -e 's|%%(builddir)s|%%(testdir)s/%%(name)s-%%(version)s|g' '%s'" % path) % self.macros, verbose=False)
def _compress(self, dirname, names):
    """gzip -9 each regular file in dirname, recording the rename in
    the recipe manifest when the recipe supports it."""
    for name in names:
        path = dirname + os.sep + name
        if util.isregular(path):
            if not self.gzip:
                self.gzip = self._findProg('gzip')
            # run the program located by _findProg instead of relying
            # on a bare 'gzip' being on the PATH (matches the other
            # compression policies in this file)
            util.execute(self.gzip + ' -f -n -9 ' + path)
            try:
                self.recipe.recordMove(path, path + '.gz')
            except AttributeError:
                # older Conary without recordMove; manifest tracking
                # is best-effort there
                pass
def replaceBuildPath(self, path):
    """Replace %(builddir)s references in a text file at path with
    %(testdir)s/%(name)s-%(version)s."""
    # Now remove references to builddir, but
    # we don't want to mess with binaries
    # XXX probably need a better check for binary status
    m = magic.magic(path, basedir='/')
    extension = path.split('.')[-1]
    if m and m.name != 'ltwrapper':
        # recognized file magic other than a libtool wrapper script
        # means binary; do not edit
        return
    if extension in ('pyo', 'pyc'):
        # add as needed
        return
    # %%-escaped macros survive the first % path substitution and are
    # expanded in the second pass against self.macros
    util.execute((
        "sed -i -e 's|%%(builddir)s|%%(testdir)s/%%(name)s-%%(version)s|g' '%s'"
        % path) % self.macros,
        verbose=False)
def doFile(self, path):
    """Unpack a zip-format python egg in place: the zip file at path is
    replaced by the directory tree it contains."""
    if hasattr(self.recipe, '_getCapsulePathsForFile') and \
            self.recipe._getCapsulePathsForFile(path):
        # capsule-managed file; leave untouched
        return
    destdir = self.recipe.macros.destdir
    fullPath = util.joinPaths(destdir, path)
    fileMagic = magic.magic(fullPath)
    if not (fileMagic and fileMagic.name == 'ZIP'):
        # if it's not a zip, we can't unpack it, PythonEggs will raise
        # an error on this path
        return
    unpackDir = tempfile.mkdtemp(dir=self.recipe.macros.builddir)
    util.execute("unzip -q -o -d '%s' '%s'" % (unpackDir, fullPath))
    self._addActionPathBuildRequires(['unzip'])
    # swap the extracted tree into the place of the original zip file
    os.unlink(fullPath)
    shutil.move(unpackDir, fullPath)
def doFile(self, path):
    """Recompress gzip/bzip2 files that are not already at level 9 (or
    that embed the original filename), preserving the file's mode."""
    if hasattr(self.recipe, '_getCapsulePathsForFile'):
        if self.recipe._getCapsulePathsForFile(path):
            # capsule-managed file; leave untouched
            return
    m = self.recipe.magic[path]
    if not m:
        return
    # Note: uses external gzip/bunzip if they exist because a
    # pipeline is faster in a multiprocessing environment
    def _mktmp(fullpath):
        # create an empty temp file next to fullpath (NB: the local
        # 'path' here shadows the outer argument)
        fd, path = tempfile.mkstemp('.temp', '', os.path.dirname(fullpath))
        os.close(fd)
        return path
    def _move(tmppath, fullpath):
        # carry over the original mode, then replace atomically
        os.chmod(tmppath, os.lstat(fullpath).st_mode)
        os.rename(tmppath, fullpath)
    def _findProg(prog):
        # lazily open the conary database used for program lookups
        if not self.db:
            self.db = database.Database(self.recipe.cfg.root,
                                        self.recipe.cfg.dbPath)
        return _findProgPath(prog, self.db, self.recipe)
    fullpath = self.macros.destdir+path
    if m.name == 'gzip' and \
       (m.contents['compression'] != '9' or 'name' in m.contents):
        # -n drops the embedded filename/timestamp for reproducibility
        tmppath = _mktmp(fullpath)
        if not self.gzip:
            self.gzip = _findProg('gzip')
        util.execute('%s -dc %s | %s -f -n -9 > %s'
                     % (self.gzip, fullpath, self.gzip, tmppath))
        _move(tmppath, fullpath)
        del self.recipe.magic[path]
    if m.name == 'bzip' and m.contents['compression'] != '9':
        tmppath = _mktmp(fullpath)
        if not self.bzip:
            self.bzip = _findProg('bzip2')
        util.execute('%s -dc %s | %s -9 > %s'
                     % (self.bzip, fullpath, self.bzip, tmppath))
        _move(tmppath, fullpath)
        del self.recipe.magic[path]
def testInit(self):
    """End-to-end check of 'rbuild init': stage checkouts, rbuildrc
    permissions, and equivalence with the cached product archive."""
    self.getRbuildHandle()
    from rbuild_plugins import config
    mock.mock(config.Config, 'isComplete', lambda: True)
    self.addProductDefinition(shortName='foo',
        upstream=['group-dist=localhost@rpl:linux'])
    txt = self.runCommand('init localhost@rpl:foo-1')
    self.assertEquals(txt,
        'Created checkout for localhost@foo:foo-1 at foo-1\n')
    # each stage directory records its own name in .stage
    self.verifyFile('foo-1/stable/.stage', 'stable\n')
    self.verifyFile('foo-1/qa/.stage', 'qa\n')
    self.verifyFile('foo-1/devel/.stage', 'devel\n')
    assert(os.path.exists('foo-1/.rbuild/rbuildrc'))
    # rbuildrc may hold credentials, so it must be owner-only (0600)
    self.assertEquals(
        os.stat('foo-1/.rbuild/rbuildrc').st_mode & 0777, 0600)
    # confirm that the cached product directory looks the same as the
    # fresh one
    self.initProductDirectory('foo2')
    try:
        for root, dirs, files in os.walk('foo2'):
            for f in files:
                if f == 'CONARY':
                    continue
                if f == 'product-definition.xml':
                    # unfortunately, this file looks different
                    # under addProductDefinition now because it
                    # has arch-specific flavors for building.
                    continue
                # root[4:] strips the leading 'foo2' prefix
                self.verifyFile('foo-1%s/%s' % (root[4:], f),
                    open('%s/%s' % (root, f)).read())
            for d in dirs:
                assert os.path.exists('foo-1%s/%s' % (root[4:], d))
    except Exception, err:
        # on mismatch, build a replacement archive and tell the
        # developer how to refresh the cached product definition
        os.chdir('foo-1')
        util.execute('tar -czf foo-product.tgz * .rbuild/* '
                     '--exclude ".rbuild/rbuildrc"')
        errorStr = str(err) + """
New tarball at %s/foo-product.tgz.
Run:
cp %s/foo-product.tgz %s
To update the archived product definition
""" % (os.getcwd(), os.getcwd(),
       pathManager.getPath("RBUILD_ARCHIVE_PATH"))
        raise RuntimeError(errorStr)
def testInit(self):
    """Exercise 'rbuild init' and compare the result against the cached
    product archive, regenerating the archive on mismatch."""
    self.getRbuildHandle()
    from rbuild_plugins import config
    mock.mock(config.Config, 'isComplete', lambda: True)
    self.addProductDefinition(shortName='foo',
        upstream=['group-dist=localhost@rpl:linux'])
    txt = self.runCommand('init localhost@rpl:foo-1')
    self.assertEquals(txt,
        'Created checkout for localhost@foo:foo-1 at foo-1\n')
    # every stage directory names itself in .stage
    self.verifyFile('foo-1/stable/.stage', 'stable\n')
    self.verifyFile('foo-1/qa/.stage', 'qa\n')
    self.verifyFile('foo-1/devel/.stage', 'devel\n')
    assert(os.path.exists('foo-1/.rbuild/rbuildrc'))
    # rbuildrc must be readable/writable by owner only
    self.assertEquals(os.stat('foo-1/.rbuild/rbuildrc').st_mode & 0777,
        0600)
    # confirm that the cached product directory looks the same as the
    # fresh one
    self.initProductDirectory('foo2')
    try:
        for root, dirs, files in os.walk('foo2'):
            for f in files:
                if f == 'CONARY':
                    continue
                if f == 'product-definition.xml':
                    # unfortunately, this file looks different
                    # under addProductDefinition now because it
                    # has arch-specific flavors for building.
                    continue
                # root[4:] drops the 'foo2' prefix from the walk root
                self.verifyFile('foo-1%s/%s' % (root[4:], f),
                    open('%s/%s' % (root, f)).read())
            for d in dirs:
                assert os.path.exists('foo-1%s/%s' % (root[4:], d))
    except Exception, err:
        # archive the freshly-generated tree and instruct the developer
        # how to replace the stale cached archive
        os.chdir('foo-1')
        util.execute(
            'tar -czf foo-product.tgz * .rbuild/* '
            '--exclude ".rbuild/rbuildrc"')
        errorStr = str(err) + """
New tarball at %s/foo-product.tgz.
Run:
cp %s/foo-product.tgz %s
To update the archived product definition
""" % (os.getcwd(), os.getcwd(),
       pathManager.getPath("RBUILD_ARCHIVE_PATH"))
        raise RuntimeError(errorStr)
def _uncompress(self, dirname, names):
    """Decompress .gz and .bz2 regular files in dirname, recording the
    resulting renames in the recipe manifest when supported."""
    for name in names:
        path = dirname + os.sep + name
        if name.endswith('.gz') and util.isregular(path):
            if not self.gunzip:
                self.gunzip = self._findProg('gunzip')
            # invoke the program located by _findProg instead of a bare
            # 'gunzip' that may not be on the PATH
            util.execute(self.gunzip + ' ' + path)
            try:
                self.recipe.recordMove(util.joinPaths(dirname, name),
                                       util.joinPaths(dirname, name)[:-3])
            except AttributeError:
                # older Conary without recordMove
                pass
        if name.endswith('.bz2') and util.isregular(path):
            if not self.bunzip:
                self.bunzip = self._findProg('bunzip2')
            util.execute(self.bunzip + ' ' + path)
            try:
                self.recipe.recordMove(util.joinPaths(dirname, name),
                                       util.joinPaths(dirname, name)[:-4])
            except AttributeError:
                pass
def _compress(self, dirname, names):
    """gzip -9 regular files under dirname, following symlinks and
    skipping anything already compressed."""
    for name in names:
        path = dirname + os.sep + name
        # Resolve symlinks before compressing to make sure the target
        # file gets compressed.
        if os.path.islink(path):
            path = os.path.realpath(path)
        # Assumed already compressed.
        if path.endswith('.gz'):
            continue
        # Already been compressed via symlink lookup or the dangling
        # symlink policy will catch it.
        if not os.path.exists(path):
            continue
        if util.isregular(path):
            if not self.gzip:
                self.gzip = self._findProg('gzip')
            # -n omits name/timestamp for reproducible output
            util.execute(self.gzip + ' -f -n -9 ' + path)
            try:
                self.recipe.recordMove(path, path + '.gz')
            except AttributeError:
                # older Conary without recordMove
                pass
def _processInfoFile(self, file):
    """Normalize an info page to gzip -9 with no embedded filename:
    compress plain files, recompress weak/named gzip, convert bzip."""
    syspath = '%(destdir)s/%(infodir)s/' % self.macros + file
    path = '%(infodir)s/' % self.macros + file
    if not self.policyException(path):
        m = self.recipe.magic[path]
        if not m or m.name not in ('gzip', 'bzip'):
            # not compressed
            if not self.gzip:
                self.gzip = self._findProg('gzip')
            # use the program located by _findProg rather than assuming
            # 'gzip' is on the PATH (matches the other policies here)
            util.execute('%s -f -n -9 %s' % (self.gzip, syspath))
            try:
                self.recipe.recordMove(syspath, syspath + '.gz')
            except AttributeError:
                # older Conary without recordMove
                pass
            del self.recipe.magic[path]
        elif m.name == 'gzip' and \
             (m.contents['compression'] != '9' or
              'name' in m.contents):
            # gzipped, but either below level 9 or carrying an embedded
            # filename; round-trip through gunzip/gzip -n -9
            if not self.gzip:
                self.gzip = self._findProg('gzip')
            if not self.gunzip:
                self.gunzip = self._findProg('gunzip')
            util.execute('%s %s; %s -f -n -9 %s' % (
                self.gunzip, syspath, self.gzip, syspath[:-3]))
            # filename didn't change, so don't record it in the manifest
            del self.recipe.magic[path]
        elif m.name == 'bzip':
            # should use gzip instead
            if not self.gzip:
                self.gzip = self._findProg('gzip')
            if not self.bunzip:
                self.bunzip = self._findProg('bunzip2')
            util.execute('%s %s; %s -f -n -9 %s' % (
                self.bunzip, syspath, self.gzip, syspath[:-4]))
            try:
                self.recipe.recordMove(syspath, syspath[:-4] + '.gz')
            except AttributeError:
                pass
            del self.recipe.magic[path]
def doFile(self, path):
    """Strip debug information from ELF binaries and ar archives,
    saving ELF debug data under %(debuglibdir)s when debuginfo is on."""
    if hasattr(self.recipe, '_getCapsulePathsForFile'):
        if self.recipe._getCapsulePathsForFile(path):
            # capsule-managed file; leave untouched
            return
    m = self.recipe.magic[path]
    if not m:
        return
    # FIXME: should be:
    #if (m.name == "ELF" or m.name == "ar") and \
    #   m.contents['hasDebug']):
    # but this has to wait until ewt writes debug detection
    # for archives as well as elf files
    if (m.name == "ELF" and m.contents['hasDebug']) or \
       (m.name == "ar"):
        oldmode = None
        fullpath = self.dm.destdir+path
        mode = os.lstat(fullpath)[stat.ST_MODE]
        if mode & 0600 != 0600:
            # need to be able to read and write the file to strip it
            oldmode = mode
            os.chmod(fullpath, mode|0600)
        if self.debuginfo and m.name == 'ELF' and not path.endswith('.o'):
            dir = os.path.dirname(path)
            b = os.path.basename(path)
            if not b.endswith('.debug'):
                b += '.debug'
            debuglibdir = '%(destdir)s%(debuglibdir)s' %self.dm +dir
            debuglibpath = util.joinPaths(debuglibdir, b)
            if os.path.exists(debuglibpath):
                # debug data already extracted for this file
                # NOTE(review): this early return skips the chmod
                # restore below when oldmode was set — confirm intended
                return
            self._openDb()
            if (_findProgPath(self.macros.debugedit, self.db, self.recipe,
                              error=False) and
                _findProgPath(self.macros.strip, self.db, self.recipe,
                              error=False)):
                # null-separated AND terminated list, so we need to throw
                # away the last (empty) item before updating self.debugfiles
                self.debugfiles |= set(util.popen(
                    '%(debugedit)s -b %(topbuilddir)s -d %(debugsrcdir)s'
                    ' -l /dev/stdout '%self.dm +fullpath
                    ).read().split('\x00')[:-1])
                util.mkdirChain(debuglibdir)
                util.execute('%s -f %s %s' %(
                    self.dm.strip, debuglibpath, fullpath))
        else:
            self._openDb()
            if m.name == 'ar' or path.endswith('.o'):
                # just in case strip is eu-strip, which segfaults
                # whenever it touches an ar archive, and seems to
                # break some .o files
                if _findProgPath(self.macros.strip_archive, self.db,
                                 self.recipe, error=False):
                    util.execute('%(strip_archive)s ' %self.dm +fullpath)
            else:
                if _findProgPath(self.macros.strip, self.db, self.recipe,
                                 error=False):
                    util.execute('%(strip)s ' %self.dm +fullpath)
        # magic cache is stale now that the file contents changed
        del self.recipe.magic[path]
        if oldmode is not None:
            # restore the permissions widened above
            os.chmod(fullpath, oldmode)
def initProductDirectory(self, directory):
    """Populate directory from the canned foo-product archive and write
    an rbuild checkout config into it."""
    archivePath = resources.get_archive('foo-product.tgz')
    util.mkdirChain(directory)
    util.execute('tar -xzf %s -C %s' % (archivePath, directory))
    self.rbuildCfg.writeCheckoutFile(directory + '/.rbuild/rbuildrc')
def testExecuteNoUserInput(self):
    """Regression check that util.execute does not hang waiting for
    user input (presumably stdin is not connected to the terminal —
    verify in util.execute)."""
    util.execute('bash') # should return instantly with no exit code
def do(self):
    """Run ldconfig -n over each shared-library subtree in the destdir,
    reporting files it adds or removes and checking buildRequires for
    the package owning the system ldconfig when it was used."""
    if hasattr(self.recipe, '_getCapsulePathsForFile'):
        if self.recipe.getType() == recipe.RECIPE_TYPE_CAPSULE:
            # capsule recipes ship pre-built payloads; nothing to do
            return
    macros = self.macros
    # NOTE(review): this aliases the class-level list; extend() below
    # mutates it permanently across calls — consider copying first
    subtrees = self.invariantsubtrees
    if self.subtrees:
        subtrees.extend(self.subtrees)
    for path in subtrees:
        path = util.normpath(path % macros)
        fullpath = '/'.join((self.macros.destdir, path))
        if not os.path.exists(fullpath):
            continue
        # this state can only be reached if SharedLibrary is called with
        # bad arguments... see CNP-45
        mode = os.stat(fullpath)[stat.ST_MODE]
        if not stat.S_ISDIR(mode):
            self.error(
                'The subtrees= argument takes directories only;'
                ' %s is not a directory', path)
            continue
        # snapshot before ldconfig so added/removed files can be diffed
        oldfiles = set(os.listdir(fullpath))
        bootStrapLdConfig = True
        ldConfigPath = '%(destdir)s%(essentialsbindir)s/ldconfig' % macros
        if (not os.path.exists(ldConfigPath)
                ) or self.recipe.isCrossCompiling():
            # fall back to the system ldconfig
            bootStrapLdConfig = False
            ldConfigPath = '%(essentialsbindir)s/ldconfig' % macros
            if not os.path.exists(ldConfigPath):
                self.warn('ldconfig not found')
                continue
        util.execute('%s -n %s' % (ldConfigPath, fullpath))
        if not bootStrapLdConfig:
            # find which trove owns the system ldconfig for reporting
            db = database.Database(self.recipe.cfg.root,
                                   self.recipe.cfg.dbPath)
            ldConfigTroveName = [
                x.getName() for x in db.iterTrovesByPath(ldConfigPath)]
            if ldConfigTroveName:
                ldConfigTroveName = ldConfigTroveName[0]
            else:
                ldConfigTroveName = 'glibc:runtime'
            try:
                if ldConfigTroveName in \
                        self.recipe._getTransitiveBuildRequiresNames(
                        ):
                    self.recipe.reportExcessBuildRequires(
                        ldConfigTroveName)
                else:
                    self.recipe.reportMissingBuildRequires(
                        ldConfigTroveName)
            except AttributeError:
                # older Conary that does not have
                # reportExcessBuildRequires or even the older
                # reportMissingBuildRequires or
                # _getTransitiveBuildRequiresNames
                pass
        newfiles = set(os.listdir(fullpath))
        addedfiles = newfiles - oldfiles
        removedfiles = oldfiles - newfiles
        if addedfiles:
            self.info('ldconfig added the following new files in %s: %s',
                      path, ', '.join(sorted(list(addedfiles))))
        if removedfiles:
            self.warn('ldconfig removed files in %s: %s', path,
                      ', '.join(sorted(list(removedfiles))))
def write(self):
    """Build the unified ISO image set: install the file tree, tar and
    split it into the distro layout, lay down anaconda metadata, build
    the ISOs (plus optional OVF 1.0 ova), and post the results."""
    topDir = os.path.join(self.workDir, 'unified')
    tbdir = os.path.join(topDir, self.productDir, 'tarballs')
    baseDir = os.path.join(topDir, self.productDir, 'base')
    util.mkdirChain(tbdir)
    util.mkdirChain(baseDir)
    basePath = os.path.join(self.workDir, self.basefilename)
    if os.path.exists(basePath):
        # stale tree from a prior run; start fresh
        util.rmtree(basePath)
    util.mkdirChain(basePath)
    outputDir = os.path.join(constants.finishedDir, self.UUID)
    util.mkdirChain(outputDir)
    tarball = os.path.join(self.workDir, self.basefilename + '.tar.gz')
    cwd = os.getcwd()
    try:
        self.installFileTree(basePath, no_mbr=True)
        self.status('Preparing to build ISOs')
        os.chdir(basePath)
        # tar up the installed tree, then split it into per-disc chunks
        util.execute('tar -C %s -cpPsS --to-stdout ./ | gzip > %s' % \
            (basePath, tarball))
        ts = TarSplit(tarball)
        ts.splitFile(tbdir)
        ts.writeTbList(os.path.join(baseDir, 'tblist'))
        util.rmtree(basePath, ignore_errors = True)
        util.rmtree(tarball, ignore_errors = True)
        try:
            os.chdir(cwd)
        except:
            # block all errors so that real ones can get through
            # NOTE(review): bare except — narrow to OSError if possible
            pass
        self.callback = installable_iso.Callback(self.status)
        print >> sys.stderr, "Building ISOs of size: %d Mb" % \
            (self.maxIsoSize / 1048576)
        sys.stderr.flush()
        # FIXME: hack to ensure we don't trigger overburns.
        # there are probably cleaner ways to do this.
        if self.maxIsoSize > 681574400:
            self.maxIsoSize -= 1024 * 1024
        templateDir, clientVersion = self.retrieveTemplates()
        csdir = self.prepareTemplates(topDir, templateDir)
        util.rmtree(csdir, ignore_errors=True)
        # anaconda calls 32-bit x86 'i386'
        if self.arch == 'x86':
            anacondaArch = 'i386'
        else:
            anacondaArch = self.arch
        # write .discinfo
        discInfoPath = os.path.join(topDir, ".discinfo")
        if os.path.exists(discInfoPath):
            os.unlink(discInfoPath)
        discInfoFile = open(discInfoPath, "w")
        print >> discInfoFile, time.time()
        print >> discInfoFile, self.jobData['name']
        print >> discInfoFile, anacondaArch
        print >> discInfoFile, "1"
        for x in ["base", "tarballs", 'pixmaps']:
            print >> discInfoFile, "%s/%s" % (self.productDir, x)
        discInfoFile.close()
        self.extractMediaTemplate(topDir)
        self.setupKickstart(topDir)
        self.writeProductImage(topDir,
            installable_iso.getArchFlavor(self.baseFlavor).freeze())
        self.status("Building ISOs")
        # Mostly copied from splitdistro
        current = os.path.join(self.workDir, 'disc1')
        discnum = 1  # NOTE(review): appears unused in this function
        if os.path.isdir(current):
            print >> sys.stderr, 'removing stale', current
            util.rmtree(current)
        print >> sys.stderr, 'creating', current
        os.mkdir(current)
        splitdistro.lndir(topDir, current, excludes=('media-template',))
        # lay 'disc1' before 'all' to ensure collisions are handled correctly
        for cDir in ('disc1', 'all'):
            if 'media-template' in os.listdir(topDir) and \
               cDir in os.listdir(os.path.join(topDir, 'media-template')):
                splitdistro.lndir(
                    os.path.join(topDir, 'media-template', cDir), current)
        for cDir in ('all', 'disc1'):
            srcDir = os.path.join(topDir, 'media-template2', cDir)
            if os.path.exists(srcDir):
                for src in os.listdir(srcDir):
                    call('cp', '-R', '--no-dereference',
                         os.path.join(srcDir, src), current)
        outputFileList = self.buildIsos(topDir)
        if self.buildOVF10:
            # wrap the first ISO in an OVF 1.0 appliance package
            self.workingDir = os.path.join(self.workDir, self.basefilename)
            util.mkdirChain(self.workingDir)
            diskFileSize = imagegen.getFileSize(outputFileList[0][0])
            self.ovfImage = ovf_image.ISOOvfImage(self.basefilename,
                self.jobData['description'], None, outputFileList[0][0],
                diskFileSize, self.maxIsoSize, False,
                self.getBuildData('vmMemory'), self.workingDir,
                self.outputDir)
            self.ovfObj = self.ovfImage.createOvf()
            self.ovfXml = self.ovfImage.writeOvf()
            self.ovfImage.createManifest()
            self.ovaPath = self.ovfImage.createOva()
            outputFileList.append((self.ovaPath, 'Appliance ISO OVF 1.0'))
        # notify client that images are ready
        self.postOutput(outputFileList)
    finally:
        # always clean the scratch tree and cache, even on failure
        util.rmtree(os.path.normpath(os.path.join(topDir, "..")),
            ignore_errors = True)
        util.rmtree(constants.cachePath, ignore_errors = True)
def doFile(self, path):
    """Strip debug information from ELF binaries and ar archives,
    saving ELF debug data under %(debuglibdir)s when debuginfo is on."""
    if hasattr(self.recipe, '_getCapsulePathsForFile'):
        if self.recipe._getCapsulePathsForFile(path):
            # capsule-managed file; leave untouched
            return
    m = self.recipe.magic[path]
    if not m:
        return
    # FIXME: should be:
    #if (m.name == "ELF" or m.name == "ar") and \
    #   m.contents['hasDebug']):
    # but this has to wait until ewt writes debug detection
    # for archives as well as elf files
    if (m.name == "ELF" and m.contents['hasDebug']) or \
       (m.name == "ar"):
        oldmode = None
        fullpath = self.dm.destdir + path
        mode = os.lstat(fullpath)[stat.ST_MODE]
        if mode & 0600 != 0600:
            # need to be able to read and write the file to strip it
            oldmode = mode
            os.chmod(fullpath, mode | 0600)
        if self.debuginfo and m.name == 'ELF' and not path.endswith('.o'):
            dir = os.path.dirname(path)
            b = os.path.basename(path)
            if not b.endswith('.debug'):
                b += '.debug'
            debuglibdir = '%(destdir)s%(debuglibdir)s' % self.dm + dir
            debuglibpath = util.joinPaths(debuglibdir, b)
            if os.path.exists(debuglibpath):
                # debug data already extracted for this file
                # NOTE(review): this early return skips the chmod
                # restore below when oldmode was set — confirm intended
                return
            self._openDb()
            if (_findProgPath(
                    self.macros.debugedit, self.db, self.recipe,
                    error=False) and
                _findProgPath(self.macros.strip, self.db, self.recipe,
                              error=False)):
                # null-separated AND terminated list, so we need to throw
                # away the last (empty) item before updating self.debugfiles
                self.debugfiles |= set(
                    util.popen(
                        '%(debugedit)s -b %(topbuilddir)s -d %(debugsrcdir)s'
                        ' -l /dev/stdout ' % self.dm +
                        fullpath).read().split('\x00')[:-1])
                util.mkdirChain(debuglibdir)
                util.execute('%s -f %s %s' % (self.dm.strip, debuglibpath,
                                              fullpath))
        else:
            self._openDb()
            if m.name == 'ar' or path.endswith('.o'):
                # just in case strip is eu-strip, which segfaults
                # whenever it touches an ar archive, and seems to
                # break some .o files
                if _findProgPath(self.macros.strip_archive, self.db,
                                 self.recipe, error=False):
                    util.execute('%(strip_archive)s ' % self.dm + fullpath)
            else:
                if _findProgPath(self.macros.strip, self.db, self.recipe,
                                 error=False):
                    util.execute('%(strip)s ' % self.dm + fullpath)
        # magic cache is stale now that the file contents changed
        del self.recipe.magic[path]
        if oldmode is not None:
            # restore the permissions widened above
            os.chmod(fullpath, oldmode)