def run(self, context):
    """Create the static library archive from this target's object files."""
    # resolve per-target overrides merged with global options
    opts = context.mergeOptions(self)
    mkdir(os.path.dirname(self.path))
    archiver = opts['native.compilers'].archiver
    archiver.archive(
        context,
        output=self.path,
        options=opts,
        src=self.objects.resolve(context))
def run(self, context):
    """Create the static library archive from this target's object files."""
    opts = self.options
    mkdir(os.path.dirname(self.path))
    archiver = opts['native.compilers'].archiver
    archiver.archive(
        context,
        output=self.path,
        options=opts,
        src=self.objects.resolve(context))
def run(self, context):
    """Create (or truncate to empty) the file at this target's output path."""
    self.log.info("Touching %s", self.path)
    mkdir(os.path.dirname(self.path))
    # normLongPath handles Windows long-path limitations
    target = normLongPath(self.path)
    with openForWrite(target, "wb") as f:
        pass
def run(self, context):
    """Invoke the C# compiler to build this target's assembly."""
    resolvedLibs = self.libs.resolve(context)
    libnames = [os.path.basename(l) for l in resolvedLibs]
    libpaths = [os.path.dirname(l) for l in resolvedLibs]
    expandedFlags = [context.expandPropertyValues(f) for f in self.flags]

    args = [self.getOption('csharp.compiler'), "-out:" + self.path]
    if libnames:
        args.append("-reference:" + ",".join(libnames))
        args.append("-lib:" + ",".join(libpaths))
    if self.main:
        # executable with an explicit entry-point class
        args.append("-target:exe")
        args.append("-main:" + self.main)
    else:
        args.append("-target:library")
    for (file, id) in self.resources:
        args.append('-resource:%s,%s' % (context.expandPropertyValues(file), context.expandPropertyValues(id)))
    args.extend(self.options['csharp.options'])
    args.extend(expandedFlags)
    args.extend(self.compile.resolve(context))

    mkdir(os.path.dirname(self.path))
    handler = self.getOption('csharp.outputHandlerFactory')('csc', False, options=self.options)
    call(args, outputHandler=handler, timeout=self.options['process.timeout'])
def run(self, context):
    """Compile this target's C/C++ source files into object files."""
    opts = context.mergeOptions(self)  # merged per-target/global options
    mkdir(os.path.dirname(self.path))
    # C flags take precedence; fall back to C++ flags if none set
    compilerFlags = flatten(
        (opts['native.c.flags'] or opts['native.cxx.flags'])
        + [context.expandPropertyValues(f).split(' ') for f in self.flags])
    srcPaths = self.source.resolve(context)
    includeDirs = flatten(
        self.includes.resolve(context)
        + [context.expandPropertyValues(i, expandList=True) for i in opts['native.include']])
    opts['native.compilers'].ccompiler.compile(
        context,
        output=self.path,
        options=opts,
        flags=compilerFlags,
        src=srcPaths,
        includes=includeDirs)
def run(self, context):
    """Compile this target's C/C++ source files into object files."""
    opts = self.options
    mkdir(os.path.dirname(self.path))
    # C flags take precedence; fall back to C++ flags if none set
    compilerFlags = flatten(
        (opts['native.c.flags'] or opts['native.cxx.flags'])
        + [context.expandPropertyValues(f).split(' ') for f in self.flags])
    srcPaths = self.source.resolve(context)
    includeDirs = flatten(
        self.includes.resolve(context)
        + [context.expandPropertyValues(i, expandList=True) for i in opts['native.include']])
    opts['native.compilers'].ccompiler.compile(
        context,
        output=self.path,
        options=opts,
        flags=compilerFlags,
        src=srcPaths,
        includes=includeDirs)
def run(self, context):
    """Unpack each configured archive into this target's output directory.

    FilteredArchiveContents entries may remap member paths and apply
    include/exclude filters; plain PathSets are unpacked as-is.
    Raises BuildException if an archive is empty or filters match nothing.
    """
    self.log.info('Cleaning existing files from %s', self.path)
    deleteDir(self.path)
    for a in self.archives:
        if isinstance(a, FilteredArchiveContents):
            items = [(a.getResolvedPath(context), '')]
        else:
            assert isinstance(a, BasePathSet)
            filteredMembers = None
            items = a.resolveWithDestinations(context)
        for (srcAbs, destRel) in items:
            if destRel and not isDirPath(destRel):
                destRel = os.path.dirname(destRel)  # strip off the zip filename
            # FIX: guard against path traversal outside the destination directory,
            # consistent with the check performed by the sibling unpack implementation
            if '..' in destRel:
                raise Exception('This target does not permit destination paths to contain ".." relative path expressions')
            try:
                filesize = os.path.getsize(srcAbs)
            except Exception:
                filesize = 0  # size is informational only; never fail on it

            self.log.info("Unpacking %s (%0.1f MB) to %s", os.path.basename(srcAbs), filesize / 1024.0 / 1024, self.name + destRel)
            starttime = time.time()
            with self.__openArchive(srcAbs) as f:
                mkdir(self.path + destRel)
                if isinstance(a, FilteredArchiveContents) and a.hasIncludeExcludeFilters():
                    fullList = _getnames(f)
                    if not fullList:
                        raise BuildException('No files were found in archive "%s"' % (srcAbs))
                    filteredMembers = [x for x in fullList if a.isIncluded(context, x)]
                    self.log.info("Unpacking %d of %d members in %s", len(filteredMembers), len(fullList), os.path.basename(srcAbs))
                    if not filteredMembers:
                        raise BuildException('No files matching the specified include/exclude filters were found in archive "%s": %s' % (srcAbs, a))
                    if len(filteredMembers) == len(fullList):
                        raise BuildException('No files were excluded from the unpacking operation by the specified filters (check filters are correct): %s' % a)
                else:
                    filteredMembers = _getnames(f)
                # NB: some archive types want a list of string members, others want TarInfo objects etc, so
                # if we support other archive types in future might need to do a bit of work here
                path = normLongPath(self.path + destRel)
                for m in filteredMembers:
                    if not isDirPath(m):
                        info = _getinfo(f, m)
                        if isinstance(a, FilteredArchiveContents):
                            _setfilename(info, a.mapDestPath(context, _getfilename(info)))
                        if isWindows():
                            _setfilename(info, _getfilename(info).replace('/', '\\'))
                        f.extract(info, path=path)
                    else:
                        # we should create empty directories too
                        if isinstance(a, FilteredArchiveContents):
                            m = a.mapDestPath(context, m).rstrip('/')
                        m = path.rstrip('/\\') + '/' + m
                        if isWindows():
                            m = m.replace('/', '\\')
                        mkdir(m)
            self.log.info("Completed unpacking %s (%0.1f MB) in %0.1f seconds", os.path.basename(srcAbs), filesize / 1024.0 / 1024, (time.time() - starttime))
def run(self, context):
    """Link this target's object files into an executable or shared library."""
    opts = context.mergeOptions(self)  # merged options
    mkdir(os.path.dirname(self.path))
    linkFlags = opts['native.link.flags'] + self.flags
    srcObjects = self.objects.resolve(context)
    resolvedLibs = flatten(
        [map(string.strip, context.expandPropertyValues(x, expandList=True))
         for x in self.libs + opts['native.libs'] if x])
    resolvedLibDirs = flatten(
        self.libpaths.resolve(context)
        + [context.expandPropertyValues(x, expandList=True) for x in opts['native.libpaths']])
    opts['native.compilers'].linker.link(
        context,
        output=self.path,
        options=opts,
        flags=linkFlags,
        shared=self.shared,
        src=srcObjects,
        libs=resolvedLibs,
        libdirs=resolvedLibDirs)
def run(self, context):
    """Link this target's object files into an executable or shared library."""
    opts = self.options
    mkdir(os.path.dirname(self.path))
    linkFlags = opts['native.link.flags'] + self.flags
    srcObjects = self.objects.resolve(context)
    resolvedLibs = flatten(
        [map(string.strip, context.expandPropertyValues(x, expandList=True))
         for x in self.libs + opts['native.libs'] if x])
    resolvedLibDirs = flatten(
        self.libpaths.resolve(context)
        + [context.expandPropertyValues(x, expandList=True) for x in opts['native.libpaths']])
    opts['native.compilers'].linker.link(
        context,
        output=self.path,
        options=opts,
        flags=linkFlags,
        shared=self.shared,
        src=srcObjects,
        libs=resolvedLibs,
        libdirs=resolvedLibDirs)
def run(self, context):
    """Write the generated contents to the target file, then apply permissions."""
    contents = self._getContents(context)
    mkdir(os.path.dirname(self.path))
    longPath = normLongPath(self.path)
    with openForWrite(longPath, 'wb') as f:
        # convert to platform-native line endings
        f.write(contents.replace('\n', os.linesep))
    if self.__mode and not isWindows():
        os.chmod(longPath, self.__mode)
    if self.__executable and not isWindows():
        # add execute bits for user/group/other on top of the current mode
        os.chmod(longPath, stat.S_IXOTH | stat.S_IXUSR | stat.S_IXGRP | os.stat(self.path).st_mode)
def run(self, context):
    """Assemble the output zip from the resolved input path set."""
    mkdir(os.path.dirname(self.path))
    seen = set()
    with zipfile.ZipFile(normLongPath(self.path), 'w') as output:
        for (f, o) in self.inputs.resolveWithDestinations(context):
            # refuse duplicate entries - they would produce an invalid zip
            if o in seen:
                dupsrc = ['"%s"' % src for (src, dest) in self.inputs.resolveWithDestinations(context) if dest == o]
                raise BuildException('Duplicate zip entry "%s" from: %s' % (o, ', '.join(dupsrc)))
            seen.add(o)
            # directory entries must not be compressed (it messes up Java)
            compression = zipfile.ZIP_STORED if isDirPath(f) else zipfile.ZIP_DEFLATED
            output.write(normLongPath(f).rstrip('/\\'), o, compression)
def run(self, context):
    """Generate this file target's contents and write them to disk."""
    contents = self._getContents(context)
    mkdir(os.path.dirname(self.path))
    dest = normLongPath(self.path)
    with openForWrite(dest, 'wb') as out:
        # use platform-native line endings in the written file
        out.write(contents.replace('\n', os.linesep))
    if isWindows():
        return  # mode/executable bits are POSIX-only
    if self.__mode:
        os.chmod(dest, self.__mode)
    if self.__executable:
        # add execute bits on top of whatever mode the file currently has
        os.chmod(dest, stat.S_IXOTH | stat.S_IXUSR | stat.S_IXGRP | os.stat(self.path).st_mode)
def run(self, context):
    """Compile this target's Java sources into the output class directory."""
    opts = context.mergeOptions(self)  # merged options
    mkdir(self.path)  # ensure the output dir exists
    # classpath: sorted within each PathSet (for determinism) while
    # preserving the original order of PathSet elements in the list
    classpath = os.pathsep.join(self.classpath.resolve(context))
    logdir = opts.get('javac.logs')
    mkdir(logdir)
    javac(self.path,
          self.compile.resolve(context),
          classpath,
          options=opts,
          logbasename=logdir + '/' + targetNameToUniqueId(self.name),
          targetname=self.name)
def run(self, context):
    """Create a symlink at this target's path pointing at the single resolved source.

    Raises BuildException if the source resolves to zero or multiple paths,
    or to a directory.
    """
    mkdir(os.path.dirname(self.path))
    src = self.src.resolve(context)
    if len(src) != 1:
        # FIX: message previously read "must have only only source path"
        raise BuildException('SymLink target "%s" is invalid - must have only one source path' % self.name)
    if src[0].endswith('/'):
        raise BuildException('SymLink target "%s" is invalid - must be a file not a directory' % self.name)
    # use a relative link target if requested, so the link survives tree moves
    os.symlink(
        src[0] if not self.relative else os.path.relpath(src[0], os.path.dirname(self.path)),
        self.path)
def run(self, context):
    """Compile this target's Java sources into the output class directory."""
    mkdir(self.path)  # ensure the output dir exists
    # classpath: sorted within each PathSet (for determinism) while
    # preserving the original order of PathSet elements in the list
    classpath = os.pathsep.join(self.classpath.resolve(context))
    mkdir(self.getOption('javac.logs'))
    javac(self.path,
          self.compile.resolve(context),
          classpath,
          options=self.options,
          logbasename=self.options['javac.logs'] + '/' + targetNameToUniqueId(self.name),
          targetname=self.name)
def run(self, context):
    """Calls the wrapped run method, recording implicit inputs on success."""
    implicitInputs = self.__getImplicitInputs(context)
    trackInputs = bool(implicitInputs) or isDirPath(self.target.name)
    if trackInputs:
        deleteFile(self._implicitInputsFile)

    self.target.run(context)

    # target built successfully: persist the implicit inputs so the next
    # up-to-date check can perform a correct incremental build
    if trackInputs:
        log.debug('writing implicitInputsFile: %s', self._implicitInputsFile)
        mkdir(os.path.dirname(self._implicitInputsFile))
        with openForWrite(toLongPathSafe(self._implicitInputsFile), 'wb') as f:
            f.write(os.linesep.join(implicitInputs))
def run(self, context):
    """Calls the wrapped run method, recording implicit inputs on success."""
    implicitInputs = self.__getImplicitInputs(context)
    needsRecord = bool(implicitInputs) or isDirPath(self.target.name)
    if needsRecord:
        deleteFile(self._implicitInputsFile)

    self.target.run(context)

    # target built successfully: persist the implicit inputs so the next
    # up-to-date check can perform a correct incremental build
    if needsRecord:
        log.debug('writing implicitInputsFile: %s', self._implicitInputsFile)
        mkdir(os.path.dirname(self._implicitInputsFile))
        iminpath = os.path.normpath(self._implicitInputsFile)
        if isWindows():
            iminpath = u'\\\\?\\' + iminpath  # Windows long-path prefix
        with openForWrite(iminpath, 'wb') as f:
            f.write(os.linesep.join(implicitInputs))
def run(self, context):
    """Invoke the C# compiler to build this target's assembly."""
    opts = context.mergeOptions(self)  # merged options
    resolvedLibs = self.libs.resolve(context)
    libnames = [os.path.basename(x) for x in resolvedLibs]
    libpaths = [os.path.dirname(x) for x in resolvedLibs]
    expandedFlags = [context.expandPropertyValues(x) for x in self.flags]

    args = [opts['csharp.compiler'], "-out:" + self.path]
    if libnames:
        args += ["-reference:" + ",".join(libnames), "-lib:" + ",".join(libpaths)]
    if self.main:
        # executable with an explicit entry-point class
        args += ["-target:exe", "-main:" + self.main]
    else:
        args.append("-target:library")
    for (file, id) in self.resources:
        args.append('-resource:%s,%s' % (context.expandPropertyValues(file), context.expandPropertyValues(id)))
    args.extend(opts['csharp.options'])
    args.extend(expandedFlags)
    args.extend(self.compile.resolve(context))

    mkdir(os.path.dirname(self.path))
    call(args,
         outputHandler=opts['csharp.processoutputhandler']('csc', False, options=opts),
         timeout=opts['process.timeout'])
def run(self, context):
    """Copy the resolved source file(s) to this target's output path.

    If the target name denotes a file, exactly one source file is copied to
    self.path; if it denotes a directory (trailing "/"), every resolved
    (source, destination) pair is copied beneath it. File modes are applied
    via os.chmod when self.mode is set.
    """
    self.log.info("Copying %s to %s", self.src, self.path)
    src = self.src.resolveWithDestinations(context)  # a map of srcAbsolute: destRelative
    # implicitly ensure parent of target exists, to keep things simple
    copied = 0
    if not isDirPath(self.name):
        # it's a simple file operation.
        if len(src) != 1:
            raise BuildException(
                'Copy destination must be a directory (ending with "/") when multiple sources are specified (not: %s)'
                % src)
        src, mappedDest = src[0]
        if isDirPath(src):
            raise BuildException(
                'Copy source must be files (or PathSets) not directories: %s' % src)
        mkdir(os.path.dirname(self.path))
        # we kindof have to ignore mappedDest here, since the target path already fully defines it
        self._copyFile(context, src, self.path)
        if self.mode:
            os.chmod(self.path, self.mode)
        copied += 1
    else:
        # directory target: copy each resolved pair underneath it
        lastDirCreated = None  # cache last-created dir to avoid redundant mkdir calls
        for (srcAbs, destRel) in src:
            # there should not be any directories here only files from pathsets
            if '..' in destRel:
                # to avoid people abusing this to copy files outside the dest directory!
                raise Exception(
                    'This target does not permit destination paths to contain ".." relative path expressions')
            if isDirPath(srcAbs):
                # allows creating of empty directories.
                mkdir(self.path + destRel)
            else:
                dest = os.path.normpath(self.path + destRel)
                #self.log.debug('Processing %s -> %s i.e. %s', srcAbs, destRel, dest)
                if not lastDirCreated or lastDirCreated != os.path.dirname(dest):
                    lastDirCreated = os.path.dirname(dest)
                    self.log.debug('Creating intermediate dir %s', lastDirCreated)
                    mkdir(lastDirCreated)
                try:
                    self._copyFile(context, srcAbs, dest)
                    if self.mode:
                        os.chmod(dest, self.mode)
                except Exception, e:
                    raise BuildException(
                        'Error copying from "%s" to "%s"' % (srcAbs, dest), causedBy=True)
                copied += 1
def run(self, context):
    """Run the docker command for this target: image build, or tag+push.

    In Docker.BUILD mode the single input is the Dockerfile/context to build;
    in Docker.PUSHTAG mode the dependent image is re-tagged and pushed.
    A stamp file is written at self.path on success.
    Raises BuildException for invalid inputs or an unknown mode.
    """
    options = context.mergeOptions(self)  # get the merged options
    args = [options['docker.path']]
    environs = {'DOCKER_HOST': options['docker.host']} if options['docker.host'] else {}
    if self.mode == Docker.BUILD:
        dargs = list(args)
        dargs.extend(['build', '--rm=true', '-t', context.expandPropertyValues(self.imagename), ])
        if self.buildArgs:
            # FIX: emit one --build-arg per entry; previously the whole list was
            # interpolated into a single argument, producing e.g. --build-arg=['X=1', 'Y=2']
            dargs.extend(["--build-arg=%s" % context.expandPropertyValues(x) for x in self.buildArgs])
        if self.dockerfile:
            dargs.extend(["-f", context.expandPropertyValues(self.dockerfile)])
        inputs = self.inputs.resolve(context)
        if len(inputs) != 1:
            raise BuildException("Must specify a single input for Docker.BUILD", location=self.location)
        dargs.append(inputs[0])
        cwd = os.path.dirname(inputs[0])
        call(dargs, outputHandler=options['docker.processoutputhandler']('docker-build', False, options=options),
             timeout=options['process.timeout'], env=environs, cwd=cwd)
    elif self.mode == Docker.PUSHTAG:
        inputs = self.inputs.resolve(context)
        if len(inputs) != 0:
            raise BuildException("Must not specify inputs for Docker.PUSHTAG", location=self.location)
        # first apply the new tag to the dependency image...
        dargs = list(args)
        dargs.extend(['tag', context.expandPropertyValues(self.depimage), context.expandPropertyValues(self.imagename), ])
        call(dargs, outputHandler=options['docker.processoutputhandler']('docker-tag', False, options=options),
             timeout=options['process.timeout'], env=environs)
        # ... then push it
        dargs = list(args)
        dargs.extend(['push', context.expandPropertyValues(self.imagename), ])
        call(dargs, outputHandler=options['docker.processoutputhandler']('docker-push', False, options=options),
             timeout=options['process.timeout'], env=environs)
    else:
        raise BuildException('Unknown Docker mode. Must be Docker.BUILD or Docker.PUSHTAG', location=self.location)

    # update the stamp file to record success
    path = normLongPath(self.path)
    mkdir(os.path.dirname(path))
    with openForWrite(path, 'wb') as f:
        pass
def run(self, context):
    """Copy the resolved source file(s) to this target's output path.

    A file target copies exactly one source to self.path; a directory target
    (name ending with "/") copies every resolved (source, destination) pair
    beneath it. File modes are applied via os.chmod when self.mode is set.
    """
    self.log.info("Copying %s to %s", self.src, self.path)
    src = self.src.resolveWithDestinations(context)  # a map of srcAbsolute: destRelative
    # implicitly ensure parent of target exists, to keep things simple
    copied = 0
    if not isDirPath(self.name):
        # it's a simple file operation.
        if len(src) != 1:
            raise BuildException('Copy destination must be a directory (ending with "/") when multiple sources are specified (not: %s)' % src)
        src, mappedDest = src[0]
        if isDirPath(src):
            raise BuildException('Copy source must be files (or PathSets) not directories: %s' % src)
        mkdir(os.path.dirname(self.path))
        # we kindof have to ignore mappedDest here, since the target path already fully defines it
        self._copyFile(context, src, self.path)
        if self.mode:
            os.chmod(self.path, self.mode)
        copied += 1
    else:
        # directory target: copy each resolved pair underneath it
        lastDirCreated = None  # cache last-created dir to avoid redundant mkdir calls
        for (srcAbs, destRel) in src:
            # there should not be any directories here only files from pathsets
            if '..' in destRel:
                # to avoid people abusing this to copy files outside the dest directory!
                raise Exception('Cannot use ".." relative path expressions in a Copy target')
            if isDirPath(srcAbs):
                # allows creating of empty directories.
                mkdir(self.path + destRel)
            else:
                dest = os.path.normpath(self.path + destRel)
                #self.log.debug('Processing %s -> %s i.e. %s', srcAbs, destRel, dest)
                if not lastDirCreated or lastDirCreated != os.path.dirname(dest):
                    lastDirCreated = os.path.dirname(dest)
                    self.log.debug('Creating intermediate dir %s', lastDirCreated)
                    mkdir(lastDirCreated)
                try:
                    self._copyFile(context, srcAbs, dest)
                    if self.mode:
                        os.chmod(dest, self.mode)
                except Exception, e:
                    raise BuildException('Error copying from "%s" to "%s"' % (srcAbs, dest), causedBy=True)
                copied += 1
def run(self, context): if self.cwd: self.cwd = context.getFullPath(self.cwd, self.baseDir) if isDirPath(self.path): mkdir(self.path) cwd = self.cwd or self.path else: mkdir(os.path.dirname(self.path)) cwd = self.cwd or self.workDir mkdir(self.workDir) cmd = self._resolveCommand(context) # this location is a lot easier to find than the target's workdir logbasename = os.path.normpath( context.getPropertyValue('BUILD_WORK_DIR') + '/CustomCommandOutput/' + os.path.basename(cmd[0]) + "." + targetNameToUniqueId(self.name)) stdoutPath = context.getFullPath( self.path if self.redirectStdOutToTarget else (self.stdout or logbasename + '.out'), defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/') stderrPath = context.getFullPath( self.stderr or logbasename + '.err', defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/') self.log.info('Building %s by executing command line: %s', self.name, ''.join(['\n\t"%s"' % x for x in cmd])) if self.cwd: self.log.info('Building %s from working directory: %s', self.name, self.cwd) # only print if overridden env = self.env or {} if env: if callable(env): env = env(context) else: env = { k: None if None == env[k] else self._resolveItem( env[k], context) for k in env } self.log.info('Environment overrides for %s are: %s', self.name, ''.join(['\n\t"%s=%s"' % (k, env[k]) for k in env])) for k in os.environ: if k not in env: env[k] = os.getenv(k) for k in env.keys(): if None == env[k]: del env[k] self.log.info('Output from %s will be written to "%s" and "%s"', self.name, stdoutPath, stderrPath) if not os.path.exists(cmd[0]) and not ( isWindows() and os.path.exists(cmd[0] + '.exe')): raise BuildException( 'Cannot run command because the executable does not exist: "%s"' % (cmd[0]), location=self.location) try: success = False rc = None try: # maybe send output to a file instead mkdir(os.path.dirname(logbasename)) with open(stderrPath, 'w') as fe: with open(stdoutPath, 'w') as fo: process = subprocess.Popen(cmd, stderr=fe, stdout=fo, cwd=cwd, 
env=env) rc = _wait_with_timeout( process, '%s(%s)' % (self.name, os.path.basename(cmd[0])), self.options['process.timeout'], False) success = rc == 0 finally: try: if os.path.getsize(stderrPath) == 0 and not self.stderr: deleteFile(stderrPath, allowRetry=True) if not self.redirectStdOutToTarget and os.path.getsize( stdoutPath) == 0 and not self.stdout: deleteFile(stdoutPath, allowRetry=True) except Exception, e: # stupid windows, it passes understanding self.log.info( 'Failed to delete empty .out/.err files (ignoring error as its not critical): %s', e) #if not os.listdir(self.workDir): deleteDir(self.workDir) # don't leave empty work dirs around mainlog = '<command generated no output>' logMethod = self.log.info if success else self.log.error if not self.redirectStdOutToTarget and os.path.isfile( stdoutPath) and os.path.getsize(stdoutPath) > 0: if os.path.getsize(stdoutPath) < 15 * 1024: logMethod( 'Output from %s stdout is: \n%s', self.name, open(stdoutPath, 'r').read().replace('\n', '\n\t')) mainlog = stdoutPath if not success: context.publishArtifact('%s stdout' % self, stdoutPath) if os.path.isfile( stderrPath) and os.path.getsize(stderrPath) > 0: if os.path.getsize(stderrPath) < 15 * 1024: logMethod( 'Output from %s stderr is: \n%s', self.name, open(stderrPath, 'r').read().replace('\n', '\n\t')) mainlog = stderrPath # take precedence over stdout if not success: context.publishArtifact('%s stderr' % self, stderrPath) if rc != None and rc != 0: raise BuildException( '%s command failed with error code %s; see output at "%s"' % (os.path.basename(cmd[0]), rc, mainlog), location=self.location)
def run(self, context):
    """Create a gzipped tar archive from the resolved input path set."""
    mkdir(os.path.dirname(self.path))
    with tarfile.open(normLongPath(self.path), 'w:gz') as output:
        for (sourcePath, archiveName) in self.inputs.resolveWithDestinations(context):
            output.add(normLongPath(sourcePath).rstrip('/\\'), archiveName)
def run(self, context):
    """Unpack each configured archive into this target's output directory.

    FilteredArchiveContents entries may remap member paths and apply
    include/exclude filters; plain PathSets are unpacked as-is. Raises
    BuildException if an archive is empty or the filters match nothing.
    """
    self.log.info('Cleaning existing files from %s', self.path)
    deleteDir(self.path)
    for a in self.archives:
        if isinstance(a, FilteredArchiveContents):
            items = [(a.getResolvedPath(context), '')]
        else:
            assert isinstance(a, BasePathSet)
            filteredMembers = None
            items = a.resolveWithDestinations(context)
        for (srcAbs, destRel) in items:
            if destRel and not isDirPath(destRel):
                destRel = os.path.dirname(destRel)  # strip off the zip filename
            # guard against path traversal outside the destination directory
            if '..' in destRel:
                raise Exception(
                    'This target does not permit destination paths to contain ".." relative path expressions')
            try:
                filesize = os.path.getsize(srcAbs)
            except Exception:
                filesize = 0  # size is informational only; never fail on it
            self.log.info("Unpacking %s (%0.1f MB) to %s", os.path.basename(srcAbs),
                          filesize / 1024.0 / 1024, self.name + destRel)
            starttime = time.time()
            with self.__openArchive(srcAbs) as f:
                mkdir(self.path + destRel)
                if isinstance(a, FilteredArchiveContents) and a.hasIncludeExcludeFilters():
                    fullList = _getnames(f)
                    if not fullList:
                        raise BuildException('No files were found in archive "%s"' % (srcAbs))
                    filteredMembers = [x for x in fullList if a.isIncluded(context, x)]
                    self.log.info("Unpacking %d of %d members in %s", len(filteredMembers),
                                  len(fullList), os.path.basename(srcAbs))
                    if not filteredMembers:
                        raise BuildException(
                            'No files matching the specified include/exclude filters were found in archive "%s": %s'
                            % (srcAbs, a))
                    # filters that exclude nothing are probably a user error
                    if len(filteredMembers) == len(fullList):
                        raise BuildException(
                            'No files were excluded from the unpacking operation by the specified filters (check filters are correct): %s'
                            % a)
                else:
                    filteredMembers = _getnames(f)
                # NB: some archive types want a list of string members, others want TarInfo objects etc, so
                # if we support other archive types in future might need to do a bit of work here
                path = normLongPath(self.path + destRel)
                for m in filteredMembers:
                    if not isDirPath(m):
                        info = _getinfo(f, m)
                        if isinstance(a, FilteredArchiveContents):
                            _setfilename(info, a.mapDestPath(context, _getfilename(info)))
                        if isWindows():
                            _setfilename(info, _getfilename(info).replace('/', '\\'))
                        f.extract(info, path=path)
                    else:
                        # we should create empty directories too
                        if isinstance(a, FilteredArchiveContents):
                            m = a.mapDestPath(context, m).rstrip('/')
                        m = path.rstrip('/\\') + '/' + m
                        if isWindows():
                            m = m.replace('/', '\\')
                        mkdir(m)
            self.log.info("Completed unpacking %s (%0.1f MB) in %0.1f seconds",
                          os.path.basename(srcAbs), filesize / 1024.0 / 1024,
                          (time.time() - starttime))
def run(self, context):
    """Copy each input jar into the output directory, optionally merge default
    manifest entries, and sign it with the configured keystore.
    """
    self.keystore = context.expandPropertyValues(self.keystore)
    options = self.options
    mkdir(self.path)
    for src, dest in self.jars.resolveWithDestinations(context):
        if '..' in dest:
            # to avoid people abusing this to copy files outside the dest directory!
            raise Exception(
                'This target does not permit destination paths to contain ".." relative path expressions')
        try:
            # copy the jar into the output dir, preserving its stat metadata
            with open(src, 'rb') as s:
                with openForWrite(os.path.join(self.path, dest), 'wb') as d:
                    d.write(s.read())
            shutil.copystat(src, os.path.join(self.path, dest))
            # When we re-jar with the user specified manifest entries, jar will complain
            # about duplicate attributes IF the original MANIFEST.MF already has those entries.
            # This is happening for latest version of SL where Application-Name, Permission etc
            # were already there.
            #
            # The block of code below will first extract the original MANIFEST.MF from the source
            # jar file, read all manifest entry to a list. When constructing the new manifest entries,
            # make sure the old MANIFEST.MF doesn't have that entry before putting the new manifest entry
            # to the list. This will avoid the duplicate attribute error.
            #
            if self.manifestDefaults:
                lines = []
                # read each line of MANIFEST.MF of the original jar and put them in lines
                with zipfile.ZipFile(src, 'r') as zf:
                    lst = zf.infolist()
                    for zi in lst:
                        fn = zi.filename
                        if fn.lower().endswith('manifest.mf'):
                            try:
                                manifest_txt = zf.read(zi.filename)
                            except Exception, e:
                                raise BuildException(
                                    'Failed reading the manifest file %s with exception:%s' % (fn, e))
                            # if we have all manifest text, parse and save each line
                            if manifest_txt:
                                # CR LF | LF | CR can be there as line feed and hence the code below
                                lines = manifest_txt.replace('\r\n', '\n').replace('\r', '\n').split('\n')
                            # done
                            break
                original_entries = collections.OrderedDict()  # to ensure we don't overwrite/duplicate these
                # populate the manifest_entries with original values from original manifest
                for l in lines:
                    if ':' in l and not l.startswith(' '):
                        # ignore continuation lines etc because keys are all we care about
                        key, value = l.split(':', 1)
                        original_entries[key] = value.strip()
                # build up a list of the new manifest entries (will be merged into any existing manifest by jar)
                manifest_entries = collections.OrderedDict()
                for i in self.manifestDefaults:
                    # if entry isn't there yet, add to the list
                    if i not in original_entries:
                        manifest_entries[i] = context.expandPropertyValues(self.manifestDefaults[i])
                # create the manifest file
                # we want to add the manifest entries explicitly specified here but
                # NOT the 'default' manifest entries we usually add, since these
                # are likely to have been set already, and we do not want duplicates
                mkdir(self.workDir)
                manifest = os.path.join(self.workDir, "MANIFEST.MF")  # manifest file
                options = dict(options)  # copy so the shared options dict is not mutated
                options['jar.manifest.defaults'] = {}
                create_manifest(manifest, manifest_entries, options)
                # update the EXISTING jar file with the new manifest entries, which will be merged into
                # existing manifest by the jar tool
                jar(os.path.join(self.path, dest), manifest, None, options, update=True)
            # sign the (possibly manifest-updated) jar in place
            signjar(os.path.join(self.path, dest), self.keystore, options,
                    alias=self.alias, storepass=self.storepass,
                    outputHandler=ProcessOutputHandler('signjars', treatStdErrAsErrors=False, options=options))
        except BuildException, e:
            raise BuildException('Error processing %s: %s' % (os.path.basename(dest), e))
def run(self, context):
    """Compile this target's Java sources (if any), generate or locate a
    manifest, assemble additional packaged files, and build the output jar.
    """
    options = self.options
    # make sure temp dir exists
    mkdir(self.workDir)
    classes = os.path.join(self.workDir, "classes")  # output dir for classes
    # create the classpath, sorting within PathSet (for determinism), but retaining original order of
    # PathSet elements in the list
    classpath = os.pathsep.join(self.classpath.resolve(context))
    # compile everything
    mkdir(classes)  # (need this for assembling other files to package later on, even if we don't do any javac)
    if self.compile:
        mkdir(self.getOption('javac.logs'))
        javac(classes, self.compile.resolve(context), classpath, options=options,
              logbasename=options.get('javac.logs') + '/' + targetNameToUniqueId(self.name),
              targetname=self.name)
    manifest = os.path.join(self.workDir, "MANIFEST.MF")  # manifest file
    if isinstance(self.manifest, basestring):
        # an explicit manifest file path was given
        manifest = context.getFullPath(self.manifest, self.baseDir)
    elif self.manifest == None:
        manifest = None  # no manifest at all
    else:
        # generate one
        # rewrite property values in the manifest
        manifest_entries = {}
        for i in self.manifest:
            manifest_entries[i] = context.expandPropertyValues(self.manifest[i])
        # determine classpath for manifest
        classpath_entries = []
        if "Class-path" not in manifest_entries:
            # assuming it wasn't hardcoded, set it here
            # we definitely do want to support use of ".." in destinations here, it can be very useful
            for src, dest in self.classpath.resolveWithDestinations(context):
                classpath_entries.append(dest)
            assert isinstance(options['jar.manifest.classpathAppend'], list), options['jar.manifest.classpathAppend']  # must not be a string
            classpath_entries.extend(options['jar.manifest.classpathAppend'] or [])
            # need to always use / not \ for these to be valid
            classpath_entries = [p.replace(os.path.sep, '/').replace('\\', '/') for p in classpath_entries if p]
            if classpath_entries:
                manifest_entries["Class-path"] = " ".join(classpath_entries)  # include the classpath from here
        if not manifest_entries.get('Class-path'):
            # suppress this element entirely if not needed, otherwise there would be no way to have an empty classpath
            manifest_entries.pop('Class-path', '')
        # create the manifest file
        create_manifest(manifest, manifest_entries, options=options)
    # copy in the additional things to include
    for (src, dest) in self.package.resolveWithDestinations(context):
        if '..' in dest:
            raise Exception(
                'This target does not permit packaged destination paths to contain ".." relative path expressions')
        mkdir(os.path.dirname(os.path.join(classes, dest)))
        destpath = normLongPath(classes + '/' + dest)
        srcpath = normLongPath(src)
        if os.path.isdir(srcpath):
            mkdir(destpath)
        else:
            with open(srcpath, 'rb') as s:
                with openForWrite(destpath, 'wb') as d:
                    d.write(s.read())
    # create the jar
    jar(self.path, manifest, classes, options=options,
        preserveManifestFormatting=self.preserveManifestFormatting,
        outputHandler=ProcessOutputHandler('jar', treatStdErrAsErrors=False, options=options))
def updateStampFile(self):
    """Assumes self.path is a stamp file that just needs creating / timestamp updating and does so."""
    stamp = normLongPath(self.path)
    mkdir(os.path.dirname(stamp))
    # opening for write creates the file / refreshes its timestamp
    with openForWrite(stamp, 'wb') as f:
        pass
def run(self, context): if self.cwd: self.cwd = context.getFullPath(self.cwd, self.baseDir) if isDirPath(self.path): mkdir(self.path) cwd = self.cwd or self.path else: mkdir(os.path.dirname(self.path)) cwd = self.cwd or self.workDir mkdir(self.workDir) cmd = self._resolveCommand(context) # this location is a lot easier to find than the target's workdir logbasename = os.path.normpath(context.getPropertyValue('BUILD_WORK_DIR')+'/CustomCommandOutput/'+os.path.basename(cmd[0])+"."+targetNameToUniqueId(self.name)) stdoutPath = context.getFullPath(self.path if self.redirectStdOutToTarget else (self.stdout or logbasename+'.out'), defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/') stderrPath = context.getFullPath(self.stderr or logbasename+'.err', defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/') self.log.info('Building %s by executing command line: %s', self.name, ''.join(['\n\t"%s"'%x for x in cmd])) if self.cwd: self.log.info('Building %s from working directory: %s', self.name, self.cwd) # only print if overridden env = self.env if env: if callable(env): env = env(context) else: env = {k: self._resolveItem(env[k], context) for k in env} self.log.info('Environment overrides for %s are: %s', self.name, ''.join(['\n\t"%s=%s"'%(k, env[k]) for k in env])) for k in os.environ: if k not in env: env[k] = os.getenv(k) self.log.info('Output from %s will be written to "%s" and "%s"', self.name, stdoutPath, stderrPath) if not os.path.exists(cmd[0]) and not (isWindows() and os.path.exists(cmd[0]+'.exe')): raise BuildException('Cannot run command because the executable does not exist: "%s"'%(cmd[0]), location=self.location) try: success=False rc = None try: # maybe send output to a file instead mkdir(os.path.dirname(logbasename)) with open(stderrPath, 'w') as fe: with open(stdoutPath, 'w') as fo: process = subprocess.Popen(cmd, stderr=fe, stdout=fo, cwd=cwd, env=env) options = context.mergeOptions(self) # get the merged options rc = _wait_with_timeout(process, 
'%s(%s)'%(self.name, os.path.basename(cmd[0])), options['process.timeout'], False) success = rc == 0 finally: try: if os.path.getsize(stderrPath) == 0 and not self.stderr: deleteFile(stderrPath, allowRetry=True) if not self.redirectStdOutToTarget and os.path.getsize(stdoutPath) == 0 and not self.stdout: deleteFile(stdoutPath, allowRetry=True) except Exception, e: # stupid windows, it passes understanding self.log.info('Failed to delete empty .out/.err files (ignoring error as its not critical): %s', e) #if not os.listdir(self.workDir): deleteDir(self.workDir) # don't leave empty work dirs around mainlog = '<command generated no output>' logMethod = self.log.info if success else self.log.error if not self.redirectStdOutToTarget and os.path.isfile(stdoutPath) and os.path.getsize(stdoutPath) > 0: if os.path.getsize(stdoutPath) < 15*1024: logMethod('Output from %s stdout is: \n%s', self.name, open(stdoutPath, 'r').read().replace('\n', '\n\t')) mainlog = stdoutPath if not success: context.publishArtifact('%s stdout'%self, stdoutPath) if os.path.isfile(stderrPath) and os.path.getsize(stderrPath) > 0: if os.path.getsize(stderrPath) < 15*1024: logMethod('Output from %s stderr is: \n%s', self.name, open(stderrPath, 'r').read().replace('\n', '\n\t')) mainlog = stderrPath # take precedence over stdout if not success: context.publishArtifact('%s stderr'%self, stderrPath) if rc != None and rc != 0: raise BuildException('%s command failed with error code %s; see output at "%s"'%(os.path.basename(cmd[0]), rc, mainlog), location=self.location)
def run(self, context): options = context.mergeOptions(self) # get the merged options # make sure temp dir exists mkdir(self.workDir) classes = os.path.join(self.workDir, "classes") # output dir for classes # create the classpath, sorting within PathSet (for determinism), but retaining original order of # PathSet elements in the list classpath = os.pathsep.join(self.classpath.resolve(context)) # compile everything mkdir(classes) # (need this for assembling other files to package later on, even if we don't do any javac) if self.compile: mkdir(options.get('javac.logs')) javac(classes, self.compile.resolve(context), classpath, options=options, logbasename=options.get('javac.logs')+'/'+targetNameToUniqueId(self.name), targetname=self.name) manifest = os.path.join(self.workDir, "MANIFEST.MF") # manifest file if isinstance(self.manifest, basestring): manifest = context.getFullPath(self.manifest, self.baseDir) elif self.manifest == None: manifest = None else: # generate one # rewrite property values in the manifest manifest_entries = {} for i in self.manifest: manifest_entries[i] = context.expandPropertyValues(self.manifest[i]) # determine classpath for manifest classpath_entries = [] if "Class-path" not in manifest_entries: # assuming it wasn't hardcoded, set it here for src, dest in self.classpath.resolveWithDestinations(context): classpath_entries.append(dest) assert isinstance(options['jar.manifest.classpathAppend'], list), options['jar.manifest.classpathAppend'] # must not be a string classpath_entries.extend(options['jar.manifest.classpathAppend'] or []) # need to always use / not \ for these to be valid classpath_entries = [p.replace(os.path.sep, '/').replace('\\', '/') for p in classpath_entries if p] if classpath_entries: manifest_entries["Class-path"] = " ".join(classpath_entries) # include the classpath from here if not manifest_entries.get('Class-path'): # suppress this element entirely if not needed, otherwise there would be no way to have an empty classpath 
manifest_entries.pop('Class-path','') # create the manifest file create_manifest(manifest, manifest_entries, options=options) # copy in the additional things to include for (src, dest) in self.package.resolveWithDestinations(context): mkdir(os.path.dirname(os.path.join(classes, dest))) destpath = normLongPath(classes+'/'+dest) srcpath = normLongPath(src) if os.path.isdir(srcpath): mkdir(destpath) else: with open(srcpath, 'rb') as s: with openForWrite(destpath, 'wb') as d: d.write(s.read()) # create the jar jar(self.path, manifest, classes, options=options, preserveManifestFormatting=self.preserveManifestFormatting, outputHandler=ProcessOutputHandler('jar', treatStdErrAsErrors=False,options=options))
def run(self, context):
    """Download the (property-expanded) URI to the target path self.path.

    Unlike the previous urllib.urlretrieve implementation — which in
    Python 2 does not raise on HTTP error statuses and so would silently
    save a 404/500 error page as the build output — this opens the URL
    with urllib2, which raises HTTPError/URLError on failure. Any download
    error is converted to a BuildException carrying the URI and this
    target's location.
    """
    import urllib2 # local import: only this target needs it
    import contextlib
    mkdir(os.path.dirname(self.path))
    uri = context.expandPropertyValues(self.uri)
    try:
        # urlopen raises on HTTP/URL errors instead of saving the error page
        with contextlib.closing(urllib2.urlopen(uri)) as response:
            # stream in chunks via copyfileobj rather than reading the whole body into memory;
            # openForWrite matches how other targets in this file write their outputs
            with openForWrite(self.path, 'wb') as f:
                shutil.copyfileobj(response, f)
    except Exception as e:
        raise BuildException('Failed to download "%s": %s'%(uri, e), location=self.location)
def run(self, context):
    """Copy each input jar to the output directory self.path, optionally
    merge in any manifestDefaults entries not already present in the jar's
    own manifest, then sign each jar with the configured keystore.

    Any BuildException from processing a jar is re-raised with the jar's
    basename prefixed for easier diagnosis.
    """
    self.keystore = context.expandPropertyValues(self.keystore)
    options = context.mergeOptions(self) # get the merged options

    mkdir(self.path)
    for src, dest in self.jars.resolveWithDestinations(context):
        try:
            # copy the jar (and its timestamps/permissions) into the output dir
            with open(src, 'rb') as s:
                with openForWrite(os.path.join(self.path, dest), 'wb') as d:
                    d.write(s.read())
            shutil.copystat(src, os.path.join(self.path, dest))

            # When we re-jar with the user specified manifest entries, jar will complain
            # about duplicate attributes IF the original MANIFEST.MF already has those entries.
            # This is happening for latest version of SL where Application-Name, Permission etc
            # were already there.
            #
            # The block of code below will first extract the original MANIFEST.MF from the source
            # jar file, read all manifest entry to a list. When constructing the new manifest entries,
            # make sure the old MANIFEST.MF doesn't have that entry before putting the new manifest entry
            # to the list. This will avoid the duplicate attribute error.
            #
            if self.manifestDefaults:
                lines = []

                # read each line of MANIFEST.MF of the original jar and put them in lines
                with zipfile.ZipFile(src, 'r') as zf:
                    lst = zf.infolist()
                    for zi in lst:
                        fn = zi.filename
                        if fn.lower().endswith('manifest.mf'):
                            try:
                                manifest_txt = zf.read(zi.filename)
                            except Exception, e:
                                raise BuildException('Failed reading the manifest file %s with exception:%s' % (fn, e))

                            # if we have all manifest text, parse and save each line
                            if manifest_txt:
                                # CR LF | LF | CR can be there as line feed and hence the code below
                                lines = manifest_txt.replace('\r\n', '\n').replace('\r','\n').split('\n')
                                # done
                                break

                original_entries = collections.OrderedDict() # to ensure we don't overwrite/duplicate these

                # populate the manifest_entries with original values from original manifest
                for l in lines:
                    if ':' in l and not l.startswith(' '): # ignore continuation lines etc because keys are all we care about
                        key,value = l.split(':', 1)
                        original_entries[key] = value.strip()

                # build up a list of the new manifest entries (will be merged into any existing manifest by jar)
                manifest_entries = collections.OrderedDict()
                for i in self.manifestDefaults:
                    # if entry isn't there yet, add to the list
                    if i not in original_entries:
                        manifest_entries[i] = context.expandPropertyValues(self.manifestDefaults[i])

                # create the manifest file
                # we want to add the manifest entries explicitly specified here but
                # NOT the 'default' manifest entries we usually add, since these
                # are likely to have been set already, and we do not want duplicates
                mkdir(self.workDir)
                manifest = os.path.join(self.workDir, "MANIFEST.MF") # manifest file
                options['jar.manifest.defaults'] = {}
                create_manifest(manifest, manifest_entries, options)

                # update the EXISTING jar file with the new manifest entries, which will be merged into
                # existing manifest by the jar tool
                jar(os.path.join(self.path, dest), manifest, None, options, update=True)

            signjar(os.path.join(self.path, dest), self.keystore, options, alias=self.alias, storepass=self.storepass, outputHandler=ProcessOutputHandler('signjars', treatStdErrAsErrors=False, options=options))
        except BuildException, e:
            raise BuildException('Error processing %s: %s'%(os.path.basename(dest), e))
class CompilerMakeDependsPathSet(BasePathSet):
    """ Use the selection ToolChain to get a list of dependencies from a set of source files """
    def __init__(self, target, src, flags=None, includes=None):
        """
        @param target: the BaseTarget object for which this path set is being caculated
        @param src: a PathSet of source file paths
        @param flags: additional compiler flags
        @param includes: a list of include directory paths
        """
        BasePathSet.__init__(self)
        self.log = makedeplog
        self.target = target
        self.sources = src
        self.flags = flatten([flags]) or []
        self.includes = includes or []
    def __repr__(self):
        # NOTE: this repr doubles as the cache-file header used by
        # _resolveUnderlyingDependencies to detect changed flags, so it must
        # be deterministic for a given configuration
        return "MakeDepend(%s, %s)" % (self.sources, self.flags)
    def resolveWithDestinations(self, context):
        # NOTE(review): this calls a bare _resolveUnderlyingDependencies(context);
        # it looks like it should be self._resolveUnderlyingDependencies(context)
        # (the method below takes only context) — confirm which name is in scope
        return [(i, os.path.basename(i)) for i in _resolveUnderlyingDependencies(context)]
    def clean(self):
        # remove the cached .makedepend file so dependencies are regenerated next time
        dfile = self.target.workDir+'.makedepend'
        deleteFile(dfile)
    def _resolveUnderlyingDependencies(self, context):
        """Return the dependency list for the target's sources, using the
        cached .makedepend file when it is still valid, otherwise regenerating
        it via the configured toolchain's dependency tool.
        """
        deplist = None
        options = self.target.options # get the merged options
        dfile = normLongPath(self.target.workDir+'.makedepend')
        testsources = self.sources.resolve(context)
        depsources = self.sources._resolveUnderlyingDependencies(context)

        # the cache is stale if it doesn't exist, or any source is newer than it
        needsRebuild = not os.path.exists(dfile)
        if needsRebuild:
            self.log.info("Rebuilding dependencies for %s because cached dependencies file does not exist (%s)" % (self.target, dfile))
        dfiletime = 0 if needsRebuild else getmtime(dfile)
        for x in testsources:
            if not exists(x):
                # can't generate any deps if some source files don't yet exist
                self.log.info("Dependency generation %s postponed because source file does not exist: %s" % (self.target, x))
                return depsources
            elif getmtime(x) > dfiletime:
                if not needsRebuild: self.log.info("Rebuilding dependencies for %s because cached dependencies file is older than %s" % (self.target, x))
                needsRebuild = True

        if not needsRebuild: # read in cached dependencies
            deplist = []
            with open(dfile) as f:
                lines = f.readlines()
            # first line is the header (str(self)) recording the options the cache was built with
            header = lines[0].strip()
            lines = lines[1:]
            for d in lines:
                d = d.strip()
                # each cached dependency must still exist (or be a known target), else the cache is invalid
                if context._isValidTarget(d) or exists(normLongPath(d)):
                    deplist.append(d)
                else:
                    needsRebuild = True
                    self.log.warn("Rebuilding dependencies for %s because dependency %s is missing" % (self.target, d))
                    break
            if header != str(self):
                # options/flags changed since the cache was written; fall through to regenerate
                self.log.info("Rebuilding dependencies for %s because target options have changed (%s != %s)" % (self.target, header, str(self)))
            elif not needsRebuild:
                return deplist

        # generate them again
        startt = time.time()
        self.log.info("*** Generating native dependencies for %s" % self.target)
        try:
            deplist = options['native.compilers'].dependencies.depends(context=context, src=testsources, options=options, flags=flatten(options['native.cxx.flags']+[context.expandPropertyValues(x).split(' ') for x in self.flags]), includes=flatten(self.includes.resolve(context)+[context.expandPropertyValues(x, expandList=True) for x in options['native.include']]))
        except BuildException, e:
            # add the source file to the message if the tool didn't mention it, for easier diagnosis
            if len(testsources)==1 and testsources[0] not in str(e):
                raise BuildException('Dependency resolution failed for %s: %s'%(testsources[0], e))
            raise
        deplist += depsources

        # rewrite the cache: header line first, then one dependency per line
        mkdir(os.path.dirname(dfile))
        with openForWrite(dfile, 'wb') as f:
            assert not os.linesep in str(self) # header must fit on a single line
            f.write(str(self)+os.linesep)
            for d in deplist:
                f.write(d.encode('UTF-8')+os.linesep)
        if time.time()-startt > 5: # this should usually be pretty quick, so may indicate a real build file mistake
            self.log.warn('Dependency generation took a long time: %0.1f s to evaluate %s', time.time()-startt, self)

        return deplist
def _worker_main(self):
    """
    The entry point of the worker threads. Loops while the pool is active,
    waiting for the queue to become non-empty. Also takes the returned errors
    and new queue items and adds them to the appropriate queues
    """
    log.debug("Starting worker")
    if self.profile:
        # per-thread profiler; stats are dumped on exit in the finally below
        profiler = cProfile.Profile()
        profiler.enable()
    try:
        while self.running:
            target = None

            # With the lock held wait for a non-empty queue and get an item from it
            with self.lock:
                #log.debug("Checking queue contents")
                while self.queue.empty():
                    if self.running:
                        #log.debug("Wait for queue to become full")
                        self.condition.wait() # releases the lock while waiting
                    else:
                        return
                if not self.running: return
                self.workerCount = self.workerCount + 1 # increment the number of running workers
                (priority, target) = self.queue.get_nowait()
                self.inprogress.add(target)

            # Without the lock, run the function
            log.debug("Worker running target %s with priority %s", target, priority)
            failed = False
            errs = []
            keepgoing = False
            enqueue = []
            try:
                self.utilisation.incr()
                try:
                    # self.fn returns (new items to enqueue, errors, whether to keep going)
                    (enqueue, errs, keepgoing) = self.fn(target)
                except Exception as e:
                    log.exception('Serious problem in thread pool worker: ') # log it but mustn't throw and miss the code below
                    errs.append('Serious problem in thread pool worker: %r'%e)
                    failed = True
            finally:
                self.utilisation.decr()

            # Take the lock again to update the errors, pending items in the queue and decrement the number of running workers
            with self.lock:
                log.debug("Updating errors and queue contents")
                self._errors.extend(errs)
                if not failed:
                    # only enqueue follow-on work if this item completed without an unexpected exception
                    for i in enqueue:
                        self.queue.put_nowait(i)
                if not keepgoing:
                    self.running = False # stops all workers, not just this one
                self.workerCount = self.workerCount - 1
                self.completed = self.completed + 1
                self.inprogress.remove(target)
                self.condition.notifyAll() # wake any workers waiting for queue changes
    finally:
        if self.profile:
            profiler.disable()
            profiler.create_stats()
            with self.lock:
                dirpath = os.path.join(os.getcwd(), 'profile-%s' % os.getpid())
                mkdir(dirpath)
                file = os.path.join(dirpath, "thread-%s" % threading.current_thread().name)
                # pick the first unused numeric suffix so earlier dumps aren't overwritten
                index=0
                while os.path.exists(file+'.%s' % index):
                    index = index + 1
                profiler.dump_stats(file+'.%s' % index)
        # final wake-up so other workers blocked on the condition can observe shutdown
        with self.lock:
            self.condition.notifyAll()
for t in findTargetsList: # this must be very easy to copy+paste, so don't put anything else on the line at all print >> stdout, '%s' % (t.name) elif task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD]: if not logFile: logFile = _maybeCustomizeLogFilename( init.getPropertyValue('LOG_FILE'), includedTargets[0] if (len(includedTargets) == 1 and includedTargets[0] != 'all' and includedTargets[0] in init.tags() and not excludedTargets) else None, task == _TASK_CLEAN) logFile = os.path.abspath(logFile) logdir = os.path.dirname(logFile) if logdir and not os.path.exists(logdir): mkdir(logdir) log.critical('Writing build log to: %s', os.path.abspath(logFile)) hdlr = logging.FileHandler(logFile, mode='w', encoding='UTF-8') hdlr.setFormatter( logging.Formatter( '%(asctime)s %(relativeCreated)05d %(levelname)-8s [%(threadName)s %(thread)5d] %(name)-10s - %(message)s', None)) hdlr.setLevel(logLevel or logging.INFO) logging.getLogger().addHandler(hdlr) log.info('Using xpybuild %s with build options: %s', _XPYBUILD_VERSION, buildOptions) try: # sometimes useful to have this info available import socket, getpass