def _copyFile(self, context, src, dest):
    dest = normLongPath(dest)
    src = normLongPath(src)
    with open(src, 'rb') as s:
        with openForWrite(dest, 'wb') as d:
            for m in self.mappers:
                x = m.getHeader(context)
                if x:
                    self.__unusedMappers.discard(m)
                    d.write(x)

            for l in s:
                for m in self.mappers:
                    prev = l
                    l = m.mapLine(context, l)
                    if prev != l:
                        self.__unusedMappers.discard(m)
                    if l is None:
                        break
                if l is not None:
                    d.write(l)

            for m in self.mappers:
                x = m.getFooter(context)
                if x:
                    self.__unusedMappers.discard(m)
                    d.write(x)
    shutil.copymode(src, dest)
    assert os.path.exists(dest)
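# A minimal sketch (hypothetical class, not part of this codebase) of the mapper
# contract assumed by _copyFile above: getHeader/getFooter may return text (or a
# falsy value) to prepend/append, and mapLine may transform a line or return None
# to drop it.
class PrefixLineMapper(object):
    """Prepends a prefix to every copied line; purely illustrative."""
    def __init__(self, prefix):
        self.prefix = prefix

    def getHeader(self, context):
        return None  # no header to add

    def mapLine(self, context, line):
        return self.prefix + line  # return None here to filter the line out

    def getFooter(self, context):
        return None  # no footer to add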
def _copyFile(self, context, src, dest):
    src = normLongPath(src)
    dest = normLongPath(dest)
    with open(src, 'rb') as inp:
        with openForWrite(dest, 'wb') as out:
            shutil.copyfileobj(inp, out)
    shutil.copymode(src, dest)
    assert os.path.exists(dest)
def run(self, context):
    mkdir(os.path.dirname(self.path))
    alreadyDone = set()
    with zipfile.ZipFile(normLongPath(self.path), 'w') as output:
        for (f, o) in self.inputs.resolveWithDestinations(context):
            # if we don't check for duplicate entries we'll end up creating an invalid zip
            if o in alreadyDone:
                dupsrc = ['"%s"' % src for (src, dest) in self.inputs.resolveWithDestinations(context) if dest == o]
                raise BuildException('Duplicate zip entry "%s" from: %s' % (o, ', '.join(dupsrc)))
            alreadyDone.add(o)

            # can't compress directory entries! (it messes up Java)
            output.write(normLongPath(f).rstrip('/\\'), o,
                zipfile.ZIP_STORED if isDirPath(f) else zipfile.ZIP_DEFLATED)
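# A minimal stdlib-only sketch (hypothetical helper and paths) of the
# directory-entry rule used above: directory members are written with
# ZIP_STORED because compressed directory entries confuse some consumers,
# notably Java's zip handling.
import os, zipfile

def write_zip(zippath, entries):
    """entries is a list of (source path, archive name) pairs."""
    with zipfile.ZipFile(zippath, 'w') as z:
        for src, arcname in entries:
            compression = zipfile.ZIP_STORED if os.path.isdir(src) else zipfile.ZIP_DEFLATED
            z.write(src, arcname, compression)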
def _resolveUnderlyingDependencies(self, context):
    deplist = None
    options = self.target.options  # get the merged options
    dfile = normLongPath(self.target.workDir + '.makedepend')
    testsources = self.sources.resolve(context)
    depsources = self.sources._resolveUnderlyingDependencies(context)

    needsRebuild = not os.path.exists(dfile)
    if needsRebuild:
        self.log.info("Rebuilding dependencies for %s because cached dependencies file does not exist (%s)" % (self.target, dfile))
    dfiletime = 0 if needsRebuild else getmtime(dfile)
    for x in testsources:
        if not exists(x):
            # can't generate any deps if some source files don't yet exist
            self.log.info("Dependency generation %s postponed because source file does not exist: %s" % (self.target, x))
            return depsources
        elif getmtime(x) > dfiletime:
            if not needsRebuild:
                self.log.info("Rebuilding dependencies for %s because cached dependencies file is older than %s" % (self.target, x))
            needsRebuild = True

    if not needsRebuild:
        # read in cached dependencies
        deplist = []
        with open(dfile) as f:
            lines = f.readlines()
        header = lines[0].strip()
        lines = lines[1:]
        for d in lines:
            d = d.strip()
            if context._isValidTarget(d) or exists(normLongPath(d)):
                deplist.append(d)
            else:
                needsRebuild = True
                self.log.warn("Rebuilding dependencies for %s because dependency %s is missing" % (self.target, d))
                break
        if header != str(self):
            self.log.info("Rebuilding dependencies for %s because target options have changed (%s != %s)" % (self.target, header, str(self)))
        elif not needsRebuild:
            return deplist

    # generate them again
    startt = time.time()
    self.log.info("*** Generating native dependencies for %s" % self.target)
    try:
        deplist = options['native.compilers'].dependencies.depends(
            context=context,
            src=testsources,
            options=options,
            flags=flatten(options['native.cxx.flags'] + [context.expandPropertyValues(x).split(' ') for x in self.flags]),
            includes=flatten(self.includes.resolve(context) + [context.expandPropertyValues(x, expandList=True) for x in options['native.include']]))
    except BuildException as e:
        if len(testsources) == 1 and testsources[0] not in str(e):
            raise BuildException('Dependency resolution failed for %s: %s' % (testsources[0], e))
        raise
def _resolveUnderlyingDependencies(self, context):
    deplist = None
    options = context.mergeOptions(self.target)  # get the merged options
    log = logging.getLogger('MakeDepend')
    dfile = normLongPath(self.target.workDir + '.makedepend')
    testsources = self.sources.resolve(context)
    depsources = self.sources._resolveUnderlyingDependencies(context)

    needsRebuild = not os.path.exists(dfile)
    if needsRebuild:
        log.info("Rebuilding dependencies for %s because cached dependencies file does not exist (%s)" % (self.target, dfile))
    dfiletime = 0 if needsRebuild else getmtime(dfile)
    for x in testsources:
        if not os.path.exists(x):
            return depsources
        elif getmtime(x) > dfiletime:
            if not needsRebuild:
                log.info("Rebuilding dependencies for %s because cached dependencies file is older than %s" % (self.target, x))
            needsRebuild = True

    if not needsRebuild:
        # read in cached dependencies
        deplist = []
        with open(dfile) as f:
            lines = f.readlines()
        header = lines[0].strip()
        lines = lines[1:]
        for d in lines:
            d = d.strip()
            if context._isValidTarget(d) or exists(normLongPath(d)):
                deplist.append(d)
            else:
                needsRebuild = True
                log.warn("Rebuilding dependencies for %s because dependency %s is missing" % (self.target, d))
                break
        if header != str(self):
            log.info("Rebuilding dependencies for %s because target options have changed (%s != %s)" % (self.target, header, str(self)))
        elif not needsRebuild:
            return deplist

    # generate them again
    startt = time.time()
    log.info("*** Generating native dependencies for %s" % self.target)
    try:
        deplist = options['native.compilers'].dependencies.depends(
            context=context,
            src=testsources,
            options=options,
            flags=flatten(options['native.cxx.flags'] + [context.expandPropertyValues(x).split(' ') for x in self.flags]),
            includes=flatten(self.includes.resolve(context) + [context.expandPropertyValues(x, expandList=True) for x in options['native.include']]))
    except BuildException as e:
        if len(testsources) == 1 and testsources[0] not in str(e):
            raise BuildException('Dependency resolution failed for %s: %s' % (testsources[0], e))
        raise
def run(self, context): self.log.info("Touching %s", self.path) mkdir(os.path.dirname(self.path)) path = normLongPath(self.path) with openForWrite(path, "wb") as f: pass
def isNewer(path):
    pathmodtime = getmtime(normLongPath(path))
    if pathmodtime <= stampmodtime:
        return False
    if pathmodtime - stampmodtime < 1:  # such a small time gap seems dodgy
        log.warn('Up-to-date check: %s must be rebuilt because input file "%s" is newer than "%s" by just %0.1f seconds',
            self.name, path, self.stampfile, pathmodtime - stampmodtime)
    else:
        log.info('Up-to-date check: %s must be rebuilt because input file "%s" is newer than "%s" (by %0.1f seconds)',
            self.name, path, self.stampfile, pathmodtime - stampmodtime)
    return True
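# A minimal self-contained sketch (hypothetical names, stdlib only) of the mtime
# comparison this helper performs: an output is stale if any input's modification
# time is newer than the output's stamp file.
import os

def is_stale(stampfile, inputs):
    if not os.path.exists(stampfile):
        return True
    stamp_mtime = os.path.getmtime(stampfile)
    return any(os.path.getmtime(p) > stamp_mtime for p in inputs)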
def run(self, context):
    self.log.info('Cleaning existing files from %s', self.path)
    deleteDir(self.path)
    for a in self.archives:
        if isinstance(a, FilteredArchiveContents):
            items = [(a.getResolvedPath(context), '')]
        else:
            assert isinstance(a, BasePathSet)
            filteredMembers = None
            items = a.resolveWithDestinations(context)
        for (srcAbs, destRel) in items:
            if destRel and not isDirPath(destRel):
                destRel = os.path.dirname(destRel)  # strip off the zip filename
            try:
                filesize = os.path.getsize(srcAbs)
            except Exception:
                filesize = 0

            self.log.info("Unpacking %s (%0.1f MB) to %s", os.path.basename(srcAbs), filesize / 1024.0 / 1024, self.name + destRel)
            starttime = time.time()
            with self.__openArchive(srcAbs) as f:
                mkdir(self.path + destRel)
                if isinstance(a, FilteredArchiveContents) and a.hasIncludeExcludeFilters():
                    fullList = _getnames(f)
                    if not fullList:
                        raise BuildException('No files were found in archive "%s"' % (srcAbs))
                    filteredMembers = [x for x in fullList if a.isIncluded(context, x)]
                    self.log.info("Unpacking %d of %d members in %s", len(filteredMembers), len(fullList), os.path.basename(srcAbs))
                    if not filteredMembers:
                        raise BuildException('No files matching the specified include/exclude filters were found in archive "%s": %s' % (srcAbs, a))
                    if len(filteredMembers) == len(fullList):
                        raise BuildException('No files were excluded from the unpacking operation by the specified filters (check filters are correct): %s' % a)
                else:
                    filteredMembers = _getnames(f)

                # NB: some archive types want a list of string members, others want TarInfo objects etc, so
                # if we support other archive types in future might need to do a bit of work here
                path = normLongPath(self.path + destRel)
                for m in filteredMembers:
                    if not isDirPath(m):
                        info = _getinfo(f, m)
                        if isinstance(a, FilteredArchiveContents):
                            _setfilename(info, a.mapDestPath(context, _getfilename(info)))
                        if isWindows():
                            _setfilename(info, _getfilename(info).replace('/', '\\'))
                        f.extract(info, path=path)
                    else:
                        # we should create empty directories too
                        if isinstance(a, FilteredArchiveContents):
                            m = a.mapDestPath(context, m).rstrip('/')
                        m = path.rstrip('/\\') + '/' + m
                        if isWindows():
                            m = m.replace('/', '\\')
                        mkdir(m)
            self.log.info("Completed unpacking %s (%0.1f MB) in %0.1f seconds", os.path.basename(srcAbs), filesize / 1024.0 / 1024, (time.time() - starttime))
def run(self, context):
    contents = self._getContents(context)
    mkdir(os.path.dirname(self.path))
    path = normLongPath(self.path)
    with openForWrite(path, 'wb') as f:
        f.write(contents.replace('\n', os.linesep))
    if self.__mode and not isWindows():
        os.chmod(path, self.__mode)
    if self.__executable and not isWindows():
        os.chmod(path, stat.S_IXOTH | stat.S_IXUSR | stat.S_IXGRP | os.stat(self.path).st_mode)
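# A minimal stdlib-only sketch (hypothetical path) of the permission handling
# above for the executable case: OR-ing the user/group/other execute bits into
# the file's existing mode.
import os, stat

def make_executable(path):
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)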
def run(self, context):
    options = context.mergeOptions(self)
    args = [options['docker.path']]
    environs = {'DOCKER_HOST': options['docker.host']} if options['docker.host'] else {}
    if self.mode == Docker.BUILD:
        dargs = list(args)
        dargs.extend([
            'build',
            '--rm=true',
            '-t',
            context.expandPropertyValues(self.imagename),
        ])
        if self.buildArgs:
            # pass each build argument individually; formatting the whole list into
            # a single --build-arg string would produce an invalid argument
            dargs.extend("--build-arg=%s" % context.expandPropertyValues(x) for x in self.buildArgs)
        if self.dockerfile:
            dargs.extend(["-f", context.expandPropertyValues(self.dockerfile)])
        inputs = self.inputs.resolve(context)
        if len(inputs) != 1:
            raise BuildException("Must specify a single input for Docker.BUILD", location=self.location)
        dargs.append(inputs[0])
        cwd = os.path.dirname(inputs[0])
        call(dargs, outputHandler=options['docker.processoutputhandler']('docker-build', False, options=options),
            timeout=options['process.timeout'], env=environs, cwd=cwd)
    elif self.mode == Docker.PUSHTAG:
        inputs = self.inputs.resolve(context)
        if len(inputs) != 0:
            raise BuildException("Must not specify inputs for Docker.PUSHTAG", location=self.location)
        dargs = list(args)
        dargs.extend([
            'tag',
            context.expandPropertyValues(self.depimage),
            context.expandPropertyValues(self.imagename),
        ])
        call(dargs, outputHandler=options['docker.processoutputhandler']('docker-tag', False, options=options),
            timeout=options['process.timeout'], env=environs)
        dargs = list(args)
        dargs.extend([
            'push',
            context.expandPropertyValues(self.imagename),
        ])
        call(dargs, outputHandler=options['docker.processoutputhandler']('docker-push', False, options=options),
            timeout=options['process.timeout'], env=environs)
    else:
        raise BuildException('Unknown Docker mode. Must be Docker.BUILD or Docker.PUSHTAG', location=self.location)

    # update the stamp file
    path = normLongPath(self.path)
    mkdir(os.path.dirname(path))
    with openForWrite(path, 'wb') as f:
        pass
def run(self, context):
    self.log.info('Cleaning existing files from %s', self.path)
    deleteDir(self.path)
    for a in self.archives:
        if isinstance(a, FilteredArchiveContents):
            items = [(a.getResolvedPath(context), '')]
        else:
            assert isinstance(a, BasePathSet)
            filteredMembers = None
            items = a.resolveWithDestinations(context)
        for (srcAbs, destRel) in items:
            if destRel and not isDirPath(destRel):
                destRel = os.path.dirname(destRel)  # strip off the zip filename
            if '..' in destRel:
                raise Exception('This target does not permit destination paths to contain ".." relative path expressions')
            try:
                filesize = os.path.getsize(srcAbs)
            except Exception:
                filesize = 0

            self.log.info("Unpacking %s (%0.1f MB) to %s", os.path.basename(srcAbs), filesize / 1024.0 / 1024, self.name + destRel)
            starttime = time.time()
            with self.__openArchive(srcAbs) as f:
                mkdir(self.path + destRel)
                if isinstance(a, FilteredArchiveContents) and a.hasIncludeExcludeFilters():
                    fullList = _getnames(f)
                    if not fullList:
                        raise BuildException('No files were found in archive "%s"' % (srcAbs))
                    filteredMembers = [x for x in fullList if a.isIncluded(context, x)]
                    self.log.info("Unpacking %d of %d members in %s", len(filteredMembers), len(fullList), os.path.basename(srcAbs))
                    if not filteredMembers:
                        raise BuildException('No files matching the specified include/exclude filters were found in archive "%s": %s' % (srcAbs, a))
                    if len(filteredMembers) == len(fullList):
                        raise BuildException('No files were excluded from the unpacking operation by the specified filters (check filters are correct): %s' % a)
                else:
                    filteredMembers = _getnames(f)

                # NB: some archive types want a list of string members, others want TarInfo objects etc, so
                # if we support other archive types in future might need to do a bit of work here
                path = normLongPath(self.path + destRel)
                for m in filteredMembers:
                    if not isDirPath(m):
                        info = _getinfo(f, m)
                        if isinstance(a, FilteredArchiveContents):
                            _setfilename(info, a.mapDestPath(context, _getfilename(info)))
                        if isWindows():
                            _setfilename(info, _getfilename(info).replace('/', '\\'))
                        f.extract(info, path=path)
                    else:
                        # we should create empty directories too
                        if isinstance(a, FilteredArchiveContents):
                            m = a.mapDestPath(context, m).rstrip('/')
                        m = path.rstrip('/\\') + '/' + m
                        if isWindows():
                            m = m.replace('/', '\\')
                        mkdir(m)
            self.log.info("Completed unpacking %s (%0.1f MB) in %0.1f seconds", os.path.basename(srcAbs), filesize / 1024.0 / 1024, (time.time() - starttime))
def run(self, context):
    mkdir(os.path.dirname(self.path))
    with tarfile.open(normLongPath(self.path), 'w:gz') as output:
        for (f, o) in self.inputs.resolveWithDestinations(context):
            output.add(normLongPath(f).rstrip('/\\'), o)
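# A minimal stdlib-only sketch (hypothetical helper and paths) of the tarfile
# usage above. Note that TarFile.add() recurses into directories by default, so
# adding a directory entry pulls in its whole subtree under the given archive
# name.
import tarfile

def create_targz(outpath, entries):
    """entries is a list of (source path, archive name) pairs."""
    with tarfile.open(outpath, 'w:gz') as t:
        for src, arcname in entries:
            t.add(src, arcname)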
def resolveWithDestinations(self, context):
    """
    Uses the file system to return a list of relative paths for files matching
    the specified include/exclude patterns, throwing a BuildException if none
    can be found.

    This method will cache its result after being called the first time.
    """
    log = logging.getLogger('FindPaths')
    log.debug('FindPaths resolve starting for: %s', self)
    with self.__lock:
        # think this operation is atomically thread-safe due to global interpreter lock
        if self.__cached:
            return self.__cached

        # resolve dir if needed, relative to where the fileset was specified in the build file
        resolveddir = _resolveDirPath(self.__dir, context, self.location)

        def dirCouldMatchIncludePattern(includePattern, d):
            if d.startswith('**'):
                return True
            d = d.split('/')
            p = includePattern.split('/')[:-1]  # strip off trailing '' or filename
            if '**' not in includePattern and len(d) > len(p):
                # don't go into a dir structure that's more deeply nested than the pattern
                #log.debug(' maybe vetoing %s based on counts : %s', d, p)
                return False

            i = 0
            while i < len(d) and i < len(p) and p[i]:
                if '*' in p[i]:
                    return True  # any kind of wildcard and we give up trying to match
                if d[i] != p[i]:
                    #log.debug(' maybe vetoing %s due to not matching %s', d, includePattern)
                    return False
                i += 1
            return True

        matches = []
        try:
            if not os.path.isdir(resolveddir):
                raise BuildException('FindPaths root directory does not exist: "%s"' % os.path.normpath(resolveddir), location=self.location)
            startt = time.time()
            usedIncludes = set()  # give an error if any are not used
            longdir = normLongPath(resolveddir)
            visited = 0
            for root, dirs, files in os.walk(longdir):
                visited += 1
                root = root.replace(longdir, '').replace('\\', '/').lstrip('/').rstrip('/')
                #log.debug('visiting: "%s"'%root)

                # optimization: if this doesn't require walking down the dir tree, don't do any!
                # (this optimization applies to includes like prefix/** but not if there is a bare '**'
                # in the includes list)
                if self.includes and "**" not in self.includes:
                    dirs[:] = [d for d in dirs if any(dirCouldMatchIncludePattern(e, (root + '/' + d).lstrip('/')) for e in self.includes)]
                # optimization: if there's an exclude starting with this dir and ending with '/**' or '/*', don't navigate to it
                dirs[:] = [d for d in dirs if not next((e for e in self.excludes if antGlobMatch(e, root + '/' + d)), None)]

                for p in files + [d + '/' for d in dirs]:
                    if root:
                        p = root + '/' + p
                    # first check if it matches an exclude
                    if next((True for e in self.excludes if antGlobMatch(e, p)), False):
                        continue

                    if not self.includes:
                        # include all files (not directories - that wouldn't make sense or be helpful)
                        if not p.endswith('/'):
                            matches.append(p)
                    else:
                        m = next((i for i in self.includes if antGlobMatch(i, p)), None)
                        if m:
                            log.debug('FindPaths matched %s from pattern %s', p, m)
                            usedIncludes.add(m)
                            matches.append(p)

            log.info('FindPaths in "%s" found %d path(s) for %s after visiting %s directories; %s', resolveddir, len(matches), self, visited, self.location)
            if time.time() - startt > 5:
                # this should usually be pretty quick, so may indicate a real build file mistake
                log.warn('FindPaths took a long time: %0.1f s to evaluate %s; see %s', time.time() - startt, self, self.location)

            if not matches:
                raise BuildException('No matching files found', location=self.location)
            if len(usedIncludes) < len(self.includes):
                # this is a check that ant doesn't do, but it's helpful for ensuring correctness
                raise BuildException('Some include patterns did not match any files: %s' % ', '.join(set(self.includes) - usedIncludes), location=self.location)
        except BuildException as e:
            raise BuildException('%s for %s' % (e.toSingleLineString(target=None), self), causedBy=False, location=self.location)
        except Exception as e:
            raise BuildException('%s for %s' % (repr(e), self), causedBy=True, location=self.location)
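# A minimal stdlib-only sketch (hypothetical patterns and paths) of the os.walk
# pruning idiom used above: assigning to dirs[:] in place tells os.walk not to
# descend into the removed directories, which is what makes the include/exclude
# optimizations worthwhile on large trees. fnmatch stands in for the ant-style
# glob matching used by the real code.
import os
import fnmatch

def walk_pruned(rootdir, exclude_dir_patterns):
    for root, dirs, files in os.walk(rootdir):
        # prune in place; os.walk only recurses into what remains in dirs
        dirs[:] = [d for d in dirs
            if not any(fnmatch.fnmatch(d, pat) for pat in exclude_dir_patterns)]
        for name in files:
            yield os.path.join(root, name)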
def uptodate(self, context, ignoreDeps):
    """Checks whether the target needs to be rebuilt.

    Returns true if the target is up to date and does not need a rebuild.

    Holds the object lock.

    Called during the main build phase, after the dependency resolution phase.
    """
    with self.lock:
        log.debug('Up-to-date check for %s', self.name)

        if self.isdirty:
            # no need to log at info, will already have been done when it was marked dirty
            log.debug('Up-to-date check: %s has been marked dirty', self.name)
            return False

        if not exists(self.path):
            log.info('Up-to-date check: %s must be rebuilt because file does not exist: "%s"', self.name, self.path)
            self.isdirty = True  # make sure we don't log this again
            return False

        if ignoreDeps:
            return True

        if not isfile(self.stampfile):
            # this is really an existence check, but if we have a dir it's an error so ignore for directory targets
            log.info('Up-to-date check: %s must be rebuilt because stamp file does not exist: "%s"', self.name, self.stampfile)
            return False

        # assume that by this point our explicit dependencies at least exist, so it's safe to call getHashableImplicitDependencies
        implicitInputs = self.__getImplicitInputs(context)
        if implicitInputs or isDirPath(self.target.name):
            # this is to cope with targets that have implicit inputs (e.g. globbed pathsets); might as well
            # use the same mechanism for directories (which need a stamp file anyway)
            if not exists(self._implicitInputsFile):
                log.info('Up-to-date check: %s must be rebuilt because implicit inputs/stamp file does not exist: "%s"', self.name, self._implicitInputsFile)
                return False
            iminpath = os.path.normpath(self._implicitInputsFile)
            iminpath = normLongPath(iminpath)
            with open(iminpath, 'rb') as f:
                latestImplicitInputs = f.read().split(os.linesep)
                if latestImplicitInputs != implicitInputs:
                    # note the line counts must describe the matching side of the diff
                    thediff = list(difflib.unified_diff(latestImplicitInputs, implicitInputs,
                        fromfile='inputs for previous build of the target (%d lines)' % len(latestImplicitInputs),
                        tofile='inputs for current build of the target (%d lines)' % len(implicitInputs),
                        lineterm='',
                        n=0))
                    if len(thediff) > 10:
                        thediff = thediff[:10] + ['...']
                    log.info('Up-to-date check: %s must be rebuilt because implicit inputs file has changed: "%s"\n\t%s\n',
                        self.name, self._implicitInputsFile, '\n\t'.join(thediff).replace('\r', '\\r\r'))
                    return False
                else:
                    log.debug("Up-to-date check: implicit inputs file contents has not changed: %s", self._implicitInputsFile)
        else:
            log.debug("Up-to-date check: target has no implicitInputs data: %s", self)

        # NB: there shouldn't be any file system errors here since we've checked for the existence of deps
        # already in _expand_deps; if this happens it's probably a build system bug
        stampmodtime = getmtime(self.stampfile)

        def isNewer(path):
            pathmodtime = getmtime(normLongPath(path))
            if pathmodtime <= stampmodtime:
                return False
            if pathmodtime - stampmodtime < 1:  # such a small time gap seems dodgy
                log.warn('Up-to-date check: %s must be rebuilt because input file "%s" is newer than "%s" by just %0.1f seconds',
                    self.name, path, self.stampfile, pathmodtime - stampmodtime)
            else:
                log.info('Up-to-date check: %s must be rebuilt because input file "%s" is newer than "%s" (by %0.1f seconds)',
                    self.name, path, self.stampfile, pathmodtime - stampmodtime)
            return True

        for f in self.fdeps:
            if isdir(normLongPath(f)):
                log.debug('Up-to-date check: walking dependency directory %s to check for newly modified files', f)
                for path, subdirs, files in os.walk(normLongPath(f)):
                    for name in files:
                        if isNewer(os.path.join(path, name)):
                            return False
                log.debug('uptodate: done walking dependency directory %s', f)
            else:
                if isNewer(f):
                    return False
    return True
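# A minimal stdlib-only sketch (hypothetical inputs) of how the implicit-inputs
# comparison above produces its log message: unified_diff with n=0 emits only
# the changed lines plus headers, which is then truncated for readability.
import difflib

previous = ['a=1', 'b=2', 'c=3']
current = ['a=1', 'b=2', 'c=4']
thediff = list(difflib.unified_diff(previous, current,
    fromfile='previous inputs (%d lines)' % len(previous),
    tofile='current inputs (%d lines)' % len(current),
    lineterm='', n=0))
print('\n'.join(thediff[:10]))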
def run(self, context):
    options = self.options

    # make sure temp dir exists
    mkdir(self.workDir)

    classes = os.path.join(self.workDir, "classes")  # output dir for classes

    # create the classpath, sorting within PathSet (for determinism), but retaining original order of
    # PathSet elements in the list
    classpath = os.pathsep.join(self.classpath.resolve(context))

    # compile everything
    mkdir(classes)  # (need this for assembling other files to package later on, even if we don't do any javac)
    if self.compile:
        mkdir(self.getOption('javac.logs'))
        javac(classes, self.compile.resolve(context), classpath, options=options,
            logbasename=options.get('javac.logs') + '/' + targetNameToUniqueId(self.name),
            targetname=self.name)

    manifest = os.path.join(self.workDir, "MANIFEST.MF")  # manifest file

    if isinstance(self.manifest, basestring):
        manifest = context.getFullPath(self.manifest, self.baseDir)
    elif self.manifest is None:
        manifest = None
    else:  # generate one
        # rewrite property values in the manifest
        manifest_entries = {}
        for i in self.manifest:
            manifest_entries[i] = context.expandPropertyValues(self.manifest[i])

        # determine classpath for manifest
        classpath_entries = []

        if "Class-path" not in manifest_entries:  # assuming it wasn't hardcoded, set it here
            for src, dest in self.classpath.resolveWithDestinations(context):
                # we definitely do want to support use of ".." in destinations here, it can be very useful
                classpath_entries.append(dest)
            assert isinstance(options['jar.manifest.classpathAppend'], list), options['jar.manifest.classpathAppend']  # must not be a string
            classpath_entries.extend(options['jar.manifest.classpathAppend'] or [])

            # need to always use / not \ for these to be valid
            classpath_entries = [p.replace(os.path.sep, '/').replace('\\', '/') for p in classpath_entries if p]
            if classpath_entries:
                manifest_entries["Class-path"] = " ".join(classpath_entries)  # include the classpath from here
        if not manifest_entries.get('Class-path'):
            # suppress this element entirely if not needed, otherwise there would be no way to have an empty classpath
            manifest_entries.pop('Class-path', '')

        # create the manifest file
        create_manifest(manifest, manifest_entries, options=options)

    # copy in the additional things to include
    for (src, dest) in self.package.resolveWithDestinations(context):
        if '..' in dest:
            raise Exception('This target does not permit packaged destination paths to contain ".." relative path expressions')
        mkdir(os.path.dirname(os.path.join(classes, dest)))
        destpath = normLongPath(classes + '/' + dest)
        srcpath = normLongPath(src)
        if os.path.isdir(srcpath):
            mkdir(destpath)
        else:
            with open(srcpath, 'rb') as s:
                with openForWrite(destpath, 'wb') as d:
                    d.write(s.read())

    # create the jar
    jar(self.path, manifest, classes, options=options,
        preserveManifestFormatting=self.preserveManifestFormatting,
        outputHandler=ProcessOutputHandler('jar', treatStdErrAsErrors=False, options=options))
def run(self, context):
    options = context.mergeOptions(self)  # get the merged options

    # make sure temp dir exists
    mkdir(self.workDir)

    classes = os.path.join(self.workDir, "classes")  # output dir for classes

    # create the classpath, sorting within PathSet (for determinism), but retaining original order of
    # PathSet elements in the list
    classpath = os.pathsep.join(self.classpath.resolve(context))

    # compile everything
    mkdir(classes)  # (need this for assembling other files to package later on, even if we don't do any javac)
    if self.compile:
        mkdir(options.get('javac.logs'))
        javac(classes, self.compile.resolve(context), classpath, options=options,
            logbasename=options.get('javac.logs') + '/' + targetNameToUniqueId(self.name),
            targetname=self.name)

    manifest = os.path.join(self.workDir, "MANIFEST.MF")  # manifest file

    if isinstance(self.manifest, basestring):
        manifest = context.getFullPath(self.manifest, self.baseDir)
    elif self.manifest is None:
        manifest = None
    else:  # generate one
        # rewrite property values in the manifest
        manifest_entries = {}
        for i in self.manifest:
            manifest_entries[i] = context.expandPropertyValues(self.manifest[i])

        # determine classpath for manifest
        classpath_entries = []

        if "Class-path" not in manifest_entries:  # assuming it wasn't hardcoded, set it here
            for src, dest in self.classpath.resolveWithDestinations(context):
                classpath_entries.append(dest)
            assert isinstance(options['jar.manifest.classpathAppend'], list), options['jar.manifest.classpathAppend']  # must not be a string
            classpath_entries.extend(options['jar.manifest.classpathAppend'] or [])

            # need to always use / not \ for these to be valid
            classpath_entries = [p.replace(os.path.sep, '/').replace('\\', '/') for p in classpath_entries if p]
            if classpath_entries:
                manifest_entries["Class-path"] = " ".join(classpath_entries)  # include the classpath from here
        if not manifest_entries.get('Class-path'):
            # suppress this element entirely if not needed, otherwise there would be no way to have an empty classpath
            manifest_entries.pop('Class-path', '')

        # create the manifest file
        create_manifest(manifest, manifest_entries, options=options)

    # copy in the additional things to include
    for (src, dest) in self.package.resolveWithDestinations(context):
        mkdir(os.path.dirname(os.path.join(classes, dest)))
        destpath = normLongPath(classes + '/' + dest)
        srcpath = normLongPath(src)
        if os.path.isdir(srcpath):
            mkdir(destpath)
        else:
            with open(srcpath, 'rb') as s:
                with openForWrite(destpath, 'wb') as d:
                    d.write(s.read())

    # create the jar
    jar(self.path, manifest, classes, options=options,
        preserveManifestFormatting=self.preserveManifestFormatting,
        outputHandler=ProcessOutputHandler('jar', treatStdErrAsErrors=False, options=options))
def updateStampFile(self):
    """Assumes self.path is a stamp file that just needs creating / timestamp updating, and does so."""
    path = normLongPath(self.path)
    mkdir(os.path.dirname(path))
    with openForWrite(path, 'wb') as f:
        pass
def resolveWithDestinations(self, context):
    """
    Uses the file system to return a list of relative paths for files matching
    the specified include/exclude patterns, throwing a BuildException if none
    can be found.

    This method will cache its result after being called the first time.

    Note that it is possible the destinations may contain "../" elements - targets
    for which that could be a problem should check for and disallow such destinations
    (e.g. for copy we would not want to allow copying to destinations outside the
    specified root directory).
    """
    log = logging.getLogger('FindPaths')
    log.debug('FindPaths resolve starting for: %s', self)
    with self.__lock:
        # think this operation is atomically thread-safe due to global interpreter lock
        if self.__cached:
            return self.__cached

        # resolve dir if needed, relative to where the fileset was specified in the build file
        resolveddir = _resolveDirPath(self.__dir, context, self.location)

        def dirCouldMatchIncludePattern(includePattern, d):
            if d.startswith('**'):
                return True
            d = d.split('/')
            p = includePattern.split('/')[:-1]  # strip off trailing '' or filename
            if '**' not in includePattern and len(d) > len(p):
                # don't go into a dir structure that's more deeply nested than the pattern
                #log.debug(' maybe vetoing %s based on counts : %s', d, p)
                return False

            i = 0
            while i < len(d) and i < len(p) and p[i]:
                if '*' in p[i]:
                    return True  # any kind of wildcard and we give up trying to match
                if d[i] != p[i]:
                    #log.debug(' maybe vetoing %s due to not matching %s', d, includePattern)
                    return False
                i += 1
            return True

        matches = []
        try:
            if not os.path.isdir(resolveddir):
                raise BuildException('FindPaths root directory does not exist: "%s"' % os.path.normpath(resolveddir), location=self.location)
            startt = time.time()
            usedIncludes = set()  # give an error if any are not used
            longdir = normLongPath(resolveddir)
            visited = 0
            for root, dirs, files in os.walk(longdir):
                visited += 1
                root = root.replace(longdir, '').replace('\\', '/').lstrip('/').rstrip('/')
                #log.debug('visiting: "%s"'%root)

                # optimization: if this doesn't require walking down the dir tree, don't do any!
                # (this optimization applies to includes like prefix/** but not if there is a bare '**'
                # in the includes list)
                if self.includes and "**" not in self.includes:
                    dirs[:] = [d for d in dirs if any(dirCouldMatchIncludePattern(e, (root + '/' + d).lstrip('/')) for e in self.includes)]
                # optimization: if there's an exclude starting with this dir and ending with '/**' or '/*', don't navigate to it
                dirs[:] = [d for d in dirs if not next((e for e in self.excludes if antGlobMatch(e, root + '/' + d)), None)]

                for p in files + [d + '/' for d in dirs]:
                    if root:
                        p = root + '/' + p
                    # first check if it matches an exclude
                    if next((True for e in self.excludes if antGlobMatch(e, p)), False):
                        continue

                    if not self.includes:
                        # include all files (not directories - that wouldn't make sense or be helpful)
                        if not p.endswith('/'):
                            matches.append(p)
                    else:
                        m = next((i for i in self.includes if antGlobMatch(i, p)), None)
                        if m:
                            log.debug('FindPaths matched %s from pattern %s', p, m)
                            usedIncludes.add(m)
                            matches.append(p)

            log.info('FindPaths in "%s" found %d path(s) for %s after visiting %s directories; %s', resolveddir, len(matches), self, visited, self.location)
            if time.time() - startt > 5:
                # this should usually be pretty quick, so may indicate a real build file mistake
                log.warn('FindPaths took a long time: %0.1f s to evaluate %s; see %s', time.time() - startt, self, self.location)

            if not matches:
                raise BuildException('No matching files found', location=self.location)
            if len(usedIncludes) < len(self.includes):
                # this is a check that ant doesn't do, but it's helpful for ensuring correctness
                raise BuildException('Some include patterns did not match any files: %s' % ', '.join(set(self.includes) - usedIncludes), location=self.location)
        except BuildException as e:
            raise BuildException('%s for %s' % (e.toSingleLineString(target=None), self), causedBy=False, location=self.location)
        except Exception as e:
            raise BuildException('%s for %s' % (repr(e), self), causedBy=True, location=self.location)
def _deps_target(self, tname):
    """Function called by a worker to check the deps for a single target.

    tname - this is the canonical PATH of the target, not the name
    """
    errors = []
    pending = []  # list of new jobs to be done as part of dependency resolution
    log.debug("Inspecting dependencies of target %s", tname)
    target = self.targets.get(tname, None)

    # only log dependency status periodically since usually it's very quick
    # and not worthwhile
    with self.lock:
        self.index += 1
        log.critical(self.progressFormat + "Resolving dependencies for %s", self.index, self.total, target)

    if not target:
        assert False  # I'm not sure how we can get here, think it should actually be impossible
        if not exists(tname):
            errors.append("Unknown target %s" % tname)
        else:
            log.debug('Scheduler cannot find target in build file or on disk: %s', target)  # is this a problem? maybe assert False here?
    elif self.options['ignore-deps'] and exists(target.path):
        # in this mode, any target that already exists should be treated as
        # a leaf with no deps which means it won't be built under any
        # circumstances (even if a target it depends on is rebuilt),
        # and allows us to avoid the time-consuming transitive resolution
        # of dependencies. Has to be implemented this way, since if we were
        # to allow ANY already-existing target to be re-built in the normal
        # way, we would have to resolve dependencies for all targets in
        # order to ensure we never rebuild a target at the same time as
        # a target that depends on it. We're essentially deleting the entire
        # dependency subtree for all nodes that exist already
        log.debug('Scheduler is treating existing target as a leaf and will not rebuild it: %s', target)
        self.leaves.append(target)
    elif not (self.options['ignore-deps'] and self.options['clean']):
        try:
            deps = target.resolveDependencies(self.context)
            if deps:
                log.debug('%s has %d dependencies', target.target, len(deps))
            targetDeps = []  # just for logging

            leaf = True
            for dname in deps:
                #log.debug('Processing dependency: %s -> %s', tname, dname)
                dpath = normLongPath(dname)
                if dname in self.targets:
                    leaf = False

                    dtarget = self.targets[dname]
                    if dtarget in target.rdeps():
                        raise Exception('Circular dependency between targets: %s and %s' % (dtarget.name, target.name))
                    dtarget.rdep(target)
                    self._updatePriority(target)
                    target.increment()

                    if not isDirPath(dname):
                        target.filedep(dname)  # might have an already built target dependency which is still newer
                    else:
                        # special case directory target deps - must use stamp file not dir, to avoid re-walking
                        # the directory needlessly, and possibly making a wrong decision if the dir pathset is
                        # from a filtered pathset
                        target.filedep(self.targets[dname].stampfile)

                    with self.lock:
                        if not dname in self.pending:
                            self.pending.append(dname)
                            pending.append((0, dname))

                    targetDeps.append(str(self.targets[dname]))
                elif (isDirPath(dname) and isdir(dpath)) or (not isDirPath(dname) and isfile(dpath)):
                    target.filedep(dname)
                else:
                    # in the specific case of a dependency error, build will definitely fail immediately so we should log line number
                    # at ERROR log level not just at info
                    ex = BuildException("Cannot find dependency %s" % dname)
                    log.error('FAILED during dependency resolution: %s', ex.toMultiLineString(target, includeStack=False), extra=ex.getLoggerExtraArgDict(target))
                    assert not os.path.exists(dpath), dname
                    errors.append(ex.toSingleLineString(target))
                    break

            if leaf:
                log.info('Target dependencies of %s (priority %s) are: <no dependencies>', target, -target.priority)
                self.leaves.append(target)
            else:
                log.info('Target dependencies of %s (priority %s) are: %s', target, -target.priority, ', '.join(targetDeps))  # this is important for debugging missing dependencies etc
        except Exception as e:
            errors.extend(self._handle_error(target.target, prefix="Target FAILED during dependency resolution"))
    else:
        # For clean ignoring deps we want to be as light-weight as possible
        self.leaves.append(target)

    if pending:
        # if we're adding some new jobs
        with self.lock:
            self.total += len(pending)

    # NB: the keep-going option does NOT apply to dependency failures
    return (pending, errors, 0 == len(errors))
def resolveWithDestinations(self, context):
    """
    Uses the file system to return a list of relative paths for files matching
    the specified include/exclude patterns, throwing a BuildException if none
    can be found.

    This method will cache its result after being called the first time.

    Note that it is possible the destinations may contain "../" elements - targets
    for which that could be a problem should check for and disallow such destinations
    (e.g. for copy we would not want to allow copying to destinations outside the
    specified root directory).
    """
    log = logging.getLogger('FindPaths')
    log.debug('FindPaths resolve starting for: %s', self)
    with self.__lock:
        # think this operation is atomically thread-safe due to global interpreter lock
        if self.__cached:
            return self.__cached

        # resolve dir if needed, relative to where the fileset was specified in the build file
        resolveddir = _resolveDirPath(self.__dir, context, self.location)

        matches = []
        try:
            if not os.path.isdir(resolveddir):
                raise BuildException('FindPaths root directory does not exist: "%s"' % os.path.normpath(resolveddir), location=self.location)
            startt = time.time()

            if self.includes is not None:
                unusedPatternsTracker = GlobUnusedPatternTracker(self.includes)  # give an error if any are not used
            else:
                unusedPatternsTracker = None

            longdir = normLongPath(resolveddir)
            visited = 0
            for root, dirs, files in os.walk(longdir):
                visited += 1
                root = root.replace(longdir, '').replace('\\', '/').strip('/')
                if root != '':
                    root += '/'
                #log.debug('visiting: "%s"'%root)

                # optimization: if this doesn't require walking down the dir tree, don't do any!
                # (this optimization applies to includes like prefix/** but not if there is a bare '**'
                # in the includes list)
                if self.includes is not None:
                    self.includes.removeUnmatchableDirectories(root, dirs)

                # optimization: if there's an exclude starting with this dir and ending with '/**' or '/*', don't navigate to it
                # we deliberately match only against filename patterns (not dir patterns) since
                # empty dirs are handled in the later loop not through this mechanism, so it's just files that matter
                if self.excludes is not None and dirs != []:
                    # nb: both dirs and the result of getPathMatches will have no trailing slashes
                    self.__removeNamesFromList(dirs, self.excludes.getPathMatches(root, filenames=dirs))

                # now find which files and empty dirs match
                matchedemptydirs = dirs

                if self.includes is not None:
                    files, matchedemptydirs = self.includes.getPathMatches(root, filenames=files, dirnames=matchedemptydirs, unusedPatternsTracker=unusedPatternsTracker)
                else:
                    matchedemptydirs = []  # only include empty dirs if explicitly specified

                if self.excludes is not None:
                    exfiles, exdirs = self.excludes.getPathMatches(root, filenames=files, dirnames=matchedemptydirs)
                    self.__removeNamesFromList(files, exfiles)
                    self.__removeNamesFromList(matchedemptydirs, exdirs)

                for p in files:
                    matches.append(root + p)
                for p in matchedemptydirs:
                    matches.append(root + p + '/')

            log.info('FindPaths in "%s" found %d path(s) for %s after visiting %s directories; %s', resolveddir, len(matches), self, visited, self.location)
            if time.time() - startt > 5:
                # this should usually be pretty quick, so may indicate a real build file mistake
                log.warn('FindPaths took a long time: %0.1f s to evaluate %s; see %s', time.time() - startt, self, self.location)

            if not matches:
                raise BuildException('No matching files found', location=self.location)
            if unusedPatternsTracker is not None:
                unusedPatterns = unusedPatternsTracker.getUnusedPatterns()
                if unusedPatterns != []:
                    raise BuildException('Some include patterns did not match any files: %s' % ', '.join(unusedPatterns), location=self.location)

        except BuildException as e:
            raise BuildException('%s for %s' % (e.toSingleLineString(target=None), self), causedBy=False, location=self.location)
        except Exception as e:
            raise BuildException('%s for %s' % (repr(e), self), causedBy=True, location=self.location)