Example #1
	def run(self, context):
		mkdir(os.path.dirname(self.path))
		src = self.src.resolve(context)
		if len(src) != 1: raise BuildException('SymLink target "%s" is invalid - must have exactly one source path'%self.name)
		if src[0].endswith('/'): raise BuildException('SymLink target "%s" is invalid - must be a file not a directory'%self.name)

		os.symlink(src[0] if not self.relative else os.path.relpath(src[0], os.path.dirname(self.path)), self.path)
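A note on the relative mode: when self.relative is set, the link body is computed with os.path.relpath against the link's own directory, so the link keeps working if the whole output tree is moved. A minimal standalone sketch of that computation (the paths are hypothetical):

import os

src = '/work/output/lib/libfoo.so'   # hypothetical resolved source path
link = '/work/output/bin/foo'        # hypothetical link location

# same computation as the target above: the link body is relative to the link's directory
body = os.path.relpath(src, os.path.dirname(link))   # -> '../lib/libfoo.so'
# os.symlink(body, link) would then create the relative link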
Example #2
	def run(self, context):
		self.log.info('Cleaning existing files from %s', self.path)
		deleteDir(self.path)
		
		iswindows = IS_WINDOWS
		
		for a in self.archives:
			a_is_filteredarchivecontents = isinstance(a, FilteredArchiveContents)
			if a_is_filteredarchivecontents:
				items = [(a.getResolvedPath(context), '')]
			else:
				assert isinstance(a, BasePathSet)
				filteredMembers = None
				items = a.resolveWithDestinations(context)
			for (srcAbs, destRel) in items:
				if destRel and not isDirPath(destRel): destRel = os.path.dirname(destRel) # strip off the zip filename
				if '..' in destRel: raise Exception('This target does not permit destination paths to contain ".." relative path expressions')
					
				try:
					filesize = os.path.getsize(srcAbs)
				except Exception:
					filesize = 0
				
				self.log.info("Unpacking %s (%0.1f MB) to %s", os.path.basename(srcAbs), filesize/1024.0/1024, self.name+destRel)
				starttime = time.time()
				with self.__openArchive(srcAbs) as f:
					mkdir(self.path+destRel)
					if a_is_filteredarchivecontents and a.hasIncludeExcludeFilters():
						fullList = _getnames(f)
						if not fullList:
							raise BuildException('No files were found in archive "%s"'%(srcAbs))
						filteredMembers = [x for x in fullList if a.isIncluded(context, x)]
						self.log.info("Unpacking %d of %d members in %s", len(filteredMembers), len(fullList), os.path.basename(srcAbs))
						if not filteredMembers:
							raise BuildException('No files matching the specified include/exclude filters were found in archive "%s": %s'%(srcAbs,  a))
						if len(filteredMembers)==len(fullList):
							raise BuildException('No files were excluded from the unpacking operation by the specified filters (check filters are correct): %s'%a)
					else:
						filteredMembers = _getnames(f)
					# NB: some archive types want a list of string members, others want TarInfo objects etc, so 
					# if we support other archive types in future might need to do a bit of work here
					path = normLongPath(self.path+destRel)
					for m in filteredMembers:						
						if not isDirPath(m):
							info = _getinfo(f, m)
							if a_is_filteredarchivecontents:
								_setfilename(info, a.mapDestPath(context, _getfilename(info)))
							if iswindows: _setfilename(info, _getfilename(info).replace('/', '\\'))
							f.extract(info, path=path)
						else:
							# we should create empty directories too
							if a_is_filteredarchivecontents:
								m = a.mapDestPath(context, m).rstrip('/')

							m = path.rstrip('/\\')+'/'+m
							if iswindows: m = m.replace('/', '\\')
							mkdir(m)
							
				
				self.log.info("Completed unpacking %s (%0.1f MB) in %0.1f seconds", os.path.basename(srcAbs), filesize/1024.0/1024, (time.time()-starttime))
Example #3
    def run(self, context):
        libs = self.libs.resolve(context)
        libnames = [os.path.basename(x) for x in libs]
        libpaths = [os.path.dirname(x) for x in libs]
        flags = [context.expandPropertyValues(x) for x in self.flags]

        args = [self.getOption('csharp.compiler'), "-out:" + self.path]
        if libnames:
            args.extend([
                "-reference:" + ",".join(libnames),
                "-lib:" + ",".join(libpaths)
            ])
        if self.main:
            args.extend(["-target:exe", "-main:" + self.main])
        else:
            args.append("-target:library")
        for (file, id) in self.resources:
            args.append('-resource:%s,%s' %
                        (context.expandPropertyValues(file),
                         context.expandPropertyValues(id)))
        args.extend(self.options['csharp.options'])
        args.extend(flags)
        args.extend(self.compile.resolve(context))

        mkdir(os.path.dirname(self.path))
        call(args,
             outputHandler=self.getOption('csharp.outputHandlerFactory')(
                 'csc', False, options=self.options),
             timeout=self.options['process.timeout'])
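For a hypothetical executable target, the assembled argument list might look like the following sketch (the compiler name 'csc' and all paths are illustrative; the real binary comes from the csharp.compiler option):

# hypothetical assembled command for a target with one reference and a main class
args = ['csc', '-out:build/myapp.exe',
        '-reference:dep.dll', '-lib:build/libs',
        '-target:exe', '-main:Com.Example.Main',
        '-resource:res/strings.txt,Strings',
        'src/Program.cs']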
Example #4
	def run(self, context):
		"""
			Calls the wrapped run method
		"""
		
		retryNumber = 0 # 1=first retry, etc
		self.target.retriesRemaining = int(self.target.options['Target.failureRetries']) # default is 0
		backoffSecs = self.target.options['Target.failureRetriesInitialBackoffSecs']
		
		while True:
			try:
				implicitInputs = self.__getImplicitInputs(context)
				if implicitInputs or self.isDirPath:
					deleteFile(self.__implicitInputsFile)

				self.target.run(context)
				if retryNumber > 0: self.target.log.warning('Target %s succeeded on retry #%d', self, retryNumber)
				break
			except Exception as ex:
				if self.target.retriesRemaining == 0: 
					if retryNumber > 0: 
						self.target.log.warning('Target %s failed even after %d retries', self, retryNumber)
					raise

				self.target.retriesRemaining -= 1
				retryNumber += 1
				
				time.sleep(backoffSecs)
				
				self.target.log.info('Target %s failed on attempt #%d, will now clean before retry', self, retryNumber)
				# hopefully after the backoff time enough file handles will have been removed for the clean to succeed 
				try:
					self.clean(context)
				except Exception as ex:
					self.target.log.error('Failed to cleanup during retry, after initial failure of %s', self)
					raise

				# this logic is to prevent CI (e.g. TeamCity) error messages from one retry from causing the whole job to be flagged as a 
				# failure even if a subsequent retry succeeds
				buf = outputBufferingManager.resetStdoutBufferForCurrentThread()
				self.target.log.warning('Target %s failed on attempt #%d, will retry after %d seconds backoff (see .log file for details).', self, retryNumber, backoffSecs)
				# for now let's just throw away buf - user can look at the .log file to see what happened before the failure if they care 
				# (can't just re-log it here as that would result in duplication with the .log output)

				backoffSecs *= 2
				
		# we expect self.path to NOT be in the fileutils stat cache at this point; 
		# it's too costly to check this explicitly, but unwanted incremental rebuilds 
		# can be caused if the path does get into the stat cache since it'll have a stale value
		
		# if target built successfully, record what the implicit inputs were to help with the next up to date 
		# check and ensure incremental build is correct
		if implicitInputs or self.isDirPath:
			log.debug('writing implicitInputsFile: %s', self.__implicitInputsFile)
			mkdir(os.path.dirname(self.__implicitInputsFile))
			with openForWrite(self.__implicitInputsFile, 'wb') as f:
				f.write(os.linesep.join(implicitInputs).encode('utf-8'))
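The wrapper doubles backoffSecs after every failed attempt, giving exponential backoff, and cleans the target between attempts so each retry starts from a fresh state. A minimal standalone sketch of the same retry shape (all names are hypothetical):

import time

def run_with_retries(fn, cleanup, retries=2, backoff=15):
    attempt = 0
    while True:
        try:
            return fn()
        except Exception:
            if attempt >= retries: raise
            attempt += 1
            time.sleep(backoff)  # wait before cleaning, e.g. for file handles to be released
            cleanup()            # each retry starts from a clean state
            backoff *= 2         # exponential backoff, as in the wrapper above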
Example #5
    def run(self, context):
        options = self.options

        mkdir(os.path.dirname(self.path))
        options['native.compilers'].archiver.archive(
            context,
            output=self.path,
            options=options,
            src=self.objects.resolve(context))
Example #6
    def run(self, context):
        mkdir(self.path)
        if os.listdir(self.path):
            self.log.error('TEST FAILED - clean did not occur between retries')

        open(self.path + '/touchfile.txt', 'w').close()

        self.log.error('Error logged by target')
        raise Exception('Simulated target failure')
Example #7
    def updateStampFile(self):
        """
		.. private:: Not useful enough to be in the public API. 
		
		Assumes self.path is a stamp file that just needs creating / timestamp updating and does so """
        path = normLongPath(self.path)
        mkdir(os.path.dirname(path))
        with openForWrite(path, 'wb') as f:
            pass
Example #8
    def run(self, context):
        options = self.options
        mkdir(os.path.dirname(self.path))
        options['native.compilers'].ccompiler.compile(
            context,
            output=self.path,
            options=options,
            flags=self._getCompilerFlags(context),
            src=self.source.resolve(context),
            includes=self._getIncludeDirs(context))
Example #9
File: java.py Project: xpybuild/xpybuild
	def run(self, context):
		# make sure outputdir exists
		mkdir(self.path)

		# create the classpath, sorting within PathSet (for determinism), but retaining original order of 
		# PathSet elements in the list
		classpath = os.pathsep.join(self.classpath.resolve(context)) 

		# compile everything
		mkdir(self.getOption('javac.logs'))
		javac(self.path, self.compile.resolve(context), classpath, options=self.options, logbasename=self.options['javac.logs']+'/'+targetNameToUniqueId(self.name), targetname=self.name, workDir=self.workDir)
Example #10
	def run(self, context):
		mkdir(os.path.dirname(self.path))
		alreadyDone = set()
		with zipfile.ZipFile(normLongPath(self.path), 'w') as output:
			for (f, o) in self.inputs.resolveWithDestinations(context):
				# if we don't check for duplicate entries we'll end up creating an invalid zip
				if o in alreadyDone:
					dupsrc = ['"%s"'%src for (src, dest) in self.inputs.resolveWithDestinations(context) if dest == o]
					raise BuildException('Duplicate zip entry "%s" from: %s'%(o, ', '.join(dupsrc)))
				alreadyDone.add(o)
				# can't compress directory entries! (it messes up Java)
				output.write(normLongPath(f).rstrip('/\\'), o, zipfile.ZIP_STORED if isDirPath(f) else zipfile.ZIP_DEFLATED) 
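Directory entries are deliberately stored uncompressed (ZIP_STORED) because compressed directory entries break Java tooling, while regular files are deflated. A minimal sketch of the same rule using only the standard zipfile module (the helper name is hypothetical):

import os, zipfile

def add_entry(output, src, arcname):
    # directories must not be compressed (it confuses Java); files are deflated
    compression = zipfile.ZIP_STORED if os.path.isdir(src) else zipfile.ZIP_DEFLATED
    output.write(src, arcname, compression)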
Example #11
def jar(path, manifest, sourcedir, options, preserveManifestFormatting=False, update=False, outputHandler=None):
	""" Create a jar file containing a manifest and some other files

	@param path: jar file to create. Typically this file does not already exist, but if it does 
	then the specified files or manifest will be merged into it. 
	
	@param manifest: path to the manifest.mf file (or None to disable manifest entirely)

	@param sourcedir: the directory to pack everything from (this method may add extra files to this dir)

	@param options: options map. jar.options is a list of additional arguments

	@param preserveManifestFormatting: an advanced option that prevents the jar executable from 
	reformatting the specified manifest file to comply with Java conventions 
	(also prevents manifest merging if the jar already exists)

	@param update: if True, update an existing jar (jar -u) instead of creating a new one (jar -c)

	@param outputHandler: the process output handler (optional)
	"""
	# work out if we need to create a parent directory
	dir = os.path.dirname(path)
	if dir and not os.path.exists(dir): mkdir(dir)
	# location of jar
	if options['java.home']:
		binary = os.path.join(options['java.home'], "bin/jar")
	else:
		binary = "jar"
	# build up arguments
	args = [binary]
	args.extend(options['jar.options'])

	if update:
		mode='-u'
	else:
		mode='-c'
	
	if not manifest: 
		args.extend([mode+"fM", path])
	elif preserveManifestFormatting:
		mkdir(sourcedir+'/META-INF')
		srcf = normLongPath(sourcedir+'/META-INF/manifest.mf')

		with open(manifest, 'rb') as s:
			with openForWrite(srcf, 'wb') as d:
				d.write(s.read())
		args.extend([mode+"f", path])
	else:
		args.extend([mode+"fm", path, manifest])

	if sourcedir: 
		args.extend(["-C", sourcedir, "."])


	# actually call jar
	call(args, outputHandler=outputHandler, timeout=options['process.timeout'])
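A hedged usage sketch for this function; the options map below is hypothetical and shows only the keys jar() actually reads (in a real build these come from the option registry):

# hypothetical usage, assuming a minimal options map
options = {'java.home': '', 'jar.options': [], 'process.timeout': 600}
jar('build/output/myapp.jar', 'build/work/MANIFEST.MF', 'build/work/classes', options)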
Example #12
    def run(self, context):
        contents = self._getContents(context)

        mkdir(os.path.dirname(self.path))
        path = normLongPath(self.path)
        with self.openFile(context,
                           path,
                           'wb' if isinstance(contents, bytes) else 'w',
                           encoding=self.__encoding) as f:
            f.write(contents)

        if self.__mode and not IS_WINDOWS:
            os.chmod(path, self.__mode)
        if self.__executable and not IS_WINDOWS:
            os.chmod(
                path, stat.S_IXOTH | stat.S_IXUSR | stat.S_IXGRP
                | os.stat(self.path).st_mode)
Example #13
def javadoc(path, sources, classpath, options, workDir, outputHandler):
	""" Create javadoc from sources and a set of options

	@param path: The directory under which to create the javadoc

	@param sources: a list of source files

	@param classpath: a list of jars for the classpath

	@param options: the current set of options to use
	
	@param workDir: where temporary files are stored
	
	@param outputHandler: the output handler (optional)
	"""
	deleteDir(path)
	mkdir(path)
	# location of javadoc
	if options['java.home']:
		binary = os.path.join(options['java.home'], "bin/javadoc")
	else:
		binary = "javadoc"

	# store the list of files in a temporary file, then build from that.
	mkdir(workDir)
	inputlistfile = os.path.join(workDir, "javadoc.inputs")
	with openForWrite(inputlistfile, 'w', encoding=locale.getpreferredencoding()) as f:
		f.writelines('"'+x.replace('\\','\\\\')+'"'+'\n' for x in sources)

	# build up arguments
	args = [binary]
	args.extend(options['javadoc.options'])
	if options['javadoc.ignoreSourceFilesFromClasspath']:
		args.extend(['-sourcepath', path+'/xpybuild_fake_sourcepath'])
	args.extend([
		"-d", path,
		"-classpath", classpath,
		"-windowtitle", options['javadoc.title'],
		"-doctitle", options['javadoc.title'],
		"-%s" % options['javadoc.access'],
		"@%s" % inputlistfile
	])
	# actually call javadoc
	call(args, outputHandler=outputHandler, timeout=options['process.timeout'])
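A hedged usage sketch; the options map is hypothetical and lists only the keys this function reads:

# hypothetical usage; option values shown are illustrative
options = {
    'java.home': '',
    'javadoc.options': [],
    'javadoc.ignoreSourceFilesFromClasspath': False,
    'javadoc.title': 'My API',
    'javadoc.access': 'public',
    'process.timeout': 600,
}
javadoc('build/output/apidocs', ['src/com/example/Foo.java'], 'lib/dep.jar',
        options, workDir='build/work', outputHandler=None)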
Example #14
    def run(self, context):
        options = self.options

        mkdir(os.path.dirname(self.path))
        options['native.compilers'].linker.link(
            context,
            output=self.path,
            options=options,
            flags=options['native.link.flags'] + self.flags,
            shared=self.shared,
            src=self.objects.resolve(context),
            libs=flatten([
                (y.strip()
                 for y in context.expandPropertyValues(x, expandList=True))
                for x in self.libs + options['native.libs'] if x
            ]),
            libdirs=flatten(
                self.libpaths.resolve(context) + [
                    context.expandPropertyValues(x, expandList=True)
                    for x in options['native.libpaths']
                ]))
Example #15
	def run(self, context):
		mkdir(os.path.dirname(self.path))
		with tarfile.open(normLongPath(self.path), 'w:gz') as output:
			for (f, o) in self.inputs.resolveWithDestinations(context):
				output.add(normLongPath(f).rstrip('/\\'), o)
Example #16
    def getHashableImplicitInputs(self, context):
        r = super(Cpp, self).getHashableImplicitInputs(context)

        r.append('compiler flags: %s' % self._getCompilerFlags(context))

        # this will provide a quick way to notice changes such as TP library version number changed etc
        # and also is the only way we'll detect the need to rebuild for includes that are regex'd out
        includedirs = self._getIncludeDirs(context)
        for path in includedirs:
            r.append('include dir: ' + os.path.normcase(path))

        # This is called exactly once during up-to-date checking OR run, which
        # means we will have generated all target dependencies
        # (e.g. include files, source files etc) by this point

        # Since non-target include files won't be known until this point, we need
        # to perform up-to-date-ness checking for them here (rather than in
        # targetwrapper as normally happens for dependencies).

        startt = time.time()

        try:
            # must NOT use the getstat cache here; we don't want to pollute it with non-existence
            targetmtime = os.stat(self.path).st_mtime
        except os.error:  # file doesn't exist
            targetmtime = 0

        makedependsfile = self._getMakeDependsFile(context)
        # (no value in using the stat cache for this; it's not used elsewhere)
        if targetmtime != 0 and not os.path.exists(makedependsfile):
            targetmtime = 0  # treat the same as if the target itself didn't exist

        newestFile, newestTime = None, 0  # keep track of the newest source or include file

        if IS_WINDOWS:  # normalizes case for this OS but not slashes (handy for regex matching)

            def xpybuild_normcase(path):
                return path.lower()
        else:

            def xpybuild_normcase(path):
                return path

        # changes in these options must cause us to re-execute makedepends
        ignoreregex = self.options['native.include.upToDateCheckIgnoreRegex']
        if ignoreregex:
            ignoreregex = xpybuild_normcase(ignoreregex)
            r.append('option native.include.upToDateCheckIgnoreRegex=%s' %
                     ignoreregex)
        makedependsoptions = "upToDateCheckIgnoreRegex='%s', upToDateCheckIgnoreSystemHeaders=%s, flags=%s" % (
            ignoreregex,
            self.options['native.include.upToDateCheckIgnoreSystemHeaders'],
            self._getCompilerFlags(context),
        )

        # first, figure out if we need to (re-)run makedepends or can use the cached info from the last build
        runmakedepends = False

        if targetmtime == 0:
            runmakedepends = True

        alreadychecked = set()  # paths that we've already checked the date of
        sourcepaths = []
        for path, _ in self.source.resolveWithDestinations(context):
            mtime = cached_getmtime(path)
            alreadychecked.add(path)
            sourcepaths.append(path)
            if mtime > newestTime: newestFile, newestTime = path, mtime
        if newestTime > targetmtime: runmakedepends = True

        # (no point using the stat cache for this file)
        if (not runmakedepends) and os.path.exists(makedependsfile):
            # read file from last time; if any of the transitive dependencies
            # have changed, we should run makedepends again to update them
            with io.open(makedependsfile, 'r', encoding='utf-8') as f:
                flags = f.readline().strip()
                if flags != makedependsoptions:
                    runmakedepends = True
                else:
                    for path in f:
                        path = path.strip()
                        pathstat = cached_stat(path, errorIfMissing=False)
                        if pathstat is False:
                            # file doesn't exist - must rebuild
                            runmakedepends = True
                            (
                                self.log.critical
                                if Cpp.__rebuild_makedepend_count <= 5 else
                                self.log.info
                            )('Recalculating C/C++ dependencies of %s as dependency no longer exists: %s',
                              self, path)

                            break
                        mtime = pathstat.st_mtime
                        alreadychecked.add(path)
                        if mtime > newestTime:
                            newestFile, newestTime = path, mtime

            if newestTime > targetmtime: runmakedepends = True

        # (re-)run makedepends
        if runmakedepends:
            # only bother to log if we're recalculating
            if targetmtime != 0:
                Cpp.__rebuild_makedepend_count += 1  # log the first few at crit
                (
                    self.log.critical
                    if Cpp.__rebuild_makedepend_count <= 5 else self.log.info
                )('Recalculating C/C++ dependencies of %s; most recently modified dependent file is %s at %s',
                  self, newestFile, datetime.datetime.fromtimestamp(
                      newestTime).strftime('%a %Y-%m-%d %H:%M:%S'))

            try:
                makedependsoutput = self.options[
                    'native.compilers'].dependencies.depends(
                        context=context,
                        src=sourcepaths,
                        options=self.options,
                        flags=self._getCompilerFlags(context),
                        includes=includedirs,
                    )
            except Exception as ex:
                raise BuildException('Dependency resolution failed for %s' %
                                     (sourcepaths[0]),
                                     causedBy=True)

            # normalize case to avoid problems on windows, and strip out sources since we already checked them above
            makedependsoutput = [
                os.path.normcase(path) for path in makedependsoutput
                if path not in sourcepaths
            ]
            makedependsoutput.sort()
            if ignoreregex:
                ignoreregex = re.compile(ignoreregex)
                # match against version of path with forward slashes because making a regex with backslashes is a pain and not cross-platform
                makedependsoutput = [
                    path for path in makedependsoutput
                    if not ignoreregex.match(path.replace(os.sep, '/'))
                ]

            # find the newest time from these files; if this is same as previous makedepends, won't do anything
            for path in makedependsoutput:
                if path in alreadychecked: continue
                mtime = cached_getmtime(path)
                if mtime > newestTime: newestFile, newestTime = path, mtime

            # write out new makedepends file for next time
            mkdir(os.path.dirname(makedependsfile))
            assert '\n' not in makedependsoptions, makedependsoptions  # sanity check
            with io.open(makedependsfile, 'w', encoding='utf-8') as f:
                f.write(makedependsoptions)
                f.write('\n')
                for path in makedependsoutput:
                    f.write('%s\n' % path)

        # endif runmakedepends

        # include the newest timestamp as an implicit input, so that we'll rebuild if any include files have changed
        # no need to log this, as targetwrapper already logs differences in implicit inputs
        if newestFile is not None:
            newestDateTime = datetime.datetime.fromtimestamp(newestTime)
            r.append('newest dependency was modified at %s.%03d: %s' %
                     (newestDateTime.strftime('%a %Y-%m-%d %H:%M:%S'),
                      newestDateTime.microsecond / 1000,
                      os.path.normcase(newestFile)))

        # this should usually be pretty quick, so if it takes a while it may indicate a real build file mistake
        if time.time() - startt > 5:
            self.log.warn(
                'C/C++ dependency generation took a long time: %0.1f s to evaluate %s',
                time.time() - startt, self)

        return r
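The makedepends cache file written above has a simple line-oriented format: the first line records the options string used to generate it (so any change to flags or filters invalidates the cache), and each following line is one transitive dependency path. A hypothetical example of its contents:

upToDateCheckIgnoreRegex='', upToDateCheckIgnoreSystemHeaders=False, flags=['-O2']
/home/build/include/foo.h
/home/build/include/bar.h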
Example #17
    def _expand_deps(self):
        """
			Run over the list of targets to build, expanding it with all the dependencies and processing them
			for replacements and expansions. Also builds up the initial leaf set self.leaves and all the rdepends of each
			target (self.pending), along with the total number of dependencies each target has.
		"""
        self.index = 0  # identifies thread pool item n out of total=len(self.pending)
        self.total = len(self.pending)  # can increase during this phase
        pending = queue.Queue()
        for i in self.pending:
            pending.put_nowait((0, i))

        depcheckingworkers = 1  # more threads actually make this slower
        pool = ThreadPool('dependencychecking',
                          depcheckingworkers,
                          pending,
                          self._deps_target,
                          self.utilisation,
                          profile=self.options["profile"])

        pool.start()

        pool.wait()

        pool.stop()

        # these are GIL-intensive operations
        if (not self.options['clean']) and len(pool.errors) == 0:
            for targetwrapper, _ in self.selectedtargetwrappers:
                targetwrapper.updatePriority()
                targetwrapper.checkForNonTargetDependenciesUnderOutputDirs()

        if not pool.errors:
            # this is the only place we can list all targets since only here are final priorities known
            # printing the deps in one place is important for debugging missing dependencies etc
            # might move this to a separate file at some point
            self.selectedtargetwrappers.sort(
                key=lambda targetwrapper_targetdeps:
                (-targetwrapper_targetdeps[0].effectivePriority,
                 targetwrapper_targetdeps[0].name))
            targetinfodir = mkdir(
                self.context.expandPropertyValues(
                    '${BUILD_WORK_DIR}/targets/'))
            with io.open(targetinfodir + '/xpybuild-version.properties',
                         'w',
                         encoding='utf-8') as f:
                # write this file in case we want to avoid mixed xpybuild versions in working dir
                f.write('xpybuildVersion=%s\n' % XPYBUILD_VERSION)
                f.write(
                    'workDirVersion=%d\n' % 1
                )  # bump this when we make a breaking change that should force a rebuild

            with io.open(targetinfodir + '/selected-targets.txt',
                         'w',
                         encoding='utf-8') as f:
                f.write('%d targets selected for building:\n' %
                        (len(self.selectedtargetwrappers)))
                for targetwrapper, targetdeps in self.selectedtargetwrappers:
                    f.write('- Target %s with priority %s depends on: %s\n\n' %
                            (targetwrapper, targetwrapper.effectivePriority,
                             ', '.join(str(d) for d in targetdeps)
                             if len(targetdeps) > 0 else '<no dependencies>'))

        #assert (not pool.errors) or (self.total == self.index), (self.total, self.index) #disabled because assertion triggers during ctrl+c
        return pool.errors
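The selected-targets.txt file written above is purely diagnostic. A hypothetical sample of its contents (the exact rendering of each target depends on its string form):

2 targets selected for building:
- Target <Jar> ${OUTPUT_DIR}/myapp.jar with priority 0.0 depends on: ${OUTPUT_DIR}/classes/

- Target <Copy> ${OUTPUT_DIR}/docs/ with priority 0.0 depends on: <no dependencies>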
Example #18
File: main.py Project: xpybuild/xpybuild
def main(args):
	""" Command line argument parser. 
	"""
	
	try:
		usage = [
###############################################################################
'',
'eXtensible Python-based Build System %s on Python %s.%s.%s'% (XPYBUILD_VERSION, sys.version_info[0], sys.version_info[1], sys.version_info[2]),
'',
'xpybuild.py [operation]? [options]* [property=value]* [-x] [target|tag|regex]* ', 
'',
'A regex containing * can be used instead of a target, but only if it uniquely ', 
'identifies a single target. ',
'',
'Special pseudo-tags:',
'  full                       Include all targets for a full build (the default)',
'',
'Special properties:',
'  OUTPUT_DIR=output          The main directory output will be written to',
'  BUILD_MODE=release         Specifies release, debug (or user-defined) mode',
'  BUILD_NUMBER=n             Build number string, for reporting and use by build',
'',
'Operations: ',
###############################################################################

'  (if none is specified, the default operation is a normal build)',
'      --clean                Clean specified targets incl all deps',
'      --rebuild              Clean specified targets incl all deps then build',
'      --rebuild-ignore-deps  Clean only the specified targets (not deps) then ',
'        (or --rid)           build those targets and any missing dependencies, ',
'                             but not any out-of-date dependencies. This is a ',
'                             fast but less correct way to get a quick ',
'                             incremental build, so use with care. ',
'',
'   -s --search <str>         Show info on targets/tags/properties/options ',
'                             containing the specified substring or regex', 
# hide these from usage (though they still work), as superseded by the more useful "-s" option
#' --ft --find-targets <str>   List targets containing the specified substring', 
#' --ti --target-info <str>    Print details including build file location for ',
#'                             targets containing the specified substring',
'      --targets              List available targets and tags (filtered by any ', 
'                             target or tag names specified on the command line)',
'      --properties           List properties that can be set and their ',
'                             defaults in this build file',
'      --options              List the target options available to build rules ',
'                             and their default values in this build file',
'',
'Options:',
'   -x --exclude <target>     Specifies a target or tag to exclude (unless ',
'                             needed as a dependency of an included target) ',
'',
'   -J --parallel             Build in parallel (this is the default). ',
'                             The number of workers is determined from the ',
'                             `build.workers` build file option or else the ',
'                             number of CPUs and the XPYBUILD_WORKERS_PER_CPU ',
'                             environment variable (default is currently 1.0), ',
'                             with an upper limit for this machine from the ',
'                             XPYBUILD_MAX_WORKERS variable. ',
'   -j --workers <number>     Override the number of worker threads to use for ',
'                             building. Use -j1 for single-threaded. ',
'                             (ignores any environment variables)',
'',
'   -k --keep-going           Continue rather than aborting on errors',
'',
'   -n --dry-run              Don\'t actually build anything, just print',
'                             what would be done (finds missing dependencies)',
'',
' --id --ignore-deps          Skip all dependency/up-to-date checking: only ', 
'                             clean/build targets that do not exist at all ',
'                             (faster builds, but no guarantee of correctness)', 
'',
'   -f --buildfile <file>     Specify the root build file to import ',
'                             (default is ./root.xpybuild.py)',
'',
'   -l --log-level LEVEL      Set the log level to debug/info/critical',
'   -L --logfile <file>       Set the log file location',
'      --timefile <file>      Dump the time for each target in <file> at the',
'                             end of the run',
'      --depgraph <file>      Just resolve dependencies and dump them to <file>',
'      --cpu-stats            Log CPU utilisation stats',
'      --random-priority      Randomizes build order',
'      --verify               Performs additional verifications during the ',
'                             build to help detect bugs in the build files. ',
'                             [verify is currently an experimental feature]',
'      --profile              Profiles all the worker threads',
'   -F --format               Message output format.',
'                             Options:',
] + [
'                                - '+ h for h in _registeredConsoleFormatters
] + [

]
		if reduce(max, list(map(len, usage))) > 80:
			raise Exception('Invalid usage string - all lines must be less than 80 characters')

		# set up defaults
		properties = {} 
		buildOptions = { "keep-going":False, "workers":0, "dry-run":False, 
			"ignore-deps":False, "logCPUUtilisation":False, "profile":False, "verify":False } 
		includedTargets = []
		excludedTargets = []
		task = _TASK_BUILD
		buildFile = os.path.abspath('root.xpybuild.py')
		logLevel = None
		logFile = None
		findTargetsPattern = None
		format = "default"

		opts,targets = getopt.gnu_getopt(args, "knJh?x:j:l:L:f:F:s:", 
			["help","exclude=","parallel","workers=","keep-going",
			"log-level=","logfile=","buildfile=", "dry-run",
			"targets", 'target-info=', 'ti=', "properties", "options", "clean", "rebuild", "rebuild-ignore-deps", "rid", "ignore-deps", "id",
			"format=", "timefile=", "ft=", "find-targets=", "search=", "depgraph=", 'cpu-stats', 'random-priority', 'profile', 'verify'])
		
		for o, a in opts: # option arguments
			o = o.strip('-')
			if o in ["?", "h", "help"]:
				print('\n'.join(usage))
				return 0
			elif o in ["x", "exclude"]:
				excludedTargets.append(a)
			elif o in ["f", "buildfile"]:
				buildFile = os.path.abspath(a)
			elif o in ['targets']:
				task = _TASK_LIST_TARGETS
			elif o in ['find-targets', 'ft']:
				task = _TASK_LIST_FIND_TARGETS
				findTargetsPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['target-info', 'ti']:
				task = _TASK_LIST_TARGET_INFO
				findTargetsPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['search', 's']:
				task = _TASK_LIST_SEARCH
				searchPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['properties']:
				task = _TASK_LIST_PROPERTIES
			elif o in ['options']:
				task = _TASK_LIST_OPTIONS
			elif o in ['J', 'parallel']:
				buildOptions['workers'] = 0
			elif o in ['j', 'workers']:
				buildOptions['workers'] = int(a)
			elif o in ['l', 'log-level']:
				logLevel = getattr(logging, a.upper(), None)
			elif o in ['cpu-stats']:
				buildOptions["logCPUUtilisation"] = True
			elif o in ['random-priority']:
				buildOptions["randomizePriorities"] = True
			elif o in ['L', 'logfile']:
				logFile = a
			elif o in ['F', 'format']:
				format = None
				if a =='xpybuild': a = 'default' # for compatibility
				for h in _registeredConsoleFormatters:
					if h.upper() == a.upper():
						format = h
				if not format:
					print('invalid format "%s"; valid formatters are: %s'%(a, ', '.join(_registeredConsoleFormatters.keys())))
					print('\n'.join(usage))
					return 1
			elif o in ['clean']:
				task = _TASK_CLEAN
				buildOptions['keep-going'] = True
			elif o in ['rebuild']:
				task = _TASK_REBUILD
			elif o in ['rebuild-ignore-deps', 'rid']:
				task = _TASK_REBUILD
				buildOptions['ignore-deps'] = True
			elif o in ['id', 'ignore-deps']:
				buildOptions['ignore-deps'] = True
			elif o in ['k', 'keep-going']:
				buildOptions['keep-going'] = True
			elif o in ['n', 'dry-run']:
				buildOptions['dry-run'] = True
			elif o in ['timefile']:
				buildOptions['timeFile'] = a
			elif o in ['verify']:
				buildOptions['verify'] = True
			elif o in ['profile']:
				buildOptions['profile'] = True
			elif o in ['depgraph']:
				buildOptions['depGraphFile'] = a
			else:
				assert False, "unhandled option: '%s'" % o

		for o in targets: # non-option arguments (i.e. no -- prefix)
			arg = o.strip()
			if arg:
				if '=' in arg:
					properties[arg.split('=')[0].upper()] = arg.split('=')[1]
				elif arg=='all': # pre-4.0 alias for 'full'
					includedTargets.append('full')
				else:
					includedTargets.append(BaseTarget._normalizeTargetName(arg))
			
		# default is 'full' (build everything)
		if (not includedTargets) or includedTargets==['']:
			includedTargets = ['full']
		
	except getopt.error as msg:
		print(msg)
		print("For help use --help")
		return 2
	
	threading.currentThread().setName('main')
	logging.getLogger().setLevel(logLevel or logging.INFO)

	if buildOptions["workers"] < 0: buildOptions["workers"] = 0 # means there's no override
	
	outputBufferingDisabled = buildOptions['workers']==1 # nb: this also affects the .log handler below
	
	# nb: it's possible workers=0 (auto) and will later be set to 1 but doesn't really matter much

	# initialize logging to stdout - minimal output to avoid clutter, but indicate progress
	hdlr = _registeredConsoleFormatters.get(format, None)
	assert hdlr # shouldn't happen
	wrapper = OutputBufferingStreamWrapper(sys.stdout, bufferingDisabled=outputBufferingDisabled)
	# actually instantiate it
	hdlr = hdlr(
		wrapper, 
		buildOptions=buildOptions) 
	# Handler can override wrapper with a preference in either direction
	if hdlr.bufferingDisabled: wrapper.bufferingDisabled = True
	if hdlr.bufferingRequired: wrapper.bufferingDisabled = False
		
	hdlr.setLevel(logLevel or logging.WARNING)
	logging.getLogger().addHandler(hdlr)
	log.info('Build options: %s'%{k:buildOptions[k] for k in buildOptions if k != 'workers'})
	
	stdout = sys.stdout
	
	# redirect to None, to prevent any target code from doing 'print' statements - should always use the logger
	sys.stdout = None

	listen_for_stack_signal() # make USR1 print a python stack trace

	allTargets = ('full' in includedTargets) and not excludedTargets

	try:
		def loadBuildFile():
			init = BuildInitializationContext(properties)
			isRealBuild = (task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD])
			init._defineOption("process.timeout", 600)
			init._defineOption("build.keepGoing", buildOptions["keep-going"])
			
			# 0 means default behaviour
			init._defineOption("build.workers", 0)
			
			init.initializeFromBuildFile(buildFile, isRealBuild=isRealBuild)
			
			# now handle setting real value of workers, starting with value from build file
			workers = int(init._globalOptions.get("build.workers", 0))
			# default value if not specified in build file
			if workers <= 0: 
				workers = multiprocessing.cpu_count() 
			if os.getenv('XPYBUILD_WORKERS_PER_CPU'):
				workers = min(workers, int(round(multiprocessing.cpu_count()  * float(os.getenv('XPYBUILD_WORKERS_PER_CPU')))))
			
			# machine/user-specific env var can cap it
			if os.getenv('XPYBUILD_MAX_WORKERS'):
				workers = min(workers, int(os.getenv('XPYBUILD_MAX_WORKERS')))
			
			# finally an explicit command line --workers takes precedence
			if buildOptions['workers']: workers = buildOptions['workers']
			
			if workers < 1: workers = 1
			
			# finally write the final number of workers where it's available to both scheduler and targets
			buildOptions['workers'] = workers
			init._globalOptions['build.workers'] = workers
			
			init._finalizeGlobalOptions()
			
			return init

		if buildOptions['profile']:
			import cProfile, pstats
			profiler = cProfile.Profile()
			profiler.enable()

		init = loadBuildFile()

		# nb: don't import any modules that might define options (including outputhandler)
		# until the build file is loaded, or we may not have a build context in place yet
		from xpybuild.internal.scheduler import BuildScheduler, logTargetTimes


		if buildOptions['profile']:
			profilepath = 'xpybuild-profile-%s.txt'%'parsing'
			with open(profilepath, 'w') as f:
				p = pstats.Stats(profiler, stream=f)
				p.sort_stats('cumtime').print_stats() # output goes to the stream passed to pstats.Stats above
				p.dump_stats(profilepath.replace('.txt', '')) # also in binary format
				log.critical('=== Wrote Python profiling output to: %s', profilepath)

		def lookupTarget(s):
			tfound = init.targets().get(s,None)
			if not tfound and '*' in s: 
				
				matchregex = s.rstrip('$')+'$'
				try:
					matchregex = re.compile(matchregex, re.IGNORECASE)
				except Exception as e:
					raise BuildException('Invalid target regular expression "%s": %s'%(matchregex, e))
				matches = [t for t in init.targets().values() if matchregex.match(t.name)]
				if len(matches) > 1:
					print('Found multiple targets matching pattern %s:'%(s), file=stdout)
					print(file=stdout)
					for m in matches:
						print(m.name, file=stdout)
					print(file=stdout)
					raise BuildException('Target regex must uniquely identify a single target: %s (use tags to specify multiple related targets)'%s)
				if matches: return matches[0]
				
			if not tfound: raise BuildException('Unknown target name, target regex or tag name: %s'%s)
			return tfound

		# expand tags to targets here, and do include/exclude calculations
		selectedTargets = set() # contains BaseTarget objects
		for t in includedTargets:
			tlist = init.tags().get(t,None)
			if tlist:
				selectedTargets.update(tlist)
			else:
				selectedTargets.add(lookupTarget(t))
		for t in excludedTargets:
			tlist = init.tags().get(t,None)
			if tlist:
				selectedTargets.difference_update(tlist)
			else:
				selectedTargets.discard(lookupTarget(t))

		# convert findTargetsPattern to list
		if findTargetsPattern:
			findTargetsPattern = findTargetsPattern.lower()
			# sort matches at start of path first, then anywhere in name, finally anywhere in type
			# make 'all'/'full' into a special case that maps to all *selected* targets 
			# (could be different to the 'all' tag if extra args were specified, but this is unlikely and kind of useful)
			findTargetsList = [t for t in sorted(
				 init.targets().values() if allTargets else selectedTargets, key=lambda t:(
					'/'+findTargetsPattern.lower() not in t.name.lower(), 
					findTargetsPattern.lower() not in t.name.lower(), 
					findTargetsPattern.lower() not in t.type.lower(), 
					t.name
					)) if findTargetsPattern in t.name.lower() or findTargetsPattern in t.type.lower() 
						or findTargetsPattern == 'full' or findTargetsPattern == 'all']

		if task == _TASK_LIST_PROPERTIES:
			p = init.getProperties()
			print("Properties: ", file=stdout)
			pad = max(list(map(len, p.keys())))
			if pad > 30: pad = 0
			for k in sorted(p.keys()):
				print(('%'+str(pad)+'s = %s') % (k, p[k]), file=stdout)
				
		elif task == _TASK_LIST_OPTIONS:
			options = init.mergeOptions(None)
			pad = max(list(map(len, options.keys())))
			if pad > 30: pad = 0
			for k in sorted(options.keys()):
				print(("%"+str(pad)+"s = %s") % (k, options[k]), file=stdout)

		elif task == _TASK_LIST_TARGETS:
			if len(init.targets())-len(selectedTargets) > 0:
				print("%d target(s) excluded (unless required as dependencies): "%(len(init.targets())-len(selectedTargets)), file=stdout)
				for t in sorted(['   %-15s %s'%('<'+t.type+'>', t.name) for t in init.targets().values() if t not in selectedTargets]):
					print(t, file=stdout)
				print(file=stdout)
				
			print("%d target(s) included: "%(len(selectedTargets)), file=stdout)
			for t in sorted(['   %-15s %s'%('<'+t.type+'>', t.name) for t in selectedTargets]):
				print(t, file=stdout)
			print(file=stdout)

			if allTargets:
				print("%d tag(s) are defined: "%(len(init.tags())), file=stdout)
				for t in sorted(['   %-15s (%d targets)'%(t, len(init.tags()[t])) for t in init.tags()]):
					print(t, file=stdout)

		elif task == _TASK_LIST_TARGET_INFO:
			if findTargetsList == '*': findTargetsList = init.targets().values()
			for t in sorted(findTargetsList, key=lambda t:(t.type+' '+t.name)):
				print('- %s priority: %s, tags: %s, location: \n   %s'%(t, t.getPriority(), t.getTags(), t.location), file=stdout)

		elif task == _TASK_LIST_FIND_TARGETS:
			# sort matches at start of path first, then anywhere in name, finally anywhere in type
			for t in findTargetsList:
				# this must be very easy to copy+paste, so don't put anything else on the line at all
				print('%s'%(t.name), file=stdout)

		elif task == _TASK_LIST_SEARCH:
			def showPatternMatches(x): # case sensitive is fine (and probably useful)
				if searchPattern.replace('\\', '/') in x.replace('\\','/'): return True # basic substring check (with path normalization)
				if '*' in searchPattern or '?' in searchPattern or '[' in searchPattern: # probably a regex
					if re.search(searchPattern, x): return True
				return False

			for t in init.targets().values():
				t._resolveTargetPath(init)

			print('', file=stdout)

			tagMatches = [t for t in init.tags() if showPatternMatches(t)]
			if tagMatches: 
				print ('%d matching tags:'%len(tagMatches), file=stdout)
				for t in sorted(tagMatches):
					print(t, file=stdout)
				print('', file=stdout)
				
			targetMatches = [t for t in init.targets().values() if showPatternMatches(t.name) or showPatternMatches(t.path)]
			if targetMatches: 
				print ('%d matching targets:'%len(targetMatches), file=stdout)
				for t in sorted(targetMatches, key=lambda t:(t.type+' '+t.name)):
					print('- %s priority: %s, tags: [%s]\n   output:  %s\n   defined:  %s'%(t, t.getPriority(), ' '.join(sorted(t.getTags())) or 'none', os.path.relpath(t.path), t.location), file=stdout)
				print('', file=stdout)

			propMatches = {key:value for (key,value) in init.getProperties().items() if showPatternMatches(key)}
			if propMatches:
				print('%d matching properties:'%len(propMatches), file=stdout)
				pad = max(list(map(len, propMatches.keys())))
				for k in sorted(propMatches.keys()):
					print(('%'+str(pad)+'s = %s') % (k, propMatches[k]), file=stdout)
					if init._propertyLocations[k]: # don't do this for built-in property like BUILD_MODE
						print(('%'+str(pad)+'s   (defined: %s)') % ('', init._propertyLocations[k]), file=stdout)
					
			options = init.mergeOptions(None)
			optionMatches = {key:value for (key,value) in options.items() if showPatternMatches(key)}
			if optionMatches:
				print('%d matching options:'%len(optionMatches), file=stdout)
				pad = max(list(map(len, optionMatches.keys())))
				for k in sorted(optionMatches.keys()):
					print(('%'+str(pad)+'s = %s') % (k, optionMatches[k]), file=stdout)

				
		elif task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD]:
			
			if not logFile:
				if allTargets:
					buildtag = None
				else:
					buildtag = 'custom'
				logFile = _maybeCustomizeLogFilename(init.getPropertyValue('LOG_FILE'), 
					buildtag,
					task==_TASK_CLEAN)
			logFile = os.path.abspath(logFile)

			logdir = os.path.dirname(logFile)
			if logdir and not os.path.exists(logdir): mkdir(logdir)
			log.critical('Writing build log to: %s', os.path.abspath(logFile))
			
			# also buffer the .log file, since it's just a lot harder to read when multiple target lines are all jumbled up; 
			# we have an undocumented env var for disabling this in case of debugging
			if os.getenv('XPYBUILD_LOGFILE_OUTPUT_BUFFERING_DISABLED','')=='true': outputBufferingDisabled = True
			logFileStream = OutputBufferingStreamWrapper(open(logFile, 'w', encoding='UTF-8'), bufferingDisabled=outputBufferingDisabled)
			hdlr = logging.StreamHandler(logFileStream)
			hdlr.setFormatter(logging.Formatter('%(asctime)s %(relativeCreated)05d %(levelname)-8s [%(threadName)s %(thread)5d] %(name)-10s - %(message)s', None))
			hdlr.setLevel(logLevel or logging.INFO)
			logging.getLogger().addHandler(hdlr)

			log.info('Using xpybuild %s from %s on Python %s.%s.%s', XPYBUILD_VERSION, os.path.normpath(os.path.dirname(__file__)), sys.version_info[0], sys.version_info[1], sys.version_info[2])
			log.info('Using build options: %s (logfile target outputBuffering=%s, stdout target outputBuffering=%s)', buildOptions, not outputBufferingDisabled, not wrapper.bufferingDisabled)
			
			try:
				# sometimes useful to have this info available
				import socket, getpass
				log.info('Build running on %s as user %s', socket.gethostname(), getpass.getuser())
			except Exception as e:
				log.info('Failed to get host/user: %s', e)

			log.info('Default encoding for subprocesses assumed to be: %s (stdout=%s, preferred=%s)', 
				DEFAULT_PROCESS_ENCODING, stdout.encoding, locale.getpreferredencoding())
			
			def lowerCurrentProcessPriority():
				if xpybuild.buildcommon.IS_WINDOWS:
					import win32process, win32api,win32con
					win32process.SetPriorityClass(win32api.GetCurrentProcess(), win32process.BELOW_NORMAL_PRIORITY_CLASS)
				else:
					# on unix, people may run nice before executing the process, so 
					# only change the priority unilaterally if it's currently at its 
					# default value
					if os.nice(0) == 0:
						os.nice(1) # change to 1 below the current level

			try:
				# if possible, set priority of builds to below normal by default, 
				# to avoid starving machines (e.g. on windows) of resources 
				# that should be used for interactive processes
				if os.getenv('XPYBUILD_DISABLE_PRIORITY_CHANGE','') != 'true':
					lowerCurrentProcessPriority()
					log.info('Successfully changed process priority to below normal')
			except Exception as e:
				log.warning('Failed to lower current process priority: %s'%e)
			
			if buildOptions['ignore-deps']:
				log.warning('The ignore-deps option is enabled: dependency graph will be ignored for all targets that already exist on disk, so correctness is not guaranteed')
			
			for (k,v) in sorted(init.getProperties().items()):
				log.info('Setting property %s=%s', k, v)

			try:
				DATE_TIME_FORMAT = "%a %Y-%m-%d %H:%M:%S %Z"
				
				errorsList = []
				if task in [_TASK_CLEAN, _TASK_REBUILD]:
					startTime = time.time()
					log.critical('Starting "%s" clean "%s" at %s', init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )))
					
					cleanBuildOptions = buildOptions.copy()
					cleanBuildOptions['clean'] = True
					if allTargets: cleanBuildOptions['ignore-deps'] = True
					scheduler = BuildScheduler(init, selectedTargets, cleanBuildOptions)
					errorsList, targetsBuilt, targetsCompleted, totalTargets = scheduler.run()
		
					if allTargets and not cleanBuildOptions['dry-run']: # special-case this common case
						for dir in init.getOutputDirs():
							deleteDir(dir)
		
					log.critical('Completed "%s" clean "%s" at %s after %s\n', init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), formatTimePeriod(time.time()-startTime))
						
					if errorsList: 
						log.critical('XPYBUILD FAILED: %d error(s): \n   %s', len(errorsList), '\n   '.join(sorted(errorsList)))
						return 3
				
				if task == _TASK_REBUILD:
					# we must reload the build file here, as it's the only way of flushing out 
					# cached data (especially in PathSets) that may have changed as a 
					# result of the clean
					init = loadBuildFile()
				
				if task in [_TASK_BUILD, _TASK_REBUILD] and not errorsList:

					for cb in init.getPreBuildCallbacks():
						try:
							cb(BuildContext(init))
						except BuildException as be:
							log.error("Pre-build check failed: %s", be)
							return 7

					buildtype = 'incremental' if any(os.path.exists(dir) for dir in init.getOutputDirs()) else 'fresh'
					if not buildOptions['dry-run']:
						for dir in init.getOutputDirs():
							log.info('Creating output directory: %s', dir)
							mkdir(dir)
					
					startTime = time.time()
					log.critical('Starting %s "%s" build "%s" at %s using %d workers', buildtype, 
						init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), 
						buildOptions['workers']
						)
					
					buildOptions['clean'] = False
					scheduler = BuildScheduler(init, selectedTargets, buildOptions)
					errorsList, targetsBuilt, targetsCompleted, totalTargets = scheduler.run()
					log.critical('Completed %s "%s" build "%s" at %s after %s\n', buildtype, init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), formatTimePeriod(time.time()-startTime))
					if 'timeFile' in buildOptions:
						logTargetTimes(buildOptions['timeFile'], scheduler, init)
	
				if errorsList: 
					# heuristically: it's useful to have them in order of failure when a small number, but if there are 
					# lots then it's too hard to read and better to sort, so similar ones are together
					if len(errorsList)>=10: errorsList.sort()
						
					log.critical('*** XPYBUILD FAILED: %d error(s) (aborted with %d targets outstanding): \n   %s', len(errorsList), totalTargets-targetsCompleted, '\n   '.join(errorsList))
					return 4
				else:
					# using *** here means we get a valid final progress message
					log.critical('*** XPYBUILD SUCCEEDED: %s built (%d up-to-date)', targetsBuilt if targetsBuilt else '<NO TARGETS>', (totalTargets-targetsBuilt))
					return 0
			finally:
				publishArtifact('Xpybuild logfile', logFile)
		else:
			raise Exception('Task type not implemented yet - '+task) # should not happen
		
	except BuildException as e:
		# hopefully we don't end up here very often
		log.error('*** XPYBUILD FAILED: %s', e.toMultiLineString(None))
		return 5

	except Exception as e:
		log.exception('*** XPYBUILD FAILED: ')
		return 6
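Since main() takes an argv-style list (without the program name), some illustrative invocations can be expressed directly as calls; the target and property names are hypothetical:

# hypothetical invocations of the parser above
main([])                                               # full build, default parallelism
main(['--rebuild', '-j4', 'OUTPUT_DIR=out', 'myapp'])  # clean+build one target with 4 workers
main(['-s', 'myapp'])                                  # search targets/tags/properties/options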
Example #19
def javac(output, inputs, classpath, options, logbasename, targetname, workDir):
	""" Compile some java files to class files.

	Will raise BuildException if compilation fails.

	@param output: path to a directory in which to put the class files (will be created)

	@param inputs: list of paths (.java files) to be compiled

	@param classpath: classpath to compile with, as a string

	@param options: options map. javac.options is a list of additional arguments, javac.source is the source version, 
	javac.target is the target version

	@param logbasename: absolute, expanded, path to a directory and filename prefix 
		to use for files such as .err, .out, etc files

	@param targetname: to log appropriate error messages
	
	@param workDir: where temporary files are stored.  

	"""

	assert logbasename and '$' not in logbasename
	logbasename = os.path.normpath(logbasename)
	# make the output directory
	if not os.path.exists(output): mkdir(output)
	# location of javac
	if options['java.home']:
		javacpath = os.path.join(options['java.home'], "bin/javac")
	else:
		javacpath = "javac" # just get it from the path
	# store the list of files in a temporary file, then build from that.
	mkdir(workDir)
	
	argsfile = os.path.join(workDir, "javac_args.txt")
	
	# build up the arguments
	args = ["-d", output]
	if options["javac.source"]: args.extend(["-source", options["javac.source"]])
	if options["javac.target"]: args.extend(["-target", options["javac.target"]])
	if options["javac.encoding"]: args.extend(["-encoding", options["javac.encoding"]])
	if options["javac.debug"]:
		args.append('-g')
	if options['javac.warningsAsErrors']:
		args.append('-Werror')
	# TODO: should add -Xlint options here I think
		
	args.extend(getStringList(options['javac.options']))
	if classpath: args.extend(['-cp', classpath])
	args.extend([x for x in inputs if x.endswith('.java')]) # automatically filter out non-java files

	with openForWrite(argsfile, 'w', encoding=locale.getpreferredencoding()) as f:
		for a in args:
			f.write('"%s"'%a.replace('\\','\\\\')+'\n')

	success=False
	try:

		log.info('Executing javac for %s, writing output to %s: %s', targetname, logbasename+'.out', ''.join(['\n\t"%s"'%x for x in [javacpath]+args]))
		
		# make sure we have no old ones hanging around still
		try:
			deleteFile(logbasename+'-errors.txt', allowRetry=True)
			deleteFile(logbasename+'-warnings.txt', allowRetry=True)
			deleteFile(logbasename+'.out', allowRetry=True)
		except Exception as e:
			log.info('Cleaning up file failed: %s' % e)
		
		outputHandler = options.get('javac.outputHandlerFactory', JavacProcessOutputHandler)(targetname, options=options)
		if hasattr(outputHandler, 'setJavacLogBasename'):
			outputHandler.setJavacLogBasename(logbasename)
		call([javacpath, "@%s" % argsfile], outputHandler=outputHandler, outputEncoding='UTF-8', cwd=output, timeout=options['process.timeout'])
		if (not os.listdir(output)): # unlikely, but useful failsafe
			raise EnvironmentError('javac command failed to create any target files (but returned no error code); see output at "%s"'%(logbasename+'.out'))
		success = True
	finally:
		if not success and classpath:
			log.info('Classpath for failed javac was: \n   %s', '\n   '.join(classpath.split(os.pathsep)))
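A hedged usage sketch; the options map is hypothetical and shows only the keys javac() reads directly (a real build supplies these from the option registry):

# hypothetical usage
options = {
    'java.home': '', 'javac.source': '', 'javac.target': '', 'javac.encoding': '',
    'javac.debug': False, 'javac.warningsAsErrors': False, 'javac.options': [],
    'process.timeout': 600,
}
javac('build/classes', ['src/com/example/Foo.java'], 'lib/dep.jar', options,
      logbasename='build/javac-logs/myjar', targetname='myjar', workDir='build/work')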
Example #20
File: java.py Project: xpybuild/xpybuild
	def run(self, context):
		options = self.options

		# make sure temp dir exists
		mkdir(self.workDir)

		classes = os.path.join(self.workDir, "classes") # output dir for classes
		
		# create the classpath, sorting within PathSet (for determinism), but retaining original order of 
		# PathSet elements in the list
		classpath = os.pathsep.join(self.classpath.resolve(context)) 

		# compile everything
		mkdir(classes) # (need this for assembling other files to package later on, even if we don't do any javac)
		if self.compile:
			mkdir(self.getOption('javac.logs'))
			javac(classes, self.compile.resolve(context), classpath, options=options, logbasename=options.get('javac.logs')+'/'+targetNameToUniqueId(self.name), targetname=self.name, workDir=self.workDir)

		manifest = os.path.join(self.workDir, "MANIFEST.MF") # manifest file
	
		if isinstance(self.manifest, str):
			manifest = context.getFullPath(self.manifest, self.baseDir)
		elif self.manifest is None:
			manifest = None
		else: # generate one
			# rewrite property values in the manifest
			manifest_entries = {}
			for i in self.manifest:
				manifest_entries[i] = context.expandPropertyValues(self.manifest[i])
	
			# determine classpath for manifest
			classpath_entries = []
			
			if "Class-path" not in manifest_entries: # assuming it wasn't hardcoded, set it here
				for src, dest in self.classpath.resolveWithDestinations(context):
					# we definitely do want to support use of ".." in destinations here, it can be very useful
					classpath_entries.append(dest)
				assert isinstance(options['jar.manifest.classpathAppend'], list), options['jar.manifest.classpathAppend'] # must not be a string
				classpath_entries.extend(options['jar.manifest.classpathAppend'] or [])
				
				# need to always use / not \ for these to be valid
				classpath_entries = [p.replace(os.path.sep, '/').replace('\\', '/') for p in classpath_entries if p]
				
				if classpath_entries:
					manifest_entries["Class-path"] = " ".join(classpath_entries) # include the classpath from here
			if not manifest_entries.get('Class-path'): # suppress this element entirely if not needed, otherwise there would be no way to have an empty classpath
				manifest_entries.pop('Class-path','')
			
			# create the manifest file
			create_manifest(manifest, manifest_entries, options=options)

		# copy in the additional things to include
		for (src, dest) in self.package.resolveWithDestinations(context):
			if '..' in dest: raise Exception('This target does not permit packaged destination paths to contain ".." relative path expressions')
			mkdir(os.path.dirname(os.path.join(classes, dest)))
			destpath = normLongPath(classes+'/'+dest)
			srcpath = normLongPath(src)

			if os.path.isdir(srcpath):
				mkdir(destpath)
			else:
				with open(srcpath, 'rb') as s:
					with openForWrite(destpath, 'wb') as d:
						d.write(s.read())

		# create the jar
		jar(self.path, manifest, classes, options=options, preserveManifestFormatting=self.preserveManifestFormatting, 
			outputHandler=ProcessOutputHandler.create('jar', treatStdErrAsErrors=False,options=options))
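For context, a build file would declare a target like the above roughly as follows. This is a hedged sketch: the module paths and keyword names are inferred from the attributes this run method reads (compile, classpath, manifest), not quoted from the xpybuild documentation.

# hypothetical build file fragment
from xpybuild.targets.java import Jar
from xpybuild.pathsets import FindPaths

Jar('${OUTPUT_DIR}/myapp.jar',
	compile=FindPaths('./src/', includes='**/*.java'),
	classpath=['${OUTPUT_DIR}/lib/util.jar'],     # becomes the manifest Class-path unless one is hardcoded
	manifest={'Main-Class': 'com.example.Main'},  # values are expanded with expandPropertyValues at build time
)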
Example #21
File: java.py Project: xpybuild/xpybuild
	def run(self, context):
		self.keystore = context.expandPropertyValues(self.keystore)
		options = self.options

		mkdir(self.path)
		for src, dest in self.jars.resolveWithDestinations(context):
			if '..' in dest:
				# to avoid people abusing this to copy files outside the dest directory!
				raise Exception('This target does not permit destination paths to contain ".." relative path expressions')

			try:
				with open(src, 'rb') as s:
					with openForWrite(os.path.join(self.path, dest), 'wb') as d:
						d.write(s.read())

				shutil.copystat(src, os.path.join(self.path, dest))
				
				# When we re-jar with the user-specified manifest entries, jar will complain
				# about duplicate attributes if the original MANIFEST.MF already has those entries
				# (this happens with recent versions of SL, where Application-Name, Permissions etc.
				# were already present).
				#
				# The block of code below first extracts the original MANIFEST.MF from the source
				# jar file and reads all its manifest entries into a list. When constructing the new
				# manifest entries, it only adds an entry if the old MANIFEST.MF does not already
				# define it, which avoids the duplicate-attribute error.
			
				if self.manifestDefaults:
					
					lines = []
					
					# read each line of MANIFEST.MF of the original jar and put them in lines
					with zipfile.ZipFile(src, 'r') as zf:
						lst = zf.infolist()
						for zi in lst:
							fn = zi.filename
							if fn.lower().endswith('manifest.mf'):
								try:
									manifest_txt = zf.read(zi.filename).decode('utf-8', errors='strict')
								except Exception as e:
									raise BuildException('Failed reading the manifest file %s with exception: %s' % (fn, e))

								# if we have all manifest text, parse and save each line
								if manifest_txt:
									# line endings may be CRLF, LF or CR, hence the normalization below
									lines = manifest_txt.replace('\r\n', '\n').replace('\r','\n').split('\n')
										
								# done
								break
						
					
					original_entries = collections.OrderedDict()  # to ensure we don't overwrite/duplicate these
					# populate the manifest_entries with original values from original manifest
					for l in lines:
						if ':' in l and not l.startswith(' '): # ignore continuation lines etc because keys are all we care about
							key,value = l.split(':', 1)
							original_entries[key] = value.strip()
					
					# build up a list of the new manifest entries (will be merged into any existing manifest by jar)
					manifest_entries = collections.OrderedDict()
					for i in self.manifestDefaults:
						# if entry isn't there yet, add to the list
						if i not in original_entries:
							manifest_entries[i] = context.expandPropertyValues(self.manifestDefaults[i])
		
					# create the manifest file
					# we want to add the manifest entries explicitly specified here but 
					# NOT the 'default' manifest entries we usually add, since these 
					# are likely to have been set already, and we do not want duplicates
					mkdir(self.workDir)
					manifest = os.path.join(self.workDir, "MANIFEST.MF") # manifest file

					options = dict(options)
					options['jar.manifest.defaults'] = {}
					create_manifest(manifest, manifest_entries, options)
	
					# update the EXISTING jar file with the new manifest entries, which will be merged into 
					# existing manifest by the jar tool
					jar(os.path.join(self.path, dest), manifest, None, options, update=True)
	
				signjar(os.path.join(self.path, dest), self.keystore, options, alias=self.alias, storepass=self.storepass, 
					outputHandler=ProcessOutputHandler.create('signjars', treatStdErrAsErrors=False, options=options))
			except BuildException as e:
				raise BuildException('Error processing %s: %s'%(os.path.basename(dest), e))
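The duplicate-attribute guard above reduces to a key comparison between the jar's existing manifest and the configured defaults. A minimal standalone sketch of that step (the sample manifest text and defaults are made up):

import collections

manifest_txt = 'Manifest-Version: 1.0\r\nApplication-Name: Demo\r\n'

# collect the keys already present, ignoring continuation lines (which start with a space)
original_entries = collections.OrderedDict()
for l in manifest_txt.replace('\r\n', '\n').replace('\r', '\n').split('\n'):
	if ':' in l and not l.startswith(' '):
		key, value = l.split(':', 1)
		original_entries[key] = value.strip()

defaults = {'Application-Name': 'Demo', 'Permissions': 'all-permissions'}
# only add entries the original manifest does not define, avoiding jar's duplicate-attribute complaint
new_entries = {k: v for k, v in defaults.items() if k not in original_entries}
assert new_entries == {'Permissions': 'all-permissions'}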
Example #22
    def run(self, context):
        mkdir(os.path.dirname(self.path))
        uri = context.expandPropertyValues(self.uri)
        urllib.request.urlretrieve(uri, self.path)
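urllib.request.urlretrieve is documented as a legacy interface that may be deprecated in future; an equivalent download using the supported urlopen API would look like this sketch (the function name is illustrative):

import shutil, urllib.request

def download(uri, dest):
    # stream the response body to disk instead of buffering it all in memory
    with urllib.request.urlopen(uri) as response, open(dest, 'wb') as f:
        shutil.copyfileobj(response, f)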
Example #23
    def run(self, context):
        self.log.info("Copying %s to %s", self.src, self.path)

        src = self.src.resolveWithDestinations(
            context)  # a list of (srcAbsolute, destRelative) tuples

        symlinks = self.options['Copy.symlinks']
        if isinstance(symlinks, str): symlinks = symlinks.lower() == 'true'
        assert symlinks in [True, False], repr(symlinks)

        # implicitly ensure parent of target exists, to keep things simple

        copied = 0
        if not isDirPath(self.name):
            # it's a simple file operation.
            if len(src) != 1:
                raise BuildException(
                    'Copy destination must be a directory (ending with "/") when multiple sources are specified (not: %s)'
                    % src)
            src, mappedDest = src[0]
            if isDirPath(src):
                raise BuildException(
                    'Copy source must be files (or PathSets) not directories: %s'
                    % src)
            mkdir(os.path.dirname(self.path))
            self._copyFile(
                context, src, self.path
            )  # we essentially have to ignore mappedDest here, since the target path already fully defines it
            if self.mode:
                os.chmod(self.path, self.mode)
            copied += 1
        else:
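            # remember the last directory created so repeated files in the same directory skip redundant mkdir calls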
            lastDirCreated = None

            for (srcAbs, destRel) in src:
                srcAbs = normLongPath(srcAbs)
                dest = normLongPath(self.path + destRel)
                # there should not be any directories here, only files from pathsets
                if '..' in destRel:
                    # to avoid people abusing this to copy files outside the dest directory!
                    raise Exception(
                        'This target does not permit destination paths to contain ".." relative path expressions'
                    )
                issymlink = symlinks and os.path.islink(srcAbs.rstrip(os.sep))

                if isDirPath(srcAbs) and not issymlink:  # allows creation of empty directories
                    mkdir(dest)
                else:
                    #self.log.debug('Processing %s -> %s i.e. %s', srcAbs, destRel, dest)

                    if issymlink:  # this may be a directory path, and dirname will fail unless we strip off the /
                        dest = dest.rstrip(os.sep)

                    if not lastDirCreated or lastDirCreated != os.path.dirname(
                            dest):
                        lastDirCreated = os.path.dirname(dest)
                        self.log.debug('Creating intermediate dir %s',
                                       lastDirCreated)
                        mkdir(lastDirCreated)

                    try:
                        if issymlink:
                            os.symlink(os.readlink(srcAbs.rstrip(os.sep)),
                                       dest)
                        else:
                            self._copyFile(context, srcAbs, dest)
                            if self.mode:
                                os.chmod(dest, self.mode)
                    except Exception as e:
                        raise BuildException(
                            'Error copying from "%s" to "%s"' % (srcAbs, dest),
                            causedBy=True)

                    copied += 1

        self.log.info('Copied %d file(s) to %s', copied, self.path)
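Because the code above branches on isDirPath(self.name), the trailing "/" on the target path is what selects single-file versus directory mode. A hedged usage sketch (the module paths and constructor form are assumptions based on the attributes this method reads, not quoted from the docs):

# hypothetical build file fragment
from xpybuild.targets.copy import Copy
from xpybuild.pathsets import FindPaths

Copy('${OUTPUT_DIR}/readme.txt', './readme.txt')             # no trailing "/": single-file copy, exactly one source
Copy('${OUTPUT_DIR}/resources/', FindPaths('./resources/'))  # trailing "/": directory copy, destinations come from the PathSet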
Example #24
    def run(self, context):
        if self.cwd: self.cwd = context.getFullPath(self.cwd, self.baseDir)
        if isDirPath(self.path):
            mkdir(self.path)
            cwd = self.cwd or self.path
        else:
            mkdir(os.path.dirname(self.path))
            cwd = self.cwd or self.workDir
        mkdir(self.workDir)

        commands = self._resolveCommands(context)
        assert len(
            commands) > 0, 'No commands were specified to run in this target!'
        if len(commands) > 1:
            assert not (
                self.redirectStdOutToTarget or self.stdout or self.stderr
            ), 'Invalid argument was specified for multiple commands mode'

        cmdindex = 0
        for cmd in commands:
            cmdindex += 1
            # this location is a lot easier to find than the target's workdir
            logbasename = os.path.normpath(
                context.getPropertyValue('BUILD_WORK_DIR') +
                '/CustomCommandOutput/' + os.path.basename(cmd[0]) + "." +
                targetNameToUniqueId(self.name))
            if cmdindex > 1:
                logbasename = logbasename + ".%d" % cmdindex  # make this unique

            cmdDisplaySuffix = ' #%d' % (cmdindex) if len(commands) > 1 else ''

            stdoutPath = context.getFullPath(
                self.path if self.redirectStdOutToTarget else
                (self.stdout or logbasename + '.out'),
                defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/')
            stderrPath = context.getFullPath(
                self.stderr or logbasename + '.err',
                defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/')

            self.log.info('Building %s by executing command%s: %s', self.name,
                          cmdDisplaySuffix,
                          ''.join(['\n\t"%s"' % x for x in cmd]))
            if self.cwd and cmdindex == 1:
                self.log.info('    building %s from working directory: %s',
                              self.name, self.cwd)  # only print if overridden
            env = self.env or {}
            if env:
                if callable(env):
                    env = env(context)
                else:
                    env = {
                        k: None if env[k] is None else self._resolveItem(
                            env[k], context)
                        for k in env
                    }
                if cmdindex == 1:
                    self.log.info(
                        '   environment overrides for %s are: %s', self.name,
                        ''.join(['\n\t"%s=%s"' % (k, env[k]) for k in env]))
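            # merge in the parent process environment for any variables not explicitly overridden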
            for k in os.environ:
                if k not in env: env[k] = os.getenv(k)

            # drop entries explicitly set to None, removing those variables from the child environment
            for k in list(env.keys()):
                if env[k] is None:
                    del env[k]
            self.log.info(
                '    output from %s will be written to "%s" and "%s"',
                self.name + cmdDisplaySuffix, stdoutPath, stderrPath)

            if not os.path.exists(cmd[0]) and not (
                    IS_WINDOWS and os.path.exists(cmd[0] + '.exe')):
                raise BuildException(
                    'Cannot run command because the executable does not exist: "%s"'
                    % (cmd[0]),
                    location=self.location)

            encoding = self.options['common.processOutputEncodingDecider'](
                context, cmd[0])
            handler = self.options['CustomCommand.outputHandlerFactory']
            if handler:  # create a new handler for each command
                handler = handler(str(self), options=self.options)

            success = False
            rc = None
            try:
                # maybe send output to a file instead
                mkdir(os.path.dirname(logbasename))
                with open(
                        stderrPath,
                        'wb') as fe:  # can't use openForWrite with subprocess
                    with open(stdoutPath, 'wb') as fo:
                        process = subprocess.Popen(cmd,
                                                   stderr=fe,
                                                   stdout=fo,
                                                   cwd=cwd,
                                                   env=env)

                        rc = _wait_with_timeout(
                            process,
                            '%s(%s)' % (self.name, os.path.basename(cmd[0])),
                            self.options['process.timeout'], False)
                        success = rc == 0

            finally:
                try:
                    if os.path.getsize(stderrPath) == 0 and not self.stderr:
                        deleteFile(stderrPath, allowRetry=True)
                    if not self.redirectStdOutToTarget and os.path.getsize(
                            stdoutPath) == 0 and not self.stdout:
                        deleteFile(stdoutPath, allowRetry=True)
                except Exception as e:
                    # stupid windows, it passes understanding
                    self.log.info(
                        'Failed to delete empty .out/.err files (ignoring error as it is not critical): %s',
                        e)

                #if not os.listdir(self.workDir): deleteDir(self.workDir) # don't leave empty work dirs around

                mainlog = '<command did not write any stdout/stderr>'

                logMethod = self.log.info if success else self.log.error

                if (handler
                        or not self.redirectStdOutToTarget) and os.path.isfile(
                            stdoutPath) and os.path.getsize(stdoutPath) > 0:
                    if handler:
                        with open(stdoutPath,
                                  'r',
                                  encoding=encoding,
                                  errors='replace') as f:
                            for l in f:
                                handler.handleLine(l, isstderr=False)
                    elif os.path.getsize(stdoutPath) < 15 * 1024:
                        with open(stdoutPath, 'r', encoding=encoding,
                                  errors='replace') as f:
                            logMethod('    stdout from %s is: \n%s',
                                      self.name + cmdDisplaySuffix,
                                      f.read().replace('\n', '\n\t'))
                    mainlog = stdoutPath
                    if not success:
                        context.publishArtifact(
                            '%s%s stdout' % (self, cmdDisplaySuffix),
                            stdoutPath)
                if os.path.isfile(
                        stderrPath) and os.path.getsize(stderrPath) > 0:
                    if handler:
                        with open(stderrPath,
                                  'r',
                                  encoding=encoding,
                                  errors='replace') as f:
                            for l in f:
                                handler.handleLine(l, isstderr=True)
                    elif os.path.getsize(stderrPath) < 15 * 1024:
                        with open(stderrPath, 'r', encoding=encoding,
                                  errors='replace') as f:
                            logMethod('    stderr from %s is: \n%s',
                                      self.name + cmdDisplaySuffix,
                                      f.read().replace('\n', '\n\t'))
                    mainlog = stderrPath  # take precedence over stdout
                    if not success:
                        context.publishArtifact(
                            '%s%s stderr' % (self, cmdDisplaySuffix),
                            stderrPath)

            if handler:
                handler.handleEnd(returnCode=rc)
            elif rc is not None and rc != 0:  # ('not handler' is implied: the if-branch above handled that case)

                if IS_WINDOWS:
                    quotearg = lambda c: '"%s"' % c if ' ' in c else c
                else:
                    quotearg = shlex.quote
                # having it in this format makes it easier for people to re-run the command manually
                self.log.info('    full command line is: %s',
                              ' '.join(quotearg(c) for c in cmd))

                raise BuildException(
                    '%s command%s failed with error code %s; see output at "%s" or look under %s'
                    % (os.path.basename(
                        cmd[0]), cmdDisplaySuffix, rc, mainlog, cwd),
                    location=self.location)

        # final sanity check
        if not os.path.exists(self.path):
            raise BuildException(
                '%s returned no error code but did not create the output file/dir; see output at "%s" or look under %s'
                % (self, mainlog, cwd),
                location=self.location)

        if (not isDirPath(self.path)) and (not os.path.isfile(self.path)):
            raise BuildException(
                '%s did not create a file as expected (please check that trailing "/" is used if and only if a directory output is intended)'
                % self,
                location=self.location)
        if isDirPath(self.path) and not os.listdir(self.path):
            raise BuildException('%s created an empty directory' % self,
                                 location=self.location)
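Finally, a hedged sketch of how such a target might be declared; the keyword names are inferred from the attributes this method reads (command, cwd, env, redirectStdOutToTarget), not quoted from the documentation:

# hypothetical build file fragment
from xpybuild.targets.custom import CustomCommand

CustomCommand('${OUTPUT_DIR}/version.txt',
    command=['${GIT_BINARY}', 'describe', '--tags'],
    redirectStdOutToTarget=True,  # the command's stdout becomes the target file itself
    env={'LANG': 'C'},            # merged with the parent environment; a value of None removes that variable
)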