Example #1
    def __init__(self,
                 target,
                 command=None,
                 dependencies=[],
                 cwd=None,
                 redirectStdOutToTarget=False,
                 env=None,
                 stdout=None,
                 stderr=None,
                 commands=None):
        BaseTarget.__init__(self, target, dependencies)

        assert not (command
                    and commands), 'Cannot specify both command= and commands='
        self.command = command
        self.commands = commands
        self.cwd = cwd
        self.deps = PathSet(dependencies)
        self.redirectStdOutToTarget = redirectStdOutToTarget
        if redirectStdOutToTarget and isDirPath(target):
            raise BuildException(
                'Cannot set redirectStdOutToTarget and specify a directory for the target name - please specify a file instead: %s'
                % target)
        self.env = env
        self.stdout, self.stderr = stdout, stderr

        if stdout and redirectStdOutToTarget:
            raise BuildException(
                'Cannot set both redirectStdOutToTarget and stdout')
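A minimal usage sketch for the constructor above. The class name (CommandTarget) and the paths are placeholder assumptions, since the excerpt shows only the __init__ body; the argument shapes follow the signature shown, including the rules that command= and commands= are mutually exclusive and that stdout= cannot be combined with redirectStdOutToTarget.

# hypothetical build-file snippet; the class name is assumed, not shown in the excerpt
CommandTarget('${OUTPUT_DIR}/report.txt',
    command=['scripts/generate-report', '--out', '${OUTPUT_DIR}/report.txt'],
    dependencies=['input-data/'],
    cwd='scripts/',
    stdout='${OUTPUT_DIR}/report.out',   # cannot also set redirectStdOutToTarget=True
    stderr='${OUTPUT_DIR}/report.err')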
Example #2
    def __init__(self,
                 output,
                 compile,
                 main=None,
                 libs=None,
                 flags=None,
                 dependencies=None,
                 resources=None):
        """ 
		@param output: the resulting .exe or .dll
		
		@param compile: the input PathSet, path or list of .cs file(s)
		
		@param main: The main class to execute if an exe is to be built.
		If this is set then an executable will be created.
		Otherwise this target will build a library.

		@param libs: a list of input libraries (or a PathSet)
		"""
        self.compile = FilteredPathSet(_isDotNetFile, PathSet(compile))
        self.main = main
        self.flags = flags or []
        self.libs = PathSet(libs or [])
        self.resources = resources or []
        BaseTarget.__init__(self, output, [
            self.compile, self.libs, [x for (x, y) in self.resources],
            dependencies or []
        ])
        self.tags('c#')
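A usage sketch based on the signature and docstring above; the class name (CSharp) and the file paths are illustrative assumptions. Setting main= produces an executable, omitting it produces a library; resources entries are unpacked as (x, y) pairs, of which only the first element is registered as a dependency.

# hypothetical names; only the constructor signature above is taken from the excerpt
CSharp('${OUTPUT_DIR}/tool.exe',
    compile=['src/Program.cs', 'src/Util.cs'],
    main='MyCompany.Tool.Program',        # main= is set, so an executable is built
    libs=['${OUTPUT_DIR}/common.dll'],
    resources=[('resources/logo.png', 'logo.png')])  # (x, y) pairs; only x is added as a dependency above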
Example #3
	def __init__(self, dest, archives): 
		"""
		@param dest: the output directory (ending with a "/"). Never 
		specify a dest directory that is also written to by another 
		target (e.g. do not specify a build 'output' directory here). 
			
		@param archives: the input archives to be unpacked, which may be any 
		combination of strings, PathSets, FilteredArchiveContents and lists of these. 
		If these PathSets include mapping information, this 
		will be used to define where (under the dest directory) each 
		file from within that archive is copied (but cannot be used to 
		change the archive-relative path of each item). 
		
		For advanced cases, FilteredArchiveContents can be used to provide 
		customized mapping and filtering of the archive contents, 
		including manipulation of the destinations encoded within the 
		archive itself. 
		
		"""
		if not dest.endswith('/'): raise BuildException('Unpack target destination must be a directory (ending with "/"), not: "%s"'%dest)
		
		# NB: we could also support copying non-archived files into the directory in future too
		
		# we should preserve the specified order of archives since it may 
		# affect what happens when they contain the same files and must 
		# overwrite each other
		
		archives = [a if (isinstance(a, BasePathSet) or isinstance(a, FilteredArchiveContents)) else PathSet(a) for a in flatten(archives)]
		
		BaseTarget.__init__(self, dest, [
			(a.getDependency() if isinstance(a, FilteredArchiveContents) else a)
			for a in archives])
		self.archives = archives
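A usage sketch for the unpack target above; the class name (Unpack) is an assumption. The destination must end with "/" and the archives are flattened with their order preserved, so later archives can overwrite files from earlier ones.

# hypothetical class name; dest must end with "/" or a BuildException is raised
Unpack('${OUTPUT_DIR}/thirdparty/',
    ['external/library-1.2.zip',
     '${OUTPUT_DIR}/generated-archive.zip'])   # order preserved; later archives can overwrite earlier ones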
Example #4
    def __init__(self, name, time, dependencies=None):
        """
		name: the output filename
		time: the length of time to sleep (in seconds)
		"""
        BaseTarget.__init__(self, name, dependencies or [])
        self.time = float(time)
Example #5
	def __init__(self, archive, inputs):
		"""
		@param archive: The archive to be created, ending with ``.zip``. 

		@param inputs: The files (usually pathsets) to be included in the archive.

		"""
		self.inputs = PathSet(inputs)
		BaseTarget.__init__(self, archive, self.inputs)
Example #6
    def __init__(self,
                 name,
                 getContents,
                 dependencies=None,
                 mode=None,
                 executable=False,
                 encoding=None,
                 args=None,
                 kwargs=None):
        """
		Example usage:: 
		
			WriteFile('${OUTPUT_DIR}/foo.txt', lambda context: '\\n'.join(['Foo:', context.expandPropertyValues('${FOO}')]))
		
		@param name: the output filename
		
		@param getContents: 
			a unicode character string (which will be subject to expansion), 
			or binary bytes, or a function that accepts a context as input 
			(followed optionally by any specified 'args') and returns
			the string/bytes that should be written to the file, using ``\\n`` for newlines 
			(not ``os.linesep`` - any occurrences of the newline character ``\\n`` in 
			the provided string will be replaced automatically with the 
			OS-specific line separator unless bytes are provided).
			
			The function will be evaluated during the dependency resolution 
			phase (before the build phase commences). 
			
		@param mode: unix permissions to set with chmod on the destination files. 
			If not specified, default mode is used. 
			Ignored on Windows platforms. 
		
		@param executable: set to True to add Unix executable permissions (simpler 
			alternative to setting using mode)
		
		@param encoding: The encoding to use for converting the str to bytes; 
			if not specified the ``common.fileEncodingDecider`` option is used. 

		@param args: optional tuple containing arguments that should be passed to 
			the getContents function, after the context argument (first arg).
		
		@param kwargs: optional dictionary containing kwargs that should be passed 
			to the getContents function. 
		
		@param dependencies: any targets which need to be built in order to run this
			target.
		"""
        BaseTarget.__init__(self, name, dependencies or [])
        self.getContents = getContents
        self.__args = args or ()
        self.__kwargs = kwargs or {}
        self.__resolved = None
        self.__mode = mode
        self.__executable = executable
        self.__encoding = encoding
        self.addHashableImplicitInputOption('common.fileEncodingDecider')
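The docstring's own example passes a lambda; the sketch below (an assumption about typical use, not taken from the library's documentation) shows args=/kwargs= forwarding extra arguments to the getContents function after the context.

# illustrative only; the function signature beyond the leading context argument is chosen here
def makeContents(context, title, footer='generated'):
    return '\n'.join([title, context.expandPropertyValues('${BUILD_NUMBER}'), footer])

WriteFile('${OUTPUT_DIR}/build-info.txt', makeContents,
    args=('Build info:',),             # passed after the context argument
    kwargs={'footer': 'end of file'},  # passed as keyword arguments
    executable=False)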
Example #7
    def __init__(self, bin, objects):
        """
		@param bin: the output library

		@param objects: a (list of) input objects

		"""
        self.objects = PathSet(objects)
        BaseTarget.__init__(self, bin, self.objects)
        self.tags('native')
Example #8
	def internal_clean(self, context):
		"""
			Calls the BaseTarget clean, not the target-specific clean
		"""
		try:
			deleteFile(self.__implicitInputsFile)
		except Exception:
			time.sleep(10.0)
			deleteFile(self.__implicitInputsFile)
		BaseTarget.clean(self, context)
Example #9
	def __init__(self, jar, compile, classpath, manifest, options=None, package=None, preserveManifestFormatting=False):
		self.compile = FilteredPathSet(_isJavaFile, PathSet(compile)) if compile else None
			
		self.classpath = PathSet(classpath)
		
		self.package = PathSet(package)
		self.manifest = manifest
		BaseTarget.__init__(self, jar, [self.compile,self.classpath,self.package, 
			manifest if isinstance(manifest, str) else None])
			
		for k,v in (options or {}).items(): self.option(k, v)
		self.preserveManifestFormatting = preserveManifestFormatting
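A usage sketch; the class name (Jar) and the paths are assumptions. Note that the manifest is only added as a dependency when it is a path string, matching the isinstance check above.

# hypothetical names; only the constructor signature above is from the excerpt
Jar('${OUTPUT_DIR}/myapp.jar',
    compile=PathSet('src/java/'),
    classpath=['${OUTPUT_DIR}/lib/dependency.jar'],
    manifest='src/java/MANIFEST.MF',      # a str path, so it is also added as a dependency above
    package=PathSet('src/resources/'))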
Example #10
	def __init__(self, destdir, source, classpath, options):
		"""
		@param destdir: directory to create docs in

		@param source: a set of files to build from

		@param classpath: a list of jars needed for the classpath

		@param options: [DEPRECATED - use .option() instead]
		"""
		self.sources = PathSet(source)
		self.classpath = PathSet(classpath)
		BaseTarget.__init__(self, destdir, [self.sources, self.classpath])
		for k,v in (options or {}).items(): self.option(k, v)
Example #11
    def __init__(self,
                 object,
                 source,
                 includes=None,
                 flags=None,
                 dependencies=None,
                 options=None):
        """
		@param object: the object file to generate; see L{objectname}.
		@param source: a (list of) source files
		
		@param includes: a (list of) include directories, as strings or PathSets, 
			each with a trailing slash; the directories in the `native.include` 
			option are also added.
			
			If this target depends on some include files that are generated by another target, 
			make sure it's a directory target since all include directories must either 
			exist before the build starts or be targets themselves. 
			If specifying a subdirectory of a generated directory, do this using DirGeneratedByTarget. 
			If you have a composite generated directory made up of several 
			file targets, wrap them in TargetsWithinDir before passing as the includes parameter. 
		
		@param flags: a list of compiler flags in addition to those in the 
			`native.cxx.flags`/`native.c.flags` option. 
		
		@param dependencies: a list of additional dependencies that need to be built 
			before this target. Usually this is not needed. 
		
		@param options: DEPRECATED; use .option() instead
		"""
        self.source = PathSet(source)

        # currently we don't bother adding the native include dirs here as they're probably always going to be there
        # for the time being, explicitly cope with missing slashes, though really build authors should avoid this
        self.includes = _AddTrailingDirectorySlashesPathSet(PathSet(includes))
        self.flags = flatten([flags]) or []

        # nb: do not include any individual header files in main target deps even if we've already
        # got cached makedepends from a previous build,
        # because it's possible they are no longer needed and no longer exist (and we don't want spurious
        # build failures); this also has the advantage that it doesn't enlarge and slow down the stat cache
        # during dep resolution of non-native targets, since it'll only be populated once we're into
        # the up to date checking phase

        BaseTarget.__init__(self, object,
                            PathSet([dependencies, source, self.includes]))

        for k, v in (options or {}).items():
            self.option(k, v)
        self.tags('native')
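A usage sketch based on the docstring above. The class name (Cpp) is an assumption; objectname and DirGeneratedByTarget are mentioned in the docstring itself, though their exact signatures are not shown here.

# hypothetical class name; objectname/DirGeneratedByTarget are referenced in the docstring above
Cpp(objectname('${OUTPUT_DIR}/obj/main'),
    source='src/main.cpp',
    includes=['include/',                                          # note the trailing slash
              DirGeneratedByTarget('${OUTPUT_DIR}/generated-headers/')],
    flags=['-DMY_FEATURE=1'])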
Example #12
    def __init__(self, target, deps, fn, cleanfn=None):
        """
		@param target: The target file/directory that will be built

		@param deps: The list of dependencies of this target (paths, pathsets or lists)

		@param fn: The functor used to build this target

		@param cleanfn: The functor used to clean this target (optional, defaults to removing 
		the target file/dir)
		"""
        BaseTarget.__init__(self, target, deps)
        self.fn = fn
        self.cleanfn = cleanfn
        self.deps = PathSet(deps)
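A usage sketch; the class name and the exact arguments passed to the build functor are assumptions (the excerpt does not show how fn is invoked), so the callback signature below is illustrative only.

# hypothetical names; the functor signature is illustrative, not taken from the excerpt
def concatenateFiles(path, deps, context):
    with open(path, 'w') as out:
        for dep in deps:
            with open(dep) as f:
                out.write(f.read())

CustomTarget('${OUTPUT_DIR}/combined.txt', ['docs/a.txt', 'docs/b.txt'], concatenateFiles)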
Example #13
	def __init__(self, dest, src, relative=True):
		"""
		dest: the link to be created
		src: what it points at
		"""
		if isinstance(dest, str) and dest.endswith('/'): raise BuildException('SymLink target can only be used for files, not directories') # for now
		if not hasattr(os, 'symlink'): raise BuildException('SymLink target is not supported on this platform')

		self.src = PathSet(src)
		self.relative=relative
		BaseTarget.__init__(self, dest, [self.src])
		# technically we don't need to depend on the contents of src at all, 
		# but in practice it might be useful to have other targets depending on the 
		# link in which case it's a good idea to recreate the link when the 
		# thing it points to is rebuilt - and it's a very quick operation anyway
		self.tags('native')
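A usage sketch; the class name (SymLink) is an assumption. The destination must be a file path (no trailing "/"), and by default the link is stored relative to its location.

# hypothetical class name; dest must not end with "/" and os.symlink must be available
SymLink('${OUTPUT_DIR}/current.properties',
    src='${OUTPUT_DIR}/config/release.properties',
    relative=True)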
Example #14
	def __init__(self, output, compile, classpath, options=None):
		""" 
		@param output: output dir for class files

		@param compile: PathSet (or list)  of things to compile

		@param classpath: PathSet (or list) of things to be on the classpath

		@param options: [DEPRECATED - use .option() instead]
		"""
		self.compile = FilteredPathSet(_isJavaFile, PathSet(compile))
			
		self.classpath = PathSet(classpath)
		
		BaseTarget.__init__(self, output, [self.compile,self.classpath])
		if options is not None:
			for k,v in options.items(): self.option(k, v)
Example #15
    def clean(self, context):
        BaseTarget.clean(self, context)
        args = [self.getOption('docker.path')]
        environs = {
            'DOCKER_HOST': self.getOption('docker.host')
        } if self.getOption('docker.host') else {}
        args.extend(['rmi', context.expandPropertyValues(self.imagename)])

        # Allow warning/info logs but do not let failure of RMI cleanup break the build
        rmiOptions = dict(self.options)
        rmiOptions[ProcessOutputHandler.Options.ignoreReturnCode] = True
        rmiOptions[
            ProcessOutputHandler.Options.downgradeErrorsToWarnings] = True

        call(args,
             outputHandler=self.getOption('docker.outputHandlerFactory')(
                 'docker-rmi', treatStdErrAsErrors=False, options=rmiOptions),
             timeout=self.getOption('process.timeout'),
             env=environs)
Example #16
    def __init__(self, dest, src, implicitDependencies=None):
        """
		@param dest: the output directory (ending with a "/") or file. Never 
		specify a dest directory that is also written to by another 
		target (e.g. do not specify an output directory here). If you need 
		to write multiple files to a directory, use separate Copy 
		targets for each, with file (rather than directory) target dest 
		names. 
			
		@param src: the input, which may be any combination of strings, PathSets and 
		lists of these. If these PathSets include mapping information, this 
		will be used to define where (under the dest directory) each 
		file is copied. 
		
		Note that only src files will be copied; any directory in the 
		src list will be created but its contents will not be copied 
		across - the only way to copy a directory is to use a FindPaths
		(or FindPaths(DirGeneratedByTarget('...'))) 
		for the src, which has the ability to find its contents on disk 
		(this is necessary to prevent complex race conditions and 
		build errors arising from implicit directory walking during 
		the execution phase - if all dir walking happens during 
		dependency resolution then such errors can be easily detected 
		before they cause a problem). 
		
		To create new empty directories that are not present in the source (mkdir), 
		you can use this simple trick which utilizes the fact that the current 
		directory ``.`` definitely exists. It doesn't copy anything from inside 
		(it just copies its 'existence') and uses a SingletonDestRenameMapper PathSet 
		to provide the destination::
		
			SingletonDestRenameMapper('my-new-dest-directory/', './'),
		
		@param implicitDependencies: provides a way to add additional implicit 
		dependencies that will not be part of src but may affect the 
		copy process (e.g. filtering in); this is intended for 
		use by subclasses, do not set this explicitly. 
		"""
        src = PathSet(src)
        BaseTarget.__init__(self, dest, [src, implicitDependencies])
        self.src = src
        self.mode = None  # not yet supported, but may be if it turns out to be useful
        self.addHashableImplicitInputOption('Copy.symlinks')
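Two usage sketches based on the docstring above: copying a directory's contents via FindPaths, and the empty-directory trick using SingletonDestRenameMapper. The class name Copy and the exact FindPaths arguments are assumptions beyond what the docstring states.

# hypothetical build-file snippets; FindPaths and SingletonDestRenameMapper are named in the docstring above
Copy('${OUTPUT_DIR}/docs/', FindPaths('docs/'))    # a directory's contents must be copied via FindPaths

# create a new empty directory under the dest, using the "." trick described above
Copy('${OUTPUT_DIR}/layout/', SingletonDestRenameMapper('my-new-dest-directory/', './'))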
Example #17
	def __init__(self, output, jars, keystore, alias=None, storepass=None, manifestDefaults=None):
		""" 
		@param output: The output directory in which to put the signed jars

		@param jars: The list (or PathSet) of input jars to copy and sign

		@param keystore: The path to the keystore

		@param alias: The alias for the keystore (optional)

		@param storepass: The password for the store file (optional)

		@param manifestDefaults: a dictionary of manifest entries to add to the existing manifest.mf file
		of each jar before signing.  Entries in this dictionary will be ignored if the same entry
		is found in the original manifest.mf file already.
		"""
		self.jars = PathSet(jars)
		self.keystore = keystore
		self.alias = alias
		self.storepass = storepass
		self.manifestDefaults = manifestDefaults
		BaseTarget.__init__(self, output, [self.jars, self.keystore])
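A usage sketch; the class name (SignJars) and the paths are assumptions. manifestDefaults entries are merged into each jar's manifest before signing, and are ignored when the original manifest already defines them.

# hypothetical names; only the constructor signature above is from the excerpt
SignJars('${OUTPUT_DIR}/signed/', ['${OUTPUT_DIR}/myapp.jar'],
    keystore='keys/release.keystore',
    alias='release',
    storepass='${KEYSTORE_PASSWORD}',
    manifestDefaults={'Implementation-Version': '${BUILD_NUMBER}'})  # ignored if already in the manifest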
Example #18
    def __init__(self,
                 bin,
                 objects,
                 libs=None,
                 libpaths=None,
                 shared=False,
                 options=None,
                 flags=None,
                 dependencies=None):
        """
		@param bin: the output binary. See L{exename}, L{libname}, L{staticlibname}. 

		@param objects: a (list of) input objects

		@param libs: a (list of) libraries linked against (optional) in platform-neutral format. 
		Can include list properties like '${FOO_LIB_NAMES[]}'. 

		@param libpaths: a (list of) additional library search directories (optional)

		@param shared: if true compiles to a shared object (.dll or .so) (optional, defaults to false)

		@param flags: a list of additional linker flags

		@param options: [DEPRECATED - use .option() instead]

		@param dependencies: a list of additional dependencies (targets or files)
		"""
        self.objects = PathSet(objects)
        self.libs = libs or []
        self.libpaths = PathSet(libpaths or [])
        self.shared = shared
        self.flags = flags or []
        BaseTarget.__init__(self, bin,
                            PathSet(self.objects, (dependencies or [])))
        for k, v in (options or {}).items():
            self.option(k, v)

        self.tags('native')
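A usage sketch; the class name (Link) is an assumption, and exename is referenced in the docstring above, though its exact signature is not shown. libs are given in platform-neutral form and may include list properties.

# hypothetical names; exename is mentioned in the docstring but its exact signature is assumed
Link(exename('${OUTPUT_DIR}/bin/mytool'),
    objects=FindPaths(DirGeneratedByTarget('${OUTPUT_DIR}/obj/mytool/')),
    libs=['z', '${FOO_LIB_NAMES[]}'],       # platform-neutral names; list properties are allowed
    libpaths=['${OUTPUT_DIR}/lib/'],
    shared=False)                           # True would produce a .dll/.so instead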
Example #19
    def __init__(self,
                 imagename,
                 inputs,
                 depimage=None,
                 dockerfile=None,
                 buildArgs=None,
                 dockerArgs=None):
        """
		imagename: the name/tag of the image to build
		"""
        self.imagename = imagename
        self.depimage = depimage
        self.dockerfile = dockerfile
        self.buildArgs = buildArgs
        self.dockerArgs = dockerArgs
        self.stampfile = '${BUILD_WORK_DIR}/targets/docker/.%s' % self.imageNameToFileName(
            imagename)
        self.depstampfile = '${BUILD_WORK_DIR}/targets/docker/.%s' % self.imageNameToFileName(
            depimage) if depimage else None
        self.inputs = PathSet(inputs)
        BaseTarget.__init__(
            self, self.stampfile,
            inputs + ([self.depstampfile] if self.depstampfile else []))
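A usage sketch; the class name (DockerImage) and the paths are assumptions. Note that the constructor registers a stamp file (not the image itself) as the target path, plus a stamp-file dependency on depimage when one is given.

# hypothetical names; only the constructor signature above is from the excerpt
DockerImage('myorg/myapp:${BUILD_NUMBER}',
    inputs=['docker/Dockerfile', '${OUTPUT_DIR}/myapp.jar'],
    depimage='myorg/base-image:${BUILD_NUMBER}')   # adds the base image's stamp file as a dependency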
Example #20
    def __init__(self, path):
        """
		@param path: the output filename. 
		"""
        BaseTarget.__init__(self, path, [])
Example #21
    def clean(self, context):
        deleteFile(self._getMakeDependsFile(context))
        BaseTarget.clean(self, context)
Example #22
def main(args):
	""" Command line argument parser. 
	"""
	
	try:
		usage = [
###############################################################################
'',
'eXtensible Python-based Build System %s on Python %s.%s.%s'% (XPYBUILD_VERSION, sys.version_info[0], sys.version_info[1], sys.version_info[2]),
'',
'xpybuild.py [operation]? [options]* [property=value]* [-x] [target|tag|regex]* ', 
'',
'A regex containing * can be used instead of a target, but only if it uniquely ', 
'identifies a single target. ',
'',
'Special pseudo-tags:',
'  full                       Include all targets for a full build (the default)',
'',
'Special properties:',
'  OUTPUT_DIR=output          The main directory output will be written to',
'  BUILD_MODE=release         Specifies release, debug (or user-defined) mode',
'  BUILD_NUMBER=n             Build number string, for reporting and use by build',
'',
'Operations: ',
###############################################################################

'  (if none is specified, the default operation is a normal build)',
'      --clean                Clean specified targets incl all deps',
'      --rebuild              Clean specified targets incl all deps then build',
'      --rebuild-ignore-deps  Clean only the specified targets (not deps) then ',
'        (or --rid)           build those targets and any missing dependencies, ',
'                             but not any out-of-date dependencies. This is a ',
'                             fast but less correct way to get a quick ',
'                             incremental build, so use with care. ',
'',
'   -s --search <str>         Show info on targets/tags/properties/options ',
'                             containing the specified substring or regex', 
# hide these from usage (though they still work), as superseded by the more useful "-s" option
#' --ft --find-targets <str>   List targets containing the specified substring', 
#' --ti --target-info <str>    Print details including build file location for ',
#'                             targets containing the specified substring',
'      --targets              List available targets and tags (filtered by any ', 
'                             target or tag names specified on the command line)',
'      --properties           List properties that can be set and their ',
'                             defaults in this build file',
'      --options              List the target options available to build rules ',
'                             and their default values in this build file',
'',
'Options:',
'   -x --exclude <target>     Specifies a target or tag to exclude (unless ',
'                             needed as a dependency of an included target) ',
'',
'   -J --parallel             Build in parallel (this is the default). ',
'                             The number of workers is determined from the ',
'                             `build.workers` build file option or else the ',
'                             number of CPUs and the XPYBUILD_WORKERS_PER_CPU ',
'                             environment variable (default is currently 1.0), ',
'                             with an upper limit for this machine from the ',
'                             XPYBUILD_MAX_WORKERS variable. ',
'   -j --workers <number>     Override the number of worker threads to use for ',
'                             building. Use -j1 for single-threaded. ',
'                             (ignores any environment variables)',
'',
'   -k --keep-going           Continue rather than aborting on errors',
'',
'   -n --dry-run              Don\'t actually build anything, just print',
'                             what would be done (finds missing dependencies)',
'',
' --id --ignore-deps          Skip all dependency/up-to-date checking: only ', 
'                             clean/build targets that do not exist at all ',
'                             (faster builds, but no guarantee of correctness)', 
'',
'   -f --buildfile <file>     Specify the root build file to import ',
'                             (default is ./root.xpybuild.py)',
'',
'   -l --log-level LEVEL      Set the log level to debug/info/critical',
'   -L --logfile <file>       Set the log file location',
'      --timefile <file>      Dump the time for each target in <file> at the',
'                             end of the run',
'      --depgraph <file>      Just resolve dependencies and dump them to <file>',
'      --cpu-stats            Log CPU utilisation stats',
'      --random-priority      Randomizes build order',
'      --verify               Performs additional verifications during the ',
'                             build to help detect bugs in the build files. ',
'                             [verify is currently an experimental feature]',
'      --profile              Profiles all the worker threads',
'   -F --format               Message output format.',
'                             Options:',
] + [
'                                - '+ h for h in _registeredConsoleFormatters
] + [

]
		if reduce(max, list(map(len, usage))) > 80:
			raise Exception('Invalid usage string - all lines must be less than 80 characters')

		# set up defaults
		properties = {} 
		buildOptions = { "keep-going":False, "workers":0, "dry-run":False, 
			"ignore-deps":False, "logCPUUtilisation":False, "profile":False, "verify":False } 
		includedTargets = []
		excludedTargets = []
		task = _TASK_BUILD
		buildFile = os.path.abspath('root.xpybuild.py')
		logLevel = None
		logFile = None
		findTargetsPattern = None
		format = "default"

		opts,targets = getopt.gnu_getopt(args, "knJh?x:j:l:L:f:F:s:", 
			["help","exclude=","parallel","workers=","keep-going",
			"log-level=","logfile=","buildfile=", "dry-run",
			"targets", 'target-info=', 'ti=', "properties", "options", "clean", "rebuild", "rebuild-ignore-deps", "rid", "ignore-deps", "id",
			"format=", "timefile=", "ft=", "find-targets=", "search=", "depgraph=", 'cpu-stats', 'random-priority', 'profile', 'verify'])
		
		for o, a in opts: # option arguments
			o = o.strip('-')
			if o in ["?", "h", "help"]:
				print('\n'.join(usage))
				return 0
			elif o in ["x", "exclude"]:
				excludedTargets.append(a)
			elif o in ["f", "buildfile"]:
				buildFile = os.path.abspath(a)
			elif o in ['targets']:
				task = _TASK_LIST_TARGETS
			elif o in ['find-targets', 'ft']:
				task = _TASK_LIST_FIND_TARGETS
				findTargetsPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['target-info', 'ti']:
				task = _TASK_LIST_TARGET_INFO
				findTargetsPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['search', 's']:
				task = _TASK_LIST_SEARCH
				searchPattern = BaseTarget._normalizeTargetName(a)
			elif o in ['properties']:
				task = _TASK_LIST_PROPERTIES
			elif o in ['options']:
				task = _TASK_LIST_OPTIONS
			elif o in ['J', 'parallel']:
				buildOptions['workers'] = 0
			elif o in ['j', 'workers']:
				buildOptions['workers'] = int(a)
			elif o in ['l', 'log-level']:
				logLevel = getattr(logging, a.upper(), None)
			elif o in ['cpu-stats']:
				buildOptions["logCPUUtilisation"] = True
			elif o in ['random-priority']:
				buildOptions["randomizePriorities"] = True
			elif o in ['L', 'logfile']:
				logFile = a
			elif o in ['F', 'format']:
				format = None
				if a =='xpybuild': a = 'default' # for compatibility
				for h in _registeredConsoleFormatters:
					if h.upper() == a.upper():
						format = h
				if not format:
					print('invalid format "%s"; valid formatters are: %s'%(a, ', '.join(_registeredConsoleFormatters.keys())))
					print('\n'.join(usage))
					return 1
			elif o in ['clean']:
				task = _TASK_CLEAN
				buildOptions['keep-going'] = True
			elif o in ['rebuild']:
				task = _TASK_REBUILD
			elif o in ['rebuild-ignore-deps', 'rid']:
				task = _TASK_REBUILD
				buildOptions['ignore-deps'] = True
			elif o in ['id', 'ignore-deps']:
				buildOptions['ignore-deps'] = True
			elif o in ['k', 'keep-going']:
				buildOptions['keep-going'] = True
			elif o in ['n', 'dry-run']:
				buildOptions['dry-run'] = True
			elif o in ['timefile']:
				buildOptions['timeFile'] = a
			elif o in ['verify']:
				buildOptions['verify'] = True
			elif o in ['profile']:
				buildOptions['profile'] = True
			elif o in ['depgraph']:
				buildOptions['depGraphFile'] = a
			else:
				assert False, "unhandled option: '%s'" % o

		for o in targets: # non-option arguments (i.e. no -- prefix)
			arg = o.strip()
			if arg:
				if '=' in arg:
					properties[arg.split('=')[0].upper()] = arg.split('=')[1]
				elif arg=='all': # pre-4.0 alias for all
					includedTargets.append('full')
				else:
					includedTargets.append(BaseTarget._normalizeTargetName(arg))
			
		# default is all
		if (not includedTargets) or includedTargets==['']:
			includedTargets = ['full']
		
	except getopt.error as msg:
		print(msg)
		print("For help use --help")
		return 2
	
	threading.currentThread().setName('main')
	logging.getLogger().setLevel(logLevel or logging.INFO)

	if buildOptions["workers"] < 0: buildOptions["workers"] = 0 # means there's no override
	
	outputBufferingDisabled = buildOptions['workers']==1 # nb: this also affects the .log handler below
	
	# nb: it's possible workers=0 (auto) and will later be set to 1 but doesn't really matter much

	# initialize logging to stdout - minimal output to avoid clutter, but indicate progress
	hdlr = _registeredConsoleFormatters.get(format, None)
	assert hdlr # shouldn't happen
	wrapper = OutputBufferingStreamWrapper(sys.stdout, bufferingDisabled=outputBufferingDisabled)
	# actually instantiate it
	hdlr = hdlr(
		wrapper, 
		buildOptions=buildOptions) 
	# Handler can override wrapper with a preference in either direction
	if hdlr.bufferingDisabled: wrapper.bufferingDisabled = True
	if hdlr.bufferingRequired: wrapper.bufferingDisabled = False
		
	hdlr.setLevel(logLevel or logging.WARNING)
	logging.getLogger().addHandler(hdlr)
	log.info('Build options: %s'%{k:buildOptions[k] for k in buildOptions if k != 'workers'})
	
	stdout = sys.stdout
	
	# redirect to None, to prevent any target code from doing 'print' statements - should always use the logger
	sys.stdout = None

	listen_for_stack_signal() # make USR1 print a python stack trace

	allTargets = ('full' in includedTargets) and not excludedTargets

	try:
		def loadBuildFile():
			init = BuildInitializationContext(properties)
			isRealBuild = (task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD])
			init._defineOption("process.timeout", 600)
			init._defineOption("build.keepGoing", buildOptions["keep-going"])
			
			# 0 means default behaviour
			init._defineOption("build.workers", 0)
			
			init.initializeFromBuildFile(buildFile, isRealBuild=isRealBuild)
			
			# now handle setting real value of workers, starting with value from build file
			workers = int(init._globalOptions.get("build.workers", 0))
			# default value if not specified in build file
			if workers <= 0: 
				workers = multiprocessing.cpu_count() 
			if os.getenv('XPYBUILD_WORKERS_PER_CPU'):
				workers = min(workers, int(round(multiprocessing.cpu_count()  * float(os.getenv('XPYBUILD_WORKERS_PER_CPU')))))
			
			# machine/user-specific env var can cap it
			if os.getenv('XPYBUILD_MAX_WORKERS'):
				workers = min(workers, int(os.getenv('XPYBUILD_MAX_WORKERS')))
			
			# finally an explicit command line --workers takes precedence
			if buildOptions['workers']: workers = buildOptions['workers']
			
			if workers < 1: workers = 1
			
			# finally write the final number of workers where it's available to both scheduler and targets
			buildOptions['workers'] = workers
			init._globalOptions['build.workers'] = workers
			
			init._finalizeGlobalOptions()
			
			return init

		if buildOptions['profile']:
			import cProfile, pstats
			profiler = cProfile.Profile()
			profiler.enable()

		init = loadBuildFile()

		# nb: don't import any modules that might define options (including outputhandler) 
		# until build file is loaded
		# or we may not have a build context in place yet
		from xpybuild.internal.scheduler import BuildScheduler, logTargetTimes


		if buildOptions['profile']:
			profilepath = 'xpybuild-profile-%s.txt'%'parsing'
			with open(profilepath, 'w') as f:
				p = pstats.Stats(profiler, stream=f)
				p.sort_stats('cumtime').print_stats(f)
				p.dump_stats(profilepath.replace('.txt', '')) # also in binary format
				log.critical('=== Wrote Python profiling output to: %s', profilepath)

		def lookupTarget(s):
			tfound = init.targets().get(s,None)
			if not tfound and '*' in s: 
				
				matchregex = s.rstrip('$')+'$'
				try:
					matchregex = re.compile(matchregex, re.IGNORECASE)
				except Exception as e:
					raise BuildException('Invalid target regular expression "%s": %s'%(matchregex, e))
				matches = [t for t in init.targets().values() if matchregex.match(t.name)]
				if len(matches) > 1:
					print('Found multiple targets matching pattern %s:'%(s), file=stdout)
					print(file=stdout)
					for m in matches:
						print(m.name, file=stdout)
					print(file=stdout)
					raise BuildException('Target regex must uniquely identify a single target: %s (use tags to specify multiple related targets)'%s)
				if matches: return matches[0]
				
			if not tfound: raise BuildException('Unknown target name, target regex or tag name: %s'%s)
			return tfound

		# expand tags to targets here, and do include/exclude calculations
		selectedTargets = set() # contains BaseTarget objects
		for t in includedTargets:
			tlist = init.tags().get(t,None)
			if tlist:
				selectedTargets.update(tlist)
			else:
				selectedTargets.add(lookupTarget(t))
		for t in excludedTargets:
			tlist = init.tags().get(t,None)
			if tlist:
				selectedTargets.difference_update(tlist)
			else:
				selectedTargets.discard(lookupTarget(t))

		# convert findTargetsPattern to list
		if findTargetsPattern:
			findTargetsPattern = findTargetsPattern.lower()
			# sort matches at start of path first, then anywhere in name, finally anywhere in type
			# make 'all'/'full' into a special case that maps to all *selected* targets 
			# (could be different to the 'all' tag if extra args were specified, but this is unlikely and kind of useful)
			findTargetsList = [t for t in sorted(
				 init.targets().values() if allTargets else selectedTargets, key=lambda t:(
					'/'+findTargetsPattern.lower() not in t.name.lower(), 
					findTargetsPattern.lower() not in t.name.lower(), 
					findTargetsPattern.lower() not in t.type.lower(), 
					t.name
					)) if findTargetsPattern in t.name.lower() or findTargetsPattern in t.type.lower() 
						or findTargetsPattern == 'full' or findTargetsPattern == 'all']

		if task == _TASK_LIST_PROPERTIES:
			p = init.getProperties()
			print("Properties: ", file=stdout)
			pad = max(list(map(len, p.keys())))
			if pad > 30: pad = 0
			for k in sorted(p.keys()):
				print(('%'+str(pad)+'s = %s') % (k, p[k]), file=stdout)
				
		elif task == _TASK_LIST_OPTIONS:
			options = init.mergeOptions(None)
			pad = max(list(map(len, options.keys())))
			if pad > 30: pad = 0
			for k in sorted(options.keys()):
				print(("%"+str(pad)+"s = %s") % (k, options[k]), file=stdout)

		elif task == _TASK_LIST_TARGETS:
			if len(init.targets())-len(selectedTargets) > 0:
				print("%d target(s) excluded (unless required as dependencies): "%(len(init.targets())-len(selectedTargets)), file=stdout)
				for t in sorted(['   %-15s %s'%('<'+t.type+'>', t.name) for t in init.targets().values() if t not in selectedTargets]):
					print(t, file=stdout)
				print(file=stdout)
				
			print("%d target(s) included: "%(len(selectedTargets)), file=stdout)
			for t in sorted(['   %-15s %s'%('<'+t.type+'>', t.name) for t in selectedTargets]):
				print(t, file=stdout)
			print(file=stdout)

			if allTargets:
				print("%d tag(s) are defined: "%(len(init.tags())), file=stdout)
				for t in sorted(['   %-15s (%d targets)'%(t, len(init.tags()[t])) for t in init.tags()]):
					print(t, file=stdout)

		elif task == _TASK_LIST_TARGET_INFO:
			if findTargetsList == '*': findTargetsList = init.targets().values()
			for t in sorted(findTargetsList, key=lambda t:(t.type+' '+t.name)):
				print('- %s priority: %s, tags: %s, location: \n   %s'%(t, t.getPriority(), t.getTags(), t.location), file=stdout)

		elif task == _TASK_LIST_FIND_TARGETS:
			# sort matches at start of path first, then anywhere in name, finally anywhere in type
			for t in findTargetsList:
				# this must be very easy to copy+paste, so don't put anything else on the line at all
				print('%s'%(t.name), file=stdout)

		elif task == _TASK_LIST_SEARCH:
			def showPatternMatches(x): # case sensitive is fine (and probably useful)
				if searchPattern.replace('\\', '/') in x.replace('\\','/'): return True # basic substring check (with path normalization)
				if '*' in searchPattern or '?' in searchPattern or '[' in searchPattern: # probably a regex
					if re.search(searchPattern, x): return True
				return False

			for t in init.targets().values():
				t._resolveTargetPath(init)

			print('', file=stdout)

			tagMatches = [t for t in init.tags() if showPatternMatches(t)]
			if tagMatches: 
				print ('%d matching tags:'%len(tagMatches), file=stdout)
				for t in sorted(tagMatches):
					print(t, file=stdout)
				print('', file=stdout)
				
			targetMatches = [t for t in init.targets().values() if showPatternMatches(t.name) or showPatternMatches(t.path)]
			if targetMatches: 
				print ('%d matching targets:'%len(targetMatches), file=stdout)
				for t in sorted(targetMatches, key=lambda t:(t.type+' '+t.name)):
					print('- %s priority: %s, tags: [%s]\n   output:  %s\n   defined:  %s'%(t, t.getPriority(), ' '.join(sorted(t.getTags())) or 'none', os.path.relpath(t.path), t.location), file=stdout)
				print('', file=stdout)

			propMatches = {key:value for (key,value) in init.getProperties().items() if showPatternMatches(key)}
			if propMatches:
				print('%d matching properties:'%len(propMatches), file=stdout)
				pad = max(list(map(len, propMatches.keys())))
				for k in sorted(propMatches.keys()):
					print(('%'+str(pad)+'s = %s') % (k, propMatches[k]), file=stdout)
					if init._propertyLocations[k]: # don't do this for built-in property like BUILD_MODE
						print(('%'+str(pad)+'s   (defined: %s)') % ('', init._propertyLocations[k]), file=stdout)
					
			options = init.mergeOptions(None)
			optionMatches = {key:value for (key,value) in options.items() if showPatternMatches(key)}
			if optionMatches:
				print('%d matching options:'%len(optionMatches), file=stdout)
				pad = max(list(map(len, optionMatches.keys())))
				for k in sorted(optionMatches.keys()):
					print(('%'+str(pad)+'s = %s') % (k, optionMatches[k]), file=stdout)

				
		elif task in [_TASK_BUILD, _TASK_CLEAN, _TASK_REBUILD]:
			
			if not logFile:
				if allTargets:
					buildtag = None
				else:
					buildtag = 'custom'
				logFile = _maybeCustomizeLogFilename(init.getPropertyValue('LOG_FILE'), 
					buildtag,
					task==_TASK_CLEAN)
			logFile = os.path.abspath(logFile)

			logdir = os.path.dirname(logFile)
			if logdir and not os.path.exists(logdir): mkdir(logdir)
			log.critical('Writing build log to: %s', os.path.abspath(logFile))
			
			# also buffer the .log file, since it's just a lot harder to read when multiple target lines are all jumbled up; 
			# we have an undocumented env var for disabling this in case of debugging
			if os.getenv('XPYBUILD_LOGFILE_OUTPUT_BUFFERING_DISABLED','')=='true': outputBufferingDisabled = True
			logFileStream = OutputBufferingStreamWrapper(open(logFile, 'w', encoding='UTF-8'), bufferingDisabled=outputBufferingDisabled)
			hdlr = logging.StreamHandler(logFileStream)
			hdlr.setFormatter(logging.Formatter('%(asctime)s %(relativeCreated)05d %(levelname)-8s [%(threadName)s %(thread)5d] %(name)-10s - %(message)s', None))
			hdlr.setLevel(logLevel or logging.INFO)
			logging.getLogger().addHandler(hdlr)

			log.info('Using xpybuild %s from %s on Python %s.%s.%s', XPYBUILD_VERSION, os.path.normpath(os.path.dirname(__file__)), sys.version_info[0], sys.version_info[1], sys.version_info[2])
			log.info('Using build options: %s (logfile target outputBuffering=%s, stdout target outputBuffering=%s)', buildOptions, not outputBufferingDisabled, not wrapper.bufferingDisabled)
			
			try:
				# sometimes useful to have this info available
				import socket, getpass
				log.info('Build running on %s as user %s', socket.gethostname(), getpass.getuser())
			except Exception as e:
				log.info('Failed to get host/user: %s', e)

			log.info('Default encoding for subprocesses assumed to be: %s (stdout=%s, preferred=%s)', 
				DEFAULT_PROCESS_ENCODING, stdout.encoding, locale.getpreferredencoding())
			
			def lowerCurrentProcessPriority():
				if xpybuild.buildcommon.IS_WINDOWS:
					import win32process, win32api,win32con
					win32process.SetPriorityClass(win32api.GetCurrentProcess(), win32process.BELOW_NORMAL_PRIORITY_CLASS)
				else:
					# on unix, people may run nice before executing the process, so 
					# only change the priority unilaterally if it's currently at its 
					# default value
					if os.nice(0) == 0:
						os.nice(1) # change to 1 below the current level

			try:
				# if possible, set priority of builds to below normal by default, 
				# to avoid starving machines (e.g. on windows) of resources 
				# that should be used for interactive processes
				if os.getenv('XPYBUILD_DISABLE_PRIORITY_CHANGE','') != 'true':
					lowerCurrentProcessPriority()
					log.info('Successfully changed process priority to below normal')
			except Exception as e:
				log.warning('Failed to lower current process priority: %s'%e)
			
			if buildOptions['ignore-deps']:
				log.warning('The ignore-deps option is enabled: dependency graph will be ignored for all targets that already exist on disk, so correctness is not guaranteed')
			
			for (k,v) in sorted(init.getProperties().items()):
				log.info('Setting property %s=%s', k, v)

			try:
				DATE_TIME_FORMAT = "%a %Y-%m-%d %H:%M:%S %Z"
				
				errorsList = []
				if task in [_TASK_CLEAN, _TASK_REBUILD]:
					startTime = time.time()
					log.critical('Starting "%s" clean "%s" at %s', init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )))
					
					cleanBuildOptions = buildOptions.copy()
					cleanBuildOptions['clean'] = True
					if allTargets: cleanBuildOptions['ignore-deps'] = True
					scheduler = BuildScheduler(init, selectedTargets, cleanBuildOptions)
					errorsList, targetsBuilt, targetsCompleted, totalTargets = scheduler.run()
		
					if allTargets and not cleanBuildOptions['dry-run']: # special-case this common case
						for dir in init.getOutputDirs():
							deleteDir(dir)
		
					log.critical('Completed "%s" clean "%s" at %s after %s\n', init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), formatTimePeriod(time.time()-startTime))
						
					if errorsList: 
						log.critical('XPYBUILD FAILED: %d error(s): \n   %s', len(errorsList), '\n   '.join(sorted(errorsList)))
						return 3
				
				if task == _TASK_REBUILD:
					# we must reload the build file here, as it's the only way of flushing out 
					# cached data (especially in PathSets) that may have changed as a 
					# result of the clean
					init = loadBuildFile()
				
				if task in [_TASK_BUILD, _TASK_REBUILD] and not errorsList:

					for cb in init.getPreBuildCallbacks():
						try:
							cb(BuildContext(init))
						except BuildException as be:
							log.error("Pre-build check failed: %s", be)
							return 7

					buildtype = 'incremental' if any(os.path.exists(dir) for dir in init.getOutputDirs()) else 'fresh'
					if not buildOptions['dry-run']:
						for dir in init.getOutputDirs():
							log.info('Creating output directory: %s', dir)
							mkdir(dir)
					
					startTime = time.time()
					log.critical('Starting %s "%s" build "%s" at %s using %d workers', buildtype, 
						init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), 
						buildOptions['workers']
						)
					
					buildOptions['clean'] = False
					scheduler = BuildScheduler(init, selectedTargets, buildOptions)
					errorsList, targetsBuilt, targetsCompleted, totalTargets = scheduler.run()
					log.critical('Completed %s "%s" build "%s" at %s after %s\n', buildtype, init.getPropertyValue('BUILD_MODE'), init.getPropertyValue('BUILD_NUMBER'), 
						time.strftime(DATE_TIME_FORMAT, time.localtime( startTime )), formatTimePeriod(time.time()-startTime))
					if 'timeFile' in buildOptions:
						logTargetTimes(buildOptions['timeFile'], scheduler, init)
	
				if errorsList: 
				# heuristically: it's useful to have them in order of failure when there are only a few, but if there are 
				# lots then it's too hard to read and better to sort, so similar ones appear together
					if len(errorsList)>=10: errorsList.sort()
						
					log.critical('*** XPYBUILD FAILED: %d error(s) (aborted with %d targets outstanding): \n   %s', len(errorsList), totalTargets-targetsCompleted, '\n   '.join(errorsList))
					return 4
				else:
					# using *** here means we get a valid final progress message
					log.critical('*** XPYBUILD SUCCEEDED: %s built (%d up-to-date)', targetsBuilt if targetsBuilt else '<NO TARGETS>', (totalTargets-targetsBuilt))
					return 0
			finally:
				publishArtifact('Xpybuild logfile', logFile)
		else:
			raise Exception('Task type not implemented yet - '+task) # should not happen
		
	except BuildException as e:
		# hopefully we don't end up here very often
		log.error('*** XPYBUILD FAILED: %s', e.toMultiLineString(None))
		return 5

	except Exception as e:
		log.exception('*** XPYBUILD FAILED: ')
		return 6
Example #23
    def clean(self, context):
        if self.cleanfn: self.cleanfn(self.path, context)
        BaseTarget.clean(self, context)