def __init__(self, target, command=None, dependencies=None, cwd=None, redirectStdOutToTarget=False, env=None, stdout=None, stderr=None, commands=None):
	"""
	@param target: the file (or directory) built by running this command.

	@param command: the command line to execute (mutually exclusive with ``commands``).

	@param dependencies: a list of dependencies that must be built before this target.

	@param cwd: the working directory in which to run the command.

	@param redirectStdOutToTarget: if True, the process's stdout is written to the
	target file (so the target must be a file, not a directory).

	@param env: a dictionary of environment overrides for the process.

	@param stdout: file to redirect stdout to (mutually exclusive with
	``redirectStdOutToTarget``).

	@param stderr: file to redirect stderr to.

	@param commands: a list of command lines to execute (mutually exclusive with ``command``).
	"""
	# avoid the shared-mutable-default-argument pitfall; [] was previously the default
	if dependencies is None: dependencies = []
	BaseTarget.__init__(self, target, dependencies)
	assert not (command and commands), 'Cannot specify both command= and commands='
	self.command = command
	self.commands = commands
	self.cwd = cwd
	self.deps = PathSet(dependencies)
	self.redirectStdOutToTarget = redirectStdOutToTarget
	if redirectStdOutToTarget and isDirPath(target):
		# stdout can only be captured into a single file
		raise BuildException('Cannot set redirectStdOutToTarget and specify a directory for the target name - please specify a file instead: %s' % target)
	self.env = env
	self.stdout, self.stderr = stdout, stderr
	if stdout and redirectStdOutToTarget:
		raise BuildException('Cannot set both redirectStdOutToTarget and stdout')
def __init__(self, output, compile, main=None, libs=None, flags=None, dependencies=None, resources=None):
	"""
	@param output: the resulting .exe or .dll

	@param compile: the input PathSet, path or list of .cs file(s)

	@param main: The main class to execute if an exe is to be built.
	If this is set then an executable will be created.
	Otherwise this target will build a library.

	@param libs: a list of input libraries (or a PathSet)
	"""
	# simple attributes first; the filtered set keeps only .NET source files
	self.main = main
	self.flags = flags or []
	self.libs = PathSet(libs or [])
	self.resources = resources or []
	self.compile = FilteredPathSet(_isDotNetFile, PathSet(compile))
	# each resource is a (path, name) pair; only the paths are dependencies
	resourcePaths = [src for (src, _resname) in self.resources]
	BaseTarget.__init__(self, output, [self.compile, self.libs, resourcePaths, dependencies or []])
	self.tags('c#')
def __init__(self, archive, inputs):
	"""
	@param archive: The archive to be created, ending with ``.zip``.

	@param inputs: The files (usually pathsets) to be included in the archive.
	"""
	inputPaths = PathSet(inputs)
	self.inputs = inputPaths
	# the archive depends on exactly the files that go into it
	BaseTarget.__init__(self, archive, inputPaths)
def __init__(self, bin, objects):
	"""
	@param bin: the output library

	@param objects: a (list of) input objects
	"""
	objectPaths = PathSet(objects)
	self.objects = objectPaths
	BaseTarget.__init__(self, bin, objectPaths)
	self.tags('native')
def __init__(self, jar, compile, classpath, manifest, options=None, package=None, preserveManifestFormatting=False):
	"""
	@param jar: the output jar file.

	@param compile: the source files to compile into the jar (may be empty/None).

	@param classpath: the classpath used for compilation.

	@param manifest: either the path of a manifest file (string), or a dict of
	manifest entries.

	@param options: [DEPRECATED - use .option() instead]

	@param package: additional files to package into the jar.

	@param preserveManifestFormatting: if True, an existing manifest file is copied
	with its formatting preserved.
	"""
	if compile:
		self.compile = FilteredPathSet(_isJavaFile, PathSet(compile))
	else:
		self.compile = None
	self.classpath = PathSet(classpath)
	self.package = PathSet(package)
	self.manifest = manifest
	# a manifest given as a file path is itself a dependency; a dict of entries is not
	manifestDependency = manifest if isinstance(manifest, str) else None
	BaseTarget.__init__(self, jar, [self.compile, self.classpath, self.package, manifestDependency])
	for optionKey, optionValue in (options or {}).items():
		self.option(optionKey, optionValue)
	self.preserveManifestFormatting = preserveManifestFormatting
def __init__(self, destdir, source, classpath, options):
	"""
	@param destdir: directory to create docs in

	@param source: a set of files to build from

	@param classpath: a list of jars needed for the classpath

	@param options: [DEPRECATED - use .option() instead]
	"""
	self.sources = PathSet(source)
	self.classpath = PathSet(classpath)
	BaseTarget.__init__(self, destdir, [self.sources, self.classpath])
	if options:
		for optionKey, optionValue in options.items():
			self.option(optionKey, optionValue)
def __init__(self, object, source, includes=None, flags=None, dependencies=None, options=None):
	"""
	@param object: the object file to generate; see L{objectname}.

	@param source: a (list of) source files

	@param includes: a (list of) include directories, as strings or PathSets,
	each with a trailing slash; the directories in the `native.include`
	option are also added.

	If this target depends on some include files that are generated by another target,
	make sure it's a directory target since all include directories must either
	exist before the build starts or be targets themselves. If specifying a
	subdirectory of a generated directory, do this using DirGeneratedByTarget.
	If you have a composite generated directory made up of several
	file targets, wrap them in TargetsWithinDir before passing as the includes parameter.

	@param flags: a list of compiler flags in addition to those in the
	`native.cxx.flags`/`native.c.flags` option.

	@param dependencies: a list of additional dependencies that need to be built
	before this target. Usually this is not needed.

	@param options: DEPRECATED; use .option() instead
	"""
	self.source = PathSet(source)

	# the global native include dirs aren't added here as they're assumed always present;
	# cope explicitly with missing trailing slashes, though build authors should avoid them
	self.includes = _AddTrailingDirectorySlashesPathSet(PathSet(includes))
	self.flags = flatten([flags]) or []

	# deliberately do NOT add individual header files to the main target deps, even when
	# cached makedepends exist from a previous build: they may no longer be needed or
	# exist (avoiding spurious failures), and omitting them keeps the stat cache small
	# during dep resolution of non-native targets (it's populated later, during
	# up-to-date checking)
	BaseTarget.__init__(self, object, PathSet([dependencies, source, self.includes]))

	for optionKey, optionValue in (options or {}).items():
		self.option(optionKey, optionValue)
	self.tags('native')
def __init__(self, target, deps, fn, cleanfn=None):
	"""
	@param target: The target file/directory that will be built

	@param deps: The list of dependencies of this target (paths, pathsets or lists)

	@param fn: The functor used to build this target

	@param cleanfn: The functor used to clean this target (optional, defaults to
	removing the target file/dir)
	"""
	BaseTarget.__init__(self, target, deps)
	self.deps = PathSet(deps)
	self.fn = fn
	self.cleanfn = cleanfn
class Ar(BaseTarget):
	""" Target that compiles .a archive files from collections of object files. """

	def __init__(self, bin, objects):
		"""
		@param bin: the output library

		@param objects: a (list of) input objects
		"""
		self.objects = PathSet(objects)
		BaseTarget.__init__(self, bin, self.objects)
		self.tags('native')

	def run(self, context):
		# ensure the output directory exists, then delegate to the configured archiver
		opts = self.options
		mkdir(os.path.dirname(self.path))
		archiver = opts['native.compilers'].archiver
		archiver.archive(context, output=self.path, options=opts, src=self.objects.resolve(context))

	def getHashableImplicitInputs(self, context):
		# rebuild when the set of input objects changes
		implicitInputs = super(Ar, self).getHashableImplicitInputs(context)
		implicitInputs.append('objects: %s' % self.objects)
		return implicitInputs
def __init__(self, target, command=None, dependencies=None, copySrc=None, cwd=None, redirectStdOutToTarget=False, env=None, commands=None, **kwargs):
	"""
	@param target: the output directory produced by this target (must end with ``/``).

	@param command: the command line to execute (mutually exclusive with ``commands``).

	@param dependencies: a list of dependencies that must be built before this target.

	@param copySrc: the files to copy into the target directory before running the command.

	@param cwd: the working directory in which to run the command.

	@param redirectStdOutToTarget: if True, the process's stdout is written to the target.

	@param env: a dictionary of environment overrides for the process.

	@param commands: a list of command lines to execute (mutually exclusive with ``command``).
	"""
	# avoid the shared-mutable-default-argument pitfall; [] was previously the default
	if dependencies is None: dependencies = []
	assert isDirPath(target), 'This target can only be used for directories (ending in /)'
	copySrc = PathSet(copySrc)
	CustomCommand.__init__(self, target, command=command, commands=commands,
		dependencies=[dependencies, copySrc], cwd=cwd,
		redirectStdOutToTarget=redirectStdOutToTarget, env=env, **kwargs)
	# can't call Copy.__init__ without introducing a duplicate target
	# but use the same name used by Copy so we can call run successfully later
	self.src = copySrc
	self.mode = None
class Tarball(BaseTarget):
	""" Target that creates a ``.tar.gz`` archive from a set of input files. """

	def __init__(self, archive, inputs):
		"""
		@param archive: The archive to be created, which should end with ``.tar.gz``.

		@param inputs: The files (usually pathsets) to be included in the archive.
		"""
		self.inputs = PathSet(inputs)
		BaseTarget.__init__(self, archive, self.inputs)

	def run(self, context):
		mkdir(os.path.dirname(self.path))
		with tarfile.open(normLongPath(self.path), 'w:gz') as archiveFile:
			for (srcPath, destPath) in self.inputs.resolveWithDestinations(context):
				# strip trailing slashes so directory entries are added cleanly
				archiveFile.add(normLongPath(srcPath).rstrip('/\\'), destPath)

	def getHashableImplicitInputs(self, context):
		implicitInputs = super(Tarball, self).getHashableImplicitInputs(context)
		# include the source representation of the deps list so changes get reflected;
		# property expansion on the repr is a convenient shortcut (we want to detect
		# changes in versions etc that should trigger a rebuild, without doing any
		# globbing/searches here)
		implicitInputs.append('src: '+context.expandPropertyValues(('%s'%self.inputs)))
		return implicitInputs
class Javadoc(BaseTarget):
	""" Creates Javadoc from a set of input files.

	The following options can be set with ``Javadoc(...).option(key, value)``
	to customize the documentation process:

		- ``javadoc.title = "Documentation"`` The title.

		- ``javac.ignoreSourceFilesFromClasspath = False`` By default, Javadoc will
		  parse any source .java files present in the classpath in case they contain
		  comments that should be inherited by the source files being documented.
		  If these files contain errors (such as missing optional dependencies) it will
		  cause Javadoc to fail. This option prevents the classpath from being
		  searched for source files (by setting -sourcepath to a non-existent directoryu),
		  which avoids errors and may also speed up the Javadoc generation.

		- ``javac.access = "public"`` Identifies which members and classes to include.
	"""

	def __init__(self, destdir, source, classpath, options):
		"""
		@param destdir: directory to create docs in

		@param source: a set of files to build from

		@param classpath: a list of jars needed for the classpath

		@param options: [DEPRECATED - use .option() instead]
		"""
		self.sources = PathSet(source)
		self.classpath = PathSet(classpath)
		BaseTarget.__init__(self, destdir, [self.sources, self.classpath])
		for optionKey, optionValue in (options or {}).items():
			self.option(optionKey, optionValue)

	def run(self, context):
		opts = self.options
		resolvedClasspath = os.pathsep.join(self.classpath.resolve(context))
		javadoc(self.path, self.sources.resolve(context), resolvedClasspath, opts,
			outputHandler=ProcessOutputHandler.create('javadoc', treatStdErrAsErrors=False, options=opts),
			workDir=self.workDir)

	def getHashableImplicitInputs(self, context):
		# changes to javadoc.* option values should cause a rebuild; for now, don't
		# bother factoring the global jar.manifest.defaults option in here
		# (it'll almost never change anyway)
		optionLines = ['option: %s = "%s"'%(k,v) for (k,v) in self.options.items() if k and k.startswith('javadoc.')]
		return super(Javadoc, self).getHashableImplicitInputs(context) + sorted(optionLines)
def __init__(self, output, compile, classpath, options=None):
	"""
	@param output: output dir for class files

	@param compile: PathSet (or list) of things to compile

	@param classpath: PathSet (or list) of things to be on the classpath

	@param options: [DEPRECATED - use .option() instead]
	"""
	self.compile = FilteredPathSet(_isJavaFile, PathSet(compile))
	self.classpath = PathSet(classpath)
	BaseTarget.__init__(self, output, [self.compile, self.classpath])
	# same no-op for None and {} as the previous explicit None check
	for optionKey, optionValue in (options or {}).items():
		self.option(optionKey, optionValue)
def __init__(self, archivePath, includes=None, excludes=None, destMapper=None):
	"""
	@param archivePath: The archive to unpack; either a string or a singleton PathSet

	@param destMapper: A functor that takes a (context, destPath) where destPath
	is an archive-relative path (guaranteed to contain / not \\), and returns
	the desired destination relative path string. The functor should have a
	deterministic and user-friendly __str__ implementation.

	@param includes: a list of include patterns (if provided excludes all non-matching files)

	@param excludes: a list of exclude patterns (processed after includes)
	"""
	# attribute names must stay as-is (name-mangled privates read elsewhere in the class)
	self.__isResolved = False
	self.__destMapper = destMapper
	self.__path = PathSet(archivePath)
	self.__includes = flatten(includes)
	self.__excludes = flatten(excludes)
	self.__location = BuildFileLocation()
def __init__(self, output, jars, keystore, alias=None, storepass=None, manifestDefaults=None):
	"""
	@param output: The output directory in which to put the signed jars

	@param jars: The list (or PathSet) of input jars to copy and sign

	@param keystore: The path to the keystore

	@param alias: The alias for the keystore (optional)

	@param storepass: The password for the store file (optional)

	@param manifestDefaults: a dictionary of manifest entries to add to the existing
	manifest.mf file of each jar before signing. Entries in this dictionary will
	be ignored if the same entry is found in the original manifest.mf file already.
	"""
	self.keystore = keystore
	self.alias = alias
	self.storepass = storepass
	self.manifestDefaults = manifestDefaults
	self.jars = PathSet(jars)
	# the keystore itself is a dependency: re-sign if it changes
	BaseTarget.__init__(self, output, [self.jars, self.keystore])
def __init__(self, bin, objects, libs=None, libpaths=None, shared=False, options=None, flags=None, dependencies=None):
	"""
	@param bin: the output binary. See L{exename}, L{libname}, L{staticlibname}.

	@param objects: a (list of) input object

	@param libs: a (list of) libraries linked against (optional) in platform-neutral format.
	Can include list properties like '${FOO_LIB_NAMES[]}'.

	@param libpaths: a (list of) additional library search directories (optional)

	@param shared: if true compiles to a shared object (.dll or .so) (optional, defaults to false)

	@param flags: a list of additional linker flags

	@param options: [DEPRECATED - use .option() instead]

	@param dependencies: a list of additional dependencies (targets or files)
	"""
	self.shared = shared
	self.flags = flags or []
	self.libs = libs or []
	self.libpaths = PathSet(libpaths or [])
	self.objects = PathSet(objects)
	# note: libs are resolved at link time, not dependencies here
	BaseTarget.__init__(self, bin, PathSet(self.objects, (dependencies or [])))
	for optionKey, optionValue in (options or {}).items():
		self.option(optionKey, optionValue)
	self.tags('native')
class Zip(BaseTarget):
	""" Target that creates a ``.zip`` archive from a set of input files. """

	def __init__(self, archive, inputs):
		"""
		@param archive: The archive to be created, ending with ``.zip``.

		@param inputs: The files (usually pathsets) to be included in the archive.
		"""
		self.inputs = PathSet(inputs)
		BaseTarget.__init__(self, archive, self.inputs)

	def run(self, context):
		mkdir(os.path.dirname(self.path))
		seenEntries = set()
		with zipfile.ZipFile(normLongPath(self.path), 'w') as archiveFile:
			for (srcPath, destPath) in self.inputs.resolveWithDestinations(context):
				# duplicate entries would produce an invalid zip, so fail with a
				# message identifying every source that maps to the clashing entry
				if destPath in seenEntries:
					dupsrc = ['"%s"'%src for (src, dest) in self.inputs.resolveWithDestinations(context) if dest == destPath]
					raise BuildException('Duplicate zip entry "%s" from: %s'%(destPath, ', '.join(dupsrc)))
				seenEntries.add(destPath)

				# can't compress directory entries! (it messes up Java)
				compression = zipfile.ZIP_STORED if isDirPath(srcPath) else zipfile.ZIP_DEFLATED
				archiveFile.write(normLongPath(srcPath).rstrip('/\\'), destPath, compression)

	def getHashableImplicitInputs(self, context):
		implicitInputs = super(Zip, self).getHashableImplicitInputs(context)
		# include the source representation of the deps list so changes get reflected;
		# property expansion on the repr is a convenient shortcut (we want to detect
		# changes in versions etc that should trigger a rebuild, without doing any
		# globbing/searches here)
		implicitInputs.append('src: '+context.expandPropertyValues(('%s'%self.inputs)))
		return implicitInputs
def __init__(self, dest, src, implicitDependencies=None):
	"""
	@param dest: the output directory (ending with a "/") or file. Never
	specify a dest directory that is also written to by another
	target (e.g. do not specify an output directory here). If you need
	to write multiple files to a directory, use separate Copy
	targets for each, with file (rather than directory) target dest names.

	@param src: the input, which may be any combination of strings, PathSets and
	lists of these. If these PathSets include mapping information, this
	will be used to define where (under the dest directory) each
	file is copied.

	Note that only src files will be copied, any directory in the
	src list will be created but its contents will not be copied
	across - the only way to copy a directory is to use a FindPaths
	(or FindPaths(DirGeneratedByTarget('...'))) for the src, which
	has the ability to find its contents on disk (this is necessary
	to prevent complex race conditions and build errors arising from
	implicit directory walking during the execution phase - if all
	dir walking happens during dependency resolution then such
	errors can be easily detected before they cause a problem).

	To create new empty directories that are not present in the source (mkdir),
	you can use this simple trick which utilizes the fact that the current
	directory ``.`` definitely exists. It doesn't copy anything from inside
	(just copies only its 'existence') and uses a SingletonDestRenameMapper PathSet
	to provide the destination::

		SingletonDestRenameMapper('my-new-dest-directory/', './'),

	@param implicitDependencies: provides a way to add additional implicit
	dependencies that will not be part of src but may affect the
	copy process (e.g. filtering in); this is intended for use by
	subclasses, do not set this explicitly.
	"""
	srcPathSet = PathSet(src)
	BaseTarget.__init__(self, dest, [srcPathSet, implicitDependencies])
	self.src = srcPathSet
	# mode preservation is not yet supported, but may be if it turns out to be useful
	self.mode = None
	self.addHashableImplicitInputOption('Copy.symlinks')
class Custom( BaseTarget ): # deprecated because error handling/logging is poor and it promotes bad practices like not using options (e.g process timeout)
	"""
	@deprecated: Use `CustomCommand` instead, or a dedicated `BaseTarget` subclass.

	A custom target that builds a single file or directory of content by
	executing an arbitrary python functor.

	Functor must take:
	(target path, [dependency paths], context)

	Tip: don't forget to ensure the target path's parent dir exists
	using fileutils.mkdir.
	"""
	fn = None
	cleanfn = None

	def __init__(self, target, deps, fn, cleanfn=None):
		"""
		@param target: The target file/directory that will be built

		@param deps: The list of dependencies of this target (paths, pathsets or lists)

		@param fn: The functor used to build this target

		@param cleanfn: The functor used to clean this target (optional, defaults to
		removing the target file/dir)
		"""
		BaseTarget.__init__(self, target, deps)
		self.deps = PathSet(deps)
		self.fn = fn
		self.cleanfn = cleanfn

	def run(self, context):
		# delegate the entire build to the user-supplied functor
		self.fn(self.path, self.deps.resolve(context), context)

	def clean(self, context):
		# run the custom clean functor (if any) before the default clean
		if self.cleanfn:
			self.cleanfn(self.path, context)
		BaseTarget.clean(self, context)
def __init__(self, imagename, inputs, depimage=None, dockerfile=None, buildArgs=None, dockerArgs=None):
	"""
	imagename: the name/tag of the image to build
	"""
	self.imagename = imagename
	self.depimage = depimage
	self.dockerfile = dockerfile
	self.buildArgs = buildArgs
	self.dockerArgs = dockerArgs
	# a per-image stamp file under the build work dir records when the image was built
	self.stampfile = '${BUILD_WORK_DIR}/targets/docker/.%s' % self.imageNameToFileName(imagename)
	if depimage:
		self.depstampfile = '${BUILD_WORK_DIR}/targets/docker/.%s' % self.imageNameToFileName(depimage)
	else:
		self.depstampfile = None
	self.inputs = PathSet(inputs)
	deps = inputs + ([self.depstampfile] if self.depstampfile else [])
	BaseTarget.__init__(self, self.stampfile, deps)
class BaseTarget(Composable): """ The base class for all targets. .. rubric:: Configuring targets in your build files The following methods can be used to configure any target instance you add to a build file: .. autosummary :: option tags clearTags disableInFullBuild priority .. rubric:: Implementing a new target class If you are subclassing ``BaseTarget`` to create a new target class, you must implement `run`. In rare occasions you may also wish to override `clean`. The following methods are available for use by target subclasses, either at construction time (``__init__``) or at build time (during `run` or `clean`): .. autosummary :: registerImplicitInputOption registerImplicitInput getOption openFile targetNameToUniqueId This class provides several read-only attributes for use by subclasses. :ivar str name: The canonical name for the target (containing unsubstituted properties). :ivar str path: The resolved name with all properties variables expanded. This field is set only once the target is running or checking up-to-dateness but not during initialization phase when targets are initially constructed. :ivar dict options: A ``dict`` of the resolved options for this target. Can only be used once target is running or checking up-to-dateness but not during the initialization phase. See also `getOption()`. :ivar str workDir: A unique dedicated directory where this target can write temporary/working files. .. rubric:: Arguments for the BaseTarget __init__ constructor @param name: This target instance's unique name, which is the file or directory path which is created as a result of running this target. The target name may contain ``${...}`` properties (e.g. ``${OUTPUT_DIR}/myoutputfile``), and must use only forward slashes ``/``. If the target builds a directory it must end with a forward slash. 
@param dependencies: The dependencies, which may need to be flattened/expanded by the build system; may be any combination of strings, `xpybuild.pathsets`` and lists, and may also contain unexpanded variables. .. rubric:: BaseTarget methods """ # to allow targets to be used in sets, override hash to ensure it's deterministic; # no need to override eq/ne, they use object identity which is already correct def __hash__(self): """ Uses the target name to generate a hash. Targets are required to produce unique outputs. """ return hash(self.name) def __init__(self, name, dependencies): self.__getAttrImpl = { 'path': lambda: self.__returnOrRaiseIfNone( self.__path, 'Target path has not yet been resolved by this phase of the build process: %s' % self), 'name': lambda: self.__name, 'options': lambda: self.__returnOrRaiseIfNone( self.__optionsResolved, "Cannot read the value of basetarget.targetOptions during the initialization phase of the build as the resolved option values are not yet available" ), 'workDir': lambda: self.__workDir, 'type': lambda: self.__class__.__name__, 'baseDir': lambda: self.location.buildDir, } self.__optionsTargetOverridesUnresolved = { } # for target-specific option overrides. for internal use (by buildcontext), do not use self.__optionsResolved = None # gets assigned during end of initialization phase if isinstance(name, str): if '//' in name: raise BuildException( 'Invalid target name: double slashes are not permitted: %s' % name) if '\\' in name: raise BuildException( 'Invalid target name: backslashes are not permitted: %s' % name) self.__name = BaseTarget._normalizeTargetName(str(name)) self.__path_src = name self.__tags = ['full'] self.__priority = 0.0 # default so we can go bigger or smaller self.log = logging.getLogger(self.__class__.__name__) # put the class first, since it results in better ordering (e.g. 
for errors) # use a space to delimit these to make it easier to copy to the clipboard by double-clicking self.__stringvalue = f'<{self.type}> {self.name}' init = getBuildInitializationContext() if not init: # doc-test mode self.location = BuildFileLocation(raiseOnError=False) else: self.location = BuildFileLocation(raiseOnError=True) init.registerTarget(self) # this can throw # should ensure changes to the build file cause a rebuild? probs no need # PathSet will perform all necessary flattening etc self.__dependencies = PathSet(dependencies) self.__path = None # set by _resolveTargetPath self.__workDir = None self.__registeredImplicitInputs = [] # aliases for pre-3.0 self.addHashableImplicitInputOption = self.registerImplicitInputOption self.addHashableImplicitInput = self.registerImplicitInput @staticmethod def _normalizeTargetName( name ): # non-public method to ensure comparisons between target names are done consistently if xpybuild.buildcontext._EXPERIMENTAL_NO_DOLLAR_PROPERTY_SYNTAX: name = name.replace('$${', '<__xpybuild_dollar_placeholder>').replace( '${', '{').replace( '<__xpybuild_dollar_placeholder>', '$${') return name def __returnOrRaiseIfNone(self, value, exceptionMessage): if value is not None: return value raise Exception(exceptionMessage) def __setattr__(self, name, value): # this is a hack to retain backwards compat for a few classes that rely on explicitly assigning to self.options if name == 'options': # make this a WARN at some point self.log.debug( 'Target class "%s" assigns to self.options which is deprecated - instead call .option(...) to set target options' % self.__class__.__name__) if value: self.__optionsTargetOverridesUnresolved.update(value) else: object.__setattr__(self, name, value) def __getattr__(self, name): """ Getter for read-only attributes """ # nb this is not called for fields that have been set explicitly using self.X = ... 
try: return self.__getAttrImpl[name]() except KeyError: raise AttributeError('Unknown attribute %s' % name) def __str__( self): # string display name which is used for log statements etc """ Returns a display name including the target name and the target type (class) """ return self.__stringvalue def resolveToString(self, context): """ .. private:: There is usually no need for this to be called other than by the framework. Resolves this target's path and returns as a string. It is acceptable to call this while the build files are still being parsed (before the dependency checking phase), but an error will result if resolution depends on anything that has not yet been defined. """ # implementing this allows targets to be used in Composeable expressions # if there's no explicit parent, default to ${OUTPUT_DIR} to stop # people accidentally writing to their source directories if self.__path is not None: return self.__path # cache it for consistency self.__path = context.getFullPath( self.__path_src, context.getPropertyValue("OUTPUT_DIR")) badchars = '<>:"|?*' # Windows bad characters; it's helpful to stop people using such characters on all OSes too since almost certainly not intended foundbadchars = [ c for c in self.__path[2:] if c in badchars ] # (nb: ignore first 2 chars of absolute path which will necessarily contain a colon on Windows) if foundbadchars: raise BuildException( 'Invalid character(s) "%s" found in target name %s' % (''.join(sorted(list(set(foundbadchars)))), self.__path)) if self.__path.endswith(('.', ' ')): raise BuildException( 'Target name must not end in a "." or " "' ) # https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file self.log.debug('Resolved target name %s to canonical path %s', self.name, self.path) return self.__path def _resolveTargetPath(self, context): """.. private:: Internal method for resolving path from name, performing any required expansion etc. Do not override or call this method. 
@param context: The initialization context, with all properties and options fully defined. """ self.resolveToString(context) # do this early (before deps resolution) so it can be used for clean self.__workDir = os.path.normpath( context.getPropertyValue("BUILD_WORK_DIR") + '/targets/' + self.__class__.__name__ + '/' + targetNameToUniqueId(self.name)) # take the opportunity to provide a merged set of options if len(self.__optionsTargetOverridesUnresolved) == 0: self.__optionsResolved = context._globalOptions # since is immutable so we can avoid a copy else: self.__optionsResolved = context._mergeListOfOptionDicts( [ context._globalOptions, self.__optionsTargetOverridesUnresolved ], target=self) def _resolveUnderlyingDependencies(self, context, rawdeps=False): """.. private:: Internal method for resolving dependencies needed by this target, e.g. doing path expansion, globbing, etc. Do not override this method. This method should be invoked only once, by the scheduler. """ # special option just for verify implementation, returning the real deps not the underlying deps if rawdeps: return self.__dependencies.resolve(context) # don't think there's any value in caching this result return self.__dependencies._resolveUnderlyingDependencies(context) def run(self, context: xpybuild.buildcontext.BuildContext): """Called by xpybuild to request to target to run its build (all targets must implement this). This method is only called when up-to-date checking shows that the target must be built. It's possible that execution will show that the target did not really need to execute, in which case False should be returned. """ raise Exception('run() is not implemented yet for this target') def clean(self, context: xpybuild.buildcontext.BuildContext): """Called by xpybuild when the target should be deleted (can be overridden if needed). 
The default implementation will simply delete the target, and any target 
	workdir, but can be overridden to delete additional temporary files if 
	needed (shouldn't be). 
	"""
	try:
		# delete the workdir first, but always attempt to delete the target itself
		# (file or directory) even if workdir deletion fails
		if self.workDir:
			fileutils.deleteDir(self.workDir)
	finally:
		if os.path.isdir(self.path):
			self.log.info('Target clean is deleting directory: %s', self.path)
			fileutils.deleteDir(self.path)
		else:
			fileutils.deleteFile(self.path)

	def registerImplicitInputOption(self, optionKey):
		"""Target classes can call this from their ``__init__()`` to add the resolved 
		value of the specified option(s) as 'implicit inputs' of this target. 
		
		This list will be written to disk after the target builds successfully, and compared 
		with its recorded value when subsequently checking the up-to-date-ness of the target. 
		This allows xpybuild to detect when the target should be rebuilt as a result of 
		a change in options or property values (e.g. build number, release/debug mode etc), 
		even if no dependencies have changed. 
		
		Call this from the target's constructor, for each option that this target is 
		affected by, or with a callable that dynamically selects from the defined options, 
		e.g. based on a prefix. 
		
		@param optionKey: the name of an option (as a string), or a callable that accepts 
			an optionKey and dynamically decides which options to include, returning True 
			if it should be included. For example:: 
			
				self.registerImplicitInputOption(lambda optionKey: optionKey.startswith(('java.', 'javac.')))
		
		:return: Returns the same target instance it was called on, to permit fluent calling from build files. 
		"""
		self.registerImplicitInput(lambda context: self.__getMatchingOptions(context, optionKey))
		return self

	def __getMatchingOptions(self, context, optionKey):
		# Resolve the option key(s) selected by registerImplicitInputOption into
		# deterministic "option KEY=VALUE" lines (sorted by key).
		if callable(optionKey):
			keys = [k for k in self.options if optionKey(k)]
		else:
			keys = [optionKey]
		result = []
		for k in sorted(keys):
			x = self.options[k]
			if x.__repr__.__qualname__ == 'function.__repr__':
				value = x.__qualname__ # avoid 0x references for top-level functions (nb: doesn't affect lambdas/nested functions)
			else:
				value = repr(x)
				#assert '0x' not in value
			result.append(f'option {k}={value}')
		return result

	def registerImplicitInput(self, item):
		"""Target classes can call this from their ``__init__()`` to add the specified 
		string line(s) as 'implicit inputs' of this target. 
		
		This list will be written to disk after the target builds successfully, and compared 
		with its recorded value when subsequently checking the up-to-date-ness of the target. 
		This allows xpybuild to detect when the target should be rebuilt as a result of 
		a change in options or property values (e.g. build number, release/debug mode etc), 
		even if no dependencies have changed. 
		
		Call this from the target's constructor. 
		
		@param item: The item to be added to the implicit inputs. This can be either: 
		
			- a string, which may contain substitution variables, 
			  e.g. ``myparameter="${someprop}"``, and will be converted to a string using 
			  `buildcontext.BuildContext.expandPropertyValues`, or 
			
			- a callable to be invoked during up-to-dateness checking, that accepts 
			  a context parameter and returns a string or list of strings; any ``None`` 
			  items in the list are ignored. 
		
		:return: Returns the same target instance it was called on, to permit fluent calling from build files. 
		"""
		assert isinstance(item, str) or callable(item)
		self.__registeredImplicitInputs.append(item)
		return self

	def getHashableImplicitInputs(self, context):
		"""(deprecated) Target classes can implement this to add the string line(s) 
		as 'implicit inputs' of this target. 
		
		@deprecated: The `registerImplicitInput` or `registerImplicitInputOption` methods 
		should be called instead of overriding this method. 
		
		The default implementation returns nothing, unless 
		`registerImplicitInput` or `registerImplicitInputOption` have been called 
		(in which case only the resolved paths of the file/directory dependencies will be used). 
		"""
		if self.__registeredImplicitInputs:
			result = []
			for x in self.__registeredImplicitInputs:
				if x is None: continue
				if callable(x) and not hasattr(x, 'resolveToString'): # if we aren't delegating to expandPropertyValues to resolve this
					x = x(context)
					if x is None:
						continue
					elif isinstance(x, str):
						result.append(x)
					else: # assume it's a list or other iterable
						for y in x:
							if y is not None:
								result.append(y)
				else:
					result.append(context.expandPropertyValues(x))
			return result
		return []

	def getTags(self): 
		""" .. private:: Not exposed publicly as there is no public use case for this. 
		
		:returns: The list of tags associated with this target. 
		"""
		return self.__tags

	def disableInFullBuild(self):
		"""Called by build file authors to configure this target to not build in 
		``all`` mode, so that it will only be built if the target name or tag is 
		specified on the command line (or if pulled in by a dependency). 
		
		This is useful for targets that perform operations such as configuring 
		developer IDEs which would not be needed in the main build, or for 
		expensive parts of the build that are often not needed such as generation 
		of installers. 
		
		See also `tag`. 
		
		:return: Returns the same target instance it was called on, to permit fluent calling. 
		"""
		self.__tags = list(set(self.__tags) - {'full'})
		init = getBuildInitializationContext()
		init.removeFromTags(self, ['full'])
		return self

	def clearTags(self):
		"""Called by build file authors to removes all tags other than ``all`` from this target. 
		
		See `tag`. 
		
		:return: Returns the same target instance it was called on, to permit fluent calling. 
		"""
		init = getBuildInitializationContext()
		init.removeFromTags(self, self.__tags)
		# 'full' is retained if present so the target still builds in an "all" build
		self.__tags = ['full'] if 'full' in self.__tags else []
		init.registerTags(self, self.__tags)
		return self

	def getOption(self, key, errorIfNone=True, errorIfEmptyString=True):
		""" Target classes can call this during `run` or `clean` to get the 
		resolved value of a specified option for this target, with optional 
		checking to give a friendly error message if the value is an empty string or None. 
		
		This is a high-level alternative to reading directly from `self.options`. 
		
		This method cannot be used while the build files are still being loaded, 
		only during the execution of the targets. 
		"""
		if hasattr(key, 'optionName'): key = key.optionName # it's an Option instance
		if key not in self.options: raise Exception('Target tried to access an option key that does not exist: %s'%key)
		v = self.options[key]
		if (errorIfNone and v == None) or (errorIfEmptyString and v == ''):
			raise BuildException('This target requires a value to be specified for option "%s" (see basetarget.option or setGlobalOption)'%key)
		return v

	def option(self, key, value):
		"""Called by build file authors to configure this target instance with an 
		override for an option value. 
		
		This allows target-specific overriding of options. If no override is provided, 
		the value set in `xpybuild.propertysupport.setGlobalOption` for the whole build 
		is used, or if that was not set then the default when the option was defined. 
		
		Use `self.options` or `getOption` to get resolved option values when implementing 
		a target class. 
		
		@param str|xpybuild.propertysupport.Option key: The name of a previously-defined 
			option. Usually this is a string literal, but you can also use the 
			`xpybuild.propertysupport.Option` instance if you prefer. 
		
		@param value: The value. If the value is a string and contains any property values 
			these will be expanded before the option value is passed to the target. 
			Use ``${{}`` to escape any literal ``{`` characters. 
		
		:return: Returns the same target instance it was called on, to permit fluent calling. 
		"""
		if hasattr(key, 'optionName'): key = key.optionName # it's an Option instance
		# stored unresolved; property expansion happens later during resolution
		self.__optionsTargetOverridesUnresolved[key] = value
		return self

	def openFile(self, context: xpybuild.buildcontext.BuildContext, path: str, mode='r', **kwargs):
		"""Target classes can call this from their `run` implementation to open a specified file, 
		using an encoding specified by the ``common.fileEncodingDecider`` option 
		(unless explicitly provided by ``encoding=``). 
		
		@param context: The context that was passed to run(). 
		@param path: The full absolute path to be opened. 
		@param mode: The file mode. 
		@keyword kwargs: Any additional arguments for the io.open() method can be specified here. 
		"""
		# binary modes must not get an encoding; text modes default to the decider's choice
		if 'b' not in mode and not kwargs.get('encoding'): kwargs['encoding'] = self.getOption('common.fileEncodingDecider')(context, path)
		return (openForWrite if 'w' in mode else io.open)(path, mode, **kwargs)

	def tags(self, *tags: str):
		"""Called by build file authors to append one or more tags to this target 
		to make groups of related targets easier to build (or just to provide a 
		shorter alias for the target on the command line). 
		
		@param tags: The tag, tags or list of tags to add to the target. 
		
		:return: Returns the same target instance it was called on, to permit fluent calling. 
		
		>>> BaseTarget('a',[]).tags('abc').getTags()
		<using test initialization context> <using test initialization context>
		['abc', 'full']
		>>> BaseTarget('a',[]).tags(['abc', 'def']).getTags()
		<using test initialization context> <using test initialization context>
		['abc', 'def', 'full']
		>>> BaseTarget('a',[]).tags('abc', 'def').tags('ghi').getTags()
		<using test initialization context> <using test initialization context> <using test initialization context>
		['ghi', 'abc', 'def', 'full']
		"""
		taglist = getStringList(list(tags))
		self.__tags = taglist + self.__tags
		assert sorted(list(set(self.__tags))) == sorted(list(self.__tags)) # check for duplicates
		init = getBuildInitializationContext()
		if init: init.registerTags(self, taglist) # init will be None during doctests
		return self

	def priority(self, priority: float):
		"""Called by build file authors to configure the priority of this target to 
		encourage it (and its dependencies) to be built earlier in the process. 
		
		The default priority is 0.0
		
		@param priority: a float representing the priority. Higher numbers will be built 
			first where possible. Cannot be negative. 
		
		:return: Returns the same target instance it was called on, to permit fluent calling. 
		"""
		if priority < 0.0:
			raise BuildException('Target priority cannot be set to a lower number than 0.0')
		self.__priority = priority
		return self

	def updateStampFile(self):
		""" .. private:: Not useful enough to be in the public API. 
		
		Assumes self.path is a stamp file that just needs creating / timestamp updating and does so 
		"""
		path = normLongPath(self.path)
		mkdir(os.path.dirname(path))
		# opening for write (even with no content) refreshes the file's timestamp
		with openForWrite(path, 'wb') as f:
			pass

	def getPriority(self):
		""" .. private:: Not exposed publicly as there is no public use case for this. 
		"""
		return self.__priority

	@staticmethod
	def targetNameToUniqueId(name: str) -> str:
		"""Convert a target name (containing unexpanded property values) 
		into a convenient unique identifier. 
		
		The resulting identifier is not an absolute path, and (unless very long) does not contain any directory 
		elements. This id is suitable for temporary filenames and directories etc 
		"""
		# remove chars that are not valid on unix/windows file systems (e.g. colon)
		x = re.sub(r'[^()+./\w-]+','_', name.replace('\\','/').replace('${','_').replace('{','_').replace('}','_').rstrip('/'))
		if len(x) < 256: x = x.replace('/','.') # avoid deeply nested directories in general
		return x

	class Options:
		""" Options for customizing the behaviour of all targets. To set an option on a specific target call 
		`xpybuild.basetarget.BaseTarget.option` or to set a global default use `xpybuild.propertysupport.setGlobalOption`. 
		"""
	
		failureRetries = defineOption("Target.failureRetries", 0)
		"""
		The "Target.failureRetries" option can be set on any target (or globally), and specifies how many times to retry 
		the target's build if it fails. The default is 0, which is recommended for normal developer builds. 
		
		There is an exponentially increasing backoff pause between each attempt - first 15s, then 30s, then 60s etc. 
		
		See `xpybuild.buildcommon.registerBuildLoadPostProcessor` which can be used to customize this option for targets 
		based on user-defined criteria such as target type. 
		"""
		
		failureRetriesInitialBackoffSecs = defineOption('Target.failureRetriesInitialBackoffSecs', 15) # undocumented as there should be no reason to change this
class FilteredArchiveContents(object):
	""" Object representing an archive to be passed to the Unpack target, with 
	support for filtering which files are included/excluded, and per-item 
	destination mapping. 
	"""
	
	# do NOT use pathset baseclass, because we need the target to handle it in a custom way
	# for both per-archived-file mapping and in-archive filtering, which would be wrecked by
	# the normalization stuff that pathset does; also the model is a bit different for
	# archive contents, so simplest just to keep it separate
	
	def __init__(self, archivePath, includes=None, excludes=None, destMapper=None):
		"""
		@param archivePath: The archive to unpack; either a string or a singleton PathSet

		@param destMapper: A functor that takes a (context, destPath) where destPath 
		is an archive-relative path (guaranteed to contain / not \\), and returns the 
		desired destination relative path string. 
		The functor should have a deterministic and 
		user-friendly __str__ implementation. 

		@param includes: a list of include patterns (if provided excludes all non-matching files)

		@param excludes: a list of exclude patterns (processed after includes)
		"""
		self.__path = PathSet(archivePath)
		self.__destMapper = destMapper
		self.__includes = flatten(includes)
		self.__excludes = flatten(excludes)
		self.__location = BuildFileLocation()
		# include/exclude patterns may contain unexpanded ${...} properties;
		# they are expanded lazily on first use in isIncluded (see __isResolved)
		self.__isResolved = False
	
	def getDependency(self):
		""" Return the dependency representing this archive (unexpanded and unresolved string, or PathSet). 
		"""
		return self.__path

	def getResolvedPath(self, context):
		""" Return the fully resolved archive path. 
		
		Raises if the PathSet does not resolve to exactly one path. 
		"""
		result = self.__path.resolve(context)
		if len(result) != 1: raise Exception('Invalid PathSet specified for FilteredArchiveContents, must resolve to exactly one archive: %s'%self.__path)
		return result[0]
	
	def isIncluded(self, context, path):
		""" Decides whether the specified path within the archive should be 
		unpacked, based on the include/exclude filters
		
		@param path: a relative path within the archive (must use forward slashes, 
		not backslashes)
		"""
		# optimization for the common case of no filtering at all
		if not self.__excludes and not self.__includes: return True
		if not self.__isResolved:
			# expand property values in the patterns exactly once, on first use
			self.__includes = flatten([context.expandPropertyValues(x, expandList=True) for x in self.__includes])
			self.__excludes = flatten([context.expandPropertyValues(x, expandList=True) for x in self.__excludes])
			self.__isResolved = True
		
		assert '\\' not in path
		try:
			path = path.lstrip('/')
			
			# first check if it matches an exclude
			if next( (True for e in self.__excludes if antGlobMatch(e, path)), False): return False
				
			if not self.__includes: # include everything
				return True
			else:
				# with includes specified, only matching paths are unpacked
				m = next( (i for i in self.__includes if antGlobMatch(i, path)), None)
				if m:
					return True
				else:
					return False
			
		except Exception as e:
			raise BuildException('FilteredArchiveContents error for %s'%(self), causedBy=True, location=self.__location)
	
	def hasIncludeExcludeFilters(self):
		# returns truthy if any include or exclude patterns were specified
		return self.__includes or self.__excludes
	
	def mapDestPath(self, context, path):
		# applies the destMapper functor (if any) to an archive-relative path,
		# preserving a trailing slash so directory paths stay directory paths
		if not self.__destMapper: return path
		x = self.__destMapper(context, path.replace('\\','/'))
		if isDirPath(path) and not isDirPath(x): x += '/'
		return x
	
	def __repr__(self):
		""" Returns a string including this class name, the archive path, the destination prefix 
		and any includes/excludes. 
		"""
		return ('FilteredArchiveContents(%s, includes=%s, excludes=%s, destmapper=%s)'%(
			self.__path, self.__includes, self.__excludes, 'None' if not self.__destMapper else self.__destMapper.__name__)).replace('\'','"')
class Javac(BaseTarget):
	""" Compile Java classes to a directory (without creating a ``.jar``).
	
	Example usage:: 
	
		Javac('${OUTPUT_DIR}/myclasses/', 
			# FindPaths walks a directory tree, supporting complex ant-style globbing patterns for include/exclude
			compile=[
				FindPaths('./src/', excludes=['**/VersionConstants.java']), 
				'${BUILD_WORK_DIR}/filtered-java-src/VersionConstants.java',
			],
			
			# DirBasedPathSet statically lists dependent paths under a directory
			classpath=[DirBasedPathSet('${MY_DEPENDENT_LIBRARY_DIR}/', 'mydep-api.jar', 'mydep-core.jar')],
		)
	
	The compilation can be customized via the following options, settable with 
	``Javac(...).option(key, value)`` or `xpybuild.propertysupport.setGlobalOption()`: 
		
		- ``javac.warningsAsErrors: bool`` Make the build fail if any warnings are detected. 
		- ``javac.debug = False`` Include debug information (line numbers) in the compiled ``.class`` files. 
		- ``javac.encoding = "ASCII"`` The character encoding for ``.java`` source files. 
		- ``javac.source = ""`` The ``.java`` source compliance level. 
		- ``javac.target = ""`` The ``.class`` compliance level. 
		- ``javac.options = []`` A list of extra options to pass to ``javac``. 
		- ``javac.logs = "${BUILD_WORK_DIR}/javac_logs"`` The directory in which errors/warnings from ``javac`` will be written. 
		- ``javac.outputHandlerFactory = JavacProcessOutputHandler`` The class used to parse and handle error/warning messages. 
	
	"""
	compile = None
	classpath = None

	def __init__(self, output, compile, classpath, options=None):
		""" 
		@param output: output dir for class files

		@param compile: PathSet (or list) of things to compile

		@param classpath: PathSet (or list) of things to be on the classpath

		@param options: [DEPRECATED - use .option() instead]
		"""
		# only .java files are passed to the compiler, whatever else the PathSet matches
		self.compile = FilteredPathSet(_isJavaFile, PathSet(compile))
		self.classpath = PathSet(classpath)
		BaseTarget.__init__(self, output, [self.compile, self.classpath])
		# legacy options= dict is translated into per-target option overrides
		for optionKey, optionValue in (options or {}).items():
			self.option(optionKey, optionValue)

	def run(self, context):
		# the compiler needs the output directory to exist up-front
		mkdir(self.path)

		# entries are sorted within each PathSet (for determinism), while the original
		# ordering of the PathSet elements themselves is retained
		resolvedClasspath = os.pathsep.join(self.classpath.resolve(context))

		# compile everything, writing compiler errors/warnings under javac.logs
		logDir = self.getOption('javac.logs')
		mkdir(logDir)
		javac(self.path, self.compile.resolve(context), resolvedClasspath, options=self.options, 
			logbasename=logDir+'/'+targetNameToUniqueId(self.name), 
			targetname=self.name, workDir=self.workDir)

	def getHashableImplicitInputs(self, context):
		# a change in any javac-related option (or java.home) must trigger a rebuild;
		# falsey option values are deliberately omitted
		optionLines = ['option: %s = "%s"'%(k,v) for (k,v) in self.options.items() 
			if v and (k.startswith('javac.') or k == 'java.home')]
		return super(Javac, self).getHashableImplicitInputs(context) + sorted(optionLines)
class Link(BaseTarget):
	""" Target that links object files (typically generated by `Cpp` or `C`) into 
	an executable or library binary. 
	"""
	
	def __init__(self, bin, objects, libs=None, libpaths=None, shared=False, options=None, flags=None, dependencies=None):
		""" 
		@param bin: the output binary. See L{exename}, L{libname}, L{staticlibname}. 

		@param objects: a (list of) input object

		@param libs: a (list of) libraries linked against (optional) in platform-neutral format. 
		Can include list properties like '${FOO_LIB_NAMES[]}'. 

		@param libpaths: a (list of) additional library search directories (optional)

		@param shared: if true compiles to a shared object (.dll or .so) (optional, defaults to false)

		@param flags: a list of additional linker flags

		@param options: [DEPRECATED - use .option() instead]

		@param dependencies: a list of additional dependencies (targets or files)
		"""
		self.objects = PathSet(objects)
		self.libs = libs or []
		self.libpaths = PathSet(libpaths or [])
		self.shared = shared
		self.flags = flags or []
		BaseTarget.__init__(self, bin, PathSet(self.objects, (dependencies or [])))
		# legacy options= dict is translated into per-target option overrides
		for optionKey, optionValue in (options or {}).items():
			self.option(optionKey, optionValue)
		self.tags('native')

	def run(self, context):
		options = self.options
		mkdir(os.path.dirname(self.path))

		# library names come from both this target and the global native.libs option;
		# each entry may expand to a list property, and every name is stripped
		libNames = flatten([
			(libName.strip() for libName in context.expandPropertyValues(libEntry, expandList=True)) 
			for libEntry in self.libs + options['native.libs'] if libEntry])

		# search path is this target's libpaths followed by the global native.libpaths
		libSearchDirs = flatten(
			self.libpaths.resolve(context) + 
			[context.expandPropertyValues(dirEntry, expandList=True) for dirEntry in options['native.libpaths']])

		options['native.compilers'].linker.link(context, 
			output=self.path, 
			options=options, 
			flags=options['native.link.flags'] + self.flags, 
			shared=self.shared, 
			src=self.objects.resolve(context), 
			libs=libNames, 
			libdirs=libSearchDirs)

	def getHashableImplicitInputs(self, context):
		# linker inputs that aren't file dependencies must still invalidate the target
		implicitInputs = super(Link, self).getHashableImplicitInputs(context)
		options = self.options
		implicitInputs.append('libs: ' + context.expandPropertyValues(str(self.libs + options['native.libs'])))
		implicitInputs.append('libpaths: ' + context.expandPropertyValues(str(self.libpaths)))
		implicitInputs.append('native.libpaths: %s' % options['native.libpaths'])
		implicitInputs.append('shared: %s, flags=%s' % (self.shared, self.flags))
		return implicitInputs
class Jar(BaseTarget):
	""" Create a jar, first compiling some Java classes, then packing it all up as a ``.jar``. 
	
	Example usage:: 
	
		Jar('${OUTPUT_DIR}/myapp.jar', 
			# FindPaths walks a directory tree, supporting complex ant-style globbing patterns for include/exclude
			compile=[
				FindPaths('./src/', excludes=['**/VersionConstants.java']), 
				'${BUILD_WORK_DIR}/filtered-java-src/VersionConstants.java',
			],
			
			# DirBasedPathSet statically lists dependent paths under a directory
			classpath=[DirBasedPathSet('${MY_DEPENDENT_LIBRARY_DIR}/', 'mydep-api.jar', 'mydep-core.jar')],
			
			# Specify Jar-specific key/values for the MANIFEST.MF (in addition to any set globally via options)
			manifest={'Implementation-Title':'My Amazing Java Application'}, 
			
			package=FindPaths('resources/', includes='**/*.properties'),
		)

		setGlobalOption('jar.manifest.defaults', {'Implementation-Version': '${APP_VERSION}', 'Implementation-Vendor': 'My Company'})
	
	In addition to the options listed on the `Javac` target, the following options can be set when 
	creating a Jar, using ``Jar(...).option(key, value)`` or `xpybuild.propertysupport.setGlobalOption()`: 
		
		- ``jar.manifest.defaults = {}`` Default key/value pairs (e.g. version number) to include in the ``MANIFEST.MF`` of every jar. 
		- ``jar.manifest.classpathAppend = []`` Add additional classpath entries to the ``MANIFEST.MF`` which are needed at runtime but not during compilation. 
		- ``jar.options = []`` A list of extra options to pass to ``jar``. 
	
	@param jar: path to jar to create. 

	@param compile: PathSet (or list) of things to compile. 

	@param classpath: PathSet (or list) of things to be on the classpath; 
		destination mapping indicates how they will appear in the manifest. 

	@param manifest: Typically a map of ``MANIFEST.MF`` entries (can be empty) such as:: 
	
			manifest={'Implementation-Title':'My Amazing Java Application'}, 
		
		Alternatively, specify a string to get the manifest from a file, or ``None`` to disable 
		manifest generation and just produce a normal zip. 

	@param options: (deprecated - use ``.option()`` instead). 

	@param package: PathSet (or list) of other files to include in the jar; 
		destination mapping indicates where they will appear in the jar. 

	@param preserveManifestFormatting: an advanced option that prevents the jar tool from 
		reformatting the specified manifest file to comply with Java conventions 
		(also prevents manifest merging if jar already exists). 
	"""
	compile = None
	classpath = None
	package = None
	manifest = None
	def __init__(self, jar, compile, classpath, manifest, options=None, package=None, preserveManifestFormatting=False):
		# only .java files are compiled; compile=None/empty disables compilation entirely
		self.compile = FilteredPathSet(_isJavaFile, PathSet(compile)) if compile else None
			
		self.classpath = PathSet(classpath)
		
		self.package = PathSet(package)
		self.manifest = manifest
		# a string manifest is a file path and therefore also a dependency
		BaseTarget.__init__(self, jar, [self.compile,self.classpath,self.package, 
			manifest if isinstance(manifest, str) else None])
			
		for k,v in (options or {}).items(): self.option(k, v)
		self.preserveManifestFormatting = preserveManifestFormatting

	def run(self, context):
		options = self.options

		# make sure temp dir exists
		mkdir(self.workDir)

		classes = os.path.join(self.workDir, "classes") # output dir for classes
		
		# create the classpath, sorting within PathSet (for determinism), but retaining original order of 
		# PathSet elements in the list
		classpath = os.pathsep.join(self.classpath.resolve(context)) 

		# compile everything
		mkdir(classes) # (need this for assembling other files to package later on, even if we don't do any javac)
		if self.compile:
			mkdir(self.getOption('javac.logs'))
			javac(classes, self.compile.resolve(context), classpath, options=options, 
				logbasename=options.get('javac.logs')+'/'+targetNameToUniqueId(self.name), 
				targetname=self.name, workDir=self.workDir)

		manifest = os.path.join(self.workDir, "MANIFEST.MF") # manifest file
	
		if isinstance(self.manifest, str):
			# a file path: use that file directly rather than generating one
			manifest = context.getFullPath(self.manifest, self.baseDir)
		elif self.manifest == None:
			manifest = None
		else: # generate one
			# rewrite property values in the manifest
			manifest_entries = {}
			for i in self.manifest:
				manifest_entries[i] = context.expandPropertyValues(self.manifest[i])
	
			# determine classpath for manifest
			classpath_entries = []
			
			if "Class-path" not in manifest_entries: # assuming it wasn't hardcoded, set it here
				for src, dest in self.classpath.resolveWithDestinations(context):
					# we definitely do want to support use of ".." in destinations here, it can be very useful
					classpath_entries.append(dest)
				assert isinstance(options['jar.manifest.classpathAppend'], list), options['jar.manifest.classpathAppend'] # must not be a string
				classpath_entries.extend(options['jar.manifest.classpathAppend'] or [])
				
				# need to always use / not \ for these to be valid
				classpath_entries = [p.replace(os.path.sep, '/').replace('\\', '/') for p in classpath_entries if p]
				
				if classpath_entries:
					manifest_entries["Class-path"] = " ".join(classpath_entries) # include the classpath from here
			if not manifest_entries.get('Class-path'): # suppress this element entirely if not needed, otherwise there would be no way to have an empty classpath
				manifest_entries.pop('Class-path','')
			
			# create the manifest file
			create_manifest(manifest, manifest_entries, options=options)

		# copy in the additional things to include
		for (src, dest) in self.package.resolveWithDestinations(context):
			if '..' in dest: raise Exception('This target does not permit packaged destination paths to contain ".." relative path expressions')
			mkdir(os.path.dirname(os.path.join(classes, dest)))
			destpath = normLongPath(classes+'/'+dest)
			srcpath = normLongPath(src)

			if os.path.isdir(srcpath): mkdir(destpath)
			else: 
				with open(srcpath, 'rb') as s:
					with openForWrite(destpath, 'wb') as d:
						d.write(s.read())

		# create the jar
		jar(self.path, manifest, classes, options=options, preserveManifestFormatting=self.preserveManifestFormatting, 
			outputHandler=ProcessOutputHandler.create('jar', treatStdErrAsErrors=False, options=options))

	def getHashableImplicitInputs(self, context):
		# changes in the manifest text should cause a rebuild
		# for now, don't bother factoring global jar.manifest.defaults option 
		# in here (it'll almost never change anyway)
		return super(Jar, self).getHashableImplicitInputs(context) + [
			'manifest = '+(context.expandPropertyValues(str(self.manifest)) if not isinstance(self.manifest,dict) else
				str({context.expandPropertyValues(k):context.expandPropertyValues(v) for k,v in self.manifest.items()})), 
			'classpath = '+context.expandPropertyValues(str(self.classpath)), # because classpath destinations affect manifest
			]+(['preserveManifestFormatting = true'] if self.preserveManifestFormatting else [])\
			+sorted(['option: %s = "%s"'%(k,v) for (k,v) in self.options.items() if v and (k.startswith('javac.') or k.startswith('jar.') or k == 'java.home')])
class Cpp(BaseTarget):
	""" Target that compiles a C++ source file to a single object file. 
	"""
	
	# counts how many makedepend recalculations have happened this build, so only
	# the first few are logged at critical level
	__rebuild_makedepend_count = 0

	def __init__(self, object, source, includes=None, flags=None, dependencies=None, options=None):
		"""
		@param object: the object file to generate; see L{objectname}.

		@param source: a (list of) source files

		@param includes: a (list of) include directories, as strings or PathSets, each 
			of which will have a trailing slash added if not already present; 
			the directories in the `native.include` option are also added.
			
			If this target depends on some include files that are generated by another target, 
			make sure it's a directory target since all include directories must either 
			exist before the build starts or be targets themselves. 
			If specifying a subdirectory of a generated directory, do this using DirGeneratedByTarget. 
			If you have a composite generated directory made up of several 
			file targets, wrap them in TargetsWithinDir before passing as the includes parameter. 
		
		@param flags: a list of compiler flags in addition to those in the 
			`native.cxx.flags`/`native.c.flags` option. 

		@param dependencies: a list of additional dependencies that need to be built 
			before this target. Usually this is not needed. 

		@param options: DEPRECATED; use .option() instead
		"""
		self.source = PathSet(source)
		
		# currently we don't bother adding the native include dirs here as they're probably always going to be there
		# for time being, explicitly cope with missing slashes, though really build authors should avoid this
		self.includes = _AddTrailingDirectorySlashesPathSet(PathSet(includes))
		self.flags = flatten([flags]) or []
		
		# nb: do not include any individual header files in main target deps even if we've already 
		# got cached makedepends from a previous build, 
		# because it's possible they are no longer needed and no longer exist (and we don't want spurious 
		# build failures); this also has the advantage that it doesn't enlarge and slow down the stat cache 
		# during dep resolution of non-native targets, since it'll only be populated once we're into 
		# the up to date checking phase
		BaseTarget.__init__(self, object, PathSet([dependencies, source, self.includes]))
		
		for k,v in (options or {}).items(): self.option(k, v)
		self.tags('native')

	def run(self, context):
		options = self.options
		mkdir(os.path.dirname(self.path))
		options['native.compilers'].cxxcompiler.compile(context, output=self.path,
				options=options, 
				flags=self._getCompilerFlags(context), 
				src=self.source.resolve(context),
				includes=self._getIncludeDirs(context)
				)

	def clean(self, context):
		# also remove this target's cached makedepends file, then do the normal clean
		deleteFile(self._getMakeDependsFile(context))
		BaseTarget.clean(self, context)

	def _getMakeDependsFile(self, context):
		# can only be called after target resolution, when workdir is set
		return toLongPathSafe(context.getPropertyValue("BUILD_WORK_DIR")+'/targets/makedepend-cache/'+os.path.basename(self.workDir)+'.makedepend')

	def _getCompilerFlags(self, context):
		# global native.cxx.flags followed by this target's flags (property-expanded, space-split)
		return flatten(self.getOption('native.cxx.flags')+[context.expandPropertyValues(x).split(' ') for x in self.flags])

	def _getIncludeDirs(self, context):
		# this target's include dirs followed by the global native.include option dirs
		return self.includes.resolve(context)+flatten([context.expandPropertyValues(x, expandList=True) for x in self.getOption('native.include')])

	def getHashableImplicitInputs(self, context):
		r = super(Cpp, self).getHashableImplicitInputs(context)
		
		r.append('compiler flags: %s' % self._getCompilerFlags(context))
		
		# this will provide a quick way to notice changes such as TP library version number changed etc
		# and also is the only way we'll detect the need to rebuild for includes that are regex'd out
		includedirs = self._getIncludeDirs(context)
		for path in includedirs:
			r.append('include dir: '+os.path.normcase(path))
		
		# This is called exactly once during up-to-date checking OR run, which means we will have generated 
		# all target dependencies (e.g. include files, source files etc) by this point
		
		# Since non-target include files won't be known until this point, we need to perform up-to-date-ness 
		# checking for them here (rather than in targetwrapper as normally happens for dependencies). 
		
		startt = time.time()
		try:
			targetmtime = os.stat(self.path).st_mtime # must NOT use getstat cache, don't want to pollute it with non-existence
		except os.error: # file doesn't exist
			targetmtime = 0

		makedependsfile = self._getMakeDependsFile(context)
		if targetmtime != 0 and not os.path.exists(makedependsfile): # no value in using stat cache for this, not used elsewhere
			targetmtime = 0 # treat the same as if target itself didn't exist
		
		newestFile, newestTime = None, 0 # keep track of the newest source or include file

		if IS_WINDOWS: # normalizes case for this OS but not slashes (handy for regex matching)
			def xpybuild_normcase(path):
				return path.lower()
		else:
			def xpybuild_normcase(path):
				return path
		
		# changes in these options must cause us to re-execute makedepends
		ignoreregex = self.options['native.include.upToDateCheckIgnoreRegex']
		if ignoreregex: ignoreregex = xpybuild_normcase(ignoreregex)
		r.append('option native.include.upToDateCheckIgnoreRegex=%s'%ignoreregex)

		makedependsoptions = "upToDateCheckIgnoreRegex='%s', upToDateCheckIgnoreSystemHeaders=%s, flags=%s" % (
			ignoreregex, 
			self.options['native.include.upToDateCheckIgnoreSystemHeaders'], 
			self._getCompilerFlags(context), 
			)

		# first, figure out if we need to (re-)run makedepends or can use the cached info from the last build
		runmakedepends = False
		
		if targetmtime == 0: runmakedepends = True
		
		alreadychecked = set() # paths that we've already checked the date of
		sourcepaths = []
		for path, _ in self.source.resolveWithDestinations(context):
			mtime = cached_getmtime(path)
			alreadychecked.add(path)
			sourcepaths.append(path)
			if mtime > newestTime: newestFile, newestTime = path, mtime
		if newestTime > targetmtime: runmakedepends = True
		
		if (not runmakedepends) and os.path.exists(makedependsfile): # (no point using stat cache for this file)
			# read file from last time; if any of the transitive dependencies have changed, 
			# we should run makedepends again to update them
			with io.open(makedependsfile, 'r', encoding='utf-8') as f:
				flags = f.readline().strip()
				if flags != makedependsoptions:
					runmakedepends = True
				else:
					for path in f:
						path = path.strip()
						pathstat = cached_stat(path, errorIfMissing=False)
						if pathstat is False:
							# file doesn't exist - must rebuild
							runmakedepends = True
							# NOTE(review): this logs newestFile, but the file found to be missing here 
							# is 'path' - looks like it should log 'path' instead; confirm and fix separately
							(self.log.critical if Cpp.__rebuild_makedepend_count <= 5 else self.log.info)(
								'Recalculating C/C++ dependencies of %s as dependency no longer exists: %s', self, newestFile)
							break
						mtime = pathstat.st_mtime
						alreadychecked.add(path)
						if mtime > newestTime: newestFile, newestTime = path, mtime
					if newestTime > targetmtime: runmakedepends = True
		
		# (re-)run makedepends
		if runmakedepends:
			# only bother to log if we're recalculating
			if targetmtime != 0: 
				Cpp.__rebuild_makedepend_count += 1 # log the first few at crit
				(self.log.critical if Cpp.__rebuild_makedepend_count <= 5 else self.log.info)(
					'Recalculating C/C++ dependencies of %s; most recently modified dependent file is %s at %s', 
					self, newestFile, datetime.datetime.fromtimestamp(newestTime).strftime('%a %Y-%m-%d %H:%M:%S'))
			
			try:
				makedependsoutput = self.options['native.compilers'].dependencies.depends(context=context, 
					src=sourcepaths, 
					options=self.options, 
					flags=self._getCompilerFlags(context), 
					includes=includedirs,
					)
			except Exception as ex:
				raise BuildException('Dependency resolution failed for %s' % (sourcepaths[0]), causedBy=True)
			
			# normalize case to avoid problems on windows, and strip out sources since we already checked them above
			makedependsoutput = [os.path.normcase(path) for path in makedependsoutput if path not in sourcepaths]
			makedependsoutput.sort()
			if ignoreregex:
				ignoreregex = re.compile(ignoreregex)
				# match against version of path with forward slashes because making a regex with backslashes is a pain and not cross-platform
				makedependsoutput = [path for path in makedependsoutput if not ignoreregex.match(path.replace(os.sep, '/'))]
			
			# find the newest time from these files; if this is same as previous makedepends, won't do anything
			for path in makedependsoutput:
				if path in alreadychecked: continue
				mtime = cached_getmtime(path)
				if mtime > newestTime: newestFile, newestTime = path, mtime
			
			# write out new makedepends file for next time
			mkdir(os.path.dirname(makedependsfile))
			assert '\n' not in makedependsoptions, makedependsoptions # sanity check
			with io.open(makedependsfile, 'w', encoding='utf-8') as f:
				f.write(makedependsoptions)
				f.write('\n')
				for path in makedependsoutput:
					f.write('%s\n'%path)
		
		# endif runmakedepends
		
		# include the newest timestamp as an implicit input, so that we'll rebuild if any include files have changed
		# no need to log this, as targetwrapper already logs differences in implicit inputs
		if newestFile is not None: 
			newestDateTime = datetime.datetime.fromtimestamp(newestTime)
			r.append('newest dependency was modified at %s.%03d: %s'%(
				newestDateTime.strftime('%a %Y-%m-%d %H:%M:%S'), 
				newestDateTime.microsecond/1000, 
				os.path.normcase(newestFile)))
		
		if time.time()-startt > 5: # this should usually be pretty quick, so if it takes a while it may indicate a real build file mistake
			self.log.warn('C/C++ dependency generation took a long time: %0.1f s to evaluate %s', time.time()-startt, self)
		
		return r
class SignJars(BaseTarget):
	""" Copy jars into a target directory and sign them with the supplied keystore, 
	optionally also updating their manifests. 
	
	Additional command line arguments can be passed to ``signjars`` using the 
	option ``jarsigner.options`` (default ``[]``). 
	"""
	def __init__(self, output, jars, keystore, alias=None, storepass=None, manifestDefaults=None):
		""" 
		@param output: The output directory in which to put the signed jars

		@param jars: The list (or PathSet) of input jars to copy and sign

		@param keystore: The path to the keystore

		@param alias: The alias for the keystore (optional)

		@param storepass: The password for the store file (optional)

		@param manifestDefaults: a dictionary of manifest entries to add to the existing 
		manifest.mf file of each jar before signing. Entries in this dictionary will be 
		ignored if the same entry is found in the original manifest.mf file already. 
		"""
		self.jars = PathSet(jars)
		self.keystore = keystore
		self.alias = alias
		self.storepass = storepass
		self.manifestDefaults = manifestDefaults
		BaseTarget.__init__(self, output, [self.jars, self.keystore])

	def run(self, context):
		"""Copy each input jar into the output directory, optionally merge extra
		manifest entries into it, then sign it with the configured keystore.

		@param context: the BuildContext used to resolve paths and properties.
		@raises BuildException: if manifest reading, jar updating or signing fails.
		"""
		self.keystore = context.expandPropertyValues(self.keystore)
		options = self.options

		mkdir(self.path)
		for src, dest in self.jars.resolveWithDestinations(context):
			if '..' in dest:
				# to avoid people abusing this to copy files outside the dest directory!
				raise Exception('This target does not permit destination paths to contain ".." relative path expressions')

			try:
				# copy the jar bytes and preserve its timestamps/permission bits
				with open(src, 'rb') as s:
					with openForWrite(os.path.join(self.path, dest), 'wb') as d:
						d.write(s.read())
				shutil.copystat(src, os.path.join(self.path, dest))

				# When we re-jar with the user specified manifest entries, jar will complain
				# about duplicate attributes IF the original MANIFEST.MF already has those entries.
				# This is happening for latest version of SL where Application-Name, Permission etc
				# were already there.
				#
				# The block of code below will first extract the original MANIFEST.MF from the source
				# jar file, read all manifest entry to a list. When constructing the new manifest entries,
				# make sure the old MANIFEST.MF doesn't have that entry before putting the new manifest entry
				# to the list. This will avoid the duplicate attribute error.
				#
				if self.manifestDefaults:
					lines = []

					# read each line of MANIFEST.MF of the original jar and put them in lines
					with zipfile.ZipFile(src, 'r') as zf:
						lst = zf.infolist()
						for zi in lst:
							fn = zi.filename
							if fn.lower().endswith('manifest.mf'):
								try:
									manifest_txt = zf.read(zi.filename).decode('utf-8', errors='strict')
								except Exception as e:
									raise BuildException('Failed reading the manifest file %s with exception:%s' % (fn, e))

								# if we have all manifest text, parse and save each line
								if manifest_txt:
									# CR LF | LF | CR can be there as line feed and hence the code below
									lines = manifest_txt.replace('\r\n', '\n').replace('\r','\n').split('\n')

								# done
								break

					original_entries = collections.OrderedDict() # to ensure we don't overwrite/duplicate these
					# populate the manifest_entries with original values from original manifest
					for l in lines:
						if ':' in l and not l.startswith(' '): # ignore continuation lines etc because keys are all we care about
							key,value = l.split(':', 1)
							original_entries[key] = value.strip()

					# build up a list of the new manifest entries (will be merged into any existing manifest by jar)
					manifest_entries = collections.OrderedDict()
					for i in self.manifestDefaults:
						# if entry isn't there yet, add to the list
						if i not in original_entries:
							manifest_entries[i] = context.expandPropertyValues(self.manifestDefaults[i])

					# create the manifest file
					# we want to add the manifest entries explicitly specified here but
					# NOT the 'default' manifest entries we usually add, since these
					# are likely to have been set already, and we do not want duplicates
					mkdir(self.workDir)
					manifest = os.path.join(self.workDir, "MANIFEST.MF") # manifest file

					options = dict(options) # copy so we don't mutate the shared target options
					options['jar.manifest.defaults'] = {}
					create_manifest(manifest, manifest_entries, options)

					# update the EXISTING jar file with the new manifest entries, which will be merged into
					# existing manifest by the jar tool
					jar(os.path.join(self.path, dest), manifest, None, options, update=True)

				signjar(os.path.join(self.path, dest), self.keystore, options, alias=self.alias, storepass=self.storepass,
					outputHandler=ProcessOutputHandler.create('signjars', treatStdErrAsErrors=False, options=options))
			except BuildException as e:
				# include the jar name for context; causedBy=True preserves the underlying
				# stack trace (consistent with how other targets in this file re-raise)
				raise BuildException('Error processing %s: %s'%(os.path.basename(dest), e), causedBy=True)
def __init__(self, dest, archives): 
	"""
	@param dest: the output directory (ending with a "/"). Never specify a dest directory that 
	is also written to by another target (e.g. do not specify a build 'output' directory here). 

	@param archives: the input archives to be unpacked, which may be any combination of 
	strings, PathSets, FilteredArchiveContents and lists of these. 
	If these PathSets include mapping information, this will be used to define where 
	(under the dest directory) each file from within that archive is copied (but cannot 
	be used to change the archive-relative path of each item). 
	For advanced cases, FilteredArchiveContents can be used to provide customized mapping and 
	filtering of the archive contents, including manipulation of the destinations encoded 
	within the archive itself. 
	"""
	if not dest.endswith('/'):
		raise BuildException('Unpack target destination must be a directory (ending with "/"), not: "%s"'%dest)

	# NB: we could also support copying in non-archived files into the directory future too

	# keep the caller-specified ordering, since later archives may legitimately
	# overwrite files written by earlier ones
	normalized = []
	for item in flatten(archives):
		if isinstance(item, (BasePathSet, FilteredArchiveContents)):
			normalized.append(item)
		else:
			normalized.append(PathSet(item))

	# FilteredArchiveContents wrappers expose their underlying dependency explicitly
	deps = []
	for item in normalized:
		deps.append(item.getDependency() if isinstance(item, FilteredArchiveContents) else item)

	BaseTarget.__init__(self, dest, deps)
	self.archives = normalized
class CSharp(BaseTarget): 
	""" Compile C# files to produce an executable or library file. 
	""" 
	# class-level defaults; overwritten per-instance in __init__
	compile = None
	main = None
	libs = None
	def __init__(self, output, compile, main=None, libs=None, flags=None, dependencies=None, resources=None): 
		""" 
		@param output: the resulting .exe or .dll 
		@param compile: the input PathSet, path or list of .cs file(s) 
		@param main: The main class to execute if an exe is to be built. 
		If this is set then an executable will be created. 
		Otherwise this target will build a library. 
		@param libs: a list of input libraries (or a PathSet) 
		"""
		# only .cs/.NET files from the input are compiled
		self.compile = FilteredPathSet(_isDotNetFile, PathSet(compile)) 
		self.main = main
		self.flags = flags or [] 
		self.libs = PathSet(libs or []) 
		# resources is a list of (file, id) pairs, each passed to csc as -resource:file,id
		self.resources = resources or [] 
		# resource files are dependencies too (ids are not paths so are excluded)
		BaseTarget.__init__(self, output, [ self.compile, self.libs, [x for (x, y) in self.resources], dependencies or [] ]) 
		self.tags('c#') 
	def getHashableImplicitInputs(self, context): 
		# main is part of the generated command line but not a file dependency,
		# so it must be included in the implicit inputs to trigger rebuilds when changed
		return super(CSharp, self).getHashableImplicitInputs(context) + ( ['main = %s' % context.expandPropertyValues( ('%s' % self.main))] if self.main else []) 
	def run(self, context): 
		"""Build the csc command line (output, references, target kind, resources,
		options, flags, sources - in that order) and invoke the compiler."""
		libs = self.libs.resolve(context) 
		libnames = [os.path.basename(x) for x in libs] 
		libpaths = [os.path.dirname(x) for x in libs] 
		flags = [context.expandPropertyValues(x) for x in self.flags] 
		args = [self.getOption('csharp.compiler'), "-out:" + self.path] 
		if libnames: 
			# pass library basenames via -reference and their directories via -lib
			args.extend([ "-reference:" + ",".join(libnames), "-lib:" + ",".join(libpaths) ]) 
		if self.main: 
			# main class set => build an executable rather than a library
			args.extend(["-target:exe", "-main:" + self.main]) 
		else: 
			args.append("-target:library") 
		for (file, id) in self.resources: 
			args.append('-resource:%s,%s' % (context.expandPropertyValues(file), context.expandPropertyValues(id))) 
		args.extend(self.options['csharp.options']) 
		args.extend(flags) 
		args.extend(self.compile.resolve(context)) 
		mkdir(os.path.dirname(self.path)) 
		call(args, outputHandler=self.getOption('csharp.outputHandlerFactory')( 'csc', False, options=self.options), timeout=self.options['process.timeout'])
class CustomCommand(BaseTarget):
	"""
	A custom target that builds a single file or directory of content by running one or 
	more command line processes. 
	
	The command line *must* not reference any generated paths unless they are 
	explicitly listed in deps. 
	
	Supported target options include:
	
	  - ``.option("process.timeout")`` to control the maximum number of seconds the command can 
	    run before being cancelled. 
	  - ``.option("common.processOutputEncodingDecider")`` to determine the encoding 
	    used for reading stdout/err (see `xpybuild.utils.process.defaultProcessOutputEncodingDecider`). 
	  - ``.option("CustomCommand.outputHandlerFactory")`` to replace the default behaviour 
	    for detecting errors (which is just based on zero/non-zero exit code) and logging stdout/err with a 
	    custom `xpybuild.utils.outputhandler.ProcessOutputHandler`. The additional options described on 
	    `ProcessOutputHandler` can also be used with this target. 

	@param target: the file or directory to be built. Will be cleaned, and its parent dir created, 
	before target runs. 
	
	@param dependencies: an optional list of dependencies; it is essential that ALL dependencies required by 
	this command and generated by the build process are explicitly listed here, in addition to any 
	files/directories used by this command that might change between builds. 
	
	@param list[obj]|Callable->list command: A list of command line arguments to execute one process. 
	(see also the ``commands`` parameter which can be used to execute multiple processes). 
	
	Alternatively, the list can be constructed dynamically by passing a function with signature 
	``(resolvedTargetDirPath: str, resolvedDepsList: list, context: xpybuild.buildcontext.BuildContext) -> list`` 
	(where ``resolvedDepsList`` is an ordered, flattened list of resolved paths from ``deps``). 
	
	Each argument in the list of arguments may be: 
	
		- a string (which will be run through expandPropertyValues prior to execution); 
		  must not be used for representing arguments that are paths
		
		- a `PathSet` (which must resolve to exactly one path - see `joinPaths` 
		  property functor if multiple paths are required). Any PathSets used in 
		  the arguments should usually be explicitly listed in dependencies too, 
		  especially if they are generated by another part of this build. 
		
		- a property functor such as joinPaths (useful for constructing Java 
		  classpaths), basename, etc
		
		- an arbitrary function taking a single context argument
		
		- `CustomCommand.TARGET` - a special value that is resolved to the 
		  output path of this target
		
		- `CustomCommand.DEPENDENCIES` - a special value that is resolved to 
		  a list of this target's dependencies
		
		- [deprecated] a ResolvePath(path) object, indicating a path that should 
		  be resolved and resolved at execution time (this is equivalent 
		  to using a PathSet, which is probably a better approach). 
	
	Command lines MUST NOT depend in any way on the current source or output directory, always use 
	a PathSet wrapper around such paths. 
	
	@param list[list[obj]] commands: A list of commands to run to generate this target, each of which is itself 
	represented as a list of command line arguments (as described above under ``command``). 
	Note that you must specify either ``command=`` or ``commands=`` but not both. 
	
	Commands listed here are executed in sequence. Unless you have multiple commands that need to write to the 
	same large output directory it is usually better to use separate `CustomCommand` or `CustomCommandWithCopy` 
	instances so they can execute in parallel for a faster build. This parameter was added in version 4.0. 
	
	@param cwd: the working directory to run it from (almost always this should be 
	left blank, meaning use output dir)
	
	@param env: a dictionary of environment overrides, or a function that 
	returns one given a context. Values in the dictionary will 
	be expanded using the same rules as for the command (see above). 
	Consider using `xpybuild.propertysupport.joinPaths` for environment variables 
	containing a list of paths. 
	
	@param redirectStdOutToTarget: usually, any stdout is treated as logging 
	and the command is assumed to create the target file itself, but 
	set this to True for commands where the target file contents are 
	generated by the stdout of the command being executed. 
	
	@param stdout: usually a unique name is auto-generated for this target and suffixed with ``.out``, but 
	set this parameter if you need to send output to a specific location. Ignored if the 
	``CustomCommand.outputHandlerFactory`` option is set. 
	
	@param stderr: usually a unique name is auto-generated this target and suffixed with ``.err``, but 
	set this parameter if you need to send output to a specific location. Ignored if the 
	``CustomCommand.outputHandlerFactory`` option is set. 
	"""
	
	class __CustomCommandSentinel(object):
		# marker objects used to identify special argument values within a command line
		def __init__(self, name): self.name = name
		def __repr__(self): return 'CustomCommand.' + self.name # fixed: was bare 'name', a NameError
	
	TARGET = __CustomCommandSentinel('TARGET')
	""" A special value that can be used in the ``command`` argument and is resolved to the output path of this target. """
	
	DEPENDENCIES = __CustomCommandSentinel('DEPENDENCIES')
	""" A special value that can be used in the ``command`` argument and is resolved to a list of this target's dependencies. """
	
	def __init__(self, target, command=None, dependencies=None, cwd=None, redirectStdOutToTarget=False, env=None, stdout=None, stderr=None, commands=None):
		# see class docstring for parameter documentation
		# NB: default for dependencies changed from a (shared) mutable [] to None; behaviour is identical
		if dependencies is None: dependencies = []
		BaseTarget.__init__(self, target, dependencies)
		assert not (command and commands), 'Cannot specify both command= and commands='
		self.command = command
		self.commands = commands
		self.cwd = cwd
		self.deps = PathSet(dependencies)
		self.redirectStdOutToTarget = redirectStdOutToTarget
		if redirectStdOutToTarget and isDirPath(target):
			raise BuildException('Cannot set redirectStdOutToTarget and specify a directory for the target name - please specify a file instead: %s' % target)
		self.env = env
		self.stdout, self.stderr = stdout, stderr
		if stdout and redirectStdOutToTarget:
			raise BuildException('Cannot set both redirectStdOutToTarget and stdout')
	
	def _resolveItem(self, x, context):
		# resolve a single command-line argument to a string (or list, for DEPENDENCIES);
		# see the class docstring for the supported argument types
		if x == self.DEPENDENCIES: return self.deps.resolve(context)
		if x == self.TARGET: x = self.path
		if isinstance(x, str): return context.expandPropertyValues(x)
		if hasattr(x, 'resolveToString'): return x.resolveToString(context) # supports Composables too
		if isinstance(x, BasePathSet):
			result = x.resolve(context)
			if len(result) != 1:
				raise BuildException('PathSet for custom command must resolve to exactly one path not %d (or use joinPaths): %s' % (len(result), x))
			return result[0]
		if isinstance(x, ResolvePath): return x.resolve(context, self.baseDir)
		if callable(x): return x(context)
		raise Exception('Unknown custom command input type %s: %s' % (x.__class__.__name__, x))
	
	def _resolveCommands(self, context):
		# returns a list of fully-resolved command lines (each a list of strings, with
		# the executable path made absolute and normalized)
		# if we wanted we could allow commands= to itself be a list, but not gonna bother for now
		resolved = []
		for c in self.commands or [self.command]:
			if callable(c): c = c(self.path, self.deps.resolve(context), context)
			assert not isinstance(c, str) # must be a list of strings, not a string
			c = flatten([self._resolveItem(x, context) for x in c])
			c[0] = normPath(os.path.abspath(c[0]))
			resolved.append(c)
		return resolved
	
	def getHashableImplicitInputs(self, context):
		# the resolved command lines are implicit inputs so that changing them triggers a rebuild
		return super(CustomCommand, self).getHashableImplicitInputs(context) + flatten(self._resolveCommands(context))
	
	def run(self, context):
		"""Execute each resolved command in sequence, capturing stdout/err to files, 
		then sanity-check that the target was actually created. 
		
		@raises BuildException: if a command fails, times out, or does not produce the target. 
		"""
		if self.cwd: self.cwd = context.getFullPath(self.cwd, self.baseDir)
		if isDirPath(self.path):
			mkdir(self.path)
			cwd = self.cwd or self.path
		else:
			mkdir(os.path.dirname(self.path))
			cwd = self.cwd or self.workDir
		mkdir(self.workDir)
		
		commands = self._resolveCommands(context)
		assert len(commands) > 0, 'No commands were specified to run in this target!'
		if len(commands) > 1:
			assert not (self.redirectStdOutToTarget or self.stdout or self.stderr), 'Invalid argument was specified for multiple commands mode'
		
		cmdindex = 0
		for cmd in commands:
			cmdindex += 1
			
			# this location is a lot easier to find than the target's workdir
			logbasename = os.path.normpath(context.getPropertyValue('BUILD_WORK_DIR') + '/CustomCommandOutput/' + os.path.basename(cmd[0]) + "." + targetNameToUniqueId(self.name))
			if cmdindex > 1: logbasename = logbasename + ".%d" % cmdindex # make this unique
			cmdDisplaySuffix = ' #%d' % (cmdindex) if len(commands) > 1 else ''
			
			stdoutPath = context.getFullPath(self.path if self.redirectStdOutToTarget else (self.stdout or logbasename + '.out'), defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/')
			stderrPath = context.getFullPath(self.stderr or logbasename + '.err', defaultDir='${BUILD_WORK_DIR}/CustomCommandOutput/')
			
			self.log.info('Building %s by executing command%s: %s', self.name, cmdDisplaySuffix, ''.join(['\n\t"%s"' % x for x in cmd]))
			if self.cwd and cmdindex == 1:
				self.log.info(' building %s from working directory: %s', self.name, self.cwd) # only print if overridden
			env = self.env or {}
			if env:
				if callable(env):
					env = env(context)
				else:
					# resolve each override; a None value means "remove this variable" (see below)
					env = { k: None if env[k] is None else self._resolveItem(env[k], context) for k in env }
				if cmdindex == 1: self.log.info(' environment overrides for %s are: %s', self.name, ''.join(['\n\t"%s=%s"' % (k, env[k]) for k in env]))
				# merge in the parent process environment, then strip None-valued overrides
				for k in os.environ:
					if k not in env: env[k] = os.getenv(k)
				for k in list(env.keys()):
					if env[k] is None:
						del env[k]
			self.log.info(' output from %s will be written to "%s" and "%s"', self.name + cmdDisplaySuffix, stdoutPath, stderrPath)
			
			if not os.path.exists(cmd[0]) and not (IS_WINDOWS and os.path.exists(cmd[0] + '.exe')):
				raise BuildException('Cannot run command because the executable does not exist: "%s"' % (cmd[0]), location=self.location)
			
			encoding = self.options['common.processOutputEncodingDecider'](context, cmd[0])
			handler = self.options['CustomCommand.outputHandlerFactory']
			if handler: # create a new handler for each command
				handler = handler(str(self), options=self.options)
			
			success = False
			rc = None
			try:
				# maybe send output to a file instead
				mkdir(os.path.dirname(logbasename))
				with open(stderrPath, 'wb') as fe: # can't use openForWrite with subprocess
					with open(stdoutPath, 'wb') as fo:
						process = subprocess.Popen(cmd, stderr=fe, stdout=fo, cwd=cwd, env=env)
						rc = _wait_with_timeout(process, '%s(%s)' % (self.name, os.path.basename(cmd[0])), self.options['process.timeout'], False)
						success = rc == 0
			finally:
				try:
					if os.path.getsize(stderrPath) == 0 and not self.stderr: deleteFile(stderrPath, allowRetry=True)
					if not self.redirectStdOutToTarget and os.path.getsize(stdoutPath) == 0 and not self.stdout: deleteFile(stdoutPath, allowRetry=True)
				except Exception as e:
					# stupid windows, it passes understanding
					self.log.info('Failed to delete empty .out/.err files (ignoring error as it is not critical): %s', e)
				
				#if not os.listdir(self.workDir): deleteDir(self.workDir) # don't leave empty work dirs around
				
				mainlog = '<command did not write any stdout/stderr>'
				
				logMethod = self.log.info if success else self.log.error
				
				if (handler or not self.redirectStdOutToTarget) and os.path.isfile(stdoutPath) and os.path.getsize(stdoutPath) > 0:
					if handler:
						with open(stdoutPath, 'r', encoding=encoding, errors='replace') as f:
							for l in f: handler.handleLine(l, isstderr=False)
					elif os.path.getsize(stdoutPath) < 15 * 1024:
						logMethod(' stdout from %s is: \n%s', self.name + cmdDisplaySuffix, open(stdoutPath, 'r', encoding=encoding, errors='replace').read().replace('\n', '\n\t'))
					mainlog = stdoutPath
					if not success: context.publishArtifact('%s%s stdout' % (self, cmdDisplaySuffix), stdoutPath)
				if os.path.isfile(stderrPath) and os.path.getsize(stderrPath) > 0:
					if handler:
						with open(stderrPath, 'r', encoding=encoding, errors='replace') as f:
							for l in f: handler.handleLine(l, isstderr=True)
					elif os.path.getsize(stderrPath) < 15 * 1024:
						logMethod(' stderr from %s is: \n%s', self.name + cmdDisplaySuffix, open(stderrPath, 'r', encoding=encoding, errors='replace').read().replace('\n', '\n\t'))
					mainlog = stderrPath # take precedence over stdout
					if not success: context.publishArtifact('%s%s stderr' % (self, cmdDisplaySuffix), stderrPath)
			
			if handler:
				handler.handleEnd(returnCode=rc)
			elif rc is not None and rc != 0 and not handler:
				if IS_WINDOWS:
					quotearg = lambda c: '"%s"' % c if ' ' in c else c
				else:
					quotearg = shlex.quote
				# having it in this format makes it easier for people to re-run the command manually
				self.log.info(' full command line is: %s', ' '.join(quotearg(c) for c in cmd))
				raise BuildException('%s command%s failed with error code %s; see output at "%s" or look under %s' % (os.path.basename(cmd[0]), cmdDisplaySuffix, rc, mainlog, cwd), location=self.location)
		
		# final sanity check
		if not os.path.exists(self.path):
			raise BuildException('%s returned no error code but did not create the output file/dir; see output at "%s" or look under %s' % (self, mainlog, cwd), location=self.location)
		
		if (not isDirPath(self.path)) and (not os.path.isfile(self.path)):
			raise BuildException('%s did not create a file as expected (please check that trailing "/" is used if and only if a directory output is intended)' % self, location=self.location)
		
		if isDirPath(self.path) and not os.listdir(self.path):
			raise BuildException('%s created an empty directory' % self, location=self.location)