def CythonModule(env, target, source=None):
    """Pseudo-builder: compile a Cython module into a Python extension.

    Parameters:
        env: SCons Environment.
        target: module target (optionally a list whose first element is the
            module name and the rest extra targets).
        source: sources; must contain exactly one ``.pyx`` file.  Defaults
            to ``<mod_target>.pyx`` when empty.

    Returns the compiled-module node(s) plus any extra targets.

    Raises UserError unless exactly one ``.pyx`` source is present.
    """
    if not target:
        target = []
    elif not is_List(target):
        target = [target]
    if not source:
        source = []
    elif not is_List(source):
        source = [source]
    # mod_target is passed to the compile builder
    mod_target, *other_targets = target
    if not source:
        source.append(f"{mod_target}.pyx")
    pyx_mods = [x for x in source if str(x).endswith(".pyx")]
    # BUG FIX: the original unpacked `pyx_mod, *too_much_mods = ...` (which
    # raised a bare ValueError when no .pyx was given) and its error message
    # referenced an undefined name `mod` (NameError).  Validate explicitly.
    if len(pyx_mods) != 1:
        raise UserError(
            f"Must have exactly one .pyx file in sources (got `{pyx_mods}`)"
        )
    pyx_mod = pyx_mods[0]
    c_mod = pyx_mod.split(".", 1)[0] + ".c"  # Useful to do `xxx.gen.pyx` ==> `xxx`
    CythonToCBuilder(env, target=[c_mod, *other_targets], source=source)
    c_compile_target = CythonCompile(env, target=mod_target, source=[c_mod])
    return [*c_compile_target, *other_targets]
def SymlinkAction(target, source, env):
    """SCons action: symlink a single source directory to a single target.

    On win32 hosts an NTFS junction is created instead of a symlink.
    Raises UserError on multiple targets/sources, non-directory sources,
    or link-creation failure.
    """
    targets = target if is_List(target) else [target]
    sources = source if is_List(source) else [source]
    if len(targets) != 1 or len(sources) != 1:
        raise UserError("Symlink only takes a single target and source")
    abs_src = os.path.abspath(str(sources[0]))
    abs_trg = os.path.abspath(str(targets[0]))
    if not os.path.isdir(abs_src):
        raise UserError(
            "Only folder symlink are allowed due to Windows limitation")
    # Best effort: remove any pre-existing link at the destination.
    try:
        os.unlink(abs_trg)
    except Exception:
        pass
    if env["HOST_OS"] != "win32":
        try:
            os.symlink(abs_src, abs_trg)
        except Exception as e:
            raise UserError(
                f"Can't create symlink ({abs_src} -> {abs_trg})") from e
    else:
        try:
            import _winapi
            _winapi.CreateJunction(abs_src, abs_trg)
        except Exception as e:
            raise UserError(
                f"Can't do a NTFS junction as symlink fallback ({abs_src} -> {abs_trg})"
            ) from e
def emma_score(env, target, gold, predictions):
    """Convert gold and predicted datasets to EMMA format and score them."""
    # The converters expect single nodes, not one-element lists.
    gold = gold[0] if is_List(gold) else gold
    predictions = predictions[0] if is_List(predictions) else predictions
    gold_emma = env.DatasetToEMMA("%s-emma" % gold, gold)
    pred_emma = env.DatasetToEMMA("%s-emma" % predictions, predictions)
    return env.RunEMMA(target, [gold_emma, pred_emma])
def create_asr_experiment_emitter(target, source, env):
    """SCons emitter for an ASR experiment configuration.

    `source` carries three Value nodes (files, directories, parameters
    dictionaries).  Emits the template targets under CONFIGURATION_PATH and
    rebuilds the source list from the dictionaries plus per-language data
    files.

    BUG FIX: the original called dict.iteritems(), which does not exist in
    Python 3 (AttributeError); .items() behaves identically here.
    """
    # start with three configuration dictionaries
    files, directories, parameters = [x.read() for x in source]
    directories["CONFIGURATION_PATH"] = target[0].rstr()
    # Collapse one-element lists to their single value.
    for f in files.keys():
        if is_List(files[f]) and len(files[f]) > 0:
            files[f] = files[f][0]
    # all templates
    dlatsa = ["cfg.py", "construct.py", "test.py"]
    # new list of targets
    new_targets = [pjoin(directories["CONFIGURATION_PATH"], x) for x in dlatsa]
    # new list of sources
    new_sources = [env.Value({k: str(v) for k, v in files.items()}),
                   env.Value({k: str(v) for k, v in directories.items()}),
                   env.Value(parameters)] + \
                  [os.path.join("data", "%s.%s" % (x, parameters["LANGUAGE_ID"]))
                   for x in dlatsa] + \
                  [p for n, p in files.items()]
    return new_targets, new_sources
def serialize_resource(target, source, env):
    """Serialize a resource file (name + raw bytes) into the target file.

    BUG FIX: struct's "s" conversion requires bytes; the original passed
    the str `name`, which raises struct.error on Python 3.  The name is
    now UTF-8 encoded and its byte length (not character length) recorded.
    """
    # Convert source to a list of SCons Files
    if not is_List(source):
        source = [env.File(source).srcnode()]
    # Determine name of resource
    source_path = source[0].abspath
    name = os.path.splitext(os.path.basename(source_path))[0]
    name_bytes = name.encode('utf-8')
    name_size = len(name_bytes)
    # Read resource data into memory
    with open(source_path, 'rb') as f:
        data = f.read()
    size = len(data)
    # Serialize to target
    # TODO move resource definition to C header
    # Format:
    #   H  size of resource name
    #   s  resource name (# of bytes equal to prev)
    #   Q  size of resource data
    #   s  resource data (# of bytes equal to prev)
    data_format = '<H{}sQ{}s'.format(name_size, size)
    serialized_data = struct.pack(data_format,
                                  name_size,
                                  name_bytes,
                                  size,
                                  data)
    with open(target[0].abspath, 'wb') as f:
        f.write(serialized_data)
def Source(env, source=[defaultSourceDirectory], target=None, *args, **kwargs):
    """Collect .rst files under the given source directories.

    NOTE(review): `sources`/`targets` are computed but discarded and the
    function always returns [] -- this looks like an unfinished counterpart
    of the HTML() builder; confirm intent before relying on it.

    NOTE(review): the mutable list default is shared between calls, but it
    is only rebound (never mutated) here, so it is harmless in practice.
    """
    buildDirectory = defaultBuildDirectory
    if not is_List(source):
        source = [source]
    if len(source) < 1:
        raise ValueError("Must have at least one source directory.")
    sources = []
    if target != None:
        if not isinstance(target, str):
            raise ValueError("target must be a string value.")
        # A string target overrides the default build directory.
        buildDirectory = target
    targets = []
    for directoryName in source:
        sourceList = []
        targetList = []
        for root, directories, files in os.walk(directoryName):
            for name in files:
                base, ext = os.path.splitext(name)
                # Only reStructuredText inputs are considered.
                if ext == ".rst":
                    sourceList.append(root + "/" + name)
                    targetList.append(
                        os.path.join(
                            buildDirectory,
                            root.replace(directoryName, "html"),
                            base + ".html",
                        )
                    )
        sources.append(sourceList)
        targets.append(targetList)
    return []
def execute(self, target, source, env):
    """Execute a command action.

    This will handle lists of commands as well as individual commands,
    because construction variable substitution may turn a single "command"
    into a list.  This means that this class can actually handle lists of
    commands, even though that's not how we use it externally.

    NOTE(review): uses string.join -- Python 2 only.
    """
    from SCons.Util import is_String, is_List, flatten, escape_list

    try:
        shell = env['SHELL']
    except KeyError:
        raise SCons.Errors.UserError('Missing SHELL construction variable.')
    try:
        spawn = env['SPAWN']
    except KeyError:
        raise SCons.Errors.UserError('Missing SPAWN construction variable.')
    escape = env.get('ESCAPE', lambda x: x)
    try:
        ENV = env['ENV']
    except KeyError:
        # Lazily build (and cache) a default environment when none is set.
        global default_ENV
        if not default_ENV:
            import SCons.Environment
            default_ENV = SCons.Environment.Environment()['ENV']
        ENV = default_ENV
    # Ensure that the ENV values are all strings:
    for key, value in ENV.items():
        if not is_String(value):
            if is_List(value):
                # If the value is a list, then we assume it is a
                # path list, because that's a pretty common list-like
                # value to stick in an environment variable:
                value = flatten(value)
                ENV[key] = string.join(map(str, value), os.pathsep)
            else:
                # If it isn't a string or a list, then we just coerce
                # it to a string, which is the proper way to handle
                # Dir and File instances and will produce something
                # reasonable for just about everything else:
                ENV[key] = str(value)
    # Substitute construction variables into each command line.
    cmd_list = env.subst_list(self.cmd_list, 0, target, map(rfile, source))
    # Use len() to filter out any "command" that's zero-length.
    for cmd_line in filter(len, cmd_list):
        # Escape the command line for the interpreter we are using.
        cmd_line = escape_list(cmd_line, escape)
        result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
        # First failing command aborts and its status is returned.
        if result:
            return result
    return 0
def execute(self, target, source, env):
    """Execute a command action.

    This will handle lists of commands as well as individual commands,
    because construction variable substitution may turn a single "command"
    into a list.  This means that this class can actually handle lists of
    commands, even though that's not how we use it externally.

    NOTE(review): uses string.join -- Python 2 only.  Unlike the sibling
    variant, command expansion goes through self.process(), which also
    yields the `ignore` and `silent` flags.
    """
    from SCons.Subst import escape_list
    from SCons.Util import is_String, is_List, flatten

    try:
        shell = env['SHELL']
    except KeyError:
        raise SCons.Errors.UserError('Missing SHELL construction variable.')
    try:
        spawn = env['SPAWN']
    except KeyError:
        raise SCons.Errors.UserError('Missing SPAWN construction variable.')
    escape = env.get('ESCAPE', lambda x: x)
    try:
        ENV = env['ENV']
    except KeyError:
        # Lazily build (and cache) a default environment when none is set.
        global default_ENV
        if not default_ENV:
            import SCons.Environment
            default_ENV = SCons.Environment.Environment()['ENV']
        ENV = default_ENV
    # Ensure that the ENV values are all strings:
    for key, value in ENV.items():
        if not is_String(value):
            if is_List(value):
                # If the value is a list, then we assume it is a
                # path list, because that's a pretty common list-like
                # value to stick in an environment variable:
                value = flatten(value)
                ENV[key] = string.join(map(str, value), os.pathsep)
            else:
                # If it isn't a string or a list, then we just coerce
                # it to a string, which is the proper way to handle
                # Dir and File instances and will produce something
                # reasonable for just about everything else:
                ENV[key] = str(value)
    cmd_list, ignore, silent = self.process(target, map(rfile, source), env)
    # Use len() to filter out any "command" that's zero-length.
    for cmd_line in filter(len, cmd_list):
        # Escape the command line for the interpreter we are using.
        cmd_line = escape_list(cmd_line, escape)
        result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
        # A failure aborts unless this command's errors are ignored.
        if not ignore and result:
            return result
    return 0
def _convert_list_R(newlist, sources):
    """Recursively flatten *sources* into *newlist*, wrapping anything that
    is not already a Node in a Value node."""
    for item in sources:
        if is_List(item):
            _convert_list_R(newlist, item)
        else:
            newlist.append(item if isinstance(item, Node) else Value(item))
def sub_match(match, val=val, matchlist=matchlist):
    """Regex callback: expand a matched variable reference to its value.

    `val` and `matchlist` are bound as defaults from the enclosing scope.
    """
    token = match.group(1)
    expansion = val if token in matchlist else token
    if is_List(expansion) or is_Tuple(expansion):
        # Sequence values are joined space-separated (Python 2 string.join).
        return string.join(map(str, expansion))
    return str(expansion)
def _do_download(target, source, env):
    """SCons action: download `url` into the first target file.

    NOTE(review): `url` is not defined in this function -- it must come
    from an enclosing scope or module global (it is not derived from
    `source`); confirm where it is bound.
    """
    if not target:
        target = []
    elif not is_List(target):
        target = [target]
    # NOTE(review): if `target` was empty, target[0] below still raises
    # IndexError -- the normalization above does not guard against that.
    with urlopen(url) as infd:
        # "bw" is the same open mode as "wb" (binary write).
        with open(target[0].abspath, "bw") as outfd:
            outfd.write(infd.read())
def scons_subst_once(strSubst, env, key):
    """Perform single (non-recursive) substitution of a single
    construction variable keyword.

    This is used when setting a variable when copying or overriding values
    in an Environment.  We want to capture (expand) the old value before
    we override it, so people can do things like:

        env2 = env.Clone(CCFLAGS = '$CCFLAGS -g')

    We do this with some straightforward, brute-force code here...

    NOTE(review): types.StringType / string.find / string.join are
    Python 2 idioms; this block predates Python 3.
    """
    # Fast path: a plain string containing no '$' needs no substitution.
    if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0:
        return strSubst

    # Both spellings of a reference to `key` are recognized.
    matchlist = ['$' + key, '${' + key + '}']
    val = env.get(key, '')
    def sub_match(match, val=val, matchlist=matchlist):
        a = match.group(1)
        if a in matchlist:
            a = val
        if is_List(a) or is_Tuple(a):
            return string.join(map(str, a))
        else:
            return str(a)

    if is_List(strSubst) or is_Tuple(strSubst):
        # Element-wise: whole-element matches are replaced (sequences are
        # spliced in); other strings get regex-based expansion.
        result = []
        for arg in strSubst:
            if is_String(arg):
                if arg in matchlist:
                    arg = val
                    if is_List(arg) or is_Tuple(arg):
                        result.extend(arg)
                    else:
                        result.append(arg)
                else:
                    result.append(_dollar_exps.sub(sub_match, arg))
            else:
                # Non-string elements pass through untouched.
                result.append(arg)
        return result
    elif is_String(strSubst):
        return _dollar_exps.sub(sub_match, strSubst)
    else:
        return strSubst
def HTML(env, source=[defaultSourceDirectory], target=None, *args, **kwargs):
    """Build Sphinx HTML documentation from .rst files found under the
    source directories.

    NOTE(review): the mutable list default is shared across calls; it is
    only rebound (never mutated) here, so it is safe in practice.
    """
    buildDirectory = defaultBuildDirectory
    if not is_List(source):
        source = [source]
    if len(source) < 1:
        raise ValueError('Must have at least one source directory.')
    sources = []
    if target != None:
        if not isinstance(target, str):
            raise ValueError('target must be a string value.')
        # A string target overrides the default build directory.
        buildDirectory = target
    targets = []
    # Walk each source tree and pair every .rst file with the .html file
    # it should produce under the build directory.
    for directoryName in source:
        sourceList = []
        targetList = []
        for root, directories, files in os.walk(directoryName):
            for name in files:
                base, ext = os.path.splitext(name)
                if ext == '.rst':
                    sourceList.append(root + '/' + name)
                    targetList.append(
                        os.path.join(buildDirectory,
                                     root.replace(directoryName, 'html'),
                                     base + '.html'))
        sources.append(sourceList)
        targets.append(targetList)
    # Files Sphinx creates as a side effect of the build.
    knownExtraFiles = [
        'doctrees/environment.pickle',
        'doctrees/index.doctree',
        'html/.buildinfo',
        'html/genindex.html',
        'html/objects.inv',
        'html/search.html',
        'html/searchindex.js',
        'html/_sources/index.txt',
        'html/_static/basic.css',
        'html/_static/default.css',
        'html/_static/doctools.js',
        'html/_static/file.png',
        'html/_static/jquery.js',
        'html/_static/minus.png',
        'html/_static/plus.png',
        'html/_static/pygments.css',
        'html/_static/searchtools.js',
        'html/_static/sidebar.js',
        'html/_static/underscore.js',
    ]
    constructedKnownExtraFiles = [
        buildDirectory + '/' + name for name in knownExtraFiles
    ]
    # Keep the side-effect files from being removed by cleans.
    env.Precious(constructedKnownExtraFiles)
    returnValue = []
    for i in range(len(source)):
        # Register the extra files as side effects of each per-directory build.
        env.SideEffect(constructedKnownExtraFiles, targets[i])
        returnValue += env.Command(
            targets[i], sources[i],
            '{0} {1} -b html -d {2}/doctrees {3} {2}/html'.format(
                env['SPHINX'], env['SPHINXFLAGS'], buildDirectory, source[i]))
    return returnValue
def StringizeLibSymlinks(symlinks):
    """Converts list with pairs of nodes to list with pairs of node paths
    (strings). Used mainly for debugging."""
    if not is_List(symlinks):
        return symlinks
    try:
        return [(left.get_path(), right.get_path()) for left, right in symlinks]
    except (TypeError, ValueError):
        # Malformed entries: hand back the input unchanged.
        return symlinks
def AndroidSharedLibrary(env, target, source, **kwargs):
    """Build an Android shared library with the NDK-style link flags.

    Appends the C++ runtime (when configured), the libc/m/dl/gcc libs and
    the dso_handle object, then delegates to env.OldSharedLibrary().

    BUG FIX: the original appended to the caller's `source` list in place
    (both `source.append(lib)` and `source += [dsoh]`), leaking extra
    entries back to the caller.  We now normalize to a private copy first.
    """
    from SCons.Util import is_List

    archdir = env['archdir']
    linkflags = [
        "-Wl,--gc-sections",
        "-Wl,-z,nocopyreloc",
        "-Wl,--no-undefined",
        "-nostdlib",
        '-L' + os.path.join(archdir, 'usr', 'lib')
    ]
    # Private copy so we never mutate the caller's list.
    source = list(source) if is_List(source) else [source]
    if env['stdcpplib']:
        lib = env.File(env['stdcpplib'])
        lib.attributes.shared = True
        source.append(lib)
    if 'SHLINKFLAGS' in kwargs:
        linkflags += kwargs['SHLINKFLAGS']
    else:
        linkflags += env['SHLINKFLAGS']
    kwargs['SHLINKFLAGS'] = copy.copy(linkflags)
    # NOTE(review): postlinkflags is computed but never used below --
    # kept for fidelity; confirm whether it should be passed through.
    if 'POSTSHLINKFLAGS' in kwargs:
        postlinkflags = kwargs['POSTSHLINKFLAGS']
    else:
        postlinkflags = env['POSTSHLINKFLAGS']
    if 'LIBS' in kwargs:
        libs = kwargs['LIBS']
    else:
        libs = env['LIBS']
    libs = copy.copy(libs)
    libs += ['c', 'm', 'dl', 'gcc']
    kwargs['LIBS'] = libs
    dsoh = env.get('dso_handle', None)
    if dsoh:
        source += [dsoh]  # safe: `source` is our private copy
    return env.OldSharedLibrary(target, source, **kwargs)
def _gen_nodelist(self):
    """Build (and cache) the NodeList of proxies for self.list."""
    raw = self.list
    if raw is None:
        raw = []
    elif not is_List(raw) and not is_Tuple(raw):
        raw = [raw]
    # Mapping self.func over the items is what actually turns a plain
    # list into the appropriate proxies.
    self.nodelist = SCons.Util.NodeList(map(self.func, raw))
    # Subsequent calls return the cached result directly.
    self._create_nodelist = self._return_nodelist
    return self.nodelist
def get_command_env(env):
    """
    Return a string that sets the environment for any environment variables
    that differ between the OS environment and the SCons command ENV.

    It will be compatible with the default shell of the operating system.
    """
    # Cached result short-circuits the scan below.
    try:
        return env["NINJA_ENV_VAR_CACHE"]
    except KeyError:
        pass

    # Scan the ENV looking for any keys which do not exist in
    # os.environ or differ from it. We assume if it's a new or
    # differing key from the process environment then it's
    # important to pass down to commands in the Ninja file.
    ENV = get_default_ENV(env)
    scons_specified_env = {
        key: value
        for key, value in ENV.items()
        # TODO: Remove this filter, unless there's a good reason to keep. SCons's behavior shouldn't depend on shell's.
        if key not in os.environ or os.environ.get(key, None) != value
    }

    windows = env["PLATFORM"] == "win32"
    command_env = ""
    for key, value in scons_specified_env.items():
        # Ensure that the ENV values are all strings:
        if is_List(value):
            # If the value is a list, then we assume it is a
            # path list, because that's a pretty common list-like
            # value to stick in an environment variable:
            value = flatten_sequence(value)
            # NOTE(review): joinpath is defined elsewhere in this module;
            # given the map(str, ...) argument it is presumably an
            # os.pathsep joiner -- confirm.
            value = joinpath(map(str, value))
        else:
            # If it isn't a string or a list, then we just coerce
            # it to a string, which is the proper way to handle
            # Dir and File instances and will produce something
            # reasonable for just about everything else:
            value = str(value)

        if windows:
            command_env += "set '{}={}' && ".format(key, value)
        else:
            # We address here *only* the specific case that a user might have
            # an environment variable which somehow gets included and has
            # spaces in the value. These are escapes that Ninja handles. This
            # doesn't make builds on paths with spaces (Ninja and SCons issues)
            # nor expanding response file paths with spaces (Ninja issue) work.
            value = value.replace(r' ', r'$ ')
            command_env += "export {}='{}';".format(key, value)

    env["NINJA_ENV_VAR_CACHE"] = command_env
    return command_env
def _cython_to_c_emitter(target, source, env):
    """Emitter for the Cython->C builder: add the global .pxd dependencies
    and, in annotate mode, the generated .html report target."""
    if not source:
        source = []
    elif not is_List(source):
        source = [source]
    # Consider we always depend on all .pxd files
    source += env["CYTHON_DEPS"]
    flags = env["CYTHON_FLAGS"]
    # Add .html target if cython is in annotate mode
    if "-a" not in flags and "--annotate" not in flags:
        return target, source
    pyx = next(x for x in target if x.name.endswith(".pyx"))
    base_name = pyx.get_path().rsplit(".")[0]
    return [target[0], f"{base_name}.html"], source
def _process_variable_templates(callback, **kw):
    """Feed all predefined GNU variables to callback.

    :Parameters:
        callback : callable
            function of type ``callback(name, desc, default)``, where
            - ``name:`` is the name of variable being processed,
            - ``desc:`` is short description,
            - ``default:`` is the default value for the variable.
    :Keywords:
        only : list
            list of variable names to process, others are ignored
        exclude : list
            list of variable names to exclude from processing
    """
    from SCons.Util import is_List

    def _as_list(key):
        # Missing keyword -> None (no filtering); scalar -> one-item list.
        try:
            value = kw[key]
        except KeyError:
            return None
        return value if is_List(value) else [value]

    only = _as_list('only')
    exclude = _as_list('exclude')
    for name, desc, default in _variable_templates:
        if only is not None and name not in only:
            continue
        if exclude is not None and name in exclude:
            continue
        callback(name, desc, default)
def Package(env, target=None, source=None, **kw): """ Entry point for the package tool. """ # check if we need to find the source files ourself if not source: source = env.FindInstalledFiles() if len(source) == 0: raise UserError, "No source for Package() given" # decide which types of packages shall be built. Can be defined through # four mechanisms: command line argument, keyword argument, # environment argument and default selection( zip or tar.gz ) in that # order. try: kw['PACKAGETYPE'] = env['PACKAGETYPE'] except KeyError: pass if not kw.get('PACKAGETYPE'): from SCons.Script import GetOption kw['PACKAGETYPE'] = GetOption('package_type') if kw['PACKAGETYPE'] == None: if env['BUILDERS'].has_key('Tar'): kw['PACKAGETYPE'] = 'targz' elif env['BUILDERS'].has_key('Zip'): kw['PACKAGETYPE'] = 'zip' else: raise UserError, "No type for Package() given" PACKAGETYPE = kw['PACKAGETYPE'] if type(PACKAGETYPE) is ModuleType: PACKAGETYPE = [PACKAGETYPE] elif not is_List(PACKAGETYPE): PACKAGETYPE = string.split(PACKAGETYPE, ',') # load the needed packagers. def load_packager(typ): if type(typ) is ModuleType: # Caller wants to use a custom packager return typ try: file, path, desc = imp.find_module(typ, __path__) return imp.load_module(typ, file, path, desc) except ImportError, e: raise EnvironmentError("packager %s not available: %s" % (typ, str(e)))
def Package(env, target=None, source=None, **kw): """ Entry point for the package tool. """ # check if we need to find the source files ourself if not source: source = env.FindInstalledFiles() if len(source)==0: raise UserError, "No source for Package() given" # decide which types of packages shall be built. Can be defined through # four mechanisms: command line argument, keyword argument, # environment argument and default selection( zip or tar.gz ) in that # order. try: kw['PACKAGETYPE']=env['PACKAGETYPE'] except KeyError: pass if not kw.get('PACKAGETYPE'): from SCons.Script import GetOption kw['PACKAGETYPE'] = GetOption('package_type') if kw['PACKAGETYPE'] == None: if env['BUILDERS'].has_key('Tar'): kw['PACKAGETYPE']='targz' elif env['BUILDERS'].has_key('Zip'): kw['PACKAGETYPE']='zip' else: raise UserError, "No type for Package() given" PACKAGETYPE=kw['PACKAGETYPE'] if type(PACKAGETYPE) is ModuleType: PACKAGETYPE=[PACKAGETYPE] elif not is_List(PACKAGETYPE): PACKAGETYPE=string.split(PACKAGETYPE, ',') # load the needed packagers. def load_packager(typ): if type(typ) is ModuleType: # Caller wants to use a custom packager return typ try: file,path,desc=imp.find_module(typ, __path__) return imp.load_module(typ, file, path, desc) except ImportError, e: raise EnvironmentError("packager %s not available: %s"%(typ,str(e)))
def test_is_List(self):
    """Positive and negative cases for is_List()."""
    assert is_List([])
    assert is_List(UserList())
    # Subclassing list may be unavailable on very old interpreters;
    # only assert on the subclass when it can be defined.
    try:
        class _ListSubclass(list):
            pass
    except TypeError:
        pass
    else:
        assert is_List(_ListSubclass([]))
    for non_list in ((), {}, ""):
        assert not is_List(non_list)
def EmitLibSymlinks(env, symlinks, libnode, **kw):
    """Used by emitters to handle (shared/versioned) library symlinks"""
    Verbose = False
    # Every node involved in the process: all symlinks plus the library.
    nodes = list(set([lnk for lnk, tgt in symlinks] + [libnode]))
    clean_targets = kw.get('clean_targets', [])
    if not is_List(clean_targets):
        clean_targets = [clean_targets]
    for link, linktgt in symlinks:
        env.SideEffect(link, linktgt)
        if Verbose:
            print("EmitLibSymlinks: SideEffect(%r,%r)" % (link.get_path(), linktgt.get_path()))
        # Cleaning a link target also removes every other involved node.
        clean_list = [node for node in nodes if node != linktgt]
        env.Clean(list(set([linktgt] + clean_targets)), clean_list)
        if Verbose:
            print("EmitLibSymlinks: Clean(%r,%r)" % (linktgt.get_path(), [x.get_path() for x in clean_list]))
def Package(env, target=None, source=None, **kw):
    """
    Entry point for the package tool.

    BUG FIX: the nested load_packager() used Python 2 `except ImportError, e`
    syntax -- a SyntaxError under Python 3, which the rest of this function
    targets (it already uses py3-style raise and `in env["BUILDERS"]`).
    Also replaced `== None` with `is None` and the builtin-shadowing
    parameter name `type` with `typ`.
    """
    # check if we need to find the source files ourself
    if not source:
        source = env.FindInstalledFiles()

    if len(source) == 0:
        raise UserError("No source for Package() given")

    # decide which types of packages shall be built. Can be defined through
    # four mechanisms: command line argument, keyword argument,
    # environment argument and default selection( zip or tar.gz ) in that
    # order.
    try:
        kw["PACKAGETYPE"] = env["PACKAGETYPE"]
    except KeyError:
        pass
    if not kw.get("PACKAGETYPE"):
        from SCons.Script import GetOption

        kw["PACKAGETYPE"] = GetOption("package_type")

    if kw["PACKAGETYPE"] is None:
        # Fall back on whichever archive builder the environment provides.
        if "Tar" in env["BUILDERS"]:
            kw["PACKAGETYPE"] = "targz"
        elif "Zip" in env["BUILDERS"]:
            kw["PACKAGETYPE"] = "zip"
        else:
            raise UserError("No type for Package() given")

    PACKAGETYPE = kw["PACKAGETYPE"]
    if not is_List(PACKAGETYPE):
        PACKAGETYPE = PACKAGETYPE.split(",")

    # load the needed packagers.
    def load_packager(typ):
        try:
            file, path, desc = imp.find_module(typ, __path__)
            return imp.load_module(typ, file, path, desc)
        except ImportError as e:
            raise EnvironmentError("packager %s not available: %s" % (typ, str(e)))
def ShlibStubGen(env, target, symbols):
    """Generate a shared library stub.

    This stub is used to facilitate dynamic linking without including
    occasionally problematic symbol versioning information.

    Parameters:
        env: Environment
        target: Target shared object name (e.g. libc.so.6)
        symbols: Symbol list to include in stub
    """
    # Inspired by https://stackoverflow.com/a/21059674
    if is_List(target):
        raise UserError("target must be a single string or node")
    target = env.fs.File(target)
    lines = ['// Auto-generated stub']
    for sym in symbols:
        # Declaration first ("for strict prototypes"), then empty body.
        lines.extend([
            'void {}(void);'.format(sym),
            'void {}(void) {{}}'.format(sym),
            '',
        ])
    csrc = env.Textfile(target=target.name + '_stub.c', source=lines)
    return env.SharedLibrary(
        target=target,
        source=[csrc],
        CCFLAGS=env['CCFLAGS'] + ['-fno-builtin'],
        LINKFLAGS=env['LINKFLAGS'] + ['-Wl,-soname=${TARGET.name}', '-nostdlib'],
        SHLIBSUFFIX='',
    )
def virtual_target_command(env, marker, condition, source, action):
    """Wrap `action` in a Command whose completion is tracked by a marker
    file; the marker is invalidated whenever `condition(env)` turns false."""
    if not isinstance(marker, File):
        raise UserError("`marker` must be a File")
    # Condition has changed in our back, force rebuild
    # (condition(env) is evaluated first, as in the original short-circuit).
    if not condition(env) and os.path.exists(marker.abspath):
        env.Execute(Delete(marker))
    actions = list(action) if is_List(action) else [action]
    actions.append(
        Action(
            lambda target, source, env: install_marker(target[0]),
            "Write $TARGET to mark task complete",
        )
    )
    return env.Command(marker, source, actions)
def _arg2builders(env, arg):
    """Convert an argument to a list of builder objects it refers to.

    :Parameters:
        env : SCons.Environment.Environment
            a SCons Environment object
        arg
            an argument to be converted to a builders, may be builder name,
            builder object or a list of (intermixed) builder names/objects.

    The returned list contains no repetitions (objects are unique).
    """
    from SCons.Util import is_List, uniquer
    items = arg if is_List(arg) else [arg]
    # Deduplicate while preserving order.
    # uniquer() is in SCons since 1.0.0 so it should be fine
    items = uniquer(items)
    resolved = map(lambda item: _arg2builder(env, item), items)
    return [b for b in resolved if b is not None]
def _arg2builders(env, arg):
    """Convert an argument to a list of builder objects it refers to.

    :Parameters:
        env : SCons.Environment.Environment
            a SCons Environment object
        arg
            an argument to be converted to a builders, may be builder name,
            builder object or a list of (intermixed) builder names/objects.

    The returned list contains no repetitions (objects are unique).
    """
    from SCons.Util import is_List, uniquer
    if not is_List(arg):
        arg = [arg]
    builders = []
    # uniquer() (in SCons since 1.0.0) deduplicates while preserving order.
    for candidate in map(lambda b: _arg2builder(env, b), uniquer(arg)):
        if candidate is not None:
            builders.append(candidate)
    return builders
def setup_scons_entities(env):
    """Extract the SHELL/SPAWN/ESCAPE/ENV execution entities from *env*.

    Returns a tuple (shell, spawn, escape, env_copy) where env_copy has
    every value coerced to a string.

    Raises SCons.Errors.UserError when SHELL or SPAWN is missing.
    """
    try:
        shell = env['SHELL']
    except KeyError:
        raise SCons.Errors.UserError('Missing SHELL construction variable.')

    try:
        spawn = env['SPAWN']
    except KeyError:
        raise SCons.Errors.UserError('Missing SPAWN construction variable.')
    else:
        # BUG FIX: the original tested `type(spawn) == 'str'`; a type object
        # never equals the string 'str', so string $SPAWN values were never
        # substituted.  Use is_String (already relied on below).
        if is_String(spawn):
            spawn = env.subst(spawn, raw=1, conv=lambda x: x)

    escape = env.get('ESCAPE', lambda x: x)

    try:
        ENV = env['ENV']
    except KeyError:
        import SCons.Environment
        ENV = SCons.Environment.Environment()['ENV']

    # Ensure that the ENV values are all strings:
    for key, value in list(ENV.items()):
        if not is_String(value):
            if is_List(value):
                # If the value is a list, then we assume it is a
                # path list, because that's a pretty common list-like
                # value to stick in an environment variable:
                value = flatten_sequence(value)
                ENV[key] = os.pathsep.join(map(str, value))
            else:
                # If it isn't a string or a list, then we just coerce
                # it to a string, which is the proper way to handle
                # Dir and File instances and will produce something
                # reasonable for just about everything else:
                ENV[key] = str(value)

    return (shell, spawn, escape, ENV.copy())
def generate(self):
    """
    Generate the build.ninja.

    This should only be called once for the lifetime of this object.
    """
    if self.__generated:
        return

    self.rules.update(self.env.get(NINJA_RULES, {}))
    self.pools.update(self.env.get(NINJA_POOLS, {}))

    content = io.StringIO()
    ninja = self.writer_class(content, width=100)

    ninja.comment("Generated by scons. DO NOT EDIT.")

    ninja.variable("builddir", get_path(self.env.Dir(self.env['NINJA_DIR']).path))

    for pool_name, size in self.pools.items():
        ninja.pool(pool_name, min(self.env.get('NINJA_MAX_JOBS', size), size))

    for var, val in self.variables.items():
        ninja.variable(var, val)

    for rule, kwargs in self.rules.items():
        # Cap concurrency with the shared local pool when a job limit is set
        # and the rule did not choose its own pool.
        if self.env.get('NINJA_MAX_JOBS') is not None and 'pool' not in kwargs:
            kwargs['pool'] = 'local_pool'
        ninja.rule(rule, **kwargs)

    generated_source_files = sorted({
        output
        # First find builds which have header files in their outputs.
        for build in self.builds.values()
        if self.has_generated_sources(build["outputs"])
        for output in build["outputs"]
        # Collect only the header files from the builds with them
        # in their output. We do this because is_generated_source
        # returns True if it finds a header in any of the outputs,
        # here we need to filter so we only have the headers and
        # not the other outputs.
        if self.is_generated_source(output)
    })

    if generated_source_files:
        ninja.build(outputs="_generated_sources",
                    rule="phony",
                    implicit=generated_source_files)

    template_builders = []

    for build in [self.builds[key] for key in sorted(self.builds.keys())]:
        if build["rule"] == "TEMPLATE":
            template_builders.append(build)
            continue

        if "implicit" in build:
            build["implicit"].sort()

        # Don't make generated sources depend on each other. We
        # have to check that none of the outputs are generated
        # sources and none of the direct implicit dependencies are
        # generated sources or else we will create a dependency
        # cycle.
        if (generated_source_files
                and not build["rule"] == "INSTALL"
                and set(build["outputs"]).isdisjoint(generated_source_files)
                and set(build.get("implicit", [])).isdisjoint(generated_source_files)):
            # Make all non-generated source targets depend on
            # _generated_sources. We use order_only for generated
            # sources so that we don't rebuild the world if one
            # generated source was rebuilt. We just need to make
            # sure that all of these sources are generated before
            # other builds.
            order_only = build.get("order_only", [])
            order_only.append("_generated_sources")
            build["order_only"] = order_only
        if "order_only" in build:
            build["order_only"].sort()

        # When using a depfile Ninja can only have a single output
        # but SCons will usually have emitted an output for every
        # thing a command will create because it's caching is much
        # more complex than Ninja's. This includes things like DWO
        # files. Here we make sure that Ninja only ever sees one
        # target when using a depfile. It will still have a command
        # that will create all of the outputs but most targets don't
        # depend directly on DWO files and so this assumption is safe
        # to make.
        rule = self.rules.get(build["rule"])

        # Some rules like 'phony' and other builtins we don't have
        # listed in self.rules so verify that we got a result
        # before trying to check if it has a deps key.
        #
        # Anything using deps or rspfile in Ninja can only have a single
        # output, but we may have a build which actually produces
        # multiple outputs which other targets can depend on. Here we
        # slice up the outputs so we have a single output which we will
        # use for the "real" builder and multiple phony targets that
        # match the file names of the remaining outputs. This way any
        # build can depend on any output from any build.
        #
        # We assume that the first listed output is the 'key'
        # output and is stably presented to us by SCons. For
        # instance if -gsplit-dwarf is in play and we are
        # producing foo.o and foo.dwo, we expect that outputs[0]
        # from SCons will be the foo.o file and not the dwo
        # file. If instead we just sorted the whole outputs array,
        # we would find that the dwo file becomes the
        # first_output, and this breaks, for instance, header
        # dependency scanning.
        if rule is not None and (rule.get("deps") or rule.get("rspfile")):
            first_output, remaining_outputs = (
                build["outputs"][0],
                build["outputs"][1:],
            )

            if remaining_outputs:
                ninja.build(
                    outputs=sorted(remaining_outputs),
                    rule="phony",
                    implicit=first_output,
                )

            build["outputs"] = first_output

        # Optionally a rule can specify a depfile, and SCons can generate implicit
        # dependencies into the depfile. This allows for dependencies to come and go
        # without invalidating the ninja file. The depfile was created in ninja specifically
        # for dealing with header files appearing and disappearing across rebuilds, but it can
        # be repurposed for anything, as long as you have a way to regenerate the depfile.
        # More specific info can be found here: https://ninja-build.org/manual.html#_depfile
        if rule is not None and rule.get('depfile') and build.get('deps_files'):
            path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
            generate_depfile(self.env, path[0], build.pop('deps_files', []))

        if "inputs" in build:
            build["inputs"].sort()

        ninja.build(**build)

    template_builds = dict()
    for template_builder in template_builders:
        # Special handling for outputs and implicit since we need to
        # aggregate not replace for each builder.
        for agg_key in ["outputs", "implicit", "inputs"]:
            new_val = template_builds.get(agg_key, [])

            # Use pop so the key is removed and so the update
            # below will not overwrite our aggregated values.
            cur_val = template_builder.pop(agg_key, [])
            if is_List(cur_val):
                new_val += cur_val
            else:
                new_val.append(cur_val)
            template_builds[agg_key] = new_val

        # Collect all other keys
        template_builds.update(template_builder)

    if template_builds.get("outputs", []):
        # Try to clean up any dependency cycles. If we are passing an
        # ouptut node to SCons, it will build any dependencys if ninja
        # has not already.
        for output in template_builds.get("outputs", []):
            inputs = template_builds.get('inputs')
            if inputs and output in inputs:
                inputs.remove(output)

            implicits = template_builds.get('implicit')
            if implicits and output in implicits:
                implicits.remove(output)

        ninja.build(**template_builds)

    # We have to glob the SCons files here to teach the ninja file
    # how to regenerate itself. We'll never see ourselves in the
    # DAG walk so we can't rely on action_to_ninja_build to
    # generate this rule even though SCons should know we're
    # dependent on SCons files.
    #
    # The REGENERATE rule uses depfile, so we need to generate the depfile
    # in case any of the SConscripts have changed. The depfile needs to be
    # path with in the build and the passed ninja file is an abspath, so
    # we will use SCons to give us the path within the build. Normally
    # generate_depfile should not be called like this, but instead be called
    # through the use of custom rules, and filtered out in the normal
    # list of build generation about. However, because the generate rule
    # is hardcoded here, we need to do this generate_depfile call manually.
    ninja_file_path = self.env.File(self.ninja_file).path
    generate_depfile(self.env, ninja_file_path, self.env['NINJA_REGENERATE_DEPS'])

    ninja.build(
        ninja_file_path,
        rule="REGENERATE",
        implicit=[__file__],
    )

    # If we ever change the name/s of the rules that include
    # compile commands (i.e. something like CC) we will need to
    # update this build to reflect that complete list.
    ninja.build(
        "compile_commands.json",
        rule="CMD",
        pool="console",
        implicit=[str(self.ninja_file)],
        variables={
            "cmd": "{} -f {} -t compdb {}CC CXX > compile_commands.json".format(
                # NINJA_COMPDB_EXPAND - should only be true for ninja
                # This was added to ninja's compdb tool in version 1.9.0 (merged April 2018)
                # https://github.com/ninja-build/ninja/pull/1223
                # TODO: add check in generate to check version and enable this by default if it's available.
                self.ninja_bin_path, str(self.ninja_file),
                '-x ' if self.env.get('NINJA_COMPDB_EXPAND', True) else '')
        },
    )

    ninja.build(
        "compiledb",
        rule="phony",
        implicit=["compile_commands.json"],
    )

    # Look in SCons's list of DEFAULT_TARGETS, find the ones that
    # we generated a ninja build rule for.
    scons_default_targets = [
        get_path(tgt)
        for tgt in SCons.Script.DEFAULT_TARGETS
        if get_path(tgt) in self.built
    ]

    # If we found an overlap between SCons's list of default
    # targets and the targets we created ninja builds for then use
    # those as ninja's default as well.
    if scons_default_targets:
        ninja.default(" ".join(scons_default_targets))

    with open(str(self.ninja_file), "w") as build_ninja:
        build_ninja.write(content.getvalue())

    self.__generated = True
# recursive expansion by copying the local # variable dictionary and overwriting a null # string for the value of the variable name # we just expanded. # # This could potentially be optimized by only # copying lvars when s contains more expansions, # but lvars is usually supposed to be pretty # small, and deeply nested variable expansions # are probably more the exception than the norm, # so it should be tolerable for now. lv = lvars.copy() var = string.split(key, '.')[0] lv[var] = '' return self.substitute(s, lv) elif is_List(s) or is_Tuple(s): def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars): return conv(substitute(l, lvars)) r = map(func, s) return string.join(r) elif callable(s): try: s = s(target=self.target, source=self.source, env=self.env, for_signature=(self.mode != SUBST_CMD)) except TypeError: # This probably indicates that it's a callable # object that doesn't match our calling arguments # (like an Action). s = str(s)
def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
    """Substitute construction variables in a string (or list or other
    object) and separate the arguments into a command list.

    The companion scons_subst() function (above) handles basic
    substitutions within strings, so see that function instead
    if that's what you're looking for.
    """
    # NOTE(review): gvars={} / lvars={} are mutable default arguments; the
    # visible code only reads/copies them, but confirm no callee mutates them.
#    try:
#        Subst_List_Strings[strSubst] = Subst_List_Strings[strSubst] + 1
#    except KeyError:
#        Subst_List_Strings[strSubst] = 1
#    import SCons.Debug
#    SCons.Debug.caller(1)
    class ListSubber(UserList.UserList):
        """A class to construct the results of a scons_subst_list() call.

        Like StringSubber, this class binds a specific construction
        environment, mode, target and source with two methods
        (substitute() and expand()) that handle the expansion.

        In addition, however, this class is used to track the state of
        the result(s) we're gathering so we can do the appropriate thing
        whenever we have to append another word to the result--start a
        new line, start a new word, append to the current word, etc.  We
        do this by setting the "append" attribute to the right method so
        that our wrapper methods only need ever call ListSubber.append(),
        and the rest of the object takes care of doing the right thing
        internally.
        """
        def __init__(self, env, mode, target, source, conv, gvars):
            UserList.UserList.__init__(self, [])
            self.env = env
            self.mode = mode
            self.target = target
            self.source = source
            self.conv = conv
            self.gvars = gvars

            # In SUBST_RAW mode the $( ... $) strip markers are kept in the
            # output; in every other mode add_strip is a no-op so they are
            # dropped.
            if self.mode == SUBST_RAW:
                self.add_strip = lambda x, s=self: s.append(x)
            else:
                self.add_strip = lambda x, s=self: None
            self.in_strip = None
            self.next_line()

        def expand(self, s, lvars, within_list):
            """Expand a single "token" as necessary, appending the
            expansion to the current result.

            This handles expanding different types of things (strings,
            lists, callables) appropriately.  It calls the wrapper
            substitute() method to re-expand things as necessary, so that
            the results of expansions of side-by-side strings still get
            re-evaluated separately, not smushed together.
            """
            if is_String(s):
                try:
                    s0, s1 = s[:2]
                except (IndexError, ValueError):
                    # Zero- or one-character string: nothing to expand.
                    self.append(s)
                    return
                if s0 != '$':
                    self.append(s)
                    return
                # '$$' is an escaped literal dollar sign; '$(' / '$)'
                # open and close a signature-stripped section.
                if s1 == '$':
                    self.append('$')
                elif s1 == '(':
                    self.open_strip('$(')
                elif s1 == ')':
                    self.close_strip('$)')
                else:
                    key = s[1:]
                    # '${...}' expressions and dotted names are evaluated as
                    # Python expressions; bare names are dictionary lookups
                    # (local variables first, then globals).
                    if key[0] == '{' or string.find(key, '.') >= 0:
                        if key[0] == '{':
                            key = key[1:-1]
                        try:
                            s = eval(key, self.gvars, lvars)
                        except KeyboardInterrupt:
                            raise
                        except Exception, e:
                            if e.__class__ in AllowableExceptions:
                                return
                            raise_exception(e, self.target, s)
                    else:
                        if lvars.has_key(key):
                            s = lvars[key]
                        elif self.gvars.has_key(key):
                            s = self.gvars[key]
                        elif not NameError in AllowableExceptions:
                            raise_exception(NameError(), self.target, s)
                        else:
                            return

                    # Before re-expanding the result, handle
                    # recursive expansion by copying the local
                    # variable dictionary and overwriting a null
                    # string for the value of the variable name
                    # we just expanded.
                    lv = lvars.copy()
                    var = string.split(key, '.')[0]
                    lv[var] = ''
                    self.substitute(s, lv, 0)
                    self.this_word()
            elif is_List(s) or is_Tuple(s):
                # Each element of a sequence becomes (at least) one word.
                for a in s:
                    self.substitute(a, lvars, 1)
                self.next_word()
            # NOTE(review): this chunk is truncated here -- the remaining
            # branches of expand(), the other ListSubber methods it calls
            # (substitute, next_word, open_strip, ...) and the tail of
            # scons_subst_list() are not visible in this view.
def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
    """Expand a string containing construction variable substitutions.

    This is the work-horse function for substitutions in file names
    and the like.  The companion scons_subst_list() function (below)
    handles separating command lines into lists of arguments, so see
    that function if that's what you're looking for.
    """
    # Fast path: a plain string containing no '$' needs no substitution.
    if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0:
        return strSubst

    class StringSubber:
        """A class to construct the results of a scons_subst() call.

        This binds a specific construction environment, mode, target and
        source with two methods (substitute() and expand()) that handle
        the expansion.
        """
        def __init__(self, env, mode, target, source, conv, gvars):
            self.env = env
            self.mode = mode
            self.target = target
            self.source = source
            self.conv = conv
            self.gvars = gvars

        def expand(self, s, lvars):
            """Expand a single "token" as necessary, returning an
            appropriate string containing the expansion.

            This handles expanding different types of things (strings,
            lists, callables) appropriately.  It calls the wrapper
            substitute() method to re-expand things as necessary, so that
            the results of expansions of side-by-side strings still get
            re-evaluated separately, not smushed together.
            """
            if is_String(s):
                try:
                    s0, s1 = s[:2]
                except (IndexError, ValueError):
                    # Zero- or one-character string: nothing to expand.
                    return s
                if s0 != '$':
                    return s
                # '$$' is an escaped dollar sign; '$(' and '$)' markers are
                # passed through unchanged by this function.
                if s1 == '$':
                    return '$'
                elif s1 in '()':
                    return s
                else:
                    key = s[1:]
                    # '${...}' expressions and dotted names are evaluated as
                    # Python expressions; bare names are dictionary lookups
                    # (local variables first, then globals).
                    if key[0] == '{' or string.find(key, '.') >= 0:
                        if key[0] == '{':
                            key = key[1:-1]
                        try:
                            s = eval(key, self.gvars, lvars)
                        except KeyboardInterrupt:
                            raise
                        except Exception, e:
                            if e.__class__ in AllowableExceptions:
                                return ''
                            raise_exception(e, self.target, s)
                    else:
                        if lvars.has_key(key):
                            s = lvars[key]
                        elif self.gvars.has_key(key):
                            s = self.gvars[key]
                        elif not NameError in AllowableExceptions:
                            raise_exception(NameError(key), self.target, s)
                        else:
                            return ''

                    # Before re-expanding the result, handle
                    # recursive expansion by copying the local
                    # variable dictionary and overwriting a null
                    # string for the value of the variable name
                    # we just expanded.
                    #
                    # This could potentially be optimized by only
                    # copying lvars when s contains more expansions,
                    # but lvars is usually supposed to be pretty
                    # small, and deeply nested variable expansions
                    # are probably more the exception than the norm,
                    # so it should be tolerable for now.
                    lv = lvars.copy()
                    var = string.split(key, '.')[0]
                    lv[var] = ''
                    return self.substitute(s, lv)
            elif is_List(s) or is_Tuple(s):
                # Expand each element and join the converted results.
                def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars):
                    return conv(substitute(l, lvars))
                r = map(func, s)
                return string.join(r)
            # NOTE(review): this chunk is truncated here -- the callable/other
            # branches of expand(), StringSubber.substitute() and the tail of
            # scons_subst() are not visible in this view.
def NumpyCheckLibAndHeader(context, libs, symbols=None, headers=None, language=None, section=None, name=None, autoadd=1):
    """Configure-style check that `headers` compile and `symbols` link
    against `libs`.

    Args:
        context: SCons configure context (provides Message/Result/TryCompile).
        libs: library name or list of names to check.
        symbols: optional symbol name(s) to check for in the libraries.
        headers: optional header name(s) to compile-check first.
        language: not supported -- passing a value raises NotImplementedError.
        section: optional numscons.cfg section holding user customization.
        name: display name for the check; defaults to the first library.
        autoadd: when non-zero and the check succeeds, the modified build
            flags are kept in `env` instead of being restored.

    Returns:
        The (truthy/falsy) check status from TryCompile/check_symbol.
        NOTE(review): the "disabled from environment" path returns a tuple
        ``(context.Result(msg), {})`` instead -- callers should be checked
        for which shape they expect.
    """
    from SCons.Util import is_List
    env = context.env

    # XXX: handle language
    if language:
        raise NotImplementedError("FIXME: language selection not "\
                                  "implemented yet !")

    # Make sure libs and symbols are lists
    if libs and not is_List(libs):
        libs = [libs]
    if symbols and not is_List(symbols):
        symbols = [symbols]
    if headers and not is_List(headers):
        headers = [headers]

    if not name:
        name = libs[0]

    # Get user customization (numscons.cfg) if any
    # opts will keep all user cusstomization, and put into config check source
    # files in comment so that scons will automatically consider sources as
    # obsolete whenever config change
    opts = []
    build_info = None
    if section:
        cfg = _read_section(section, env)
        if cfg:
            opts.append(cfg)
            build_info = BuildDict.from_config_dict(cfg)
    if build_info is None:
        build_info = BuildDict()
    if not build_info['LIBS']:
        build_info['LIBS'] = libs
    opts.append(build_info)

    # Display message
    if symbols:
        sbstr = ', '.join(symbols)
        msg = 'Checking for symbol(s) %s in %s... ' % (sbstr, name)
    else:
        msg = 'Checking for %s... ' % name
    context.Message(msg)

    # Disable from environment if name=None is in it
    try:
        value = os.environ[name]
        if value == 'None':
            msg = 'Disabled from env through var %s !' % name
            return context.Result(msg), {}
    except KeyError:
        pass

    # Check whether the header is available (CheckHeader-like checker).
    # The customization options are embedded in an '#if 0' block so that a
    # config change makes the generated check source look different (and
    # therefore re-run), without affecting compilation.
    saved = save_and_set(env, build_info)
    try:
        src_code = [r'#include <%s>' % h for h in headers]
        src_code.extend([r'#if 0', str(opts), r'#endif', '\n'])
        src = '\n'.join(src_code)
        st = context.TryCompile(src, '.c')
    finally:
        restore(env, saved)
    if not st:
        context.Result('Failed (could not check header(s) : check config.log '\
                       'in %s for more details)' % env['build_dir'])
        return st

    # Check whether the library is available (CheckLib-like checker)
    saved = save_and_set(env, build_info)
    try:
        if symbols:
            for sym in symbols:
                # Add opts at the end of the source code to force dependency of
                # check from options.
                extra = [r'#if 0', str(build_info), r'#endif', '\n']
                st = check_symbol(context, None, sym, '\n'.join(extra))
                if not st:
                    break
    finally:
        # On failure (or when autoadd is disabled) undo the flag changes;
        # on success with autoadd, keep them in env.
        if st == 0 or autoadd == 0:
            restore(env, saved)
    if not st:
        # NOTE(review): 'sym' is only bound if the symbols loop ran; this
        # branch is unreachable with empty symbols since st stayed truthy
        # from the header check above.
        context.Result('Failed (could not check symbol %s : check config.log '\
                'in %s for more details))' % (sym, env['build_dir']))
        return st
    context.Result(st)
    return st
def Package(env, target=None, source=None, **kw):
    """Entry point for the package tool.

    Builds one package per requested PACKAGETYPE from ``source``
    (defaulting to the installed files), aliases the results under
    'package', and returns the package targets.
    """
    # Fall back to the installed files when no explicit sources were given.
    if not source:
        source = env.FindInstalledFiles()
    if len(source) == 0:
        raise UserError("No source for Package() given")

    # Resolve the package type.  Precedence: construction environment,
    # then keyword argument, then command line option, then a default
    # derived from the available builders (tar.gz or zip).
    try:
        kw['PACKAGETYPE'] = env['PACKAGETYPE']
    except KeyError:
        pass
    if not kw.get('PACKAGETYPE'):
        from SCons.Script import GetOption
        kw['PACKAGETYPE'] = GetOption('package_type')
    if kw['PACKAGETYPE'] is None:
        if 'Tar' in env['BUILDERS']:
            kw['PACKAGETYPE'] = 'targz'
        elif 'Zip' in env['BUILDERS']:
            kw['PACKAGETYPE'] = 'zip'
        else:
            raise UserError("No type for Package() given")

    requested_types = kw['PACKAGETYPE']
    if not is_List(requested_types):
        requested_types = requested_types.split(',')

    def _load_packager(ptype):
        # Packager modules live alongside this one, so import relatively.
        try:
            return importlib.import_module("." + ptype, __name__)
        except ImportError as e:
            raise SConsEnvironmentError("packager %s not available: %s"
                                        % (ptype, str(e)))

    packagers = [_load_packager(t) for t in requested_types]

    # Pad the target list with default names ("NAME-VERSION") until it is
    # as long as the list of package types, and default PACKAGEROOT.
    try:
        if not target:
            target = []
        shortfall = len(requested_types) - len(target)
        default_name = "%(NAME)s-%(VERSION)s"
        if shortfall > 0:
            target.extend([default_name % kw] * shortfall)
        if 'PACKAGEROOT' not in kw:
            kw['PACKAGEROOT'] = default_name % kw
    except KeyError as e:
        # NAME or VERSION was missing from kw when formatting defaults.
        raise SCons.Errors.UserError("Missing Packagetag '%s'" % e.args[0])

    # Turn the source file names into nodes.
    source = env.arg2nodes(source, env.fs.Entry)

    # Hand each packager its target and collect what it builds.
    targets = []
    try:
        for packager in packagers:
            built = packager.package(env, [target.pop(0)], source, **kw)
            targets.extend(built)

        assert (len(target) == 0)

    except KeyError as e:
        raise SCons.Errors.UserError(
            "Missing Packagetag '%s' for %s packager"\
            % (e.args[0],packager.__name__)
        )
    except TypeError as e:
        # A TypeError from a packager means a required named argument
        # ("tag") was missing.  Inspect its signature to report which.
        argspec = getfullargspec(packager.package)
        required = argspec.args
        if argspec.defaults:
            # Drop parameters that have default values.
            required = required[:-len(argspec.defaults)]
        for implicit_arg in ('env', 'target', 'source'):
            required.remove(implicit_arg)

        # Keep only the tags the caller did not supply.
        missing = [a for a in required if a not in kw]
        if len(missing) == 0:
            raise  # must be a different error, so re-raise
        elif len(missing) == 1:
            raise SCons.Errors.UserError(
                "Missing Packagetag '%s' for %s packager"\
                % (missing[0],packager.__name__)
            )
        else:
            raise SCons.Errors.UserError(
                "Missing Packagetags '%s' for %s packager"\
                % (", ".join(missing),packager.__name__)
            )

    target = env.arg2nodes(target, env.fs.Entry)
    targets.extend(env.Alias('package', targets))
    return targets
def get_command(env, node, action):  # pylint: disable=too-many-branches
    """Get the command to execute for node.

    Builds and returns the ninja build-description dict (outputs, implicit
    deps, rule, and the "cmd" variable) for a single SCons node/action pair.
    """
    # Prefer the node's own construction environment over the global one.
    if node.env:
        sub_env = node.env
    else:
        sub_env = env

    executor = node.get_executor()
    if executor is not None:
        tlist = executor.get_all_targets()
        slist = executor.get_all_sources()
    else:
        if hasattr(node, "target_peers"):
            tlist = node.target_peers
        else:
            tlist = [node]
        slist = node.sources

    # Retrieve the repository file for all sources
    slist = [rfile(s) for s in slist]

    # Get the dependencies for all targets (set comprehension dedupes).
    implicit = list({dep for tgt in tlist for dep in get_dependencies(tgt)})

    # Generate a real CommandAction
    if isinstance(action, SCons.Action.CommandGeneratorAction):
        # pylint: disable=protected-access
        action = action._generate(tlist, slist, sub_env, 1, executor=executor)

    rule = "CMD"

    # Actions like CommandAction have a method called process that is
    # used by SCons to generate the cmd_line they need to run. So
    # check if it's a thing like CommandAction and call it if we can.
    if hasattr(action, "process"):
        cmd_list, _, _ = action.process(tlist, slist, sub_env, executor=executor)

        # Despite having "list" in its name this member is not actually a
        # list.  It's the pre-subst'd string of the command.  We use it to
        # determine if the command we generated needs to use a custom Ninja
        # rule.  By default this redirects CC/CXX commands to CMD_W_DEPS but
        # the user can inject custom Ninja rules and tie them to commands by
        # using their pre-subst'd string.
        rule = __NINJA_RULE_MAPPING.get(action.cmd_list, "CMD")

        cmd = _string_from_cmd_list(cmd_list[0])
    else:
        # Anything else works with genstring, this is most commonly hit by
        # ListActions which essentially call process on all of their
        # commands and concatenate it for us.
        genstring = action.genstring(tlist, slist, sub_env)

        # Detect if we have a custom rule for this
        # "ListActionCommandAction" type thing.
        rule = __NINJA_RULE_MAPPING.get(genstring, "CMD")

        if executor is not None:
            cmd = sub_env.subst(genstring, executor=executor)
        else:
            cmd = sub_env.subst(genstring, target=tlist, source=slist)

        # Since we're only enabling Ninja for developer builds right
        # now we skip all Manifest related work on Windows as it's not
        # necessary. We shouldn't have gotten here but on Windows
        # SCons has a ListAction which shows as a
        # CommandGeneratorAction for linking. That ListAction ends
        # with a FunctionAction (embedManifestExeCheck,
        # embedManifestDllCheck) that simply say "does
        # target[0].manifest exist?" if so execute the real command
        # action underlying me, otherwise do nothing.
        #
        # Eventually we'll want to find a way to translate this to
        # Ninja but for now, and partially because the existing Ninja
        # generator does so, we just disable it all together.
        cmd = cmd.replace("\n", " && ").strip()
        if env["PLATFORM"] == "win32" and ("embedManifestExeCheck" in cmd or "embedManifestDllCheck" in cmd):
            # Drop the trailing manifest-check command.
            cmd = " && ".join(cmd.split(" && ")[0:-1])

        if cmd.endswith("&&"):
            cmd = cmd[0:-2].strip()

    outputs = get_outputs(node)
    if rule == "CMD_W_DEPS":
        # When using a depfile Ninja can only have a single output but
        # SCons will usually have emitted an output for every thing a
        # command will create because its caching is much more
        # complex than Ninja's. This includes things like DWO
        # files. Here we make sure that Ninja only ever sees one
        # target when using a depfile. It will still have a command
        # that will create all of the outputs but most targets don't
        # depend directly on DWO files and so this assumption is
        # safe to make.
        outputs = outputs[0:1]

    command_env = getattr(node.attributes, "NINJA_ENV_ENV", "")

    # If win32 and rule == CMD_W_DEPS then we don't want to calculate
    # an environment for this command. It's a compile command and
    # compiledb doesn't support shell syntax on Windows. We need the
    # shell syntax to use environment variables on Windows so we just
    # skip this platform / rule combination to keep the compiledb
    # working.
    #
    # On POSIX we can still set environment variables even for compile
    # commands so we do so.
    if not command_env and not (env["PLATFORM"] == "win32" and rule == "CMD_W_DEPS"):
        ENV = get_default_ENV(sub_env)

        # This is taken wholesale from SCons/Action.py
        #
        # Ensure that the ENV values are all strings:
        for key, value in ENV.items():
            if not is_String(value):
                if is_List(value):
                    # If the value is a list, then we assume it is a
                    # path list, because that's a pretty common list-like
                    # value to stick in an environment variable:
                    value = flatten_sequence(value)
                    value = os.pathsep.join(map(str, value))
                else:
                    # If it isn't a string or a list, then we just coerce
                    # it to a string, which is the proper way to handle
                    # Dir and File instances and will produce something
                    # reasonable for just about everything else:
                    value = str(value)

            if env["PLATFORM"] == "win32":
                command_env += "set '{}={}' && ".format(key, value)
            else:
                command_env += "{}={} ".format(key, value)

        # Cache the computed environment prefix on the node.
        setattr(node.attributes, "NINJA_ENV_ENV", command_env)

    ninja_build = {
        "outputs": outputs,
        "implicit": implicit,
        "rule": rule,
        "variables": {
            "cmd": command_env + cmd
        },
    }

    # Don't use sub_env here because we require that NINJA_POOL be set
    # on a per-builder call basis to prevent accidental strange
    # behavior like env['NINJA_POOL'] = 'console' and sub_env can be
    # the global Environment object if node.env is None.
    # Example:
    #
    # Allowed:
    #
    #     env.Command("ls", NINJA_POOL="ls_pool")
    #
    # Not allowed and ignored:
    #
    #     env["NINJA_POOL"] = "ls_pool"
    #     env.Command("ls")
    #
    if node.env and node.env.get("NINJA_POOL", None) is not None:
        ninja_build["pool"] = node.env["NINJA_POOL"]

    return ninja_build
def _get_SConscript_filenames(self, ls, kw):
    """Resolve SConscript() call parameters into files and exports.

    Returns a tuple ``(files, exports)`` where ``files`` is the list of
    SConscript file names to read and ``exports`` is the list of
    variables to export to them.  Raises SCons.Errors.UserError when
    the parameters are invalid.
    """
    exports = []

    n_args = len(ls)
    if n_args == 0:
        # No positional arguments: a "dirs" keyword must name the
        # directories in which to look for a (default-named) script.
        try:
            dirs = kw["dirs"]
        except KeyError:
            raise SCons.Errors.UserError(
                "Invalid SConscript usage - no parameters")
        if not is_List(dirs):
            dirs = [dirs]
        script_name = kw.get('name', 'SConscript')
        files = [os.path.join(str(d), script_name) for d in dirs]
    elif n_args == 1:
        files = ls[0]
    elif n_args == 2:
        # Second positional argument is the export list.
        files = ls[0]
        exports = self.Split(ls[1])
    else:
        raise SCons.Errors.UserError(
            "Invalid SConscript() usage - too many arguments")

    if not is_List(files):
        files = [files]

    # An "exports" keyword adds to any positional exports.
    if kw.get('exports'):
        exports.extend(self.Split(kw['exports']))

    variant_dir = kw.get('variant_dir')
    if variant_dir:
        if len(files) != 1:
            raise SCons.Errors.UserError(
                "Invalid SConscript() usage - can only specify one SConscript with a variant_dir"
            )
        duplicate = kw.get('duplicate', 1)
        src_dir = kw.get('src_dir')
        if not src_dir:
            # Derive the source directory from the script's own location
            # and read the copy inside the variant directory instead.
            src_dir, tail = os.path.split(str(files[0]))
            files = [os.path.join(str(variant_dir), tail)]
        else:
            if not isinstance(src_dir, SCons.Node.Node):
                src_dir = self.fs.Dir(src_dir)
            script_node = files[0]
            if not isinstance(script_node, SCons.Node.Node):
                script_node = self.fs.File(script_node)
            if script_node.is_under(src_dir):
                # Re-root the script under the variant directory using its
                # path relative to the source directory.
                tail = script_node.get_path(src_dir)
                files = [os.path.join(str(variant_dir), tail)]
            else:
                files = [script_node.get_abspath()]
            kw['src_dir'] = variant_dir
        self.fs.VariantDir(variant_dir, src_dir, duplicate)

    return (files, exports)
def Package(env, target=None, source=None, **kw):
    """ Entry point for the package tool.

    Builds one package per requested PACKAGETYPE from ``source``
    (defaulting to the installed files), aliases the results under
    'package', and returns the package targets.

    Raises UserError when no sources or no package type can be
    determined, and SCons.Errors.UserError when a packager-required
    "Packagetag" keyword argument is missing.
    """
    # check if we need to find the source files ourself
    if not source:
        source = env.FindInstalledFiles()

    if len(source)==0:
        raise UserError("No source for Package() given")

    # decide which types of packages shall be built. Can be defined through
    # four mechanisms: command line argument, keyword argument,
    # environment argument and default selection( zip or tar.gz ) in that
    # order.
    try:
        kw['PACKAGETYPE']=env['PACKAGETYPE']
    except KeyError:
        pass

    if not kw.get('PACKAGETYPE'):
        from SCons.Script import GetOption
        kw['PACKAGETYPE'] = GetOption('package_type')

    if kw['PACKAGETYPE'] is None:
        if 'Tar' in env['BUILDERS']:
            kw['PACKAGETYPE']='targz'
        elif 'Zip' in env['BUILDERS']:
            kw['PACKAGETYPE']='zip'
        else:
            raise UserError("No type for Package() given")

    PACKAGETYPE=kw['PACKAGETYPE']
    if not is_List(PACKAGETYPE):
        PACKAGETYPE=PACKAGETYPE.split(',')

    # load the needed packagers.
    def load_packager(type):
        try:
            file,path,desc=imp.find_module(type, __path__)
            # BUGFIX: imp.find_module() returns an open file object that the
            # caller is responsible for closing (it is None for packages);
            # previously it was leaked.
            try:
                return imp.load_module(type, file, path, desc)
            finally:
                if file is not None:
                    file.close()
        except ImportError as e:
            raise EnvironmentError("packager %s not available: %s"%(type,str(e)))

    packagers=list(map(load_packager, PACKAGETYPE))

    # set up targets and the PACKAGEROOT
    try:
        # fill up the target list with a default target name until the
        # PACKAGETYPE list is of the same size as the target list.
        if not target:
            target = []

        size_diff      = len(PACKAGETYPE)-len(target)
        default_name   = "%(NAME)s-%(VERSION)s"

        if size_diff>0:
            default_target = default_name%kw
            target.extend( [default_target]*size_diff )

        if 'PACKAGEROOT' not in kw:
            kw['PACKAGEROOT'] = default_name%kw

    except KeyError as e:
        # NAME or VERSION was missing from kw when formatting defaults.
        raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] )

    # setup the source files
    source=env.arg2nodes(source, env.fs.Entry)

    # call the packager to setup the dependencies.
    targets=[]
    try:
        for packager in packagers:
            t=[target.pop(0)]
            t=packager.package(env,t,source, **kw)
            targets.extend(t)

        assert( len(target) == 0 )

    except KeyError as e:
        raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\
                                      % (e.args[0],packager.__name__) )
    except TypeError as e:
        # this exception means that a needed argument for the packager is
        # missing. As our packagers get their "tags" as named function
        # arguments we need to find out which one is missing.
        # NOTE(review): inspect.getargspec is deprecated (removed in
        # Python 3.11); this legacy module still targets Python 2.
        from inspect import getargspec
        args,varargs,varkw,defaults=getargspec(packager.package)
        if defaults!=None:
            args=args[:-len(defaults)] # throw away arguments with default values
        args.remove('env')
        args.remove('target')
        args.remove('source')
        # now remove any args for which we have a value in kw.
        args=[x for x in args if x not in kw]

        if len(args)==0:
            raise # must be a different error, so re-raise
        elif len(args)==1:
            raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\
                                          % (args[0],packager.__name__) )
        else:
            raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\
                                          % (", ".join(args),packager.__name__) )

    target=env.arg2nodes(target, env.fs.Entry)
    targets.extend(env.Alias( 'package', targets ))
    return targets