def _install_files(env, target, source, exclude=None, glob=None, recurse=True):
    """Install each source path to its corresponding target location.

    ``target`` and ``source`` may be single items or lists; a single
    target is paired with every source.  Hidden, backup and compiled
    object files are always excluded in addition to any caller-supplied
    patterns.  Returns the list of installed nodes.
    """
    exclude_patterns = Flatten([exclude if exclude is not None else []])
    exclude_patterns.extend(['.*', '*~', '*.pyc', '*.o', '*.os'])
    glob_patterns = Flatten([glob if glob is not None else []])

    targets = Flatten([target])
    sources = Flatten([source])
    if len(targets) != len(sources):
        if len(targets) != 1:
            raise UserError('Export files mismatch')
        # A lone target receives every source.
        targets = targets * len(sources)

    results = []
    for dest_node, src_node in zip(targets, sources):
        if not isinstance(dest_node, SConsFSBase):
            dest_node = env.Dir(dest_node)
        if not isinstance(src_node, SConsFSBase):
            src_node = env.Entry(src_node)
        for dest, src in _get_files(dest_node, src_node, exclude_patterns,
                                    glob_patterns, recurse):
            results.extend(env.InstallAs(dest, src))
    return results
def location_dependency(name, location=None, develop=None, include=None,
                        sys_include=None, extra_sub_path=None,
                        source_path=None, linktype=None):
    """Create a named dependency class derived from ``base``.

    The returned type carries the supplied defaults as class attributes
    so the build system can instantiate it later by name.
    """
    from SCons.Script import Flatten

    includes = [path for path in Flatten([include]) if path is not None]
    sys_includes = [path for path in Flatten([sys_include]) if path is not None]

    class_name = 'BuildWith' + name.title()
    attributes = {
        '_name': name,
        '_default_location': location,
        '_default_develop': develop,
        '_default_include': includes,
        '_default_sys_include': sys_includes,
        '_extra_sub_path': extra_sub_path,
        '_source_path': source_path,
        '_linktype': linktype,
    }
    return type(class_name, (base,), attributes)
def __call__(self, env, program, sources, final_dir=None,
             include_patterns=None, exclude_dependencies=False,
             exclude_patterns=None):
    """Attach a coverage-runner builder for *program* over *sources*.

    Returns the coverage node(s), ``[]`` when neither a 'cov' nor a
    'test' action is active, or ``None`` when the toolchain provides no
    coverage runner.
    """
    # dict.has_key() was removed in Python 3; `in` is equivalent on 2.x.
    actions = env['variant_actions']
    if 'cov' not in actions and 'test' not in actions:
        return []
    if final_dir is None:  # PEP 8: compare to None with `is`
        final_dir = env['abs_final_dir']
    if include_patterns:
        include_patterns = Flatten([include_patterns])
    exclude_dependency_pattern = None
    if exclude_dependencies:
        # Undo re.escape()'s escaping of characters that must stay
        # literal downstream.  Raw strings avoid the invalid "\_"/"\#"
        # escape-sequence warnings on Python 3.6+.
        exclude_dependency_pattern = re.escape(env['download_root'])\
            .replace(r"\_", "_").replace(r"\#", "#")
        if os.path.isabs(env['download_root']):
            exclude_dependency_pattern = exclude_dependency_pattern + "#.*"
        else:
            exclude_dependency_pattern = ".*##" + exclude_dependency_pattern + "#.*"
    if exclude_patterns and exclude_dependency_pattern:
        exclude_patterns = Flatten([exclude_dependency_pattern, exclude_patterns])
    elif exclude_dependency_pattern:
        exclude_patterns = [exclude_dependency_pattern]
    emitter, builder = env['toolchain'].coverage_runner(
        program, final_dir,
        include_patterns=include_patterns,
        exclude_patterns=exclude_patterns)
    if not emitter and not builder:
        return None
    env['BUILDERS']['CoverageBuilder'] = env.Builder(action=builder,
                                                     emitter=emitter)
    coverage = env.CoverageBuilder([], Flatten([sources]))
    cuppa.progress.NotifyProgress.add(env, coverage)
    return coverage
def __call__(self, env, source=None, target=None, final_dir=None, data=None,
             depends_on=None, command=None, format_args=None,
             expected_exit_code=None, working_dir=None, retry_count=None):
    """Attach a RunBuilder that executes the built program.

    Only active when a run/test (or forced) action is selected;
    otherwise returns ``[]``.  Forced actions mark the node
    AlwaysBuild so it re-runs every build.
    """
    actions = env['variant_actions']
    # dict.has_key() was removed in Python 3; `in` behaves identically.
    if ('run' in actions or 'test' in actions
            or 'force_run' in actions or 'force_test' in actions):
        if final_dir is None:  # PEP 8: `is None`
            final_dir = env['abs_final_dir']
        action, emitter = runner(final_dir, command=command,
                                 format_args=format_args,
                                 expected_exit_code=expected_exit_code,
                                 target=target, working_dir=working_dir,
                                 retry_count=retry_count)
        env['BUILDERS']['RunBuilder'] = env.Builder(action=action,
                                                    emitter=emitter)
        sources = source
        # `data` is deprecated in favour of `depends_on`; note that
        # depends_on overrides data when both are supplied (original
        # behaviour, preserved).
        if data:
            sources = Flatten(source and [source, data] or [data])
        if depends_on:
            sources = Flatten(source and [source, depends_on] or [depends_on])
        run_process = env.RunBuilder([], sources)
        if 'force_run' in actions or 'force_test' in actions:
            run_process = env.AlwaysBuild(run_process)
        cuppa.progress.NotifyProgress.add(env, run_process)
        return run_process
    return []
def __call__(self, env, source, final_dir=None, data=None, runner=None,
             expected='passed'):
    """Attach a TestBuilder that runs *source* via the toolchain's
    test runner, optionally with extra *data* dependencies.
    """
    if final_dir is None:  # PEP 8: compare to None with `is`, not ==
        final_dir = env['abs_final_dir']
    if not runner:
        runner = self._default_runner
    test_builder, test_emitter = env['toolchain'].test_runner(
        runner, final_dir, expected)
    env['BUILDERS']['TestBuilder'] = env.Builder(action=test_builder,
                                                 emitter=test_emitter)
    sources = source
    if data:
        sources = Flatten([source, data])
    test = env.TestBuilder([], sources)
    cuppa.progress.NotifyProgress.add(env, test)
    return test
def __call__(self, env, dependencies):
    """Apply each named dependency to *env*.

    Entries may be plain string names or factory objects exposing
    ``name()``.  Raises BuildWithException when a dependency is
    unavailable or its factory yields nothing.
    """
    # Ensure we have a list of dependencies
    dependencies = Flatten(dependencies)
    for named_dependency in dependencies:
        if is_string(named_dependency):
            name = named_dependency
        else:
            name = named_dependency.name()
        if name not in env['dependencies']:  # idiomatic `not in`
            raise BuildWithException(
                "The sconscript [{}] requires the dependency [{}] but it is not available."
                .format(env['sconscript_file'], name))
        dependency_factory = env['dependencies'][name]
        env.AppendUnique(BUILD_WITH=name)
        dependency = dependency_factory(env)
        if dependency:
            dependency(env, env['toolchain'], env['variant'].name())
        else:
            raise BuildWithException(
                "The sconscript [{}] requires the dependency [{}] but it cannot be created."
                .format(env['sconscript_file'], name))
def __call__(self, env, source, **kwargs):
    """Compile each source into an object node under the build dir.

    Sources that are already object files pass through untouched;
    sources outside the build root get an explicit target path.
    """
    objects = []
    if 'CPPPATH' in env:
        env.AppendUnique(INCPATH=env['CPPPATH'])
    obj_suffix = env.subst('$OBJSUFFIX')
    for src in Flatten([source]):
        if os.path.splitext(str(src))[1] == obj_suffix:
            # Already an object file - nothing to compile.
            objects.append(src)
            continue
        target = None
        if not str(src).startswith(env['build_root']):
            base_name = os.path.splitext(os.path.split(str(src))[1])[0]
            target = os.path.join(
                env['build_dir'],
                env.subst('$OBJPREFIX') + base_name + env.subst('$OBJSUFFIX'))
        objects.append(env.Object(
            source=src, target=target,
            CPPPATH=env['SYSINCPATH'] + env['INCPATH'], **kwargs))
    cuppa.progress.NotifyProgress.add(env, objects)
    return objects
def __call__(self, env, libraries):
    """Build the given boost libraries as shared libraries.

    Adds the matching ``*_DYN_LINK`` defines so user code links
    against the shared variants, then delegates to
    BoostLibraryBuilder.
    """
    if not self._add_dependents:
        logger.warn(
            "BoostSharedLibrary() is deprecated, use BoostSharedLibs() or BoostSharedLib() instead"
        )
    libraries = Flatten([libraries])
    if 'boost' not in env['BUILD_WITH']:  # idiomatic `not in`
        env.BuildWith('boost')
    Boost = env['dependencies']['boost'](env)
    for library in libraries:
        if library.startswith('log'):
            # Covers 'log' and 'log_setup'.
            env.AppendUnique(CPPDEFINES='BOOST_LOG_DYN_LINK')
        elif library == 'chrono':
            env.AppendUnique(CPPDEFINES='BOOST_CHRONO_DYN_LINK')
        elif library == 'filesystem':
            env.AppendUnique(CPPDEFINES='BOOST_FILESYSTEM_DYN_LINK')
        elif library == 'date_time':
            env.AppendUnique(CPPDEFINES='BOOST_DATE_TIME_DYN_LINK')
        elif library == 'regex':
            env.AppendUnique(CPPDEFINES='BOOST_REGEX_DYN_LINK')
        elif library == 'system':
            env.AppendUnique(CPPDEFINES='BOOST_SYSTEM_DYN_LINK')
    library = BoostLibraryBuilder(
        Boost,
        add_dependents=self._add_dependents,
        verbose_build=self._verbose_build,
        verbose_config=self._verbose_config)(env, None, None, libraries, 'shared')
    if self._build_always:
        return AlwaysBuild(library)
    else:
        return library
def __call__(self, env, target, source, final_dir=None, data=None,
             append_variant=None, runner=None, expected='passed', **kwargs):
    """Build *target* and, when test/coverage actions are active,
    attach test and coverage nodes.  Returns the flattened node list.
    """
    nodes = []
    program = env.Build(target, source, final_dir=final_dir,
                        append_variant=append_variant, **kwargs)
    nodes.append(program)
    actions = env['variant_actions']
    # dict.has_key() was removed in Python 3; `in` is equivalent.
    if 'test' in actions or 'cov' in actions:
        if not runner:
            runner = self._default_runner
        test = env.Test(program, final_dir=final_dir, data=data,
                        runner=runner, expected=expected)
        nodes.append(test)
    if 'cov' in actions:
        coverage = env.Coverage(program, source, final_dir=final_dir)
        nodes.append(coverage)
    return Flatten(nodes)
def __call__(self, env, profiles):
    """Apply each named profile to *env*.

    Entries may be plain string names or factory objects exposing
    ``name()``.  Raises BuildProfileException when a profile is
    unavailable or its factory yields nothing.
    """
    # Ensure we have a list of profiles
    profiles = Flatten(profiles)
    for named_profile in profiles:
        if is_string(named_profile):
            name = named_profile
        else:
            name = named_profile.name()
        if name not in env['profiles']:  # idiomatic `not in`
            raise BuildProfileException(
                "The sconscript [{}] requires the profile [{}] but it is not available."
                .format(env['sconscript_file'], name))
        profile_factory = env['profiles'][name]
        env.AppendUnique(BUILD_PROFILE=name)
        profile = profile_factory(env)
        if profile:
            profile(env, env['toolchain'], env['variant'].name())
        else:
            raise BuildProfileException(
                "The sconscript [{}] requires the profile [{}] but it cannot be created."
                .format(env['sconscript_file'], name))
def _install_files(env, target, source, exclude=None, glob=None, recurse=True):
    """Install the given source paths to the given target location(s)."""
    excludes = Flatten([[] if exclude is None else exclude])
    # Always skip hidden, backup and compiled/object files.
    excludes += ['.*', '*~', '*.pyc', '*.o', '*.os']
    globs = Flatten([[] if glob is None else glob])

    targets = Flatten([target])
    sources = Flatten([source])
    if len(targets) != len(sources):
        if len(targets) == 1:
            # Replicate the single target for every source.
            targets = targets * len(sources)
        else:
            raise UserError('Export files mismatch')

    results = []
    for t, s in zip(targets, sources):
        t = t if isinstance(t, SConsFSBase) else env.Dir(t)
        s = s if isinstance(s, SConsFSBase) else env.Entry(s)
        for dest, src in _get_files(t, s, excludes, globs, recurse):
            results.extend(env.InstallAs(dest, src))
    return results
def filter_nodes(nodes, match_patterns, exclude_patterns=None):
    """Filter *nodes* by glob/regex patterns.

    A node is kept when it matches any of *match_patterns* (all nodes
    when none are given) and matches none of *exclude_patterns*.
    Patterns may be glob strings (translated via fnmatch) or
    pre-compiled regexes.

    Fixes: the default was a mutable ``[]`` (shared-default
    anti-pattern), and a node matching several match patterns was
    appended once per pattern - now appended at most once.
    """
    nodes = Flatten(nodes)
    if not match_patterns and not exclude_patterns:
        return nodes

    def _compile(patterns):
        # Glob strings become regexes; anything else passes through.
        compiled = []
        for pattern in Flatten([patterns]):
            if is_string(pattern):
                compiled.append(re.compile(fnmatch.translate(pattern)))
            else:
                compiled.append(pattern)
        return compiled

    if match_patterns:
        match_patterns = _compile(match_patterns)
    if exclude_patterns:
        exclude_patterns = _compile(exclude_patterns)

    filtered_nodes = []
    for node in nodes:
        path = str(node)
        logger.trace("Node in nodes to filter = [{}][{}]".format(
            as_notice(path), as_notice(node.path)))
        if exclude_patterns and any(
                pattern.match(path) for pattern in exclude_patterns):
            continue
        if not match_patterns:
            filtered_nodes.append(node)
        else:
            for match_pattern in match_patterns:
                if match_pattern.match(path):
                    filtered_nodes.append(node)
                    break  # avoid duplicate appends on multiple matches
    return filtered_nodes
def regexes_from_patterns(cls, patterns):
    """Normalise *patterns* to a flat list of regexes.

    Pattern instances contribute their wrapped regex; other truthy
    entries pass through unchanged; falsy entries are dropped.
    """
    result = []
    for entry in Flatten([patterns]):
        if isinstance(entry, Pattern):
            result.append(entry._pattern)
        elif entry:
            result.append(entry)
    return result
def __call__(self, env, source, **kwargs):
    """Compile each source into a (shared or static) object node.

    Target placement: sources already under the build root mirror
    their relative location; absolute paths compile in place;
    everything else goes under the build dir.  Sources that already
    are object files pass through untouched.
    """
    sources = Flatten([source])
    objects = []
    if 'CPPPATH' in env:
        env.AppendUnique(INCPATH=env['CPPPATH'])

    if self._shared:
        obj_prefix = env.subst('$SHOBJPREFIX')
        obj_suffix = env.subst('$SHOBJSUFFIX')
        obj_builder = env.SharedObject
    else:
        obj_prefix = env.subst('$OBJPREFIX')
        obj_suffix = env.subst('$OBJSUFFIX')
        obj_builder = env.Object

    logger.trace("Build Root = [{}]".format(as_notice(env['build_root'])))

    for src in sources:
        if not isinstance(src, Node):
            src = env.File(src)
        logger.trace("Object source = [{}]/[{}]".format(
            as_notice(str(src)), as_notice(src.path)))

        if os.path.splitext(str(src))[1] == obj_suffix:
            # Already an object file - pass straight through.
            objects.append(src)
            continue

        base_name = os.path.splitext(os.path.split(str(src))[1])[0]
        obj_name = obj_prefix + base_name + obj_suffix
        if src.path.startswith(env['build_root']):
            # Mirror the source's location relative to the build dir.
            offset_dir = os.path.relpath(os.path.split(src.path)[0],
                                         env['build_dir'])
            obj_target = env.File(os.path.join(offset_dir, obj_name))
        elif os.path.isabs(str(src)):
            obj_target = env.File(obj_name)
        else:
            obj_target = env.File(os.path.join(env['build_dir'], obj_name))

        logger.trace("Object target = [{}]/[{}]".format(
            as_notice(str(obj_target)), as_notice(obj_target.path)))
        objects.append(obj_builder(
            target=obj_target, source=src,
            CPPPATH=env['SYSINCPATH'] + env['INCPATH'], **kwargs))

    cuppa.progress.NotifyProgress.add(env, objects)
    return objects
def InstallPermAutoDir(self, dir, relative_dir, source, perm):
    """Install *source* files under *dir*, preserving their layout
    below *relative_dir*, then chmod each installed file to *perm*.
    Returns *dir*.
    """
    for node in Flatten(source):
        node_dir = str(node.get_dir())
        if node_dir.startswith(relative_dir):
            # Keep only the layout below relative_dir.
            dest = os.path.join(dir, node_dir[len(relative_dir):])
        else:
            dest = os.path.join(dir, node_dir)
        for installed in self.Install(dest, node):
            self.AddPostAction(installed, Chmod(installed, perm))
    return dir
def lcov_generator(source, target, env, for_signature):
    """Assemble the `lcov --capture` command line for the builder."""
    parts = ['lcov --capture', '--output-file', target[0].abspath]
    if 'LCOVDIR' in env:
        parts += ['--directory', str(Dir(env['LCOVDIR']))]
    if 'LCOVBASEDIR' in env:
        parts += ['--base-directory', str(Dir(env['LCOVBASEDIR']))]
    return ' '.join(Flatten(parts))
def __call__(self, env, sources, destination=None):
    """Collate a test report index over *sources*.

    Returns ``[]`` unless the 'test' action is active.
    """
    # `.keys()` is redundant: `in` tests dict membership directly.
    if 'test' not in env['variant_actions']:
        return []
    env['BUILDERS']['CollateTestReportIndexBuilder'] = env.Builder(
        action=CollateReportIndexAction(destination),
        emitter=CollateReportIndexEmitter(destination))
    index_file = env.CollateTestReportIndexBuilder([], Flatten([sources]))
    cuppa.progress.NotifyProgress.add(env, index_file)
    return index_file
def update(self, objs):
    """
    Given a list of objects (eg, the output of ``locals().values()``),
    update self.targets with the set containing the relative path to
    each target (ie, those objects with a "NodeInfo" attribute).
    """
    new_targets = {
        str(obj) for obj in Flatten(objs) if hasattr(obj, 'NodeInfo')
    }
    self.targets.update(new_targets)
def __call__(self, env, target, source, match=None, exclude=None):
    """Install the filtered *source* nodes to the given targets.

    Relative destinations are resolved under abs_final_dir; paths
    starting with '#' (SCons top-of-tree) and absolute paths are left
    alone.
    """
    destinations = []
    for dest in Flatten([target]):
        if dest[0] != '#' and not os.path.isabs(dest):
            dest = os.path.join(env['abs_final_dir'], dest)
        destinations.append(dest)
    filtered_nodes = filter_nodes(source, match, exclude)
    installed_files = env.InstallAs(destinations, filtered_nodes)
    cuppa.progress.NotifyProgress.add(env, installed_files)
    return installed_files
def __call__(self, target, source, env):
    """Expand ``self._kwargs`` into the template *source*, writing the
    result to *target* via str.format().
    """
    from SCons.Script import Flatten
    template_path = str(Flatten(source)[0])
    logger.debug("reading template file [{}]".format(
        as_notice(str(source[0]))))
    with open(template_path, 'r') as template_file:
        logger.debug("open target file [{}]".format(
            as_notice(str(target[0]))))
        with open(str(target[0]), 'w') as expanded_file:
            logger.debug("expand variables matching [{}]".format(
                as_notice(str(self._kwargs))))
            expanded_file.write(
                template_file.read().format(**self._kwargs))
    return None
def __call__(self, env, target, source, final_dir=None, data=None,
             append_variant=None, runner=None, expected='passed',
             command=None, expected_exit_code=None,
             cov_include_patterns=None, cov_exclude_dependencies=False,
             cov_exclude_patterns=None, working_dir=None, **kwargs):
    """Build *target* and optionally attach test and coverage nodes.

    NOTE(review): ``cov_include_patterns`` is accepted but never
    forwarded to env.Coverage() - confirm whether that is intended.
    """
    nodes = []
    program = env.Build(target, source, final_dir=final_dir,
                        append_variant=append_variant, **kwargs)
    nodes.append(program)
    actions = env['variant_actions']
    # dict.has_key() was removed in Python 3; `in` is equivalent.
    if 'test' in actions or 'force_test' in actions:
        if not runner:
            runner = self._default_runner
        test = env.Test(program, final_dir=final_dir, data=data,
                        runner=runner, expected=expected, command=command,
                        expected_exit_code=expected_exit_code,
                        working_dir=working_dir)
        nodes.append(test)
    if 'cov' in actions:
        coverage = env.Coverage(
            program, source, final_dir=final_dir,
            exclude_dependencies=cov_exclude_dependencies,
            exclude_patterns=cov_exclude_patterns)
        nodes.append(coverage)
    return Flatten(nodes)
def __call__(self, env, program, sources, final_dir=None):
    """Attach a coverage builder for *program* over *sources*."""
    if final_dir is None:  # PEP 8: compare to None with `is`, not ==
        final_dir = env['abs_final_dir']
    emitter, builder = env['toolchain'].coverage_runner(program, final_dir)
    env['BUILDERS']['CoverageBuilder'] = env.Builder(action=builder,
                                                     emitter=emitter)
    coverage = env.CoverageBuilder([], Flatten([sources]))
    cuppa.progress.NotifyProgress.add(env, coverage)
    return coverage
def __call__(self, env, source, **kwargs):
    """Compile each source with the combined include paths."""
    if 'CPPPATH' in env:
        env.AppendUnique(INCPATH=env['CPPPATH'])
    objects = [
        env.Object(source=src,
                   CPPPATH=env['SYSINCPATH'] + env['INCPATH'],
                   **kwargs)
        for src in Flatten([source])
    ]
    cuppa.progress.NotifyProgress.add(env, objects)
    return objects
def __call__(self, env, sources, destination=None):
    """Collate coverage files when a 'cov' or 'test' action is active.

    Returns ``[]`` when inactive, ``None`` when the toolchain has no
    collator.
    """
    actions = env['variant_actions']
    # dict.has_key() was removed in Python 3; `in` is equivalent.
    if 'cov' not in actions and 'test' not in actions:
        return []
    emitter, builder = env['toolchain'].coverage_collate_files(destination)
    if not emitter and not builder:
        return None
    env['BUILDERS']['CollateCoverageFilesBuilder'] = env.Builder(
        action=builder, emitter=emitter)
    summary_files = env.CollateCoverageFilesBuilder([], Flatten([sources]))
    cuppa.progress.NotifyProgress.add(env, summary_files)
    return summary_files
def __call__(self, env, program, sources, final_dir=None):
    """Attach a coverage builder (legacy variant: defaults to
    env['final_dir'] and uses sconscript_progress).
    """
    if final_dir is None:  # PEP 8: compare to None with `is`, not ==
        final_dir = env['final_dir']
    emitter, builder = env['toolchain'].coverage_runner(program, final_dir)
    env['BUILDERS']['CoverageBuilder'] = env.Builder(action=builder,
                                                     emitter=emitter)
    # Removed dead commented-out per-source builder loop.
    coverage = env.CoverageBuilder([], Flatten([sources]))
    cuppa.sconscript_progress.SconscriptProgress.add(env, coverage)
    return coverage
def __call__(self, env, sources, destination=None):
    """Collate the coverage index when a 'cov' or 'test' action is
    active.  Returns ``[]`` when inactive, ``None`` when the toolchain
    has no index collator.
    """
    actions = env['variant_actions']
    # `.keys()` is redundant: `in` tests dict membership directly.
    if 'cov' not in actions and 'test' not in actions:
        return []
    emitter, builder = env['toolchain'].coverage_collate_index(destination)
    if not emitter and not builder:
        return None
    env['BUILDERS']['CollateCoverageIndexBuilder'] = env.Builder(
        action=builder, emitter=emitter)
    index_file = env.CollateCoverageIndexBuilder([], Flatten([sources]))
    cuppa.progress.NotifyProgress.add(env, index_file)
    return index_file
def quex_generator(source, target, env, for_signature):
    """Assemble the quex-exe.py command line from the QUEX* env vars."""
    option_for = {
        'QUEXLANG': '--language',
        'QUEXEXT': '--file-extension-scheme',
        'QUEXOUTDIR': '--output-directory',
        'QUEXTKNID': '--foreign-token-id-file'
    }
    # NOTE(review): SCons Mkdir() returns an Action; called bare like
    # this it does not create the directory immediately - confirm
    # whether env.Execute(Mkdir(...)) was intended.
    Mkdir(env['QUEXOUTDIR'])
    cmd = ['quex-exe.py', '--mode-files', str(source[0])]
    cmd.append('--engine')
    cmd.append(env['QUEXENGINENS'] + env['QUEXENGINE'])
    cmd.append(env['QUEXFLAGS'])
    for key in option_for:
        if key in env:
            cmd.append([option_for[key], env[key]])
    return ' '.join(Flatten(cmd))
def __call__(self, env, source, target=None, final_dir=None, data=None,
             runner=None, expected='passed', command=None,
             expected_exit_code=None, working_dir=None):
    """Attach a TestBuilder when a test action is active.

    Returns the test node (AlwaysBuild when forced), or ``[]`` when no
    test action is selected.
    """
    actions = env['variant_actions']
    # dict.has_key() was removed in Python 3; `in` is equivalent.
    if 'test' in actions or 'force_test' in actions:
        if not runner:
            runner = self._default_runner
        if final_dir is None:  # PEP 8: `is None`
            final_dir = env['abs_final_dir']
        test_builder, test_emitter = env['toolchain'].test_runner(
            runner, final_dir, expected, command=command,
            expected_exit_code=expected_exit_code, target=target,
            working_dir=working_dir)
        env['BUILDERS']['TestBuilder'] = env.Builder(action=test_builder,
                                                     emitter=test_emitter)
        sources = source
        if data:
            sources = Flatten([source, data])
        test = env.TestBuilder([], sources)
        if 'force_test' in actions:
            test = env.AlwaysBuild(test)
        cuppa.progress.NotifyProgress.add(env, test)
        return test
    return []
def _build_units(env, units):
    """
    For each given unit, check the build manifest to see if it has
    been built.  If not, execute the unit's SConscript.
    """
    project_dir = _get_project_dir(env)
    for unit in Flatten([units]):
        unit_name = _get_unit_name(env, unit)
        # dict.has_key() was removed in Python 3; `in` is equivalent.
        if unit_name in env['X_BUILD_MANIFEST']:
            continue
        unit_dir = os.path.join(project_dir, unit_name)
        variant_dir = os.path.join(project_dir, 'build', _get_variant(env),
                                   unit_name)
        SConscript(dirs=unit_dir, exports='env', variant_dir=variant_dir,
                   duplicate=0)
        # Make sure that the build manifest has been updated.
        if unit_name not in env['X_BUILD_MANIFEST']:
            env['X_BUILD_MANIFEST'][unit_name] = {}
def show_extras(self, directory, one_line=True):
    """
    Given a relative path ``directory`` search for files recursively
    and print a list of those not found among ``self.targets``.
    Print one path per line if ``one_line`` is False.
    """
    outfiles = set()
    for dirpath, _, filenames in os.walk(directory):
        outfiles.update(path.join(dirpath, name) for name in filenames)
    extras = outfiles - self.targets
    if not extras:
        return
    print('\nextraneous files in %s:' % directory)
    if one_line:
        print(' ' + ' '.join(sorted(extras)))
    else:
        print('\n'.join(sorted(extras)))
    print()
def __call__(self, env, libraries):
    """Build the given boost libraries as static libraries via
    BoostLibraryBuilder.
    """
    if not self._add_dependents:
        logger.warn(
            "BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead"
        )
    libraries = Flatten([libraries])
    if 'boost' not in env['BUILD_WITH']:  # idiomatic `not in`
        env.BuildWith('boost')
    Boost = env['dependencies']['boost'](env)
    logger.trace("Build static libraries [{}]".format(
        colour_items(libraries)))
    library = BoostLibraryBuilder(
        Boost,
        add_dependents=self._add_dependents,
        verbose_build=self._verbose_build,
        verbose_config=self._verbose_config)(env, None, None, libraries, 'static')
    if self._build_always:
        return AlwaysBuild(library)
    else:
        return library