def generate(env):
    """Set up GResource support on *env*.

    Locates glib-compile-resources (aborting the build if absent), attaches
    the GResourceBundle/GResource convenience methods, and registers the
    two internal builders they rely on.
    """
    env.SetDefault(GLIB_COMPILE_RESOURCES=env.Detect('glib-compile-resources'))
    if env['GLIB_COMPILE_RESOURCES'] is None:
        # Hard requirement: without the compiler nothing below can work.
        print("Could not find glib-compile-resources")
        env.Exit(1)

    env.AddMethod(gresource_bundle_builder_wrapper, 'GResourceBundle')
    env.AddMethod(gresource_method, 'GResource')

    # Builder that writes the .gresource.xml manifest from GResourceEntry nodes.
    xml_builder = Builder(
        action=Action(gresource_xml_build, cmdstr="Generating $TARGET"),
        target_factory=env.File,
        source_factory=GResourceEntry,
        source_scanner=Scanner(
            function=gresource_entry_scan,
            skeys=[GResourceEntry.SCANNER_KEY],
        ),
    )
    # Builder that compiles the manifest into the binary bundle.
    bundle_builder = Builder(
        action=Action(
            '$GLIB_COMPILE_RESOURCES --target=$TARGET $SOURCE',
            cmdstr="Bundling $TARGET",
        ),
        source_scanner=Scanner(function=gresource_xml_scan),
    )
    env['BUILDERS']['_GResourceXMLInternal'] = xml_builder
    env['BUILDERS']['_GResourceFileInternal'] = bundle_builder
def init_wheel(env):
    """Create a wheel and its metadata using Environment *env*.

    Populates wheel-related names/paths on the environment, registers the
    ``dist_info`` and ``editable`` aliases, and returns the WHEEL/METADATA
    targets produced by wheel_metadata().
    """
    env["PACKAGE_NAMEVER"] = "-".join(
        (env["PACKAGE_NAME_SAFE"], env["PACKAGE_VERSION"]))
    wheel_filename = "-".join(
        (env["PACKAGE_NAMEVER"], env["WHEEL_TAG"])) + ".whl"
    wheel_target_dir = env.Dir(env["WHEEL_DIR"])
    # initial # here in path means its relative to top-level sconstruct
    env["WHEEL_PATH"] = env.get("WHEEL_PATH", env.Dir("#build/wheel/"))
    env["DIST_INFO_NAME"] = env["PACKAGE_NAMEVER"] + ".dist-info"
    env["DIST_INFO_PATH"] = env["WHEEL_PATH"].Dir(
        env["PACKAGE_NAME_SAFE"] + "-" + env["PACKAGE_VERSION"] + ".dist-info")
    env["WHEEL_DATA_PATH"] = env["WHEEL_PATH"].Dir(
        env["PACKAGE_NAME_SAFE"] + "-" + env["PACKAGE_VERSION"] + ".data")
    # used by prepare_metadata_for_build_wheel
    dist_info = env.Install(env.Dir(env["WHEEL_DIR"]), env["DIST_INFO_PATH"])
    env.Alias("dist_info", dist_info)
    env["WHEEL_FILE"] = env.Dir(wheel_target_dir).File(wheel_filename)
    # Write WHEEL and METADATA
    targets = wheel_metadata(env)
    # experimental PEP517-style editable
    # with filename that won't collide with our real wheel
    # (SCons wouldn't like that)
    editable_filename = ("-".join(
        (env["PACKAGE_NAMEVER"], "ed." + env["WHEEL_TAG"])) + ".whl")
    editable = env.Zip(
        target=env.Dir(env["WHEEL_DIR"]).File(editable_filename),
        source=env["DIST_INFO_PATH"],
        ZIPROOT=env["WHEEL_PATH"],
    )
    env.Alias("editable", editable)
    env.NoClean(editable)
    env.AddPostAction(editable, Action(add_editable))
    env.AddPostAction(editable, Action(add_manifest))
    # editable may need an extra dependency, so it gets its own
    # dist-info directory.
    editable_dist_info = env.Dir(
        "#build/editable/${PACKAGE_NAMEVER}.dist-info")
    env.Command(editable_dist_info, env["DIST_INFO_PATH"],
                Copy("$TARGET", "$SOURCE"))
    # Fix: the result of this Command was bound to an unused local
    # (``metadata2``); registering the Command is the side effect we need,
    # so the dead binding is dropped.
    env.Command(editable_dist_info.File("METADATA"), metadata_source(env),
                metadata_builder)
    return targets
def RegisterSnakeBuilders(self):
    """Return the protobuf code-generation builders keyed by builder name."""
    def _make(com, comstr, emitter):
        # One Builder per protoc invocation flavour, all consuming .proto.
        return Builder(action=Action(com, comstr),
                       emitter=emitter,
                       src_suffix='.proto')

    return {
        'ProtoCppLibrary': _make('$PROTOCCOM', '$PROTOCCOMSTR',
                                 _proto_cpp_emitter),
        'ProtoPyLibrary': _make('$PROTOPYCCOM', '$PROTOPYCCOMSTR',
                                _proto_py_emitter),
    }
def generate(env):
    """Register the WheelPackage/Wheel methods and the internal wheel builder."""
    env.AddMethod(wheel_package_builder_wrapper, 'WheelPackage')
    env.AddMethod(wheel_method, 'Wheel')
    env['BUILDERS']['_WheelFileInternal'] = Builder(
        action=Action(
            wheel_package_build,
            # Construction variables folded into the action signature so a
            # change to any of them triggers a rebuild.
            # Fix: 'AUTHOR' appeared twice; the duplicate added nothing.
            varlist=[
                'AUTHOR',
                'NAME',
                'VERSION',
                'AUTHOR_EMAIL',
                'LICENSE',
                'ARCHITECTURE',
                'PACKAGE_METADATA',
                'SOURCE_URL',
                'BUILD',
                '_WHEEL_ROOT_IS_PURELIB',
                '_WHEEL_TAG',
            ],
            cmdstr="Packaging $TARGET",
        ),
        target_factory=env.File,
        source_factory=lambda s: WheelEntry(s, env.Entry(s)),
        source_scanner=Scanner(
            function=wheel_entry_scan,
            skeys=[WheelEntry.SCANNER_KEY],
        ),
    )
def generate(env):
    """Wire up libfuzzer test support: list target, helper methods, alias."""
    list_action = Action(libfuzzer_test_list_builder_action,
                         "Generating $TARGET")
    env.Command('$LIBFUZZER_TEST_LIST', env.Value(_libfuzzer_tests),
                list_action)
    env.AddMethod(register_libfuzzer_test, 'RegisterLibfuzzerTest')
    env.AddMethod(build_cpp_libfuzzer_test, 'CppLibfuzzerTest')
    env.Alias('$LIBFUZZER_TEST_ALIAS', '$LIBFUZZER_TEST_LIST')
def generate(env):
    """Wire up integration test support: list target, helper methods, alias."""
    list_action = Action(integration_test_list_builder_action,
                         "Generating $TARGET")
    env.Command('$INTEGRATION_TEST_LIST', env.Value(_integration_tests),
                list_action)
    env.AddMethod(register_integration_test, 'RegisterIntegrationTest')
    env.AddMethod(build_cpp_integration_test, 'CppIntegrationTest')
    env.Alias('$INTEGRATION_TEST_ALIAS', '$INTEGRATION_TEST_LIST')
def compile_python_module(env, sources, dst_dir=None):
    """Byte-compile ``*.py`` sources into ``*.pyc`` files.

    ``sources`` is a list of Python targets (files).  When ``dst_dir`` is
    None each compiled file lands next to its source; otherwise compiled
    files are written into ``dst_dir``.  Returns the list of .pyc nodes.
    """
    action = ("python -c 'import py_compile; "
              "py_compile.compile(\"$SOURCE\", \"$TARGET\", doraise=True)'")
    if not state.log.verbose:
        # Replace the echoed command with a terse progress message.
        action = Action(action, cmdstr="Compiling Python module $TARGET")

    targets = []
    for src in sources:
        src_path = str(src)
        if dst_dir is None:
            dst_path = src_path + 'c'
        else:
            dst_path = os.path.join(str(dst_dir),
                                    os.path.basename(src_path) + 'c')
        env.Command(dst_path, src_path, action)
        targets.append(env.File(dst_path))
    return targets
def how_to_gen(source, target, env, for_signature):
    """Action generator: build the mkhowto command for an HTML doc target.

    Resolves the output html directory (defaulting to #/html/<name>) and
    the MKHOWTO executable, then returns a single-element list with the
    composed Action.
    """
    env.Alias('mkhowto', target)
    html_dir = env.get('html_dir', None)
    if html_dir is None:
        # Derive the output dir name from the first source file's basename.
        name = splitext(split(str(source[0]))[-1])[0]
        html_dir = join("#", 'html', name)
    dir = env.Dir(html_dir)
    # Prefer an env override, then PATH lookup, then the MKHOWTO env var.
    MKHOWTO = WhereIs('mkhowto') or os.environ.get('MKHOWTO')
    MKHOWTO = env.get('MKHOWTO', MKHOWTO)
    env['MKHOWTO'] = MKHOWTO
    # Only %(dir)s is substituted here by ``% vars()``; the ${...} pieces
    # are left for SCons variable expansion at execution time.
    mk_act = ("${TEXINPUTS and 'TEXINPUTS='}${':'.join(TEXINPUTS)} "
              "${MKHOWTO} --quiet --html --dir=%(dir)s "
              "${ADDRESS and '--address='}${ADDRESS} "
              "${UP_LINK and '--up-link='}${UP_LINK} "
              "${UP_TITLE and '--up-title='}${'\"'+str(UP_TITLE)+'\"'} "
              "${SOURCE}" % vars())
    mk_act_str = "mkhowto --html --dir=${TARGET.dir} "
    mkhowto_action = Action(mk_act, mk_act_str)
    # NOTE(review): Mkdir() returns an action object and its result is
    # discarded here, so the directory is not actually created at this
    # point -- confirm whether the directory is created elsewhere.
    Mkdir(str(dir))
    return [mkhowto_action]
def checkStaticZ(context):
    """Configure check: probe for a statically linkable zlib.

    Uses pkg-config to locate zlib, installs libz.a into 'misc' and
    appends it to LIBS.  On failure the saved environment is restored and
    0 is returned; otherwise the TryLink result is returned.
    """
    context.Message("Checking for static z... ")
    # Snapshot the env so mutations can be rolled back on failure.
    tmp = context.env.Clone()
    env = context.env
    (ok, stuff) = context.TryAction(Action("pkg-config --version"))
    if ok:
        try:
            utils.safeParseConfig(env, 'pkg-config zlib --cflags')
            # Strip off the -L part
            libdir = utils.readExec(
                'pkg-config zlib --libs-only-L')[2:].rstrip()
            # Hack to hardcode these libraries
            zlib = env.Install('misc', "%s/libz.a" % libdir)
            env.Append(LIBS=[zlib])
        except OSError:
            # pkg-config invocation failed: restore env, report failure.
            context.sconf.env = tmp
            context.Result(utils.colorResult(0))
            return 0
    # FIXME: write proper test
    ret = context.TryLink(
        """
        int main(int argc, char ** argv){
            return 0;
        }
        """, ".c")
    if not ret:
        context.sconf.env = tmp
    context.Result(utils.colorResult(ret))
    return ret
def get_sgf_checksum(file_name=None, sensorgraph=None):
    """Returns the device checksum from a sensorgraph file"""
    out_path = os.path.join('build', 'output', file_name)
    checksum_action = Action(_get_sgf_checksum_action,
                             "Building SGF Checksum file at $TARGET")
    # Bare environment: this command needs no toolchain support.
    env = Environment(tools=[])
    env.Command([out_path], [sensorgraph], action=checksum_action)
def generate(env, **kw):
    """Install the 'Download' builder on *env* (SCons tool hook)."""
    download_action = Action(downloadAction,
                             strfunction=strFunction,
                             chdir=True)
    env['BUILDERS']['Download'] = Builder(action=download_action,
                                          single_source=True,
                                          source_factory=Value)
def GeneratePdumAction(target, source, env):
    """Run the PDUM config tool over *source*, then make outputs writable."""
    cmd_parts = [
        '"' + PDUMCONFIG_EXE + '"',
        '-z', PROJ_TARGET,
        '-f', '$SOURCES',
        '-o', BUILDGEN_DIR,
    ]
    Action(' '.join(cmd_parts))(target, source, env)
    # Generated files may come out read-only; clear that so rebuilds work.
    for generated in target:
        ClearReadOnlyAttribute(generated)
def ensure_image_is_hex(input_path):
    """Return a path to a hex version of a firmware image.

    If the input file is already in hex format then input_path is returned
    and nothing is done.  If it is not in hex format then an SCons action
    is added to convert it to hex and the target output file path is
    returned.

    A cache (CONVERTED_HEX_FILES) is kept so that each file is only
    converted once.

    Args:
        input_path (str): A path to a firmware image.

    Returns:
        str: The path to a hex version of input_path; this may be equal to
            input_path if it is already in hex format.

    Raises:
        BuildError: If the file has no extension or an unsupported one.
    """
    family = utilities.get_family('module_settings.json')
    target = family.platform_independent_target()
    build_dir = target.build_dirs()['build']

    # The mingw tool set is needed on Windows; default tools elsewhere.
    if platform.system() == 'Windows':
        env = Environment(tools=['mingw'], ENV=os.environ)
    else:
        env = Environment(tools=['default'], ENV=os.environ)

    input_path = str(input_path)
    image_name = os.path.basename(input_path)
    root, ext = os.path.splitext(image_name)

    # Idiom fix: test emptiness directly instead of len(ext) == 0.
    if not ext:
        raise BuildError(
            "Unknown file format or missing file extension in ensure_image_is_hex",
            file_name=input_path)

    file_format = ext[1:]
    if file_format == 'hex':
        return input_path

    if file_format == 'elf':
        new_file = os.path.join(build_dir, root + '.hex')
        # Register the conversion command at most once per output file.
        if new_file not in CONVERTED_HEX_FILES:
            env.Command(new_file, input_path,
                        action=Action(
                            "arm-none-eabi-objcopy -O ihex $SOURCE $TARGET",
                            "Creating intel hex file from: $SOURCE"))
            CONVERTED_HEX_FILES.add(new_file)
        return new_file

    raise BuildError("Unknown file format extension in ensure_image_is_hex",
                     file_name=input_path,
                     extension=file_format)
def RegisterSnakeBuilders(self):
    """Return the thrift code-generation builders keyed by builder name."""
    # (command var, command string var, emitter) per target language.
    specs = {
        'ThriftCppLibrary': ('$THRIFTCCOM', '$THRIFTCCOMSTR',
                             _thrift_cpp_emitter),
        'ThriftPhpLibrary': ('$THRIFTPHPCOM', '$THRIFTPHPCOMSTR',
                             _thrift_php_emitter),
        'ThriftPyLibrary': ('$THRIFTPYCOM', '$THRIFTPYCOMSTR',
                            _thrift_py_emitter),
    }
    return {
        name: Builder(action=Action(com, comstr),
                      emitter=emitter,
                      src_suffix='.thrift')
        for name, (com, comstr, emitter) in specs.items()
    }
def GenerateOsConfigAction(target, source, env):
    """Run the OS config tool over *source*, then make outputs writable."""
    cmd_parts = [
        '"' + OSCONFIG_EXE + '"',
        '-f', '$SOURCES',
        '-o', BUILDGEN_DIR,
        '-v', JENNIC_CHIP,
    ]
    Action(' '.join(cmd_parts))(target, source, env)
    # Generated files may come out read-only; clear that so rebuilds work.
    for generated in target:
        ClearReadOnlyAttribute(generated)
def build_trub_records(file_name, slot_assignments=None, os_info=None,
                       sensor_graph=None, app_info=None,
                       use_safeupdate=False):
    """Build a trub script based on the records received for each slot.

    slot_assignments should be a list of tuples in the following form:
    ("slot X" or "controller", firmware_image_name, record_type, args)

    The output of this autobuild action will be a trub script in
    build/output/<file_name> that assigns the given firmware to each slot
    in the order specified in the slot_assignments list.

    Args:
        file_name (str): The name of the output file that we should
            create.  This file name should end in .trub
        slot_assignments (list of (str, str, str, list)): The tuple
            contains (slot name, firmware file, record type, args)
        os_info (tuple(int, str)): A tuple of OS version tag and X.Y
            version number that will be set as part of the OTA script if
            included. Optional.
        sensor_graph (str): Name of sgf file. Optional.
        app_info (tuple(int, str)): A tuple of App version tag and X.Y
            version number that will be set as part of the OTA script if
            included. Optional.
        use_safeupdate (bool): Enables safe firmware update
    """
    resolver = ProductResolver.Create()
    env = Environment(tools=[])
    # Fix: removed the dead `records = []` local that was never used.
    files = []

    if slot_assignments is not None:
        slots = [_parse_slot_assignment(x) for x in slot_assignments]
        files = [
            ensure_image_is_hex(
                resolver.find_unique("firmware_image", x[1]).full_path)
            for x in slot_assignments
        ]
        env['SLOTS'] = slots
    else:
        env['SLOTS'] = None

    env['USE_SAFEUPDATE'] = use_safeupdate
    env['OS_INFO'] = os_info
    env['APP_INFO'] = app_info
    env['UPDATE_SENSORGRAPH'] = False

    if sensor_graph is not None:
        files.append(sensor_graph)
        env['UPDATE_SENSORGRAPH'] = True

    env.Command([os.path.join('build', 'output', file_name)], files,
                action=Action(_build_records_action,
                              "Building TRUB script at $TARGET"))
def dot_test(self):
    """Register the dottest builder for this node (runs at most once)."""
    if self._built:
        # Already wired up; nothing to do.
        return
    dottest_action = Action(self.build,
                            "Dottest '%s' " % (self.act.__name__))
    Builder(action=dottest_action, emitter=self.emitter)(self.env, self.name)
    self._built = True
def generate(env):
    """
    RootCint(dictionary,headers[,PCMNAME=pcmfilename])
    env.RootCint(dictionary,headers[,PCMNAME=pcmfilename])

    Generate ROOT dictionary source file "dictionary" from the list of
    class headers "headers".
    """
    rootcint_action = Action(rootcint_builder, rootcint_print)
    env.Append(BUILDERS={
        'RootCint': Builder(action=rootcint_action,
                            emitter=rootcint_emitter),
    })
    # Default the PCM output name to empty (no pcm file requested).
    env.Replace(PCMNAME='')
def GenerateZigbeeStackAction(target, source, env):
    """Run the ZPS config tool over *source*, then make outputs writable."""
    cmd_parts = [
        '"' + ZPSCONFIG_EXE + '"',
        '-n', PROJ_TARGET,
        '-t', JENNIC_CHIP,
        '-l', get_zpslib_path(ZPS_NWK_LIB),
        '-a', get_zpslib_path(ZPS_APL_LIB),
        '-c', TOOLCHAIN_DIR,
        '-f', '$SOURCES',
        '-o', BUILDGEN_DIR,
    ]
    Action(' '.join(cmd_parts))(target, source, env)
    # Generated files may come out read-only; clear that so rebuilds work.
    for generated in target:
        ClearReadOnlyAttribute(generated)
def generate(env):
    """Configure *env* for building Python extensions and wheels.

    Pulls compiler flags from distutils, derives package name/version from
    PACKAGE_METADATA, and registers egg_info/METADATA/PKG-INFO commands
    plus the wheel Zip target.
    """
    env.Append(CPPPATH=[sysconfig.get_python_inc()])
    env.Append(LIBPATH=[sysconfig.get_config_var('LIBDIR')])
    # LIBS = ['python' + sysconfig.get_config_var('VERSION')]
    # only on CPython; ask distutils
    compiler = distutils.ccompiler.new_compiler()
    distutils.sysconfig.customize_compiler(compiler)
    if isinstance(compiler, distutils.unixccompiler.UnixCCompiler):
        # Reuse the interpreter's own compile flags (skip argv[0]).
        env.MergeFlags(' '.join(compiler.compiler_so[1:]))
        # XXX other flags are revealed in compiler
    # XXX MSVC works differently
    env['PACKAGE_NAME'] = env['PACKAGE_METADATA']['name']
    env['PACKAGE_NAME_SAFE'] = normalize_package(env['PACKAGE_NAME'])
    env['PACKAGE_VERSION'] = env['PACKAGE_METADATA']['version']
    # Development .egg-info has no version number. Needs to have
    # underscore _ and not hyphen -
    env['EGG_INFO_PATH'] = env['PACKAGE_NAME_SAFE'] + '.egg-info'
    # all files under this directory will be packaged as a wheel
    env['WHEEL_PATH'] = env.Dir('#build/wheel/')
    # this distutils command helps trick setuptools into doing work for us
    command = Command(Distribution(env['PACKAGE_METADATA']))
    egg_info = env.Command(egg_info_targets(env), 'pyproject.toml',
                           egg_info_builder)
    env['DUMMY_COMMAND'] = command
    env.Clean(egg_info, env['EGG_INFO_PATH'])
    env.Alias('egg_info', egg_info)
    metadata = env.Command('METADATA', 'pyproject.toml', metadata_builder)
    # PKG-INFO is a straight copy of the first egg-info target.
    pkg_info = env.Command('PKG-INFO', egg_info_targets(env)[0].get_path(),
                           Copy('$TARGET', '$SOURCE'))
    # XXX switch to using FindInstalledFiles() or another collector, so
    # random files in build directory won't wind up in the archive.
    # XXX is this relative to the calling file?
    whl = env.Zip(target='-'.join(
        (env['PACKAGE_NAME_SAFE'], env['PACKAGE_VERSION'],
         env['WHEEL_TAG'])) + '.whl',
                  source=env['WHEEL_PATH'],
                  ZIPROOT=env['WHEEL_PATH'])
    env.AddPostAction(whl, Action(add_manifest))
    env.Clean(whl, env['WHEEL_PATH'])
    return
def generate(env, **kw):
    """Install the 'Extract' builder on *env* (SCons tool hook)."""
    extract_action = Action(extractAction, strfunction=strFunction)
    env['BUILDERS']['Extract'] = Builder(action=extract_action,
                                         emitter=emitter,
                                         target_factory=Dir)
def generate(env):
    '''
    Copy(target, source)
    env.Copy(target, source)

    Registers a 'Copy' builder that copies "source" to "target" using
    copy_builder (with progress printed via copy_print).

    (The previous docstring was copy-pasted from the SymLink tool and
    described link creation, which this builder does not do.)
    '''
    # NOTE(review): 'single_target' is not a documented Builder keyword in
    # SCons -- confirm it is honoured rather than silently stored.
    bldr = Builder(action = Action(copy_builder,copy_print), single_target = True, single_source = True)
    env.Append(BUILDERS = {'Copy' : bldr})
def Environment(variables, configfiles, version=None, service_module=None,
                config_class=saliweb.backend.Config):
    """Create and configure the SCons Environment for a saliweb service.

    Adds build/coverage variables, loads the per-build configuration file,
    sets up install directories and Install* helper methods plus the Perl
    and Python test builders, and makes an always-built 'install' check
    the default target.  Returns the configured environment.
    """
    buildmap = _add_build_variable(variables, configfiles)
    variables.Add(
        SCons.Script.PathVariable(
            'html_coverage',
            'Directory to output HTML coverage reports into', None,
            SCons.Script.PathVariable.PathIsDirCreate))
    variables.Add(
        SCons.Script.BoolVariable('coverage',
                                  'Preserve output coverage files', False))
    env = SCons.Script.Environment(variables=variables)
    # Inherit some variables from the environment:
    if 'PERL5LIB' in os.environ:
        env['ENV']['PERL5LIB'] = os.environ['PERL5LIB']
    if 'PATH' in os.environ:
        env['ENV']['PATH'] = os.environ['PATH']
    # Map the chosen 'build' variable value to its config file.
    configfile = buildmap[env['build']]
    env['configfile'] = File(configfile)
    env['config'] = config = config_class(configfile)
    _setup_sconsign(env)
    _setup_version(env, version)
    _setup_service_name(env, config, service_module)
    _setup_install_directories(env)
    # Skip the check/install work for 'scons -c' and 'scons -h' runs.
    if not env.GetOption('clean') and not env.GetOption('help'):
        _check(env)
        _install_config(env)
        _install_directories(env)
    env.AddMethod(_InstallAdminTools, 'InstallAdminTools')
    env.AddMethod(_InstallCGIScripts, 'InstallCGIScripts')
    env.AddMethod(_InstallPython, 'InstallPython')
    env.AddMethod(_InstallHTML, 'InstallHTML')
    env.AddMethod(_InstallTXT, 'InstallTXT')
    env.AddMethod(_InstallCGI, 'InstallCGI')
    env.AddMethod(_InstallPerl, 'InstallPerl')
    env.AddMethod(_make_frontend, 'Frontend')
    env.Append(BUILDERS={'RunPerlTests': Builder(action=builder_perl_tests)})
    env.Append(BUILDERS={'RunPythonTests':
                         Builder(action=builder_python_tests)})
    # 'install' is a pseudo-target whose action validates the installation.
    install = env.Command('install', None,
                          Action(_install_check, 'Check installation ...'))
    env.AlwaysBuild(install)
    env.Requires(install, env['config'].directories.values())
    env.Default(install)
    return env
def profile(self):
    """Register the hotshot profiling builder for this node (runs at most once)."""
    if self._built:
        # Already wired up; nothing to do.
        return
    profile_action = Action(self.build,
                            "Profile '%s' [%s]" % (self.act.__name__,
                                                   self.name))
    Builder(action=profile_action,
            emitter=self.emitter,
            suffix='.hotshot')(self.env, self.name)
    self._built = True
def generate(env):
    '''
    LibTag(tag_file, library_files)
    env.LibTag(tag_file, library_files)

    Registers a 'LibTag' builder that creates a <lib>.lib.tag file from a
    list of library file names.  The tag content exists mainly so a shared
    library's abi file can be detected deterministically: the make build
    instead tests for the abi file's existence in the build directory,
    which can be fooled by leftovers from a different library version.
    '''
    env.Append(BUILDERS={
        'LibTag': Builder(action=Action(lib_tag_builder, lib_tag_print)),
    })
def CreateSLIMpyBuilder(name, act, file_name=None, str_func=None,
                        emitters=None, depends_on=None):
    """Construct an SCons Builder wrapping callable *act* for SLIMpy.

    The action is wrapped with post-mortem handling, a default string
    function is derived from *name* when none is given, and the standard
    SLIMpy emitter chain (plus any extra *emitters*) is attached.  Extra
    dependencies declared on *act* are appended to *depends_on* (note:
    appended in place when a list is passed by the caller).
    """
    if str_func is None:
        str_func = default_str_closure(name)
    slimpy_action = Action(post_mortem_closure(act), str_func)

    if file_name is None:
        # Default to the module file that defines *act*.
        file_name = sys.modules[act.__module__].__file__
    if depends_on is None:
        depends_on = []
    if hasattr(act, "__additional_dependancies__"):
        depends_on.extend(getattr(act, "__additional_dependancies__"))

    slim_emitters = [
        rsf_binary_emitter,
        logfile_emitter,
        slimpy_variable_emitter,
        slimpy_file(file_name),
        additional_parameters(depends_on),
        profile_emitter_wrapper(act),
        dottest_emitter_wrapper(act),
        help_emitter,
    ]
    add_function_emitter(act, slim_emitters)
    slim_emitters.extend(emitters if emitters is not None else [])

    return Builder(action=slimpy_action,
                   emitter=slim_emitters,
                   suffix='.rsf',
                   src_suffix='.rsf')
def checkStaticOgg(context):
    """Configure check: probe for statically linkable ogg/vorbis.

    Sets HAVE_OGG, uses pkg-config to locate the vorbisfile/ogg/vorbis
    static archives in 'misc', and verifies with a small link test.  On
    failure the saved environment is restored and 0 is returned;
    otherwise the TryLink result is returned.
    """
    context.Message("Checking for static ogg and vorbis... ")
    # Snapshot the env so mutations can be rolled back on failure.
    tmp = context.env.Clone()
    env = context.env
    env['HAVE_OGG'] = True
    env.Append(CPPDEFINES=['HAVE_OGG'])
    (ok, stuff) = context.TryAction(Action("pkg-config --version"))
    if ok:
        try:
            utils.safeParseConfig(env, 'pkg-config vorbisfile --cflags')
            # Strip off the -L part
            libdir = utils.readExec(
                'pkg-config vorbisfile --libs-only-L')[2:].rstrip()
            # Hack to hardcode these libraries
            vorbisfile = env.Install('misc', "%s/libvorbisfile.a" % libdir)
            ogg = env.Install('misc', "%s/libogg.a" % libdir)
            vorbis = env.Install('misc', "%s/libvorbis.a" % libdir)
            env.Append(LIBS=[vorbisfile, ogg, vorbis])
        except OSError:
            # pkg-config invocation failed: restore env, report failure.
            context.sconf.env = tmp
            context.Result(utils.colorResult(0))
            return 0
    # SDL redefines main() on some platforms; link against the right symbol.
    main = 'int main(int argc, char ** argv)'
    try:
        if env['HAVE_SDL_MAIN']:
            main = 'int SDL_main(int argc, char ** argv)'
    except KeyError:
        pass
    ret = context.TryLink(
        """
        #include <vorbis/vorbisfile.h>
        #include <stdio.h>
        %(main)s {
            OggVorbis_File ovf;
            FILE * f;
            ov_open_callbacks(f, &ovf, 0, 0, OV_CALLBACKS_DEFAULT);
            return 0;
        }
        """ % {'main': main}, ".c")
    if not ret:
        context.sconf.env = tmp
    context.Result(utils.colorResult(ret))
    return ret
def generate(env):
    '''
    SymLink(link_name,source)
    env.SymLink(link_name,source)

    Registers a 'SymLink' builder: makes a symbolic link named
    "link_name" that points to the real file or directory "source".
    The link produced is always relative.
    '''
    symlink = Builder(
        action=Action(symlink_builder, symlink_print),
        target_factory=FS.File,
        source_factory=FS.Entry,
        single_target=True,
        single_source=True,
        emitter=symlink_emitter,
    )
    env.Append(BUILDERS={'SymLink': symlink})
def combine_trub_scripts(trub_scripts_list, out_file):
    """Combines trub scripts, processed from first to last"""
    resolver = ProductResolver.Create()
    # Resolve each named script to its on-disk path, preserving order.
    script_paths = []
    for name in trub_scripts_list:
        script_paths.append(
            resolver.find_unique("trub_script", name).full_path)

    env = Environment(tools=[])
    env.Command([os.path.join('build', 'output', out_file)], script_paths,
                action=Action(_combine_trub_scripts_action,
                              "Combining TRUB scripts at $TARGET"))
def autobuild_bootstrap_file(file_name, image_list):
    """Combine multiple firmware images into a single bootstrap hex file.

    Each entry of image_list must be a firmware_image product of this
    tile or one of its dependency tiles, named exactly as in the products
    section of the corresponding module_settings.json.  Intermediate hex
    conversions are cached globally so nothing is converted twice.

    Args:
        file_name (str): Full name of the output bootstrap hex file.
        image_list (list of str): Files combined into a single hex file
            that will be used to flash a chip.
    """
    family = utilities.get_family('module_settings.json')
    target = family.platform_independent_target()
    resolver = ProductResolver.Create()
    env = Environment(tools=[])

    output_dir = target.build_dirs()['output']
    build_dir = target.build_dirs()['build']
    build_output_name = os.path.join(build_dir, file_name)
    full_output_name = os.path.join(output_dir, file_name)

    # Every input must be in hex form before merging.
    processed_input_images = [
        arm.ensure_image_is_hex(
            resolver.find_unique('firmware_image', name).full_path)
        for name in image_list
    ]

    merge_action = Action(
        arm.merge_hex_executables,
        "Merging %d hex files into $TARGET" % len(processed_input_images))
    env.Command(build_output_name, processed_input_images,
                action=merge_action)
    env.Command(full_output_name, build_output_name,
                Copy("$TARGET", "$SOURCE"))