def generate(env, **kw):
    """Add occam/occbuild Builders and construction variables to *env*.

    kw['occbuild'], when supplied, is a node list whose first entry is the
    occbuild executable to run; otherwise 'occbuild' is resolved via PATH.
    """
    occbuild = kw.get('occbuild', None)
    if occbuild:
        occbuild_path = occbuild[0].abspath

        # Bind the occbuild node into the emitter so built targets also
        # depend on the occbuild tool itself.
        def depend_emitter(target, source, env):
            return occbuild_depend_emitter(target, source, env, occbuild)
    else:
        occbuild_path = 'occbuild'
        depend_emitter = None

    pideps_scanner = Scanner(function=pideps_scan,
                             skeys=['.occ'],
                             path_function=FindPathDirs('INCPATH'))

    tce_bld = Builder(action=Action('$OCCBUILDCOM', '$OCCBUILDCOMSTR'),
                      emitter=depend_emitter,
                      suffix='.tce',
                      src_suffix='.occ')
    # FIXME: The source scanner does not work well enough yet :/
    #source_scanner = pideps_scanner)

    lib_bld = Builder(action=Action('$OCCBUILDLIBRARYCOM', '$OCCBUILDLIBRARYCOMSTR'),
                      emitter=[depend_emitter, occbuild_library_emitter],
                      suffix='.lib',
                      src_suffix='.tce',
                      src_builder=[tce_bld])

    prog_bld = Builder(
        action=Action('$OCCBUILDPROGRAMCOM', '$OCCBUILDPROGRAMCOMSTR'),
        emitter=[depend_emitter, occbuild_program_emitter],
        suffix='$PROGSUFFIX',
        src_suffix=['.occ', '.tce'],
        # FIXME: If I leave the sourcebuilder in, scons seems to
        # want to turn my .occ extensions when I have a mixed
        # .occ, .tce source list into .tce using the builder
        )
    #src_builder = [tce_bld])

    # BUG FIX: this keyword was misspelled 'src_bulider', so SCons silently
    # ignored it and .occ sources were never routed through tce_bld here.
    tbc_headr_bld = Builder(action=Action('$TBCHEADERCOM', '$TBCHEADERCOMSTR'),
                            emitter=[depend_emitter],
                            suffix='.h',
                            src_suffix=['.occ', '.tce'],
                            src_builder=[tce_bld])

    # Add the new Builder to the list of builders
    # Use of $( $) causes bracketed flags not trigger rebuild when changed
    env['BUILDERS']['OccamObject'] = tce_bld
    env['OCCBUILDCOM'] = '$OCCBUILD $_OCCBUILD_TOOLCHAIN $_OCCBUILD_SEARCH_DIRS $OCCBUILDFLAGS --object $SOURCES'
    env['BUILDERS']['OccamLibrary'] = lib_bld
    env['OCCBUILDLIBRARYCOM'] = '$OCCBUILD $_OCCBUILD_TOOLCHAIN $_OCCBUILD_SEARCH_DIRS $OCCBUILDFLAGS --library $TARGET $SOURCES'
    env['BUILDERS']['OccamProgram'] = prog_bld
    env['OCCBUILDPROGRAMCOM'] = '$OCCBUILD $_OCCBUILD_TOOLCHAIN $_OCCBUILD_SEARCH_DIRS $OCCBUILDFLAGS --program $SOURCES'
    env['BUILDERS']['OccamBytecodeHeader'] = tbc_headr_bld
    env['TBCHEADERCOM'] = '$SKROC $_SKROC_SEARCH_DIRS $SKROCFLAGS --c -f $TARGET $SOURCES'
    env['OCCBUILD'] = occbuild_path
    env['_OCCBUILD_SEARCH_DIRS'] = '$( ${_concat(OCCBUILD_SEARCH_PFX, INCPATH, "", __env__, RDirs, TARGET, SOURCE)} $)'
    env['_SKROC_SEARCH_DIRS'] = '$( ${_concat(SKROC_SEARCH_PFX, INCPATH, "", __env__, RDirs, TARGET, SOURCE)} $)'
    env['OCCBUILD_SEARCH_PFX'] = '--search '
    env['SKROC_SEARCH_PFX'] = '-L '
    env['OCCBUILD_TOOLCHAIN'] = None
    env['_OCCBUILD_TOOLCHAIN'] = '${(OCCBUILD_TOOLCHAIN and "--toolchain $OCCBUILD_TOOLCHAIN" or "")}'

    def OccLibDepend(self, node, lib_name):
        """Make *node* depend on the named occam libraries from OCCLIBS."""
        # BUG FIX: the original did `list(lib_name)` and discarded the
        # result, so a single string argument was iterated character by
        # character below instead of being treated as one library name.
        if not isinstance(lib_name, list):
            lib_name = [lib_name]
        for lib in lib_name:
            self.Depends(node, self['OCCLIBS'][lib]['dep'])
            if 'inc' in self['OCCLIBS'][lib]:
                for n in node:
                    n.env.AppendUnique(INCPATH=self['OCCLIBS'][lib]['inc'])

    env.AddMethod(OccLibDepend)
    env['OCCLIBS'] = dict()
    env['INCPATH'] = CLVar('')
    env['OCCBUILDFLAGS'] = CLVar('')
# NOTE(review): whitespace-mangled fragment of a template-instantiation SCons
# tool. It starts mid-function (apparently the tail of a "show" callback for
# __instantiate -- TODO confirm against the full file) and ends mid-expression
# inside the __template_builder keyword list, so it is kept verbatim.
# Visible complete pieces: __chmod_copy_exec / __chmod_copy_show (copy a file's
# mode bits via copymode) and __generator (builds the action list, optionally
# appending __chmod_copy when env['template_copy_mode'] is truthy).
__pychecker__ = 'no-argsused' return 'instantiate "%s" as "%s"' % (source[0], target[0]) def __chmod_copy_exec(target, source, env): [target] = target [source] = source copymode(str(source), str(target)) def __chmod_copy_show(target, source, env): __pychecker__ = 'no-argsused' return 'copy file mode from "%s" to "%s"' % (source[0], target[0]) __chmod_copy = Action(__chmod_copy_exec, __chmod_copy_show) def __generator(source, target, env, for_signature): __pychecker__ = 'no-argsused' varlist = env['varlist'] actions = [Action(__instantiate_exec, __instantiate_show, varlist=varlist)] if env['template_copy_mode']: actions.append(__chmod_copy) return actions __template_builder = Builder( generator=__generator, src_suffix=['.in'], single_source=True,
# NOTE(review): mangled fragment of a Cython SCons tool. It starts inside the
# module docstring (AUTHORS section) and ends on the `def cython_suffix_emitter`
# header with its body cut off, so it is kept verbatim. create_builder() is
# complete: it returns the cached 'Cython' builder or registers a new one
# driving $CYTHONCOM. The builder's `suffix` is the (not yet visible)
# cython_suffix_emitter callable.
AUTHORS: - David Cournapeau - Dag Sverre Seljebotn """ import SCons from SCons.Builder import Builder from SCons.Action import Action #def cython_action(target, source, env): # print target, source, env # from Cython.Compiler.Main import compile as cython_compile # res = cython_compile(str(source[0])) cythonAction = Action("$CYTHONCOM") def create_builder(env): try: cython = env['BUILDERS']['Cython'] except KeyError: cython = SCons.Builder.Builder( action = cythonAction, emitter = {}, suffix = cython_suffix_emitter, single_source = 1) env['BUILDERS']['Cython'] = cython return cython def cython_suffix_emitter(env, source):
# NOTE(review): mangled fragment of an mzc (PLT Scheme compiler) SCons tool.
# It starts mid-way through the scheme dependency scan function (its `def`
# line and the initialisation of `filtered` are not visible), so it is kept
# verbatim. The tail defines SchemeScanner, the mzc Builder, generate() and
# exists(), all complete.
# NOTE(review): `def generate(env, *kw)` takes positional varargs; SCons tool
# modules conventionally use `**kw` here -- presumably a typo, but harmless
# since generate() is normally called with env only. TODO confirm upstream.
# on Windows. # cmd = '%s %s' % (env['schemescanner'] + env.get('EXESUFFIX', ""), str(node)) cmd = '%s %s' % (env['schemescanner'], str(node)) stringresults = exe(cmd) # print "RESULTS ", results results_split = re.split(' ', stringresults) for i in results_split: if i != '': filtered.append(i) # print "RESULTS SPLIT ", filtered return filtered SchemeScanner = Scanner(function=schemefile_scan, skeys=['.scm', '.ss']) mzc = Builder(action=Action('$MZCCOM', '$MZCCOMSTR'), src_suffix=".scm", single_source=True, source_scanner=SchemeScanner) def generate(env, *kw): env['BUILDERS']['Mzc'] = mzc env['MZC'] = 'mzc' env['MZCCOM'] = '$MZC --exe $TARGET $SOURCE' def exists(env): return 1
# NOTE(review): mangled fragment of a Go SCons tool. The three suffix/prefix
# helpers are complete (they read GO_ARCHNAME / PROGPREFIX / PROGSUFFIX from
# env), but go_compiler and go_linker reference _gc_emitter and _ld_scan_func
# defined elsewhere, and the trailing go_assembler Builder call is cut off
# mid-argument-list -- kept verbatim.
def _go_object_suffix(env, sources): return os.path.extsep + env['GO_ARCHNAME'] def _go_program_prefix(env, sources): return env['PROGPREFIX'] def _go_program_suffix(env, sources): return env['PROGSUFFIX'] go_compiler = Builder( action=Action('$GO_GCCOM', '$GO_GCCOMSTR'), emitter=_gc_emitter, suffix=_go_object_suffix, ensure_suffix=True, src_suffix='.go', ) go_linker = Builder( action=Action('$GO_LDCOM', '$GO_LDCOMSTR'), prefix=_go_program_prefix, suffix=_go_program_suffix, src_builder=go_compiler, single_source=True, source_scanner=Scanner(function=_ld_scan_func, recursive=True), ) go_assembler = Builder( action=Action('$GO_ACOM', '$GO_ACOMSTR'),
def SandeshSconsEnvOnlyCppFunc(env):
    """Register the 'SandeshOnlyCpp' builder on *env*."""
    cpp_action = Action(SandeshOnlyCppBuilder,
                        'SandeshOnlyCppBuilder $SOURCE -> $TARGETS')
    env.Append(BUILDERS={'SandeshOnlyCpp': Builder(action=cpp_action)})
def SandeshSconsEnvPyFunc(env):
    """Register the 'SandeshPy' builder on *env*."""
    py_action = Action(SandeshPyBuilder,
                       'SandeshPyBuilder $SOURCE -> $TARGETS')
    env.Append(BUILDERS={'SandeshPy': Builder(action=py_action)})
# NOTE(review): mangled fragment of a LaTeX .ins/.dtx SCons tool. It begins
# mid-way through an emitter (the `def` line, the loop header and the
# `src`/`pat`/`usedir`/`base` bindings are not visible) and ends inside the
# env.SetDefault(...) call of generate(), so it is kept verbatim. The visible
# tail wires .dtx handling into the existing PDF builder and defines the
# _ins2sty Builder around $INS2STYCOM.
source.extend([x.strip() for x in src.split(",")]) elif re.match("usedir", pat) and usedir: outdir = os.path.join(basedir, usedir) if basedir \ else outdir elif re.match("BaseDirectory", pat) and base: basedir = base else: # This should never actually happen. raise NotImplementedError # print("source: {0}".format([str(x) for x in source])) # print("target: {0}".format([str(x) for x in target])) return target, source _ins2sty = Builder(action=Action("$INS2STYCOM", "$INS2STYCOMSTR"), emitter=_ins_emitter) def generate(env): """Add the Builders and construction variables to the Environment """ # Add the dtx as an extension for the PDF builder env["BUILDERS"]["PDF"].add_action(".dtx", PDFLaTeXAuxAction) env["BUILDERS"]["PDF"].add_emitter(".dtx", _dtx_emitter) # Add the ins as an extension for the PDF builder env["INS2STY"] = env["PDFLATEX"] env.SetDefault( INS2STYCOM="$INS2STY ${SOURCE}",
def read_pipe(command, env): [command] = env.subst_list(command) command = map(str, command) print ' '.join(command) process = Popen(command, env=env['ENV'], stdout=PIPE) for line in process.stdout: yield line status = process.wait() if status != 0: env.Exit(status) def __literal_exec(target, source, env): __pychecker__ = 'no-argsused' target = file(str(target[0]), 'w') print >> target, source[0].get_contents() target.close() def __literal_show(target, source, env): return 'create "%s" containing %s' % (target[0], source[0]) __literal_action = Action(__literal_exec, __literal_show) def literal(env, target, value): return env.Command(target, env.Value(value), __literal_action)
def ext2_create(target: File):
    """Return the Action that formats *target* as an ext2 image with mke2fs.

    BUG FIX: the original ended with `return Action(...),` -- the stray
    trailing comma made the function return a 1-tuple instead of the Action
    itself, unlike the sibling limine_install() which returns the Action
    directly.
    """
    path = target.get_path()
    return Action(
        'mke2fs {} > /dev/null 2>&1'.format(path),
        # Double braces survive .format() to produce ${COLOR_...} for SCons.
        ' ${{COLOR_COM}}(MKE2FS)${{COLOR_NONE}} {}'.format(path))
def limine_install(target: File):
    """Return the Action that runs $LIMINE_INSTALL on ${TARGET}."""
    command = '$LIMINE_INSTALL ${TARGET} > /dev/null 2>&1'
    label = ' ${COLOR_COM}(LIMINE)${COLOR_NONE} ${TARGET}'
    return Action(command, label)
# NOTE(review): mangled fragment of an astyle SCons tool. _astyle_emitter and
# the two Builder definitions are complete (they reference _astyle_check_action,
# _detect and Cformat defined elsewhere), but generate() ends mid-way through
# its env.SetDefault(...) call, so the chunk is kept verbatim. The emitter
# filters out 'test/ref' files and remaps BUILD_DIR paths back to WS_DIR,
# keeping only paths that exist on disk.
# and install the package here... def _astyle_emitter(target, source, env): '''Helper function to filter out files for testing purposes.''' filtered_sources = [] fs_append = filtered_sources.append for f in source: if 'test/ref' not in f.abspath: f = f.abspath.replace(env['BUILD_DIR'], env['WS_DIR']) if os.path.exists(f): fs_append(f) return target, filtered_sources _astyle_builder = Builder( action=Action('$ASTYLE_COM', '$ASTYLE_COMSTR'), emitter=_astyle_emitter) _astyle_check_builder = Builder( action=Action(_astyle_check_action, '$ASTYLE_CHECK_COMSTR'), emitter=_astyle_emitter) def generate(env): """Add Builders and construction variables to the Environment.""" env['ASTYLE'] = _detect(env) env.SetDefault( # ASTYLE command ASTYLE_COM='$ASTYLE -k1 --options=none --convert-tabs -bSKpUH $SOURCES', ASTYLE_COMSTR=Cformat('\n=== Running Astyle ===\n', 'green'), ASTYLE_CHECK_COMSTR=Cformat('\n=== Running Astyle Check ===\n', 'green')
def PyBind11WIG(env, target, sources, module, decorator=None,
                parser='clanglite', controller='default',
                generator='pybind11_internal', depends=None,
                helder='std::shared_ptr', **kwargs):
    """Set up the AutoWIG PyBind11 generation chain for *target*.

    Returns the list of nodes created (installed parser/controller/generator
    scripts plus the final ASG pickle, which is also the _PyBind11WIG target).

    BUG FIX: `depends` used a mutable default argument (`[]`); it now defaults
    to None and is normalised below, preserving the old effective default.
    """
    depends = [] if depends is None else depends
    # SITE_AUTOWIG = env['SITE_AUTOWIG']
    autowig_env = env.Clone()
    autowig_env['BUILDERS']['_PyBind11WIG'] = Builder(
        action=Action(pybind11_builder,
                      'autowig: Generating PyBind11 interface ...'))
    autowig_env['AUTOWIG_DEPENDS'] = depends
    autowig_env['AUTOWIG_HELDER'] = helder
    # Forward every extra keyword as an AUTOWIG_* construction variable.
    for kwarg in kwargs:
        autowig_env['AUTOWIG_' + kwarg] = kwargs[kwarg]
    autowig_env['AUTOWIG_generator_module'] = env.File(module).srcnode()
    if decorator:
        autowig_env['AUTOWIG_generator_decorator'] = env.File(decorator).srcnode()
    targets = []
    # A '.py' path means a user-supplied plugin: install it under SITE_AUTOWIG
    # and refer to it by the target's name from then on.
    if parser.endswith('.py'):
        targets.append(
            env.InstallAs(
                os.path.join(SITE_AUTOWIG, 'parser', target + '.py'), parser))
        parser = target
    autowig_env['AUTOWIG_PARSER'] = parser
    if controller.endswith('.py'):
        targets.append(
            env.InstallAs(
                os.path.join(SITE_AUTOWIG, 'controller', target + '.py'),
                controller))
        controller = target
    autowig_env['AUTOWIG_CONTROLLER'] = controller
    if generator.endswith('.py'):
        targets.append(
            env.InstallAs(
                os.path.join(SITE_AUTOWIG, 'generator', target + '.py'),
                generator))
        generator = target
    autowig_env['AUTOWIG_GENERATOR'] = generator
    targets.append(
        autowig_env.File(os.path.join(SITE_AUTOWIG, 'ASG', target + '.pkl')))
    # FIX: the loop variable previously shadowed the `target` parameter.
    for dep in targets[:-1]:
        autowig_env.Depends(targets[-1], dep)
    if os.path.exists(targets[-1].abspath):
        # SECURITY: pickle.load executes arbitrary code from the file; only
        # safe because the pickle is produced by this build itself.
        with open(targets[-1].abspath, 'rb') as filehandler:
            autowig_env['AUTOWIG_ASG'] = pickle.load(filehandler)
    autowig_env._PyBind11WIG(targets[-1], sources)
    return targets
# NOTE(review): mangled fragment of a bitcode-to-text SCons tool. The emitter
# and Builder are complete, but the trailing `def generate(env):` has its body
# cut off, so the chunk is kept verbatim. The action runs the $Bitcode2Text
# program, redirecting its output into $TARGET.
from SCons.Builder import Builder ######################################################################## # # generate a sorted file # def __bitcode2text_emitter(target, source, env): #source.append(env['SortFile']) return target, source __bitcode2text_action = Action([[ '$Bitcode2Text.abspath', '$SOURCES', '>$TARGET' ]]) __bitcode2text_builder = Builder( emitter=__bitcode2text_emitter, action=__bitcode2text_action, ) ######################################################################## def generate(env):
def generate(env):
    """Add ArchiveTarBz2/ArchiveZip builders (and a TargetArchive target
    class, when env.targetManager exists) to *env*."""
    reWindowsDrive = re.compile('^([A-Z]:[/\\\\])|([A-Z][A-Z0-9]*//)', re.I)

    def normalizePath(path):
        """Strip drive prefixes so archive members get portable paths."""
        path = os.path.normpath(path)
        path = os.path.splitdrive(path)[1]  # return (drive, tail)
        # for cross compiling splitdrive wont work since python is compiled for
        # the host system and os.path methods work with host system parameters.
        # use a simple regular expression check to see if we need to cut the
        # start of the string
        match = reWindowsDrive.match(path)
        if match:
            path = path[match.end():]
        return path

    def buildArchiveTarBz2(env, target, source):
        """Action: pack env['ARCHIVE_FILES'] into a bzip2 tarball, with all
        entries owned by root."""
        with tarfile.open(target[0].abspath, 'w:bz2',
                          dereference=True) as arcfile:
            for path, node in env['ARCHIVE_FILES'].items():
                info = arcfile.gettarinfo(node.abspath, normalizePath(path))
                #info.mode = ...
                info.uid = 0
                info.gid = 0
                info.uname = 'root'
                info.gname = 'root'
                with open(node.abspath, 'rb') as nf:
                    arcfile.addfile(info, nf)

    def buildArchiveZip(env, target, source):
        """Action: pack env['ARCHIVE_FILES'] into a deflated zip archive."""
        with zipfile.ZipFile(target[0].abspath, 'w',
                             zipfile.ZIP_DEFLATED) as arcfile:
            for path, node in env['ARCHIVE_FILES'].items():
                arcfile.write(node.abspath, normalizePath(path))

    env.Append(
        BUILDERS={
            'ArchiveTarBz2':
            Builder(action=Action(buildArchiveTarBz2, '$ARCHIVETARBZ2COMSTR'),
                    suffix='.tarbz2',
                    src_suffix='')
        })
    env['ARCHIVETARBZ2COM'] = 'Archiving "$TARGET"'
    env['ARCHIVETARBZ2COMSTR'] = env['ARCHIVETARBZ2COM']
    env.Append(
        BUILDERS={
            'ArchiveZip':
            Builder(action=Action(buildArchiveZip, '$ARCHIVEZIPCOMSTR'),
                    suffix='.zip',
                    src_suffix='')
        })
    env['ARCHIVEZIPCOM'] = 'Archiving "$TARGET"'
    env['ARCHIVEZIPCOMSTR'] = env['ARCHIVEZIPCOM']

    try:
        class _TargetArchive(env.targetManager.Target):
            formatTarBz2 = 'tarbz2'
            formatZip = 'zip'

            def __init__(self, description, target=None, **args):
                super(_TargetArchive, self).__init__(description, target)

            def archiveFiles(self, env, target, files, format=formatTarBz2):
                """Archive *files* (a path->node mapping) into *target*."""
                if format == self.formatTarBz2:
                    self.target.extend(
                        env.ArchiveTarBz2(target, files.values(),
                                          ARCHIVE_FILES=files))
                elif format == self.formatZip:
                    self.target.extend(
                        env.ArchiveZip(target, files.values(),
                                       ARCHIVE_FILES=files))
                else:
                    # BUG FIX: the original raised a plain string, which is a
                    # TypeError in Python 3; raise a real exception instead.
                    raise ValueError('Invalid format {}'.format(format))

        env.targetManager.TargetArchive = _TargetArchive
    except:
        # Best effort: env.targetManager is optional; skip silently when the
        # project does not provide it.
        pass
def enable(env, config):
    # Compilation-database tool: hooks emitters onto Static/SharedObject so
    # every compile records an entry, and adds a CompileDb() env method that
    # writes compile_commands-style JSON. Kept logic byte-for-byte; only
    # reformatted and commented, since the closure/state interplay is
    # order-sensitive.
    compile_commands = {}
    entry_counter = _EntryCounter()
    # The dict's id() tags this env (and clones) so foreign envs are ignored.
    env['_COMPILE_DB_ID'] = id(compile_commands)
    env['_COMPILE_DB_COUNTER'] = entry_counter
    entry_group = SCons.Node.Python.Value(id(compile_commands))

    def create_db_entry_emitter(cxx, shared):
        # Returns an emitter recording one DB entry per compiled object.
        def emitter(target, source, env):
            if env.get('_COMPILE_DB_ID') != id(compile_commands):
                # Not our environment (e.g. a clone with the id removed).
                return target, source

            def add_db_entry():
                entry = config.entry_func(env, target, source, cxx, shared)
                if entry:
                    key = '{}:{}'.format(
                        entry['file'],
                        str(target[0]) if config.multi else '')
                    old_entry = compile_commands.get(key)
                    compile_commands[key] = entry
                    entry_counter.update(key, old_entry, entry)

            entry_node = SCons.Node.Python.Value(source)
            entry = env._AddDbEntry(entry_node, [],
                                    _COMPILE_DB_ENTRY_FUNC=add_db_entry)
            env.AlwaysBuild(entry)
            env.NoCache(entry)
            env.Depends(entry_group, entry)
            return target, source

        return emitter

    def add_db_entry_action(target, source, env):
        # Runs the closure captured by the emitter above.
        env['_COMPILE_DB_ENTRY_FUNC']()

    def update_db_action(target, source, env):
        # Convert dict to a list sorted with file/output tuple.
        contents = [e for _, e in sorted(compile_commands.items())]
        with open(target[0].path, 'w') as f:
            json.dump(contents, f, indent=2)

    def update_internal_db_action(target, source, env):
        # Internal cache keeps the keyed dict form for reloading next run.
        with open(target[0].path, 'w') as f:
            json.dump(compile_commands, f, indent=2, sort_keys=True)

    #
    # Hook new emitters to the existing ones
    #
    for ((cxx, suffix), shared) in itertools.product(
            [(True, s) for s in config.cxx_suffixes] +
            [(False, s) for s in config.cc_suffixes],
            (True, False)):
        builder = 'SharedObject' if shared else 'StaticObject'
        emitter = env['BUILDERS'][builder].emitter
        assert isinstance(emitter, DictEmitter)
        org = emitter[suffix]
        new = create_db_entry_emitter(cxx, shared)
        emitter[suffix] = ListEmitter((org, new))

    #
    # Add builders
    #
    env['BUILDERS']['_AddDbEntry'] = Builder(
        action=Action(add_db_entry_action, None))
    env['BUILDERS']['_UpdateInternalDb'] = Builder(action=Action(
        update_internal_db_action, 'Check compilation DB : $TARGET ... '
        '$_COMPILE_DB_COUNTER'))
    env['BUILDERS']['_UpdateDb'] = Builder(
        action=Action(update_db_action, 'Update compilation DB : $TARGET'))

    def compile_db(env, target=config.db):
        # env.CompileDb(): rebuilds the DB, seeding from the hidden internal
        # cache file unless config.reset is set.
        compile_commands.clear()
        entry_counter.reset()
        head, tail = os.path.split(target)
        internal_path = os.path.join(head, '.' + tail)
        internal_db = env._UpdateInternalDb(internal_path, entry_group)[0]
        if (not config.reset) and internal_db.exists():
            compile_commands.update(json.loads(
                internal_db.get_text_contents()))
        env.AlwaysBuild(internal_db)
        return env._UpdateDb(target, internal_db)

    env.AddMethod(compile_db, 'CompileDb')
def SandeshSconsEnvDocFunc(env):
    """Register the 'SandeshDoc' builder on *env*."""
    doc_action = Action(SandeshDocBuilder,
                        'SandeshDocBuilder $SOURCE -> $TARGETS')
    env.Append(BUILDERS={'SandeshDoc': Builder(action=doc_action)})
def pegBuilder(environment):
    """Return a Builder converting .peg grammars into .cpp sources."""
    from SCons.Builder import Builder
    from SCons.Action import Action
    make_cpp = Action(peg_to_cpp, environment['PEG_MAKE'])
    return Builder(action=make_cpp, suffix='.cpp', src_suffix='.peg')
def SandeshSconsEnvCFunc(env):
    """Register the 'SandeshC' builder on *env*."""
    c_action = Action(SandeshCBuilder,
                      'SandeshCBuilder $SOURCE -> $TARGETS')
    env.Append(BUILDERS={'SandeshC': Builder(action=c_action)})
def BuildProgram(env):
    # PlatformIO main build entry point: collects flags, frameworks and
    # dependent libraries, then links the firmware Program node. Logic kept
    # byte-for-byte (env mutation here is order-sensitive); reformatted and
    # commented only.

    def _append_pio_macros():
        # Encode the PlatformIO version into a PLATFORMIO=XXYYZZ define.
        env.AppendUnique(CPPDEFINES=[
            "PLATFORMIO={0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())
        ])

    _append_pio_macros()

    # fix ASM handling under non-casitive OS
    if not case_sensitive_suffixes(".s", ".S"):
        env.Replace(AS="$CC", ASCOM="$ASPPCOM")

    # process extra flags from board
    if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
        env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))

    # remove base flags
    env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))

    # apply user flags
    env.ProcessFlags(env.get("BUILD_FLAGS"))

    if env.get("PIOFRAMEWORK"):
        env.BuildFrameworks(
            [f.lower().strip() for f in env['PIOFRAMEWORK'].split(",")])
        # restore PIO macros if it was deleted by framework
        _append_pio_macros()

    # build dependent libs
    deplibs = env.BuildDependentLibraries("$PROJECTSRC_DIR")

    # append specified LD_SCRIPT
    if ("LDSCRIPT_PATH" in env
            and not any(["-Wl,-T" in f for f in env['LINKFLAGS']])):
        env.Append(LINKFLAGS=['-Wl,-T"$LDSCRIPT_PATH"'])

    # enable "cyclic reference" for linker
    if env.get("LIBS", deplibs) and env.GetCompilerType() == "gcc":
        env.Prepend(_LIBFLAGS="-Wl,--start-group ")
        env.Append(_LIBFLAGS=" -Wl,--end-group")

    # Handle SRC_BUILD_FLAGS
    env.ProcessFlags(env.get("SRC_BUILD_FLAGS"))

    env.Append(CPPPATH=["$PROJECTSRC_DIR"],
               LIBS=deplibs,
               LIBPATH=["$BUILD_DIR"],
               PIOBUILDFILES=env.CollectBuildFiles(
                   "$BUILDSRC_DIR",
                   "$PROJECTSRC_DIR",
                   src_filter=env.get("SRC_FILTER"),
                   duplicate=False))

    if "test" in COMMAND_LINE_TARGETS:
        env.Append(PIOBUILDFILES=env.ProcessTest())

    if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS:
        sys.stderr.write(
            "Error: Nothing to build. Please put your source code files "
            "to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
        env.Exit(1)

    program = env.Program(join("$BUILD_DIR", env.subst("$PROGNAME")),
                          env['PIOBUILDFILES'])

    if set(["upload", "uploadlazy", "program"]) & set(COMMAND_LINE_TARGETS):
        env.AddPostAction(
            program,
            Action(env.CheckUploadSize, "Checking program size $TARGET"))

    return program
def tarInfoToNode(tarInfoObject):
    """Map a TarInfo member to its name (plain string)."""
    return tarInfoObject.name
    #return File(tarInfoObject.name)


def UnTar(target, source, env):
    """Action: extract the first source tarball into the current directory.

    NOTE(review): source[0].name is the node's base name, so this relies on
    the action running with cwd at the tarball's directory -- confirm.
    """
    import tarfile
    # BUG FIX: use a context manager so the archive is closed even when
    # extractall() raises (the original leaked the handle on error).
    # SECURITY: extractall() without a filter trusts member paths in the
    # archive; only use on tarballs the project controls.
    with tarfile.open(source[0].name, 'r') as sourceTar:
        sourceTar.extractall()
    return None


def UnTarString(target, source, env):
    """ Information string for UnTar """
    return 'Extracting %s' % os.path.basename(str(source[0]))


unTarBuilder = Builder(action=Action(UnTar, UnTarString),
                       src_suffix='.tar.gz',
                       emitter=tarContentsEmitter)


def generate(env):
    """Register the 'UnTar' builder on *env*."""
    env.Append(BUILDERS={'UnTar': unTarBuilder})


def exists(env):
    """Tool is always available (tarfile is stdlib)."""
    return True
# NOTE(review): mangled fragment of a unit-test runner SCons tool. It starts
# mid-way through the append_result_to_target emitter (the `def` lines for it
# and its nested do_append are not visible), and the Collector class may
# continue beyond this chunk -- kept verbatim. Complete pieces:
# run_unit_test_desc (banner string for the run_unit_test action, defined
# elsewhere), the RunUnitTest Builder, and Collector.Append which coerces
# single items or sequences to strings before storing them.
base += appendix return base + ext return map(do_append, target), source def run_unit_test_desc(target, source, env): column_count = 80 head_and_tail = '=' * column_count content = 'Running ' + os.path.splitext(os.path.basename(str( source[0])))[0] lines = [head_and_tail, content, head_and_tail] return '\n'.join(lines) _run_unit_test = Action(run_unit_test, run_unit_test_desc) RunUnitTest = Builder(action=_run_unit_test, suffix='.txt', emitter=append_result_to_target) class Collector(object): def __init__(self): self.files = [] def Append(self, file): if isinstance(file, (list, tuple)): self.files.extend([str(item) for item in file]) else: self.files.append(str(file))
# NOTE(review): mangled fragment of an SvnInfo SCons tool. It starts mid-way
# through svninfo_emitter (which publishes `svn info` fields into env and
# returns the header text as a Value source) and ends on `def exists(env):`
# with its body cut off -- kept verbatim. svninfo_build simply writes the
# Value node's contents into the target file.
# print svnheader env['SVNREVISION'] = svndict['Revision'] env['SVNLASTCHANGEDDATE'] = svndict['Last Changed Date'] env['SVNURL'] = svndict['URL'] env['SVNWORKDIRSPEC'] = svndict['Working Directory'] env['SVNWORKDIR'] = svndict['workdir'] return target, [Value(svnheader)] def svninfo_build(env, target, source): out = open(target[0].path, "w") out.write(source[0].get_contents()) out.close() action = Action(svninfo_build, lambda t, s, e: "Generating %s" % t[0]) svninfobuilder = Builder(action=action, source_factory=FS.default_fs.Entry, emitter=svninfo_emitter) class SvnInfoWarning(SCons.Warnings.Warning): pass def generate(env): env['BUILDERS']['SvnInfo'] = svninfobuilder def exists(env):
"""Small tool to create a Cython builder.""" import SCons from SCons.Builder import Builder from SCons.Action import Action def cython_action(target, source, env): from .Cython.Compiler.Main import compile as cython_compile res = cython_compile(str(source[0])) cythonAction = Action(cython_action, "$CYTHONCOMSTR") def create_builder(env): try: cython = env['BUILDERS']['Cython'] except KeyError: cython = SCons.Builder.Builder(action=cythonAction, emitter={}, suffix=cython_suffix_emitter, single_source=1) env['BUILDERS']['Cython'] = cython return cython def cython_suffix_emitter(env, source): return "$CYTHONCFILESUFFIX"
def VerboseAction(_, act, actstr):
    """Return *act* as-is when PIOVERBOSE is set; otherwise wrap it in an
    Action labelled *actstr* so the short description is printed instead."""
    verbose = int(ARGUMENTS.get("PIOVERBOSE", 0))
    return act if verbose else Action(act, actstr)
def init(env):
    """Register the project's custom builders and their default settings."""
    env.Append(BUILDERS={'RunUnittest': Builder(action=Action(RunUnittest, PrintDummy))})
    env.Append(BUILDERS={'InitLcov': Builder(action=Action(InitLcov, PrintDummy))})
    env.Append(BUILDERS={'RunLcov': Builder(action=Action(RunLcov, PrintDummy))})
    env.Append(BUILDERS={'RunDoxygen': Builder(action=Action(RunDoxygen, PrintDummy))})
    env['DEFAULT_DOXYFILE'] = env.File('#/conf/doxygenTemplate').abspath
    # AStyle builders are currently disabled:
    # env.Append(BUILDERS={'RunAStyleCheck': Builder(action=Action(AStyleCheck, PrintDummy))})
    # env.Append(BUILDERS={'RunAStyle': Builder(action=Action(AStyle, PrintDummy))})
    env.Append(BUILDERS={'RunPdfLatex': Builder(action=Action(RunPdfLatex, PrintDummy))})
    env['PDFLATEX_OPTIONS'] = ''
    env.Append(BUILDERS={'RunValgrind': Builder(action=Action(RunValgrind, PrintDummy))})
    env['VALGRIND_OPTIONS'] = ['--leak-check=full', '--show-reachable=yes',
                               '--error-limit=no', '--track-origins=yes']
    env.Append(BUILDERS={'RunCCCC': Builder(action=Action(RunCCCC, PrintDummy))})
    env['CCCC_OPTIONS'] = []
    env.Append(BUILDERS={'RunCLOC': Builder(action=Action(RunCLOC, PrintDummy))})
    env['CLOC_OUTPUT_FORMAT'] = 'txt'  # txt | sql | xml
    env['CLOC_OPTIONS'] = []
    env['CPPCHECK_OPTIONS'] = ['-f', '--enable=all']
    env.Append(BUILDERS={'RunMocko': Builder(action=Action(RunMocko, PrintDummy))})
    env['MOCKO_OPTIONS'] = []
    env.Append(BUILDERS={'RunReadyToCommit': Builder(action=Action(RunReadyToCommit, PrintDummy))})
    env.Append(BUILDERS={'RunInfo': Builder(action=Action(RunInfo, PrintDummy))})
    env.Append(BUILDERS={'RunStaticAnalysis': Builder(action=Action(RunStaticAnalysis, PrintDummy))})
    env.Append(BUILDERS={'RunASan': Builder(action=Action(RunASan, PrintDummy))})
    env.Append(BUILDERS={'RunNamecheck': Builder(action=Action(RunNamecheck, PrintDummy))})
# NOTE(review): mangled fragment of the SCons MSI packaging tool. It starts
# mid-way through build_wxsfile (the Package/Media element construction) and
# the trailing package() function continues past this chunk -- kept verbatim.
# package() forwards its named arguments (minus env/target/source) to the
# .wxs specfile compiler via kw.
Package.attributes['Comments'] = escape(spec['DESCRIPTION']) if 'X_MSI_UPGRADE_CODE' in spec: Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE']) # We hardcode the media tag as our current model cannot handle it. Media = factory.createElement('Media') Media.attributes['Id'] = '1' Media.attributes['Cabinet'] = 'default.cab' Media.attributes['EmbedCab'] = 'yes' root.getElementsByTagName('Product')[0].childNodes.append(Media) # this builder is the entry-point for .wxs file compiler. wxs_builder = Builder(action=Action(build_wxsfile, string_wxsfile), ensure_suffix='.wxs') def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION, SUMMARY, VENDOR, X_MSI_LANGUAGE, **kw): # make sure that the Wix Builder is in the environment SCons.Tool.Tool('wix').generate(env) # get put the keywords for the specfile compiler. These are the arguments # given to the package function and all optional ones stored in kw, minus # the the source, target and env one. loc = locals() del loc['kw'] kw.update(loc) del kw['source'], kw['target'], kw['env']
} #endif""" inline = None for kw in ['inline', '__inline__', '__inline']: st = context.TryCompile(body % {'inline': kw}, '.c') if st: inline = kw break if inline: context.Result(inline) else: context.Result(0) return inline array_api_gen_bld = Builder(action=Action(do_generate_numpy_api, '$ARRAYPIGENCOMSTR'), emitter=generate_api_emitter) ufunc_api_gen_bld = Builder(action=Action(do_generate_ufunc_api, '$UFUNCAPIGENCOMSTR'), emitter=generate_api_emitter) template_bld = Builder(action=Action(generate_from_template, '$TEMPLATECOMSTR'), emitter=generate_from_template_emitter) umath_bld = Builder(action=Action(generate_umath, '$UMATHCOMSTR'), emitter=generate_umath_emitter)
# NOTE(review): Python 2 variant of the MSI packaging fragment above
# (dict.has_key was removed in Python 3). It starts mid-way through
# build_wxsfile and the trailing package() function continues past this
# chunk -- kept verbatim.
if spec.has_key( 'DESCRIPTION' ): Package.attributes['Comments'] = escape( spec['DESCRIPTION'] ) if spec.has_key( 'X_MSI_UPGRADE_CODE' ): Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] ) # We hardcode the media tag as our current model cannot handle it. Media = factory.createElement('Media') Media.attributes['Id'] = '1' Media.attributes['Cabinet'] = 'default.cab' Media.attributes['EmbedCab'] = 'yes' root.getElementsByTagName('Product')[0].childNodes.append(Media) # this builder is the entry-point for .wxs file compiler. wxs_builder = Builder( action = Action( build_wxsfile, string_wxsfile ), ensure_suffix = '.wxs' ) def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION, SUMMARY, VENDOR, X_MSI_LANGUAGE, **kw): # make sure that the Wix Builder is in the environment SCons.Tool.Tool('wix').generate(env) # get put the keywords for the specfile compiler. These are the arguments # given to the package function and all optional ones stored in kw, minus # the the source, target and env one. loc = locals() del loc['kw'] kw.update(loc) del kw['source'], kw['target'], kw['env']
# NOTE(review): mangled fragment of a resource-embedding SCons tool. It
# starts inside serialize_resource (the `def` line and the name/size/data
# bindings are not visible) and ends mid-way through build_resource_blob --
# kept verbatim. The visible part packs a length-prefixed name+data record
# ('<H{}sQ{}s') into the .res target, then concatenates .res files into a
# single blob via `cat`.
# H size of resource name # s resource name (# of bytes equal to prev) # Q size of resource data # s resource data (# of bytes equal to prev) data_format = '<H{}sQ{}s'.format(name_size, size) serialized_data = struct.pack(data_format, name_size, name, size, data ) with open(target[0].abspath, 'wb') as f: f.write(serialized_data) _resource_serializer = Builder( action = Action(serialize_resource, 'Serializing resource $SOURCE -> $TARGET'), suffix = '.res', ) def build_resource_blob(env, source, name='terminus_resources'): # Concatenate resources inton a single blob blob = env.Command( target = name + '.blob', source = source, action = 'cat $SOURCES > ${TARGET}', ) # Copy pre-substitution assembly file to build directory pre_subst_asm_file = env.Command( target = name + '.S.in', source = '#asm/$OS/resource.S.in',