def generate(env):
    """SCons tool entry point: register the EDP builder on *env*.

    Idempotent — returns immediately if the builder is already installed.
    """
    if 'EDP' in env['BUILDERS']:
        return
    # Locate the bundled EDP executable relative to this tool module.
    here = File(__file__).dir
    env.AppendUnique(
        EDP=here.File('../main/main'),
        BUILDERS={'EDP': __edp_builder},
    )
def __call__(self, target, source, env):
    """SCons emitter: append the expected boost library node to *target*.

    Static builds use the platform library prefix/suffix; shared builds
    resolve the library name via the instance helper and place it in the
    configured final directory.
    """
    if self._linktype == 'static':
        # e.g. libboost_<name>.a on POSIX platforms.
        node = File(
            env.subst('$LIBPREFIX') + 'boost_' + self._library
            + env.subst('$LIBSUFFIX'))
    else:
        shared_library_name = self._shared_library_name(env, self._library)
        node = File(os.path.join(env['final_dir'], shared_library_name))
    target.append(node)
    return target, source
def MkdocsCombiner_emitter(target, source, env):
    """SCons emitter for the mkdocs combiner: default source and target.

    Defaults the source to mkdocs.yml and the target to
    <site_dir>/export/mkdocs.pd derived from the mkdocs configuration.
    """
    # Choose mkdocs.yml as source file if not specified
    if not source:
        cfgfile = File('mkdocs.yml')
        source.append(cfgfile)
    else:
        cfgfile = source[0]
    # Read mkdocs config
    yamlcfg, sitedirnode, docsdirnode = Mkdocs_Readconfig(cfgfile, env)
    # Default target
    if not target:
        target = File(path.join(str(sitedirnode), 'export/mkdocs.pd'))
    return target, source
def FeatureSources(config, files):
    """Filter a source-file list by feature flags.

    Each entry in *files* is either a plain filename (always included) or a
    ``(flag, filename)`` tuple. The file is included when ``config[flag]`` is
    truthy, or — when the flag is prefixed with ``'!'`` — when the flag is
    falsy.

    Returns a list of SCons File nodes.
    """
    from SCons.Script import File
    output = []
    for f in files:
        # isinstance instead of `type(f) == tuple` — the idiomatic type check.
        if isinstance(f, tuple):
            flag, fname = f[0], f[1]
            if flag.startswith('!'):
                # Negated flag: include only when the feature is disabled.
                if not config[flag[1:]]:
                    output.append(File(fname))
            elif config[flag]:
                output.append(File(fname))
        else:
            output.append(File(f))
    return output
def Mkdocs_emitter(target, source, env):
    """SCons emitter for the mkdocs builder.

    Defaults the source to mkdocs.yml, adds a concrete file target inside
    the site directory so rebuild-on-change tracking works, and registers
    the whole site directory for cleaning.
    """
    # Choose mkdocs.yml as source file if not specified
    if not source:
        cfgfile = File('mkdocs.yml')
        source.append(cfgfile)
    else:
        cfgfile = source[0]
    # Read mkdocs config
    yamlcfg, sitedirnode, docsdirnode = Mkdocs_Readconfig(cfgfile, env)
    # We need at least one target that's a file for the
    # rebuild-if-source-changes logic to work
    filenode = File(path.join(str(sitedirnode), 'mkdocs/search_index.json'))
    target.append(filenode)
    env.Clean(target, sitedirnode)
    return target, source
def examples(ccList=None, swigNameList=None, swigSrc=None):
    """Build the package's example programs and SWIG example modules.

    When the argument lists are omitted, SWIG modules are discovered from
    ``*.i`` files and C++ examples from ``*.cc`` files (excluding generated
    ``*_wrap.cc`` files and any file already attached to a SWIG module).
    Every built example depends on the package library target, and all
    results are registered under the "examples" target.
    """
    if swigNameList is None:
        swigFileList = Glob("*.i")
        swigNameList = [_getFileBase(node) for node in swigFileList]
    else:
        swigFileList = [File(name) for name in swigNameList]
    if swigSrc is None:
        swigSrc = {}
    # Record every extra source already attached to a SWIG module so the
    # C++ glob below can exclude them, then attach each interface file.
    knownSwigSources = set()
    for moduleName, interfaceNode in zip(swigNameList, swigFileList):
        extras = swigSrc.setdefault(moduleName, [])
        knownSwigSources.update(str(item) for item in extras)
        extras.append(interfaceNode)
    if ccList is None:
        ccList = [
            node for node in Glob("*.cc")
            if not str(node).endswith("_wrap.cc")
            and str(node) not in knownSwigSources
        ]
    state.log.info("SWIG modules for examples: %s" % swigFileList)
    state.log.info("C++ examples: %s" % ccList)
    results = []
    for ccNode in ccList:
        results.extend(state.env.Program(ccNode, LIBS=state.env.getLibs("main")))
    for moduleName, moduleSources in swigSrc.items():
        results.extend(
            state.env.SwigLoadableModule(
                "_" + moduleName, moduleSources,
                LIBS=state.env.getLibs("main python")))
    for built in results:
        state.env.Depends(built, state.targets["lib"])
    state.targets["examples"].extend(results)
    return results
def infusion_emitter(target, source, env):
    """SCons emitter: expand an infusion output directory into file targets.

    Requires exactly one directory target; validates that every source is a
    .class, .dih or .jar file; replaces the directory target with the set of
    files the infuser generates, each depending on the infuser jar.
    """
    if len(target) != 1:
        env.Error("Infusion needs exactly one output directory!")
        exit(1)
    target[0].must_be_same(SCons.Node.FS.Dir)
    # derive infusion path and name
    infusion_path = str(target[0])
    infusion_name = os.path.basename(infusion_path)
    # check source files
    for s in source:
        file_name = str(s)
        if not os.path.splitext(file_name)[1] in ['.class', '.dih', '.jar']:
            env.Error(
                "Infusion source `{}` is neither *.class nor *.dih nor *.jar".
                format(file_name))
            exit(1)
    # create file targets, one per generated artifact
    output_templates = [
        '{}.dih', '{}.di', 'jlib_{}.c', 'jlib_{}.h', 'jlib_{}.hpp'
    ]
    target = []
    for templ in output_templates:
        path = os.path.join(infusion_path, templ.format(infusion_name))
        file_target = File(path)
        # Regenerate whenever the infuser tool itself changes.
        Depends(file_target, env['INFUSER_JAR'])
        target.append(file_target)
    return target, source
def __call__(self, target, source, env):
    """Write the build-identity header and txt file, then retarget the build.

    Generates <stem>.hpp and <stem>.txt next to the eventual cpp target
    (under the instance's working directory), creating the output directory
    if needed, and appends both as sources so changes trigger a rebuild.
    Files are written via context managers so they are closed even on error
    (the original left them open if write() raised).
    """
    cpp_file = offset_path(target[0].path, env)
    hpp_file = hpp_from_cpp(cpp_file)
    txt_file = txt_from_cpp(cpp_file)
    output_dir = os.path.split(hpp_file)[0]
    if output_dir:
        output_dir = os.path.join(self.__working_dir, output_dir)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
    with open(os.path.join(self.__working_dir, hpp_file), "w") as version_hpp:
        version_hpp.write(
            get_build_identity_header(self.__namespace_guard,
                                      self.__namespaces))
    with open(os.path.join(self.__working_dir, txt_file), "w") as version_txt:
        version_txt.write(
            get_build_identity_txt(self.__version,
                                   relpath(env['base_path'], self.__location),
                                   self.__namespaces))
    target[0] = File(cpp_file)
    source.append(hpp_file)
    source.append(txt_file)
    return target, source
def calculate_target_file_paths(destination: Dir, relative_dir: Dir,
                                source_files: list[File]) -> list[File]:
    """Returns the list of files taking their path from relative_dir and
    appending it to destination."""
    base = str(relative_dir)
    targets = []
    for src in source_files:
        rel = Path(str(src)).relative_to(base)
        targets.append(File(rel, directory=destination))
    return targets
def CheckIEEE754(context):
    """SCons configure check: compile and run a probe program to verify
    that the platform stores floats in IEEE 754 format."""
    context.Message(
        "Checking if floating point numbers are in the IEEE 754 format... ")
    probe_path = os.path.join("src", "compile_time_tests", "ieee_754.cpp")
    probe_code = File(probe_path).get_contents()
    ret, _ = context.TryRun(probe_code, ".cpp")
    context.Result(ret)
    return ret
def fix_dylib_for_darwin(target, source, env):
    """Fix the install_names for darwin for all dylibs in target
    (each is set to the absolute path of the library)."""
    for node in target:
        lib_path = File(node).abspath
        check_call("install_name_tool -id {0} {0}".format(lib_path),
                   shell=True)
def __generator(target, source, env, for_signature):
    """SCons command generator.

    When a base directory is configured (and is not the project root '#'),
    rewrites target/source paths relative to it and prefixes the command
    with a run-with-cwd helper so the tool executes from that directory.
    """
    baseDir = Dir(env.get('basedir'))
    if baseDir and baseDir != Dir('#'):
        targetPaths = [
            relpath(node.abspath, baseDir.abspath) for node in target
        ]
        sourcePaths = [
            relpath(node.abspath, baseDir.abspath) for node in source
        ]
        here = File(__file__).dir
        helper = here.File('../tests/run-with-cwd')
        prefix = [helper, baseDir]
    else:
        targetPaths = target
        sourcePaths = source
        prefix = []
    # NOTE(review): `argv` is not defined in this function — presumably bound
    # in an enclosing closure that builds the base command line; confirm.
    return [prefix + argv + targetPaths + sourcePaths]
def _antlr_emitter(target, source, env):
    """Process sources and flags.

    SCons emitter for ANTLR grammars: builds the ANTLR command-line flags
    (-o output dir, -glib super-grammar) and computes the generated
    Lexer/Parser/TokenTypes targets for every grammar source.
    """
    target = []
    antlr_suffix = env.subst('$ANTLR_SUFFIX')
    antlr_h_suffix = env.subst('$ANTLR_HSUFFIX')
    antlr_cc_suffix = env.subst('$ANTLR_CCSUFFIX')
    antlr_txt_suffix = env.subst('$ANTLR_TXTSUFFIX')
    if env['ANTLR_FLAGS']:
        antlrflags = env.subst('$ANTLR_FLAGS', target=target, source=source)
        flags = SCons.Util.CLVar(antlrflags)
    else:
        flags = SCons.Util.CLVar('')
    # -o flag
    if env['ANTLR_OUT']:
        env['ANTLR_OUT'] = Dir(env['ANTLR_OUT'])
        flags.append('-o ${ANTLR_OUT}')
    # -glib flag
    if env['ANTLR_GLIB']:
        env['ANTLR_GLIB'] = File(env['ANTLR_GLIB'])
        flags.append('-glib ${ANTLR_GLIB}')
        #TODO: ImpTokenTypes!?
    # update antlr flags
    env['ANTLR_FLAGS'] = str(flags)
    # compute targets
    deps = []
    for src in source:
        src = File(src)
        stem = src.abspath
        if stem.endswith(antlr_suffix):
            stem = stem[:-len(antlr_suffix)]
        # Each grammar implicitly depends on its ImpTokenTypes file.
        deps.append(File(stem + 'ImpTokenTypes' + antlr_txt_suffix))
        if env['ANTLR_OUT']:
            # Generated files land in the -o directory, not next to the source.
            out = Dir(env['ANTLR_OUT'])
            stem = os.path.join(out.abspath, os.path.basename(stem))
        for kind in ('Lexer', 'Parser'):
            for ext in (antlr_h_suffix, antlr_cc_suffix):
                target.append(File(stem + kind + ext))
        for kind in ('', 'Lex'):
            for ext in (antlr_h_suffix, antlr_txt_suffix):
                target.append(File(stem + kind + 'TokenTypes' + ext))
    # Every generated target depends on every ImpTokenTypes dependency.
    for t in target:
        for d in deps:
            env.Depends(t, d)
    return (target, source)
def generate(env):
    """SCons tool entry point: install the Cog code-generation builders.

    Configures the python interpreter path, default cog flags, the command
    line, and the Cog / rebuild_cog builders; adds a --cog command-line
    option to force regeneration.
    """
    env['COG'] = exists(env)
    python = File(sys.executable).get_abspath()
    # windows fixes: escape backslashes so the path survives substitution
    python = python.replace("\\", "\\\\")
    env['PYTHON'] = python
    # default cog flags
    env['COGFLAGS'] = ''.join([
        '-d ',  # deletes the generator code from the output
        '-e ',  # warns if the input file has no cog code in it.
        '-I submods/xgen_ws ',  # includes in python path
        '-I "%s" ' % TOOLS_DIR,  # include the tools dir so cog_warning can be imported
        '-D __COGFILE__="$SOURCE" ',  # defines a global string for cog program
    ])
    cmd = '$PYTHON -m cogapp -o $TARGET $COGFLAGS $SOURCE'
    env['COGCOM'] = cmd
    action = Action('$COGCOM', '$COGCOMSTR')
    bld = env.Builder(
        action=[action],
        emitter=emitter,
    )
    env['BUILDERS']['Cog'] = bld
    env['BUILDERS']['rebuild_cog'] = rebuild_cog
    # Quiet output unless verbose mode was requested.
    if not VERBOSE:
        env['COGCOMSTR'] = 'Processing $SOURCE --> $TARGET'
    AddOption("--cog",
              dest="cog",
              action="store_true",
              default=False,
              help="Forces .cog files to be rebuilt")
def examples(ccList=None, swigNameList=None, swigSrc=None):
    """Convenience function to replace standard examples/SConscript
    boilerplate.

    Parameters
    ----------
    ccList : `list`, optional
        A sequence of C++ examples to build (including .cc extensions).
        Defaults to a ``*.cc`` glob of the examples directory, minus any
        files that end with ``*_wrap.cc`` and files present in swigSrc.
    swigNameList : `list`, optional
        A sequence of SWIG modules to build (NOT including .i extensions).
    swigSrc : `list`, optional
        Additional source files to be compiled into SWIG modules, as a
        dictionary; each key must be an entry in swigNameList, and each
        value a list of additional source files.

    Returns
    -------
    results : `list`
        The SCons nodes produced for all example programs and SWIG
        modules, each made dependent on the package library target.
    """
    if swigNameList is None:
        swigFileList = Glob("*.i")
        swigNameList = [_getFileBase(node) for node in swigFileList]
    else:
        swigFileList = [File(name) for name in swigNameList]
    if swigSrc is None:
        swigSrc = {}
    # Track sources already attached to SWIG modules so the .cc glob can
    # exclude them.
    allSwigSrc = set()
    for name, node in zip(swigNameList, swigFileList):
        src = swigSrc.setdefault(name, [])
        allSwigSrc.update(str(element) for element in src)
        src.append(node)
    if ccList is None:
        ccList = [
            node for node in Glob("*.cc")
            if (not str(node).endswith("_wrap.cc"))
            and str(node) not in allSwigSrc
        ]
    state.log.info("SWIG modules for examples: %s" % swigFileList)
    state.log.info("C++ examples: %s" % ccList)
    results = []
    for src in ccList:
        results.extend(
            state.env.Program(src, LIBS=state.env.getLibs("main")))
    for name, src in swigSrc.items():
        results.extend(
            state.env.SwigLoadableModule(
                "_" + name, src, LIBS=state.env.getLibs("main python")))
    for result in results:
        state.env.Depends(result, state.targets["lib"])
    state.targets["examples"].extend(results)
    return results
def __init__(self, infile, outdir, contents, attrs):
    """Wrap an input as an SCons node, either in-memory or on disk.

    When *contents* is given, the input becomes a Value node (not on disk)
    and *infile* is treated as the bare output filename; otherwise *infile*
    is coerced to a File node and its basename is used. The output path is
    placed under *outdir* when one is given. Extra *attrs* are attached to
    the instance as attributes.

    NOTE(review): uses ``basestring``/``iteritems`` — Python 2 only.
    """
    if contents is not None:
        # In-memory content: both the value and its signature are the text.
        self.infile = Value(contents, contents)
        self.on_disk = False
        filename = infile
    else:
        self.infile = File(infile) if isinstance(infile, basestring) else infile
        self.on_disk = True
        filename = os.path.basename(self.infile.path)
    self.outpath = os.path.join(outdir, filename) if outdir else filename
    for name, value in attrs.iteritems():
        setattr(self, name, value)
def _nanopb_proto_emitter(target, source, env):
    """SCons emitter for nanopb .proto files: add the generated header as a
    target and the matching .options file (when present) as a source."""
    stem = os.path.splitext(str(source[0]))[0]
    target.append(stem + '.pb.h')
    # This is a bit of a hack. protoc include paths work the sanest
    # when the working directory is the same as the source root directory.
    # To get that directory in _nanopb_proto_actions, we add SConscript to
    # the list of source files.
    source.append(File("SConscript"))
    options_file = stem + '.options'
    if os.path.exists(options_file):
        source.append(options_file)
    return target, source
def get_lib(lib_name, LIBPATH=None):
    """Search LIBPATH (plus the default library paths) for *lib_name* and
    return the shortest matching file as a File node, or None."""
    search_paths = list(LIBPATH) if LIBPATH else []
    search_paths += get_lib_paths()
    for lib_dir in (path(p) for p in search_paths):
        try:
            matches = lib_dir.files(lib_name)
        except OSError:
            # Unreadable/missing directory — keep looking.
            continue
        if matches:
            # Shortest name wins (ties keep original order, like sorted()[0]).
            return File(min(matches, key=len))
    return None
def generate(env):
    """
    Add builders and construction variables for the SvnVersion builder.

    Installs a PythonSetup builder that runs ``python setup.py install
    --user`` after deleting any stale build directory, then touches the
    target to record completion.
    """
    python = File(sys.executable).get_abspath()
    python = python.replace("\\", "\\\\")  # windows fix
    env['PYTHON'] = python
    cmd = "$PYTHON $SOURCE install --user"
    env['PYTHONSETUPCOM'] = cmd
    action = Action('$PYTHONSETUPCOM', '$PYTHONSETUPCOMSTR')
    bld = env.Builder(
        # NOTE(review): Touch("$TARGETS") expands to ALL targets joined by
        # spaces; correct only for a single target — confirm intent.
        action=[Delete("build"), action, Touch("$TARGETS")],
        emitter=emitter,
        # Fixed typos: 'target_factor' and 'src_suffic' were silently
        # ignored by SCons, so the factory and suffix never took effect.
        target_factory=env.fs.Entry,
        src_suffix=".py",
    )
    env['BUILDERS']['PythonSetup'] = bld
def Environment(variables, configfiles, version=None, service_module=None,
                config_class=saliweb.backend.Config):
    """Create and configure the saliweb SCons build environment.

    Registers coverage-related build variables, loads the per-build config
    file, wires up the install/check machinery and the Install*/Frontend
    helper methods, and makes 'install' the default (always-built) target.
    """
    buildmap = _add_build_variable(variables, configfiles)
    variables.Add(
        SCons.Script.PathVariable(
            'html_coverage', 'Directory to output HTML coverage reports into',
            None, SCons.Script.PathVariable.PathIsDirCreate))
    variables.Add(
        SCons.Script.BoolVariable('coverage',
                                  'Preserve output coverage files', False))
    env = SCons.Script.Environment(variables=variables)
    # Inherit some variables from the environment:
    if 'PERL5LIB' in os.environ:
        env['ENV']['PERL5LIB'] = os.environ['PERL5LIB']
    if 'PATH' in os.environ:
        env['ENV']['PATH'] = os.environ['PATH']
    # Pick the config file matching the selected 'build' variable.
    configfile = buildmap[env['build']]
    env['configfile'] = File(configfile)
    env['config'] = config = config_class(configfile)
    _setup_sconsign(env)
    _setup_version(env, version)
    _setup_service_name(env, config, service_module)
    _setup_install_directories(env)
    # Skip validation when only cleaning or printing help.
    if not env.GetOption('clean') and not env.GetOption('help'):
        _check(env)
    _install_config(env)
    _install_directories(env)
    env.AddMethod(_InstallAdminTools, 'InstallAdminTools')
    env.AddMethod(_InstallCGIScripts, 'InstallCGIScripts')
    env.AddMethod(_InstallPython, 'InstallPython')
    env.AddMethod(_InstallHTML, 'InstallHTML')
    env.AddMethod(_InstallTXT, 'InstallTXT')
    env.AddMethod(_InstallCGI, 'InstallCGI')
    env.AddMethod(_InstallPerl, 'InstallPerl')
    env.AddMethod(_make_frontend, 'Frontend')
    env.Append(BUILDERS={'RunPerlTests': Builder(action=builder_perl_tests)})
    env.Append(BUILDERS={'RunPythonTests':
                         Builder(action=builder_python_tests)})
    # 'install' is a phony always-built target that checks the installation.
    install = env.Command('install', None,
                          Action(_install_check, 'Check installation ...'))
    env.AlwaysBuild(install)
    env.Requires(install, env['config'].directories.values())
    env.Default(install)
    return env
def emitter(target, source, env):
    """Cog builder emitter: validate the .cog source, compute the target,
    and make the output depend on this tool module itself."""
    source_path = source[0].get_abspath()
    if not source_path.endswith('.cog'):
        raise SCons.Errors.UserError(
            "Source '%s' must end with extension '.cog'" % source[0])
    target = _get_target(target, source, env)
    # Rebuild generated files whenever this tool changes; point at the
    # .py file rather than its compiled .pyc.
    tool_path = str(File(__file__))
    if tool_path.endswith(".pyc"):
        tool_path = tool_path[:-1]
    env.Depends(target, tool_path)
    return target, source
def python(swigNameList=None, libs="main python", swigSrc=None):
    """Build the package's SWIG python modules.

    Defaults the module name to '<packageName tail>Lib', attaches each .i
    interface file to its module's source list, resolves *libs* (a
    getLibs() spec string, a list, or None), builds each loadable module,
    and registers the results under the "python" target.

    NOTE(review): uses ``basestring`` — Python 2 only.
    """
    if swigNameList is None:
        swigNameList = [state.env["packageName"].split("_")[-1] + "Lib"]
    swigFileList = [File(name + ".i") for name in swigNameList]
    if swigSrc is None:
        swigSrc = {}
    for name, node in zip(swigNameList, swigFileList):
        swigSrc.setdefault(name, []).append(node)
    if isinstance(libs, basestring):
        libs = state.env.getLibs(libs)
    elif libs is None:
        libs = []
    result = []
    for name, src in swigSrc.items():
        result.extend(
            state.env.SwigLoadableModule("_" + name, src, LIBS=libs))
    state.targets["python"].extend(result)
    return result
def __call__( self, target, source, env ):
    """SCons emitter: append the built Boost library nodes to *target*.

    For each requested library, computes the platform/toolchain-specific
    file name (static or shared) under the Boost stage directory and adds
    it as a target node.
    """
    for library in self._libraries:
        filename = None
        if self._linktype == 'static':
            filename = static_library_name(
                env, library, self._toolchain, self._boost.version(),
                self._variant, self._threading )
        else:
            # Shared libraries embed the full (dotted) version string.
            filename = shared_library_name(
                env, library, self._toolchain, self._boost.full_version(),
                self._variant, self._threading )
        built_library_path = os.path.join(
            self._location, self._stage_dir, 'lib', filename )
        logger.trace( "Emit Boost library [{}] to [{}]".format(
            as_notice(library), as_notice(built_library_path) ) )
        node = File( built_library_path )
        target.append( node )
    return target, source
def MkdocsScanner(node, env, path, arg=None): """Dependency scanner for listing all files within the mkdocs source directory (typically docs) We exclude the doxygen dir since it has quite a lot of content and requires a clean build anyway Args: node: the SCons directory node to scan env: the current SCons environment path: not used arg: not used Returns: A list of files. """ # Read mkdocs config yamlcfg, sitedirnode, docsdirnode = Mkdocs_Readconfig(node, env) # Look at the docs source directory searchpath = env.subst(docsdirnode.abspath) doxygen_path = os.path.join(searchpath, 'doxygen') depends = [] for d, unused_s, files in os.walk(searchpath, topdown=True): if d.startswith(doxygen_path): continue for f in files: depends.append(File(os.path.join(d, f))) return depends
def _protoc_emitter(target, source, env):
    """Process target, sources, and flags.

    SCons emitter for protoc: builds the --proto_path/--*_out flags and
    computes the generated C++/Python/Java targets for every .proto source,
    preserving each source's path relative to its proto_path root.
    """
    # always ignore target
    target = []
    # suffix
    protoc_suffix = env.subst('$PROTOC_SUFFIX')
    protoc_h_suffix = env.subst('$PROTOC_HSUFFIX')
    protoc_cc_suffix = env.subst('$PROTOC_CCSUFFIX')
    protoc_py_suffix = env.subst('$PROTOC_PYSUFFIX')
    protoc_java_suffix = env.subst('$PROTOC_JAVASUFFIX')
    # fetch all protoc flags
    if env['PROTOC_FLAGS']:
        protocflags = env.subst("$PROTOC_FLAGS", target=target, source=source)
        flags = SCons.Util.CLVar(protocflags)
    else:
        flags = SCons.Util.CLVar('')
    # flag --proto_path, -I
    proto_path = []
    if env['PROTOC_PATH']:
        inc = env['PROTOC_PATH']
        if SCons.Util.is_List(inc):
            for path in inc:
                path = Dir(path)
                #print "path:",path
                proto_path.append(path)
                flags.append('--proto_path='+str(path.abspath))
        elif SCons.Util.is_Scalar(inc):
            path = Dir(inc)
            #print "path:",path
            proto_path.append(path)
            flags.append('--proto_path='+str(path.abspath))
    # flag --cpp_out
    if env['PROTOC_CCOUT']:
        env['PROTOC_CCOUT'] = Dir(env['PROTOC_CCOUT'])
        flags.append('--cpp_out=${PROTOC_CCOUT.abspath}')
    # flag --python_out
    if env['PROTOC_PYOUT']:
        env['PROTOC_PYOUT'] = Dir(env['PROTOC_PYOUT'])
        flags.append('--python_out=${PROTOC_PYOUT.abspath}')
    # flag --java_out
    if env['PROTOC_JAVAOUT']:
        env['PROTOC_JAVAOUT'] = Dir(env['PROTOC_JAVAOUT'])
        flags.append('--java_out=${PROTOC_JAVAOUT.abspath}')
    # updated flags
    env['PROTOC_FLAGS'] = str(flags)
    #print "flags:",flags
    # source scons dirs
    src_struct = Dir('#')
    src_script = Dir('.').srcnode()
    # produce proper targets
    for src in source:
        src = File(src)
        # find proto_path for this source: the longest proto_path prefix
        # shared with the source's absolute path.
        longest_common = '/'
        for path in proto_path:
            common = os.path.commonprefix([path.abspath, src.abspath])
            if len(common) > len(longest_common):
                longest_common = common
        #print "longest_common:",longest_common
        src_relpath = os.path.relpath(src.abspath, start=longest_common)
        #print "src_relpath:",src_relpath
        # create stem by removing the $PROTOC_SUFFIX or take a guess
        if src_relpath.endswith(protoc_suffix):
            stem = src_relpath[:-len(protoc_suffix)]
        else:
            stem = src_relpath
        # C++ output, append
        if env['PROTOC_CCOUT']:
            out = Dir(env['PROTOC_CCOUT'])
            base = os.path.join(out.abspath, stem)
            target.append(File(base+protoc_cc_suffix))
            target.append(File(base+protoc_h_suffix))
        # python output, append
        if env['PROTOC_PYOUT']:
            out = Dir(env['PROTOC_PYOUT'])
            base = os.path.join(out.abspath, stem)
            target.append(File(base+protoc_py_suffix))
        # java output, append
        if env['PROTOC_JAVAOUT']:
            out = Dir(env['PROTOC_JAVAOUT'])
            base = os.path.join(out.abspath, stem)
            target.append(File(base+protoc_java_suffix))
    #print "targets:",env.subst("${TARGETS}", target=target, source=source)
    #print "sources:",env.subst("${SOURCES}", target=target, source=source)
    return target, source
def _protoc_emitter(target, source, env):
    """Process target, sources, and flags.

    gRPC-aware SCons emitter for protoc: for every .proto source, computes
    the generated C++/Python/Java targets (plus the .grpc.* variants when
    the corresponding grpc plugin is enabled), mirroring the source's path
    relative to the first include directory.
    """
    isDebug = env.get('PROTOC_DEBUG', False)

    def _print(*prtList):
        # Debug-only trace output.
        if not isDebug:
            return
        print(*prtList)

    _checkEnv(env)
    # always ignore target
    target = []
    # suffix
    protoc_suffix = env.subst('$PROTOC_SUFFIX')
    protoc_h_suffix = env.subst('$PROTOC_HSUFFIX')
    protoc_cc_suffix = env.subst('$PROTOC_CCSUFFIX')
    protoc_grpc_h_suffix = env.subst('$PROTOC_GRPC_HSUFFIX')
    protoc_grpc_cc_suffix = env.subst('$PROTOC_GRPC_CCSUFFIX')
    protoc_py_suffix = env.subst('$PROTOC_PYSUFFIX')
    protoc_grpc_py_suffix = env.subst('$PROTOC_GRPC_PYSUFFIX')
    protoc_java_suffix = env.subst('$PROTOC_JAVASUFFIX')
    protoc_grpc_java_suffix = env.subst('$PROTOC_GRPC_JAVASUFFIX')
    includePath = _getIncludes(env)
    # produce proper targets
    for src in source:
        srcPath = os.path.abspath(str(src))
        # Path of the source relative to the first include dir; generated
        # files mirror this layout under each output dir.
        srcDir = os.path.relpath(os.path.dirname(srcPath), includePath[0])
        srcName = os.path.basename(srcPath)
        # create stem by removing the $PROTOC_SUFFIX or take a guess
        if srcName.endswith(protoc_suffix):
            stem = srcName[:-len(protoc_suffix)]
        else:
            stem = srcName
        _print("stem:", stem)
        ###############
        # C++
        ###############
        if env['PROTOC_CCOUT']:
            out = env['PROTOC_CCOUT']
            base = os.path.join(out.abspath, srcDir, stem)
            target += [
                File(base + protoc_h_suffix),
                File(base + protoc_cc_suffix)
            ]
            if env['PROTOC_GRPC_CC']:
                target += [
                    File(base + protoc_grpc_h_suffix),
                    File(base + protoc_grpc_cc_suffix)
                ]
        ###############
        # Python
        ###############
        if env['PROTOC_PYOUT']:
            out = env['PROTOC_PYOUT']
            base = os.path.join(out.abspath, srcDir, stem)
            target.append(File(base + protoc_py_suffix))
            if env['PROTOC_GRPC_PY']:
                target.append(File(base + protoc_grpc_py_suffix))
        ###############
        # Java
        ###############
        if env['PROTOC_JAVAOUT']:
            out = env['PROTOC_JAVAOUT']
            _1, _2 = _getJavaTargets(srcPath,
                                     os.path.join(out.abspath, srcDir),
                                     protoc_suffix, protoc_java_suffix,
                                     protoc_grpc_java_suffix)
            target += _1
            if env['PROTOC_GRPC_JAVA']:
                target += _2
    _print('-' * 50)
    _print('flags:\n' + \
        env.subst("${PROTOC_FLAGS}",
                  target=target, source=source).replace(' ', '\n'))
    _print('path flags:\n' + \
        env.subst("${PROTOC_PATH_FLAGS}",
                  target=target, source=source).replace(' ', '\n'))
    _print('targets:\n' + \
        env.subst("${TARGETS}",
                  target=target, source=source).replace(' ', '\n'))
    _print('sources:\n' + \
        env.subst("${SOURCES}",
                  target=target, source=source).replace(' ', '\n'))
    _print('-' * 50)
    return target, source
def _checkEnv(env):
    """Populate PROTOC_FLAGS and PROTOC_PATH_FLAGS on first use.

    Builds the --*_out / --plugin flags for C++, Python and Java (and their
    gRPC variants) from the PROTOC_* environment settings, and the
    --proto_path flags from PROTOC_PATH. Raises RuntimeError when
    PROTOC_PATH is unset. Idempotent: does nothing once the flags are set.
    """
    if not env['PROTOC_FLAGS']:
        flags = SCons.Util.CLVar('')
        ###############
        # C++
        ###############
        # flag --cpp_out
        if env['PROTOC_CCOUT']:
            env['PROTOC_CCOUT'] = Dir(env['PROTOC_CCOUT'])
            flags.append('--cpp_out=${PROTOC_CCOUT.abspath}')
        # flag --plugin=protoc-gen-grpc-cpp
        if env['PROTOC_GRPC_CC']:
            env['PROTOC_GRPC_CC'] = File(env['PROTOC_GRPC_CC'])
            flags.append(
                '--plugin=protoc-gen-grpc-cpp=${PROTOC_GRPC_CC.abspath}')
            # flag --grpc-cpp_out
            if env['PROTOC_CCOUT']:
                flags.append('--grpc-cpp_out=${PROTOC_CCOUT.abspath}')
        ###############
        # Python
        ###############
        # flag --python_out
        if env['PROTOC_PYOUT']:
            env['PROTOC_PYOUT'] = Dir(env['PROTOC_PYOUT'])
            flags.append('--python_out=${PROTOC_PYOUT.abspath}')
        # flag --plugin=protoc-gen-grpc-python
        if env['PROTOC_GRPC_PY']:
            env['PROTOC_GRPC_PY'] = File(env['PROTOC_GRPC_PY'])
            flags.append(
                '--plugin=protoc-gen-grpc-python=${PROTOC_GRPC_PY.abspath}')
            # flag --grpc-python_out
            if env['PROTOC_PYOUT']:
                flags.append('--grpc-python_out=${PROTOC_PYOUT.abspath}')
        ###############
        # Java
        ###############
        # flag --java_out
        if env['PROTOC_JAVAOUT']:
            env['PROTOC_JAVAOUT'] = Dir(env['PROTOC_JAVAOUT'])
            flags.append('--java_out=${PROTOC_JAVAOUT.abspath}')
        # flag --plugin=protoc-gen-grpc-java
        if env['PROTOC_GRPC_JAVA']:
            env['PROTOC_GRPC_JAVA'] = File(env['PROTOC_GRPC_JAVA'])
            flags.append(
                '--plugin=protoc-gen-grpc-java=${PROTOC_GRPC_JAVA.abspath}')
            # flag --grpc-java_out
            if env['PROTOC_JAVAOUT']:
                flags.append('--grpc-java_out=${PROTOC_JAVAOUT.abspath}')
        # updated flags
        env['PROTOC_FLAGS'] = str(flags)
    if not env['PROTOC_PATH_FLAGS']:
        if not env['PROTOC_PATH']:
            raise RuntimeError(
                'Set PROTOC_PATH to the project top most directory')
        inc = env['PROTOC_PATH']
        protoPath = []
        if SCons.Util.is_List(inc):
            for path in inc:
                path = Dir(path)
                protoPath.append(path)
        elif SCons.Util.is_Scalar(inc):
            path = Dir(inc)
            protoPath.append(path)
        # flag --proto_path, -I
        flags = SCons.Util.CLVar('')
        for path in protoPath:
            flags.append('--proto_path=' + \
                (path if isinstance(path, str) else \
                    str(path.abspath)))
        env['PROTOC_PATH_FLAGS'] = str(flags)
def emit_app(target, source, env):
    """
    The first source is the binary program file, the rest are files/folders to
    include in the App's Resources directory.

    extra variables available:
    ICON - the filename of the icon file to use, used in package metadata. If
    not specified defaults to 'application.icns' (which should be in your
    sources list, but if you spec a file not in there it'll actually add it
    for you)
    SIGNATURE - the bundle signature, a four byte code. If not specified uses
    the first four characters of the bundle name.
    PLUGINS - a list of files/folders to place in Contents/PlugIns (Adium uses
    Contents/PlugIns, Audacity uses plug-ins AND Contents/plug-ins..., Apple
    Mail uses Contents/PlugIns, so that's what we should stick with)
    IDENTIFIER - An identifier string that specifies the application type of
    the bundle in reverse DNS format.
    DISPLAY_NAME - The application name to be encoded in the Plist and menu
    bar.
    SHORT_VERSION - Specifies the release version number of the bundle, which
    identifies a released iteration of the application. The release version
    number is a string comprised of three period-separated integers
    COPYRIGHT - Human readable copyright (NSHumanReadableCopyright)
    CATEGORY - Your application's category.
    """
    # TODO: implement a FRAMEWORKS= arg, or maybe a Framework() builder so that
    # we can declare "this app depends on these frameworks"; then look in
    # env['FRAMEWORKS'] and env['LIBS'] and figure out the library dependencies
    # *ahead of time* so that we can get scons to copy them (and so that we
    # needn't redo their install_name's &c all the time)
    # bah, unless we decide to change the interface so you pass the app icon in
    # as a separate param, then we *have* to change Mixxx to work properly with
    # the Resources/ dir
    assert len(target) == 1
    bundle = target[0]
    # pull the binary off the front since it's a special case
    binary, resources = source[0], source[1:]
    try:
        icon = env["ICON"]
    except KeyError:
        icon = "application.icns"
    try:
        plugins = env["PLUGINS"]
    except KeyError:
        plugins = env["PLUGINS"] = []
    # so, this doesn't work realistically because if the passed in icon is a
    # remote path then shit clashes but still it might be useful.
    # XXX think this through.
    # if icon not in [str(ff) for ff in source]:
    #     source.append(File(icon))
    bundle_type = "APPL"
    try:
        bundle_signature = env["SIGNATURE"]
    except KeyError:
        bundle_signature = str(bundle)[:4].lower()
    assert len(bundle_signature) == 4, "Bundle signature must be four bytes"
    # coerce the target to a Bundle
    # NOTE: SCons caches node types in its database, so re-coercion is needed.
    # we don't need to tell it Bundle(), we just need to postpend the .app
    # it seems that if we tell Builder "suffix = '.app'" then it _at that
    # point_ assumes that $NAME.app is a file, which then causes "TypeError:
    # Tried to lookup File 'Mixxx.app' as a Dir.:"
    # so just work around that here
    if type(bundle) != Bundle:
        bundle = Bundle(str(bundle).replace("_bundle", "") + ".app")
    bundle_identifier = env["IDENTIFIER"]
    bundle_version = env["VERSION"]
    bundle_display_name = env["DISPLAY_NAME"]
    bundle_short_version_string = env["SHORT_VERSION"]
    human_readable_copyright = env["COPYRIGHT"]
    application_category_type = env["CATEGORY"]
    minimum_osx_version = env["MINIMUM_OSX_VERSION"]
    # BUG: if the icon file is changed but nothing else then the plist doesn't
    # get rebuilt (but since it's a str() and not a Node() there's no clean way
    # to hook this in)
    # Precache some of the important paths
    # idea: hide these in the env[]?
    bundle = Dir(str(bundle))  # coerce the bundle target into being a Dir
    contents = Dir(os.path.join(str(bundle), "Contents"))
    # Following variable is unused
    # frameworks = Dir(
    #     os.path.join(str(contents), "Frameworks")
    # )  # we put both frameworks and standard unix sharedlibs in here
    env["APP_RESOURCES"] = Dir(os.path.join(str(contents), "Resources"))
    # env['APP_INSTALLED_BIN'] = installed_bin
    # Generate the .plist and PkgInfo files
    # The contents of the PkgInfo file are the 4-byte package type followed by
    # the 4-byte signature of your application. Thus, for the TextEdit
    # application, whose type is 'APPL' and whose signature is 'ttxt', the file
    # would contain the ASCII string "APPLttxt".
    env.Writer(
        File(os.path.join(str(contents), "PkgInfo")),
        [],
        DATA="%s%s" % (bundle_type, bundle_signature),
    )
    # Bug #1258435: executable name must match CFBundleExecutable otherwise
    # case-sensitive file systems break. Don't use binary.name.title() here.
    plist_data = {
        "CFBundleExecutable": binary.name,
        "CFBundleIconFile": icon,
        "CFBundlePackageType": bundle_type,
        "CFBundleSignature": bundle_signature,
        "CFBundleIdentifier": bundle_identifier,
        "CFBundleDisplayName": bundle_display_name,
        "CFBundleVersion": bundle_version,
        "CFBundleShortVersionString": bundle_short_version_string,
        "NSHumanReadableCopyright": human_readable_copyright,
        "NSPrincipalClass": "NSApplication",
        "NSHighResolutionCapable": "True",
        "LSApplicationCategoryType": application_category_type,
        "LSMinimumSystemVersion": minimum_osx_version,
    }
    if env["FOR_APP_STORE"]:
        plist_data["ForAppStore"] = "yes"
    env.Plist(os.path.join(str(contents), "Info"), PLIST=plist_data)
    # NB: only need CFBundleExecutable if the binary name differs from the
    # bundle name
    # todo:
    # """Application Keys At a minimum, all applications should contain the
    # following keys in their information property list file:
    # CFBundleDisplayName CFBundleIdentifier CFBundleName CFBundlePackageType
    # CFBundleShortVersionString CFBundleSignature CFBundleVersion
    # LSHasLocalizedDisplayName NSHumanReadableCopyright
    # NSAppleScriptEnabled"""
    # further: we should support generating document associations
    # Install each resource, optionally redirected by APP_RESOURCES_MAP.
    resource_map = env.get("APP_RESOURCES_MAP", {})
    for i in resources:
        path = resource_map.get(str(i), "")
        target = env["APP_RESOURCES"]
        if path != "":
            target = Dir(os.path.join(str(target), path))
        if isinstance(i, SCons.Node.FS.Dir):
            InstallDir(target, i, env)
        elif isinstance(i, SCons.Node.FS.File) or isinstance(i, str):
            env.Install(target, i)
    plugins = env["PLUGINS"]
    return bundle, source + plugins  # +[installed_bin]
def build_dmg(target, source, env):
    """
    Builds a *.dmg file. Takes the given source files, makes a temporary
    directory, copies them all there, and then packages that directory into
    a .dmg.
    """
    # Local import: tempfile is only needed by this action.
    import tempfile
    # TODO: make emit_dmg emit that we are making the Dmg that we are making
    # since we are building into a single .dmg, coerce target to point at the
    # actual name
    assert len(target) == 1
    target = target[0]
    # Fixed: os.tmpnam() was removed in Python 3 and was race-prone;
    # mkdtemp() both names and creates the staging directory securely.
    dmg = tempfile.mkdtemp(prefix=env["VOLNAME"].strip() + "-dmg")
    for f in source:
        print("Copying", f)
        a, b = str(f), os.path.join(dmg, os.path.basename(str(f.path)))
        if isinstance(f, SCons.Node.FS.Dir):
            # XXX there's a lot of cases that could throw this off,
            # particularly if you try to pass in subdirs
            copier = shutil.copytree
        elif isinstance(f, SCons.Node.FS.File):
            copier = shutil.copy
        else:
            raise Exception("%s is neither Dir nor File node? Bailing out." % f)
        try:
            copier(a, b)
        except Exception as e:
            print("ERRRR", e)
            raise Exception("Error copying %s: " % (a, ), e)
    # Symlink Applications to /Applications so users can drag-install.
    os.system("ln -s /Applications %s" % os.path.join(dmg, "Applications"))
    if env["ICON"]:
        env["ICON"] = File(str(env["ICON"]))
        # make sure the given file is an icon; scons does this wrapping for us
        # on sources and targets but not on environment vars (obviously, that
        # would be stupid).
        # XXX this doesn't seem to work, at least not on MacOS 10.5
        # the MacFUSE people have solved it, though, see "._" in
        # http://www.google.com/codesearch/p?hl=en#OXKFx3-7cSY/tags/macfuse-1.0.0/filesystems-objc/FUSEObjC/FUSEFileSystem.m&q=volumeicon
        # appearently it requires making a special volume header file named
        # "._$VOLNAME" with a binary blob in it But also the Qt4 dmg has a
        # working icon, and it has no ._$VOLNAME file
        # XXX bug: will crash if not given an icon file
        shutil.copy(str(env["ICON"]), os.path.join(dmg, ".VolumeIcon.icns"))
        # is there an sconsey way to declare this? Would be nice so that it
        # could write what
        system('SetFile -a C "%s"' % dmg)
    # TODO(rryan): hdiutil has a bug where if srcfolder is greater than 100M it
    # fails to create a DMG with error -5341. The actual size of the resulting
    # DMG is not affected by the -size parameter -- I think it's just the size
    # of the "partition" in the DMG. Hard-coding 150M is a band-aid to get the
    # build working again while we figure out the right solution.
    if system("hdiutil create -size 150M -srcfolder %s -format UDBZ -ov "
              "-volname %s %s" % (dmg, env["VOLNAME"], target)):
        raise Exception("hdiutil create failed")
    shutil.rmtree(dmg)
def tests(pyList=None, ccList=None, swigNameList=None, swigSrc=None,
          ignoreList=None, noBuildList=None, pySingles=None, args=None):
    """Build and register the package's C++ and Python tests.

    Discovers .i/.py/.cc test files when lists are omitted, builds the C++
    test programs and SWIG test modules, runs tests through a Control
    object (honoring ignoreList/args), and registers everything under the
    "tests" target. pySingles lists python tests that must run standalone
    rather than through pytest auto-discovery.
    """
    if noBuildList is None:
        noBuildList = []
    if pySingles is None:
        pySingles = []
    if swigNameList is None:
        swigFileList = Glob("*.i")
        swigNameList = [_getFileBase(node) for node in swigFileList]
    else:
        swigFileList = [File(name + ".i") for name in swigNameList]
    if swigSrc is None:
        swigSrc = {}
    # Track all extra SWIG sources so the .cc glob can exclude them.
    allSwigSrc = set()
    for name, node in zip(swigNameList, swigFileList):
        src = swigSrc.setdefault(name, [])
        allSwigSrc.update(str(element) for element in src)
        src.append(node)
    if pyList is None:
        pyList = [node for node in Glob("*.py")
                  if _getFileBase(node) not in swigNameList
                  and os.path.basename(str(node)) not in noBuildList]
        # if we got no matches, reset to None so we do not enable
        # auto test detection in pytest
        if not pyList:
            pyList = None
    if ccList is None:
        ccList = [node for node in Glob("*.cc")
                  if (not str(node).endswith("_wrap.cc"))
                  and str(node) not in allSwigSrc
                  and os.path.basename(str(node)) not in noBuildList]
    if ignoreList is None:
        ignoreList = []

    def s(l):
        # Stringify a possibly-None list for logging.
        if l is None:
            return ['None']
        return [str(i) for i in l]

    state.log.info("SWIG modules for tests: %s" % s(swigFileList))
    state.log.info("Python tests: %s" % s(pyList))
    state.log.info("C++ tests: %s" % s(ccList))
    state.log.info("Files that will not be built: %s" % noBuildList)
    state.log.info("Ignored tests: %s" % ignoreList)
    # NOTE(review): `tests` here must refer to an imported module providing
    # Control — as a free function this name shadows it; confirm the
    # enclosing module's imports (looks like it was lifted from a method).
    control = tests.Control(state.env, ignoreList=ignoreList, args=args,
                            verbose=True)
    for ccTest in ccList:
        state.env.Program(ccTest, LIBS=state.env.getLibs("main test"))
    swigMods = []
    for name, src in swigSrc.items():
        swigMods.extend(
            state.env.SwigLoadableModule("_" + name, src,
                                         LIBS=state.env.getLibs("main python"))
        )
    # Warn about insisting that a test in pySingles starts with test_ and
    # therefore might be automatically discovered by pytest. These files
    # should not be discovered automatically.
    for node in pySingles:
        if str(node).startswith("test_"):
            state.log.warn("Warning: {} should be run independently but"
                           " can be automatically discovered".format(node))
    # Ensure that python tests listed in pySingles are not included in pyList.
    if pyList is not None:
        pyList = [str(node) for node in pyList if str(node) not in pySingles]
    ccList = [control.run(str(node)) for node in ccList]
    pySingles = [control.run(str(node)) for node in pySingles]
    # If we tried to discover .py files and found none, do not then
    # try to use auto test discovery.
    if pyList is not None:
        pyList = [control.runPythonTests(pyList)]
    else:
        pyList = []
    pyList.extend(pySingles)
    for pyTest in pyList:
        state.env.Depends(pyTest, ccList)
        state.env.Depends(pyTest, swigMods)
        state.env.Depends(pyTest, state.targets["python"])
        state.env.Depends(pyTest, state.targets["shebang"])
    result = ccList + pyList
    state.targets["tests"].extend(result)
    return result