def _customize_environment(self):
    # Configure the SCons environment for a static Windows (MSVC) build.
    self.env['NS_ON_WINDOWS'] = True
    self.env['NSOUND_PLATFORM_OS'] = "NSOUND_PLATFORM_OS_WINDOWS"
    self.env['NS_BUILD_STATIC'] = True
    import platform
    cpu = platform.machine().upper()
    if cpu in ['X86_64', 'AMD64']:
        cpu = 'x64'
    # Defaults
    CPPDEFINES = []
    CPPPATH = [Dir('external/include')]
    CXXFLAGS = "/nologo /O2 /W3 /EHsc /MD /Gd /TP /Zm256".split()
    # Pass the whole OS environment through, presumably so the MSVC tool
    # chain can be located -- TODO confirm this is intentional.
    ENV = os.environ
    LIBPATH = [Dir('external/win32/lib/%s' % cpu)]
    LIBS = []
    LINKFLAGS = ["/nologo"]
    self.env.AppendUnique(CPPDEFINES=CPPDEFINES,
                          CPPPATH=CPPPATH,
                          CXXFLAGS=CXXFLAGS,
                          ENV=ENV,
                          LIBPATH=LIBPATH,
                          LIBS=LIBS,
                          LINKFLAGS=LINKFLAGS)
    # Manifest handling
    self._generate_vc_runtime_manifest()
def InstallDir(target, source, env):  # XXX this belongs not in this module
    """
    Copies the given source dir inside of the given target dir.
    """
    # SCons really dislikes directories as targets, so translate
    # install(a/, b/) into install(a/b/, [files in b]) and recurse into
    # any subdirectories.
    entries = Glob(os.path.join(str(source), "*"))
    plain_files = [e for e in entries if isinstance(e, SCons.Node.FS.File)]
    sub_dirs = [e for e in entries if isinstance(e, SCons.Node.FS.Dir)]

    base_name = os.path.basename(str(source))
    dest = Dir(os.path.join(str(target), base_name))

    # Install the files that live directly in this directory...
    nodes = env.Install(dest, plain_files)
    # ...then recursively install every subfolder beneath the new dest.
    for sub in sub_dirs:
        nodes += InstallDir(dest, sub, env)
    return nodes
def _customize_environment(self):
    """Configure the SCons environment for a static Cygwin build.

    Sets the Cygwin platform flags, chooses debug vs. optimized compiler
    flags, and registers the external include/lib paths.
    """
    self.env['NS_ON_CYGWIN'] = True
    self.env['NSOUND_PLATFORM_OS'] = "NSOUND_PLATFORM_OS_CYGWIN"
    self.env['NS_BUILD_STATIC'] = True
    CXXFLAGS = []
    if self.env['NS_DEBUG_BUILD']:
        CXXFLAGS.append("-g")
    else:
        CXXFLAGS.extend([
            "-fno-strict-aliasing",
            "-fwrapv",
            "-O2",
        ])
    import platform
    cpu = platform.machine().upper()
    if cpu in ['X86_64', 'AMD64']:
        cpu = 'amd64'
    CPPDEFINES = []
    CPPPATH = [Dir('external/include')]
    LIBPATH = [Dir('external/cygwin/lib/%s' % cpu)]
    LIBS = []
    # BUG FIX: CXXFLAGS was computed above but never passed to
    # AppendUnique, so the debug/optimization flags were silently dropped.
    self.env.AppendUnique(CPPDEFINES=CPPDEFINES,
                          CPPPATH=CPPPATH,
                          CXXFLAGS=CXXFLAGS,
                          LIBPATH=LIBPATH,
                          LIBS=LIBS)
def _customize_environment(self):
    """Configure the SCons environment for a static MSVC (C++14) build."""
    import platform

    self.env['NS_ON_WINDOWS'] = True
    self.env['NSOUND_PLATFORM_OS'] = "NSOUND_PLATFORM_OS_WINDOWS"
    self.env['NS_BUILD_STATIC'] = True

    arch = platform.machine().upper()
    if arch in ('X86_64', 'AMD64'):
        arch = 'x64'

    # Compiler / linker defaults for cl.exe.
    self.env.AppendUnique(
        CPPDEFINES=["_CRT_SECURE_NO_WARNINGS"],
        CPPPATH=[Dir('external/include')],
        CXXFLAGS="/nologo /O2 /W3 /EHsc /MD /Gd /TP /Zm256 /std:c++14".split(),
        LIBPATH=[Dir('external/win32/lib/%s' % arch)],
        LIBS=[],
        LINKFLAGS=["/nologo"])
def copy_file(src_dir, dst_dir, name):
    """Copy *name* from *src_dir* into *dst_dir*, creating *dst_dir* first.

    Both directory arguments may be SCons path strings (including the
    '#' top-level token); they are resolved through Dir().
    """
    from shutil import copy
    src_path = os.path.join(Dir(src_dir).abspath, name)
    dst_dir = Dir(dst_dir).abspath
    # exist_ok avoids the check-then-create race of the former
    # isdir() + makedirs() pair.
    os.makedirs(dst_dir, exist_ok=True)
    copy(src_path, dst_dir)
def lcov_generator(source, target, env, for_signature):
    """Build the 'lcov --capture' command line writing to target[0]."""
    parts = ['lcov --capture', '--output-file', target[0].abspath]
    # Optional directory arguments, taken from the environment when set.
    for var, flag in (('LCOVDIR', '--directory'),
                      ('LCOVBASEDIR', '--base-directory')):
        if var in env:
            parts += [flag, str(Dir(env[var]))]
    return ' '.join(Flatten(parts))
def copy_file(src_dir, dst_dir, src_name, dst_name=""): from shutil import copy src_path = os.path.join(Dir(src_dir).abspath, src_name) dst_dir = Dir(dst_dir).abspath if not os.path.isdir(dst_dir): os.makedirs(dst_dir) if dst_name: copy(src_path, os.path.join(dst_dir, dst_name)) else: copy(src_path, dst_dir)
def emitter(target, source, env):
    """
    Add all dependencies to this library. Called by SCons.Builder internally.

    @todo: Check for 'TARGET' and 'CMD' in all commands.
    @todo: Add "Value('CREATE_FOLDER')" check for extract
    """
    lib_key = source[0].get_contents()
    # Check if this library definition exists.
    # NOTE: dict.has_key() and the print statement were removed in
    # Python 3; use 'in' and print() so the emitter runs on both.
    if lib_key not in env['LIB_BUILDER']:
        print('Error: %s is not defined.' % lib_key)
        Exit(1)
    lib = env['LIB_BUILDER'][lib_key]
    # Add all lib properties as source value dependency
    source.append(Value(lib))
    _substituteVars(lib)
    if 'URL' in lib:
        download = env.Download(target=lib['ARCHIVE'], source=lib['URL'])
    else:
        download = lib['ARCHIVE']
    extract = env.Extract(target=lib['FOLDER'], source=download)
    # Add dependencies for all CMDS
    for cmd in lib['CMDS']:
        # Get command working directory
        if 'CWD' in cmd:
            folder = os.path.join(Dir('.').path, cmd['CWD'])
        else:
            folder = os.path.join(Dir('.').path, lib['FOLDER'])
        if isinstance(cmd['TARGET'], list):
            # List-ified so this also works where map() returns an iterator.
            cmdTarget = [File(t) for t in cmd['TARGET']]
        else:
            # TODO: Find why SCons adds weird dependencies when 'File' is
            # used instead of 'Value'
            cmdTarget = Value(cmd['TARGET'])
        source.append(env.Command(action=cmd['CMD'],
                                  target=cmdTarget,
                                  source=extract,
                                  chdir=folder))
    return target, source
def ProtocEmitter(target, source, env):
    # SCons emitter: for each .proto source, register the generated
    # protobuf-c outputs (.pb-c.h / .pb-c.c) as build targets.
    dirOfCallingSConscript = Dir('.').srcnode()
    env.Prepend(PROTOCPROTOPATH=dirOfCallingSConscript.path)
    source_with_corrected_path = []
    for src in source:
        # Strip the calling SConscript's directory prefix from the source
        # path so protoc sees paths relative to PROTOCPROTOPATH.
        # NOTE(review): commonprefix() is character-based and can split in
        # the middle of a path component for siblings like 'foo'/'foobar'
        # -- confirm inputs never hit that case.
        commonprefix = os.path.commonprefix(
            [dirOfCallingSConscript.path, src.srcnode().path])
        if len(commonprefix) > 0:
            source_with_corrected_path.append(
                src.srcnode().path[len(commonprefix + os.sep):])
        else:
            source_with_corrected_path.append(src.srcnode().path)
    source = source_with_corrected_path
    for src in source:
        modulename = os.path.splitext(src)[0]
        if env['PROTOCCOUTDIR']:
            base = os.path.join(env['PROTOCCOUTDIR'], modulename)
            target.append(base + '.pb-c.h')
            target.append(base + '.pb-c.c')
    return target, source
def generate(env, **kw):
    """
    Add this builder to the 'env'. Called by SCons internally.
    """
    # Check for needed builders, else add them.
    # NOTE: dict.has_key() was removed in Python 3; 'in' behaves
    # identically on Python 2 as well.
    if 'Download' not in env['BUILDERS']:
        env.Tool('download_builder', toolpath=['../scons'])
    if 'Extract' not in env['BUILDERS']:
        env.Tool('extract_builder', toolpath=['../scons'])
    # Add command line option to the environment
    opts = Options(None)
    opts.Add(
        PathOption('LIBS_PREFIX',
                   help='Where all libraries are installed',
                   default=os.path.join(Dir('.').abspath, 'local'),
                   validator=PathOption.PathAccept))
    opts.Update(env)
    # Generate command line help
    Help(opts.GenerateHelpText(env))
    # Init LibBuilder properties repository
    if 'LIB_BUILDER' not in env:
        env['LIB_BUILDER'] = dict()
    # Add this builder to the env
    env['BUILDERS']['LibBuilder'] = Builder(
        action=action,
        emitter=emitter,
        target_factory=Alias,
        source_factory=Value,
    )
def build(env_mono):
    """Register the GodotTools editor-assemblies build command."""
    assert env_mono['tools']

    bin_dir = Dir('#bin').abspath
    tools_dir = os.path.join(bin_dir, 'GodotSharp', 'Tools')
    api_debug_dir = os.path.join(bin_dir, 'GodotSharp', 'Api', 'Debug')

    # The tools build consumes the Debug API assemblies.
    sources = [os.path.join(api_debug_dir, name)
               for name in ('GodotSharp.dll', 'GodotSharpEditor.dll')]

    names = [
        'GodotTools.dll',
        'GodotTools.BuildLogger.dll',
        'GodotTools.ProjectEditor.dll',
        'DotNet.Glob.dll',
        'GodotTools.Core.dll',
    ]
    if env_mono['target'] == 'debug':
        names += [
            'GodotTools.pdb',
            'GodotTools.BuildLogger.pdb',
            'GodotTools.ProjectEditor.pdb',
            'GodotTools.Core.pdb',
        ]
    targets = [os.path.join(tools_dir, name) for name in names]

    cmd = env_mono.CommandNoCache(targets, sources, build_godot_tools,
                                  module_dir=os.getcwd())
    env_mono.AlwaysBuild(cmd)
def build(env_mono):
    """Build the GodotSharp API solution for both Debug and Release.

    Returns the command node of the last (Release) build so callers can
    depend on the full API build.
    """
    assert env_mono['tools']

    filenames = [
        'GodotSharp.dll', 'GodotSharp.pdb', 'GodotSharp.xml',
        'GodotSharpEditor.dll', 'GodotSharpEditor.pdb', 'GodotSharpEditor.xml'
    ]

    depend_cmd = []
    for build_config in ['Debug', 'Release']:
        api_dir = os.path.join(Dir('#bin').abspath, 'GodotSharp', 'Api',
                               build_config)
        targets = [os.path.join(api_dir, name) for name in filenames]
        cmd = env_mono.CommandNoCache(targets, depend_cmd, build_api_solution,
                                      module_dir=os.getcwd(),
                                      solution_build_config=build_config)
        env_mono.AlwaysBuild(cmd)
        # Make the Release build of the API solution depend on the Debug
        # build so SCons never runs them in parallel -- concurrent MSBuild
        # invocations have been seen to hang indefinitely waiting for input.
        depend_cmd = cmd

    return depend_cmd
def build(env_mono, api_sln_cmd):
    """Register the GodotTools build, ordered after the API solution build."""
    assert env_mono['tools']

    tools_dir = os.path.join(Dir('#bin').abspath, 'GodotSharp', 'Tools')

    names = [
        'GodotTools.dll',
        'GodotTools.IdeConnection.dll',
        'GodotTools.BuildLogger.dll',
        'GodotTools.ProjectEditor.dll',
        'DotNet.Glob.dll',
        'GodotTools.Core.dll',
        'JetBrains.Annotations.dll',
        'Newtonsoft.Json.dll',
    ]
    if env_mono['target'] == 'debug':
        names += [
            'GodotTools.pdb',
            'GodotTools.IdeConnection.pdb',
            'GodotTools.BuildLogger.pdb',
            'GodotTools.ProjectEditor.pdb',
            'GodotTools.Core.pdb',
        ]

    targets = [os.path.join(tools_dir, name) for name in names]
    cmd = env_mono.CommandNoCache(targets, api_sln_cmd, build_godot_tools,
                                  module_dir=os.getcwd())
    env_mono.AlwaysBuild(cmd)
def make_template_dir(env, mono_root):
    """Stage the Mono runtime files for an export-template directory."""
    from shutil import rmtree

    platform = env['platform']
    target = env['target']

    # Only these platforms are supported here.
    if platform not in ['windows', 'osx', 'x11', 'android']:
        assert False
    template_dir_name = 'data.mono.%s.%s.%s' % (platform, env['bits'], target)

    output_dir = Dir('#bin').abspath
    template_dir = os.path.join(output_dir, template_dir_name)
    template_mono_root_dir = os.path.join(template_dir, 'Mono')

    # Always start from a clean Mono tree.
    if os.path.isdir(template_mono_root_dir):
        rmtree(template_mono_root_dir)

    # Copy etc/mono/ (Android templates do not ship it).
    if platform != 'android':
        template_mono_config_dir = os.path.join(template_mono_root_dir,
                                                'etc', 'mono')
        copy_mono_etc_dir(mono_root, template_mono_config_dir,
                          env['platform'])

    # Copy the required shared libraries
    copy_mono_shared_libs(env, mono_root, template_mono_root_dir)
def make_template_dir(env, mono_root):
    """Stage the Mono files for a desktop export-template directory."""
    from shutil import rmtree

    platform = env['platform']
    assert is_desktop(platform)

    dir_name = 'data.mono.%s.%s.%s' % (platform, env['bits'], env['target'])
    template_dir = os.path.join(Dir('#bin').abspath, dir_name)
    template_mono_root_dir = os.path.join(template_dir, 'Mono')

    # Always start from a clean Mono tree.
    if os.path.isdir(template_mono_root_dir):
        rmtree(template_mono_root_dir)

    # Copy etc/mono/ configuration, then the required shared libraries.
    template_mono_config_dir = os.path.join(template_mono_root_dir,
                                            'etc', 'mono')
    copy_mono_etc_dir(mono_root, template_mono_config_dir, platform)
    copy_mono_shared_libs(env, mono_root, template_mono_root_dir)
def create_gsl_cpy_commands(conf, dependencies, copy_folder):
    '''
    Create os dependent commands. On darwin: copy all gsl libs, fix the
    install names for dylibs using install_name_tool, and replace lib path
    with the patched version. On linux: do nothing
    '''
    gsl = dependencies["gsl"]
    # Nothing to do unless we are on Darwin and know where gsl lives.
    if conf.env["SYSTEM"] != "Darwin" or not gsl.lib_path:
        return []

    commands = []
    for lib in Glob(os.path.join(gsl.lib_path, "*")):
        dest = os.path.join(copy_folder, os.path.basename(lib.rstr()))
        actions = [Copy("$TARGET", "$SOURCE")]
        if "0.dylib" in lib.rstr():
            # Versioned dylibs additionally get their install name patched.
            actions.append(fix_dylib_for_darwin)
        commands.append({
            'target': '{0}'.format(dest),
            'source': '{0}'.format(lib),
            'action': actions,
        })

    # Point the dependency at the patched copies from now on.
    gsl.lib_path = Dir(copy_folder).abspath
    return commands
def ProtocEmitter(target, source, env):
    # SCons emitter for protoc: strip the calling SConscript's directory
    # prefix from each .proto source, then register the generated C++ and
    # Python outputs (and the optional descriptor set) as targets.
    dirOfCallingSConscript = Dir('.').srcnode()
    env.Prepend(PROTOCPROTOPATH = dirOfCallingSConscript.path)
    source_with_corrected_path = []
    for src in source:
        # NOTE(review): commonprefix() is character-based and may split
        # inside a path component (e.g. 'foo' vs 'foobar') -- confirm.
        commonprefix = os.path.commonprefix([dirOfCallingSConscript.path,
                                             src.srcnode().path])
        if len(commonprefix)>0:
            source_with_corrected_path.append(
                src.srcnode().path[len(commonprefix + os.sep):] )
        else:
            source_with_corrected_path.append( src.srcnode().path )
    source = source_with_corrected_path
    for src in source:
        modulename = os.path.splitext(src)[0]
        if env['PROTOCOUTDIR']:
            # C++ outputs are emitted next to the module, not under
            # PROTOCOUTDIR (the joined form is intentionally commented out).
            #base = os.path.join(env['PROTOCOUTDIR'] , modulename)
            base = os.path.join(modulename)
            target.extend( [ base + '.pb.cc', base + '.pb.h' ] )
        if env['PROTOCPYTHONOUTDIR']:
            base = os.path.join(env['PROTOCPYTHONOUTDIR'] , modulename)
            target.append( base + '_pb2.py' )
    # Optional FileDescriptorSet output, only when configured.
    try:
        target.append(env['PROTOCFDSOUT'])
    except KeyError:
        pass
    #~ print "PROTOC SOURCE:", [str(s) for s in source]
    #~ print "PROTOC TARGET:", [str(s) for s in target]
    return target, source
def make_template_dir(env, mono_root):
    """Stage the Mono files for a desktop export-template directory."""
    from shutil import rmtree

    platform = env["platform"]
    assert is_desktop(platform)

    dir_name = "data.mono.%s.%s.%s" % (platform, env["bits"], env["target"])
    template_dir = os.path.join(Dir("#bin").abspath, dir_name)
    template_mono_root_dir = os.path.join(template_dir, "Mono")

    # Always start from a clean Mono tree.
    if os.path.isdir(template_mono_root_dir):
        rmtree(template_mono_root_dir)

    # Copy etc/mono/ configuration, then the required shared libraries.
    template_mono_config_dir = os.path.join(template_mono_root_dir,
                                            "etc", "mono")
    copy_mono_etc_dir(mono_root, template_mono_config_dir, platform)
    copy_mono_shared_libs(env, mono_root, template_mono_root_dir)
def build_project_editor_only(env_mono):
    """Build only GodotTools.ProjectEditor and its direct dependencies."""
    assert env_mono['tools']

    tools_dir = os.path.join(Dir('#bin').abspath, 'GodotSharp', 'Tools')

    names = ['GodotTools.ProjectEditor.dll', 'DotNet.Glob.dll',
             'GodotTools.Core.dll']
    if env_mono['target'] == 'debug':
        names += ['GodotTools.ProjectEditor.pdb', 'GodotTools.Core.pdb']

    targets = [os.path.join(tools_dir, name) for name in names]
    cmd = env_mono.CommandNoCache(targets, [],
                                  build_godot_tools_project_editor,
                                  module_dir=os.getcwd())
    env_mono.AlwaysBuild(cmd)
def _antlr_emitter(target, source, env):
    """Process sources and flags"""
    # SCons emitter for ANTLR grammars: normalize ANTLR_* options into
    # ANTLR_FLAGS and predict the generated Lexer/Parser/TokenTypes files.
    target = []
    antlr_suffix = env.subst('$ANTLR_SUFFIX')
    antlr_h_suffix = env.subst('$ANTLR_HSUFFIX')
    antlr_cc_suffix = env.subst('$ANTLR_CCSUFFIX')
    antlr_txt_suffix = env.subst('$ANTLR_TXTSUFFIX')
    if env['ANTLR_FLAGS']:
        antlrflags = env.subst('$ANTLR_FLAGS', target=target, source=source)
        flags = SCons.Util.CLVar(antlrflags)
    else:
        flags = SCons.Util.CLVar('')
    # -o flag
    if env['ANTLR_OUT']:
        env['ANTLR_OUT'] = Dir(env['ANTLR_OUT'])
        flags.append('-o ${ANTLR_OUT}')
    # -glib flag
    if env['ANTLR_GLIB']:
        env['ANTLR_GLIB'] = File(env['ANTLR_GLIB'])
        flags.append('-glib ${ANTLR_GLIB}')
        #TODO: ImpTokenTypes!?
    # update antlr flags
    env['ANTLR_FLAGS'] = str(flags)
    # compute targets
    deps = []
    for src in source:
        src = File(src)
        stem = src.abspath
        if stem.endswith(antlr_suffix):
            stem = stem[:-len(antlr_suffix)]
            # NOTE(review): assumed the ImpTokenTypes dependency is added
            # only for sources carrying the ANTLR suffix -- confirm.
            deps.append(File(stem + 'ImpTokenTypes' + antlr_txt_suffix))
        # When -o is set, generated files land in ANTLR_OUT instead of
        # next to the grammar.
        if env['ANTLR_OUT']:
            out = Dir(env['ANTLR_OUT'])
            stem = os.path.join(out.abspath, os.path.basename(stem))
        for kind in ('Lexer', 'Parser'):
            for ext in (antlr_h_suffix, antlr_cc_suffix):
                target.append(File(stem + kind + ext))
        for kind in ('', 'Lex'):
            for ext in (antlr_h_suffix, antlr_txt_suffix):
                target.append(File(stem + kind + 'TokenTypes' + ext))
    # Every predicted target depends on every ImpTokenTypes file.
    for t in target:
        for d in deps:
            env.Depends(t, d)
    return (target, source)
def __generator(target, source, env, for_signature):
    # Build the command argv. When env['basedir'] is set (and is not the
    # project root '#'), target/source paths are rebased relative to it and
    # the command is run through the run-with-cwd helper so the tool sees
    # relative paths from that directory.
    # NOTE(review): Dir(env.get('basedir')) receives None when 'basedir'
    # is unset -- confirm SCons tolerates that before the falsy check.
    baseDir = Dir(env.get('basedir'))
    if baseDir and baseDir != Dir('#'):
        targetPaths = [
            relpath(node.abspath, baseDir.abspath) for node in target
        ]
        sourcePaths = [
            relpath(node.abspath, baseDir.abspath) for node in source
        ]
        here = File(__file__).dir
        helper = here.File('../tests/run-with-cwd')
        prefix = [helper, baseDir]
    else:
        targetPaths = target
        sourcePaths = source
        prefix = []
    return [prefix + argv + targetPaths + sourcePaths]
def Mkdocs_Readconfig(cfgfile, env):
    """Read the mkdocs yaml configuration file.

    Returns a (yamlcfg, sitedirnode, docsdirnode) tuple, where the dir
    nodes fall back to mkdocs' defaults ('site' and 'docs') when neither
    the environment nor the config file specifies them.
    """
    with open(str(cfgfile), 'r') as stream:
        # safe_load: yaml.load() without an explicit Loader is deprecated
        # and can instantiate arbitrary Python objects from the config.
        yamlcfg = yaml.safe_load(stream)

    # Determine destination site dir
    if env['Mkdocs_SiteDir']:
        sitedirnode = Dir(env['Mkdocs_SiteDir'])
    elif 'site_dir' in yamlcfg:
        sitedirnode = Dir(yamlcfg['site_dir'])
    else:
        sitedirnode = Dir('site')

    # Determine source docs dir
    if 'docs_dir' in yamlcfg:
        docsdirnode = Dir(yamlcfg['docs_dir'])
    else:
        docsdirnode = Dir('docs')

    return yamlcfg, sitedirnode, docsdirnode
def custom_path_is_dir_create(key, val, env):
    """Validator to check if Path is a directory, creating it if it does
    not exist. Similar to PathIsDirCreate, except it uses
    SCons.Script.Dir() and SCons.Script.File() in order to support the
    '#' top level directory token.
    """
    # Dir constructor will throw an error if the path points to a file
    fsDir = Dir(val)
    # BUG FIX: Node.exists is a method; the original tested the bound
    # method object itself (always truthy), so 'not fsDir.exists' was
    # always False and the directory was never created.
    if not fsDir.exists():
        # exist_ok guards against a concurrent creation race.
        os.makedirs(fsDir.abspath, exist_ok=True)
def add_build_rpath(env, pathin="."): """Add a build directory with -Wl,-rpath-link""" path = Dir(pathin).path env.AppendUnique(LINKFLAGS=["-Wl,-rpath-link=%s" % path]) env.AppendENVPath("CGO_LDFLAGS", "-Wl,-rpath-link=%s" % path, sep=" ") # We actually run installed binaries from the build area to generate # man pages. In such cases, we need LD_LIBRARY_PATH set to pick up # the dependencies env.AppendENVPath("LD_LIBRARY_PATH", path)
def generate(env, **kw):
    # Register COAST command-line options once (guarded by the module-level
    # 'added' flag), then derive build-flag and compiler CPPDEFINES from the
    # selected build configuration.
    global added
    if not added:
        added = 1
        AddOption(
            '--enable-Trace',
            dest='Trace',
            action='store_true',
            help='Enable trace support by defining COAST_TRACE, (StartTrace,\
Trace,...), see Tracer.h for details')
        import socket
        # Default the env-config directory to the current hostname.
        envconfigdir_default = socket.gethostname()
        AddOption(
            '--env-cfg',
            dest='envconfigdir',
            action='store',
            nargs=1,
            type='string',
            default=envconfigdir_default,
            help='Define directory name to use for location dependent files,\
default [' + envconfigdir_default + ']. When a config file gets copied and\
a corresponding file exists below this directory, it will get used instead\
of the original one. This allows to define configuration settings appropriate\
for the current environment.')
    buildflags = []
    buildmode = GetOption('buildcfg')
    if buildmode == 'optimized':
        buildflags.append('OPT')
    elif buildmode == 'debug':
        buildflags.append('DBG')
    else:
        buildflags.append('PROFILE')
    if GetOption('Trace'):
        # Trace builds get a distinct variant suffix and COAST_TRACE define.
        env.AppendUnique(VARIANT_SUFFIX=['_trace'])
        env.AppendUnique(CPPDEFINES=['COAST_TRACE'])
        buildflags.append('TRACE')
    # Embed the build flags as a quoted string macro, e.g. "OPT_TRACE".
    env.AppendUnique(CPPDEFINES=[
        'COAST_BUILDFLAGS' + '="\\"' + '_'.join(buildflags) + '\\""'
    ])
    # Embed a compiler identification string (name, version, flavour).
    compilerstring = [env.get('CXX', 'unknown')]
    if env.get('CXXVERSION', ''):
        compilerstring.append(env.get('CXXVERSION', 'unknown'))
    if env.get('CXXFLAVOUR', ''):
        compilerstring.append(env.get('CXXFLAVOUR', 'unknown'))
    env.AppendUnique(CPPDEFINES=[
        'COAST_COMPILER' + '="\\"' + '_'.join(compilerstring) + '\\""'
    ])
    env['__envconfigdir__'] = Dir(GetOption('envconfigdir'))
    logger.info("environment specific directory: %s",
                env['__envconfigdir__'].get_abspath())
def build(env_mono):
    """Register the GodotSharpTools solution build (editor builds only)."""
    if not env_mono['tools']:
        return

    tools_dir = os.path.join(Dir('#bin').abspath, 'GodotSharp', 'Tools')

    # Register a one-off builder that drives the msbuild solution.
    env_mono.Append(
        BUILDERS={'MonoBuildSolution': Builder(action=mono_build_solution)})
    env_mono.MonoBuildSolution(
        os.path.join(tools_dir, 'GodotSharpTools.dll'),
        'editor/GodotSharpTools/GodotSharpTools.sln')
def shebang(src=None):
    # Install scripts (default: everything under #bin.src/) into bin/,
    # rewriting their '#!...python...' first line to the configured
    # interpreter and making the result executable.
    # check if Python is called on the first line with this expression
    # This comes from distutils copy_scripts
    FIRST_LINE_RE = re.compile(r'^#!.*python[0-9.]*([ \t].*)?$')
    doRewrite = utils.needShebangRewrite()

    def rewrite_shebang(target, source, env):
        """Copy source to target, rewriting the shebang"""
        # Currently just use this python
        usepython = utils.whichPython()
        for targ, src in zip(target, source):
            with open(str(src), "r") as srcfd:
                with open(str(targ), "w") as outfd:
                    first_line = srcfd.readline()
                    # Always match the first line so we can warn people
                    # if an attempt is being made to rewrite a file that
                    # should not be rewritten
                    match = FIRST_LINE_RE.match(first_line)
                    if match and doRewrite:
                        post_interp = match.group(1) or ''
                        # Paths can be long so ensure that flake8 won't
                        # complain
                        outfd.write("#!{}{} # noqa\n".format(
                            usepython, post_interp))
                    else:
                        if not match:
                            state.log.warn(
                                "Could not rewrite shebang of {}. Please check"
                                " file or move it to bin directory.".
                                format(str(src)))
                        outfd.write(first_line)
                    # Copy the remainder of the script verbatim.
                    for line in srcfd.readlines():
                        outfd.write(line)
            # Ensure the bin/ file is executable
            oldmode = os.stat(str(targ))[ST_MODE] & 0o7777
            newmode = (oldmode | 0o555) & 0o7777
            if newmode != oldmode:
                state.log.info("changing mode of {} from {} to {}".format(
                    str(targ), oldmode, newmode))
                os.chmod(str(targ), newmode)

    if src is None:
        src = Glob("#bin.src/*")
    for s in src:
        filename = str(s)
        # Do not try to rewrite files starting with non-letters
        if filename != "SConscript" and re.match("[A-Za-z]", filename):
            result = state.env.Command(
                target=os.path.join(Dir("#bin").abspath, filename),
                source=s,
                action=rewrite_shebang)
            state.targets["shebang"].extend(result)
def register_report_folders(cls, final_dir=None, destination_dir=None):
    # Record *final_dir* as a report folder under *destination_dir*,
    # collapsing entries that share an ancestor/descendant relationship
    # into their common ancestor.
    destination_dir = str(Dir(destination_dir))
    final_dir = str(Dir(final_dir))
    if not destination_dir in cls.destination_dirs:
        # First folder registered for this destination.
        cls.destination_dirs[destination_dir] = set()
        cls.destination_dirs[destination_dir].add(final_dir)
    else:
        new_common = None
        new_folder = None
        for path in cls.destination_dirs[destination_dir]:
            common, tail1, tail2 = cuppa.path.split_common(path, final_dir)
            if common and (not tail1 or not tail2):
                # One path is a prefix of the other: keep the ancestor.
                new_common = common
                new_folder = final_dir
                break
            else:
                new_folder = final_dir
        if new_common:
            # NOTE(review): remove(new_folder) assumes final_dir is already
            # in the set; when it is a new descendant this looks like it
            # would raise KeyError (or drop the just-added ancestor when
            # common == final_dir) -- confirm intended behaviour.
            cls.destination_dirs[destination_dir].add(new_common)
            cls.destination_dirs[destination_dir].remove(new_folder)
        elif new_folder:
            cls.destination_dirs[destination_dir].add(new_folder)
def find_indent():
    """find clang-format"""
    indent = WhereIs("clang-format")
    if indent is not None:
        style = "Mozilla"  # fallback
        # Walk up from the project root looking for a .clang-format file.
        root = Dir("#").abspath
        while root != "/":
            if os.path.exists(os.path.join(root, ".clang-format")):
                if not supports_custom_format(indent):
                    # clang-format too old for the custom config; keep the
                    # fallback style.
                    break
                style = "file"
            root = os.path.dirname(root)
        return "%s --style=%s" % (indent, style)
    # clang-format not installed: pass sources through unchanged.
    return "cat"
def build(env_mono, api_sln_cmd):
    """Register the GodotTools assembly build, ordered after the API build."""
    assert env_mono["tools"]

    tools_dir = os.path.join(Dir("#bin").abspath, "GodotSharp", "Tools")

    names = ["GodotTools.dll"]
    if env_mono["target"] == "debug":
        names.append("GodotTools.pdb")

    targets = [os.path.join(tools_dir, name) for name in names]
    cmd = env_mono.CommandNoCache(targets, api_sln_cmd, build_godot_tools,
                                  module_dir=os.getcwd())
    env_mono.AlwaysBuild(cmd)
def IMPSystem(env, name=None, version=None, authors=[], brief="",
              overview="", publications=None, license="standard",
              required_modules=[], optional_dependencies=[],
              required_dependencies=[], extra_data=[], testable=False,
              parallelizable=False, last_imp_version="unknown", python=True):
    # Configure and register a "biological system" build: run its
    # SConscripts, record metadata/doc pages, wire up sample/analysis tests
    # and install its data files.
    # NOTE(review): mutable default arguments are shared across calls --
    # safe only if callers never mutate them; confirm.
    if not name:
        # Default the system name to the current directory name.
        name= Dir(".").abspath.split("/")[-1]
    if env.GetOption('help'):
        return
    dirs = scons_tools.paths.get_sconscripts(env)
    local_module=False  # NOTE(review): never read afterwards
    for d in dirs:
        env.SConscript(d, exports=['env'])
    (nenv, version, found_optional_modules, found_optional_dependencies) =\
        utility.configure(env, name, "system", version,
                          required_modules=required_modules,
                          optional_dependencies=optional_dependencies,
                          required_dependencies= required_dependencies)
    if not nenv:
        # Configuration failed: record the system as not OK and bail out.
        data.get(env).add_application(name, ok=False)
        return
    else:
        if nenv["IMP_PASS"] != "RUN":
            return
    # Doxygen page link name, e.g. "system_my_name".
    lkname="system_"+name.replace(" ", "_").replace(":", "_")
    pre="\page "+lkname+" "+name
    extrasections=[]
    if testable:
        extrasections.append(("Testable", "Yes"))
    else:
        extrasections.append(("Testable", "No"))
    if parallelizable:
        extrasections.append(("Parallelizeable", "Yes"))
    else:
        extrasections.append(("Parallelizeable", "No"))
    if last_imp_version != "unknown":
        extrasections.append(("Last known good \imp version",
                              last_imp_version))
    else:
        # NOTE(review): vtxt is assigned but never used afterwards.
        vtxt= "\n\\section lkgversion Last known good IMP version\n"+\
            "unknown"+"\n"
    data.get(env).add_system(name, link="\\ref "+lkname+' "'+name+'"',
                             dependencies=required_dependencies\
                                 +found_optional_dependencies,
                             unfound_dependencies=[x for x in
                                                   optional_dependencies
                                                   if not x in
                                                   found_optional_dependencies],
                             modules= required_modules+found_optional_modules,
                             version=version)
    # Re-run the SConscripts, skipping any under "local/".
    for d in dirs:
        if str(d).split("/")[0] != "local":
            env.SConscript(d, exports=['env'])
    scons_tools.data.get(env).add_to_alias("all", env.Alias(name))
    env= nenv
    for m in required_modules+found_optional_modules:
        env.Depends(scons_tools.data.get(env).get_alias(name),
                    scons_tools.data.get(env).get_alias(m))
    if testable:
        # Numbered sample_*/analyze_* scripts become the system's tests.
        samples= Glob("sample_[0123456789]*.py")
        samples.sort(utility.file_compare)
        analysis= Glob("analyze_[0123456789]*.py")
        analysis.sort(utility.file_compare)
        tt= []  # NOTE(review): unused
        tests = test.add_tests(env, samples+analysis, "system")
        for t in tests:
            env.Depends(t, scons_tools.data.get(env).get_alias(name))
    # link files in build dir
    allfiles= []
    for suff in ["*.txt", "*.mrc", "*.pdb", ".py", ".param", ".input",
                 ".lib"]:
        allfiles.extend(Glob("*/*"+suff))
        allfiles.extend(Glob("*"+suff))
    for f in allfiles+extra_data:
        inst=install.install(env, "biological_systems/"+name+"/", f)
        scons_tools.data.get(env).add_to_alias(env.Alias(name), inst)
        #if f.path.endswith(".py"):
        #    example.add_python_example(env, f, f.path)
    #env.AlwaysBuild(install)
    doc.add_doc_page(env, "\\page "+lkname+" "+name.capitalize(),
                     authors, version, brief, overview, publications,
                     license, extra_sections=extrasections)
    return env
def IMPModuleBuild(env, version=None, required_modules=[],
                   lib_only_required_modules=[], optional_modules=[],
                   lib_only_optional_modules=[], optional_dependencies=[],
                   config_macros=[], module=None, module_libname=None,
                   module_pylibname=None, module_include_path=None,
                   module_preproc=None, module_namespace=None,
                   module_nicename=None, required_dependencies=[],
                   alias_name=None, cxxflags=[], cppdefines=[], cpppath=[],
                   python_docs=False, local_module=False, standards=True):
    # Configure and build one IMP module: derive naming defaults from the
    # directory, configure the environment, run the module's SConscripts
    # and register aliases plus coding-standards checks.
    # NOTE(review): mutable default arguments are shared across calls.
    if env.GetOption('help'):
        return
    dta= scons_tools.data.get(env)
    if module is None:
        # Default the module name to the current directory name; a "local"
        # directory is named after its parent with a "_local" suffix.
        module=Dir('.').abspath.split('/')[-1]
        if module=="local":
            module=Dir('.').abspath.split('/')[-2]+"_local"
    if not module_libname and (module != module.lower()
                               or module.find("-") != -1):
        scons_tools.utility.report_error(
            "Module names can only have lower case characters and numbers")
    # Derive the conventional names from the module name when not given.
    if module_libname is None:
        module_libname="imp_"+module
    if module_pylibname is None:
        module_pylibname="_IMP_"+module
    if module_include_path is None:
        module_include_path="IMP/"+module
    if module_namespace is None:
        module_namespace="IMP::"+module
    if module_preproc is None:
        module_preproc=module_namespace.replace("::","_").upper()
    if module_nicename is None:
        module_nicename= "IMP."+module
    if alias_name is None:
        alias_name=module
    if python_docs:
        env.Append(IMP_PYTHON_DOCS=[module])
    optm=optional_modules+lib_only_optional_modules
    optd=optional_dependencies
    reqd=required_dependencies
    reqm=required_modules+lib_only_required_modules
    all_sconscripts=stp.get_sconscripts(env, ['data', 'examples'])
    nenv = scons_tools.utility.configure_module(
        env, module, alias_name, module_libname, version,
        required_modules=reqm,
        optional_dependencies=optd,
        optional_modules=optm,
        required_dependencies= reqd)
    if not nenv:
        # Configuration failed; skip this module entirely.
        return
    preclone=env
    env = nenv
    # Template variables consumed by the module's SConscripts.
    vars={'module_include_path':module_include_path,
          'module':module,
          'PREPROC':module_preproc,
          'EXPORT':module_preproc.replace("_", ""),
          'namespace':module_namespace,
          'module_libname':module_libname,
          'module_pylibname':module_pylibname,
          'module_nicename':module_nicename,
          'module_alias':alias_name}
    env['IMP_MODULE_VARS']=vars
    build_config=[]
    if cxxflags:
        env.Replace(CXXFLAGS=cxxflags)
    if cppdefines:
        env.Append(CPPDEFINES=cppdefines)
    if cpppath:
        env.Append(CPPPATH=cpppath)
    #if len(found_optional_modules + found_optional_dependencies)>0:
    #    print " (using " + ", ".join(found_optional_modules
    #                                 + found_optional_dependencies) + ")"
    real_config_macros=config_macros[:]
    #print "config", module, real_config_macros
    env['IMP_MODULE_CONFIG']=real_config_macros
    for s in all_sconscripts:
        env.SConscript(s, exports='env')
    if env['IMP_PASS']=="BUILD":
        dta.add_to_alias("all", _get_module_alias(env))
        # needed for data
        for m in _get_module_modules(env):
            env.Requires(dta.get_alias(_get_module_alias(env)),
                         dta.get_alias(dta.modules[m].alias))
        if standards:
            root=Dir(".").abspath
            if env.get('repository', None):
                # Map the build-dir path back into the repository checkout.
                old=Dir("#").abspath
                #print old, root, env['repository']
                root=root.replace(old,
                                  Dir(Dir("#").abspath+"/"
                                      +env['repository']).abspath)
            scons_tools.standards.add(env, [root+"/"+x for x in
                                            ["include/*.h",
                                             "include/internal/*.h",
                                             "src/*.cpp",
                                             "src/internal/*.cpp",
                                             "test/*.py",
                                             "bin/*.cpp"]])
    return env