def show_sys_vars():
    """Print platform, compiler and key SCons construction variables for debugging."""
    print('os.name = %s, sys.platform = %s' % (os.name, sys.platform))
    from distutils import util, ccompiler
    print('platform = %s' % util.get_platform())
    print('compiler = %s' % ccompiler.get_default_compiler())
    from SCons.Environment import Environment
    env = Environment()
    # Construction variables worth dumping when diagnosing toolchain issues.
    interesting = (
        'CC', 'CXX', 'PLATFORM', 'MSVC_VERSION', 'TARGET', 'TARGET_ARCH',
        'TARGET_OS', 'MSVS', 'MSVS_VERSION', 'MSVS_ARCH', 'TOOLS',
        'HOST_ARCH', 'HOST_OS', 'MSVC_BATCH', 'MSVC_USE_SCRIPT',
    )
    for name in interesting:
        print('%s = %r' % (name, env.subst('$' + name)))
def CocoEnvironment(**kwargs):
    """Create and configure an SCons Environment for building Coco/R.

    Runs the feature checks (readdir_r, expat, java, mono) and records the
    detected features in env['COCO_FEATURES'].  Returns the finished env.
    """
    if sys.platform == 'win32':
        # MSVC on win32 needs the full OS environment (PATH, INCLUDE, LIB...).
        kwargs['ENV'] = os.environ
        #kwargs['tools'] = ['gcc', 'g++', 'gnulink', 'gas', 'ar']
        #kwargs['CCFLAGS'] = ['-g', '-Wall']
    elif sys.platform.startswith('linux'):
        # BUG FIX: Python 3 reports sys.platform == 'linux' (not 'linux2'),
        # so the previous exact comparison silently skipped this branch.
        kwargs['CCFLAGS'] = ['-g', '-Wall']
    env = Environment(**kwargs)
    MonoSetup(env)
    conf = env.Configure(config_h=env['config_h'],
                         custom_tests={'CheckFunc0': CheckFunc0,
                                       'CheckJava': CheckJava,
                                       'CheckMono': CheckMono})
    conf.env['COCO_FEATURES'] = []
    if conf.CheckFunc0('readdir_r((void *)0, (void *)0, (void *)0)',
                       '#include <dirent.h>'):
        conf.Define('HAVE_READDIR_R', 1)
    if conf.CheckLibWithHeader('expat', 'expat.h', 'c',
                               'XML_ParserCreate(NULL);', autoadd=0):
        conf.Define('HAVE_EXPAT_H', 1)
        conf.env['COCO_FEATURES'].append('expat')
    if conf.CheckJava():
        conf.env['COCO_FEATURES'].append('java')
    if conf.CheckMono():
        conf.env['COCO_FEATURES'].append('mono')
    env = conf.Finish()
    return env
def __init__(self, project, python=True, **options):
    """Set up the project environment with a Jinja builder and common vars."""
    loader = jinja2.ChoiceLoader([
        cfo.jinja.macros.LOADER,
        jinja2.FileSystemLoader('.'),
    ])
    builders = options.setdefault('BUILDERS', {})
    builders.update(Jinja=JinjaBuilder(loader))
    SConsEnvironment.__init__(self, **options)
    # Register the common carefree-objects Variables:
    Variables(project, python).Update(self)
    self['PROJECT'] = project
    self['JINJALOADER'] = loader
    # Linux/GCC style flags, from explicit overrides or os.environ:
    for name in ('CPPFLAGS', 'CXXFLAGS', 'LDFLAGS'):
        value = self.get(name) or os.environ.get(name)
        if value:
            self.MergeFlags(value)
    # Windows/VC style search paths, from explicit overrides or os.environ
    # (an explicit PATH is often needed to locate the VC binaries):
    for name in ('PATH', 'INCLUDE', 'LIB'):
        value = self.get(name) or os.environ.get(name)
        if value:
            self.Append(ENV={name: value})
    # Honor a custom C++ compiler binary if one was given:
    compiler = self.get('CXX') or os.environ.get('CXX')
    if compiler:
        self.Replace(CXX=compiler)
def test_shlib_symlink_emitter(self):
    """Test shlib_symlink_emitter() """
    env = Environment(tools=['gnulink'])
    # Unversioned shared library: plain prefix+suffix name, no symlinks.
    target = env.SharedLibrary('lib', 'lib.c', SHLIBPREFIX='lib', SHLIBSUFFIX=".so")
    target_name = str(target[0])
    self.assertEqual(str(target_name), 'liblib.so',
                     "Expected target 'liblib.so' != '%s'" % target_name)
    # Versioned shared library: expect soname and unversioned symlinks.
    target = env.SharedLibrary('xyz', 'lib.c', SHLIBPREFIX='xyz',
                               SHLIBSUFFIX=".so", SHLIBVERSION='1.2.3')
    t0 = target[0]
    target_name = str(t0)
    # CONSISTENCY FIX: use self.assertEqual like the rest of this test;
    # the previous bare `assert` disappears under `python -O`.
    self.assertEqual(target_name, 'xyzxyz.so.1.2.3',
                     "Expected target 'xyzxyz.so.1.2.3' != '%s'" % target_name)
    if hasattr(t0.attributes, 'shliblinks'):
        (soname_symlink, t0_1) = t0.attributes.shliblinks[0]
        (shlib_noversion_symlink, t0_2) = t0.attributes.shliblinks[1]
        self.assertEqual(t0_1, t0, "soname_symlink target is not target[0]")
        self.assertEqual(t0_2, t0, "shlib_noversion_symlink target is not target[0]")
        self.assertEqual(str(soname_symlink), 'xyzxyz.so.1',
                         "soname symlink is not 'xyzxyz.so.1': '%s'" % str(soname_symlink))
        self.assertEqual(str(shlib_noversion_symlink), 'xyzxyz.so',
                         "shlib_noversion_symlink is not 'xyzxyz.so': '%s'" % str(shlib_noversion_symlink))
    else:
        self.fail('Target xyzxyz.so.1.2.3 has no .attributes.shliblinks')
def __init__(self, *args, **kwargs):
    """Initialize the test environment and register the 'Templated' builder."""
    Environment.__init__(self, *args, **kwargs)
    # Our personal list of things to be tested.
    self.test_libs = []
    self.test_libs_includes = []
    self.test_libs_path = []
    self.check_lib = ['check']
    self.check_lib_includes = [checklib_include]
    self.check_lib_path = ['#build/test/check']
    self.current_build_dir = None

    # Custom builder for the templating language.
    def build_from_template(target, source, env, for_signature=None):
        assert len(target) == 1
        assert len(source) == 1
        target = str(target[0])
        source = str(source[0])
        # BUG FIX: file() is a Python-2-only builtin removed in Python 3;
        # use open() with context managers so the handles are closed.
        with open(source) as template_file:
            template = template_file.read()
        output = StringIO()
        environment = {
            'include_files': ['%s.h' % (libname,) for libname in self.test_libs],
            'suite_build_functions': ['make_%s_suite' % (libname,) for libname in self.test_libs],
        }
        result = markup(template, output, environment)
        assert result is True
        # StringIO.getvalue() is text, so write in text mode.
        with open(target, 'w') as out_file:
            out_file.write(output.getvalue())

    template_builder = self.Builder(action=build_from_template)
    self['BUILDERS']['Templated'] = template_builder
def __init__(self, project, python=True, **options):
    """Initialize the environment: Jinja builder, project vars, env flags."""
    jinja_loader = jinja2.ChoiceLoader([
        cfo.jinja.macros.LOADER,
        jinja2.FileSystemLoader('.'),
    ])
    options.setdefault('BUILDERS', {})['Jinja'] = JinjaBuilder(jinja_loader)
    SConsEnvironment.__init__(self, **options)
    # Apply the common carefree-objects Variables:
    Variables(project, python).Update(self)
    self['PROJECT'] = project
    self['JINJALOADER'] = jinja_loader

    def override_or_env(key):
        # Explicit construction-variable override wins over os.environ.
        return self.get(key) or os.environ.get(key)

    # Linux/GCC style flags:
    for var in ('CPPFLAGS', 'CXXFLAGS', 'LDFLAGS'):
        flags = override_or_env(var)
        if flags:
            self.MergeFlags(flags)
    # Windows/VC style search paths (an explicit PATH is often needed to
    # find the VC binaries):
    for var in ('PATH', 'INCLUDE', 'LIB'):
        paths = override_or_env(var)
        if paths:
            self.Append(ENV={var: paths})
    # Honor a custom C++ compiler binary:
    cxx = override_or_env('CXX')
    if cxx:
        self.Replace(CXX=cxx)
def copy_extra_files(tile):
    """Copy all files listed in a copy_files and copy_products section.

    Files listed in copy_files will be copied from the specified location
    in the current component to the specified path under the output folder.

    Files listed in copy_products will be looked up with a ProductResolver
    and copied to the specified path in the output folder.  There is not
    currently a way to specify what type of product is being resolved.
    The `short_name` given must be unique across all products from this
    component and its direct dependencies.
    """
    env = Environment(tools=[])
    outputbase = os.path.join('build', 'output')
    # Plain file copies from this component.
    for src, dest in tile.settings.get('copy_files', {}).items():
        env.Command([os.path.join(outputbase, dest)], [src],
                    Copy("$TARGET", "$SOURCE"))
    # Product copies, resolved by short name.
    resolver = ProductResolver.Create()
    for short_name, dest in tile.settings.get('copy_products', {}).items():
        product = resolver.find_unique(None, short_name)
        env.Command([os.path.join(outputbase, dest)], [product.full_path],
                    Copy("$TARGET", "$SOURCE"))
def _copy_files(self, target_dir):
    """Copy test harness and file-under-test."""
    builder = Builder(action=recursive_template_action,
                      emitter=recursive_template_emitter)
    _inc_dirs, _sources, headers = unit_test.find_sources('firmware/src')
    # Render the unit-test template into target_dir.
    env = Environment(tools=[], BUILDERS={'render': builder})
    env['RECURSIVE_TEMPLATE'] = self.UNIT_TEMPLATE
    template_files = env.render([os.path.join(target_dir, '.timestamp')], [])

    def copy_into_target(path):
        # Copy one file into target_dir, keeping its basename.
        return env.Command([os.path.join(target_dir, os.path.basename(path))],
                           [path], action=Copy("$TARGET", "$SOURCE"))

    # Copy the test sources, then all headers, into the unit test.
    test_files = [copy_into_target(infile) for infile in self.files]
    test_files.extend(copy_into_target(infile)
                      for _basename, infile in viewitems(headers))
    all_files = template_files + test_files
    c_files = [str(node) for node in all_files if str(node).endswith('.c')]
    return c_files, all_files
def __init__(self, exportdirs, installdirs, platform=SCons.Platform.Platform(), tools=None, toolpath=None, options=None, **kw):
    # Build an Environment wired with this project's custom builders.
    # NOTE(review): the default `platform=SCons.Platform.Platform()` is
    # evaluated once at import time, so all calls using the default share
    # one Platform instance -- presumably harmless, but confirm.
    if toolpath is None:
        toolpath = []
    args = (self, platform, tools, toolpath, options)
    Environment.__init__(*args, **kw)
    self.exportdirs = exportdirs    # where built artifacts are exported
    self.installdirs = installdirs  # where artifacts are installed
    # Register Qt (uic/moc) and IDL code-generation builders.
    self['BUILDERS']['uic'] = uic
    self['BUILDERS']['uic_impl'] = uic_impl
    self['BUILDERS']['moc'] = moc
    self['BUILDERS']['IDLH'] = idl_h_builder
    self['BUILDERS']['IDLIMPLH'] = idl_impl_h_builder
    self['BUILDERS']['IDLCPP'] = idl_cpp_builder
    # Swap in a custom library-flag cleanup, keeping the original around
    # under '_oldstripixes' in case it is needed.
    self['_oldstripixes'] = self['_stripixes']
    self['_stripixes'] = CleanupLibFlags
    self.StringExpandFileFunc = StringExpandFileFunc
    DefineProtocolBufferBuilder(self)
def main():
    """Dispatch on the first command-line target to drive the docs build."""
    EnsureSConsVersion(3, 0, 0)
    env = Environment(ENV=os.environ,
                      tools=['Docs.Mkdocs'],
                      toolpath=[PyPackageDir('scons_tools_grbd.Tools')])
    setup_opts(env)
    # The first command-line target selects the mode to run in.
    if not COMMAND_LINE_TARGETS:
        print_useage(env)
        Exit(1)
    cmd = COMMAND_LINE_TARGETS[0]
    if cmd == 'serve':
        Default(env.MkdocsServer())
    elif cmd == 'build':
        Default(env.MkdocsBuild())
    elif cmd == 'publish':
        Default(env.MkdocsPublish("Example commit message"))
    elif cmd == 'clean':
        clean(env)
    # Alternative formats
    elif cmd == 'json':
        Default(env.MkdocsJsonBuild())
    elif cmd == 'mkcombine':
        Default(env.MkdocsCombiner())
    elif cmd == 'test1':
        Default(env.Dll2Lib('D:\\Temp\\19\\test1.lib',
                            'D:\\Temp\\19\\CoreUIComponents.dll'))
    # TODO
    elif cmd == 'pdf':
        manual_clean(env)
        markdown_target = env.MkdocsCombiner()
        Default(env.Pandoc('docs/export/site.pdf', markdown_target))
    elif cmd == 'doxygen':
        print("TODO doxygen_templates")
    else:
        print_useage(env)
        Exit(1)
def main():
    """Dispatch on the first command-line target to drive the docs site build."""
    EnsureSConsVersion(3, 0, 0)
    env = Environment(ENV=os.environ,
                      tools=['Docs.Mkdocs'],
                      toolpath=[PyPackageDir('scons_tools_grbd.Tools')])
    setup_opts(env)
    # The first command-line target selects the mode to run in.
    if not COMMAND_LINE_TARGETS:
        print_useage(env)
        Exit(1)
    cmd = COMMAND_LINE_TARGETS[0]
    if cmd == 'serve':
        # Doxygen directory can get quite big, so remove it for serve mode.
        Execute(Delete(os.path.abspath('docs/doxygen')))
        Default(env.MkdocsServer())
    elif cmd == 'build':
        Default(env.MkdocsBuild())
    elif cmd == 'publish':
        Default(env.MkdocsPublish("Site update"))
    elif cmd == 'clean':
        Default(env.MkdocsBuild())
        SetOption('clean', True)
    # Alternative formats
    elif cmd == 'json':
        Default(env.MkdocsJsonBuild())
    elif cmd == 'mkcombine':
        Default(env.MkdocsCombiner())
    # TODO
    elif cmd == 'doxygen':
        print("TODO doxygen_templates")
    # TODO
    elif cmd == 'pdf':
        manual_clean(env)
        markdown_target = env.MkdocsCombiner()
        Default(env.Pandoc('docs/export/site.pdf', markdown_target))
    else:
        print_useage(env)
        Exit(1)
def test_vars(self):
    """Test that WiX tool adds vars"""
    env = Environment(tools=["wix"])
    # The tool must define all of its construction variables...
    for required in ("WIXCANDLE", "WIXCANDLEFLAGS", "WIXLIGHTFLAGS"):
        assert env[required] is not None
    # ...and the expected file suffixes.
    assert env.subst("$WIXOBJSUF") == ".wixobj"
    assert env.subst("$WIXSRCSUF") == ".wxs"
def Tool(self, toolname, path=None):
    """Like SCons.Tool, but knows about numscons specific toolpaths."""
    toolpaths = get_numscons_toolpaths(self)
    if path:
        # Caller-supplied paths are searched before the numscons ones.
        toolpaths = path + toolpaths
    return Environment.Tool(self, toolname, toolpaths)
def test_vars(self):
    """Test that WiX tool adds vars"""
    env = Environment(tools=['wix'])
    # Tool initialization must populate the candle/light variables.
    assert env['WIXCANDLE'] is not None
    assert env['WIXCANDLEFLAGS'] is not None
    assert env['WIXLIGHTFLAGS'] is not None
    # And the suffix variables must expand to the WiX conventions.
    obj_suffix = env.subst('$WIXOBJSUF')
    src_suffix = env.subst('$WIXSRCSUF')
    assert obj_suffix == '.wixobj'
    assert src_suffix == '.wxs'
def get_env(self, **kwargs): c_flags = ['-ffunction-sections', '-fdata-sections', '-fno-exceptions', '-funsigned-char', '-funsigned-bitfields', '-fpack-struct', '-fshort-enums', '-Os', '-Wall', '-mmcu=%s' % self.MCU] # Add some missing paths to CFLAGS # Workaround for /usr/libexec/gcc/avr/ld: # cannot open linker script file ldscripts/avr5.x: No such file or directory # Workaround for /usr/libexec/gcc/avr/ld: # crtm168.o: No such file: No such file or directory extra_cflags = ['-L/usr/x86_64-pc-linux-gnu/avr/lib/', '-B/usr/avr/lib/avr5/', ] c_flags += extra_cflags if self.ARDUINO_BOARD == "leonardo": c_flags += ["-DUSB_VID=" + self.get_board_conf('build.vid')] c_flags += ["-DUSB_PID=" + self.get_board_conf('build.pid')] env_defaults = dict(CC='"' + self.AVR_BIN_PREFIX + 'gcc"', CXX='"' + self.AVR_BIN_PREFIX + 'g++"', AS='"' + self.AVR_BIN_PREFIX + 'gcc"', CPPPATH=[self.core_root], CPPDEFINES={'F_CPU': self.F_CPU, 'ARDUINO': self.ARDUINO_VER}, CFLAGS=c_flags + ['-std=gnu99'], CCFLAGS=c_flags, ASFLAGS=['-assembler-with-cpp','-mmcu=%s' % self.MCU], TOOLS=['gcc','g++', 'as']) hw_variant = os.path.join(self.ARDUINO_HOME, 'hardware/arduino/variants', self.get_board_conf('build.variant', '')) if hw_variant: env_defaults['CPPPATH'].append(hw_variant) for k, v in six.iteritems(kwargs): print('processing kwarg: %s->%s' % (k, v)) if k in env_defaults and isinstance(env_defaults[k], dict)\ and isinstance(v, dict): env_defaults[k].update(v) print(' update dict') elif k in env_defaults and isinstance(env_defaults[k], list): env_defaults[k].append(v) print(' append to list') else: env_defaults[k] = v print(' set value') print('kwargs:', kwargs) print('env_defaults:', env_defaults) env_arduino = Environment(**env_defaults) # Add Arduino Processing, Elf, and Hex builders to environment for builder_name in ['Processing', 'CompressCore', 'Elf', 'Hex', 'BuildInfo']: env_arduino.Append(BUILDERS={builder_name: getattr(self, 'get_%s_builder' % builder_name .lower())()}) return env_arduino
def ensure_image_is_hex(input_path):
    """Return a path to a hex version of a firmware image.

    If the input file is already in hex format then input_path is returned
    and nothing is done.  If it is not in hex format then an SCons action
    is added to convert it to hex and the target output file path is
    returned.

    A cache is kept so that each file is only converted once.

    Args:
        input_path (str): A path to a firmware image.

    Returns:
        str: The path to a hex version of input_path, this may be equal to
            input_path if it is already in hex format.
    """
    family = utilities.get_family('module_settings.json')
    build_dir = family.platform_independent_target().build_dirs()['build']
    # Never pick MSVC on Windows: its command-line flags are wrong for gcc.
    toolset = ['mingw'] if platform.system() == 'Windows' else ['default']
    env = Environment(tools=toolset, ENV=os.environ)
    input_path = str(input_path)
    root, ext = os.path.splitext(os.path.basename(input_path))
    if not ext:
        raise BuildError(
            "Unknown file format or missing file extension in ensure_image_is_hex",
            file_name=input_path)
    file_format = ext[1:]
    if file_format == 'hex':
        return input_path
    if file_format == 'elf':
        new_file = os.path.join(build_dir, root + '.hex')
        if new_file not in CONVERTED_HEX_FILES:
            env.Command(new_file, input_path, action=Action(
                "arm-none-eabi-objcopy -O ihex $SOURCE $TARGET",
                "Creating intel hex file from: $SOURCE"))
            CONVERTED_HEX_FILES.add(new_file)
        return new_file
    raise BuildError("Unknown file format extension in ensure_image_is_hex",
                     file_name=input_path, extension=file_format)
def setup_environment(chip):
    """ Setup the SCons environment for compiling arm cortex code """
    config = ConfigManager()
    #Make sure we never get MSVC settings for windows since that has the wrong command line flags for gcc
    if platform.system() == 'Windows':
        env = Environment(tools=['mingw'], ENV=os.environ)
    else:
        env = Environment(tools=['default'], ENV=os.environ)
    # Quote include paths so directories containing spaces survive the
    # generated command lines.
    env['INCPREFIX'] = '-I"'
    env['INCSUFFIX'] = '"'
    env['CPPPATH'] = chip.includes()
    env['ARCH'] = chip
    #Setup Cross Compiler (arm-none-eabi toolchain)
    env['CC'] = 'arm-none-eabi-gcc'
    env['AS'] = 'arm-none-eabi-gcc'
    env['LINK'] = 'arm-none-eabi-gcc'
    env['AR'] = 'arm-none-eabi-ar'
    env['RANLIB'] = 'arm-none-eabi-ranlib'
    #AS command line is by default setup for call as directly so we need to modify it to call via *-gcc to allow for preprocessing
    env['ASCOM'] = "$AS $ASFLAGS -o $TARGET -c $SOURCES"
    # Setup nice display strings unless we're asked to show raw commands
    if not config.get('build:show-commands'):
        env['CCCOMSTR'] = "Compiling $TARGET"
        env['ARCOMSTR'] = "Building static library $TARGET"
        env['RANLIBCOMSTR'] = "Indexing static library $TARGET"
        env['LINKCOMSTR'] = "Linking $TARGET"
    #Setup Compiler Flags from the chip's property database
    env['CCFLAGS'] = chip.combined_properties('cflags')
    env['LINKFLAGS'] = chip.combined_properties('ldflags')
    env['ARFLAGS'].append(chip.combined_properties('arflags')) #There are default ARFLAGS that are necessary to keep
    env['ASFLAGS'].append(chip.combined_properties('asflags'))
    #Add in compile time definitions
    defines = utilities.build_defines(chip.property('defines', {}))
    env['CCFLAGS'].append(defines)
    #Setup Target Architecture for compile, assemble and link alike
    env['CCFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    env['ASFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    env['LINKFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    #Initialize library paths (all libraries are added via dependencies)
    env['LIBPATH'] = []
    env['LIBS'] = []
    return env
def copy_tilebus_definitions(tile):
    """Copy every tilebus definition product into build/output/tilebus."""
    env = Environment(tools=[])
    destdir = os.path.join('build', 'output', 'tilebus')
    for definition in tile.find_products('tilebus_definitions'):
        destination = os.path.join(destdir, os.path.basename(definition))
        env.Command([destination], [definition], Copy("$TARGET", "$SOURCE"))
def copy_linker_scripts(tile):
    """Copy every linker script product into build/output/linker."""
    env = Environment(tools=[])
    destdir = os.path.join('build', 'output', 'linker')
    for linker in tile.find_products('linker_script'):
        name = os.path.basename(linker)
        # Linker scripts live under firmware/linker in the source tree.
        source = os.path.join("firmware", 'linker', name)
        env.Command([os.path.join(destdir, name)], [source],
                    Copy("$TARGET", "$SOURCE"))
def ALEAEnvironment(conf, *args, **kwds):
    """Build an Environment from `conf`, creating options unless supplied."""
    try:
        opts = kwds['options']
    except KeyError:
        opts = conf.Options(*args, **kwds)
    else:
        conf.UpdateOptions(opts)
    env = Environment(options=opts)
    conf.Update(env)
    # Search the build tree's include/lib directories first.
    env.Prepend(CPPPATH='$build_includedir')
    env.Prepend(LIBPATH='$build_libdir')
    return env
def copy_dependency_docs(tile):
    """Copy all documentation from dependencies into build/output/doc folder"""
    env = Environment(tools=[])
    docbase = os.path.join('build', 'output', 'doc')
    depbase = os.path.join('build', 'deps')
    for dep in tile.dependencies:
        uid = dep['unique_id']
        source_dir = os.path.join(depbase, uid, 'doc', uid)
        # Not every dependency ships documentation; skip those that don't.
        if os.path.exists(source_dir):
            env.Command([os.path.join(docbase, uid)], [source_dir],
                        Copy("$TARGET", "$SOURCE"))
def ALEASolution(options, tools=None, dir=None):
    """Create the ALEA build Environment, probing the compiler on Windows.

    BUG FIX: `tools=[]` and `dir=[]` were mutable default arguments shared
    between calls; replaced with None sentinels (backward compatible: the
    effective defaults are still empty lists).
    """
    from copy import deepcopy
    if tools is None:
        tools = []
    if dir is None:
        dir = []
    SConsignFile()
    env_compiler_options = {}
    if isinstance(platform, Win32):
        # Checking for compiler info first
        compileroptions = deepcopy(options)
        compilerconf = Config([], dir)
        compilerconf.UpdateOptions(compileroptions)
        compilerenv = Environment()
        compileroptions.Update(compilerenv)
        compilerconf.Update(compilerenv)
        if compilerenv['compiler'] == 'msvc':
            if compilerenv['MSVC_VERSION'] != '':
                env_compiler_options['MSVC_VERSION'] = compilerenv['MSVC_VERSION']
                env_compiler_options['TARGET_ARCH'] = compilerenv['TARGET_ARCH']
        elif compilerenv['compiler'] == 'mingw':
            env_compiler_options['tools'] = ['mingw']
            env_compiler_options['ENV'] = {
                'PATH': [find_executable_path_from_env('gcc.exe', strip_bin=False)],
                'TMP': os.environ['TMP']
            }
    conf = Config(tools, dir)
    conf.UpdateOptions(options)
    if len(env_compiler_options) > 0:
        print(('Force environment with compiler options : ' + str(env_compiler_options)))
        env = Environment(options=options, **env_compiler_options)
    else:
        env = Environment(options=options)
    options.Update(env)
    conf.Update(env)
    Help(options.GenerateHelpText(env))
    prefix = env['build_prefix']
    VariantDir(prefix, '.')
    env.Prepend(CPPPATH='$build_includedir')
    env.Prepend(LIBPATH='$build_libdir')
    # If scons is run in a conda environment, append paths
    if CONDA_ENV:
        PREFIX = CONDA_PREFIX
        env.Prepend(CPPPATH=pj(PREFIX, 'include'))
        env.Prepend(LIBPATH=pj(PREFIX, 'lib'))
    return env
def __init__( self ):
    # Build an embedded-target environment from the host OS environment,
    # point every SCons tool variable at the selected cross toolchain, and
    # register elf post-processing builders (Bin/Hex/Size/Dump).
    Environment.__init__( self, ENV=environ )
    self._initFromSysEnv()
    self._initBuildDate()
    tool = self._TOOLCHAIN()
    self['AR'] = tool.AR
    self['AS'] = tool.AS
    self['CC'] = tool.CC
    self['CXX'] = tool.CXX
    self['LINK'] = tool.LINK
    self['NM'] = tool.NM
    # NOTE(review): 'RANDLIB' looks like a typo for SCons' standard
    # 'RANLIB' variable -- confirm before renaming, since the toolchain
    # class spells its attribute the same way.
    self['RANDLIB'] = tool.RANDLIB
    self['OBJCOPY'] = tool.OBJCOPY
    self['OBJDUMP'] = tool.OBJDUMP
    # Defaults shared by every build...
    self.Append( CCFLAGS=self._DEF_CCFLAGS )
    self.Append( CPPPATH=self._DEF_CPPPATH )
    self.Append( LIBS=self._DEF_LIBS )
    self.Append( LIBPATH=self._DEF_LIBPATH )
    self.Append( LINKFLAGS=self._DEF_LINKFLAGS )
    # ...then the subclass/board specific additions.
    self.Append( CCFLAGS=self._CCFLAGS )
    self.Append( CPPPATH=self._CPPPATH )
    if self._LIBS:
        self.Append( LIBS=self._LIBS )
    self.Append( LIBPATH=self._LIBPATH )
    self.Append( LINKFLAGS=self._LINKFLAGS )
    # Builders converting the linked .elf into flashable/diagnostic forms.
    BIN_BUILDER = Builder( action = tool.OBJCOPY + ' -O binary $SOURCE $TARGET', suffix='.bin', src_suffix='.elf' )
    self.Append( BUILDERS={'Bin': BIN_BUILDER} )
    HEX_BUILDER = Builder( action = tool.OBJCOPY + ' -O ihex $SOURCE $TARGET', suffix='.hex', src_suffix='.elf' )
    self.Append( BUILDERS={'Hex': HEX_BUILDER} )
    SIZE_BUILDER = Builder( action = tool.SIZE + ' -t $SOURCE', src_suffix='.elf' )
    self.Append( BUILDERS={'Size': SIZE_BUILDER} )
    DUMP_BUILDER = Builder( action = tool.OBJDUMP + ' -adhlS $SOURCE > $TARGET', suffix='.lst', src_suffix='.elf' )
    self.Append( BUILDERS={'Dump': DUMP_BUILDER} )
    #MAP_BUILDER = Builder( action = 'touch $TARGET', suffix='.map', src_suffix='.elf' )
    #self.Append( BUILDERS={'Map': MAP_BUILDER} )
    self.findRoot()
    if not self.NO_PARALLEL:
        self.setMultiJobs()
def create_cplusplus_environment():
    """Creates a new environment with the required variables for building C/C++ projects

    @returns A new SCons environment set up for C/C++ builds"""
    environment = Environment(
        variables=_parse_default_command_line_options(),
        SOURCE_DIRECTORY='Source',
        HEADER_DIRECTORY='Include',
        TESTS_DIRECTORY='Tests',
        TESTS_RESULT_FILE="gtest-results.xml",
        REFERENCES_DIRECTORY='ThirdParty'
    )
    # Pass the 'TERM' variable through to allow colored output on Linux terminals
    if platform.system() == 'Linux' and 'TERM' in os.environ:
        environment['ENV']['TERM'] = os.environ['TERM']
    # Extension methods from the C/C++ module
    cplusplus.setup(environment)
    # Nuclex standard build settings and extensions
    _set_standard_cplusplus_compiler_flags(environment)
    _set_standard_cplusplus_linker_flags(environment)
    _register_generic_extension_methods(environment)
    _register_cplusplus_extension_methods(environment)
    return environment
def InstallEnvironment():
    """ Determine installation locations for libraries and executables. """
    ienv = Environment()
    if os.name == 'nt':
        # On Windows, install into the larch tree; DLLs go into a
        # per-architecture subdirectory.
        import larch
        prefix = larch.larchlib.sys_larchdir
        lib_dir = join(prefix, 'dlls', larch.larchlib.get_dlldir())
    else:
        prefix = '/usr/local'
        lib_dir = join(prefix, 'lib')
    # Here are our installation paths:
    ienv['i_prefix'] = prefix
    ienv['i_lib'] = lib_dir
    ienv['i_bin'] = join(prefix, 'bin')
    ienv['i_inc'] = join(prefix, 'include')
    ienv['i_data'] = join(prefix, 'share')
    return ienv
def NumpyConfigure(self, *args, **kw):
    """Configure wrapper that injects the numscons custom checkers.

    BUG FIX: dict.has_key() and the `new` module were removed in Python 3;
    replaced with `in` / setdefault and types.MethodType (both work on
    Python 2 as well).  Also closes the logfile handle in GetLastError.
    """
    import types
    # Import here to avoid adding import times when configuration is not needed
    from numscons.checkers import CheckF77BLAS, CheckF77LAPACK
    from numscons.checkers.simple_check import NumpyCheckLibAndHeader
    from numscons.checkers.fortran import CheckF77Mangling
    if 'conf_dir' in kw or 'log_file' in kw:
        # XXX handle this gracefully
        assert 0 == 1
    else:
        kw['conf_dir'] = 'sconf'
        kw['log_file'] = 'config.log'
    # Register our custom tests unless the caller already supplied them.
    custom_tests = kw.setdefault("custom_tests", {})
    custom_tests.setdefault("CheckF77Mangling", CheckF77Mangling)
    custom_tests.setdefault("CheckF77BLAS", CheckF77BLAS)
    custom_tests.setdefault("CheckF77LAPACK", CheckF77LAPACK)
    custom_tests["NumpyCheckLibAndHeader"] = NumpyCheckLibAndHeader
    config = Environment.Configure(self, *args, **kw)

    def GetLastError(self):
        # Return the last error recorded in the configuration log.
        self.logstream.flush()
        with open(str(self.logfile)) as log:
            errlines = log.readlines()
        return get_last_error_from_config(errlines)

    config.GetLastError = types.MethodType(GetLastError, config)
    return config
def teensy31(target, userOptions=None, env=None):
    """Build a Teensy 3.1 sketch: core library, sketch object, elf/eep/hex.

    BUG FIX: the previous defaults (userOptions={} and env=Environment())
    were evaluated once at import time, so the options dict was shared and
    mutated across calls and a single Environment was reused by every
    caller.  None sentinels restore per-call defaults while keeping the
    call signature compatible.
    """
    if userOptions is None:
        userOptions = {}
    if env is None:
        env = Environment()
    options = _defaultOptions.copy()
    options.update(userOptions)
    boardsTxt = _parseBoardsTxt()
    platformTxt = _parsePlatformTxt(boardsTxt, options, target)
    _setEnv(env, boardsTxt, platformTxt)
    # Compile the Arduino core sources into a static library in build/lib
    # (headers are excluded from the source list).
    env.VariantDir('build/lib', env['CPPPATH'])
    coreList = [os.path.join('build/lib', os.path.basename(_))
                for _ in env.Glob(os.path.join(env['CPPPATH'], '*.*'), strings=True)
                if not _.endswith('h')]
    env.Library('build/lib/core', coreList)
    # Compile the sketch itself, then link and convert for flashing.
    env.VariantDir('build/code', '.')
    o = env.Object('build/code/' + target + '.o', [target + '.cpp'])
    elf = env.Command(target + '.elf', [o, 'build/lib/libcore.a'],
                      platformTxt['recipe.c.combine.pattern'])
    eep = env.Command(target + '.eep', elf,
                      platformTxt['recipe.objcopy.eep.pattern'])
    hex_ = env.Command(target + '.hex', eep,
                       platformTxt['recipe.objcopy.hex.pattern'])
def __init__(self, buildSetup, buildVars, **kw):
    """Initialize the Lumiera build environment and its custom builders."""
    kw.update(VERSION=buildSetup.VERSION,
              TARGDIR=buildSetup.TARGDIR,
              DESTDIR='$INSTALLDIR/$PREFIX',
              toolpath=[buildSetup.TOOLDIR],
              variables=buildVars)
    Environment.__init__(self, **kw)
    # Local path definitions, e.g. buildExe -> env.path.buildExe
    self.path = Record(extract_localPathDefs(buildSetup))
    self.libInfo = {}
    # Load the project's extra SCons tools.
    for toolname in ("BuilderGCH", "BuilderDoxygen", "ToolDistCC", "ToolCCache"):
        self.Tool(toolname)
    register_LumieraResourceBuilder(self)
    register_LumieraCustomBuilders(self)
def DistutilsSConscript(self, name):
    """This sets up build directory correctly to play nice with distutils."""
    sname = pjoin('$src_dir', name) if self['src_dir'] else name
    Environment.SConscript(self, sname,
                           build_dir='$build_dir',
                           src_dir='$src_dir')
def __init__(self, parent=None, args=None, **kw):
    """Initialize an APR build environment with the 'subst' tool enabled."""
    Environment.__init__(self, ENV=os.environ,
                         tools=['default', 'subst'],
                         toolpath=[pjoin(os.getcwd(), 'build')],
                         **kw)
    # See SCons/Platform/__init__.py for possible values.
    known = self['PLATFORM'] in _platforms
    self['APR_PLATFORM'] = self['PLATFORM'] if known else 'unix'
    # if no *.c files are found in the original APR_PLATFORM, we switch to
    # using this fallback platform.
    self['APR_FALLBACK_PLATFORM'] = 'unix'
    self.AppendUnique(CPPPATH=['include', 'include/private',
                               'include/arch/' + self['APR_PLATFORM']])
    self.autoconf = aprconf.APRConfigure(self)
    self.AppendUnique(LIBS=['expat'])
def build_units(parent, targets):
    """Build every unit test found under `parent`, then emit a summary."""
    summary_env = Environment(tools=[])
    summary_env['TESTS'] = []
    for unit in find_units(parent):
        unit.build(targets, summary_env)
    build_summary(summary_env)
def getEnvironment():
    """Assemble the Wii/SDL build environment for Paintown."""
    import utils
    environment = Environment(ENV=os.environ)
    peg_color = 'light-cyan'
    environment['PAINTOWN_PLATFORM'] = ['wii', 'sdl']
    environment['PAINTOWN_BACKEND'] = 'sdl'
    environment['PAINTOWN_USE_PRX'] = False
    environment['PAINTOWN_TESTS'] = {'CheckPython': checkPython}
    environment['PAINTOWN_COLORIZE'] = utils.colorize
    environment['PAINTOWN_NETWORKING'] = False
    environment['LIBS'] = []
    environment['PEG_MAKE'] = "%s %s" % (
        utils.colorize('Creating peg parser', peg_color),
        utils.colorize('$TARGET', 'light-blue'))
    environment.Append(BUILDERS={'Peg': utils.pegBuilder(environment)},
                       CPPPATH=['#src', '#src/util/network/hawknl'],
                       CPPDEFINES=['USE_SDL'])
    return utils.lessVerbose(wii(environment))
def copy_dependency_images(tile):
    """Copy all firmware images from dependencies into the build/output folder."""
    env = Environment(tools=[])
    outputbase = os.path.join('build', 'output')
    depbase = os.path.join('build', 'deps')
    for dep in tile.dependencies:
        depdir = os.path.join(depbase, dep['unique_id'])
        # Each dependency is itself a tile; ask it for its firmware images.
        for image in IOTile(depdir).find_products('firmware_image'):
            name = os.path.basename(image)
            env.Command([os.path.join(outputbase, name)],
                        [os.path.join(depdir, name)],
                        Copy("$TARGET", "$SOURCE"))
def setUp(self):
    """Create the real Environment used by the tests."""
    # DEAD CODE REMOVED: a local FakeEnvironment stub used to be built and
    # assigned to self.env here, but it was immediately overwritten by the
    # real Environment below, so it had no effect on any test.
    from SCons.Environment import Environment
    self.env = Environment(AAA='aaa', NULL='')
def create_generic_environment():
    """Creates an general-purpose environment without specific support for any
    programming language or resource/asset system

    @return A new SCons environment without support for specific builds"""
    env = Environment(variables=_parse_default_command_line_options())
    _register_generic_extension_methods(env)
    return env
def get_env(self, **kwargs):
    """Build the SCons Environment used to compile an Arduino sketch.

    Keyword arguments override/extend the defaults: dict values are merged,
    list values appended, anything else replaces the default.

    CONSISTENCY FIX: this function used Python-2-only `print` statements
    and dict.iteritems() although the file also contains Python 3 style
    code; rewritten with forms that behave identically on both versions.
    """
    cFlags = ['-ffunction-sections', '-fdata-sections', '-fno-exceptions',
              '-funsigned-char', '-funsigned-bitfields', '-fpack-struct',
              '-fshort-enums', '-Os', '-mmcu=%s' % self.MCU]
    # Compiler binaries are quoted so paths containing spaces survive.
    env_defaults = dict(CC='"%s"' % (self.AVR_BIN_PREFIX + 'gcc'),
                        CXX='"%s"' % (self.AVR_BIN_PREFIX + 'g++'),
                        CPPPATH=[self.core_root],
                        CPPDEFINES={'F_CPU': self.F_CPU,
                                    'ARDUINO': self.ARDUINO_VER,
                                    'AVR': None},
                        CFLAGS=cFlags + ['-std=gnu99'],
                        CCFLAGS=cFlags,
                        TOOLS=['gcc', 'g++'])
    # Merge caller overrides (dicts merge, lists append, else replace).
    for k, v in kwargs.items():
        print('processing kwarg: %s->%s' % (k, v))
        if k in env_defaults and isinstance(env_defaults[k], dict)\
                and isinstance(v, dict):
            env_defaults[k].update(v)
            print(' update dict')
        elif k in env_defaults and isinstance(env_defaults[k], list):
            env_defaults[k].append(v)
            print(' append to list')
        else:
            env_defaults[k] = v
            print(' set value')
    print('kwargs: %s' % (kwargs,))
    print('env_defaults: %s' % (env_defaults,))
    envArduino = Environment(**env_defaults)
    # Add Arduino Processing, Elf, and Hex builders to environment
    for builder_name in ['Processing', 'Elf', 'Hex']:
        envArduino.Append(BUILDERS={
            builder_name: getattr(self,
                                  'get_%s_builder' % builder_name.lower())()
        })
    return envArduino
def CompilationEnvironment():
    """ Determine how to build the Fortran parts of feff85exafs.

    Picks compiler and linker flags per detected Fortran compiler and
    platform, wiring in the json-fortran include/lib directory.
    """
    args = {}
    jsondir = realpath(join(getcwd(), '..', 'json-fortran'))
    flags = {'jsondir': jsondir}
    gfortran_comp_flags = '-O3 -ffree-line-length-none -g -Wall'
    gfortran_link_flags = None
    json_comp_flags = ' -I{jsondir} -J{jsondir}'.format(**flags)
    json_link_flags = ' -L{jsondir} -ljsonfortran'.format(**flags)
    # darwin:
    if os.name == 'posix' and sys.platform == 'darwin':
        args['platform'] = 'darwin'
        gfortran_comp_flags = '-O2 -arch x86_64 -Wall'
        gfortran_link_flags = '-dynamiclib -L/usr/local/gfortran/lib/ -lgfortran -lgfortranbegin'
    # windows: needs work!
    if os.name == 'nt':
        #args['platform'] = 'Windows'
        pass
    env = Environment(**args)
    if env['FORTRAN'] == 'gfortran':
        # this was the suggestion in the top level Makefile in what
        # the FP gave us: "-O3 -ffree-line-length-none -finit-local-zero"
        # -O3 makes sense, as does -finit-local-zero. I don't understand
        # the advantage of -ffree-line-length-none -- it seems to
        # encourage poor code style -- like we need more of that!
        # also, -finit-local-zero fails on FMS/fmstot.f
        env = Environment(FORTRANFLAGS=gfortran_comp_flags + json_comp_flags,
                          CFLAGS='-g')
        if gfortran_link_flags is not None:
            env.Replace(SHLINKFLAGS=gfortran_link_flags + json_link_flags)
    elif env['FORTRAN'] == 'g77':
        env = Environment(FORTRANFLAGS='-Wall -O2')
    elif env['FORTRAN'] == 'xlf':
        env = Environment(FORTRANFLAGS='-qextern=trap')
    elif env['FORTRAN'] == 'ifort':
        ## I think the -module flag is correct ... untested ...
        env = Environment(FORTRANFLAGS='-O3 -module ' + jsondir)
    if DEBUG_ENV:
        for key, val in env.items():
            try:
                print( key, val)
            except Exception:
                # BUG FIX: this was a bare `except:` which also swallows
                # SystemExit/KeyboardInterrupt; only ignore ordinary
                # errors from printing odd values.
                pass
        sys.exit()
    if os.name == 'nt':
        env.PrependENVPath('PATH', os.environ['PATH'])
    return env
def __init__(self, *args, **kw):
    """Initialize a numpy-flavoured SCons Environment.

    Rejects an explicit 'tools' keyword (tool setup is handled internally),
    then derives the distutils-related build/install directories from the
    options already present in the environment (build_prefix, pkg_name,
    inplace, pkg_path, distutils_libdir, ...).
    """
    # Tools are managed by this class; an explicit 'tools' kwarg from the
    # caller is treated as an error.
    if kw.has_key('tools'):
        raise NumsconsError("NumpyEnvironment has received a tools "\
                "argument.")
    else:
        kw['tools'] = []
    Environment.__init__(self, *args, **kw)

    # Setting dirs according to command line options
    self['build_dir'] = pjoin(self['build_prefix'],
                              pkg_to_path(self['pkg_name']))

    # Set directory where to "install" built C extensions
    if self["inplace"]:
        # In-place builds drop extensions next to the package sources,
        # anchored at the SCons top directory.
        self['distutils_installdir'] = pjoin(str(self.fs.Top),
                                             self["pkg_path"])
    else:
        if not self.has_key('distutils_installdir'):
            # XXX: compatibility with upstream numpy trunk - this can be
            # removed once install_clib is merged
            self["distutils_installdir"] = ''
        # NOTE(review): the '' written just above is immediately
        # overwritten here; presumably only the key's existence matters
        # for the upstream-compatibility path -- confirm.
        self['distutils_installdir'] = pjoin(self['distutils_libdir'],
                                             pkg_to_path(self['pkg_name']))

    if not self.has_key('distutils_install_prefix'):
        # XXX: compatibility with upstream numpy trunk - this can be
        # removed once install_clib is merged
        self["distutils_install_prefix"] = ''
    else:
        # Anchor a caller-supplied prefix at the SCons top directory.
        self["distutils_install_prefix"] = os.path.abspath(
            pjoin(str(self.fs.Top), self["distutils_install_prefix"]))

    # This will keep our compiler dependent customization (optimization,
    # warning, etc...)
    self['NUMPY_CUSTOMIZATION'] = {}

    self._set_sconsign_location()
    self._customize_scons_env()
    set_logging(self)
def build_moduletest(test, arch):
    """
    Build a unit test for the given module sources, targeting `arch`.

    Compiles the module and its support sources with the xc16 toolchain,
    generates a main.c test runner from the first source file, links the
    unity-harness test into an ELF, runs it under the momo-picunit
    simulator, and distills the raw log into a status file, whose SCons
    node path is returned.
    """
    # SCons node paths ('#' anchors them at the repository root).
    rawlog = '#' + test.get_path('rawlog', arch)  # NOTE(review): appears unused -- confirm
    log_node = '#' + test.get_path('log', arch)
    status_node = '#' + test.get_path('status', arch)
    elf_node = '#' + test.get_path('elf', arch)
    objdir = test.build_dirs(arch)['objects']

    # From here on we build against the test-flavoured architecture.
    arch = arch.retarget(add=['test'])

    unit_env = Environment(tools=['xc16_compiler', 'xc16_assembler',
                                  'xc16_linker'],
                           ENV = os.environ)
    tester_env = Environment(tools=['xc16_compiler'], ENV = os.environ)
    unit_env['ARCH'] = arch
    tester_env['ARCH'] = arch
    unit_env['OUTPUT'] = '%s.elf' % test.name

    # Compile every source; the module under test (first file) is built
    # with the bare compiler environment, everything else with the full one.
    object_nodes = []
    for source in test.files + arch.extra_sources():
        base, _ = os.path.splitext(os.path.basename(source))
        obj_node = '#' + os.path.join(objdir, base + '.o')
        compiler = tester_env if source == test.files[0] else unit_env
        object_nodes.append(compiler.xc16_gcc(obj_node, source))

    # Generate main.c programmatically to run the test.
    runner_src = '#' + os.path.join(objdir, 'main.c')
    runner_obj = '#' + os.path.join(objdir, 'main.o')
    unit_env.Command(runner_src, test.files[0],
                     action=unit_env.Action(build_moduletest_main,
                                            "Creating test runner"))
    object_nodes.append(unit_env.xc16_gcc(runner_obj, runner_src))

    # Link the test, run it under the simulator, and build a status file.
    unit_env.xc16_ld(elf_node, object_nodes)
    run_cmd = r"momo-picunit %s '%s' '%s'" % (arch.property('simulator_model'),
                                              elf_node[1:], log_node[1:])
    unit_env.Command(log_node, elf_node,
                     action=unit_env.Action(run_cmd, "Running unit test"))
    unit_env.Command(status_node, log_node,
                     action=unit_env.Action(process_log,
                                            'Processing log file'))
    return status_node
def __init__(self, *args, **kwargs):
    """Construct the racy build Environment.

    Merges racy's tool paths, command-line options, and default tools into
    the keyword arguments before delegating to the base Environment, then
    installs racy-specific include-flag handling, the copy builder, plugin
    add-ons, and the project database.
    """
    from os.path import abspath, dirname, join
    import racy
    import racy.renv as renv
    import racy.rlog as rlog
    from racy.renv import constants
    from racy.renv.options import get_option
    from racy.rproject import RacyProjectsDB

    # racy's own tool directories take precedence over caller-supplied ones.
    kwargs['toolpath'] = renv.toolpath() + kwargs.get('toolpath',[])

    # Callbacks registered during construction (e.g. by plugins); they are
    # invoked once initialization is complete -- see end of this method.
    self._callbacks = []

    # Forward selected command-line options as construction variables.
    for opt in ['DEBUG', 'TOOL', 'MSVC_VERSION','MSSDK_VERSION']:
        kwargs[opt] = get_option(opt)

    kwargs.setdefault('tools',[])
    kwargs['tools'] += env_tools
    kwargs.update(env_vars)

    kwargs['ARCH'] = get_option('ARCH')

    # Optional C++ compiler override from the command line.
    cxx = get_option('CXX')
    if cxx :
        kwargs['CXX'] = cxx

    Env.__init__(self, *args, **kwargs)

    # Extend include-flag expansion so CPP_LIBEXT_PATH entries are emitted
    # alongside the regular CPPPATH include flags.
    FLAGS = (
        '$( ${{_concat(INCPREFIX, {0}, INCSUFFIX, '
        '__env__, RDirs, TARGET, SOURCE)}} $)'
    )
    self['CPP_LIBEXT_PATH'] = []
    self['_CPPINCFLAGS'] = (
        FLAGS.format('CPPPATH') + FLAGS.format('CPP_LIBEXT_PATH')
    )

    # Builder used to install files by copying them.
    act = self.Action( CopyBuilder, "Install '$$SOURCE' as '$$TARGET'")
    self.__CopyBuilder__ = self.Builder(
        action = act,
        source_factory = self.Entry,
        target_factory = self.Entry,
    )

    # Let plugins extend the environment; a True result means a plugin
    # fully handled this invocation, so racy exits immediately.
    res = racy.rplugins.register.get_env_addons(self)
    if True in res:
        racy.exit_racy(0)

    # Support force-included headers on the compiler command line.
    self['_FORCE_INCLUDE'] = '${_defines(FORCE_INCLUDE_PREFIX, FORCE_INCLUDE, FORCE_INCLUDE_SUFFIX, __env__)}'
    self['_CCCOMCOM'] = self['_CCCOMCOM'] + ' ${_FORCE_INCLUDE}'

    # Heavy setup (toolchain, project DB) is skipped when only printing help.
    if not self.GetOption('help'):
        # Resolve 'auto' to the platform's default toolchain.
        tool = self['TOOL']
        if tool == 'auto':
            tool = constants.SYSTEM_DEFAULT_TOOL[renv.system()]
        self.Tool(tool)

        self.Decider('MD5-timestamp')

        # Project database; its lookup participates in target resolution.
        self.prj_db = db = RacyProjectsDB(env = self)
        self.lookup_list.append( db.target_lookup )

    # 'auto' parallelism defaults to one job per CPU core.
    num_jobs = get_option('JOBS')
    if num_jobs == 'auto':
        import multiprocessing
        num_jobs = multiprocessing.cpu_count()
    self.SetOption('num_jobs', num_jobs)

    # allow use of cached md5 after 600 sec (instead of 2 days).
    self.SetOption('max_drift', 600)

    # Place the SCons signature database in the build directory unless
    # overridden on the command line.
    racy_db_file_default = join(racy.renv.dirs.build, '.sconsign.dblite')
    racy_db_file = get_option('RACY_DBFILE')
    if not racy_db_file:
        racy_db_file = racy_db_file_default
    self.SConsignFile(racy_db_file)
    rlog.info.log(".sconsign file", racy_db_file)

    import os
    from racy.renv.configs.commandline import get_opts_help
    self.Help( (os.linesep*2).join(get_opts_help()) )

    # Run the callbacks collected during construction.
    # NOTE: relies on Python 2's eager map() for the side effects.
    map(lambda f:f(self), self._callbacks)
    del self._callbacks
def build_unittest(test, arch, summary_env, cmds=None):
    """
    Build a hex file from the source files test_files for the indicated chip.

    If cmds is passed, replace the generic run command to gpsim with the
    commands listed in the passed file.

    The test is compiled against the opposite ROM half from the component
    under test (executive tests run as an app and vice versa), merged with
    the prebuilt counterpart hex, run under gpsim, and its log processed
    into a status file that is appended to summary_env['TESTS'].
    """
    #Extract information from test
    #Allow files to be renamed with architecture extensions automatically
    # (find_support_file returns None for files without an arch variant;
    # those are dropped here)
    test_files = filter(lambda x: x is not None,
                        map(lambda x: test.find_support_file(x, arch),
                            test.files))
    name = test.name
    type = test.type  # NOTE: shadows the builtin `type` within this function

    env = Environment(tools=['xc8_compiler', 'patch_mib12', 'merge_mib12_app',
                             'merge_mib12_sym', 'gpsim_runner'],
                      ENV = os.environ)
    env['ORIGINAL_ARCH'] = arch
    env['TEST'] = test

    #Configure for app module or exec
    if type.startswith("executive"):
        # Testing the executive: build the test itself as an app and pull
        # in the executive-side harness sources.
        orig_name = 'mib12_executive_symbols'
        env['ARCH'] = arch.retarget(remove=['exec'], add=['app'])
        pic12.configure_env_for_xc8(env, force_app=True)
        test_harness = ['../test/pic12/exec_harness/mib12_exec_unittest.c',
                        '../test/pic12/exec_harness/mib12_api.as',
                        '../test/pic12/exec_harness/mib12_exec_unittest_startup.as',
                        '../test/pic12/gpsim_logging/test_log.as',
                        '../test/pic12/gpsim_logging/test_mib.as']
    elif type == "application":
        # Testing an app module: build the test as an executive.
        orig_name = "mib12_app_module_symbols"
        env['ARCH'] = arch.retarget(remove=['app'], add=['exec'])
        pic12.configure_env_for_xc8(env, force_exec=True)
        test_harness = ['../test/pic12/app_harness/mib12_app_unittest.c',
                        '../test/pic12/app_harness/mib12_test_api.as',
                        '../test/pic12/gpsim_logging/test_log.as']
    else:
        raise BuildError("Invalid unit test type specified. Should start with executive or application.", type=type)

    orig_symfile = orig_name + '.h'
    orig_symtab = orig_name + '.stb'

    dirs = arch.build_dirs()
    builddir = dirs['build']
    testdir = os.path.join(dirs['test'], name, 'objects')
    finaldir = dirs['output']  # NOTE(review): appears unused here -- confirm
    outdir = os.path.join(dirs['test'], name)

    env.AppendENVPath('PATH','../../tools/scripts')

    # Include paths for the compiler: project sources plus the per-test
    # object dir (where symbols.h is copied below) and any arch extras.
    incs = []
    incs.append('.')
    incs.append('src')
    incs.append('src/mib')
    incs.append(testdir)
    incs.extend(arch.property('test_includes', []))
    env['INCLUDE'] += incs

    #Copy over the symbol file from the module we're testing so we can reference it
    symfile = Command(os.path.join(testdir, 'symbols.h'),
                      os.path.join(builddir, orig_symfile),
                      Copy("$TARGET", "$SOURCE"))
    testee_symtab = Command(os.path.join(testdir, 'symbols.stb'),
                            os.path.join(builddir, orig_symtab),
                            Copy("$TARGET", "$SOURCE"))
    # Merge the testee's symbol table with the unit test's own symbols.
    symtab = env.merge_mib12_symbols([os.path.join(outdir, 'symbols.stb')],
                                     [testee_symtab,
                                      os.path.join(testdir, name + '_unit.sym')])

    #Load in all of the xc8 configuration from build_settings
    sim = arch.property('gpsim_proc')
    env['TESTCHIP'] = sim
    env['TESTNAME'] = name
    env['TESTAPPEND'] = arch.arch_name()
    env['EXTRACMDS'] = cmds

    #Must do this in 1 statement so we don't modify test_files
    srcfiles = test_files + test_harness
    apphex = env.xc8(os.path.join(testdir, name + '_unit.hex'), srcfiles)
    env.Depends(apphex[0], symfile)

    # Pick which hex occupies the low (executive) and high (app) ROM halves.
    if type.startswith("executive"):
        app_start = env['CHIP'].app_rom[0] + 2

        #Executive integration tests run the entire executive startup routine
        #Regular integration tests skip it to save time (1 second delay for registration)
        #and to avoid triggering any bugs in the executive code since these are unit tests.
        #for specific routines.
        if type == "executive_integration":
            lowhex = os.path.join(builddir, 'mib12_executive_patched.hex')
        else:
            lowhex = env.Command(os.path.join(testdir, 'mib12_executive_local.hex'),
                                 os.path.join(builddir, 'mib12_executive_patched.hex'),
                                 action='python ../../tools/scripts/patch_start.py %d $SOURCE $TARGET' % app_start)
        highhex = apphex[0]
    else:
        lowhex = apphex[0]
        highhex = env.Command(os.path.join(testdir, 'mib12_app_module_local.hex'),
                              os.path.join(builddir, 'mib12_app_module.hex'),
                              Copy("$TARGET", "$SOURCE"))

    outhex = env.merge_mib12_app(os.path.join(outdir, name + '.hex'),
                                 [lowhex, highhex])
    outscript = env.Command([os.path.join(outdir, 'test.stc')], [outhex],
                            action=env.Action(build_unittest_script,
                                              "Building test script"))

    raw_log_path = os.path.join(outdir, build_logfile_name(env))
    raw_results = env.gpsim_run(raw_log_path, [outscript, outhex])  #include outhex so that scons knows to rerun this command when the hex changes

    formatted_log = env.Command([build_formatted_log_name(env),
                                 build_status_name(env)],
                                [raw_results, symtab],
                                action=env.Action(process_unittest_log,
                                                  "Processing test log"))

    #Add this unit test to the unit test summary command
    summary_env['TESTS'].append(build_status_name(env))

    #Remember to remove the test directory when cleaning
    #Also add any extra intermediate files that the unit test defines so that
    #those are cleaned up as well
    env.Clean(outscript, testdir)
    env.Clean(outscript, outdir)

    additional_files = test.get_intermediates(arch)
    for file in additional_files:  # NOTE: `file` shadows the Python 2 builtin
        env.Clean(outscript, file)