def platform(ctx, target, platform, **kwds):
    """Extend attributes of the task generator *target*, but only when the
    current system platform is listed in *platform*.

    Each keyword's value is appended (as a list) to the attribute of the
    same name on the target generator.
    """
    tgen = ctx.get_tgen_by_name(target)
    if unversioned_sys_platform() not in to_list(platform):
        return
    for attr, extra in kwds.items():
        merged = to_list(getattr(tgen, attr, []))
        setattr(tgen, attr, merged)
        merged.extend(to_list(extra))
def compose_starters(self):
    # Work out the console entry points ("starters") for this generator.
    # NOTE(review): indentation reconstructed from flattened source; the
    # elif/else chain is attached to `if mains:` since its condition reads
    # `self.parent`, which is only known safe when `parent` is truthy.
    parent = getattr(self, "parent", None)
    if parent:
        mains = getattr(self.parent, "main", None)
        if mains:
            # Explicit mains: qualify each with the package root name and
            # claim the matching slice of the parent's target list.
            self.starter = [self.root.name + "." + main
                            for main in to_list(mains)]
            self.target = pop(parent.target,
                              len(getattr(parent, "starter", ())),
                              len(mains))
        elif not hasattr(self.parent, 'starter'):
            # No explicit mains: scan the package tree for entry modules
            # (__main__.py files, or modules matching main_indicator).
            root = self.root
            base = root.parent

            def starter_path(node):
                # Map a candidate .py node to a slash-path fragment, or
                # None (implicit) when it is not an entry module.
                if node.name == "__main__.py":
                    return node.parent.path_from(base)
                elif main_indicator.search(node.read()):
                    return node.path_from(base)[:-3]

            self.starter = [starter.replace("/", ".")
                            for starter in map(starter_path,
                                               root.ant_glob("**/*.py"))
                            if starter]
        else:
            # Parent already carries starters: just resolve our targets.
            self.target = self.to_nodes(getattr(self, "target", ()),
                                        self.install_from,
                                        "find_or_declare")
    # Normalize the declared mains to a list in every case.
    self.main = to_list(getattr(self, "main", ()))
def BUILD_SHARED_LIBRARY(bld, *k, **kw):
    """Declare a shared-library task generator with project defaults.

    Requires ``kw['target']``; skips entirely when the APP project group is
    not selected via ``--proj-groups``.  Returns the result of
    ``bld.shlib`` (or None when skipped).

    Fixes over the original: the locals ``project_build_group`` and the
    duplicated 'set default then read back' branches were dead weight and
    are replaced with ``dict.setdefault``/``dict.get``.
    """
    if 'target' not in kw:
        bld.fatal("""\ntarget was not provided. Aborting! {error:208db39b}""")
    project_group = ProjectGroup.APP
    if project_group not in bld.options.proj_groups.split(','):
        return
    kw['project_group'] = project_group
    kw.setdefault('group', ProjectBuildGroup.MAIN)
    project_category = kw.setdefault('project_category', ProjectCategory.MAIN)
    kw = set_flags(bld, project_category, **kw)
    # Existing defines first, then the marker define for shared builds.
    kw['defines'] = to_list(kw.get('defines', [])) + ['BUILDING_SHARED_LIBRARY']
    if bld.env.PROJECT_OS_NAME == OSName.WIN:
        kw.setdefault('install_path', '${BINDIR}')
        if bld.options.with_sdk:
            # SDK builds ship the import library too.
            kw.setdefault('install_path_implib', '${LIBDIR}')
        else:
            kw['features'] = to_list(kw.get('features', [])) + ['suppress_import_library']
    else:
        kw.setdefault('install_path', '${LIBDIR}')
    if bld.options.with_sdk and 'sdk_install_callback' in kw:
        # Give the caller a hook to install extra SDK artifacts.
        kw['sdk_install_callback'](bld)
    return bld.shlib(*k, **kw)
def build_f2py(bld, source, module_name, extra_sources, skip=None, only=None, symlink=False, **kwargs): """ Build an f2py extension with waf. Arguments: ---------- source - the name of the file being wrapped module_name - the name of the module being produced extra-sources - other things to compile and link along with the extension skip/only - skip/only wrap certain functions symlink - symlink the library into the source folder after building **kwargs - passed to the build command """ #use f2py to create the wrapper skip = 'skip: %s'%' '.join(to_list(skip)) if skip is not None else '' only = 'only: %s'%' '.join(to_list(only)) if only is not None else '' bld(rule=('${{F2PY}} --build-dir ${{TGT[0].parent.abspath()}} --quiet ' '-m {MODULENAME} ${{SRC}} {ONLY} {SKIP}').format(MODULENAME=module_name, ONLY=only, SKIP=skip), source=source, target='{MODULENAME}module.c {MODULENAME}-f2pywrappers2.f90'.format(MODULENAME=module_name)) #copy over f2py files fortranobject.c and fortranobject.h import numpy.f2py.cfuncs as cfuncs f2pydir = osp.join(osp.dirname(osp.abspath(cfuncs.__file__)),'src') for x in 'ch': bld(rule='cp -L ${SRC} ${TGT}', source=bld.root.find_node(osp.join(f2pydir,'fortranobject.'+x)), target='fortranobject.'+x) #build the library bld(features='c fc fcshlib', source=('{SRC} ' 'fortranobject.c ' '{MODULENAME}module.c ' '{MODULENAME}-f2pywrappers2.f90').format(SRC=source, MODULENAME=module_name).split() + to_nodes(bld,extra_sources), target=module_name, includes=[bld.root.find_node(numpy.get_include())]+to_nodes(bld,kwargs.pop('includes',[])), use=['PYEXT']+to_list(kwargs.pop('use',[])), install_path=kwargs.pop('install_path',None) or osp.join('${PYTHONDIR}',bld.path.path_from(bld.root.find_node(bld.top_dir))), **kwargs) if symlink: bld.symlink_as(bld.path.get_src().make_node('%s.so'%module_name).abspath(), bld.path.get_bld().make_node('%s.so'%module_name).abspath())
def to_nodes(self, lst, path=None, search_fun="find_resource"):
    """This is exactly to_nodes, but finding directories without generating
    exceptions."""
    lookup = getattr(path or self.path, search_fun)
    items = [lst] if isinstance(lst, Node) else lst
    resolved = []
    for entry in to_list(items):
        resolved.append(lookup(entry) if isinstance(entry, str) else entry)
    return resolved
def BUILD_DOCUMENTATION(bld, *k, **kw):
    """Declare a doxygen documentation task generator with project defaults.

    Requires ``kw['target']``; does nothing unless the DOC project group is
    selected via ``--proj-groups``.
    """
    if 'target' not in kw:
        bld.fatal("""\ntarget was not provided. Aborting! {error:65e041c7}""")
    project_group = ProjectGroup.DOC
    if project_group not in bld.options.proj_groups.split(','):
        return
    kw['project_group'] = project_group
    kw['project_category'] = None
    # Append the doxygen feature after any caller-supplied ones.
    kw['features'] = to_list(kw.get('features', [])) + ['doxygen']
    kw.setdefault('doxyfile', 'doxygen.conf')
    kw.setdefault('install_path', '${DOCSDIR}' + os.sep + kw['target'])
    return bld(*k, **kw)
def generate_python_starter(self):
    # For each starter module, create a shell entry point plus a gzipped
    # manpage, and schedule both for installation.
    env = self.env
    modules = to_list(getattr(self, "starter", []))
    # Pair each module with a target node: explicit self.target entries are
    # consumed first, then fresh nodes are declared for the remaining
    # modules (dots in the module path become dashes in the file name).
    for module, target in zip(
            modules,
            chain(self.target,
                  map(self.install_from.find_or_declare,
                      (module.replace(".", "-")
                       for module in modules[len(self.target):])))):
        modenv = env.derive()
        modenv.MODULE = module
        modenv.append_value("MANPAGERFLAGS", ('-p', target.name))

        def create_task(*args, **kwargs):
            # All tasks for this starter share the per-module environment;
            # the closure is called within the same loop iteration only.
            self.create_task(*args, env=modenv, **kwargs)

        starter = target.change_ext('.sh')
        create_task('entrypynt', tgt=starter)
        self.bld.install_as(subst_vars("${BINDIR}/", env) + target.name,
                            starter, chmod=O755)
        manpage = target.change_ext('.1')
        create_task('manpyge', tgt=manpage)
        compressed = target.change_ext('.1.gz')
        create_task('gz', src=manpage, tgt=compressed)
        self.bld.install_files(subst_vars("${MANDIR}/man1", env), compressed)
def make_package(bld, name, use=''):
    # Declare a conventional package layout: headers in inc/<name>/, sources
    # in src/, optional ROOT dictionary in dict/, tests in test(s)/ and
    # standalone programs in apps/.
    use = to_list(use) + ['ROOTSYS']
    includes = []
    headers = []
    source = []
    incdir = bld.path.find_dir('inc')
    srcdir = bld.path.find_dir('src')
    dictdir = bld.path.find_dir('dict')
    testsrc = bld.path.ant_glob('test/test_*.cxx') + bld.path.ant_glob('tests/test_*.cxx')
    appsdir = bld.path.find_dir('apps')
    if incdir:
        headers += incdir.ant_glob(name + '/*.h')
        includes += ['inc']
        bld.env['INCLUDES_' + name] = [incdir.abspath()]
    if headers:
        bld.install_files('${PREFIX}/include/%s' % name, headers)
    if srcdir:
        source += srcdir.ant_glob('*.cxx')
    if dictdir:
        # A ROOT dictionary needs headers plus a LinkDef.h in dict/.
        if not headers:
            error('No header files for ROOT dictionary "%s"' % name)
        linkdef = dictdir.find_resource('LinkDef.h')
        bld.gen_rootcling_dict(name, linkdef,
                               headers=headers,
                               includes=includes,
                               use=use)
        # Generated dictionary source is compiled into the library.
        source.append(name + 'Dict.cxx')
    if incdir and srcdir:
        # The package shared library itself.
        bld(features='cxx cxxshlib',
            name=name,
            source=source,
            target=name,
            includes='inc',
            export_includes='inc',
            use=use)
    if testsrc:
        for test_main in testsrc:
            bld.program(features='test',
                        source=[test_main],
                        target=test_main.name.replace('.cxx', ''),
                        install_path=None,
                        includes='inc',
                        use=use + [name])
    if appsdir:
        for app in appsdir.ant_glob('*.cxx'):
            bld.program(source=[app],
                        target=app.name.replace('.cxx', ''),
                        includes='inc',
                        use=use + [name])
def run(self):
    """Install a (GNOME-shell-style) extension from its metadata file.

    Categorizes the input nodes into src-tree/bld-tree, installs both sets
    under the uuid-specific extension directory, and hands any GSettings
    schemas to the glib2 tool.

    BUGFIX: the "found both below" error formatted ``nothing`` instead of
    ``both``, so the message listed the wrong files.
    """
    more_tasks = self.more_tasks = []
    gen = self.generator
    bld = gen.bld
    env = gen.env
    metadata_node = self.inputs[0]
    metadata = metadata_node.read_json()
    # Retrieve uuid: the generator attribute wins over the metadata entry.
    uuid = getattr(gen, "uuid", None) or metadata["uuid"]
    if not uuid:
        raise WafError("missing uuid in {}".format(self))
    if bld.is_install:
        # Retrieve and categorize sources. Installation has to look at their
        # hierarchy from the correct root to install generated files into
        # the same location as static ones.
        path = gen.path
        # NOTE(review): `getattr(self, 'data', [])` reads the task, not the
        # generator — confirm whether `gen.data` was intended.
        nothing, srcnodes, bldnodes, both = partition(
            categories=4,
            items=chain((metadata_node, ),
                        bld.node_deps[self.uid()],
                        gen.to_nodes(getattr(self, 'data', []))),
            # The is_src and is_bld predicates are combined like binary
            # flags to end up with an integral predicate.
            predicate=lambda source: source.is_src() + 2 * source.is_bld())
        # Check for sources manually added outside the extension tree.
        bldpath = path.get_bld()
        nothing = tuple(nothing)
        if nothing:
            raise WafError("files {} neither found below {} nor {}".format(
                ', '.join(map(str, nothing)), path, bldpath))
        both = tuple(both)
        if both:
            # BUGFIX: previously formatted `nothing` here.
            raise WafError("files {} found both below {} and {}".format(
                ', '.join(map(str, both)), path, bldpath))
        # Install both categories below the uuid-specific directory.
        target = env.EXTDIR.format(uuid)
        install = partial(gen.add_install_files,
                          install_to=target,
                          relative_trick=True)
        more_tasks += [install(install_from=srcnodes),
                       install(install_from=bldnodes, cwd=bldpath)]
    # Collect schemas declared on the generator plus the one referenced by
    # the metadata, and pass them on to the glib2 tool for processing.
    schemas = to_list(getattr(gen, 'schemas', []))
    if "settings-schema" in metadata:
        schemas += [metadata["settings-schema"] + '.gschema.xml']
    if schemas:
        # Use a distinct name: rebinding `gen` here shadowed the original
        # generator in the previous version.
        schema_gen = bld(features="glib2", settings_schema_files=schemas)
        schema_gen.env.GSETTINGSSCHEMADIR = env.SCHEMADIR.format(uuid)
        schema_gen.post()
        more_tasks += schema_gen.tasks
def bin(self, target, fxs, mib, ext_in=['.mib', '.fxs'], ext_out='.bin', **kw):
    """Schedule a confdc compilation of *mib* and *fxs* into *target*."""
    mib_opts = ' '.join(to_list(confdc_mib_options))
    cmd = '${CONFDC} -c ${SRC} %s -o ${TGT} ' % mib_opts
    self(group='early0',
         features='use confdc',
         source=[mib, fxs],
         target=target,
         color='YELLOW',
         shell=True,
         rule=cmd,
         ext_in=ext_in,
         ext_out=ext_out,
         **kw)
def compose_starters(self):
    # Work out the console entry points ("starters") for this generator.
    # NOTE(review): indentation reconstructed from flattened source; the
    # elif/else chain is attached to `if mains:` since its condition reads
    # `self.parent`, which is only known safe when `parent` is truthy.
    parent = getattr(self, "parent", None)
    if parent:
        mains = getattr(self.parent, "main", None)
        if mains:
            # Explicit mains: qualify each with the package root name and
            # claim the matching slice of the parent's target list.
            self.starter = [self.root.name + "." + main
                            for main in to_list(mains)]
            self.target = pop(parent.target,
                              len(getattr(parent, "starter", ())),
                              len(mains))
        elif not hasattr(self.parent, 'starter'):
            # No explicit mains: scan the package tree for entry modules
            # (__main__.py files, or modules matching main_indicator).
            root = self.root
            base = root.parent

            def starter_path(node):
                # Map a candidate .py node to a slash-path fragment, or
                # None (implicit) when it is not an entry module.
                if node.name == "__main__.py":
                    return node.parent.path_from(base)
                elif main_indicator.search(node.read()):
                    return node.path_from(base)[:-3]

            self.starter = [starter.replace("/", ".")
                            for starter in map(starter_path,
                                               root.ant_glob("**/*.py"))
                            if starter]
        else:
            # Parent already carries starters: just resolve our targets.
            self.target = self.to_nodes(getattr(self, "target", ()),
                                        self.install_from,
                                        "find_or_declare")
    # Normalize the declared mains to a list in every case.
    self.main = to_list(getattr(self, "main", ()))
def check_library_func(conf, library, function, use, envvars=None):
    """Check that *function* provided by *library* links successfully.

    On failure, aborts configuration with a message listing the relevant
    environment variables (LIB/LIBPATH/LINKFLAGS plus their per-use
    variants, plus any caller-supplied *envvars*).

    BUGFIX: the original extended *envvars* in place with ``+=``, mutating
    the caller's list across invocations; a copy is taken instead.
    """
    checked = list(envvars) if envvars is not None else []
    checked += ['LIB', 'LIBPATH', 'LINKFLAGS']
    for u in to_list(use):
        checked += ['LIB_%s' % u, 'LIBPATH_%s' % u, 'LINKFLAGS_%s' % u]
    try:
        conf.check(fragment='int main(){ %s(); }' % function,
                   msg="Checking for library %s" % library,
                   okmsg="found function %s" % function,
                   errmsg="couldn't find function %s" % function,
                   use=use)
    except conf.errors.ConfigurationError:
        conf.fatal(('%s was not found on your system. '
                    'Check that it is installed and that the following '
                    'environment variables are set correctly:\n') % library +
                   '\n'.join(['%s = %s' % (x, ' '.join(getattr(conf.env, x, '')))
                              for x in sorted(set(checked))]))
def generate_python_starter(self):
    # For each starter module, create a shell entry point plus a gzipped
    # manpage, and schedule both for installation.
    env = self.env
    modules = to_list(getattr(self, "starter", []))
    # Pair each module with a target node: explicit self.target entries are
    # consumed first, then fresh nodes are declared for the remaining
    # modules (dots in the module path become dashes in the file name).
    for module, target in zip(
            modules,
            chain(self.target,
                  map(self.install_from.find_or_declare,
                      (module.replace(".", "-")
                       for module in modules[len(self.target):])))):
        modenv = env.derive()
        modenv.MODULE = module
        modenv.append_value("MANPAGERFLAGS", ('-p', target.name))

        def create_task(*args, **kwargs):
            # All tasks for this starter share the per-module environment;
            # the closure is called within the same loop iteration only.
            self.create_task(*args, env=modenv, **kwargs)

        starter = target.change_ext('.sh')
        create_task('entrypynt', tgt=starter)
        self.bld.install_as(subst_vars("${BINDIR}/", env) + target.name,
                            starter, chmod=O755)
        manpage = target.change_ext('.1')
        create_task('manpyge', tgt=manpage)
        compressed = target.change_ext('.1.gz')
        create_task('gz', src=manpage, tgt=compressed)
        self.bld.install_files(subst_vars("${MANDIR}/man1", env), compressed)
def install_dir(self, dest, paths, env=None, chmod=O755, cwd=None, add=True,
                postpone=True, task=None):
    """
    Create a task to install directories on the system::

        def build(bld):
            bld.install_dir('${DATADIR}', '${PACKAGE}')

    :param dest: absolute path of the destination directory
    :type dest: string
    :param paths: paths of directories to create
    :type paths: list of strings or space separated string
    :param env: configuration set for performing substitutions in dest
    :type env: Configuration set
    :param chmod: mode for directories to set
    :type chmod: int
    :param cwd: parent node for searching srcfile, when srcfile is not a
        :py:class:`waflib.Node.Node`
    :type cwd: :py:class:`waflib.Node.Node`
    :param add: add the task created to a build group - set ``False`` only
        if the installation task is created after the build has started
    :type add: bool
    :param postpone: execute the task immediately to perform the installation
    :type postpone: bool
    """
    assert (dest)
    tsk = inst(env=env or self.env)
    tsk.bld = self
    tsk.path = cwd or self.path
    tsk.chmod = chmod
    tsk.task = task
    tsk.dest = dest
    # No source files: this task only creates directories.
    tsk.source = []
    tsk.paths = to_list(paths)
    # Directory creation uses a dedicated executor instead of the default
    # file-install one.
    tsk.exec_task = tsk.exec_install_dir
    if add:
        self.add_to_group(tsk)
    self.run_task_now(tsk, postpone)
    return tsk
def EMBED_RESOURCES(bld, *k, **kw):
    """Declare a resource-embedding task generator with project defaults.

    Does nothing unless the RES project group is selected via
    ``--proj-groups``.
    """
    project_group = ProjectGroup.RES
    if project_group not in bld.options.proj_groups.split(','):
        return
    kw['project_group'] = project_group
    kw['group'] = ProjectBuildGroup.RESOURCES
    kw['project_category'] = ProjectCategory.MAIN
    # Append the embedding features after any caller-supplied ones.
    kw['features'] = to_list(kw.get('features', [])) + ['embedres', 'c', 'cxx']
    kw = set_flags(bld, ProjectCategory.MAIN, **kw)
    return bld(*k, **kw)
def configure(conf):
    """Configure the build: check python/numpy, forward flag-like environment
    variables, then configure either one requested plugin or all of them and
    report the outcome.

    BUGFIX: the status report used Python-2-only ``print`` statements while
    sibling configure functions use ``print()`` calls; converted for
    consistency and Python 3 compatibility.
    """
    conf.env.PYTHON = sys.executable
    conf.load('python compiler_c')
    conf.check_python_version((2, 7))
    conf.check_python_module('numpy', 'ver >= num(1,5)')
    # Forward compiler/linker-related environment variables into the build env.
    for key, value in conf.environ.items():
        if any(key.startswith(prefix)
               for prefix in ['LIB', 'LIBPATH', 'LINKFLAGS', 'CFLAGS', 'FCFLAGS']):
            conf.env.append_value(key, to_list(value))
    if conf.options.PLUGIN is not None:
        # Configure only the explicitly requested plugin.
        success = [conf.srcnode.find_node("cosmoslik_plugins").make_node(conf.options.PLUGIN)]
        fail = []
        conf.recurse(success[0].abspath())
    else:
        if conf.options.EXCLUDE is not None:
            exclude = [conf.srcnode.find_node("cosmoslik_plugins/").find_node(e)
                       for e in conf.options.EXCLUDE]
        else:
            exclude = []
        success, fail = recurse(conf, keep_going=True, exclude=exclude)
    # Report the result with ANSI colors (bold/green/yellow).
    plugins_root = conf.srcnode.find_node("cosmoslik_plugins/")
    if len(success) > 0:
        sys.stdout.write('\033[1m')
        print("The following plugins are ready to build:")
        sys.stdout.write('\033[92m')
        for f in success:
            print("    " + f.path_from(plugins_root))
        sys.stdout.write('\033[0m')
    if len(fail) > 0:
        sys.stdout.write('\033[1m')
        print("The following plugins can't be built (ignore if not needed):")
        sys.stdout.write('\033[93m')
        for f in fail:
            print("    " + f.path_from(plugins_root))
        sys.stdout.write('\033[0m')
        print("Run './waf configure --plugin PLUGIN' to see why a given plugin can't build.")
        print("where PLUGIN is exactly as it appears above.")
    conf.env.configured_plugins = [f.path_from(conf.srcnode) for f in success]
    if conf.options.inplace:
        # In-place build: install python modules next to the sources.
        conf.env.PYTHONDIR = '.'
        conf.env.INPLACE = True
def BUILD_PLUGIN(bld, *k, **kw):
    """Declare a plugin: a shared library installed under PLUGINDIR with a
    ``.plugin`` file name pattern.

    Requires ``kw['target']``; skipped unless the APP project group is
    selected via ``--proj-groups``.
    """
    if 'target' not in kw:
        bld.fatal("""\ntarget was not provided. Aborting! {error:f1e16874}""")
    project_group = ProjectGroup.APP
    if project_group not in bld.options.proj_groups.split(','):
        return
    kw['project_group'] = project_group
    kw.setdefault('group', ProjectBuildGroup.MAIN)
    kw = set_flags(bld, ProjectCategory.MAIN, **kw)
    # Plugins never ship import libraries.
    kw['features'] = to_list(kw.get('features', [])) + ['suppress_import_library']
    kw.setdefault('install_path', '${PLUGINDIR}' + os.sep + kw['target'])
    if bld.options.with_sdk and 'sdk_install_callback' in kw:
        bld.fatal(
            """\n'sdk_install_callback' is not supported when building plugins. Aborting! {error:9d05519a}"""
        )
    plugin = bld.BUILD_SHARED_LIBRARY(*k, **kw)
    plugin.env.cxxshlib_PATTERN = '%s.plugin'
    plugin.env.cshlib_PATTERN = '%s.plugin'
    return plugin
def _configure(ctx, name, incs=(), libs=(), pcname=None, mandatory=True, uses=""):
    # Locate an external dependency either via pkg-config or via the
    # --with-<name>[-include|-lib] command-line options, then verify the
    # headers/libs with compile checks, populating INCLUDES_/LIBPATH_/LIB_
    # env variables under the upper-cased name.
    lower = name.lower()
    UPPER = name.upper()
    if pcname is None:
        pcname = lower
    inst = getattr(ctx.options, 'with_' + lower, None)
    inc = getattr(ctx.options, 'with_%s_include' % lower, None)
    lib = getattr(ctx.options, 'with_%s_lib' % lower, None)
    if mandatory:
        if inst:
            assert (inst.lower() not in ['no', 'off', 'false'])
    else:
        # optional: silently skip when nothing was requested or it was
        # explicitly disabled
        if not any([inst, inc, lib]):
            return
        if inst and inst.lower() in ['no', 'off', 'false']:
            return
    # rely on package config
    if not any([inst, inc, lib]) or (inst and inst.lower() in ['yes', 'on', 'true']):
        ctx.start_msg('Checking for %s in PKG_CONFIG_PATH' % name)
        args = "--cflags"
        if libs:
            # things like eigen may not have libs
            args += " --libs"
        ctx.check_cfg(package=pcname, uselib_store=UPPER, args=args,
                      mandatory=mandatory)
        if 'HAVE_' + UPPER in ctx.env:
            ctx.end_msg("found")
        else:
            ctx.end_msg("pkg-config failed to find %s" % pcname)
    else:
        # do manual setting: derive include/lib dirs from the install prefix
        # when they were not given explicitly
        if incs:
            if not inc and inst:
                inc = osp.join(inst, 'include')
            if inc:
                setattr(ctx.env, 'INCLUDES_' + UPPER, [inc])
        if libs:
            if not lib and inst:
                lib = osp.join(inst, 'lib')
            if lib:
                setattr(ctx.env, 'LIBPATH_' + UPPER, [lib])
    # now check, this does some extra work in the case of pkg-config
    uses = [UPPER] + to_list(uses)
    if incs:
        ctx.start_msg("Location for %s headers" % name)
        for tryh in incs:
            ctx.check_cxx(header_name=tryh, use=uses, uselib_store=UPPER,
                          mandatory=mandatory)
        ctx.end_msg(str(getattr(ctx.env, 'INCLUDES_' + UPPER, None)))
    if libs:
        ctx.start_msg("Location for %s libs" % name)
        for tryl in libs:
            ctx.check_cxx(lib=tryl, use=uses, uselib_store=UPPER,
                          mandatory=mandatory)
        ctx.end_msg(str(getattr(ctx.env, 'LIBPATH_' + UPPER, None)))
        ctx.start_msg("Libs for %s" % name)
        ctx.end_msg(str(getattr(ctx.env, 'LIB_' + UPPER)))
def smplpkg(bld, name, use='', app_use='', test_use=''):
    # Declare a "simple package": a shared library built from inc/+src/,
    # an optional ROOT dictionary, standalone apps, compiled tests, script
    # tests and jsonnet tests. app_use/test_use extend the base 'use' list.
    use = list(set(to_list(use)))
    app_use = list(set(use + to_list(app_use)))
    test_use = list(set(use + to_list(test_use)))
    includes = []
    headers = []
    source = []
    incdir = bld.path.find_dir('inc')
    srcdir = bld.path.find_dir('src')
    dictdir = bld.path.find_dir('dict')
    testsrc = bld.path.ant_glob('test/test_*.cxx')
    test_scripts = bld.path.ant_glob('test/test_*.sh') + bld.path.ant_glob(
        'test/test_*.py')
    test_jsonnets = bld.path.ant_glob('test/test*.jsonnet')
    appsdir = bld.path.find_dir('apps')
    if incdir:
        headers += incdir.ant_glob(name + '/*.h')
        includes += ['inc']
        bld.env['INCLUDES_' + name] = [incdir.abspath()]
    if headers:
        bld.install_files('${PREFIX}/include/%s' % name, headers)
    if srcdir:
        source += srcdir.ant_glob('*.cxx')
        source += srcdir.ant_glob('*.cu')  # cuda
    # fixme: I should move this out of here.
    # root dictionary
    if dictdir:
        if not headers:
            error('No header files for ROOT dictionary "%s"' % name)
        if 'ROOTSYS' in use:
            linkdef = dictdir.find_resource('LinkDef.h')
            bld.gen_rootcling_dict(name,
                                   linkdef,
                                   headers=headers,
                                   includes=includes,
                                   use=use)
            source.append(bld.path.find_or_declare(name + 'Dict.cxx'))
        else:
            warn(
                'No ROOT dictionary will be generated for "%s" unless "ROOTSYS" added to "use"'
                % name)

    def get_rpath(uselst, local=True):
        # Collect rpath entries: PREFIX/lib, each used package's LIBPATH_*,
        # and (when local) the build directories of sibling WireCell*
        # packages.
        ret = set([bld.env["PREFIX"] + "/lib"])
        for one in uselst:
            libpath = bld.env["LIBPATH_" + one]
            for l in libpath:
                ret.add(l)
            if local:
                if one.startswith("WireCell"):
                    sd = one[8:].lower()
                    blddir = bld.path.find_or_declare(bld.out_dir)
                    pkgdir = blddir.find_or_declare(sd).abspath()
                    ret.add(pkgdir)
        ret = list(ret)
        return ret

    # the library
    if incdir and srcdir:
        bld(features='cxx cxxshlib',
            name=name,
            source=source,
            target=name,
            rpath=get_rpath(use),
            includes='inc',
            export_includes='inc',
            use=use)
    if appsdir:
        for app in appsdir.ant_glob('*.cxx'):
            bld.program(source=[app],
                        target=app.name.replace('.cxx', ''),
                        includes='inc',
                        rpath=get_rpath(app_use + [name], local=False),
                        use=app_use + [name])
    if (testsrc or test_scripts) and not bld.options.no_tests:
        for test_main in testsrc:
            rpath = get_rpath(test_use + [name])
            bld.program(features='test',
                        source=[test_main],
                        ut_cwd=bld.path,
                        target=test_main.name.replace('.cxx', ''),
                        install_path=None,
                        rpath=rpath,
                        includes=['inc', 'test', 'tests'],
                        use=test_use + [name])
        for test_script in test_scripts:
            # Python test scripts run under ${PYTHON}, the rest under bash.
            interp = "${BASH}"
            if test_script.abspath().endswith(".py"):
                interp = "${PYTHON}"
            bld(features="test_scripts",
                ut_cwd=bld.path,
                test_scripts_source=test_script,
                test_scripts_template="pwd && " + interp + " ${SCRIPT}")
    if test_jsonnets and not bld.options.no_tests:
        for test_jsonnet in test_jsonnets:
            bld(features="test_scripts",
                ut_cwd=bld.path,
                test_scripts_source=test_jsonnet,
                test_scripts_template="pwd && wcsonnet ${SCRIPT}")
def external(ctx, exts):
    """Recurse into each named subdirectory of the ``ext`` folder."""
    if not hasattr(ctx, "ext_dir"):
        # Locate and cache the ext/ directory on first use.
        ctx.ext_dir = ctx.path.find_dir("ext")
    for name in to_list(exts):
        ctx.recurse(ctx.ext_dir.find_node(name).abspath())
def configure(cfg):
    # Top-level configure: load tools, configure each described package,
    # probe optional externals, then prune submodules whose optional
    # dependencies were not found.
    print('Compile options: %s' % cfg.options.build_debug)
    cfg.load('boost')
    cfg.load('smplpkgs')
    for name, args in package_descriptions:
        generic._configure(cfg, name, **args)
    # Optional heavyweight externals are only loaded when requested.
    if getattr(cfg.options, "with_libtorch", False) is False:
        print("sans libtorch")
    else:
        cfg.load('libtorch')
    if getattr(cfg.options, "with_cuda", False) is False:
        print("sans CUDA")
    else:
        cfg.load('cuda')
    if getattr(cfg.options, "with_kokkos", False) is False:
        print("sans KOKKOS")
    else:
        cfg.load('kokkos')
    if getattr(cfg.options, "with_root", False) is False:
        print("sans ROOT")
    else:
        cfg.load('rootsys')

    def haveit(one):
        # Record HAVE_<NAME> when the preceding check produced LIB_<NAME>.
        one = one.upper()
        if 'LIB_' + one in cfg.env:
            cfg.env['HAVE_' + one] = 1
            print('HAVE %s libs' % one)
        else:
            print('NO %s libs' % one)

    # Check for stuff not found in the wcb-generic way
    cfg.check_boost(
        lib='system filesystem graph thread program_options iostreams regex')
    haveit('boost')
    cfg.check(header_name="dlfcn.h",
              uselib_store='DYNAMO',
              lib=['dl'],
              mandatory=True)
    haveit('dynamo')
    cfg.check(features='cxx cxxprogram', lib=['pthread'],
              uselib_store='PTHREAD')
    haveit('pthread')
    cfg.env.CXXFLAGS += to_list(cfg.options.build_debug)
    cfg.env.CXXFLAGS += ['-DEIGEN_FFTW_DEFAULT=1']
    cfg.env.LIB += ['z']
    submodules = find_submodules(cfg)
    # Remove WCT packages if an optional dependency wasn't found.
    for pkg, ext in [("root", "HAVE_ROOTSYS"), ("tbb", "HAVE_TBB"),
                     ("tbb", "LIB_FFTWTHREADS"), ("cuda", "HAVE_CUDA"),
                     ("hio", "INCLUDES_H5CPP"), ("pytorch", "LIB_LIBTORCH"),
                     ("zio", "LIB_ZIO LIB_ZYRE LIB_CZMQ LIB_ZMQ")]:
        exts = to_list(ext)
        for have in exts:
            if have in cfg.env or have in cfg.env.define_key:
                continue
            if pkg in submodules:
                print(
                    'Removing package "%s" due to lack of external dependency "%s"'
                    % (pkg, have))
                submodules.remove(pkg)
    cfg.env.SUBDIRS = submodules
    print('Configured for submodules: %s' % (', '.join(submodules), ))
    cfg.write_config_header('config.h')
def __init__(self, bld, target, args, srcs, expected):
    """Record the target name, argument list, resolved source nodes, the
    binary node (first argument) and the expected-output directory."""
    self.target = target
    self.args = to_list(args)
    source_names = to_list(srcs)
    self.srcs = [bld.path.find_resource(entry) for entry in source_names]
    self.binary = bld.path.find_or_declare(self.args[0])
    self.expected = bld.path.find_dir(expected)
def to_nodes(self, lst, path=None, search_fun="find_resource"):
    """This is exactly to_nodes, but finding directories without generating
    exceptions."""
    finder = getattr(path or self.path, search_fun)
    candidates = to_list([lst] if isinstance(lst, Node) else lst)
    return [finder(item) if isinstance(item, str) else item
            for item in candidates]
def set_flags(bld, proj_category, **kw):
    """Merge project-wide DEFINES/CFLAGS/CXXFLAGS/LINKFLAGS (plus warning
    flags for the C/C++ flag sets) into *kw*, de-duplicating while keeping
    first-seen order.

    BUGFIX: the fallback error branches called ``ctx.fatal`` but no ``ctx``
    exists in this scope (the parameter is ``bld``), so the intended fatal
    error raised NameError instead; ``bld.fatal`` is used now.
    """
    def pick_warning_flags(configured, err):
        # Unset project flags fall back to disabling all compiler warnings.
        if configured == []:
            if 'PROJECT_MSVC_VERSION_NAME' in bld.env:
                return ['/w']  # disables all compiler warnings
            if ('PROJECT_GCC_VERSION_NAME' in bld.env
                    or 'PROJECT_GXX_VERSION_NAME' in bld.env):
                return ['-w']  # disables all compiler warnings
            bld.fatal(err)
        return configured.split(',')

    if proj_category == ProjectCategory.MAIN:
        WARNING_FLAGS = pick_warning_flags(
            bld.env.PROJECT_WARNING_FLAGS_MAIN,
            """\nSomething has gone wrong. Aborting! {error:90cfbe48}.""")
    elif proj_category == ProjectCategory.THIRDYPARTY:
        WARNING_FLAGS = pick_warning_flags(
            bld.env.PROJECT_WARNING_FLAGS_THIRDPARTY,
            """\nSomething has gone wrong. Aborting! {error:b71a3be3}.""")
    else:
        # NOTE(review): the original left WARNING_FLAGS unbound for other
        # categories, raising NameError below; an empty list is the safe
        # equivalent of "no warning flags configured".
        WARNING_FLAGS = []

    def merge(attr, tail):
        # Caller-supplied flags first, then project flags, then the tail;
        # OrderedDict.fromkeys de-duplicates while preserving order.
        head = to_list(kw[attr]) if attr in kw else []
        return list(OrderedDict.fromkeys(
            head + get_flags(bld, proj_category, attr) + tail))

    kw[AttributeName.DEFINES] = merge(AttributeName.DEFINES, [])
    kw[AttributeName.CFLAGS] = merge(AttributeName.CFLAGS, WARNING_FLAGS)
    kw[AttributeName.CXXFLAGS] = merge(AttributeName.CXXFLAGS, WARNING_FLAGS)
    kw[AttributeName.LINKFLAGS] = merge(AttributeName.LINKFLAGS, [])
    return kw
def modelsim_vlog_prepare(self):
    """Prepare Modelsim compilation for this task generator: collect per-use
    (or global) vlog/vcom options, set up the worklib, translate Verilog
    search paths into ``+incdir+`` options, and split sources by HDL type
    into Verilog/SystemVerilog/VHDL compile tasks.

    BUGFIX: the original derived ``self.name`` from ``self.source[0]``
    *before* checking that ``source`` exists, so a missing ``source``
    attribute raised AttributeError instead of the intended
    ConfigurationError; the checks are now ordered correctly.
    """
    # Gather tool options, either per 'use' dependency or from the globals.
    if hasattr(self, 'use'):
        self.VLOG_SV_OPTIONS = []
        self.VLOG_V_OPTIONS = []
        self.VCOM_OPTIONS = []
        for use in to_list(self.use):
            for attr in ('VLOG_SV_OPTIONS', 'VLOG_V_OPTIONS', 'VCOM_OPTIONS'):
                key = attr + '_' + use
                if key in self.env:
                    getattr(self, attr).extend(self.env[key])
    else:
        self.VLOG_SV_OPTIONS = self.env.VLOG_SV_OPTIONS if 'VLOG_SV_OPTIONS' in self.env else []
        self.VLOG_V_OPTIONS = self.env.VLOG_V_OPTIONS if 'VLOG_V_OPTIONS' in self.env else []
        self.VCOM_OPTIONS = self.env.VCOM_OPTIONS if 'VCOM_OPTIONS' in self.env else []
    # create task to generate worklib (if necessary)
    (self.WORKLIB, worklib_gen_output) = self.check_create_modelsim_worklib_task(
        getattr(self, 'worklib', 'worklib'))
    # Transform search paths to the format used for ncvlog.
    vsp = getattr(self, 'verilog_search_paths', [])
    self.env.VERILOG_SEARCH_PATHS = []
    vid = []
    for path in vsp:
        self.env.VERILOG_SEARCH_PATHS.append(path.abspath())
        vid.append('+incdir+' + path.abspath())
    if vid:
        self.env.VERILOG_INC_DIRS = vid
    # Validate 'source' before dereferencing it to derive a default name.
    if not hasattr(self, 'source'):
        raise Errors.ConfigurationError(
            'Please specify the source attribute for task generator '
            + getattr(self, 'name', '?noname? (and give it a name, too!)'))
    if not hasattr(self, 'name'):
        # NOTE(review): assumes source[0] is a path string — confirm.
        self.name = Node.split_path(self.source[0])[-1]
    # generate the logfile name
    self.logfile = self.get_logdir_node().make_node(
        self.env.VLOG_LOGFILE + '_' + self.name).abspath()
    # Classify sources by suffix; handled files are removed from
    # self.source so waf's default process_source skips them.
    self.source_sv = []
    self.source_string_sv = []
    self.source_v = []
    self.source_string_v = []
    self.source_vhdl = []
    self.source_string_vhdl = []
    remove_sources = []
    for src in getattr(self, 'source', []):
        if src.suffix() == '.v':
            self.source_string_v.append(src.abspath())
            self.source_v.append(src)
            remove_sources.append(src)
        elif src.suffix() in ('.sv', '.svh'):
            self.source_string_sv.append(src.abspath())
            self.source_sv.append(src)
            remove_sources.append(src)
        elif src.suffix() in ('.vhd', '.vhdl'):
            self.source_string_vhdl.append(src.abspath())
            self.source_vhdl.append(src)
            remove_sources.append(src)
    for src in remove_sources:
        self.source.remove(src)
    # The worklib generation output is a dependency of every compile task.
    if worklib_gen_output is not None:
        self.source_v.append(worklib_gen_output)
        self.source_sv.append(worklib_gen_output)
        self.source_vhdl.append(worklib_gen_output)
    if self.source_string_v:
        self.create_task("ModelsimVlogTask", self.source_v, [])
    if self.source_string_sv:
        self.create_task("ModelsimSvlogTask", self.source_sv, [])
    if self.source_string_vhdl:
        self.create_task("ModelsimVhdlTask", self.source_vhdl, [])