def get_emscripten_version(conf, cc):
    """
    Determine the emscripten (emcc) version by dumping the compiler's
    predefined macros and parsing them.

    Emscripten doesn't support processing '-' (stdin) like clang/gcc,
    so an empty dummy source file is written out and preprocessed instead.

    :param conf: configuration context
    :param cc: compiler command as a list of strings
    :return: dict mapping predefined macro names to their values
    :raises: conf.fatal when the compiler cannot be run or is not emscripten
    """
    dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
    dummy.write("")
    # -dM -E dumps all predefined macros instead of compiling
    cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
    env = conf.env.env or None
    try:
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        out = p.communicate()[0]
    except Exception as e:
        conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
    if not isinstance(out, str):
        # Python 3: decode the raw bytes read from the pipe
        out = out.decode(sys.stdout.encoding or 'iso8859-1')
    k = {}
    out = out.splitlines()
    for line in out:
        # each line looks like: #define NAME VALUE
        lst = shlex.split(line)
        if len(lst)>2:
            key = lst[1]
            val = lst[2]
            k[key] = val
    if not ('__clang__' in k and 'EMSCRIPTEN' in k):
        conf.fatal('Could not determine the emscripten compiler version.')
    conf.env.DEST_OS = 'generic'
    conf.env.DEST_BINFMT = 'elf'
    conf.env.DEST_CPU = 'asm-js'
    # emscripten is clang-based, so reuse the clang version macros
    conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
    return k
def check_cython_version(conf, minver):
    """
    Verify that the detected cython is at least version *minver*.

    :param minver: minimum required version, a sequence of ints (e.g. (0, 25))
    :raises: conf.fatal when the version cannot be parsed or is too old
    """
    conf.start_msg("Checking cython version")
    minver = tuple(minver)
    import re
    # matches e.g. "Cython version 0.29.21"; the micro part is optional
    version_re = re.compile(r'cython\s*version\s*(?P<major>\d*)\.(?P<minor>\d*)(?:\.(?P<micro>\d*))?', re.I).search
    cmd = conf.cmd_to_list(conf.env['CYTHON'])
    cmd = cmd + ['--version']
    from waflib.Tools import fc_config
    stdout, stderr = fc_config.getoutput(conf, cmd)
    # cython may print its version banner on stdout or stderr
    if stdout:
        match = version_re(stdout)
    else:
        match = version_re(stderr)
    if not match:
        conf.fatal("cannot determine the Cython version")
    cy_ver = [match.group('major'), match.group('minor')]
    if match.group('micro'):
        cy_ver.append(match.group('micro'))
    else:
        # normalize "X.Y" to "X.Y.0" so tuple comparison works
        cy_ver.append('0')
    cy_ver = tuple([int(x) for x in cy_ver])
    if cy_ver < minver:
        conf.end_msg(False)
        conf.fatal("cython version %s < %s" % (cy_ver, minver))
    conf.end_msg(str(cy_ver))
def ecpp_setupbuild(conf, id, board = None, device = None, platform = None, arch = None):
    """
    Create and configure a build environment named *id*.

    Loads any available ``ecpp_<kind>_<value>`` waf modules for the given
    board/device/platform/arch, then calls the first matching
    ``ecpp_setupbuild_<kind>_<value>`` hook on the configuration context.

    :param id: unique name of the build environment
    :raises: conf.fatal when *id* was already configured
    """
    # bug fix: zip() returns a one-shot iterator on Python 3, but this
    # pairing is iterated twice (and converted to a dict) below -- the second
    # pass used to see an exhausted iterator. Materialize it as a list.
    kw = list(zip('board device platform arch'.split(), (board, device, platform, arch)))
    for k, v in kw:
        module = 'ecpp_%s_%s' % (k, v)
        if os.path.exists(os.path.join(conf.env['ECPP_DIR'], 'waf', module + '.py')):
            conf.load(module)
    conf.setenv("")
    envname = id
    if envname not in conf.all_envs:
        # call the first matching setup hook, passing all four selectors
        for k, v in kw:
            func = getattr(conf, 'ecpp_setupbuild_%s_%s' % (k, v), None)
            if func:
                func(**dict(kw))
                break
        conf.setenv(envname, conf.env)
        conf.env['ECPP_ENVNAME'] = envname
        # override build flag
        if conf.env['ECPP_BUILDLIB_TARGET']:
            conf.env['ECPP_BUILDLIB'] = True
            ecpp_libname = 'ecpp_build_%s' % id.lower()
            conf.env.append_value('ECPP_LIBNAME', ecpp_libname)
            conf.env.append_value('ECPP_USE', [ecpp_libname])
        else:
            conf.env['ECPP_BUILDLIB'] = False
    else:
        conf.fatal("Doubly defined build id")
def check_fortran_clib(self, autoadd=True, *k, **kw):
    """
    Obtain the flags for linking with the C library
    if this check works, add uselib='CLIB' to your task generators

    :param autoadd: kept for interface compatibility (not used here)
    :return: the detected link flags, or [] when the check fails non-fatally
    """
    if not self.env.FC_VERBOSE_FLAG:
        self.fatal("env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?")
    self.start_msg("Getting fortran runtime link flags")
    try:
        self.check_cc(
            fragment=FC_FRAGMENT2,
            compile_filename="test.f",
            features="fc fcprogram_test",
            linkflags=[self.env.FC_VERBOSE_FLAG],
        )
    except Exception:
        self.end_msg(False)
        if kw.get("mandatory", True):
            # bug fix: 'conf' was undefined in this scope (NameError on the
            # failure path); the context object here is 'self'
            self.fatal("Could not find the c library flags")
    else:
        # the verbose linker output lands on stderr of the test build
        out = self.test_bld.err
        flags = parse_fortran_link(out.splitlines())
        self.end_msg("ok (%s)" % " ".join(flags))
        self.env.LINKFLAGS_CLIB = flags
        return flags
    return []
def check_libdynamixel(conf, **kw):
    """Locate the libdynamixel headers and set up the LIBDYNAMIXEL use variables."""
    required = 'required' in kw and kw.get('required', False)
    search_paths = ['/usr/include', '/usr/local/include']
    resibots_dir = None
    if hasattr(conf.options, 'resibots') and conf.options.resibots:
        resibots_dir = conf.options.resibots
    # user-supplied prefixes take priority over the system paths
    if resibots_dir:
        search_paths.insert(0, resibots_dir + '/include')
    if conf.options.libdynamixel:
        search_paths.insert(0, conf.options.libdynamixel + '/include')
    conf.start_msg('Checking for libdynamixel includes')
    try:
        found = conf.find_file('dynamixel/dynamixel.hpp', search_paths)
    except:
        found = False
    if found:
        conf.env.INCLUDES_LIBDYNAMIXEL = [os.path.expanduser(p) for p in search_paths]
        conf.env.DEFINES_LIBDYNAMIXEL = ['USE_LIBDYNAMIXEL']
        conf.end_msg('ok')
        return
    # build a helpful message describing where we looked
    if conf.options.libdynamixel and resibots_dir:
        msg = 'not found in %s nor in %s' % (conf.options.libdynamixel, resibots_dir)
    elif conf.options.libdynamixel or resibots_dir:
        msg = 'not found in %s' % (conf.options.libdynamixel if conf.options.libdynamixel else resibots_dir)
    else:
        msg = 'not found, use --libdynamixel=/path/to/libdynamixel or --resibots=/path/to/resibots'
    if required:
        conf.fatal(msg)
    else:
        conf.end_msg(msg, 'YELLOW')
def expand_bundle(conf, arg):
    """
    Parse a comma-separated bundle specification and return the selected
    dependency names. 'ALL' votes every dependency in, 'NONE' is a no-op
    placeholder, and a leading '-' votes a dependency out.
    """
    if not arg:
        return []
    tokens = arg.split(',')
    if 'NONE' in tokens and 'ALL' in tokens:
        conf.fatal('Cannot specify both ALL and NONE as dependencies')
    # each dependency accumulates votes; positive score means "bundle it"
    score = {name: 0 for name in dependencies}
    def ensure_known(candidate):
        if candidate not in score:
            conf.fatal('Cannot bundle %s, since it is not specified as a dependency' % candidate)
    for token in tokens:
        if token == 'ALL':
            for name in score:
                score[name] += 1
            continue
        if token == 'NONE':
            continue
        if token.startswith('-'):
            token = token[1:]
            ensure_known(token)
            score[token] -= 1
        else:
            ensure_known(token)
            score[token] += 1
    return [name for name in score if score[name] > 0]
def find_dmd(conf):
    """Locate a dmd/dmd2/ldc binary and verify it really is a D compiler."""
    conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
    # dmd prints "D Compiler v..." in its --help banner
    help_text = conf.cmd_and_log([conf.env.D, '--help'])
    if help_text.find("D Compiler v") == -1:
        # ldc answers to -version instead
        version_text = conf.cmd_and_log([conf.env.D, '-version'])
        if version_text.find("based on DMD v1.") == -1:
            conf.fatal("detected compiler is not dmd/ldc")
def ecpp_setuptoolchain(conf, arch):
    """
    Create (or switch to) a cross toolchain environment for *arch*.

    Tries each known tool prefix for the architecture until gcc/g++/gas and
    the binutils (strip, objcopy, objdump, nm) are all found; the first
    working prefix wins.

    :param arch: target architecture name (case-insensitive)
    :raises: conf.fatal when no tool prefix yields a usable toolchain
    """
    global tool_prefixes
    arch = arch.lower()
    envname = 'toolchain_%s' % arch
    if envname not in conf.all_envs:
        # derive a fresh environment from the current one
        conf.setenv(envname, conf.env)
        for prefix in tool_prefixes[arch]:
            try:
                # stash so a failed prefix can be rolled back cleanly
                conf.env.stash()
                conf.env['TOOL_PREFIX'] = prefix
                conf.load('gcc')
                conf.load('gxx')
                conf.load('gas')
                conf.find_program(['strip'], var='STRIP')
                conf.find_program(['objcopy'], var='OBJCOPY')
                conf.find_program(['objdump'], var='OBJDUMP')
                conf.find_program(['nm'], var='NM')
                conf.env.append_value('ASFLAGS', ['-g'])
                conf.env.append_value('CFLAGS', ['-g', '-Wall'])
                conf.env.append_value('CXXFLAGS', ['-g', '-std=c++11','-Wall', '-ftemplate-depth=10000'])
            except conf.errors.ConfigurationError:
                # this prefix does not work: undo and try the next one
                conf.env.revert()
            else:
                break  # this prefix works; keep its settings
        else:
            # the loop ran out of prefixes without a single success
            conf.fatal('Could not find a valid toolchain for "%s".' % arch)
    else:
        # environment already configured: just switch to it
        conf.setenv(envname)
def find_sxx(conf):
    """
    Detect the sun C++ compiler

    Resolution order: a pre-set env CXX, then the CXX environment variable,
    then the 'CC' (Studio) and 'c++' program names on PATH.

    :raises: conf.fatal when no compiler is found or it is not a Sun compiler
    """
    v = conf.env
    cc = None
    if v["CXX"]:
        cc = v["CXX"]
    elif "CXX" in conf.environ:
        cc = conf.environ["CXX"]
    if not cc:
        cc = conf.find_program("CC", var="CXX")  # studio
    if not cc:
        # NOTE(review): find_program typically raises on failure, so these
        # fallbacks look defensive -- confirm against the waf version in use
        cc = conf.find_program("c++", var="CXX")
    if not cc:
        conf.fatal("Could not find a Sun C++ compiler")
    try:
        # '-flags' is accepted by Sun compilers; failure means something else
        conf.cmd_and_log(cc + ["-flags"])
    except Exception:
        conf.fatal("%r is not a Sun compiler" % cc)
    v["CXX"] = cc
    v["CXX_NAME"] = "sun"
    conf.get_suncc_version(cc)
def getoutput(conf, cmd, stdin=False):
    """Run *cmd* and return its (stdout, stderr); abort configuration on failure."""
    if stdin:
        feed = '\n'
    else:
        feed = None
    try:
        out, err = conf.cmd_and_log(cmd, env=conf.env.env or None, output=0, input=feed)
    except Exception:
        conf.fatal('could not determine the compiler version %r' % cmd)
    return (out, err)
def find_msvc(conf):
    """
    Locate the MSVC compiler, linker and librarian and populate conf.env.

    Expects MSVC_COMPILER/MSVC_VERSION/PATH to have been filled in by the
    version-detection code beforehand.

    :raises: conf.fatal under cygwin or when the compiler cannot be identified
    """
    if sys.platform=='cygwin':
        conf.fatal('MSVC module does not work under cygwin Python!')
    v=conf.env
    path=v.PATH
    compiler=v.MSVC_COMPILER
    version=v.MSVC_VERSION
    compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
    # manifests are used from msvc 8 / wsdk 6 / intel 11 onwards
    v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
    cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
    # run the detected binary with the detection PATH to prove it executes
    env=dict(conf.environ)
    if path:env.update(PATH=';'.join(path))
    if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
        conf.fatal('the msvc compiler could not be identified')
    # msvc drives both C and C++ compilations with the same executable
    v.CC=v.CXX=cxx
    v.CC_NAME=v.CXX_NAME='msvc'
    if not v.LINK_CXX:
        v.LINK_CXX=conf.find_program(linker_name,path_list=path,errmsg='%s was not found (linker)'%linker_name)
    if not v.LINK_CC:
        v.LINK_CC=v.LINK_CXX
    if not v.AR:
        stliblink=conf.find_program(lib_name,path_list=path,var='AR')
        if not stliblink:
            return
        v.ARFLAGS=['/nologo']
    if v.MSVC_MANIFEST:
        conf.find_program('MT',path_list=path,var='MT')
        v.MTFLAGS=['/nologo']
    try:
        conf.load('winres')
    except Errors.ConfigurationError:
        # resource compilation is optional
        Logs.warn('Resource compiler not found. Compiling resource file is disabled')
def check_omni_vrep(conf, **kw):
    """Locate the omni_vrep headers and set up the OMNI_VREP use variables."""
    required = 'required' in kw and kw.get('required', False)
    search_paths = ['/usr/include', '/usr/local/include']
    resibots_dir = None
    if hasattr(conf.options, 'resibots') and conf.options.resibots:
        resibots_dir = conf.options.resibots
    # user-supplied prefixes take priority over the system paths
    if resibots_dir:
        search_paths.insert(0, resibots_dir + '/include')
    if conf.options.omni_vrep:
        search_paths.insert(0, conf.options.omni_vrep + '/include')
    conf.start_msg('Checking for omni_vrep includes')
    try:
        found = conf.find_file('omni_vrep/omnipointer.hpp', search_paths)
    except:
        found = False
    if found:
        conf.env.INCLUDES_OMNI_VREP = [os.path.expanduser(p) for p in search_paths]
        conf.env.DEFINES_OMNI_VREP = ['USE_OMNI_VREP']
        conf.end_msg('ok')
        return
    # build a helpful message describing where we looked
    if conf.options.omni_vrep and resibots_dir:
        msg = 'not found in %s nor in %s' % (conf.options.omni_vrep, resibots_dir)
    elif conf.options.omni_vrep or resibots_dir:
        msg = 'not found in %s' % (conf.options.omni_vrep if conf.options.omni_vrep else resibots_dir)
    else:
        msg = 'not found, use --omni_vrep=/path/to/omni_vrep or --resibots=/path/to/resibots'
    if required:
        conf.fatal(msg)
    else:
        conf.end_msg(msg, 'YELLOW')
def check_eigen(conf, **kw):
    """Locate the Eigen headers and set up the EIGEN use variables."""
    required = 'required' in kw and kw.get('required', False)
    search_paths = ['/usr/include/eigen3', '/usr/local/include/eigen3', '/usr/include', '/usr/local/include']
    resibots_dir = None
    if hasattr(conf.options, 'resibots') and conf.options.resibots:
        resibots_dir = conf.options.resibots
    # user-supplied prefixes take priority over the system paths
    if resibots_dir:
        search_paths.insert(0, resibots_dir + '/include')
    if conf.options.eigen:
        search_paths.insert(0, conf.options.eigen + '/include')
    conf.start_msg('Checking for Eigen includes')
    try:
        found = conf.find_file('Eigen/Core', search_paths)
    except:
        found = False
    if found:
        conf.env.INCLUDES_EIGEN = [os.path.expanduser(p) for p in search_paths]
        conf.env.DEFINES_EIGEN = ['USE_EIGEN']
        conf.end_msg('ok')
        return
    # build a helpful message describing where we looked
    if conf.options.eigen and resibots_dir:
        msg = 'not found in %s nor in %s' % (conf.options.eigen, resibots_dir)
    elif conf.options.eigen or resibots_dir:
        msg = 'not found in %s' % (conf.options.eigen if conf.options.eigen else resibots_dir)
    else:
        msg = 'not found, use --eigen=/path/to/eigen or --resibots=/path/to/resibots'
    if required:
        conf.fatal(msg)
    else:
        conf.end_msg(msg, 'YELLOW')
def get_pgfortran_version(conf,fc):
    """
    Verify *fc* is the PGI Fortran compiler and store its version in FC_VERSION.

    :param fc: fortran compiler command as a list of strings
    :raises: conf.fatal when the PGI signature or version cannot be found
    """
    version_re = re.compile(r"The Portland Group", re.I).search
    cmd = fc + ['-V']
    out,err = fc_config.getoutput(conf, cmd, stdin=False)
    # the banner may be printed on stdout or stderr
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        conf.fatal('Could not verify PGI signature')
    # '-help=variable' dumps 'NAME=value' pairs, among them COMPVER
    cmd = fc + ['-help=variable']
    out,err = fc_config.getoutput(conf, cmd, stdin=False)
    if out.find('COMPVER')<0:
        conf.fatal('Could not determine the compiler type')
    k = {}
    prevk = ''
    out = out.split('\n')
    for line in out:
        lst = line.partition('=')
        if lst[1] == '=':
            key = lst[0].rstrip()
            if key == '':
                # continuation line: the value belongs to the previous key
                key = prevk
            val = lst[2].rstrip()
            k[key] = val
        else:
            prevk = line.partition(' ')[0]
    def isD(var):
        # is the variable defined?
        return var in k
    def isT(var):
        # is the variable defined and true (non-'0')?
        return var in k and k[var]!='0'
    conf.env['FC_VERSION'] = (k['COMPVER'].split('.'))
def setup_msvc(conf,versiondict):
    """
    Select the first msvc (compiler version, target architecture) pair that
    matches the user's --msvc_targets/--msvc_version options (or the env
    defaults) and is actually installed.

    :param versiondict: dict(version string -> dict(arch -> target_compiler))
    :return: compiler, revision, bindirs, incdirs, libdirs, cpu
    :raises: conf.fatal when no valid combination exists
    """
    platforms=getattr(Options.options,'msvc_targets','').split(',')
    if platforms==['']:
        platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
    desired_versions=getattr(Options.options,'msvc_version','').split(',')
    if desired_versions==['']:
        # newest versions first by default
        desired_versions=conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
    # lazy detection evaluates configurations only when they are tried
    lazy_detect=getattr(Options.options,'msvc_lazy',True)
    if conf.env.MSVC_LAZY_AUTODETECT is False:
        lazy_detect=False
    if not lazy_detect:
        # eager mode: evaluate everything up front and drop broken targets
        for val in versiondict.values():
            for arch in list(val.keys()):
                cfg=val[arch]
                cfg.evaluate()
                if not cfg.is_valid:
                    del val[arch]
        conf.env.MSVC_INSTALLED_VERSIONS=versiondict
    for version in desired_versions:
        try:
            targets=versiondict[version]
        except KeyError:
            continue
        for arch in platforms:
            try:
                cfg=targets[arch]
            except KeyError:
                continue
            cfg.evaluate()
            if cfg.is_valid:
                compiler,revision=version.rsplit(' ',1)
                return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
    conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys())))
def setup_msvc(conf,versions,arch=False):
    """
    Pick the first (compiler version, target) combination from *versions*
    matching the user's msvc options.

    :param versions: list of (version string, [(target, config), ...]) pairs
    :param arch: when True, also return the real target architecture
    :return: compiler, revision, bindirs, incdirs, libdirs [, target]
    :raises: conf.fatal when nothing matches
    """
    platforms=getattr(Options.options,'msvc_targets','').split(',')
    if platforms==['']:
        platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
    desired_versions=getattr(Options.options,'msvc_version','').split(',')
    if desired_versions==['']:
        # newest versions first by default
        desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
    versiondict=dict(versions)
    for version in desired_versions:
        try:
            targets=dict(versiondict[version])
            for target in platforms:
                try:
                    try:
                        # NOTE(review): the unpack itself may raise
                        # ConfigurationError -- targets[target] appears to be
                        # evaluated lazily; confirm against the caller
                        realtarget,(p1,p2,p3)=targets[target]
                    except conf.errors.ConfigurationError:
                        # broken installation: drop it and keep looking
                        del(targets[target])
                    else:
                        compiler,revision=version.rsplit(' ',1)
                        if arch:
                            return compiler,revision,p1,p2,p3,realtarget
                        else:
                            return compiler,revision,p1,p2,p3
                except KeyError:continue
        except KeyError:continue
    conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
def setup_msvc(conf, versions, arch=False):
    """
    Pick the first (compiler version, target) combination from *versions*
    matching the user's msvc options.

    :param versions: list of (version string, [(target, config), ...]) pairs
    :param arch: when True, also return the real target architecture
    :return: compiler, revision, bindirs, incdirs, libdirs [, target]
    :raises: conf.fatal when nothing matches
    """
    platforms = getattr(Options.options, "msvc_targets", "").split(",")
    if platforms == [""]:
        platforms = Utils.to_list(conf.env["MSVC_TARGETS"]) or [
            i for i, j in all_msvc_platforms + all_icl_platforms + all_wince_platforms
        ]
    desired_versions = getattr(Options.options, "msvc_version", "").split(",")
    if desired_versions == [""]:
        # newest versions first by default
        desired_versions = conf.env["MSVC_VERSIONS"] or [v for v, _ in versions][::-1]
    versiondict = dict(versions)
    for version in desired_versions:
        try:
            targets = dict(versiondict[version])
            for target in platforms:
                try:
                    # bug fix: the unpack previously rebound the 'arch'
                    # parameter, so the 'if arch:' test below checked the
                    # unpacked target instead of the caller's flag (the
                    # sibling variant correctly uses 'realtarget')
                    realtarget, (p1, p2, p3) = targets[target]
                    compiler, revision = version.rsplit(" ", 1)
                    if arch:
                        return compiler, revision, p1, p2, p3, realtarget
                    else:
                        return compiler, revision, p1, p2, p3
                except KeyError:
                    continue
        except KeyError:
            continue
    conf.fatal("msvc: Impossible to find a valid architecture for building (in setup_msvc)")
def get_python_variables(conf,variables,imports=['import sys']):
    """
    Run the configured Python interpreter, print the repr() of each
    expression in *variables*, and return the parsed values.

    NOTE(review): the mutable default for *imports* is shared across calls;
    it is never mutated here, so this is safe as written.

    :param variables: list of Python expressions to evaluate (usually distutils queries)
    :param imports: statements executed before the expressions
    :return: list of parsed values (None, str or int per printed line)
    :raises: conf.fatal when the interpreter/distutils is unusable
    """
    program=list(imports)
    program.append('')
    for v in variables:
        program.append("print(repr(%s))"%v)
    os_env=dict(os.environ)
    try:
        # this variable can confuse distutils on OS X
        del os_env['MACOSX_DEPLOYMENT_TARGET']
    except KeyError:
        pass
    try:
        out=conf.cmd_and_log(conf.env.PYTHON+['-c','\n'.join(program)],env=os_env)
    except Errors.WafError:
        conf.fatal('The distutils module is unusable: install "python-devel"?')
    return_values=[]
    # parse the repr() output line by line: None, quoted strings and integers
    for s in out.split('\n'):
        s=s.strip()
        if not s:
            continue
        if s=='None':
            return_values.append(None)
        elif s[0]=="'"and s[-1]=="'":
            return_values.append(s[1:-1])
        elif s[0].isdigit():
            return_values.append(int(s))
        else:break  # stop at the first unrecognized line
    return return_values
def find_ifort_win32(conf):
    """
    Locate the Intel Fortran compiler, linker and librarian on Windows and
    populate conf.env (FC, LINK_FC, AR, MT, ...).

    Expects MSVC_COMPILER/MSVC_VERSION/PATH to have been filled in by the
    version-detection code beforehand.

    :raises: conf.fatal when the compiler cannot be identified
    """
    v = conf.env
    path = v.PATH
    compiler = v.MSVC_COMPILER
    version = v.MSVC_VERSION
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    # the manifest tool is needed from intel 11 onwards
    v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)
    fc = conf.find_program(compiler_name, var='FC', path_list=path)
    # run the detected binary with the detection PATH to prove it executes
    env = dict(conf.environ)
    if path:
        env.update(PATH=';'.join(path))
    if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
        # bug fix: the message used to read
        # 'not intel fortran compiler could not be identified'
        conf.fatal('the intel fortran compiler could not be identified')
    v.FC_NAME = 'IFORT'
    if not v.LINK_FC:
        conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)
    if not v.AR:
        conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
        v.ARFLAGS = ['/nologo']
    if v.IFORT_MANIFEST:
        conf.find_program('MT', path_list=path, var='MT')
        v.MTFLAGS = ['/nologo']
    try:
        conf.load('winres')
    except Errors.WafError:
        # resource compilation is optional
        Logs.warn('Resource compiler not found. Compiling resource file is disabled')
def getoutput(conf, cmd, stdin=False):
    """
    Obtains Fortran command outputs

    Runs *cmd* with LANG=C (unless a custom env is configured) and returns
    its (stdout, stderr), even when the command exits with an error code.

    :param cmd: command as a list of strings
    :param stdin: when True, feed a single newline to the process
    :return: tuple (stdout, stderr)
    :raises: conf.fatal when the command cannot be executed at all
    """
    from waflib import Errors
    if conf.env.env:
        env = conf.env.env
    else:
        env = dict(os.environ)
        env['LANG'] = 'C'  # force untranslated messages for reliable parsing
    input = stdin and '\n'.encode() or None
    try:
        out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
    except Errors.WafError as e:
        # A WafError might indicate an error code during the command
        # execution, in this case we still obtain the stderr and stdout,
        # which we can use to find the version string.
        if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
            raise e
        else:
            # Ignore the return code and return the original
            # stdout and stderr.
            out = e.stdout
            err = e.stderr
    except Exception:
        conf.fatal('could not determine the compiler version %r' % cmd)
    return (out, err)
def configure(conf):
    """Select and apply a C++ mkspec, then append any user-supplied flags."""
    # cxx_default simply falls back to waf's automatic compiler detection
    mkspec = "cxx_default"
    if conf.has_tool_option('cxx_mkspec'):
        mkspec = conf.get_tool_option('cxx_mkspec')
    conf.msg('Using the mkspec:', mkspec)
    # the mkspec is expected to be a method bound on the conf object
    if hasattr(conf, mkspec):
        getattr(conf, mkspec)()
    else:
        conf.fatal("The mkspec is not available: {0}".format(mkspec))
    # per-tool flags, each a ';'-separated list
    if conf.has_tool_option('cflags'):
        conf.env['CFLAGS'] += conf.get_tool_option('cflags').split(';')
    if conf.has_tool_option('cxxflags'):
        conf.env['CXXFLAGS'] += conf.get_tool_option('cxxflags').split(';')
    if conf.has_tool_option('linkflags'):
        conf.env['LINKFLAGS'] += conf.get_tool_option('linkflags').split(';')
    # commonflags go to compiler and linker alike
    if conf.has_tool_option('commonflags'):
        for key in ('CFLAGS', 'CXXFLAGS', 'LINKFLAGS'):
            conf.env[key] += conf.get_tool_option('commonflags').split(';')
def check_fortran_clib(self, autoadd=True, *k, **kw):
    """
    Obtain flags for linking with the c library
    if this check works, add uselib='CLIB' to your task generators

    :param autoadd: kept for interface compatibility (not used here)
    :return: the detected link flags, or [] when the check fails non-fatally
    """
    if not self.env.FC_VERBOSE_FLAG:
        self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
    self.start_msg('Getting fortran runtime link flags')
    try:
        self.check_cc(
            fragment = FC_FRAGMENT2,
            compile_filename = 'test.f',
            features = 'fc fcprogram_test',
            linkflags = [self.env.FC_VERBOSE_FLAG]
        )
    except Exception:
        # narrowed from a bare 'except:' (matches the sibling variant in
        # this file) so KeyboardInterrupt is not swallowed
        self.end_msg(False)
        if kw.get('mandatory', True):
            # bug fix: 'conf' was undefined in this scope (NameError on the
            # failure path); the context object here is 'self'
            self.fatal('Could not find the c library flags')
    else:
        # the verbose linker output lands on stderr of the test build
        out = self.test_bld.err
        flags = parse_fortran_link(out.splitlines())
        self.end_msg('ok (%s)' % ' '.join(flags))
        self.env.CLIB_LINKFLAGS = flags
        return flags
    return []
def setup_ifort(conf, versiondict):
    """
    Checks installed compilers and targets and returns the first combination
    from the user's options, env, or the global supported lists that checks.

    :param versiondict: dict(platform -> dict(architecture -> configuration))
    :type versiondict: dict(string -> dict(string -> target_compiler)
    :return: the compiler, revision, path, include dirs, library paths and target architecture
    :rtype: tuple of strings
    """
    platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
    # newest versions first by default
    desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
    for version in desired_versions:
        try:
            targets = versiondict[version]
        except KeyError:
            continue
        for arch in platforms:
            try:
                cfg = targets[arch]
            except KeyError:
                continue
            # evaluate lazily: only combinations actually tried are probed
            cfg.evaluate()
            if cfg.is_valid:
                compiler,revision = version.rsplit(' ', 1)
                return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
    conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
def get_suncc_version(conf, cc):
    """
    Returns the Sun compiler version

    Runs ``cc -V`` and parses the first banner line; stores the
    (major, minor) strings in conf.env.CC_VERSION.

    :raise: :py:class:`waflib.Errors.ConfigurationError`
    """
    cmd = cc + ['-V']
    try:
        out, err = conf.cmd_and_log(cmd, output=0)
    except Errors.WafError as e:
        # Older versions of the compiler exit with non-zero status when reporting their version
        if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
            conf.fatal('Could not find suncc %r' % cmd)
        out = e.stdout
        err = e.stderr
    # the banner may be on stdout or stderr; only the first line matters
    version = (out or err)
    version = version.splitlines()[0]
    # cc: Sun C 5.10 SunOS_i386 2009/06/03
    # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17
    # cc: WorkShop Compilers 5.0 98/12/15 C 5.0
    version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
    match = version_re(version)
    if match:
        k = match.groupdict()
        # note: the version components are kept as strings
        conf.env.CC_VERSION = (k['major'], k['minor'])
    else:
        conf.fatal('Could not determine the suncc version.')
def get_gfortran_version(conf,fc):
    """
    Verify *fc* is GNU Fortran and store (major, minor, patch) in FC_VERSION.

    :param fc: fortran compiler command as a list of strings
    :raises: conf.fatal when the compiler is not gfortran
    """
    version_re=re.compile(r"GNU\s*Fortran",re.I).search
    cmd=fc+['--version']
    out,err=fc_config.getoutput(conf,cmd,stdin=False)
    # the banner may appear on stdout or stderr
    if out:match=version_re(out)
    else:match=version_re(err)
    if not match:
        conf.fatal('Could not determine the compiler type')
    # dump the predefined macros by preprocessing stdin ('-')
    cmd=fc+['-dM','-E','-']
    out,err=fc_config.getoutput(conf,cmd,stdin=True)
    if out.find('__GNUC__')<0:
        conf.fatal('Could not determine the compiler type')
    k={}
    out=out.split('\n')
    import shlex
    for line in out:
        # each line looks like: #define NAME VALUE
        lst=shlex.split(line)
        if len(lst)>2:
            key=lst[1]
            val=lst[2]
            k[key]=val
    def isD(var):
        # is the macro defined?
        return var in k
    def isT(var):
        # is the macro defined and true (non-'0')?
        return var in k and k[var]!='0'
    conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
def find_arm_cxx_cpp(conf):
    """Locate the arm-linux cross g++ and register it for C++ compile and link."""
    env = conf.env
    env['CXX'] = None
    compiler = conf.find_program('arm-linux-g++', path_list=['/usr/arm/bin'])
    if not compiler:
        conf.fatal('arm-linux-g++ was not found')
    # the same binary drives compiling, linking and preprocessing
    for key in ('CXX', 'LINK_CXX', 'COMPILER_CXX', 'CPP', 'LINK_CPP'):
        env[key] = compiler
    env['CXX_NAME'] = 'gcc'
def check_python_module(conf, module_name):
    """
    Check that *module_name* can be imported by the configured Python.

    :param module_name: name of the module to import
    :raises: conf.fatal when the import fails
    """
    conf.start_msg('Python module %s' % module_name)
    try:
        # consistency fix: conf.env['PYTHON'] is a command *list* in the
        # sibling check_python_module variants in this file, so extend it
        # rather than nesting it inside a new list
        conf.cmd_and_log(conf.env['PYTHON'] + ['-c', 'import %s\nprint(1)\n' % module_name])
    except Exception:
        conf.end_msg(False)
        conf.fatal('Could not find the python module %r' % module_name)
    conf.end_msg(True)
def check_python_module(conf, module_name):
    """Check that *module_name* can be imported by the configured Python."""
    conf.start_msg('Python module %s' % module_name)
    command = conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name]
    try:
        conf.cmd_and_log(command)
    except:
        conf.end_msg(False)
        conf.fatal('Could not find the python module %r' % module_name)
    conf.end_msg(True)
def check_python_module(conf, module_name):
    """Check that *module_name* can be imported by the configured Python."""
    conf.start_msg("Python module %s" % module_name)
    probe = conf.env["PYTHON"] + ["-c", PYTHON_MODULE_TEMPLATE % module_name]
    try:
        conf.cmd_and_log(probe)
    except:
        conf.end_msg(False)
        conf.fatal("Could not find the python module %r" % module_name)
    conf.end_msg(True)
def archive_name_without_suffix (archive):
    """
    Return *archive* with its archive suffix removed.

    :param archive: file name ending in .tar.gz, .tgz or .tar
    :return: the name without the suffix
    :raises: conf.fatal when the suffix is not recognized
    """
    suffixes = [".tar.gz", ".tgz", ".tar"]
    # single pass instead of the redundant any() pre-scan + second loop
    for suffix in suffixes:
        if archive.endswith(suffix):
            return archive[:-len(suffix)]
    # NOTE(review): 'conf' is not a parameter here; this assumes a
    # module-level conf object exists -- confirm
    conf.fatal("Cannot handle archive %s (based on its suffix)" % archive)
def get_msvc_version(conf, compiler, version, target, vcvars):
    """
    Checks that an installed compiler actually runs and uses vcvars to obtain the
    environment needed by the compiler.

    :param compiler: compiler type, for looking up the executable name
    :param version: compiler version, for debugging only
    :param target: target architecture
    :param vcvars: batch file to run to check the environment
    :return: the location of the compiler executable, the location of include dirs, and the library paths
    :rtype: tuple of strings
    """
    Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
    # unique batch file name per invocation of this function
    try:
        conf.msvc_cnt += 1
    except AttributeError:
        conf.msvc_cnt = 1
    batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
    # the batch file sources vcvars and echoes the resulting environment
    batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars,target))
    sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
    lines = sout.splitlines()
    if not lines[0]:
        lines.pop(0)
    MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
    # pick the echoed PATH/INCLUDE/LIB values out of the batch output
    for line in lines:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
    if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
        conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
    # Check if the compiler is usable at all.
    # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
    env = dict(os.environ)
    env.update(PATH = path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
    # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
    if 'CL' in env:
        del(env['CL'])
    try:
        conf.cmd_and_log(cxx + ['/help'], env=env)
    except UnicodeError:
        st = Utils.ex_stack()
        if conf.logger:
            conf.logger.error(st)
        conf.fatal('msvc: Unicode error - check the code page?')
    except Exception as e:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
        conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
    else:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
    finally:
        # do not leave a half-configured compiler name behind
        conf.env[compiler_name] = ''
    return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
def check_magnum_integration(conf, *k, **kw):
    """
    Check for the Magnum Integration libraries (Bullet/Dart/Eigen) and set
    the INCLUDES_/LIBPATH_/LIB_/DEFINES_ use variables for the requested
    components.

    Accepted keyword arguments:
        required -- abort configuration when something is missing
        components -- space-separated component names to look for
        magnum -- uselib name of the already-configured Magnum (default 'Magnum')
        uselib_store -- base uselib name for the results (default 'MagnumIntegration')
    """
    def get_directory(filename, dirs, full=False):
        # directory that contains *filename*; with full=True keep the full parent path
        res = conf.find_file(filename, dirs)
        if not full:
            return res[:-len(filename) - 1]
        return res[:res.rfind('/')]

    def find_in_string(data, text):
        return data.find(text)

    # read the options first -- bug fix: 'required' used to be read by the
    # GCC check below before it was assigned, raising NameError
    required = kw.get('required', False)
    requested_components = kw.get('components', None)
    if requested_components is None:
        requested_components = []
    else:
        requested_components = requested_components.split()
    magnum_var = kw.get('magnum', 'Magnum')
    magnum_integration_var = kw.get('uselib_store', 'MagnumIntegration')

    # Check compiler version (for gcc); I am being a bit more strong (Magnum could be built with 4.7 but needs adjustment)
    if conf.env.CXX_NAME in ["gcc", "g++"] and int(conf.env['CC_VERSION'][0] + conf.env['CC_VERSION'][1]) < 48:
        msg = 'MagnumIntegration cannot be setup with GCC < 4.8!'
        if required:
            conf.fatal(msg)
        Logs.pprint('RED', msg)
        return

    includes_check = ['/usr/local/include', '/usr/include', '/opt/local/include', '/sw/include']
    libs_check = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib', '/usr/lib/x86_64-linux-gnu/', '/usr/lib64']
    bins_check = ['/usr/bin', '/usr/local/bin', '/opt/local/bin', '/sw/bin', '/bin']

    # Magnum depends on several libraries and we cannot make the assumption that
    # someone installed all of them in the same directory!
    if conf.options.magnum_integration_install_dir:
        includes_check = [conf.options.magnum_integration_install_dir + '/include'] + includes_check
        libs_check = [conf.options.magnum_integration_install_dir + '/lib'] + libs_check
        bins_check = [conf.options.magnum_integration_install_dir + '/bin'] + bins_check

    # OSX/Mac uses .dylib and GNU/Linux .so
    suffix = 'dylib' if conf.env['DEST_OS'] == 'darwin' else 'so'

    # MagnumIntegration requires a configured Magnum
    if not conf.env['INCLUDES_%s' % magnum_var]:
        msg = 'Magnum needs to be configured! Cannot proceed!'
        if required:
            conf.fatal(msg)
        Logs.pprint('RED', msg)
        return

    magnum_integration_components, magnum_integration_dependencies, magnum_integration_magnum_dependencies = get_magnum_integration_components()

    magnum_integration_includes = []
    magnum_integration_libpaths = []
    magnum_integration_libs = []
    magnum_integration_component_includes = {}
    magnum_integration_component_libpaths = {}
    magnum_integration_component_libs = {}

    # only check for components that can exist
    requested_components = list(set(requested_components).intersection(magnum_integration_components))
    # add dependencies
    for lib in requested_components:
        requested_components = requested_components + magnum_integration_dependencies[lib]
    # remove duplicates
    requested_components = list(set(requested_components))

    for component in requested_components:
        conf.start_msg('Checking for ' + component + ' Magnum Integration')
        magnum_integration_component_includes[component] = []
        magnum_integration_component_libpaths[component] = []
        magnum_integration_component_libs[component] = []

        # map the short component name to its header directory/file names
        component_name = component
        component_file = 'Integration'
        if component == 'Bullet':
            component_name = 'BulletIntegration'
        elif component == 'Dart':
            component_name = 'DartIntegration'
            component_file = 'DartIntegration'
        elif component == 'Eigen':
            component_name = 'EigenIntegration'

        try:
            include_dir = get_directory('Magnum/' + component_name + '/' + component_file + '.h', includes_check)
            magnum_integration_includes = magnum_integration_includes + [include_dir]
            magnum_integration_component_includes[component] = magnum_integration_component_includes[component] + [include_dir]
            if component != 'Eigen':
                # Eigen integration is header-only; the others ship a library
                lib = 'Magnum' + component_name
                lib_dir = get_directory('lib' + lib + '.' + suffix, libs_check, True)
                magnum_integration_libs.append(lib)
                magnum_integration_libpaths = magnum_integration_libpaths + [lib_dir]
                magnum_integration_component_libpaths[component] = magnum_integration_component_libpaths[component] + [lib_dir]
                magnum_integration_component_libs[component].append(lib)
        except:
            if required:
                conf.fatal('Not found')
            conf.end_msg('Not found', 'RED')
            # if optional, continue?
            continue
        conf.end_msg(include_dir)

    if len(magnum_integration_libs) > 0:
        conf.start_msg(magnum_integration_var + ' libs:')
        conf.end_msg(magnum_integration_libs)

    # remove duplicates
    magnum_integration_includes = list(set(magnum_integration_includes))
    magnum_integration_libpaths = list(set(magnum_integration_libpaths))

    # set environmental variables
    conf.env['INCLUDES_%s' % magnum_integration_var] = magnum_integration_includes
    conf.env['LIBPATH_%s' % magnum_integration_var] = magnum_integration_libpaths
    conf.env['LIB_%s' % magnum_integration_var] = magnum_integration_libs
    conf.env['DEFINES_%s' % magnum_integration_var] = copy.deepcopy(conf.env['DEFINES_%s' % magnum_var])

    # set component libs
    for component in requested_components:
        # fold each dependency's paths/libs into the dependent component
        for lib in magnum_integration_dependencies[component]:
            magnum_integration_component_includes[component] = magnum_integration_component_includes[component] + magnum_integration_component_includes[lib]
            magnum_integration_component_libpaths[component] = magnum_integration_component_libpaths[component] + magnum_integration_component_libpaths[lib]
            magnum_integration_component_libs[component] = magnum_integration_component_libs[component] + magnum_integration_component_libs[lib]

        conf.env['INCLUDES_%s_%s' % (magnum_integration_var, component)] = list(set(magnum_integration_component_includes[component]))
        if len(magnum_integration_component_libs[component]) > 0:
            conf.env['LIBPATH_%s_%s' % (magnum_integration_var, component)] = list(set(magnum_integration_component_libpaths[component]))
            conf.env['LIB_%s_%s' % (magnum_integration_var, component)] = list(set(magnum_integration_component_libs[component]))
        # copy the C++ defines; we want them to be available on all Magnum builds
        conf.env['DEFINES_%s_%s' % (magnum_integration_var, component)] = copy.deepcopy(conf.env['DEFINES_%s' % magnum_integration_var])

    # set C++ flags
    conf.env['CXX_FLAGS_%s' % magnum_integration_var] = copy.deepcopy(conf.env['CXX_FLAGS_%s' % magnum_var])
def check_python_version(conf, minver=None):
    """
    Check the version of the configured Python interpreter.

    Runs ``conf.env['PYTHON']`` to print ``sys.version_info`` and, when the
    version is acceptable, stores ``PYTHON_VERSION``, ``PYTHONDIR`` and
    ``PYTHONARCHDIR`` in ``conf.env`` (and as defines when ``conf.define``
    exists).

    :param minver: minimum required version as a tuple (e.g. ``(2, 7)``),
        or None to accept any interpreter version
    :raises: a configuration error when the interpreter is missing or too old
    """
    assert minver is None or isinstance(minver, tuple)
    pybin = conf.env['PYTHON']
    if not pybin:
        conf.fatal('could not find the python executable')

    # Ask the interpreter itself; sys.version_info always has exactly five
    # fields: (major, minor, micro, releaselevel, serial).
    cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
    Logs.debug('python: Running python command %r' % cmd)
    lines = conf.cmd_and_log(cmd).split()
    assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
    pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

    # Tuple comparison handles (major, minor[, micro]) minimums naturally.
    result = (minver is None) or (pyver_tuple >= minver)

    if result:
        # 'major.minor' string used for directory names below
        pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
        conf.env['PYTHON_VERSION'] = pyver

        if 'PYTHONDIR' in conf.environ:
            pydir = conf.environ['PYTHONDIR']
        else:
            if Utils.is_win32:
                (python_LIBDEST, pydir) = conf.get_python_variables([
                    "get_config_var('LIBDEST') or ''",
                    "get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']
                ])
            else:
                python_LIBDEST = None
                (pydir, ) = conf.get_python_variables([
                    "get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']
                ])
            if python_LIBDEST is None:
                # NOTE(review): python_LIBDEST is computed but not used below;
                # kept for backward compatibility with callers reading locals
                if conf.env['LIBDIR']:
                    python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
                else:
                    python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)

        if 'PYTHONARCHDIR' in conf.environ:
            pyarchdir = conf.environ['PYTHONARCHDIR']
        else:
            (pyarchdir, ) = conf.get_python_variables([
                "get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']
            ])
            if not pyarchdir:
                pyarchdir = pydir

        if hasattr(conf, 'define'):  # conf.define is missing on some minimal configure contexts
            conf.define('PYTHONDIR', pydir)
            conf.define('PYTHONARCHDIR', pyarchdir)

        conf.env['PYTHONDIR'] = pydir
        conf.env['PYTHONARCHDIR'] = pyarchdir

    # Display the result of the check
    pyver_full = '.'.join(map(str, pyver_tuple[:3]))
    if minver is None:
        conf.msg('Checking for python version', pyver_full)
    else:
        minver_str = '.'.join(map(str, minver))
        # BUGFIX: the original color expression
        #   ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW'
        # always yielded 'GREEN' because a non-empty string is truthy;
        # color according to whether the version check actually passed.
        conf.msg('Checking for python version', pyver_tuple,
                 'GREEN' if result else 'YELLOW')
    if not result:
        conf.fatal('The python version is too old, expecting %r' % (minver, ))
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
    """
    Run the preprocessor with ``-dM -E -`` to dump the predefined macros and
    determine the compiler version and target platform.

    Sets CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU in *conf.env*.

    :param cc: compiler command line (list of strings)
    :param gcc: expect a gcc-compatible compiler
    :param icc: expect the Intel compiler
    :param clang: expect clang
    :return: dict of predefined macro name -> value
    :raises: a configuration error when the compiler cannot be identified
    """
    cmd = cc + ['-dM', '-E', '-']
    env = conf.env.env or None
    try:
        p = Utils.subprocess.Popen(cmd,
                                   stdin=Utils.subprocess.PIPE,
                                   stdout=Utils.subprocess.PIPE,
                                   stderr=Utils.subprocess.PIPE,
                                   env=env)
        # BUGFIX: the pipe is opened in binary mode, so writing a str raises
        # TypeError on Python 3 — write bytes, as the sibling variants do.
        p.stdin.write('\n'.encode())
        out = p.communicate()[0]
    except Exception:
        conf.fatal('Could not determine the compiler version %r' % cmd)
    if not isinstance(out, str):
        # sys.stdout.encoding can be None when output is piped
        out = out.decode(sys.stdout.encoding or 'iso8859-1')

    # Sanity-check that the dumped macros match the expected compiler family
    if gcc:
        if out.find('__INTEL_COMPILER') >= 0:
            conf.fatal('The intel compiler pretends to be gcc')
        if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
            conf.fatal('Could not determine the compiler type')
    if icc and out.find('__INTEL_COMPILER') < 0:
        conf.fatal('Not icc/icpc')
    if clang and out.find('__clang__') < 0:
        conf.fatal('Not clang/clang++')

    # Collect '#define KEY VALUE' lines into a dict
    k = {}
    if icc or gcc or clang:
        out = out.splitlines()
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

    def isD(var):
        # True when the macro *var* is defined
        return var in k

    if not conf.env.DEST_OS:
        conf.env.DEST_OS = ''
    for i in MACRO_TO_DESTOS:
        if isD(i):
            conf.env.DEST_OS = MACRO_TO_DESTOS[i]
            break
    else:
        if isD('__APPLE__') and isD('__MACH__'):
            conf.env.DEST_OS = 'darwin'
        elif isD('__unix__'):
            # unix must be tested last as it is a generic fallback
            conf.env.DEST_OS = 'generic'

    if isD('__ELF__'):
        conf.env.DEST_BINFMT = 'elf'
    elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
        conf.env.DEST_BINFMT = 'pe'
        conf.env.LIBDIR = conf.env.BINDIR
    elif isD('__APPLE__'):
        conf.env.DEST_BINFMT = 'mac-o'

    if not conf.env.DEST_BINFMT:
        # Infer the binary format from the os name
        conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

    for i in MACRO_TO_DEST_CPU:
        if isD(i):
            conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
            break

    Logs.debug('ccroot: dest platform: ' + ' '.join([
        conf.env[x] or '?'
        for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')
    ]))

    if icc:
        ver = k['__INTEL_COMPILER']
        conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
    else:
        if isD('__clang__'):
            conf.env['CC_VERSION'] = (k['__clang_major__'],
                                      k['__clang_minor__'],
                                      k['__clang_patchlevel__'])
        else:
            # CONSISTENCY FIX: the other variants of this routine use the
            # string '0' as the patchlevel fallback; the int 0 here produced
            # a mixed-type tuple.
            conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'],
                                      k.get('__GNUC_PATCHLEVEL__', '0'))
    return k
def fatal(required, msg):
    """Report a failed check: abort configuration when *required* is set,
    otherwise print *msg* in red and continue."""
    # NOTE: 'conf' and 'Logs' come from the enclosing scope (closure helper);
    # conf.fatal raises, so the pprint below only runs for optional checks.
    if required:
        conf.fatal(msg)
    Logs.pprint('RED', msg)
def configure_toolchain_unavail(conf):
    """Fallback toolchain hook: always aborts configuration.

    Installed when the host OS cannot provide the POSIX toolchain.
    """
    conf.fatal('Posix toolchain not available none posix os')
def get_msvc_version(conf, compiler, version, target, vcvars):
    """
    Create a bat file to obtain the location of the libraries

    :param compiler: msvc compiler family name (passed to _get_prog_names)
    :param version: compiler version, used for logging only here
    :param target: target architecture argument forwarded to the vcvars script
    :param vcvars: path of the vcvars batch file that sets up the environment
    :return: the location of msvc, the location of include dirs, and the library paths
    :rtype: tuple of strings
    """
    debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)

    # Generate a throwaway batch file that calls vcvars and then echoes the
    # resulting PATH/INCLUDE/LIB so they can be parsed from its output.
    batfile = conf.bldnode.make_node('waf-print-msvc.bat')
    batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
""" % (vcvars, target))
    sout = conf.cmd_and_log(['cmd', '/E:on', '/V:on', '/C', batfile.abspath()])
    lines = sout.splitlines()

    if not lines[0]:
        lines = lines[1:]
    # The first output line must identify a known environment script,
    # otherwise this architecture/vcvars combination is unusable.
    for x in ('Setting environment', 'Setting SDK environment',
              'Intel(R) C++ Compiler', 'Intel Parallel Studio'):
        if lines[0].find(x) != -1:
            break
    else:
        debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
        conf.fatal(
            'msvc: Impossible to find a valid architecture for building (in get_msvc_version)'
        )

    # Parse the echoed environment variables into path lists.
    for line in lines[1:]:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]

    # Check if the compiler is usable at all.
    # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
    env = {}
    env.update(os.environ)
    env.update(PATH=path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
    cxx = conf.cmd_to_list(cxx)

    # delete CL if exists. because it could contain parameters wich can change cl's behaviour rather catastrophically.
    if 'CL' in env:
        del (env['CL'])

    try:
        try:
            conf.cmd_and_log(cxx + ['/help'], env=env)
        except Exception as e:
            debug('msvc: get_msvc_version: %r %r %r -> failure' % (compiler, version, target))
            debug(str(e))
            conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
        else:
            debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
    finally:
        # do not cache the probe compiler in the environment
        conf.env[compiler_name] = ''

    return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
def find_gdc(conf):
    """Locate the gdc D compiler and verify that it really is gdc."""
    conf.find_program('gdc', var='D')
    version_output = conf.cmd_and_log([conf.env.D, '--version'])
    if "gdc " not in version_output:
        conf.fatal("detected compiler is not gdc")
def find_ldc2(conf):
    """Locate the ldc2 D compiler and verify it is LDC based on DMD v2."""
    conf.find_program(['ldc2'], var='D')
    version_output = conf.cmd_and_log(conf.env.D + ['-version'])
    if "based on DMD v2." not in version_output:
        conf.fatal("detected compiler is not ldc2")
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
    """
    Run the preprocessor with ``-dM -E -`` to determine the compiler version.

    Sets CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU in *conf.env*.

    :param cc: compiler command line (list of strings)
    :param gcc: expect a gcc-compatible compiler
    :param icc: expect the Intel compiler
    :param clang: expect clang
    :return: dict of predefined macro name -> value
    :raises: a configuration error when the compiler cannot be identified
    """
    cmd = cc + ['-dM', '-E', '-']
    env = conf.env.env or None
    try:
        out, err = conf.cmd_and_log(cmd, output=0, input='\n', env=env)
    except Exception:
        conf.fatal('Could not determine the compiler version %r' % cmd)

    # Verify the macro dump matches the compiler family we were asked for
    if gcc:
        if out.find('__INTEL_COMPILER') >= 0:
            conf.fatal('The intel compiler pretends to be gcc')
        if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
            conf.fatal('Could not determine the compiler type')
    if icc and out.find('__INTEL_COMPILER') < 0:
        conf.fatal('Not icc/icpc')
    if clang and out.find('__clang__') < 0:
        conf.fatal('Not clang/clang++')
    if not clang and out.find('__clang__') >= 0:
        conf.fatal(
            'Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure'
        )

    # Collect '#define KEY VALUE' lines into a dict
    k = {}
    if icc or gcc or clang:
        out = out.splitlines()
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

    def isD(var):
        # True when the macro *var* is defined
        return var in k

    if not conf.env.DEST_OS:
        conf.env.DEST_OS = ''
    for i in MACRO_TO_DESTOS:
        if isD(i):
            conf.env.DEST_OS = MACRO_TO_DESTOS[i]
            break
    else:
        if isD('__APPLE__') and isD('__MACH__'):
            conf.env.DEST_OS = 'darwin'
        elif isD('__unix__'):
            # unix must be tested last as it is a generic fallback
            conf.env.DEST_OS = 'generic'

    if isD('__ELF__'):
        conf.env.DEST_BINFMT = 'elf'
    elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
        conf.env.DEST_BINFMT = 'pe'
        conf.env.LIBDIR = conf.env.BINDIR
    elif isD('__APPLE__'):
        conf.env.DEST_BINFMT = 'mac-o'

    if not conf.env.DEST_BINFMT:
        # Infer the binary format from the os name
        conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

    for i in MACRO_TO_DEST_CPU:
        if isD(i):
            conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
            break

    Logs.debug('ccroot: dest platform: ' + ' '.join([
        conf.env[x] or '?'
        for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')
    ]))

    if icc:
        ver = k['__INTEL_COMPILER']
        conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
    else:
        if isD('__clang__') and isD('__clang_major__'):
            conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'],
                                   k['__clang_patchlevel__'])
        else:
            # older clang versions and gcc: patchlevel macro may be absent
            conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'],
                                   k.get('__GNUC_PATCHLEVEL__', '0'))
    return k
if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')): conf.fatal('Could not find suncc %r' % cmd) out = e.stdout err = e.stderr version = (out or err) version = version.splitlines()[0] version_re = re.compile( r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search match = version_re(version) if match: k = match.groupdict() conf.env.CC_VERSION = (k['major'], k['minor']) else: conf.fatal('Could not determine the suncc version.') @conf def add_as_needed(self): if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME): self.env.append_unique('LINKFLAGS', '-Wl,--as-needed') class cfgtask(Task.TaskBase): def display(self): return '' def runnable_status(self): return Task.RUN_ME
def load_win_x64_host_settings(conf):
    """
    Setup any environment settings you want to apply globally any time the
    host doing the building is win x64
    """
    v = conf.env

    # Setup the environment for the AZ Code Generator
    # Look for the most recent version of the code generator subfolder. This should be either installed or built by the bootstrap process at this point
    global AZCG_VALIDATED_PATH
    if AZCG_VALIDATED_PATH is None:
        az_code_gen_subfolders = ['bin/windows']
        validated_azcg_dir = None
        for az_code_gen_subfolder in az_code_gen_subfolders:
            azcg_dir = conf.Path(
                'Tools/AzCodeGenerator/{}'.format(az_code_gen_subfolder))
            azcg_exe = os.path.join(azcg_dir, AZ_CODE_GEN_EXECUTABLE)
            if os.path.exists(azcg_exe):
                Logs.debug(
                    'lumberyard: Found AzCodeGenerator at {}'.format(azcg_dir))
                validated_azcg_dir = azcg_dir
                break
        # cache the search result in the module-level global so the scan
        # happens only once per configure run
        AZCG_VALIDATED_PATH = validated_azcg_dir
        if validated_azcg_dir is None:
            # conf.fatal raises, so the assignments below are not reached
            conf.fatal(
                'Unable to locate the AzCodeGenerator subfolder. Make sure that the Windows binaries are available'
            )

    v['CODE_GENERATOR_EXECUTABLE'] = AZ_CODE_GEN_EXECUTABLE
    v['CODE_GENERATOR_PATH'] = [AZCG_VALIDATED_PATH]
    # python paths needed by the code generator (identical lists for the
    # release and debug configurations)
    v['CODE_GENERATOR_PYTHON_PATHS'] = [
        conf.Path('Tools/Python/3.7.10/windows/Lib'),
        conf.Path('Tools/Python/3.7.10/windows/libs'),
        conf.Path('Tools/Python/3.7.10/windows/DLLs'),
        conf.ThirdPartyPath('markupsafe', 'x64'),
        conf.ThirdPartyPath('jinja2', 'x64')
    ]
    v['CODE_GENERATOR_PYTHON_DEBUG_PATHS'] = [
        conf.Path('Tools/Python/3.7.10/windows/Lib'),
        conf.Path('Tools/Python/3.7.10/windows/libs'),
        conf.Path('Tools/Python/3.7.10/windows/DLLs'),
        conf.ThirdPartyPath('markupsafe', 'x64'),
        conf.ThirdPartyPath('jinja2', 'x64')
    ]
    v['EMBEDDED_PYTHON_HOME_RELATIVE_PATH'] = 'Tools/Python/3.7.10/windows'
    v['CODE_GENERATOR_PYTHON_HOME'] = conf.Path(
        v['EMBEDDED_PYTHON_HOME_RELATIVE_PATH'])
    v['CODE_GENERATOR_PYTHON_HOME_DEBUG'] = conf.Path(
        'Tools/Python/3.7.10/windows')
    v['CODE_GENERATOR_INCLUDE_PATHS'] = []
    v['EMBEDDED_PYTHON_HOME'] = v['CODE_GENERATOR_PYTHON_HOME']
    v['EMBEDDED_PYTHON_INCLUDE_PATH'] = os.path.join(v['EMBEDDED_PYTHON_HOME'],
                                                     'include')
    v['EMBEDDED_PYTHON_LIBPATH'] = os.path.join(v['EMBEDDED_PYTHON_HOME'],
                                                'libs')
    v['EMBEDDED_PYTHON_SHARED_OBJECT'] = os.path.join(
        v['EMBEDDED_PYTHON_HOME'], 'python37.dll')

    # crcfix is optional at configure time: only warn when it is missing
    crcfix_dir = conf.Path('Tools/crcfix/bin/windows')
    if not os.path.exists(crcfix_dir):
        Logs.warn(
            'Unable to locate the crcfix subfolder. Make sure that the Windows crcfix binaries are available'
        )
    v['CRCFIX_PATH'] = [crcfix_dir]
    v['CRCFIX_EXECUTABLE'] = 'crcfix.exe'
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
    """
    Runs the preprocessor to determine the gcc/icc/clang version

    The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*

    :param cc: compiler command line (list of strings)
    :param gcc: expect a gcc-compatible compiler
    :param icc: expect the Intel compiler
    :param clang: expect clang
    :return: dict of predefined macro name -> value
    :raise: :py:class:`waflib.Errors.ConfigurationError`
    """
    cmd = cc + ["-dM", "-E", "-"]
    env = conf.env.env or None
    try:
        out, err = conf.cmd_and_log(cmd, output=0, input=b"\n", env=env)
    except Errors.WafError:
        conf.fatal("Could not determine the compiler version %r" % cmd)

    # Verify the macro dump matches the compiler family we were asked for
    if gcc:
        if out.find("__INTEL_COMPILER") >= 0:
            conf.fatal("The intel compiler pretends to be gcc")
        if out.find("__GNUC__") < 0 and out.find("__clang__") < 0:
            conf.fatal("Could not determine the compiler type")
    if icc and out.find("__INTEL_COMPILER") < 0:
        conf.fatal("Not icc/icpc")
    if clang and out.find("__clang__") < 0:
        conf.fatal("Not clang/clang++")
    if not clang and out.find("__clang__") >= 0:
        conf.fatal(
            "Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure"
        )

    # Collect '#define KEY VALUE' lines into a dict
    k = {}
    if icc or gcc or clang:
        out = out.splitlines()
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

    def isD(var):
        # True when the macro *var* is defined
        return var in k

    # Some documentation is available at http://predef.sourceforge.net
    # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
    if not conf.env.DEST_OS:
        conf.env.DEST_OS = ""
    for i in MACRO_TO_DESTOS:
        if isD(i):
            conf.env.DEST_OS = MACRO_TO_DESTOS[i]
            break
    else:
        if isD("__APPLE__") and isD("__MACH__"):
            conf.env.DEST_OS = "darwin"
        elif isD("__unix__"):
            # unix must be tested last as it's a generic fallback
            conf.env.DEST_OS = "generic"

    if isD("__ELF__"):
        conf.env.DEST_BINFMT = "elf"
    elif isD("__WINNT__") or isD("__CYGWIN__") or isD("_WIN32"):
        conf.env.DEST_BINFMT = "pe"
        if not conf.env.IMPLIBDIR:
            conf.env.IMPLIBDIR = conf.env.LIBDIR  # for .lib or .dll.a files
        conf.env.LIBDIR = conf.env.BINDIR
    elif isD("__APPLE__"):
        conf.env.DEST_BINFMT = "mac-o"

    if not conf.env.DEST_BINFMT:
        # Infer the binary format from the os name.
        conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

    for i in MACRO_TO_DEST_CPU:
        if isD(i):
            conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
            break

    Logs.debug("ccroot: dest platform: " + " ".join([
        conf.env[x] or "?" for x in ("DEST_OS", "DEST_BINFMT", "DEST_CPU")
    ]))

    if icc:
        ver = k["__INTEL_COMPILER"]
        conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
    else:
        if isD("__clang__") and isD("__clang_major__"):
            conf.env.CC_VERSION = (
                k["__clang_major__"],
                k["__clang_minor__"],
                k["__clang_patchlevel__"],
            )
        else:
            # older clang versions and gcc
            conf.env.CC_VERSION = (
                k["__GNUC__"],
                k["__GNUC_MINOR__"],
                k.get("__GNUC_PATCHLEVEL__", "0"),
            )
    return k
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
    """
    Runs the preprocessor to determine the gcc/icc/clang version

    The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*

    :raise: :py:class:`waflib.Errors.ConfigurationError`
    """
    cmd = cc + ['-dM', '-E', '-']
    run_env = conf.env.env or None
    try:
        out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=run_env)
    except Exception:
        conf.fatal('Could not determine the compiler version %r' % cmd)

    # Make sure the macro dump matches the compiler family we expected.
    if gcc:
        if '__INTEL_COMPILER' in out:
            conf.fatal('The intel compiler pretends to be gcc')
        if '__GNUC__' not in out and '__clang__' not in out:
            conf.fatal('Could not determine the compiler type')
    if icc and '__INTEL_COMPILER' not in out:
        conf.fatal('Not icc/icpc')
    if clang and '__clang__' not in out:
        conf.fatal('Not clang/clang++')
    if not clang and '__clang__' in out:
        conf.fatal(
            'Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure'
        )

    # Macro name -> value, taken from the '#define KEY VALUE' lines.
    macros = {}
    if icc or gcc or clang:
        for raw_line in out.splitlines():
            tokens = shlex.split(raw_line)
            if len(tokens) > 2:
                macros[tokens[1]] = tokens[2]

    def defined(name):
        return name in macros

    # Some documentation is available at http://predef.sourceforge.net
    # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
    if not conf.env.DEST_OS:
        conf.env.DEST_OS = ''
    for macro in MACRO_TO_DESTOS:
        if defined(macro):
            conf.env.DEST_OS = MACRO_TO_DESTOS[macro]
            break
    else:
        if defined('__APPLE__') and defined('__MACH__'):
            conf.env.DEST_OS = 'darwin'
        elif defined('__unix__'):
            # unix must be tested last as it's a generic fallback
            conf.env.DEST_OS = 'generic'

    if defined('__ELF__'):
        conf.env.DEST_BINFMT = 'elf'
    elif defined('__WINNT__') or defined('__CYGWIN__') or defined('_WIN32'):
        conf.env.DEST_BINFMT = 'pe'
        conf.env.LIBDIR = conf.env.BINDIR
    elif defined('__APPLE__'):
        conf.env.DEST_BINFMT = 'mac-o'

    if not conf.env.DEST_BINFMT:
        # Infer the binary format from the os name.
        conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

    for macro in MACRO_TO_DEST_CPU:
        if defined(macro):
            conf.env.DEST_CPU = MACRO_TO_DEST_CPU[macro]
            break

    platform_info = [conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]
    Logs.debug('ccroot: dest platform: ' + ' '.join(platform_info))

    if icc:
        ver = macros['__INTEL_COMPILER']
        conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
    elif defined('__clang__') and defined('__clang_major__'):
        conf.env.CC_VERSION = (macros['__clang_major__'],
                               macros['__clang_minor__'],
                               macros['__clang_patchlevel__'])
    else:
        # older clang versions and gcc
        conf.env.CC_VERSION = (macros['__GNUC__'], macros['__GNUC_MINOR__'],
                               macros.get('__GNUC_PATCHLEVEL__', '0'))
    return macros
def fail(msg, required):
    """Report a failed check: abort when *required* is set, otherwise end
    the current message in red and continue."""
    # NOTE: 'conf' comes from the enclosing scope (closure helper);
    # conf.fatal raises, so end_msg only runs for optional checks.
    if required:
        conf.fatal(msg)
    conf.end_msg(msg, 'RED')
def check_python_headers(conf, features='pyembed pyext'):
    """
    Check for headers and libraries necessary to extend or embed python
    by using the module *distutils*.

    On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

    * PYEXT: for compiling python extensions
    * PYEMBED: for embedding a python interpreter
    """
    features = Utils.to_list(features)
    assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
    env = conf.env
    if not env.CC_NAME and not env.CXX_NAME:
        conf.fatal('load a compiler first (gcc, g++, ..)')
    # bypass all the code below for cross-compilation
    if conf.python_cross_compile(features):
        return
    if not env.PYTHON_VERSION:
        conf.check_python_version()
    pybin = env.PYTHON
    if not pybin:
        conf.fatal('Could not find the python executable')

    # Query distutils for the build configuration variables
    v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
    try:
        lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
    except RuntimeError:
        conf.fatal("Python development headers not found (-v for details).")
    vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
    conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))
    dct = dict(zip(v, lst))
    x = 'MACOSX_DEPLOYMENT_TARGET'
    if dct[x]:
        env[x] = conf.environ[x] = dct[x]
    env.pyext_PATTERN = '%s' + dct['SO']

    # Try to locate a matching python-config script
    num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
    conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)
    if env.PYTHON_CONFIG:
        # check python-config output only once
        if conf.env.HAVE_PYTHON_H:
            return
        all_flags = [['--cflags', '--libs', '--ldflags']]
        if sys.hexversion < 0x2070000:
            # python2.6-config needs one invocation per flag
            all_flags = [[k] for k in all_flags[0]]
        xx = env.CXX_NAME and 'cxx' or 'c'
        if 'pyembed' in features:
            for flags in all_flags:
                conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
            try:
                conf.test_pyembed(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352: try again with the distutils LIBDIR
                if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
                    env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
                    conf.test_pyembed(xx)
                else:
                    raise
        if 'pyext' in features:
            for flags in all_flags:
                conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
            try:
                conf.test_pyext(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352: try again with the distutils LIBDIR
                if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
                    env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
                    conf.test_pyext(xx)
                else:
                    raise
        conf.define('HAVE_PYTHON_H', 1)
        return

    # No python-config: derive the flags from the distutils values directly
    all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEMBED')
    all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEXT')

    result = None
    if not dct["LDVERSION"]:
        dct["LDVERSION"] = env.PYTHON_VERSION
    # Probe several candidate library names in several locations until one links
    for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):
        if not result and env.LIBPATH_PYEMBED:
            path = env.LIBPATH_PYEMBED
            conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
        if not result and dct['LIBDIR']:
            path = [dct['LIBDIR']]
            conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
        if not result and dct['LIBPL']:
            path = [dct['LIBPL']]
            conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
        if not result:
            path = [os.path.join(dct['prefix'], "libs")]
            conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
        if result:
            break
    if result:
        env.LIBPATH_PYEMBED = path
        env.append_value('LIB_PYEMBED', [name])
    else:
        conf.to_log("\n\n### LIB NOT FOUND\n")

    # extensions may need to link against the python library on some platforms
    if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
        env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
        env.LIB_PYEXT = env.LIB_PYEMBED

    conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
    env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
    env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]

    # code using the Python API must be compiled with -fno-strict-aliasing on gcc
    if env.CC_NAME == 'gcc':
        env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env.CXX_NAME == 'gcc':
        env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env.CC_NAME == "msvc":
        from distutils.msvccompiler import MSVCCompiler
        dist_compiler = MSVCCompiler()
        dist_compiler.initialize()
        env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

    # final sanity check: see if Python.h actually compiles
    conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
def configure_toolchain_gcc_linux(conf):
    """Set up the GNU gcc toolchain; only valid when the host OS is Linux."""
    if sys.platform not in ("linux", "linux2"):
        conf.fatal('GCC Linux toolchain not available none linux os')
    else:
        conf.setup_gnu_gcc_toolchain(prefix='')
        # drop unreferenced sections at link time
        conf.env.append_value('LINKFLAGS', ['-Wl,-gc-sections'])
def check_python_headers(conf, features='pyembed pyext'):
    """
    Check for headers and libraries necessary to extend or embed python
    by using the module *distutils*.
    On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

    * PYEXT: for compiling python extensions
    * PYEMBED: for embedding a python interpreter
    """
    features = Utils.to_list(features)
    assert ('pyembed' in features) or (
        'pyext' in features
    ), "check_python_headers features must include 'pyembed' and/or 'pyext'"
    env = conf.env
    if not env.CC_NAME and not env.CXX_NAME:
        conf.fatal('load a compiler first (gcc, g++, ..)')

    # bypass all the code below for cross-compilation
    if conf.python_cross_compile(features):
        return

    if not env.PYTHON_VERSION:
        conf.check_python_version()

    pybin = env.PYTHON
    if not pybin:
        conf.fatal('Could not find the python executable')

    # so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
    v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split(
    )
    try:
        lst = conf.get_python_variables(
            ["get_config_var('%s') or ''" % x for x in v])
    except RuntimeError:
        conf.fatal("Python development headers not found (-v for details).")

    vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
    conf.to_log("Configuration returned from %r:\n%s\n" %
                (pybin, '\n'.join(vals)))

    dct = dict(zip(v, lst))
    x = 'MACOSX_DEPLOYMENT_TARGET'
    if dct[x]:
        env[x] = conf.environ[x] = dct[x]
    env.pyext_PATTERN = '%s' + dct['SO']  # not a mistake

    # Try to get pythonX.Y-config
    num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
    conf.find_program([
        ''.join(pybin) + '-config',
        'python%s-config' % num,
        'python-config-%s' % num,
        'python%sm-config' % num
    ],
                      var='PYTHON_CONFIG',
                      msg="python-config",
                      mandatory=False)

    if env.PYTHON_CONFIG:
        # check python-config output only once
        if conf.env.HAVE_PYTHON_H:
            return

        # python2.6-config requires 3 runs
        all_flags = [['--cflags', '--libs', '--ldflags']]
        if sys.hexversion < 0x2070000:
            all_flags = [[k] for k in all_flags[0]]

        xx = env.CXX_NAME and 'cxx' or 'c'

        if 'pyembed' in features:
            for flags in all_flags:
                # Python 3.8 has different flags for pyembed, needs --embed
                embedflags = flags + ['--embed']
                try:
                    conf.check_cfg(
                        msg='Asking python-config for pyembed %r flags' %
                        ' '.join(embedflags),
                        path=env.PYTHON_CONFIG,
                        package='',
                        uselib_store='PYEMBED',
                        args=embedflags)
                except conf.errors.ConfigurationError:
                    # However Python < 3.8 doesn't accept --embed, so we need a fallback
                    conf.check_cfg(
                        msg='Asking python-config for pyembed %r flags' %
                        ' '.join(flags),
                        path=env.PYTHON_CONFIG,
                        package='',
                        uselib_store='PYEMBED',
                        args=flags)
            try:
                conf.test_pyembed(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352
                if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
                    env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
                    conf.test_pyembed(xx)
                else:
                    raise

        if 'pyext' in features:
            for flags in all_flags:
                conf.check_cfg(msg='Asking python-config for pyext %r flags' %
                               ' '.join(flags),
                               path=env.PYTHON_CONFIG,
                               package='',
                               uselib_store='PYEXT',
                               args=flags)
            try:
                conf.test_pyext(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352
                if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
                    env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
                    conf.test_pyext(xx)
                else:
                    raise

        conf.define('HAVE_PYTHON_H', 1)
        return

    # No python-config, do something else on windows systems
    all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEMBED')

    all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEXT')

    result = None
    if not dct["LDVERSION"]:
        dct["LDVERSION"] = env.PYTHON_VERSION

    # further simplification will be complicated
    for name in ('python' + dct['LDVERSION'],
                 'python' + env.PYTHON_VERSION + 'm',
                 'python' + env.PYTHON_VERSION.replace('.', '')):

        # LIBPATH_PYEMBED is already set; see if it works.
        if not result and env.LIBPATH_PYEMBED:
            path = env.LIBPATH_PYEMBED
            conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
            result = conf.check(
                lib=name,
                uselib='PYEMBED',
                libpath=path,
                mandatory=False,
                msg='Checking for library %s in LIBPATH_PYEMBED' % name)

        if not result and dct['LIBDIR']:
            path = [dct['LIBDIR']]
            conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
            result = conf.check(lib=name,
                                uselib='PYEMBED',
                                libpath=path,
                                mandatory=False,
                                msg='Checking for library %s in LIBDIR' % name)

        if not result and dct['LIBPL']:
            path = [dct['LIBPL']]
            conf.to_log(
                "\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n"
            )
            result = conf.check(lib=name,
                                uselib='PYEMBED',
                                libpath=path,
                                mandatory=False,
                                msg='Checking for library %s in python_LIBPL' %
                                name)

        if not result:
            path = [os.path.join(dct['prefix'], "libs")]
            conf.to_log(
                "\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n"
            )
            result = conf.check(lib=name,
                                uselib='PYEMBED',
                                libpath=path,
                                mandatory=False,
                                msg='Checking for library %s in $prefix/libs' %
                                name)

        if result:
            break  # do not forget to set LIBPATH_PYEMBED

    if result:
        env.LIBPATH_PYEMBED = path
        env.append_value('LIB_PYEMBED', [name])
    else:
        conf.to_log("\n\n### LIB NOT FOUND\n")

    # under certain conditions, python extensions must link to
    # python libraries, not just python embedding programs.
    if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
        env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
        env.LIB_PYEXT = env.LIB_PYEMBED

    conf.to_log(
        "Include path for Python extensions (found via distutils module): %r\n"
        % (dct['INCLUDEPY'], ))
    env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
    env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]

    # Code using the Python API needs to be compiled with -fno-strict-aliasing
    if env.CC_NAME == 'gcc':
        env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env.CXX_NAME == 'gcc':
        env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

    if env.CC_NAME == "msvc":
        from distutils.msvccompiler import MSVCCompiler
        dist_compiler = MSVCCompiler()
        dist_compiler.initialize()
        env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

    # See if it compiles
    conf.check(
        header_name='Python.h',
        define_name='HAVE_PYTHON_H',
        uselib='PYEMBED',
        fragment=FRAG,
        errmsg=
        'Distutils not installed? Broken python installation? Get python-config now!'
    )
def get_cc_version(conf, cc, gcc=False, icc=False):
    """
    Run the preprocessor to determine the compiler version

    The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*

    :param cc: compiler command line (list of strings)
    :param gcc: expect a gcc-compatible compiler
    :param icc: expect the Intel compiler
    :return: dict of predefined macro name -> value
    """
    cmd = cc + ['-dM', '-E', '-']
    env = conf.env.env or None
    try:
        p = Utils.subprocess.Popen(cmd,
                                   stdin=Utils.subprocess.PIPE,
                                   stdout=Utils.subprocess.PIPE,
                                   stderr=Utils.subprocess.PIPE,
                                   env=env)
        p.stdin.write('\n'.encode())
        out = p.communicate()[0]
    except Exception:
        conf.fatal('Could not determine the compiler version %r' % cmd)
    if not isinstance(out, str):
        # sys.stdout.encoding can be None when output is piped
        out = out.decode(sys.stdout.encoding or 'iso8859-1')

    # Verify the macro dump matches the expected compiler family
    if gcc:
        if out.find('__INTEL_COMPILER') >= 0:
            conf.fatal('The intel compiler pretends to be gcc')
        if out.find('__GNUC__') < 0:
            conf.fatal('Could not determine the compiler type')
    if icc and out.find('__INTEL_COMPILER') < 0:
        conf.fatal('Not icc/icpc')

    # Collect '#define KEY VALUE' lines into a dict
    k = {}
    if icc or gcc:
        out = out.splitlines()
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

    def isD(var):
        # True when the macro *var* is defined
        return var in k

    def isT(var):
        # True when the macro *var* is defined and non-zero
        # NOTE(review): not referenced in this function body
        return var in k and k[var] != '0'

    # Some documentation is available at http://predef.sourceforge.net
    # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
    if not conf.env.DEST_OS:
        conf.env.DEST_OS = ''
    for i in MACRO_TO_DESTOS:
        if isD(i):
            conf.env.DEST_OS = MACRO_TO_DESTOS[i]
            break
    else:
        if isD('__APPLE__') and isD('__MACH__'):
            conf.env.DEST_OS = 'darwin'
        elif isD('__unix__'):
            # unix must be tested last as it's a generic fallback
            conf.env.DEST_OS = 'generic'

    if isD('__ELF__'):
        conf.env.DEST_BINFMT = 'elf'
    elif isD('__WINNT__') or isD('__CYGWIN__'):
        conf.env.DEST_BINFMT = 'pe'
        conf.env.LIBDIR = conf.env['PREFIX'] + '/bin'
    elif isD('__APPLE__'):
        conf.env.DEST_BINFMT = 'mac-o'

    if not conf.env.DEST_BINFMT:
        # Infer the binary format from the os name.
        conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

    for i in MACRO_TO_DEST_CPU:
        if isD(i):
            conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
            break

    Logs.debug('ccroot: dest platform: ' + ' '.join(
        [conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))

    if icc:
        ver = k['__INTEL_COMPILER']
        conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
    else:
        if isD('__clang__'):
            conf.env['CC_VERSION'] = (k['__clang_major__'],
                                      k['__clang_minor__'],
                                      k['__clang_patchlevel__'])
        else:
            conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'],
                                      k['__GNUC_PATCHLEVEL__'])
    return k
def check_magnum(conf, *k, **kw):
    """
    Configure-time probe for the Magnum graphics engine.

    Searches well-known system directories (plus ``--magnum_install_dir``)
    for the Magnum headers, libraries, build configuration and any
    requested components, then records the results as waf use-variables
    (INCLUDES_*/LIBPATH_*/LIB_*/DEFINES_*/EXEC_*).

    Keyword arguments:
        required      -- abort configuration on failure (default False)
        components    -- space-separated component names to probe
        corrade       -- uselib store name of a previously configured Corrade
        uselib_store  -- store name for Magnum itself (default 'Magnum')

    Returns 1 on success, None on failure when not required.
    """
    def get_directory(filename, dirs, full=False):
        # Resolve 'filename' inside 'dirs'; return the containing directory
        # (or, with full=True, the path truncated at the last '/').
        res = conf.find_file(filename, dirs)
        if not full:
            return res[:-len(filename) - 1]
        return res[:res.rfind('/')]

    def find_in_string(data, text):
        # Thin wrapper over str.find, kept for readability at call sites.
        return data.find(text)

    def fatal(required, msg):
        # Abort when required, otherwise just print the message in red.
        if required:
            conf.fatal(msg)
        Logs.pprint('RED', msg)

    required = kw.get('required', False)

    # Check compiler version (for gcc); I am being a bit more strong (Magnum could be built with 4.7 but needs adjustment)
    # NOTE(review): CC_VERSION holds strings, so '4' + '8' -> '48'; this digit
    # concatenation only works for single-digit major/minor versions.
    if conf.env.CXX_NAME in ["gcc", "g++"] and int(conf.env['CC_VERSION'][0] + conf.env['CC_VERSION'][1]) < 48:
        fatal(required, 'Magnum cannot be setup with GCC < 4.8!')
        return

    # Standard search locations; extended below for macOS and --magnum_install_dir.
    includes_check = ['/usr/local/include', '/usr/include', '/opt/local/include', '/sw/include']
    libs_check = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib', '/usr/lib/x86_64-linux-gnu/', '/usr/lib64']
    bins_check = ['/usr/bin', '/usr/local/bin', '/opt/local/bin', '/sw/bin', '/bin']

    if conf.env['DEST_OS'] == 'darwin':
        includes_check = includes_check + ['/System/Library/Frameworks/OpenGL.framework/Headers', '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/OpenGL.framework/Versions/A/Headers/']
        libs_check = libs_check + ['/System/Library/Frameworks/OpenGL.framework/Libraries', '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/OpenGL.framework/Versions/A/Libraries/']

    # OSX/Mac uses .dylib and GNU/Linux .so
    suffix = 'dylib' if conf.env['DEST_OS'] == 'darwin' else 'so'

    # Magnum depends on several libraries and we cannot make the assumption that
    # someone installed all of them in the same directory!
    # to-do: a better? solution would be to create different scripts for each dependency
    if conf.options.magnum_install_dir:
        # User-supplied prefix takes precedence over the system locations.
        includes_check = [conf.options.magnum_install_dir + '/include'] + includes_check
        libs_check = [conf.options.magnum_install_dir + '/lib'] + libs_check
        bins_check = [conf.options.magnum_install_dir + '/bin'] + bins_check

    requested_components = kw.get('components', None)
    if requested_components == None:
        requested_components = []
    else:
        requested_components = requested_components.split()

    corrade_var = kw.get('corrade', 'Corrade')
    # Magnum requires Corrade
    if not conf.env['INCLUDES_%s' % corrade_var]:
        fatal(required, 'Magnum requires Corrade! Cannot proceed!')
        return
    if not conf.env['INCLUDES_%s_Utility' % corrade_var]:
        fatal(required, 'Magnum requires Corrade Utility library! Cannot proceed!')
        return
    if not conf.env['INCLUDES_%s_PluginManager' % corrade_var]:
        fatal(required, 'Magnum requires Corrade PluginManager library! Cannot proceed!')
        return

    # Accumulators for the core Magnum library.
    magnum_includes = []
    magnum_libpaths = []
    magnum_libs = []
    magnum_bins = []

    magnum_var = kw.get('uselib_store', 'Magnum')
    # to-do: enforce C++11/14
    magnum_possible_configs = ["BUILD_DEPRECATED", "BUILD_STATIC", "BUILD_MULTITHREADED", "TARGET_GL", "TARGET_GLES", "TARGET_GLES2", "TARGET_GLES3", "TARGET_DESKTOP_GLES", "TARGET_WEBGL", "TARGET_HEADLESS"]
    magnum_config = []

    # Component metadata (names, kinds, dependency graph) comes from a sibling helper.
    magnum_components, magnum_component_type, magnum_dependencies = get_magnum_components()
    # Per-component accumulators, keyed by component name.
    magnum_component_includes = {}
    magnum_component_libpaths = {}
    magnum_component_libs = {}
    magnum_component_bins = {}

    try:
        # to-do: support both debug and release builds
        conf.start_msg('Checking for Magnum includes')
        magnum_include_dir = get_directory('Magnum/Magnum.h', includes_check)
        magnum_includes = magnum_includes + [magnum_include_dir, magnum_include_dir + '/MagnumExternal/OpenGL']
        conf.end_msg(magnum_include_dir)

        conf.start_msg('Checking for Magnum lib')
        magnum_lib_path = get_directory('libMagnum.' + suffix, libs_check)
        magnum_libpaths = magnum_libpaths + [magnum_lib_path]
        magnum_libs = magnum_libs + ['Magnum']
        conf.end_msg(['Magnum'])

        # Read the compile-time configuration Magnum was built with.
        conf.start_msg('Getting Magnum configuration')
        config_file = conf.find_file('Magnum/configure.h', includes_check)
        with io.open(config_file, errors='ignore') as f:
            config_content = f.read()
        for config in magnum_possible_configs:
            index = find_in_string(config_content, '#define MAGNUM_' + config)
            if index > -1:
                magnum_config.append(config)
        conf.end_msg(magnum_config)

        if 'TARGET_GL' in magnum_config:
            # to-do: make it work for other platforms; now only for desktop and only for GL
            conf.start_msg('Magnum: Checking for OpenGL includes')
            opengl_files = ['GL/gl.h', 'gl.h']
            gl_not_found = False
            for gl_file in opengl_files:
                try:
                    opengl_include_dir = get_directory(gl_file, includes_check)
                    gl_not_found = False
                    break
                except:
                    gl_not_found = True
            if gl_not_found:
                fatal(required, 'Not found')
                return
            magnum_includes = magnum_includes + [opengl_include_dir]
            conf.end_msg(opengl_include_dir)

            conf.start_msg('Magnum: Checking for OpenGL lib')
            opengl_lib_dir = get_directory('libGL.' + suffix, libs_check)
            magnum_libpaths = magnum_libpaths + [opengl_lib_dir]
            magnum_libs = magnum_libs + ['GL']
            conf.end_msg(['GL'])

            conf.start_msg('Magnum: Checking for MagnumGL lib')
            gl_lib_dir = get_directory('libMagnumGL.' + suffix, libs_check)
            magnum_libpaths = magnum_libpaths + [gl_lib_dir]
            magnum_libs = magnum_libs + ['MagnumGL']
            conf.end_msg(['MagnumGL'])
        else:
            fatal(required, 'At the moment only desktop OpenGL is supported by WAF')
            return

        conf.start_msg('Checking for Magnum components')
        # only check for components that can exist
        requested_components = list(set(requested_components).intersection(magnum_components))
        # add dependencies
        for lib in requested_components:
            requested_components = requested_components + magnum_dependencies[lib]
        # remove duplicates
        requested_components = list(set(requested_components))

        for component in requested_components:
            magnum_component_includes[component] = []
            magnum_component_libpaths[component] = []
            magnum_component_libs[component] = []
            magnum_component_bins[component] = []

            # get component type
            component_type = magnum_component_type[component]
            if component_type == 'lib':
                pat_app = re.compile('.+Application')
                pat_context = re.compile('.+Context')

                # Some components' headers are named after a representative
                # file rather than the component itself.
                component_file = component
                if component == 'MeshTools':
                    component_file = 'CompressIndices'
                if component == 'Primitives':
                    component_file = 'Cube'
                if component == 'TextureTools':
                    component_file = 'Atlas'

                lib_type = suffix
                include_prefix = component
                # Applications
                if re.match(pat_app, component):
                    # to-do: all of them are static?
                    lib_type = 'a'
                    include_prefix = 'Platform'
                include_dir = get_directory('Magnum/' + include_prefix + '/' + component_file + '.h', includes_check)
                lib = 'Magnum' + component
                lib_dir = get_directory('lib' + lib + '.' + lib_type, libs_check)

                magnum_component_includes[component] = magnum_component_includes[component] + [include_dir]
                magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                magnum_component_libs[component].append(lib)

                # Applications
                if re.match(pat_app, component):
                    if component == 'GlfwApplication':
                        # GlfwApplication requires GLFW3
                        # conf.start_msg('Magnum: Checking for GLFW3 includes')
                        glfw_inc = get_directory('GLFW/glfw3.h', includes_check)
                        magnum_component_includes[component] = magnum_component_includes[component] + [glfw_inc]
                        # conf.start_msg('Magnum: Checking for GLFW3 lib')
                        libs_glfw = ['glfw3', 'glfw']
                        glfw_found = False
                        for lib_glfw in libs_glfw:
                            try:
                                lib_dir = get_directory('lib' + lib_glfw + '.' + suffix, libs_check)
                                glfw_found = True
                                magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                                magnum_component_libs[component].append(lib_glfw)
                                break
                            except:
                                glfw_found = False
                        # GlfwApplication needs the libdl.so library
                        try:
                            lib_dir = get_directory('libdl.' + suffix, libs_check)
                            magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                            magnum_component_libs[component].append('dl')
                        except:
                            glfw_found = False
                        if not glfw_found:
                            fatal(required, 'Not found')
                            return
                    elif component == 'GlutApplication':
                        # GlutApplication requires GLUT
                        # conf.start_msg('Magnum: Checking for GLUT includes')
                        glut_inc = get_directory('GL/freeglut.h', includes_check)
                        magnum_component_includes[component] = magnum_component_includes[component] + [glut_inc]
                        # conf.start_msg('Magnum: Checking for GLUT lib')
                        libs_glut = ['glut', 'glut32']
                        glut_found = False
                        for lib_glut in libs_glut:
                            try:
                                lib_dir = get_directory('lib' + lib_glut + '.' + suffix, libs_check)
                                glut_found = True
                                magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                                magnum_component_libs[component].append(lib_glut)
                                break
                            except:
                                glut_found = False
                        if not glut_found:
                            fatal(required, 'Not found')
                            return
                    elif component == 'Sdl2Application':
                        # Sdl2Application requires SDL2
                        conf.check_cfg(path='sdl2-config', args='--cflags --libs', package='', uselib_store='MAGNUM_SDL')
                        magnum_component_includes[component] = magnum_component_includes[component] + conf.env['INCLUDES_MAGNUM_SDL']
                        magnum_component_libpaths[component] = magnum_component_libpaths[component] + conf.env['LIBPATH_MAGNUM_SDL']
                        magnum_component_libs[component] = magnum_component_libs[component] + conf.env['LIB_MAGNUM_SDL']
                        # Sdl2Application needs the libdl.so library
                        try:
                            lib_dir = get_directory('libdl.' + suffix, libs_check)
                            magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                            magnum_component_libs[component].append('dl')
                        except:
                            fatal(required, 'Not found')
                            return
                        # to-do: maybe copy flags?
                    elif component == 'WindowlessEglApplication':
                        # WindowlessEglApplication requires EGL
                        egl_inc = get_directory('EGL/egl.h', includes_check)
                        magnum_component_includes[component] = magnum_component_includes[component] + [egl_inc]
                        libs_egl = ['EGL']
                        egl_found = False
                        for lib_egl in libs_egl:
                            try:
                                lib_dir = get_directory('lib' + lib_egl + '.so', libs_check)
                                egl_found = True
                                magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                                magnum_component_libs[component].append(lib_egl)
                                break
                            except:
                                egl_found = False
                        if not egl_found:
                            fatal(required, 'Not found')
                            return
                    elif component == 'WindowlessGlxApplication' or component == 'GlxApplication':
                        # [Windowless]GlxApplication requires GLX. X11
                        glx_inc = get_directory('GL/glx.h', includes_check)
                        magnum_component_includes[component] = magnum_component_includes[component] + [glx_inc]
                        libs_glx = ['GLX', 'X11']
                        glx_found = False
                        for lib_glx in libs_glx:
                            try:
                                lib_dir = get_directory('lib' + lib_glx + '.so', libs_check)
                                glx_found = True
                                magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                                magnum_component_libs[component].append(lib_glx)
                                # break
                            except:
                                glx_found = False
                        if not glx_found:
                            fatal(required, 'Not found')
                            return
                    elif component not in ['WindowlessCglApplication', 'WindowlessWglApplication']:
                        # to-do: support all other applications
                        msg = 'Component ' + component + ' is not yet supported by WAF'
                        fatal(required, msg)
                        return
                if re.match(pat_context, component) and component not in ['CglContext', 'WglContext']:
                    # to-do: support all other contexts
                    msg = 'Component ' + component + ' is not yet supported by WAF'
                    fatal(required, msg)
                    return
                # Audio lib required OpenAL
                if component == 'Audio':
                    # conf.start_msg('Magnum: Checking for OpenAL includes')
                    includes_audio = ['AL', 'OpenAL']
                    openal_found = False
                    for inc in includes_audio:
                        try:
                            # we need the full include dir
                            incl_audio = get_directory(inc + '/al.h', includes_check, True)
                            openal_found = True
                            magnum_component_includes[component] = magnum_component_includes[component] + [incl_audio]
                            break
                        except:
                            openal_found = False
                    if not openal_found:
                        fatal(required, 'Not found')
                        return
                    # conf.start_msg('Magnum: Checking for OpenAL lib')
                    libs_audio = ['OpenAL', 'al', 'openal', 'OpenAL32']
                    openal_found = False
                    for lib_audio in libs_audio:
                        try:
                            lib_dir = get_directory('lib' + lib_audio + '.' + suffix, libs_check)
                            openal_found = True
                            magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                            magnum_component_libs[component].append(lib_audio)
                            break
                        except:
                            openal_found = False
                    if not openal_found:
                        fatal(required, 'Not found')
                        return
            elif component_type == 'plugin':
                # Plugins are grouped by kind into subdirectories of lib/magnum.
                pat_audio = re.compile('.+AudioImporter$')
                pat_importer = re.compile('.+Importer$')
                pat_font = re.compile('.+Font$')
                pat_img_conv = re.compile('.+ImageConverter$')
                pat_font_conv = re.compile('.+FontConverter$')

                lib_path_suffix = ''
                component_file = component

                if re.match(pat_audio, component):
                    lib_path_suffix = 'audioimporters'
                    component_file = component.replace("AudioImporter", "Importer")
                elif re.match(pat_importer, component):
                    lib_path_suffix = 'importers'
                elif re.match(pat_font, component):
                    lib_path_suffix = 'fonts'
                elif re.match(pat_img_conv, component):
                    lib_path_suffix = 'imageconverters'
                elif re.match(pat_font_conv, component):
                    lib_path_suffix = 'fontconverters'

                if lib_path_suffix != '':
                    lib_path_suffix = lib_path_suffix + '/'

                include_dir = get_directory('MagnumPlugins/' + component + '/' + component_file + '.h', includes_check)
                lib = component
                # we need the full lib_dir in order to be able to link to the plugins
                # or not? because they are loaded dynamically
                # we need to set the libpath for the static plugins only
                lib_dir = get_directory('magnum/' + lib_path_suffix + lib + '.' + suffix, libs_check, True)

                magnum_component_includes[component] = magnum_component_includes[component] + [include_dir]
                # magnum_component_libpaths[component] = magnum_component_libpaths[component] + [lib_dir]
                # magnum_component_libs[component].append(lib)
            elif component_type == 'bin':
                bin_name = 'magnum-' + component
                executable = conf.find_file(bin_name, bins_check)
                magnum_component_bins[component] = magnum_component_bins[component] + [executable]

        conf.end_msg(requested_components)
        # set environmental variables
        conf.env['INCLUDES_%s' % magnum_var] = magnum_includes
        conf.env['LIBPATH_%s' % magnum_var] = magnum_libpaths
        conf.env['LIB_%s' % magnum_var] = magnum_libs
        if conf.env['DEST_OS'] == 'darwin':
            # NOTE(review): this assigns FRAMEWORK_%s_Magnum, the same key as
            # the component section below — it looks like FRAMEWORK_%s was
            # intended here; confirm against users of this variable.
            conf.env['FRAMEWORK_%s_Magnum' % magnum_var] = ['OpenGL', 'Foundation']
        conf.env['EXEC_%s' % magnum_var] = magnum_bins
        # set main Magnum component
        conf.env['INCLUDES_%s_Magnum' % magnum_var] = magnum_includes
        conf.env['LIBPATH_%s_Magnum' % magnum_var] = magnum_libpaths
        conf.env['LIB_%s_Magnum' % magnum_var] = magnum_libs
        if conf.env['DEST_OS'] == 'darwin':
            conf.env['FRAMEWORK_%s_Magnum' % magnum_var] = ['OpenGL', 'Foundation']
        conf.env['EXEC_%s_Magnum' % magnum_var] = magnum_bins

        # Plugin directories
        magnum_plugins_dir = magnum_lib_path + '/magnum'
        magnum_plugins_font_dir = magnum_plugins_dir + '/fonts'
        magnum_plugins_fontconverter_dir = magnum_plugins_dir + '/fontconverters'
        magnum_plugins_imageconverter_dir = magnum_plugins_dir + '/imageconverters'
        magnum_plugins_importer_dir = magnum_plugins_dir + '/importers'
        magnum_plugins_audioimporter_dir = magnum_plugins_dir + '/audioimporters'
        # conf.env['%s_PLUGINS_DIR' % magnum_var] = magnum_plugins_dir
        # conf.env['%s_PLUGINS_FONT_DIR' % magnum_var] = magnum_plugins_font_dir
        # conf.env['%s_PLUGINS_FONTCONVERTER_DIR' % magnum_var] = magnum_plugins_fontconverter_dir
        # conf.env['%s_PLUGINS_IMAGECONVERTER_DIR' % magnum_var] = magnum_plugins_imageconverter_dir
        # conf.env['%s_PLUGINS_IMPORTER_DIR' % magnum_var] = magnum_plugins_importer_dir
        # conf.env['%s_PLUGINS_AUDIOIMPORTER_DIR' % magnum_var] = magnum_plugins_audioimporter_dir

        # set C++ defines
        conf.env['DEFINES_%s' % magnum_var] = []
        conf.env['DEFINES_%s' % magnum_var].append('%s_PLUGINS_DIR="%s"' % (magnum_var.upper(), magnum_plugins_dir))
        conf.env['DEFINES_%s' % magnum_var].append('%s_PLUGINS_FONT_DIR="%s"' % (magnum_var.upper(), magnum_plugins_font_dir))
        conf.env['DEFINES_%s' % magnum_var].append('%s_PLUGINS_FONTCONVERTER_DIR="%s"' % (magnum_var.upper(), magnum_plugins_fontconverter_dir))
        conf.env['DEFINES_%s' % magnum_var].append('%s_PLUGINS_IMAGECONVERTER_DIR="%s"' % (magnum_var.upper(), magnum_plugins_imageconverter_dir))
        conf.env['DEFINES_%s' % magnum_var].append('%s_PLUGINS_IMPORTER_DIR="%s"' % (magnum_var.upper(), magnum_plugins_importer_dir))
        conf.env['DEFINES_%s' % magnum_var].append('%s_PLUGINS_AUDIOIMPORTER_DIR="%s"' % (magnum_var.upper(), magnum_plugins_audioimporter_dir))
        for config in magnum_config:
            conf.env['DEFINES_%s' % magnum_var].append(config)
        if conf.env['DEST_OS'] == 'darwin':
            conf.env['DEFINES_%s' % magnum_var].append('%s_MAC_OSX' % magnum_var.upper())
        # copy C++ defines to Magnum::Magnum component; we want them to be available on all Magnum builds
        conf.env['DEFINES_%s_Magnum' % magnum_var] = copy.deepcopy(conf.env['DEFINES_%s' % magnum_var])
        # set component libs
        for component in requested_components:
            conf.env['INCLUDES_%s_%s' % (magnum_var, component)] = magnum_component_includes[component]
            conf.env['LIBPATH_%s_%s' % (magnum_var, component)] = magnum_component_libpaths[component]
            conf.env['LIB_%s_%s' % (magnum_var, component)] = magnum_component_libs[component]
            conf.env['EXEC_%s_%s' % (magnum_var, component)] = magnum_component_bins[component]
    except:
        # Any probe failure (conf.find_file raises) lands here.
        if required:
            conf.fatal('Not found')
        conf.end_msg('Not found', 'RED')
        return
    return 1
def check_python_version(conf, minver=None):
    """
    Check if the python interpreter is found matching a given minimum version.
    minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

    If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4')
    of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
    are defined, pointing to the site-packages directories appropriate for
    this python version, where modules/packages/extensions should be
    installed.

    :param minver: minimum version
    :type minver: tuple of int
    """
    assert minver is None or isinstance(minver, tuple)
    pybin = conf.env.PYTHON
    if not pybin:
        conf.fatal('could not find the python executable')

    # Get python version string
    cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
    Logs.debug('python: Running python command %r', cmd)
    lines = conf.cmd_and_log(cmd).split()
    assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
    # (major, minor, micro, releaselevel, serial) — releaselevel stays a string.
    pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

    # Compare python version with the minimum required
    result = (minver is None) or (pyver_tuple >= minver)

    if result:
        # define useful environment variables
        pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
        conf.env.PYTHON_VERSION = pyver

        if 'PYTHONDIR' in conf.env:
            # Check if --pythondir was specified
            pydir = conf.env.PYTHONDIR
        elif 'PYTHONDIR' in conf.environ:
            # Check environment for PYTHONDIR
            pydir = conf.environ['PYTHONDIR']
        else:
            # Finally, try to guess
            if Utils.is_win32:
                (python_LIBDEST, pydir) = conf.get_python_variables(["get_config_var('LIBDEST') or ''", "get_python_lib(standard_lib=0) or ''"])
            else:
                python_LIBDEST = None
                (pydir, ) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
            # NOTE(review): python_LIBDEST is computed below but never read
            # afterwards in this function — confirm it is dead before removing.
            if python_LIBDEST is None:
                if conf.env.LIBDIR:
                    python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
                else:
                    python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)

        if 'PYTHONARCHDIR' in conf.env:
            # Check if --pythonarchdir was specified
            pyarchdir = conf.env.PYTHONARCHDIR
        elif 'PYTHONARCHDIR' in conf.environ:
            # Check environment for PYTHONDIR
            pyarchdir = conf.environ['PYTHONARCHDIR']
        else:
            # Finally, try to guess
            (pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
            if not pyarchdir:
                pyarchdir = pydir

        if hasattr(conf, 'define'):
            # conf.define is added by the C tool, so may not exist
            conf.define('PYTHONDIR', pydir)
            conf.define('PYTHONARCHDIR', pyarchdir)

        conf.env.PYTHONDIR = pydir
        conf.env.PYTHONARCHDIR = pyarchdir

    # Feedback
    pyver_full = '.'.join(map(str, pyver_tuple[:3]))
    if minver is None:
        conf.msg('Checking for python version', pyver_full)
    else:
        minver_str = '.'.join(map(str, minver))
        conf.msg('Checking for python version >= %s' % (minver_str, ), pyver_full, color=result and 'GREEN' or 'YELLOW')

    if not result:
        conf.fatal('The python version is too old, expecting %r' % (minver, ))
def configure_3rd_party_with_autotools(conf,
                                       archive_name,
                                       without_configure=False,
                                       without_make_install=False,
                                       conf_args="",
                                       cflags_args="",
                                       make_args=None):
    """
    Extract, configure, build and install an autotools-based 3rd-party
    archive into the build directory's '3rd' subdirectory.

    :param archive_name: file name of the archive under <srcdir>/3rd
    :param without_configure: skip the ./configure stage
    :param without_make_install: skip the 'make install' stage
    :param conf_args: extra arguments appended to ./configure
    :param cflags_args: extra CFLAGS exported during the build
    :param make_args: extra arguments appended to the make command
    :return: (srcdir, incdir, libdir) absolute paths
    :raises: conf.fatal on any failed stage
    """
    # Fix: 'make_args=[]' was a mutable default argument; use None sentinel.
    if make_args is None:
        make_args = []
    name = archive_name_without_suffix(archive_name)
    Logs.pprint("BLUE", "Starting installation of %s" % name)
    conf.to_log((" Starting installation of %s " % name).center(80, "="))

    archive_path = os.path.join(conf.path.abspath(), "3rd", archive_name)
    destnode = conf.bldnode.make_node("3rd")
    # Install everything in build directory, in '3rd' subdirectory (the 'lib' and
    # 'include' directory can be copied in conf.env.PREFIX when ./waf install is
    # called, if needed)
    incdir = destnode.find_or_declare("include").abspath()
    libdir = destnode.find_or_declare("lib").abspath()
    srcdir = conf.extract_archive(archive_path, name, destnode)

    conf.find_program("make")

    # Apply patches
    conf.apply_all_relevant_patches(name)

    # always build static library, even if ibex is built as a shared library.
    conf_args += " --enable-static --disable-shared"

    # Save the caller's flags so they can be restored afterwards; mutating
    # os.environ would otherwise leak into subsequent configure steps.
    cflags = os.getenv("CFLAGS", "")
    cxxflags = os.getenv("CXXFLAGS", "")
    if conf.env.ENABLE_SHARED:
        # Fix: a separator was missing between cflags_args and the first
        # shared-lib flag, producing strings like '-O2-fPIC'.
        os.environ["CFLAGS"] = cflags + " " + cflags_args + " " + " ".join(conf.env.CFLAGS_cshlib)
        os.environ["CXXFLAGS"] = cxxflags + " " + " ".join(conf.env.CXXFLAGS_cxxshlib)
    else:
        os.environ["CFLAGS"] = cflags + " " + cflags_args
        os.environ["CXXFLAGS"] = cxxflags + " "

    if Utils.is_win32:
        # On Windows, run configure through sh with an msys-style prefix path.
        conf_args += " --prefix=%s" % convert_path_win2msys(destnode.abspath())
        conf.find_program("sh")
        cmd_conf = [conf.env.SH, "-c", "./configure %s" % conf_args]
        cmd_make = conf.env.MAKE + make_args
    else:
        conf_args += " --prefix=%s" % destnode.abspath()
        cmd_conf = "./configure %s" % (conf_args)
        cmd_make = conf.env.MAKE + ["-j%d" % conf.options.jobs] + make_args
    cmd_install = conf.env.MAKE + ["install"]

    # Build the ordered list of (command, stage-name) pairs to run.
    stages = []
    if not without_configure:
        stages += [(cmd_conf, "configure")]
    stages += [(cmd_make, "make")]
    if not without_make_install:
        stages += [(cmd_install, "install")]
    for cmd, stage in stages:
        conf.start_msg("Calling %s" % stage)
        try:
            conf.cmd_and_log(cmd, cwd=srcdir, env=os.environ)
            conf.end_msg("done")
        except Errors.WafError as e:
            conf.end_msg("failed", color="RED")
            print(e)
            conf.fatal("failed to %s %s (%s)" % (stage, name, cmd))
    conf.to_log((" Installation of %s: done " % name).center(80, "="))

    # Fix: restore the caller's flags unconditionally (they were previously
    # only restored when ENABLE_SHARED, leaking the mutated values).
    os.environ["CFLAGS"] = cflags
    os.environ["CXXFLAGS"] = cxxflags
    return srcdir, incdir, libdir
def check_vrep_ros(conf, **kw):
    """
    Locate the V-REP ROS plugin (headers and libv_repExtRos.so).

    Search order: the --vrep_ros option if given, otherwise the 'devel'
    tree next to the first entry of ROS_PACKAGE_PATH. On success defines
    INCLUDES_VREP_ROS, LIBPATH_VREP_ROS, LIB_VREP_ROS and DEFINES_VREP_ROS.

    Keyword arguments:
        required -- abort configuration instead of warning (default False)
    """
    # Idiom fix: the previous "'required' in kw and kw.get('required', False)"
    # was redundant — kw.get already handles the missing-key case.
    required = kw.get('required', False)
    path = None
    if conf.options.vrep_ros:
        includes_check = [conf.options.vrep_ros + '/include']
        libs_check = [conf.options.vrep_ros + '/lib']
    else:
        if 'ROS_PACKAGE_PATH' not in os.environ:
            conf.start_msg('Checking for V-REP ROS plugin')
            if required:
                conf.fatal('ROS_PACKAGE_PATH not in environmental variables, use --vrep_ros=/path/to/vrep_ros')
            else:
                conf.end_msg('ROS_PACKAGE_PATH not in environmental variables, use --vrep_ros=/path/to/vrep_ros', 'YELLOW')
            return
        # Use the devel space adjacent to the first ROS package path entry.
        paths = re.split(":", os.environ['ROS_PACKAGE_PATH'])
        path = os.path.join(paths[0], '../devel')
        includes_check = [path + '/include']
        libs_check = [path + '/lib']

    def _not_found_msg():
        # Build the human-readable failure message for the active search mode.
        if conf.options.vrep_ros:
            return 'not found in %s' % conf.options.vrep_ros
        if 'ROS_PACKAGE_PATH' in os.environ:
            return 'not found in %s (from ROS_PACKAGE_PATH environmental variable)' % path
        return 'not found, use --vrep_ros=/path/to/vrep_ros'

    conf.start_msg('Checking for V-REP ROS plugin includes')
    try:
        res = conf.find_file('vrep_common/VrepInfo.h', includes_check)
    except Exception:
        res = False
    if res:
        conf.end_msg('ok')
    else:
        if required:
            conf.fatal(_not_found_msg())
        else:
            conf.end_msg(_not_found_msg(), 'YELLOW')
        return

    conf.start_msg('Checking for V-REP ROS plugin libs')
    try:
        res = conf.find_file('libv_repExtRos.so', libs_check)
    except Exception:
        res = False
    if res:
        conf.env.INCLUDES_VREP_ROS = [os.path.expanduser(include) for include in includes_check]
        conf.env.LIBPATH_VREP_ROS = [os.path.expanduser(lib) for lib in libs_check]
        conf.env.LIB_VREP_ROS = ['v_repExtRos']
        conf.env.DEFINES_VREP_ROS = ['USE_VREP_ROS']
        conf.end_msg('ok')
    else:
        if required:
            conf.fatal(_not_found_msg())
        else:
            conf.end_msg(_not_found_msg(), 'YELLOW')
def check_eigen(conf):
    """
    Locate the Eigen headers and, optionally, LAPACKE/BLAS backends.

    Sets INCLUDES_EIGEN; when --lapacke_blas is given and Eigen >= 3.3 is
    detected, also sets LIBPATH_EIGEN, LIB_EIGEN and DEFINES_EIGEN
    (EIGEN_USE_LAPACKE / EIGEN_USE_BLAS).

    :return: 1 always (aborts configuration if Eigen is not found)
    """
    conf.start_msg('Checking for Eigen')
    includes_check = ['/usr/include/eigen3', '/usr/local/include/eigen3', '/usr/include', '/usr/local/include']
    if conf.options.eigen:
        includes_check = [conf.options.eigen]
    try:
        res = conf.find_file('Eigen/Core', includes_check)
        incl = res[:-len('Eigen/Core') - 1]
        conf.env.INCLUDES_EIGEN = [incl]
        conf.end_msg(incl)
        if conf.options.lapacke_blas:
            conf.start_msg('Checking for LAPACKE/BLAS (optional)')
            # Fix: the previous implementation shelled out to 'cat | grep' and
            # then parsed the version with int(out.strip()[-1]), which keeps
            # only the LAST DIGIT of each version number (wrong for any
            # multi-digit version) and depends on external binaries. Read and
            # parse the macros header directly instead, matching the sibling
            # check_eigen implementation.
            world_version = -1
            major_version = -1
            with open(incl + "/Eigen/src/Core/util/Macros.h") as macros:
                for line in macros:
                    if '#define EIGEN_WORLD_VERSION' in line:
                        world_version = int(line.split()[2])
                    elif '#define EIGEN_MAJOR_VERSION' in line:
                        major_version = int(line.split()[2])
                    if world_version >= 0 and major_version >= 0:
                        break
            if world_version == 3 and major_version >= 3:
                # Check for lapacke and blas
                extra_libs = ['/usr/lib', '/usr/local/lib', '/usr/local/opt/openblas/lib']
                blas_libs = ['blas', 'openblas']
                blas_lib = ''
                blas_path = ''
                for b in blas_libs:
                    try:
                        if conf.env['DEST_OS'] == 'darwin':
                            res = conf.find_file('lib' + b + '.dylib', extra_libs)
                            blas_path = res[:-len('lib' + b + '.dylib') - 1]
                        else:
                            res = conf.find_file('lib' + b + '.so', extra_libs)
                            blas_path = res[:-len('lib' + b + '.so') - 1]
                    except Exception:
                        continue
                    blas_lib = b
                    break
                lapacke = False
                lapacke_path = ''
                try:
                    if conf.env['DEST_OS'] == 'darwin':
                        res = conf.find_file('liblapacke.dylib', extra_libs)
                        lapacke_path = res[:-len('liblapacke.dylib') - 1]
                    else:
                        res = conf.find_file('liblapacke.so', extra_libs)
                        lapacke_path = res[:-len('liblapacke.so') - 1]
                    lapacke = True
                except Exception:
                    lapacke = False
                if lapacke or blas_lib != '':
                    conf.env.DEFINES_EIGEN = []
                    if lapacke_path != blas_path:
                        conf.env.LIBPATH_EIGEN = [lapacke_path, blas_path]
                    else:
                        conf.env.LIBPATH_EIGEN = [lapacke_path]
                    conf.env.LIB_EIGEN = []
                    conf.end_msg('LAPACKE: \'%s\', BLAS: \'%s\'' % (lapacke_path, blas_path))
                elif lapacke:
                    conf.end_msg('Found only LAPACKE: %s' % lapacke_path, 'YELLOW')
                elif blas_lib != '':
                    conf.end_msg('Found only BLAS: %s' % blas_path, 'YELLOW')
                else:
                    conf.end_msg('Not found in %s' % str(extra_libs), 'RED')
                # Enable whichever backend(s) were found.
                if lapacke:
                    conf.env.DEFINES_EIGEN.append('EIGEN_USE_LAPACKE')
                    conf.env.LIB_EIGEN.append('lapacke')
                if blas_lib != '':
                    conf.env.DEFINES_EIGEN.append('EIGEN_USE_BLAS')
                    conf.env.LIB_EIGEN.append(blas_lib)
            else:
                conf.end_msg('LAPACKE/BLAS can be used only with Eigen>=3.3', 'RED')
    except Exception:
        conf.fatal('Not found in %s' % str(includes_check))
    return 1
def check_eigen(conf, *k, **kw):
    """
    Locate the Eigen headers and, optionally, LAPACKE/BLAS backends.

    Sets INCLUDES_EIGEN; when --lapacke_blas is given and Eigen >= 3.3 is
    detected, also sets LIBPATH_EIGEN, LIB_EIGEN and DEFINES_EIGEN
    (EIGEN_USE_LAPACKE / EIGEN_USE_BLAS).

    Keyword arguments:
        required -- abort configuration if Eigen is missing (default False)

    :return: 1 always
    """
    def get_directory(filename, dirs):
        # Resolve 'filename' inside 'dirs' and return its containing directory.
        res = conf.find_file(filename, dirs)
        return res[:-len(filename) - 1]

    includes_check = ['/usr/include/eigen3', '/usr/local/include/eigen3', '/usr/include', '/usr/local/include']
    required = kw.get('required', False)

    # OSX/Mac uses .dylib and GNU/Linux .so
    suffix = 'dylib' if conf.env['DEST_OS'] == 'darwin' else 'so'

    if conf.options.eigen:
        # An explicit --eigen path replaces the default search locations.
        includes_check = [conf.options.eigen]

    try:
        conf.start_msg('Checking for Eigen')
        incl = get_directory('Eigen/Core', includes_check)
        conf.env.INCLUDES_EIGEN = [incl]
        conf.end_msg(incl)
        if conf.options.lapacke_blas:
            conf.start_msg('Checking for LAPACKE/BLAS (optional)')
            world_version = -1
            major_version = -1
            minor_version = -1

            # Parse the version macros straight out of Eigen's Macros.h.
            config_file = conf.find_file('Eigen/src/Core/util/Macros.h', includes_check)
            with open(config_file) as f:
                config_content = f.readlines()
            for line in config_content:
                world = line.find('#define EIGEN_WORLD_VERSION')
                major = line.find('#define EIGEN_MAJOR_VERSION')
                minor = line.find('#define EIGEN_MINOR_VERSION')
                if world > -1:
                    world_version = int(line.split(' ')[-1].strip())
                if major > -1:
                    major_version = int(line.split(' ')[-1].strip())
                if minor > -1:
                    minor_version = int(line.split(' ')[-1].strip())
                # NOTE(review): the early-exit requires all three components
                # to be > 0, so a legitimate 0 component (e.g. x.0.y) just
                # scans the whole file — harmless but worth confirming.
                if world_version > 0 and major_version > 0 and minor_version > 0:
                    break

            if world_version == 3 and major_version >= 3:
                # Check for lapacke and blas
                extra_libs = ['/usr/lib', '/usr/local/lib', '/usr/local/opt/openblas/lib']
                blas_libs = ['blas', 'openblas']
                blas_lib = ''
                blas_path = ''
                for b in blas_libs:
                    try:
                        blas_path = get_directory('lib' + b + '.' + suffix, extra_libs)
                    except:
                        continue
                    blas_lib = b
                    break

                lapacke = False
                lapacke_path = ''
                try:
                    lapacke_path = get_directory('liblapacke.' + suffix, extra_libs)
                    lapacke = True
                except:
                    lapacke = False

                if lapacke or blas_lib != '':
                    conf.env.DEFINES_EIGEN = []
                    if lapacke_path != blas_path:
                        conf.env.LIBPATH_EIGEN = [lapacke_path, blas_path]
                    else:
                        conf.env.LIBPATH_EIGEN = [lapacke_path]
                    conf.env.LIB_EIGEN = []
                    conf.end_msg('LAPACKE: \'%s\', BLAS: \'%s\'' % (lapacke_path, blas_path))
                # NOTE(review): the two elif branches below are unreachable —
                # if either 'lapacke' or a blas lib was found, the first branch
                # already ran. Confirm intent before cleaning up.
                elif lapacke:
                    conf.end_msg('Found only LAPACKE: %s' % lapacke_path, 'YELLOW')
                elif blas_lib != '':
                    conf.end_msg('Found only BLAS: %s' % blas_path, 'YELLOW')
                else:
                    conf.end_msg('Not found in %s' % str(extra_libs), 'RED')
                if lapacke:
                    conf.env.DEFINES_EIGEN.append('EIGEN_USE_LAPACKE')
                    conf.env.LIB_EIGEN.append('lapacke')
                if blas_lib != '':
                    conf.env.DEFINES_EIGEN.append('EIGEN_USE_BLAS')
                    conf.env.LIB_EIGEN.append(blas_lib)
            else:
                conf.end_msg('Found Eigen version %s: LAPACKE/BLAS can be used only with Eigen>=3.3' % (str(world_version) + '.' + str(major_version) + '.' + str(minor_version)), 'RED')
    except:
        # Any probe failure (conf.find_file raises) lands here.
        if required:
            conf.fatal('Not found in %s' % str(includes_check))
        conf.end_msg('Not found in %s' % str(includes_check), 'RED')
    return 1
def cry_error(conf, msg):
    """Abort the configuration with *msg*, prefixed with 'error:'."""
    text = "error: %s" % msg
    conf.fatal(text)
def cry_file_error(conf, msg, filePath, lineNum=0):
    """
    Abort the configuration with an MSVC-style ``path(line): error: msg``
    message. *filePath* may be a waf Node or a string; relative string
    paths are resolved against the current configuration path.
    """
    path_str = filePath
    if isinstance(path_str, Node.Node):
        path_str = path_str.abspath()
    if not os.path.isabs(path_str):
        path_str = conf.path.make_node(path_str).abspath()
    conf.fatal('%s(%s): error: %s' % (path_str, lineNum, msg))
if conf.env.env: env=conf.env.env else: env=dict(os.environ) env['LANG']='C' input=stdin and'\n'or None try: out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input) except Errors.WafError ,e: if not(hasattr(e,'stderr')and hasattr(e,'stdout')): raise e else: out=e.stdout err=e.stderr except Exception: conf.fatal('could not determine the compiler version %r'%cmd) return(out,err) ROUTINES_CODE="""\ subroutine foobar() return end subroutine foo_bar() return end """ MAIN_CODE=""" void %(dummy_func_nounder)s(void); void %(dummy_func_under)s(void); int %(main_func_name)s() { %(dummy_func_nounder)s(); %(dummy_func_under)s();
def apply_patch(conf, patch_abspath):
    """
    Apply one patch file to the build's '3rd' directory tree.

    Aborts the configuration if the patch does not apply cleanly.
    """
    conf.msg("Applying patch", os.path.basename(patch_abspath))
    patch_set = patch.fromfile(patch_abspath)
    target_root = conf.bldnode.make_node("3rd").abspath()
    applied = patch_set.apply(root=target_root)
    if not applied:
        conf.fatal("Cannot apply patch %s" % patch_abspath)
def find_icpc(conf):
    """
    Find the Intel C++ compiler (icpc), record its version and mark the
    toolchain name as 'icc' in the environment.
    """
    # icpc is not available under Cygwin, so refuse early.
    if sys.platform == 'cygwin':
        conf.fatal('The Intel compiler does not work on Cygwin')
    compiler = conf.find_program('icpc', var='CXX')
    conf.get_cc_version(compiler, icc=True)
    conf.env.CXX_NAME = 'icc'