def do_install(self, src, tgt, lbl, **kw):
    """
    Copy ``src`` to ``tgt``, skipping the copy when the target already looks
    up to date (unless ``--force`` is given).

    :param src: absolute path of the file to copy
    :param tgt: absolute path of the destination
    :param lbl: label displayed in the log messages
    :return: False when the copy was skipped
    :raises Errors.WafError: when the copy fails
    """
    if not Options.options.force:
        # compare timestamps/sizes to decide whether the copy can be skipped
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            # 2-second slack for filesystems with coarse mtime resolution
            if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.generator.bld.progress_bar:
                    Logs.info('- install %s (from %s)', tgt, lbl)
                return False
    if not self.generator.bld.progress_bar:
        Logs.info('+ install %s (from %s)', tgt, lbl)
    # make an existing read-only target writable before replacing it (best effort)
    try:
        os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
    except EnvironmentError:
        pass
    # remove the target first so the copy is not affected by a stale file
    try:
        os.remove(tgt)
    except OSError:
        pass
    try:
        self.copy_fun(src, tgt)
    except EnvironmentError as e:
        # give a more precise diagnostic before failing the install
        if not os.path.exists(src):
            Logs.error('File %r does not exist', src)
        elif not os.path.isfile(src):
            Logs.error('Input %r is not a file', src)
        raise Errors.WafError('Could not install the file %r' % tgt, e)
def check_err_features(self):
	"""Flag common mistakes in the 'features' attribute of a task generator."""
	feats=self.to_list(self.features)
	# 'shlib' alone is ambiguous: a language prefix is required
	if'shlib'in feats:
		Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
	for lang in('c','cxx','d','fc'):
		if lang in feats or not feats:
			continue
		# e.g. 'cxxprogram' in features but 'cxx' itself is missing
		suffixed=[lang+kind for kind in('program','shlib','stlib')]
		if feats[0]in suffixed:
			Logs.error('%r features is probably missing %r'%(self,lang))
def summary(bld):
    """
    Show the status of the Google Test runs stored in ``bld.utest_results``
    and raise :py:class:`waflib.Errors.WafError` if any test failed.

    :param bld: build context holding the optional ``utest_results`` list
    """
    lst = getattr(bld, 'utest_results', [])
    if not lst:
        return
    Logs.pprint('CYAN', '[test summary]')
    # compile once instead of once per failure message
    loc_re = re.compile(r'^(.*):([0-9]+)$')
    nfails = 0
    for (f, code, out, err, result) in lst:
        fail = int(result.attrib['failures'])
        if fail > 0:
            nfails += fail
            for failure in result.iter('failure'):
                message = failure.attrib['message']
                message_body = '\n'.join(message.split('\n')[1:])
                message = message.split('\n')[0]
                m = loc_re.match(message)
                if not m:
                    # the first line does not carry a "file:line" prefix;
                    # previously this crashed with AttributeError on None
                    Logs.error('error: {}'.format(message_body or message))
                    continue
                body = m.group(1)
                num = int(m.group(2))
                Logs.error('{}({}): error: {}'.format(body, num, message_body))
    if nfails > 0:
        raise Errors.WafError('test failed')
def configure(conf):
    """
    Unpack the bundled gmock archive and report the outcome via ``conf.msg``.
    Unpacking errors are logged but deliberately do not abort configuration.
    """
    try:
        extract_this(GMOCK_UNPACK_DIR)
        conf.msg('Unpacking gmock', 'yes')
    except Exception:
        # was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; keep the best-effort behavior for real errors only
        conf.msg('Unpacking gmock', 'no')
        Logs.error(sys.exc_info()[1])
def run(self):
    """
    Run the TeX build: one initial pass, then the bibliography/index helpers,
    then up to 10 additional passes until the .aux checksums stop changing.
    """
    env = self.env
    if not env['PROMPT_LATEX']:
        # non-interactive runs must not stop at TeX error prompts
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')
    # run the TeX tools from the build directory of the main source file
    self.cwd = self.inputs[0].parent.get_bld().abspath()
    Logs.info('first pass on %s' % self.__class__.__name__)
    cur_hash = self.hash_aux_nodes()
    self.call_latex()
    # refresh the aux state produced by the first pass
    self.hash_aux_nodes()
    self.bibtopic()
    self.bibfile()
    self.bibunits()
    self.makeindex()
    self.makeglossaries()
    # re-run latex until the cross-references settle (bounded to 10 iterations)
    for i in range(10):
        prev_hash = cur_hash
        cur_hash = self.hash_aux_nodes()
        if not cur_hash:
            Logs.error('No aux.h to process')
        if cur_hash and cur_hash == prev_hash:
            # aux files unchanged: output is stable, stop iterating
            break
        Logs.info('calling %s' % self.__class__.__name__)
        self.call_latex()
def do_install(self, src, tgt, **kw):
    """
    Copy ``src`` to ``tgt``, skipping the copy when the target already looks
    up to date and ``--force`` was not given.

    :return: False when the copy was skipped
    :raises Errors.WafError: when the destination is invalid or the copy fails
    """
    d, _ = os.path.split(tgt)
    if not d:
        raise Errors.WafError('Invalid installation given %r->%r' % (src, tgt))
    Utils.check_dir(d)
    # label shown to the user: source path relative to the project root
    srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
    if not Options.options.force:
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            # 2-second tolerance for filesystems with coarse timestamps
            if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.progress_bar:
                    Logs.info('- install %s (from %s)' % (tgt, srclbl))
                return False
    if not self.progress_bar:
        Logs.info('+ install %s (from %s)' % (tgt, srclbl))
    # make an existing read-only target writable before replacing it (best effort)
    try:
        os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
    except EnvironmentError:
        pass
    try:
        os.remove(tgt)
    except OSError:
        pass
    try:
        self.copy_fun(src, tgt, **kw)
    except IOError:
        # distinguish "source missing" from other copy failures
        try:
            os.stat(src)
        except EnvironmentError:
            Logs.error('File %r does not exist' % src)
        raise Errors.WafError('Could not install the file %r' % tgt)
def check_err_features(self):
    """Warn about typical mistakes in a task generator's feature list."""
    features = self.to_list(self.features)
    if "shlib" in features:
        Logs.error("feature shlib -> cshlib, dshlib or cxxshlib")
    for language in ("c", "cxx", "d", "fc"):
        if language not in features and features:
            # a typed feature like 'cxxprogram' usually needs the bare language too
            candidates = [language + suffix for suffix in ("program", "shlib", "stlib")]
            if features[0] in candidates:
                Logs.error("%r features is probably missing %r" % (self, language))
def run(self):
    """Execute the Stata do-script, then inspect and erase its log file."""
    run_do_script_base.run(self)
    return_code, log_tail = self.check_erase_log_file()
    if not return_code:
        return return_code
    # show the tail of the Stata log so the user sees the actual error
    Logs.error(
        """Running Stata on %s failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n"""
        % (self.inputs[0].nice_path(), return_code, self.env.LOGFILEPATH, log_tail))
    return return_code
def load_envs(self):
    """
    The configuration command creates files of the form ``build/c4che/NAMEcache.py``.
    This method creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each
    ``NAME`` by reading those files. The config sets are then stored in the dict
    :py:attr:`waflib.Build.BuildContext.allenvs`.
    """
    try:
        lst = Utils.listdir(self.cache_dir)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # no cache directory at all: the project was never configured
            raise Errors.WafError('The project was not configured: run "waf configure" first!')
        else:
            raise
    if not lst:
        raise Errors.WafError('The cache directory is empty: reconfigure the project')
    for fname in lst:
        if fname.endswith(CACHE_SUFFIX):
            env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, fname))
            # the variant name is the file name with the cache suffix stripped
            name = fname[:-len(CACHE_SUFFIX)]
            self.all_envs[name] = env
            # pre-compute the signatures of the configuration files
            for f in env[CFG_FILES]:
                newnode = self.root.find_resource(f)
                try:
                    h = Utils.h_file(newnode.abspath())
                except (IOError, AttributeError):
                    # missing or unreadable file: fall back to the nil signature
                    Logs.error('cannot find %r' % f)
                    h = Utils.SIG_NIL
                newnode.sig = h
def gather_vswhere_versions(conf, versions):
    """
    Use ``vswhere.exe`` to discover Visual Studio (2017+) installations and
    register the detected MSVC targets in ``versions``.

    :param conf: configuration context (used for running commands)
    :param versions: mapping/collection of already-detected msvc versions
    """
    try:
        import json
    except ImportError:
        # historical guard: json is unavailable on pre-2.6 interpreters
        Logs.error('Visual Studio 2017 detection requires Python 2.6')
        return
    prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))
    vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
    args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
    try:
        txt = conf.cmd_and_log(args)
    except Errors.WafError as e:
        # a missing/failing vswhere is not fatal: other detection paths remain
        Logs.debug('msvc: vswhere.exe failed %s', e)
        return
    if sys.version_info[0] < 3:
        txt = txt.decode(sys.stdout.encoding or 'windows-1252')
    arr = json.loads(txt)
    arr.sort(key=lambda x: x['installationVersion'])
    for entry in arr:
        ver = entry['installationVersion']
        # keep only the major.minor part, e.g. '15.9'
        ver = str('.'.join(ver.split('.')[:2]))
        path = str(os.path.abspath(entry['installationPath']))
        if os.path.exists(path) and ('msvc %s' % ver) not in versions:
            conf.gather_msvc_targets(versions, ver, path)
def check_output(self, ret, out):
    """Scan the tool output for lines starting with 'Error:'.

    Logs each such line and returns 1; otherwise returns ``ret`` unchanged.
    """
    for index, text in enumerate(out.split('\n')):
        if not text.startswith('Error:'):
            continue
        # strip the 'Error:' prefix (6 characters) from the reported text
        Logs.error("Error in line %d: %s" % (index, text[6:]))
        ret = 1
    return ret
def addlines(self, node):
    """
    Add the lines from a header in the list of preprocessor lines to parse

    :param node: header
    :type node: :py:class:`waflib.Node.Node`
    """
    self.currentnode_stack.append(node.parent)

    self.count_files += 1
    if self.count_files > recursion_limit:
        # issue #812
        raise PreprocError('recursion limit exceeded')

    if Logs.verbose:
        Logs.debug('preproc: reading file %r', node)
    try:
        lines = self.parse_lines(node)
    except EnvironmentError:
        raise PreprocError('could not read the file %r' % node)
    except Exception:
        # unexpected parse failure: report in verbose mode only and keep going
        if Logs.verbose > 0:
            Logs.error('parsing %r failed %s', node, traceback.format_exc())
    else:
        self.lines.extend(lines)
def call(self, *k, **kw):
    """Wrapper that silently fixes known keyword typos before delegating."""
    for wrong in typos:
        if wrong not in kw:
            continue
        right = typos[wrong]
        # move the value to the correct keyword, then report the typo
        kw[right] = kw[wrong]
        del kw[wrong]
        Logs.error('typo %r -> %r' % (wrong, right))
    return oldcall(self, *k, **kw)
def get_check_func(conf, lang):
    """Return the configuration check function for the given language.

    Logs an error (and returns None) for an unknown language.
    """
    attr_by_lang = {'c': 'check_cc', 'cxx': 'check_cxx'}
    try:
        return getattr(conf, attr_by_lang[lang])
    except KeyError:
        Logs.error("Unknown header language `%s'" % lang)
def build_version_files(header_path, source_path, domain, major, minor, micro):
    """Generate a C source/header pair holding the version numbers.

    :param header_path: path of the header file to write
    :param source_path: path of the source file to write
    :param domain: identifier prefix used for the generated symbols
    :param major: major version number
    :param minor: minor version number
    :param micro: micro version number
    :return: None; exits the process with -1 when a file cannot be written
    """
    header_path = os.path.abspath(header_path)
    source_path = os.path.abspath(source_path)
    text = "int " + domain + "_major_version = " + str(major) + ";\n"
    text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
    text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
    try:
        # 'with' guarantees the handle is closed even if write() fails
        with open(source_path, 'w') as o:
            o.write(text)
    except IOError:
        Logs.error('Failed to open %s for writing\n' % source_path)
        sys.exit(-1)
    text = "#ifndef __" + domain + "_version_h__\n"
    text += "#define __" + domain + "_version_h__\n"
    text += "extern const char* " + domain + "_revision;\n"
    text += "extern int " + domain + "_major_version;\n"
    text += "extern int " + domain + "_minor_version;\n"
    text += "extern int " + domain + "_micro_version;\n"
    text += "#endif /* __" + domain + "_version_h__ */\n"
    try:
        with open(header_path, 'w') as o:
            o.write(text)
    except IOError:
        # was Logs.warn: the failure aborts the process, so log it as an error
        Logs.error('Failed to open %s for writing\n' % header_path)
        sys.exit(-1)
    return None
def ant_glob(self, *k, **kw):
    """Warn when an ant_glob pattern contains a '..' segment, then delegate."""
    if k:
        for pattern in Utils.to_list(k[0]):
            segments = pattern.split('/')
            # '..' has no "parent directory" meaning in ant_glob patterns
            if '..' in segments:
                Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
    return old_ant_glob(self, *k, **kw)
def call(self, *k, **kw):
    """Call the wrapped function, then warn about known keyword typos in kw.

    Unlike a fixing wrapper, this only reports: the typo'd keyword is left
    in place and has already been passed through to the wrapped call.
    """
    ret = oldcall(self, *k, **kw)
    for x in typos:
        if x in kw:
            # removed a dead 'err = True' assignment: the flag was never read
            Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
    return ret
def addlines(self, node):
    """
    Add the preprocessor lines of a header to the list of lines to parse,
    reusing and filling a shared parse cache keyed by absolute path.

    :param node: header node
    :raises c_preproc.PreprocError: on recursion overflow or unreadable file
    """
    self.currentnode_stack.append(node.parent)
    filepath = node.abspath()

    self.count_files += 1
    if self.count_files > c_preproc.recursion_limit:
        raise c_preproc.PreprocError("recursion limit exceeded")

    pc = self.parse_cache
    Logs.debug('preproc: reading file %r', filepath)
    # fast path: the file was already parsed during this build
    try:
        lns = pc[filepath]
    except KeyError:
        pass
    else:
        self.lines.extend(lns)
        return

    try:
        lines = self.filter_comments(filepath)
        # POPFILE marks the end of this file for the parser; lines are
        # stored reversed so they can be consumed by popping
        lines.append((c_preproc.POPFILE, ''))
        lines.reverse()
        pc[filepath] = lines
        self.lines.extend(lines)
    except IOError:
        raise c_preproc.PreprocError("could not read the file %s" % filepath)
    except Exception:
        # unexpected parse failure: report in verbose mode only and continue
        if Logs.verbose > 0:
            Logs.error("parsing %s failed" % filepath)
            traceback.print_exc()
def isfile_cached(self):
    """
    Win32 replacement for ``os.path.isfile``: list each parent folder once
    through the FindFirstFile/FindNextFile API and cache the file names,
    then answer membership queries from the cache.
    """
    # optimize for nt.stat calls, assuming there are many files for few folders
    try:
        cache = self.__class__.cache_isfile_cache
    except AttributeError:
        cache = self.__class__.cache_isfile_cache = {}

    try:
        c1 = cache[id(self.parent)]
    except KeyError:
        # first query for this folder: enumerate it once and cache the names
        c1 = cache[id(self.parent)] = []

        curpath = self.parent.abspath()
        findData = ctypes.wintypes.WIN32_FIND_DATAW()
        find = FindFirstFile(TP % curpath, ctypes.byref(findData))

        if find == INVALID_HANDLE_VALUE:
            # enumeration failed: fall back to a plain isfile check
            Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
            return os.path.isfile(self.abspath())

        try:
            while True:
                if findData.cFileName not in UPPER_FOLDERS:
                    thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
                    if not thatsadir:
                        c1.append(str(findData.cFileName))
                if not FindNextFile(find, ctypes.byref(findData)):
                    break
        except Exception as e:
            Logs.error('exception while listing a folder %r %r', self.abspath(), e)
            return os.path.isfile(self.abspath())
        finally:
            # always release the win32 search handle
            FindClose(find)
    return self.name in c1
def configure(cfg):
    """
    Configure the Orchestration layer: collect the ``--orch-config`` files,
    load the package suite and decompose it into per-package environments.

    :raises RuntimeError: when no configuration file option was given
    :raises ValueError: when the named configuration files do not exist
    """
    msg.debug('orch: CONFIG CALLED')

    if not cfg.options.orch_config:
        raise RuntimeError('No Orchestration configuration file given (--orch-config)')
    orch_config = []
    # each entry may be a glob pattern; expand them all
    for lst in util.string2list(cfg.options.orch_config):
        lst = lst.strip()
        orch_config += glob(lst)
    okay = True
    for maybe in orch_config:
        if os.path.exists(maybe):
            continue
        msg.error('No such file: %s' % maybe)
        okay = False
    if not okay or not orch_config:
        raise ValueError('missing configuration files')
    cfg.msg('Orch configuration files', '"%s"' % '", "'.join(orch_config))

    # interpolation variables made available to the configuration files
    extra = dict(cfg.env)
    extra['top'] = context.top_dir
    extra['out'] = context.out_dir  # usually {top}/tmp
    extra['DESTDIR'] = getattr(cfg.options, 'destdir', '')
    suite = pkgconf.load(orch_config, start = cfg.options.orch_start, **extra)

    envmunge.decompose(cfg, suite)

    cfg.msg('Orch configure envs', '"%s"' % '", "'.join(cfg.all_envs.keys()))
    bind_functions(cfg)
    return
def build_version_files(header_path, source_path, domain, major, minor, micro, exportname, visheader):
    """Generate a C source/header pair holding the version numbers.

    :param header_path: path of the header file to write
    :param source_path: path of the source file to write
    :param domain: identifier prefix used for the generated symbols
    :param major: major version number
    :param minor: minor version number
    :param micro: micro version number
    :param exportname: symbol-visibility macro prepended to each declaration
    :param visheader: header to #include for the visibility macro ('' for none)
    :return: None; exits the process with -1 when a file cannot be written
    """
    header_path = os.path.abspath(header_path)
    source_path = os.path.abspath(source_path)
    text = "int " + domain + "_major_version = " + str(major) + ";\n"
    text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
    text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
    try:
        # 'with' guarantees the handle is closed even if write() fails
        with open(source_path, 'w') as o:
            o.write(text)
    except IOError:
        Logs.error('Failed to open %s for writing\n' % source_path)
        sys.exit(-1)
    text = "#ifndef __" + domain + "_version_h__\n"
    text += "#define __" + domain + "_version_h__\n"
    if visheader != '':
        text += "#include \"" + visheader + "\"\n"
    text += exportname + " extern const char* " + domain + "_revision;\n"
    text += exportname + " extern int " + domain + "_major_version;\n"
    text += exportname + " extern int " + domain + "_minor_version;\n"
    text += exportname + " extern int " + domain + "_micro_version;\n"
    text += "#endif /* __" + domain + "_version_h__ */\n"
    try:
        with open(header_path, 'w') as o:
            o.write(text)
    except IOError:
        # was Logs.warn: the failure aborts the process, so log it as an error
        Logs.error('Failed to open %s for writing\n' % header_path)
        sys.exit(-1)
    return None
def do_install(self, src, tgt, chmod=Utils.O644):
    """
    Copy ``src`` to ``tgt`` and apply ``chmod``; the copy is skipped when the
    target mtime/size indicate it is up to date, unless ``--force`` is given.

    :return: False when the copy was skipped
    :raises Errors.WafError: when the destination is invalid or the copy fails
    """
    d, _ = os.path.split(tgt)
    if not d:
        raise Errors.WafError("Invalid installation given %r->%r" % (src, tgt))
    Utils.check_dir(d)

    # user-facing label: path relative to the source tree root
    srclbl = src.replace(self.srcnode.abspath() + os.sep, "")
    if not Options.options.force:
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.progress_bar:
                    Logs.info("- install %s (from %s)" % (tgt, srclbl))
                return False

    if not self.progress_bar:
        Logs.info("+ install %s (from %s)" % (tgt, srclbl))
    # remove the target first so the copy is not affected by a stale file
    try:
        os.remove(tgt)
    except OSError:
        pass
    try:
        shutil.copy2(src, tgt)
        os.chmod(tgt, chmod)
    except IOError:
        # distinguish "source missing" from other copy failures
        try:
            os.stat(src)
        except (OSError, IOError):
            Logs.error("File %r does not exist" % src)
        raise Errors.WafError("Could not install the file %r" % tgt)
def load_envs(self):
    """
    The configuration command creates files of the form ``build/c4che/NAMEcache.py``.
    This method creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each
    ``NAME`` by reading those files. The config sets are then stored in the dict
    :py:attr:`waflib.Build.BuildContext.allenvs`.
    """
    node = self.root.find_node(self.cache_dir)
    if not node:
        raise Errors.WafError('The project was not configured: run "waf configure" first!')
    lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
    if not lst:
        raise Errors.WafError('The cache directory is empty: reconfigure the project')

    for x in lst:
        # variant name: path relative to the cache dir, suffix stripped, '/'-separated
        name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
        env = ConfigSet.ConfigSet(x.abspath())
        self.all_envs[name] = env
        # pre-compute signatures for the files recorded during configuration
        for f in env[CFG_FILES]:
            newnode = self.root.find_resource(f)
            try:
                h = Utils.h_file(newnode.abspath())
            except (IOError, AttributeError):
                # missing or unreadable file: fall back to the nil signature
                Logs.error('cannot find %r' % f)
                h = Utils.SIG_NIL
            newnode.sig = h
def dl_task(task):
    """
    Download the patch file named by the task's input URL file into the
    task's output node, then verify its checksum when one is configured.

    :return: None on success or when no checksum is configured, 1 on
        checksum mismatch (the corrupt download is removed)

    NOTE(review): this function references ``tgen``, which is not a parameter;
    presumably it is bound in an enclosing scope (closure over the task
    generator) — confirm before reusing this function stand-alone.
    """
    src = task.inputs[0]
    tgt = task.outputs[0]
    url = src.read().strip()
    try:
        web = urlopen(url)
        tgt.write(web.read(), 'wb')
    except Exception:
        import traceback
        traceback.print_exc()
        msg.error(tgen.worch.format("[{package}_dlpatch] problem downloading [{patch_urlfile}]"))
        raise
    checksum = tgen.worch.patch_checksum
    if not checksum:
        # no checksum configured: accept the download as-is
        return
    # checksum format is "<hashlib algorithm>:<hex digest>"
    hasher_name, ref = checksum.split(":")
    import hashlib, os
    # FIXME: check the hasher method exists. check for typos.
    hasher = getattr(hashlib, hasher_name)()
    hasher.update(tgt.read('rb'))
    data = hasher.hexdigest()
    if data != ref:
        msg.error(tgen.worch.format("[{package}_dlpatch] invalid checksum:\nref: %s\nnew: %s" %
                                    (ref, data)))
        # discard the corrupt download; a missing file is not an error here
        try:
            os.remove(tgt.abspath())
        except IOError:
            pass
        return 1
    return
def configure(self):
    """
    Detect Qt5: locate the binaries and libraries, then verify that a minimal
    Qt application compiles, trying several compiler flag combinations.
    """
    self.find_qt5_binaries()
    self.set_qt5_libs_to_check()
    self.set_qt5_defines()
    self.find_qt5_libraries()
    self.add_qt5_rpath()
    self.simplify_qt5_libs()

    if not has_xml:
        Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')

    frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
    uses = 'QT5CORE QT5WIDGETS QT5GUI'
    # try a plain build first, then progressively add PIE/PIC/C++11 flags
    for flag in [[], '-fPIE', '-fPIC', '-std=c++11', ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
        msg = 'See if Qt files compile '
        if flag:
            msg += 'with %s' % flag
        try:
            self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg)
        except self.errors.ConfigurationError:
            pass
        else:
            break
    else:
        # no flag combination worked
        self.fatal('Could not build a simple Qt application')

    from waflib import Utils
    if Utils.unversioned_sys_platform() == 'freebsd':
        # FreeBSD may need an explicit library path to link Qt programs
        frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
        try:
            self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
        except self.errors.ConfigurationError:
            self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
def scan(self):
    """Parse the *.qrc* file and split its entries into found nodes and raw names."""
    if not has_xml:
        Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
        return ([], [])

    handler = XMLHandler()
    parser = make_parser()
    parser.setContentHandler(handler)
    # the context manager closes the file even if parsing raises
    with open(self.inputs[0].abspath(), 'r') as stream:
        parser.parse(stream)

    root = self.inputs[0].parent
    found = []
    unresolved = []
    for entry in handler.files:
        resource = root.find_resource(entry)
        if resource:
            found.append(resource)
        else:
            unresolved.append(entry)
    return (found, unresolved)
def add_moc_tasks(self):
    """
    Create the moc tasks for the headers referenced by this compiled source,
    using the '.moc' dependencies recorded in ``bld.raw_deps`` on earlier runs.
    """
    node = self.inputs[0]
    bld = self.generator.bld

    # ensure the task signature is recomputed after the moc tasks are added
    try:
        self.signature()
    except KeyError:
        pass
    else:
        delattr(self, 'cache_sig')

    moctasks = []
    mocfiles = []
    try:
        # consume the recorded dependencies; they are re-stored (filtered) below
        tmp_lst = bld.raw_deps[self.uid()]
        bld.raw_deps[self.uid()] = []
    except KeyError:
        tmp_lst = []
    for d in tmp_lst:
        if not d.endswith('.moc'):
            continue
        if d in mocfiles:
            # duplicate dependency entry: should not happen
            Logs.error("paranoia owns")
            continue
        mocfiles.append(d)

        # locate the header matching this .moc file
        h_node = None
        try: ext = Options.options.qt_header_ext.split()
        except AttributeError: pass
        if not ext: ext = MOC_H
        base2 = d[:-4]
        for x in [node.parent] + self.generator.includes_nodes:
            for e in ext:
                h_node = x.find_node(base2 + e)
                if h_node:
                    break
            if h_node:
                m_node = h_node.change_ext('.moc')
                break
        else:
            # the base name may already carry a source extension (foo.cpp.moc)
            for k in EXT_QT4:
                if base2.endswith(k):
                    for x in [node.parent] + self.generator.includes_nodes:
                        h_node = x.find_node(base2)
                        if h_node:
                            break
                    if h_node:
                        m_node = h_node.change_ext(k + '.moc')
                        break
        if not h_node:
            raise Errors.WafError('no header found for %r which is a moc file' % d)

        # record the mapping so the build order and signatures stay correct
        bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node
        task = self.create_moc_task(h_node, m_node)
        moctasks.append(task)

    # store back the filtered list for the next run
    tmp_lst = bld.raw_deps[self.uid()] = mocfiles
    lst = bld.node_deps.get(self.uid(), ())
    for d in lst:
        name = d.name
        if name.endswith('.moc'):
            task = self.create_moc_task(bld.node_deps[(self.inputs[0].parent.abspath(), name)], d)
            moctasks.append(task)

    # this task must run after all the moc tasks it just created
    self.run_after.update(set(moctasks))
    self.moc_done = 1
def _getVersion():
    """update the ns3_version.js file, when building documentation"""
    prog = "doc/ns3_html_theme/get_version.sh"
    status = subprocess.Popen([prog]).wait()
    if status:
        # a non-zero exit status aborts the documentation build
        Logs.error(prog + " returned an error")
        raise SystemExit(1)
def install_pyfile(self, node, install_from=None):
    """
    Install a python source file and byte-compile it (.pyc/.pyo) at install
    time; remove the byte-compiled companions at uninstall time.

    :param node: python file to install
    :param install_from: node to compute the relative path from (default: the
        file's parent directory)
    :raises Errors.WafError: when the byte compilation subprocess fails
    """
    from_node = install_from or node.parent
    tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
    path = tsk.get_install_path()

    if self.bld.is_install < 0:
        # uninstall: drop the .pyc/.pyo companions
        Logs.info("+ removing byte compiled python files")
        for x in 'co':
            try:
                os.remove(path + x)
            except OSError:
                pass

    if self.bld.is_install > 0:
        try:
            st1 = os.stat(path)
        except OSError:
            Logs.error('The python file is missing, this should not happen')

        for x in ['c', 'o']:
            # the PYC/PYO env flags control whether each variant is generated
            do_inst = self.env['PY' + x.upper()]
            try:
                st2 = os.stat(path + x)
            except OSError:
                pass
            else:
                # already compiled and at least as recent as the source: skip
                if st1.st_mtime <= st2.st_mtime:
                    do_inst = False
            if do_inst:
                # the '.pyo' variant needs the optimize flag
                lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
                (a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
                argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
                Logs.info('+ byte compiling %r' % (path + x))
                env = self.env.env or None
                ret = Utils.subprocess.Popen(argv, env=env).wait()
                if ret:
                    raise Errors.WafError('py%s compilation failed %r' % (x, path))
def bibfile(self):
    """
    Parses *.aux* files to find bibfiles to process.
    If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    for aux_node in self.aux_nodes:
        try:
            ct = aux_node.read()
        except EnvironmentError as e:
            # bug fix: the format string has two placeholders but only one
            # argument was supplied, which broke the log call itself
            Logs.error('Error reading %s: %r', aux_node.abspath(), e)
            continue

        if g_bibtex_re.findall(ct):
            self.info('calling bibtex')
            # give bibtex a controlled environment with the right input paths
            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
            self.env.SRCFILE = aux_node.name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())

    # additional bibliographies declared via the multibib package
    for node in getattr(self, 'multibibs', []):
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
        self.env.SRCFILE = node.name[:-4]
        self.check_status('error when calling bibtex', self.bibtex_fun())
# --- tail of the waf entry point error handling; the enclosing function
# --- definition is outside this chunk, and Python 2 'except E, e' syntax
# --- is preserved as-is ---
Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
sys.exit(1)
try:
    os.chdir(Context.run_dir)
except OSError:
    Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
    sys.exit(1)
# load the project wscript as the main module
try:
    set_main_module(Context.run_dir + os.sep + Context.WSCRIPT_FILE)
except Errors.WafError, e:
    Logs.pprint('RED', e.verbose_msg)
    Logs.error(str(e))
    sys.exit(1)
except Exception, e:
    Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
    traceback.print_exc(file=sys.stdout)
    sys.exit(2)
# execute the requested commands; exit code 1 = build error, 2 = internal error
try:
    run_commands()
except Errors.WafError, e:
    if Logs.verbose > 1:
        Logs.pprint('RED', e.verbose_msg)
    Logs.error(e.msg)
    sys.exit(1)
except SystemExit:
    raise
except Exception, e:
    traceback.print_exc(file=sys.stdout)
    sys.exit(2)
except KeyboardInterrupt:
    # NOTE(review): the handler body continues beyond this chunk
def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0',
                         maxversion=None, version_blacklist=[],
                         checkfunctions=None, headers=None, checkcode=None,
                         onlyif=None, implied_deps=None,
                         require_headers=True, pkg=None, set_target=True):
    '''check if a library is available as a system library.
    this first tries via pkg-config, then if that fails
    tries by testing for a specified function in the specified lib
    '''
    # We always do a logic validation of 'onlyif' first
    missing = []
    if onlyif:
        for l in TO_LIST(onlyif):
            f = 'FOUND_SYSTEMLIB_%s' % l
            if not f in conf.env:
                # programming error in the wscript: the prerequisite library
                # must have been checked before this one
                Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) +
                           'missing prerequisite check for ' +
                           'system library %s, onlyif=%r' % (l, onlyif))
                sys.exit(1)
            if not conf.env[f]:
                missing.append(l)
    found = 'FOUND_SYSTEMLIB_%s' % libname
    if found in conf.env:
        # cached result from a previous call
        return conf.env[found]
    if conf.LIB_MUST_BE_BUNDLED(libname):
        conf.env[found] = False
        return False

    # see if the library should only use a system version if another dependent
    # system version is found. That prevents possible use of mixed library
    # versions
    if missing:
        if not conf.LIB_MAY_BE_BUNDLED(libname):
            Logs.error('ERROR: Use of system library %s depends on missing system library/libraries %r' % (libname, missing))
            sys.exit(1)
        conf.env[found] = False
        return False

    def check_functions_headers_code():
        '''helper function for CHECK_BUNDLED_SYSTEM'''
        if require_headers and headers and not conf.CHECK_HEADERS(headers, lib=libname):
            return False
        if checkfunctions is not None:
            ok = conf.CHECK_FUNCS_IN(checkfunctions, libname, headers=headers,
                                     empty_decl=False, set_target=False)
            if not ok:
                return False
        if checkcode is not None:
            define = 'CHECK_BUNDLED_SYSTEM_%s' % libname.upper()
            ok = conf.CHECK_CODE(checkcode, lib=libname,
                                 headers=headers, local_include=False,
                                 msg=msg, define=define)
            conf.CONFIG_RESET(define)
            if not ok:
                return False
        return True

    minversion = minimum_library_version(conf, libname, minversion)

    # build the user-visible check message including the version constraints
    msg = 'Checking for system %s' % libname
    msg_ver = []
    if minversion != '0.0.0':
        msg_ver.append('>=%s' % minversion)
    if maxversion is not None:
        msg_ver.append('<=%s' % maxversion)
    for v in version_blacklist:
        msg_ver.append('!=%s' % v)
    if msg_ver != []:
        msg += " (%s)" % (" ".join(msg_ver))

    uselib_store = libname.upper()
    if pkg is None:
        pkg = libname

    # pkg-config version constraint string
    version_checks = '%s >= %s' % (pkg, minversion)
    if maxversion is not None:
        version_checks += ' %s <= %s' % (pkg, maxversion)
    for v in version_blacklist:
        version_checks += ' %s != %s' % (pkg, v)

    # try pkgconfig first
    if (conf.CHECK_CFG(package=pkg,
                       args='"%s" --cflags --libs' % (version_checks),
                       msg=msg, uselib_store=uselib_store) and
            check_functions_headers_code()):
        if set_target:
            conf.SET_TARGET_TYPE(libname, 'SYSLIB')
        conf.env[found] = True
        if implied_deps:
            conf.SET_SYSLIB_DEPS(libname, implied_deps)
        return True
    # fall back to a direct function/header/code check
    if checkfunctions is not None:
        if check_functions_headers_code():
            conf.env[found] = True
            if implied_deps:
                conf.SET_SYSLIB_DEPS(libname, implied_deps)
            if set_target:
                conf.SET_TARGET_TYPE(libname, 'SYSLIB')
            return True
    conf.env[found] = False
    if not conf.LIB_MAY_BE_BUNDLED(libname):
        Logs.error('ERROR: System library %s of version %s not found, and bundling disabled' % (libname, minversion))
        sys.exit(1)
    return False
def is_before(t1, t2):
    """Delegate to the wrapped ordering check, warning on contradictory constraints."""
    result = old(t1, t2)
    # both directions claiming precedence means the constraints conflict
    if result and old(t2, t1):
        Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
    return result
def waf_entry_point(current_directory, version, wafdir):
    """
    Waf startup: locate the project directory (climbing up from the current
    directory unless disabled), load the main wscript and run the commands.

    :param current_directory: directory from which waf was launched
    :param version: version expected by the waf script
    :param wafdir: directory holding the waf library
    """
    # NOTE(review): Python 2 'except E, e' syntax preserved from the original
    Logs.init_log()

    # the waf script version must match the waflib version
    if Context.WAFVERSION != version:
        Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
        sys.exit(1)

    if '--version' in sys.argv:
        Context.run_dir = current_directory
        ctx = Context.create_context('options')
        ctx.curdir = current_directory
        ctx.parse_args()
        sys.exit(0)

    Context.waf_dir = wafdir
    Context.launch_dir = current_directory

    # some commands (e.g. configure) must not climb to a parent directory
    no_climb = os.environ.get('NOCLIMB', None)
    if not no_climb:
        for k in no_climb_commands:
            if k in sys.argv:
                no_climb = True
                break

    # climb up the directory tree looking for a lock file or a wscript
    cur = current_directory
    while cur:
        lst = os.listdir(cur)
        if Options.lockfile in lst:
            env = ConfigSet.ConfigSet()
            try:
                env.load(os.path.join(cur, Options.lockfile))
                ino = os.stat(cur)[stat.ST_INO]
            except Exception:
                pass
            else:
                # check that the lock file points back to this directory
                # (compare inodes; on win32 compare the paths directly)
                for x in [env.run_dir, env.top_dir, env.out_dir]:
                    if Utils.is_win32:
                        if cur == x:
                            load = True
                            break
                    else:
                        try:
                            ino2 = os.stat(x)[stat.ST_INO]
                        except OSError:
                            pass
                        else:
                            if ino == ino2:
                                load = True
                                break
                else:
                    Logs.warn('invalid lock file in %s' % cur)
                    load = False
                if load:
                    Context.run_dir = env.run_dir
                    Context.top_dir = env.top_dir
                    Context.out_dir = env.out_dir
                    break
        if not Context.run_dir:
            if Context.WSCRIPT_FILE in lst:
                Context.run_dir = cur
        next = os.path.dirname(cur)
        if next == cur:
            # filesystem root reached
            break
        cur = next
        if no_climb:
            break

    if not Context.run_dir:
        if '-h' in sys.argv or '--help' in sys.argv:
            Logs.warn('No wscript file found: the help message may be incomplete')
            Context.run_dir = current_directory
            ctx = Context.create_context('options')
            ctx.curdir = current_directory
            ctx.parse_args()
            sys.exit(0)
        Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
        sys.exit(1)

    try:
        os.chdir(Context.run_dir)
    except OSError:
        Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
        sys.exit(1)

    # load the project wscript as the main module
    try:
        set_main_module(Context.run_dir + os.sep + Context.WSCRIPT_FILE)
    except Errors.WafError, e:
        Logs.pprint('RED', e.verbose_msg)
        Logs.error(str(e))
        sys.exit(1)