def symbols_whyneeded(task):
    """check why 'target' needs to link to 'subsystem'"""
    bld = task.env.bld
    tgt_list = get_tgt_list(bld)

    parts = Options.options.WHYNEEDED.split(":")
    if len(parts) != 2:
        raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
    target, subsystem = parts

    build_symbol_sets(bld, tgt_list)
    build_library_names(bld, tgt_list)
    build_syslib_sets(bld, tgt_list)

    Logs.info("Checking why %s needs to link to %s" % (target, subsystem))
    if target not in bld.env.used_symbols:
        Logs.warn("unable to find target '%s' in used_symbols dict" % target)
        return
    if subsystem not in bld.env.public_symbols:
        Logs.warn("unable to find subsystem '%s' in public_symbols dict" % subsystem)
        return

    # symbols that 'target' consumes and 'subsystem' exports
    overlap = bld.env.used_symbols[target].intersection(
        bld.env.public_symbols[subsystem])
    if not overlap:
        Logs.info("target '%s' doesn't use any public symbols from '%s'" % (target, subsystem))
    else:
        Logs.info("target '%s' uses symbols %s from '%s'" % (target, overlap, subsystem))
def fake_build_environment(info=True, flush=False):
    """create all the tasks for the project, but do not run the build
    return the build context in use"""
    bld = getattr(Utils.g_module, 'build_context', Utils.Context)()
    bld = Scripting.check_configured(bld)

    # force a non-install run: both command flags and the context itself
    Options.commands['install'] = False
    Options.commands['uninstall'] = False
    Options.is_install = False
    bld.is_install = 0 # False

    try:
        # the lockfile holds the configured project environment
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        raise Utils.WafError("Project not configured (run 'waf configure' first)")

    bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
    bld.load_envs()

    if info:
        Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
    # recurse from the project's top-level wscript directory
    bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

    bld.pre_build()
    if flush:
        # flush() posts the tasks; without it they are only declared
        bld.flush()
    return bld
def CHECK_BUNDLED_SYSTEM_PYTHON(conf, libname, modulename, minversion='0.0.0'):
    '''check if a python module is available on the system and
    has the specified minimum version.
    '''
    if conf.LIB_MUST_BE_BUNDLED(libname):
        return False

    # see if the library should only use a system version if another dependent
    # system version is found. That prevents possible use of mixed library
    # versions
    minversion = minimum_library_version(conf, libname, minversion)

    def _version_key(vstr):
        # numeric-aware key: '0.10' must sort after '0.9'; non-numeric
        # components (e.g. '1.0rc1') fall back to string comparison
        key = []
        for part in vstr.split("."):
            try:
                key.append((0, int(part)))
            except ValueError:
                key.append((1, part))
        return key

    try:
        m = __import__(modulename)
    except ImportError:
        found = False
    else:
        try:
            version = m.__version__
        except AttributeError:
            found = False
        else:
            # bug fix: the original compared tuples of strings, so for
            # example '0.10' was considered older than '0.9'
            found = _version_key(version) >= _version_key(minversion)
    if not found and not conf.LIB_MAY_BE_BUNDLED(libname):
        Logs.error('ERROR: Python module %s of version %s not found, and bundling disabled' % (libname, minversion))
        sys.exit(1)
    return found
def make_test(self):
    """create a 'utest' task running the linked test program"""
    if 'cprogram' not in self.features:
        Logs.error('test cannot be executed {0!s}'.format(self))
        return
    # test binaries are never installed
    self.default_install_path = None
    self.create_task('utest', self.link_task.outputs)
def parse_args_impl(parser, _args=None):
    # Parse the command line, normalise the command list and configure logging.
    global options, commands, arg_line
    (options, args) = parser.parse_args(args=_args)
    arg_line = args
    commands = {}
    for var in cmds: commands[var] = 0
    if not args:
        # no command given: default to 'build'
        commands['build'] = 1
        args.append('build')
    for arg in args:
        commands[arg] = True
    if 'check' in args:
        # 'check' implies 'build'; ensure 'build' appears before 'check'
        idx = args.index('check')
        try:
            bidx = args.index('build')
            if bidx > idx:
                # build listed after check: treat as missing and re-insert
                raise ValueError('build before check')
        except ValueError as e:
            args.insert(idx, 'build')
    if args[0] != 'init':
        # 'init' must always run first
        args.insert(0, 'init')
    if options.keep: options.jobs = 1
    if options.jobs < 1: options.jobs = 1
    if 'install' in sys.argv or 'uninstall' in sys.argv:
        # normalise --destdir to an absolute, user-expanded path
        options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
    Logs.verbose = options.verbose
    Logs.init_log()
    if options.zones:
        Logs.zones = options.zones.split(',')
        if not Logs.verbose: Logs.verbose = 1
    elif Logs.verbose > 0:
        Logs.zones = ['runner']
    if Logs.verbose > 2:
        Logs.zones = ['*']
def check_syslib_dependencies(bld, t):
    '''check for syslib depenencies'''
    # objlist targets are aggregates; their components are checked instead
    if bld.name_to_obj(t.sname + ".objlist", bld.env):
        return

    sname = real_name(t.sname)
    unresolved = set()

    features = TO_LIST(t.features)
    if 'pyembed' in features or 'pyext' in features:
        # python-linked targets get the interpreter's symbols for free
        if 'python' in bld.env.public_symbols:
            t.unsatisfied_symbols = t.unsatisfied_symbols.difference(
                bld.env.public_symbols['python'])

    needed = {}
    for sym in t.unsatisfied_symbols:
        if sym not in bld.env.symbol_map:
            unresolved.add(sym)
            continue
        dep = bld.env.symbol_map[sym][0]
        if dep == 'c':
            # libc is implicit; never reported
            continue
        needed.setdefault(dep, set()).add(sym)

    for dep in needed:
        Logs.info("Target '%s' should add syslib dep '%s' for symbols %s" % (sname, dep, " ".join(needed[dep])))

    if unresolved:
        debug("deps: Target '%s' has unsatisfied symbols: %s" % (sname, " ".join(unresolved)))
def load(self, filename):
    # Populate self.table with the variables persisted in a waf cache file.
    tbl = self.table
    code = Utils.readf(filename)
    for m in re_imp.finditer(code):
        g = m.group
        # NOTE(review): eval of file content — the cache file must be
        # trusted; never load a cache from an untrusted source
        tbl[g(2)] = eval(g(3))
    Logs.debug('env: %s', self.table)
def exec_command(s, **kw):
    """run a command; capture and log its output unless a log/stdout
    target was supplied. Returns the exit code, or -1 on OSError."""
    if 'log' in kw:
        # redirect both streams into the caller-supplied log file
        kw['stdout'] = kw['stderr'] = kw['log']
        del(kw['log'])
    kw['shell'] = isinstance(s, str)

    if len(s) > 2000:
        # very long command lines need special startup handling on windows
        startupinfo = pproc.STARTUPINFO()
        startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
        kw['startupinfo'] = startupinfo

    try:
        if 'stdout' not in kw:
            kw['stdout'] = pproc.PIPE
            kw['stderr'] = pproc.PIPE
            proc = pproc.Popen(s, **kw)
            (stdout, stderr) = proc.communicate()
            Logs.info(stdout)
            if stderr:
                Logs.error(stderr)
            # bug fix: this branch previously fell through and returned
            # None instead of the child's exit status
            return proc.returncode
        else:
            proc = pproc.Popen(s, **kw)
            return proc.wait()
    except OSError:
        return -1
def dblatex_build(task):
    """run dblatex on the task's input to produce a PDF"""
    env = task.env
    bld = task.generator.bld
    source = task.inputs[0]
    cmd = '{0} -T {1} -o {2}.pdf {3} {4}'.format(
        env['DBLATEX'], env['DBLATEX_STYLE'], source.bld_base(env),
        env['DBLATEX_FLAGS'], source.srcpath(env))
    Logs.debug('Executing "{0}"'.format(cmd))
    bld.exec_command(cmd)
def detect(conf):
    """populate CC/CXX flag variables from the selected build profile"""
    cc = conf.env['COMPILER_CC'] or None
    cxx = conf.env['COMPILER_CXX'] or None
    if not (cc or cxx):
        raise Utils.WafError("neither COMPILER_CC nor COMPILER_CXX are defined; "
                             "maybe the compiler_cc or compiler_cxx tool has not been configured yet?")
    try:
        compiler = compiler_mapping[cc]
    except KeyError:
        try:
            compiler = compiler_mapping[cxx]
        except KeyError:
            Logs.warn("No compiler flags support for compiler %r or %r" % (cc, cxx))
            return

    opt_level, warn_level, dbg_level = profiles[Options.options.build_profile]

    optimizations = compiler.get_optimization_flags(opt_level)
    debug, debug_defs = compiler.get_debug_flags(dbg_level)
    warnings = compiler.get_warnings_flags(warn_level)

    # only apply flags when the user has not already set them
    if cc and not conf.env['CCFLAGS']:
        for group in (optimizations, debug, warnings):
            conf.env.append_value('CCFLAGS', group)
        conf.env.append_value('CCDEFINES', debug_defs)
    if cxx and not conf.env['CXXFLAGS']:
        for group in (optimizations, debug, warnings):
            conf.env.append_value('CXXFLAGS', group)
        conf.env.append_value('CXXDEFINES', debug_defs)
def check_syslib_dependencies(bld, t):
    '''check for syslib depenencies'''
    # objlist targets are aggregates of other targets; nothing to check here
    if bld.name_to_obj(t.sname + ".objlist", bld.env):
        return
    sname = real_name(t.sname)
    remaining = set()
    features = TO_LIST(t.features)
    if 'pyembed' in features or 'pyext' in features:
        # python-linked targets get the interpreter's public symbols for free
        if 'python' in bld.env.public_symbols:
            t.unsatisfied_symbols = t.unsatisfied_symbols.difference(bld.env.public_symbols['python'])
    # map each missing symbol to the first system library that provides it
    needed = {}
    for sym in t.unsatisfied_symbols:
        if sym in bld.env.symbol_map:
            dep = bld.env.symbol_map[sym][0]
            if dep == 'c':
                # libc is always linked; never reported
                continue
            if not dep in needed:
                needed[dep] = set()
            needed[dep].add(sym)
        else:
            remaining.add(sym)
    for dep in needed:
        Logs.info("Target '%s' should add syslib dep '%s' for symbols %s" % (sname, dep, " ".join(needed[dep])))
    if remaining:
        debug("deps: Target '%s' has unsatisfied symbols: %s" % (sname, " ".join(remaining)))
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    if isinstance(varstr, list):
        # expand each element recursively
        return [EXPAND_VARIABLES(ctx, item, vars=vars) for item in varstr]

    # non-string values pass through untouched
    if not isinstance(varstr, str):
        return varstr

    import Environment
    env = Environment.Environment()
    ret = varstr

    # substitute from the user supplied dict first, if available
    if vars is not None:
        for key in vars.keys():
            env[key] = vars[key]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # anything left over is resolved against the build environment
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)

    # nothing may remain; also catch the common typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
def symbols_whyneeded(task):
    """check why 'target' needs to link to 'subsystem'"""
    bld = task.env.bld
    tgt_list = get_tgt_list(bld)

    why = Options.options.WHYNEEDED.split(":")
    if len(why) != 2:
        raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
    (target, subsystem) = why

    # build all the symbol tables we need for the analysis
    for builder in (build_symbol_sets, build_library_names, build_syslib_sets):
        builder(bld, tgt_list)

    Logs.info("Checking why %s needs to link to %s" % (target, subsystem))
    if target not in bld.env.used_symbols:
        Logs.warn("unable to find target '%s' in used_symbols dict" % target)
        return
    if subsystem not in bld.env.public_symbols:
        Logs.warn("unable to find subsystem '%s' in public_symbols dict" % subsystem)
        return

    used = bld.env.used_symbols[target]
    public = bld.env.public_symbols[subsystem]
    overlap = used.intersection(public)
    if overlap:
        Logs.info("target '%s' uses symbols %s from '%s'" % (target, overlap, subsystem))
    else:
        Logs.info("target '%s' doesn't use any public symbols from '%s'" % (target, subsystem))
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except AttributeError:
        # narrowed from a bare except: only a missing attribute is expected
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        # fall back to resolving relative to the top-level source dir
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    # bug fix: the original did a bare 'raise' with no active exception
    # (a confusing RuntimeError) and passed a stray second argument to
    # Logs.error that %-formatting never consumed
    Logs.error('Unknown RECURSE context class: %s' % ctxclass)
    raise Utils.WafError('Unknown RECURSE context class: %s' % ctxclass)
def parse_args_impl(parser, _args=None):
    # Parse the command line (Python 2 only: uses the print statement),
    # validate commands and configure logging.
    global options, commands
    (options, args) = parser.parse_args(args=_args)
    commands = {}
    for var in cmds: commands[var] = 0
    if len(args) == 0: commands['build'] = 1
    for arg in args:
        arg = arg.strip()
        if arg in cmds:
            commands[arg] = True
        else:
            # unknown command: show usage and abort
            print 'Error: Invalid command specified ', arg
            parser.print_help()
            sys.exit(1)
    if commands['check']:
        # 'check' implies a build
        commands['build'] = True
    if commands['install'] or commands['uninstall']:
        global is_install
        is_install = True
    if options.keep: options.jobs = 1
    if options.jobs < 1: options.jobs = 1
    Logs.verbose = options.verbose
    Logs.init_log()
    if options.zones:
        Logs.zones = options.zones.split(',')
        if not Logs.verbose: Logs.verbose = 1
    elif Logs.verbose == 1:
        Logs.zones = ['runner']
def make_test(self):
    """attach a 'utest' task to a compiled test program"""
    if 'cprogram' not in self.features:
        Logs.error('test cannot be executed %s' % self)
        return
    # never install test binaries
    self.default_install_path = None
    task = self.create_task('utest')
    task.set_inputs(self.link_task.outputs)
def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
    # Locate the python interpreter(s) and verify the minimum version.
    # enable tool to build python extensions
    if conf.env.HAVE_PYTHON_H:
        # headers already found: only the version check remains
        conf.check_python_version(version)
        return

    interpreters = []

    if conf.env['EXTRA_PYTHON']:
        # configure a second, extra python environment alongside the default
        conf.all_envs['extrapython'] = conf.env.copy()
        conf.setenv('extrapython')
        conf.env['PYTHON'] = conf.env['EXTRA_PYTHON']
        conf.env['IS_EXTRA_PYTHON'] = 'yes'
        conf.find_program('python', var='PYTHON', mandatory=True)
        conf.check_tool('python')
        try:
            conf.check_python_version((3, 3, 0))
        except Exception:
            Logs.warn('extra-python needs to be Python 3.3 or later')
            raise
        interpreters.append(conf.env['PYTHON'])
        # switch back to the primary environment
        conf.setenv('default')

    conf.find_program('python', var='PYTHON', mandatory=mandatory)
    conf.check_tool('python')
    path_python = conf.find_program('python')
    # record whether the user pointed PYTHON at a non-default interpreter
    conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python)
    conf.check_python_version(version)

    interpreters.append(conf.env['PYTHON'])
    conf.env.python_interpreters = interpreters
def symbols_dupcheck(task):
    '''check for symbols defined in two different subsystems'''
    bld = task.env.bld
    tgt_list = get_tgt_list(bld)

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    Logs.info("Checking for duplicate symbols")
    # these are expected to be in many subsystems
    expected_dups = frozenset(['main', '_init', '_fini', 'init_samba_module',
                               'samba_init_module', 'ldb_init_module'])
    for sym in bld.env.symbol_map:
        subsystems = set(bld.env.symbol_map[sym])
        if len(subsystems) == 1:
            continue
        if sym in expected_dups:
            continue
        # if all of them are in system libraries, we can ignore them. This copes
        # with the duplication between libc, libpthread and libattr
        if all(s == 'c' or (s in targets and targets[s] == 'SYSLIB')
               for s in subsystems):
            continue
        Logs.info("symbol %s appears in %s" % (sym, subsystems))
def subst_at_vars(task):
    '''substiture @VAR@ style variables in a file'''
    s = task.inputs[0].read()
    # split on the vars
    a = re.split('(@\w+@)', s)
    out = []
    done_var = {}
    back_sub = [ ('PREFIX', '${prefix}'), ('EXEC_PREFIX', '${exec_prefix}')]
    for v in a:
        if re.match('@\w+@', v):
            vname = v[1:-1]
            # fall back to the upper-cased name if only that exists
            if not vname in task.env and vname.upper() in task.env:
                vname = vname.upper()
            if not vname in task.env:
                Logs.error("Unknown substitution %s in %s" % (v, task.name))
                sys.exit(1)
            v = SUBST_VARS_RECURSIVE(task.env[vname], task.env)
            # now we back substitute the allowed pc vars
            for (b, m) in back_sub:
                s = task.env[b]
                if s == v[0:len(s)]:
                    if not b in done_var:
                        # we don't want to substitute the first usage
                        done_var[b] = True
                    else:
                        v = m + v[len(s):]
                    break
        # non-variable fragments are copied through unchanged
        out.append(v)
    contents = ''.join(out)
    task.outputs[0].write(contents)
    return 0
def exec_command(s, **kw):
    """run a command, capturing and logging its output when no explicit
    stdout/log target was given; returns the exit code or -1 on OSError"""
    if "log" in kw:
        # send both streams to the caller-supplied log file
        kw["stdout"] = kw["stderr"] = kw["log"]
        del kw["log"]
    kw["shell"] = isinstance(s, str)

    if len(s) > 2000:
        # long command lines need STARTUPINFO handling (windows)
        startupinfo = pproc.STARTUPINFO()
        startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
        kw["startupinfo"] = startupinfo

    try:
        if "stdout" in kw:
            return pproc.Popen(s, **kw).wait()
        kw["stdout"] = pproc.PIPE
        kw["stderr"] = pproc.PIPE
        kw["universal_newlines"] = True
        proc = pproc.Popen(s, **kw)
        (stdout, stderr) = proc.communicate()
        Logs.info(stdout)
        if stderr:
            Logs.error(stderr)
        return proc.returncode
    except OSError:
        return -1
def add_answer(ca_file, msg, answer):
    '''add an answer to a set of cross answers'''
    try:
        f = open(ca_file, 'a')
    except IOError:
        # narrowed from a bare except: only I/O failures are expected here
        Logs.error("Unable to open cross-answers file {0!s}".format(ca_file))
        sys.exit(1)
    try:
        (retcode, retstring) = answer
        # if retstring is more than one line then we probably
        # don't care about its actual content (the tests should
        # yield one-line output in order to comply with the cross-answer
        # format)
        retstring = retstring.strip()
        if len(retstring.split('\n')) > 1:
            retstring = ''
        answer = (retcode, retstring)

        if answer == ANSWER_OK:
            f.write('{0!s}: OK\n'.format(msg))
        elif answer == ANSWER_UNKNOWN:
            f.write('{0!s}: UNKNOWN\n'.format(msg))
        elif answer == ANSWER_NO:
            f.write('{0!s}: NO\n'.format(msg))
        else:
            if retcode == 0:
                f.write('{0!s}: "{1!s}"\n'.format(msg, retstring))
            else:
                f.write('{0!s}: ({1:d}, "{2!s}")\n'.format(msg, retcode, retstring))
    finally:
        # bug fix: ensure the handle is closed even if a write fails
        f.close()
def CHECK_LIB(conf, libs, mandatory=False, empty_decl=True, set_target=True, shlib=False):
    '''check if a set of libraries exist as system libraries

    returns the sublist of libs that do exist as a syslib or []
    '''
    fragment = '''
int foo()
{
    int v = 2;
    return v*2;
}
'''
    ret = []
    liblist = TO_LIST(libs)
    for lib in liblist[:]:
        if GET_TARGET_TYPE(conf, lib) == 'SYSLIB':
            # already known to be a system library
            ret.append(lib)
            continue
        (ccflags, ldflags, cpppath) = library_flags(conf, lib)
        if shlib:
            res = conf.check(features='cc cshlib', fragment=fragment, lib=lib,
                             uselib_store=lib, ccflags=ccflags,
                             ldflags=ldflags, uselib=lib.upper())
        else:
            res = conf.check(lib=lib, uselib_store=lib, ccflags=ccflags,
                             ldflags=ldflags, uselib=lib.upper())
        if not res:
            if mandatory:
                # bug fix: the message previously interpolated the builtin
                # 'list' type instead of the requested libraries
                Logs.error("Mandatory library '%s' not found for functions '%s'" % (lib, libs))
                sys.exit(1)
            if empty_decl:
                # if it isn't a mandatory library, then remove it from dependency lists
                if set_target:
                    SET_TARGET_TYPE(conf, lib, 'EMPTY')
        else:
            conf.define('HAVE_LIB%s' % lib.upper().replace('-', '_'), 1)
            conf.env['LIB_' + lib.upper()] = lib
            if set_target:
                conf.SET_TARGET_TYPE(lib, 'SYSLIB')
            ret.append(lib)
    return ret
def check_tool(self, input, tooldir=None, funs=None):
    "load a waf tool"
    tools = Utils.to_list(input)
    if tooldir:
        tooldir = Utils.to_list(tooldir)
    for tool in tools:
        # normalise aliased tool names
        tool = tool.replace('++', 'xx')
        if tool == 'java':
            tool = 'javaw'
        if tool.lower() == 'unittest':
            tool = 'unittestw'
        # avoid loading the same tool more than once with the same functions
        # used by composite projects
        mag = (tool, id(self.env), funs)
        if mag in self.tool_cache:
            continue
        self.tool_cache.append(mag)
        if not tooldir:
            # check if the tool exists in the Tools or 3rdparty folders
            _Tools = Options.tooldir[0]
            _3rdparty = os.sep.join((_Tools, '..', '3rdparty'))
            for d in (_Tools, _3rdparty):
                lst = os.listdir(d)
                if tool + '.py' in lst:
                    break
            else:
                # try to download the tool from the repository then
                for x in Utils.to_list(Options.remote_repo):
                    for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
                        url = '/'.join((x, sub, tool + '.py'))
                        try:
                            web = urlopen(url)
                            if web.getcode() != 200:
                                continue
                        except Exception, e:
                            # on python3 urlopen throws an exception
                            continue
                        else:
                            try:
                                # store the downloaded tool into 3rdparty
                                loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
                                loc.write(web.read())
                                web.close()
                            finally:
                                loc.close()
                            Logs.warn('downloaded %s from %s' % (tool, url))
                    else:
                        # NOTE(review): for-else — runs when no sub url
                        # succeeded; stops trying further repositories
                        break
        module = Utils.load_tool(tool, tooldir)
        if funs is not None:
            self.eval_rules(funs)
        else:
            func = getattr(module, 'detect', None)
            if func:
                if type(func) is type(find_file):
                    func(self)
                else:
                    self.eval_rules(func)
        self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
def make_test(self):
    """create and wire up a 'utest' task for this test program"""
    if 'cprogram' not in self.features:
        Logs.error('test cannot be executed %s' % self)
        return
    self.default_install_path = None  # tests are not installed
    utest = self.create_task('utest')
    utest.set_inputs(self.link_task.outputs)
def load(self, filename):
    "Retrieve the variables from a file"
    table = self.table
    contents = Utils.readf(filename)
    for match in re_imp.finditer(contents):
        group = match.group
        # NOTE: eval'd content — the cache file must come from a trusted source
        table[group(2)] = eval(group(3))
    Logs.debug('env: %s', self.table)
def run_tests(verbose=1):
    # Run the CxxTester suite (Python 2 only: 'except X, e' syntax).
    try:
        suite = unittest.TestLoader().loadTestsFromTestCase(CxxTester)
        # use the next line to run only specific tests:
        # suite = unittest.TestLoader().loadTestsFromName("test_customized_debug_level", CxxTester)
        return unittest.TextTestRunner(verbosity=verbose).run(suite)
    except common_test.StartupError, e:
        Logs.error(e)
def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
                   headers=None, link=True, empty_decl=True, set_target=True):
    """
    check that the functions in 'list' are available in 'library'
    if they are, then make that library available as a dependency

    if the library is not available and mandatory==True, then
    raise an error.

    If the library is not available and mandatory==False, then
    add the library to the list of dependencies to remove from
    build rules

    optionally check for the functions first in libc

    NOTE: the parameter named 'list' shadows the builtin of the same name;
    kept for backward compatibility with keyword callers.
    """
    remaining = TO_LIST(list)
    liblist = TO_LIST(library)

    # check if some already found
    for f in remaining[:]:
        if CONFIG_SET(conf, 'HAVE_%s' % f.upper()):
            remaining.remove(f)

    # see if the functions are in libc
    if checklibc:
        for f in remaining[:]:
            if CHECK_FUNC(conf, f, link=True, headers=headers):
                remaining.remove(f)

    if remaining == []:
        # everything was already available; the libraries add no value
        for lib in liblist:
            if GET_TARGET_TYPE(conf, lib) != 'SYSLIB' and empty_decl:
                SET_TARGET_TYPE(conf, lib, 'EMPTY')
        return True

    checklist = conf.CHECK_LIB(liblist, empty_decl=empty_decl, set_target=set_target)
    for lib in liblist[:]:
        if not lib in checklist and mandatory:
            Logs.error("Mandatory library '%s' not found for functions '%s'" % (lib, list))
            sys.exit(1)

    ret = True
    for f in remaining:
        if not CHECK_FUNC(conf, f, lib=' '.join(checklist), headers=headers, link=link):
            ret = False
    return ret
def configure(conf):
    """select the physics static libraries, honouring --disable-features"""
    if 'physics' in Options.options.disable_features:
        Logs.info("physics disabled")
        conf.env['STATICLIB_PHYSICS'] = ['physics_null']
        return
    conf.env['STATICLIB_PHYSICS'] = [
        'physics',
        'BulletDynamics',
        'BulletCollision',
        'LinearMath',
        'Box2D',
    ]
def exec_test(self):
    # Execute one unit-test binary and record (file, returncode, out, err)
    # on the build context under a lock.
    status = 0
    variant = self.env.variant()
    filename = self.inputs[0].abspath(self.env)
    self.ut_exec = getattr(self, 'ut_exec', [filename])
    if getattr(self.generator, 'ut_fun', None):
        # give the task generator a chance to tweak ut_exec
        self.generator.ut_fun(self)
    try:
        fu = getattr(self.generator.bld, 'all_test_paths')
    except AttributeError:
        # first test run: build the shared-library search path environment
        # once and cache it on the build context
        fu = os.environ.copy()
        self.generator.bld.all_test_paths = fu
        lst = []
        for obj in self.generator.bld.all_task_gen:
            link_task = getattr(obj, 'link_task', None)
            if link_task and link_task.env.variant() == variant:
                lst.append(link_task.outputs[0].parent.abspath(obj.env))
        def add_path(dct, path, var):
            # prepend the build output dirs to the platform's library path
            dct[var] = os.pathsep.join(
                Utils.to_list(path) + [os.environ.get(var, '')])
        if sys.platform == 'win32':
            add_path(fu, lst, 'PATH')
        elif sys.platform == 'darwin':
            add_path(fu, lst, 'DYLD_LIBRARY_PATH')
            add_path(fu, lst, 'LD_LIBRARY_PATH')
        else:
            add_path(fu, lst, 'LD_LIBRARY_PATH')
    cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
    proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu,
                             stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
    (stdout, stderr) = proc.communicate()
    tup = (filename, proc.returncode, stdout, stderr)
    self.generator.utest_result = tup
    # utest_results is shared across parallel test tasks; guard it
    testlock.acquire()
    try:
        bld = self.generator.bld
        Logs.debug("ut: %r", tup)
        try:
            bld.utest_results.append(tup)
        except AttributeError:
            bld.utest_results = [tup]
    finally:
        testlock.release()
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    # a target may only be re-declared if it was previously EMPTY
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True
def gtest_attach(self):
    """add the GTEST uselib to this task generator; cancel it when absent"""
    if not self.env.HAVE_GTEST:
        Logs.error('gtest is not found')
        # drop all remaining methods so the generator does nothing
        self.meths[:] = []
        return
    uselib = self.uselib
    if isinstance(uselib, str):
        self.uselib = uselib + " GTEST"
    else:
        uselib.append('GTEST')
def scan(self):
    """scan a fortran source file for its module dependencies"""
    env = self.env
    parser = fortran_parser(env, env['INC_PATHS'], env["MODULE_SEARCH_PATH"])
    parser.start(self.inputs[0])
    if Logs.verbose:
        Logs.debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(parser.nodes), str(parser.names)))
    return (parser.nodes, parser.names)
def apply_unit_test(self):
    """create a 'unit_test' task whose output file marks the test as passed"""
    if 'cprogram' not in self.features:
        Logs.error('test cannot be executed %s' % self)
        return
    task = self.create_task('unit_test')
    task.set_inputs(self.link_task.outputs)
    # the '.passed' marker file records a successful run
    passed_name = os.path.basename(
        self.link_task.outputs[0].abspath(self.env)) + '.passed'
    task.set_outputs(self.path.find_or_declare(passed_name))
    task.run_from_dir = getattr(self, "run_from_dir", True)
def cmd_and_log(self, cmd, kw):
    # Run `cmd` in a shell, logging it first; fatal() on OS-level failure.
    # (Python 2 only: 'except X, e' syntax.)
    # NOTE(review): `kw` and the captured output are currently unused after
    # communicate() — confirm against callers whether a return is expected.
    Logs.debug('runner: %s\n' % cmd)
    if self.log:
        self.log.write('%s\n' % cmd)
    try:
        p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
        (out, err) = p.communicate()
    except OSError, e:
        self.log.write('error %r' % e)
        self.fatal(str(e))
def build_syslib_sets(bld, tgt_list):
    '''build the public_symbols for all syslibs'''
    # cached: only computed once per build
    if bld.env.syslib_symbols:
        return

    # work out what syslibs we depend on, and what targets those are used in
    syslibs = {}
    objmap = {}
    for t in tgt_list:
        if getattr(t, 'uselib', []) and t.samba_type in ['LIBRARY', 'BINARY', 'PYTHON']:
            for lib in t.uselib:
                if lib in ['PYEMBED', 'PYEXT']:
                    # both map onto the python interpreter's symbols
                    lib = "python"
                if not lib in syslibs:
                    syslibs[lib] = []
                syslibs[lib].append(t)

    # work out the paths to each syslib
    syslib_paths = []
    for lib in syslibs:
        path = find_syslib_path(bld, lib, syslibs[lib])
        if path is None:
            Logs.warn("Unable to find syslib path for %s" % lib)
        if path is not None:
            syslib_paths.append(path)
            objmap[path] = lib.lower()

    # add in libc
    syslib_paths.append(bld.env.libc_path)
    objmap[bld.env.libc_path] = 'c'

    # dynamic=True: read the dynamic symbol tables of the shared libraries
    symbols = symbols_extract(bld, syslib_paths, dynamic=True)

    # keep a map of syslib names to public symbols
    bld.env.syslib_symbols = {}
    for lib in symbols:
        bld.env.syslib_symbols[lib] = symbols[lib]["PUBLIC"]

    # add to the map of symbols to dependencies
    for lib in symbols:
        for sym in symbols[lib]["PUBLIC"]:
            if not sym in bld.env.symbol_map:
                bld.env.symbol_map[sym] = []
            bld.env.symbol_map[sym].append(objmap[lib])

    # keep the libc symbols as well, as these are useful for some of the
    # sanity checks
    bld.env.libc_symbols = symbols[bld.env.libc_path]["PUBLIC"]

    # add to the combined map of dependency name to public_symbols
    for lib in bld.env.syslib_symbols:
        bld.env.public_symbols[objmap[lib]] = bld.env.syslib_symbols[lib]
def CHECK_BUNDLED_SYSTEM_PKG(conf, libname, minversion='0.0.0',
                             onlyif=None, implied_deps=None, pkg=None):
    '''check if a library is available as a system library.

    This only tries using pkg-config
    '''
    if conf.LIB_MUST_BE_BUNDLED(libname):
        return False

    # cached result from a previous call
    found = 'FOUND_SYSTEMLIB_%s' % libname
    if found in conf.env:
        return conf.env[found]

    # see if the library should only use a system version if another dependent
    # system version is found. That prevents possible use of mixed library
    # versions
    if onlyif:
        if not conf.CHECK_PREREQUISITES(onlyif):
            if not conf.LIB_MAY_BE_BUNDLED(libname):
                Logs.error('ERROR: Use of system library %s depends on missing system library %s' % (libname, onlyif))
                sys.exit(1)
            conf.env[found] = False
            return False

    minversion = minimum_library_version(conf, libname, minversion)

    msg = 'Checking for system %s' % libname
    if minversion != '0.0.0':
        msg += ' >= %s' % minversion

    if pkg is None:
        pkg = libname

    # ask pkg-config for the flags; on success register as a SYSLIB target
    if conf.check_cfg(package=pkg,
                      args='"%s >= %s" --cflags --libs' % (pkg, minversion),
                      msg=msg, uselib_store=libname.upper()):
        conf.SET_TARGET_TYPE(libname, 'SYSLIB')
        conf.env[found] = True
        if implied_deps:
            conf.SET_SYSLIB_DEPS(libname, implied_deps)
        return True
    conf.env[found] = False
    if not conf.LIB_MAY_BE_BUNDLED(libname):
        Logs.error('ERROR: System library %s of version %s not found, and bundling disabled' % (libname, minversion))
        sys.exit(1)
    return False
def check_duplicate_sources(bld, tgt_list):
    '''see if we are compiling the same source file more than once
       without an allow_duplicates attribute'''
    debug('deps: checking for duplicate sources')
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
    ret = True
    global tstart

    # pass 1: record each target's normalised source paths, failing hard on
    # the same file listed twice within a single target
    for t in tgt_list:
        source_list = TO_LIST(getattr(t, 'source', ''))
        tpath = os.path.normpath(
            os_path_relpath(t.path.abspath(bld.env),
                            t.env.BUILD_DIRECTORY + '/default'))
        obj_sources = set()
        for s in source_list:
            p = os.path.normpath(os.path.join(tpath, s))
            if p in obj_sources:
                Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))
                sys.exit(1)
            obj_sources.add(p)
        t.samba_source_set = obj_sources

    subsystems = {}

    # build a list of targets that each source file is part of
    for t in tgt_list:
        sources = []
        if not targets[t.sname] in ['LIBRARY', 'BINARY', 'PYTHON']:
            continue
        for obj in t.add_objects:
            t2 = t.bld.name_to_obj(obj, bld.env)
            source_set = getattr(t2, 'samba_source_set', set())
            for s in source_set:
                if not s in subsystems:
                    subsystems[s] = {}
                if not t.sname in subsystems[s]:
                    subsystems[s][t.sname] = []
                subsystems[s][t.sname].append(t2.sname)

    for s in subsystems:
        # a source in several top-level targets is only a warning (opt-in)
        if len(subsystems[s]) > 1 and Options.options.SHOW_DUPLICATES:
            Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
        # a source in several subsystems of one target is always fatal
        for tname in subsystems[s]:
            if len(subsystems[s][tname]) > 1:
                raise Utils.WafError(
                    "ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))

    return ret
def RUN_COMMAND(cmd, env=None, shell=False):
    '''run a external command, return exit code or signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        # negative value signals "killed by signal N"
        return -os.WTERMSIG(status)
    # bug fix: the original was missing the '%' operator, so this line
    # raised "TypeError: 'str' object is not callable" instead of logging
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
def daemon(ctx):
    """waf command: rebuild as soon as something changes"""
    # (Python 2 only: 'except X, e' syntax.)
    # NOTE(review): the loop rebuilds continuously; any change detection
    # presumably happens inside Scripting.build — confirm before relying on it
    bld = None
    while True:
        try:
            bld = Utils.g_module.build_context()
            Scripting.build(bld)
        except Build.BuildError, e:
            # a failed build is logged and retried on the next iteration
            Logs.warn(e)
        except KeyboardInterrupt:
            Utils.pprint('RED', 'interrupted')
            break
def check_tool(self, input, tooldir=None, funs=None):
    # Load one or more waf tools, downloading missing ones from the
    # remote repository when necessary (Python 3 variant).
    tools = Utils.to_list(input)
    if tooldir:
        tooldir = Utils.to_list(tooldir)
    for tool in tools:
        # normalise aliased tool names
        tool = tool.replace('++', 'xx')
        if tool == 'java':
            tool = 'javaw'
        if tool.lower() == 'unittest':
            tool = 'unittestw'
        # avoid loading the same tool more than once with the same functions
        mag = (tool, id(self.env), funs)
        if mag in self.tool_cache:
            continue
        self.tool_cache.append(mag)
        if not tooldir:
            # check if the tool exists in the Tools or 3rdparty folders
            _Tools = Options.tooldir[0]
            _3rdparty = os.sep.join((_Tools, '..', '3rdparty'))
            for d in (_Tools, _3rdparty):
                lst = os.listdir(d)
                if tool + '.py' in lst:
                    break
            else:
                # not found locally: try the remote repositories
                for x in Utils.to_list(Options.remote_repo):
                    for sub in [
                        'branches/waf-%s/wafadmin/3rdparty' % WAFVERSION,
                        'trunk/wafadmin/3rdparty'
                    ]:
                        url = '/'.join((x, sub, tool + '.py'))
                        try:
                            web = urlopen(url)
                            if web.getcode() != 200:
                                continue
                        except Exception as e:
                            continue
                        else:
                            try:
                                # save the downloaded tool into 3rdparty
                                loc = open(
                                    _3rdparty + os.sep + tool + '.py', 'wb')
                                loc.write(web.read())
                                web.close()
                            finally:
                                loc.close()
                            Logs.warn('downloaded %s from %s' % (tool, url))
                    else:
                        # NOTE(review): for-else — runs when no sub url
                        # succeeded; stops trying further repositories
                        break
        module = Utils.load_tool(tool, tooldir)
        if funs is not None:
            self.eval_rules(funs)
        else:
            func = getattr(module, 'detect', None)
            if func:
                if type(func) is type(find_file):
                    func(self)
                else:
                    self.eval_rules(func)
        self.tools.append({'tool': tool, 'tooldir': tooldir, 'funs': funs})
def check_tool(self, input, tooldir=None, funs=None):
    "load a waf tool"
    # Python 2 variant: 'except X, e' syntax. The download branch below is
    # dead code — 'if 1: raise e' always re-raises before reaching it.
    tools = Utils.to_list(input)
    if tooldir:
        tooldir = Utils.to_list(tooldir)
    for tool in tools:
        # normalise aliased tool names
        tool = tool.replace('++', 'xx')
        if tool == 'java':
            tool = 'javaw'
        if tool.lower() == 'unittest':
            tool = 'unittestw'
        # avoid loading the same tool more than once with the same functions
        # used by composite projects
        mag = (tool, id(self.env), funs)
        if mag in self.tool_cache:
            continue
        self.tool_cache.append(mag)
        try:
            module = Utils.load_tool(tool, tooldir)
        except Exception, e:
            if 1:
                # always re-raise: the fallback below is disabled
                raise e
            else:
                # try to download the tool from the repository then
                for x in Utils.to_list(Options.remote_repo):
                    for sub in [
                        'branches/waf-%s/wafadmin/3rdparty' % WAFVERSION,
                        'trunk/wafadmin/3rdparty'
                    ]:
                        url = '/'.join((x, sub, tool + '.py'))
                        try:
                            web = urlopen(url)
                            if web.getcode() != 200:
                                continue
                        except Exception, e:
                            # on python3 urlopen throws an exception
                            continue
                        else:
                            try:
                                # NOTE(review): '_3rdparty' is not defined in
                                # this variant — this dead branch would
                                # NameError if ever enabled
                                loc = open(
                                    _3rdparty + os.sep + tool + '.py', 'wb')
                                loc.write(web.read())
                                web.close()
                            finally:
                                loc.close()
                            Logs.warn('downloaded %s from %s' % (tool, url))
                    else:
                        break
                else:
                    break
def waf_version(mini=0x010000, maxi=0x100000):
    """abort unless HEXVERSION lies within [mini, maxi]; the bounds may be
    ints or dotted strings such as '1.5.2'"""
    def as_hex(bound):
        # ints pass through; a dotted string like '1.5.2' becomes 0x10502
        try:
            return bound + 0
        except TypeError:
            return int(bound.replace('.', '0'), 16)

    ver = HEXVERSION
    if as_hex(mini) > ver:
        Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
        sys.exit(0)
    if as_hex(maxi) < ver:
        Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
        sys.exit(0)
def do_install(self, src, tgt, chmod=O644):
    """returns true if the file was effectively installed or uninstalled, false otherwise"""
    # (Python 2 only: 'except X, e' syntax.)
    if self.is_install > 0:
        if not Options.options.force:
            # check if the file is already there to avoid a copy
            try:
                st1 = os.stat(tgt)
                st2 = os.stat(src)
            except OSError:
                pass
            else:
                # same size and identical timestamps -> make no copy
                if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                    return False
        srclbl = src.replace(self.srcnode.abspath(None) + os.sep, '')
        info("* installing %s as %s" % (srclbl, tgt))
        # following is for shared libs and stale inodes (-_-)
        try:
            os.remove(tgt)
        except OSError:
            pass
        try:
            shutil.copy2(src, tgt)
            if chmod >= 0:
                os.chmod(tgt, chmod)
        except IOError:
            try:
                os.stat(src)
            except (OSError, IOError):
                error('File %r does not exist' % src)
            raise Utils.WafError('Could not install the file %r' % tgt)
        return True
    elif self.is_install < 0:
        info("* uninstalling %s" % tgt)
        self.uninstall.append(tgt)
        try:
            os.remove(tgt)
        except OSError, e:
            # a missing file is fine; anything else is reported (once)
            if e.errno != errno.ENOENT:
                if not getattr(self, 'uninstall_error', None):
                    self.uninstall_error = True
                    Logs.warn(
                        'build: some files could not be uninstalled (retry with -vv to list them)'
                    )
                if Logs.verbose > 1:
                    Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
        return True
def UPC(ItemLink, UPCDataBase):
    """Return the UPC (GTIN-13) for a product page URL, caching results.

    Looks the URL up in the TinyDB-style cache first; on a miss, fetches
    the page (with retries and exponential backoff), scrapes the
    itemprop="gtin13" element, stores the outcome in the cache, and
    returns it.

    Returns:
        The UPC string on success, -1 when the page carries no usable
        UPC, or None when the page could not be fetched at all.
    """
    retries = 5       # attempts before giving up on the HTTP fetch
    sleep_time = 1    # seconds between failed attempts (doubles each time)
    ItemLink = ItemLink.split('?')[0]  # drop the query string from the URL
    db = UPCDataBase
    user = Query()
    cached = db.search(user.Link == ItemLink)
    if cached:  # cache hit: answer without any network I/O
        Logs.Write("Get UPC From Item Link: " + ItemLink +
                   " Cache: " + str(cached[0]['UPC']))
        return cached[0]['UPC']

    page = None
    for _attempt in range(retries):
        try:
            page = requests.get(ItemLink)
            break  # FIX: the original had no break, re-fetching 5 times
        except requests.exceptions.RequestException:
            # FIX: catch only network errors instead of a bare except
            print("Connection Error, Retrying in " + str(sleep_time) + " seconds")
            time.sleep(sleep_time)
            sleep_time *= 2  # exponential backoff

    if page is None:  # every attempt raised; give up
        Logs.Write("Get UPC From Item Link: " + ItemLink +
                   " Error: Unable to resolve connection error after 5 attempts")
        return None

    # Parse the received page
    soup = BeautifulSoup(page.text, 'html.parser')
    upc_tag = soup.find(itemprop="gtin13")
    if upc_tag is None:  # page has no UPC markup at all
        upc = -1
        db.insert({'Link': ItemLink, 'UPC': upc, "Time": time.time()})
        Logs.Write("Get UPC From Item Link: " + ItemLink + " Returned: No UPC")
        return upc

    upc = upc_tag.contents[0]
    if not upc.isdigit():  # markup present but not a numeric code
        upc = -1
        db.insert({'Link': ItemLink, 'UPC': upc, "Time": time.time()})
        Logs.Write("Get UPC From Item Link: " + ItemLink + " Returned: No UPC")
        return upc

    # UPC found in the expected numeric format; cache it and return.
    db.insert({'Link': ItemLink, 'UPC': upc, "Time": time.time()})
    # FIX: the original log literal was split across a physical newline
    # (a syntax error); reassembled as one string.
    Logs.Write("Get UPC From Item Link: " + ItemLink + " Returned: " + str(upc))
    return upc
def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined

    Expands every ${NAME} reference in 'string' using the mapping 'env'.
    Logs an error and exits the process when a referenced variable is
    missing from 'env'.
    '''
    # FIX: use raw strings — the unescaped '\$' / '\{' sequences are
    # invalid escapes that warn (and will eventually error) on Python 3.
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]  # strip the '${' prefix and '}' suffix
            if vname not in env:
                Logs.error("Failed to find variable %s in %s" % (vname, string))
                sys.exit(1)
            v = env[vname]
        out.append(v)
    return ''.join(out)
def previewSettings(self):
    """Interactively preview the saved sort settings and, on confirmation,
    move files from a user-chosen directory to their configured folders.

    Reads settings.json via FileActions; each entry maps a file extension
    ("ext") to a destination directory ("dir").  Every move is appended to
    ./Logs/MovedFiles.txt, and files left behind are listed afterwards.
    Re-prompts recursively on an invalid directory; offers to load new
    settings when the user declines the current ones.
    """
    directory = input("Enter the directory you would like to sort\n")
    if os.path.isdir(directory):
        file_actions = FileActions()
        settings = file_actions.jsonRead("settings.json")
        if settings != False:
            print("Here are your current settings for moving the files\n\n")
            for s in settings["settings"]:
                print(s["ext"] + "\t" + s["dir"])
            ans = input("Would you like to use these settings?\t[y/n]\n")
            if ans.lower() == "y":
                print("method to move files using settings")
                logs = Logs()
                directoryList = os.listdir(directory)
                # FIX: the original iterated range(len(...) - 1), which
                # silently skipped the last settings entry.
                for entry in settings["settings"]:
                    fileExt = entry["ext"]
                    for name in directoryList:
                        if fileExt.lower() == file_actions.extension(name):
                            r = file_actions.moveFile(
                                directory + '/' + name, entry["dir"])
                            logs.log(
                                'simple', './Logs/MovedFiles.txt',
                                "\nFile " + directory + "/" + name +
                                " moved to " + entry["dir"] + " on " +
                                str(datetime.date.today()) + " at " +
                                str(datetime.datetime.now()))
                            # moveFile reports errors via its return value
                            if r != True:
                                print(r)
                # FIX: the original tested os.path.isfile on the bare
                # filename (relative to the CWD, not the chosen directory),
                # so leftovers were rarely reported.  Also avoids reusing
                # 'f' (the FileActions instance) as the loop variable.
                for name in os.listdir(directory):
                    if os.path.isfile(os.path.join(directory, name)):
                        print(name + " not moved ")
            else:
                c = Control()
                c.loadNewSettings()
        else:
            # jsonRead signalled failure; show what it returned
            print(settings)
    else:
        print("Not a valid directory")
        c = Control()
        c.previewSettings()
def check_orpaned_targets(bld, tgt_list):
    '''check if any build targets are orphaned'''
    # target types that are legitimately linked into other targets
    linked_types = ('BINARY', 'LIBRARY', 'MODULE', 'ET', 'PYTHON')
    type_of = LOCAL_CACHE(bld, 'TARGET_TYPE')
    debug('deps: checking for orphaned targets')
    for tgt in tgt_list:
        if getattr(tgt, 'samba_used', False) == True:
            continue  # something links against this target
        ttype = type_of[tgt.sname]
        if ttype in linked_types:
            continue
        # PIDL_* targets are generated and expected to look unused
        if tgt.sname.startswith('PIDL_'):
            continue
        Logs.warn("Target %s of type %s is unused by any other target" % (tgt.sname, ttype))
def get_text_messages(message):
    # Telegram text-message handler: answers currency-rate queries,
    # either city-specific rates or Central Bank ("цб") rates.
    string = str(message.text).lower()
    # direct address to the bot ("бот, скажи", "эй, бот", ...)
    ifbot = re.findall(r'бот,скажи|эй,бот|эй, бот|бот, скажи',string)
    # NOTE(review): 'and' binds tighter than 'or', so private chats are
    # handled even without addressing the bot, while group chats require
    # it — confirm this precedence is intended.
    if message.chat.type == "private" or message.chat.type == "group" and len(ifbot) > 0:
        Logs.log(message)
        # currency mentions (dollar, euro, yuan, pound, yen stems)
        result = re.findall(r'доллар|евро|юан|фунт|йен',string)
        # Russian city-name stems for a city-specific rate lookup
        result += re.findall(r'саратов|москв|санкт-петербург|казан|питер|екатеринбург' +
            r'|нижн|новосибирск|самар|ростове-на-дону|красноярск|воронеж|краснодар|' +
            r'тюмен|ижевск|иркутск|хабаровск|благовещенск|архангельск|астрахан|белгород|брянск' +
            r'|владимир|волгоград|вологд|иваново|калининград|калуга|петропавловске-камчатский|кемерово' +
            r'|киров|костром|курган|курск|липецк|магадан|мурманск|велик|омск|оренбург|пенз|перм|псков' +
            r'|рязан|южно-сахалинск|смоленск|тамбов|тул|тюмен|ульяновск|челябинск|чит|ярославл|майкоп' +
            r'|горно-алтайск|уф|улан-уде|махачкал|биробиджан|нальчик|элист|черкесск|петрозаводск|сыктывкар|симферопол' +
            r'|йошкар-ол|саранск|якутск|владикавказ|казан|кызыл|ижевск|абакан|грозн|чебоксар|барнаул|краснодар|' +
            r'владивосток|ставропол|нарьян-мар|ханты-мансийск|анадыр|салехард'
            ,string)
        # Central Bank query: a currency plus an optional dd-mm-yyyy-like date
        resultcb = re.findall(r'доллар|евро|юан|фунт|йен',string)
        resultcb += re.findall(r'..-..-....',string)
        # Nothing usable: need currency+city (unless "цб" present), or a
        # currency for the CB branch; a leading digit means the date matched
        # without a currency.
        if len(result) < 2 and len(re.findall(r'цб',string)) == 0 or len(resultcb) == 0 or resultcb[0][0].isnumeric():
            Logs.log(bot.send_message(message.chat.id,"Прости, по твоему запросу ничего не удалось найти("))
        elif len(re.findall(r'цб',string)) != 0 and len(resultcb) != 0:
            # Central Bank rate, optionally for a specific date
            date = None
            currency = DAO.parse(resultcb)
            if len(resultcb) == 2:
                date = resultcb[1]
            Logs.log(bot.send_message(message.chat.id,CentralBank.GetCurrency(currency,date)))
        elif(len(result) >= 2):
            # City-specific rate: DAO.parse here yields (currency, city) —
            # presumably it dispatches on input shape; verify against DAO.
            currency,city = DAO.parse(result)
            print(currency,city)
            Logs.log(bot.send_message(message.chat.id,Parser.GetCurrency(currency,city)))
def show_object_duplicates(bld, tgt_list):
    '''show a list of object files that are included in more than one
    library or binary, then print indirect-dependency counts per target'''
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    used_by = {}  # object name -> set of target names that include it

    Logs.info("showing duplicate objects")

    for t in tgt_list:
        # only objects pulled into libraries / python modules can duplicate
        if not targets[t.sname] in ['LIBRARY', 'PYTHON']:
            continue
        for n in getattr(t, 'final_objects', set()):
            # NOTE(review): the lookup result is unused; kept because the
            # call may raise on unknown names — confirm before removing.
            t2 = bld.get_tgen_by_name(n)
            if not n in used_by:
                used_by[n] = set()
            used_by[n].add(t.sname)

    for n in used_by:
        if len(used_by[n]) > 1:
            Logs.info("target '%s' is used by %s" % (n, used_by[n]))

    Logs.info("showing indirect dependency counts (sorted by count)")

    # FIX: sorted(..., cmp=...) only exists on Python 2; the equivalent
    # key= sort (descending by indirect-object count, stable for ties)
    # works on both Python 2 and 3.
    sorted_list = sorted(tgt_list,
                         key=lambda t: len(t.indirect_objects),
                         reverse=True)
    for t in sorted_list:
        if len(t.indirect_objects) > 1:
            Logs.info("%s depends on %u indirect objects" % (t.sname, len(t.indirect_objects)))