def apply_vnum(self):
    """
    Enforce version numbering on shared libraries. The valid version numbers must have either zero or two dots::

        def build(bld):
            bld.shlib(source='a.c', target='foo', vnum='14.15.16')

    In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following
    symbolic links are created:

    * ``libfoo.so -> libfoo.so.14.15.16``
    * ``libfoo.so.14 -> libfoo.so.14.15.16``

    By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility
    between all minor and patch releases for the major version of the library. When necessary, the compatibility
    can be explicitly defined using `cnum` parameter::

        def build(bld):
            bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')

    In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between patch
    releases for a specific major and minor version of the library.

    On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also
    specifies an absolute path (based on install_path) of the library.
    """
    # Only applies to versioned POSIX shared objects; 'mac-o' is waf's
    # DEST_BINFMT constant for Mach-O (sic, not 'mach-o').
    if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
        return

    link = self.link_task
    if not re_vnum.match(self.vnum):
        raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
    nums = self.vnum.split('.')
    node = link.outputs[0]

    # cnum (compatibility version) defaults to the major version and must be
    # a dotted prefix of vnum.
    cnum = getattr(self, 'cnum', str(nums[0]))
    cnums = cnum.split('.')
    if len(cnums) > len(nums) or nums[0:len(cnums)] != cnums:
        raise Errors.WafError('invalid compatibility version %s' % cnum)

    # name3 = fully-versioned file name, name2 = SONAME / compatibility name.
    libname = node.name
    if libname.endswith('.dylib'):
        name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
        name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
    else:
        name3 = libname + '.' + self.vnum
        name2 = libname + '.' + cnum

    # add the so name for the ld linker - to disable, just unset env.SONAME_ST
    if self.env.SONAME_ST:
        v = self.env.SONAME_ST % name2
        self.env.append_value('LINKFLAGS', v.split())

    # the following task is just to enable execution from the build dir :-/
    if self.env.DEST_OS != 'openbsd':
        outs = [node.parent.make_node(name3)]
        if name2 != name3:
            outs.append(node.parent.make_node(name2))
        self.create_task('vnum', node, outs)

    if getattr(self, 'install_task', None):
        # Replace the plain install task with versioned install + symlinks.
        self.install_task.hasrun = Task.SKIPPED
        path = self.install_task.install_to
        if self.env.DEST_OS == 'openbsd':
            # OpenBSD installs only the plain name; no version symlinks.
            libname = self.link_task.outputs[0].name
            t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node,
                                     chmod=self.link_task.chmod)
            self.vnum_install_task = (t1, )
        else:
            # Install the real file as name3, then symlink the unversioned and
            # (when distinct) compatibility names back to it.
            t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node,
                                     chmod=self.link_task.chmod)
            t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
            if name2 != name3:
                t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
                self.vnum_install_task = (t1, t2, t3)
            else:
                self.vnum_install_task = (t1, t3)

    if '-dynamiclib' in self.env.LINKFLAGS:
        # this requires after(propagate_uselib_vars)
        try:
            inst_to = self.install_path
        except AttributeError:
            inst_to = self.link_task.__class__.inst_to
        if inst_to:
            # Mach-O install-name is an absolute path based on the install dir.
            p = Utils.subst_vars(inst_to, self.env)
            path = os.path.join(p, name2)
            self.env.append_value('LINKFLAGS', ['-install_name', path])
        self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
        self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
def post(self):
    """
    Creates tasks for this task generator. The following operations are performed:

    #. The body of this method is called only once and sets the attribute ``posted``
    #. The attribute ``features`` is used to add more methods in ``self.meths``
    #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
    #. The methods are then executed in order
    #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`

    :returns: True if the task generator was posted by this call, False if it was already posted
    """
    if getattr(self, 'posted', None):
        return False
    self.posted = True

    # Methods bound to all task generators, plus those requested explicitly.
    keys = set(self.meths)
    keys.update(feats['*'])

    # add the methods listed in the features
    self.features = Utils.to_list(self.features)
    for x in self.features:
        st = feats[x]
        if st:
            keys.update(st)
        elif not x in Task.classes:
            # A feature with no bound method and no task class of that name is
            # almost certainly a typo.
            Logs.warn('feature %r does not exist - bind at least one method to it?', x)

    # copy the precedence table (only the constraints relevant to our methods)
    prec = {}
    prec_tbl = self.prec
    for x in prec_tbl:
        if x in keys:
            prec[x] = prec_tbl[x]

    # elements disconnected: methods that nothing must run before seed the sort
    tmp = []
    for a in keys:
        for x in prec.values():
            if a in x:
                break
        else:
            tmp.append(a)

    # reverse sort + pop() gives a deterministic alphabetical execution order
    tmp.sort(reverse=True)

    # topological sort (Kahn's algorithm over the precedence constraints)
    out = []
    while tmp:
        e = tmp.pop()
        if e in keys:
            out.append(e)
        try:
            nlst = prec[e]
        except KeyError:
            pass
        else:
            del prec[e]
            for x in nlst:
                for y in prec:
                    if x in prec[y]:
                        break
                else:
                    tmp.append(x)
                    tmp.sort(reverse=True)

    # anything left in prec is part of a dependency cycle
    if prec:
        buf = ['Cycle detected in the method execution:']
        for k, v in prec.items():
            buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
        raise Errors.WafError('\n'.join(buf))

    self.meths = out

    # then we run the methods in order
    Logs.debug('task_gen: posting %s %d', self, id(self))
    for x in out:
        try:
            v = getattr(self, x)
        except AttributeError:
            raise Errors.WafError('%r is not a valid task generator method' % x)
        Logs.debug('task_gen: -> %s (%d)', x, id(self))
        v()

    Logs.debug('task_gen: posted %s', self.name)
    return True
def SAMBA_LIBRARY(bld, libname, source,
                  deps='',
                  public_deps='',
                  includes='',
                  public_headers=None,
                  public_headers_install=True,
                  private_headers=None,
                  header_path=None,
                  pc_files=None,
                  vnum=None,
                  soname=None,
                  cflags='',
                  cflags_end=None,
                  ldflags='',
                  external_library=False,
                  realname=None,
                  keep_underscore=False,
                  autoproto=None,
                  autoproto_extra_source='',
                  group='main',
                  depends_on='',
                  local_include=True,
                  global_include=True,
                  vars=None,
                  subdir=None,
                  install_path=None,
                  install=True,
                  pyembed=False,
                  pyext=False,
                  target_type='LIBRARY',
                  bundled_extension=False,
                  bundled_name=None,
                  link_name=None,
                  abi_directory=None,
                  abi_match=None,
                  hide_symbols=False,
                  manpages=None,
                  private_library=False,
                  grouping_library=False,
                  allow_undefined_symbols=False,
                  allow_warnings=False,
                  enabled=True):
    '''define a Samba library

    Builds the library in two stages: a ``<libname>.objlist`` subsystem that
    compiles the object files once, and the shared-library target proper that
    links them. Public (non-private) libraries are required to carry a vnum,
    a pkg-config file and public headers. NOTE(review): ``external_library``
    is accepted but never read in this body.
    '''
    if pyembed and bld.env['IS_EXTRA_PYTHON']:
        public_headers = None

    if private_library and public_headers:
        raise Errors.WafError("private library '%s' must not have public header files" % libname)

    if LIB_MUST_BE_PRIVATE(bld, libname):
        private_library = True

    if not enabled:
        SET_TARGET_TYPE(bld, libname, 'DISABLED')
        return

    source = bld.EXPAND_VARIABLES(source, vars=vars)
    if subdir:
        source = bld.SUBDIR(subdir, source)

    # remember empty libraries, so we can strip the dependencies
    if ((source == '') or (source == [])):
        if deps == '' and public_deps == '':
            SET_TARGET_TYPE(bld, libname, 'EMPTY')
            return
        # a library with deps but no sources still needs one translation unit
        empty_c = libname + '.empty.c'
        bld.SAMBA_GENERATOR('%s_empty_c' % libname,
                            rule=generate_empty_file,
                            target=empty_c)
        source = empty_c

    if BUILTIN_LIBRARY(bld, libname):
        obj_target = libname
    else:
        obj_target = libname + '.objlist'

    if group == 'libraries':
        subsystem_group = 'main'
    else:
        subsystem_group = group

    # first create a target for building the object files for this library
    # by separating in this way, we avoid recompiling the C files
    # separately for the install library and the build library
    bld.SAMBA_SUBSYSTEM(obj_target,
                        source=source,
                        deps=deps,
                        public_deps=public_deps,
                        includes=includes,
                        public_headers=public_headers,
                        public_headers_install=public_headers_install,
                        private_headers=private_headers,
                        header_path=header_path,
                        cflags=cflags,
                        cflags_end=cflags_end,
                        group=subsystem_group,
                        autoproto=autoproto,
                        autoproto_extra_source=autoproto_extra_source,
                        depends_on=depends_on,
                        hide_symbols=hide_symbols,
                        allow_warnings=allow_warnings,
                        pyembed=pyembed,
                        pyext=pyext,
                        local_include=local_include,
                        global_include=global_include)

    if BUILTIN_LIBRARY(bld, libname):
        return

    if not SET_TARGET_TYPE(bld, libname, target_type):
        return

    # the library itself will depend on that object target
    deps += ' ' + public_deps
    deps = TO_LIST(deps)
    deps.append(obj_target)

    realname = bld.map_shlib_extension(realname, python=(target_type == 'PYTHON'))
    link_name = bld.map_shlib_extension(link_name, python=(target_type == 'PYTHON'))

    # we don't want any public libraries without version numbers
    if (not private_library and target_type != 'PYTHON' and not realname):
        if vnum is None and soname is None:
            raise Errors.WafError("public library '%s' must have a vnum" % libname)
        if pc_files is None:
            raise Errors.WafError("public library '%s' must have pkg-config file" % libname)
        if public_headers is None and not bld.env['IS_EXTRA_PYTHON']:
            raise Errors.WafError("public library '%s' must have header files" % libname)

    # work out the on-disk name of the library
    if bundled_name is not None:
        pass
    elif target_type == 'PYTHON' or realname or not private_library:
        if keep_underscore:
            bundled_name = libname
        else:
            bundled_name = libname.replace('_', '-')
    else:
        # private libraries get a mangled name to avoid clashing with system copies
        assert (private_library == True and realname is None)
        if abi_directory or vnum or soname:
            bundled_extension = True
        bundled_name = PRIVATE_NAME(bld, libname.replace('_', '-'), bundled_extension, private_library)

    ldflags = TO_LIST(ldflags)
    if bld.env['ENABLE_RELRO'] is True:
        ldflags.extend(TO_LIST('-Wl,-z,relro,-z,now'))

    features = 'c cshlib symlink_lib install_lib'
    if pyext:
        features += ' pyext'
    if pyembed:
        features += ' pyembed'

    if abi_directory:
        features += ' abi_check'

    if pyembed and bld.env['PYTHON_SO_ABI_FLAG']:
        # For ABI checking, we don't care about the exact Python version.
        # Replace the Python ABI tag (e.g. ".cpython-35m") by a generic ".py3"
        abi_flag = bld.env['PYTHON_SO_ABI_FLAG']
        replacement = '.py%s' % bld.env['PYTHON_VERSION'].split('.')[0]
        version_libname = libname.replace(abi_flag, replacement)
    else:
        version_libname = libname

    vscript = None
    if bld.env.HAVE_LD_VERSION_SCRIPT:
        # pick the symbol-version string for the ld version script
        if private_library:
            version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION)
        elif vnum:
            version = "%s_%s" % (libname, vnum)
        else:
            version = None
        if version:
            vscript = "%s.vscript" % libname
            bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript, abi_match)
            fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN)
            fullpath = bld.path.find_or_declare(fullname)
            vscriptpath = bld.path.find_or_declare(vscript)
            if not fullpath:
                raise Errors.WafError("unable to find fullpath for %s" % fullname)
            if not vscriptpath:
                raise Errors.WafError("unable to find vscript path for %s" % vscript)
            # relink whenever the version script changes
            bld.add_manual_dependency(fullpath, vscriptpath)
            if bld.is_install:
                # also make the .inst file depend on the vscript
                instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN)
                bld.add_manual_dependency(bld.path.find_or_declare(instname),
                                          bld.path.find_or_declare(vscript))
            vscript = os.path.join(bld.path.abspath(bld.env), vscript)

    bld.SET_BUILD_GROUP(group)
    t = bld(
        features=features,
        source=[],
        target=bundled_name,
        depends_on=depends_on,
        samba_ldflags=ldflags,
        samba_deps=deps,
        samba_includes=includes,
        version_script=vscript,
        version_libname=version_libname,
        local_include=local_include,
        global_include=global_include,
        vnum=vnum,
        soname=soname,
        install_path=None,
        samba_inst_path=install_path,
        name=libname,
        samba_realname=realname,
        samba_install=install,
        abi_directory="%s/%s" % (bld.path.abspath(), abi_directory),
        abi_match=abi_match,
        private_library=private_library,
        grouping_library=grouping_library,
        allow_undefined_symbols=allow_undefined_symbols)

    if realname and not link_name:
        link_name = 'shared/%s' % realname

    if link_name:
        if 'waflib.extras.compat15' in sys.modules:
            link_name = 'default/' + link_name
        t.link_name = link_name

    if pc_files is not None and not private_library:
        if pyembed:
            bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG'])
        else:
            bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)

    if (manpages is not None and 'XSLTPROC_MANPAGES' in bld.env and bld.env['XSLTPROC_MANPAGES']):
        bld.MANPAGES(manpages, install)
self.to_log(cmd) try: p=subprocess.Popen(cmd,**kw) (out,err)=p.communicate() except Exception ,e: raise Errors.WafError('Execution failure: %s'%str(e),ex=e) if not isinstance(out,str): out=out.decode(sys.stdout.encoding or'iso8859-1') if not isinstance(err,str): err=err.decode(sys.stdout.encoding or'iso8859-1') if out and quiet!=STDOUT and quiet!=BOTH: self.to_log('out: %s'%out) if err and quiet!=STDERR and quiet!=BOTH: self.to_log('err: %s'%err) if p.returncode: e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode)) e.returncode=p.returncode e.stderr=err e.stdout=out raise e if to_ret==BOTH: return(out,err) elif to_ret==STDERR: return err return out def fatal(self,msg,ex=None): if self.logger: self.logger.info('from %s: %s'%(self.path.abspath(),msg)) try: msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename) except Exception:
def exec_command(self, cmd, **kw):
    """
    Runs an external process and returns the exit status::

        def run(tsk):
            ret = tsk.generator.bld.exec_command('touch foo.txt')
            return ret

    If the context has the attribute 'log', then captures and logs the process stderr/stdout.
    Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
    stdout/stderr values captured.

    :param cmd: command argument for subprocess.Popen
    :type cmd: string or list
    :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
    :type kw: dict
    :returns: process exit status
    :rtype: integer
    """
    subprocess = Utils.subprocess
    kw['shell'] = isinstance(cmd, str)
    Logs.debug('runner: %r', cmd)
    Logs.debug('runner_env: kw=%s', kw)

    if self.logger:
        self.logger.info(cmd)

    # capture both streams unless the caller asked for something else
    kw.setdefault('stdout', subprocess.PIPE)
    kw.setdefault('stderr', subprocess.PIPE)

    if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
        raise Errors.WafError('Program %s not found!' % cmd[0])

    # separate the Popen keyword arguments from those meant for wait/communicate
    wait_args = {}
    if 'timeout' in kw:
        timeout = kw.pop('timeout')
        if timeout is not None:
            wait_args['timeout'] = timeout
    if 'input' in kw:
        stdin_data = kw.pop('input')
        if stdin_data:
            wait_args['input'] = stdin_data
            kw['stdin'] = subprocess.PIPE
    if 'cwd' in kw:
        if not isinstance(kw['cwd'], str):
            kw['cwd'] = kw['cwd'].abspath()

    try:
        ret, out, err = Utils.run_process(cmd, kw, wait_args)
    except Exception as exc:
        raise Errors.WafError('Execution failure: %s' % str(exc), ex=exc)

    def relay(text, label, stream):
        # decode captured bytes and forward them to the logger or the console
        if not text:
            return
        if not isinstance(text, str):
            text = text.decode(sys.stdout.encoding or 'iso8859-1')
        if self.logger:
            if label == 'out':
                self.logger.debug('out: %s', text)
            else:
                self.logger.error('err: %s' % text)
        else:
            Logs.info(text, extra={'stream': stream, 'c1': ''})

    relay(out, 'out', sys.stdout)
    relay(err, 'err', sys.stderr)

    return ret
def exec_command(self, cmd, **kw):
    """
    Execute a command and return the exit status. If the context has the attribute 'log',
    capture and log the process stderr/stdout for logging purposes::

        def run(tsk):
            ret = tsk.generator.bld.exec_command('touch foo.txt')
            return ret

    This method captures the standard/error outputs (Issue 1101), but it does not return
    the values unlike :py:meth:`waflib.Context.Context.cmd_and_log`

    :param cmd: command argument for subprocess.Popen
    :param kw: keyword arguments for subprocess.Popen
    """
    subprocess = Utils.subprocess
    kw['shell'] = isinstance(cmd, str)

    # show the command in a shell-friendly or exec-friendly form
    if kw['shell']:
        Logs.debug('runner: (shell) %s' % cmd)
    else:
        Logs.debug('runner: (exec) %s' % Utils.list2cmdline(cmd))
    Logs.debug('runner_env: kw=%s' % kw)

    if self.logger:
        self.logger.info(cmd)

    # capture both streams unless the caller supplied its own redirections
    kw.setdefault('stdout', subprocess.PIPE)
    kw.setdefault('stderr', subprocess.PIPE)

    if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
        raise Errors.WafError("Program %s not found!" % cmd[0])

    try:
        if kw['stdout'] or kw['stderr']:
            proc = subprocess.Popen(cmd, **kw)
            out, err = proc.communicate()
            ret = proc.returncode
        else:
            # nothing captured: just wait for termination
            out = err = None
            ret = subprocess.Popen(cmd, **kw).wait()
    except Exception as exc:
        raise Errors.WafError('Execution failure: %s' % str(exc), ex=exc)

    def forward(text, stream, level, label):
        # decode captured bytes and route them to the logger or the console
        if not text:
            return
        if not isinstance(text, str):
            text = text.decode(sys.stdout.encoding or 'iso8859-1')
        if self.logger:
            getattr(self.logger, level)('%s: %s' % (label, text))
        else:
            Logs.info(text, extra={'stream': stream, 'c1': ''})

    forward(out, sys.stdout, 'debug', 'out')
    forward(err, sys.stderr, 'error', 'err')

    return ret
def __copy__(self):
    """Build contexts hold too much shared state to duplicate; always raises."""
    message = 'build contexts cannot be copied'
    raise Errors.WafError(message)
def add_link_task(self, node_to_link):
    """
    Register a code-generated source file with the build and wire its object
    file into the owning target's link step.

    :param node_to_link: generated source node that should be compiled and linked
    :returns: True on success
    :raises Errors.WafError: when the node has no compile hook, or no link task
        can be found to attach the resulting object to
    """
    # Using modified version of example here:
    # https://github.com/waf-project/waf/blob/7b7531b0c6d0598033bea608ffc3c8e335434a6d/docs/book/examples/scenarios_unknown/mytool.py
    try:
        task_hook = self.generator.get_hook(node_to_link)
    except Errors.WafError:
        # BUGFIX: was self.generator.bldnode, which does not exist on task
        # generators (the sibling error path below uses generator.bld.bldnode)
        # and raised AttributeError while formatting this message.
        raise Errors.WafError(
            '[ERROR] az_code_gen: Created file {} marked for "should add to build" '
            'is not buildable.'.format(node_to_link.path_from(self.generator.bld.bldnode)))
    created_task = task_hook(self.generator, node_to_link)

    # Shove /Fd flags into codegen meta-tasks, this is similar to logic in msvc_helper's
    # set_pdb_flags. We compute PDB file path and add the requisite /Fd flag
    # This enables debug symbols for code outputted by azcg
    if 'msvc' in (self.generator.env.CC_NAME, self.generator.env.CXX_NAME):
        # The created_task from the original generator will not be able to go through the
        # 'verify_compiler_options_msvc' function at this point, so we will manually verify
        # the compiler options here (to strip out conflicting flags)
        verify_options_common(created_task.env)

        # Not having PDBs stops CL.exe working for precompiled header when we have VCCompiler set to true for IB...
        # When DISABLE_DEBUG_SYMBOLS_OVERRIDE doesn't exist in the dictionary it returns []
        # which will results to false in this check.
        if self.bld.is_option_true('generate_debug_info') or self.generator.env['DISABLE_DEBUG_SYMBOLS_OVERRIDE']:
            pdb_folder = self.generator.path.get_bld().make_node(str(self.generator.target_uid))
            pdb_cxxflag = '/Fd{}'.format(pdb_folder.abspath())
            created_task.env.append_unique('CFLAGS', pdb_cxxflag)
            created_task.env.append_unique('CXXFLAGS', pdb_cxxflag)

    link_task = getattr(self.generator, 'link_task', None)
    if not link_task:
        link_task = getattr(self.bld, 'monolithic_link_task', None)

    if link_task:
        link_task.set_run_after(created_task)  # Compile our .cpp before we link.

        # link_task is a shared resource that lives on the generator. Use a lock and a
        # separate list to ensure that the append order is consistent
        with self.generator.task_gen_link_lock:
            if not hasattr(self.generator, 'task_gen_link_inputs'):
                self.generator.task_gen_link_inputs = []
            if (created_task.outputs[0] not in self.generator.task_gen_link_inputs):
                self.generator.task_gen_link_inputs.append(created_task.outputs[0])
                # keep a deterministic (name-sorted) ordering of codegen objects
                self.generator.task_gen_link_inputs.sort(key=lambda x: x.name)
            # Drop any previously-appended codegen outputs from the link inputs,
            # then re-append the full sorted list so the final order is stable.
            for output in self.generator.task_gen_link_inputs:
                try:
                    idx = link_task.inputs.index(output)
                    del link_task.inputs[idx:]
                    break
                except ValueError:
                    # BUGFIX: was a bare 'except:', which also swallowed
                    # KeyboardInterrupt/SystemExit; list.index raises ValueError
                    # when the output is not present yet.
                    continue
            link_task.inputs += self.generator.task_gen_link_inputs
    else:
        # If we ever have a use case where link_task is inappropriate (non-C-family lang?),
        # then we should do "self.more_tasks.append(created_task)" in those cases.
        raise Errors.WafError(
            '[ERROR] az_code_gen: Created file {} marked for "should add to build" '
            'was not added to a link task.'.format(node_to_link.path_from(self.generator.bld.bldnode)))
    return True
def prepare_task(self):
    """Assemble the az_code_gen command-line arguments and write them to the
    argument file consumed by the code generator executable.

    Raises Errors.WafError when the argument file cannot be written.
    """
    # Create the directory if it doesn't already exist
    self.output_dir.mkdir()
    # We expect json output for friendlier parsing
    self.add_argument("-output-using-json")
    self.add_argument('-input-path "{}"'.format(clean_path(self.input_dir.abspath())))
    self.add_argument('-output-path "{}"'.format(clean_path(self.output_dir.abspath())))
    # Write input files to a file (command line version is too long)
    for input_file in self.inputs:
        input_file_rel_path = clean_path(input_file.path_from(self.input_dir))
        self.add_argument('-input-file "{}"'.format(input_file_rel_path))
        # make sure the mirrored build-side directory exists for outputs
        input_file.parent.get_bld().mkdir()

    def pypath(python_path):
        # Absolute paths are good to go as-is
        # Relative paths are assumed relative to src
        if not os.path.isabs(python_path):
            # Toss it in a node to figure out an absolute path
            python_path_node = self.generator.bld.srcnode.make_node(python_path)
            python_path = python_path_node.abspath()
        if not os.path.exists(python_path):
            Logs.warn('az_code_gen: Path given as python path does not exist: {}'.format(python_path))
        return clean_path(python_path)

    # Python paths
    self.add_argument('-python-home "{}"'.format(pypath(self.env['CODE_GENERATOR_PYTHON_HOME'])))
    for python_path in self.env['CODE_GENERATOR_PYTHON_PATHS']:
        self.add_argument('-python-path "{}"'.format(pypath(python_path)))
    # Debug python paths
    self.add_argument('-python-home-debug "{}"'.format(pypath(self.env['CODE_GENERATOR_PYTHON_HOME_DEBUG'])))
    for python_debug_path in self.env['CODE_GENERATOR_PYTHON_DEBUG_PATHS']:
        self.add_argument('-python-debug-path "{}"'.format(pypath(python_debug_path)))

    # module-level toggles for the generator's behavior
    if code_generator_ignore_includes:
        self.add_argument('-ignore-includes')
    if code_generator_suppress_errors_as_warnings:
        self.add_argument('-suppress-errors-as-warnings')
    if code_generator_verbose:
        self.add_argument('-v')

    if Utils.unversioned_sys_platform().startswith('linux'):
        # libc++ headers location on Linux
        self.add_argument('-include-path /usr/include/c++/v1')

    for include in self.includes:
        self.add_argument('-include-path "{}"'.format(clean_path(include.abspath())))
    for include_path in self.env['CODE_GENERATOR_INCLUDE_PATHS']:
        self.add_argument('-include-path "{}"'.format(pypath(include_path)))
    if 'CODE_GENERATOR_CLANG_INCLUDE_PATH' in self.env:
        for clang_include_path in self.env['CODE_GENERATOR_CLANG_INCLUDE_PATH']:
            self.add_argument('-include-path "{}"'.format(clean_path(clang_include_path)))

    for define in self.defines:
        self.add_argument('-define {}'.format(quote(define)))

    for script_node in self.script_nodes:
        self.add_argument('-codegen-script "{}"'.format(clean_path(script_node.get_src().abspath())))

    # Include file that contains code generation tag definitions
    codegen_tags = self.env['CODE_GENERATOR_TAGS']
    if not codegen_tags:
        codegen_tags = 'Code/Framework/AzCore/AzCore/Preprocessor/CodeGen.h'
    self.add_argument('-force-include "{}"'.format(
        clean_path(self.generator.bld.CreateRootRelativePath(codegen_tags))))

    if self.error_output_file_node:
        self.add_argument('-redirect-output-file "{}"'.format(
            clean_path(self.error_output_file_node.abspath())))

    if 'CLANG_SEARCH_PATHS' in self.env:
        self.add_argument('-resource-dir "{}"'.format(self.env['CLANG_SEARCH_PATHS']['libraries'][0]))

    if 'ISYSROOT' in self.env:
        self.add_argument('-isysroot "{}"'.format(self.env['ISYSROOT']))

    if 'ANDROID' in self.defines:
        self.add_argument('-is-android-build')
        # Mirror the cross-compilation flags so the generator parses with the
        # same toolchain/target/sysroot as the android build itself.
        # NOTE(review): the flag loop is assumed to be scoped to the ANDROID
        # branch (the original file's indentation was lost) - confirm upstream.
        for flag in self.env['CXXFLAGS']:
            if flag.startswith('--gcc-toolchain='):
                gcc_toolchain = flag.split('=', 2)
                self.add_argument('-android-toolchain "{}"'.format(clean_path(gcc_toolchain[1])))
                continue
            if flag.startswith('--target='):
                android_target = flag.split('=', 2)
                self.add_argument('-android-target "{}"'.format(clean_path(android_target[1])))
                continue
            if flag.startswith('--sysroot='):
                android_sysroot = flag.split('=', 2)
                self.add_argument('-android-sysroot "{}"'.format(clean_path(android_sysroot[1])))
                continue

    status, self.argument_file = self.write_argument_list_to_file()
    if not status:
        raise Errors.WafError('[ERROR] az_code_gen task creation failed')
def execute(self):
    """
    Wraps :py:func:`waflib.Context.Context.execute` on the context class.

    Implements the autoconfig behavior for board-variant builds: detects
    whether the cached configuration is missing or stale (lockfile moved,
    configure-time files changed) and re-runs 'configure' before the
    requested command when needed.
    """
    if not Configure.autoconfig:
        return execute_method(self)

    # Disable autoconfig so waf's version doesn't run (and don't end up on loop of bad configure)
    Configure.autoconfig = False

    if self.variant == '':
        raise Errors.WafError('The project is badly configured: run "waf configure" again!')

    env = ConfigSet.ConfigSet()
    do_config = False

    # load the cached configuration for this board variant
    try:
        p = os.path.join(Context.out_dir, Build.CACHE_DIR, self.variant + Build.CACHE_SUFFIX)
        env.load(p)
    except EnvironmentError:
        raise Errors.WafError(
            'The project is not configured for board {0}: run "waf configure --board {0} [...]" first!'
            .format(self.variant))

    lock_env = ConfigSet.ConfigSet()

    try:
        lock_env.load(os.path.join(Context.top_dir, Options.lockfile))
    except EnvironmentError:
        # no lockfile at all: first configuration
        Logs.warn('Configuring the project')
        do_config = True
    else:
        if lock_env.run_dir != Context.run_dir:
            # project directory moved: configuration is stale
            do_config = True
        else:
            # hash all files the configure step depended on; any change or
            # missing file forces a re-configure (for-else: else runs only
            # when no file was missing)
            h = 0
            for f in env.CONFIGURE_FILES:
                try:
                    h = Utils.h_list((h, Utils.readf(f, 'rb')))
                except EnvironmentError:
                    do_config = True
                    break
            else:
                do_config = h != env.CONFIGURE_HASH

    if do_config:
        cmd = lock_env.config_cmd or 'configure'
        # temporarily restore the original configure options for the re-run
        tmp = Options.options.__dict__
        if env.OPTIONS and sorted(env.OPTIONS.keys()) == sorted(tmp.keys()):
            Options.options.__dict__ = env.OPTIONS
        else:
            raise Errors.WafError('The project configure options have changed: run "waf configure" again!')
        try:
            run_command(cmd)
        finally:
            Options.options.__dict__ = tmp

        run_command(self.cmd)
    else:
        return execute_method(self)
def load_win_x64_clang_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64 configurations

    Loads the MSVC-based environment for tooling, then swaps the compiler,
    archiver and linker for the clang/LLVM equivalents and rebuilds the
    clang-specific flags.

    :param conf: the waf configuration context
    :raises Errors.WafError: when clang cannot be found on the system
    """
    v = conf.env

    if not conf.find_program('clang', mandatory=False, silent_output=True):
        raise Errors.WafError("Unable to detect Clang for windows")

    v['PLATFORM'] = PLATFORM

    # Load MSVC settings for non-build stuff (AzCG, CrcFix, etc)
    # load_win_x64_win_x64_vs2017_common_settings(conf)
    conf.load_windows_common_settings()
    conf.load_win_x64_vs2017_common_settings()

    windows_kit = conf.options.win_vs2017_winkit
    try:
        # Attempt to detect the C++ compiler for VS 2015 ( msvs version 14.0 )
        # BUGFIX: was 'ctx.detect_visual_studio_2015(...)' - 'ctx' is undefined in
        # this scope, so the resulting NameError was swallowed by the (previously
        # bare) except and the platform was unconditionally disabled.
        conf.detect_visual_studio_2015(windows_kit)
        system_includes = []
    except Exception:
        Logs.warn('Unable to find Windows Kit {}, removing build target'.format(windows_kit))
        conf.disable_target_platform(PLATFORM)
        return

    # Remove MSVC/clang specific settings
    v['CFLAGS'] = []
    v['CXXFLAGS'] = []
    v['LINKFLAGS'] = []

    # Linker
    v['CCLNK_SRC_F'] = v['CXXLNK_SRC_F'] = []
    v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'

    v['LIB_ST'] = '%s.lib'
    v['LIBPATH_ST'] = '/LIBPATH:%s'
    v['STLIB_ST'] = '%s.lib'
    v['STLIBPATH_ST'] = '/LIBPATH:%s'

    v['cprogram_PATTERN'] = '%s.exe'
    v['cxxprogram_PATTERN'] = '%s.exe'
    v['cstlib_PATTERN'] = '%s.lib'
    v['cxxstlib_PATTERN'] = '%s.lib'
    v['cshlib_PATTERN'] = '%s.dll'
    v['cxxshlib_PATTERN'] = '%s.dll'

    v['LINKFLAGS_cshlib'] = ['/DLL']
    v['LINKFLAGS_cxxshlib'] = ['/DLL']

    # AR Tools
    v['ARFLAGS'] = ['/NOLOGO']
    v['AR_TGT_F'] = '/OUT:'

    # Delete the env variables so that they can be replaced with the clang versions
    del v['AR']
    del v['CC']
    del v['CXX']
    del v['LINK']

    conf.find_program('clang', var='CC', silent_output=True)
    conf.find_program('clang++', var='CXX', silent_output=True)
    conf.find_program('llvm-lib', var='AR', silent_output=True)
    conf.find_program('lld-link', var='LINK', silent_output=True)

    v['LINK_CC'] = v['LINK_CXX'] = v['LINK']

    # Moved to platform.win_x64_clang.json
    """
    clang_FLAGS = [
        '-mcx16',
        '-msse3',

        '-Wno-macro-redefined',
        '-Wno-microsoft-cast',
        '-Wno-ignored-pragma-intrinsic',    # Clang doens't need #pragma intrinsic anyway, so don't whine when one isn't recognized
    ]
    """
    clang_FLAGS = []

    # Path to clang.exe is [clang]/bin/clang.exe, but the include path is [clang]/lib/clang/7.0.0/include
    clang_include_path = os.path.join(
        os.path.dirname(os.path.dirname(v['CXX'])),
        'lib', 'clang', CLANG_VERSION, 'include')

    system_includes = [clang_include_path] + system_includes

    # Treat all MSVC include paths as system headers
    for include in system_includes:
        clang_FLAGS += ['-isystem', include]

    v['CFLAGS'] += clang_FLAGS
    v['CXXFLAGS'] += clang_FLAGS

    # Moved to platform.win_x64_clang.json
    """
    v['DEFINES'] += [
        '_CRT_SECURE_NO_WARNINGS',
        '_CRT_NONSTDC_NO_WARNINGS',
    ]
    """

    # Moved to platform.win_x64_clang.json
    """
    v['LINKFLAGS'] += [
        '/MACHINE:x64',
        '/MANIFEST',            # Create a manifest file
        '/OPT:REF', '/OPT:ICF', # Always optimize for size, there's no reason not to
        '/LARGEADDRESSAWARE',   # tell the linker that the application can handle addresses larger than 2 gigabytes.
    ]
    """

    v['WINDOWS_CLANG_SUPPORTED'] = True

    conf.load_clang_common_settings()
    conf.load_cryengine_common_settings()
def configure(self):
    """Detect Java build tooling: ant, the JDK (via the waf 'java' tool) and
    the JNI headers/jvm library needed to build native bindings.

    Honors the force_ant / force_java / force_jni options: when set, a
    missing component is a fatal error instead of a soft skip.
    """
    if not Options.options.java:
        return

    from build import recursiveGlob

    # prefer the command-line ant_home, fall back to the environment
    ant_home = Options.options.ant_home or self.environ.get('ANT_HOME', None)
    if ant_home is not None:
        ant_paths = [join(self.environ['ANT_HOME'], 'bin'), self.environ['ANT_HOME']]
    else:
        ant_paths = []

    env = self.env
    env['HAVE_ANT'] = self.find_program('ant', var='ANT', path_list=ant_paths, mandatory=False)
    if not env['ANT'] and Options.options.force_ant:
        raise Errors.WafError('Cannot find ant!')

    if Options.options.java_home:
        self.environ['JAVA_HOME'] = Options.options.java_home

    try:
        self.load('java')
    except Exception as e:
        if Options.options.force_java:
            raise e
        else:
            # java support is optional unless explicitly forced
            return

    if not self.env.CC_NAME and not self.env.CXX_NAME:
        self.fatal('load a compiler first (gcc, g++, ..)')

    try:
        if not self.env.JAVA_HOME:
            self.fatal('set JAVA_HOME in the system environment')

        # jni requires the jvm
        javaHome = abspath(self.env['JAVA_HOME'][0])

        if not isdir(javaHome):
            self.fatal('could not find JAVA_HOME directory %r (see config.log)' % javaHome)

        incDir = abspath(join(javaHome, 'include'))
        if not isdir(incDir):
            self.fatal('could not find include directory in %r (see config.log)' % javaHome)

        # locate jni headers and the jvm library; prefer static/import libs,
        # fall back to shared libraries
        incDirs = list(set([dirname(x) for x in recursiveGlob(incDir, ['jni.h', 'jni_md.h'])]))
        libDirs = list(set([dirname(x) for x in recursiveGlob(javaHome, ['*jvm.a', '*jvm.lib'])]))
        if not libDirs:
            libDirs = list(set([dirname(x) for x in recursiveGlob(javaHome, ['*jvm.so', '*jvm.dll'])]))

        #if not self.check_jni_headers():
        if not self.check(header_name='jni.h',
                          define_name='HAVE_JNI_H',
                          lib='jvm',
                          libpath=libDirs,
                          includes=incDirs,
                          uselib_store='JAVA',
                          uselib='JAVA',
                          function_name='JNI_GetCreatedJavaVMs'):
            if Options.options.force_jni:
                self.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
    except ConfigurationError as ex:
        # self.fatal raises ConfigurationError; downgrade to a warning unless
        # java support was explicitly forced
        err = str(ex).strip()
        if err.startswith('error: '):
            err = err[7:]
        if Options.options.force_java:
            self.fatal(err)
        else:
            self.msg('Java lib/headers', err, color='YELLOW')
def add_moc_tasks(self):
    """
    Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``.

    For every ``*.moc`` entry found in the raw dependencies, locate the
    corresponding header, record the mapping in ``bld.node_deps`` and create
    a moc task; then re-create moc tasks for the mappings recorded during
    previous runs. All moc tasks are scheduled before this task.
    """
    node = self.inputs[0]
    bld = self.generator.bld

    try:
        # compute the signature once to know if there is a moc file to create
        self.signature()
    except KeyError:
        # the moc file may be referenced somewhere else
        pass
    else:
        # remove the signature, it must be recomputed with the moc task
        delattr(self, 'cache_sig')

    moctasks = []
    mocfiles = []
    try:
        tmp_lst = bld.raw_deps[self.uid()]
        bld.raw_deps[self.uid()] = []
    except KeyError:
        tmp_lst = []

    for d in tmp_lst:
        if not d.endswith('.moc'):
            continue

        # paranoid check
        if d in mocfiles:
            Logs.error("paranoia owns")
            continue
        # process that base.moc only once
        mocfiles.append(d)

        # find the extension - this search is done only once
        h_node = None
        # BUGFIX: pre-initialize 'ext'; when qt_header_ext is absent the
        # AttributeError path left 'ext' unbound and the 'if not ext' below
        # raised NameError on the first iteration.
        ext = None
        try:
            ext = Options.options.qt_header_ext.split()
        except AttributeError:
            pass
        if not ext:
            ext = MOC_H

        base2 = d[:-4]
        # search for <base><header-ext> next to the source, then in the includes
        for x in [node.parent] + self.generator.includes_nodes:
            for e in ext:
                h_node = x.find_node(base2 + e)
                if h_node:
                    break
            if h_node:
                m_node = h_node.change_ext('.moc')
                break
        else:
            # fall back: the moc name may already embed a source extension
            for k in EXT_QT4:
                if base2.endswith(k):
                    for x in [node.parent] + self.generator.includes_nodes:
                        h_node = x.find_node(base2)
                        if h_node:
                            break
                    if h_node:
                        m_node = h_node.change_ext(k + '.moc')
                    break
        if not h_node:
            raise Errors.WafError('no header found for %r which is a moc file' % d)

        # next time we will not search for the extension (look at the 'for' loop below)
        bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node

        # create the task
        task = self.create_moc_task(h_node, m_node)
        moctasks.append(task)

    # remove raw deps except the moc files to save space (optimization)
    tmp_lst = bld.raw_deps[self.uid()] = mocfiles

    # look at the file inputs, it is set right above
    lst = bld.node_deps.get(self.uid(), ())
    for d in lst:
        name = d.name
        if name.endswith('.moc'):
            task = self.create_moc_task(bld.node_deps[(self.inputs[0].parent.abspath(), name)], d)
            moctasks.append(task)

    # simple scheduler dependency: run the moc task before others
    self.run_after.update(set(moctasks))
    self.moc_done = 1
def sig_explicit_deps(self): """ Used by :py:meth:`waflib.Task.Task.signature`, hash :py:attr:`waflib.Task.Task.inputs` and :py:attr:`waflib.Task.Task.dep_nodes` signatures. :rtype: hash value """ bld = self.generator.bld bld_sigs = [] exp_output = '' # the inputs for x in self.inputs + self.dep_nodes: try: bld_sig = x.get_bld_sig() if Logs.sig_delta: exp_output += '{} {} {}\n'.format(x.name, x.abspath(), hexlify(bld_sig)) bld_sigs.append(bld_sig) except (AttributeError, TypeError, IOError): Logs.warn('Missing signature for node %r (required by %r)' % (x, self)) continue # skip adding the signature to the calculation, but continue adding other dependencies # manual dependencies, they can slow down the builds if bld.deps_man: additional_deps = bld.deps_man for x in self.inputs + self.outputs: try: d = additional_deps[id(x)] except KeyError: continue for v in d: v_name = v.name if isinstance(v, bld.root.__class__): try: v = v.get_bld_sig() except AttributeError: raise Errors.WafError( 'Missing node signature for %r (required by %r)' % (v, self)) elif hasattr(v, '__call__'): v = v() # dependency is a function, call it if Logs.sig_delta: exp_output += '{} {}\n'.format(v_name, hexlify(v)) bld_sigs.append(v) dep_bld_sigs_str = "".join(bld_sigs) m = Utils.md5() m.update(dep_bld_sigs_str) explicit_sig = m.digest() if Logs.sig_delta: key = self.uid() prev_sig = bld.task_sigs.get((key, 'exp'), []) if prev_sig and prev_sig != explicit_sig: self.capture_signature_log('\nExplicit(Old):\n') self.capture_signature_log(bld.last_build['exp_deps'].get( key, '')) self.capture_signature_log('\nExplicit(New):\n') self.capture_signature_log(exp_output) bld.last_build['exp_deps'][key] = exp_output bld.task_sigs[(key, 'exp')] = explicit_sig return explicit_sig
def apply_cmd_output(self):
    """
    Build a ``command_output`` task from the generator attributes: resolve the
    command, collect the input/output/hidden nodes and the stdin/stdout/stderr
    redirections, then wire the ordering constraints against the declared
    dependencies.
    """
    if self.command is None:
        raise Errors.WafError("command-output missing command")
    if self.command_is_external:
        # the command is resolved on the system at execution time
        cmd = self.command
        cmd_node = None
    else:
        # the command is a file inside the source tree
        cmd_node = self.path.find_resource(self.command)
        assert cmd_node is not None, ('''Could not find command '%s' in source tree. Hint: if this is an external command, use command_is_external=True''') % (self.command, )
        cmd = cmd_node

    if self.cwd is None:
        # NOTE(review): this assignment is dead - 'cwd' is unconditionally
        # overwritten with self.bld.variant_dir further below
        cwd = None

    args = []  # NOTE(review): never filled nor read in this method
    inputs = []
    outputs = []

    for arg in self.argv:
        if isinstance(arg, cmd_arg):
            # resolve the argument to a node, then classify it as input/output
            arg.find_node(self.path)
            if isinstance(arg, input_file):
                inputs.append(arg.node)
            if isinstance(arg, output_file):
                outputs.append(arg.node)

    if self.stdout is None:
        stdout = None
    else:
        assert isinstance(self.stdout, str)
        stdout = self.path.find_or_declare(self.stdout)
        if stdout is None:
            raise Errors.WafError("File %s not found" % (self.stdout, ))
        outputs.append(stdout)

    if self.stderr is None:
        stderr = None
    else:
        assert isinstance(self.stderr, str)
        stderr = self.path.find_or_declare(self.stderr)
        if stderr is None:
            raise Errors.WafError("File %s not found" % (self.stderr, ))
        outputs.append(stderr)

    if self.stdin is None:
        stdin = None
    else:
        assert isinstance(self.stdin, str)
        stdin = self.path.find_resource(self.stdin)
        if stdin is None:
            raise Errors.WafError("File %s not found" % (self.stdin, ))
        inputs.append(stdin)

    # extra dependencies that do not appear on the command line
    for hidden_input in self.to_list(self.hidden_inputs):
        node = self.path.find_resource(hidden_input)
        if node is None:
            raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
        inputs.append(node)

    for hidden_output in self.to_list(self.hidden_outputs):
        node = self.path.find_or_declare(hidden_output)
        if node is None:
            raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
        outputs.append(node)

    if not (inputs or getattr(self, 'no_inputs', None)):
        raise Errors.WafError(
            'command-output objects must have at least one input file or give self.no_inputs'
        )
    if not (outputs or getattr(self, 'no_outputs', None)):
        raise Errors.WafError(
            'command-output objects must have at least one output file or give self.no_outputs'
        )

    cwd = self.bld.variant_dir
    task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
    task.generator = self
    copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
    self.tasks.append(task)

    task.inputs = inputs
    task.outputs = outputs
    task.dep_vars = self.to_list(self.dep_vars)

    # run this task after every task of each generator listed in 'dependencies'
    for dep in self.dependencies:
        assert dep is not self
        dep.post()
        for dep_task in dep.tasks:
            task.set_run_after(dep_task)

    if not task.inputs:
        # the case for svnversion, always run, and update the output nodes
        task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__)  # always run
        task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
def multicheck(self,*k,**kw): self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw) for var in('DEFINES',DEFKEYS): self.env.append_value(var,[]) self.env.DEFINE_COMMENTS=self.env.DEFINE_COMMENTS or{} class par(object): def __init__(self): self.keep=False self.task_sigs={} self.progress_bar=0 def total(self): return len(tasks) def to_log(self,*k,**kw): return bld=par() bld.keep=kw.get('run_all_tests',True) bld.imp_sigs={} tasks=[] id_to_task={} for dct in k: x=Task.classes['cfgtask'](bld=bld,env=None) tasks.append(x) x.args=dct x.bld=bld x.conf=self x.args=dct x.logger=Logs.make_mem_logger(str(id(x)),self.logger) if'id'in dct: id_to_task[dct['id']]=x for x in tasks: for key in Utils.to_list(x.args.get('before_tests',[])): tsk=id_to_task[key] if not tsk: raise ValueError('No test named %r'%key) tsk.run_after.add(x) for key in Utils.to_list(x.args.get('after_tests',[])): tsk=id_to_task[key] if not tsk: raise ValueError('No test named %r'%key) x.run_after.add(tsk) def it(): yield tasks while 1: yield[] bld.producer=p=Runner.Parallel(bld,Options.options.jobs) bld.multicheck_lock=Utils.threading.Lock() p.biter=it() self.end_msg('started') p.start() for x in tasks: x.logger.memhandler.flush() self.start_msg('-> processing test results') if p.error: for x in p.error: if getattr(x,'err_msg',None): self.to_log(x.err_msg) self.end_msg('fail',color='RED') raise Errors.WafError('There is an error in the library, read config.log for more information') failure_count=0 for x in tasks: if x.hasrun not in(Task.SUCCESS,Task.NOT_RUN): failure_count+=1 if failure_count: self.end_msg(kw.get('errmsg','%s test failed'%failure_count),color='YELLOW',**kw) else: self.end_msg('all ok',**kw) for x in tasks: if x.hasrun!=Task.SUCCESS: if x.args.get('mandatory',True): self.fatal(kw.get('fatalmsg')or'One of the tests has failed, read config.log for more information')
def action_process_file_func(tsk): "Ask the function attached to the task to process it" if not tsk.fun: raise Errors.WafError( 'task must have a function attached to it for copy_func to work!') return tsk.fun(tsk)
def process_use(self): use_not = self.tmp_use_not = set([]) self.tmp_use_seen = [] use_prec = self.tmp_use_prec = {} self.uselib = self.to_list(getattr(self, 'uselib', [])) self.includes = self.to_list(getattr(self, 'includes', [])) names = self.to_list(getattr(self, 'use', [])) for x in names: self.use_rec(x) for x in use_not: if x in use_prec: del use_prec[x] out = [] tmp = [] for x in self.tmp_use_seen: for k in use_prec.values(): if x in k: break else: tmp.append(x) while tmp: e = tmp.pop() out.append(e) try: nlst = use_prec[e] except KeyError: pass else: del use_prec[e] for x in nlst: for y in use_prec: if x in use_prec[y]: break else: tmp.append(x) if use_prec: raise Errors.WafError('Cycle detected in the use processing %r' % use_prec) out.reverse() link_task = getattr(self, 'link_task', None) for x in out: y = self.bld.get_tgen_by_name(x) var = y.tmp_use_var if var and link_task: if var == 'LIB' or y.tmp_use_stlib: self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]]) self.link_task.dep_nodes.extend(y.link_task.outputs) tmp_path = y.link_task.outputs[0].parent.path_from( self.bld.bldnode) self.env.append_value(var + 'PATH', [tmp_path]) else: if y.tmp_use_objects: self.add_objects_from_tgen(y) if getattr(y, 'export_includes', None): self.includes.extend(y.to_incnodes(y.export_includes)) if getattr(y, 'export_defines', None): self.env.append_value('DEFINES', self.to_list(y.export_defines)) for x in names: try: y = self.bld.get_tgen_by_name(x) except Errors.WafError: if not self.env['STLIB_' + x] and not x in self.uselib: self.uselib.append(x) else: for k in self.to_list(getattr(y, 'use', [])): if not self.env['STLIB_' + k] and not k in self.uselib: self.uselib.append(k)
def cmd_and_log(self, cmd, **kw): """ Execute a command and return stdout if the execution is successful. An exception is thrown when the exit status is non-0. In that case, both stderr and stdout will be bound to the WafError object:: def configure(conf): out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH) (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH) try: conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH) except Exception as e: print(e.stdout, e.stderr) :param cmd: args for subprocess.Popen :param kw: keyword arguments for subprocess.Popen """ subprocess = Utils.subprocess kw['shell'] = isinstance(cmd, str) Logs.debug('runner: %r' % cmd) if 'quiet' in kw: quiet = kw['quiet'] del kw['quiet'] else: quiet = None if 'output' in kw: to_ret = kw['output'] del kw['output'] else: to_ret = STDOUT if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): raise Errors.WafError("Program %s not found!" % cmd[0]) kw['stdout'] = kw['stderr'] = subprocess.PIPE if quiet is None: self.to_log(cmd) try: p = subprocess.Popen(cmd, **kw) (out, err) = p.communicate() except Exception as e: raise Errors.WafError('Execution failure: %s' % str(e), ex=e) if not isinstance(out, str): out = out.decode(sys.stdout.encoding or 'iso8859-1') if not isinstance(err, str): err = err.decode(sys.stdout.encoding or 'iso8859-1') if out and quiet != STDOUT and quiet != BOTH: self.to_log('out: %s' % out) if err and quiet != STDERR and quiet != BOTH: self.to_log('err: %s' % err) if p.returncode: e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode)) e.returncode = p.returncode e.stderr = err e.stdout = out raise e if to_ret == BOTH: return (out, err) elif to_ret == STDERR: return err return out
def _could_not_find_lib_error(): raise Errors.WafError('[ERROR] Could not find Android library %r. Run the command again with "--zones=android_library" included for more information.' % self.name)
def init_dirs(self): if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): raise Errors.WafError('The project was not configured: run "waf configure" first!') self.path=self.srcnode=self.root.find_dir(self.top_dir) self.bldnode=self.root.make_node(self.variant_dir) self.bldnode.mkdir()
def deco(func):
    """
    Decorator body: register *func* as a pre/post event on the configuration
    methods named in ``kw['before_methods']`` / ``kw['after_methods']`` (both
    free variables from the enclosing scope), and honor ``kw['after_events']``
    ordering restrictions by re-sorting the registered event lists.
    """
    def _add_method_event(name, method_event_map):
        # Check if the event has been registered in the after_conf_method_event_not_before_restrictions yet
        # NOTE(review): when restrictions exist and the list is non-empty the
        # append is skipped here; _reorder_events below re-inserts func at the
        # correct position instead - confirm this interplay is intentional
        if func.__name__ in conf_event_not_before_restrictions and \
                len(method_event_map[name]) > 0:
            pass
        else:
            method_event_map[name].append(func)

    def _reorder_events(conf_method_table, order_restriction_table):
        for conf_method, conf_events in conf_method_table.items():
            # Check any remove existing func
            if func in conf_events:
                conf_events.remove(func)
                # Re-add in the correct order based on the restrictions:
                # insert func just after the last event it must follow
                insert_index = -1
                current_index = 0
                for current_event in conf_events:
                    if current_event.__name__ in order_restriction_table[func.__name__]:
                        insert_index = current_index
                    current_index += 1
                conf_events.insert(insert_index + 1, func)

    # Either 'before_methods' or 'after_methods' keywords is required
    # NOTE(review): the message says 'before_method'/'after_method' (singular)
    # while the accepted keywords are plural - confirm which is intended
    if 'before_methods' not in kw and 'after_methods' not in kw:
        raise Errors.WafError(
            "'before_method' or 'after_method' keyword missing @conf_event for function {}"
            .format(func.__name__))

    # Add the before_methods to the pre conf method events
    if 'before_methods' in kw:
        for conf_name in kw['before_methods']:
            _add_method_event(conf_name, pre_conf_method_events)

    # Add the after_methods to the post conf method events
    if 'after_methods' in kw:
        for conf_name in kw['after_methods']:
            _add_method_event(conf_name, post_conf_method_events)

    # If there are any event ordering restrictions, process those
    if 'after_events' in kw:
        for conf_event in kw['after_events']:
            conf_event_not_before_restrictions[func.__name__].add(conf_event)
            # Check for cyclic dependencies
            if conf_event in conf_event_not_before_restrictions and \
                    func.__name__ in conf_event_not_before_restrictions[conf_event]:
                raise Errors.WafError(
                    "Cyclic 'conf_event_not_before' dependency detected for {} and {}"
                    .format(func.__name__, conf_event))
        _reorder_events(pre_conf_method_events, conf_event_not_before_restrictions)
        _reorder_events(post_conf_method_events, conf_event_not_before_restrictions)
    return func
def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
    """
    Runs user-provided functions from the supplied list of directories.
    The directories can be either absolute, or relative to the directory
    of the wscript file

    The methods :py:meth:`waflib.Context.Context.pre_recurse` and
    :py:meth:`waflib.Context.Context.post_recurse` are called immediately before
    and after a script has been executed.

    :param dirs: List of directories to visit
    :type dirs: list of string or space-separated string
    :param name: Name of function to invoke from the wscript
    :type name: string
    :param mandatory: whether sub wscript files are required to exist
    :type mandatory: bool
    :param once: read the script file once for a particular context
    :type once: bool
    """
    try:
        cache = self.recurse_cache
    except AttributeError:
        cache = self.recurse_cache = {}

    for d in Utils.to_list(dirs):

        if not os.path.isabs(d):
            # absolute paths only
            d = os.path.join(self.path.abspath(), d)

        WSCRIPT = os.path.join(d, WSCRIPT_FILE)
        WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)

        # first preference: a file dedicated to this one function
        # (e.g. 'wscript_build'), executed as raw code in exec_dict
        node = self.root.find_node(WSCRIPT_FUN)
        if node and (not once or node not in cache):
            cache[node] = True
            self.pre_recurse(node)
            try:
                function_code = node.read('rU', encoding)
                exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
            finally:
                self.post_recurse(node)
        elif not node:
            # otherwise load the whole wscript as a module and call the function
            node = self.root.find_node(WSCRIPT)
            tup = (node, name or self.fun)
            if node and (not once or tup not in cache):
                cache[tup] = True
                self.pre_recurse(node)
                try:
                    wscript_module = load_module(node.abspath(), encoding=encoding)
                    user_function = getattr(wscript_module, (name or self.fun), None)
                    if not user_function:
                        if not mandatory:
                            continue
                        raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
                    user_function(self)
                finally:
                    self.post_recurse(node)
            elif not node:
                # no wscript at all: distinguish unreadable folder from missing file
                if not mandatory:
                    continue
                try:
                    os.listdir(d)
                except OSError:
                    raise Errors.WafError('Cannot read the folder %r' % d)
                raise Errors.WafError('No wscript file in directory %s' % d)
def __copy__(self): raise Errors.WafError('nodes are not supposed to be copied')
def cmd_and_log(self, cmd, **kw):
    """
    Executes a proces and returns stdout/stderr if the execution is successful.
    An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
    will be bound to the WafError object::

        def configure(conf):
            out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
            (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
            (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
            try:
                conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
            except Exception as e:
                print(e.stdout, e.stderr)

    :param cmd: args for subprocess.Popen
    :type cmd: list or string
    :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
    :type kw: dict
    :returns: process exit status
    :rtype: integer
    :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
    :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
    """
    subprocess = Utils.subprocess
    # a string command implies shell execution, a list does not
    kw['shell'] = isinstance(cmd, str)
    Logs.debug('runner: %r', cmd)

    # 'quiet' and 'output' are consumed here, not forwarded to Popen
    if 'quiet' in kw:
        quiet = kw['quiet']
        del kw['quiet']
    else:
        quiet = None

    if 'output' in kw:
        to_ret = kw['output']
        del kw['output']
    else:
        to_ret = STDOUT

    # fail early with a clear message when the program does not exist
    if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
        raise Errors.WafError('Program %r not found!' % cmd[0])

    kw['stdout'] = kw['stderr'] = subprocess.PIPE
    if quiet is None:
        self.to_log(cmd)

    # 'timeout' and 'input' go to wait/communicate, not to Popen
    wargs = {}
    if 'timeout' in kw:
        if kw['timeout'] is not None:
            wargs['timeout'] = kw['timeout']
        del kw['timeout']
    if 'input' in kw:
        if kw['input']:
            wargs['input'] = kw['input']
            kw['stdin'] = subprocess.PIPE
        del kw['input']
    if 'cwd' in kw:
        # accept a waf Node as the working directory
        if not isinstance(kw['cwd'], str):
            kw['cwd'] = kw['cwd'].abspath()

    try:
        ret, out, err = Utils.run_process(cmd, kw, wargs)
    except Exception as e:
        raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

    # normalize the output to text (Python 3 returns bytes)
    if not isinstance(out, str):
        out = out.decode(sys.stdout.encoding or 'iso8859-1')
    if not isinstance(err, str):
        err = err.decode(sys.stdout.encoding or 'iso8859-1')

    if out and quiet != STDOUT and quiet != BOTH:
        self.to_log('out: %s' % out)
    if err and quiet != STDERR and quiet != BOTH:
        self.to_log('err: %s' % err)

    if ret:
        # bind the captured streams to the exception for the caller
        e = Errors.WafError('Command %r returned %r' % (cmd, ret))
        e.returncode = ret
        e.stderr = err
        e.stdout = out
        raise e

    if to_ret == BOTH:
        return (out, err)
    elif to_ret == STDERR:
        return err
    return out
def find_node(self, base_path): assert isinstance(base_path, Node.Node) self.node = base_path.find_or_declare(self.name) if self.node is None: raise Errors.WafError("Output file %s not found in " % (self.name, base_path))
def process_subst(self): """ Defines a transformation that substitutes the contents of *source* files to *target* files:: def build(bld): bld( features='subst', source='foo.c.in', target='foo.c', install_path='${LIBDIR}/pkgconfig', VAR = 'val' ) The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument of the task generator object. This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`. """ src = Utils.to_list(getattr(self, 'source', [])) if isinstance(src, Node.Node): src = [src] tgt = Utils.to_list(getattr(self, 'target', [])) if isinstance(tgt, Node.Node): tgt = [tgt] if len(src) != len(tgt): raise Errors.WafError('invalid number of source/target for %r' % self) for x, y in zip(src, tgt): if not x or not y: raise Errors.WafError('null source or target for %r' % self) a, b = None, None if isinstance(x, str) and isinstance(y, str) and x == y: a = self.path.find_node(x) b = self.path.get_bld().make_node(y) if not os.path.isfile(b.abspath()): b.parent.mkdir() else: if isinstance(x, str): a = self.path.find_resource(x) elif isinstance(x, Node.Node): a = x if isinstance(y, str): b = self.path.find_or_declare(y) elif isinstance(y, Node.Node): b = y if not a: raise Errors.WafError('could not find %r for %r' % (x, self)) tsk = self.create_task('subst', a, b) for k in ('after', 'before', 'ext_in', 'ext_out'): val = getattr(self, k, None) if val: setattr(tsk, k, val) # paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies for xt in HEADER_EXTS: if b.name.endswith(xt): tsk.ext_in = tsk.ext_in + ['.h'] break inst_to = getattr(self, 'install_path', None) if inst_to: self.install_task = self.add_install_files(install_to=inst_to, install_from=b, chmod=getattr( self, 'chmod', Utils.O644)) self.source = []
def find_node(self, base_path): assert isinstance(base_path, Node.Node) self.node = base_path.find_dir(self.name) if self.node is None: raise Errors.WafError("Directory %s not found in " % (self.name, base_path))
def post(self): """ Create task objects. The following operations are performed: #. The body of this method is called only once and sets the attribute ``posted`` #. The attribute ``features`` is used to add more methods in ``self.meths`` #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec` #. The methods are then executed in order #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks` """ # we could add a decorator to let the task run once, but then python 2.3 will be difficult to support if getattr(self, 'posted', None): #error("OBJECT ALREADY POSTED" + str( self)) return False self.posted = True keys = set(self.meths) # add the methods listed in the features self.features = Utils.to_list(self.features) for x in self.features + ['*']: st = feats[x] if not st: if not x in Task.classes: Logs.warn( 'feature %r does not exist - bind at least one method to it' % x) keys.update(list(st)) # ironpython 2.7 wants the cast to list # copy the precedence table prec = {} prec_tbl = self.prec or task_gen.prec for x in prec_tbl: if x in keys: prec[x] = prec_tbl[x] # elements disconnected tmp = [] for a in keys: for x in prec.values(): if a in x: break else: tmp.append(a) tmp.sort() # topological sort out = [] while tmp: e = tmp.pop() if e in keys: out.append(e) try: nlst = prec[e] except KeyError: pass else: del prec[e] for x in nlst: for y in prec: if x in prec[y]: break else: tmp.append(x) if prec: raise Errors.WafError('Cycle detected in the method execution %r' % prec) out.reverse() self.meths = out # then we run the methods in order Logs.debug('task_gen: posting %s %d' % (self, id(self))) for x in out: try: v = getattr(self, x) except AttributeError: raise Errors.WafError( '%r is not a valid task generator method' % x) Logs.debug('task_gen: -> %s (%d)' % (x, id(self))) v() Logs.debug('task_gen: posted %s' % self.name) return True
def process_use(self):
    """
    Process the ``use`` attribute which contains a list of task generator names::

        def build(bld):
            bld.shlib(source='a.c', target='lib1')
            bld.program(source='main.c', target='app', use='lib1')

    See :py:func:`waflib.Tools.ccroot.use_rec`.
    """
    use_not = self.tmp_use_not = set()
    self.tmp_use_seen = []  # we would like an ordered set
    use_prec = self.tmp_use_prec = {}
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    self.includes = self.to_list(getattr(self, 'includes', []))
    names = self.to_list(getattr(self, 'use', []))

    # recursive traversal fills tmp_use_seen / tmp_use_not / tmp_use_prec
    for x in names:
        self.use_rec(x)

    for x in use_not:
        if x in use_prec:
            del use_prec[x]

    # topological sort: start from elements with no predecessors
    out = self.tmp_use_sorted = []
    tmp = []
    for x in self.tmp_use_seen:
        for k in use_prec.values():
            if x in k:
                break
        else:
            tmp.append(x)

    while tmp:
        e = tmp.pop()
        out.append(e)
        try:
            nlst = use_prec[e]
        except KeyError:
            pass
        else:
            del use_prec[e]
            for x in nlst:
                # only enqueue x once all generators preceding it are consumed
                for y in use_prec:
                    if x in use_prec[y]:
                        break
                else:
                    tmp.append(x)
    if use_prec:
        raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
    out.reverse()

    link_task = getattr(self, 'link_task', None)
    for x in out:
        y = self.bld.get_tgen_by_name(x)
        var = y.tmp_use_var
        if var and link_task:
            if var == 'LIB' or y.tmp_use_stlib or x in names:
                # link against the library and record its path for the linker
                self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
                self.link_task.dep_nodes.extend(y.link_task.outputs)
                tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
                self.env.append_unique(var + 'PATH', [tmp_path])
        else:
            if y.tmp_use_objects:
                self.add_objects_from_tgen(y)

        if getattr(y, 'export_includes', None):
            self.includes.extend(y.to_incnodes(y.export_includes))
        if getattr(y, 'export_defines', None):
            self.env.append_value('DEFINES', self.to_list(y.export_defines))

    # and finally, add the use variables (no recursion needed)
    for x in names:
        try:
            y = self.bld.get_tgen_by_name(x)
        except Errors.WafError:
            # not a task generator: treat the name as a plain uselib flag
            if not self.env['STLIB_' + x] and not x in self.uselib:
                self.uselib.append(x)
        else:
            for k in self.to_list(getattr(y, 'use', [])):
                if not self.env['STLIB_' + k] and not k in self.uselib:
                    self.uselib.append(k)