def env_path(parent_dir_var, name):
    # note: 'conf' is not a parameter here; it must be available from the
    # enclosing scope (e.g. this function is nested inside configure(conf))
    parent = os.getenv(parent_dir_var)
    if parent:
        return os.path.join(parent, name)
    else:
        Logs.warn('Environment variable %s unset, using LIBDIR\n' % parent_dir_var)
        return os.path.join(conf.env['LIBDIR'], name)
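# A minimal usage sketch for env_path(); the variable name and paths are
# hypothetical, and 'conf' must be in scope as noted above.
os.environ['PLUGIN_DIR'] = '/opt/plugins'
print(env_path('PLUGIN_DIR', 'filters'))  # -> /opt/plugins/filters
# with PLUGIN_DIR unset, a warning is logged and the result falls back to
# os.path.join(conf.env['LIBDIR'], 'filters')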
def configure(conf):
    areCustomCxxflagsPresent = (len(conf.env.CXXFLAGS) > 0)
    defaultFlags = []
    if conf.options.use_cxx11:
        defaultFlags += ['-std=c++0x', '-std=c++11']
    else:
        defaultFlags += ['-std=c++03', '-Wno-variadic-macros', '-Wno-c99-extensions']
    defaultFlags += ['-pedantic', '-Wall', '-Wno-long-long', '-Wno-unneeded-internal-declaration']

    if conf.options.debug:
        conf.define('_DEBUG', 1)
        defaultFlags += ['-O0',
                         '-Og', # gcc >= 4.8
                         '-g3',
                         '-fcolor-diagnostics', # clang
                         '-fdiagnostics-color', # gcc >= 4.9
                         '-Werror',
                         '-Wno-error=deprecated-register',
                         '-Wno-error=maybe-uninitialized', # Bug #1615
                        ]
        if areCustomCxxflagsPresent:
            missingFlags = [x for x in defaultFlags if x not in conf.env.CXXFLAGS]
            if len(missingFlags) > 0:
                Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'" % " ".join(conf.env.CXXFLAGS))
                Logs.warn("Default flags '%s' are not activated" % " ".join(missingFlags))
        else:
            conf.add_supported_cxxflags(defaultFlags)
    else:
        defaultFlags += ['-O2', '-g']
        if not areCustomCxxflagsPresent:
            conf.add_supported_cxxflags(defaultFlags)
def execute(self):
    """
    See :py:func:`waflib.Context.Context.execute`
    """
    self.init_dirs()
    self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
    self.cachedir.mkdir()

    path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
    self.logger = Logs.make_logger(path, 'cfg')

    app = getattr(Context.g_module, 'APPNAME', '')
    if app:
        ver = getattr(Context.g_module, 'VERSION', '')
        if ver:
            app = "%s (%s)" % (app, ver)

    params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform,
              'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app}
    self.to_log(conf_template % params)

    self.msg('Setting top to', self.srcnode.abspath())
    self.msg('Setting out to', self.bldnode.abspath())

    if id(self.srcnode) == id(self.bldnode):
        Logs.warn('Setting top == out')
    elif id(self.path) != id(self.srcnode):
        if self.srcnode.is_child_of(self.path):
            Logs.warn('Are you certain that you do not want to set top="." ?')

    super(ConfigurationContext, self).execute()

    self.store()

    Context.top_dir = self.srcnode.abspath()
    Context.out_dir = self.bldnode.abspath()

    # this will write a configure lock so that subsequent builds will
    # consider the current path as the root directory (see prepare_impl).
    # to remove: use 'waf distclean'
    env = ConfigSet.ConfigSet()
    env.argv = sys.argv
    env.options = Options.options.__dict__
    env.config_cmd = self.cmd

    env.run_dir = Context.run_dir
    env.top_dir = Context.top_dir
    env.out_dir = Context.out_dir

    # conf.hash & conf.files hold wscript files paths and hash
    # (used only by Configure.autoconfig)
    env.hash = self.hash
    env.files = self.files
    env.environ = dict(self.environ)

    # the getattr default avoids an AttributeError when the option is not registered
    if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options, 'no_lock_in_run', None):
        env.store(os.path.join(Context.run_dir, Options.lockfile))
    if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options, 'no_lock_in_top', None):
        env.store(os.path.join(Context.top_dir, Options.lockfile))
    if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options, 'no_lock_in_out', None):
        env.store(os.path.join(Context.out_dir, Options.lockfile))
def distclean_dir(dirname):
    """
    Distclean function called in the particular case when::

        top == out

    :param dirname: absolute path of the folder to clean
    :type dirname: string
    """
    for (root, dirs, files) in os.walk(dirname):
        for f in files:
            if _can_distclean(f):
                fname = os.path.join(root, f)
                try:
                    os.remove(fname)
                except OSError:
                    Logs.warn('Could not remove %r' % fname)

    for x in (Context.DBFILE, 'config.log'):
        try:
            os.remove(x)
        except OSError:
            pass

    try:
        shutil.rmtree('c4che')
    except OSError:
        pass
def execute(self):
    if not Configure.autoconfig:
        return execute_method(self)

    env = ConfigSet.ConfigSet()
    do_config = False
    try:
        env.load(os.path.join(Context.top_dir, Options.lockfile))
    except Exception:
        Logs.warn('Configuring the project')
        do_config = True
    else:
        if env.run_dir != Context.run_dir:
            do_config = True
        else:
            h = 0
            for f in env['files']:
                h = Utils.h_list((h, Utils.readf(f, 'rb')))
            do_config = h != env.hash

    if do_config:
        Options.commands.insert(0, self.cmd)
        Options.commands.insert(0, 'configure')
        if Configure.autoconfig == 'clobber':
            Options.options.__dict__ = env.options
        return

    return execute_method(self)
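# For context, a sketch of how this hook is typically enabled from a wscript:
# with autoconfig set, stale or missing lock data re-queues 'configure' before
# the requested command, exactly as the function above implements.
from waflib import Configure
Configure.autoconfig = True  # or 'clobber' to also restore the saved options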
def bootstrap_dat_file(ctx, section_name, option_name, value):
    """ Configure the name of the bootstrap.dat file """
    if LOADED_OPTIONS.get('auto_run_bootstrap', 'False') == 'False':
        return ''
    if not _is_user_input_allowed(ctx, option_name, value):
        Logs.info('\nUser Input disabled.\nUsing default value "%s" for option: "%s"' % (value, option_name))
        return value

    # GUI
    if not ctx.is_option_true('console_mode'):
        return ctx.gui_get_attribute(section_name, option_name, value)

    Logs.info("\nName of the bootstrap.dat file to use")
    Logs.info("(Press ENTER to keep the current default value shown in [])")
    while True:
        file_name = _get_string_value(ctx, 'Bootstrap.dat file:', value)
        (res, warning, error) = ATTRIBUTE_VERIFICATION_CALLBACKS['verify_bootstrap_dat_file'](ctx, option_name, file_name)
        if res:
            break
        else:
            Logs.warn(error)

    # Valid file, return
    return file_name
def download_tool(tool, force=False, ctx=None):
    """
    Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::

        $ waf configure --download
    """
    for x in Utils.to_list(Context.remote_repo):
        for sub in Utils.to_list(Context.remote_locs):
            url = '/'.join((x, sub, tool + '.py'))
            try:
                web = urlopen(url)
                if web.getcode() != 200:
                    continue
            except Exception:
                # on python3 urlopen throws an exception
                continue
            else:
                tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
                tmp.write(web.read())
                Logs.warn('Downloaded %s from %s' % (tool, url))
                download_check(tmp)
                try:
                    module = Context.load_tool(tool)
                except Exception:
                    Logs.warn('The tool %s from %s is unusable' % (tool, url))
                    try:
                        tmp.delete()
                    except Exception:
                        pass
                    continue
                return module
    raise Errors.WafError('Could not load the Waf tool')
def _use_incredibuild(ctx, section_name, option_name, value, verification_fn):
    """ If Incredibuild should be used, check for required packages """
    if not ctx.is_option_true('ask_for_user_input'):
        (isValid, warning, error) = verification_fn(ctx, option_name, value)
        if not isValid:
            return 'False'
        return value

    # GUI
    if not ctx.is_option_true('console_mode'):
        return ctx.gui_get_attribute(section_name, option_name, value)

    if not value or value != 'True':
        return value
    if not Utils.unversioned_sys_platform() == 'win32':
        return value

    _incredibuild_disclaimer(ctx)
    ctx.start_msg('Incredibuild Licence Check')
    (res, warning, error) = verification_fn(ctx, option_name, value)
    if not res:
        if warning:
            Logs.warn(warning)
        if error:
            ctx.end_msg(error, color='YELLOW')
        return 'False'
    ctx.end_msg('ok')
    return value
def auto_run_bootstrap(ctx, section_name, option_name, value):
    """ Configure automatic bootstrap execution """
    if not ctx.is_bootstrap_available():
        return 'False'
    if not _is_user_input_allowed(ctx, option_name, value):
        Logs.info('\nUser Input disabled.\nUsing default value "%s" for option: "%s"' % (value, option_name))
        return value

    # Check for P4 support
    # On failure (i.e. no p4 available): do not show the option if the default was 'False' in the first place
    # On failure (i.e. no p4 available): do show the option if the default was 'True'
    (res, warning, error) = ATTRIBUTE_VERIFICATION_CALLBACKS['verify_auto_run_bootstrap'](ctx, option_name, 'True')
    if not res and not _is_user_option_true(value):
        return 'False'

    info_str = ["Automatically execute Branch Bootstrap on each build?"]
    info_str.append("[When disabled the user is responsible for keeping their 3rdParty folder up to date]")

    # GUI
    if not ctx.is_option_true('console_mode'):
        return ctx.gui_get_attribute(section_name, option_name, value, '\n'.join(info_str))

    info_str.append('\n(Press ENTER to keep the current default value shown in [])')
    Logs.info('\n'.join(info_str))
    while True:
        value = _get_boolean_value(ctx, 'Enable Automatic Execution of Branch BootStrap', value)
        (res, warning, error) = ATTRIBUTE_VERIFICATION_CALLBACKS['verify_auto_run_bootstrap'](ctx, option_name, value)
        if res:
            break
        else:
            Logs.warn(error)
    return value
def get_ccenv(fname):
    '''Returns a dictionary of variant C/C++ build environments, in which each key
    is the name of a variant C/C++ build environment and each value holds the
    settings for that variant build environment.

    :param fname: Complete path to the configuration file.
    :type fname: str
    '''
    if not os.path.exists(fname):
        Logs.warn("CCENV: ini file '%s' not found!" % fname)
    ccenv = {}
    c = configparser.ConfigParser()
    c.read(fname)
    for s in c.sections():
        ccenv[s] = {'prefix': None, 'shlib': [], 'env': [], 'c': ['gcc'], 'cxx': ['g++', 'cpp']}
        if c.has_option(s, 'c'):
            ccenv[s]['c'] = c.get(s, 'c').split(',')
        if c.has_option(s, 'cxx'):
            ccenv[s]['cxx'] = c.get(s, 'cxx').split(',')
        if c.has_option(s, 'prefix'):
            ccenv[s]['prefix'] = c.get(s, 'prefix')
        if c.has_option(s, 'shlib'):
            ccenv[s]['shlib'] = [l for l in str(c.get(s, 'shlib')).split(',') if len(l)]
        if c.has_option(s, 'env'):
            ccenv[s]['env'] = [l.split('\t') for l in c.get(s, 'env').splitlines() if len(l)]
        if c.has_option(s, 'host'):
            ccenv[s]['host'] = c.get(s, 'host')
    return ccenv
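# A minimal usage sketch for get_ccenv(); the file name, section name and values
# are hypothetical. All keys are optional and fall back to the defaults above;
# 'env' entries are TAB separated, one per (indented) line, 'shlib' and the
# compiler lists are comma separated.
ccenv_sample = (
    '[arm-hardfloat]\n'
    'prefix = arm-linux-gnueabihf\n'
    'c = gcc\n'
    'cxx = g++,cpp\n'
    'shlib = m,rt\n'
    'env = STAGEDIR\t/opt/stage\n'
    '\tSYSROOT\t/opt/sysroot\n'
)
with open('ccenv.ini', 'w') as f:
    f.write(ccenv_sample)
print(get_ccenv('ccenv.ini')['arm-hardfloat']['prefix'])  # -> arm-linux-gnueabihf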
def configure(conf):
    areCustomCxxflagsPresent = (len(conf.env.CXXFLAGS) > 0)
    defaultFlags = []
    defaultFlags += ['-pedantic', '-Wall', '-Wno-long-long']

    if conf.options.debug:
        conf.define('_DEBUG', 1)
        defaultFlags += ['-O0',
                         '-Og', # gcc >= 4.8
                         '-g3',
                         '-fcolor-diagnostics', # clang
                         '-fdiagnostics-color', # gcc >= 4.9
                         '-Werror',
                        ]
        if areCustomCxxflagsPresent:
            missingFlags = [x for x in defaultFlags if x not in conf.env.CXXFLAGS]
            if len(missingFlags) > 0:
                Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'" % " ".join(conf.env.CXXFLAGS))
                Logs.warn("Default flags '%s' are not activated" % " ".join(missingFlags))
        else:
            conf.add_supported_cxxflags(defaultFlags)
    else:
        defaultFlags += ['-O2', '-g']
        if not areCustomCxxflagsPresent:
            conf.add_supported_cxxflags(defaultFlags)
def distclean_dir(dirname):
    """
    Distclean function called in the particular case when::

        top == out

    :param dirname: absolute path of the folder to clean
    :type dirname: string
    """
    for (root, dirs, files) in os.walk(dirname):
        for f in files:
            if _can_distclean(f):
                fname = root + os.sep + f
                try:
                    os.unlink(fname)
                except OSError:
                    Logs.warn('could not remove %r' % fname)

    for x in [Context.DBFILE, 'config.log']:
        try:
            os.unlink(x)
        except OSError:
            pass

    try:
        shutil.rmtree('c4che')
    except OSError:
        pass
def download_tool(tool, force=False, ctx=None):
    for x in Utils.to_list(Context.remote_repo):
        for sub in Utils.to_list(Context.remote_locs):
            url = '/'.join((x, sub, tool + '.py'))
            try:
                web = urlopen(url)
                try:
                    if web.getcode() != 200:
                        continue
                except AttributeError:
                    pass
            except Exception:
                continue
            else:
                tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
                tmp.write(web.read(), 'wb')
                Logs.warn('Downloaded %s from %s' % (tool, url))
                download_check(tmp)
                try:
                    module = Context.load_tool(tool)
                except Exception:
                    Logs.warn('The tool %s from %s is unusable' % (tool, url))
                    try:
                        tmp.delete()
                    except Exception:
                        pass
                    continue
                return module
    raise Errors.WafError('Could not load the Waf tool')
def configure(conf):
    cc = conf.env['COMPILER_CC'] or None
    cxx = conf.env['COMPILER_CXX'] or None
    if not (cc or cxx):
        raise Utils.WafError("neither COMPILER_CC nor COMPILER_CXX are defined; "
                             "maybe the compiler_cc or compiler_cxx tool has not been configured yet?")

    try:
        compiler = compiler_mapping[cc]
    except KeyError:
        try:
            compiler = compiler_mapping[cxx]
        except KeyError:
            Logs.warn("No compiler flags support for compiler %r or %r" % (cc, cxx))
            return

    opt_level, warn_level, dbg_level = profiles[Options.options.build_profile]
    optimizations = compiler.get_optimization_flags(opt_level)
    debug, debug_defs = compiler.get_debug_flags(dbg_level)
    warnings = compiler.get_warnings_flags(warn_level)

    if cc and not conf.env['CCFLAGS']:
        conf.env.append_value('CCFLAGS', optimizations)
        conf.env.append_value('CCFLAGS', debug)
        conf.env.append_value('CCFLAGS', warnings)
        conf.env.append_value('CCDEFINES', debug_defs)
    if cxx and not conf.env['CXXFLAGS']:
        conf.env.append_value('CXXFLAGS', optimizations)
        conf.env.append_value('CXXFLAGS', debug)
        conf.env.append_value('CXXFLAGS', warnings)
        conf.env.append_value('CXXDEFINES', debug_defs)
def build_version_files(header_path, source_path, domain, major, minor, micro, exportname, visheader):
    header_path = os.path.abspath(header_path)
    source_path = os.path.abspath(source_path)
    text = "int " + domain + "_major_version = " + str(major) + ";\n"
    text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
    text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
    try:
        o = open(source_path, 'w')
        o.write(text)
        o.close()
    except IOError:
        Logs.error('Failed to open %s for writing\n' % source_path)
        sys.exit(-1)

    text = "#ifndef __" + domain + "_version_h__\n"
    text += "#define __" + domain + "_version_h__\n"
    if visheader != '':
        text += "#include \"" + visheader + "\"\n"
    text += exportname + " extern const char* " + domain + "_revision;\n"
    text += exportname + " extern int " + domain + "_major_version;\n"
    text += exportname + " extern int " + domain + "_minor_version;\n"
    text += exportname + " extern int " + domain + "_micro_version;\n"
    text += "#endif /* __" + domain + "_version_h__ */\n"
    try:
        o = open(header_path, 'w')
        o.write(text)
        o.close()
    except IOError:
        Logs.warn('Failed to open %s for writing\n' % header_path)
        sys.exit(-1)

    return None
def get_ccross(fname):
    '''Returns a dictionary of cross-compile build environments, in which each key
    is the environment name (i.e. variant name).

    :param fname: Complete path to the config.ini file
    :type fname: str
    '''
    if not os.path.exists(fname):
        Logs.warn("CCROSS: ini file '%s' not found!" % fname)
    cross = {}
    c = configparser.ConfigParser()
    c.read(fname)
    for s in c.sections():
        cross[s] = {'prefix': None, 'shlib': [], 'env': [], 'c': ['gcc'], 'cxx': ['g++']}
        if c.has_option(s, 'c'):
            cross[s]['c'] = c.get(s, 'c').split(',')
        if c.has_option(s, 'cxx'):
            cross[s]['cxx'] = c.get(s, 'cxx').split(',')
        if c.has_option(s, 'prefix'):
            cross[s]['prefix'] = c.get(s, 'prefix')
        if c.has_option(s, 'shlib'):
            cross[s]['shlib'] = [l for l in str(c.get(s, 'shlib')).split(',') if len(l)]
        if c.has_option(s, 'env'):
            cross[s]['env'] = [l.split('\t') for l in c.get(s, 'env').splitlines() if len(l)]
    return cross
def compile(self):
    if not self.files:
        Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
        BuildContext.compile(self)
        return
    for g in self.groups:
        for tg in g:
            try:
                f = tg.post
            except AttributeError:
                pass
            else:
                f()
        for pat in self.files.split(','):
            matcher = self.get_matcher(pat)
            for tg in g:
                if isinstance(tg, Task.TaskBase):
                    lst = [tg]
                else:
                    lst = tg.tasks
                for tsk in lst:
                    do_exec = False
                    for node in getattr(tsk, 'inputs', []):
                        if matcher(node, output=False):
                            do_exec = True
                            break
                    for node in getattr(tsk, 'outputs', []):
                        if matcher(node, output=True):
                            do_exec = True
                            break
                    if do_exec:
                        ret = tsk.run()
                        Logs.info('%s -> exit %r' % (str(tsk), ret))
def invalidate_cache(self):
    """Invalidate the cache to prevent bad builds."""
    try:
        Logs.warn("Removing the cached configuration since the options have changed")
        shutil.rmtree(self.cache_path)
    except OSError:
        pass
def write_files(self):
    if self.all_projects:
        # Generate the sln config|plat for this variant
        prop = msvs.build_property()
        prop.platform_tgt = self.env.CSPLATFORM
        prop.platform = self.get_platform(self.env)
        prop.platform_sln = prop.platform_tgt.replace('AnyCPU', 'Any CPU')
        prop.configuration = self.get_config(self, self.env)
        prop.variant = self.variant
        idegen.sln_configs[prop.variant] = prop
        idegen.all_projs[self.variant] = self.all_projects

    idegen.depth -= 1
    if idegen.depth == 0:
        self.all_projects = self.flatten_projects()
        if Logs.verbose == 0:
            sys.stderr.write('\n')
        for p in self.all_projects:
            p.write()
        self.make_sln_configs()

        # and finally write the solution file
        node = self.get_solution_node()
        node.parent.mkdir()
        Logs.warn('Creating %r' % node)
        template1 = msvs.compile_template(msvs.SOLUTION_TEMPLATE)
        sln_str = template1(self)
        sln_str = msvs.rm_blank_lines(sln_str)
        node.stealth_write(sln_str)
def build(bld):
    if not "NETCACHE" in os.environ and not "NETCACHE_PULL" in os.environ and not "NETCACHE_PUSH" in os.environ:
        Logs.warn("Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001")
        os.environ["NETCACHE_PULL"] = "127.0.0.1:12001"
        os.environ["NETCACHE_PUSH"] = "127.0.0.1:11001"

    if "NETCACHE" in os.environ:
        if not "NETCACHE_PUSH" in os.environ:
            os.environ["NETCACHE_PUSH"] = os.environ["NETCACHE"]
        if not "NETCACHE_PULL" in os.environ:
            os.environ["NETCACHE_PULL"] = os.environ["NETCACHE"]

    v = os.environ["NETCACHE_PULL"]
    if v:
        h, p = v.split(":")
        pull_addr = (h, int(p))
    else:
        pull_addr = None

    v = os.environ["NETCACHE_PUSH"]
    if v:
        h, p = v.split(":")
        push_addr = (h, int(p))
    else:
        push_addr = None

    setup_netcache(bld, push_addr, pull_addr)
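# A usage sketch (hypothetical server address): setting NETCACHE before the
# build points both the pull and push directions at one cache server, so the
# localhost defaults above are never applied.
import os
os.environ['NETCACHE'] = '10.0.0.5:51200'  # host:port of an assumed cache server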
def scan(self):
    "scan for swig dependencies, climb the .i files"
    env = self.env
    lst_src = []

    seen = []
    to_see = [self.inputs[0]]

    while to_see:
        node = to_see.pop(0)
        if node in seen:
            continue
        seen.append(node)
        lst_src.append(node)

        # read the file
        code = node.read()
        code = c_preproc.re_nl.sub('', code)
        code = c_preproc.re_cpp.sub(c_preproc.repl, code)

        # find .i files and project headers
        names = re_2.findall(code) + re_3.findall(code)
        for n in names:
            for d in self.generator.includes_nodes + [node.parent]:
                u = d.find_resource(n)
                if u:
                    to_see.append(u)
                    break
            else:
                Logs.warn('could not find %r' % n)

    return (lst_src, [])
def execute(self):
    if not Configure.autoconfig:
        return execute_method(self)

    env = ConfigSet.ConfigSet()
    do_config = False
    if self.root.find_node(self.cache_dir) is None:
        do_config = True
    else:
        try:
            env.load(os.path.join(Context.lock_dir, Options.lockfile))
        except Exception:
            Logs.warn('Configuring the project')
            do_config = True
        else:
            if env.run_dir != Context.run_dir:
                do_config = True
            else:
                h = 0
                for f in env['files']:
                    try:
                        h = hash((h, Utils.readf(f, 'rb')))
                    except (IOError, EOFError):
                        pass  # ignore missing files (the changed hash will trigger a reconfigure)
                do_config = h != env.hash

    if do_config:
        Options.commands.insert(0, self.cmd)
        Options.commands.insert(0, 'configure')
        self.skip_finish_message = True
        return

    return execute_method(self)
def build(bld):
    if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 'NETCACHE_PUSH' in os.environ:
        Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
        os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
        os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'

    if 'NETCACHE' in os.environ:
        if not 'NETCACHE_PUSH' in os.environ:
            os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
        if not 'NETCACHE_PULL' in os.environ:
            os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']

    v = os.environ['NETCACHE_PULL']
    if v:
        h, p = v.split(':')
        pull_addr = (h, int(p))
    else:
        pull_addr = None

    v = os.environ['NETCACHE_PUSH']
    if v:
        h, p = v.split(':')
        push_addr = (h, int(p))
    else:
        push_addr = None

    setup_netcache(bld, push_addr, pull_addr)
def execute(self):
    if not Configure.autoconfig:
        return execute_method(self)

    env = ConfigSet.ConfigSet()
    do_config = False
    try:
        env.load(os.path.join(Context.top_dir, Options.lockfile))
    except Exception:
        Logs.warn("Configuring the project")
        do_config = True
    else:
        if env.run_dir != Context.run_dir:
            do_config = True
        else:
            h = 0
            for f in env["files"]:
                h = hash((h, Utils.readf(f, "rb")))
            do_config = h != env.hash

    if do_config:
        Options.commands.insert(0, self.cmd)
        Options.commands.insert(0, "configure")
        return

    return execute_method(self)
def load_device_list(self):
    self.devices = {}
    device_dir = os.path.join(Context.top_dir, '.devices')
    try:
        os.mkdir(device_dir, 0o700)
    except OSError:
        pass
    localhost = False
    for device_file in os.listdir(device_dir):
        try:
            with open(os.path.join(device_dir, device_file), 'rb') as device_file_object:
                d = pickle.load(device_file_object)
        # IOError must be caught before the generic handler, otherwise it is unreachable
        except IOError:
            Logs.warn('device file %s could not be opened' % device_file)
        except Exception as e:
            Logs.warn('device file %s could not be opened: %s' % (device_file, e))
        else:
            try:
                self.devices[d.platform.__class__.__name__].append(d)
            except KeyError:
                self.devices[d.platform.__class__.__name__] = [d]
            localhost = localhost or (d.name == 'localhost')
    if not localhost:
        localhost = device.Device('localhost', 'localhost://')
        try:
            self.devices[localhost.platform.__class__.__name__].append(localhost)
        except KeyError:
            self.devices[localhost.platform.__class__.__name__] = [localhost]
def configure(conf):
    areCustomCxxflagsPresent = (len(conf.env.CXXFLAGS) > 0)
    defaultFlags = ['-std=c++0x', '-std=c++11',
                    '-stdlib=libc++', # clang on OSX < 10.9 by default uses a non
                                      # C++11-compatible STL library
                    '-Wall', '-Wno-long-long']

    if conf.options.debug:
        conf.define('_DEBUG', 1)
        defaultFlags += ['-O0',
                         '-Og', # gcc >= 4.8
                         '-g3',
                         '-fcolor-diagnostics', # clang
                         '-fdiagnostics-color', # gcc >= 4.9
                         '-Werror',
                         '-Wno-error=deprecated-register',
                         '-Wno-error=maybe-uninitialized', # Bug #1615
                         '-Wno-error=unneeded-internal-declaration', # Bug #1588
                         '-Wno-nested-anon-types',
                        ]
        if areCustomCxxflagsPresent:
            missingFlags = [x for x in defaultFlags if x not in conf.env.CXXFLAGS]
            if len(missingFlags) > 0:
                Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'" % " ".join(conf.env.CXXFLAGS))
                Logs.warn("Default flags '%s' are not activated" % " ".join(missingFlags))
        else:
            conf.add_supported_cxxflags(defaultFlags)
    else:
        defaultFlags += ['-O2', '-g']
        if not areCustomCxxflagsPresent:
            conf.add_supported_cxxflags(defaultFlags)

    # clang on OSX < 10.9 by default uses a non C++11-compatible STL library
    conf.add_supported_linkflags(['-stdlib=libc++'])
def configure(conf):
    areCustomCxxflagsPresent = (len(conf.env.CXXFLAGS) > 0)
    defaultFlags = ['-std=c++0x', '-std=c++11',
                    '-stdlib=libc++', # clang on OSX < 10.9 by default uses gcc's
                                      # libstdc++, which is not C++11 compatible
                    '-pedantic', '-Wall']

    if conf.options.debug:
        conf.define('_DEBUG', 1)
        defaultFlags += ['-O0',
                         '-Og', # gcc >= 4.8
                         '-g3',
                         '-fcolor-diagnostics', # clang
                         '-fdiagnostics-color', # gcc >= 4.9
                         '-Werror',
                         '-Wno-error=maybe-uninitialized',
                        ]
        if areCustomCxxflagsPresent:
            missingFlags = [x for x in defaultFlags if x not in conf.env.CXXFLAGS]
            if len(missingFlags) > 0:
                Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'" % " ".join(conf.env.CXXFLAGS))
                Logs.warn("Default flags '%s' are not activated" % " ".join(missingFlags))
        else:
            conf.add_supported_cxxflags(defaultFlags)
    else:
        defaultFlags += ['-O2', '-g']
        if not areCustomCxxflagsPresent:
            conf.add_supported_cxxflags(defaultFlags)

    # clang on OSX < 10.9 by default uses gcc's libstdc++, which is not C++11 compatible
    conf.add_supported_linkflags(['-stdlib=libc++'])
def postfun(self):
    if self.failure:
        build_show_failure(self)
    elif not len(self.targets):
        Logs.warn('makefile export failed: no C/C++ targets found')
    else:
        build_postfun(self)
def find_ifort_win32(conf):
    v = conf.env
    path = v.PATH
    compiler = v.MSVC_COMPILER
    version = v.MSVC_VERSION

    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)

    fc = conf.find_program(compiler_name, var='FC', path_list=path)

    # before setting anything else, verify that the tool really is Intel Fortran
    env = dict(conf.environ)
    if path:
        env.update(PATH=';'.join(path))
    if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
        conf.fatal('Intel Fortran compiler could not be identified')

    v.FC_NAME = 'IFORT'

    if not v.LINK_FC:
        conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)

    if not v.AR:
        conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
        v.ARFLAGS = ['/nologo']

    # manifest tool, used by the link tasks when the manifest is generated
    if v.IFORT_MANIFEST:
        conf.find_program('MT', path_list=path, var='MT')
        v.MTFLAGS = ['/nologo']

    try:
        conf.load('winres')
    except Errors.WafError:
        Logs.warn('Resource compiler not found. Compiling resource file is disabled')
def post_test(ctx, appname, dirs=['src'], remove=['*boost*', 'c++*']):
    diropts = ''
    for i in dirs:
        diropts += ' -d ' + i
    coverage_log = open('lcov-coverage.log', 'w')
    coverage_lcov = open('coverage.lcov', 'w')
    coverage_stripped_lcov = open('coverage-stripped.lcov', 'w')
    try:
        try:
            base = '.'
            if g_is_child:
                base = '..'
            subprocess.call(('lcov -c %s -b %s' % (diropts, base)).split(),
                            stdout=coverage_lcov, stderr=coverage_log)
            subprocess.call(['lcov', '--remove', 'coverage.lcov'] + remove,
                            stdout=coverage_stripped_lcov, stderr=coverage_log)
            if not os.path.isdir('coverage'):
                os.makedirs('coverage')
            subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),
                            stdout=coverage_log, stderr=coverage_log)
        except Exception:
            Logs.warn('Failed to run lcov, no coverage report will be generated')
    finally:
        coverage_stripped_lcov.close()
        coverage_lcov.close()
        coverage_log.close()

    print('')
    Logs.pprint('GREEN', "Waf: Leaving directory `%s'" % os.path.abspath(os.getcwd()))
    top_level = (len(ctx.stack_path) > 1)
    if top_level:
        cd_to_orig_dir(ctx, top_level)
    print('')
    Logs.pprint('BOLD', 'Coverage:', sep='')
    print('<file://%s>\n\n' % os.path.abspath('coverage/index.html'))
def execute_waf_via_ib(bld):
    # Check if we can execute remotely with our current licenses
    if not bld.is_option_true('use_incredibuild_win') and ('win32' in bld.cmd or 'win64' in bld.cmd):
        Logs.warn('[WARNING] Incredibuild for Windows targets disabled by build option')
        return False
    if not bld.is_option_true('use_incredibuild_win') and ('win_x86' in bld.cmd or 'win_x64' in bld.cmd):
        Logs.warn('[WARNING] Incredibuild for Windows targets disabled by build option')
        return False
    if not bld.is_option_true('use_incredibuild_durango') and 'durango' in bld.cmd:
        Logs.warn('[WARNING] Incredibuild for Durango targets disabled by build option')
        return False
    if not bld.is_option_true('use_incredibuild_orbis') and 'orbis' in bld.cmd:
        Logs.warn('[WARNING] Incredibuild for Orbis targets disabled by build option')
        return False

    result = get_ib_licence_string()
    has_make_and_build_license = 'Make && Build Tools' in result
    has_dev_tools_acceleration_license = 'Dev Tools Acceleration' in result
    has_multi_core_license = 'Cores' in result

    if not has_make_and_build_license and not has_dev_tools_acceleration_license:
        Logs.warn('Neither "Make && Build Tools" nor "Dev Tools Acceleration" package found. You need at least one. Incredibuild disabled.')
        return False

    if not 'PlayStation' in result and 'orbis' in bld.cmd:
        Logs.warn('Playstation Extension Package not found! Incredibuild will build locally only.')

    # Disabled as current build pipeline does not require the licence
    #if not 'Xbox One' in result and 'durango' in bld.cmd:
    #    Logs.warn('Xbox One Extension Package not found! Incredibuild will build locally only.')

    cmd_line_args = []
    for arg in sys.argv[1:]:
        if arg == 'generate_uber_files':
            continue
        cmd_line_args += [arg]
    command_line_options = ' '.join(cmd_line_args)  # Recreate command line

    # Add special option to not start IB from within IB
    command_line_options += ' --internal-dont-check-recursive-execution=True'

    # Build Command Line
    cry_waf = bld.path.make_node('Code/Tools/waf-1.7.13/bin/cry_waf.exe')
    command = cry_waf.abspath() + ' ' + command_line_options

    if bld.is_option_true('run_ib_as_service'):
        Logs.info('[WAF] Starting Incredibuild as "Build Service"')
        allow_local = "/AvoidLocal=On"
    else:
        Logs.info('[WAF] Starting Incredibuild as "Build Master"')
        allow_local = "/AvoidLocal=Off"

    if not has_multi_core_license:
        allow_local = "/AvoidLocal=On"

    # Get correct incredibuild installation folder to not depend on PATH
    ib_folder = get_ib_folder()
    if (has_dev_tools_acceleration_license and not has_make_and_build_license) or \
            'linux' in bld.variant or 'android' in bld.variant or 'cppcheck' in bld.variant:
        try:
            p = subprocess.Popen([str(ib_folder) + '/xgconsole.exe',
                                  "/command=" + command,
                                  "/profile=Code\\Tools\\waf-1.7.13\\profile.xml",
                                  "/useidemonitor", "/nologo", allow_local])
        except Exception:
            raise BuildError()
    else:
        try:
            p = subprocess.Popen([str(ib_folder) + '/BuildConsole.exe',
                                  "/command=" + command,
                                  "/useidemonitor", "/nologo", allow_local])
        except Exception:
            raise BuildError()

    if not bld.instance_is_build_master():
        sys.exit(p.wait())
    return True
def waf_entry_point(current_directory, version, wafdir):
    """
    This is the main entry point, all Waf execution starts here.

    :param current_directory: absolute path representing the current directory
    :type current_directory: string
    :param version: version number
    :type version: string
    :param wafdir: absolute path representing the directory of the waf library
    :type wafdir: string
    """
    Logs.init_log()

    if Context.WAFVERSION != version:
        Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
        sys.exit(1)

    if '--version' in sys.argv:
        Context.run_dir = current_directory
        ctx = Context.create_context('options')
        ctx.curdir = current_directory
        ctx.parse_args()
        sys.exit(0)

    if len(sys.argv) > 1:
        # os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
        # if sys.argv[1] is not an absolute path, then it is relative to the current working directory
        potential_wscript = os.path.join(current_directory, sys.argv[1])
        # maybe check if the file is executable
        # perhaps extract 'wscript' as a constant
        if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
            # need to explicitly normalize the path, as it may contain extra '/.'
            # TODO abspath?
            current_directory = os.path.normpath(os.path.dirname(potential_wscript))
            sys.argv.pop(1)

    Context.waf_dir = wafdir
    Context.launch_dir = current_directory

    # if 'configure' is in the commands, do not search any further
    no_climb = os.environ.get('NOCLIMB', None)
    if not no_climb:
        for k in no_climb_commands:
            for y in sys.argv:
                if y.startswith(k):
                    no_climb = True
                    break

    # if --top is provided assume the build started in the top directory
    for i, x in enumerate(sys.argv):
        # WARNING: this modifies sys.argv
        if x.startswith('--top='):
            Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
            sys.argv[i] = '--top=' + Context.run_dir
        if x.startswith('--out='):
            Context.out_dir = Utils.sane_path(x[6:])
            sys.argv[i] = '--out=' + Context.out_dir

    # try to find a lock file (if the project was configured)
    # at the same time, store the first wscript file seen
    cur = current_directory
    while cur and not Context.top_dir:
        try:
            lst = os.listdir(cur)
        except OSError:
            lst = []
            Logs.error('Directory %r is unreadable!' % cur)
        if Options.lockfile in lst:
            env = ConfigSet.ConfigSet()
            try:
                env.load(os.path.join(cur, Options.lockfile))
                ino = os.stat(cur)[stat.ST_INO]
            except Exception:
                pass
            else:
                # check if the folder was not moved
                for x in (env.run_dir, env.top_dir, env.out_dir):
                    if Utils.is_win32:
                        if cur == x:
                            load = True
                            break
                    else:
                        # if the filesystem features symlinks, compare the inode numbers
                        try:
                            ino2 = os.stat(x)[stat.ST_INO]
                        except OSError:
                            pass
                        else:
                            if ino == ino2:
                                load = True
                                break
                else:
                    Logs.warn('invalid lock file in %s' % cur)
                    load = False

                if load:
                    Context.run_dir = env.run_dir
                    Context.top_dir = env.top_dir
                    Context.out_dir = env.out_dir
                    break

        if not Context.run_dir:
            if Context.WSCRIPT_FILE in lst:
                Context.run_dir = cur

        next = os.path.dirname(cur)
        if next == cur:
            break
        cur = next

        if no_climb:
            break

    if not Context.run_dir:
        if '-h' in sys.argv or '--help' in sys.argv:
            Logs.warn('No wscript file found: the help message may be incomplete')
            Context.run_dir = current_directory
            ctx = Context.create_context('options')
            ctx.curdir = current_directory
            ctx.parse_args()
            sys.exit(0)
        Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
        sys.exit(1)

    try:
        os.chdir(Context.run_dir)
    except OSError:
        Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
        sys.exit(1)

    try:
        set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
    except Errors.WafError as e:
        Logs.pprint('RED', e.verbose_msg)
        Logs.error(str(e))
        sys.exit(1)
    except Exception as e:
        Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(2)

    """
    import cProfile, pstats
    cProfile.runctx("from waflib import Scripting; Scripting.run_commands()", {}, {}, 'profi.txt')
    p = pstats.Stats('profi.txt')
    p.sort_stats('time').print_stats(75) # or 'cumulative'
    """
    try:
        run_commands()
    except Errors.WafError as e:
        if Logs.verbose > 1:
            Logs.pprint('RED', e.verbose_msg)
        Logs.error(e.msg)
        sys.exit(1)
    except SystemExit:
        raise
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        sys.exit(2)
    except KeyboardInterrupt:
        Logs.pprint('RED', 'Interrupted')
        sys.exit(68)
def check_boost(self, *k, **kw):
    """
    Initialize boost libraries to be used.

    Keywords: you can pass the same parameters as with the command line (without "--boost-").
    Note that the command line has the priority, and should preferably be used.
    """
    if not self.env['CXX']:
        self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')

    params = {'lib': k and k[0] or kw.get('lib', None),
              'stlib': kw.get('stlib', None)}
    for key, value in self.options.__dict__.items():
        if not key.startswith('boost_'):
            continue
        key = key[len('boost_'):]
        params[key] = value and value or kw.get(key, '')

    var = kw.get('uselib_store', 'BOOST')

    self.start_msg('Checking boost includes')
    self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
    self.env.BOOST_VERSION = self.boost_get_version(inc)
    self.end_msg(self.env.BOOST_VERSION)
    if Logs.verbose:
        Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])

    if not params['lib'] and not params['stlib']:
        return
    if 'static' in kw or 'static' in params:
        Logs.warn('boost: static parameter is deprecated, use stlib instead.')
    self.start_msg('Checking boost libs')
    path, libs, stlibs = self.boost_get_libs(**params)
    self.env['LIBPATH_%s' % var] = [path]
    self.env['STLIBPATH_%s' % var] = [path]
    self.env['LIB_%s' % var] = libs
    self.env['STLIB_%s' % var] = stlibs
    self.end_msg('ok')
    if Logs.verbose:
        Logs.pprint('CYAN', ' path : %s' % path)
        Logs.pprint('CYAN', ' shared libs : %s' % libs)
        Logs.pprint('CYAN', ' static libs : %s' % stlibs)

    def try_link():
        if (params['lib'] and 'system' in params['lib']) or \
           (params['stlib'] and 'system' in params['stlib']):
            self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
        if (params['lib'] and 'thread' in params['lib']) or \
           (params['stlib'] and 'thread' in params['stlib']):
            self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
        if (params['lib'] and 'log' in params['lib']) or \
           (params['stlib'] and 'log' in params['stlib']):
            self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)

    if params.get('linkage_autodetect', False):
        self.start_msg("Attempting to detect boost linkage flags")
        toolset = self.boost_get_toolset(kw.get('toolset', ''))
        if toolset in ('vc',):
            # disable auto-linking feature, causing error LNK1181
            # because the code wants to be linked against
            self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']

            # if no dlls are present, we guess the .lib files are not stubs
            has_dlls = False
            for x in Utils.listdir(path):
                if x.endswith(self.env.cxxshlib_PATTERN % ''):
                    has_dlls = True
                    break
            if not has_dlls:
                self.env['STLIBPATH_%s' % var] = [path]
                self.env['STLIB_%s' % var] = libs
                del self.env['LIB_%s' % var]
                del self.env['LIBPATH_%s' % var]

            # we attempt to play with some known-to-work CXXFLAGS combinations
            for cxxflags in (['/MD', '/EHsc'], []):
                self.env.stash()
                self.env["CXXFLAGS_%s" % var] += cxxflags
                try:
                    try_link()
                    self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
                    exc = None
                    break
                except Errors.ConfigurationError as e:
                    self.env.revert()
                    exc = e

            if exc is not None:
                self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
                self.fatal('The configuration failed')
        else:
            self.end_msg("Boost linkage flags auto-detection not implemented (needed?) for this toolchain")
            self.fatal('The configuration failed')
    else:
        self.start_msg('Checking for boost linkage')
        try:
            try_link()
        except Errors.ConfigurationError as e:
            self.end_msg("Could not link against boost libraries using supplied options")
            self.fatal('The configuration failed')
        self.end_msg('ok')
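# A minimal usage sketch for check_boost() from a project wscript (the requested
# libraries and target names are illustrative, not prescribed by the tool):
def configure(conf):
    conf.load('compiler_cxx boost')
    conf.check_boost(lib='system thread')

def build(bld):
    # 'use=BOOST' picks up the INCLUDES_/LIB_/LIBPATH_ variables stored above
    bld.program(source='main.cpp', target='app', use='BOOST')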
def vala_file(self, node):
    valatask = getattr(self, "valatask", None)
    # there is a single vala task per task generator; it compiles all vala files
    if not valatask:
        def _get_api_version():
            api_version = getattr(Context.g_module, 'API_VERSION', None)
            if api_version is None:
                version = Context.g_module.VERSION.split(".")
                if version[0] == "0":
                    api_version = "0." + version[1]
                else:
                    api_version = version[0] + ".0"
            return api_version

        valatask = self.create_task('valac')
        self.valatask = valatask
        self.includes = Utils.to_list(getattr(self, 'includes', []))
        self.uselib = self.to_list(getattr(self, 'uselib', []))
        valatask.packages = []
        valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
        valatask.vapi_dirs = []
        valatask.target = self.target
        valatask.threading = False
        valatask.install_path = getattr(self, 'install_path', '')
        valatask.profile = getattr(self, 'profile', 'gobject')
        valatask.vala_defines = getattr(self, 'vala_defines', [])
        valatask.target_glib = None
        valatask.gir = getattr(self, 'gir', None)
        valatask.gir_path = getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0')
        valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
        valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
        valatask.header_path = getattr(self, 'header_path',
                                       '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
        valatask.is_lib = False
        if not 'cprogram' in self.features:
            valatask.is_lib = True

        packages = Utils.to_list(getattr(self, 'packages', []))
        vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
        includes = []

        if hasattr(self, 'use'):
            local_packages = Utils.to_list(self.use)[:]
            seen = []
            while len(local_packages) > 0:
                package = local_packages.pop()
                if package in seen:
                    continue
                seen.append(package)

                try:
                    package_obj = self.bld.get_tgen_by_name(package)
                except Errors.WafError:
                    continue
                package_name = package_obj.target
                package_node = package_obj.path
                package_dir = package_node.path_from(self.path)
                for task in package_obj.tasks:
                    for output in task.outputs:
                        if output.name == package_name + ".vapi":
                            valatask.set_run_after(task)
                            if package_name not in packages:
                                packages.append(package_name)
                            if package_dir not in vapi_dirs:
                                vapi_dirs.append(package_dir)
                            if package_dir not in includes:
                                includes.append(package_dir)
                if hasattr(package_obj, 'use'):
                    lst = self.to_list(package_obj.use)
                    lst.reverse()
                    local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

        valatask.packages = packages
        for vapi_dir in vapi_dirs:
            try:
                valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
                valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).get_bld().abspath())
            except AttributeError:
                Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)

        self.includes.append(self.bld.srcnode.abspath())
        self.includes.append(self.bld.bldnode.abspath())
        for include in includes:
            try:
                self.includes.append(self.path.find_dir(include).abspath())
                self.includes.append(self.path.find_dir(include).get_bld().abspath())
            except AttributeError:
                Logs.warn("Unable to locate include directory: '%s'" % include)

        if valatask.profile == 'gobject':
            if hasattr(self, 'target_glib'):
                Logs.warn('target_glib on vala tasks is not supported; use --vala-target-glib=MAJOR.MINOR from the vala tool options')
            if getattr(Options.options, 'vala_target_glib', None):
                valatask.target_glib = Options.options.vala_target_glib
            if not 'GOBJECT' in self.uselib:
                self.uselib.append('GOBJECT')

        if hasattr(self, 'threading'):
            if valatask.profile == 'gobject':
                valatask.threading = self.threading
                if not 'GTHREAD' in self.uselib:
                    self.uselib.append('GTHREAD')
            else:
                Logs.warn("Profile %s does not have threading support" % valatask.profile)

        if valatask.is_lib:
            valatask.outputs.append(self.path.find_or_declare('%s.h' % self.target))
            valatask.outputs.append(self.path.find_or_declare('%s.vapi' % self.target))
            if valatask.gir:
                valatask.outputs.append(self.path.find_or_declare('%s.gir' % self.gir))
            if valatask.packages:
                d = self.path.find_or_declare('%s.deps' % self.target)
                valatask.outputs.append(d)
                valatask.deps_node = d

    valatask.inputs.append(node)
    c_node = node.change_ext('.c')
    valatask.outputs.append(c_node)
    self.source.append(c_node)

    if valatask.is_lib:
        headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
        self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env)
        vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
        self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env)
        gir_list = [o for o in valatask.outputs if o.suffix() == ".gir"]
        self.install_gir = self.bld.install_files(valatask.gir_path, gir_list, self.env)
def init_vala_task(self):
    self.profile = getattr(self, 'profile', 'gobject')
    self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
    self.use = Utils.to_list(getattr(self, 'use', []))
    if packages and not self.use:
        self.use = packages[:]

    if self.profile == 'gobject':
        if not 'GOBJECT' in self.use:
            self.use.append('GOBJECT')

    def addflags(flags):
        self.env.append_value('VALAFLAGS', flags)

    if self.profile:
        addflags('--profile=%s' % self.profile)

    valatask = self.valatask

    # output directory
    if hasattr(self, 'vala_dir'):
        if isinstance(self.vala_dir, str):
            valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
            try:
                valatask.vala_dir_node.mkdir()
            except OSError:
                raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
        else:
            valatask.vala_dir_node = self.vala_dir
    else:
        valatask.vala_dir_node = self.path.get_bld()
    addflags('--directory=%s' % valatask.vala_dir_node.abspath())

    if hasattr(self, 'thread'):
        if self.profile == 'gobject':
            if not 'GTHREAD' in self.use:
                self.use.append('GTHREAD')
        else:
            Logs.warn('Profile %s means no threading support', self.profile)
            self.thread = False

        if self.thread:
            addflags('--thread')

    self.is_lib = 'cprogram' not in self.features
    if self.is_lib:
        addflags('--library=%s' % self.target)

        h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
        valatask.outputs.append(h_node)
        addflags('--header=%s' % h_node.name)

        valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))

        if getattr(self, 'gir', None):
            gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
            addflags('--gir=%s' % gir_node.name)
            valatask.outputs.append(gir_node)

    self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
    if self.vala_target_glib:
        addflags('--target-glib=%s' % self.vala_target_glib)

    addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])

    packages_private = Utils.to_list(getattr(self, 'packages_private', []))
    addflags(['--pkg=%s' % x for x in packages_private])

    def _get_api_version():
        api_version = '1.0'
        if hasattr(Context.g_module, 'API_VERSION'):
            version = Context.g_module.API_VERSION.split(".")
            if version[0] == "0":
                api_version = "0." + version[1]
            else:
                api_version = version[0] + ".0"
        return api_version

    self.includes = Utils.to_list(getattr(self, 'includes', []))
    valatask.install_path = getattr(self, 'install_path', '')
    valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
    valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
    valatask.header_path = getattr(self, 'header_path',
                                   '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
    valatask.install_binding = getattr(self, 'install_binding', True)

    self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
    if hasattr(self, 'use'):
        local_packages = Utils.to_list(self.use)[:]
        seen = []
        while len(local_packages) > 0:
            package = local_packages.pop()
            if package in seen:
                continue
            seen.append(package)

            try:
                package_obj = self.bld.get_tgen_by_name(package)
            except Errors.WafError:
                continue

            package_obj.post()
            package_name = package_obj.target
            task = getattr(package_obj, 'valatask', None)
            if task:
                for output in task.outputs:
                    if output.name == package_name + ".vapi":
                        valatask.set_run_after(task)
                        if package_name not in packages:
                            packages.append(package_name)
                        if output.parent not in vapi_dirs:
                            vapi_dirs.append(output.parent)
                        if output.parent not in self.includes:
                            self.includes.append(output.parent)
            if hasattr(package_obj, 'use'):
                lst = self.to_list(package_obj.use)
                lst.reverse()
                local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

    addflags(['--pkg=%s' % p for p in packages])

    for vapi_dir in vapi_dirs:
        if isinstance(vapi_dir, Node.Node):
            v_node = vapi_dir
        else:
            v_node = self.path.find_dir(vapi_dir)
        if not v_node:
            Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
        else:
            addflags('--vapidir=%s' % v_node.abspath())

    self.dump_deps_node = None
    if self.is_lib and self.packages:
        self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
        valatask.outputs.append(self.dump_deps_node)

    if self.is_lib and valatask.install_binding:
        headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
        if headers_list:
            self.install_vheader = self.add_install_files(install_to=valatask.header_path,
                                                          install_from=headers_list)

        vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
        if vapi_list:
            self.install_vapi = self.add_install_files(install_to=valatask.vapi_path,
                                                       install_from=vapi_list)

        gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
        if gir_list:
            self.install_gir = self.add_install_files(
                install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'),
                install_from=gir_list)

    if hasattr(self, 'vala_resources'):
        nodes = self.to_nodes(self.vala_resources)
        valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
        valatask.inputs.extend(nodes)
        for x in nodes:
            addflags(['--gresources', x.abspath()])
def apply_uselib_local(self):
    """
    process the uselib_local attribute
    execute after apply_link because of the execution order set on 'link_task'
    """
    env = self.env
    from waflib.Tools.ccroot import stlink_task

    # 1. the case of the libs defined in the project (visit ancestors first)
    # the ancestors external libraries (uselib) will be prepended
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    self.includes = self.to_list(getattr(self, 'includes', []))
    names = self.to_list(getattr(self, 'uselib_local', []))
    get = self.bld.get_tgen_by_name
    seen = set()
    seen_uselib = set()
    tmp = Utils.deque(names)  # consume a copy of the list of names
    if tmp:
        if Logs.verbose:
            Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
    while tmp:
        lib_name = tmp.popleft()
        # visit dependencies only once
        if lib_name in seen:
            continue

        y = get(lib_name)
        y.post()
        seen.add(lib_name)

        # object has ancestors to process (shared libraries): add them to the end of the list
        if getattr(y, 'uselib_local', None):
            for x in self.to_list(getattr(y, 'uselib_local', [])):
                obj = get(x)
                obj.post()
                if getattr(obj, 'link_task', None):
                    if not isinstance(obj.link_task, stlink_task):
                        tmp.append(x)

        # link task and flags
        if getattr(y, 'link_task', None):
            link_name = y.target[y.target.rfind(os.sep) + 1:]
            if isinstance(y.link_task, stlink_task):
                env.append_value('STLIB', [link_name])
            else:
                # some linkers can link against programs
                env.append_value('LIB', [link_name])

            # the order
            self.link_task.set_run_after(y.link_task)

            # for the recompilation
            self.link_task.dep_nodes += y.link_task.outputs

            # add the link path too
            tmp_path = y.link_task.outputs[0].parent.bldpath()
            if not tmp_path in env['LIBPATH']:
                env.prepend_value('LIBPATH', [tmp_path])

        # add ancestors uselib too - but only propagate those that have no staticlib defined
        for v in self.to_list(getattr(y, 'uselib', [])):
            if v not in seen_uselib:
                seen_uselib.add(v)
                if not env['STLIB_' + v]:
                    if not v in self.uselib:
                        self.uselib.insert(0, v)

        # if the library task generator provides 'export_includes', add to the include path
        # the export_includes must be a list of paths relative to the other library
        if getattr(y, 'export_includes', None):
            self.includes.extend(y.to_incnodes(y.export_includes))
def gather_intel_composer_versions(conf, versions):
    """
    Checks ICL compilers that are part of Intel Composer Suites

    :param versions: list to modify
    :type versions: list
    """
    version_pattern = re.compile(r'^...?.?\...?.?.?')
    try:
        all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,
                                            'SOFTWARE\\Wow6432node\\Intel\\Suites')
    except WindowsError:
        try:
            all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,
                                                'SOFTWARE\\Intel\\Suites')
        except WindowsError:
            return
    index = 0
    while 1:
        try:
            version = Utils.winreg.EnumKey(all_versions, index)
        except WindowsError:
            break
        index = index + 1
        if not version_pattern.match(version):
            continue
        targets = []
        for target, arch in all_icl_platforms:
            try:
                if target == 'intel64':
                    targetDir = 'EM64T_NATIVE'
                else:
                    targetDir = target
                try:
                    defaults = Utils.winreg.OpenKey(all_versions, version + '\\Defaults\\C++\\' + targetDir)
                except WindowsError:
                    if targetDir == 'EM64T_NATIVE':
                        defaults = Utils.winreg.OpenKey(all_versions, version + '\\Defaults\\C++\\EM64T')
                    else:
                        raise WindowsError
                uid, type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
                Utils.winreg.OpenKey(all_versions, version + '\\' + uid + '\\C++\\' + targetDir)
                icl_version = Utils.winreg.OpenKey(all_versions, version + '\\' + uid + '\\C++')
                path, type = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
                batch_file = os.path.join(path, 'bin', 'iclvars.bat')
                if os.path.isfile(batch_file):
                    try:
                        targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file))))
                    except conf.errors.ConfigurationError:
                        pass
                # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
                # http://software.intel.com/en-us/forums/topic/328487
                compilervars_warning_attr = '_compilervars_warning_key'
                if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
                    setattr(conf, compilervars_warning_attr, False)
                    patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
                    compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
                    for vscomntool in ['VS110COMNTOOLS', 'VS100COMNTOOLS']:
                        if vscomntool in os.environ:
                            vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe'
                            dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe'
                            if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch)
                                    and not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
                                Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
                                           '(VSWinExpress.exe) but it does not seem to be installed at %r. '
                                           'The intel command line set up will fail to configure unless the file %r '
                                           'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
            except WindowsError:
                pass
        major = version[0:2]
        versions.append(('intel ' + major, targets))
def post(self):
    """
    Create task objects. The following operations are performed:

    #. The body of this method is called only once and sets the attribute ``posted``
    #. The attribute ``features`` is used to add more methods in ``self.meths``
    #. The methods are sorted by the precedence table ``self.prec`` or :py:attr:`waflib.TaskGen.task_gen.prec`
    #. The methods are then executed in order
    #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
    """
    # we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
    if getattr(self, 'posted', None):
        #error("OBJECT ALREADY POSTED" + str(self))
        return False
    self.posted = True

    keys = set(self.meths)

    # add the methods listed in the features
    self.features = Utils.to_list(self.features)
    for x in self.features + ['*']:
        st = feats[x]
        if not st:
            if not x in Task.classes:
                Logs.warn('feature %r does not exist - bind at least one method to it' % x)
        keys.update(list(st))  # ironpython 2.7 wants the cast to list

    # copy the precedence table
    prec = {}
    prec_tbl = self.prec or task_gen.prec
    for x in prec_tbl:
        if x in keys:
            prec[x] = prec_tbl[x]

    # elements disconnected
    tmp = []
    for a in keys:
        for x in prec.values():
            if a in x:
                break
        else:
            tmp.append(a)

    tmp.sort()

    # topological sort
    out = []
    while tmp:
        e = tmp.pop()
        if e in keys:
            out.append(e)
        try:
            nlst = prec[e]
        except KeyError:
            pass
        else:
            del prec[e]
            for x in nlst:
                for y in prec:
                    if x in prec[y]:
                        break
                else:
                    tmp.append(x)

    if prec:
        raise Errors.WafError('Cycle detected in the method execution %r' % prec)
    out.reverse()
    self.meths = out

    # then we run the methods in order
    Logs.debug('task_gen: posting %s %d' % (self, id(self)))
    for x in out:
        try:
            v = getattr(self, x)
        except AttributeError:
            raise Errors.WafError('%r is not a valid task generator method' % x)
        Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
        v()

    Logs.debug('task_gen: posted %s' % self.name)
    return True
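# A small sketch (hypothetical feature and method names) of what post() consumes:
# methods are bound to features with the waflib.TaskGen decorators, and the
# before/after constraints feed the precedence table that post() sorts.
from waflib.TaskGen import feature, after_method

@feature('myfeat')
@after_method('process_source')
def announce(self):
    # executed by post() after 'process_source' for generators declaring 'myfeat'
    print('posting %s' % self.name)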
def _get_project_overrides(ctx, target):
    if ctx.cmd == 'generate_uber_files' or ctx.cmd == 'msvs' or ctx.cmd == 'configure':
        return ({}, {})

    # Only perform on VS executed builds
    if not getattr(ctx.options, 'execsolution', None):
        return ({}, {})

    if Utils.unversioned_sys_platform() != 'win32':
        return ({}, {})

    if not hasattr(ctx, 'project_options_overrides'):
        ctx.project_options_overrides = {}
    if not hasattr(ctx, 'project_file_options_overrides'):
        ctx.project_file_options_overrides = {}

    try:
        return (ctx.project_options_overrides[target], ctx.project_file_options_overrides[target])
    except KeyError:
        pass

    ctx.project_options_overrides[target] = {}
    ctx.project_file_options_overrides[target] = {}
    project_options_overrides = ctx.project_options_overrides[target]
    project_file_options_overrides = ctx.project_file_options_overrides[target]

    vs_spec = ctx.convert_waf_spec_to_vs_spec(ctx, ctx.options.project_spec)
    vs_platform = ctx.convert_waf_platform_to_vs_platform(ctx.env['PLATFORM'])
    vs_configuration = ctx.convert_waf_configuration_to_vs_configuration(ctx.env['CONFIGURATION'])
    vs_valid_spec_for_build = '[%s] %s|%s' % (vs_spec, vs_configuration, vs_platform)

    vcxproj_file = (ctx.get_project_output_folder().make_node('%s%s' % (target, '.vcxproj'))).abspath()

    # Example: <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='[MyProject] Profile|Win64'">
    project_override = '<ItemDefinitionGroup Condition="\'$(Configuration)|$(Platform)\'==\'%s\'"' % vs_valid_spec_for_build

    # Open file
    try:
        file = open(vcxproj_file)
    except IOError:
        # the only reason for this is trying to read non-existent vcxprojs,
        # usually inapplicable to the current configuration
        return ({}, {})
    except Exception:
        Logs.warn('warning: Unable to parse .vcxproj file to extract configuration overrides. [File:%s] [Exception:%s]' % (vcxproj_file, sys.exc_info()[0]))
        return ({}, {})

    # Iterate line by line
    # (Note: lines are also consumed inside the loop)
    file_iter = iter(file)
    for line in file_iter:
        stripped_line = line.lstrip()

        # [Per Project]
        # Example:
        # <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='[MyProject] Profile|x64'">
        #   <ClCompile>
        #     <WAF_DisableCompilerOptimization>true</WAF_DisableCompilerOptimization>
        #     <WAF_AdditionalPreprocessorDefinitions>/MyProjectDefinition</WAF_AdditionalPreprocessorDefinitions>
        #   </ClCompile>
        # </ItemDefinitionGroup>
        if stripped_line.startswith(project_override) == True:
            # Extract WAF items
            while True:
                next_line = file_iter.next().strip()
                if next_line.startswith('<WAF_'):
                    element_name = get_element_name(next_line)
                    element_value = get_element_value(next_line)
                    # Cache override
                    _set_override(project_options_overrides, element_name, element_value, ctx.env, ctx.path.abspath())
                elif next_line.startswith('</ItemDefinitionGroup>'):
                    break

        # [Per Item]
        # Example:
        # <ItemGroup>
        #   <ClCompile Include="e:\P4\CE_STREAMS\Code\CryEngine\CryAnimation\Attachment.cpp">
        #     <WAF_AdditionalPreprocessorDefinitions Condition="'$(Configuration)|$(Platform)'=='[MyProject] Profile|x64'">/MyItemDefinition</WAF_AdditionalPreprocessorDefinitions>
        #     <WAF_AdditionalCompilerOptions_CXX Condition="'$(Configuration)|$(Platform)'=='[MyProject] Profile|x64'">/MyItemCompilerOption</WAF_AdditionalCompilerOptions_CXX>
        #   </ClCompile>
        # </ItemGroup>
        elif stripped_line.startswith('<ItemGroup>') == True:
            while True:
                next_line = file_iter.next().strip()
                # Check that the element is a "ClCompile" element that has child elements, i.e. not <ClCompile ... />
                if next_line.endswith('/>') == False and next_line.startswith('<ClCompile') == True:
                    item_tasks = []
                    # Is WAF Element
                    while True:
                        next_line_child = file_iter.next().strip()
                        if next_line_child.startswith('<WAF_'):
                            # Condition meets platform specs (optimize if needed)
                            if vs_valid_spec_for_build in next_line_child:
                                # For first valid item, collect tasks
                                if not item_tasks:
                                    # Get include path
                                    pos_include_name_start = next_line.find('"') + 1
                                    pos_include_name_end = next_line.find('"', pos_include_name_start)
                                    vs_file_path = next_line[pos_include_name_start:pos_include_name_end].lower()

                                # Get element info
                                element_name = get_element_name(next_line_child)
                                element_value = get_element_value(next_line_child)

                                # Cache override
                                try:
                                    override_map = project_file_options_overrides[vs_file_path]
                                except KeyError:
                                    project_file_options_overrides[vs_file_path] = {}
                                    override_map = project_file_options_overrides[vs_file_path]
                                _set_override(override_map, element_name, element_value, ctx.env, ctx.path.abspath())
                        # end of ClCompile element
                        elif next_line_child.startswith('</ClCompile>'):
                            break
                # end of "ItemGroup" element
                elif next_line.startswith('</ItemGroup>'):
                    break

    return (project_options_overrides, project_file_options_overrides)
def get_solution_overrides(self):
    if self.cmd == 'generate_uber_files' or self.cmd == 'msvs':
        return {}

    # Only perform on VS executed builds
    try:
        sln_file = self.options.execsolution
    except AttributeError:
        return {}

    if not sln_file:
        return {}

    if Utils.unversioned_sys_platform() != 'win32':
        return {}

    # Open sln file
    try:
        file = open(sln_file)
    except Exception:
        Logs.warn('warning: Unable to parse .sln file to extract configuration overrides: [File:%s] [Exception:%s]' % (sln_file, sys.exc_info()[0]))
        return {}

    ret_vs_project_override = {}
    vs_spec = self.convert_waf_spec_to_vs_spec(self, self.options.project_spec)
    vs_platform = self.convert_waf_platform_to_vs_platform(self.env['PLATFORM'])
    vs_configuration = self.convert_waf_configuration_to_vs_configuration(self.env['CONFIGURATION'])

    vs_build_configuration = '[%s] %s|%s' % (vs_spec, vs_configuration, vs_platform)  # Example: [MyProject] Debug|x64
    vs_project_identifier = 'Project("{8BC9CEB8'  # full project id ... Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}")

    # Iterate over all basic project information
    # Example:
    #   Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Cry3DEngine", "e:\P4\CE_STREAMS\Solutions\.depproj\Cry3DEngine.vcxproj", "{60178AE3-57FD-488C-9A53-4AE4F66419AA}"
    project_guid_to_name = {}
    file_iter = iter(file)
    for line in file_iter:
        stripped_line = line.lstrip()
        if stripped_line.startswith(vs_project_identifier):
            project_info = stripped_line[51:].split(',')  # skip the first 51 characters, i.e. 'Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") ='
            project_name = project_info[0].strip()[1:-1]  # trim whitespace and remove the surrounding '"'
            project_path = project_info[1].strip()[1:-1]
            project_guid = project_info[2].strip()[2:-2]

            # Store project GUID and name pair
            project_guid_to_name[project_guid] = project_name
        elif stripped_line.startswith('Global'):
            next(file_iter)
            break
        else:
            continue

    # Skip to the beginning of the project configurations information
    for line in file_iter:
        if line.lstrip().startswith('GlobalSection(ProjectConfigurationPlatforms) = postSolution'):
            next(file_iter)
            break

    # Loop over all projects
    # Example:
    #   {60178AE3-57FD-488C-9A53-4AE4F66419AA}.[MyProject] Debug|x64.ActiveCfg = [Game and Tools] Debug|x64
    # or
    #   {60178AE3-57FD-488C-9A53-4AE4F66419AA}.[MyProject] Debug|x64.Build.0 = [Game and Tools] Debug|x64
    for line in file_iter:
        stripped_line = line.strip()

        # Reached the end of the section
        if stripped_line.startswith('EndGlobalSection'):
            break

        project_build_info = stripped_line.split('.')
        if (len(project_build_info) == 3) and (project_build_info[1] == vs_build_configuration):
            starts_of_override_configuration = project_build_info[-1].find('[')
            project_build_info[-1] = project_build_info[-1][starts_of_override_configuration:]  # remove anything prior to [xxx], e.g. "ActiveCfg = "

            vs_project_configuration = project_build_info[1]
            vs_project_override_configuration = project_build_info[-1]

            # Check for the no-override condition
            if vs_project_configuration == vs_project_override_configuration:
                continue

            project_guid = project_build_info[0][1:-1]  # remove the surrounding '{' and '}'
            project_name = project_guid_to_name[project_guid]

            # Check that the spec is the same
            vs_override_spec_end = vs_project_override_configuration.find(']')
            vs_override_spec = vs_project_override_configuration[1:vs_override_spec_end]
            if vs_spec != vs_override_spec:
                self.cry_error('Project "%s" : Invalid override spec is of type "%s" when it should be "%s"' % (project_name, vs_override_spec, vs_spec))

            # Get the WAF configuration from the VS project configuration, e.g. [MyProject] Debug|x64 -> debug
            vs_project_configuration_end = vs_project_override_configuration.rfind('|')
            vs_project_configuration_start = vs_project_override_configuration.rfind(']', 0, vs_project_configuration_end) + 2
            vs_project_configuration = vs_project_override_configuration[vs_project_configuration_start:vs_project_configuration_end]
            waf_configuration = self.convert_vs_configuration_to_waf_configuration(vs_project_configuration)

            # Store override
            ret_vs_project_override[project_name] = waf_configuration
            Logs.info("MSVS: User has selected %s for %s in Visual Studio. Overriding for this build." % (waf_configuration, project_name))

    return ret_vs_project_override
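# Quick illustration of the .sln slicing above on a sample configuration line
# (the GUID and project names are made up):
sample = '{60178AE3-57FD-488C-9A53-4AE4F66419AA}.[MyProject] Debug|x64.ActiveCfg = [Game and Tools] Debug|x64'
guid_part, config_part, rest = sample.split('.')
override = rest[rest.find('['):]                                 # '[Game and Tools] Debug|x64'
spec = override[1:override.find(']')]                            # 'Game and Tools'
waf_cfg = override[override.rfind(']') + 2:override.rfind('|')]  # 'Debug'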
def build_google_main_maven_repo_index():
    '''
    Connects to Google's main Maven repository and creates a local map of all libs currently hosted there
    '''
    global GOOGLE_MAIN_MAVEN_REPO_INDEX
    if GOOGLE_MAIN_MAVEN_REPO_INDEX:
        return

    global PROTOCOL
    master_root_url = '{}://{}'.format(PROTOCOL, GOOGLE_MAIN_MAVEN_REPO)
    master_index_url = '/'.join([master_root_url, 'master-index.xml'])

    master_repo = attempt_to_open_url(master_index_url)
    if not master_repo:
        Logs.error('[ERROR] Failed to connect to {}. Unable to access Google\'s main Maven repository at this time.'.format(master_index_url))
        return

    data = master_repo.read()
    if not data:
        Logs.error('[ERROR] Failed to retrieve data from {}. Unable to access Google\'s main Maven repository at this time.'.format(master_index_url))
        return

    master_index = ET.fromstring(data)
    if not is_xml_elem_valid(master_index):
        Logs.error('[ERROR] Data retrieved from {} is malformed. Unable to access Google\'s main Maven repository at this time.'.format(master_index_url))
        return

    for group in master_index:
        Logs.debug('android_library: Adding group %s to the main maven repo index', group.tag)

        group_index_url = '/'.join([master_root_url] + group.tag.split('.') + ['group-index.xml'])

        group_index = attempt_to_open_url(group_index_url)
        if not group_index:
            Logs.warn('[WARN] Failed to connect to {}. Access to Google\'s main Maven repository may be incomplete.'.format(group_index_url))
            continue

        data = group_index.read()
        if not data:
            Logs.warn('[WARN] Failed to retrieve data from {}. Access to Google\'s main Maven repository may be incomplete.'.format(group_index_url))
            continue

        group_libraries = {}

        group_index = ET.fromstring(data)
        for lib in group_index:
            versions = lib.attrib.get('versions', None)
            if not versions:
                Logs.warn('[WARN] No versions found for library {} in group {}. Skipping.'.format(lib.tag, group.tag))
                continue

            Logs.debug('android_library: -> Adding library %s with version(s) %s', lib.tag, versions)
            group_libraries[lib.tag] = versions.split(',')

        GOOGLE_MAIN_MAVEN_REPO_INDEX[group.tag] = group_libraries
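# attempt_to_open_url is called above but defined elsewhere. A minimal sketch
# of the assumed contract (a file-like object on success, None on failure):
import urllib2

def attempt_to_open_url(url, timeout=15):
    try:
        return urllib2.urlopen(url, timeout=timeout)
    except Exception as e:
        Logs.debug('android_library: failed to open %s: %s', url, e)
        return None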
def waf_entry_point(current_directory, version, wafdir):
    Logs.init_log()

    if Context.WAFVERSION != version:
        Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
        sys.exit(1)

    if '--version' in sys.argv:
        Context.run_dir = current_directory
        ctx = Context.create_context('options')
        ctx.curdir = current_directory
        ctx.parse_args()
        sys.exit(0)

    if len(sys.argv) > 1:
        potential_wscript = os.path.join(current_directory, sys.argv[1])
        if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
            current_directory = os.path.normpath(os.path.dirname(potential_wscript))
            sys.argv.pop(1)

    Context.waf_dir = wafdir
    Context.launch_dir = current_directory

    no_climb = os.environ.get('NOCLIMB', None)
    if not no_climb:
        for k in no_climb_commands:
            for y in sys.argv:
                if y.startswith(k):
                    no_climb = True
                    break

    cur = current_directory
    while cur:
        lst = os.listdir(cur)
        if Options.lockfile in lst:
            env = ConfigSet.ConfigSet()
            try:
                env.load(os.path.join(cur, Options.lockfile))
                ino = os.stat(cur)[stat.ST_INO]
            except Exception:
                pass
            else:
                # check that the folder was not moved (compare inodes where symlinks may exist)
                for x in (env.run_dir, env.top_dir, env.out_dir):
                    if Utils.is_win32:
                        if cur == x:
                            load = True
                            break
                    else:
                        try:
                            ino2 = os.stat(x)[stat.ST_INO]
                        except OSError:
                            pass
                        else:
                            if ino == ino2:
                                load = True
                                break
                else:
                    Logs.warn('invalid lock file in %s' % cur)
                    load = False

                if load:
                    Context.run_dir = env.run_dir
                    Context.top_dir = env.top_dir
                    Context.out_dir = env.out_dir
                    break

        if not Context.run_dir:
            if Context.WSCRIPT_FILE in lst:
                Context.run_dir = cur

        next = os.path.dirname(cur)
        if next == cur:
            break
        cur = next

        if no_climb:
            break

    if not Context.run_dir:
        if '-h' in sys.argv or '--help' in sys.argv:
            Logs.warn('No wscript file found: the help message may be incomplete')
            Context.run_dir = current_directory
            ctx = Context.create_context('options')
            ctx.curdir = current_directory
            ctx.parse_args()
            sys.exit(0)
        Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
        sys.exit(1)

    try:
        os.chdir(Context.run_dir)
    except OSError:
        Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
        sys.exit(1)

    try:
        set_main_module(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
    except Errors.WafError as e:
        Logs.pprint('RED', e.verbose_msg)
        Logs.error(str(e))
        sys.exit(1)
def execute(self):
    """
    Wraps :py:func:`waflib.Context.Context.execute` on the context class
    """
    if not Configure.autoconfig:
        return execute_method(self)

    # Disable autoconfig so waf's version doesn't run (and we don't end up in a loop of bad configure)
    Configure.autoconfig = False

    if self.variant == '':
        raise Errors.WafError('The project is badly configured: run "waf configure" again!')

    env = ConfigSet.ConfigSet()
    do_config = False

    try:
        p = os.path.join(Context.out_dir, Build.CACHE_DIR, self.variant + Build.CACHE_SUFFIX)
        env.load(p)
    except EnvironmentError:
        raise Errors.WafError(
            'The project is not configured for board {0}: run "waf configure --board {0} [...]" first!'
            .format(self.variant))

    lock_env = ConfigSet.ConfigSet()

    try:
        lock_env.load(os.path.join(Context.top_dir, Options.lockfile))
    except EnvironmentError:
        Logs.warn('Configuring the project')
        do_config = True
    else:
        if lock_env.run_dir != Context.run_dir:
            do_config = True
        else:
            h = 0
            for f in env.CONFIGURE_FILES:
                try:
                    h = Utils.h_list((h, Utils.readf(f, 'rb')))
                except EnvironmentError:
                    do_config = True
                    break
            else:
                do_config = h != env.CONFIGURE_HASH

    if do_config:
        cmd = lock_env.config_cmd or 'configure'
        tmp = Options.options.__dict__
        if env.OPTIONS and sorted(env.OPTIONS.keys()) == sorted(tmp.keys()):
            Options.options.__dict__ = env.OPTIONS
        else:
            raise Errors.WafError('The project configure options have changed: run "waf configure" again!')
        try:
            run_command(cmd)
        finally:
            Options.options.__dict__ = tmp

        run_command(self.cmd)
    else:
        return execute_method(self)
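# Illustration of the change-detection scheme above: configure is assumed to
# have stored env.CONFIGURE_FILES and env.CONFIGURE_HASH; the wrapper re-hashes
# the same files on each build (the file names here are hypothetical):
h = 0
for f in ['wscript', 'Tools/ardupilotwaf/boards.py']:
    h = Utils.h_list((h, Utils.readf(f, 'rb')))
# a mismatch with env.CONFIGURE_HASH means a wscript changed, so 'configure' is re-run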
def search_maven_repos(ctx, name, group, version):
    '''
    Searches all known maven repositories (local, main and bintray) for the exact library or closest match based on the inputs
    '''
    group_path = group.replace('.', '/')

    partial_version = False
    if version and '+' in version:
        Logs.warn('[WARN] It is not recommended to use "+" in version numbers. '
                  'This will lead to unpredictable results due to the version silently changing. '
                  'Found while processing {}'.format(name))
        partial_version = True

    def _filter_versions(versions_list):
        if partial_version:
            base_version = version.split('+')[0]
            valid_versions = [ver for ver in versions_list if ver.startswith(base_version)]

            # try to eliminate the alpha, beta and rc versions
            stable_versions = []
            for ver in valid_versions:
                if ('alpha' in ver) or ('beta' in ver) or ('rc' in ver):
                    continue
                stable_versions.append(ver)

            if stable_versions:
                return sorted(stable_versions)
            else:
                return sorted(valid_versions)
        # no version restriction: any version in the list is valid
        return sorted(versions_list)

    # make sure the 3rd Party path is in the env
    if 'THIRD_PARTY' not in ctx.env:
        ctx.env['THIRD_PARTY'] = ctx.tp.calculate_3rd_party_root()

    # first search the local repos from the Android SDK installation
    for local_repo in ANDROID_SDK_LOCAL_REPOS:
        repo_root = string.Template(local_repo).substitute(ctx.env)
        lib_root = os.path.join(repo_root, group_path, name)
        Logs.debug('android_library: Searching %s', lib_root)

        if not os.path.exists(lib_root):
            continue

        if not version or partial_version:
            # filter out all the non-directory, non-numerical entries
            installed_versions = []
            contents = os.listdir(lib_root)
            for entry in contents:
                path = os.path.join(lib_root, entry)
                if os.path.isdir(path) and entry.split('.')[0].isdigit():
                    installed_versions.append(entry)

            valid_versions = _filter_versions(installed_versions)
            if valid_versions:
                Logs.debug('android_library: Valid installed versions of {} found: {}'.format(name, valid_versions))

                highest_useable_version = valid_versions[-1]

                aar_file = '{}-{}.aar'.format(name, highest_useable_version)
                file_path = os.path.join(lib_root, highest_useable_version, aar_file)
                file_url = 'file:{}'.format(file_path)

                if os.path.exists(file_path):
                    return file_url, aar_file
        else:
            aar_file = '{}-{}.aar'.format(name, version)
            file_path = os.path.join(lib_root, version, aar_file)
            file_url = 'file:{}'.format(file_path)

            if os.path.exists(file_path):
                return file_url, aar_file

    # if it's not local, try the main google maven repo
    Logs.debug('android_library: Searching %s', GOOGLE_MAIN_MAVEN_REPO)
    build_google_main_maven_repo_index()

    global PROTOCOL
    main_repo_root = '{}://{}'.format(PROTOCOL, GOOGLE_MAIN_MAVEN_REPO)

    if group in GOOGLE_MAIN_MAVEN_REPO_INDEX:
        repo_libs = GOOGLE_MAIN_MAVEN_REPO_INDEX[group]

        if name in repo_libs:
            repo_versions = repo_libs[name]

            if not version or partial_version:
                valid_versions = _filter_versions(repo_versions)
                Logs.debug('android_library: Valid repo versions of {} found: {}'.format(name, valid_versions))

                highest_useable_version = valid_versions[-1]

                aar_file = '{}-{}.aar'.format(name, highest_useable_version)
                file_url = '/'.join([main_repo_root, group_path, name, highest_useable_version, aar_file])
                return file_url, aar_file

            elif version in repo_versions:
                aar_file = '{}-{}.aar'.format(name, version)
                file_url = '/'.join([main_repo_root, group_path, name, version, aar_file])
                return file_url, aar_file

    # finally check the other known google maven repos
    for repo in GOOGLE_BINTRAY_MAVEN_REOPS:
        Logs.debug('android_library: Searching %s', repo)

        repo_root = '{}://{}'.format(PROTOCOL, repo)
        lib_root = '/'.join([repo_root, group_path, name])

        latest_version, all_versions = get_bintray_version_info(lib_root)
        if not (latest_version and all_versions):
            continue

        if not version:
            aar_file = '{}-{}.aar'.format(name, latest_version)
            file_url = '/'.join([lib_root, latest_version, aar_file])
            return file_url, aar_file

        elif partial_version:
            valid_versions = _filter_versions(all_versions)
            Logs.debug('android_library: Valid repo versions of {} found: {}'.format(name, valid_versions))

            highest_useable_version = valid_versions[-1]

            aar_file = '{}-{}.aar'.format(name, highest_useable_version)
            file_url = '/'.join([lib_root, highest_useable_version, aar_file])
            return file_url, aar_file

        elif version in all_versions:
            aar_file = '{}-{}.aar'.format(name, version)
            file_url = '/'.join([lib_root, version, aar_file])
            return file_url, aar_file

    return None, None
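# Expected call pattern (the library coordinates are just an example); the
# result is a (url, file_name) pair, or (None, None) when nothing matched:
file_url, aar_file = search_maven_repos(ctx, 'appcompat-v7', 'com.android.support', '28.0.0')
if file_url:
    Logs.debug('android_library: resolved %s -> %s', aar_file, file_url)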
def process(self):
    """
    Process the current directory for gems

    Note that this has to check each game project to know which gems are enabled,
    and build a list of all enabled gems so that those are built.

    To debug gems output during build, use --zones=gems in your command line
    """
    this_path = self.ctx.path

    append_to_unique_list(self.search_paths, os.path.normpath(this_path.abspath()))

    # Parse the Gems search path
    config = RawConfigParser()
    if config.read(this_path.make_node('SetupAssistantUserPreferences.ini').abspath()):
        if config.has_section(GEMS_FOLDER) and config.has_option(GEMS_FOLDER, 'SearchPaths\\size'):
            # Parse a QSettings-style array (i.e. read the 'size' attribute, then 1-based-idx\Path)
            array_len = config.getint(GEMS_FOLDER, 'SearchPaths\\size')
            for i in range(0, array_len):
                new_path = config.get(GEMS_FOLDER, 'SearchPaths\\{}\\Path'.format(i + 1))
                new_path = os.path.normpath(new_path)
                Logs.debug('gems: Adding search path {}'.format(new_path))
                append_to_unique_list(self.search_paths, os.path.normpath(new_path))

    if not self.ctx.is_engine_local():
        append_to_unique_list(self.search_paths, os.path.realpath(self.ctx.engine_path))

    # Load all the gems under the Gems folder to search for required gems
    self.required_gems = self.ctx.load_required_gems()

    game_projects = self.ctx.get_enabled_game_project_list()

    for game_project in game_projects:
        Logs.debug('gems: Game Project: %s' % game_project)

        gems_list_file = self.ctx.get_project_node(game_project).make_node(GEMS_LIST_FILE)

        if not os.path.isfile(gems_list_file.abspath()):
            if self.ctx.is_option_true('gems_optional'):
                Logs.debug("gems: Game has no gems file, skipping [%s]" % gems_list_file)
                continue  # go to the next game
            else:
                self.ctx.cry_error('Project {} is missing {} file.'.format(game_project, GEMS_LIST_FILE))

        Logs.debug('gems: reading gems file at %s' % gems_list_file)

        gem_info_list = self.ctx.parse_json_file(gems_list_file)
        list_reader = _create_field_reader(self.ctx, gem_info_list, 'Gems list for project ' + game_project)

        # Verify that the project file is an up-to-date format
        gem_format_version = list_reader.field_int('GemListFormatVersion')
        if gem_format_version != GEMS_FORMAT_VERSION:
            self.ctx.cry_error(
                'Gems list file at {} is of version {}, not expected version {}. Please update your project file.'
                .format(gems_list_file, gem_format_version, GEMS_FORMAT_VERSION))

        for idx, gem_info_obj in enumerate(list_reader.field_req('Gems')):
            # String for error reporting.
            reader = _create_field_reader(self.ctx, gem_info_obj, 'Gem {} in game project {}'.format(idx, game_project))

            gem_id = reader.uuid()
            version = reader.version()
            path = os.path.normpath(reader.field_req('Path'))

            gem = self.get_gem_by_spec(gem_id, version, path)
            if not gem:
                Logs.debug('gems: Gem not found in cache, attempting to load from disk: ({}, {}, {})'.format(gem_id, version, path))

                detected_gem_versions = {}

                for search_path in self.search_paths:
                    def_file = os.path.join(search_path, path, GEMS_DEFINITION_FILE)
                    if not os.path.isfile(def_file):
                        continue  # Try again with the next path

                    gem = Gem(self.ctx)
                    gem.path = path
                    gem.abspath = os.path.join(search_path, path)
                    gem.load_from_json(self.ctx.parse_json_file(self.ctx.root.make_node(def_file)))

                    # Protect against loading duplicate gems from different locations, showing a warning if detected
                    dup_gem = detected_gem_versions.get(str(gem.version), None)
                    if dup_gem is not None:
                        Logs.warn('[WARN] Duplicate gem {} (version {}) found in multiple paths. Accepting the one at {}'
                                  .format(gem.name, gem.version, dup_gem.abspath))
                        gem = dup_gem
                        break
                    detected_gem_versions[str(gem.version)] = gem

                    # Validate that the Gem loaded from the path specified actually matches the id and version.
                    if gem.id != gem_id:
                        self.ctx.cry_error(
                            "Gem at path {} has ID {}, instead of ID {} specified in {}'s {}."
                            .format(path, gem.id, gem_id, game_project, GEMS_LIST_FILE))

                    if gem.version != version:
                        self.ctx.cry_error(
                            "Gem at path {} has version {}, instead of version {} specified in {}'s {}."
                            .format(path, gem.version, version, game_project, GEMS_LIST_FILE))

                    self.add_gem(gem)

                if not gem:
                    self.ctx.cry_error('Failed to load from path "{}"'.format(path))

            gem.games_enabled_in.append(game_project)

    for gem in self.gems:
        Logs.debug("gems: gem %s is used by games: %s" % (gem.name, gem.games_enabled_in))

    # Always add required gems to the gems manager
    for required_gem in self.required_gems:
        self.add_gem(required_gem)
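# The SetupAssistantUserPreferences.ini block read above is assumed to be a
# QSettings-style array, along these lines (sample paths are made up):
#
#   [Gems]
#   SearchPaths\size=2
#   SearchPaths\1\Path=C:/lyengine/Gems
#   SearchPaths\2\Path=D:/MyGems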
def execute(self):
    """
    See :py:func:`waflib.Context.Context.execute`
    """
    self.init_dirs()

    Logs.info("[WAF] Executing 'configure'")

    self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
    self.cachedir.mkdir()

    path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
    self.logger = Logs.make_logger(path, 'cfg')

    app = getattr(Context.g_module, 'APPNAME', '')
    if app:
        ver = getattr(Context.g_module, 'VERSION', '')
        if ver:
            app = "%s (%s)" % (app, ver)

    now = time.ctime()
    pyver = sys.hexversion
    systype = sys.platform
    args = " ".join(sys.argv)
    wafver = Context.WAFVERSION
    abi = Context.ABI
    self.to_log(conf_template % vars())

    if id(self.srcnode) == id(self.bldnode):
        Logs.warn('Setting top == out (remember to use "update_outputs")')
    elif id(self.path) != id(self.srcnode):
        if self.srcnode.is_child_of(self.path):
            Logs.warn('Are you certain that you do not want to set top="." ?')

    super(ConfigurationContext, self).execute()

    self.store()

    Context.top_dir = self.srcnode.abspath()
    Context.out_dir = self.bldnode.abspath()

    # import waf branch spec
    branch_spec_globals = Context.load_branch_spec(Context.top_dir)

    Context.lock_dir = Context.run_dir + os.sep + branch_spec_globals['BINTEMP_FOLDER']

    # this will write a configure lock so that subsequent builds will
    # consider the current path as the root directory (see prepare_impl).
    # to remove: use 'waf distclean'
    env = ConfigSet.ConfigSet()
    env['argv'] = sys.argv
    env['options'] = Options.options.__dict__

    env.run_dir = Context.run_dir
    env.top_dir = Context.top_dir
    env.out_dir = Context.out_dir
    env.lock_dir = Context.lock_dir

    # Add lmbr_waf.bat or lmbr_waf for dependency tracking
    ###############################################################################
    waf_command = os.path.basename(sys.executable)
    if waf_command.lower() in ('python', 'python.exe'):
        waf_executable = self.engine_node.make_node('./Tools/build/waf-1.7.13/lmbr_waf')
    else:
        waf_executable = self.path.make_node(waf_command)

    self.hash = hash((self.hash, waf_executable.read('rb')))
    self.files.append(os.path.normpath(waf_executable.abspath()))

    # conf.hash & conf.files hold wscript files paths and hash
    # (used only by Configure.autoconfig)
    env['hash'] = self.hash
    env['files'] = self.files
    env['environ'] = dict(self.environ)
    env.store(Context.lock_dir + os.sep + Options.lockfile)
def post(self):
    """
    Creates tasks for this task generator. The following operations are performed:

    #. The body of this method is called only once and sets the attribute ``posted``
    #. The attribute ``features`` is used to add more methods in ``self.meths``
    #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
    #. The methods are then executed in order
    #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
    """
    if getattr(self, 'posted', None):
        return False
    self.posted = True

    keys = set(self.meths)
    keys.update(feats['*'])

    # add the methods listed in the features
    self.features = Utils.to_list(self.features)
    for x in self.features:
        st = feats[x]
        if st:
            keys.update(st)
        elif x not in Task.classes:
            Logs.warn('feature %r does not exist - bind at least one method to it?', x)

    # copy the precedence table
    prec = {}
    prec_tbl = self.prec
    for x in prec_tbl:
        if x in keys:
            prec[x] = prec_tbl[x]

    # elements disconnected
    tmp = []
    for a in keys:
        for x in list(prec.values()):
            if a in x:
                break
        else:
            tmp.append(a)

    tmp.sort(reverse=True)

    # topological sort
    out = []
    while tmp:
        e = tmp.pop()
        if e in keys:
            out.append(e)
        try:
            nlst = prec[e]
        except KeyError:
            pass
        else:
            del prec[e]
            for x in nlst:
                for y in prec:
                    if x in prec[y]:
                        break
                else:
                    tmp.append(x)
            tmp.sort(reverse=True)

    if prec:
        buf = ['Cycle detected in the method execution:']
        for k, v in list(prec.items()):
            buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
        raise Errors.WafError('\n'.join(buf))

    self.meths = out

    # then we run the methods in order
    Logs.debug('task_gen: posting %s %d', self, id(self))
    for x in out:
        try:
            v = getattr(self, x)
        except AttributeError:
            raise Errors.WafError('%r is not a valid task generator method' % x)
        Logs.debug('task_gen: -> %s (%d)', x, id(self))
        v()

    Logs.debug('task_gen: posted %s', self.name)
    return True
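# Standalone sketch of the precedence-driven ordering used by post(), on a toy
# table mapping each method to the methods that must run after it:
def toy_topo_sort(keys, prec_tbl):
    prec = dict((k, list(v)) for k, v in prec_tbl.items() if k in keys)
    # seeds: methods that are not required to run after anything
    tmp = [a for a in keys if not any(a in v for v in prec.values())]
    tmp.sort(reverse=True)
    out = []
    while tmp:
        e = tmp.pop()
        if e in keys:
            out.append(e)
        nlst = prec.pop(e, None)
        if nlst:
            for x in nlst:
                if not any(x in v for v in prec.values()):
                    tmp.append(x)
            tmp.sort(reverse=True)
    return out

# toy_topo_sort({'a', 'b', 'c'}, {'a': ['b'], 'b': ['c']}) -> ['a', 'b', 'c']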
def always_run(cls):
    Logs.warn('This decorator is deprecated, set always_run on the task class instead!')
    cls.always_run = True
    return cls
def name_to_obj(self, s, env=None):
    if Logs.verbose:
        Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
    return self.get_tgen_by_name(s)
def validate_cfg(self, kw):
    """
    Searches for the program *pkg-config* if missing, and validates the
    parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`.

    :param path: the **-config program to use** (default is *pkg-config*)
    :type path: list of string
    :param msg: message to display to describe the test executed
    :type msg: string
    :param okmsg: message to display when the test is successful
    :type okmsg: string
    :param errmsg: message to display in case of error
    :type errmsg: string
    """
    if 'path' not in kw:
        if not self.env.PKGCONFIG:
            self.find_program('pkg-config', var='PKGCONFIG')
        kw['path'] = self.env.PKGCONFIG

    # pkg-config version
    if 'atleast_pkgconfig_version' in kw:
        if 'msg' not in kw:
            kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
        return

    if 'okmsg' not in kw:
        kw['okmsg'] = 'yes'
    if 'errmsg' not in kw:
        kw['errmsg'] = 'not found'

    if 'modversion' in kw:
        if 'msg' not in kw:
            kw['msg'] = 'Checking for %r version' % kw['modversion']
        if 'uselib_store' not in kw:
            kw['uselib_store'] = kw['modversion']
        if 'define_name' not in kw:
            kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
        return

    if 'package' not in kw:
        raise ValueError('a package name is required')

    if 'uselib_store' not in kw:
        kw['uselib_store'] = kw['package'].upper()
    if 'define_name' not in kw:
        kw['define_name'] = self.have_define(kw['uselib_store'])
    if 'msg' not in kw:
        kw['msg'] = 'Checking for %r' % (kw['package'] or kw['path'])

    for x in cfg_ver:
        # Gotcha: only one predicate is allowed at a time
        # TODO remove in waf 2.0
        y = x.replace('-', '_')
        if y in kw:
            package = kw['package']
            if Logs.verbose:
                Logs.warn('Passing %r to conf.check_cfg() is obsolete, pass parameters directly, eg:', y)
                Logs.warn(" conf.check_cfg(package='%s', args=['--libs', '--cflags', '%s >= 1.6'])", package, package)
            if 'msg' not in kw:
                kw['msg'] = 'Checking for %r %s %s' % (package, cfg_ver[x], kw[y])
            break
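# Typical use from a wscript; conf.check_cfg() funnels its keyword arguments
# through validate_cfg() above before running pkg-config:
def configure(conf):
    conf.load('compiler_c')
    conf.check_cfg(package='gtk+-3.0', args=['--cflags', '--libs'], uselib_store='GTK3')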
def waf_version(*k, **kw):
    Logs.warn('wrong version (waf_version was removed in waf 1.6)')
def set_incdirs(self, val):
    Logs.warn('compat: change "export_incdirs" by "export_includes"')
    self.export_includes = val
def clean_output_targets(self):
    to_delete = []

    for base_output_folder_node in self.get_output_folders(self.env['PLATFORM'], self.env['CONFIGURATION']):

        # Go through the task generators
        for tgen in self.get_all_task_gen():

            is_msvc = tgen.env['CXX_NAME'] == 'msvc'

            # collect only shlibs and programs
            if not hasattr(tgen, '_type') or not hasattr(tgen, 'env'):
                continue

            # determine the proper target extension pattern
            if tgen._type == 'shlib' and tgen.env['cxxshlib_PATTERN'] != '':
                target_ext_PATTERN = tgen.env['cxxshlib_PATTERN']
            elif tgen._type == 'program' and tgen.env['cxxprogram_PATTERN'] != '':
                target_ext_PATTERN = tgen.env['cxxprogram_PATTERN']
            else:
                continue

            target_output_folder_nodes = []

            # Determine if there is a sub folder
            if hasattr(tgen, 'output_sub_folder'):
                target_output_folder_nodes.append(base_output_folder_node.make_node(tgen.output_sub_folder))
            else:
                target_output_folder_nodes.append(base_output_folder_node)

            if hasattr(tgen, 'output_folder'):
                target_output_folder_nodes.append(tgen.bld.root.make_node(tgen.output_folder))

            # Determine if there are copy sub folders
            target_output_copy_folder_items = []
            target_output_copy_folder_attr = getattr(tgen, 'output_sub_folder_copy', None)
            if isinstance(target_output_copy_folder_attr, str):
                target_output_copy_folder_items.append(base_output_folder_node.make_node(target_output_copy_folder_attr))
            elif isinstance(target_output_copy_folder_attr, list):
                for target_output_copy_folder_attr_item in target_output_copy_folder_attr:
                    if isinstance(target_output_copy_folder_attr_item, str):
                        target_output_copy_folder_items.append(base_output_folder_node.make_node(target_output_copy_folder_attr_item))

            for target_output_folder_node in target_output_folder_nodes:

                target_name = getattr(tgen, 'output_file_name', tgen.get_name())
                delete_target = target_output_folder_node.make_node(target_ext_PATTERN % str(target_name))
                to_delete.append(delete_target)
                for target_output_copy_folder_item in target_output_copy_folder_items:
                    delete_target_copy = target_output_copy_folder_item.make_node(target_ext_PATTERN % str(target_name))
                    to_delete.append(delete_target_copy)

                # If this is an MSVC build, add pdb cleaning just in case
                if is_msvc:
                    delete_target_pdb = target_output_folder_node.make_node('%s.pdb' % str(target_name))
                    to_delete.append(delete_target_pdb)
                    for target_output_copy_folder_item in target_output_copy_folder_items:
                        delete_target_copy = target_output_copy_folder_item.make_node('%s.pdb' % str(target_name))
                        to_delete.append(delete_target_copy)

        # Go through GEMS and add possible gems components
        gems_output_names = set()
        if self.options.project_spec in self.loaded_specs_dict:
            spec_dict = self.loaded_specs_dict[self.options.project_spec]
            if 'projects' in spec_dict:
                for project in spec_dict['projects']:
                    for gem in self.get_game_gems(project):
                        gems_output_names.update(module.file_name for module in gem.modules)

        gems_target_ext_PATTERN = self.env['cxxshlib_PATTERN']

        for gems_output_name in gems_output_names:
            gems_delete_target = base_output_folder_node.make_node(gems_target_ext_PATTERN % gems_output_name)
            to_delete.append(gems_delete_target)
            # note: is_msvc here retains the value from the last task generator visited above
            if is_msvc:
                gems_delete_target_pdb = base_output_folder_node.make_node('%s.pdb' % str(gems_output_name))
                to_delete.append(gems_delete_target_pdb)

    for file in to_delete:
        if os.path.exists(file.abspath()):
            try:
                if self.options.verbose >= 1:
                    Logs.info('Deleting {0}'.format(file.abspath()))
                file.delete()
            except Exception:
                Logs.warn("Unable to delete {0}".format(file.abspath()))
def px4_firmware(self):
    global _cp_px4io, _firmware_semaphorish_tasks, _upload_task
    version = self.env.get_flat('PX4_VERSION')

    px4 = self.bld.cmake('px4')
    px4.vars['APM_PROGRAM_LIB'] = self.link_task.outputs[0].abspath()

    if self.env.PX4_USE_PX4IO and not _cp_px4io:
        px4io_task = self.create_cmake_build_task('px4', 'fw_io')
        if version == '3':
            px4io_version = '2'
        else:
            px4io_version = version

        px4io = px4io_task.cmake.bldnode.make_node(
            'src/modules/px4iofirmware/px4io-v%s.bin' % px4io_version,
        )
        px4io_elf = px4.bldnode.make_node(
            'src/modules/px4iofirmware/px4io-v%s' % px4io_version,
        )
        px4io_task.set_outputs([px4io, px4io_elf])

        romfs = self.bld.bldnode.make_node(self.env.PX4_ROMFS_BLD)
        romfs_px4io = romfs.make_node('px4io/px4io.bin')
        romfs_px4io.parent.mkdir()
        _cp_px4io = self.create_task('px4_copy', px4io, romfs_px4io)
        _cp_px4io.keyword = lambda: 'PX4: Copying PX4IO to ROMFS'

        px4io_elf_dest = self.bld.bldnode.make_node(self.env.PX4IO_ELF_DEST)
        cp_px4io_elf = self.create_task('px4_copy', px4io_elf, px4io_elf_dest)

    fw_task = self.create_cmake_build_task(
        'px4',
        'build_firmware_px4fmu-v%s' % version,
    )
    fw_task.set_run_after(self.link_task)

    # we need to synchronize in order to avoid the output expected by the
    # previous ap_program being overwritten before used
    for t in _firmware_semaphorish_tasks:
        fw_task.set_run_after(t)
    _firmware_semaphorish_tasks = []

    if self.env.PX4_USE_PX4IO and _cp_px4io.generator is self:
        fw_task.set_run_after(_cp_px4io)

    firmware = px4.bldnode.make_node(
        'src/firmware/nuttx/nuttx-px4fmu-v%s-apm.px4' % version,
    )
    fw_elf = px4.bldnode.make_node('src/firmware/nuttx/firmware_nuttx')
    _update_firmware_sig(fw_task, firmware, fw_elf)

    fw_dest = self.bld.bldnode.make_node(
        os.path.join(self.program_dir, '%s.px4' % self.program_name),
    )
    git_hashes = self.create_task('px4_add_git_hashes', firmware, fw_dest)
    git_hashes.set_run_after(fw_task)
    _firmware_semaphorish_tasks.append(git_hashes)

    fw_elf_dest = self.bld.bldnode.make_node(
        os.path.join(self.program_dir, self.program_name),
    )
    cp_elf = self.create_task('px4_copy', fw_elf, fw_elf_dest)
    cp_elf.set_run_after(fw_task)
    _firmware_semaphorish_tasks.append(cp_elf)

    self.build_summary = dict(
        target=self.name,
        binary=fw_elf_dest.path_from(self.bld.bldnode),
    )

    if self.bld.options.upload:
        if _upload_task:
            Logs.warn('PX4: upload for %s ignored' % self.name)
            return
        _upload_task = self.create_cmake_build_task('px4', 'upload')
        _upload_task.set_run_after(fw_task)
        _firmware_semaphorish_tasks.append(_upload_task)
def cry_warning(conf, msg):
    Logs.warn("warning: %s" % msg)
def compare_config_sets(left, right, deep_compare=False):
    """
    Helper method to do a basic comparison of different config sets (env)

    :param left: The left config set to compare
    :param right: The right config set to compare
    :param deep_compare: Option to do a deeper value comparison
    :return: True if the two config sets are equal, False if not
    """
    # If one or the other value is None, return False. If BOTH are None, return True
    if left is None:
        if right is None:
            return True
        else:
            return False
    elif right is None:
        return False

    # Compare the number of keys
    left_keys = left.keys()
    right_keys = right.keys()
    if len(left_keys) != len(right_keys):
        return False

    key_len = len(left_keys)
    for i in range(0, key_len):
        # Compare each config key entry

        # Compare the key name
        if left_keys[i] != right_keys[i]:
            return False

        # Compare the key value
        left_values = left[left_keys[i]]
        right_values = right[right_keys[i]]

        if isinstance(left_values, list):
            if isinstance(right_values, list):
                # The items are lists
                left_value_count = len(left_values)
                right_value_count = len(right_values)
                if left_value_count != right_value_count:
                    return False
                if deep_compare:
                    for j in range(0, left_value_count):
                        left_value = left_values[j]
                        right_value = right_values[j]
                        if left_value != right_value:
                            return False
            else:
                # The left and right types mismatch
                return False
        elif isinstance(left_values, bool):
            if isinstance(right_values, bool):
                # The items are bools
                if left_values != right_values:
                    return False
            else:
                # The left and right types mismatch
                return False
        elif isinstance(left_values, str):
            if isinstance(right_values, str):
                # The items are strings
                if left_values != right_values:
                    return False
            else:
                # The left and right types mismatch
                return False
        else:
            Logs.warn('[WARN] ConfigSet value cannot be compared, unsupported type {} for key {}'
                      .format(type(left_values), left_keys[i]))

    return True
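# Minimal usage sketch with two waf ConfigSet objects:
env_a = ConfigSet.ConfigSet()
env_a.DEFINES = ['NDEBUG']
env_b = ConfigSet.ConfigSet()
env_b.DEFINES = ['NDEBUG']
assert compare_config_sets(env_a, env_b, deep_compare=True)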
def handle_code_generator_output(self, code_gen_output):
    """
    Decode the json object and process the return from the generator
    :param code_gen_output: json string
    :return: True on success, False on failure
    """
    try:
        json_object = json.loads(code_gen_output)
        errors_reported = False
        for output_object in json_object:
            if output_object['type'] == 'info':
                output = output_object['info']
                Logs.debug('az_code_gen: {}'.format(output))
                if self.profile and output.startswith('Profile'):
                    Logs.debug('az_code_gen: ' + output)
            elif output_object['type'] == 'error':
                Logs.error('{} - az_code_gen task error'.format(output_object['error']))
                errors_reported = True
            elif output_object['type'] == 'generated_file':
                self.register_output_file(output_object['file_name'], output_object['should_be_added_to_build'])
            elif output_object['type'] == 'dependency_file':
                self.register_dependency_file(str(output_object['file_name']))
            else:
                Logs.error('az_code_gen: Unknown output json type returned from Code Generator. Type is: {} - Raw output: {}'
                           .format(output_object['type'], code_gen_output))
                errors_reported = True

        # Fail the task if errors were reported
        if errors_reported:
            return False

        # Add the local folder of each output node to the include path of the task_gen
        # and store the paths off to pickle for future runs
        azcg_paths = self.azcg_get('AZCG_INCPATHS', [])
        for output_node in self.outputs:
            # This check is here to ensure that tasks that were written out with None outputs will be skipped.
            # The origin of this problem should have been solved by returning after the None check during register output
            if output_node is None:
                Logs.warn('az_code_gen: Task output has a None entry, skipping!')
                continue
            output_path = output_node.parent.abspath()
            if output_path not in azcg_paths:
                azcg_paths.append(output_path)

        # Append any additional paths relative to the output directory found in export includes
        output_dir_node = get_azcg_output_dir_node(self.generator)
        for export_include in self.generator.export_includes:
            if isinstance(export_include, waflib.Node.Node) and export_include.is_child_of(output_dir_node):
                export_path = export_include.abspath()
                if export_path not in azcg_paths:
                    azcg_paths.append(export_path)

        self.azcg_set('AZCG_INCPATHS', azcg_paths)

        self.propagate_azcg_incpaths(azcg_paths)

        return True
    except ValueError as value_error:
        # If we get output that isn't JSON, it means Clang errored before
        # the code generator gained control. Likely invalid command-line arguments.
        Logs.error('az_code_gen: Failed to json.loads output with error "{}" - output string was:\n{}'
                   .format(str(value_error), code_gen_output))
        import traceback
        import sys
        tb_list = traceback.extract_tb(sys.exc_info()[2])
        for filename, lineno, name, line in tb_list:
            Logs.error('{}({}): error {}: in {}: {}'.format(filename, lineno, value_error.__class__.__name__, name, line))
        filename, lineno, _, _ = tb_list[-1]
        Logs.error('{}({}): error {}: {}'.format(filename, lineno, value_error.__class__.__name__, str(value_error)))
        return False
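# The generator output handled above is assumed to be a JSON array of objects
# shaped like the following (keys taken from the handler; values illustrative):
#
# [
#   {"type": "info", "info": "Profile: parsed 12 files"},
#   {"type": "generated_file", "file_name": "Gen/MyClass.generated.cpp",
#    "should_be_added_to_build": true},
#   {"type": "dependency_file", "file_name": "Source/MyClass.h"},
#   {"type": "error", "error": "unknown annotation"}
# ]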
def cry_file_warning(conf, msg, filePath, lineNum=0):
    Logs.warn('%s(%s): warning: %s.' % (filePath, lineNum, msg))
def create_code_generator_tasks(self):
    # Skip during project generation. Also skip if 'az_code_gen_group' is not available on the BuildContext:
    # the 'az_code_gen_group' is only added during the build step from <dev_root>/wscript;
    # during package and deploy the 'az_code_gen_group' isn't set.
    if self.bld.env['PLATFORM'] == 'project_generator' or 'az_code_gen_group' not in self.bld.group_names:
        return

    # promote raw entries to list
    if isinstance(getattr(self, 'az_code_gen', []), dict):
        self.az_code_gen = [self.az_code_gen]

    # compute deps
    azcg_dep_nodes = []
    azcg_dir = self.env['CODE_GENERATOR_PATH'][0]
    azcg_dir_node = self.bld.root.find_node(azcg_dir)
    if azcg_dir_node:
        # For a clean build, .pyc files don't exist at this point, but for the later incremental build,
        # .pyc files would be added as node dependencies and change the task signature.
        # Do not add .pyc files as dependencies.
        azcg_dep_nodes = azcg_dir_node.ant_glob('**/*', excl=Node.exclude_regs + '\n**/*.pyc')
    else:
        Logs.warn('az_code_gen: Unable to find azcg directory. Code Generator tasks will not have the utility/scripts as dependencies')

    # this script is a dependency
    script_node = self.bld.root.make_node(os.path.abspath(__file__))
    # Use the .py file as a dependency instead of the .pyc file.
    if script_node.suffix() == '.pyc':
        script_node = script_node.change_ext('.py')
    azcg_dep_nodes.append(script_node)

    for az_code_gen_pass in getattr(self, 'az_code_gen', []):
        # See if we have any scripts
        code_generator_scripts = az_code_gen_pass.get('scripts', [])
        if not code_generator_scripts:
            Logs.warn('az_code_gen feature enabled but no scripts were specified. '
                      'No code generation performed for target {}'.format(self.target))
            return

        code_gen_arguments = az_code_gen_pass.get('arguments', [])
        if isinstance(code_gen_arguments, str):
            code_gen_arguments = [code_gen_arguments]

        code_gen_options = az_code_gen_pass.get('options', [])
        if isinstance(code_gen_options, str):
            code_gen_options = [code_gen_options]

        code_gen_input = az_code_gen_pass.get('files', [])
        if not code_gen_input:
            Logs.warn('az_code_gen feature enabled but no files were specified. '
                      'No code generation performed for target {}'.format(self.target))
            return

        code_gen_override_output = az_code_gen_pass.get('override_output', None)

        # Create one task per input file/list
        for input_item in code_gen_input:
            # Auto-promote non-lists to lists
            if not isinstance(input_item, list):
                input_file_list = [input_item]
            else:
                input_file_list = input_item

            create_az_code_generator_task(self, input_file_list, code_generator_scripts,
                                          code_gen_arguments, code_gen_options,
                                          azcg_dep_nodes, code_gen_override_output)
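# Illustrative shape of the az_code_gen attribute this feature consumes, as it
# might appear on a task generator in a wscript (file and script names are
# hypothetical; the keys match what the loop above reads):
#
# az_code_gen = [{
#     'files':   ['Source/MyComponent.h'],
#     'scripts': ['Tools/AzCodeGenerator/azcg_component.py'],
#     'options': ['PrintOutputRedirectionFile'],
# }]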
def draw_imp(self, digraph, graph_name, file_name):
    extract_nodes = self.ctx.options.extract_nodes.split(',') \
        if self.ctx.options.extract_nodes is not None else []
    is_trans_reduce = self.ctx.options.trans_reduce == 'yes'
    dot_text = dotutil.digraph_to_dot(graph_name, digraph, extract_nodes, is_trans_reduce)
    dot_node = self.graph_dir_node.make_node(file_name + '.dot')
    dg_node = self.graph_dir_node.make_node(file_name + '.png')
    dot_node.write(dot_text)
    cmd = [
        self.dot_path if self.dot_path else 'dot',
        '-Tpng',
        dot_node.abspath(),
        '-o',
        dg_node.abspath(),
    ]
    if not self.dot_path:
        Logs.warn('Cannot find the program "dot" in the "PATH" environment variable.')
        Logs.warn('Please run the following command manually to generate the dependency graph.')
        Logs.warn(' '.join(cmd))
    else:
        rc = subprocess.call(cmd)
        if rc != 0:
            Logs.warn('Failed to run %s' % cmd)
        else:
            Logs.warn('Generated dot file:\n%s' % dot_node.abspath())
            Logs.warn('Generated graph:\n%s' % dg_node.abspath())
            open_with = self.ctx.options.open_graph_with
            if open_with:
                try:
                    cmd = [open_with, dg_node.abspath()]
                    subprocess.call(cmd)
                except Exception as e:
                    Logs.warn('Cannot execute command %s. Exception: %s.' % (cmd, e))