def build(bld):
    msg.debug('orch: BUILD CALLED')
    bld.load(bld.env.orch_extra_tools)  # batteries-included

    from . import features
    features.load()

    msg.debug('orch: available features: %s' % (', '.join(sorted(available_features.keys())),))
    msg.info('Supported waf features: "%s"' % '", "'.join(sorted(available_features.keys())))
    msg.debug('orch: Build envs: %s' % ', '.join(sorted(bld.all_envs.keys())))

    tobuild = bld.env.orch_group_packages
    msg.debug('orch: TOBUILD %s' % (tobuild,))
    for grpname, pkgnames in tobuild:
        msg.debug('orch: Adding group: "%s"' % grpname)
        bld.add_group(grpname)
        for pkgname in pkgnames:
            bld.worch_package(pkgname)

    bld.add_pre_fun(pre_process)
    bld.add_post_fun(post_process)
    msg.debug('orch: BUILD CALLED [done]')
def bibfile(self):
    """
    Parses *.aux* files to find bibfiles to process.
    If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    for aux_node in self.aux_nodes:
        try:
            ct = aux_node.read()
        except EnvironmentError as e:
            Logs.error("Error reading %s: %r", aux_node.abspath(), e)
            continue

        if g_bibtex_re.findall(ct):
            Logs.info("calling bibtex")

            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({"BIBINPUTS": self.texinputs(), "BSTINPUTS": self.texinputs()})
            self.env.SRCFILE = aux_node.name[:-4]
            self.check_status("error when calling bibtex", self.bibtex_fun())

    for node in getattr(self, "multibibs", []):
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({"BIBINPUTS": self.texinputs(), "BSTINPUTS": self.texinputs()})
        self.env.SRCFILE = node.name[:-4]
        self.check_status("error when calling bibtex", self.bibtex_fun())
def execute_build(self):
    """
    Execute the build by:

    * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
    * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
    * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
    * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
    """
    #Logs.info("Waf: Entering directory `%s'", self.variant_dir)
    self.recurse([self.run_dir])
    self.pre_build()

    # display the time elapsed in the progress bar
    self.timer = Utils.Timer()

    try:
        self.compile()
    finally:
        if self.progress_bar == 1 and sys.stdout.isatty():
            c = self.producer.processed or 1
            m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL)
            Logs.info(m, extra={'stream': sys.stdout, 'c1': Logs.colors.cursor_off, 'c2': Logs.colors.cursor_on})
        #Logs.info("Waf: Leaving directory `%s'", self.variant_dir)

    try:
        self.producer.bld = None
        del self.producer
    except AttributeError:
        pass

    self.post_build()
def archive(self):
    """
    Create the archive.
    """
    import tarfile

    arch_name = self.get_arch_name()

    try:
        self.base_path
    except AttributeError:
        self.base_path = self.path

    node = self.base_path.make_node(arch_name)
    try:
        node.delete()
    except OSError:
        pass

    files = self.get_files()

    if self.algo.startswith('tar.'):
        tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))

        for x in files:
            tinfo = tar.gettarinfo(name=x.abspath(), arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
            tinfo.uid = 0
            tinfo.gid = 0
            tinfo.uname = 'root'
            tinfo.gname = 'root'

            fu = None
            try:
                fu = open(x.abspath(), 'rb')
                tar.addfile(tinfo, fileobj=fu)
            finally:
                if fu:
                    fu.close()
        tar.close()
    elif self.algo == 'zip':
        import zipfile
        zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)

        for x in files:
            archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
            zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
        zip.close()
    else:
        self.fatal('Valid algo types are tar.bz2, tar.gz or zip')

    try:
        from hashlib import sha1 as sha
    except ImportError:
        from sha import sha
    try:
        digest = " (sha=%r)" % sha(node.read()).hexdigest()
    except Exception:
        digest = ''

    Logs.info('New archive created: %s%s' % (self.arch_name, digest))
def write_compilation_database(ctx):
    "Write the clang compilation database as JSON"
    database_file = ctx.bldnode.make_node('compile_commands.json')
    Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
    try:
        root = json.load(database_file)
    except IOError:
        root = []
    clang_db = dict((x['file'], x) for x in root)
    for task in getattr(ctx, 'clang_compilation_database_tasks', []):
        try:
            cmd = task.last_cmd
        except AttributeError:
            continue
        directory = getattr(task, 'cwd', ctx.variant_dir)
        f_node = task.inputs[0]
        filename = os.path.relpath(f_node.abspath(), directory)
        entry = {
            "directory": directory,
            "arguments": cmd,
            "file": filename,
        }
        clang_db[filename] = entry
    root = list(clang_db.values())
    database_file.write(json.dumps(root, indent=2))
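# --- Usage sketch (not part of the function above) ---
# write_compilation_database() is the kind of hook installed by a
# 'clang_compilation_database' waf extra. A minimal wscript enabling such a
# tool might look like the following; the tool name and project layout are
# assumptions, not taken from the code above.
def options(opt):
    opt.load('compiler_cxx')
    opt.load('clang_compilation_database')

def configure(conf):
    conf.load('compiler_cxx')
    conf.load('clang_compilation_database')

def build(bld):
    # every C/C++ compile task gets recorded and dumped to
    # build/compile_commands.json once the build finishes
    bld.program(source='main.cpp', target='app')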
def _exec_doxygen(self, tgen, conf):
    '''Generate source code documentation for the given task generator.'''
    Logs.info("Generating documentation for '%s'" % tgen.name)

    # open template configuration and read as string
    name = self.env.DOXYGEN_CONFIG
    if not os.path.exists(name):
        name = '%s/doxy.config' % os.path.dirname(__file__)
    f = open(name, 'r')
    s = f.read()
    f.close()

    # write configuration key,value pairs into template string
    for key, value in conf.items():
        s = re.sub(r'%s\s+=.*' % key, '%s = %s' % (key, value), s)

    # create base directory for storing reports
    doxygen_path = self.env.DOXYGEN_OUTPUT
    if not os.path.exists(doxygen_path):
        os.makedirs(doxygen_path)

    # write component configuration to file and run doxygen on it
    config = '%s/doxy-%s.config' % (doxygen_path, tgen.name)
    f = open(config, 'w+')
    f.write(s)
    f.close()
    cmd = '%s %s' % (Utils.to_list(self.env.DOXYGEN)[0], config)
    self.cmd_and_log(cmd)
def vlogcomp(self, project):
    Logs.info("=> Running vlogcomp")
    tool = self.tg.env.XILINX_VLOGCOMP
    cmd = "%(tool)s -work isim_temp -intstyle ise -prj %(project)s" % locals()
    self.ctx.exec_command(cmd, cwd=self.path.abspath())
def bibfile(self):
    """
    Parse the *.aux* files to find bibfiles to process.
    If any are found, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    for aux_node in self.aux_nodes:
        try:
            ct = aux_node.read()
        except EnvironmentError as e:
            Logs.error('Error reading %s: %r' % (aux_node.abspath(), e))
            continue

        if g_bibtex_re.findall(ct):
            Logs.info('calling bibtex')

            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
            self.env.SRCFILE = aux_node.name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())

    for node in getattr(self, 'multibibs', []):
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
        self.env.SRCFILE = node.name[:-4]
        self.check_status('error when calling bibtex', self.bibtex_fun())
def before(*k):
    # replace deprecated method names with their new equivalents before delegating
    k = list(k)
    for key, val in repl.items():
        if key in k:
            if Logs.verbose > 1:
                Logs.info('before %s -> %s' % (key, val))
            k[k.index(key)] = val
    return TaskGen.before_method(*k)
def auto_run_bootstrap(ctx, section_name, option_name, value):
    """ Configure automatic bootstrap execution """
    if not ctx.is_bootstrap_available():
        return 'False'

    if not _is_user_input_allowed(ctx, option_name, value):
        Logs.info('\nUser Input disabled.\nUsing default value "%s" for option: "%s"' % (value, option_name))
        return value

    # Check for P4 support
    # On failure (i.e. no p4 available): do not show the option if the default was 'False' in the first place
    # On failure (i.e. no p4 available): do show the option if the default was 'True'
    (res, warning, error) = ATTRIBUTE_VERIFICATION_CALLBACKS['verify_auto_run_bootstrap'](ctx, option_name, 'True')
    if not res and not _is_user_option_true(value):
        return 'False'

    info_str = ["Automatically execute Branch Bootstrap on each build?"]
    info_str.append("[When disabled the user is responsible to keep their 3rdParty Folder up to date]")

    # GUI
    if not ctx.is_option_true('console_mode'):
        return ctx.gui_get_attribute(section_name, option_name, value, '\n'.join(info_str))

    info_str.append('\n(Press ENTER to keep the current default value shown in [])')
    Logs.info('\n'.join(info_str))

    while True:
        value = _get_boolean_value(ctx, 'Enable Automatic Execution of Branch BootStrap', value)
        (res, warning, error) = ATTRIBUTE_VERIFICATION_CALLBACKS['verify_auto_run_bootstrap'](ctx, option_name, value)
        if res:
            break
        else:
            Logs.warn(error)

    return value
def compile(self):
    if not self.files:
        Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
        BuildContext.compile(self)
        return
    for g in self.groups:
        for tg in g:
            try:
                f = tg.post
            except AttributeError:
                pass
            else:
                f()
        for pat in self.files.split(','):
            matcher = self.get_matcher(pat)
            for tg in g:
                if isinstance(tg, Task.TaskBase):
                    lst = [tg]
                else:
                    lst = tg.tasks
                for tsk in lst:
                    do_exec = False
                    for node in getattr(tsk, 'inputs', []):
                        if matcher(node, output=False):
                            do_exec = True
                            break
                    for node in getattr(tsk, 'outputs', []):
                        if matcher(node, output=True):
                            do_exec = True
                            break
                    if do_exec:
                        ret = tsk.run()
                        Logs.info('%s -> exit %r' % (str(tsk), ret))
def run(self):
    env = self.env

    if not env['PROMPT_LATEX']:
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')

    self.cwd = self.inputs[0].parent.get_bld().abspath()
    Logs.info('first pass on %s' % self.__class__.__name__)

    cur_hash = self.hash_aux_nodes()

    self.call_latex()

    self.hash_aux_nodes()

    self.bibtopic()
    self.bibfile()
    self.bibunits()
    self.makeindex()
    self.makeglossaries()

    for i in range(10):
        # stop when the contents of the .aux files no longer change
        prev_hash = cur_hash
        cur_hash = self.hash_aux_nodes()
        if not cur_hash:
            Logs.error('No aux.h to process')
        if cur_hash and cur_hash == prev_hash:
            break

        Logs.info('calling %s' % self.__class__.__name__)
        self.call_latex()
def run(self):
    """
    Execute the test. The execution is always successful, and the results
    are stored on ``self.generator.bld.utest_results`` for postprocessing.

    Override ``add_test_results`` to interrupt the build
    """
    filename = self.inputs[0].abspath()
    output_filename = path.splitext(filename)[0] + '_result.xml'

    self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
    self.ut_exec += ['--gtest_output=xml:{}'.format(output_filename)]
    if getattr(self.generator, 'ut_fun', None):
        self.generator.ut_fun(self)

    cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()

    testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
    if testcmd:
        self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')

    Logs.info(str(self.ut_exec))
    proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(),
                                  stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
    (stdout, stderr) = proc.communicate()

    # read result from output_filename
    test_results = ElementTree.parse(output_filename).getroot()

    tup = (filename, proc.returncode, stdout, stderr, test_results)
    testlock.acquire()
    try:
        return self.generator.add_test_results(tup)
    finally:
        testlock.release()
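# --- Usage sketch (not part of the task above) ---
# The task only reads ut_exec, ut_fun, ut_cwd and ut_cmd from its generator via
# getattr(), so a build script can steer it entirely through task-generator
# keywords. The 'gtest' feature name and the target below are illustrative
# assumptions.
def build(bld):
    bld.program(
        features='cxx cxxprogram gtest',            # assumed feature that schedules the test task
        source='tests/all_tests.cpp',
        target='all_tests',
        use='GTEST',
        ut_cwd=bld.path.abspath(),                  # run the tests from the source directory
        ut_cmd='valgrind --error-exitcode=1 %s')    # wrapper command; %s expands to the test binary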
def do_link(self, src, tgt):
    """
    Create a symlink from tgt to src.

    This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_link` to remove the symlink.

    :param src: file name as absolute path
    :type src: string
    :param tgt: file destination, as absolute path
    :type tgt: string
    """
    d, _ = os.path.split(tgt)
    Utils.check_dir(d)

    link = False
    if not os.path.islink(tgt):
        link = True
    elif os.readlink(tgt) != src:
        link = True

    if link:
        try:
            os.remove(tgt)
        except OSError:
            pass
        if not self.progress_bar:
            Logs.info('+ symlink %s (to %s)' % (tgt, src))
        os.symlink(src, tgt)
    else:
        if not self.progress_bar:
            Logs.info('- symlink %s (to %s)' % (tgt, src))
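# --- Usage sketch (not part of the method above) ---
# do_link() is normally reached through the symlink_as install helper rather
# than being called directly. The paths below are illustrative.
def build(bld):
    bld.shlib(source='foo.c', target='foo', vnum='1.2.3')
    # request ${PREFIX}/lib/libfoo.so pointing at the versioned library at install time
    bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3')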
def execute_build(self):
    """
    Execute the build by:

    * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
    * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
    * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
    * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
    """
    Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
    self.recurse([self.run_dir])
    self.pre_build()

    # display the time elapsed in the progress bar
    self.timer = Utils.Timer()

    if self.progress_bar:
        sys.stderr.write(Logs.colors.cursor_off)
    try:
        self.compile()
    finally:
        if self.progress_bar == 1:
            c = len(self.returned_tasks) or 1
            self.to_log(self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL))
            print('')
            sys.stdout.flush()
            sys.stderr.write(Logs.colors.cursor_on)
    Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
    self.post_build()
def do_install(self, src, tgt, chmod=Utils.O644):
    d, _ = os.path.split(tgt)
    Utils.check_dir(d)

    srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
    if not Options.options.force:
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                Logs.info('- install %s (from %s)' % (tgt, srclbl))
                return False

    Logs.info('+ install %s (from %s)' % (tgt, srclbl))
    try:
        os.remove(tgt)
    except OSError:
        pass

    try:
        shutil.copy2(src, tgt)
        os.chmod(tgt, chmod)
    except IOError:
        try:
            os.stat(src)
        except (OSError, IOError):
            Logs.error('File %r does not exist' % src)
        raise Errors.WafError('Could not install the file %r' % tgt)
def do_install(self, src, tgt, lbl, **kw):
    if not Options.options.force:
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.generator.bld.progress_bar:
                    Logs.info('- install %s (from %s)', tgt, lbl)
                return False

    if not self.generator.bld.progress_bar:
        Logs.info('+ install %s (from %s)', tgt, lbl)

    try:
        os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
    except EnvironmentError:
        pass

    try:
        os.remove(tgt)
    except OSError:
        pass

    try:
        self.copy_fun(src, tgt)
    except EnvironmentError as e:
        if not os.path.exists(src):
            Logs.error('File %r does not exist', src)
        elif not os.path.isfile(src):
            Logs.error('Input %r is not a file', src)
        raise Errors.WafError('Could not install the file %r' % tgt, e)
def parse_options():
    """
    Parse the command-line options and initialize the logging system.
    Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
    """
    Context.create_context('options').execute()

    if not Options.commands:
        Options.commands = [default_cmd]
    Options.commands = [x for x in Options.commands if x != 'options']  # issue 1076

    # process some internal Waf options
    Logs.verbose = Options.options.verbose
    Logs.init_log()

    if Options.options.zones:
        Logs.zones = Options.options.zones.split(',')
        if not Logs.verbose:
            Logs.verbose = 1
    elif Logs.verbose > 0:
        Logs.zones = ['runner']

    if Logs.verbose > 2:
        Logs.zones = ['*']

    # Force console mode for SSH connections
    if getattr(Options.options, 'console_mode', None):
        if os.environ.get('SSH_CLIENT') is not None or os.environ.get('SSH_TTY') is not None:
            Logs.info("[INFO] - SSH Connection detected. Forcing 'console_mode'")
            Options.options.console_mode = str(True)
def do_install(self, src, tgt, **kw):
    d, _ = os.path.split(tgt)
    if not d:
        raise Errors.WafError('Invalid installation given %r->%r' % (src, tgt))
    Utils.check_dir(d)

    srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
    if not Options.options.force:
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.progress_bar:
                    Logs.info('- install %s (from %s)' % (tgt, srclbl))
                return False

    if not self.progress_bar:
        Logs.info('+ install %s (from %s)' % (tgt, srclbl))

    try:
        os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
    except EnvironmentError:
        pass

    try:
        os.remove(tgt)
    except OSError:
        pass

    try:
        self.copy_fun(src, tgt, **kw)
    except IOError:
        try:
            os.stat(src)
        except EnvironmentError:
            Logs.error('File %r does not exist' % src)
        raise Errors.WafError('Could not install the file %r' % tgt)
def install_pyfile(self, node, install_from=None):
    from_node = install_from or node.parent
    tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
    path = tsk.get_install_path()

    if self.bld.is_install < 0:
        Logs.info("+ removing byte compiled python files")
        for x in 'co':
            try:
                os.remove(path + x)
            except OSError:
                pass

    if self.bld.is_install > 0:
        try:
            st1 = os.stat(path)
        except OSError:
            Logs.error('The python file is missing, this should not happen')

        for x in ['c', 'o']:
            do_inst = self.env['PY' + x.upper()]
            try:
                st2 = os.stat(path + x)
            except OSError:
                pass
            else:
                if st1.st_mtime <= st2.st_mtime:
                    do_inst = False

            if do_inst:
                lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
                (a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
                argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
                Logs.info('+ byte compiling %r' % (path + x))
                env = self.env.env or None
                ret = Utils.subprocess.Popen(argv, env=env).wait()
                if ret:
                    raise Errors.WafError('py%s compilation failed %r' % (x, path))
def do_display_pkg_uses(self, pkgname, depth=0, maxdepth=2):
    pkgdeps = self.get_pkg_uses(pkgname)
    msg.info('%s%s' % (' ' * depth, pkgname))
    depth += 1
    if depth < maxdepth:
        for pkgdep in pkgdeps:
            self.do_display_pkg_uses(pkgdep, depth)
def print_legend(self):
    """Displays description for the tree command."""
    Logs.info("")
    Logs.info("")
    Logs.info("DESCRIPTION:")
    Logs.info("m (lib) = uses system library 'm' (i.e. libm.so)")
    Logs.info("")
def execute_build(self):
    """
    Execute the build by:

    * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
    * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
    * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
    * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
    """
    if not self.is_option_true('internal_dont_check_recursive_execution'):
        Logs.info("[WAF] Executing '%s' in '%s'" % (self.cmd, self.variant_dir))

    self.recurse([self.run_dir])
    self.pre_build()

    # display the time elapsed in the progress bar
    self.timer = Utils.Timer()

    if self.progress_bar:
        sys.stderr.write(Logs.colors.cursor_off)
    try:
        self.compile()
    finally:
        if self.progress_bar == 1:
            c = len(self.returned_tasks) or 1
            self.to_log(self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL))
            print('')
            sys.stdout.flush()
            sys.stderr.write(Logs.colors.cursor_on)

    self.post_build()
def do_display_project_uses(self, projname, depth=0):
    projdeps = self.get_project_uses(projname)
    msg.info('%s%s' % (' ' * depth, projname))
    for projdep in projdeps:
        self.do_display_project_uses(projdep, depth + 1)
    return
def add_linux_launcher_script(self):
    if not getattr(self, 'create_linux_launcher', False):
        return

    if self.env['PLATFORM'] == 'project_generator':
        return

    if not getattr(self, 'link_task', None):
        self.bld.fatal('Linux Launcher is only supported for Executable Targets')

    # Write to rc file if content is different
    for node in self.bld.get_output_folders(self.bld.env['PLATFORM'], self.bld.env['CONFIGURATION']):
        node.mkdir()
        for project in self.bld.spec_game_projects():
            # Set up values for linux launcher script template
            linux_launcher_script_file = node.make_node('Launch_' + self.bld.get_executable_name(project) + '.sh')
            self.to_launch_executable = self.bld.get_executable_name(project)

            template = compile_template(LAUNCHER_SCRIPT)
            linux_launcher_script_content = template(self)

            if not os.path.exists(linux_launcher_script_file.abspath()) or linux_launcher_script_file.read() != linux_launcher_script_content:
                Logs.info('Updating Linux Launcher Script (%s)' % linux_launcher_script_file.abspath())
                linux_launcher_script_file.write(linux_launcher_script_content)
def check_sparkle(self, *k, **kw):
    try:
        self.check_sparkle_base(*k, **kw)
    except Exception:
        try:
            # Try local path
            # Logs.info("Check local version of Sparkle framework")
            self.check_sparkle_base(cxxflags="-F%s/Frameworks/" % self.path.abspath(),
                                    linkflags="-F%s/Frameworks/" % self.path.abspath())
        except Exception:
            import urllib, subprocess, os, shutil
            if not os.path.exists('osx/Frameworks/Sparkle.framework'):
                # Download to local path and retry
                Logs.info("Sparkle framework not found, trying to download it to 'build/'")
                urllib.urlretrieve("https://github.com/sparkle-project/Sparkle/releases/download/1.7.1/Sparkle-1.7.1.zip",
                                   "build/Sparkle.zip")
                if os.path.exists('build/Sparkle.zip'):
                    subprocess.check_call(['unzip', '-qq', 'build/Sparkle.zip', '-d', 'build/Sparkle'])
                    os.remove("build/Sparkle.zip")
                    if not os.path.exists("osx/Frameworks"):
                        os.mkdir("osx/Frameworks")
                    os.rename("build/Sparkle/Sparkle.framework", "osx/Frameworks/Sparkle.framework")
                    shutil.rmtree("build/Sparkle", ignore_errors=True)
                    self.check_sparkle_base(cxxflags="-F%s/Frameworks/" % self.path.abspath(),
                                            linkflags="-F%s/Frameworks/" % self.path.abspath())
def archive(self):
    import tarfile

    arch_name = self.get_arch_name()

    try:
        self.base_path
    except AttributeError:
        self.base_path = self.path

    node = self.base_path.make_node(arch_name)
    try:
        node.delete()
    except OSError:
        pass

    files = self.get_files()

    if self.algo.startswith('tar.'):
        tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))

        for x in files:
            self.add_tar_file(x, tar)
        tar.close()
    elif self.algo == 'zip':
        import zipfile
        zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)

        for x in files:
            archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
            zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
        zip.close()
    else:
        self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')

    try:
        from hashlib import sha1
    except ImportError:
        digest = ''
    else:
        digest = ' (sha=%r)' % sha1(node.read(flags='rb')).hexdigest()

    Logs.info('New archive created: %s%s', self.arch_name, digest)
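# --- Usage sketch (not part of the method above) ---
# The 'waf dist' command drives archive(); a wscript can tune the archive name,
# format and exclusions by setting attributes on the context. The project name
# and exclusion patterns below are illustrative.
def dist(ctx):
    ctx.base_name = 'myproject-1.0'          # top-level folder and file name of the archive
    ctx.algo = 'tar.xz'                      # one of the formats accepted by archive()
    ctx.excl = '**/.git **/*.pyc **/build'   # glob patterns left out of the archive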
def do_install(self, src, tgt, chmod=Utils.O644):
    d, _ = os.path.split(tgt)
    if not d:
        raise Errors.WafError("Invalid installation given %r->%r" % (src, tgt))
    Utils.check_dir(d)

    srclbl = src.replace(self.srcnode.abspath() + os.sep, "")
    if not Options.options.force:
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.progress_bar:
                    Logs.info("- install %s (from %s)" % (tgt, srclbl))
                return False

    if not self.progress_bar:
        Logs.info("+ install %s (from %s)" % (tgt, srclbl))

    try:
        os.remove(tgt)
    except OSError:
        pass

    try:
        shutil.copy2(src, tgt)
        os.chmod(tgt, chmod)
    except IOError:
        try:
            os.stat(src)
        except (OSError, IOError):
            Logs.error("File %r does not exist" % src)
        raise Errors.WafError("Could not install the file %r" % tgt)
def bdist_nsis(self, appname, version):
    '''Creates an installer for Windows hosts using NSIS.

    If the install script does not exist, a default install script will be
    created by this module.

    :param appname: Functional application and bdist name
    :type appname: str
    :param version: Current version of the application being packaged
    :type version: str
    '''
    nsis = self.env.NSIS
    if isinstance(nsis, list):
        if not len(nsis):
            Logs.warn('NSIS not available, skipping')
            return
        nsis = nsis[0]

    fname = self.env.NSIS_SCRIPT
    script = self.path.find_node(fname)
    if not script:
        script = self.nsis_create_script(fname, appname)

    args = []
    args.append('/V4')
    args.append('/NOCD')
    args.append('/DVERSION=%s' % version)

    v = version.split('.')
    try:
        v_major = int(v[0])
        args.append('/DVER_MAJOR=%s' % v_major)
    except (IndexError, ValueError):
        pass
    try:
        v_minor = int(v[1])
        args.append('/DVER_MINOR=%s' % v_minor)
    except (IndexError, ValueError):
        pass
    try:
        v_patch = int(v[2])
        args.append('/DVER_PATCH=%s' % v_patch)
    except (IndexError, ValueError):
        pass

    arch = platform.architecture()
    if sys.platform == 'win32' and arch[0] == '32bit':
        outfile = '%s-%s-win32-setup.exe' % (appname, version)
    else:
        outfile = '%s-%s-win64-setup.exe' % (appname, version)
    outfile = os.path.join(self.path.abspath(), outfile)
    args.append('/DINSTALLER=%s' % outfile)

    if sys.platform != 'win32':
        args = [a.replace('/', '-', 1) for a in args]

    cmd = '%s %s %s' % (nsis, ' '.join(args), script.abspath())
    cwd = self._bdist.abspath()
    stdout = self.cmd_and_log(cmd, output=Context.STDOUT, quiet=Context.STDOUT, cwd=cwd)
    Logs.info(stdout)
def do_install(self, src, tgt, **kw):
    """
    Copy a file from src to tgt with given file permissions. The actual copy is not performed
    if the source and target file have the same size and the same timestamps. When the copy occurs,
    the file is first removed and then copied (prevent stale inodes).

    This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_install` to remove the file.

    :param src: file name as absolute path
    :type src: string
    :param tgt: file destination, as absolute path
    :type tgt: string
    :param chmod: installation mode
    :type chmod: int
    """
    d, _ = os.path.split(tgt)
    if not d:
        raise Errors.WafError('Invalid installation given %r->%r' % (src, tgt))
    Utils.check_dir(d)

    srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
    if not Options.options.force:
        # check if the file is already there to avoid a copy
        try:
            st1 = os.stat(tgt)
            st2 = os.stat(src)
        except OSError:
            pass
        else:
            # same size and identical timestamps -> make no copy
            if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
                if not self.progress_bar:
                    Logs.info('- install %s (from %s)' % (tgt, srclbl))
                return False

    if not self.progress_bar:
        Logs.info('+ install %s (from %s)' % (tgt, srclbl))

    # Give best attempt at making destination overwritable,
    # like the 'install' utility used by 'make install' does.
    try:
        os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
    except EnvironmentError:
        pass

    # following is for shared libs and stale inodes (-_-)
    try:
        os.remove(tgt)
    except OSError:
        pass

    try:
        self.copy_fun(src, tgt, **kw)
    except IOError:
        try:
            os.stat(src)
        except EnvironmentError:
            Logs.error('File %r does not exist' % src)
        raise Errors.WafError('Could not install the file %r' % tgt)
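# --- Usage sketch (not part of the method above) ---
# This low-level copy is normally triggered by the install helpers on the build
# context; file names and destinations below are illustrative.
from waflib import Utils

def build(bld):
    bld.install_files('${PREFIX}/include/myproj', ['api.h', 'types.h'])
    bld.install_as('${PREFIX}/bin/mytool', 'scripts/mytool.py', chmod=Utils.O755)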
def process_executable(self):
    executable_source_node = self.inputs[0]
    executable_source_location_node = executable_source_node.parent

    dependency_source_location_nodes = [
        self.bld.engine_node.make_node(self.bld.get_output_folders(self.bld.platform, self.bld.config)[0].name)
    ]
    if executable_source_location_node not in dependency_source_location_nodes:
        dependency_source_location_nodes.append(executable_source_location_node)

    executable_dest_node = self.outputs[0].parent
    executable_dest_node.mkdir()

    Logs.info("Putting final packaging into base output folder {}, executable folder {}".format(
        self.destination_node.abspath(), executable_dest_node.abspath()))

    if executable_source_location_node != executable_dest_node:
        self.bld.install_files(executable_dest_node.abspath(),
                               self.executable_name,
                               cwd=executable_source_location_node,
                               chmod=Utils.O755,
                               postpone=False)

        if getattr(self, 'include_all_libs', False):
            self.bld.symlink_libraries(executable_source_location_node, executable_dest_node.abspath())
        else:
            # self.dependencies comes from the scan function
            self.bld.symlink_dependencies(self.dependencies, dependency_source_location_nodes, executable_dest_node.abspath())
    else:
        Logs.debug("package: source {} = dest {}".format(
            executable_source_location_node.abspath(), executable_dest_node.abspath()))

    if getattr(self, 'finalize_func', None):
        self.finalize_func(self.bld, executable_dest_node)
def test_summary(bld):
    from io import BytesIO
    import sys

    if not hasattr(bld, 'utest_results'):
        Logs.info('check: no test run')
        return

    fails = []
    for filename, exit_code, out, err in bld.utest_results:
        Logs.pprint('GREEN' if exit_code == 0 else 'YELLOW', ' %s' % filename, 'returned %d' % exit_code)

        if exit_code != 0:
            fails.append(filename)
        elif not bld.options.check_verbose:
            continue

        if len(out):
            buf = BytesIO(out)
            for line in buf:
                print(" OUT: %s" % line.decode(), end='', file=sys.stderr)
            print()

        if len(err):
            buf = BytesIO(err)
            for line in buf:
                print(" ERR: %s" % line.decode(), end='', file=sys.stderr)
            print()

    if not fails:
        Logs.info('check: All %u tests passed!' % len(bld.utest_results))
        return

    Logs.error('check: %u of %u tests failed' % (len(fails), len(bld.utest_results)))
    for filename in fails:
        Logs.error(' %s' % filename)
    bld.fatal('check: some tests failed')
def compile(self):
    """
    Compile the tasks matching the input/output files given (regular expression matching).
    Derived from :py:meth:`waflib.Build.BuildContext.compile`::

        $ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o
        $ waf step --files=in:foo.cpp.1.o # link task only
    """
    if not self.files:
        Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
        BuildContext.compile(self)
        return

    for g in self.groups:
        for tg in g:
            try:
                f = tg.post
            except AttributeError:
                pass
            else:
                f()

        for pat in self.files.split(','):
            matcher = self.get_matcher(pat)
            for tg in g:
                if isinstance(tg, Task.TaskBase):
                    lst = [tg]
                else:
                    lst = tg.tasks
                for tsk in lst:
                    do_exec = False
                    for node in getattr(tsk, 'inputs', []):
                        if matcher(node, output=False):
                            do_exec = True
                            break
                    for node in getattr(tsk, 'outputs', []):
                        if matcher(node, output=True):
                            do_exec = True
                            break
                    if do_exec:
                        ret = tsk.run()
                        Logs.info('%s -> exit %r' % (str(tsk), ret))
def do_uninstall(self, src, tgt, lbl, **kw):
    """
    See :py:meth:`waflib.Build.inst.do_install`
    """
    if not self.generator.bld.progress_bar:
        c1 = Logs.colors.NORMAL
        c2 = Logs.colors.BLUE
        Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1)

    #self.uninstall.append(tgt)
    try:
        os.remove(tgt)
    except OSError as e:
        if e.errno != errno.ENOENT:
            if not getattr(self, 'uninstall_error', None):
                self.uninstall_error = True
                Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
            if Logs.verbose > 1:
                Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno)
    self.rm_empty_dirs(tgt)
def do_install(self, src, tgt, **kw):
    """See :py:meth:`waflib.Build.InstallContext.do_install`"""
    if not self.progress_bar:
        Logs.info('- remove %s' % tgt)

    self.uninstall.append(tgt)
    try:
        os.remove(tgt)
    except OSError as e:
        if e.errno != errno.ENOENT:
            if not getattr(self, 'uninstall_error', None):
                self.uninstall_error = True
                Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
            if Logs.verbose > 1:
                Logs.warn('Could not remove %s (error code %r)' % (e.filename, e.errno))
    self.rm_empty_dirs(tgt)
def do_link(self, src, tgt, **kw):
    """
    Creates a symlink from tgt to src.

    :param src: file name as absolute path
    :type src: string
    :param tgt: file destination, as absolute path
    :type tgt: string
    """
    if os.path.islink(tgt) and os.readlink(tgt) == src:
        if not self.generator.bld.progress_bar:
            Logs.info('- symlink %s (to %s)', tgt, src)
    else:
        try:
            os.remove(tgt)
        except OSError:
            pass
        if not self.generator.bld.progress_bar:
            Logs.info('+ symlink %s (to %s)', tgt, src)
        os.symlink(src, tgt)
def copy_fun(self, src, tgt):
    inst_copy_fun(self, src, tgt)

    if self.generator.bld.options.no_strip:
        return

    if self.env.DEST_BINFMT not in ['elf', 'mac-o']:
        # don't strip unknown formats or PE
        return

    if getattr(self.generator, 'link_task', None) and self.generator.link_task.outputs[0] in self.inputs:
        cmd = self.env.STRIP + self.env.STRIPFLAGS + [tgt]
        try:
            if not self.generator.bld.progress_bar:
                c1 = Logs.colors.NORMAL
                c2 = Logs.colors.CYAN
                Logs.info('%s+ strip %s%s%s', c1, c2, tgt, c1)
            self.generator.bld.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
        except Errors.WafError as e:
            print(e.stdout, e.stderr)
def post_build_fun(ctx):
    is_success = True
    num_test_failures, num_tests = print_test_summary(ctx)

    error_msg = ''
    if num_test_failures > 0:
        error_msg += '%d/%d tests have failed.' % (num_test_failures, num_tests)
        is_success = False
    else:
        Logs.info('All tests passed.')

    if ctx.env['with_coverage']:
        is_coverage_success = generate_coverage_report(ctx)
        if not is_coverage_success:
            is_success = False
            error_msg += '\nFailed to generate coverage report.'

    if not is_success:
        ctx.fatal('%s (%s)' % (error_msg, str(ctx.log_timer)))
def execute_build(self):
    Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
    self.recurse([self.run_dir])
    self.pre_build()

    self.timer = Utils.Timer()

    if self.progress_bar:
        sys.stderr.write(Logs.colors.cursor_off)
    try:
        self.compile()
    finally:
        if self.progress_bar == 1:
            c = len(self.returned_tasks) or 1
            self.to_log(self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL))
            print('')
            sys.stdout.flush()
            sys.stderr.write(Logs.colors.cursor_on)
    Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
    self.post_build()
def exit_cleanup():
    try:
        fileobj = sys.stdout.fileobj
    except AttributeError:
        pass
    else:
        sys.stdout.is_valid = False
        sys.stderr.is_valid = False
        fileobj.close()
        filename = sys.stdout.filename

        Logs.info('Output logged to %r', filename)

        # then copy the log file to "latest.log" if possible
        up = os.path.dirname(os.path.abspath(filename))
        try:
            shutil.copy(filename, os.path.join(up, 'latest.log'))
        except OSError:
            # this may fail on windows due to processes spawned
            pass
def bibfile(self):
    for aux_node in self.aux_nodes:
        try:
            ct = aux_node.read()
        except EnvironmentError as e:
            Logs.error('Error reading %s: %r' % (aux_node.abspath(), e))
            continue

        if g_bibtex_re.findall(ct):
            Logs.info('calling bibtex')
            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
            self.env.SRCFILE = aux_node.name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())

    for node in getattr(self, 'multibibs', []):
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
        self.env.SRCFILE = node.name[:-4]
        self.check_status('error when calling bibtex', self.bibtex_fun())
def _check_ib_setting(setting_name, required_value, desc):
    """ Helper function to read (and potentially modify) a registry setting for IncrediBuild """
    (data, type) = (None, _winreg.REG_SZ)
    try:
        (data, type) = _winreg.QueryValueEx(IB_settings_read_only, setting_name)
    except Exception:
        import traceback
        traceback.print_exc(file=sys.stdout)
        Logs.warn('[WARNING] Cannot find a registry entry for "HKEY_LOCAL_MACHINE\\Software\\Wow6432Node\\Xoreax\\Incredibuild\\Builder\\%s"' % setting_name)

    # Do we have the right value?
    if str(data) != required_value:
        if not allowUserInput:
            # Don't try anything if no input is allowed
            Logs.warn('[WARNING] "HKEY_LOCAL_MACHINE\\Software\\Wow6432Node\\Xoreax\\Incredibuild\\Builder\\%s" set to "%s" but should be "%s"; run WAF outside of Visual Studio to fix automatically' % (setting_name, data, required_value))
            return

        try:
            # Try to open the registry for writing
            IB_settings_writing = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "Software\\Wow6432Node\\Xoreax\\Incredibuild\\Builder", 0, _winreg.KEY_SET_VALUE | _winreg.KEY_READ)
        except Exception:
            Logs.warn('[WARNING] Cannot access a registry entry "HKEY_LOCAL_MACHINE\\Software\\Wow6432Node\\Xoreax\\Incredibuild\\Builder\\%s" for writing.' % setting_name)
            Logs.warn('[WARNING] Please run cry_waf.exe as an administrator or change the value to "%s" in the registry to ensure a correct operation of WAF' % required_value)
            return

        if data is None:
            info_str = [('Should WAF create "HKEY_LOCAL_MACHINE\\Software\\Wow6432Node\\Xoreax\\Incredibuild\\Builder\\%s" with value "%s"?' % (setting_name, required_value))]
        else:
            info_str = [('Should WAF change "HKEY_LOCAL_MACHINE\\Software\\Wow6432Node\\Xoreax\\Incredibuild\\Builder\\%s" from "%s" to "%s"?' % (setting_name, data, required_value))]
        info_str.append(desc)

        # Get user choice
        if not ctx.is_option_true('console_mode'):
            # gui
            retVal = 'True' if ctx.gui_get_choice('\n'.join(info_str)) else 'False'
        else:
            # console
            Logs.info('\n'.join(info_str))
            retVal = _get_boolean_value(ctx, 'Input', 'Yes')

        if retVal == 'True' or retVal == 'Yes':
            _winreg.SetValueEx(IB_settings_writing, setting_name, 0, type, str(required_value))
        else:
            Logs.warn('[WARNING] WAF is running with "unsupported" IncrediBuild settings. Expect to encounter IncrediBuild errors during compilation.')
def pclint_show_results(bld):
    """Dump all pclint logs after building to the console"""
    global stdout_sep, stderr_sep

    log_task = bld.log_task
    messages = log_task.messages
    summary = log_task.summary_messages
    msg = ''

    # select the messages to print
    type_priority = 'Error Warning Info Note'.split()
    if Options.options.static_analysis_console == 'errors_else_warnings':
        for t in type_priority:
            msgs_to_print = messages.get(t, [])
            if msgs_to_print:
                msg = 'Printed only PC-lint %s messages' % t
                msgs_to_print = list(sorted(msgs_to_print))
                msgs_to_print.extend(['', msg])
                break
    else:
        msgs_to_print = list(sorted(reduce(operator.or_, messages.values(), set())))

    if Options.options.static_analysis_console != 'off':
        if log_task.lst_summary:
            msgs_to_print.extend([
                '',
                '{0:#^80}'.format(' PC-lint message summary '),
                '',
            ])
            msgs_to_print.extend(log_task.lst_summary)
            msgs_to_print.append('')

        if msgs_to_print:
            Logs.info('\n'.join(msgs_to_print))
            sys.stdout.flush()
def run(self):
    Logs.info("Running package task for {}".format(self.executable_name))

    executable_source_node = self.inputs[0]

    if not self.destination_node:
        # destination not specified so assume we are putting the package
        # where the built executable is located, which is the input's
        # parent since the input node is the actual executable
        self.destination_node = self.inputs[0].parent

    Logs.debug("package: packaging {} to destination {}".format(
        executable_source_node.abspath(), self.destination_node.abspath()))

    if 'darwin' in self.bld.platform:
        run_xcode_build(self.bld, self.task_gen_name, self.destination_node)

    self.process_executable()
    self.process_resources()
    self.process_assets()
def pre_recurse(self, node):
    wscript_module = Context.load_module(node.abspath())
    group_name = wscript_module.APPNAME
    self.stack.append(TestScope(self, group_name, self.defaults()))
    self.max_depth = max(self.max_depth, len(self.stack) - 1)

    bld_dir = node.get_bld().parent
    if hasattr(wscript_module, 'test'):
        self.original_dir = os.getcwd()
        Logs.info("Waf: Entering directory `%s'", bld_dir)
        os.chdir(str(bld_dir))

        if not self.env.NO_COVERAGE and str(node.parent) == Context.top_dir:
            self.clear_coverage()

        Logs.info('')
        self.log_good('=' * 10, 'Running %s tests\n', group_name)
        super(TestContext, self).pre_recurse(node)
def parse_cds_libs(tgen):
    # Here, we check if all the libraries given in CDS_LIBS
    # and all the include paths defined in CDS_LIB_INCLUDES
    # exist and merge them into CDS_LIBS_FLAT.
    found_absolute_path = False
    try:
        for key, value in tgen.bld.env['CDS_LIBS'].iteritems():
            tgen.bld.env['CDS_LIBS_FLAT'][key] = value
            if os.path.isabs(value):
                found_absolute_path = True
                if not tgen.bld.root.find_dir(value):
                    tgen.bld.fatal('Cadence library ' + key + ' not found in ' + value + '.')
            else:
                if not tgen.path.find_dir(value):
                    tgen.bld.fatal('Cadence library ' + key + ' not found in ' + value + '.')
        Logs.info('Checking for environment variable CDS_LIBS...Found ' + str(len(tgen.bld.env['CDS_LIBS_FLAT'])) + ' libraries.')
    except AttributeError:
        Logs.warn('Checking for environment variable CDS_LIBS...Found None')
def run(self):
    """Creates a command line processed by ``Utils.subprocess.Popen`` in order to build the
    sphinx documentation. See :numref:`confpy-to-docs` for a simplified representation"""
    verbosity = ""
    if Logs.verbose:
        verbosity = "-" + Logs.verbose * "v"
    cmd = " ".join([
        "${SPHINX_BUILD}",
        "-b ${BUILDERNAME}",
        "-c ${CONFDIR}",
        "-D ${VERSION}",
        "-D ${RELEASE}",
        "-D graphviz_dot=${DOT}",
        "-d ${DOCTREEDIR}",
        "${SRCDIR}",
        "${OUTDIR}",
        verbosity,
    ])
    cmd = " ".join(cmd.split())
    cmd = Utils.subst_vars(cmd, self.env)
    Logs.info(cmd)

    env = self.env.env or None
    cwd = self.generator.bld.path.get_bld().abspath()
    proc = Utils.subprocess.Popen(
        cmd,
        stdin=Utils.subprocess.PIPE,
        stdout=Utils.subprocess.PIPE,
        stderr=Utils.subprocess.PIPE,
        env=env,
        cwd=cwd,
    )
    std_out, std_err = proc.communicate()
    std_out = std_out.decode(errors="ignore")
    std_err = std_err.decode(errors="ignore")
    ret = getattr(self, "check_output_" + self.env.BUILDERNAME)(std_out, std_err)
    if ret:
        self.generator.bld.fatal(f"Could not build {self.env.BUILDERNAME}")
def set_cmd_coordinator(conf, coordinator_name):
    if coordinator_name == 'Local':
        conf.is_build_master = True
        conf.cmd_coordinator = Local_CommandCoordinator()
    elif coordinator_name == 'IB':
        jobs_backup = conf.jobs
        conf.options.jobs = int(conf.options.incredibuild_max_cores) + conf.options.jobs
        conf.jobs = conf.options.jobs

        # If a multi core licence is available, run IB as build master
        run_ib_as_service = conf.is_option_true('run_ib_as_service')
        is_recursive_ib_instance = conf.is_option_true('internal_dont_check_recursive_execution')

        if not is_recursive_ib_instance:
            if run_ib_as_service and "Cores" in get_ib_licence_string():
                Logs.warn('Incredibuild multicore licence detected. Consider disabling "run_ib_as_service" for faster build times.')

            conf.is_build_master = run_ib_as_service
            if not execute_waf_via_ib(conf):
                conf.is_build_master = True
                conf.options.jobs = jobs_backup
                conf.jobs = jobs_backup
                return

            if run_ib_as_service:
                conf.cmd_coordinator = IB_CommandCoordinator_Client(conf)
        elif run_ib_as_service:
            Logs.info("[WAF] Run Incredibuild as a service")
            conf.is_build_master = False
            conf.cmd_coordinator = IB_CommandCoordinator_Server()
            conf.cmd_coordinator.enter_command_loop(conf)
        else:
            conf.is_build_master = True
def execute_build(self):
    Logs.info("Waf: Entering directory `%s'", self.variant_dir)
    self.recurse([self.run_dir])
    self.pre_build()

    self.timer = Utils.Timer()
    try:
        self.compile()
    finally:
        if self.progress_bar == 1 and sys.stderr.isatty():
            c = self.producer.processed or 1
            m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL)
            Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2': Logs.colors.cursor_on})
        Logs.info("Waf: Leaving directory `%s'", self.variant_dir)

    try:
        self.producer.bld = None
        del self.producer
    except AttributeError:
        pass

    self.post_build()
def __python_run(self):
    "Installs python modules"
    res = self.__currentlist()
    itms = self.reqs('python', runtimeonly=self.rtime)
    if 'cpp' in self.reqs:
        itms.update(
            (i.replace('python_', ''), j)
            for i, j in self.reqs('cpp', runtimeonly=self.rtime).items()
            if i.startswith('python_') or i == 'gtest')

    boost = Boost.getlibs()
    if boost[0]:
        itms["boost"] = boost[-1]

    if len(self.packages):
        itms = {i: j for i, j in itms.items() if i in self.packages}

    for name, version in itms.items():
        req = self.reqs.pinned('python', name)
        Logs.info("checking: %s%s%s%s",
                  name,
                  '=' if self.__ismin(name) else '>=',
                  version,
                  '=' + req if isinstance(req, str) else '')
        if self.__isgood(name, version, res):
            continue

        if self.__condaupdate(res, name, version):
            continue

        if (res.get(name, (0, 0))[1] != '<pip>' and self.__condainstall(name, version)):
            continue

        cmd = [self.__pip(), 'install']
        if version is None:
            cmd += [name]
        elif self.__ismin(name):
            cmd += ["%s==%s" % (name, version)]
        else:
            cmd += ["%s>=%s" % (name, version)]
        subprocess.check_call(cmd)
def compile(self):
    if not self.files:
        Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
        BuildContext.compile(self)
        return

    targets = None
    if self.targets and self.targets != '*':
        targets = self.targets.split(',')

    for g in self.groups:
        for tg in g:
            if targets and tg.name not in targets:
                continue
            try:
                f = tg.post
            except AttributeError:
                pass
            else:
                f()

        for pat in self.files.split(','):
            matcher = self.get_matcher(pat)
            for tg in g:
                if isinstance(tg, Task.TaskBase):
                    lst = [tg]
                else:
                    lst = tg.tasks
                for tsk in lst:
                    do_exec = False
                    for node in getattr(tsk, 'inputs', []):
                        if matcher(node, output=False):
                            do_exec = True
                            break
                    for node in getattr(tsk, 'outputs', []):
                        if matcher(node, output=True):
                            do_exec = True
                            break
                    if do_exec:
                        ret = tsk.run()
                        Logs.info('%s -> exit %r' % (str(tsk), ret))
def create_package_task(self, **kw):
    executable_name = kw.get('target', None)
    task_gen_name = kw.get('task_gen_name', executable_name)

    Logs.debug('package: create_package_task {}'.format(task_gen_name))

    kw['bld'] = self  # Needed for when we build the task

    executable_task_gen = self.get_tgen_by_name(task_gen_name)

    if executable_task_gen and getattr(executable_task_gen, 'output_folder', None):
        executable_source_node = self.srcnode.make_node(executable_task_gen.output_folder)
    else:
        executable_source_node = self.srcnode.make_node(self.get_output_folders(self.platform, self.config)[0].name)

    destination_node = getattr(self, 'destination', None)
    if not destination_node:
        destination_node = executable_source_node

    executable_dest_node = get_path_to_executable_package_location(self.platform, executable_name, destination_node)
    executable_source_node = executable_source_node.make_node(executable_name)

    if os.path.exists(executable_source_node.abspath()):
        new_task = package_task(env=self.env, **kw)
        new_task.set_inputs(executable_source_node)
        new_task.set_outputs(executable_dest_node.make_node(executable_name))
        self.add_to_group(new_task, 'packaging')
    else:
        if os.path.exists(executable_dest_node.make_node(executable_name).abspath()):
            Logs.info("Final package output already exists, skipping packaging of %s" % executable_source_node.abspath())
        else:
            Logs.warn("[WARNING] Source executable %s does not exist and final package artifact does not exist either. Did you run the build command before the package command?" % executable_source_node.abspath())
def execute_build(self):
    """
    Execute the build by:

    * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
    * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
    * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
    * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
    """
    Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
    self.recurse([self.run_dir])
    self.pre_build()

    # display the time elapsed in the progress bar
    self.timer = Utils.Timer()

    try:
        self.compile()
    finally:
        if self.progress_bar == 1 and sys.stderr.isatty():
            c = len(self.returned_tasks) or 1
            m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL)
            Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2': Logs.colors.cursor_on})
        Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
    self.post_build()
def run_unittest_launcher_for_win_x64(ctx, game_project_name):
    """
    Helper context function to execute the unit test launcher for a specific game project

    :param ctx:                 Context
    :param game_project_name:   The current project name (extracted from bootstrap.cfg)
    """
    output_folder = ctx.get_output_folders(ctx.platform, ctx.config)[0]
    current_project_launcher = ctx.env['cprogram_PATTERN'] % '{}Launcher'.format(game_project_name)
    current_project_unittest_launcher_fullpath = os.path.join(output_folder.abspath(), current_project_launcher)

    if not os.path.isfile(current_project_unittest_launcher_fullpath):
        raise Errors.WafError("Unable to launch unit tests for project '{}'. Cannot find launcher file '{}'. Make sure the project has been built successfully.".format(game_project_name, current_project_unittest_launcher_fullpath))

    Logs.info('[WAF] Running unit tests for {}'.format(game_project_name))

    try:
        call_args = [current_project_unittest_launcher_fullpath]

        # Grab any optional arguments
        auto_launch_unit_test_arguments = ctx.get_settings_value('auto_launch_unit_test_arguments')
        if auto_launch_unit_test_arguments:
            call_args.extend(auto_launch_unit_test_arguments.split(' '))

        result_code = subprocess.call(call_args)
    except Exception as e:
        raise Errors.WafError("Error executing unit tests for '{}': {}".format(game_project_name, e))

    if result_code != 0:
        raise Errors.WafError("Unit tests for '{}' failed. Return code {}".format(game_project_name, result_code))
    else:
        Logs.info('[WAF] Unit tests for {} completed successfully'.format(game_project_name))
def run(self):
    """
    Execute the test. The execution is always successful, and the results
    are stored on ``self.generator.bld.utest_results`` for postprocessing.

    Override ``add_test_results`` to interrupt the build
    """
    filename = self.inputs[0].abspath()
    output_filename = path.splitext(filename)[0] + '_result.xml'

    self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
    self.ut_exec += ['--gtest_output=xml:{}'.format(output_filename)]
    if getattr(self.generator, 'ut_fun', None):
        self.generator.ut_fun(self)

    cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()

    testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
    if testcmd:
        self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')

    Logs.info(str(self.ut_exec))
    proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(),
                                  stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
    (stdout, stderr) = proc.communicate()

    # read result from output_filename
    test_results = ElementTree.parse(output_filename).getroot()

    tup = (filename, proc.returncode, stdout, stderr, test_results)
    testlock.acquire()
    try:
        return self.generator.add_test_results(tup)
    finally:
        testlock.release()
def symbols_whyneeded(task):
    """check why 'target' needs to link to 'subsystem'"""
    bld = task.env.bld
    tgt_list = get_tgt_list(bld)

    why = Options.options.WHYNEEDED.split(":")
    if len(why) != 2:
        raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
    target = why[0]
    subsystem = why[1]

    build_symbol_sets(bld, tgt_list)
    build_library_names(bld, tgt_list)
    build_syslib_sets(bld, tgt_list)

    Logs.info("Checking why %s needs to link to %s" % (target, subsystem))
    if target not in bld.env.used_symbols:
        Logs.warn("unable to find target '%s' in used_symbols dict" % target)
        return
    if subsystem not in bld.env.public_symbols:
        Logs.warn("unable to find subsystem '%s' in public_symbols dict" % subsystem)
        return

    overlap = bld.env.used_symbols[target].intersection(bld.env.public_symbols[subsystem])
    if not overlap:
        Logs.info("target '%s' doesn't use any public symbols from '%s'" % (target, subsystem))
    else:
        Logs.info("target '%s' uses symbols %s from '%s'" % (target, overlap, subsystem))
def install_pyfile(self, node, install_from=None):
    from_node = install_from or node.parent
    tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
    path = tsk.get_install_path()

    if self.bld.is_install < 0:
        Logs.info("+ removing byte compiled python files")
        for x in 'co':
            try:
                os.remove(path + x)
            except OSError:
                pass

    if self.bld.is_install > 0:
        try:
            st1 = os.stat(path)
        except OSError:
            Logs.error('The python file is missing, this should not happen')

        for x in ['c', 'o']:
            do_inst = self.env['PY' + x.upper()]
            try:
                st2 = os.stat(path + x)
            except OSError:
                pass
            else:
                if st1.st_mtime <= st2.st_mtime:
                    do_inst = False

            if do_inst:
                lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
                (a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
                argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
                Logs.info('+ byte compiling %r' % (path + x))
                env = self.env.env or None
                ret = Utils.subprocess.Popen(argv, env=env).wait()
                if ret:
                    raise Errors.WafError('py%s compilation failed %r' % (x, path))
def INSTALL_DIR(bld, path, chmod=0o755, env=None):
    """Install a directory if it doesn't exist, always set permissions."""
    if not path:
        return []

    destpath = bld.EXPAND_VARIABLES(path)
    if Options.options.destdir:
        destpath = os.path.join(Options.options.destdir, destpath.lstrip(os.sep))

    if bld.is_install > 0:
        if not os.path.isdir(destpath):
            try:
                Logs.info('* create %s', destpath)
                os.makedirs(destpath)
                os.chmod(destpath, chmod)
            except OSError as e:
                if not os.path.isdir(destpath):
                    raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
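# --- Usage sketch (not part of the function above) ---
# In samba-style build rules this helper is typically bound to the build context,
# so wscripts call it as a method. The binding and the paths below are assumptions.
def build(bld):
    # create runtime directories during 'waf install', honouring --destdir
    bld.INSTALL_DIR('${BINDIR}')
    bld.INSTALL_DIR('${LOCALSTATEDIR}/cache/myapp', chmod=0o770)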
def archive(self):
    import tarfile

    arch_name = self.get_arch_name()

    try:
        self.base_path
    except AttributeError:
        self.base_path = self.path

    node = self.base_path.make_node(arch_name)
    try:
        node.delete()
    except Exception:
        pass

    files = self.get_files()

    if self.algo.startswith('tar.'):
        tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))

        for x in files:
            self.add_tar_file(x, tar)
        tar.close()
    elif self.algo == 'zip':
        import zipfile
        zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)

        for x in files:
            archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
            zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
        zip.close()
    else:
        self.fatal('Valid algo types are tar.bz2, tar.gz or zip')

    try:
        from hashlib import sha1 as sha
    except ImportError:
        from sha import sha

    try:
        digest = " (sha=%r)" % sha(node.read()).hexdigest()
    except Exception:
        digest = ''

    Logs.info('New archive created: %s%s' % (self.arch_name, digest))
def write_compilation_database(ctx):
    "Write the clang compilation database as JSON"
    database_file = ctx.bldnode.make_node("compile_commands.json")
    Logs.info("Build commands will be stored in %s", database_file.path_from(ctx.path))
    try:
        root = json.load(database_file)
    except OSError:
        root = []
    clang_db = {x["file"]: x for x in root}
    for task in getattr(ctx, "clang_compilation_database_tasks", []):
        try:
            cmd = task.last_cmd
        except AttributeError:
            continue
        directory = getattr(task, "cwd", ctx.variant_dir)
        f_node = task.inputs[0]
        filename = os.path.relpath(f_node.abspath(), directory)
        entry = {"directory": directory, "arguments": cmd, "file": filename}
        clang_db[filename] = entry
    root = list(clang_db.values())
    database_file.write(json.dumps(root, indent=2))