Example #1
def apply_copy(self):
    Utils.def_attrs(self, fun=copy_func)
    self.default_install_path = 0

    lst = self.to_list(self.source)
    self.meths.remove("process_source")

    for filename in lst:
        node = self.path.find_resource(filename)
        if not node:
            raise Errors.WafError("cannot find input file %s for processing" % filename)

        target = self.target
        if not target or len(lst) > 1:
            target = node.name

        # TODO the file path may be incorrect
        newnode = self.path.find_or_declare(target)

        tsk = self.create_task("copy", node, newnode)
        tsk.fun = self.fun
        tsk.chmod = getattr(self, "chmod", Utils.O644)

        if not tsk.env:
            tsk.debug()
            raise Errors.WafError("task without an environment")
Example #2
	def hash_env_vars(self, env, vars_lst):
		"""
		Hashes configuration set variables::

			def build(bld):
				bld.hash_env_vars(bld.env, ['CXX', 'CC'])

		This method uses an internal cache.

		:param env: Configuration Set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param vars_lst: list of variables
		:type vars_lst: list of string
		"""

		if not env.table:
			env = env.parent
			if not env:
				return Utils.SIG_NIL

		idx = str(id(env)) + str(vars_lst)
		try:
			cache = self.cache_env
		except AttributeError:
			cache = self.cache_env = {}
		else:
			try:
				return self.cache_env[idx]
			except KeyError:
				pass

		lst = [env[a] for a in vars_lst]
		cache[idx] = ret = Utils.h_list(lst)
		Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
		return ret
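
A minimal usage sketch of hash_env_vars, assuming a regular wscript build function; the variable names are illustrative only:

from waflib import Utils

def build(bld):
    # hash the listed ConfigSet variables; repeated calls with the same
    # (env, vars) pair are served from the internal cache_env dictionary
    sig = bld.hash_env_vars(bld.env, ['CXX', 'CC'])
    print(Utils.to_hex(sig))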
Example #3
 def put_files_cache(self):
     if getattr(self, "cached", None):
         return None
     sig = self.signature()
     ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
     dname = os.path.join(self.generator.bld.cache_global, ssig)
     tmpdir = tempfile.mkdtemp(prefix=self.generator.bld.cache_global + os.sep + "waf")
     try:
         shutil.rmtree(dname)
     except:
         pass
     try:
         for node in self.outputs:
             dest = os.path.join(tmpdir, node.name)
             shutil.copy2(node.abspath(), dest)
     except (OSError, IOError):
         try:
             shutil.rmtree(tmpdir)
         except:
             pass
     else:
         try:
             os.rename(tmpdir, dname)
         except OSError:
             try:
                 shutil.rmtree(tmpdir)
             except:
                 pass
         else:
             try:
                 os.chmod(dname, Utils.O755)
             except:
                 pass
Example #4
	def __init__(cls, name, bases, dict):
		super(store_task_type, cls).__init__(name, bases, dict)
		name = cls.__name__

		if name != 'evil' and name != 'Task':
			if getattr(cls, 'run_str', None):
				# if a string is provided, convert it to a method
				(f, dvars) = compile_fun(cls.run_str, cls.shell)
				cls.hcode = Utils.h_cmd(cls.run_str)
				cls.orig_run_str = cls.run_str
				# change the name of run_str or it is impossible to subclass with a function
				cls.run_str = None
				cls.run = f
				# process variables
				cls.vars = list(set(cls.vars + dvars))
				cls.vars.sort()
				if cls.vars:
					fun = compile_sig_vars(cls.vars)
					if fun:
						cls.sig_vars = fun
			elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__:
				# getattr(cls, 'hcode') would look in the upper classes
				cls.hcode = Utils.h_cmd(cls.run)

			# be creative
			getattr(cls, 'register', classes)[name] = cls
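
A hedged illustration of what this metaclass consumes: a task class declared with run_str, which the code above compiles into a run() method and hashes into hcode. The class name and command are examples only:

from waflib import Task

class copyfile(Task.Task):
    # the metaclass turns this command template into a generated run() method
    run_str = 'cp ${SRC} ${TGT}'
    color = 'BLUE'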
Example #5
	def store(self):
		"""
		Store data for next runs, set the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary
		file to avoid problems on ctrl+c.
		"""
		data = {}
		for x in SAVED_ATTRS:
			data[x] = getattr(self, x)
		db = os.path.join(self.variant_dir, Context.DBFILE)

		try:
			Node.pickle_lock.acquire()
			Node.Nod3 = self.node_class
			x = cPickle.dumps(data, PROTOCOL)
		finally:
			Node.pickle_lock.release()

		Utils.writef(db + '.tmp', x, m='wb')

		try:
			st = os.stat(db)
			os.remove(db)
			if not Utils.is_win32: # win32 has no chown but we're paranoid
				os.chown(db + '.tmp', st.st_uid, st.st_gid)
		except (AttributeError, OSError):
			pass

		# do not use shutil.move (copy is not thread-safe)
		os.rename(db + '.tmp', db)
Example #6
 def process(self):
     m = self.master
     if m.stop:
         m.out.put(self)
         return
     try:
         del self.generator.bld.task_sigs[self.uid()]
     except:
         pass
     try:
         self.generator.bld.returned_tasks.append(self)
         self.log_display(self.generator.bld)
         ret = self.run()
     except Exception:
         self.err_msg = Utils.ex_stack()
         self.hasrun = EXCEPTION
         m.error_handler(self)
         m.out.put(self)
         return
     if ret:
         self.err_code = ret
         self.hasrun = CRASHED
     else:
         try:
             self.post_run()
         except Errors.WafError:
             pass
         except Exception:
             self.err_msg = Utils.ex_stack()
             self.hasrun = EXCEPTION
         else:
             self.hasrun = SUCCESS
     if self.hasrun != SUCCESS:
         m.error_handler(self)
     m.out.put(self)
Example #7
 def can_retrieve_cache(self):
     if not getattr(self, "outputs", None):
         return None
     sig = self.signature()
     ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
     dname = os.path.join(self.generator.bld.cache_global, ssig)
     try:
         t1 = os.stat(dname).st_mtime
     except OSError:
         return None
     for node in self.outputs:
         orig = os.path.join(dname, node.name)
         try:
             shutil.copy2(orig, node.abspath())
             os.utime(orig, None)
         except (OSError, IOError):
             Logs.debug("task: failed retrieving file")
             return None
     try:
         t2 = os.stat(dname).st_mtime
     except OSError:
         return None
     if t1 != t2:
         return None
     for node in self.outputs:
         node.sig = sig
         if self.generator.bld.progress_bar < 1:
             self.generator.bld.to_log("restoring from cache %r\n" % node.abspath())
     self.cached = True
     return True
Example #8
	def execute(self):
		if not Configure.autoconfig:
			return execute_method(self)

		env = ConfigSet.ConfigSet()
		do_config = False
		try:
			env.load(os.path.join(Context.top_dir, Options.lockfile))
		except Exception:
			Logs.warn('Configuring the project')
			do_config = True
		else:
			if env.run_dir != Context.run_dir:
				do_config = True
			else:
				h = 0
				for f in env['files']:
					h = Utils.h_list((h, Utils.readf(f, 'rb')))
				do_config = h != env.hash

		if do_config:
			Options.commands.insert(0, self.cmd)
			Options.commands.insert(0, 'configure')
			if Configure.autoconfig == 'clobber':
				Options.options.__dict__ = env.options
			return

		return execute_method(self)
Example #9
def rsync_and_ssh(task):

	# remove a warning
	task.uid_ = id(task)

	bld = task.generator.bld

	task.env.user, _, _ = task.env.login.partition('@')
	task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
	task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
	task.env.local_dir = bld.srcnode.abspath() + '/'

	task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
	task.env.build_dir = bld.bldnode.abspath()

	ret = task.exec_command(bld.make_mkdir_command(task))
	if ret:
		return ret
	ret = task.exec_command(bld.make_send_command(task))
	if ret:
		return ret
	ret = task.exec_command(bld.make_exec_command(task))
	if ret:
		return ret
	ret = task.exec_command(bld.make_save_command(task))
	if ret:
		return ret
Example #10
def download_tool(tool, force=False, ctx=None):
	"""
	Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::

		$ waf configure --download
	"""
	for x in Utils.to_list(Context.remote_repo):
		for sub in Utils.to_list(Context.remote_locs):
			url = '/'.join((x, sub, tool + '.py'))
			try:
				web = urlopen(url)
				if web.getcode() != 200:
					continue
			except Exception as e:
				# on python3 urlopen throws an exception
				continue
			else:
				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
				tmp.write(web.read())
				Logs.warn('Downloaded %s from %s' % (tool, url))
				download_check(tmp)
				try:
					module = Context.load_tool(tool)
				except:
					Logs.warn('The tool %s from %s is unusable' % (tool, url))
					try:
						tmp.delete()
					except:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
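
A hedged configure-time sketch for the function above; the tool name 'compat15' is only an example and assumes the remote repository is reachable:

def configure(conf):
    # downloads <tool>.py into waflib/extras and imports it;
    # raises Errors.WafError when no remote location provides the tool
    module = download_tool('compat15', ctx=conf)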
Example #11
def set_qt4_libs_to_check(self):
	if not hasattr(self,'qt4_vars'):
		self.qt4_vars=QT4_LIBS
	self.qt4_vars=Utils.to_list(self.qt4_vars)
	if not hasattr(self,'qt4_vars_debug'):
		self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars]
	self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug)
Example #12
	def store(self, filename):
		"""
		Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.

		:param filename: file to use
		:type filename: string
		"""
		try:
			os.makedirs(os.path.split(filename)[0])
		except OSError:
			pass

		buf = []
		merged_table = self.get_merged_dict()
		keys = list(merged_table.keys())
		keys.sort()

		try:
			fun = ascii
		except NameError:
			fun = repr

		for k in keys:
			if k != 'undo_stack':
				buf.append('%s = %s\n' % (k, fun(merged_table[k])))
		Utils.writef(filename, ''.join(buf))
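
A short round-trip sketch for store(), assuming the usual load() counterpart of waflib.ConfigSet; the file path is illustrative:

from waflib import ConfigSet

env = ConfigSet.ConfigSet()
env.CFLAGS = ['-O2', '-Wall']
env.store('/tmp/example_cache.py')      # one 'KEY = repr(value)' line per key

restored = ConfigSet.ConfigSet()
restored.load('/tmp/example_cache.py')
assert restored.CFLAGS == ['-O2', '-Wall']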
Example #13
def declare_chain(
    name="",
    rule=None,
    reentrant=True,
    color="BLUE",
    ext_in=[],
    ext_out=[],
    before=[],
    after=[],
    decider=None,
    scan=None,
):
    ext_in = Utils.to_list(ext_in)
    ext_out = Utils.to_list(ext_out)
    cls = Task.task_factory(
        name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan
    )

    def x_file(self, node):
        ext = decider and decider(self, node) or cls.ext_out
        if ext_in:
            _ext_in = ext_in[0]
        out_source = [node.change_ext(x, ext_in=_ext_in) for x in ext]
        if reentrant:
            for i in range(reentrant):
                self.source.append(out_source[i])
        tsk = self.create_task(name, node, out_source)

    for x in cls.ext_in:
        task_gen.mappings[x] = x_file
    return x_file
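
A hedged wscript sketch of the classic declare_chain pattern (here via waflib.TaskGen, where the function normally lives); the extensions and rule are illustrative:

from waflib import TaskGen

# every .txt source is copied to a .out file; with reentrant=True the
# generated files would be fed back into self.source for further processing
TaskGen.declare_chain(
    name='txt2out',
    rule='cp ${SRC} ${TGT}',
    ext_in='.txt',
    ext_out='.out',
    reentrant=False,
)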
Example #14
def post_check(self,*k,**kw):
	is_success=0
	if kw['execute']:
		if kw['success']is not None:
			if kw.get('define_ret',False):
				is_success=kw['success']
			else:
				is_success=(kw['success']==0)
	else:
		is_success=(kw['success']==0)
	if'define_name'in kw:
		if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
			if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str):
				self.define(kw['define_name'],is_success,quote=kw.get('quote',1))
			else:
				self.define_cond(kw['define_name'],is_success)
		else:
			self.define_cond(kw['define_name'],is_success)
	if'header_name'in kw:
		if kw.get('auto_add_header_name',False):
			self.env.append_value(INCKEYS,Utils.to_list(kw['header_name']))
	if is_success and'uselib_store'in kw:
		from waflib.Tools import ccroot
		_vars=set([])
		for x in kw['features']:
			if x in ccroot.USELIB_VARS:
				_vars|=ccroot.USELIB_VARS[x]
		for k in _vars:
			lk=k.lower()
			if lk in kw:
				val=kw[lk]
				if isinstance(val,str):
					val=val.rstrip(os.path.sep)
				self.env.append_unique(k+'_'+kw['uselib_store'],Utils.to_list(val))
	return is_success
Example #15
def set_qt5_libs_to_check(self):
    if not hasattr(self, "qt5_vars"):
        self.qt5_vars = QT5_LIBS
    self.qt5_vars = Utils.to_list(self.qt5_vars)
    if not hasattr(self, "qt5_vars_debug"):
        self.qt5_vars_debug = [a + "_debug" for a in self.qt5_vars]
    self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
Example #16
def download_tool(tool,force=False,ctx=None):
	for x in Utils.to_list(Context.remote_repo):
		for sub in Utils.to_list(Context.remote_locs):
			url='/'.join((x,sub,tool+'.py'))
			try:
				web=urlopen(url)
				try:
					if web.getcode()!=200:
						continue
				except AttributeError:
					pass
			except Exception:
				continue
			else:
				tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
				tmp.write(web.read(),'wb')
				Logs.warn('Downloaded %s from %s'%(tool,url))
				download_check(tmp)
				try:
					module=Context.load_tool(tool)
				except Exception:
					Logs.warn('The tool %s from %s is unusable'%(tool,url))
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
Example #17
def find_file(self,filename,path_list=[]):
	for n in Utils.to_list(filename):
		for d in Utils.to_list(path_list):
			p=os.path.join(d,n)
			if os.path.exists(p):
				return p
	self.fatal('Could not find %r'%filename)
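
A minimal configure-time sketch for find_file; the file name and search directories are only examples:

def configure(conf):
    # returns the first existing candidate, or calls conf.fatal() if none is found
    path = conf.find_file('stdio.h', ['/usr/include', '/usr/local/include'])
    conf.msg('Found stdio.h at', path)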
Example #18
File: c_preproc.py Project: zsx/waf
	def start(self, node, env):
		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

		self.env = env
		bld = node.ctx
		try:
			self.parse_cache = bld.parse_cache
		except AttributeError:
			bld.parse_cache = {}
			self.parse_cache = bld.parse_cache

		self.addlines(node)

		# macros may be defined on the command-line, so they must be parsed as if they were part of the file
		if env['DEFINES']:
			lst = ['%s %s' % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
			self.lines = [('define', x) for x in lst] + self.lines

		while self.lines:
			(kind, line) = self.lines.pop(0)
			if kind == POPFILE:
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind, line)
			except Exception as e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
Example #19
 def do_install(self, src, tgt, chmod=Utils.O644):
     d, _ = os.path.split(tgt)
     if not d:
         raise Errors.WafError("Invalid installation given %r->%r" % (src, tgt))
     Utils.check_dir(d)
     srclbl = src.replace(self.srcnode.abspath() + os.sep, "")
     if not Options.options.force:
         try:
             st1 = os.stat(tgt)
             st2 = os.stat(src)
         except OSError:
             pass
         else:
             if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                 if not self.progress_bar:
                     Logs.info("- install %s (from %s)" % (tgt, srclbl))
                 return False
     if not self.progress_bar:
         Logs.info("+ install %s (from %s)" % (tgt, srclbl))
     try:
         os.remove(tgt)
     except OSError:
         pass
     try:
         shutil.copy2(src, tgt)
         os.chmod(tgt, chmod)
     except IOError:
         try:
             os.stat(src)
         except (OSError, IOError):
             Logs.error("File %r does not exist" % src)
         raise Errors.WafError("Could not install the file %r" % tgt)
Example #20
def xcheck_host_envar(conf, name, wafname=None):
	wafname = wafname or name

	chost, chost_envar = get_chost_stuff(conf)

	specific = None
	if chost:
		specific = os.environ.get('%s_%s' % (chost_envar, name), None)

	if specific:
		value = Utils.to_list(specific)
		conf.env[wafname] += value
		conf.msg('Will use cross-compilation %s from %s_%s' \
		 % (name, chost_envar, name),
		 " ".join(quote(x) for x in value))
		return


	envar = os.environ.get('HOST_%s' % name, None)
	if envar is None:
		return

	value = Utils.to_list(envar) if envar != '' else [envar]

	conf.env[wafname] = value
	conf.msg('Will use cross-compilation %s from HOST_%s' \
	 % (name, name),
	 " ".join(quote(x) for x in value))
Example #21
def apply_java(self):
	Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[])
	nodes_lst=[]
	outdir=getattr(self,'outdir',None)
	if outdir:
		if not isinstance(outdir,Node.Node):
			outdir=self.path.get_bld().make_node(self.outdir)
	else:
		outdir=self.path.get_bld()
	outdir.mkdir()
	self.env['OUTDIR']=outdir.abspath()
	self.javac_task=tsk=self.create_task('javac')
	tmp=[]
	srcdir=getattr(self,'srcdir','')
	if isinstance(srcdir,Node.Node):
		srcdir=[srcdir]
	for x in Utils.to_list(srcdir):
		if isinstance(x,Node.Node):
			y=x
		else:
			y=self.path.find_dir(x)
			if not y:
				self.bld.fatal('Could not find the folder %s from %s'%(x,self.path))
		tmp.append(y)
	tsk.srcdir=tmp
	if getattr(self,'compat',None):
		tsk.env.append_value('JAVACFLAGS',['-source',self.compat])
	if hasattr(self,'sourcepath'):
		fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)]
		names=os.pathsep.join([x.srcpath()for x in fold])
	else:
		names=[x.srcpath()for x in tsk.srcdir]
	if names:
		tsk.env.append_value('JAVACFLAGS',['-sourcepath',names])
Example #22
def __boost_get_libs_path(self, *k, **kw):
	''' return the lib path and all the files in it '''
	if 'files' in kw:
		return self.root.find_dir('.'), Utils.to_list(kw['files'])
	libs = k and k[0] or kw.get('libs', None)
	if libs:
		path = self.root.find_dir(libs)
		files = path.ant_glob('*boost_*')
	if not libs or not files:
		for d in Utils.to_list(self.environ.get('LIB', [])) + BOOST_LIBS:
			path = self.root.find_dir(d)
			if path:
				files = path.ant_glob('*boost_*')
				if files:
					break
			path = self.root.find_dir(d + '64')
			if path:
				files = path.ant_glob('*boost_*')
				if files:
					break
	if not path:
		if libs:
			self.end_msg('libs not found in %s' % libs)
			self.fatal('The configuration failed')
		else:
			self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
			self.fatal('The configuration failed')

	self.to_log('Found the boost path in %r with the libraries:' % path)
	for x in files:
		self.to_log('    %r' % x)
	return path, files
Example #23
	def load(self,input,tooldir=None,funs=None,download=True):
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			mag=(tool,id(self.env),funs)
			if mag in self.tool_cache:
				self.to_log('(tool %s is already loaded, skipping)'%tool)
				continue
			self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir)
			except ImportError as e:
				if Options.options.download:
					module=download_tool(tool,ctx=self)
					if not module:
						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
				else:
					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
			if funs is not None:
				self.eval_rules(funs)
			else:
				func=getattr(module,'configure',None)
				if func:
					if type(func)is type(Utils.readf):func(self)
					else:self.eval_rules(func)
			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
Example #24
def apply_haxe(self):
    Utils.def_attrs(self,
        target="", classpath="", flags="", libs="", swflib=None);

    classpath = Utils.to_list(self.classpath)
    flags = Utils.to_list(self.flags)
    target = self.target;

    inputs = []

    if target.endswith(".swf"):
        flags += ["-swf", target, "--flash-strict", "-D", "nativeTrace"]
        if (self.swflib is not None):
            swflib = self.path.get_bld().make_node(self.swflib)
            inputs += [swflib]
            flags += ["-swf-lib", str(swflib)]
    elif target.endswith(".js"):
        flags += ["-js", target]
    elif target.endswith(".n"):
        flags += ["-neko", target]
    else:
        raise "Unsupported target file type!"

    for lib in Utils.to_list(self.libs):
        flags += ["-lib", lib]

    task = self.create_task("haxe", inputs, self.path.get_bld().make_node(target))
    task.classpath = [self.path.find_node(cp) for cp in classpath]
    task.env.flags = flags
    self.haxe_task = task
Example #25
def process_py(self,node):
	assert(node.get_bld_sig())
	assert(getattr(self,'install_path')),'add features="py"'
	if self.install_path:
		if self.install_from:
			self.bld.install_files(self.install_path,[node],cwd=self.install_from,relative_trick=True)
		else:
			self.bld.install_files(self.install_path,[node],relative_trick=True)
	lst=[]
	if self.env.PYC:
		lst.append('pyc')
	if self.env.PYO:
		lst.append('pyo')
	if self.install_path:
		if self.install_from:
			pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env)
		else:
			pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env)
	else:
		pyd=node.abspath()
	for ext in lst:
		if self.env.PYTAG:
			name=node.name[:-3]
			pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext))
			pyobj.parent.mkdir()
		else:
			pyobj=node.change_ext(".%s"%ext)
		tsk=self.create_task(ext,node,pyobj)
		tsk.pyd=pyd
		if self.install_path:
			self.bld.install_files(os.path.dirname(pyd),pyobj,cwd=node.parent.get_bld(),relative_trick=True)
Example #26
def apply_java(self):
    Utils.def_attrs(
        self, jarname="", classpath="", sourcepath=".", srcdir=".", jar_mf_attributes={}, jar_mf_classpath=[]
    )
    nodes_lst = []
    if not self.classpath:
        if not self.env["CLASSPATH"]:
            self.env["CLASSPATH"] = ".." + os.pathsep + "."
    else:
        self.env["CLASSPATH"] = self.classpath
    if isinstance(self.srcdir, self.path.__class__):
        srcdir_node = self.srcdir
    else:
        srcdir_node = self.path.find_dir(self.srcdir)
    if not srcdir_node:
        raise Errors.WafError("could not find srcdir %r" % self.srcdir)
    self.env["OUTDIR"] = [srcdir_node.get_src().srcpath()]
    self.javac_task = tsk = self.create_task("javac")
    tsk.srcdir = srcdir_node
    if getattr(self, "compat", None):
        tsk.env.append_value("JAVACFLAGS", ["-source", self.compat])
    if hasattr(self, "sourcepath"):
        fold = [
            isinstance(x, self.path.__class__) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)
        ]
        names = os.pathsep.join([x.srcpath() for x in fold])
    else:
        names = srcdir_node.srcpath()
    if names:
        tsk.env.append_value("JAVACFLAGS", ["-sourcepath", names])
Example #27
def set_qt5_libs_to_check(self):
	self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
	if not self.qt5_vars:
		dirlst = Utils.listdir(self.env.QTLIBS)

		pat = self.env.cxxshlib_PATTERN
		if Utils.is_win32:
			pat = pat.replace('.dll', '.lib')
		if self.environ.get('QT5_FORCE_STATIC'):
			pat = self.env.cxxstlib_PATTERN
		if Utils.unversioned_sys_platform() == 'darwin':
			pat = "%s\.framework"
		re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$')
		for x in dirlst:
			m = re_qt.match(x)
			if m:
				self.qt5_vars.append("Qt5%s" % m.group('name'))
		if not self.qt5_vars:
			self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)

	qtextralibs = getattr(Options.options, 'qtextralibs', None)
	if qtextralibs:
		self.qt5_vars.extend(qtextralibs.split(','))

	if not hasattr(self, 'qt5_vars_debug'):
		self.qt5_vars_debug = [a + '_DEBUG' for a in self.qt5_vars]
	self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
Example #28
	def do_link(self, src, tgt):
		"""
		Create a symlink from tgt to src.

		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_link` to remove the symlink.

		:param src: file name as absolute path
		:type src: string
		:param tgt: file destination, as absolute path
		:type tgt: string
		"""
		d, _ = os.path.split(tgt)
		Utils.check_dir(d)

		link = False
		if not os.path.islink(tgt):
			link = True
		elif os.readlink(tgt) != src:
			link = True

		if link:
			try: os.remove(tgt)
			except OSError: pass
			if not self.progress_bar:
				Logs.info('+ symlink %s (to %s)' % (tgt, src))
			os.symlink(src, tgt)
		else:
			if not self.progress_bar:
				Logs.info('- symlink %s (to %s)' % (tgt, src))
Example #29
def apply_read_format_extensions(self):
    enabled_exts = Utils.to_list(getattr(self, 'enabled_exts', []))
    disabled_exts = Utils.to_list(getattr(self, 'disabled_exts', []))
    read_format = getattr(self, 'read_format', 'markdown')
    read_format += ''.join(('+' + e for e in enabled_exts))
    read_format += ''.join(('-' + e for e in disabled_exts))
    self.read_format = read_format
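
A stand-alone illustration of the string the method above builds (pandoc-style extension syntax); the extension names are hypothetical:

enabled_exts = ['smart', 'footnotes']
disabled_exts = ['raw_html']
read_format = 'markdown'
read_format += ''.join('+' + e for e in enabled_exts)   # enable extensions
read_format += ''.join('-' + e for e in disabled_exts)  # disable extensions
assert read_format == 'markdown+smart+footnotes-raw_html'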
Example #30
	def prepare_env(self,env):
		if not env.PREFIX:
			env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix))
		if not env.BINDIR:
			env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
		if not env.LIBDIR:
			env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env)
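
A small sketch of the Utils.subst_vars expansion that prepare_env relies on; the prefix value is illustrative:

from waflib import Utils, ConfigSet

env = ConfigSet.ConfigSet()
env.PREFIX = '/usr/local'
# ${PREFIX} is expanded against the ConfigSet, exactly as for BINDIR/LIBDIR above
assert Utils.subst_vars('${PREFIX}/bin', env) == '/usr/local/bin'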
Example #31
	def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
		"""
		Run user code from the supplied list of directories.
		The directories can be either absolute, or relative to the directory
		of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse`
		are called immediately before and after a script has been executed.

		:param dirs: List of directories to visit
		:type dirs: list of string or space-separated string
		:param name: Name of function to invoke from the wscript
		:type  name: string
		:param mandatory: whether sub wscript files are required to exist
		:type  mandatory: bool
		:param once: read the script file once for a particular context
		:type once: bool
		"""
		try:
			cache = self.recurse_cache
		except AttributeError:
			cache = self.recurse_cache = {}

		for d in Utils.to_list(dirs):

			if not os.path.isabs(d):
				# absolute paths only
				d = os.path.join(self.path.abspath(), d)

			WSCRIPT     = os.path.join(d, WSCRIPT_FILE)
			WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)

			node = self.root.find_node(WSCRIPT_FUN)
			if node and (not once or node not in cache):
				cache[node] = True
				self.pre_recurse(node)
				try:
					function_code = node.read('rU', encoding)
					exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
				finally:
					self.post_recurse(node)
			elif not node:
				node = self.root.find_node(WSCRIPT)
				tup = (node, name or self.fun)
				if node and (not once or tup not in cache):
					cache[tup] = True
					self.pre_recurse(node)
					try:
						wscript_module = load_module(node.abspath(), encoding=encoding)
						user_function = getattr(wscript_module, (name or self.fun), None)
						if not user_function:
							if not mandatory:
								continue
							raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
						user_function(self)
					finally:
						self.post_recurse(node)
				elif not node:
					if not mandatory:
						continue
					try:
						os.listdir(d)
					except OSError:
						raise Errors.WafError('Cannot read the folder %r' % d)
					raise Errors.WafError('No wscript file in directory %s' % d)
Example #32
    def start(self, node, env):
        """
		Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
		and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.

		:param node: source file
		:type node: :py:class:`waflib.Node.Node`
		:param env: config set containing additional defines to take into account
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		"""

        debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

        bld = node.ctx
        try:
            self.parse_cache = bld.parse_cache
        except AttributeError:
            bld.parse_cache = {}
            self.parse_cache = bld.parse_cache

        self.addlines(node)

        # macros may be defined on the command-line, so they must be parsed as if they were part of the file
        if env['DEFINES']:
            try:
                lst = [
                    '%s %s' % (x[0], trimquotes('='.join(x[1:])))
                    for x in [y.split('=') for y in env['DEFINES']]
                ]
                lst.reverse()
                self.lines.extend([('define', x) for x in lst])
            except AttributeError:
                # if the defines are invalid the compiler will tell the user
                pass

        while self.lines:
            (token, line) = self.lines.pop()
            if token == POPFILE:
                self.count_files -= 1
                self.currentnode_stack.pop()
                continue

            try:
                ve = Logs.verbose
                if ve:
                    debug('preproc: line is %s - %s state is %s', token, line,
                          self.state)
                state = self.state

                # make certain we define the state if we are about to enter in an if block
                if token[:2] == 'if':
                    state.append(undefined)
                elif token == 'endif':
                    state.pop()

                # skip lines when in a dead 'if' branch, wait for the endif
                if token[0] != 'e':
                    if skipped in self.state or ignored in self.state:
                        continue

                if token == 'if':
                    ret = eval_macro(tokenize(line), self.defs)
                    if ret: state[-1] = accepted
                    else: state[-1] = ignored
                elif token == 'ifdef':
                    m = re_mac.match(line)
                    if m and m.group(0) in self.defs: state[-1] = accepted
                    else: state[-1] = ignored
                elif token == 'ifndef':
                    m = re_mac.match(line)
                    if m and m.group(0) in self.defs: state[-1] = ignored
                    else: state[-1] = accepted
                elif token == 'include' or token == 'import':
                    (kind, inc) = extract_include(line, self.defs)
                    if inc in self.ban_includes:
                        continue
                    if token == 'import': self.ban_includes.add(inc)
                    if ve:
                        debug('preproc: include found %s    (%s) ', inc, kind)
                    if kind == '"' or not strict_quotes:
                        self.tryfind(inc)
                elif token == 'elif':
                    if state[-1] == accepted:
                        state[-1] = skipped
                    elif state[-1] == ignored:
                        if eval_macro(tokenize(line), self.defs):
                            state[-1] = accepted
                elif token == 'else':
                    if state[-1] == accepted: state[-1] = skipped
                    elif state[-1] == ignored: state[-1] = accepted
                elif token == 'define':
                    try:
                        self.defs[define_name(line)] = line
                    except Exception:
                        raise PreprocError("Invalid define line %s" % line)
                elif token == 'undef':
                    m = re_mac.match(line)
                    if m and m.group(0) in self.defs:
                        self.defs.__delitem__(m.group(0))
                        #print "undef %s" % name
                elif token == 'pragma':
                    if re_pragma_once.match(line.lower()):
                        self.ban_includes.add(self.curfile)
            except Exception as e:
                if Logs.verbose:
                    debug('preproc: line parsing failed (%s): %s %s', e, line,
                          Utils.ex_stack())
Example #33
 def post_run():
     original_post_run()
     firmware.sig = firmware.cache_sig = Utils.h_file(firmware.abspath())
     elf.sig = elf.cache_sig = Utils.h_file(elf.abspath())
Example #34
def configure(cfg):
    cfg.env.CMAKE_MIN_VERSION = '3.2'
    cfg.load('cmake')

    bldnode = cfg.bldnode.make_node(cfg.variant)
    env = cfg.env

    env.AP_PROGRAM_FEATURES += ['px4_ap_program']

    kw = env.AP_LIBRARIES_OBJECTS_KW
    kw['features'] = Utils.to_list(kw.get('features', [])) + ['px4_ap_library']

    def srcpath(path):
        return cfg.srcnode.make_node(path).abspath()

    def bldpath(path):
        return bldnode.make_node(path).abspath()

    board_name = env.get_flat('PX4_BOARD_NAME')

    # TODO: we should move stuff from mk/PX4 to Tools/ardupilotwaf/px4 after
    # stop using the make-based build system
    env.PX4_ROMFS_SRC = 'mk/PX4/ROMFS'
    env.PX4_ROMFS_BLD = 'px4-extra-files/ROMFS'
    env.PX4_BOOTLOADER = 'mk/PX4/bootloader/%s' % env.PX4_BOOTLOADER_NAME

    env.PX4_ADD_GIT_HASHES = srcpath('Tools/scripts/add_git_hashes.py')
    env.PX4_APM_ROOT = srcpath('')
    env.PX4_ROOT = srcpath('modules/PX4Firmware')
    env.PX4_NUTTX_ROOT = srcpath('modules/PX4NuttX')
    env.PX4_UAVCAN_ROOT = srcpath('modules/uavcan')

    if env.PX4_PX4IO_NAME:
        env.PX4IO_ELF_DEST = 'px4-extra-files/px4io'

    nuttx_config = 'nuttx_%s_apm' % board_name

    env.PX4_CMAKE_VARS = dict(
        CONFIG=nuttx_config,
        CMAKE_MODULE_PATH=srcpath('Tools/ardupilotwaf/px4/cmake'),
        UAVCAN_LIBUAVCAN_PATH=env.PX4_UAVCAN_ROOT,
        NUTTX_SRC=env.PX4_NUTTX_ROOT,
        PX4_NUTTX_ROMFS=bldpath(env.PX4_ROMFS_BLD),
        ARDUPILOT_BUILD='YES',
        EXTRA_CXX_FLAGS=' '.join((
            # NOTE: these "-Wno-error=*" flags should be removed as we update
            # the submodule
            '-Wno-error=double-promotion',
            '-Wno-error=reorder',
            # NOTE: *Temporarily* using this definition so that both
            # PX4Firmware build systems (cmake and legacy make-based) can live
            # together
            '-DCMAKE_BUILD',
            '-DARDUPILOT_BUILD',
            '-I%s' % bldpath('libraries/GCS_MAVLink'),
            '-I%s' % bldpath('libraries/GCS_MAVLink/include/mavlink'),
            '-Wl,--gc-sections',
        )),
        EXTRA_C_FLAGS=' '.join((
            # NOTE: *Temporarily* using this definition so that both
            # PX4Firmware build systems (cmake and legacy make-based) can live
            # together
            '-DCMAKE_BUILD', )),
    )
Example #35
    def execute(self):
        """
        Wraps :py:func:`waflib.Context.Context.execute` on the context class
        """
        if not Configure.autoconfig:
            return execute_method(self)

        # Disable autoconfig so waf's version doesn't run (and don't end up on loop of bad configure)
        Configure.autoconfig = False

        if self.variant == '':
            raise Errors.WafError(
                'The project is badly configured: run "waf configure" again!')

        env = ConfigSet.ConfigSet()
        do_config = False

        try:
            p = os.path.join(Context.out_dir, Build.CACHE_DIR,
                             self.variant + Build.CACHE_SUFFIX)
            env.load(p)
        except EnvironmentError:
            raise Errors.WafError(
                'The project is not configured for board {0}: run "waf configure --board {0} [...]" first!'
                .format(self.variant))

        lock_env = ConfigSet.ConfigSet()

        try:
            lock_env.load(os.path.join(Context.top_dir, Options.lockfile))
        except EnvironmentError:
            Logs.warn('Configuring the project')
            do_config = True
        else:
            if lock_env.run_dir != Context.run_dir:
                do_config = True
            else:
                h = 0

                for f in env.CONFIGURE_FILES:
                    try:
                        h = Utils.h_list((h, Utils.readf(f, 'rb')))
                    except EnvironmentError:
                        do_config = True
                        break
                else:
                    do_config = h != env.CONFIGURE_HASH

        if do_config:
            cmd = lock_env.config_cmd or 'configure'
            tmp = Options.options.__dict__

            if env.OPTIONS and sorted(env.OPTIONS.keys()) == sorted(
                    tmp.keys()):
                Options.options.__dict__ = env.OPTIONS
            else:
                raise Errors.WafError(
                    'The project configure options have changed: run "waf configure" again!'
                )

            try:
                run_command(cmd)
            finally:
                Options.options.__dict__ = tmp

            run_command(self.cmd)
        else:
            return execute_method(self)
Example #36
	def cmd_and_log(self, cmd, **kw):
		"""
		Execute a command and return stdout/stderr if the execution is successful.
		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
		will be bound to the WafError object::

			def configure(conf):
				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
				try:
					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
				except Exception as e:
					print(e.stdout, e.stderr)

		:param cmd: args for subprocess.Popen
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % (cmd,))

		if 'quiet' in kw:
			quiet = kw['quiet']
			del kw['quiet']
		else:
			quiet = None

		if 'output' in kw:
			to_ret = kw['output']
			del kw['output']
		else:
			to_ret = STDOUT

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError("Program %s not found!" % cmd[0])

		kw['stdout'] = kw['stderr'] = subprocess.PIPE
		if quiet is None:
			self.to_log(cmd)

		wargs = {}
		if 'timeout' in kw:
			if kw['timeout'] is not None:
				wargs['timeout'] = kw['timeout']
			del kw['timeout']
		if 'input' in kw:
			if kw['input']:
				wargs['input'] = kw['input']
				kw['stdin'] = Utils.subprocess.PIPE
			del kw['input']

		try:
			p = subprocess.Popen(cmd, **kw)
			(out, err) = p.communicate(**wargs)
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if not isinstance(out, str):
			out = out.decode(sys.stdout.encoding or 'iso8859-1')
		if not isinstance(err, str):
			err = err.decode(sys.stdout.encoding or 'iso8859-1')

		if out and quiet != STDOUT and quiet != BOTH:
			self.to_log('out: %s' % out)
		if err and quiet != STDERR and quiet != BOTH:
			self.to_log('err: %s' % err)

		if p.returncode:
			e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
			e.returncode = p.returncode
			e.stderr = err
			e.stdout = out
			raise e

		if to_ret == BOTH:
			return (out, err)
		elif to_ret == STDERR:
			return err
		return out
Example #37
	def exec_command(self, cmd, **kw):
		"""
		Execute a command and return the exit status. If the context has the attribute 'log',
		capture and log the process stderr/stdout for logging purposes::

			def run(tsk):
				ret = tsk.generator.bld.exec_command('touch foo.txt')
				return ret

		This method captures the standard/error outputs (Issue 1101), but it does not return the values
		unlike :py:meth:`waflib.Context.Context.cmd_and_log`

		:param cmd: command argument for subprocess.Popen
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % (cmd,))
		Logs.debug('runner_env: kw=%s' % kw)

		if self.logger:
			self.logger.info(cmd)

		if 'stdout' not in kw:
			kw['stdout'] = subprocess.PIPE
		if 'stderr' not in kw:
			kw['stderr'] = subprocess.PIPE

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError("Program %s not found!" % cmd[0])

		wargs = {}
		if 'timeout' in kw:
			if kw['timeout'] is not None:
				wargs['timeout'] = kw['timeout']
			del kw['timeout']
		if 'input' in kw:
			if kw['input']:
				wargs['input'] = kw['input']
				kw['stdin'] = Utils.subprocess.PIPE
			del kw['input']

		try:
			if kw['stdout'] or kw['stderr']:
				p = subprocess.Popen(cmd, **kw)
				(out, err) = p.communicate(**wargs)
				ret = p.returncode
			else:
				out, err = (None, None)
				ret = subprocess.Popen(cmd, **kw).wait(**wargs)
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if out:
			if not isinstance(out, str):
				out = out.decode(sys.stdout.encoding or 'iso8859-1')
			if self.logger:
				self.logger.debug('out: %s' % out)
			else:
				Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
		if err:
			if not isinstance(err, str):
				err = err.decode(sys.stdout.encoding or 'iso8859-1')
			if self.logger:
				self.logger.error('err: %s' % err)
			else:
				Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

		return ret
Example #38
 def __init__(self, **kw):
     super(OptionsContext, self).__init__(**kw)
     self.parser = opt_parser(self)
     self.option_groups = {}
     jobs = self.jobs()
     p = self.add_option
     color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
     if os.environ.get('CLICOLOR', '') == '0':
         color = 'no'
     elif os.environ.get('CLICOLOR_FORCE', '') == '1':
         color = 'yes'
     p('-c',
       '--color',
       dest='colors',
       default=color,
       action='store',
       help='whether to use colors (yes/no/auto) [default: auto]',
       choices=('yes', 'no', 'auto'))
     p('-j',
       '--jobs',
       dest='jobs',
       default=jobs,
       type='int',
       help='amount of parallel jobs (%r)' % jobs)
     p('-k',
       '--keep',
       dest='keep',
       default=0,
       action='count',
       help='continue despite errors (-kk to try harder)')
     p('-v',
       '--verbose',
       dest='verbose',
       default=0,
       action='count',
       help='verbosity level -v -vv or -vvv [default: 0]')
     p('--zones',
       dest='zones',
       default='',
       action='store',
       help='debugging zones (task_gen, deps, tasks, etc)')
     p('--profile',
       dest='profile',
       default=0,
       action='store_true',
       help=optparse.SUPPRESS_HELP)
     p('--pdb',
       dest='pdb',
       default=0,
       action='store_true',
       help=optparse.SUPPRESS_HELP)
     p('-h',
       '--help',
       dest='whelp',
       default=0,
       action='store_true',
       help="show this help message and exit")
     gr = self.add_option_group('Configuration options')
     self.option_groups['configure options'] = gr
     gr.add_option('-o',
                   '--out',
                   action='store',
                   default='',
                   help='build dir for the project',
                   dest='out')
     gr.add_option('-t',
                   '--top',
                   action='store',
                   default='',
                   help='src dir for the project',
                   dest='top')
     gr.add_option('--no-lock-in-run',
                   action='store_true',
                   default='',
                   help=optparse.SUPPRESS_HELP,
                   dest='no_lock_in_run')
     gr.add_option('--no-lock-in-out',
                   action='store_true',
                   default='',
                   help=optparse.SUPPRESS_HELP,
                   dest='no_lock_in_out')
     gr.add_option('--no-lock-in-top',
                   action='store_true',
                   default='',
                   help=optparse.SUPPRESS_HELP,
                   dest='no_lock_in_top')
     default_prefix = getattr(Context.g_module, 'default_prefix',
                              os.environ.get('PREFIX'))
     if not default_prefix:
         if Utils.unversioned_sys_platform() == 'win32':
             d = tempfile.gettempdir()
             default_prefix = d[0].upper() + d[1:]
         else:
             default_prefix = '/usr/local/'
     gr.add_option('--prefix',
                   dest='prefix',
                   default=default_prefix,
                   help='installation prefix [default: %r]' %
                   default_prefix)
     gr.add_option('--bindir', dest='bindir', help='bindir')
     gr.add_option('--libdir', dest='libdir', help='libdir')
     gr = self.add_option_group('Build and installation options')
     self.option_groups['build and install options'] = gr
     gr.add_option('-p',
                   '--progress',
                   dest='progress_bar',
                   default=0,
                   action='count',
                   help='-p: progress bar; -pp: ide output')
     gr.add_option('--targets',
                   dest='targets',
                   default='',
                   action='store',
                   help='task generators, e.g. "target1,target2"')
     gr = self.add_option_group('Step options')
     self.option_groups['step options'] = gr
     gr.add_option(
         '--files',
         dest='files',
         default='',
         action='store',
         help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
     default_destdir = os.environ.get('DESTDIR', '')
     gr = self.add_option_group('Installation and uninstallation options')
     self.option_groups['install/uninstall options'] = gr
     gr.add_option('--destdir',
                   help='installation root [default: %r]' % default_destdir,
                   default=default_destdir,
                   dest='destdir')
     gr.add_option('-f',
                   '--force',
                   dest='force',
                   default=False,
                   action='store_true',
                   help='force file installation')
     gr.add_option('--distcheck-args',
                   metavar='ARGS',
                   help='arguments to pass to distcheck',
                   default=None,
                   action='store')
Example #39
def git_submodule(bld, git_submodule, **kw):
    kw['git_submodule'] = git_submodule
    kw['features'] = Utils.to_list(kw.get('features', ''))
    kw['features'].append('git_submodule')

    return bld(**kw)
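
A hedged usage sketch of the wrapper above, assuming the helper is importable from the build scripts; the submodule name is illustrative:

def build(bld):
    # creates a task generator with the 'git_submodule' feature attached
    git_submodule(bld, 'PX4Firmware')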
Example #40
    def execute_tasks(self):
        # display the time elapsed in the progress bar
        self.timer = Utils.Timer()

        self.compile()
Example #41
    def run(self):
        """
		Execute the test. This can fail.
		"""

        testname = str(self.inputs[0])
        filename = self.inputs[0].abspath()

        bld = self.generator.bld
        if not bld.logger:
            bld.logger = Logs.make_logger(
                os.path.join(bld.out_dir, "test.log"), 'unites_logger')

        self.unites_exec = getattr(self.generator, 'unites_exec', [filename])

        if getattr(self.generator, 'unites_fun', None):
            # FIXME waf 1.8 - add a return statement here?
            self.generator.unites_fun(self)

        try:
            shenv = getattr(self.generator.bld, 'all_test_paths')
        except AttributeError:
            # this operation may be performed by at most #maxjobs
            shenv = os.environ.copy()

            lst = []
            for g in self.generator.bld.groups:
                for tg in g:
                    if getattr(tg, 'link_task', None):
                        s = tg.link_task.outputs[0].parent.abspath()
                        if s not in lst:
                            lst.append(s)

            def add_path(dct, path, var):
                dct[var] = os.pathsep.join(
                    Utils.to_list(path) + [os.environ.get(var, '')])

            if Utils.is_win32:
                add_path(shenv, lst, 'PATH')
            elif Utils.unversioned_sys_platform() == 'darwin':
                add_path(shenv, lst, 'DYLD_LIBRARY_PATH')
                add_path(shenv, lst, 'LD_LIBRARY_PATH')
            else:
                add_path(shenv, lst, 'LD_LIBRARY_PATH')
            bld.all_test_paths = shenv

        cwd = getattr(self.generator, 'unites_cwd',
                      '') or self.inputs[0].parent.abspath()
        testcmd = getattr(Options.options, 'testcmd', False)

        if testcmd:
            self.unites_exec = (testcmd % self.unites_exec[0]).split(' ')

        bld.start_msg("Running test \'%s\'" % (testname))

        proc = Utils.subprocess.Popen(self.unites_exec,\
                                      cwd=cwd,\
                                      env=shenv,\
                                      stderr=Utils.subprocess.PIPE,\
                                      stdout=Utils.subprocess.PIPE)

        (out, err) = proc.communicate()

        if proc.returncode == 0:
            bld.end_msg("passed", 'GREEN')
        else:
            msg = []
            if out:
                msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
            if err:
                msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
            msg = os.linesep.join(msg)
            bld.end_msg("FAIL", 'RED')
            bld.to_log(msg)
            if not getattr(Options.options, 'permissive_tests', False):
                raise Errors.WafError('Test \'%s\' failed' % (testname))
        if not getattr(bld, 'unites_summary', None):
            bld.unites_summary = {}
        bld.unites_summary[testname] = (proc.returncode != 0)
Example #42
 def execute(self):
     super(OptionsContext, self).execute()
     self.parse_args()
     Utils.alloc_process_pool(options.jobs)
Example #43
def default_compilers():
    build_platform = Utils.unversioned_sys_platform()
    possible_compiler_list = fc_compiler.get(build_platform,
                                             fc_compiler['default'])
    return ' '.join(possible_compiler_list)
Example #44
	def start(self,node,env):
		debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
		bld=node.ctx
		try:
			self.parse_cache=bld.parse_cache
		except AttributeError:
			bld.parse_cache={}
			self.parse_cache=bld.parse_cache
		self.addlines(node)
		if env['DEFINES']:
			try:
				lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
				lst.reverse()
				self.lines.extend([('define',x)for x in lst])
			except AttributeError:
				pass
		while self.lines:
			(token,line)=self.lines.pop()
			if token==POPFILE:
				self.count_files-=1
				self.currentnode_stack.pop()
				continue
			try:
				ve=Logs.verbose
				if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
				state=self.state
				if token[:2]=='if':
					state.append(undefined)
				elif token=='endif':
					state.pop()
				if token[0]!='e':
					if skipped in self.state or ignored in self.state:
						continue
				if token=='if':
					ret=eval_macro(tokenize(line),self.defs)
					if ret:state[-1]=accepted
					else:state[-1]=ignored
				elif token=='ifdef':
					m=re_mac.match(line)
					if m and m.group(0)in self.defs:state[-1]=accepted
					else:state[-1]=ignored
				elif token=='ifndef':
					m=re_mac.match(line)
					if m and m.group(0)in self.defs:state[-1]=ignored
					else:state[-1]=accepted
				elif token=='include'or token=='import':
					(kind,inc)=extract_include(line,self.defs)
					if inc in self.ban_includes:
						continue
					if token=='import':self.ban_includes.add(inc)
					if ve:debug('preproc: include found %s    (%s) ',inc,kind)
					if kind=='"'or not strict_quotes:
						self.tryfind(inc)
				elif token=='elif':
					if state[-1]==accepted:
						state[-1]=skipped
					elif state[-1]==ignored:
						if eval_macro(tokenize(line),self.defs):
							state[-1]=accepted
				elif token=='else':
					if state[-1]==accepted:state[-1]=skipped
					elif state[-1]==ignored:state[-1]=accepted
				elif token=='define':
					try:
						self.defs[define_name(line)]=line
					except Exception:
						raise PreprocError("Invalid define line %s"%line)
				elif token=='undef':
					m=re_mac.match(line)
					if m and m.group(0)in self.defs:
						self.defs.__delitem__(m.group(0))
				elif token=='pragma':
					if re_pragma_once.match(line.lower()):
						self.ban_includes.add(self.curfile)
			except Exception as e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
Example #45
import sys
import re
from waflib import Utils, Logs, Errors
from waflib.Configure import conf

BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
BOOST_VERSION_FILE = 'boost/version.hpp'
BOOST_VERSION_CODE = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_LIB_VERSION << std::endl; }
'''

# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
PLATFORM = Utils.unversioned_sys_platform()
detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
BOOST_TOOLSETS = {
	'borland':  'bcb',
	'clang':	detect_clang,
	'como':	 'como',
	'cw':	   'cw',
	'darwin':   'xgcc',
	'edg':	  'edg',
	'g++':	  detect_mingw,
	'gcc':	  detect_mingw,
	'icpc':	 detect_intel,
	'intel':	detect_intel,
	'kcc':	  'kcc',
Example #46
 def add_path(dct, path, var):
     dct[var] = os.pathsep.join(
         Utils.to_list(path) + [os.environ.get(var, '')])
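
A tiny sketch of the helper above; the directories are illustrative:

import os

env = os.environ.copy()
# prepends the given directories to LD_LIBRARY_PATH inside the copied environment
add_path(env, ['/opt/foo/lib', '/opt/bar/lib'], 'LD_LIBRARY_PATH')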
Example #47
def internal_validate_incredibuild_registry_settings(ctx):
    """ Helper function to verify the correct incredibuild settings """
    if Utils.unversioned_sys_platform() != 'win32':
        # Check windows registry only
        return False

    import _winreg

    if not ctx.is_option_true('use_incredibuild'):
        # No need to check IB settings if there is no IB
        return False

    allow_reg_updated = ctx.is_option_true('auto_update_incredibuild_settings') and \
                        not ctx.is_option_true('internal_dont_check_recursive_execution') and \
                        not Options.options.execsolution

    # Open the incredibuild settings registry key to validate if IB is installed properly
    try:
        ib_settings_read_only = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
                                                IB_REGISTRY_PATH, 0,
                                                _winreg.KEY_READ)
    except:
        Logs.debug(
            'lumberyard: Cannot open registry entry "HKEY_LOCAL_MACHINE\\{}"'.
            format(IB_REGISTRY_PATH))
        Logs.warn(
            '[WARNING] Incredibuild does not appear to be correctly installed on your machine.  Disabling Incredibuild.'
        )
        return False

    def _read_ib_reg_setting(reg_key, setting_name, setting_path,
                             expected_value):
        try:
            reg_data, reg_type = _winreg.QueryValueEx(reg_key, setting_name)
            return reg_data == expected_value
        except:
            Logs.debug(
                'lumberyard: Cannot find a registry entry for "HKEY_LOCAL_MACHINE\\{}\\{}"'
                .format(setting_path, setting_name))
            return False

    def _write_ib_reg_setting(reg_key, setting_name, setting_path, value):
        try:
            _winreg.SetValueEx(reg_key, setting_name, 0, _winreg.REG_SZ,
                               str(value))
            return True
        except WindowsError as e:
            Logs.warn(
                'lumberyard: Unable write to HKEY_LOCAL_MACHINE\\{}\\{} : {}'.
                format(setting_path, setting_name, e.strerror))
            return False

    valid_ib_reg_key_values = [('MaxConcurrentPDBs', '0')]

    is_ib_ready = True
    for settings_name, expected_value in valid_ib_reg_key_values:
        if is_ib_ready and not _read_ib_reg_setting(
                ib_settings_read_only, settings_name, IB_REGISTRY_PATH,
                expected_value):
            is_ib_ready = False

    # If we are IB ready, short-circuit out
    if is_ib_ready:
        return True

    # If we need updates, check if we have 'auto auto-update-incredibuild-settings' set or not
    if not allow_reg_updated:
        Logs.warn(
            '[WARNING] The required settings for incredibuild is not properly configured. '
        )
        if not ctx.is_option_true('auto_update_incredibuild_settings'):
            Logs.warn(
                "[WARNING]: Set the '--auto-update-incredibuild-settings' to True if you want to attempt to automatically update the settings"
            )
        return False

    # if auto-update-incredibuild-settings is true, then attempt to update the values automatically
    try:
        ib_settings_writing = _winreg.OpenKey(
            _winreg.HKEY_LOCAL_MACHINE, IB_REGISTRY_PATH, 0,
            _winreg.KEY_SET_VALUE | _winreg.KEY_READ)
    except:
        Logs.warn(
            '[WARNING] Cannot access a registry entry "HKEY_LOCAL_MACHINE\\{}" for writing.'
            .format(IB_REGISTRY_PATH))
        Logs.warn(
            '[WARNING] Please run "{0}" as an administrator or change the value to "0" in the registry to ensure a correct operation of WAF'
            .format(WAF_EXECUTABLE))
        return False

    # Once we get the key, attempt to update the values
    is_ib_updated = True
    for settings_name, set_value in valid_ib_reg_key_values:
        if is_ib_updated and not _write_ib_reg_setting(
                ib_settings_writing, settings_name, IB_REGISTRY_PATH,
                set_value):
            is_ib_updated = False
    if not is_ib_updated:
        Logs.warn(
            '[WARNING] Unable to update registry settings for incredibuild')
        return False

    Logs.info('[INFO] Registry values updated for incredibuild')
    return True
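
A hedged sketch of how the validator above might gate Incredibuild during configuration; the configure() hook and the fallback warning are assumptions, not project API:

def configure(ctx):
    # If the registry settings cannot be validated (or updated), fall back to a
    # regular, non-distributed build rather than failing the configuration.
    if not internal_validate_incredibuild_registry_settings(ctx):
        Logs.warn('[WARNING] Incredibuild settings not validated; continuing without Incredibuild')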
Ejemplo n.º 48
0
def check_boost(self, *k, **kw):
	"""
	Initialize boost libraries to be used.

	Keywords: you can pass the same parameters as with the command line (without "--boost-").
	Note that the command line has the priority, and should preferably be used.
	"""
	if not self.env['CXX']:
		self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')

	params = {'lib': k and k[0] or kw.get('lib', None)}
	for key, value in self.options.__dict__.items():
		if not key.startswith('boost_'):
			continue
		key = key[len('boost_'):]
		params[key] = value and value or kw.get(key, '')

	var = kw.get('uselib_store', 'BOOST')
	ext_uses = Utils.to_list(kw.get('use', ''))
	self.env['USE_%s' % var] = ext_uses

	self.start_msg('Checking boost includes')
	self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
	self.env.BOOST_VERSION = self.boost_get_version(inc)
	self.end_msg(self.env.BOOST_VERSION)
	if Logs.verbose:
		Logs.pprint('CYAN', '	path : %s' % self.env['INCLUDES_%s' % var])

	if not params['lib']:
		return
	self.start_msg('Checking boost libs')
	suffix = params.get('static', None) and 'ST' or ''
	path, libs = self.boost_get_libs(**params)
	self.env['%sLIBPATH_%s' % (suffix, var)] = [path]
	self.env['%sLIB_%s' % (suffix, var)] = libs
	self.end_msg('ok')
	if Logs.verbose:
		Logs.pprint('CYAN', '	path : %s' % path)
		Logs.pprint('CYAN', '	libs : %s' % libs)


	def try_link():
		if 'system' in params['lib']:
			self.check_cxx(
			 fragment="\n".join([
			  '#include <boost/system/error_code.hpp>',
			  'int main() { boost::system::error_code c; }',
			 ]),
			 use=var,
			 execute=False,
			)
		if 'thread' in params['lib']:
			self.check_cxx(
			 fragment="\n".join([
			  '#include <boost/thread.hpp>',
			  'int main() { boost::thread t; }',
			 ]),
			 use=var,
			 execute=False,
			)

	if params.get('linkage_autodetect', False):
		self.start_msg("Attempting to detect boost linkage flags")
		toolset = self.boost_get_toolset(kw.get('toolset', ''))
		if toolset in ['vc']:
			# disable auto-linking feature, causing error LNK1181
			# because the code wants to be linked against
			self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']

			# if no dlls are present, we guess the .lib files are not stubs
			has_dlls = False
			for x in Utils.listdir(path):
				if x.endswith(self.env.cxxshlib_PATTERN % ''):
					has_dlls = True
					break
			if not has_dlls:
				self.env['STLIBPATH_%s' % var] = [path]
				self.env['STLIB_%s' % var] = libs
				del self.env['LIB_%s' % var]
				del self.env['LIBPATH_%s' % var]

			# we attempt to play with some known-to-work CXXFLAGS combinations
			for cxxflags in (['/MD', '/EHsc'], []):
				self.env.stash()
				self.env["CXXFLAGS_%s" % var] += cxxflags
				try:
					try_link()
					self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
					e = None
					break
				except Errors.ConfigurationError as exc:
					self.env.revert()
					e = exc

			if e is not None:
				self.fatal("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=e)
		else:
			self.fatal("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
	else:
		self.start_msg('Checking for boost linkage')
		try:
			try_link()
		except Errors.ConfigurationError as e:
			self.fatal("Could not link against boost libraries using supplied options")
		self.end_msg('ok')
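
A typical wscript sketch for using the check above; the boost components and file names are illustrative:

def options(opt):
	opt.load('compiler_cxx boost')

def configure(conf):
	conf.load('compiler_cxx boost')
	conf.check_boost(lib='system thread', uselib_store='BOOST')

def build(bld):
	# 'use' pulls in the INCLUDES/LIB/LIBPATH variables stored under BOOST
	bld.program(source='main.cpp', target='app', use='BOOST')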
Ejemplo n.º 49
0
class task_gen(object):
    mappings = {}
    prec = Utils.defaultdict(list)

    def __init__(self, *k, **kw):
        self.source = ''
        self.target = ''
        self.meths = []
        self.prec = Utils.defaultdict(list)
        self.mappings = {}
        self.features = []
        self.tasks = []
        if not 'bld' in kw:
            self.env = ConfigSet.ConfigSet()
            self.idx = 0
            self.path = None
        else:
            self.bld = kw['bld']
            self.env = self.bld.env.derive()
            self.path = self.bld.path
            try:
                self.idx = self.bld.idx[id(
                    self.path)] = self.bld.idx.get(id(self.path), 0) + 1
            except AttributeError:
                self.bld.idx = {}
                self.idx = self.bld.idx[id(self.path)] = 1
        for key, val in kw.items():
            setattr(self, key, val)

    def __str__(self):
        return "<task_gen %r declared in %s>" % (self.name,
                                                 self.path.abspath())

    def __repr__(self):
        lst = []
        for x in self.__dict__.keys():
            if x not in ['env', 'bld', 'compiled_tasks', 'tasks']:
                lst.append("%s=%s" % (x, repr(getattr(self, x))))
        return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())

    def get_name(self):
        try:
            return self._name
        except AttributeError:
            if isinstance(self.target, list):
                lst = [str(x) for x in self.target]
                name = self._name = ','.join(lst)
            else:
                name = self._name = str(self.target)
            return name

    def set_name(self, name):
        self._name = name

    name = property(get_name, set_name)

    def to_list(self, val):
        if isinstance(val, str): return val.split()
        else: return val

    def post(self):
        if getattr(self, 'posted', None):
            return False
        self.posted = True
        keys = set(self.meths)
        self.features = Utils.to_list(self.features)
        for x in self.features + ['*']:
            st = feats[x]
            if not st:
                if not x in Task.classes:
                    Logs.warn(
                        'feature %r does not exist - bind at least one method to it'
                        % x)
            keys.update(list(st))
        prec = {}
        prec_tbl = self.prec or task_gen.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x: break
            else:
                tmp.append(a)
        tmp.sort()
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys: out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                for x in nlst:
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)
        if prec:
            raise Errors.WafError('Cycle detected in the method execution %r' %
                                  prec)
        out.reverse()
        self.meths = out
        Logs.debug('task_gen: posting %s %d' % (self, id(self)))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
            v()
        Logs.debug('task_gen: posted %s' % self.name)
        return True

    def get_hook(self, node):
        name = node.name
        for k in self.mappings:
            if name.endswith(k):
                return self.mappings[k]
        for k in task_gen.mappings:
            if name.endswith(k):
                return task_gen.mappings[k]
        raise Errors.WafError(
            "File %r has no mapping in %r (did you forget to load a waf tool?)"
            % (node, task_gen.mappings.keys()))

    def create_task(self, name, src=None, tgt=None):
        task = Task.classes[name](env=self.env.derive(), generator=self)
        if src:
            task.set_inputs(src)
        if tgt:
            task.set_outputs(tgt)
        self.tasks.append(task)
        return task

    def clone(self, env):
        newobj = self.bld()
        for x in self.__dict__:
            if x in ['env', 'bld']:
                continue
            elif x in ['path', 'features']:
                setattr(newobj, x, getattr(self, x))
            else:
                setattr(newobj, x, copy.copy(getattr(self, x)))
        newobj.posted = False
        if isinstance(env, str):
            newobj.env = self.bld.all_envs[env].derive()
        else:
            newobj.env = env.derive()
        return newobj
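
A minimal sketch of how task generators such as the class above are declared from a wscript; the feature and file names are illustrative:

def build(bld):
    # bld(...) instantiates a task_gen; its tasks are only created when post()
    # runs, which the build context triggers during the build phase.
    bld(features='cxx cxxprogram', source='main.cpp', target='app')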
Ejemplo n.º 50
0
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file

import os, tempfile, optparse, sys, re
from waflib import Logs, Utils, Context
cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
options = {}
commands = []
lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
try:
    cache_global = os.path.abspath(os.environ['WAFCACHE'])
except KeyError:
    cache_global = ''
platform = Utils.unversioned_sys_platform()


class opt_parser(optparse.OptionParser):
    def __init__(self, ctx):
        optparse.OptionParser.__init__(
            self,
            conflict_handler="resolve",
            version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
        self.formatter.width = Logs.get_term_cols()
        p = self.add_option
        self.ctx = ctx
        jobs = ctx.jobs()
        p('-j',
          '--jobs',
          dest='jobs',
Ejemplo n.º 51
0
def post_run(self):
    old_post_run(self)
    for node in self.outputs:
        node.sig = Utils.h_file(node.abspath())
        self.generator.bld.task_sigs[node.abspath()] = self.uid()  # issue #1017
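
A short sketch of the monkey-patching pattern the snippet above relies on: the original method is captured as old_post_run before the extended version is installed (the target Task class is an assumption):

from waflib.Tools import c  # assuming the plain C compile task is being extended

old_post_run = c.c.post_run  # keep a reference to the original implementation
c.c.post_run = post_run      # install the extended post_run defined above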
Ejemplo n.º 52
0
def invoke_waf_recursively(bld,
                           build_metrics_supported=False,
                           metrics_namespace=None):
    """
    Check the incredibuild parameters and environment to see if we need to invoke waf through incredibuild

    :param bld:                 The BuildContext

    :return:    True to short circuit the current build flow (because an incredibuild command has been invoked), False to continue the flow
    """

    if not WINREG_SUPPORTED:
        return False  # We can't run incredibuild on systems that don't support windows registry

    if not Utils.unversioned_sys_platform() == 'win32':
        return False  # Don't use recursive execution on non-windows hosts

    if bld.is_option_true('internal_dont_check_recursive_execution'):
        return False

    # Skip clean_ commands
    if bld.cmd.startswith('clean_'):
        return False

    # Skip non-build commands
    if bld.cmd in NON_BUILD_COMMANDS:
        return False

    # Don't use IB for special single file operations
    if bld.is_option_true('show_includes'):
        return False
    if bld.is_option_true('show_preprocessed_file'):
        return False
    if bld.is_option_true('show_disassembly'):
        return False
    if bld.options.file_filter != "":
        return False

    # Skip if incredibuild is disabled
    if not bld.is_option_true('use_incredibuild'):
        Logs.warn('[WARNING] Incredibuild disabled by build option')
        return False

    try:
        # Get correct incredibuild installation folder to not depend on PATH
        IB_settings = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
                                      IB_REGISTRY_PATH, 0, _winreg.KEY_READ)
        (ib_folder, type) = _winreg.QueryValueEx(IB_settings, 'Folder')
    except:
        Logs.warn(
            '[WARNING] Incredibuild disabled.  Cannot find incredibuild installation.'
        )
        return False

    # Get the incredibuild profile file
    ib_profile_xml_check = getattr(bld.options, 'incredibuild_profile',
                                   '').replace(
                                       "Code/Tools/waf-1.7.13/profile.xml",
                                       "Tools/build/waf-1.7.13/profile.xml")
    if os.path.exists(ib_profile_xml_check):
        ib_profile_xml = ib_profile_xml_check
    else:
        # If the profile doesnt exist, then attempt to use the engine as the base
        ib_profile_xml = os.path.join(bld.get_bintemp_folder_node().abspath(),
                                      'profile.xml')
        if not os.path.isfile(ib_profile_xml):
            Logs.warn(
                '[WARN] Unable to locate default incredibuild profile ({}). Run configure to regenerate.'
                .format(ib_profile_xml))
            Logs.warn('[WARN] Incredibuild will be disabled.')
            return False

    result = subprocess.check_output(
        [str(ib_folder) + '/xgconsole.exe', '/QUERYLICENSE'])

    # Make & Build Tools is required
    if not 'Make && Build Tools' in result:
        Logs.warn(
            'Required Make && Build Tools Package not found.  Build will not be accelerated through Incredibuild'
        )
        return False

    # Determine if the Dev Tool Acceleration package is available.  This package is required for consoles
    dev_tool_accelerated = 'IncrediBuild for Dev Tool Acceleration' in result

    ib_flag_check_results = bld.check_ib_flag(bld.cmd, result,
                                              dev_tool_accelerated)
    if not all(ib_flag_check_results):
        return False

    # Windows builds can be run without the Dev Tools Acceleration Package, but won't distribute Qt tasks.
    if not dev_tool_accelerated:
        Logs.warn(
            'Dev Tool Acceleration Package not found.   Qt tasks will not be distributed through Incredibuild'
        )

    # Get all specific commands, but keep msvs to execute after IB has finished
    bExecuteMSVS = False
    if 'msvs' in Options.commands:
        bExecuteMSVS = True

    Options.commands = []
    cmd_line_args = []
    for arg in sys.argv[1:]:
        if arg == 'generate_uber_files':
            continue
        if arg == 'generate_module_def_files':
            continue
        if arg == 'msvs':
            bExecuteMSVS = True
            continue
        if arg == 'configure':
            Logs.warn(
                '[WARNING] Incredibuild disabled, running configure and build in one line is not supported with incredibuild. To build with incredibuild, run the build without the configure command on the same line'
            )
            return False
        if ' ' in arg and '=' in arg:  # replace strings like "--build-options=c:/root with spaces in it/file" with "--build-options=file"
            command, path_val = arg.split('=', 1)
            path_val = os.path.relpath(path_val)
            arg = command + '=' + path_val
        cmd_line_args += [arg]

    if bExecuteMSVS:  # Execute MSVS without IB
        Options.commands += ['msvs']

    command_line_options = ' '.join(cmd_line_args)  # Recreate command line

    # Add special option to not start IB from within IB
    command_line_options += ' --internal-dont-check-recursive-execution=True'

    num_jobs = bld.options.incredibuild_max_cores

    # Build Command Line
    command = CURRENT_WAF_EXECUTABLE + ' --jobs=' + str(
        num_jobs) + ' ' + command_line_options
    if build_metrics_supported:
        command += ' --enable-build-metrics'
        if metrics_namespace is not None:
            command += ' --metrics-namespace {0}'.format(metrics_namespace)

    sys.stdout.write('[WAF] Starting Incredibuild: ')

    process_call = []
    if dev_tool_accelerated:
        process_call.append(str(ib_folder) + '/xgconsole.exe')

        # If the IB profile is not blank, then attempt to use it
        if len(ib_profile_xml) > 0:
            ib_profile_xml_file = os.path.abspath(ib_profile_xml)
            # Set the profile for incredibuild only if it exists
            if os.path.exists(ib_profile_xml_file):
                process_call.append('/profile={}'.format(ib_profile_xml_file))
            else:
                Logs.warn(
                    '[WARN] Incredibuild profile file "{}" does not exist.  Using default incredibuild settings'
                    .format(ib_profile_xml))
    else:
        process_call.append(str(ib_folder) + '/buildconsole.exe')

        # using a profile overrides the handling of max link tasks.  Unfortunately, the make&build tool doesn't support
        # the profile, so we must check the registry settings to ensure that they allow parallel linking up to the
        # count specified in waf.  Incredibuild suggests adding an override parameter to the msbuild command to override
        # this, but since we aren't using this, we warn instead
        try:
            # grab the limitor for number of local jobs that incredibuild will use
            (ib_max_local_cpu,
             type) = _winreg.QueryValueEx(IB_settings,
                                          'ForceCPUCount_WhenInitiator')
            # grab the limitor that incredibuild will use if a profile is not specified
            (ib_max_link,
             type) = _winreg.QueryValueEx(IB_settings,
                                          'MaxParallelLinkTargets')
        except:
            Logs.warn(
                '[WARNING] unable to query Incredibuild registry, parallel linking may be sub-optimal'
            )
        else:
            ib_max_local_cpu = int(ib_max_local_cpu)
            if (ib_max_local_cpu == 0):
                ib_max_local_cpu = multiprocessing.cpu_count()
            # executable links are limited to max_parallel_link using a semaphore.  lib/dll links are limited to number
            # of cores since they are generally single threaded
            min_setting_needed = int(
                min(ib_max_local_cpu, bld.options.max_parallel_link))
            ib_max_link = int(ib_max_link)
            if ib_max_link < min_setting_needed:
                Logs.warn(
                    '[WARNING] Incredibuild configuration \'MaxParallelLinkTargets\' limits link tasks to %d, increasing to %d will improve link throughput'
                    % (ib_max_link, min_setting_needed))

    process_call.append('/command=' + command)
    process_call.append('/useidemonitor')
    process_call.append('/nologo')

    Logs.debug('incredibuild: Cmdline: ' + str(process_call))
    if subprocess.call(process_call, env=os.environ.copy()):
        bld.fatal("[ERROR] Build Failed")

    return True
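
A hedged sketch of consuming the helper above: a True return value means an Incredibuild process has already re-invoked waf, so the caller can stop early:

def build(bld):
    if invoke_waf_recursively(bld):
        return  # the distributed build already ran; skip the local flow
    # ... continue with the regular local build ...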
Ejemplo n.º 53
0
sharedstatedir, architecture-independent variable data, ${PREFIX}/com
localstatedir, variable data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib%s
includedir, header files, ${PREFIX}/include
oldincludedir, header files for non-GCC compilers, /usr/include
datarootdir, architecture-independent data root, ${PREFIX}/share
datadir, architecture-independent data, ${DATAROOTDIR}
infodir, GNU "info" documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, manual pages, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, HTML documentation, ${DOCDIR}
dvidir, DVI documentation, ${DOCDIR}
pdfdir, PDF documentation, ${DOCDIR}
psdir, PostScript documentation, ${DOCDIR}
''' % Utils.lib64()

_options = [x.split(', ') for x in gnuopts.splitlines() if x]
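

# Hedged sketch of how the parsed (name, description, default) triples above can
# be applied; the configure() below uses the same get_param pattern, so this is
# purely illustrative.
def apply_gnu_dirs_sketch(env):
    from waflib import Options, Utils
    for name, _description, default in _options:
        # Prefer a command-line value when present, then expand ${...} references.
        value = getattr(Options.options, name.upper(), '') or default
        env[name.upper()] = Utils.subst_vars(value, env)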


def configure(conf):
    """
	Reads the command-line options to set lots of variables in *conf.env*. The variables
	BINDIR and LIBDIR will be overwritten.
	"""
    def get_param(varname, default):
        return getattr(Options.options, varname, '') or default

    env = conf.env
    env.LIBDIR = env.BINDIR = []
    env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
Ejemplo n.º 54
0
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file

import copy, re, os
from waflib import Task, Utils, Logs, Errors, ConfigSet, Node
feats = Utils.defaultdict(set)


class task_gen(object):
    mappings = {}
    prec = Utils.defaultdict(list)

    def __init__(self, *k, **kw):
        self.source = ''
        self.target = ''
        self.meths = []
        self.prec = Utils.defaultdict(list)
        self.mappings = {}
        self.features = []
        self.tasks = []
        if not 'bld' in kw:
            self.env = ConfigSet.ConfigSet()
            self.idx = 0
            self.path = None
        else:
            self.bld = kw['bld']
            self.env = self.bld.env.derive()
            self.path = self.bld.path
            try:
                self.idx = self.bld.idx[id(
Ejemplo n.º 55
0
Archivo: msvc.py Proyecto: rivy/waf
def gather_intel_composer_versions(conf, versions):
    """
	Checks ICL compilers that are part of Intel Composer Suites

	:param versions: list to modify
	:type versions: list
	"""
    version_pattern = re.compile('^...?.?\...?.?.?')
    try:
        all_versions = Utils.winreg.OpenKey(
            Utils.winreg.HKEY_LOCAL_MACHINE,
            'SOFTWARE\\Wow6432node\\Intel\\Suites')
    except OSError:
        try:
            all_versions = Utils.winreg.OpenKey(
                Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
        except OSError:
            return
    index = 0
    while 1:
        try:
            version = Utils.winreg.EnumKey(all_versions, index)
        except OSError:
            break
        index += 1
        if not version_pattern.match(version):
            continue
        targets = {}
        for target, arch in all_icl_platforms:
            if target == 'intel64':
                targetDir = 'EM64T_NATIVE'
            else:
                targetDir = target
            try:
                try:
                    defaults = Utils.winreg.OpenKey(
                        all_versions,
                        version + '\\Defaults\\C++\\' + targetDir)
                except OSError:
                    if targetDir == 'EM64T_NATIVE':
                        defaults = Utils.winreg.OpenKey(
                            all_versions, version + '\\Defaults\\C++\\EM64T')
                    else:
                        raise
                uid, type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
                Utils.winreg.OpenKey(
                    all_versions, version + '\\' + uid + '\\C++\\' + targetDir)
                icl_version = Utils.winreg.OpenKey(
                    all_versions, version + '\\' + uid + '\\C++')
                path, type = Utils.winreg.QueryValueEx(icl_version,
                                                       'ProductDir')
            except OSError:
                pass
            else:
                batch_file = os.path.join(path, 'bin', 'iclvars.bat')
                if os.path.isfile(batch_file):
                    targets[target] = target_compiler(conf, 'intel', arch,
                                                      version, target,
                                                      batch_file)
                # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
                # http://software.intel.com/en-us/forums/topic/328487
                compilervars_warning_attr = '_compilervars_warning_key'
                if version[0:2] == '13' and getattr(
                        conf, compilervars_warning_attr, True):
                    setattr(conf, compilervars_warning_attr, False)
                    patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
                    compilervars_arch = os.path.join(path, 'bin',
                                                     'compilervars_arch.bat')
                    for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'):
                        if vscomntool in os.environ:
                            vs_express_path = os.environ[
                                vscomntool] + r'..\IDE\VSWinExpress.exe'
                            dev_env_path = os.environ[
                                vscomntool] + r'..\IDE\devenv.exe'
                            if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'
                                    in Utils.readf(compilervars_arch)
                                    and not os.path.exists(vs_express_path)
                                    and not os.path.exists(dev_env_path)):
                                Logs.warn((
                                    'The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
                                    '(VSWinExpress.exe) but it does not seem to be installed at %r. '
                                    'The intel command line set up will fail to configure unless the file %r '
                                    'is patched. See: %s') %
                                          (vs_express_path, compilervars_arch,
                                           patch_url))
        major = version[0:2]
        versions['intel ' + major] = targets
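
An illustrative call pattern for the gatherer above; the surrounding configuration context and the logging line are assumptions:

versions = {}
gather_intel_composer_versions(conf, versions)
for name, targets in versions.items():
    Logs.debug('msvc: found %s with targets %r' % (name, list(targets.keys())))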
Ejemplo n.º 56
0
	def ant_glob(self, *k, **kw):
		"""
		Finds files across folders and returns Node objects:

		* ``**/*`` find all files recursively
		* ``**/*.class`` find all files ending by .class
		* ``..`` find files having two dot characters

		For example::

			def configure(cfg):
				# find all .cpp files
				cfg.path.ant_glob('**/*.cpp')
				# find particular files from the root filesystem (can be slow)
				cfg.root.ant_glob('etc/*.txt')
				# simple exclusion rule example
				cfg.path.ant_glob('*.c*', excl=['*.c'], src=True, dir=False)

		For more information about the patterns, consult http://ant.apache.org/manual/dirtasks.html
		Please remember that the '..' sequence does not represent the parent directory::

			def configure(cfg):
				cfg.path.ant_glob('../*.h') # incorrect
				cfg.path.parent.ant_glob('*.h') # correct

		The Node structure is itself a filesystem cache, so certain precautions must
		be taken while matching files in the build or installation phases.
		Node objects that do not have a corresponding file or folder are garbage-collected by default.
		This garbage collection is usually required to prevent returning files that do not
		exist anymore. Yet, this may also remove Node objects of files that are yet-to-be built.

		This typically happens when trying to match files in the build directory,
		but there are also cases when files are created in the source directory.
		Run ``waf -v`` to display any warnings, and consider passing ``remove=False``
		when matching files in the build directory.

		Since ant_glob can traverse both source and build folders, it is a best practice
		to call this method only from the most specific build node::

			def build(bld):
				# traverses the build directory, may need ``remove=False``:
				bld.path.ant_glob('project/dir/**/*.h')
				# better, no accidental build directory traversal:
				bld.path.find_node('project/dir').ant_glob('**/*.h') # best

		In addition, files and folders are listed immediately. When matching files in the
		build folders, consider passing ``generator=True`` so that the generator object
		returned can defer computation to a later stage. For example::

			def build(bld):
				bld(rule='tar xvf ${SRC}', source='arch.tar')
				bld.add_group()
				gen = bld.bldnode.ant_glob("*.h", generator=True, remove=True)
				# files will be listed only after the arch.tar is unpacked
				bld(rule='ls ${SRC}', source=gen, name='XYZ')


		:param incl: ant patterns or list of patterns to include
		:type incl: string or list of strings
		:param excl: ant patterns or list of patterns to exclude
		:type excl: string or list of strings
		:param dir: return folders too (False by default)
		:type dir: bool
		:param src: return files (True by default)
		:type src: bool
		:param maxdepth: maximum depth of recursion
		:type maxdepth: int
		:param ignorecase: ignore case while matching (False by default)
		:type ignorecase: bool
		:param generator: Whether to evaluate the Nodes lazily
		:type generator: bool
		:param remove: remove files/folders that do not exist (True by default)
		:type remove: bool
		:param quiet: disable build directory traversal warnings (verbose mode)
		:type quiet: bool
		:returns: The corresponding Node objects as a list or as a generator object (generator=True)
		:rtype: by default, list of :py:class:`waflib.Node.Node` instances
		"""
		src = kw.get('src', True)
		dir = kw.get('dir')
		excl = kw.get('excl', exclude_regs)
		incl = k and k[0] or kw.get('incl', '**')
		remove = kw.get('remove', True)
		maxdepth = kw.get('maxdepth', 25)
		ignorecase = kw.get('ignorecase', False)
		quiet = kw.get('quiet', False)
		pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase))

		if kw.get('generator'):
			return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet))

		it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)
		if kw.get('flat'):
			# returns relative paths as a space-delimited string
			# prefer Node objects whenever possible
			return ' '.join(x.path_from(self) for x in it)
		return list(it)
Ejemplo n.º 57
0
def apply_vnum(self):
	"""
	Enforce version numbering on shared libraries. The valid version numbers must have either zero or two dots::

		def build(bld):
			bld.shlib(source='a.c', target='foo', vnum='14.15.16')

	In this example, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:

	* ``libfoo.so    → libfoo.so.14.15.16``
	* ``libfoo.so.14 → libfoo.so.14.15.16``
	"""
	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
		return

	link = self.link_task
	if not re_vnum.match(self.vnum):
		raise Errors.WafError('Invalid version %r for %r' % (self.vnum, self))
	nums = self.vnum.split('.')
	node = link.outputs[0]

	libname = node.name
	if libname.endswith('.dylib'):
		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
	else:
		name3 = libname + '.' + self.vnum
		name2 = libname + '.' + nums[0]

	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
	if self.env.SONAME_ST:
		v = self.env.SONAME_ST % name2
		self.env.append_value('LINKFLAGS', v.split())

	# the following task is just to enable execution from the build dir :-/
	if self.env.DEST_OS != 'openbsd':
		outs = [node.parent.find_or_declare(name3)]
		if name2 != name3:
			outs.append(node.parent.find_or_declare(name2))
		self.create_task('vnum', node, outs)

	if getattr(self, 'install_task', None):
		self.install_task.hasrun = Task.SKIP_ME
		bld = self.bld
		path = self.install_task.dest
		if self.env.DEST_OS == 'openbsd':
			libname = self.link_task.outputs[0].name
			t1 = bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env, chmod=self.link_task.chmod)
			self.vnum_install_task = (t1,)
		else:
			t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
			t3 = bld.symlink_as(path + os.sep + libname, name3)
			if name2 != name3:
				t2 = bld.symlink_as(path + os.sep + name2, name3)
				self.vnum_install_task = (t1, t2, t3)
			else:
				self.vnum_install_task = (t1, t3)

	if '-dynamiclib' in self.env['LINKFLAGS']:
		# this requires after(propagate_uselib_vars)
		try:
			inst_to = self.install_path
		except AttributeError:
			inst_to = self.link_task.__class__.inst_to
		if inst_to:
			p = Utils.subst_vars(inst_to, self.env)
			path = os.path.join(p, self.link_task.outputs[0].name)
			self.env.append_value('LINKFLAGS', ['-install_name', path])
Ejemplo n.º 58
0
Archivo: msvc.py Proyecto: rivy/waf
def check_libs_msvc(self, libnames, is_static=False):
    for libname in Utils.to_list(libnames):
        self.check_lib_msvc(libname, is_static)
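
An illustrative configure usage for the helper above, checking a couple of common Windows system libraries:

def configure(conf):
    conf.load('msvc')
    conf.check_libs_msvc('kernel32 user32')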
Ejemplo n.º 59
0
# Thomas Nagy, 2005-2010 (ita)

"""
Classes and methods shared by tools providing support for C-like language such
as C/C++/D/Assembly/Go (this support module is almost never used alone).
"""

import os, re
from waflib import Task, Utils, Node, Errors
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
from waflib.Configure import conf

SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']

USELIB_VARS = Utils.defaultdict(set)
"""
Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
"""

USELIB_VARS['c']        = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
USELIB_VARS['cxx']      = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
USELIB_VARS['d']        = set(['INCLUDES', 'DFLAGS'])
USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])

USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
USELIB_VARS['cshlib']   = USELIB_VARS['cxxshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
USELIB_VARS['cstlib']   = USELIB_VARS['cxxstlib']   = set(['ARFLAGS', 'LINKDEPS'])

USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
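
A hedged sketch in the spirit of propagate_uselib_vars referenced above: the variables that apply to a task generator are the union of the sets of its features (the function name is illustrative):

def collect_uselib_vars(features):
	names = set()
	for feature in features:
		names |= USELIB_VARS[feature]  # defaultdict(set): unknown features add nothing
	return sorted(names)

# e.g. collect_uselib_vars(['cxx', 'cxxprogram']) includes INCLUDES, CXXFLAGS, LIB, LINKFLAGS, ...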
Ejemplo n.º 60
0
def configure(cfg):
    cfg.find_program('make', var='MAKE')
    #cfg.objcopy = cfg.find_program('%s-%s'%(cfg.env.TOOLCHAIN,'objcopy'), var='OBJCOPY', mandatory=True)
    cfg.find_program('arm-none-eabi-objcopy', var='OBJCOPY')
    env = cfg.env
    bldnode = cfg.bldnode.make_node(cfg.variant)

    def srcpath(path):
        return cfg.srcnode.make_node(path).abspath()

    def bldpath(path):
        return bldnode.make_node(path).abspath()

    env.AP_PROGRAM_FEATURES += ['ch_ap_program']

    kw = env.AP_LIBRARIES_OBJECTS_KW
    kw['features'] = Utils.to_list(kw.get('features', [])) + ['ch_ap_library']

    env.CH_ROOT = srcpath('modules/ChibiOS')
    env.AP_HAL_ROOT = srcpath('libraries/AP_HAL_ChibiOS')
    env.BUILDDIR = bldpath('modules/ChibiOS')
    env.BUILDROOT = bldpath('')
    env.SRCROOT = srcpath('')
    env.PT_DIR = srcpath('Tools/ardupilotwaf/chibios/image')
    env.UPLOAD_TOOLS = srcpath('Tools/ardupilotwaf')
    env.CHIBIOS_SCRIPTS = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts')
    env.TOOLS_SCRIPTS = srcpath('Tools/scripts')
    env.APJ_TOOL = srcpath('Tools/scripts/apj_tool.py')
    env.SERIAL_PORT = srcpath('/dev/serial/by-id/*_STLink*')

    # relative paths to pass to make, relative to directory that make is run from
    env.CH_ROOT_REL = os.path.relpath(env.CH_ROOT, env.BUILDROOT)
    env.AP_HAL_REL = os.path.relpath(env.AP_HAL_ROOT, env.BUILDROOT)
    env.BUILDDIR_REL = os.path.relpath(env.BUILDDIR, env.BUILDROOT)

    mk_custom = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/chibios_board.mk' %
                        env.BOARD)
    mk_common = srcpath(
        'libraries/AP_HAL_ChibiOS/hwdef/common/chibios_board.mk')
    # see if there is a board specific make file
    if os.path.exists(mk_custom):
        env.BOARD_MK = mk_custom
    else:
        env.BOARD_MK = mk_common

    if cfg.options.default_parameters:
        cfg.msg('Default parameters',
                cfg.options.default_parameters,
                color='YELLOW')
        env.DEFAULT_PARAMETERS = srcpath(cfg.options.default_parameters)

    # we need to run chibios_hwdef.py at configure stage to generate the ldscript.ld
    # that is needed by the remaining configure checks
    import subprocess

    if env.BOOTLOADER:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef-bl.dat' %
                            env.BOARD)
        env.BOOTLOADER_OPTION = "--bootloader"
    else:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef.dat' %
                            env.BOARD)
        env.BOOTLOADER_OPTION = ""
    hwdef_script = srcpath(
        'libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py')
    hwdef_out = env.BUILDROOT
    if not os.path.exists(hwdef_out):
        os.mkdir(hwdef_out)
    try:
        cmd = "python '{0}' -D '{1}' '{2}' {3}".format(hwdef_script, hwdef_out,
                                                       env.HWDEF,
                                                       env.BOOTLOADER_OPTION)
        ret = subprocess.call(cmd, shell=True)
    except Exception:
        cfg.fatal("Failed to process hwdef.dat")
    if ret != 0:
        cfg.fatal("Failed to process hwdef.dat ret=%d" % ret)

    load_env_vars(cfg.env)
    if env.HAL_WITH_UAVCAN:
        setup_can_build(cfg)