Beispiel #1
0
def find_program(self, filename, **kw):
	"""
	Search for a program on the operating system.

	Additional arguments in kw:
	* path_list: list of paths to look into
	* var: store the result to conf.env[var], by default use filename.upper()
	* exts: comma-separated string of extensions for the binary (do not forget the empty extension)
	* environ: mapping to use instead of os.environ

	when var is used, you may set os.environ[var] to help finding a specific program version, for example
	$ VALAC=/usr/bin/valac_test waf configure
	"""

	# comma-separated extension list; default depends on the platform
	exts = kw.get('exts', Options.platform == 'win32' and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')

	environ = kw.get('environ', os.environ)

	ret = ''
	filename = Utils.to_list(filename)

	# default variable name: first program name, upper-cased
	var = kw.get('var', '')
	if not var:
		var = filename[0].upper()

	# a value already present in conf.env or in the environment wins over the search
	if self.env[var]:
		ret = self.env[var]
	elif var in environ:
		ret = environ[var]

	path_list = kw.get('path_list', '')
	if not ret:
		if path_list:
			path_list = Utils.to_list(path_list)
		else:
			path_list = environ.get('PATH', '').split(os.pathsep)

		# NOTE(review): filename was already converted by Utils.to_list above,
		# so this isinstance check looks redundant — confirm before removing
		if not isinstance(filename, list):
			filename = [filename]

		# try every extension/name/path combination, stopping at the first existing file
		for a in exts.split(','):
			if ret:
				break
			for b in filename:
				if ret:
					break
				for c in path_list:
					if ret:
						break
					x = os.path.join(c, b + a)
					if os.path.isfile(x):
						ret = x

	self.msg('Checking for program ' + ','.join(filename), ret or False)
	self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))

	if not ret:
		# configuration error: stop with a message (custom via kw['errmsg'])
		self.fatal(kw.get('errmsg', '') or 'Could not find the program %s' % ','.join(filename))

	if var:
		self.env[var] = ret
	return ret
Beispiel #2
0
def set_qt4_libs_to_check(self):
	"""Initialize the lists of Qt4 libraries to check (release and debug variants)."""
	if not hasattr(self, 'qt4_vars'):
		self.qt4_vars = QT4_LIBS
	self.qt4_vars = Utils.to_list(self.qt4_vars)
	if not hasattr(self, 'qt4_vars_debug'):
		# debug variants carry a '_debug' suffix
		self.qt4_vars_debug = [lib + '_debug' for lib in self.qt4_vars]
	self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
Beispiel #3
0
def declare_chain(
    name="",
    rule=None,
    reentrant=True,
    color="BLUE",
    ext_in=[],
    ext_out=[],
    before=[],
    after=[],
    decider=None,
    scan=None,
):
    """
    Create a task class and register an extension mapping that processes files.

    :param name: name of the task class to create
    :param rule: rule (command string or function) executed by the task
    :param reentrant: number of outputs to re-add as sources (True counts as 1)
    :param ext_in: input extensions bound to the mapping
    :param ext_out: default output extensions
    :param decider: optional callable returning the output extensions per node
    :param scan: scanner method for the task class
    :returns: the mapping function installed in task_gen.mappings
    """
    ext_in = Utils.to_list(ext_in)
    ext_out = Utils.to_list(ext_out)
    cls = Task.task_factory(
        name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan
    )

    def x_file(self, node):
        # the decider may override the default output extensions
        ext = decider and decider(self, node) or cls.ext_out
        # bugfix: _ext_in was referenced while unbound whenever ext_in was empty
        _ext_in = ext_in[0] if ext_in else None
        out_source = [node.change_ext(x, ext_in=_ext_in) for x in ext]
        if reentrant:
            # re-inject the first `reentrant` outputs as new source files
            for i in range(reentrant):
                self.source.append(out_source[i])
        tsk = self.create_task(name, node, out_source)

    for x in cls.ext_in:
        task_gen.mappings[x] = x_file
    return x_file
Beispiel #4
0
def download_tool(tool, force=False, ctx=None):
	"""
	Download a tool from the waf repository and load it.

	:param tool: tool name (without the .py extension)
	:param force: unused, kept for backward compatibility
	:param ctx: context object providing the ``root`` node (bugfix: the original
		referenced ``self.root`` inside a plain function, a guaranteed NameError)
	:returns: the loaded tool module
	:raises Errors.WafError: when no repository provides a usable tool
	"""
	for x in Utils.to_list(Context.remote_repo):
		for sub in Utils.to_list(Context.remote_locs):
			url = '/'.join((x, sub, tool + '.py'))
			try:
				web = urlopen(url)
				if web.getcode() != 200:
					continue
			except Exception:
				# on python3 urlopen throws an exception; try the next location
				continue
			else:
				# save the file under waflib/extras so it can be imported
				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
				tmp.write(web.read())
				Logs.warn('downloaded %s from %s' % (tool, url))
				download_check(tmp)
				try:
					module = Context.load_tool(tool)
				except Exception:
					# the downloaded file is not importable: remove it and keep trying
					Logs.warn('module %s from %s is unusable' % (tool, url))
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	# bugfix: the original raised inside the outer loop via a misplaced for/else,
	# aborting after the first repository; raise only after all were tried
	raise Errors.WafError('Could not load the Waf tool')
Beispiel #5
0
def download_tool(tool, force=False, ctx=None):
	"""
	Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::

		$ waf configure --download

	:param tool: tool name (without the .py extension)
	:param force: unused, kept for backward compatibility
	:param ctx: context providing the ``root`` node used to save the file
	:returns: the loaded tool module
	:raises Errors.WafError: when the tool cannot be obtained from any location
	"""
	for x in Utils.to_list(Context.remote_repo):
		for sub in Utils.to_list(Context.remote_locs):
			url = '/'.join((x, sub, tool + '.py'))
			try:
				web = urlopen(url)
				if web.getcode() != 200:
					continue
			except Exception:
				# on python3 urlopen throws an exception; try the next location
				continue
			else:
				# save the file under waflib/extras so it can be imported
				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
				tmp.write(web.read())
				Logs.warn('Downloaded %s from %s' % (tool, url))
				download_check(tmp)
				try:
					module = Context.load_tool(tool)
				except Exception:
					# bugfix: narrowed from bare except so SystemExit/KeyboardInterrupt propagate
					Logs.warn('The tool %s from %s is unusable' % (tool, url))
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
Beispiel #6
0
def post_check(self, *k, **kw):
	"""
	Set configuration values after a test was run: defines and uselib variables.

	:param kw: test parameters; reads 'execute', 'success', 'define_ret',
		'define_name', 'header_name', 'uselib_store', 'features', ...
	:returns: the success value (0/1, or the execution output when define_ret is set)
	"""
	is_success = 0
	if kw['execute']:
		if kw['success'] is not None:
			if kw.get('define_ret', False):
				# keep the raw execution output as the result
				is_success = kw['success']
			else:
				is_success = (kw['success'] == 0)
	else:
		is_success = (kw['success'] == 0)
	if 'define_name' in kw:
		if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
			# string results are quoted defines; everything else is a conditional define
			if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str):
				self.define(kw['define_name'], is_success, quote=kw.get('quote', 1))
			else:
				self.define_cond(kw['define_name'], is_success)
		else:
			self.define_cond(kw['define_name'], is_success)
	if 'header_name' in kw:
		if kw.get('auto_add_header_name', False):
			self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
	if is_success and 'uselib_store' in kw:
		from waflib.Tools import ccroot
		_vars = set()
		for x in kw['features']:
			if x in ccroot.USELIB_VARS:
				_vars |= ccroot.USELIB_VARS[x]
		# bugfix: the loop variable was named `k`, shadowing the *k parameter
		for var in _vars:
			lk = var.lower()
			if lk in kw:
				val = kw[lk]
				if isinstance(val, str):
					val = val.rstrip(os.path.sep)
				self.env.append_unique(var + '_' + kw['uselib_store'], Utils.to_list(val))
	return is_success
Beispiel #7
0
def set_qt5_libs_to_check(self):
    """Initialize the lists of Qt5 libraries to check (release and debug variants)."""
    self.qt5_vars = Utils.to_list(getattr(self, "qt5_vars", QT5_LIBS))
    if not hasattr(self, "qt5_vars_debug"):
        # debug variants carry a '_debug' suffix
        self.qt5_vars_debug = [lib + "_debug" for lib in self.qt5_vars]
    self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
Beispiel #8
0
def download_tool(tool,force=False,ctx=None):
	"""
	Download the Waf tool from the remote repositories and load it.

	:param tool: tool name (without the .py extension)
	:param force: unused, kept for backward compatibility
	:param ctx: context providing the ``root`` node used to save the file
	:returns: the loaded tool module
	:raises Errors.WafError: when the tool cannot be obtained from any location
	"""
	for x in Utils.to_list(Context.remote_repo):
		for sub in Utils.to_list(Context.remote_locs):
			url='/'.join((x,sub,tool+'.py'))
			try:
				web=urlopen(url)
				try:
					# some urlopen results have no getcode() method
					if web.getcode()!=200:
						continue
				except AttributeError:
					pass
			except Exception:
				# on python3 urlopen throws an exception; try the next location
				continue
			else:
				# save the file under waflib/extras so it can be imported
				tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
				tmp.write(web.read(),'wb')
				Logs.warn('Downloaded %s from %s'%(tool,url))
				download_check(tmp)
				try:
					module=Context.load_tool(tool)
				except Exception:
					# the downloaded file is not importable: remove it and keep trying
					Logs.warn('The tool %s from %s is unusable'%(tool,url))
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
Beispiel #9
0
def find_file(self, filename, path_list=[]):
	"""Return the first existing file among all filename/directory combinations, or fail."""
	for fname in Utils.to_list(filename):
		for directory in Utils.to_list(path_list):
			candidate = os.path.join(directory, fname)
			if os.path.exists(candidate):
				return candidate
	self.fatal('Could not find %r' % filename)
Beispiel #10
0
def __boost_get_libs_path(self, *k, **kw):
	"""
	Return the boost library path and all the library files found in it.

	:param kw: may carry 'files' (explicit file list) or 'libs' (explicit directory)
	:returns: tuple (path node, list of file nodes)
	"""
	if 'files' in kw:
		return self.root.find_dir('.'), Utils.to_list(kw['files'])
	libs = k and k[0] or kw.get('libs', None)
	# bugfix: both names could be referenced while unbound below
	path = None
	files = []
	if libs:
		path = self.root.find_dir(libs)
		# bugfix: find_dir returns None for a missing directory; the original
		# crashed with AttributeError instead of falling back to the search
		if path:
			files = path.ant_glob('*boost_*')
	if not libs or not files:
		# search the standard locations (and their 64-bit variants)
		for d in Utils.to_list(self.environ.get('LIB', [])) + BOOST_LIBS:
			path = self.root.find_dir(d)
			if path:
				files = path.ant_glob('*boost_*')
				if files:
					break
			path = self.root.find_dir(d + '64')
			if path:
				files = path.ant_glob('*boost_*')
				if files:
					break
	if not path:
		if libs:
			self.end_msg('libs not found in %s' % libs)
			self.fatal('The configuration failed')
		else:
			self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
			self.fatal('The configuration failed')

	self.to_log('Found the boost path in %r with the libraries:' % path)
	for x in files:
		self.to_log('    %r' % x)
	return path, files
Beispiel #11
0
def xcheck_host_envar(conf, name, wafname=None):
	"""Import a cross-compilation variable from the environment into conf.env[wafname]."""
	wafname = wafname or name

	chost, chost_envar = get_chost_stuff(conf)

	# a CHOST-specific variable (e.g. <triplet>_CFLAGS) takes precedence
	specific = os.environ.get('%s_%s' % (chost_envar, name), None) if chost else None
	if specific:
		value = Utils.to_list(specific)
		conf.env[wafname] += value
		conf.msg('Will use cross-compilation %s from %s_%s' \
		 % (name, chost_envar, name),
		 " ".join(quote(x) for x in value))
		return

	envar = os.environ.get('HOST_%s' % name, None)
	if envar is None:
		return

	# an empty HOST_<name> still produces a single empty entry
	value = Utils.to_list(envar) if envar != '' else [envar]

	conf.env[wafname] = value
	conf.msg('Will use cross-compilation %s from HOST_%s' \
	 % (name, name),
	 " ".join(quote(x) for x in value))
Beispiel #12
0
def apply_haxe(self):
    """
    Create a haxe compilation task from the task generator attributes
    (target, classpath, flags, libs, swflib).

    :raises ValueError: when the target extension is not .swf/.js/.n
    """
    Utils.def_attrs(self,
        target="", classpath="", flags="", libs="", swflib=None)

    classpath = Utils.to_list(self.classpath)
    flags = Utils.to_list(self.flags)
    target = self.target

    inputs = []

    if target.endswith(".swf"):
        flags += ["-swf", target, "--flash-strict", "-D", "nativeTrace"]
        if self.swflib is not None:
            # the swf library is both a task input and a compiler flag
            swflib = self.path.get_bld().make_node(self.swflib)
            inputs += [swflib]
            flags += ["-swf-lib", str(swflib)]
    elif target.endswith(".js"):
        flags += ["-js", target]
    elif target.endswith(".n"):
        flags += ["-neko", target]
    else:
        # bugfix: raising a plain string is a TypeError on Python 3
        raise ValueError("Unsupported target file type!")

    for lib in Utils.to_list(self.libs):
        flags += ["-lib", lib]

    task = self.create_task("haxe", inputs, self.path.get_bld().make_node(target))
    task.classpath = [self.path.find_node(cp) for cp in classpath]
    task.env.flags = flags
    self.haxe_task = task
Beispiel #13
0
	def load(self,input,tooldir=None,funs=None,download=True):
		"""
		Load Waf tools and run their configuration functions.

		:param input: tool name or list of tool names
		:param tooldir: optional directories to search for the tools
		:param funs: functions to evaluate instead of the tool's 'configure'
		:param download: kept for signature compatibility; the download is
			actually triggered by Options.options.download
		"""
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			# cache key: the same tool/env/funs combination is only loaded once
			mag=(tool,id(self.env),funs)
			if mag in self.tool_cache:
				self.to_log('(tool %s is already loaded, skipping)'%tool)
				continue
			self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir)
			except ImportError as e:
				# fall back to downloading the tool when --download was given
				if Options.options.download:
					module=download_tool(tool,ctx=self)
					if not module:
						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
				else:
					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
			except Exception as e:
				# log the context before re-raising unexpected errors
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
			if funs is not None:
				self.eval_rules(funs)
			else:
				# run the tool's configure function (or evaluate it as rules)
				func=getattr(module,'configure',None)
				if func:
					if type(func)is type(Utils.readf):func(self)
					else:self.eval_rules(func)
			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
Beispiel #14
0
def apply_read_format_extensions(self):
    """Build the pandoc read-format string, appending enabled (+) and disabled (-) extensions."""
    enabled = Utils.to_list(getattr(self, 'enabled_exts', []))
    disabled = Utils.to_list(getattr(self, 'disabled_exts', []))
    fmt = getattr(self, 'read_format', 'markdown')
    for ext in enabled:
        fmt += '+' + ext
    for ext in disabled:
        fmt += '-' + ext
    self.read_format = fmt
Beispiel #15
0
def set_qt5_libs_to_check(self):
	"""
	Compute the list of Qt5 libraries to check by scanning env.QTLIBS,
	unless qt5_vars was set explicitly; also derive the debug variants.

	:raises: conf.fatal when no Qt5 library can be found in env.QTLIBS
	"""
	self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
	if not self.qt5_vars:
		dirlst = Utils.listdir(self.env.QTLIBS)

		pat = self.env.cxxshlib_PATTERN
		if Utils.is_win32:
			pat = pat.replace('.dll', '.lib')
		if self.environ.get('QT5_FORCE_STATIC'):
			pat = self.env.cxxstlib_PATTERN
		if Utils.unversioned_sys_platform() == 'darwin':
			# bugfix: raw string — '\.' is an invalid escape sequence in a plain literal
			pat = r"%s\.framework"
		re_qt = re.compile(pat % 'Qt5?(?P<name>.*)' + '$')
		for x in dirlst:
			m = re_qt.match(x)
			if m:
				self.qt5_vars.append("Qt5%s" % m.group('name'))
		if not self.qt5_vars:
			self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)

	qtextralibs = getattr(Options.options, 'qtextralibs', None)
	if qtextralibs:
		self.qt5_vars.extend(qtextralibs.split(','))

	if not hasattr(self, 'qt5_vars_debug'):
		self.qt5_vars_debug = [a + '_DEBUG' for a in self.qt5_vars]
	self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
Beispiel #16
0
def apply_msi(self):
	"""
	Configure the WiX/MSI build: extensions, defines, binder paths and
	heat-harvesting tasks for the directories listed in self.heat.
	"""
	self.env.append_value('WIX_EXTENSION', Utils.to_list(getattr(self, 'extensions', '')))
	self.env.append_value('WIX_DEFINES', Utils.to_list(getattr(self, 'defines', '')))

	# Add buildtag define
	self.env.append_value('WIX_DEFINES', [ 'BUILDTAG=%s' % self.env.BUILDTAG ])

	# Ensure the source directory path is added to the BINDERPATH variable
	self.env.append_value('WIX_BINDERPATH', self.path.abspath())

	# Dict of ComponentName -> Directory that needs harvesting
	# bugfix: dict.iteritems() is Python-2-only; items() works on both
	for k, v in getattr(self, 'heat', {}).items():
		n = self.path.find_dir(v)
		if not n:
			raise Errors.WafError("heat directory not found: %r in %r" % (v, self))

		out = self.path.find_or_declare(k + '.wxs')
		tsk = self.create_task('heat', None, out)

		tsk.source_dir = n

		self.env.HEAT_DIRECTORY = n.bldpath()
		self.env.HEAT_COMPONENT = k
		self.env.append_value('WIX_BINDERPATH', [ n.bldpath() ])
		# generated .wxs files become additional sources
		self.source = Utils.to_list(self.source) + tsk.outputs
Beispiel #17
0
def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
	"""
	Create a task class and register an extension mapping that processes files.

	:param name: task class name (defaults to the rule itself)
	:param reentrant: number of outputs to re-add as sources; when None,
		outputs matching a known mapping are re-processed automatically
	:param decider: optional callable returning the output extensions per node
	:param install_path: when set, install the task outputs there
	:returns: the mapping function installed in task_gen.mappings
	"""
	ext_in=Utils.to_list(ext_in)
	ext_out=Utils.to_list(ext_out)
	if not name:
		name=rule
	cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
	def x_file(self,node):
		ext=decider and decider(self,node)or cls.ext_out
		# bugfix: _ext_in was referenced while unbound whenever ext_in was empty
		_ext_in=ext_in[0] if ext_in else None
		tsk=self.create_task(name,node)
		cnt=0
		keys=set(self.mappings.keys())|set(self.__class__.mappings.keys())
		for x in ext:
			k=node.change_ext(x,ext_in=_ext_in)
			tsk.outputs.append(k)
			if reentrant!=None:
				if cnt<int(reentrant):
					self.source.append(k)
			else:
				# by default, re-process outputs that match a known mapping
				for y in keys:
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt+=1
		if install_path:
			self.bld.install_files(install_path,tsk.outputs)
		return tsk
	for x in cls.ext_in:
		task_gen.mappings[x]=x_file
	return x_file
	def toolchain_linker_get_libs(self):
		'''Return a list of tuples containing lib name, task name and list of
		search paths.

		libs = [(str, str, []), (str, str, []), ... ]

		The result is ordered: used task libraries first (deepest dependency
		nesting last), then plain system libraries from the 'lib' attribute,
		then library-path-only entries.
		'''
		tgen = self.tgen
		name = tgen.get_name()
		libs = []
		uses = []
		# collect (nesting, lib, name, paths) tuples for all transitive uses
		for use in Utils.to_list(getattr(tgen, 'use', [])):
			uses.extend(self._get_libs(use, 0))

		# sort by task with highest nesting
		uses = sorted(uses, key=lambda use: use[0])
		uses = uses[::-1]

		# insert tasks at start of list
		# lowest nesting at start
		for (_, lib, name, paths) in uses:
			if name:
				entry = (lib, name, paths)
				if entry not in libs:
					libs.insert(0, entry)

		# append system libraries at end of list
		for lib in Utils.to_list(getattr(tgen, 'lib', [])):
			libs.append((lib, None, None))
		# finally the entries without a task name (e.g. fake_lib search paths)
		for (_, lib, name, paths) in uses:
			if not name:
				entry = (lib, name, paths)
				if entry not in libs:
					libs.append(entry)
		return libs
Beispiel #19
0
 def load(self, input, tooldir=None, funs=None, download=True):
     """
     Load Waf tools, downloading a replacement from the remote repository
     when the import fails and the --download option was given.

     :param input: tool name or list of tool names
     :param tooldir: optional directories to search for the tools
     :param funs: part of the tool cache key
     :param download: kept for signature compatibility
     """
     tools = Utils.to_list(input)
     if tooldir:
         tooldir = Utils.to_list(tooldir)
     for tool in tools:
         # cache key: the same tool/env/funs combination is only loaded once
         mag = (tool, id(self.env), funs)
         if mag in self.tool_cache:
             self.to_log("(tool %s is already loaded, skipping)" % tool)
             continue
         self.tool_cache.append(mag)
         module = None
         try:
             module = Context.load_tool(tool, tooldir)
         # bugfix: "except ImportError, e" is Python-2-only syntax
         except ImportError as e:
             if Options.options.download:
                 module = download_tool(tool, ctx=self)
                 if not module:
                     self.fatal(
                         "Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s"
                         % (tool, sys.path, e)
                     )
             else:
                 self.fatal(
                     "Could not load the Waf tool %r from %r (try the --download option?):\n%s" % (tool, sys.path, e)
                 )
         except Exception as e:
             # log the context before re-raising unexpected errors
             self.to_log("imp %r (%r & %r)" % (tool, tooldir, funs))
             self.to_log(Utils.ex_stack())
             raise
Beispiel #20
0
	def exec_mf(self):
		"""
		Create manifest files for VS-like compilers (msvc, ifort, ...)

		Locates the .manifest node among the task outputs and embeds it into
		the first output (the binary) using the MT tool.

		:returns: the exit status of the MT command, or 0 when there is
			nothing to embed
		"""
		if not self.env.MT:
			return 0

		manifest = None
		for out_node in self.outputs:
			if out_node.name.endswith('.manifest'):
				manifest = out_node.abspath()
				break
		else:
			# Should never get here.  If we do, it means the manifest file was
			# never added to the outputs list, thus we don't have a manifest file
			# to embed, so we just return.
			return 0

		# embedding mode. Different for EXE's and DLL's.
		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
		# NOTE(review): mode is an int here (1/2) while similar code elsewhere
		# uses the strings '1'/'2'; the %s formatting below makes both work
		mode = ''
		for x in Utils.to_list(self.generator.features):
			if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
				mode = 1
			elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
				mode = 2

		Logs.debug('msvc: embedding manifest in mode %r', mode)

		# build the MT command line: mt -manifest <file> -outputresource:<bin>;<mode>
		lst = [] + self.env.MT
		lst.extend(Utils.to_list(self.env.MTFLAGS))
		lst.extend(['-manifest', manifest])
		lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))

		return super(link_task, self).exec_command(lst)
	def _get_libs(self, target, nest):
		"""
		Recursively collect library information for a 'use' dependency.

		:param target: name of the task generator to inspect
		:param nest: current recursion depth (used later for ordering)
		:returns: list of (nest, lib, task_name, paths) tuples; task_name is
			None for system/fake libraries
		"""
		try:
			tgen = self.bld.get_tgen_by_name(target)
		except Errors.WafError:
			# not a task generator in this build: nothing to collect
			return []
		name = tgen.get_name()
		features = Utils.to_list(getattr(tgen, 'features', []))

		if 'fake_lib' in features:
			# external library: only the (normalized) search paths matter
			paths = [p.replace('\\', '/') for p in tgen.lib_paths]
			return [(nest, name, None, paths)]

		if not set(('c','cxx')) & set(features):
			return []

		libs = [(nest, name, name, None)]

		if set(('cshlib', 'cxxshlib')) & set(features):
			# shared libraries do not propagate their own dependencies
			return libs

		for lib in Utils.to_list(getattr(tgen, 'lib', [])):
			libs.append((nest, lib, None, None))

		# recurse into the static library's own 'use' list
		for use in getattr(tgen, 'use', []):
			libs.extend(self._get_libs(use, nest+1))

		return libs
Beispiel #22
0
def xcheck_host_prog(conf, name, tool, wafname=None):
	"""Select a cross-compilation program for conf.env[wafname] from the environment or CHOST."""
	wafname = wafname or name

	chost, chost_envar = get_chost_stuff(conf)

	# a CHOST-specific variable (e.g. <triplet>_CC) takes precedence
	specific = os.environ.get('%s_%s' % (chost_envar, name)) if chost else None
	if specific:
		value = Utils.to_list(specific)
		conf.env[wafname] += value
		conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
		 " ".join(quote(x) for x in value))
		return

	envar = os.environ.get('HOST_%s' % name)
	if envar is not None:
		value = Utils.to_list(envar)
		conf.env[wafname] = value
		conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
		 " ".join(quote(x) for x in value))
		return

	# keep any value that was already configured
	if conf.env[wafname]:
		return

	# last resort: derive the tool name from the CHOST triplet
	if chost:
		prog = '%s-%s' % (chost, tool)
		conf.env[wafname] = prog
		conf.msg('Will use cross-compilation %s from CHOST' % wafname, prog)
Beispiel #23
0
def exec_cfg(self, kw):
	"""
	Run a pkg-config style command and process its output according to
	the keyword arguments: version checks, modversion queries, variable
	queries, or flag parsing into uselib variables.

	:param kw: parameters; reads 'path', 'package', 'atleast_pkgconfig_version',
		'modversion', 'variables', 'define_variable', 'args', 'uselib_store', ...
	:returns: None, a version string, or the raw command output
	"""

	# pkg-config version
	if 'atleast_pkgconfig_version' in kw:
		cmd = [kw['path'], '--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
		self.cmd_and_log(cmd)
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# checking for the version of a module
	for x in cfg_ver:
		y = x.replace('-', '_')
		if y in kw:
			self.cmd_and_log([kw['path'], '--%s=%s' % (x, kw[y]), kw['package']])
			if not 'okmsg' in kw:
				kw['okmsg'] = 'yes'
			self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
			break

	# retrieving the version of a module
	if 'modversion' in kw:
		version = self.cmd_and_log([kw['path'], '--modversion', kw['modversion']]).strip()
		self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
		return version

	lst = [kw['path']]

	# pass --define-variable pairs from kw or the stored PKG_CONFIG_DEFINES
	defi = kw.get('define_variable', None)
	if not defi:
		defi = self.env.PKG_CONFIG_DEFINES or {}
	for key, val in defi.items():
		lst.append('--define-variable=%s=%s' % (key, val))

	if kw['package']:
		lst.extend(Utils.to_list(kw['package']))

	# retrieving variables of a module
	if 'variables' in kw:
		env = kw.get('env', self.env)
		uselib = kw.get('uselib_store', kw['package'].upper())
		vars = Utils.to_list(kw['variables'])
		for v in vars:
			val = self.cmd_and_log(lst + ['--variable=' + v]).strip()
			var = '%s_%s' % (uselib, v)
			env[var] = val
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	if 'args' in kw:
		lst += Utils.to_list(kw['args'])
	# so we assume the command-line will output flags to be parsed afterwards
	ret = self.cmd_and_log(lst)
	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
	self.parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
	return ret
Beispiel #24
0
def exec_mf(self):
	"""Embed the .manifest output into the first output binary using the MT tool."""
	env = self.env
	if not env['MT']:
		return 0
	self.do_manifest = False
	outfile = self.outputs[0].abspath()

	# look for the manifest node among the task outputs
	manifest = None
	for node in self.outputs:
		if node.name.endswith('.manifest'):
			manifest = node.abspath()
			break
	if manifest is None:
		# nothing was generated, nothing to embed
		return 0

	# embedding mode differs between executables ('1') and shared libs ('2')
	if 'cprogram' in self.generator.features or 'cxxprogram' in self.generator.features:
		mode = '1'
	elif 'cshlib' in self.generator.features or 'cxxshlib' in self.generator.features:
		mode = '2'
	else:
		mode = ''
	debug('msvc: embedding manifest in mode %r' % mode)

	cmd = []
	cmd.extend([env['MT']])
	cmd.extend(Utils.to_list(env['MTFLAGS']))
	cmd.extend(Utils.to_list("-manifest"))
	cmd.extend(Utils.to_list(manifest))
	cmd.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
	return self.exec_command(cmd)
Beispiel #25
0
def task_factory(name, func=None, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None):
	"""
	Return a new Task subclass whose run method is compiled from the given
	rule; provided for compatibility with waf 1.5.
	"""
	attrs = dict(
		vars=vars,
		color=color,
		name=name,
		ext_in=Utils.to_list(ext_in),
		ext_out=Utils.to_list(ext_out),
		before=Utils.to_list(before),
		after=Utils.to_list(after),
		shell=shell,
		scan=scan,
	)

	# a string rule is compiled at runtime, a callable is used directly
	if isinstance(func, str):
		attrs['run_str'] = func
	else:
		attrs['run'] = func

	cls = type(Task)(name, (Task,), attrs)
	global classes
	classes[name] = cls
	return cls
def make_dot_file(ctx):
    """Render the task-generator dependency graph of the build as a Graphviz DAG."""
    # Lazy load module
    from bld.project_paths import project_paths_join as ppj

    # Drop empty task groups; the first group only holds project paths
    task_groups = [g for g in ctx.groups if len(g) != 0][1:]

    dag = digraph()
    for group in task_groups:
        for taskgen in group:
            tgen_name = taskgen.get_name()
            add_nodes(dag, [tgen_name])

            # Edges from every declared dependency into this task generator
            for dep in Utils.to_list(getattr(taskgen, "deps", [])):
                dep_name = Path(dep).name
                add_nodes(dag, [dep_name])
                add_edges(dag, [(dep_name, tgen_name)])

            # Edges from the task generator to each of its targets
            for target in Utils.to_list(getattr(taskgen, "target", [])):
                target_name = Path(target).name
                add_nodes(dag, [target_name])
                add_edges(dag, [(tgen_name, target_name)])

    dag = apply_styles(dag, styles)

    # Save DAG
    dag.render(ppj("OUT_FIGURES", "dag"))
Beispiel #27
0
def apply_haxe(self):
    """
    Create a haxe compilation task from the task generator attributes
    (target, classpath, flags, libs, swflib), including the source map
    output for debug JavaScript builds.

    :raises ValueError: when the target extension is not .swf/.js/.n
    """
    Utils.def_attrs(self,
        target="", classpath="", flags="", libs="", swflib=None)

    classpath = Utils.to_list(self.classpath)
    flags = Utils.to_list(self.flags)
    target = self.target

    inputs = []
    outputs = [ self.path.get_bld().make_node(target) ]

    if target.endswith(".swf"):
        flags += ["-swf", target, "--flash-strict", "-D", "nativeTrace",
            "-swf-header", "640:480:60:ffffff"]
        if self.swflib is not None:
            # the swf library is both a task input and a compiler flag
            swflib = self.path.get_bld().make_node(self.swflib)
            inputs += [swflib]
            flags += ["-swf-lib", str(swflib)]
    elif target.endswith(".js"):
        if "-debug" in flags:
            # debug builds also emit a source map next to the target
            outputs += [self.path.get_bld().make_node(target + ".map")]
        flags += ["-js", target, "--js-modern"]
    elif target.endswith(".n"):
        flags += ["-neko", target]
    else:
        # bugfix: raising a plain string is a TypeError on Python 3
        raise ValueError("Unsupported target file type!")

    for lib in Utils.to_list(self.libs):
        flags += ["-lib", lib]

    task = self.create_task("haxe", inputs, outputs)
    task.classpath = classpath
    task.env.flags = flags
    self.haxe_task = task
Beispiel #28
0
File: msvc.py Project: zsx/waf
def exec_mf(self):
	"""
	Embed the manifest file produced next to the first input into the
	output binary, using the MT tool (old-style waf/msvc code).

	:returns: the MT exit status, or 0 when MT is unavailable or no
		manifest file exists
	"""
	env = self.env
	outfile = self.inputs[0].bldpath()
	manifest = outfile + '.manifest'
	if os.path.exists(manifest):
		debug('msvc: manifesttool')
		mtool = env['MT']
		if not mtool:
			return 0

		mode = ''
		# embedding mode. Different for EXE's and DLL's.
		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
		if 'cprogram' in self.generator.features:
			mode = '1'
		elif 'cshlib' in self.generator.features or 'cxxshlib' in self.generator.features:
			mode = '2'

		debug('msvc: embedding manifest')
		#flags = ' '.join(env['MTFLAGS'] or [])

		# build the MT command line: mt -manifest <file> -outputresource:<bin>;<mode>
		lst = []
		lst.extend(Utils.to_list(env['MT']))
		lst.extend(Utils.to_list(env['MTFLAGS']))
		lst.extend(Utils.to_list("-manifest"))
		lst.extend(Utils.to_list(manifest))
		lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))

		#cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
		#	manifest, outfile, mode)
		lst = [lst]
		ret = self.exec_command(*lst)

		return ret
Beispiel #29
0
    def group_dependent_assembly(self):
        '''
        cycle over all dependencies and sort them into types
        '''
        get = self.bld.get_tgen_by_name
        env = self.env
        for use_name in Utils.to_list(getattr(self.tg, 'use', [])):
            uselib = use_name.upper()
            csflags = getattr(env, 'CSFLAGS_' + uselib, None)
            if csflags:
                # external reference known via CSFLAGS: record flag + package
                pkg = getattr(env, 'PKG_' + uselib, None)
                for ref in Utils.to_list(csflags):
                    self.external_refs.append({"reference": ref[3:], "package": pkg})
                continue

            try:
                other = get(use_name)
            except Errors.WafError:
                # not a task generator in this build: treat as a .NET reference
                self.dotnet_refs.append(use_name)
                continue

            other.post()
            tsk = getattr(other, 'cs_task', None) or getattr(other, 'link_task', None)
            if tsk:
                self.projects.append(other)
Beispiel #30
0
	def load(self,input,tooldir=None,funs=None,with_sys_path=True,cache=False):
		"""
		Load Waf tools and run their configuration functions.

		:param input: tool name or list of tool names
		:param tooldir: optional directories to search for the tools
		:param funs: functions to evaluate instead of the tool's 'configure'
		:param with_sys_path: forwarded to Context.load_tool
		:param cache: when True, skip tools already loaded for this env
		"""
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			if cache:
				# cache key: the same tool/env/tooldir/funs combination is only loaded once
				mag=(tool,id(self.env),tooldir,funs)
				if mag in self.tool_cache:
					self.to_log('(tool %s is already loaded, skipping)'%tool)
					continue
				self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path)
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e))
			except Exception as e:
				# log the context before re-raising unexpected errors
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
			if funs is not None:
				self.eval_rules(funs)
			else:
				# run the tool's configure function (or evaluate it as rules)
				func=getattr(module,'configure',None)
				if func:
					if type(func)is type(Utils.readf):func(self)
					else:self.eval_rules(func)
			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
Beispiel #31
0
def validate_c(self, kw):
	"""
	Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build`

	:param compiler: c or cxx (tries to guess what is best)
	:type compiler: string
	:param type: cprogram, cshlib, cstlib - not required if *features are given directly*
	:type type: binary to create
	:param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib``
	:type feature: list of string
	:param fragment: provide a piece of code for the test (default is to let the system create one)
	:type fragment: string
	:param uselib_store: define variables after the test is executed (IMPORTANT!)
	:type uselib_store: string
	:param use: parameters to use for building (just like the normal *use* keyword)
	:type use: list of string
	:param define_name: define to set when the check is over
	:type define_name: string
	:param execute: execute the resulting binary
	:type execute: bool
	:param define_ret: if execute is set to True, use the execution output in both the define and the return value
	:type define_ret: bool
	:param header_name: check for a particular header
	:type header_name: string
	:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
	:type auto_add_header_name: bool
	"""

	if not 'build_fun' in kw:
		kw['build_fun'] = build_fun

	# work on a derived copy so the test cannot corrupt the main env
	if not 'env' in kw:
		kw['env'] = self.env.derive()
	env = kw['env']

	# guess the compiler: prefer c++ when a c++ compiler is configured
	if not 'compiler' in kw and not 'features' in kw:
		kw['compiler'] = 'c'
		if env.CXX_NAME and Task.classes.get('cxx'):
			kw['compiler'] = 'cxx'
			if not self.env.CXX:
				self.fatal('a c++ compiler is required')
		else:
			if not self.env.CC:
				self.fatal('a c compiler is required')

	if not 'compile_mode' in kw:
		kw['compile_mode'] = 'c'
		if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx':
			kw['compile_mode'] = 'cxx'

	if not 'type' in kw:
		kw['type'] = 'cprogram'

	if not 'features' in kw:
		if not 'header_name' in kw or kw.get('link_header_test', True):
			kw['features'] = [kw['compile_mode'], kw['type']] # "c ccprogram"
		else:
			kw['features'] = [kw['compile_mode']]
	else:
		kw['features'] = Utils.to_list(kw['features'])

	if not 'compile_filename' in kw:
		kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')

	def to_header(dct):
		# build '#include <...>' lines from the header_name entry, if any
		if 'header_name' in dct:
			dct = Utils.to_list(dct['header_name'])
			return ''.join(['#include <%s>\n' % x for x in dct])
		return ''

	#OSX
	if 'framework_name' in kw:
		fwkname = kw['framework_name']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = fwkname.upper()
		if not kw.get('no_header', False):
			if not 'header_name' in kw:
				kw['header_name'] = []
			fwk = '%s/%s.h' % (fwkname, fwkname)
			if kw.get('remove_dot_h'):
				fwk = fwk[:-2]
			kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]

		kw['msg'] = 'Checking for framework %s' % fwkname
		kw['framework'] = fwkname
		#kw['frameworkpath'] = set it yourself

	# generate the test code fragment for each kind of check
	if 'function_name' in kw:
		fu = kw['function_name']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for function %s' % fu
		kw['code'] = to_header(kw) + SNIP_FUNCTION % fu
		if not 'uselib_store' in kw:
			kw['uselib_store'] = fu.upper()
		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(fu)

	elif 'type_name' in kw:
		tu = kw['type_name']
		if not 'header_name' in kw:
			kw['header_name'] = 'stdint.h'
		if 'field_name' in kw:
			field = kw['field_name']
			kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field}
			if not 'msg' in kw:
				kw['msg'] = 'Checking for field %s in %s' % (field, tu)
			if not 'define_name' in kw:
				kw['define_name'] = self.have_define((tu + '_' + field).upper())
		else:
			kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu}
			if not 'msg' in kw:
				kw['msg'] = 'Checking for type %s' % tu
			if not 'define_name' in kw:
				kw['define_name'] = self.have_define(tu.upper())

	elif 'header_name' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for header %s' % kw['header_name']

		l = Utils.to_list(kw['header_name'])
		assert len(l), 'list of headers in header_name is empty'

		kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
		if not 'uselib_store' in kw:
			kw['uselib_store'] = l[0].upper()
		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(l[0])

	if 'lib' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for library %s' % kw['lib']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = kw['lib'].upper()

	if 'stlib' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for static library %s' % kw['stlib']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = kw['stlib'].upper()

	if 'fragment' in kw:
		# an additional code fragment may be provided to replace the predefined code
		# in custom headers
		kw['code'] = kw['fragment']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for code snippet'
		if not 'errmsg' in kw:
			kw['errmsg'] = 'no'

	for (flagsname,flagstype) in (('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')):
		if flagsname in kw:
			if not 'msg' in kw:
				kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
			if not 'errmsg' in kw:
				kw['errmsg'] = 'no'

	if not 'execute' in kw:
		kw['execute'] = False
	if kw['execute']:
		# the produced binary must be runnable
		kw['features'].append('test_exec')
		kw['chmod'] = Utils.O755

	if not 'errmsg' in kw:
		kw['errmsg'] = 'not found'

	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	if not 'code' in kw:
		kw['code'] = SNIP_EMPTY_PROGRAM

	# if there are headers to append automatically to the next tests
	if self.env[INCKEYS]:
		kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']

	# in case defines lead to very long command-lines
	if kw.get('merge_config_header', False) or env.merge_config_header:
		kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
		env.DEFINES = [] # modify the copy

	if not kw.get('success'): kw['success'] = None

	if 'define_name' in kw:
		self.undefine(kw['define_name'])
	if not 'msg' in kw:
		self.fatal('missing "msg" in conf.check(...)')
Beispiel #32
0
 def add_path(dct, path, var):
    """Prepend the given path entries to the environment variable *var* inside *dct*."""
    entries = Utils.to_list(path) + [os.environ.get(var, '')]
    dct[var] = os.pathsep.join(entries)
Beispiel #33
0
File: msvc.py Project: rivy/waf
def check_libs_msvc(self, libnames, is_static=False):
    """Run check_lib_msvc for every library named in *libnames*.

    :param libnames: library names (list or space-separated string)
    :param is_static: look for static libraries when True
    """
    names = Utils.to_list(libnames)
    for name in names:
        self.check_lib_msvc(name, is_static)
Beispiel #34
0
    def process_qt(self):
        """
        Process Qt libraries for packaging for macOS.

        Copies the Qt frameworks/libraries that an application needs into
        the app bundle's Frameworks directory and performs cleanup on the
        copied frameworks to conform to Apple's framework bundle structure.
        This is required so that app bundles can be properly code signed.

        No-op unless building for darwin with a 'qtlibs' resource directory.
        """
        if 'darwin' not in self.bld.platform or 'qtlibs' not in getattr(
                self, 'dir_resources', []):
            return

        # Don't need the process_resources method to process the qtlibs folder
        # since we are handling it
        self.dir_resources.remove('qtlibs')
        executable_dest_node = self.outputs[0].parent

        output_folder_node = self.bld.get_output_folders(
            self.bld.platform, self.bld.config)[0]
        qt_plugin_source_node = output_folder_node.make_node("qtlibs/plugins")
        qt_plugins_dest_node = executable_dest_node.make_node("qtlibs/plugins")

        # To be on the safe side check if the destination qtlibs is a link and
        # unlink it before we create the plugins copy/link
        if os.path.islink(qt_plugins_dest_node.parent.abspath()):
            os.unlink(qt_plugins_dest_node.parent.abspath())

        self.bld.create_symlink_or_copy(qt_plugin_source_node,
                                        qt_plugins_dest_node.abspath(),
                                        postpone=False)

        qt_libs_source_node = output_folder_node.make_node("qtlibs/lib")

        # Executable dest node will be something like
        # Application.app/Contents/MacOS. The parent will be Contents, which
        # needs to contain the Frameworks folder according to macOS Framework
        # bundle structure
        frameworks_node = executable_dest_node.parent.make_node("Frameworks")
        frameworks_node.mkdir()

        def post_copy_cleanup(dst_framework_node):
            # Apple does not like any file in the top level directory of an
            # embedded framework. In 5.6 Qt has prl build files in the top
            # level directory so we just delete them from the embedded
            # framework since we won't be building anything.
            # ('prl_node' avoids shadowing the 'file' builtin.)
            for prl_node in dst_framework_node.ant_glob("*.prl"):
                prl_node.delete()

        # on macOS there is not a clean way to get Qt dependencies on itself,
        # so we have to scan the lib using otool and then add any of those Qt
        # dependencies to our set.

        qt_frameworks_to_copy = set()

        qt5_vars = Utils.to_list(QT5_LIBS)
        for i in qt5_vars:
            uselib = i.upper()
            if uselib in self.dependencies:
                # QT for darwin does not have '5' in the name, so we need to remove it
                darwin_adjusted_name = i.replace('Qt5', 'Qt')
                framework_name = darwin_adjusted_name + ".framework"
                src = qt_libs_source_node.make_node(framework_name).abspath()

                if os.path.exists(src):
                    qt_frameworks_to_copy.add(framework_name)

                    # otool -L will generate output like this:
                    #     @rpath/QtWebKit.framework/Versions/5/QtWebKit (compatibility version 5.6.0, current version 5.6.0)
                    # cut -d ' ' -f 1 will slice the line by spaces and returns the first field. That results in: @rpath/QtWebKit.framework/Versions/5/QtWebKit
                    # grep @rpath will make sure we only have QtLibraries and not system libraries
                    # cut -d '/' -f 2 slices the line by '/' and selects the second field resulting in: QtWebKit.framework
                    otool_command = "otool -L '%s' | cut -d ' ' -f 1 | grep @rpath.*Qt | cut -d '/' -f 2" % (
                        os.path.join(src, darwin_adjusted_name))
                    # universal_newlines=True makes check_output return str on
                    # Python 3 (it returns bytes otherwise, which would break
                    # the str regex split below)
                    output = subprocess.check_output(otool_command,
                                                     shell=True,
                                                     universal_newlines=True)
                    # raw string so \s is a regex class; drop empty tokens
                    # that an empty otool output would otherwise produce
                    qt_dependent_libs = [
                        lib for lib in re.split(r"\s+", output.strip()) if lib
                    ]

                    for lib in qt_dependent_libs:
                        qt_frameworks_to_copy.add(lib)

        for framework_name in qt_frameworks_to_copy:
            src_node = qt_libs_source_node.make_node(framework_name)
            src = src_node.abspath()
            dst = frameworks_node.make_node(framework_name).abspath()
            # remove any stale link/copy before writing the new one
            if os.path.islink(dst):
                os.unlink(dst)
            if os.path.isdir(dst):
                shutil.rmtree(dst)
            Logs.info("Copying Qt Framework {} to {}".format(src, dst))
            self.bld.create_symlink_or_copy(src_node, dst)
            # cleanup only applies to real copies, not symlinks
            if not os.path.islink(dst):
                post_copy_cleanup(frameworks_node.make_node(framework_name))
Beispiel #35
0
def configure(cfg):
    """
    Configure the ChibiOS build: locate external build tools, record
    source/build tree locations in the environment, select the board
    makefile, and run chibios_hwdef.py to generate the linker script
    needed by the remaining configure checks.
    """
    cfg.find_program('make', var='MAKE')
    #cfg.objcopy = cfg.find_program('%s-%s'%(cfg.env.TOOLCHAIN,'objcopy'), var='OBJCOPY', mandatory=True)
    cfg.find_program('arm-none-eabi-objcopy', var='OBJCOPY')
    env = cfg.env
    bldnode = cfg.bldnode.make_node(cfg.variant)

    def srcpath(path):
        # absolute path inside the source tree
        return cfg.srcnode.make_node(path).abspath()

    def bldpath(path):
        # absolute path inside the variant build tree
        return bldnode.make_node(path).abspath()

    env.AP_PROGRAM_FEATURES += ['ch_ap_program']

    # ensure library objects are built with the ChibiOS feature enabled
    kw = env.AP_LIBRARIES_OBJECTS_KW
    kw['features'] = Utils.to_list(kw.get('features', [])) + ['ch_ap_library']

    env.CH_ROOT = srcpath('modules/ChibiOS')
    env.AP_HAL_ROOT = srcpath('libraries/AP_HAL_ChibiOS')
    env.BUILDDIR = bldpath('modules/ChibiOS')
    env.BUILDROOT = bldpath('')
    env.SRCROOT = srcpath('')
    env.PT_DIR = srcpath('Tools/ardupilotwaf/chibios/image')
    env.UPLOAD_TOOLS = srcpath('Tools/ardupilotwaf')
    env.CHIBIOS_SCRIPTS = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts')
    env.TOOLS_SCRIPTS = srcpath('Tools/scripts')
    env.APJ_TOOL = srcpath('Tools/scripts/apj_tool.py')
    env.SERIAL_PORT = srcpath('/dev/serial/by-id/*_STLink*')

    # relative paths to pass to make, relative to directory that make is run from
    env.CH_ROOT_REL = os.path.relpath(env.CH_ROOT, env.BUILDROOT)
    env.AP_HAL_REL = os.path.relpath(env.AP_HAL_ROOT, env.BUILDROOT)
    env.BUILDDIR_REL = os.path.relpath(env.BUILDDIR, env.BUILDROOT)

    mk_custom = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/chibios_board.mk' %
                        env.BOARD)
    mk_common = srcpath(
        'libraries/AP_HAL_ChibiOS/hwdef/common/chibios_board.mk')
    # see if there is a board specific make file
    if os.path.exists(mk_custom):
        env.BOARD_MK = mk_custom
    else:
        env.BOARD_MK = mk_common

    if cfg.options.default_parameters:
        cfg.msg('Default parameters',
                cfg.options.default_parameters,
                color='YELLOW')
        env.DEFAULT_PARAMETERS = srcpath(cfg.options.default_parameters)

    # we need to run chibios_hwdef.py at configure stage to generate the ldscript.ld
    # that is needed by the remaining configure checks
    import subprocess

    # bootloader builds use a separate hwdef file
    if env.BOOTLOADER:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef-bl.dat' %
                            env.BOARD)
        env.BOOTLOADER_OPTION = "--bootloader"
    else:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef.dat' %
                            env.BOARD)
        env.BOOTLOADER_OPTION = ""
    hwdef_script = srcpath(
        'libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py')
    hwdef_out = env.BUILDROOT
    if not os.path.exists(hwdef_out):
        os.mkdir(hwdef_out)
    try:
        cmd = "python '{0}' -D '{1}' '{2}' {3}".format(hwdef_script, hwdef_out,
                                                       env.HWDEF,
                                                       env.BOOTLOADER_OPTION)
        ret = subprocess.call(cmd, shell=True)
    except Exception:
        # cfg.fatal raises, aborting the configure run
        cfg.fatal("Failed to process hwdef.dat")
    if ret != 0:
        cfg.fatal("Failed to process hwdef.dat ret=%d" % ret)

    load_env_vars(cfg.env)
    if env.HAL_WITH_UAVCAN:
        setup_can_build(cfg)
Beispiel #36
0
def post_check(self, *k, **kw):
	"""
	Sets the variables after a test executed in
	:py:func:`waflib.Tools.c_config.check` was run successfully

	:return: the success value (0/1, or the captured output for
		execution tests with ``define_ret``)
	"""
	is_success = 0
	if kw['execute']:
		if kw['success'] is not None:
			if kw.get('define_ret', False):
				# the captured program output becomes the "success" value
				is_success = kw['success']
			else:
				is_success = (kw['success'] == 0)
	else:
		# compile-only test: success means a zero return code
		is_success = (kw['success'] == 0)

	if 'define_name' in kw:
		# TODO this is still way too complicated
		comment = kw.get('comment', '')
		define_name = kw['define_name']
		if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
			# define the captured string, either globally or per-uselib
			if kw.get('global_define', 1):
				self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment)
			else:
				if kw.get('quote', 1):
					succ = '"%s"' % is_success
				else:
					succ = int(is_success)
				val = '%s=%s' % (define_name, succ)
				var = 'DEFINES_%s' % kw['uselib_store']
				self.env.append_value(var, val)
		else:
			if kw.get('global_define', 1):
				self.define_cond(define_name, is_success, comment=comment)
			else:
				var = 'DEFINES_%s' % kw['uselib_store']
				self.env.append_value(var, '%s=%s' % (define_name, int(is_success)))

		# define conf.env.HAVE_X to 1
		if kw.get('add_have_to_env', 1):
			if kw.get('uselib_store'):
				self.env[self.have_define(kw['uselib_store'])] = 1
			else:
				self.env[define_name] = int(is_success)

	if 'header_name' in kw:
		if kw.get('auto_add_header_name', False):
			self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))

	if is_success and 'uselib_store' in kw:
		from waflib.Tools import ccroot
		# See get_uselib_vars in ccroot.py
		_vars = set([])
		for x in kw['features']:
			if x in ccroot.USELIB_VARS:
				_vars |= ccroot.USELIB_VARS[x]

		# loop variable named 'u' (not 'k') so it does not shadow the
		# *k positional-arguments parameter
		for u in _vars:
			x = u.lower()
			if x in kw:
				self.env.append_value(u + '_' + kw['uselib_store'], kw[x])
	return is_success
Beispiel #37
0
	def to_header(dct):
		"""Return '#include <...>' lines for dct['header_name'], or '' when the key is absent."""
		if 'header_name' not in dct:
			return ''
		headers = Utils.to_list(dct['header_name'])
		return ''.join('#include <%s>\n' % h for h in headers)
Beispiel #38
0
def vala_file(self, node):
    """
    Create (once per task generator) a 'valac' task and register *node* as
    one of its inputs; the generated .c file is added to self.source for
    further compilation. For libraries, also declares the .h/.vapi/.gir/.deps
    outputs and their installation.
    """
    valatask = getattr(self, "valatask", None)
    if not valatask:

        def _get_api_version():
            # API_VERSION from the wscript wins; otherwise derive it from
            # VERSION ("0.x.y" -> "0.x", "N.x.y" -> "N.0")
            api_version = getattr(Context.g_module, 'API_VERSION', None)
            if api_version == None:
                version = Context.g_module.VERSION.split(".")
                if version[0] == "0":
                    api_version = "0." + version[1]
                else:
                    api_version = version[0] + ".0"
            return api_version

        valatask = self.create_task('valac')
        self.valatask = valatask
        self.includes = Utils.to_list(getattr(self, 'includes', []))
        self.uselib = self.to_list(getattr(self, 'uselib', []))
        valatask.packages = []
        valatask.packages_private = Utils.to_list(
            getattr(self, 'packages_private', []))
        valatask.vapi_dirs = []
        valatask.target = self.target
        valatask.threading = False
        valatask.install_path = getattr(self, 'install_path', '')
        valatask.profile = getattr(self, 'profile', 'gobject')
        valatask.vala_defines = getattr(self, 'vala_defines', [])
        valatask.target_glib = None
        valatask.gir = getattr(self, 'gir', None)
        valatask.gir_path = getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0')
        valatask.vapi_path = getattr(self, 'vapi_path',
                                     '${DATAROOTDIR}/vala/vapi')
        valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
        valatask.header_path = getattr(
            self, 'header_path',
            '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
        # anything that is not a program is treated as a library
        valatask.is_lib = False
        if not 'cprogram' in self.features:
            valatask.is_lib = True
        packages = Utils.to_list(getattr(self, 'packages', []))
        vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
        includes = []
        if hasattr(self, 'use'):
            # walk the 'use' dependency graph transitively; any task
            # generator producing a <target>.vapi becomes a package
            # dependency of this valac task
            local_packages = Utils.to_list(self.use)[:]
            seen = []
            while len(local_packages) > 0:
                package = local_packages.pop()
                if package in seen:
                    continue
                seen.append(package)
                try:
                    package_obj = self.bld.get_tgen_by_name(package)
                except Errors.WafError:
                    # not a task generator name (e.g. a plain uselib) - skip
                    continue
                package_name = package_obj.target
                package_node = package_obj.path
                package_dir = package_node.path_from(self.path)
                for task in package_obj.tasks:
                    for output in task.outputs:
                        if output.name == package_name + ".vapi":
                            valatask.set_run_after(task)
                            if package_name not in packages:
                                packages.append(package_name)
                            if package_dir not in vapi_dirs:
                                vapi_dirs.append(package_dir)
                            if package_dir not in includes:
                                includes.append(package_dir)
                if hasattr(package_obj, 'use'):
                    # queue transitive 'use' entries, preserving order
                    lst = self.to_list(package_obj.use)
                    lst.reverse()
                    local_packages = [pkg for pkg in lst if pkg not in seen
                                      ] + local_packages
        valatask.packages = packages
        for vapi_dir in vapi_dirs:
            try:
                # register both the source and build variants of the dir;
                # find_dir returns None for missing dirs, hence AttributeError
                valatask.vapi_dirs.append(
                    self.path.find_dir(vapi_dir).abspath())
                valatask.vapi_dirs.append(
                    self.path.find_dir(vapi_dir).get_bld().abspath())
            except AttributeError:
                Logs.warn("Unable to locate Vala API directory: '%s'" %
                          vapi_dir)
        self.includes.append(self.bld.srcnode.abspath())
        self.includes.append(self.bld.bldnode.abspath())
        for include in includes:
            try:
                self.includes.append(self.path.find_dir(include).abspath())
                self.includes.append(
                    self.path.find_dir(include).get_bld().abspath())
            except AttributeError:
                Logs.warn("Unable to locate include directory: '%s'" % include)
        if valatask.profile == 'gobject':
            if hasattr(self, 'target_glib'):
                Logs.warn(
                    'target_glib on vala tasks is not supported --vala-target-glib=MAJOR.MINOR from the vala tool options'
                )
            if getattr(Options.options, 'vala_target_glib', None):
                valatask.target_glib = Options.options.vala_target_glib
            if not 'GOBJECT' in self.uselib:
                self.uselib.append('GOBJECT')
        if hasattr(self, 'threading'):
            # threading only makes sense with the gobject profile
            if valatask.profile == 'gobject':
                valatask.threading = self.threading
                if not 'GTHREAD' in self.uselib:
                    self.uselib.append('GTHREAD')
            else:
                Logs.warn("Profile %s does not have threading support" %
                          valatask.profile)
        if valatask.is_lib:
            # libraries also produce a header, a vapi, and optionally
            # gir/deps files
            valatask.outputs.append(
                self.path.find_or_declare('%s.h' % self.target))
            valatask.outputs.append(
                self.path.find_or_declare('%s.vapi' % self.target))
            if valatask.gir:
                valatask.outputs.append(
                    self.path.find_or_declare('%s.gir' % self.gir))
            if valatask.packages:
                d = self.path.find_or_declare('%s.deps' % self.target)
                valatask.outputs.append(d)
                valatask.deps_node = d
    valatask.inputs.append(node)
    # the .c file produced by valac is compiled by the C tool chain
    c_node = node.change_ext('.c')
    valatask.outputs.append(c_node)
    self.source.append(c_node)
    if valatask.is_lib:
        # schedule installation of the generated header/vapi/gir files
        headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
        self.install_vheader = self.bld.install_files(valatask.header_path,
                                                      headers_list, self.env)
        vapi_list = [
            o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))
        ]
        self.install_vapi = self.bld.install_files(valatask.vapi_path,
                                                   vapi_list, self.env)
        gir_list = [o for o in valatask.outputs if o.suffix() == ".gir"]
        self.install_gir = self.bld.install_files(valatask.gir_path, gir_list,
                                                  self.env)
Beispiel #39
0
def handle_add_object(tgen):
    """Append any extra object files listed in tgen.add_object to the link task inputs."""
    extra_objects = getattr(tgen, 'add_object', None)
    if not extra_objects:
        return
    for entry in Utils.to_list(extra_objects):
        obj_node = tgen.path.find_resource(entry)
        if obj_node is not None:
            tgen.link_task.inputs.append(obj_node)
Beispiel #40
0
def exec_cfg(self, kw):
	"""
	Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:

	* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
	* if modversion is given, then return the module version
	* else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable

	:param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
	:type atleast_pkgconfig_version: string
	:param package: package name, for example *gtk+-2.0*
	:type package: string
	:param uselib_store: if the test is successful, define HAVE\_*name*. It is also used to define *conf.env.FLAGS_name* variables.
	:type uselib_store: string
	:param modversion: if provided, return the version of the given module and define *name*\_VERSION
	:type modversion: string
	:param args: arguments to give to *package* when retrieving flags
	:type args: list of string
	:param variables: return the values of particular variables
	:type variables: list of string
	:param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
	:type define_variable: dict(string: string)
	"""

	path = Utils.to_list(kw['path'])
	env = self.env.env or None
	def define_it():
		define_name = kw['define_name']
		# by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X
		if kw.get('global_define', 1):
			self.define(define_name, 1, False)
		else:
			self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name)

		if kw.get('add_have_to_env', 1):
			self.env[define_name] = 1

	# pkg-config version
	if 'atleast_pkgconfig_version' in kw:
		cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
		self.cmd_and_log(cmd, env=env)
		if 'okmsg' not in kw:
			kw['okmsg'] = 'yes'
		return

	for x in cfg_ver:
		# TODO remove in waf 2.0
		y = x.replace('-', '_')
		if y in kw:
			self.cmd_and_log(path + ['--%s=%s' % (x, kw[y]), kw['package']], env=env)
			if 'okmsg' not in kw:
				kw['okmsg'] = 'yes'
			define_it()
			break

	# single version for a module
	if 'modversion' in kw:
		version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
		self.define(kw['define_name'], version)
		return version

	# copy so that the original path list is not modified below
	lst = list(path)

	defi = kw.get('define_variable')
	if not defi:
		defi = self.env.PKG_CONFIG_DEFINES or {}
	for key, val in defi.items():
		lst.append('--define-variable=%s=%s' % (key, val))

	static = kw.get('force_static', False)
	if 'args' in kw:
		args = Utils.to_list(kw['args'])
		if '--static' in args or '--static-libs' in args:
			static = True
		lst += args

	# tools like pkgconf expect the package argument after the -- ones -_-
	lst.extend(Utils.to_list(kw['package']))

	# retrieving variables of a module
	if 'variables' in kw:
		v_env = kw.get('env', self.env)
		# 'var_names' rather than 'vars': avoid shadowing the builtin
		var_names = Utils.to_list(kw['variables'])
		for v in var_names:
			val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
			var = '%s_%s' % (kw['uselib_store'], v)
			v_env[var] = val
		if 'okmsg' not in kw:
			kw['okmsg'] = 'yes'
		return

	# so we assume the command-line will output flags to be parsed afterwards
	ret = self.cmd_and_log(lst, env=env)
	if 'okmsg' not in kw:
		kw['okmsg'] = 'yes'

	define_it()
	self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
	return ret
Beispiel #41
0
def add_subproject(ctx, dirs, prepend=None):
    '''
    Recurse into subproject directories.

    :param dirs: directories to recurse into
    :type dirs: list or space-separated string
    :param prepend: virtual path prefix, useful when managing projects with different environments
    :type prepend: string
    '''
    if isinstance(ctx, Configure.ConfigurationContext):
        if not ctx.env.IGNORED_SUBDIRS and ctx.options.SKIP_SUBDIRS:
            ctx.env.IGNORED_SUBDIRS = ctx.options.SKIP_SUBDIRS.split(',')

        for prj in Utils.to_list(dirs):
            if ctx.env.SUBPROJECT_PATH:
                subprj_path = list(ctx.env.SUBPROJECT_PATH)
            else:
                subprj_path = []

            if prj in ctx.env.IGNORED_SUBDIRS:
                ctx.msg(msg='--X %s' % '/'.join(subprj_path),
                        result='ignored',
                        color='YELLOW')
                continue

            if prepend:
                subprj_path.append(prepend)

            subprj_path.append(prj)

            saveenv = ctx.env

            ctx.setenv('_'.join(subprj_path),
                       ctx.env)  # derive new env from previous

            ctx.env.ENVNAME = prj
            ctx.env.SUBPROJECT_PATH = list(subprj_path)

            ctx.msg(msg='--> %s' % '/'.join(subprj_path),
                    result='in progress',
                    color='BLUE')
            check_and_add_waifulib(os.path.join(ctx.path.abspath(), prj))
            ctx.recurse(prj)
            remove_waifulib(os.path.join(ctx.path.abspath(), prj))
            ctx.msg(msg='<-- %s' % '/'.join(subprj_path),
                    result='done',
                    color='BLUE')

            ctx.setenv('')  # save env changes

            ctx.env = saveenv  # but use previous
    else:
        # build-like contexts: reuse the environments saved at configure time
        if not ctx.all_envs:
            ctx.load_envs()

        for prj in Utils.to_list(dirs):
            if prj in ctx.env.IGNORED_SUBDIRS:
                continue

            if ctx.env.SUBPROJECT_PATH:
                subprj_path = list(ctx.env.SUBPROJECT_PATH)
            else:
                subprj_path = []

            if prepend:
                subprj_path.append(prepend)

            subprj_path.append(prj)
            saveenv = ctx.env
            try:
                ctx.env = ctx.all_envs['_'.join(subprj_path)]
            except KeyError:
                # was a bare 'except:', which also masked KeyboardInterrupt;
                # only the env-cache lookup is expected to fail here
                ctx.fatal('Can\'t find env cache %s' % '_'.join(subprj_path))

            check_and_add_waifulib(os.path.join(ctx.path.abspath(), prj))
            ctx.recurse(prj)
            remove_waifulib(os.path.join(ctx.path.abspath(), prj))
            ctx.env = saveenv
def halide(self):
    """
    Create a 'run_halide_gen' task for the single source file of this task
    generator, passing 'args' on the command line and merging 'halide_env'
    (dict, list of 'K=V' strings, or a 'K=V K=V' string) into the task
    environment. The produced .o/.h outputs are registered as compiled tasks.
    """
    # make sure the optional attributes exist with sane defaults
    Utils.def_attrs(
        self,
        args=[],
        halide_env={},
    )

    bld = self.bld

    # normalize halide_env to a dict; the three accepted input forms are
    # a dict, a list of 'K=V' strings, or one space-separated 'K=V' string
    env = self.halide_env
    try:
        if isinstance(env, str):
            env = dict(x.split('=') for x in env.split())
        elif isinstance(env, list):
            env = dict(x.split('=') for x in env)
        assert isinstance(env, dict)
    except Exception as e:
        # only malformed input (ValueError from split, or the assert)
        # is translated into a user-facing error; anything else propagates
        if not isinstance(e, ValueError) \
         and not isinstance(e, AssertionError):
            raise
        raise Errors.WafError(
         "halide_env must be under the form" \
         " {'HL_x':'a', 'HL_y':'b'}" \
         " or ['HL_x=y', 'HL_y=b']" \
         " or 'HL_x=y HL_y=b'")

    src = self.to_nodes(self.source)
    assert len(src) == 1, "Only one source expected"
    src = src[0]

    args = Utils.to_list(self.args)

    def change_ext(src, ext):
        # Return a node with a new extension, in an appropriate folder
        name = src.name
        xpos = src.name.rfind('.')
        if xpos == -1:
            xpos = len(src.name)
        newname = name[:xpos] + ext
        if src.is_child_of(bld.bldnode):
            node = src.get_src().parent.find_or_declare(newname)
        else:
            node = bld.bldnode.find_or_declare(newname)
        return node

    def to_nodes(self, lst, path=None):
        # like self.to_nodes but using find_or_declare, so that targets
        # that do not exist yet can still be named
        tmp = []
        path = path or self.path
        find = path.find_or_declare

        if isinstance(lst, self.path.__class__):
            lst = [lst]

        for x in Utils.to_list(lst):
            if isinstance(x, str):
                node = find(x)
            else:
                node = x
            tmp.append(node)
        return tmp

    tgt = to_nodes(self, self.target)
    if not tgt:
        # default outputs: object file and header next to the source
        tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
    cwd = tgt[0].parent.abspath()
    task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
    task.env.append_unique('HALIDE_ARGS', args)
    # task.env.env defaults to [] in waf; it must be a dict before update
    if task.env.env == []:
        task.env.env = {}
    task.env.env.update(env)
    # stable string form of the environment, used for task signatures
    task.env.HALIDE_ENV = " ".join(
        ("%s=%s" % (k, v)) for (k, v) in sorted(env.items()))
    task.env.HALIDE_ARGS = args

    # register as a compiled task so the link step picks up the object file
    try:
        self.compiled_tasks.append(task)
    except AttributeError:
        self.compiled_tasks = [task]
    self.source = []
Beispiel #43
0
    def recurse(self, dirs, name=None, mandatory=True, once=True):
        """
        Run user code from the supplied list of directories.

        The directories can be either absolute, or relative to the directory
        of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse`
        and :py:meth:`waflib.Context.Context.post_recurse` are called immediately
        before and after a script has been executed.

        :param dirs: List of directories to visit
        :type dirs: list of string or space-separated string
        :param name: Name of function to invoke from the wscript
        :type  name: string
        :param mandatory: whether sub wscript files are required to exist
        :type  mandatory: bool
        :param once: read the script file once for a particular context
        :type once: bool
        """
        try:
            cache = self.recurse_cache
        except AttributeError:
            # first recursion on this context: create the cache lazily
            # (was a bare 'except:', which also masked KeyboardInterrupt)
            cache = self.recurse_cache = {}

        for d in Utils.to_list(dirs):

            if not os.path.isabs(d):
                # absolute paths only
                d = os.path.join(self.path.abspath(), d)

            WSCRIPT = os.path.join(d, WSCRIPT_FILE)
            WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)

            # a per-function script file takes precedence over the
            # generic wscript file
            node = self.root.find_node(WSCRIPT_FUN)
            if node and (not once or node not in cache):
                cache[node] = True
                self.pre_recurse(node)
                try:
                    # NOTE(review): 'rU' mode was removed in Python 3.11;
                    # presumably plain 'r' suffices here -- confirm before changing
                    function_code = node.read('rU')
                    exec(compile(function_code, node.abspath(), 'exec'),
                         self.exec_dict)
                finally:
                    self.post_recurse(node)
            elif not node:
                node = self.root.find_node(WSCRIPT)
                if node and (not once or node not in cache):
                    cache[node] = True
                    self.pre_recurse(node)
                    try:
                        wscript_module = load_module(node.abspath())
                        user_function = getattr(wscript_module,
                                                (name or self.fun), None)
                        if not user_function:
                            if not mandatory:
                                continue
                            raise Errors.WafError(
                                'No function %s defined in %s' %
                                (name or self.fun, node.abspath()))
                        user_function(self)
                    finally:
                        self.post_recurse(node)
                elif not node:
                    if not mandatory:
                        continue
                    raise Errors.WafError('No wscript file in directory %s' %
                                          d)
Beispiel #44
0
	def cfg_lst(cfg):
		# Turn the rendered configuration string into a token list.
		rendered = cfg_str(cfg)
		return Utils.to_list(rendered)
Beispiel #45
0
 def read_out(cmd):
     """Evaluate *cmd* with the detected Ruby interpreter (rbconfig loaded) and return the output tokens."""
     output = self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd])
     return Utils.to_list(output)
def apply_tex(self):
    """
    Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
    """
    # default to pdflatex when no (or an unknown) type is requested
    if not getattr(self, 'type', None) in ['latex', 'pdflatex', 'xelatex']:
        self.type = 'pdflatex'

    tree = self.bld
    outs = Utils.to_list(getattr(self, 'outs', []))

    # prompt for incomplete files (else the batchmode is used)
    self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

    deps_lst = []

    if getattr(self, 'deps', None):
        # resolve the manually declared dependencies to nodes
        deps = self.to_list(self.deps)
        for filename in deps:
            n = self.path.find_resource(filename)
            if n is None:
                # find_resource returns None for a missing file; appending
                # None here (as the old code did) corrupted node_deps
                Logs.warn("tex: manual dependency %r not found" % filename)
                continue
            if n not in deps_lst:
                deps_lst.append(n)

    for node in self.to_nodes(self.source):

        if self.type == 'latex':
            task = self.create_task('latex', node, node.change_ext('.dvi'))
        elif self.type == 'pdflatex':
            task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
        elif self.type == 'xelatex':
            task = self.create_task('xelatex', node, node.change_ext('.pdf'))

        task.env = self.env

        # add the manual dependencies
        if deps_lst:
            try:
                lst = tree.node_deps[task.uid()]
                for n in deps_lst:
                    if not n in lst:
                        lst.append(n)
            except KeyError:
                tree.node_deps[task.uid()] = deps_lst

        # post-processing tasks: dvi -> ps/pdf, or pdf -> ps
        if self.type == 'latex':
            if 'ps' in outs:
                tsk = self.create_task('dvips', task.outputs,
                                       node.change_ext('.ps'))
                tsk.env.env = {
                    'TEXINPUTS':
                    node.parent.abspath() + os.pathsep + self.path.abspath() +
                    os.pathsep + self.path.get_bld().abspath()
                }
            if 'pdf' in outs:
                tsk = self.create_task('dvipdf', task.outputs,
                                       node.change_ext('.pdf'))
                tsk.env.env = {
                    'TEXINPUTS':
                    node.parent.abspath() + os.pathsep + self.path.abspath() +
                    os.pathsep + self.path.get_bld().abspath()
                }
        elif self.type == 'pdflatex':
            if 'ps' in outs:
                self.create_task('pdf2ps', task.outputs,
                                 node.change_ext('.ps'))
    # the source files are consumed by the tex tasks themselves
    self.source = []
Beispiel #47
0
def configure(cfg):
    """Configure the ChibiOS build: locate the required build tools and
    populate ``cfg.env`` with the source/build paths consumed by the
    ChibiOS makefiles and helper scripts."""
    cfg.find_program('make', var='MAKE')
    cfg.find_program('arm-none-eabi-objcopy', var='OBJCOPY')

    env = cfg.env
    variant_bld = cfg.bldnode.make_node(cfg.variant)

    def in_src(rel):
        # absolute path of a location under the source tree
        return cfg.srcnode.make_node(rel).abspath()

    def in_bld(rel):
        # absolute path of a location under the variant build tree
        return variant_bld.make_node(rel).abspath()

    env.AP_PROGRAM_FEATURES += ['ch_ap_program']

    lib_kw = env.AP_LIBRARIES_OBJECTS_KW
    lib_kw['features'] = Utils.to_list(lib_kw.get('features', [])) + ['ch_ap_library']

    env.CH_ROOT = in_src('modules/ChibiOS')
    env.AP_HAL_ROOT = in_src('libraries/AP_HAL_ChibiOS')
    env.BUILDDIR = in_bld('modules/ChibiOS')
    env.BUILDROOT = in_bld('')
    env.SRCROOT = in_src('')
    env.PT_DIR = in_src('Tools/ardupilotwaf/chibios/image')
    env.MKFW_TOOLS = in_src('Tools/ardupilotwaf')
    env.UPLOAD_TOOLS = in_src('Tools/scripts')
    env.CHIBIOS_SCRIPTS = in_src('libraries/AP_HAL_ChibiOS/hwdef/scripts')
    env.TOOLS_SCRIPTS = in_src('Tools/scripts')
    env.APJ_TOOL = in_src('Tools/scripts/apj_tool.py')
    env.SERIAL_PORT = in_src('/dev/serial/by-id/*_STLink*')

    # relative paths to pass to make, relative to directory that make is run from
    env.CH_ROOT_REL = os.path.relpath(env.CH_ROOT, env.BUILDROOT)
    env.AP_HAL_REL = os.path.relpath(env.AP_HAL_ROOT, env.BUILDROOT)
    env.BUILDDIR_REL = os.path.relpath(env.BUILDDIR, env.BUILDROOT)

    # prefer a board-specific make file when one exists, fall back to the common one
    board_mk = in_src('libraries/AP_HAL_ChibiOS/hwdef/%s/chibios_board.mk' %
                      env.BOARD)
    common_mk = in_src(
        'libraries/AP_HAL_ChibiOS/hwdef/common/chibios_board.mk')
    env.BOARD_MK = board_mk if os.path.exists(board_mk) else common_mk

    defaults = cfg.options.default_parameters
    if defaults:
        cfg.msg('Default parameters', defaults, color='YELLOW')
        env.DEFAULT_PARAMETERS = defaults

    try:
        rc = generate_hwdef_h(env)
    except Exception:
        cfg.fatal("Failed to process hwdef.dat")
    if rc != 0:
        cfg.fatal("Failed to process hwdef.dat ret=%d" % rc)

    load_env_vars(cfg.env)
    if env.HAL_NUM_CAN_IFACES and not env.AP_PERIPH:
        setup_canmgr_build(cfg)
    setup_optimization(cfg.env)
Beispiel #48
0
    def create_cproject(self, appname, workspace_includes=None, pythonpath=None):
        """
        Create the Eclipse CDT .project and .cproject files

        @param appname The name that will appear in the Project Explorer
        @param workspace_includes Optional project includes to prevent
              "Unresolved Inclusion" errors in the Eclipse editor
        @param pythonpath Optional project specific python paths
        """
        # Fix: the previous defaults were mutable lists ([]) that the body
        # mutates via append(), so entries leaked between calls. Use None
        # sentinels instead (backward-compatible for all callers).
        if workspace_includes is None:
            workspace_includes = []
        if pythonpath is None:
            pythonpath = []
        hasc = hasjava = haspython = False
        source_dirs = []
        # Fix: copy the env list; the in-place += / append below used to
        # pollute self.env['CPPPATH'] persistently.
        cpppath = list(self.env['CPPPATH'])
        javasrcpath = []
        javalibpath = []
        includes = STANDARD_INCLUDES
        if sys.platform != 'win32':
            cc = self.env.CC or self.env.CXX
            if cc:
                # Ask the compiler for its default include search directories
                # (lines of '-Wp,-v' preprocessor output starting with ' /').
                cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
                try:
                    gccout = self.cmd_and_log(
                        cmd,
                        output=Context.STDERR,
                        quiet=Context.BOTH,
                        input='\n'.encode()).splitlines()
                except Errors.WafError:
                    # best-effort: fall back to STANDARD_INCLUDES
                    pass
                else:
                    includes = []
                    for ipath in gccout:
                        if ipath.startswith(' /'):
                            includes.append(ipath[1:])
            cpppath += includes
        Logs.warn('Generating Eclipse CDT project files')

        for g in self.groups:
            for tg in g:
                if not isinstance(tg, TaskGen.task_gen):
                    continue

                tg.post()

                # Add local Python modules paths to configuration so object resolving will work in IDE
                # This may also contain generated files (ie. pyqt5 or protoc) that get picked from build
                if 'py' in tg.features:
                    pypath = tg.path.relpath()
                    py_installfrom = getattr(tg, 'install_from', None)
                    if isinstance(py_installfrom, Node.Node):
                        pypath = py_installfrom.path_from(
                            self.root.make_node(self.top_dir))
                    if pypath not in pythonpath:
                        pythonpath.append(pypath)
                    haspython = True

                # Add Java source directories so object resolving works in IDE
                # This may also contain generated files (ie. protoc) that get picked from build
                if 'javac' in tg.features:
                    java_src = tg.path.relpath()
                    java_srcdir = getattr(tg.javac_task, 'srcdir', None)
                    if java_srcdir:
                        if isinstance(java_srcdir, Node.Node):
                            java_srcdir = [java_srcdir]
                        for x in Utils.to_list(java_srcdir):
                            x = x.path_from(self.root.make_node(self.top_dir))
                            if x not in javasrcpath:
                                javasrcpath.append(x)
                    else:
                        if java_src not in javasrcpath:
                            javasrcpath.append(java_src)
                    hasjava = True

                    # Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse
                    usedlibs = getattr(tg, 'use', [])
                    for x in Utils.to_list(usedlibs):
                        for cl in Utils.to_list(tg.env['CLASSPATH_' + x]):
                            if cl not in javalibpath:
                                javalibpath.append(cl)

                # Only task generators with a link task contribute C/C++ info
                if not getattr(tg, 'link_task', None):
                    continue

                features = Utils.to_list(getattr(tg, 'features', ''))

                is_cc = 'c' in features or 'cxx' in features

                incnodes = tg.to_incnodes(
                    tg.to_list(getattr(tg, 'includes', [])) +
                    tg.env['INCLUDES'])
                for p in incnodes:
                    path = p.path_from(self.srcnode)

                    # absolute paths go to the compiler include list,
                    # relative paths to the workspace include list
                    if (path.startswith("/")):
                        cpppath.append(path)
                    else:
                        workspace_includes.append(path)

                    if is_cc and path not in source_dirs:
                        source_dirs.append(path)

                    hasc = True

        waf_executable = os.path.abspath(sys.argv[0])
        project = self.impl_create_project(sys.executable, appname, hasc,
                                           hasjava, haspython, waf_executable)
        self.write_conf_to_xml('.project', project)

        if hasc:
            project = self.impl_create_cproject(sys.executable, waf_executable,
                                                appname, workspace_includes,
                                                cpppath, source_dirs)
            self.write_conf_to_xml('.cproject', project)

        if haspython:
            project = self.impl_create_pydevproject(sys.path, pythonpath)
            self.write_conf_to_xml('.pydevproject', project)

        if hasjava:
            project = self.impl_create_javaproject(javasrcpath, javalibpath)
            self.write_conf_to_xml('.classpath', project)
Beispiel #49
0
def check_python_headers(conf, features="pyembed pyext"):
    """
    Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
    On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

    * PYEXT: for compiling python extensions
    * PYEMBED: for embedding a python interpreter

    :param features: space-separated subset of ``pyembed``/``pyext`` selecting
        which flag sets to detect; at least one is required
    :raises: conf.fatal when no compiler is loaded, the python executable is
        missing, or the development headers cannot be found
    """
    features = Utils.to_list(features)
    assert ("pyembed" in features) or (
        "pyext" in features
    ), "check_python_headers features must include 'pyembed' and/or 'pyext'"
    env = conf.env
    # a C or C++ compiler must be loaded before any flags can be tested
    if not env.CC_NAME and not env.CXX_NAME:
        conf.fatal("load a compiler first (gcc, g++, ..)")

    # bypass all the code below for cross-compilation
    if conf.python_cross_compile(features):
        return

    if not env.PYTHON_VERSION:
        conf.check_python_version()

    pybin = env.PYTHON
    if not pybin:
        conf.fatal("Could not find the python executable")

    # so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
    v = "prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION".split(
    )
    try:
        lst = conf.get_python_variables(
            ["get_config_var('%s') or ''" % x for x in v])
    except RuntimeError:
        conf.fatal("Python development headers not found (-v for details).")

    vals = [f"{x} = {y!r}" for (x, y) in zip(v, lst)]
    conf.to_log("Configuration returned from {!r}:\n{}\n".format(
        pybin, "\n".join(vals)))

    dct = dict(zip(v, lst))
    x = "MACOSX_DEPLOYMENT_TARGET"
    if dct[x]:
        env[x] = conf.environ[x] = dct[x]
    env.pyext_PATTERN = "%s" + dct["SO"]  # not a mistake

    # Try to get pythonX.Y-config
    num = ".".join(env.PYTHON_VERSION.split(".")[:2])
    conf.find_program(
        [
            "".join(pybin) + "-config",
            "python%s-config" % num,
            "python-config-%s" % num,
            "python%sm-config" % num,
        ],
        var="PYTHON_CONFIG",
        msg="python-config",
        mandatory=False,
    )

    if env.PYTHON_CONFIG:
        # python2.6-config requires 3 runs
        all_flags = [["--cflags", "--libs", "--ldflags"]]
        if sys.hexversion < 0x2070000:
            all_flags = [[k] for k in all_flags[0]]

        xx = env.CXX_NAME and "cxx" or "c"

        if "pyembed" in features:
            for flags in all_flags:
                conf.check_cfg(
                    msg="Asking python-config for pyembed %r flags" %
                    " ".join(flags),
                    path=env.PYTHON_CONFIG,
                    package="",
                    uselib_store="PYEMBED",
                    args=flags,
                )

            try:
                conf.test_pyembed(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352
                if dct["Py_ENABLE_SHARED"] and dct["LIBDIR"]:
                    env.append_unique("LIBPATH_PYEMBED", [dct["LIBDIR"]])
                    conf.test_pyembed(xx)
                else:
                    raise

        if "pyext" in features:
            for flags in all_flags:
                conf.check_cfg(
                    msg="Asking python-config for pyext %r flags" %
                    " ".join(flags),
                    path=env.PYTHON_CONFIG,
                    package="",
                    uselib_store="PYEXT",
                    args=flags,
                )

            try:
                conf.test_pyext(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352
                if dct["Py_ENABLE_SHARED"] and dct["LIBDIR"]:
                    env.append_unique("LIBPATH_PYEXT", [dct["LIBDIR"]])
                    conf.test_pyext(xx)
                else:
                    raise

        # python-config succeeded; the headers are assumed present
        conf.define("HAVE_PYTHON_H", 1)
        return

    # No python-config, do something else on windows systems
    all_flags = dct["LDFLAGS"] + " " + dct["CFLAGS"]
    conf.parse_flags(all_flags, "PYEMBED")

    all_flags = dct["LDFLAGS"] + " " + dct["LDSHARED"] + " " + dct["CFLAGS"]
    conf.parse_flags(all_flags, "PYEXT")

    result = None
    if not dct["LDVERSION"]:
        dct["LDVERSION"] = env.PYTHON_VERSION

    # further simplification will be complicated
    # probe several candidate library names in several candidate directories
    for name in (
            "python" + dct["LDVERSION"],
            "python" + env.PYTHON_VERSION + "m",
            "python" + env.PYTHON_VERSION.replace(".", ""),
    ):

        # LIBPATH_PYEMBED is already set; see if it works.
        if not result and env.LIBPATH_PYEMBED:
            path = env.LIBPATH_PYEMBED
            conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
            result = conf.check(
                lib=name,
                uselib="PYEMBED",
                libpath=path,
                mandatory=False,
                msg="Checking for library %s in LIBPATH_PYEMBED" % name,
            )

        if not result and dct["LIBDIR"]:
            path = [dct["LIBDIR"]]
            conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
            result = conf.check(
                lib=name,
                uselib="PYEMBED",
                libpath=path,
                mandatory=False,
                msg="Checking for library %s in LIBDIR" % name,
            )

        if not result and dct["LIBPL"]:
            path = [dct["LIBPL"]]
            conf.to_log(
                "\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n"
            )
            result = conf.check(
                lib=name,
                uselib="PYEMBED",
                libpath=path,
                mandatory=False,
                msg="Checking for library %s in python_LIBPL" % name,
            )

        if not result:
            path = [os.path.join(dct["prefix"], "libs")]
            conf.to_log(
                "\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n"
            )
            result = conf.check(
                lib=name,
                uselib="PYEMBED",
                libpath=path,
                mandatory=False,
                msg="Checking for library %s in $prefix/libs" % name,
            )

        if result:
            break  # do not forget to set LIBPATH_PYEMBED

    if result:
        env.LIBPATH_PYEMBED = path
        env.append_value("LIB_PYEMBED", [name])
    else:
        conf.to_log("\n\n### LIB NOT FOUND\n")

    # under certain conditions, python extensions must link to
    # python libraries, not just python embedding programs.
    if Utils.is_win32 or dct["Py_ENABLE_SHARED"]:
        env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
        env.LIB_PYEXT = env.LIB_PYEMBED

    conf.to_log(
        "Include path for Python extensions (found via distutils module): {!r}\n"
        .format(dct["INCLUDEPY"]))
    env.INCLUDES_PYEXT = [dct["INCLUDEPY"]]
    env.INCLUDES_PYEMBED = [dct["INCLUDEPY"]]

    # Code using the Python API needs to be compiled with -fno-strict-aliasing
    if env.CC_NAME == "gcc":
        env.append_value("CFLAGS_PYEMBED", ["-fno-strict-aliasing"])
        env.append_value("CFLAGS_PYEXT", ["-fno-strict-aliasing"])
    if env.CXX_NAME == "gcc":
        env.append_value("CXXFLAGS_PYEMBED", ["-fno-strict-aliasing"])
        env.append_value("CXXFLAGS_PYEXT", ["-fno-strict-aliasing"])

    if env.CC_NAME == "msvc":
        from distutils.msvccompiler import MSVCCompiler

        dist_compiler = MSVCCompiler()
        dist_compiler.initialize()
        env.append_value("CFLAGS_PYEXT", dist_compiler.compile_options)
        env.append_value("CXXFLAGS_PYEXT", dist_compiler.compile_options)
        env.append_value("LINKFLAGS_PYEXT", dist_compiler.ldflags_shared)

    # See if it compiles
    conf.check(
        header_name="Python.h",
        define_name="HAVE_PYTHON_H",
        uselib="PYEMBED",
        fragment=FRAG,
        errmsg=
        "Distutils not installed? Broken python installation? Get python-config now!",
    )
def check_python_headers(conf, features='pyembed pyext'):
    """
    Check for the headers and libraries needed to extend or embed python.

    On success the environment variables xxx_PYEXT and xxx_PYEMBED are added
    (PYEXT: compiling python extensions, PYEMBED: embedding an interpreter).
    This variant additionally tries ``python-config --embed`` first for the
    pyembed flags, falling back to the plain invocation when that fails.

    :param features: space-separated subset of ``pyembed``/``pyext`` selecting
        which flag sets to detect; at least one is required
    """
    features = Utils.to_list(features)
    assert ('pyembed' in features) or (
        'pyext' in features
    ), "check_python_headers features must include 'pyembed' and/or 'pyext'"
    env = conf.env
    # a C or C++ compiler must be loaded before any flags can be tested
    if not env.CC_NAME and not env.CXX_NAME:
        conf.fatal('load a compiler first (gcc, g++, ..)')
    # bypass the detection below when cross-compiling
    if conf.python_cross_compile(features):
        return
    if not env.PYTHON_VERSION:
        conf.check_python_version()
    pybin = env.PYTHON
    if not pybin:
        conf.fatal('Could not find the python executable')
    # query distutils config variables (also yields pyext_PATTERN below)
    v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split(
    )
    try:
        lst = conf.get_python_variables(
            ["get_config_var('%s') or ''" % x for x in v])
    except RuntimeError:
        conf.fatal("Python development headers not found (-v for details).")
    vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
    conf.to_log("Configuration returned from %r:\n%s\n" %
                (pybin, '\n'.join(vals)))
    dct = dict(zip(v, lst))
    x = 'MACOSX_DEPLOYMENT_TARGET'
    if dct[x]:
        env[x] = conf.environ[x] = dct[x]
    env.pyext_PATTERN = '%s' + dct['SO']
    # try to locate pythonX.Y-config
    num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
    conf.find_program([
        ''.join(pybin) + '-config',
        'python%s-config' % num,
        'python-config-%s' % num,
        'python%sm-config' % num
    ],
                      var='PYTHON_CONFIG',
                      msg="python-config",
                      mandatory=False)
    if env.PYTHON_CONFIG:
        # detection already done in a previous run: nothing further to do
        if conf.env.HAVE_PYTHON_H:
            return
        # python2.6-config requires one run per flag group
        all_flags = [['--cflags', '--libs', '--ldflags']]
        if sys.hexversion < 0x2070000:
            all_flags = [[k] for k in all_flags[0]]
        xx = env.CXX_NAME and 'cxx' or 'c'
        if 'pyembed' in features:
            for flags in all_flags:
                # Python 3.8+ needs --embed to link libpython; older
                # python-config rejects the option, hence the fallback
                embedflags = flags + ['--embed']
                try:
                    conf.check_cfg(
                        msg='Asking python-config for pyembed %r flags' %
                        ' '.join(embedflags),
                        path=env.PYTHON_CONFIG,
                        package='',
                        uselib_store='PYEMBED',
                        args=embedflags)
                except conf.errors.ConfigurationError:
                    conf.check_cfg(
                        msg='Asking python-config for pyembed %r flags' %
                        ' '.join(flags),
                        path=env.PYTHON_CONFIG,
                        package='',
                        uselib_store='PYEMBED',
                        args=flags)
            try:
                conf.test_pyembed(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352: add LIBDIR and retry
                if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
                    env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
                    conf.test_pyembed(xx)
                else:
                    raise
        if 'pyext' in features:
            for flags in all_flags:
                conf.check_cfg(msg='Asking python-config for pyext %r flags' %
                               ' '.join(flags),
                               path=env.PYTHON_CONFIG,
                               package='',
                               uselib_store='PYEXT',
                               args=flags)
            try:
                conf.test_pyext(xx)
            except conf.errors.ConfigurationError:
                # python bug 7352: add LIBDIR and retry
                if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
                    env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
                    conf.test_pyext(xx)
                else:
                    raise
        conf.define('HAVE_PYTHON_H', 1)
        return
    # no python-config available: derive the flags from distutils variables
    all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEMBED')
    all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEXT')
    result = None
    if not dct["LDVERSION"]:
        dct["LDVERSION"] = env.PYTHON_VERSION
    # probe several candidate library names in several candidate directories
    for name in ('python' + dct['LDVERSION'],
                 'python' + env.PYTHON_VERSION + 'm',
                 'python' + env.PYTHON_VERSION.replace('.', '')):
        if not result and env.LIBPATH_PYEMBED:
            path = env.LIBPATH_PYEMBED
            conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
            result = conf.check(
                lib=name,
                uselib='PYEMBED',
                libpath=path,
                mandatory=False,
                msg='Checking for library %s in LIBPATH_PYEMBED' % name)
        if not result and dct['LIBDIR']:
            path = [dct['LIBDIR']]
            conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
            result = conf.check(lib=name,
                                uselib='PYEMBED',
                                libpath=path,
                                mandatory=False,
                                msg='Checking for library %s in LIBDIR' % name)
        if not result and dct['LIBPL']:
            path = [dct['LIBPL']]
            conf.to_log(
                "\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n"
            )
            result = conf.check(lib=name,
                                uselib='PYEMBED',
                                libpath=path,
                                mandatory=False,
                                msg='Checking for library %s in python_LIBPL' %
                                name)
        if not result:
            path = [os.path.join(dct['prefix'], "libs")]
            conf.to_log(
                "\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n"
            )
            result = conf.check(lib=name,
                                uselib='PYEMBED',
                                libpath=path,
                                mandatory=False,
                                msg='Checking for library %s in $prefix/libs' %
                                name)
        if result:
            break
    if result:
        # remember the library path/name that worked
        env.LIBPATH_PYEMBED = path
        env.append_value('LIB_PYEMBED', [name])
    else:
        conf.to_log("\n\n### LIB NOT FOUND\n")
    # under certain conditions python extensions must link against the
    # python library too, not just embedding programs
    if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
        env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
        env.LIB_PYEXT = env.LIB_PYEMBED
    conf.to_log(
        "Include path for Python extensions (found via distutils module): %r\n"
        % (dct['INCLUDEPY'], ))
    env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
    env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
    # code using the Python API needs -fno-strict-aliasing with gcc
    if env.CC_NAME == 'gcc':
        env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env.CXX_NAME == 'gcc':
        env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env.CC_NAME == "msvc":
        from distutils.msvccompiler import MSVCCompiler
        dist_compiler = MSVCCompiler()
        dist_compiler.initialize()
        env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
    # finally verify that Python.h actually compiles
    conf.check(
        header_name='Python.h',
        define_name='HAVE_PYTHON_H',
        uselib='PYEMBED',
        fragment=FRAG,
        errmsg=
        'Distutils not installed? Broken python installation? Get python-config now!'
    )
Beispiel #51
0
def multicheck(self, *k, **kw):
	"""
	Runs configuration tests in parallel; results are printed sequentially at the end of the build
	but each test must provide its own msg value to display a line::

		def test_build(ctx):
			ctx.in_msg = True # suppress console outputs
			ctx.check_large_file(mandatory=False)

		conf.multicheck(
			{'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
			{'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
			{'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
			{'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
			msg       = 'Checking for headers in parallel',
			mandatory = True, # mandatory tests raise an error at the end
			run_all_tests = True, # try running all tests
		)

	The configuration tests may modify the values in conf.env in any order, and the define
	values can affect configuration tests being executed. It is hence recommended
	to provide `uselib_store` values with `global_define=False` to prevent such issues.
	"""
	self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw)

	# Force a copy so that threads append to the same list at least
	# no order is guaranteed, but the values should not disappear at least
	for var in ('DEFINES', DEFKEYS):
		self.env.append_value(var, [])
	self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}

	# define a minimal build-context stand-in that will execute our tests
	class par(object):
		def __init__(self):
			self.keep = False
			self.task_sigs = {}
			self.progress_bar = 0
		def total(self):
			return len(tasks)
		def to_log(self, *k, **kw):
			return

	bld = par()
	bld.keep = kw.get('run_all_tests', True)
	bld.imp_sigs = {}
	tasks = []

	id_to_task = {}
	for dct in k:
		# one cfgtask per requested configuration test
		# (fix: x.args was previously assigned twice)
		x = Task.classes['cfgtask'](bld=bld, env=None)
		tasks.append(x)
		x.args = dct
		x.bld = bld
		x.conf = self

		# bind a logger that will keep the info in memory
		x.logger = Logs.make_mem_logger(str(id(x)), self.logger)

		if 'id' in dct:
			id_to_task[dct['id']] = x

	# second pass to set dependencies with after_test/before_test
	# (fix: use dict.get so a missing id raises the intended ValueError
	# instead of an opaque KeyError)
	for x in tasks:
		for key in Utils.to_list(x.args.get('before_tests', [])):
			tsk = id_to_task.get(key)
			if not tsk:
				raise ValueError('No test named %r' % key)
			tsk.run_after.add(x)
		for key in Utils.to_list(x.args.get('after_tests', [])):
			tsk = id_to_task.get(key)
			if not tsk:
				raise ValueError('No test named %r' % key)
			x.run_after.add(tsk)

	def it():
		# feed all tasks to the scheduler at once, then signal exhaustion
		yield tasks
		while 1:
			yield []
	bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
	bld.multicheck_lock = Utils.threading.Lock()
	p.biter = it()

	self.end_msg('started')
	p.start()

	# flush the logs in order into the config.log
	for x in tasks:
		x.logger.memhandler.flush()

	self.start_msg('-> processing test results')
	if p.error:
		for x in p.error:
			if getattr(x, 'err_msg', None):
				self.to_log(x.err_msg)
				self.end_msg('fail', color='RED')
				raise Errors.WafError('There is an error in the library, read config.log for more information')

	failure_count = 0
	for x in tasks:
		if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
			failure_count += 1

	if failure_count:
		self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw)
	else:
		self.end_msg('all ok', **kw)

	# a mandatory test that failed aborts the configuration
	for x in tasks:
		if x.hasrun != Task.SUCCESS:
			if x.args.get('mandatory', True):
				self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
Beispiel #52
0
def apply_tex(self):
    """
    Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
    """
    # default to pdflatex when no (known) processor type was requested
    if getattr(self, 'type', None) not in ('latex', 'pdflatex', 'xelatex'):
        self.type = 'pdflatex'

    outs = Utils.to_list(getattr(self, 'outs', []))

    # prompt for incomplete files (else the batchmode is used)
    self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

    # collect manual dependencies: strings are resolved against the source
    # tree (deduplicated), nodes are taken as-is
    manual_deps = []
    if getattr(self, 'deps', None):
        for dep in self.to_list(self.deps):
            if isinstance(dep, str):
                found = self.path.find_resource(dep)
                if not found:
                    self.bld.fatal('Could not find %r for %r' % (dep, self))
                if found not in manual_deps:
                    manual_deps.append(found)
            elif isinstance(dep, Node.Node):
                manual_deps.append(dep)

    # output extension produced by each processor type
    target_ext = {'latex': '.dvi', 'pdflatex': '.pdf', 'xelatex': '.pdf'}

    for node in self.to_nodes(self.source):
        task = self.create_task(self.type, node,
                                node.change_ext(target_ext[self.type]))
        task.env = self.env

        # attach the manual dependencies
        for n in manual_deps:
            if n not in task.dep_nodes:
                task.dep_nodes.append(n)

        # texinputs is a nasty beast
        if hasattr(self, 'texinputs_nodes'):
            task.texinputs_nodes = self.texinputs_nodes
        else:
            task.texinputs_nodes = [
                node.parent,
                node.parent.get_bld(), self.path,
                self.path.get_bld()
            ]
            raw = os.environ.get('TEXINPUTS', '')
            if self.env.TEXINPUTS:
                raw += os.pathsep + self.env.TEXINPUTS
            for entry in (raw.split(os.pathsep) if raw else []):
                if not entry:
                    continue
                if os.path.isabs(entry):
                    found = self.bld.root.find_node(entry)
                    if found:
                        task.texinputs_nodes.append(found)
                    else:
                        Logs.error('Invalid TEXINPUTS folder %s' % entry)
                else:
                    Logs.error(
                        'Cannot resolve relative paths in TEXINPUTS %s' %
                        entry)

        # post-processing conversions requested via outs=...
        if self.type == 'latex':
            if 'ps' in outs:
                conv = self.create_task('dvips', task.outputs,
                                        node.change_ext('.ps'))
                conv.env.env = dict(os.environ)
            if 'pdf' in outs:
                conv = self.create_task('dvipdf', task.outputs,
                                        node.change_ext('.pdf'))
                conv.env.env = dict(os.environ)
        elif self.type == 'pdflatex':
            if 'ps' in outs:
                self.create_task('pdf2ps', task.outputs,
                                 node.change_ext('.ps'))
    self.source = []
Beispiel #53
0
def exec_cfg(self, kw):
    """
    Execute the program *pkg-config* (or a compatible ``*-config`` tool):

    * if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
    * if modversion is given, then return the module version
    * else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable

    :param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
    :type atleast_pkgconfig_version: string
    :param package: package name, for example *gtk+-2.0*
    :type package: string
    :param uselib_store: if the test is successful, define HAVE\_*name*. It is also used to define *conf.env.FLAGS_name* variables.
    :type uselib_store: string
    :param modversion: if provided, return the version of the given module and define *name*\_VERSION
    :type modversion: string
    :param args: arguments to give to *package* when retrieving flags
    :type args: list of string
    :param variables: return the values of particular variables
    :type variables: list of string
    :param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
    :type define_variable: dict(string: string)
    """
    def define_it():
        # record HAVE_<NAME> in the configuration (uselib_store wins over the package name)
        self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1,
                    0)

    # pkg-config version
    if 'atleast_pkgconfig_version' in kw:
        cmd = [
            kw['path'],
            '--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']
        ]
        # cmd_and_log is expected to raise on a non-zero exit status, so
        # reaching the lines below means the version check passed
        self.cmd_and_log(cmd)
        if not 'okmsg' in kw:
            kw['okmsg'] = 'yes'
        return

    # checking for the version of a module
    # cfg_ver maps flag spellings (e.g. 'atleast-version') to kw keys (e.g. 'atleast_version')
    for x in cfg_ver:
        y = x.replace('-', '_')
        if y in kw:
            self.cmd_and_log(
                [kw['path'], '--%s=%s' % (x, kw[y]), kw['package']])
            if not 'okmsg' in kw:
                kw['okmsg'] = 'yes'
            define_it()
            break

    # retrieving the version of a module
    if 'modversion' in kw:
        version = self.cmd_and_log(
            [kw['path'], '--modversion', kw['modversion']]).strip()
        self.define(
            '%s_VERSION' %
            Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])),
            version)
        return version

    lst = [kw['path']]

    # extra --define-variable pairs, from kw or from conf.env.PKG_CONFIG_DEFINES
    defi = kw.get('define_variable', None)
    if not defi:
        defi = self.env.PKG_CONFIG_DEFINES or {}
    for key, val in defi.items():
        lst.append('--define-variable=%s=%s' % (key, val))

    # remember whether static flags were requested so parse_flags can honor it
    static = False
    if 'args' in kw:
        args = Utils.to_list(kw['args'])
        if '--static' in args or '--static-libs' in args:
            static = True
        lst += args

    # tools like pkgconf expect the package argument after the -- ones -_-
    lst.extend(Utils.to_list(kw['package']))

    # retrieving variables of a module
    if 'variables' in kw:
        env = kw.get('env', self.env)
        uselib = kw.get('uselib_store', kw['package'].upper())
        vars = Utils.to_list(kw['variables'])
        for v in vars:
            val = self.cmd_and_log(lst + ['--variable=' + v]).strip()
            var = '%s_%s' % (uselib, v)
            env[var] = val
        if not 'okmsg' in kw:
            kw['okmsg'] = 'yes'
        return

    # so we assume the command-line will output flags to be parsed afterwards
    ret = self.cmd_and_log(lst)
    if not 'okmsg' in kw:
        kw['okmsg'] = 'yes'

    define_it()
    self.parse_flags(ret,
                     kw.get('uselib_store', kw['package'].upper()),
                     kw.get('env', self.env),
                     force_static=static)
    return ret
Beispiel #54
0
def find_program(self, filename, **kw):
	"""
	Search for a program on the operating system

	When var is used, you may set os.environ[var] to help find a specific program version, for example::

		$ CC='ccache gcc' waf configure

	:param path_list: paths to use for searching
	:type param_list: list of string
	:param var: store the result to conf.env[var], by default use filename.upper()
	:type var: string
	:param ext: list of extensions for the binary (do not add an extension for portability)
	:type ext: list of string
	:param msg: name to display in the log, by default filename is used
	:type msg: string
	:param interpreter: interpreter for the program
	:type interpreter: ConfigSet variable key
	"""
	# candidate extensions depend on the platform, unless overridden with kw['exts']
	exts = kw.get('exts', '.exe,.com,.bat,.cmd' if Utils.is_win32 else ',.sh,.pl,.py')

	# environment to search: explicit kw, then self.environ, then os.environ
	try:
		environ = kw['environ']
	except KeyError:
		environ = getattr(self, 'environ', os.environ)

	filename = Utils.to_list(filename)
	msg = kw.get('msg', ', '.join(filename))

	var = kw.get('var', '') or re.sub(r'[-.]', '_', filename[0].upper())

	paths = kw.get('path_list', '')
	if paths:
		paths = Utils.to_list(paths)
	else:
		paths = environ.get('PATH', '').split(os.pathsep)

	ret = ''
	if var in environ:
		filename = environ[var]
		if os.path.isfile(filename):
			# typical CC=/usr/bin/gcc waf configure build
			ret = [filename]
		else:
			# case  CC='ccache gcc' waf configure build
			ret = self.cmd_to_list(filename)
	elif self.env[var]:
		# set by the user in the wscript file
		ret = self.cmd_to_list(self.env[var])
	else:
		# search the filesystem, then fall back to the Windows registry
		found = self.find_binary(filename, exts.split(','), paths)
		if not found and Utils.winreg:
			found = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
		if not found and Utils.winreg:
			found = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
		ret = self.cmd_to_list(found)

	# display a single string for one-element results
	if not ret:
		retmsg = False
	elif len(ret) == 1:
		retmsg = ret[0]
	else:
		retmsg = ret

	self.msg("Checking for program '%s'" % msg, retmsg, **kw)
	if not kw.get('quiet', None):
		self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, paths, var, ret))

	if not ret:
		self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)

	interpreter = kw.get('interpreter', None)
	if interpreter is None:
		# plain executable: verify that it can actually be run
		if not Utils.check_exe(ret[0], env=environ):
			self.fatal('Program %r is not executable' % ret)
		self.env[var] = ret
	else:
		# script: prepend the previously-configured interpreter command
		self.env[var] = self.env[interpreter] + ret

	return ret
Beispiel #55
0
 def post(self):
     """
     Runs the task generator: orders the method names in ``self.meths``
     (plus the methods bound to the features in ``self.features``) by the
     precedence constraints in ``self.prec``, then calls each method.

     :return: False if the task generator was already posted, True otherwise
     :raises Errors.WafError: on a cycle in the method constraints, or when a
         scheduled name is not a method of the task generator
     """
     # run at most once per task generator
     if getattr(self, 'posted', None):
         return False
     self.posted = True
     # collect the method names to run: explicit ones plus those bound to features
     keys = set(self.meths)
     keys.update(feats['*'])
     self.features = Utils.to_list(self.features)
     for x in self.features:
         st = feats[x]
         if st:
             keys.update(st)
         elif not x in Task.classes:
             Logs.warn(
                 'feature %r does not exist - bind at least one method to it?',
                 x)
     # restrict the precedence table to the methods that will actually run
     prec = {}
     prec_tbl = self.prec
     for x in prec_tbl:
         if x in keys:
             prec[x] = prec_tbl[x]
     # topological sort: seed with the methods that have no predecessor
     tmp = []
     for a in keys:
         for x in prec.values():
             if a in x:
                 break
         else:
             tmp.append(a)
     # reverse-sort + pop() processes names in alphabetical order (deterministic)
     tmp.sort(reverse=True)
     out = []
     while tmp:
         e = tmp.pop()
         if e in keys:
             out.append(e)
         try:
             nlst = prec[e]
         except KeyError:
             pass
         else:
             del prec[e]
             # release successors whose remaining predecessors are all done
             for x in nlst:
                 for y in prec:
                     if x in prec[y]:
                         break
                 else:
                     tmp.append(x)
                     tmp.sort(reverse=True)
     # leftover constraints mean a dependency cycle between methods
     if prec:
         buf = ['Cycle detected in the method execution:']
         for k, v in prec.items():
             buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
         raise Errors.WafError('\n'.join(buf))
     self.meths = out
     # execute the methods in the computed order
     Logs.debug('task_gen: posting %s %d', self, id(self))
     for x in out:
         try:
             v = getattr(self, x)
         except AttributeError:
             raise Errors.WafError(
                 '%r is not a valid task generator method' % x)
         Logs.debug('task_gen: -> %s (%d)', x, id(self))
         v()
     Logs.debug('task_gen: posted %s', self.name)
     return True
Beispiel #56
0
def create_javadoc(self):
	"""
	Create a *javadoc* task, pass the classpath of the task generator along,
	and normalize the ``javadoc_package`` and ``javadoc_output`` attributes.
	"""
	task = self.create_task('javadoc')
	task.classpath = getattr(self, 'classpath', [])
	# accept a space-separated string or a list for the package attribute
	self.javadoc_package = Utils.to_list(self.javadoc_package)
	# accept a path string for the output directory; convert it to a node
	output = self.javadoc_output
	if not isinstance(output, Node.Node):
		self.javadoc_output = self.bld.path.find_or_declare(output)
Beispiel #57
0
def check_python_headers(conf, features='pyembed pyext'):
	"""
	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

	* PYEXT: for compiling python extensions
	* PYEMBED: for embedding a python interpreter

	:param conf: configuration context
	:param features: subset of 'pyembed' and/or 'pyext' selecting what to check for
	:type features: string or list of string
	"""
	features = Utils.to_list(features)
	assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
	env = conf.env
	# a C or C++ compiler is needed for the compilation tests below
	if not env['CC_NAME'] and not env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	# bypass all the code below for cross-compilation
	if conf.python_cross_compile(features):
		return

	if not env['PYTHON_VERSION']:
		conf.check_python_version()

	pybin = env.PYTHON
	if not pybin:
		conf.fatal('Could not find the python executable')

	# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
	try:
		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")

	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
	conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))

	dct = dict(zip(v, lst))
	x = 'MACOSX_DEPLOYMENT_TARGET'
	if dct[x]:
		env[x] = conf.environ[x] = dct[x]
	env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake


	# Try to get pythonX.Y-config
	num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)

	if env.PYTHON_CONFIG:
		# python2.6-config requires 3 runs
		all_flags = [['--cflags', '--libs', '--ldflags']]
		if sys.hexversion < 0x2070000:
			all_flags = [[k] for k in all_flags[0]]

		xx = env.CXX_NAME and 'cxx' or 'c'

		if 'pyembed' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)

			conf.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Getting pyembed flags from python-config',
				fragment=FRAG, errmsg='Could not build a python embedded interpreter',
				features='%s %sprogram pyembed' % (xx, xx))

		if 'pyext' in features:
			for flags in all_flags:
				conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)

			conf.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Getting pyext flags from python-config',
				features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')

		conf.define('HAVE_PYTHON_H', 1)
		return

	# No python-config, do something else on windows systems
	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEMBED')

	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEXT')

	# look for the python library under several candidate names and locations
	result = None
	if not dct["LDVERSION"]:
		dct["LDVERSION"] = env['PYTHON_VERSION']

	# further simplification will be complicated
	for name in ('python' + dct['LDVERSION'], 'python' + env['PYTHON_VERSION'] + 'm', 'python' + env['PYTHON_VERSION'].replace('.', '')):

		# LIBPATH_PYEMBED is already set; see if it works.
		if not result and env['LIBPATH_PYEMBED']:
			path = env['LIBPATH_PYEMBED']
			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)

		if not result and dct['LIBDIR']:
			path = [dct['LIBDIR']]
			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)

		if not result and dct['LIBPL']:
			path = [dct['LIBPL']]
			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)

		if not result:
			path = [os.path.join(dct['prefix'], "libs")]
			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)

		if result:
			break # do not forget to set LIBPATH_PYEMBED

	if result:
		env['LIBPATH_PYEMBED'] = path
		env.append_value('LIB_PYEMBED', [name])
	else:
		conf.to_log("\n\n### LIB NOT FOUND\n")

	# under certain conditions, python extensions must link to
	# python libraries, not just python embedding programs.
	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
		env['LIB_PYEXT'] = env['LIB_PYEMBED']

	conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
	env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
	env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]

	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env['CC_NAME'] == 'gcc':
		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
	if env['CXX_NAME'] == 'gcc':
		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

	# reuse the compile options that distutils would pass to MSVC
	if env.CC_NAME == "msvc":
		from distutils.msvccompiler import MSVCCompiler
		dist_compiler = MSVCCompiler()
		dist_compiler.initialize()
		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

	# See if it compiles
	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
Beispiel #58
0
 def read_out(cmd):
     """Run *cmd* under the configured Ruby interpreter (with rbconfig loaded) and return the captured output split into a list."""
     return Utils.to_list(
         self.cmd_and_log(self.env.RUBY + ["-rrbconfig", "-e", cmd]))
Beispiel #59
0
def process_xcode(self):
    """
    Create an XCode native target for this task generator and register it on
    the XCode project held by the build context.

    Returns early when the build context has no ``project`` attribute or when
    the task generator has no ``target_type``.

    :raises Errors.WafError: if ``target_type`` is not a key of TARGET_TYPES
    """
    bld = self.bld
    try:
        p = bld.project
    except AttributeError:
        # no xcode project on this build context - nothing to generate
        return

    if not hasattr(self, 'target_type'):
        return

    products_group = bld.products_group

    # UI group that will hold the files of this target
    target_group = PBXGroup(self.name)
    p.mainGroup.children.append(target_group)

    # Determine what type to build - framework, app bundle etc.
    target_type = getattr(self, 'target_type', 'app')
    if target_type not in TARGET_TYPES:
        raise Errors.WafError(
            "Target type '%s' does not exists. Available options are '%s'. In target '%s'"
            % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
    else:
        target_type = TARGET_TYPES[target_type]
    file_ext = target_type[2]

    # Create the output node
    target_node = self.path.find_or_declare(self.name + file_ext)
    target = PBXNativeTarget(self.name, target_node, target_type, [], [])

    products_group.children.append(target.productReference)

    # Pull source files from the 'source' attribute and assign them to a UI group.
    # Use a default UI group named 'Source' unless the user
    # provides a 'group_files' dictionary to customize the UI grouping.
    sources = getattr(self, 'source', [])
    if hasattr(self, 'group_files'):
        group_files = getattr(self, 'group_files', [])
        for grpname, files in group_files.items():
            group = bld.create_group(grpname, files)
            target_group.children.append(group)
    else:
        group = bld.create_group('Source', sources)
        target_group.children.append(group)

    # Create a PBXFileReference for each source file.
    # If the source file already exists as a PBXFileReference in any of the UI groups, then
    # reuse that PBXFileReference object (XCode does not like it if we don't reuse)
    for idx, path in enumerate(sources):
        fileref = PBXFileReference(path.name, path.abspath())
        existing_fileref = target_group.find_fileref(fileref)
        if existing_fileref:
            sources[idx] = existing_fileref
        else:
            sources[idx] = fileref

    # If the 'source' attribute contains any file extension that XCode can't work with,
    # then remove it. The allowed file extensions are defined in XCODE_EXTS.
    # (parameter renamed from 'file' to avoid shadowing the builtin)
    is_valid_file_extension = lambda node: os.path.splitext(node.path)[
        1] in XCODE_EXTS
    sources = list(filter(is_valid_file_extension, sources))

    buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
    target.add_build_phase(PBXSourcesBuildPhase(buildfiles))

    # Check if any framework to link against is some other target we've made
    libs = getattr(self, 'tmp_use_seen', [])
    for lib in libs:
        use_target = p.get_target(lib)
        if use_target:
            # Create an XCode dependency so that XCode knows to build the other target before this target
            dependency = p.create_target_dependency(use_target,
                                                    use_target.name)
            target.add_dependency(dependency)

            buildphase = PBXFrameworksBuildPhase(
                [PBXBuildFile(use_target.productReference)])
            target.add_build_phase(buildphase)
            # the library is linked via the build phase above - drop it from LIB
            if lib in self.env.LIB:
                self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))

    # If 'export_headers' is present, add files to the Headers build phase in xcode.
    # These are files that'll get packed into the Framework for instance.
    exp_hdrs = getattr(self, 'export_headers', [])
    hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
    files = [
        p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath()))
        for n in hdrs
    ]
    files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public', )}) for f in files]
    buildphase = PBXHeadersBuildPhase(files)
    target.add_build_phase(buildphase)

    # Merge frameworks and libs into one list, and prefix the frameworks
    frameworks = Utils.to_list(self.env.FRAMEWORK)
    frameworks = ' '.join(
        ['-framework %s' % (f.split('.framework')[0]) for f in frameworks])

    libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
    libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)

    # Override target specific build settings
    bldsettings = {
        'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
        'LIBRARY_SEARCH_PATHS':
        ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) +
        Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR),
        'FRAMEWORK_SEARCH_PATHS':
        ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
        'OTHER_LDFLAGS':
        libs + ' ' + frameworks,
        'OTHER_LIBTOOLFLAGS':
        bld.env['LINKFLAGS'],
        'OTHER_CPLUSPLUSFLAGS':
        Utils.to_list(self.env['CXXFLAGS']),
        'OTHER_CFLAGS':
        Utils.to_list(self.env['CFLAGS']),
        'INSTALL_PATH': []
    }

    # Install path
    installpaths = Utils.to_list(getattr(self, 'install', []))
    prodbuildfile = PBXBuildFile(target.productReference)
    for instpath in installpaths:
        bldsettings['INSTALL_PATH'].append(instpath)
        target.add_build_phase(
            PBXCopyFilesBuildPhase([prodbuildfile], instpath))

    if not bldsettings['INSTALL_PATH']:
        del bldsettings['INSTALL_PATH']

    # Create build settings which can override the project settings. Defaults to none if user
    # did not pass argument. This will be filled up with target specific
    # search paths, libs to link etc.
    settings = getattr(self, 'settings', {})

    # The keys represents different build configuration, e.g. Debug, Release and so on..
    # Insert our generated build settings to all configuration names.
    # BUGFIX: dict views cannot be concatenated with '+' on Python 3;
    # use a set union instead of set(a.keys() + b.keys()).
    keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
    for k in keys:
        if k in settings:
            settings[k].update(bldsettings)
        else:
            # NOTE(review): configurations without explicit settings share the
            # same bldsettings dict object; harmless as long as it is not
            # mutated afterwards - confirm if per-config mutation is added
            settings[k] = bldsettings

    for k, v in settings.items():
        target.add_configuration(XCBuildConfiguration(k, v))

    p.add_target(target)
Beispiel #60
0
	def read_out(cmd):
		"""Run *cmd* through the perl command prefix captured from the enclosing scope and return the output split into a list."""
		return Utils.to_list(self.cmd_and_log(perl + cmd))