Пример #1
0
	def restore(self):
		"""
		Load the data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`
		"""
		try:
			env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
		except (IOError, OSError):
			# no saved configuration: first run, or the cache dir was removed
			pass
		else:
			if env['version'] < Context.HEXVERSION:
				raise Errors.WafError('Version mismatch! reconfigure the project')
			for t in env['tools']:
				# re-initialize each tool recorded during configuration
				self.setup(**t)

		dbfn = os.path.join(self.variant_dir, Context.DBFILE)
		try:
			data = Utils.readf(dbfn, 'rb')
		except (IOError, EOFError):
			# handle missing file/empty file
			Logs.debug('build: Could not load the build cache %s (missing)' % dbfn)
		else:
			try:
				# unpickling restores Node objects: bind the node class globally first,
				# under a lock since waflib.Node.Nod3 is shared state
				waflib.Node.pickle_lock.acquire()
				waflib.Node.Nod3 = self.node_class
				try:
					data = cPickle.loads(data)
				except Exception as e:
					# a corrupt cache is not fatal: the build starts from scratch
					Logs.debug('build: Could not pickle the build cache %s: %r' % (dbfn, e))
				else:
					for x in SAVED_ATTRS:
						setattr(self, x, data[x])
			finally:
				waflib.Node.pickle_lock.release()

		self.init_dirs()
Пример #2
0
	def check_err_features(self):
		"""Warn about feature names that look invalid or incomplete."""
		features = self.to_list(self.features)
		if 'shlib' in features:
			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
		for lang in ('c', 'cxx', 'd', 'fc'):
			if lang in features:
				continue
			# e.g. 'cprogram' without 'c' is probably a mistake
			link_features = [lang + kind for kind in ('program', 'shlib', 'stlib')]
			if features and features[0] in link_features:
				Logs.error('%r features is probably missing %r' % (self, lang))
Пример #3
0
	def call(self, *k, **kw):
		"""
		Wrap the original call and warn about commonly mistyped keyword arguments.

		:param k: positional arguments forwarded to the wrapped call
		:param kw: keyword arguments forwarded to the wrapped call
		:returns: whatever the wrapped call returns
		"""
		ret = oldcall(self, *k, **kw)
		for x in typos:
			if x in kw:
				# removed a dead local (err=True) that was assigned but never read
				Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
		return ret
Пример #4
0
	def do_link(self, src, tgt):
		"""
		Create a symlink from tgt to src.

		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_link` to remove the symlink.

		:param src: file name as absolute path
		:type src: string
		:param tgt: file destination, as absolute path
		:type tgt: string
		"""
		dirname, _ = os.path.split(tgt)
		Utils.check_dir(dirname)

		# (re)create the link only when it is absent or points elsewhere
		# (os.readlink is short-circuited: it only runs on existing links)
		needs_link = not os.path.islink(tgt) or os.readlink(tgt) != src

		if needs_link:
			try:
				os.remove(tgt)
			except OSError:
				pass
			if not self.progress_bar:
				Logs.info('+ symlink %s (to %s)' % (tgt, src))
			os.symlink(src, tgt)
		elif not self.progress_bar:
			Logs.info('- symlink %s (to %s)' % (tgt, src))
Пример #5
0
	def ant_glob(self, *k, **kw):
		"""Warn when an ant_glob pattern contains '..', then delegate to the original."""
		if k:
			for pattern in Utils.to_list(k[0]):
				if '..' in pattern.split('/'):
					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
		return old_ant_glob(self, *k, **kw)
Пример #6
0
	def hash_env_vars(self, env, vars_lst):
		"""
		Hash the values of selected configuration set variables::

			def build(bld):
				bld.hash_env_vars(bld.env, ['CXX', 'CC'])

		Results are memoized in an internal cache (``self.cache_env``).

		:param env: configuration set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param vars_lst: list of variable names
		:type vars_lst: list of string
		"""
		if not env.table:
			# empty set: hash the parent instead, if there is one
			env = env.parent
			if not env:
				return Utils.SIG_NIL

		# cache key: identity of the set plus the requested variable names
		idx = str(id(env)) + str(vars_lst)
		try:
			cache = self.cache_env
		except AttributeError:
			cache = self.cache_env = {}
		else:
			try:
				return self.cache_env[idx]
			except KeyError:
				pass

		values = [env[name] for name in vars_lst]
		cache[idx] = ret = Utils.h_list(values)
		Logs.debug('envhash: %s %r', Utils.to_hex(ret), values)
		return ret
Пример #7
0
	def env_path(parent_dir_var, name):
		"""Join *name* under the directory named by environment variable *parent_dir_var*, falling back to LIBDIR."""
		parent = os.getenv(parent_dir_var)
		if not parent:
			Logs.warn('Environment variable %s unset, using LIBDIR\n' % parent_dir_var)
			return os.path.join(conf.env['LIBDIR'], name)
		return os.path.join(parent, name)
Пример #8
0
 def can_retrieve_cache(self):
     # Try to restore this task's output files from the global build cache.
     # Returns True on success (and sets self.cached), None otherwise.
     if not getattr(self, "outputs", None):
         return None
     sig = self.signature()
     # cache folder name: task uid + task signature, both in hex
     ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
     dname = os.path.join(self.generator.bld.cache_global, ssig)
     try:
         t1 = os.stat(dname).st_mtime
     except OSError:
         return None
     for node in self.outputs:
         orig = os.path.join(dname, node.name)
         try:
             shutil.copy2(orig, node.abspath())
             # touch the cached file to mark it as recently used
             os.utime(orig, None)
         except (OSError, IOError):
             Logs.debug("task: failed retrieving file")
             return None
     try:
         t2 = os.stat(dname).st_mtime
     except OSError:
         return None
     # the folder mtime changed while copying: another process may have been
     # writing into it, so treat the retrieval as invalid
     if t1 != t2:
         return None
     for node in self.outputs:
         node.sig = sig
         if self.generator.bld.progress_bar < 1:
             self.generator.bld.to_log("restoring from cache %r\n" % node.abspath())
     self.cached = True
     return True
Пример #9
0
	def clean(self):
		"""
		Remove most files from the build directory, and reset all caches.

		Custom lists of files to clean can be declared as `bld.clean_files`.
		For example, exclude `build/program/myprogram` from getting removed::

			def build(bld):
				bld.clean_files = bld.bldnode.ant_glob('**',
					excl='.lock* config.log c4che/* config.h program/myprogram',
					quiet=True, generator=True)
		"""
		Logs.debug('build: clean called')

		if hasattr(self, 'clean_files'):
			for node in self.clean_files:
				node.delete()
		elif self.bldnode != self.srcnode:
			# would lead to a disaster if top == out
			keep = []
			for env in self.all_envs.values():
				keep.extend(self.root.find_or_declare(f) for f in env[CFG_FILES])
			for node in self.bldnode.ant_glob('**/*', excl='.lock* *conf_check_*/** config.log c4che/*', quiet=True):
				if node not in keep:
					node.delete()
		self.root.children = {}

		# reset all persisted state except the root node itself
		for attr in SAVED_ATTRS:
			if attr != 'root':
				setattr(self, attr, {})
Пример #10
0
	def cmd_and_log(self, cmd, **kw):
		"""
		execute a command, return the stdout
		this method should be used whenever possible for proper logging

		to obtain stdout+stderr, pass output=BOTH in the arguments (or output=0)
		to obtain just stderr, pass output=STDERR in the arguments (or output=-1)

		@param cmd: args for subprocess.Popen
		@param kw: keyword arguments for subprocess.Popen
		@raise Errors.WafError: on execution failure, or with a non-zero returncode attached
		"""
		subprocess = Utils.subprocess
		# a plain string command is run through the shell, a list is executed directly
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)

		# 'quiet' controls which streams are echoed to the log (STDOUT/STDERR/BOTH)
		if 'quiet' in kw:
			quiet = kw['quiet']
			del kw['quiet']
		else:
			quiet = None

		# 'output' selects the return value (stdout by default)
		if 'output' in kw:
			to_ret = kw['output']
			del kw['output']
		else:
			to_ret = STDOUT

		kw['stdout'] = kw['stderr'] = subprocess.PIPE
		if not quiet:
			self.to_log(cmd)
		try:
			p = subprocess.Popen(cmd, **kw)
			(out, err) = p.communicate()
		except Exception as e:
			# NOTE(review): if Popen itself failed, 'err' is unbound here; the inner
			# try deliberately swallows the resulting NameError
			try:
				self.to_log(str(err))
			except:
				pass
			raise Errors.WafError('Execution failure', ex=e)

		# subprocess yields bytes on python3: decode before logging/returning
		if not isinstance(out, str):
			out = out.decode('utf-8')
		if not isinstance(err, str):
			err = err.decode('utf-8')

		if out and quiet != STDOUT and quiet != BOTH:
			self.to_log('out: %s' % out)
		if err and quiet != STDERR and quiet != BOTH:
			self.to_log('err: %s' % err)

		if p.returncode:
			# attach the return code so callers can inspect it on the exception
			e = Errors.WafError('command %r returned %r' % (cmd, p.returncode))
			e.returncode = p.returncode
			raise e

		if to_ret == BOTH:
			return (out, err)
		elif to_ret == STDERR:
			return err
		return out
Пример #11
0
 def runnable_status(self):
     # A task may only run once every task it depends on has completed.
     for dep in self.run_after:
         if not dep.hasrun:
             return ASK_LATER
     bld = self.generator.bld
     # the signature may not be computable yet (e.g. missing inputs)
     try:
         new_sig = self.signature()
     except Errors.TaskNotReady:
         return ASK_LATER
     key = self.uid()
     try:
         prev_sig = bld.task_sigs[key]
     except KeyError:
         Logs.debug("task: task %r must run as it was never run before or the task code changed" % self)
         return RUN_ME
     # every output must exist and carry the current signature
     for node in self.outputs:
         try:
             if node.sig != new_sig:
                 return RUN_ME
         except AttributeError:
             Logs.debug("task: task %r must run as the output nodes do not exist" % self)
             return RUN_ME
     if prev_sig != new_sig:
         return RUN_ME
     return SKIP_ME
Пример #12
0
	def execute(self):
		"""Re-run the configuration step automatically when the project files changed."""
		if not Configure.autoconfig:
			return execute_method(self)

		env = ConfigSet.ConfigSet()
		do_config = False
		try:
			env.load(os.path.join(Context.top_dir, Options.lockfile))
		except Exception:
			# no usable lock file: the project was never configured
			Logs.warn('Configuring the project')
			do_config = True
		else:
			if env.run_dir != Context.run_dir:
				do_config = True
			else:
				# reconfigure when any of the files recorded at configure time changed
				h = 0
				for f in env['files']:
					h = Utils.h_list((h, Utils.readf(f, 'rb')))
				do_config = h != env.hash

		if not do_config:
			return execute_method(self)

		# schedule 'configure' to run before the requested command
		Options.commands.insert(0, self.cmd)
		Options.commands.insert(0, 'configure')
		if Configure.autoconfig == 'clobber':
			Options.options.__dict__ = env.options
		return
Пример #13
0
	def exec_command(self, cmd, **kw):
		"""
		Execute a command and return the exit status.
		If the context has a logger attribute set, the process stdout/stderr is captured and logged.

		This method should be used whenever possible for proper logging.

		@param cmd: args for subprocess.Popen
		@param kw: keyword arguments for subprocess.Popen
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)

		try:
			if not self.logger:
				proc = subprocess.Popen(cmd, **kw)
				return proc.wait()

			# warning: may deadlock with a lot of output (subprocess limitation)
			self.logger.info(cmd)
			kw['stdout'] = kw['stderr'] = subprocess.PIPE
			proc = subprocess.Popen(cmd, **kw)
			(out, err) = proc.communicate()
			if out:
				self.logger.debug('out: %s' % out.decode('utf-8'))
			if err:
				self.logger.error('err: %s' % err.decode('utf-8'))
			return proc.returncode
		except OSError:
			# command not found or not executable
			return -1
Пример #14
0
def write_compilation_database(ctx):
	"Write the clang compilation database as JSON"
	database_file = ctx.bldnode.make_node('compile_commands.json')
	Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
	try:
		root = json.load(database_file)
	except IOError:
		# no existing database: start from an empty list
		root = []
	# index existing entries by file name so re-runs update in place
	clang_db = dict((entry['file'], entry) for entry in root)
	for task in getattr(ctx, 'clang_compilation_database_tasks', []):
		try:
			cmd = task.last_cmd
		except AttributeError:
			# the task did not run, so there is no command line to record
			continue
		directory = getattr(task, 'cwd', ctx.variant_dir)
		filename = os.path.relpath(task.inputs[0].abspath(), directory)
		clang_db[filename] = {
			"directory": directory,
			"arguments": cmd,
			"file": filename,
		}
	database_file.write(json.dumps(list(clang_db.values()), indent=2))
Пример #15
0
def distclean_dir(dirname):
	"""
	Distclean function called in the particular case when::

		top == out

	:param dirname: absolute path of the folder to clean
	:type dirname: string
	"""
	# remove the generated files below dirname
	for root, _, files in os.walk(dirname):
		for name in files:
			if not _can_distclean(name):
				continue
			fname = os.path.join(root, name)
			try:
				os.remove(fname)
			except OSError:
				Logs.warn('Could not remove %r' % fname)

	# remove the build cache and the configuration log from the current directory
	for fname in (Context.DBFILE, 'config.log'):
		try:
			os.remove(fname)
		except OSError:
			pass

	# remove the configuration cache folder
	try:
		shutil.rmtree('c4che')
	except OSError:
		pass
Пример #16
0
def download_tool(tool, force=False, ctx=None):
	"""
	Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::

		$ waf configure --download

	:param tool: name of the tool to download, without the .py extension
	:param force: kept for backward compatibility (unused here)
	:param ctx: context object providing the root node
	:returns: the loaded tool module
	:raises Errors.WafError: if the tool could not be downloaded from any location
	"""
	for x in Utils.to_list(Context.remote_repo):
		for sub in Utils.to_list(Context.remote_locs):
			url = '/'.join((x, sub, tool + '.py'))
			try:
				web = urlopen(url)
				if web.getcode() != 200:
					continue
			except Exception:
				# on python3 urlopen throws an exception for bad responses
				continue
			else:
				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
				# urlopen returns bytes: write in binary mode (consistent with the
				# other download_tool implementation in this file)
				tmp.write(web.read(), 'wb')
				Logs.warn('Downloaded %s from %s' % (tool, url))
				download_check(tmp)
				try:
					# narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit propagate
					module = Context.load_tool(tool)
				except Exception:
					Logs.warn('The tool %s from %s is unusable' % (tool, url))
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
Пример #17
0
def configure(cfg):
    """Resolve the orchestration configuration files and decompose the build environments."""
    msg.debug('orch: CONFIG CALLED')

    if not cfg.options.orch_config:
        raise RuntimeError('No Orchestration configuration file given (--orch-config)')

    # each comma-separated entry is expanded as a glob pattern
    orch_config = []
    for pattern in util.string2list(cfg.options.orch_config):
        orch_config += glob(pattern.strip())

    # every resolved path must exist; report all missing files before failing
    missing = [path for path in orch_config if not os.path.exists(path)]
    for path in missing:
        msg.error('No such file: %s' % path)
    if missing or not orch_config:
        raise ValueError('missing configuration files')

    cfg.msg('Orch configuration files', '"%s"' % '", "'.join(orch_config))

    extra = dict(cfg.env)
    extra['top'] = context.top_dir
    extra['out'] = context.out_dir # usually {top}/tmp
    extra['DESTDIR'] = getattr(cfg.options, 'destdir', '')
    suite = pkgconf.load(orch_config, start = cfg.options.orch_start, **extra)

    envmunge.decompose(cfg, suite)

    cfg.msg('Orch configure envs', '"%s"' % '", "'.join(cfg.all_envs.keys()))
    bind_functions(cfg)
Пример #18
0
def _use_incredibuild(ctx, section_name, option_name, value, verification_fn):
	""" If Incredibuild should be used, check for required packages """

	# non-interactive mode: just validate the stored value
	if not ctx.is_option_true('ask_for_user_input'):
		(is_valid, _, _) = verification_fn(ctx, option_name, value)
		if not is_valid:
			return 'False'
		return value

	# GUI
	if not ctx.is_option_true('console_mode'):
		return ctx.gui_get_attribute(section_name, option_name, value)

	# the licence check only applies when enabled, and only on windows
	if not value or value != 'True':
		return value
	if not Utils.unversioned_sys_platform() == 'win32':
		return value

	_incredibuild_disclaimer(ctx)
	ctx.start_msg('Incredibuild Licence Check')
	(res, warning, error) = verification_fn(ctx, option_name, value)
	if res:
		ctx.end_msg('ok')
		return value

	if warning:
		Logs.warn(warning)
	if error:
		ctx.end_msg(error, color='YELLOW')
	return 'False'
Пример #19
0
	def isfile_cached(self):
		# optimize for nt.stat calls, assuming there are many files for few folders
		# Returns True if this node names an existing file, using a cached listing
		# of the parent folder obtained through the win32 FindFirstFile API.
		try:
			cache = self.__class__.cache_isfile_cache
		except AttributeError:
			cache = self.__class__.cache_isfile_cache = {}

		try:
			c1 = cache[id(self.parent)]
		except KeyError:
			# first query for this folder: list its files once and cache the names
			c1 = cache[id(self.parent)] = []

			curpath = self.parent.abspath()
			findData = ctypes.wintypes.WIN32_FIND_DATAW()
			find     = FindFirstFile(TP % curpath, ctypes.byref(findData))

			if find == INVALID_HANDLE_VALUE:
				# listing failed: fall back to a plain os.path.isfile check
				Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
				return os.path.isfile(self.abspath())

			try:
				while True:
					if findData.cFileName not in UPPER_FOLDERS:
						# keep only non-directory entries
						thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
						if not thatsadir:
							c1.append(str(findData.cFileName))
					if not FindNextFile(find, ctypes.byref(findData)):
						break
			except Exception as e:
				Logs.error('exception while listing a folder %r %r', self.abspath(), e)
				return os.path.isfile(self.abspath())
			finally:
				# always release the win32 find handle
				FindClose(find)
		return self.name in c1
Пример #20
0
def start(cwd, version, wafdir):
	"""
	Entry point: run the waf commands for the project located in *cwd*.

	:param cwd: folder from which the script is called; must contain a file named 'cbit'
	:param version: waf version string (not used in this function)
	:param wafdir: folder containing the waflib sources
	"""
	# simple example, the file main.c is hard-coded
	try:
		os.stat(cwd + os.sep + 'cbit')
	except OSError:
		# narrowed from a bare 'except:' - os.stat raises OSError; this way
		# KeyboardInterrupt/SystemExit are not swallowed
		print('call from a folder containing a file named "cbit"')
		sys.exit(1)

	Logs.init_log()
	Context.waf_dir = wafdir
	Context.top_dir = Context.run_dir = cwd
	Context.out_dir = os.path.join(cwd, 'build')
	Context.g_module = imp.new_module('wscript')
	Context.g_module.root_path = os.path.join(cwd, 'cbit')
	Context.Context.recurse = recurse_rep

	# this is a fake module, which looks like a standard wscript file
	Context.g_module.options = options
	Context.g_module.configure = configure
	Context.g_module.build = build

	Options.OptionsContext().execute()

	# configure when requested explicitly, or when the build folder is missing
	do_config = 'configure' in sys.argv
	try:
		os.stat(cwd + os.sep + 'build')
	except OSError:
		do_config = True
	if do_config:
		Context.create_context('configure').execute()

	if 'clean' in sys.argv:
		Context.create_context('clean').execute()
	if 'build' in sys.argv:
		Context.create_context('build').execute()
Пример #21
0
def configure(conf):
    """Choose default C++ compiler flags for debug or release builds."""
    has_custom_cxxflags = len(conf.env.CXXFLAGS) > 0
    default_flags = ['-pedantic', '-Wall', '-Wno-long-long']

    if conf.options.debug:
        conf.define('_DEBUG', 1)
        default_flags += ['-O0',
                          '-Og', # gcc >= 4.8
                          '-g3',
                          '-fcolor-diagnostics', # clang
                          '-fdiagnostics-color', # gcc >= 4.9
                          '-Werror'
                         ]
        if not has_custom_cxxflags:
            conf.add_supported_cxxflags(default_flags)
        else:
            # the user provided CXXFLAGS: report which defaults are being skipped
            skipped = [flag for flag in default_flags if flag not in conf.env.CXXFLAGS]
            if skipped:
                Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'"
                          % " ".join(conf.env.CXXFLAGS))
                Logs.warn("Default flags '%s' are not activated" % " ".join(skipped))
    else:
        default_flags += ['-O2', '-g']
        if not has_custom_cxxflags:
            conf.add_supported_cxxflags(default_flags)
Пример #22
0
		def parse_node(node):
			# Recursively scan a (La)TeX file for referenced files, appending
			# resolved nodes to 'nodes' and unresolved names to 'names'.
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			global re_tex
			for match in re_tex.finditer(code):
				# one statement may reference several comma-separated paths
				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						# try each candidate extension in turn
						for k in exts_deps_tex:
							Logs.debug('tex: trying %s%s' % (path, k))
							found = node.parent.find_resource(path + k)

							# skip candidates that are outputs of this generator's tasks
							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								# for/else: only reached when no task produces 'found'
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										# recurse into nested tex files
										parse_node(found)
										break
							# no break, people are crazy
						if add_name:
							names.append(path)
Пример #23
0
def download_tool(tool, force=False, ctx=None):
	"""Download a waf tool from one of the configured remote repositories and load it."""
	for repo in Utils.to_list(Context.remote_repo):
		for loc in Utils.to_list(Context.remote_locs):
			url = '/'.join((repo, loc, tool + '.py'))
			try:
				web = urlopen(url)
				try:
					if web.getcode() != 200:
						continue
				except AttributeError:
					# some response objects do not provide getcode
					pass
			except Exception:
				continue
			else:
				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
				tmp.write(web.read(), 'wb')
				Logs.warn('Downloaded %s from %s' % (tool, url))
				download_check(tmp)
				try:
					module = Context.load_tool(tool)
				except Exception:
					# a broken download: discard it and try the next location
					Logs.warn('The tool %s from %s is unusable' % (tool, url))
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
Пример #24
0
def distclean_dir(dirname):
	"""
	Distclean function called in the particular case when::

		top == out

	:param dirname: absolute path of the folder to clean
	:type dirname: string
	"""
	# remove the generated files below dirname
	for (root, dirs, files) in os.walk(dirname):
		for f in files:
			if _can_distclean(f):
				fname = os.path.join(root, f)
				try:
					os.unlink(fname)
				except OSError:
					# narrowed from a bare 'except:' (consistent with the other
					# distclean_dir in this file): os.unlink raises OSError, and
					# KeyboardInterrupt/SystemExit must propagate
					Logs.warn('could not remove %r' % fname)

	# the cache file and the log live in the current directory
	for x in [Context.DBFILE, 'config.log']:
		try:
			os.unlink(x)
		except OSError:
			pass

	# remove the configuration cache folder
	try:
		shutil.rmtree('c4che')
	except OSError:
		pass
Пример #25
0
 def restore(self):
     try:
         env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, "build.config.py"))
     except (IOError, OSError):
         pass
     else:
         if env["version"] < Context.HEXVERSION:
             raise Errors.WafError("Version mismatch! reconfigure the project")
         for t in env["tools"]:
             self.setup(**t)
     f = None
     try:
         try:
             f = open(os.path.join(self.variant_dir, Context.DBFILE), "rb")
         except (IOError, EOFError):
             Logs.debug("build: could not load the build cache (missing)")
         else:
             try:
                 waflib.Node.pickle_lock.acquire()
                 waflib.Node.Nod3 = self.node_class
                 try:
                     data = cPickle.load(f)
                 except Exception, e:
                     Logs.debug("build: could not load the build cache %r" % e)
                 else:
                     for x in SAVED_ATTRS:
                         setattr(self, x, data[x])
Пример #26
0
	def end_msg(self, *k, **kw):
		"""Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
		if kw.get('quiet', None):
			return
		self.in_msg -= 1
		if self.in_msg:
			# still inside a nested message: only the outermost one prints
			return

		result = kw.get('result', None) or k[0]

		# map the result to a display string and a default color
		if result == True:
			msg, defcolor = 'ok', 'GREEN'
		elif result == False:
			msg, defcolor = 'not found', 'YELLOW'
		else:
			msg, defcolor = str(result), 'GREEN'

		self.to_log(msg)
		if 'color' in kw:
			color = kw['color']
		elif len(k) > 1 and k[1] in Logs.colors_lst:
			# compatibility waf 1.7: color passed as second positional argument
			color = k[1]
		else:
			color = defcolor
		Logs.pprint(color, msg)
Пример #27
0
def build_version_files(header_path, source_path, domain, major, minor, micro, exportname, visheader):
    """
    Generate a C source file defining version variables and a matching header.

    :param header_path: path of the header file to write
    :param source_path: path of the C source file to write
    :param domain: prefix used for the generated symbol names
    :param major: major version number
    :param minor: minor version number
    :param micro: micro version number
    :param exportname: export macro prepended to the extern declarations
    :param visheader: header to #include for the export macro ('' to skip)
    :returns: None
    """
    header_path = os.path.abspath(header_path)
    source_path = os.path.abspath(source_path)

    text  = "int " + domain + "_major_version = " + str(major) + ";\n"
    text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
    text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
    try:
        # 'with' guarantees the file is closed even if write() fails
        with open(source_path, 'w') as o:
            o.write(text)
    except IOError:
        Logs.error('Failed to open %s for writing\n' % source_path)
        sys.exit(-1)

    text  = "#ifndef __" + domain + "_version_h__\n"
    text += "#define __" + domain + "_version_h__\n"
    if visheader != '':
        text += "#include \"" + visheader + "\"\n"
    text += exportname + " extern const char* " + domain + "_revision;\n"
    text += exportname + " extern int " + domain + "_major_version;\n"
    text += exportname + " extern int " + domain + "_minor_version;\n"
    text += exportname + " extern int " + domain + "_micro_version;\n"
    text += "#endif /* __" + domain + "_version_h__ */\n"
    try:
        with open(header_path, 'w') as o:
            o.write(text)
    except IOError:
        # the original logs a warning (not an error) here; behavior preserved
        Logs.warn('Failed to open %s for writing\n' % header_path)
        sys.exit(-1)

    return None
Пример #28
0
def get_ccenv(fname):
	'''Returns dictionary of variant C/C++ build environments. In which the keys
	are the name of the actual variant C/C++ build environments and its values the
	settings for that variant build environment.

	:param fname: Complete path to the configuration file.
	:type fname: str
	'''
	if not os.path.exists(fname):
		Logs.warn("CCENV: ini file '%s' not found!" % fname)
	ccenv = {}
	parser = configparser.ConfigParser()
	parser.read(fname)
	for section in parser.sections():
		# defaults for a variant environment
		settings = {'prefix' : None, 'shlib' : [], 'env' : [], 'c': ['gcc'], 'cxx': ['g++', 'cpp']}
		ccenv[section] = settings
		if parser.has_option(section, 'c'):
			settings['c'] = parser.get(section, 'c').split(',')
		if parser.has_option(section, 'cxx'):
			settings['cxx'] = parser.get(section, 'cxx').split(',')
		if parser.has_option(section, 'prefix'):
			settings['prefix'] = parser.get(section, 'prefix')
		if parser.has_option(section, 'shlib'):
			# comma-separated list, ignoring empty entries
			settings['shlib'] = [l for l in str(parser.get(section, 'shlib')).split(',') if len(l)]
		if parser.has_option(section, 'env'):
			# one tab-separated assignment per line
			settings['env'] = [l.split('\t') for l in parser.get(section, 'env').splitlines() if len(l)]
		if parser.has_option(section, 'host'):
			settings['host'] = parser.get(section, 'host')
	return ccenv
Пример #29
0
    def dl_task(task):
        # Download the patch whose URL is stored in the input node into the
        # output node, then verify its checksum when one is configured.
        # Returns 1 on checksum mismatch (task failure), None otherwise.
        src = task.inputs[0]
        tgt = task.outputs[0]
        url = src.read().strip()
        try:
            web = urlopen(url)
            tgt.write(web.read(),'wb')
        except Exception:
            import traceback
            traceback.print_exc()
            msg.error(tgen.worch.format("[{package}_dlpatch] problem downloading [{patch_urlfile}]"))
            raise

        checksum = tgen.worch.patch_checksum
        if not checksum:
            return
        # checksum format: "<hashlib algorithm name>:<expected hex digest>"
        hasher_name, ref = checksum.split(":")
        import hashlib, os
        # FIXME: check the hasher method exists. check for typos.
        hasher = getattr(hashlib, hasher_name)()
        hasher.update(tgt.read('rb'))
        data= hasher.hexdigest()
        if data != ref:
            msg.error(tgen.worch.format("[{package}_dlpatch] invalid checksum:\nref: %s\nnew: %s" %\
                                        (ref, data)))
            # remove the corrupt download so a re-run fetches it again
            try:
                os.remove(tgt.abspath())
            except IOError: 
                pass
            return 1
        return
Пример #30
0
	def load(self, filename):
		"""Retrieve the configuration-set values serialized in *filename*."""
		tbl = self.table
		code = Utils.readf(filename, m='rU')
		for match in re_imp.finditer(code):
			group = match.group
			# NOTE: the value is eval'd from the file contents -- only load trusted files
			tbl[group(2)] = eval(group(3))
		Logs.debug('env: %s' % str(self.table))
Пример #31
0
def apply_tex(self):
	"""
	Creates :py:class:`waflib.Tools.tex.tex` objects, and
	dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
	"""
	# default to pdflatex when no valid type was requested
	if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
		self.type = 'pdflatex'

	outs = Utils.to_list(getattr(self, 'outs', []))

	# prompt for incomplete files (else the batchmode is used)
	self.env.PROMPT_LATEX = getattr(self, 'prompt', 1)

	deps_lst = []

	# resolve manual dependencies given as strings or nodes
	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for dep in deps:
			if isinstance(dep, str):
				n = self.path.find_resource(dep)
				if not n:
					self.bld.fatal('Could not find %r for %r' % (dep, self))
				if not n in deps_lst:
					deps_lst.append(n)
			elif isinstance(dep, Node.Node):
				deps_lst.append(dep)

	for node in self.to_nodes(self.source):
		# one tex task per source file, by requested output type
		if self.type == 'latex':
			task = self.create_task('latex', node, node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
		elif self.type == 'xelatex':
			task = self.create_task('xelatex', node, node.change_ext('.pdf'))

		task.env = self.env

		# add the manual dependencies
		if deps_lst:
			for n in deps_lst:
				if not n in task.dep_nodes:
					task.dep_nodes.append(n)

		# texinputs is a nasty beast
		if hasattr(self, 'texinputs_nodes'):
			task.texinputs_nodes = self.texinputs_nodes
		else:
			# default search folders, plus whatever TEXINPUTS provides
			task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
			lst = os.environ.get('TEXINPUTS', '')
			if self.env.TEXINPUTS:
				lst += os.pathsep + self.env.TEXINPUTS
			if lst:
				lst = lst.split(os.pathsep)
			for x in lst:
				if x:
					if os.path.isabs(x):
						p = self.bld.root.find_node(x)
						if p:
							task.texinputs_nodes.append(p)
						else:
							Logs.error('Invalid TEXINPUTS folder %s', x)
					else:
						Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)

		# conversion tasks for the requested extra outputs
		if self.type == 'latex':
			if 'ps' in outs:
				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
				tsk.env.env = dict(os.environ)
			if 'pdf' in outs:
				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
				tsk.env.env = dict(os.environ)
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
	self.source = []
Пример #32
0
def validate_c(self, kw):
    """
	Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build`

	:param compiler: c or cxx (tries to guess what is best)
	:type compiler: string
	:param type: cprogram, cshlib, cstlib - not required if *features are given directly*
	:type type: binary to create
	:param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib``
	:type feature: list of string
	:param fragment: provide a piece of code for the test (default is to let the system create one)
	:type fragment: string
	:param uselib_store: define variables after the test is executed (IMPORTANT!)
	:type uselib_store: string
	:param use: parameters to use for building (just like the normal *use* keyword)
	:type use: list of string
	:param define_name: define to set when the check is over
	:type define_name: string
	:param execute: execute the resulting binary
	:type execute: bool
	:param define_ret: if execute is set to True, use the execution output in both the define and the return value
	:type define_ret: bool
	:param header_name: check for a particular header
	:type header_name: string
	:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
	:type auto_add_header_name: bool
	"""
    # warn about keyword arguments that are not supported here
    for x in ('type_name', 'field_name', 'function_name'):
        if x in kw:
            Logs.warn('Invalid argument %r in test' % x)

    if not 'build_fun' in kw:
        kw['build_fun'] = build_fun

    # default to a derived copy of the current environment
    if not 'env' in kw:
        kw['env'] = self.env.derive()
    env = kw['env']

    # pick the compiler: prefer c++ when a cxx task class is available
    if not 'compiler' in kw and not 'features' in kw:
        kw['compiler'] = 'c'
        if env.CXX_NAME and Task.classes.get('cxx'):
            kw['compiler'] = 'cxx'
            if not self.env.CXX:
                self.fatal('a c++ compiler is required')
        else:
            if not self.env.CC:
                self.fatal('a c compiler is required')

    if not 'compile_mode' in kw:
        kw['compile_mode'] = 'c'
        if 'cxx' in Utils.to_list(kw.get('features',
                                         [])) or kw.get('compiler') == 'cxx':
            kw['compile_mode'] = 'cxx'

    if not 'type' in kw:
        kw['type'] = 'cprogram'

    # derive the feature list from compile_mode/type unless given explicitly
    if not 'features' in kw:
        if not 'header_name' in kw or kw.get('link_header_test', True):
            kw['features'] = [kw['compile_mode'], kw['type']]  # "c ccprogram"
        else:
            kw['features'] = [kw['compile_mode']]
    else:
        kw['features'] = Utils.to_list(kw['features'])

    if not 'compile_filename' in kw:
        # test.c for c mode, test.cpp for cxx mode
        kw['compile_filename'] = 'test.c' + (
            (kw['compile_mode'] == 'cxx') and 'pp' or '')

    def to_header(dct):
        # build the '#include <...>' lines for the header_name entries
        if 'header_name' in dct:
            dct = Utils.to_list(dct['header_name'])
            return ''.join(['#include <%s>\n' % x for x in dct])
        return ''

    if 'framework_name' in kw:
        # OSX, not sure this is used anywhere
        fwkname = kw['framework_name']
        if not 'uselib_store' in kw:
            kw['uselib_store'] = fwkname.upper()
        if not kw.get('no_header'):
            fwk = '%s/%s.h' % (fwkname, fwkname)
            if kw.get('remove_dot_h'):
                fwk = fwk[:-2]
            val = kw.get('header_name', [])
            kw['header_name'] = Utils.to_list(val) + [fwk]
        kw['msg'] = 'Checking for framework %s' % fwkname
        kw['framework'] = fwkname

    elif 'header_name' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for header %s' % kw['header_name']

        l = Utils.to_list(kw['header_name'])
        assert len(l), 'list of headers in header_name is empty'

        kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
        # the first header name provides the default store/define names
        if not 'uselib_store' in kw:
            kw['uselib_store'] = l[0].upper()
        if not 'define_name' in kw:
            kw['define_name'] = self.have_define(l[0])

    if 'lib' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for library %s' % kw['lib']
        if not 'uselib_store' in kw:
            kw['uselib_store'] = kw['lib'].upper()

    if 'stlib' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for static library %s' % kw['stlib']
        if not 'uselib_store' in kw:
            kw['uselib_store'] = kw['stlib'].upper()

    if 'fragment' in kw:
        # an additional code fragment may be provided to replace the predefined code
        # in custom headers
        kw['code'] = kw['fragment']
        if not 'msg' in kw:
            kw['msg'] = 'Checking for code snippet'
        if not 'errmsg' in kw:
            kw['errmsg'] = 'no'

    # default messages for flag checks
    for (flagsname, flagstype) in (('cxxflags', 'compiler'),
                                   ('cflags', 'compiler'), ('linkflags',
                                                            'linker')):
        if flagsname in kw:
            if not 'msg' in kw:
                kw['msg'] = 'Checking for %s flags %s' % (flagstype,
                                                          kw[flagsname])
            if not 'errmsg' in kw:
                kw['errmsg'] = 'no'

    if not 'execute' in kw:
        kw['execute'] = False
    if kw['execute']:
        # executing the binary requires the test_exec feature and an executable file
        kw['features'].append('test_exec')
        kw['chmod'] = Utils.O755

    if not 'errmsg' in kw:
        kw['errmsg'] = 'not found'

    if not 'okmsg' in kw:
        kw['okmsg'] = 'yes'

    if not 'code' in kw:
        kw['code'] = SNIP_EMPTY_PROGRAM

    # if there are headers to append automatically to the next tests
    if self.env[INCKEYS]:
        kw['code'] = '\n'.join(
            ['#include <%s>' % x
             for x in self.env[INCKEYS]]) + '\n' + kw['code']

    # in case defines lead to very long command-lines
    if kw.get('merge_config_header') or env.merge_config_header:
        kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
        env.DEFINES = []  # modify the copy

    if not kw.get('success'):
        kw['success'] = None

    if 'define_name' in kw:
        self.undefine(kw['define_name'])
    if not 'msg' in kw:
        self.fatal('missing "msg" in conf.check(...)')
Пример #33
0
def multicheck(self, *k, **kw):
    """
    Runs configuration tests in parallel; results are printed sequentially at the end of the build
    but each test must provide its own msg value to display a line::

        def test_build(ctx):
            ctx.in_msg = True # suppress console outputs
            ctx.check_large_file(mandatory=False)

        conf.multicheck(
            {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
            {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
            {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
            {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
            msg       = 'Checking for headers in parallel',
            mandatory = True, # mandatory tests raise an error at the end
            run_all_tests = True, # try running all tests
        )

    The configuration tests may modify the values in conf.env in any order, and the define
    values can affect configuration tests being executed. It is hence recommended
    to provide `uselib_store` values with `global_define=False` to prevent such issues.

    :param k: one dict of check() keyword arguments per configuration test
    :param kw: global options: msg, errmsg, fatalmsg, mandatory, run_all_tests
    :raises: :py:class:`waflib.Errors.WafError` on internal errors, or via self.fatal when a mandatory test fails
    """
    self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)),
                   **kw)

    # Force a copy so that threads append to the same list at least
    # no order is guaranteed, but the values should not disappear at least
    for var in ('DEFINES', DEFKEYS):
        self.env.append_value(var, [])
    self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}

    # minimal build-context stand-in so 'cfgtask' instances can run
    # without a real BuildContext
    class par(object):
        def __init__(self):
            self.keep = False
            self.task_sigs = {}
            self.progress_bar = 0

        def total(self):
            return len(tasks)

        def to_log(self, *k, **kw):
            return

    bld = par()
    bld.keep = kw.get('run_all_tests', True)
    bld.imp_sigs = {}
    tasks = []

    id_to_task = {}
    for dct in k:
        x = Task.classes['cfgtask'](bld=bld, env=None)
        tasks.append(x)
        x.args = dct  # fixed: the original assigned x.args twice
        x.bld = bld
        x.conf = self

        # bind a logger that will keep the info in memory
        x.logger = Logs.make_mem_logger(str(id(x)), self.logger)

        if 'id' in dct:
            id_to_task[dct['id']] = x

    # second pass to set dependencies with after_test/before_test
    for x in tasks:
        for key in Utils.to_list(x.args.get('before_tests', [])):
            # fixed: use .get() so a missing id raises the intended
            # ValueError instead of an unhelpful KeyError
            tsk = id_to_task.get(key)
            if not tsk:
                raise ValueError('No test named %r' % key)
            tsk.run_after.add(x)
        for key in Utils.to_list(x.args.get('after_tests', [])):
            tsk = id_to_task.get(key)
            if not tsk:
                raise ValueError('No test named %r' % key)
            x.run_after.add(tsk)

    def it():
        # produce all tasks in one batch, then signal exhaustion
        yield tasks
        while 1:
            yield []

    bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
    bld.multicheck_lock = Utils.threading.Lock()
    p.biter = it()

    self.end_msg('started')
    p.start()

    # flush the logs in order into the config.log
    for x in tasks:
        x.logger.memhandler.flush()

    self.start_msg('-> processing test results')
    if p.error:
        for x in p.error:
            if getattr(x, 'err_msg', None):
                self.to_log(x.err_msg)
                self.end_msg('fail', color='RED')
                raise Errors.WafError(
                    'There is an error in the library, read config.log for more information'
                )

    failure_count = 0
    for x in tasks:
        if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
            failure_count += 1

    if failure_count:
        self.end_msg(kw.get('errmsg', '%s test failed' % failure_count),
                     color='YELLOW',
                     **kw)
    else:
        self.end_msg('all ok', **kw)

    for x in tasks:
        if x.hasrun != Task.SUCCESS:
            if x.args.get('mandatory', True):
                self.fatal(
                    kw.get('fatalmsg') or
                    'One of the tests has failed, read config.log for more information'
                )
Пример #34
0
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
    """
    Runs the preprocessor to determine the gcc/icc/clang version

    The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*

    :param conf: configuration context
    :param cc: compiler command line as a list (e.g. conf.env.CC)
    :param gcc: fail unless the compiler defines __GNUC__ or __clang__
    :param icc: fail unless the compiler defines __INTEL_COMPILER
    :param clang: fail unless the compiler defines __clang__
    :return: dict mapping predefined macro names to their values
    :raise: :py:class:`waflib.Errors.ConfigurationError`
    """
    # '-dM -E -' dumps all predefined macros while preprocessing empty stdin
    cmd = cc + ['-dM', '-E', '-']
    env = conf.env.env or None
    try:
        out, err = conf.cmd_and_log(cmd,
                                    output=0,
                                    input='\n'.encode(),
                                    env=env)
    except Errors.WafError:
        conf.fatal('Could not determine the compiler version %r' % cmd)

    if gcc:
        # icc imitates gcc's predefined macros, so detect it explicitly first
        if out.find('__INTEL_COMPILER') >= 0:
            conf.fatal('The intel compiler pretends to be gcc')
        if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
            conf.fatal('Could not determine the compiler type')

    if icc and out.find('__INTEL_COMPILER') < 0:
        conf.fatal('Not icc/icpc')

    if clang and out.find('__clang__') < 0:
        conf.fatal('Not clang/clang++')
    if not clang and out.find('__clang__') >= 0:
        conf.fatal(
            'Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure'
        )

    k = {}
    if icc or gcc or clang:
        # each output line is '#define NAME VALUE'; collect NAME -> VALUE
        out = out.splitlines()
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

        def isD(var):
            # whether the macro *var* was defined by the preprocessor
            return var in k

        # Some documentation is available at http://predef.sourceforge.net
        # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
        if not conf.env.DEST_OS:
            conf.env.DEST_OS = ''
        for i in MACRO_TO_DESTOS:
            if isD(i):
                conf.env.DEST_OS = MACRO_TO_DESTOS[i]
                break
        else:
            # no known OS macro matched: fall back to generic detection
            if isD('__APPLE__') and isD('__MACH__'):
                conf.env.DEST_OS = 'darwin'
            elif isD('__unix__'
                     ):  # unix must be tested last as it's a generic fallback
                conf.env.DEST_OS = 'generic'

        if isD('__ELF__'):
            conf.env.DEST_BINFMT = 'elf'
        elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
            conf.env.DEST_BINFMT = 'pe'
            if not conf.env.IMPLIBDIR:
                conf.env.IMPLIBDIR = conf.env.LIBDIR  # for .lib or .dll.a files
            conf.env.LIBDIR = conf.env.BINDIR
        elif isD('__APPLE__'):
            conf.env.DEST_BINFMT = 'mac-o'

        if not conf.env.DEST_BINFMT:
            # Infer the binary format from the os name.
            conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

        for i in MACRO_TO_DEST_CPU:
            if isD(i):
                conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
                break

        Logs.debug('ccroot: dest platform: ' + ' '.join([
            conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')
        ]))
        if icc:
            # e.g. '1210' is split into ('12', '1', '0')
            ver = k['__INTEL_COMPILER']
            conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
        else:
            if isD('__clang__') and isD('__clang_major__'):
                conf.env.CC_VERSION = (k['__clang_major__'],
                                       k['__clang_minor__'],
                                       k['__clang_patchlevel__'])
            else:
                # older clang versions and gcc
                conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'],
                                       k.get('__GNUC_PATCHLEVEL__', '0'))
    return k
Пример #35
0
def check_same_targets(self):
    """
    Error-check pass over all task groups: reports (via Logs.error) distinct
    tasks that create the same output node, and distinct tasks that share
    the same uid (identifier collision). Does not raise.
    """
    mp = Utils.defaultdict(list)
    uids = {}

    def check_task(tsk):
        # only real Task instances have outputs/uid worth checking
        if not isinstance(tsk, Task.Task):
            return
        # tasks may opt out of the duplicate-output check
        if hasattr(tsk, 'no_errcheck_out'):
            return

        for node in tsk.outputs:
            mp[node].append(tsk)
        # group tasks by uid to detect identifier collisions
        uids.setdefault(tsk.uid(), []).append(tsk)

    for g in self.groups:
        for tg in g:
            try:
                for tsk in tg.tasks:
                    check_task(tsk)
            except AttributeError:
                # raised if not a task generator, which should be uncommon
                check_task(tg)

    dupe = False
    for (k, v) in mp.items():
        if len(v) > 1:
            dupe = True
            msg = '* Node %r is created more than once%s. The task generators are:' % (
                k, Logs.verbose == 1 and " (full message on 'waf -v -v')"
                or "")
            Logs.error(msg)
            for x in v:
                if Logs.verbose > 1:
                    Logs.error('  %d. %r', 1 + v.index(x), x.generator)
                else:
                    Logs.error('  %d. %r in %r', 1 + v.index(x),
                               x.generator.name,
                               getattr(x.generator, 'path', None))
            Logs.error(
                'If you think that this is an error, set no_errcheck_out on the task instance'
            )

    # only look for uid collisions when there are no duplicate outputs,
    # as duplicate outputs usually cause the uid collisions in the first place
    if not dupe:
        for (k, v) in uids.items():
            if len(v) > 1:
                Logs.error(
                    '* Several tasks use the same identifier. Please check the information on\n   https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid'
                )
                for tsk in v:
                    # fixed: compute the generator details from each colliding
                    # task instead of a stale variable left over from the loops above
                    tg_details = tsk.generator.name
                    if Logs.verbose > 2:
                        tg_details = tsk.generator
                    Logs.error('  - object %r (%r) defined in %r',
                               tsk.__class__.__name__, tsk, tg_details)
Пример #36
0
 def is_before(t1, t2):
     """Delegate to the wrapped predicate, warning when both orders hold at once."""
     forward = old(t1, t2)
     if forward:
         # only probe the reverse direction when the forward one holds
         if old(t2, t1):
             Logs.error('Contradictory order constraints in classes %r %r', t1,
                        t2)
     return forward
Пример #37
0
def gather_intel_composer_versions(conf, versions):
    """
    Checks ICL compilers that are part of Intel Composer Suites (Windows registry based)

    :param conf: configuration context, used to call get_msvc_version()
    :param versions: list to modify in place; entries are ('intel <major>', targets)
    :type versions: list
    """
    # fixed: raw string literal; '\.' is an invalid escape sequence otherwise
    version_pattern = re.compile(r'^...?.?\...?.?.?')
    try:
        all_versions = Utils.winreg.OpenKey(
            Utils.winreg.HKEY_LOCAL_MACHINE,
            'SOFTWARE\\Wow6432node\\Intel\\Suites')
    except WindowsError:
        # fall back to the non-WoW64 registry view
        try:
            all_versions = Utils.winreg.OpenKey(
                Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
        except WindowsError:
            return  # no Intel suite installed at all
    index = 0
    while 1:
        try:
            version = Utils.winreg.EnumKey(all_versions, index)
        except WindowsError:
            break  # no more registry subkeys
        index = index + 1
        if not version_pattern.match(version):
            continue
        targets = []
        for target, arch in all_icl_platforms:
            try:
                if target == 'intel64':
                    target_dir = 'EM64T_NATIVE'
                else:
                    target_dir = target
                try:
                    defaults = Utils.winreg.OpenKey(
                        all_versions,
                        version + '\\Defaults\\C++\\' + target_dir)
                except WindowsError:
                    if target_dir == 'EM64T_NATIVE':
                        # older suites use 'EM64T' instead of 'EM64T_NATIVE'
                        defaults = Utils.winreg.OpenKey(
                            all_versions, version + '\\Defaults\\C++\\EM64T')
                    else:
                        raise WindowsError
                uid, _ = Utils.winreg.QueryValueEx(defaults, 'SubKey')
                Utils.winreg.OpenKey(
                    all_versions, version + '\\' + uid + '\\C++\\' + target_dir)
                icl_version = Utils.winreg.OpenKey(
                    all_versions, version + '\\' + uid + '\\C++')
                path, _ = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
                batch_file = os.path.join(path, 'bin', 'iclvars.bat')
                if os.path.isfile(batch_file):
                    try:
                        targets.append(
                            (target,
                             (arch,
                              conf.get_msvc_version('intel', version, target,
                                                    batch_file))))
                    except conf.errors.ConfigurationError:
                        # this target/arch combination cannot be set up; skip it
                        pass
                # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
                # http://software.intel.com/en-us/forums/topic/328487
                compilervars_warning_attr = '_compilervars_warning_key'
                if version[0:2] == '13' and getattr(
                        conf, compilervars_warning_attr, True):
                    # warn at most once per configuration run
                    setattr(conf, compilervars_warning_attr, False)
                    patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
                    compilervars_arch = os.path.join(path, 'bin',
                                                     'compilervars_arch.bat')
                    vs_express_path = os.environ[
                        'VS110COMNTOOLS'] + r'..\IDE\VSWinExpress.exe'
                    dev_env_path = os.environ[
                        'VS110COMNTOOLS'] + r'..\IDE\devenv.exe'
                    if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'
                            in Utils.readf(compilervars_arch)
                            and not os.path.exists(vs_express_path)
                            and not os.path.exists(dev_env_path)):
                        # fixed: the original message was missing the space
                        # between '%r' and 'is patched'
                        Logs.warn((
                            'The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
                            '(VSWinExpress.exe) but it does not seem to be installed at %r. '
                            'The intel command line set up will fail to configure unless the file %r '
                            'is patched. See: %s') %
                                  (vs_express_path, compilervars_arch,
                                   patch_url))
            except WindowsError:
                # missing registry key for this target: try the next platform
                pass
        major = version[0:2]
        versions.append(('intel ' + major, targets))
Пример #38
0
    def cmd_and_log(self, cmd, **kw):
        """
		Executes a process and returns stdout/stderr if the execution is successful.
		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
		will be bound to the WafError object::

			def configure(conf):
				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
				try:
					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
				except Exception as e:
					print(e.stdout, e.stderr)

		:param cmd: args for subprocess.Popen
		:type cmd: list or string
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		:type kw: dict
		:returns: the command standard output, standard error, or the (stdout, stderr) tuple, depending on the *output* parameter (STDOUT by default)
		:rtype: string or tuple of strings
		:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
		:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
		"""
        subprocess = Utils.subprocess
        # a plain string command implies shell execution
        kw['shell'] = isinstance(cmd, str)
        Logs.debug('runner: %r', cmd)

        # 'quiet' selects which captured streams are NOT echoed to the log
        if 'quiet' in kw:
            quiet = kw['quiet']
            del kw['quiet']
        else:
            quiet = None

        # 'output' selects the return value: STDOUT (default), STDERR or BOTH
        if 'output' in kw:
            to_ret = kw['output']
            del kw['output']
        else:
            to_ret = STDOUT

        if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError('Program %r not found!' % cmd[0])

        kw['stdout'] = kw['stderr'] = subprocess.PIPE
        if quiet is None:
            self.to_log(cmd)

        # cargs holds the arguments forwarded to communicate()/wait()
        # rather than the Popen constructor
        cargs = {}
        if 'timeout' in kw:
            if sys.hexversion >= 0x3030000:
                cargs['timeout'] = kw['timeout']
                if not 'start_new_session' in kw:
                    # NOTE(review): presumably to allow terminating the whole
                    # process group on timeout — confirm
                    kw['start_new_session'] = True
            del kw['timeout']
        if 'input' in kw:
            if kw['input']:
                cargs['input'] = kw['input']
                kw['stdin'] = subprocess.PIPE
            del kw['input']

        if 'cwd' in kw:
            # accept path-like objects (e.g. waf Nodes) as well as strings
            if not isinstance(kw['cwd'], str):
                kw['cwd'] = kw['cwd'].abspath()

        try:
            ret, out, err = Utils.run_process(cmd, kw, cargs)
        except Exception as e:
            raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

        # decode captured bytes; 'replace' avoids UnicodeDecodeError on odd output
        if not isinstance(out, str):
            out = out.decode(sys.stdout.encoding or 'iso8859-1',
                             errors='replace')
        if not isinstance(err, str):
            err = err.decode(sys.stdout.encoding or 'iso8859-1',
                             errors='replace')

        if out and quiet != STDOUT and quiet != BOTH:
            self.to_log('out: %s' % out)
        if err and quiet != STDERR and quiet != BOTH:
            self.to_log('err: %s' % err)

        if ret:
            # non-zero exit status: raise with the captured streams attached
            e = Errors.WafError('Command %r returned %r' % (cmd, ret))
            e.returncode = ret
            e.stderr = err
            e.stdout = out
            raise e

        if to_ret == BOTH:
            return (out, err)
        elif to_ret == STDERR:
            return err
        return out
Пример #39
0
def waf_entry_point(current_directory, version, wafdir):
	"""
	This is the main entry point, all waf execution starts here.

	:param current_directory: absolute path representing the current directory
	:param version: version number of the calling waf script
	:param wafdir: absolute path representing the directory of the waf library
	"""
	Logs.init_log()
	if Context.WAFVERSION != version:
		Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
		sys.exit(1)
	if '--version' in sys.argv:
		# --version is handled without looking for a wscript file
		Context.run_dir = current_directory
		ctx = Context.create_context('options')
		ctx.curdir = current_directory
		ctx.parse_args()
		sys.exit(0)
	if len(sys.argv) > 1:
		# allow 'waf path/to/wscript' to set the project directory
		potential_wscript = os.path.join(current_directory, sys.argv[1])
		if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
			current_directory = os.path.normpath(os.path.dirname(potential_wscript))
			sys.argv.pop(1)
	Context.waf_dir = wafdir
	Context.launch_dir = current_directory
	# climb up the folder hierarchy unless NOCLIMB is set or a no-climb command was given
	no_climb = os.environ.get('NOCLIMB', None)
	if not no_climb:
		for k in no_climb_commands:
			for y in sys.argv:
				if y.startswith(k):
					no_climb = True
					break
	cur = current_directory
	while cur:
		lst = os.listdir(cur)
		if Options.lockfile in lst:
			env = ConfigSet.ConfigSet()
			try:
				env.load(os.path.join(cur, Options.lockfile))
				ino = os.stat(cur)[stat.ST_INO]
			except Exception:
				pass
			else:
				# compare the directory inodes (stable across symlinks on posix)
				for x in (env.run_dir, env.top_dir, env.out_dir):
					if Utils.is_win32:
						if cur == x:
							load = True
							break
					else:
						try:
							ino2 = os.stat(x)[stat.ST_INO]
						except OSError:
							pass
						else:
							if ino == ino2:
								load = True
								break
				else:
					Logs.warn('invalid lock file in %s' % cur)
					load = False
				if load:
					Context.run_dir = env.run_dir
					Context.top_dir = env.top_dir
					Context.out_dir = env.out_dir
					break
		if not Context.run_dir:
			if Context.WSCRIPT_FILE in lst:
				Context.run_dir = cur
		next = os.path.dirname(cur)
		if next == cur:
			break  # reached the filesystem root
		cur = next
		if no_climb:
			break
	if not Context.run_dir:
		if '-h' in sys.argv or '--help' in sys.argv:
			Logs.warn('No wscript file found: the help message may be incomplete')
			Context.run_dir = current_directory
			ctx = Context.create_context('options')
			ctx.curdir = current_directory
			ctx.parse_args()
			sys.exit(0)
		Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
		sys.exit(1)
	try:
		os.chdir(Context.run_dir)
	except OSError:
		Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
		sys.exit(1)
	try:
		set_main_module(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
	except Errors.WafError as e:
		# fixed: 'except X, e' is Python 2 only syntax (SyntaxError on Python 3)
		Logs.pprint('RED', e.verbose_msg)
		Logs.error(str(e))
		sys.exit(1)
Пример #40
0
	def scan(self):
		"""
		Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

		Depending on your needs you might want:

		* to change re_tex::

			from waflib.Tools import tex
			tex.re_tex = myregex

		* or to change the method scan from the latex tasks::

			from waflib.Task import classes
			classes['latex'].scan = myscanfunction

		:return: a tuple (nodes, names): dependency nodes found on disk, and raw names that could not be resolved
		"""
		node = self.inputs[0]

		nodes = []  # dependency nodes resolved on disk
		names = []  # unresolved dependency names
		seen = []   # nodes already parsed, prevents infinite include cycles
		if not node: return (nodes, names)

		def parse_node(node):
			# scan one tex file for references, recursing into included tex files
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			# module-level regex, declared global so user replacements are picked up
			global re_tex
			for match in re_tex.finditer(code):

				# 'bibliographyNN' commands (multibib) are tracked separately;
				# 'bibliographystyle' is not a file dependency and is skipped
				multibib = match.group('type')
				if multibib and multibib.startswith('bibliography'):
					multibib = multibib[len('bibliography'):]
					if multibib.startswith('style'):
						continue
				else:
					multibib = None

				# a single command may reference several comma-separated files
				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						for k in exts_deps_tex:

							# issue 1067, scan in all texinputs folders
							for up in self.texinputs_nodes:
								Logs.debug('tex: trying %s%s', path, k)
								found = up.find_resource(path + k)
								if found:
									break


							# for/else: record the node only when it was found and is
							# not an output of a task of this generator
							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										parse_node(found)
										break

							# multibib stuff
							if found and multibib and found.name.endswith('.bib'):
								try:
									self.multibibs.append(found)
								except AttributeError:
									self.multibibs = [found]

							# no break, people are crazy
						if add_name:
							names.append(path)
		parse_node(node)

		# make sure the build directories exist for the nodes found
		for x in nodes:
			x.parent.get_bld().mkdir()

		Logs.debug("tex: found the following : %s and names %s", nodes, names)
		return (nodes, names)
Пример #41
0
def process_valadoc(self):
    """
    Generate API documentation from Vala source code with valadoc

    doc = bld(
        features = 'valadoc',
        output_dir = '../doc/html',
        package_name = 'vala-gtk-example',
        package_version = '1.0.0',
        packages = 'gtk+-2.0',
        vapi_dirs = '../vapi',
        force = True
    )

    path = bld.path.find_dir ('../src')
    doc.files = path.ant_glob (incl='**/*.vala')

    :raises: :py:class:`waflib.Errors.WafError` when a mandatory attribute
        (output_dir, doclet, package_name or files) is missing
    """

    task = self.create_task('valadoc')
    if getattr(self, 'output_dir', None):
        task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
    else:
        # fixed: the exceptions below were instantiated but never raised,
        # so missing mandatory attributes were silently ignored
        raise Errors.WafError('no output directory')
    if getattr(self, 'doclet', None):
        task.doclet = self.doclet
    else:
        raise Errors.WafError('no doclet directory')
    if getattr(self, 'package_name', None):
        task.package_name = self.package_name
    else:
        raise Errors.WafError('no package name')
    if getattr(self, 'package_version', None):
        task.package_version = self.package_version
    if getattr(self, 'packages', None):
        task.packages = Utils.to_list(self.packages)
    if getattr(self, 'vapi_dirs', None):
        vapi_dirs = Utils.to_list(self.vapi_dirs)
        for vapi_dir in vapi_dirs:
            try:
                task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
            except AttributeError:
                # find_dir() returned None: the directory does not exist
                Logs.warn("Unable to locate Vala API directory: '%s'" %
                          vapi_dir)
    if getattr(self, 'files', None):
        task.files = self.files
    else:
        raise Errors.WafError('no input file')
    if getattr(self, 'protected', None):
        task.protected = self.protected
    if getattr(self, 'private', None):
        task.private = self.private
    if getattr(self, 'inherit', None):
        task.inherit = self.inherit
    if getattr(self, 'deps', None):
        task.deps = self.deps
    if getattr(self, 'vala_defines', None):
        task.vala_defines = Utils.to_list(self.vala_defines)
    if getattr(self, 'enable_non_null_experimental', None):
        task.enable_non_null_experimental = self.enable_non_null_experimental
    if getattr(self, 'force', None):
        task.force = self.force
Пример #42
0
    def exec_command(self, cmd, **kw):
        """
		Runs an external process and returns the exit status::

			def run(tsk):
				ret = tsk.generator.bld.exec_command('touch foo.txt')
				return ret

		If the context has the attribute 'log', then captures and logs the process stderr/stdout.
		Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
		stdout/stderr values captured.

		:param cmd: command argument for subprocess.Popen
		:type cmd: string or list
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		:type kw: dict
		:returns: process exit status
		:rtype: integer
		"""
        subprocess = Utils.subprocess
        kw['shell'] = isinstance(cmd, str)
        Logs.debug('runner: %r', cmd)
        Logs.debug('runner_env: kw=%s', kw)

        if self.logger:
            self.logger.info(cmd)

        if 'stdout' not in kw:
            kw['stdout'] = subprocess.PIPE
        if 'stderr' not in kw:
            kw['stderr'] = subprocess.PIPE

        if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError('Program %s not found!' % cmd[0])

        cargs = {}
        if 'timeout' in kw:
            if sys.hexversion >= 0x3030000:
                cargs['timeout'] = kw['timeout']
                if not 'start_new_session' in kw:
                    kw['start_new_session'] = True
            del kw['timeout']
        if 'input' in kw:
            if kw['input']:
                cargs['input'] = kw['input']
                kw['stdin'] = subprocess.PIPE
            del kw['input']

        if 'cwd' in kw:
            if not isinstance(kw['cwd'], str):
                kw['cwd'] = kw['cwd'].abspath()

        try:
            ret, out, err = Utils.run_process(cmd, kw, cargs)
        except Exception as e:
            raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

        if out:
            if not isinstance(out, str):
                out = out.decode(sys.stdout.encoding or 'iso8859-1',
                                 errors='replace')
            if self.logger:
                self.logger.debug('out: %s', out)
            else:
                Logs.info(out, extra={'stream': sys.stdout, 'c1': ''})
        if err:
            if not isinstance(err, str):
                err = err.decode(sys.stdout.encoding or 'iso8859-1',
                                 errors='replace')
            if self.logger:
                self.logger.error('err: %s' % err)
            else:
                Logs.info(err, extra={'stream': sys.stderr, 'c1': ''})

        return ret
Пример #43
0
    def execute(self):
        """
        Wraps :py:func:`waflib.Context.Context.execute` on the context class:
        re-runs the configuration command first when autoconfig detects that
        the stored configuration is missing or out of date.
        """
        if not Configure.autoconfig:
            return execute_method(self)

        # Disable autoconfig so waf's version doesn't run (and don't end up on loop of bad configure)
        Configure.autoconfig = False

        if self.variant == '':
            raise Errors.WafError(
                'The project is badly configured: run "waf configure" again!')

        env = ConfigSet.ConfigSet()
        try:
            cache_path = os.path.join(Context.out_dir, Build.CACHE_DIR,
                                      self.variant + Build.CACHE_SUFFIX)
            env.load(cache_path)
        except EnvironmentError:
            raise Errors.WafError(
                'The project is not configured for board {0}: run "waf configure --board {0} [...]" first!'
                .format(self.variant))

        def config_out_of_date():
            # hash every registered configuration file; an unreadable file
            # forces a reconfigure
            h = 0
            for f in env.CONFIGURE_FILES:
                try:
                    h = Utils.h_list((h, Utils.readf(f, 'rb')))
                except EnvironmentError:
                    return True
            return h != env.CONFIGURE_HASH

        do_config = False
        lock_env = ConfigSet.ConfigSet()
        try:
            lock_env.load(os.path.join(Context.top_dir, Options.lockfile))
        except EnvironmentError:
            Logs.warn('Configuring the project')
            do_config = True
        else:
            if lock_env.run_dir != Context.run_dir:
                do_config = True
            else:
                do_config = config_out_of_date()

        if not do_config:
            return execute_method(self)

        cmd = lock_env.config_cmd or 'configure'
        saved_options = Options.options.__dict__

        # replay the stored configure options, but only when the option set
        # still matches the current waf version's option set
        if env.OPTIONS and sorted(env.OPTIONS.keys()) == sorted(
                saved_options.keys()):
            Options.options.__dict__ = env.OPTIONS
        else:
            raise Errors.WafError(
                'The project configure options have changed: run "waf configure" again!'
            )

        try:
            run_command(cmd)
        finally:
            Options.options.__dict__ = saved_options

        run_command(self.cmd)
Пример #44
0
			sys.exit(0)
		Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE)
		sys.exit(1)
	try:
		os.chdir(Context.run_dir)
	except OSError:
		Logs.error('Waf: The folder %r is unreadable'%Context.run_dir)
		sys.exit(1)
	try:
		set_main_module(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))
	except Errors.WafError ,e:
		Logs.pprint('RED',e.verbose_msg)
		Logs.error(str(e))
		sys.exit(1)
	except Exception ,e:
		Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e)
		traceback.print_exc(file=sys.stdout)
		sys.exit(2)
	try:
		run_commands()
	except Errors.WafError ,e:
		if Logs.verbose>1:
			Logs.pprint('RED',e.verbose_msg)
		Logs.error(e.msg)
		sys.exit(1)
	except SystemExit:
		raise
	except Exception ,e:
		traceback.print_exc(file=sys.stdout)
		sys.exit(2)
	except KeyboardInterrupt:
Пример #45
0
def can_retrieve_cache(self):
    """
    Used by :py:meth:`waflib.Task.cache_outputs`

    Retrieve build nodes from the cache
    update the file timestamps to help cleaning the least used entries from the cache
    additionally, set an attribute 'cached' to avoid re-creating the same cache files

    Suppose there are files in `cache/dir1/file1` and `cache/dir2/file2`:

    #. read the timestamp of dir1
    #. try to copy the files
    #. look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
    #. should an exception occur, ignore the data

    :return: True if every output node was restored from the artifacts cache, False otherwise
    :rtype: bool
    """
    bld = self.generator.bld
    # the artifacts cache only applies to real build contexts
    if not isinstance(bld, Build.BuildContext):
        return False

    # no outputs -> nothing that could be restored
    if not getattr(self, 'outputs', None):
        return False

    # only attempt cache retrieval once per task instance
    if not hasattr(self, 'can_retrieve_cache_checked'):
        self.can_retrieve_cache_checked = True
    else:
        return False

    bld.artifacts_cache_metrics.tasks_processed.add(self)

    sig = self.signature()
    ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)

    # first try to access the cache folder for the task
    dname = os.path.join(bld.artifacts_cache, ssig)
    if not os.path.exists(dname):
        bld.artifacts_cache_metrics.tasks_missed.add(self)
        return False

    for node in self.outputs:
        orig = os.path.join(dname, node.name)
        # Maximum Path Length Limitation on Windows is 260 characters, starting from Windows 10, we can enable long path to remove this limitation.
        # In case long path is not enabled, extended-length path to bypass this limitation.
        orig = Utils.extended_path(orig)
        try:
            # timestamp before the copy; compared again afterwards to detect concurrent cache updates
            t1 = os.stat(orig).st_mtime
        except OSError:
            bld.artifacts_cache_metrics.tasks_missed.add(self)
            return False
        dir_name = os.path.dirname(node.abspath())
        try:
            os.makedirs(dir_name)
        except Exception:
            # the directory most likely exists already
            pass

        try:
            # Do not use shutil.copy2(orig, node.abspath()), otherwise, it will cause threading issue with compiler and linker.
            # shutil.copy2() first calls shutil.copyfile() to copy the file contents, and then calls os.copystat() to copy the file stats, after the file contents are copied, waf is able to get the node's signature and might think the runnable status of a task is ready to run, but the copied file is then opened by os.copystat(), and compiler or linker who use the copied file as input file will fail.
            # BUGFIX: quote both paths so the shell command survives spaces in file names.
            if Utils.is_win32:
                os.system('copy "{}" "{}" /Y>nul'.format(orig, node.abspath()))
            else:
                os.system('cp "{}" "{}"'.format(orig, node.abspath()))
            # is it the same file?
            try:
                t2 = os.stat(orig).st_mtime
                if t1 != t2:
                    # mtime changed during the copy: another process updated the cache entry
                    bld.artifacts_cache_metrics.tasks_failed_to_retrieve.add(
                        self)
                    return False
            except OSError:
                bld.artifacts_cache_metrics.tasks_failed_to_retrieve.add(self)
                return False
        except Exception as e:
            Logs.warn(
                '[WARN] task: failed retrieving file {} due to exception\n{}\n'
                .format(node.abspath(), e))
            bld.artifacts_cache_metrics.tasks_failed_to_retrieve.add(self)
            return False

    # all copies succeeded: mark outputs as up to date with the task signature
    for node in self.outputs:
        node.sig = sig
        if bld.progress_bar < 1:
            bld.to_log('restoring from cache %r\n' % node.abspath())

    # mark the cache file folder as used recently (modified)
    os.utime(dname, None)

    self.cached = True
    return True
Пример #46
0
 def write(self):
     """Render the project template for this generator and write it to disk."""
     Logs.debug('msvs: creating %r' % self.path)
     render = compile_template(self.project_template)
     content = rm_blank_lines(render(self))
     self.path.stealth_write(content)
Пример #47
0
def configure(self):
    """
    Besides the configuration options, the environment variable QT5_ROOT may be used
    to give the location of the qt5 libraries (absolute path).

    The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
    """
    # run the detection steps in their required order
    self.find_qt5_binaries()
    self.set_qt5_libs_to_check()
    self.set_qt5_defines()
    self.find_qt5_libraries()
    self.add_qt5_rpath()
    self.simplify_qt5_libs()

    # warn about this during the configuration too
    if not has_xml:
        Logs.error(
            'No xml.sax support was found, rcc dependencies will be incomplete!'
        )

    if 'COMPILER_CXX' not in self.env:
        self.fatal(
            'No CXX compiler defined: did you forget to configure compiler_cxx first?'
        )

    # Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
    frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
    uses = 'QT5CORE QT5WIDGETS QT5GUI'
    candidate_flags = [[], '-fPIE', '-fPIC', '-std=c++11',
                       ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]
    compiled = False
    for cxxflags in candidate_flags:
        msg = 'See if Qt files compile '
        if cxxflags:
            msg += 'with %s' % cxxflags
        try:
            self.check(features='qt5 cxx',
                       use=uses,
                       uselib_store='qt5',
                       cxxflags=cxxflags,
                       fragment=frag,
                       msg=msg)
        except self.errors.ConfigurationError:
            # this flag combination does not work, try the next one
            continue
        compiled = True
        break
    if not compiled:
        self.fatal('Could not build a simple Qt application')

    # FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
    from waflib import Utils
    if Utils.unversioned_sys_platform() == 'freebsd':
        frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
        try:
            self.check(features='qt5 cxx cxxprogram',
                       use=uses,
                       fragment=frag,
                       msg='Can we link Qt programs on FreeBSD directly?')
        except self.errors.ConfigurationError:
            # direct linking failed, retry with the conventional local lib path
            self.check(features='qt5 cxx cxxprogram',
                       use=uses,
                       uselib_store='qt5',
                       libpath='/usr/local/lib',
                       fragment=frag,
                       msg='Is /usr/local/lib required?')
Пример #48
0
def restore(self):
    """
    Load the data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`.

    The local build cache is read first; when the artifacts cache is enabled
    (option ``artifacts_cache_restore``), its pickle is merged in, with local
    entries taking precedence over cached ones on conflicting keys.
    """
    try:
        env = ConfigSet.ConfigSet(
            os.path.join(self.cache_dir, 'build.config.py'))
    except (IOError, OSError):
        # no saved configuration: nothing to set up
        pass
    else:
        if env['version'] < Context.HEXVERSION:
            raise Errors.WafError('Version mismatch! reconfigure the project')
        for t in env['tools']:
            self.setup(**t)

    dbfn = os.path.join(self.variant_dir, Context.DBFILE)
    Node.Nod3 = self.node_class
    local_data = None
    cache_data = None
    data = None
    try:
        local_data_str = Utils.readf(dbfn, 'rb')
        try:
            local_data = cPickle.loads(local_data_str)
        except cPickle.UnpicklingError:
            Logs.debug(
                'build: Could not unpickle the data from local build cache {}'.
                format(dbfn))
    except (IOError, EOFError):
        # handle missing file/empty file
        Logs.debug(
            'build: Could not load the local build cache {} (missing)'.format(
                dbfn))

    if local_data:
        data = local_data

    # If artifacts cache is enabled, try to load the artifacts cache, this ensures that the task's include dependencies can be known in advance in a clean build
    if self.artifacts_cache and self.is_option_true('artifacts_cache_restore'):
        try:
            dbfn = os.path.join(self.artifacts_cache, 'wafpickle', self.cmd,
                                Context.DBFILE)
            cache_data_str = Utils.readf(dbfn, 'rb')
            try:
                cache_data = cPickle.loads(cache_data_str)
            except cPickle.UnpicklingError:
                Logs.debug(
                    'build: Could not unpickle the data from global build cache {}'
                    .format(dbfn))
        except (IOError, EOFError):
            # handle missing file/empty file
            Logs.debug(
                'build: Could not load the global build cache {} (missing)'.
                format(dbfn))
        if cache_data:
            if not local_data:
                data = cache_data
            else:
                # merge the two pickles; local entries win on conflicts
                merged_data = {}
                for x in local_data:
                    if x not in cache_data:
                        # BUGFIX: was `merged_data[x] = local_data`, which stored
                        # the entire local dict under the key instead of the entry
                        merged_data[x] = local_data[x]

                for x in cache_data:
                    if x not in local_data:
                        merged_data[x] = cache_data[x]
                    else:
                        if isinstance(local_data[x], dict):
                            # dict-valued entries are merged key-wise, local keys override
                            cache_data[x].update(local_data[x])
                            merged_data[x] = cache_data[x]
                        else:
                            merged_data[x] = local_data[x]
                data = merged_data
                # these two paths must always come from the artifacts cache
                data['cached_engine_path'] = cache_data['cached_engine_path']
                data['cached_tp_root_path'] = cache_data['cached_tp_root_path']
    if data:
        try:
            # serialize access to the Node class state while restoring attributes
            Node.pickle_lock.acquire()
            for x in Build.SAVED_ATTRS:
                if x in data:
                    setattr(self, x, data[x])
                else:
                    Logs.debug(
                        "build: SAVED_ATTRS key {} missing from cache".format(
                            x))
        finally:
            Node.pickle_lock.release()

    self.init_dirs()
Пример #49
0
def apply_run_py_script(tg):
    """Task generator for running either Python 2 or Python 3 on a single
    script.

    Attributes:

        * source -- A **single** source node or string. (required)
        * target -- A single target or list of targets (nodes or strings).
        * deps -- A single dependency or list of dependencies (nodes or strings)
        * add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable.

    If the build environment has an attribute "PROJECT_PATHS" with
    a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
    """

    # Default to Python 3 unless the generator says otherwise.
    version = getattr(tg, 'version', 3)
    if version not in (2, 3):
        raise ValueError(
            "Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s"
            % version)

    # Resolve the script and its targets into nodes.
    script_node = tg.path.find_resource(tg.source)
    target_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]

    # Create the task.
    task = tg.create_task('run_py_%d_script' % version,
                          src=script_node,
                          tgt=target_nodes)

    # custom execution environment
    # TODO use a list and  os.sep.join(lst) at the end instead of concatenating strings
    env = dict(os.environ)
    env['PYTHONPATH'] = env.get('PYTHONPATH', '')
    task.env.env = env
    project_paths = getattr(task.env, 'PROJECT_PATHS', None)
    if project_paths and 'PROJECT_ROOT' in project_paths:
        env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
    if getattr(tg, 'add_to_pythonpath', None):
        env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath

    # Collapse runs of path separators into a single one...
    env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, env['PYTHONPATH'])
    # ...and drop a leading separator, which Python dislikes.
    if env['PYTHONPATH'].startswith(os.pathsep):
        env['PYTHONPATH'] = env['PYTHONPATH'][1:]

    # dependencies (if the attribute 'deps' changes, trigger a recompilation)
    for dep in tg.to_list(getattr(tg, 'deps', [])):
        dep_node = tg.path.find_resource(dep)
        if not dep_node:
            tg.bld.fatal('Could not find dependency %r for running %r' %
                         (dep, script_node.abspath()))
        task.dep_nodes.append(dep_node)
    Logs.debug('deps: found dependencies %r for running %r' %
               (task.dep_nodes, script_node.abspath()))

    # Bypass the execution of process_source by setting the source to an empty list
    tg.source = []
Пример #50
0
def store(self):
    """
    Store the data for next runs, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary
    file to avoid problems on ctrl+c.
    """

    # Write ArtifactsCacheMetrics to file
    if self.artifacts_cache and self.is_option_true(
            'artifacts_cache_restore') and getattr(
                self, 'artifacts_cache_metrics', False):
        json_data = {
            'tasks_processed_num':
                len(self.artifacts_cache_metrics.tasks_processed),
            'tasks_missed_num':
                len(self.artifacts_cache_metrics.tasks_missed),
            'tasks_failed_to_retrieve_num':
                len(self.artifacts_cache_metrics.tasks_failed_to_retrieve),
        }
        f = os.path.join(self.variant_dir, 'ArtifactsCacheMetrics.json')
        with open(f, 'w') as output:
            json.dump(json_data, output)

        Logs.info(
            "Total number of tasks processed by waf artifacts cache: {}\n".
            format(len(self.artifacts_cache_metrics.tasks_processed)) +
            "Cache miss: {}".format(
                len(self.artifacts_cache_metrics.tasks_missed)))

    # collect the attributes to persist
    data = {}
    for attr in Build.SAVED_ATTRS:
        data[attr] = getattr(self, attr, None)

    try:
        # Node.Nod3 must point at our node class while pickling; the lock
        # serializes this against concurrent users of the Node module state
        Node.pickle_lock.acquire()
        Node.Nod3 = self.node_class
        payload = cPickle.dumps(data, -1)
    finally:
        Node.pickle_lock.release()

    def write_to_db(db, contents):
        """Atomically replace *db* with *contents* via a temp file + rename."""
        Utils.writef(db + '.tmp', contents, m='wb')

        try:
            # preserve ownership of the previous database file when possible
            st = os.stat(db)
            os.remove(db)
            if not Utils.is_win32:  # win32 has no chown but we're paranoid
                os.chown(db + '.tmp', st.st_uid, st.st_gid)
        except (AttributeError, OSError):
            pass

        # do not use shutil.move (copy is not thread-safe)
        os.rename(db + '.tmp', db)

    write_to_db(os.path.join(self.variant_dir, Context.DBFILE), payload)
    # Save to artifacts cache if artifacts cache is enabled
    if self.artifacts_cache and self.is_option_true('artifacts_cache_upload'):
        # reuse the already-serialized payload instead of pickling a second time
        wafpickle_dir = os.path.join(self.artifacts_cache, 'wafpickle',
                                     self.cmd)
        if not os.path.exists(wafpickle_dir):
            os.makedirs(wafpickle_dir)
        try:
            write_to_db(os.path.join(wafpickle_dir, Context.DBFILE), payload)
        except Exception:
            # best-effort upload: a cache failure must not break the build
            pass
Пример #51
0
    def post(self):
        """
		Creates tasks for this task generator. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`

		:return: True if the task generator was posted, False if it had already been posted
		"""
        # run-once guard: a task generator is only ever posted one time
        if getattr(self, 'posted', None):
            return False
        self.posted = True

        # start from the explicitly requested methods, plus the ones bound to all features
        keys = set(self.meths)
        keys.update(feats['*'])

        # add the methods listed in the features
        self.features = Utils.to_list(self.features)
        for x in self.features:
            st = feats[x]
            if st:
                keys.update(st)
            elif not x in Task.classes:
                # a feature with no bound methods and no matching task class is likely a typo
                Logs.warn(
                    'feature %r does not exist - bind at least one method to it?',
                    x)

        # copy the precedence table, keeping only entries relevant to the selected methods
        prec = {}
        prec_tbl = self.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]

        # elements disconnected: methods with no incoming precedence constraint
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x:
                    break
            else:
                tmp.append(a)

        # reverse sort so that tmp.pop() below yields methods in ascending name order
        tmp.sort(reverse=True)

        # topological sort: repeatedly schedule a method whose predecessors are all done
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys:
                out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                # successors with no other remaining predecessor become schedulable
                for x in nlst:
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)
                        tmp.sort(reverse=True)

        # anything left in prec could not be scheduled -> circular precedence constraints
        if prec:
            buf = ['Cycle detected in the method execution:']
            for k, v in prec.items():
                buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
            raise Errors.WafError('\n'.join(buf))
        self.meths = out

        # then we run the methods in order
        Logs.debug('task_gen: posting %s %d', self, id(self))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)', x, id(self))
            v()

        Logs.debug('task_gen: posted %s', self.name)
        return True
Пример #52
0
def check_boost(self, *k, **kw):
    """
	Initialize boost libraries to be used.

	Keywords: you can pass the same parameters as with the command line (without "--boost-").
	Note that the command line has the priority, and should preferably be used.
	"""
    # a C++ compiler must be configured before any boost detection can run
    if not self.env['CXX']:
        self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')

    # first positional argument (if any) wins over the 'lib' keyword
    params = {'lib': k and k[0] or kw.get('lib', None)}
    # --boost-* command-line options override the keyword arguments
    for key, value in self.options.__dict__.items():
        if not key.startswith('boost_'):
            continue
        key = key[len('boost_'):]
        params[key] = value and value or kw.get(key, '')

    var = kw.get('uselib_store', 'BOOST')

    self.start_msg('Checking boost includes')
    self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
    self.env.BOOST_VERSION = self.boost_get_version(inc)
    self.end_msg(self.env.BOOST_VERSION)
    if Logs.verbose:
        Logs.pprint('CYAN', '	path : %s' % self.env['INCLUDES_%s' % var])

    # headers-only usage: stop before the library checks
    if not params['lib']:
        return
    self.start_msg('Checking boost libs')
    # 'ST' prefix selects the static-library env variables (STLIB/STLIBPATH)
    suffix = params.get('static', None) and 'ST' or ''
    path, libs = self.boost_get_libs(**params)
    self.env['%sLIBPATH_%s' % (suffix, var)] = [path]
    self.env['%sLIB_%s' % (suffix, var)] = libs
    self.end_msg('ok')
    if Logs.verbose:
        Logs.pprint('CYAN', '	path : %s' % path)
        Logs.pprint('CYAN', '	libs : %s' % libs)

    def try_link():
        # compile+link tiny programs against the detected libs to verify usability
        if 'system' in params['lib']:
            self.check_cxx(
                fragment="\n".join([
                    '#include <boost/system/error_code.hpp>',
                    'int main() { boost::system::error_code c; }',
                ]),
                use=var,
                execute=False,
            )
        if 'thread' in params['lib']:
            self.check_cxx(
                fragment="\n".join([
                    '#include <boost/thread.hpp>',
                    'int main() { boost::thread t; }',
                ]),
                use=var,
                execute=False,
            )

    if params.get('linkage_autodetect', False):
        self.start_msg("Attempting to detect boost linkage flags")
        toolset = self.boost_get_toolset(kw.get('toolset', ''))
        if toolset in ['vc']:
            # disable auto-linking feature, causing error LNK1181
            # because the code wants to be linked against
            self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']

            # if no dlls are present, we guess the .lib files are not stubs
            has_dlls = False
            for x in Utils.listdir(path):
                if x.endswith(self.env.cxxshlib_PATTERN % ''):
                    has_dlls = True
                    break
            if not has_dlls:
                # switch from dynamic to static linking variables
                self.env['STLIBPATH_%s' % var] = [path]
                self.env['STLIB_%s' % var] = libs
                del self.env['LIB_%s' % var]
                del self.env['LIBPATH_%s' % var]

            # we attempt to play with some known-to-work CXXFLAGS combinations
            for cxxflags in (['/MD', '/EHsc'], []):
                # stash/revert so a failed attempt leaves the env untouched
                self.env.stash()
                self.env["CXXFLAGS_%s" % var] += cxxflags
                try:
                    try_link()
                    self.end_msg("ok: winning cxxflags combination: %s" %
                                 (self.env["CXXFLAGS_%s" % var]))
                    exc = None
                    break
                except Errors.ConfigurationError as e:
                    self.env.revert()
                    exc = e

            # exc holds the last failure if no combination linked successfully
            if exc is not None:
                self.end_msg(
                    "Could not auto-detect boost linking flags combination, you may report it to boost.py author",
                    ex=exc)
                self.fatal('The configuration failed')
        else:
            self.end_msg(
                "Boost linkage flags auto-detection not implemented (needed ?) for this toolchain"
            )
            self.fatal('The configuration failed')
    else:
        self.start_msg('Checking for boost linkage')
        try:
            try_link()
        except Errors.ConfigurationError as e:
            self.end_msg(
                "Could not link against boost libraries using supplied options"
            )
            self.fatal('The configuration failed')
        self.end_msg('ok')
Пример #53
0
def write_news(name, in_files, out_file, top_entries=None, extra_entries=None):
    """
    Generate a Debian-changelog-style NEWS file from DOAP/doap-changeset RDF.

    :param name: project name used in the entry headers
    :param in_files: RDF (Turtle/n3) files to parse
    :param out_file: path of the NEWS file to write
    :param top_entries: optional dict collecting per-release summary lines, keyed by file-release URI
    :param extra_entries: optional dict of additional entry lines, keyed by file-release URI
    """
    import rdflib
    import textwrap
    from time import strftime, strptime

    doap = rdflib.Namespace('http://usefulinc.com/ns/doap#')
    dcs  = rdflib.Namespace('http://ontologi.es/doap-changeset#')
    rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
    foaf = rdflib.Namespace('http://xmlns.com/foaf/0.1/')
    rdf  = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
    m    = rdflib.ConjunctiveGraph()

    try:
        for i in in_files:
            m.parse(i, format='n3')
    except Exception:  # narrowed from a bare except: keep KeyboardInterrupt/SystemExit propagating
        Logs.warn('Error parsing data, unable to generate NEWS')
        return

    proj = m.value(None, rdf.type, doap.Project)
    # pull in any referenced .ttl files for additional release data
    for f in m.triples([proj, rdfs.seeAlso, None]):
        if f[2].endswith('.ttl'):
            m.parse(f[2], format='n3')

    entries = {}
    for r in m.triples([proj, doap.release, None]):
        release   = r[2]
        revision  = m.value(release, doap.revision, None)
        date      = m.value(release, doap.created, None)
        blamee    = m.value(release, dcs.blame, None)
        changeset = m.value(release, dcs.changeset, None)
        dist      = m.value(release, doap['file-release'], None)

        if revision and date and blamee and changeset:
            entry = '%s (%s) stable;\n' % (name, revision)

            for i in m.triples([changeset, dcs.item, None]):
                item = textwrap.wrap(m.value(i[2], rdfs.label, None), width=79)
                entry += '\n  * ' + '\n    '.join(item)
                if dist and top_entries is not None:
                    if not str(dist) in top_entries:
                        top_entries[str(dist)] = []
                    top_entries[str(dist)] += [
                        '%s: %s' % (name, '\n    '.join(item))]

            if extra_entries:
                # BUGFIX: use .get() so a release without extra entries does not raise KeyError
                for i in extra_entries.get(str(dist), []):
                    entry += '\n  * ' + i

            entry += '\n\n --'

            blamee_name = m.value(blamee, foaf.name, None)
            blamee_mbox = m.value(blamee, foaf.mbox, None)
            if blamee_name and blamee_mbox:
                entry += ' %s <%s>' % (blamee_name,
                                       blamee_mbox.replace('mailto:', ''))

            entry += '  %s\n\n' % (
                strftime('%a, %d %b %Y %H:%M:%S +0000', strptime(date, '%Y-%m-%d')))

            entries[revision] = entry
        else:
            Logs.warn('Ignored incomplete %s release description' % name)

    if len(entries) > 0:
        # write newest releases first; 'with' guarantees the file is closed on error
        with open(out_file, 'w') as news:
            for e in sorted(entries.keys(), reverse=True):
                news.write(entries[e])
Пример #54
0
    def post(self):
        """
		Create task objects. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`

		:return: True if the task generator was posted, False if it had already been posted
		"""

        # we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
        if getattr(self, 'posted', None):
            #error("OBJECT ALREADY POSTED" + str( self))
            return False
        self.posted = True

        # start from the explicitly requested methods
        keys = set(self.meths)

        # add the methods listed in the features
        self.features = Utils.to_list(self.features)
        for x in self.features + ['*']:
            st = feats[x]
            if not st:
                # a feature with no bound methods and no matching task class is likely a typo
                if not x in Task.classes:
                    Logs.warn(
                        'feature %r does not exist - bind at least one method to it'
                        % x)
            keys.update(list(st))  # ironpython 2.7 wants the cast to list

        # copy the precedence table, keeping only entries relevant to the selected methods
        prec = {}
        prec_tbl = self.prec or task_gen.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]

        # elements disconnected: methods with no incoming precedence constraint
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x: break
            else:
                tmp.append(a)

        tmp.sort()

        # topological sort: repeatedly schedule a method whose predecessors are all done
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys: out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                # successors with no other remaining predecessor become schedulable
                for x in nlst:
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)

        # anything left in prec could not be scheduled -> circular precedence constraints
        if prec:
            raise Errors.WafError('Cycle detected in the method execution %r' %
                                  prec)
        # out was built newest-first; reverse to get the execution order
        out.reverse()
        self.meths = out

        # then we run the methods in order
        Logs.debug('task_gen: posting %s %d' % (self, id(self)))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
            v()

        Logs.debug('task_gen: posted %s' % self.name)
        return True
Пример #55
0
def display_header(title):
    """Print *title* as a bold section header via the Waf logging helpers."""
    Logs.pprint('BOLD', title)
Пример #56
0
def get_msvc_version(conf, compiler, version, target, vcvars):
    """
	Checks that an installed compiler actually runs and uses vcvars to obtain the
	environment needed by the compiler.

	:param compiler: compiler type, for looking up the executable name
	:param version: compiler version, for debugging only
	:param target: target architecture
	:param vcvars: batch file to run to check the environment
	:return: the location of the compiler executable, the location of include dirs, and the library paths
	:rtype: tuple of strings
	"""
    Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)

    # Each probe gets its own uniquely-named batch file so concurrent/repeated
    # configuration runs do not clobber each other's scratch file.
    try:
        conf.msvc_cnt += 1
    except AttributeError:
        conf.msvc_cnt = 1
    batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
    # The batch file clears INCLUDE/LIB, sources vcvars for the requested
    # target, then echoes the resulting environment so it can be parsed below.
    batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars, target))
    sout = conf.cmd_and_log(
        ['cmd.exe', '/E:on', '/V:on', '/C',
         batfile.abspath()])
    lines = sout.splitlines()

    # Some vcvars versions emit a leading blank line before the echoed values.
    # NOTE(review): this raises IndexError if the batch file produced no
    # output at all — presumably cmd_and_log already failed in that case.
    if not lines[0]:
        lines.pop(0)

    # Parse the echoed environment into path/include/lib lists.
    MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
    for line in lines:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
    if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
        conf.fatal(
            'msvc: Could not find a valid architecture for building (get_msvc_version_3)'
        )

    # Check if the compiler is usable at all.
    # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
    env = dict(os.environ)
    env.update(PATH=path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)

    # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
    if 'CL' in env:
        del (env['CL'])

    try:
        # Running 'cl /help' is a cheap smoke test that the binary executes
        # in the probed environment.
        conf.cmd_and_log(cxx + ['/help'], env=env)
    except UnicodeError:
        st = traceback.format_exc()
        if conf.logger:
            conf.logger.error(st)
        conf.fatal('msvc: Unicode error - check the code page?')
    except Exception as e:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler,
                   version, target, str(e))
        conf.fatal(
            'msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)'
        )
    else:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version,
                   target)
    finally:
        # find_program cached the executable path in conf.env; clear it so
        # this probe does not leak into the final configuration.
        conf.env[compiler_name] = ''

    return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
Пример #57
0
    def execute(self):
        """
        Generate an Xcode project for the configured specs/platforms.

        Restores the build state, collects task generators matching the
        active spec and platform, builds a PBXProject with native targets,
        resource groups and a dummy symbols target, then writes
        project.pbxproj plus workspace settings to <name>.xcodeproj.
        """
        global root_dir

        # Restore cached build state and load the user's wscript files.
        self.restore()
        if not self.all_envs:
            self.load_envs()
        self.load_user_settings()
        self.recurse([self.run_dir])

        root_dir = Context.launch_dir
        xcode_project_name = self.get_xcode_project_name()
        if not xcode_project_name:
            # Fall back to the wscript's APPNAME, or 'project' as a last resort.
            xcode_project_name = getattr(Context.g_module, Context.APPNAME,
                                         'project')

        project = PBXProject(xcode_project_name, ('Xcode 3.2', 46), self)
        project.set_project_spec(self.options.project_spec)

        platform_name = self.get_target_platform_name()
        project.set_platform_name(platform_name)
        project.set_settings(self.get_settings())

        resource_group = PBXGroup("Resources")
        project.mainGroup.children.append(resource_group)

        # Modules enabled either by the spec or by an enabled game project.
        spec_modules = self.spec_modules(project.project_spec)
        project_modules = []
        for project_name in self.get_enabled_game_project_list():
            project_modules = project_modules + self.project_and_platform_modules(
                project_name, platform_name)

        # Accept 'all', the platform itself, and its alias when filtering
        # task generators by platform below.
        target_platforms = ['all']
        for platform in self.get_target_platforms():
            target_platforms.append(self.platform_to_platform_alias(platform))
            target_platforms.append(platform)

        target_platforms = set(target_platforms)

        # add the xctest wrapper source files to the project for platforms that leverage it for commandline installation
        if platform_name in XCTEST_WRAPPER_TARGETS:
            xctest_root_node = self.srcnode.make_node(
                XCTEST_WRAPPER_REL_PATH.split('/'))
            test_files = [
                xctest_root_node.make_node(test_file)
                for test_file in XCTEST_WRAPPER_FILES
            ]

            project.mainGroup.add(self.srcnode, test_files)

            # Resolve the SDK path so the UIKit framework reference below
            # points at the active toolchain.
            xcrun_cmd = [
                'xcrun', '--sdk', PLATFORM_SDK_NAME[platform_name],
                '--show-sdk-path'
            ]
            sdk_path = subprocess.check_output(xcrun_cmd).decode(
                sys.stdout.encoding or 'iso8859-1', 'replace').strip()

            # the installTest requires UIKit to show a dialog
            frameworks_group = PBXGroup('Frameworks')
            ui_kit = PBXFileReference(
                'UIKit.framework',
                os.path.join(sdk_path, FRAMEWORKS_REL_PATH, 'UIKit.framework'),
                'wrapper.framework', '<absolute>')
            frameworks_group.children.append(ui_kit)

            project.mainGroup.children.append(frameworks_group)

        # Walk every task generator; keep only those in the spec/project
        # module lists, supported on this platform, and building a program.
        source_files = []
        for group in self.groups:
            for task_generator in group:
                if not isinstance(task_generator, TaskGen.task_gen):
                    continue

                if (task_generator.target
                        not in spec_modules) and (task_generator.target
                                                  not in project_modules):
                    Logs.debug(
                        'xcode: Skipping %s because it is not part of the spec',
                        task_generator.name)
                    continue

                task_generator.post()

                platforms = target_platforms.intersection(
                    task_generator.platforms)
                if not platforms:
                    Logs.debug(
                        'xcode: Skipping %s because it is not supported on platform %s',
                        task_generator.name, platform_name)
                    continue

                source_files = list(
                    set(source_files + self.collect_source(task_generator)))

                # Match any C/C++ program feature
                # NOTE(review): have_feature_match is never read afterwards;
                # the for/else alone implements the skip (the 'else' runs only
                # when no feature matched, i.e. the loop did not break).
                features = Utils.to_list(
                    getattr(task_generator, 'features', ''))
                have_feature_match = False
                for a_feature in features:
                    if re.search("c.*program", a_feature) != None:
                        have_feature_match = True
                        break
                else:
                    Logs.debug(
                        'xcode: Skipping %s because it is not a program',
                        task_generator.name)
                    continue

                pbx_native_target = project.add_task_gen(task_generator, self)
                # Attach per-target asset catalogs (app icons etc.) if the
                # task generator declares any.
                xcassets_path = getattr(task_generator, 'darwin_xcassets',
                                        None)
                if xcassets_path:
                    app_resources_group = PBXGroup(task_generator.name)
                    resource_group.children.append(app_resources_group)
                    xcassets_folder_node = self.engine_node.make_node(
                        xcassets_path)
                    xcode_assets_folder_ref = PBXFileReference(
                        'xcassets', xcassets_folder_node.abspath(),
                        'folder.assetcatalog')
                    app_resources_group.children.append(
                        xcode_assets_folder_ref)
                    pbx_native_target.add_resources_build_phase_to_target(
                        [xcode_assets_folder_ref])

        project.mainGroup.add(self.srcnode, source_files)
        # Native targets first, legacy targets last; alphabetical within each.
        project.targets.sort(
            key=lambda target:
            [isinstance(target, PBXLegacyTarget), target.name])

        # Create a dummy target that builds all source files so Xcode find file/symbol functionality works
        dummy_target = PBXNativeTarget(project.platform_name, self)

        source_file_references = []
        project.mainGroup.get_child_file_references_recursive(
            source_file_references)
        dummy_target.buildPhases = [
            PBXSourcesBuildPhase(source_file_references)
        ]

        dummy_target.name = 'DummyTargetForSymbols'
        dummy_target.productName = 'DummyTargetForSymbols'
        dummy_target.productType = 'com.apple.product-type.tool'
        dummy_target.productReference = PBXFileReference(
            'DummyTargetForSymbols', 'DummyTargetForSymbols',
            'compiled.mach-o.executable', 'BUILT_PRODUCTS_DIR')

        project._output.children.append(dummy_target.productReference)
        project.targets.append(dummy_target)

        # Create game resource group/folder structure and attach it to the native
        # projects
        root_assets_folder = self.srcnode.make_node("Cache")

        for game_project in self.get_enabled_game_project_list():
            game_resources_group = PBXGroup(game_project)
            resource_group.children.append(game_resources_group)

            dev_assets_folder = root_assets_folder.make_node(
                game_project).make_node(self.get_dev_source_assets_subdir())
            dev_assets_folder_ref = PBXFileReference(
                'assets_dev', dev_assets_folder.abspath(), 'folder')
            game_resources_group.children.append(dev_assets_folder_ref)

            release_assets_folder = root_assets_folder.make_node(
                game_project).make_node(
                    self.get_release_source_assets_subdir())
            release_assets_folder_ref = PBXFileReference(
                'assets_release', release_assets_folder.abspath(), 'folder')
            game_resources_group.children.append(release_assets_folder_ref)

            xcode_assets_folder = self.launch_node().make_node(
                self.game_code_folder(game_project) +
                self.get_xcode_source_assets_subdir())
            xcode_assets_folder_ref = PBXFileReference(
                'xcassets', xcode_assets_folder.abspath(),
                'folder.assetcatalog')
            game_resources_group.children.append(xcode_assets_folder_ref)

            # Only the launcher target of each game gets the asset catalog
            # and the provisioning-cleanup build phase.
            for target in project.targets:
                launcher_name = self.get_launcher_name(platform_name) or ''
                if isinstance(
                        target, PBXNativeTarget
                ) and target.name == game_project + launcher_name:
                    target.add_remove_embedded_provisioning_build_phase_to_target(
                    )
                    target.add_resources_build_phase_to_target(
                        [xcode_assets_folder_ref])

        project.mainGroup.sort_recursive()

        # NOTE(review): the leading '/' in the node name looks suspicious —
        # confirm make_node treats it as relative to srcnode as intended.
        projectDir = self.srcnode.make_node("/%s.xcodeproj" %
                                            xcode_project_name)

        projectDir.mkdir()
        node = projectDir.make_node('project.pbxproj')
        project.write(open(node.abspath(), 'w'))

        # Generate settings to make Xcode use the Legacy Build System
        project_ws_node = projectDir.make_node('project.xcworkspace')
        project_ws_node.mkdir()

        shared_data_node = project_ws_node.make_node("xcshareddata")
        shared_data_node.mkdir()
        wpfile = shared_data_node.make_node("WorkspaceSettings.xcsettings")

        with open(wpfile.abspath(), "w") as f:
            f.write(XCODE_WORKSPACE_SETTINGS)
Пример #58
0
def run_tests(ctx, appname, tests, desired_status=0, dirs=None, name='*'):
    """
    Run each command in *tests* and print a colorized pass/fail summary.

    :param ctx: waf context (unused here, kept for interface compatibility)
    :param appname: application name, used only in the summary message
    :param tests: commands to run; each entry is a shell string or an argv list
    :param desired_status: exit code that counts as a pass (default 0)
    :param dirs: test directories (defaults to ['src']; currently unused)
    :param name: test-suite label used in the summary message
    """
    # Fix: the old signature used a mutable default (dirs=['src']), which is
    # shared across calls; bind the default inside the function instead.
    if dirs is None:
        dirs = ['src']

    failures = 0

    # Run all tests
    for i in tests:
        # Normalize argv-list entries to a single shell string: with
        # shell=True a list is not portable, and the old code's
        # 'valgrind ' + i raised TypeError for lists.
        s = ' '.join(i) if isinstance(i, list) else i
        print('')
        Logs.pprint('BOLD', '** Test', sep='')
        Logs.pprint('NORMAL', '%s' % s)
        cmd = s
        if Options.options.grind:
            cmd = 'valgrind ' + s
        if subprocess.call(cmd, shell=True) == desired_status:
            Logs.pprint('GREEN', '** Pass')
        else:
            failures += 1
            Logs.pprint('RED', '** FAIL')

    print('')
    if failures == 0:
        Logs.pprint('GREEN', '** Pass: All %s.%s tests passed' % (appname, name))
    else:
        Logs.pprint('RED', '** FAIL: %d %s.%s tests failed' % (failures, appname, name))
Пример #59
0
    def execute(self):
        """
		See :py:func:`waflib.Context.Context.execute`
		"""
        self.init_dirs()

        # Ensure the build cache directory exists before configuration runs.
        self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
        self.cachedir.mkdir()

        # All configuration output also goes to the config log file.
        path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
        self.logger = Logs.make_logger(path, 'cfg')

        # Header info for the log: "APPNAME (VERSION)" when both are declared.
        app = getattr(Context.g_module, 'APPNAME', '')
        if app:
            ver = getattr(Context.g_module, 'VERSION', '')
            if ver:
                app = "%s (%s)" % (app, ver)

        params = {
            'now': time.ctime(),
            'pyver': sys.hexversion,
            'systype': sys.platform,
            'args': " ".join(sys.argv),
            'wafver': Context.WAFVERSION,
            'abi': Context.ABI,
            'app': app
        }
        self.to_log(conf_template % params)
        self.msg('Setting top to', self.srcnode.abspath())
        self.msg('Setting out to', self.bldnode.abspath())

        # Warn about layouts that are usually configuration mistakes.
        if id(self.srcnode) == id(self.bldnode):
            Logs.warn('Setting top == out')
        elif id(self.path) != id(self.srcnode):
            if self.srcnode.is_child_of(self.path):
                Logs.warn(
                    'Are you certain that you do not want to set top="." ?')

        # Run the actual configuration commands from the wscript.
        super(ConfigurationContext, self).execute()

        self.store()

        Context.top_dir = self.srcnode.abspath()
        Context.out_dir = self.bldnode.abspath()

        # this will write a configure lock so that subsequent builds will
        # consider the current path as the root directory (see prepare_impl).
        # to remove: use 'waf distclean'
        env = ConfigSet.ConfigSet()
        env.argv = sys.argv
        env.options = Options.options.__dict__
        env.config_cmd = self.cmd

        env.run_dir = Context.run_dir
        env.top_dir = Context.top_dir
        env.out_dir = Context.out_dir

        # conf.hash & conf.files hold wscript files paths and hash
        # (used only by Configure.autoconfig)
        env.hash = self.hash
        env.files = self.files
        env.environ = dict(self.environ)

        # Write the lock file in run/top/out unless suppressed via env,
        # environment variable, or command-line option.
        # NOTE(review): getattr(...) here has no default — this assumes the
        # no_lock_in_* options are always registered; confirm in the option
        # declarations.
        if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN')
                or getattr(Options.options, 'no_lock_in_run')):
            env.store(os.path.join(Context.run_dir, Options.lockfile))
        if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP')
                or getattr(Options.options, 'no_lock_in_top')):
            env.store(os.path.join(Context.top_dir, Options.lockfile))
        if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT')
                or getattr(Options.options, 'no_lock_in_out')):
            env.store(os.path.join(Context.out_dir, Options.lockfile))
Пример #60
0
def init_vala_task(self):
	"""
	Initializes the vala task with the relevant data (acts as a constructor)
	"""
	self.profile = getattr(self, 'profile', 'gobject')

	# The gobject profile implies linking against GObject.
	if self.profile == 'gobject':
		self.uselib = Utils.to_list(getattr(self, 'uselib', []))
		if not 'GOBJECT' in self.uselib:
			self.uselib.append('GOBJECT')

	def addflags(flags):
		# Small helper: append one flag (or a list of flags) to VALAFLAGS.
		self.env.append_value('VALAFLAGS', flags)

	if self.profile:
		addflags('--profile=%s' % self.profile)

	valatask = self.valatask

	# output directory
	if hasattr(self, 'vala_dir'):
		if isinstance(self.vala_dir, str):
			valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
			try:
				valatask.vala_dir_node.mkdir()
			except OSError:
				# NOTE(review): bld.fatal already raises; the 'raise' on its
				# return value never actually executes — confirm intent.
				raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
		else:
			valatask.vala_dir_node = self.vala_dir
	else:
		valatask.vala_dir_node = self.path.get_bld()
	addflags('--directory=%s' % valatask.vala_dir_node.abspath())

	if hasattr(self, 'thread'):
		if self.profile == 'gobject':
			if not 'GTHREAD' in self.uselib:
				self.uselib.append('GTHREAD')
		else:
			#Vala doesn't have threading support for dova nor posix
			Logs.warn("Profile %s means no threading support" % self.profile)
			self.thread = False

		if self.thread:
			addflags('--thread')

	# Libraries additionally produce a header, a .vapi binding and
	# optionally a GIR file; programs do not.
	self.is_lib = 'cprogram' not in self.features
	if self.is_lib:
		addflags('--library=%s' % self.target)

		h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
		valatask.outputs.append(h_node)
		addflags('--header=%s' % h_node.name)

		valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))

		if getattr(self, 'gir', None):
			gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
			addflags('--gir=%s' % gir_node.name)
			valatask.outputs.append(gir_node)

	# Task-generator attribute wins over the command-line option.
	self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
	if self.vala_target_glib:
		addflags('--target-glib=%s' % self.vala_target_glib)

	addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])

	# Private packages are compiled against but not exported in the .vapi.
	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
	addflags(['--pkg=%s' % x for x in packages_private])

	def _get_api_version():
		# Derive "0.x" or "x.0" from the project's API_VERSION; default 1.0.
		api_version = '1.0'
		if hasattr(Context.g_module, 'API_VERSION'):
			version = Context.g_module.API_VERSION.split(".")
			if version[0] == "0":
				api_version = "0." + version[1]
			else:
				api_version = version[0] + ".0"
		return api_version

	self.includes = Utils.to_list(getattr(self, 'includes', []))
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	valatask.install_path = getattr(self, 'install_path', '')

	# Installation destinations for the generated binding artifacts.
	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
	valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
	valatask.install_binding = getattr(self, 'install_binding', True)

	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
	#includes =  []

	# Resolve 'use' dependencies transitively: any used task generator that
	# produces a <target>.vapi becomes a package dependency, its output
	# directory a vapi/include dir, and its task a run-after constraint.
	if hasattr(self, 'use'):
		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
		seen = []
		while len(local_packages) > 0:
			package = local_packages.pop()
			if package in seen:
				continue
			seen.append(package)

			# check if the package exists
			try:
				package_obj = self.bld.get_tgen_by_name(package)
			except Errors.WafError:
				continue
			package_name = package_obj.target
			for task in package_obj.tasks:
				for output in task.outputs:
					if output.name == package_name + ".vapi":
						valatask.set_run_after(task)
						if package_name not in packages:
							packages.append(package_name)
						if output.parent not in vapi_dirs:
							vapi_dirs.append(output.parent)
						if output.parent not in self.includes:
							self.includes.append(output.parent)

			if hasattr(package_obj, 'use'):
				lst = self.to_list(package_obj.use)
				lst.reverse()
				local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

	addflags(['--pkg=%s' % p for p in packages])

	# Vapi dirs may be Node objects or path strings relative to this wscript.
	for vapi_dir in vapi_dirs:
		if isinstance(vapi_dir, Node.Node):
			v_node = vapi_dir
		else:
			v_node = self.path.find_dir(vapi_dir)
		if not v_node:
			Logs.warn('Unable to locate Vala API directory: %r' % vapi_dir)
		else:
			addflags('--vapidir=%s' % v_node.abspath())

	# Libraries with package dependencies also emit a .deps file.
	self.dump_deps_node = None
	if self.is_lib and self.packages:
		self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
		valatask.outputs.append(self.dump_deps_node)

	# TODO remove in waf 1.9
	self.includes.append(self.bld.srcnode.abspath())
	self.includes.append(self.bld.bldnode.abspath())
	#for include in includes:
	#	try:
	#		self.includes.append(self.path.find_dir(include).abspath())
	#		self.includes.append(self.path.find_dir(include).get_bld().abspath())
	#	except AttributeError:
	#		Logs.warn("Unable to locate include directory: '%s'" % include)


	# Schedule installation of headers, vapi/deps files and GIR files,
	# reusing existing install tasks on re-posting when present.
	if self.is_lib and valatask.install_binding:
		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
		try:
			self.install_vheader.source = headers_list
		except AttributeError:
			self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env)

		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
		try:
			self.install_vapi.source = vapi_list
		except AttributeError:
			self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env)

		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
		try:
			self.install_gir.source = gir_list
		except AttributeError:
			self.install_gir = self.bld.install_files(getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), gir_list, self.env)

	# GResource files are compiled separately and excluded from vala inputs.
	if hasattr(self, 'vala_resources'):
		nodes = self.to_nodes(self.vala_resources)
		valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
		valatask.inputs.extend(nodes)
		for x in nodes:
			addflags(['--gresources', x.abspath()])