Example #1
def process_rule(self):
	if not getattr(self,'rule',None):
		return
	name=str(getattr(self,'name',None)or self.target or self.rule)
	cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'))
	tsk=self.create_task(name)
	if getattr(self,'target',None):
		if isinstance(self.target,str):
			self.target=self.target.split()
		if not isinstance(self.target,list):
			self.target=[self.target]
		for x in self.target:
			if isinstance(x,str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir()
				tsk.outputs.append(x)
		if getattr(self,'install_path',None):
			self.bld.install_files(self.install_path,tsk.outputs)
	if getattr(self,'source',None):
		tsk.inputs=self.to_nodes(self.source)
		self.source=[]
	if getattr(self,'scan',None):
		cls.scan=self.scan
	if getattr(self,'cwd',None):
		tsk.cwd=self.cwd
	if getattr(self,'update_outputs',None)or getattr(self,'on_results',None):
		Task.update_outputs(cls)
	if getattr(self,'always',None):
		Task.always_run(cls)
	for x in['after','before','ext_in','ext_out']:
		setattr(cls,x,getattr(self,x,[]))
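
A rule-based task generator of the kind handled above is normally declared in a wscript. A minimal sketch, using only attributes this method reads (the callable and file names are illustrative):

def build(bld):
	# the rule may be a python callable; process_rule wraps it in a task class
	def make_upper(tsk):
		tsk.outputs[0].write(tsk.inputs[0].read().upper())
		return 0
	bld(rule=make_upper, source='input.txt', target='output.txt', always=True)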
Example #2
def set_precedence_constraints(tasks):
	cstr_groups = Utils.defaultdict(list)
	for x in tasks:
		x.run_after = SetOfTasks(x)
		x.run_after_groups = []
		x.waiting_sets = []

		h = x.hash_constraints()
		cstr_groups[h].append(x)

	# create sets which can be reused for all tasks
	for k in cstr_groups.keys():
		cstr_groups[k] = set(cstr_groups[k])

	# this list should be short
	for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
		group1 = cstr_groups[key1]
		group2 = cstr_groups[key2]
		# get the first entry of the set
		t1 = next(iter(group1))
		t2 = next(iter(group2))

		# add the constraints based on the comparisons
		if Task.is_before(t1, t2):
			for x in group2:
				x.run_after_groups.append(group1)
			for k in group1:
				k.waiting_sets.append(group1)
		elif Task.is_before(t2, t1):
			for x in group1:
				x.run_after_groups.append(group2)
			for k in group2:
				k.waiting_sets.append(group2)
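
The ordering above is derived from class-level attributes that Task.is_before compares. A minimal sketch of two task classes that would land in different constraint groups and be linked by this function (class names are illustrative):

from waflib import Task

class compile_thing(Task.Task):
	ext_out = ['.o']          # produces object files

class link_thing(Task.Task):
	ext_in = ['.o']           # consumes object files
	after = ['compile_thing'] # instances run after all compile_thing instances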
Example #3
def process_rule(self):
	if not getattr(self,'rule',None):
		return
	name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule))
	try:
		cache=self.bld.cache_rule_attr
	except AttributeError:
		cache=self.bld.cache_rule_attr={}
	cls=None
	if getattr(self,'cache_rule','True'):
		try:
			cls=cache[(name,self.rule)]
		except KeyError:
			pass
	if not cls:
		cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None))
		if getattr(self,'scan',None):
			cls.scan=self.scan
		elif getattr(self,'deps',None):
			def scan(self):
				nodes=[]
				for x in self.generator.to_list(getattr(self.generator,'deps',None)):
					node=self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
					nodes.append(node)
				return[nodes,[]]
			cls.scan=scan
		if getattr(self,'update_outputs',None):
			Task.update_outputs(cls)
		if getattr(self,'always',None):
			Task.always_run(cls)
		for x in('after','before','ext_in','ext_out'):
			setattr(cls,x,getattr(self,x,[]))
		if getattr(self,'cache_rule','True'):
			cache[(name,self.rule)]=cls
		if getattr(self,'cls_str',None):
			setattr(cls,'__str__',self.cls_str)
		if getattr(self,'cls_keyword',None):
			setattr(cls,'keyword',self.cls_keyword)
	tsk=self.create_task(name)
	if getattr(self,'target',None):
		if isinstance(self.target,str):
			self.target=self.target.split()
		if not isinstance(self.target,list):
			self.target=[self.target]
		for x in self.target:
			if isinstance(x,str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir()
				tsk.outputs.append(x)
		if getattr(self,'install_path',None):
			self.bld.install_files(self.install_path,tsk.outputs)
	if getattr(self,'source',None):
		tsk.inputs=self.to_nodes(self.source)
		self.source=[]
	if getattr(self,'cwd',None):
		tsk.cwd=self.cwd
Example #4
def process_rule(self):
    """
    Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

        def build(bld):
            bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
    """
    if not getattr(self, 'rule', None):
        return

    # create the task class
    name = str(getattr(self, 'name', None) or self.target or self.rule)
    cls = Task.task_factory(name, self.rule,
        getattr(self, 'vars', []),
        shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'))

    # now create one instance
    tsk = self.create_task(name)

    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir() # if a node was given, create the required folders
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            # from waf 1.5
            # although convenient, it does not 1. allow renaming the target file and 2. handle symlinks
            # TODO remove in waf 1.7
            self.bld.install_files(self.install_path, tsk.outputs)

    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        # bypass the execution of process_source by setting the source to an empty list
        self.source = []

    if getattr(self, 'scan', None):
        cls.scan = self.scan

    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd

    # TODO remove on_results in waf 1.7
    if getattr(self, 'update_outputs', None) or getattr(self, 'on_results', None):
        Task.update_outputs(cls)

    if getattr(self, 'always', None):
        Task.always_run(cls)

    for x in ['after', 'before', 'ext_in', 'ext_out']:
        setattr(cls, x, getattr(self, x, []))
Example #5
def make_interpreted_test(self):
	"""Create interpreted unit tests."""
	for x in ['test_scripts_source', 'test_scripts_template']:
		if not hasattr(self, x):
			Logs.warn('a test_scripts taskgen is missing %s' % x)
			return

	self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))

	script_nodes = self.to_nodes(self.test_scripts_source)
	for script_node in script_nodes:
		tsk = self.create_task('utest', [script_node])
		tsk.vars = lst + tsk.vars
		tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())

	self.handle_ut_cwd('test_scripts_cwd')

	env = getattr(self, 'test_scripts_env', None)
	if env:
		self.ut_env = env
	else:
		self.ut_env = dict(os.environ)

	paths = getattr(self, 'test_scripts_paths', {})
	for (k,v) in paths.items():
		p = self.ut_env.get(k, '').split(os.pathsep)
		if isinstance(v, str):
			v = v.split(os.pathsep)
		self.ut_env[k] = os.pathsep.join(p + v)
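
A generator feeding this method would carry the test_scripts_* attributes checked above. A hedged sketch, assuming the feature is named 'test_scripts' as the warning message suggests and that the python tool provides ${PYTHON} (paths are illustrative):

def build(bld):
	bld(features='test_scripts',
		test_scripts_source=bld.path.ant_glob('tests/*.py'),
		test_scripts_template='${PYTHON} ${SCRIPT}',
		test_scripts_paths={'PYTHONPATH': bld.path.abspath()})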
Example #6
def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
	ext_in=Utils.to_list(ext_in)
	ext_out=Utils.to_list(ext_out)
	if not name:
		name=rule
	cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
	def x_file(self,node):
		ext=decider and decider(self,node)or cls.ext_out
		if ext_in:
			_ext_in=ext_in[0]
		tsk=self.create_task(name,node)
		cnt=0
		keys=set(self.mappings.keys())|set(self.__class__.mappings.keys())
		for x in ext:
			k=node.change_ext(x,ext_in=_ext_in)
			tsk.outputs.append(k)
			if reentrant!=None:
				if cnt<int(reentrant):
					self.source.append(k)
			else:
				for y in keys:
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt+=1
		if install_path:
			self.bld.install_files(install_path,tsk.outputs)
		return tsk
	for x in cls.ext_in:
		task_gen.mappings[x]=x_file
	return x_file
Example #7
def make_pytest(self):
	"""
	Creates a ``utest`` task with a modified PYTHONPATH environment for Python.
	"""
	nodes = self.to_nodes(self.pytest_source)
	tsk = self.create_task('utest', nodes)
	tsk.dep_nodes.extend(self.pytest_dep_nodes)

	if getattr(self, 'ut_str', None):
		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
		tsk.vars = lst + tsk.vars

	if getattr(self, 'ut_cwd', None):
		if isinstance(self.ut_cwd, str):
			# we want a Node instance
			if os.path.isabs(self.ut_cwd):
				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
			else:
				self.ut_cwd = self.path.make_node(self.ut_cwd)
	else:
		if tsk.inputs:
			self.ut_cwd = tsk.inputs[0].parent
		else:
			raise Errors.WafError("no valid input files for pytest task, check pytest_source value")

	if not self.ut_cwd.exists():
		self.ut_cwd.mkdir()

	self.ut_env = dict(os.environ)
	self.ut_env['PYTHONPATH'] = os.pathsep.join(self.pytest_paths) + self.ut_env.get('PYTHONPATH', '')
Example #8
def declare_chain(
    name="",
    rule=None,
    reentrant=True,
    color="BLUE",
    ext_in=[],
    ext_out=[],
    before=[],
    after=[],
    decider=None,
    scan=None,
):
    ext_in = Utils.to_list(ext_in)
    ext_out = Utils.to_list(ext_out)
    cls = Task.task_factory(
        name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan
    )

    def x_file(self, node):
        ext = decider and decider(self, node) or cls.ext_out
        if ext_in:
            _ext_in = ext_in[0]
        out_source = [node.change_ext(x, ext_in=_ext_in) for x in ext]
        if reentrant:
            for i in range(reentrant):
                self.source.append(out_source[i])
        tsk = self.create_task(name, node, out_source)

    for x in cls.ext_in:
        task_gen.mappings[x] = x_file
    return x_file
Example #9
def compile_template(line):
    """
        Compile a template expression into a python function (like jsps, but way shorter)
        """
    extr = []

    def repl(match):
        g = match.group
        if g("dollar"):
            return "$"
        elif g("backslash"):
            return "\\"
        elif g("subst"):
            extr.append(g("code"))
            return "<<|@|>>"
        return None

    line2 = reg_act.sub(repl, line)
    params = line2.split("<<|@|>>")
    assert extr

    indent = 0
    buf = []
    app = buf.append

    def app(txt):
        buf.append(indent * "\t" + txt)

    for x in range(len(extr)):
        if params[x]:
            app("lst.append(%r)" % params[x])

        f = extr[x]
        if f.startswith("if") or f.startswith("for"):
            app(f + ":")
            indent += 1
        elif f.startswith("py:"):
            app(f[3:])
        elif f.startswith("endif") or f.startswith("endfor"):
            indent -= 1
        elif f.startswith("else") or f.startswith("elif"):
            indent -= 1
            app(f + ":")
            indent += 1
        elif f.startswith("xml:"):
            app("lst.append(xml_escape(%s))" % f[4:])
        else:
            # app('lst.append((%s) or "cannot find %s")' % (f, f))
            app("lst.append(%s)" % f)

    if extr:
        if params[-1]:
            app("lst.append(%r)" % params[-1])

    fun = COMPILE_TEMPLATE % "\n\t".join(buf)
    # print(fun)
    return Task.funex(fun)
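
A hedged usage sketch: assuming this function sits in a module that already defines reg_act and COMPILE_TEMPLATE (as waflib's msvs tool does), it turns a ${...} template string into a python function:

# only constructs handled by the loop above are used: literal text,
# a ${xml:...} substitution and an ${if ...} / ${endif} pair
tpl = 'Name: ${xml:project.name}${if project.sources} (has sources)${endif}'
fun = compile_template(tpl)  # a function generated through Task.funex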
Example #10
def compile_template(line):
	"""
	Compile a template expression into a python function (like jsps, but way shorter)
	"""
	extr = []
	def repl(match):
		g = match.group
		if g('dollar'): return "$"
		elif g('backslash'):
			return "\\"
		elif g('subst'):
			extr.append(g('code'))
			return "<<|@|>>"
		return None

	line2 = reg_act.sub(repl, line)
	params = line2.split('<<|@|>>')
	assert(extr)


	indent = 0
	buf = []
	dvars = []
	app = buf.append

	def app(txt):
		buf.append(indent * '\t' + txt)

	for x in range(len(extr)):
		if params[x]:
			app("lst.append(%r)" % params[x])

		f = extr[x]
		if f.startswith('if') or f.startswith('for'):
			app(f + ':')
			indent += 1
		elif f.startswith('py:'):
			app(f[3:])
		elif f.startswith('endif') or f.startswith('endfor'):
			indent -= 1
		elif f.startswith('else') or f.startswith('elif'):
			indent -= 1
			app(f + ':')
			indent += 1
		elif f.startswith('xml:'):
			app('lst.append(xml_escape(%s))' % f[4:])
		else:
			#app('lst.append((%s) or "cannot find %s")' % (f, f))
			app('lst.append(%s)' % f)

	if extr:
		if params[-1]:
			app("lst.append(%r)" % params[-1])

	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
	#print(fun)
	return Task.funex(fun)
Example #11
	def run(self):
		run_str = """mkdir -p ${TGT[0].parent.abspath()} && echo '<?xml version="1.0"?>

<Library DMSystem="oaDMFileSys">
    <oaDMFileSys libReadOnly="No"
                 origFileSystem="Unix"/>
</Library>' >> ${TGT[0].abspath()}"""
		(f, dvars) = Task.compile_fun(run_str, False)
		return f(self)
Example #12
File: Build.py Project: zsx/waf
	def get_build_iterator(self):
		"""creates a generator object that returns tasks executable in parallel (yield)"""
		self.cur = 0

		if self.targets and self.targets != '*':
			(self._min_grp, self._exact_tg) = self.get_targets()

		global lazy_post
		if self.post_mode != POST_LAZY:
			while self.cur < len(self.groups):
				self.post_group()
				self.cur += 1
			self.cur = 0

		while self.cur < len(self.groups):
			# first post the task generators for the group
			if self.post_mode != POST_AT_ONCE:
				self.post_group()

			# then extract the tasks
			tasks = []
			for tg in self.groups[self.cur]:
				# TODO a try-except might be more efficient
				if isinstance(tg, Task.TaskBase):
					tasks.append(tg)
				else:
					tasks.extend(tg.tasks)

			# if the constraints are set properly (ext_in/ext_out, before/after)
			# the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
			# (but leave set_file_constraints for the installation step)
			#
			# if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary
			#
			Task.set_file_constraints(tasks)
			Task.set_precedence_constraints(tasks)

			self.cur += 1
			if not tasks: # return something else the build will stop
				continue
			yield tasks
		while 1:
			yield []
Example #13
	def get_build_iterator(self):
		"""
		Creates a Python generator object that returns lists of tasks that may be processed in parallel.

		:return: tasks which can be executed immediately
		:rtype: generator returning lists of :py:class:`waflib.Task.TaskBase`
		"""
		self.cur = 0

		if self.targets and self.targets != '*':
			(self._min_grp, self._exact_tg) = self.get_targets()

		global lazy_post
		if self.post_mode != POST_LAZY:
			while self.cur < len(self.groups):
				self.post_group()
				self.cur += 1
			self.cur = 0

		while self.cur < len(self.groups):
			# first post the task generators for the group
			if self.post_mode != POST_AT_ONCE:
				self.post_group()

			# then extract the tasks
			tasks = self.get_tasks_group(self.cur)
			# if the constraints are set properly (ext_in/ext_out, before/after)
			# the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
			# (but leave set_file_constraints for the installation step)
			#
			# if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary
			#
			Task.set_file_constraints(tasks)
			Task.set_precedence_constraints(tasks)

			self.cur_tasks = tasks
			self.cur += 1
			if not tasks: # return something else the build will stop
				continue
			yield tasks

		while 1:
			yield []
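
A minimal sketch of how a scheduler consumes this generator; the loop body is illustrative, waf's Runner.Parallel does the real work:

it = bld.get_build_iterator()
while True:
	batch = next(it)
	if not batch:
		break      # the trailing `while 1: yield []` signals that no groups remain
	for tsk in batch:
		pass       # hand each task to the scheduler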
Example #14
def declare_chain(name='', rule=None, reentrant=True, color='BLUE',
    ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
    """
    Create a new mapping and a task class for processing files by extension.
    See Tools/flex.py for an example.

    :param name: name for the task class
    :type name: string
    :param rule: function to execute or string to be compiled in a function
    :type rule: string or function
    :param reentrant: re-inject the output file in the process
    :type reentrant: bool
    :param color: color for the task output
    :type color: string
    :param ext_in: execute the task only after the files of such extensions are created
    :type ext_in: list of string
    :param ext_out: execute the task only before files of such extensions are processed
    :type ext_out: list of string
    :param before: execute instances of this task before classes of the given names
    :type before: list of string
    :param after: execute instances of this task after classes of the given names
    :type after: list of string
    :param decider: if present, use it to create the output nodes for the task
    :type decider: function
    :param scan: scanner function for the task
    :type scan: function
    :param install_path: installation path for the output nodes
    :type install_path: string
    """
    ext_in = Utils.to_list(ext_in)
    ext_out = Utils.to_list(ext_out)
    if not name:
        name = rule
    cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)

    def x_file(self, node):
        ext = decider and decider(self, node) or cls.ext_out
        if ext_in:
            _ext_in = ext_in[0]
        out_source = [node.change_ext(x, ext_in=_ext_in) for x in ext]
        if reentrant:
            for i in range(reentrant):
                self.source.append(out_source[i])
        tsk = self.create_task(name, node, out_source)
        if install_path:
            self.bld.install_files(install_path, out_source)
        return tsk

    for x in cls.ext_in:
        task_gen.mappings[x] = x_file
    return x_file
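
A hedged usage sketch of this wrapper, written as a small tool module; the extensions, command and install location are illustrative:

from waflib.TaskGen import declare_chain

declare_chain(
	name='cfg_expand',
	rule='cp ${SRC} ${TGT}',
	ext_in='.cfg.in',
	ext_out='.cfg',
	reentrant=False,               # do not feed the generated .cfg back into the chain
	install_path='${PREFIX}/etc',
	shell=True)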
Example #15
def process_rule(self):
    if not getattr(self, "rule", None):
        return
    name = str(getattr(self, "name", None) or self.target or self.rule)
    cls = Task.task_factory(
        name,
        self.rule,
        getattr(self, "vars", []),
        shell=getattr(self, "shell", True),
        color=getattr(self, "color", "BLUE"),
    )
    tsk = self.create_task(name)
    if getattr(self, "target", None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir()
                tsk.outputs.append(x)
        if getattr(self, "install_path", None):
            self.bld.install_files(self.install_path, tsk.outputs)
    if getattr(self, "source", None):
        tsk.inputs = self.to_nodes(self.source)
        self.source = []
    if getattr(self, "scan", None):
        cls.scan = self.scan
    if getattr(self, "cwd", None):
        tsk.cwd = self.cwd
    if getattr(self, "update_outputs", None) or getattr(self, "on_results", None):
        Task.update_outputs(cls)
    if getattr(self, "always", None):
        Task.always_run(cls)
    for x in ["after", "before", "ext_in", "ext_out"]:
        setattr(cls, x, getattr(self, x, []))
Example #16
def get_build_iterator(self):
    self.cur = 0
    if self.targets and self.targets != "*":
        (self._min_grp, self._exact_tg) = self.get_targets()
    global lazy_post
    if self.post_mode != POST_LAZY:
        while self.cur < len(self.groups):
            self.post_group()
            self.cur += 1
        self.cur = 0
    while self.cur < len(self.groups):
        if self.post_mode != POST_AT_ONCE:
            self.post_group()
        tasks = self.get_tasks_group(self.cur)
        Task.set_file_constraints(tasks)
        Task.set_precedence_constraints(tasks)
        self.cur_tasks = tasks
        self.cur += 1
        if not tasks:
            continue
        yield tasks
    while 1:
        yield []
Example #17
def make_pytest(self):
	"""
	Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``:

	- Paths in `pytest_paths` attribute are used to populate PYTHONPATH
	- Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH)
	"""
	nodes = self.to_nodes(self.pytest_source)
	tsk = self.create_task('utest', nodes)
	
	tsk.dep_nodes.extend(self.pytest_dep_nodes)
	if getattr(self, 'ut_str', None):
		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
		tsk.vars = lst + tsk.vars

	if getattr(self, 'ut_cwd', None):
		if isinstance(self.ut_cwd, str):
			# we want a Node instance
			if os.path.isabs(self.ut_cwd):
				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
			else:
				self.ut_cwd = self.path.make_node(self.ut_cwd)
	else:
		if tsk.inputs:
			self.ut_cwd = tsk.inputs[0].parent
		else:
			raise Errors.WafError("no valid input files for pytest task, check pytest_source value")

	if not self.ut_cwd.exists():
		self.ut_cwd.mkdir()

	if not hasattr(self, 'ut_env'):
		self.ut_env = dict(os.environ)
		def add_paths(var, lst):
			# Add list of paths to a variable, lst can contain strings or nodes
			lst = [ str(n) for n in lst ]
			Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
			self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')

		# Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH
		add_paths('PYTHONPATH', self.pytest_paths)

		if Utils.is_win32:
			add_paths('PATH', self.pytest_libpaths)
		elif Utils.unversioned_sys_platform() == 'darwin':
			add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths)
			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
		else:
			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
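
A hedged sketch of a task generator that drives this method, assuming the 'pytest' feature from waflib's extras; the use target and runner command are illustrative:

def build(bld):
	bld(features='pytest',
		pytest_source=bld.path.ant_glob('tests/test_*.py'),
		use='mypylib',
		ut_str='${PYTHON} -B -m pytest ${SRC}')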
Example #18
def make_test(self):
	"""Create the unit test task. There can be only one unit test task by task generator."""
	if not getattr(self, 'link_task', None):
		return

	tsk = self.create_task('utest', self.link_task.outputs)
	if getattr(self, 'ut_str', None):
		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
		tsk.vars = lst + tsk.vars

	if getattr(self, 'ut_cwd', None):
		if isinstance(self.ut_cwd, str):
			# we want a Node instance
			if os.path.isabs(self.ut_cwd):
				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
			else:
				self.ut_cwd = self.path.make_node(self.ut_cwd)
	else:
		self.ut_cwd = tsk.inputs[0].parent

	if not hasattr(self, 'ut_paths'):
		paths = []
		for x in self.tmp_use_sorted:
			try:
				y = self.bld.get_tgen_by_name(x).link_task
			except AttributeError:
				pass
			else:
				if not isinstance(y, ccroot.stlink_task):
					paths.append(y.outputs[0].parent.abspath())
		self.ut_paths = os.pathsep.join(paths) + os.pathsep

	if not hasattr(self, 'ut_env'):
		self.ut_env = dct = dict(os.environ)
		def add_path(var):
			dct[var] = self.ut_paths + dct.get(var,'')
		if Utils.is_win32:
			add_path('PATH')
		elif Utils.unversioned_sys_platform() == 'darwin':
			add_path('DYLD_LIBRARY_PATH')
			add_path('LD_LIBRARY_PATH')
		else:
			add_path('LD_LIBRARY_PATH')
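
For reference, this method is reached through the 'test' feature of waf's waf_unit_test tool. A minimal sketch (source, target and library names are illustrative):

def build(bld):
	bld.program(features='test',
		source='test_main.c',
		target='test_main',
		use='mylib',
		ut_cwd=bld.path)   # optional: run the test binary from the source directory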
Example #19
def declare_chain(name='', rule=None, reentrant=True, color='BLUE',
	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None):
	"""
	see Tools/flex.py for an example
	while I do not like such wrappers, some people really do
	"""

	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan)

	def x_file(self, node):
		ext = decider and decider(self, node) or cls.ext_out
		out_source = [node.change_ext(x) for x in ext]
		if reentrant:
			for i in range(reentrant):
				self.source.append(out_source[i])
		tsk = self.create_task(name, node, out_source)

	for x in cls.ext_in:
		task_gen.mappings[x] = x_file
	return x_file
Example #20
def declare_chain(name='',rule=None,reentrant=True,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
	ext_in=Utils.to_list(ext_in)
	ext_out=Utils.to_list(ext_out)
	if not name:
		name=rule
	cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
	def x_file(self,node):
		ext=decider and decider(self,node)or cls.ext_out
		if ext_in:
			_ext_in=ext_in[0]
		out_source=[node.change_ext(x,ext_in=_ext_in)for x in ext]
		if reentrant:
			for i in range(reentrant):
				self.source.append(out_source[i])
		tsk=self.create_task(name,node,out_source)
		if install_path:
			self.bld.install_files(install_path,out_source)
		return tsk
	for x in cls.ext_in:
		task_gen.mappings[x]=x_file
	return x_file
Example #21
def make_javatest(self):
	"""
	Creates a ``utest`` task with a populated environment for Java Unit test execution

	"""
	tsk = self.create_task('utest')
	tsk.set_run_after(self.javac_task)

	# Set the test input files, as waf_unit_test relies on them for some prints and log generation.
	# If jtest_source is present, use it as inputs; this is especially useful for passing TestNG
	# XML files that contain the test specification. Otherwise fall back to the test sources.
	if getattr(self, 'jtest_source', None):
		tsk.inputs = self.to_nodes(self.jtest_source)
	else:
		if self.javac_task.srcdir[0].exists():
			tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)

	if getattr(self, 'ut_str', None):
		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
		tsk.vars = lst + tsk.vars

	if getattr(self, 'ut_cwd', None):
		if isinstance(self.ut_cwd, str):
			# we want a Node instance
			if os.path.isabs(self.ut_cwd):
				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
			else:
				self.ut_cwd = self.path.make_node(self.ut_cwd)
	else:
		self.ut_cwd = self.bld.bldnode

	# Get parent CLASSPATH and add output dir of test, we run from wscript dir
	# We have to change it from list to the standard java -cp format (: separated)
	tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()

	if not self.ut_cwd.exists():
		self.ut_cwd.mkdir()

	if not hasattr(self, 'ut_env'):
		self.ut_env = dict(os.environ)
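
A hedged sketch of a generator using this method, assuming the 'javatest' feature from waflib's extras combined with 'javac'; the classpath contributions and runner command are illustrative:

def build(bld):
	bld(features='javac javatest',
		srcdir='test/',
		outdir='test',
		use='JUNIT mainprog',
		ut_str='java -cp ${CLASSPATH} org.junit.runner.JUnitCore AllTests',
		jtest_source=bld.path.ant_glob('test/*.xml'))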
Example #22
def process_rule(self):
	if not getattr(self,'rule',None):
		return
	name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule))
	try:
		cache=self.bld.cache_rule_attr
	except AttributeError:
		cache=self.bld.cache_rule_attr={}
	cls=None
	if getattr(self,'cache_rule','True'):
		try:
			cls=cache[(name,self.rule)]
		except KeyError:
			pass
	if not cls:
		rule=self.rule
		if hasattr(self,'chmod'):
			def chmod_fun(tsk):
				for x in tsk.outputs:
					os.chmod(x.abspath(),self.chmod)
			rule=(self.rule,chmod_fun)
		cls=Task.task_factory(name,rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None))
		if getattr(self,'scan',None):
			cls.scan=self.scan
		elif getattr(self,'deps',None):
			def scan(self):
				nodes=[]
				for x in self.generator.to_list(getattr(self.generator,'deps',None)):
					node=self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
					nodes.append(node)
				return[nodes,[]]
			cls.scan=scan
		if getattr(self,'always',None):
			cls.always_run=True
		if getattr(self,'timeout',None):
			cls.timeout=self.timeout
		for x in('after','before','ext_in','ext_out'):
			setattr(cls,x,getattr(self,x,[]))
		if getattr(self,'cache_rule','True'):
			cache[(name,self.rule)]=cls
		if getattr(self,'cls_str',None):
			setattr(cls,'__str__',self.cls_str)
		if getattr(self,'cls_keyword',None):
			setattr(cls,'keyword',self.cls_keyword)
	tsk=self.create_task(name)
	if getattr(self,'target',None):
		if isinstance(self.target,str):
			self.target=self.target.split()
		if not isinstance(self.target,list):
			self.target=[self.target]
		for x in self.target:
			if isinstance(x,str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir()
				tsk.outputs.append(x)
		if getattr(self,'install_path',None):
			self.install_task=self.add_install_files(install_to=self.install_path,install_from=tsk.outputs,chmod=getattr(self,'chmod',Utils.O644))
	if getattr(self,'source',None):
		tsk.inputs=self.to_nodes(self.source)
		self.source=[]
	if getattr(self,'cwd',None):
		tsk.cwd=self.cwd
Example #23
            u(self.env.get_flat('CMAKE_BLD_DIR'))
            u(self.env.get_flat('CMAKE_TARGET'))
            self.uid_ = m.digest()

        return self.uid_

    def __str__(self):
        return '%s %s' % (self.cmake.name, self.cmake_target)

    def keyword(self):
        return 'CMake Build'


# allow tasks to depend on possible headers or other resources if the user
# declares outputs for the cmake build
cmake_build_task = Task.update_outputs(cmake_build_task)

cmake_build_task.original_post_run = cmake_build_task.post_run


def _cmake_build_task_post_run(self):
    self.output_patterns = Utils.to_list(self.output_patterns)
    if not self.output_patterns:
        return self.original_post_run()
    bldnode = self.cmake.bldnode
    for node in bldnode.ant_glob(self.output_patterns, remove=False):
        self.set_outputs(node)
    return self.original_post_run()


cmake_build_task.post_run = _cmake_build_task_post_run
Example #24
def process_rule(self):
    """
    Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

            def build(bld):
                    bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')

    Main attributes processed:

    * rule: command to execute, it can be a tuple of strings for multiple commands
    * chmod: permissions for the resulting files (integer value such as Utils.O755)
    * shell: set to False to execute the command directly (default is True to use a shell)
    * scan: scanner function
    * vars: list of variables to trigger rebuilds, such as CFLAGS
    * cls_str: string to display when executing the task
    * cls_keyword: label to display when executing the task
    * cache_rule: by default, try to re-use similar classes, set to False to disable
    * source: list of Node or string objects representing the source files required by this task
    * target: list of Node or string objects representing the files that this task creates
    * cwd: current working directory (Node or string)
    * stdout: standard output, set to None to prevent waf from capturing the text
    * stderr: standard error, set to None to prevent waf from capturing the text
    * timeout: timeout for command execution (Python 3)
    * always: whether to always run the command (False by default)
    * deep_inputs: whether the task must depend on the input file tasks too (False by default)
    """
    if not getattr(self, "rule", None):
        return

    # create the task class
    name = str(
        getattr(self, "name", None) or self.target
        or getattr(self.rule, "__name__", self.rule))

    # or we can put the class in a cache for performance reasons
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}

    chmod = getattr(self, "chmod", None)
    shell = getattr(self, "shell", True)
    color = getattr(self, "color", "BLUE")
    scan = getattr(self, "scan", None)
    _vars = getattr(self, "vars", [])
    cls_str = getattr(self, "cls_str", None)
    cls_keyword = getattr(self, "cls_keyword", None)
    use_cache = getattr(self, "cache_rule", "True")
    deep_inputs = getattr(self, "deep_inputs", False)

    scan_val = has_deps = hasattr(self, "deps")
    if scan:
        scan_val = id(scan)

    key = Utils.h_list((
        name,
        self.rule,
        chmod,
        shell,
        color,
        cls_str,
        cls_keyword,
        scan_val,
        _vars,
        deep_inputs,
    ))

    cls = None
    if use_cache:
        try:
            cls = cache[key]
        except KeyError:
            pass
    if not cls:
        rule = self.rule
        if chmod is not None:

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), tsk.generator.chmod)

            if isinstance(rule, tuple):
                rule = list(rule)
                rule.append(chmod_fun)
                rule = tuple(rule)
            else:
                rule = (rule, chmod_fun)

        cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

        if cls_str:
            setattr(cls, "__str__", self.cls_str)

        if cls_keyword:
            setattr(cls, "keyword", self.cls_keyword)

        if deep_inputs:
            Task.deep_inputs(cls)

        if scan:
            cls.scan = self.scan
        elif has_deps:

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, "deps", None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            "Could not find %r (was it declared?)" % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan

        if use_cache:
            cache[key] = cls

    # now create one instance
    tsk = self.create_task(name)

    for x in ("after", "before", "ext_in", "ext_out"):
        setattr(tsk, x, getattr(self, x, []))

    if hasattr(self, "stdout"):
        tsk.stdout = self.stdout

    if hasattr(self, "stderr"):
        tsk.stderr = self.stderr

    if getattr(self, "timeout", None):
        tsk.timeout = self.timeout

    if getattr(self, "always", None):
        tsk.always_run = True

    if getattr(self, "target", None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir(
                )  # if a node was given, create the required folders
                tsk.outputs.append(x)
        if getattr(self, "install_path", None):
            self.install_task = self.add_install_files(
                install_to=self.install_path,
                install_from=tsk.outputs,
                chmod=getattr(self, "chmod", Utils.O644),
            )

    if getattr(self, "source", None):
        tsk.inputs = self.to_nodes(self.source)
        # bypass the execution of process_source by setting the source to an empty list
        self.source = []

    if getattr(self, "cwd", None):
        tsk.cwd = self.cwd

    if isinstance(tsk.run, functools.partial):
        # Python documentation says: "partial objects defined in classes
        # behave like static methods and do not transform into bound
        # methods during instance attribute look-up."
        tsk.run = functools.partial(tsk.run, tsk)
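
A hedged sketch exercising several of the attributes listed in the docstring above; the command, file names and variable list are illustrative:

from waflib import Utils

def build(bld):
	bld(rule='cp ${SRC} ${TGT}',
		source='wscript',
		target='bar.sh',
		chmod=Utils.O755,                  # appended to the rule as chmod_fun
		always=True,                       # sets tsk.always_run on the instance
		cls_keyword=lambda tsk: 'Copying', # label displayed while the task runs
		vars=['FLAGS'],
		shell=True)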
Example #25
            u(self.__class__.__name__)
            u(self.env.get_flat('CMAKE_BLD_DIR'))
            u(self.env.get_flat('CMAKE_TARGET'))
            self.uid_ = m.digest()

        return self.uid_

    def __str__(self):
        return '%s %s' % (self.cmake.name, self.cmake_target)

    def keyword(self):
        return 'CMake Build'

# allow tasks to depend on possible headers or other resources if the user
# declares outputs for the cmake build
cmake_build_task = Task.update_outputs(cmake_build_task)

cmake_build_task.original_post_run = cmake_build_task.post_run
def _cmake_build_task_post_run(self):
    self.output_patterns = Utils.to_list(self.output_patterns)
    if not self.output_patterns:
        return self.original_post_run()
    bldnode = self.cmake.bldnode
    for node in bldnode.ant_glob(self.output_patterns, remove=False):
        self.set_outputs(node)
    return self.original_post_run()
cmake_build_task.post_run = _cmake_build_task_post_run

class CMakeConfig(object):
    '''
    CMake configuration. This object shouldn't be instantiated directly. Use
Example #26
def process_rule(self):
    """
	Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
    if not getattr(self, 'rule', None):
        return

    # create the task class
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))

    # or we can put the class in a cache for performance reasons
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}

    cls = None
    if getattr(self, 'cache_rule', 'True'):
        try:
            cls = cache[(name, self.rule)]
        except KeyError:
            pass
    if not cls:

        rule = self.rule
        if hasattr(self, 'chmod'):

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), self.chmod)

            rule = (self.rule, chmod_fun)

        cls = Task.task_factory(name,
                                rule,
                                getattr(self, 'vars', []),
                                shell=getattr(self, 'shell', True),
                                color=getattr(self, 'color', 'BLUE'),
                                scan=getattr(self, 'scan', None))
        if getattr(self, 'scan', None):
            cls.scan = self.scan
        elif getattr(self, 'deps', None):

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan

        if getattr(self, 'update_outputs', None):
            Task.update_outputs(cls)

        if getattr(self, 'always', None):
            Task.always_run(cls)

        for x in ('after', 'before', 'ext_in', 'ext_out'):
            setattr(cls, x, getattr(self, x, []))

        if getattr(self, 'cache_rule', 'True'):
            cache[(name, self.rule)] = cls

        if getattr(self, 'cls_str', None):
            setattr(cls, '__str__', self.cls_str)

        if getattr(self, 'cls_keyword', None):
            setattr(cls, 'keyword', self.cls_keyword)

    # now create one instance
    tsk = self.create_task(name)

    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir(
                )  # if a node was given, create the required folders
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.bld.install_files(self.install_path,
                                   tsk.outputs,
                                   chmod=getattr(self, 'chmod', Utils.O644))

    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        # bypass the execution of process_source by setting the source to an empty list
        self.source = []

    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd
Example #27
def process_rule(self):
    """
	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
    if not getattr(self, 'rule', None):
        return

    # create the task class
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))

    # or we can put the class in a cache for performance reasons
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}

    chmod = getattr(self, 'chmod', None)
    shell = getattr(self, 'shell', True)
    color = getattr(self, 'color', 'BLUE')
    scan = getattr(self, 'scan', None)
    _vars = getattr(self, 'vars', [])
    cls_str = getattr(self, 'cls_str', None)
    cls_keyword = getattr(self, 'cls_keyword', None)
    use_cache = getattr(self, 'cache_rule', 'True')

    scan_val = has_deps = hasattr(self, 'deps')
    if scan:
        scan_val = id(scan)

    key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str,
                        cls_keyword, scan_val, _vars))

    cls = None
    if use_cache:
        try:
            cls = cache[key]
        except KeyError:
            pass
    if not cls:
        rule = self.rule
        if chmod is not None:

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), tsk.generator.chmod)

            if isinstance(rule, tuple):
                rule = list(rule)
                rule.append(chmod_fun)
                rule = tuple(rule)
            else:
                rule = (rule, chmod_fun)

        cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

        if cls_str:
            setattr(cls, '__str__', self.cls_str)

        if cls_keyword:
            setattr(cls, 'keyword', self.cls_keyword)

        if scan:
            cls.scan = self.scan
        elif has_deps:

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan

        # TODO use these values in the cache key if provided
        # (may cause excessive caching)
        for x in ('after', 'before', 'ext_in', 'ext_out'):
            setattr(cls, x, getattr(self, x, []))

        if use_cache:
            cache[key] = cls

    # now create one instance
    tsk = self.create_task(name)

    if getattr(self, 'timeout', None):
        tsk.timeout = self.timeout

    if getattr(self, 'always', None):
        tsk.always_run = True

    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir(
                )  # if a node was given, create the required folders
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.install_task = self.add_install_files(
                install_to=self.install_path,
                install_from=tsk.outputs,
                chmod=getattr(self, 'chmod', Utils.O644))

    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        # bypass the execution of process_source by setting the source to an empty list
        self.source = []

    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd

    if isinstance(tsk.run, functools.partial):
        # Python documentation says: "partial objects defined in classes
        # behave like static methods and do not transform into bound
        # methods during instance attribute look-up."
        tsk.run = functools.partial(tsk.run, tsk)
Example #28
                gen = bld.producer
                gen.outstanding.insert(0, task)
                gen.total += 1
                moctasks.append(task)
        self.run_after.update(set(moctasks))
        self.moc_done = 1

    run = Task.classes['cxx'].__dict__['run']


class trans_update(Task.Task):
    run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
    color = 'BLUE'


Task.update_outputs(trans_update)


class XMLHandler(ContentHandler):
    def __init__(self):
        self.buf = []
        self.files = []

    def startElement(self, name, attrs):
        if name == 'file':
            self.buf = []

    def endElement(self, name):
        if name == 'file':
            self.files.append(str(''.join(self.buf)))
Example #29
        elif normalise_varargs(parsed_old_sigs[s]) != normalise_varargs(parsed_sigs[s]):
            Logs.error('%s: symbol %s has changed - please update major version\n\told_signature: %s\n\tnew_signature: %s' % (
                libname, s, parsed_old_sigs[s], parsed_sigs[s]))
            got_error = True

    for s in parsed_sigs:
        if not s in parsed_old_sigs:
            Logs.error('%s: symbol %s has been added - please mark it _PRIVATE_ or update minor version\n\tsignature: %s' % (
                libname, s, parsed_sigs[s]))
            got_error = True

    if got_error:
        raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)


t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
t.quiet = True
# allow "waf --abi-check" to force re-checking the ABI
if '--abi-check' in sys.argv:
    t.always_run = True

@after('apply_link')
@feature('abi_check')
def abi_check(self):
    '''check that ABI matches saved signatures'''
    env = self.bld.env
    if not env.ABI_CHECK or self.abi_directory is None:
        return

    # if the platform doesn't support -fvisibility=hidden then the ABI
    # checks become fairly meaningless
Example #30
class pdflatex(tex):
    texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',
                                    shell=False)
Example #31
#!/usr/bin/env python
# encoding: utf-8
# Mark Coggeshall, 2010

"SAS support"

import os
from waflib import Task, Errors, Logs
from waflib.TaskGen import feature, before_method

sas_fun, _ = Task.compile_fun(
    'sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)


class sas(Task.Task):
    vars = ['SAS', 'SASFLAGS']

    def run(task):
        command = 'SAS'
        fun = sas_fun

        node = task.inputs[0]
        logfilenode = node.change_ext('.log')
        lstfilenode = node.change_ext('.lst')

        # set the cwd
        task.cwd = task.inputs[0].parent.get_src().abspath()
        Logs.debug('runner: %r on %r', command, node)

        SASINPUTS = node.parent.get_bld().abspath(
        ) + os.pathsep + node.parent.get_src().abspath() + os.pathsep
Example #32
	task.generator = self
	copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
	self.tasks.append(task)

	task.inputs = inputs
	task.outputs = outputs
	task.dep_vars = self.to_list(self.dep_vars)

	for dep in self.dependencies:
		assert dep is not self
		dep.post()
		for dep_task in dep.tasks:
			task.set_run_after(dep_task)

	if not task.inputs:
		# the case for svnversion, always run, and update the output nodes
		task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
		task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)

	# TODO the case with no outputs?

def post_run(self):
	for x in self.outputs:
		x.sig = Utils.h_file(x.abspath())

def runnable_status(self):
	return self.RUN_ME

Task.task_factory('copy', vars=[], func=action_process_file_func)

Example #33
def process_rule(self):
    if not getattr(self, 'rule', None):
        return
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}
    cls = None
    if getattr(self, 'cache_rule', 'True'):
        try:
            cls = cache[(name, self.rule)]
        except KeyError:
            pass
    if not cls:
        cls = Task.task_factory(name,
                                self.rule,
                                getattr(self, 'vars', []),
                                shell=getattr(self, 'shell', True),
                                color=getattr(self, 'color', 'BLUE'),
                                scan=getattr(self, 'scan', None))
        if getattr(self, 'scan', None):
            cls.scan = self.scan
        elif getattr(self, 'deps', None):

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan
        if getattr(self, 'update_outputs', None):
            Task.update_outputs(cls)
        if getattr(self, 'always', None):
            Task.always_run(cls)
        for x in ['after', 'before', 'ext_in', 'ext_out']:
            setattr(cls, x, getattr(self, x, []))
        if getattr(self, 'cache_rule', 'True'):
            cache[(name, self.rule)] = cls
    tsk = self.create_task(name)
    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir()
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.bld.install_files(self.install_path, tsk.outputs)
    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        self.source = []
    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd
Example #34
def declare_chain(name='',
                  rule=None,
                  reentrant=True,
                  color='BLUE',
                  ext_in=[],
                  ext_out=[],
                  before=[],
                  after=[],
                  decider=None,
                  scan=None,
                  install_path=None,
                  shell=False):
    """
	Create a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.

	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process
	:type reentrant: bool
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, use it to create the output nodes for the task
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	"""
    ext_in = Utils.to_list(ext_in)
    ext_out = Utils.to_list(ext_out)
    if not name:
        name = rule
    cls = Task.task_factory(name,
                            rule,
                            color=color,
                            ext_in=ext_in,
                            ext_out=ext_out,
                            before=before,
                            after=after,
                            scan=scan,
                            shell=shell)

    def x_file(self, node):
        ext = decider and decider(self, node) or cls.ext_out
        if ext_in:
            _ext_in = ext_in[0]
        out_source = [node.change_ext(x, ext_in=_ext_in) for x in ext]
        if reentrant:
            for i in range(reentrant):
                self.source.append(out_source[i])
        tsk = self.create_task(name, node, out_source)
        if install_path:
            self.bld.install_files(install_path, out_source)
        return tsk

    for x in cls.ext_in:
        task_gen.mappings[x] = x_file
    return x_file
Example #35
def process_rule(self):
    if not getattr(self, 'rule', None):
        return
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}
    chmod = getattr(self, 'chmod', None)
    shell = getattr(self, 'shell', True)
    color = getattr(self, 'color', 'BLUE')
    scan = getattr(self, 'scan', None)
    _vars = getattr(self, 'vars', [])
    cls_str = getattr(self, 'cls_str', None)
    cls_keyword = getattr(self, 'cls_keyword', None)
    use_cache = getattr(self, 'cache_rule', 'True')
    scan_val = has_deps = hasattr(self, 'deps')
    if scan:
        scan_val = id(scan)
    key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str,
                        cls_keyword, scan_val, _vars))
    cls = None
    if use_cache:
        try:
            cls = cache[key]
        except KeyError:
            pass
    if not cls:
        rule = self.rule
        if chmod is not None:

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), tsk.generator.chmod)

            if isinstance(rule, tuple):
                rule = list(rule)
                rule.append(chmod_fun)
                rule = tuple(rule)
            else:
                rule = (rule, chmod_fun)
        cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
        if cls_str:
            setattr(cls, '__str__', self.cls_str)
        if cls_keyword:
            setattr(cls, 'keyword', self.cls_keyword)
        if scan:
            cls.scan = self.scan
        elif has_deps:

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan
        for x in ('after', 'before', 'ext_in', 'ext_out'):
            setattr(cls, x, getattr(self, x, []))
        if use_cache:
            cache[key] = cls
    tsk = self.create_task(name)
    if getattr(self, 'timeout', None):
        tsk.timeout = self.timeout
    if getattr(self, 'always', None):
        tsk.always_run = True
    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir()
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.install_task = self.add_install_files(
                install_to=self.install_path,
                install_from=tsk.outputs,
                chmod=getattr(self, 'chmod', Utils.O644))
    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        self.source = []
    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd
    if isinstance(tsk.run, functools.partial):
        tsk.run = functools.partial(tsk.run, tsk)
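
A hedged wscript sketch (file names invented) showing how the optional attributes read by this process_rule variant can be supplied from a build script; it assumes a POSIX 'cp' on the PATH and is not part of the original example:

def build(bld):
    bld(rule='cp ${SRC} ${TGT}',
        source='wscript',
        target='bar.txt',
        chmod=0o644,                        # forwarded to the chmod_fun step above
        always=True,                        # sets always_run on the task instance
        timeout=60,                         # copied to tsk.timeout
        cls_keyword=lambda tsk: 'Copying')  # installed as the task class keyword()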
Example #36
                    cwd=self.env.GBENCHMARK_BUILD,
                    quiet=Context.BOTH,
                )
            return 0
        except WafError as e:
            print(e)
            if hasattr(e, 'stderr'):
                print('')
                print(e.stderr)
            return 1

    def __str__(self):
        return 'Google Benchmark'


gbenchmark_build = Task.always_run(Task.update_outputs(gbenchmark_build))

build_task = None


@feature('gbenchmark')
@before_method('process_use')
def append_gbenchmark_use(self):
    self.use = self.to_list(getattr(self, 'use', []))
    if 'GBENCHMARK' not in self.use:
        self.use.append('GBENCHMARK')


@feature('gbenchmark')
@after_method('process_source')
def wait_for_gbenchmark_build(self):
Example #37
    def get_build_iterator(self):
        if not self.files:
            while 1:
                yield super(MakeContext, self).get_build_iterator()

        for g in self.groups:
            for tg in g:
                try:
                    f = tg.post
                except AttributeError:
                    pass
                else:
                    f()

            provides = {}
            uses = {}
            all_tasks = []
            tasks = []
            for pat in self.files.split(','):
                matcher = self.get_matcher(pat)
                for tg in g:
                    if isinstance(tg, Task.Task):
                        lst = [tg]
                    else:
                        lst = tg.tasks
                    for tsk in lst:
                        all_tasks.append(tsk)

                        do_exec = False
                        for node in tsk.inputs:
                            try:
                                uses[node].append(tsk)
                            except KeyError:
                                uses[node] = [tsk]

                            if matcher(node, output=False):
                                do_exec = True
                                break

                        for node in tsk.outputs:
                            try:
                                provides[node].append(tsk)
                            except KeyError:
                                provides[node] = [tsk]

                            if matcher(node, output=True):
                                do_exec = True
                                break
                        if do_exec:
                            tasks.append(tsk)

            # so we have the tasks that we need to process, the list of all tasks,
            # the map of the tasks providing nodes, and the map of tasks using nodes

            if not tasks:
                # if there are no tasks matching, return everything in the current group
                result = all_tasks
            else:
                # this is like a big filter...
                result = set()
                seen = set()
                cur = set(tasks)
                while cur:
                    result |= cur
                    tosee = set()
                    for tsk in cur:
                        for node in tsk.inputs:
                            if node in seen:
                                continue
                            seen.add(node)
                            tosee |= set(provides.get(node, []))
                    cur = tosee
                result = list(result)

            Task.set_file_constraints(result)
            Task.set_precedence_constraints(result)
            yield result

        while 1:
            yield []
class pclint(lint,Task.Task):
    color = 'CYAN'

    run_str_pclint = '${PCLINT} ${PCLINTOPTS} ${SRC}'
    run_str_cc     = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -E -dM -dD -v ${CC_SRC_F}${SRC[0].abspath()}'

    (run_pclint, pclint_vars) = Task.compile_fun(run_str_pclint)
    (run_cc, cc_vars)         = Task.compile_fun(run_str_cc)

    vars = pclint_vars + cc_vars + [
      'PCLINT_OPT_WARNINGLEVEL',
      'PCLINT_OPT_PASSES',
      'PCLINT_OPT_OPTIONS',
      'PCLINT_OPT_ADDITIONALRULES',
    ]

    log_str = '[PCLINT] $name $out'

    sizeflags = [
        ('__SIZEOF_INT__' ,        '-si'),
        ('__SIZEOF_LONG__',        '-sl'),
        ('__SIZEOF_LONG_LONG__',   '-sll'),
        ('__SIZEOF_SHORT__',       '-ss'),
        ('__SIZEOF_FLOAT__',       '-sf'),
        ('__SIZEOF_DOUBLE__',      '-sd'),
        ('__SIZEOF_LONG_DOUBLE__', '-sld'),
        ('__SIZEOF_WCHAR_T__',     '-sw'),
        ('__SIZEOF_POINTER__',     '-sp'),
    ]

    @property
    def name(self):
        return self.generator.name

    @property
    def out(self):
        return self.outputs[1].nice_path()

    def run(self):
        global stdout_sep, stderr_sep

        tgen    = self.generator
        bld     = tgen.bld
        env     = self.env
        inputs  = self.inputs
        outputs = self.outputs

        for x in outputs:
            x.delete()

        source_nodes = inputs[:]

        pclintlnt_template_node = inputs[0]
        pclint_output_lnt_node  = inputs[1]

        pclint_rules_lnt_nodes = [x for x in inputs[2:] if x.suffix().lower() == '.lnt']
        source_nodes           = [x for x in inputs[2:] if x.suffix().lower() != '.lnt']

        pclintlnt_node, output_node = outputs

        defines, includes = self.extract_compiler_params()

        # pclint specific header files
        includes.insert(0, os.path.join(module_path, "pclint_includes"))

        tgenincludes = set(x.abspath() for x in tgen.to_incnodes(getattr(tgen, 'includes', [])))

        dct = dict()

        dct['INCPATHFLAGS'] = '\n'.join('-"i%s"' % x for x in includes)
        dct['LIBPATHFLAGS'] = '\n'.join('-libdir("%s")' % x for x in tgenincludes)

        dct['DEFINEFLAGS'] = '\n'.join(('-d%s=%s' if v[0] in '"' else '-"d%s=%s"') % (n, v) for n, v in defines)

        dct['OUTPUTFORMATFLAGS'] = pclint_output_lnt_node.read()

        defines_map = dict(defines)

        if env['CC_NAME'] in ('gcc', 'clang', 'xpic-llvm'):
            if '__SIZEOF_INT__' in defines_map:
                dct['SIZEFLAGS'] = '\n'.join('%s%s' % (f, defines_map.get(d,'0')) for d,f in self.sizeflags)
            else:
                # probably an old gcc:
                compiler_32bit_sizes = (
                  ('-si',  4),
                  ('-sl',  4),
                  ('-sll', 8),
                  ('-ss',  2),
                  ('-sf',  4),
                  ('-sd',  8),
                  ('-sld', 8),
                  ('-sw',  4),
                  ('-sp',  4),
                )

                dct['SIZEFLAGS'] = '/* Unable to extract type sizes from compiler, using built in 32 bit type size */\n' +\
                                    '\n'.join('%s%d' % x for x in compiler_32bit_sizes)
        else:
            dct['SIZEFLAGS'] = '/* Unable to extract type sizes from compiler "%s" */\n' % env['CC_NAME']

        dct['WARNINGLEVELFLAGS'] = '-w%u' % self.opts.warninglevel
        dct['PASSESFLAGS'] = '-passes(%u)' % self.opts.passes

        sep = '/*{0:*^76}*/\n'
        dct['RULEFLAGS'] = '\n'.join((sep.format(' %s ' % x.nice_path()) + x.read() + '\n' + sep.format(' %s ' % x.nice_path())) for x in pclint_rules_lnt_nodes)

        dct['SOURCEFILES'] = '\n'.join('"%s"' % x.path_from(bld.bldnode) for x in source_nodes)

        env.stash()
        try:
            template_string = string.Template(pclintlnt_template_node.read())

            pclintlnt_node.parent.mkdir()
            pclintlnt_node.write(template_string.substitute(dct))

            try:
                self.inputs = [pclintlnt_node]
                out, err = self.run_pclint()
            except Exception as e:
                out = getattr(e, 'stdout', None)
                err = getattr(e, 'stderr', None)
            finally:
                self.inputs = inputs
        finally:
Example #39
        cmd = [self.env['VALAC']] + self.env['VALAFLAGS']
        cmd.extend([a.abspath() for a in self.inputs])
        ret = self.exec_command(cmd, cwd=self.outputs[0].parent.abspath())
        if ret:
            return ret
        for x in self.outputs:
            if id(x.parent) != id(self.outputs[0].parent):
                shutil.move(self.outputs[0].parent.abspath() + os.sep + x.name,
                            x.abspath())
        if self.generator.dump_deps_node:
            self.generator.dump_deps_node.write('\n'.join(
                self.generator.packages))
        return ret


valac = Task.update_outputs(valac)


@taskgen_method
def init_vala_task(self):
    self.profile = getattr(self, 'profile', 'gobject')
    if self.profile == 'gobject':
        self.uselib = Utils.to_list(getattr(self, 'uselib', []))
        if not 'GOBJECT' in self.uselib:
            self.uselib.append('GOBJECT')

    def addflags(flags):
        self.env.append_value('VALAFLAGS', flags)

    if self.profile:
        addflags('--profile=%s' % self.profile)
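
A hedged wscript sketch for the vala fragment above; it assumes the stock waf vala tool, the package, target and file names are invented, and a real build may need additional library checks:

def configure(conf):
    conf.load('vala')
    conf.check_cfg(package='gobject-2.0', uselib_store='GOBJECT',
                   args='--cflags --libs')

def build(bld):
    bld.program(source='main.vala', target='app', packages='gobject-2.0')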
Example #40
                    cmd,
                    cwd=self.env.GBENCHMARK_BUILD,
                    quiet=Context.BOTH,
                )
            return 0
        except WafError as e:
            print(e)
            if hasattr(e, 'stderr'):
                print('')
                print(e.stderr)
            return 1

    def __str__(self):
        return 'Google Benchmark'

gbenchmark_build = Task.always_run(Task.update_outputs(gbenchmark_build))

build_task = None

@feature('gbenchmark')
@before_method('process_use')
def append_gbenchmark_use(self):
    self.use = self.to_list(getattr(self, 'use', []))
    if 'GBENCHMARK' not in self.use:
        self.use.append('GBENCHMARK')

@feature('gbenchmark')
@after_method('process_source')
def wait_for_gbenchmark_build(self):
    global build_task
Example #41
def declare_chain(name='',
                  rule=None,
                  reentrant=None,
                  color='BLUE',
                  ext_in=[],
                  ext_out=[],
                  before=[],
                  after=[],
                  decider=None,
                  scan=None,
                  install_path=None,
                  shell=False):
    """
	Create a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.

	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
	:type reentrant: int
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, use it to create the output nodes for the task
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	"""
    ext_in = Utils.to_list(ext_in)
    ext_out = Utils.to_list(ext_out)
    if not name:
        name = rule
    cls = Task.task_factory(name,
                            rule,
                            color=color,
                            ext_in=ext_in,
                            ext_out=ext_out,
                            before=before,
                            after=after,
                            scan=scan,
                            shell=shell)

    def x_file(self, node):
        ext = decider and decider(self, node) or cls.ext_out
        if ext_in:
            _ext_in = ext_in[0]

        tsk = self.create_task(name, node)
        cnt = 0

        keys = set(self.mappings.keys()) | set(self.__class__.mappings.keys())
        for x in ext:
            k = node.change_ext(x, ext_in=_ext_in)
            tsk.outputs.append(k)

            if reentrant != None:
                if cnt < int(reentrant):
                    self.source.append(k)
            else:
                # reinject downstream files into the build
                for y in keys:  # ~ nfile * nextensions :-/
                    if k.name.endswith(y):
                        self.source.append(k)
                        break
            cnt += 1

        if install_path:
            self.bld.install_files(install_path, tsk.outputs)
        return tsk

    for x in cls.ext_in:
        task_gen.mappings[x] = x_file
    return x_file
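
A hedged usage sketch for declare_chain; the FOOGEN variable, the '.foo' extension and the rule string are invented placeholders:

declare_chain(name='foogen',
              rule='${FOOGEN} ${SRC} -o ${TGT}',
              ext_in='.foo',
              ext_out='.c',
              before='c',   # compile the generated .c files afterwards
              reentrant=1)  # re-inject only the first output into self.source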
Example #42
def process_rule(self):
	"""
	Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
	if not getattr(self, 'rule', None):
		return

	# create the task class
	name = str(getattr(self, 'name', None) or self.target or self.rule)

	# or we can put the class in a cache for performance reasons
	try:
		cache = self.bld.cache_rule_attr
	except AttributeError:
		cache = self.bld.cache_rule_attr = {}

	cls = None
	if getattr(self, 'cache_rule', 'True'):
		try:
			cls = cache[(name, self.rule)]
		except KeyError:
			pass
	if not cls:
		cls = Task.task_factory(name, self.rule,
			getattr(self, 'vars', []),
			shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'),
			scan = getattr(self, 'scan', None))
		if getattr(self, 'scan', None):
			cls.scan = self.scan
		elif getattr(self, 'deps', None):
			def scan(self):
				nodes = []
				for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
					node = self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
					nodes.append(node)
				return [nodes, []]
			cls.scan = scan

		if getattr(self, 'update_outputs', None):
			Task.update_outputs(cls)

		if getattr(self, 'always', None):
			Task.always_run(cls)

		for x in ['after', 'before', 'ext_in', 'ext_out']:
			setattr(cls, x, getattr(self, x, []))

		if getattr(self, 'cache_rule', 'True'):
			cache[(name, self.rule)] = cls

	# now create one instance
	tsk = self.create_task(name)

	if getattr(self, 'target', None):
		if isinstance(self.target, str):
			self.target = self.target.split()
		if not isinstance(self.target, list):
			self.target = [self.target]
		for x in self.target:
			if isinstance(x, str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir() # if a node was given, create the required folders
				tsk.outputs.append(x)
		if getattr(self, 'install_path', None):
			# from waf 1.5
		# although convenient, it does not 1. allow renaming the target file, nor 2. handle symlinks
			# TODO remove in waf 1.7
			self.bld.install_files(self.install_path, tsk.outputs)

	if getattr(self, 'source', None):
		tsk.inputs = self.to_nodes(self.source)
		# bypass the execution of process_source by setting the source to an empty list
		self.source = []

	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd
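
A short, hedged example of the 'deps' attribute handled above; the file names are invented and 'cat' is assumed to be available:

def build(bld):
	bld(rule='cat ${SRC} > ${TGT}',
		source='in.txt',
		target='out.txt',
		deps='config.txt') # extra manual dependency picked up by the generated scan()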
Example #43
class tex(Task.Task):
    bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',
                                     shell=False)
    bibtex_fun.__doc__ = """
	Execute the program **bibtex**
	"""
    makeindex_fun, _ = Task.compile_fun(
        '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
    makeindex_fun.__doc__ = """
	Execute the program **makeindex**
	"""

    def exec_command(self, cmd, **kw):
        bld = self.generator.bld
        try:
            if not kw.get('cwd', None):
                kw['cwd'] = bld.cwd
        except AttributeError:
            bld.cwd = kw['cwd'] = bld.variant_dir
        return Utils.subprocess.Popen(cmd, **kw).wait()

    def scan_aux(self, node):
        nodes = [node]
        re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)

        def parse_node(node):
            code = node.read()
            for match in re_aux.finditer(code):
                path = match.group('file')
                found = node.parent.find_or_declare(path)
                if found and found not in nodes:
                    Logs.debug('tex: found aux node ' + found.abspath())
                    nodes.append(found)
                    parse_node(found)

        parse_node(node)
        return nodes

    def scan(self):
        node = self.inputs[0]
        nodes = []
        names = []
        seen = []
        if not node: return (nodes, names)

        def parse_node(node):
            if node in seen:
                return
            seen.append(node)
            code = node.read()
            global re_tex
            for match in re_tex.finditer(code):
                for path in match.group('file').split(','):
                    if path:
                        add_name = True
                        found = None
                        for k in exts_deps_tex:
                            Logs.debug('tex: trying %s%s' % (path, k))
                            found = node.parent.find_resource(path + k)
                            for tsk in self.generator.tasks:
                                if not found or found in tsk.outputs:
                                    break
                            else:
                                nodes.append(found)
                                add_name = False
                                for ext in exts_tex:
                                    if found.name.endswith(ext):
                                        parse_node(found)
                                        break
                        if add_name:
                            names.append(path)

        parse_node(node)
        for x in nodes:
            x.parent.get_bld().mkdir()
        Logs.debug("tex: found the following : %s and names %s" %
                   (nodes, names))
        return (nodes, names)

    def check_status(self, msg, retcode):
        if retcode != 0:
            raise Errors.WafError("%r command exit status %r" % (msg, retcode))

    def bibfile(self):
        for aux_node in self.aux_nodes:
            try:
                ct = aux_node.read()
            except (OSError, IOError):
                Logs.error('Error reading %s' % aux_node.abspath())
                continue
            if g_bibtex_re.findall(ct):
                Logs.warn('calling bibtex')
                self.env.env = {}
                self.env.env.update(os.environ)
                self.env.env.update({
                    'BIBINPUTS': self.TEXINPUTS,
                    'BSTINPUTS': self.TEXINPUTS
                })
                self.env.SRCFILE = aux_node.name[:-4]
                self.check_status('error when calling bibtex',
                                  self.bibtex_fun())

    def bibunits(self):
        try:
            bibunits = bibunitscan(self)
        except OSError:
            Logs.error('error bibunitscan')
        else:
            if bibunits:
                fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
                if fn:
                    Logs.warn('calling bibtex on bibunits')
                for f in fn:
                    self.env.env = {
                        'BIBINPUTS': self.TEXINPUTS,
                        'BSTINPUTS': self.TEXINPUTS
                    }
                    self.env.SRCFILE = f
                    self.check_status('error when calling bibtex',
                                      self.bibtex_fun())

    def makeindex(self):
        try:
            idx_path = self.idx_node.abspath()
            os.stat(idx_path)
        except OSError:
            Logs.warn('index file %s absent, not calling makeindex' % idx_path)
        else:
            Logs.warn('calling makeindex')
            self.env.SRCFILE = self.idx_node.name
            self.env.env = {}
            self.check_status('error when calling makeindex %s' % idx_path,
                              self.makeindex_fun())

    def bibtopic(self):
        p = self.inputs[0].parent.get_bld()
        if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
            self.aux_nodes += p.ant_glob('*[0-9].aux')

    def run(self):
        env = self.env
        if not env['PROMPT_LATEX']:
            env.append_value('LATEXFLAGS', '-interaction=batchmode')
            env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
            env.append_value('XELATEXFLAGS', '-interaction=batchmode')
        fun = self.texfun
        node = self.inputs[0]
        srcfile = node.abspath()
        texinputs = self.env.TEXINPUTS or ''
        self.TEXINPUTS = node.parent.get_bld().abspath(
        ) + os.pathsep + node.parent.get_src().abspath(
        ) + os.pathsep + texinputs + os.pathsep
        self.cwd = self.inputs[0].parent.get_bld().abspath()
        Logs.warn('first pass on %s' % self.__class__.__name__)
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        self.check_status('error when calling latex', fun())
        self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
        self.idx_node = node.change_ext('.idx')
        self.bibtopic()
        self.bibfile()
        self.bibunits()
        self.makeindex()
        hash = ''
        for i in range(10):
            prev_hash = hash
            try:
                hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
                hash = Utils.h_list(hashes)
            except (OSError, IOError):
                Logs.error('could not read aux.h')
                pass
            if hash and hash == prev_hash:
                break
            Logs.warn('calling %s' % self.__class__.__name__)
            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
            self.env.SRCFILE = srcfile
            self.check_status(
                'error when calling %s' % self.__class__.__name__, fun())
Example #44
        self.deps = []
        self.scanned = set()
        for node in task.inputs:
            self.scan_file(task, node)
        return self.deps, []

dsp_scanner = FaustScanner()

def scan_dsp(task):
    return dsp_scanner.scan(task)

# definition of task "dsp"
Task.task_factory(
    name    = 'dsp',
    func    = dsp2cc,
    color   = 'BLUE',
    ext_in  = '.dsp',
    ext_out = '.cc',
    before  = 'cxx',
    ).scan = scan_dsp

@extension('.dsp')
def dsp_file(self, node):
    tsk = self.create_task('dsp')
    tsk.proc = self.bld.bldnode.find_node(self.proc)
    tsk.proc_args = getattr(self, "proc_args", [])
    tsk.gen_dir_suffix = getattr(self, "gen_dir_suffix", "-generated")
    tsk.set_inputs(node)
    tsk.dep_vars = ['DSP2CC_ARGS']
    tsk.env['DSP2CC_ARGS'] = tsk.proc_args
    parent = node.parent
    o = node.change_ext('.cc')
Example #45
def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
	"""
	Create a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.

	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
	:type reentrant: int
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, use it to create the output nodes for the task
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	"""
	ext_in = Utils.to_list(ext_in)
	ext_out = Utils.to_list(ext_out)
	if not name:
		name = rule
	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)

	def x_file(self, node):
		ext = decider and decider(self, node) or cls.ext_out
		if ext_in:
			_ext_in = ext_in[0]

		tsk = self.create_task(name, node)
		cnt = 0

		keys = list(self.mappings.keys()) + list(self.__class__.mappings.keys())
		for x in ext:
			k = node.change_ext(x, ext_in=_ext_in)
			tsk.outputs.append(k)

			if reentrant != None:
				if cnt < int(reentrant):
					self.source.append(k)
			else:
				for y in keys: # ~ nfile * nextensions :-/
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt += 1

		if install_path:
			self.bld.install_files(install_path, tsk.outputs)
		return tsk

	for x in cls.ext_in:
		task_gen.mappings[x] = x_file
	return x_file
	def get_build_iterator(self):
		if not self.files:
			while 1:
				yield super(MakeContext, self).get_build_iterator()

		for g in self.groups:
			for tg in g:
				try:
					f = tg.post
				except AttributeError:
					pass
				else:
					f()

			provides = {}
			uses = {}
			all_tasks = []
			tasks = []
			for pat in self.files.split(','):
				matcher = self.get_matcher(pat)
				for tg in g:
					if isinstance(tg, Task.TaskBase):
						lst = [tg]
					else:
						lst = tg.tasks
					for tsk in lst:
						all_tasks.append(tsk)

						do_exec = False
						for node in getattr(tsk, 'inputs', []):
							try:
								uses[node].append(tsk)
							except KeyError:
								uses[node] = [tsk]

							if matcher(node, output=False):
								do_exec = True
								break

						for node in getattr(tsk, 'outputs', []):
							try:
								provides[node].append(tsk)
							except KeyError:
								provides[node] = [tsk]

							if matcher(node, output=True):
								do_exec = True
								break
						if do_exec:
							tasks.append(tsk)

			# so we have the tasks that we need to process, the list of all tasks,
			# the map of the tasks providing nodes, and the map of tasks using nodes

			if not tasks:
				# if there are no tasks matching, return everything in the current group
				result = all_tasks
			else:
				# this is like a big filter...
				result = set([])
				seen = set([])
				cur = set(tasks)
				while cur:
					result |= cur
					tosee = set([])
					for tsk in cur:
						for node in getattr(tsk, 'inputs', []):
							if node in seen:
								continue
							seen.add(node)
							tosee |= set(provides.get(node, []))
					cur = tosee
				result = list(result)

			Task.set_file_constraints(result)
			Task.set_precedence_constraints(result)
			yield result

		while 1:
			yield []
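
The 'while cur' loop above computes a transitive closure over the 'provides' map; a standalone sketch of the same idea, with illustrative names only:

def closure(tasks, provides):
	# start from the matched tasks and repeatedly pull in every task that
	# produces one of their input nodes, until nothing new is found
	result, seen, cur = set(), set(), set(tasks)
	while cur:
		result |= cur
		tosee = set()
		for tsk in cur:
			for node in getattr(tsk, 'inputs', []):
				if node in seen:
					continue
				seen.add(node)
				tosee |= set(provides.get(node, []))
		cur = tosee
	return list(result)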
Example #47
    input = outf = output = None  # ensure the names exist for the finally block below
    try:
        input = open(infile, 'rb')
        outf = open(outfile, 'wb')
        output = gzip.GzipFile(os.path.basename(infile), fileobj=outf)
        output.write(input.read())
    finally:
        if input:
            input.close()
        if output:  # Must close before outf to flush compressed data.
            output.close()
        if outf:
            outf.close()


Task.task_factory('man', gzip_func, color='BLUE')


@feature('man')
@before_method('process_source')
def process_man(self):
    source = self.to_nodes(getattr(self, 'source', []))
    self.source = []

    section = getattr(self, 'section', None)

    for node in source:
        if not node:
            raise Errors.BuildError(
                'cannot find input file %s for processing' % node)
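
A hedged usage sketch for the 'man' feature above; the page name and section are invented:

def build(bld):
    bld(features='man', source='myprog.1', section='1')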
Example #48
							h_node=x.find_node(base2)
							if h_node:
								break
						if h_node:
							m_node=h_node.change_ext(k+'.moc')
							break
			if not h_node:
				raise Errors.WafError('No source found for %r which is a moc file'%d)
			task=self.create_moc_task(h_node,m_node)
			moctasks.append(task)
		self.run_after.update(set(moctasks))
		self.moc_done=1
class trans_update(Task.Task):
	run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
	color='BLUE'
Task.update_outputs(trans_update)
class XMLHandler(ContentHandler):
	def __init__(self):
		self.buf=[]
		self.files=[]
	def startElement(self,name,attrs):
		if name=='file':
			self.buf=[]
	def endElement(self,name):
		if name=='file':
			self.files.append(str(''.join(self.buf)))
	def characters(self,cars):
		self.buf.append(cars)
@extension(*EXT_RCC)
def create_rcc_task(self,node):
	rcnode=node.change_ext('_rc.cpp')
class splint(lint,Task.Task):
    color = 'CYAN'

    run_str_splint = '${SPLINT} ${SPLINTOPTS} ${SPLINTCPPPATH_ST:INCPATHS} ${SPLINTDEFINES_ST:DEFINES} ${SPLINTDUMPOPTS} ${SRC}'
    run_str_cc     = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -E -dM -dD -v ${CC_SRC_F}${SRC[0].abspath()}'

    (run_splint, splint_vars) = Task.compile_fun(run_str_splint)
    (run_cc, cc_vars)         = Task.compile_fun(run_str_cc)

    vars = splint_vars + cc_vars

    log_str = '[SPLINT] $name $out'

    @property
    def name(self):
        return self.generator.name

    @property
    def out(self):
        return self.outputs[1].nice_path()

    def run(self):
        global stdout_sep, stderr_sep

        tgen    = self.generator
        env     = self.env
        inputs  = self.inputs
        outputs = self.outputs

        for x in outputs:
            x.delete()

        splintrc_template_node = inputs[0]
        source_nodes           = inputs[1:]

        splintrc_node, output_node = outputs

        defines, includes = self.extract_compiler_params()

        # for some reason splint seems to completely ignore stdint.h from gcc
        # I have no idea why. Therefore we must supply our own stdint.h
        # in order to have splint run properly
        includes.insert(0, os.path.join(module_path, "splint_includes"))

        tgenincludes = set(x.abspath() for x in tgen.to_incnodes(getattr(tgen, 'includes', [])))

        sysincludes = list(x for x in includes if x not in tgenincludes)

        # splint seems to have problems with resolving some headers:
        # It seems to completely ignore stdint.h from compiler files.

        dct = dict()

        # the following does not work
        # splint does not support spaces in splintrc
        dct['INCPATHFLAGS'] = ''
        dct['SYSDIRSFLAG']  = ''

        #dct['INCPATHFLAGS'] = os.linesep.join('+I"%s"' % x.replace(' ', '\\\\ ') for x in gccincludes)
        #dct['SYSDIRSFLAG'] = '-sys-dirs %s' % os.pathsep.join(sysincludes)

        env.stash()
        try:
            template_string = string.Template(splintrc_template_node.read())
            splintrc_node.write(template_string.substitute(dct))

            env.append_value('SPLINTOPTS', ['-f',splintrc_node.abspath()])

            # the following two are required because of splint problems
            # currently it seems not to work when they are specified via
            # splintrc
            if not dct['INCPATHFLAGS']:
                env['INCPATHS'] = includes

            if not dct['SYSDIRSFLAG']:
                env.append_value('SPLINTOPTS', ['-systemdirs', os.pathsep.join(sysincludes)])

            try:
                self.inputs = source_nodes
                out, err = self.run_splint()
            except Exception as e:
                out = getattr(e, 'stdout', None)
                err = getattr(e, 'stderr', None)
            finally:
                self.inputs = inputs
        finally:
Example #50
    self.source.append(c_node)
    if valatask.is_lib:
        headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
        self.install_vheader = self.bld.install_files(valatask.header_path,
                                                      headers_list, self.env)
        vapi_list = [
            o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))
        ]
        self.install_vapi = self.bld.install_files(valatask.vapi_path,
                                                   vapi_list, self.env)
        gir_list = [o for o in valatask.outputs if o.suffix() == ".gir"]
        self.install_gir = self.bld.install_files(valatask.gir_path, gir_list,
                                                  self.env)


valac_task = Task.update_outputs(valac_task)


def find_valac(self, valac_name, min_version):
    valac = self.find_program(valac_name, var='VALAC')
    try:
        output = self.cmd_and_log(valac + ' --version')
    except Exception:
        valac_version = None
    else:
        ver = re.search(r'\d+\.\d+\.\d+', output).group(0).split('.')
        valac_version = tuple([int(x) for x in ver])
    self.msg('Checking for %s version >= %r' % (valac_name, min_version),
             valac_version, valac_version and valac_version >= min_version)
    if valac and valac_version < min_version:
        self.fatal("%s version %r is too old, need >= %r" %
Example #51
#! /usr/bin/env python
# encoding: utf-8

import waflib.Tools.asm
from waflib.TaskGen import feature, extension
from waflib import Task

def configure(conf):
	yasm=conf.find_program(['yasm'],var='YASM',path_list=['/usr/bin','/usr/local/bin'])
	if not yasm:conf.fatal('could not find yasm, install it or set PATH env var')
	conf.env.AS_TGT_F=['-o']
	conf.env.ASLNK_TGT_F=['-o']

def apply_yasm_vars(self):
	self.env.append_value('YASM_FLAGS',self.to_list(getattr(self,'yasm_flags',[])))
	self.env.append_value('YASM_INCLUDES'," -I".join([''] + self.to_list(self.env.INCPATHS)).split())
feature('asm')(apply_yasm_vars)

Task.simple_task_type('yasm','${YASM} ${YASM_FLAGS} ${YASM_INCLUDES} ${SRC} -o ${TGT}',color='BLUE',ext_out='.o',shell=False)

def yasm_hook(self,node):
	self.meths.append('apply_yasm_vars')
	return self.create_compiled_task('yasm',node)

extension('.s')(yasm_hook)
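
A hedged wscript sketch using the yasm tool above; the file names are invented, and it assumes the snippet is available as a tool named yasm:

def configure(conf):
	conf.load('gcc')
	conf.load('yasm')   # add tooldir='.' if the snippet is kept next to the wscript

def build(bld):
	bld.program(source='main.c lowlevel.s', target='app')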
Example #52
	def run(self):
		cmd = self.env.VALAC + self.env.VALAFLAGS
		resources = getattr(self, 'vala_exclude', [])
		cmd.extend([a.abspath() for a in self.inputs if a not in resources])
		ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())

		if ret:
			return ret

		if self.generator.dump_deps_node:
			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))

		return ret

valac = Task.update_outputs(valac) # no decorators for python2 classes

@taskgen_method
def init_vala_task(self):
	"""
	Initializes the vala task with the relevant data (acts as a constructor)
	"""
	self.profile = getattr(self, 'profile', 'gobject')

	if self.profile == 'gobject':
		self.uselib = Utils.to_list(getattr(self, 'uselib', []))
		if not 'GOBJECT' in self.uselib:
			self.uselib.append('GOBJECT')

	def addflags(flags):
		self.env.append_value('VALAFLAGS', flags)
		headers_list=[o for o in valatask.outputs if o.suffix()==".h"]
		try:
			self.install_vheader.source=headers_list
		except AttributeError:
			self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env)
		vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))]
		try:
			self.install_vapi.source=vapi_list
		except AttributeError:
			self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env)
		gir_list=[o for o in valatask.outputs if o.suffix()==".gir"]
		try:
			self.install_gir.source=gir_list
		except AttributeError:
			self.install_gir=self.bld.install_files(valatask.gir_path,gir_list,self.env)
valac=Task.update_outputs(valac)
def find_valac(self,valac_name,min_version):
	valac=self.find_program(valac_name,var='VALAC')
	try:
		output=self.cmd_and_log(valac+' --version')
	except Exception:
		valac_version=None
	else:
		ver=re.search(r'\d+\.\d+\.\d+',output).group(0).split('.')
		valac_version=tuple([int(x)for x in ver])
	self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version)
	if valac and valac_version<min_version:
		self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version))
	self.env['VALAC_VERSION']=valac_version
	return valac
def check_vala(self,min_version=(0,8,0),branch=None):
Example #54
class pdflatex(tex):
    "Compiles PdfLaTeX files"
    texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',
                                    shell=False)
Example #55
class xelatex(tex):
    "XeLaTeX files"
    texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',
                                    shell=False)
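
Following the same pattern, a hypothetical LuaLaTeX variant could be declared like this (LUALATEX and LUALATEXFLAGS are assumed configuration variables, not part of the original examples):

class lualatex(tex):
    "Compiles LuaLaTeX files"
    texfun, vars = Task.compile_fun('${LUALATEX} ${LUALATEXFLAGS} ${SRCFILE}',
                                    shell=False)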
Example #56
class tex(Task.Task):
    """
	Compiles a tex/latex file.

	.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
	"""

    bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',
                                     shell=False)
    bibtex_fun.__doc__ = """
	Execute the program **bibtex**
	"""

    makeindex_fun, _ = Task.compile_fun(
        '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
    makeindex_fun.__doc__ = """
	Execute the program **makeindex**
	"""

    makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',
                                             shell=False)
    makeglossaries_fun.__doc__ = """
	Execute the program **makeglossaries**
	"""

    def exec_command(self, cmd, **kw):
        """
		Executes TeX commands without buffering (latex may prompt for inputs)

		:return: the return code
		:rtype: int
		"""
        if self.env.PROMPT_LATEX:
            # capture the outputs in configuration tests
            kw['stdout'] = kw['stderr'] = None
        return super(tex, self).exec_command(cmd, **kw)

    def scan_aux(self, node):
        """
		Recursive regex-based scanner that finds included auxiliary files.
		"""
        nodes = [node]
        re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)

        def parse_node(node):
            code = node.read()
            for match in re_aux.finditer(code):
                path = match.group('file')
                found = node.parent.find_or_declare(path)
                if found and found not in nodes:
                    Logs.debug('tex: found aux node %r', found)
                    nodes.append(found)
                    parse_node(found)

        parse_node(node)
        return nodes

    def scan(self):
        """
		Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

		Depending on your needs you might want:

		* to change re_tex::

			from waflib.Tools import tex
			tex.re_tex = myregex

		* or to change the method scan from the latex tasks::

			from waflib.Task import classes
			classes['latex'].scan = myscanfunction
		"""
        node = self.inputs[0]

        nodes = []
        names = []
        seen = []
        if not node:
            return (nodes, names)

        def parse_node(node):
            if node in seen:
                return
            seen.append(node)
            code = node.read()
            for match in re_tex.finditer(code):

                multibib = match.group('type')
                if multibib and multibib.startswith('bibliography'):
                    multibib = multibib[len('bibliography'):]
                    if multibib.startswith('style'):
                        continue
                else:
                    multibib = None

                for path in match.group('file').split(','):
                    if path:
                        add_name = True
                        found = None
                        for k in exts_deps_tex:

                            # issue 1067, scan in all texinputs folders
                            for up in self.texinputs_nodes:
                                Logs.debug('tex: trying %s%s', path, k)
                                found = up.find_resource(path + k)
                                if found:
                                    break

                            for tsk in self.generator.tasks:
                                if not found or found in tsk.outputs:
                                    break
                            else:
                                nodes.append(found)
                                add_name = False
                                for ext in exts_tex:
                                    if found.name.endswith(ext):
                                        parse_node(found)
                                        break

                            # multibib stuff
                            if found and multibib and found.name.endswith(
                                    '.bib'):
                                try:
                                    self.multibibs.append(found)
                                except AttributeError:
                                    self.multibibs = [found]

                            # no break, people are crazy
                        if add_name:
                            names.append(path)

        parse_node(node)

        for x in nodes:
            x.parent.get_bld().mkdir()

        Logs.debug("tex: found the following : %s and names %s", nodes, names)
        return (nodes, names)

    def check_status(self, msg, retcode):
        """
		Checks an exit status and raise an error with a particular message

		:param msg: message to display if the code is non-zero
		:type msg: string
		:param retcode: condition
		:type retcode: boolean
		"""
        if retcode != 0:
            raise Errors.WafError('%r command exit status %r' % (msg, retcode))

    def info(self, *k, **kw):
        try:
            info = self.generator.bld.conf.logger.info
        except AttributeError:
            info = Logs.info
        info(*k, **kw)

    def bibfile(self):
        """
		Parses *.aux* files to find bibfiles to process.
		If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
		"""
        for aux_node in self.aux_nodes:
            try:
                ct = aux_node.read()
            except EnvironmentError:
                Logs.error('Error reading %s', aux_node.abspath())
                continue

            if g_bibtex_re.findall(ct):
                self.info('calling bibtex')

                self.env.env = {}
                self.env.env.update(os.environ)
                self.env.env.update({
                    'BIBINPUTS': self.texinputs(),
                    'BSTINPUTS': self.texinputs()
                })
                self.env.SRCFILE = aux_node.name[:-4]
                self.check_status('error when calling bibtex',
                                  self.bibtex_fun())

        for node in getattr(self, 'multibibs', []):
            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({
                'BIBINPUTS': self.texinputs(),
                'BSTINPUTS': self.texinputs()
            })
            self.env.SRCFILE = node.name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())

    def bibunits(self):
        """
		Parses *.aux* file to find bibunit files. If there are bibunit files,
		runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
		"""
        try:
            bibunits = bibunitscan(self)
        except OSError:
            Logs.error('error bibunitscan')
        else:
            if bibunits:
                fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
                if fn:
                    self.info('calling bibtex on bibunits')

                for f in fn:
                    self.env.env = {
                        'BIBINPUTS': self.texinputs(),
                        'BSTINPUTS': self.texinputs()
                    }
                    self.env.SRCFILE = f
                    self.check_status('error when calling bibtex',
                                      self.bibtex_fun())

    def makeindex(self):
        """
		Searches the filesystem for *.idx* files to process. If present,
		runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
		"""
        self.idx_node = self.inputs[0].change_ext('.idx')
        try:
            idx_path = self.idx_node.abspath()
            os.stat(idx_path)
        except OSError:
            self.info('index file %s absent, not calling makeindex', idx_path)
        else:
            self.info('calling makeindex')

            self.env.SRCFILE = self.idx_node.name
            self.env.env = {}
            self.check_status('error when calling makeindex %s' % idx_path,
                              self.makeindex_fun())

    def bibtopic(self):
        """
		Lists additional .aux files from the bibtopic package
		"""
        p = self.inputs[0].parent.get_bld()
        if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
            self.aux_nodes += p.ant_glob('*[0-9].aux')

    def makeglossaries(self):
        """
		Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
		"""
        src_file = self.inputs[0].abspath()
        base_file = os.path.basename(src_file)
        base, _ = os.path.splitext(base_file)
        for aux_node in self.aux_nodes:
            try:
                ct = aux_node.read()
            except EnvironmentError:
                Logs.error('Error reading %s', aux_node.abspath())
                continue

            if g_glossaries_re.findall(ct):
                if not self.env.MAKEGLOSSARIES:
                    raise Errors.WafError(
                        "The program 'makeglossaries' is missing!")
                Logs.warn('calling makeglossaries')
                self.env.SRCFILE = base
                self.check_status(
                    'error when calling makeglossaries %s' % base,
                    self.makeglossaries_fun())
                return

    def texinputs(self):
        """
		Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables

		:rtype: string
		"""
        return os.pathsep.join([k.abspath()
                                for k in self.texinputs_nodes]) + os.pathsep

    def run(self):
        """
		Runs the whole TeX build process

		Multiple passes are required depending on the usage of cross-references,
		bibliographies, glossaries, indexes and additional contents
		The appropriate TeX compiler is called until the *.aux* files stop changing.
		"""
        env = self.env

        if not env.PROMPT_LATEX:
            env.append_value('LATEXFLAGS', '-interaction=batchmode')
            env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
            env.append_value('XELATEXFLAGS', '-interaction=batchmode')

        # important, set the cwd for everybody
        self.cwd = self.inputs[0].parent.get_bld()

        self.info('first pass on %s', self.__class__.__name__)

        # Hash .aux files before even calling the LaTeX compiler
        cur_hash = self.hash_aux_nodes()

        self.call_latex()

        # Find the .aux files again since bibtex processing can require it
        self.hash_aux_nodes()

        self.bibtopic()
        self.bibfile()
        self.bibunits()
        self.makeindex()
        self.makeglossaries()

        for i in range(10):
            # There is no need to call latex again if the .aux hash value has not changed
            prev_hash = cur_hash
            cur_hash = self.hash_aux_nodes()
            if not cur_hash:
                Logs.error('No aux.h to process')
            if cur_hash and cur_hash == prev_hash:
                break

            # run the command
            self.info('calling %s', self.__class__.__name__)
            self.call_latex()

    def hash_aux_nodes(self):
        """
		Returns a hash of the .aux file contents

		:rtype: string or bytes
		"""
        try:
            self.aux_nodes
        except AttributeError:
            try:
                self.aux_nodes = self.scan_aux(
                    self.inputs[0].change_ext('.aux'))
            except IOError:
                return None
        return Utils.h_list(
            [Utils.h_file(x.abspath()) for x in self.aux_nodes])

    def call_latex(self):
        """
		Runs the TeX compiler once
		"""
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.texinputs()})
        self.env.SRCFILE = self.inputs[0].abspath()
        self.check_status('error when calling latex', self.texfun())
	task.generator = self
	copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
	self.tasks.append(task)

	task.inputs = inputs
	task.outputs = outputs
	task.dep_vars = self.to_list(self.dep_vars)

	for dep in self.dependencies:
		assert dep is not self
		dep.post()
		for dep_task in dep.tasks:
			task.set_run_after(dep_task)

	if not task.inputs:
		# the case for svnversion, always run, and update the output nodes
		task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
		task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)

	# TODO the case with no outputs?

def post_run(self):
	for x in self.outputs:
		x.sig = Utils.h_file(x.abspath())

def runnable_status(self):
	return self.RUN_ME

Task.task_factory('copy', vars=[], func=action_process_file_func)
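
The type(Task.TaskBase.run)(...) calls a few lines above bind plain functions as methods of a single task instance; a standalone sketch of the same trick with types.MethodType, using illustrative names only:

import types

class Demo(object):
	pass

def runnable_status(self):
	return 'RUN_ME'

d = Demo()
d.runnable_status = types.MethodType(runnable_status, d)  # Python 3 form
# Python 2 additionally takes the class: types.MethodType(runnable_status, d, Demo)
print(d.runnable_status())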

Example #58
						link = self.generator.link_task
					except AttributeError:
						pass
					else:
						if not tsk.outputs[0] in link.inputs:
							link.inputs.append(tsk.outputs[0])
							link.set_run_after(tsk)

							# any change in the order of the input nodes may cause a recompilation
							link.inputs.sort(key=lambda x: x.abspath())

			# if you want to modify some flags
			# you *must* have the task recompute the signature
			self.env.append_value('CXXFLAGS', '-O2')
			delattr(self, 'cache_sig')
			return super(waflib.Tools.c.c, self).runnable_status()

		return ret

@TaskGen.extension('.c')
def c_hook(self, node):
	# re-bind the extension to this new class
	return self.create_compiled_task('c2', node)

# modify the existing class to output the targets in the same directory as the original files
Task.update_outputs(c2)
Task.update_outputs(waflib.Tools.c.cprogram)
Task.update_outputs(waflib.Tools.c.cshlib)
Task.update_outputs(waflib.Tools.c.cstlib)

Example #59
class latex(tex):
    "Compiles LaTeX files"
    texfun, vars = Task.compile_fun("${LATEX} ${LATEXFLAGS} ${SRCFILE}", shell=False)
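
A hedged wscript sketch consuming these task classes; the attribute names ('type', 'prompt') follow the stock waf tex tool and are assumptions here, as is the document name:

def configure(conf):
    conf.load('tex')

def build(bld):
    bld(features='tex', type='pdflatex', source='report.ltx', prompt=0)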
Example #60
replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
signatures from each slave and finds out the command-line to run.

Just import this module in the configuration (no other change required).
This is provided as an example, for performance unity builds are recommended (fewer tasks and fewer jobs to execute).
"""

import os
from waflib import TaskGen, Task, Build, Logs
from waflib.TaskGen import extension, feature, before_method, after_method

MAX_BATCH = 50

c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST}'
#c_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
c_fun, _ = Task.compile_fun_noshell(c_str)

cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST}'
#cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
cxx_fun, _ = Task.compile_fun_noshell(cxx_str)

count = 70000
class batch_task(Task.Task):
	color = 'RED'

	after = ['c', 'cxx']
	before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']

	def __str__(self):
		return '(batch compilation for %d slaves)\n' % len(self.slaves)