Example #1
	def get_bld_sig(self):
		"""
		Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose
		of build dependency calculation. This method uses a per-context cache.

		:return: a hash representing the object contents
		:rtype: string or bytes
		"""
		# previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node
		try:
			cache = self.ctx.cache_sig
		except AttributeError:
			cache = self.ctx.cache_sig = {}
		try:
			ret = cache[self]
		except KeyError:
			p = self.abspath()
			try:
				ret = cache[self] = self.h_file()
			except EnvironmentError:
				if self.isdir():
					# allow folders as build nodes, do not use the creation time
					st = os.stat(p)
					ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode])
					return ret
				raise
		return ret
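Nearly every snippet in this listing leans on Utils.h_file and Utils.h_list. As a rough standalone approximation of what they compute (waf uses md5-based digests; this is a sketch, not the library's exact code):

import hashlib

def h_file(path):
    # hash the file contents, as Utils.h_file does for regular files
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).digest()

def h_list(lst):
    # hash the textual representation of a list of values
    return hashlib.md5(repr(lst).encode()).digest()

# directory-style fallback as in get_bld_sig above (values are hypothetical)
# sig = h_list(['/some/dir', 12345, 0o40755])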
Example #2
	def hash_env_vars(self, env, vars_lst):
		"""
		Hashes configuration set variables::

			def build(bld):
				bld.hash_env_vars(bld.env, ['CXX', 'CC'])

		This method uses an internal cache.

		:param env: Configuration Set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param vars_lst: list of variables
		:type vars_lst: list of string
		"""

		if not env.table:
			env = env.parent
			if not env:
				return Utils.SIG_NIL

		idx = str(id(env)) + str(vars_lst)
		try:
			cache = self.cache_env
		except AttributeError:
			cache = self.cache_env = {}
		else:
			try:
				return self.cache_env[idx]
			except KeyError:
				pass

		lst = [env[a] for a in vars_lst]
		cache[idx] = ret = Utils.h_list(lst)
		Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
		return ret
Example #3
	def sig_deep_inputs(self):
		"""
		Enable rebuilds based on the signatures of the tasks that created the input files. Not used by default.

		Example: hashes of output programs can be unchanged after being re-linked,
		despite the libraries being different. This method can thus prevent stale unit test
		results (waf_unit_test.py).

		Hashing input file timestamps is another possibility for the implementation.
		This may cause unnecessary rebuilds when input tasks are frequently executed.
		Here is an implementation example::

			lst = []
			for node in self.inputs + self.dep_nodes:
				st = os.stat(node.abspath())
				lst.append(st.st_mtime)
				lst.append(st.st_size)
			self.m.update(Utils.h_list(lst))

		The downside of the implementation is that it absolutely requires all build directory
		files to be declared within the current build.
		"""
		bld = self.generator.bld
		lst = [bld.task_sigs[bld.node_sigs[node]] for node in (self.inputs + self.dep_nodes) if node.is_bld()]
		self.m.update(Utils.h_list(lst))
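For rule-based task generators, one plausible way to opt into this behaviour is the deep_inputs attribute handled in Example #44; a hedged usage sketch with hypothetical file names:

def build(bld):
    # deep_inputs=True makes the copy task depend on the tasks that
    # produced input.txt, not only on the file contents
    bld(rule='cp ${SRC} ${TGT}', source='input.txt', target='output.txt',
        deep_inputs=True)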
Example #4
	def execute(self):
		if not Configure.autoconfig:
			return execute_method(self)
		env=ConfigSet.ConfigSet()
		do_config=False
		try:
			env.load(os.path.join(Context.top_dir,Options.lockfile))
		except Exception:
			Logs.warn('Configuring the project')
			do_config=True
		else:
			if env.run_dir!=Context.run_dir:
				do_config=True
			else:
				h=0
				for f in env['files']:
					h=Utils.h_list((h,Utils.readf(f,'rb')))
				do_config=h!=env.hash
		if do_config:
			cmd=env['config_cmd']or'configure'
			if Configure.autoconfig=='clobber':
				tmp=Options.options.__dict__
				Options.options.__dict__=env.options
				try:
					run_command(cmd)
				finally:
					Options.options.__dict__=tmp
			else:
				run_command(cmd)
			run_command(self.cmd)
		else:
			return execute_method(self)
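The reconfiguration check folds the contents of every recorded file into a single running hash. A standalone sketch of that idea, with md5 standing in for Utils.h_list and hypothetical paths:

import hashlib

def folded_hash(paths):
    # mirror the loop over env['files'] above: h = h_list((h, file contents))
    h = 0
    for p in paths:
        with open(p, 'rb') as f:
            h = hashlib.md5(repr((h, f.read())).encode()).digest()
    return h

# reconfigure when the stored hash no longer matches (paths are hypothetical):
# do_config = folded_hash(['wscript', 'options.py']) != env.hash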
Example #5
	def execute(self):
		if not Configure.autoconfig:
			return execute_method(self)

		env = ConfigSet.ConfigSet()
		do_config = False
		try:
			env.load(os.path.join(Context.top_dir, Options.lockfile))
		except Exception:
			Logs.warn('Configuring the project')
			do_config = True
		else:
			if env.run_dir != Context.run_dir:
				do_config = True
			else:
				h = 0
				for f in env['files']:
					h = Utils.h_list((h, Utils.readf(f, 'rb')))
				do_config = h != env.hash

		if do_config:
			Options.commands.insert(0, self.cmd)
			Options.commands.insert(0, 'configure')
			if Configure.autoconfig == 'clobber':
				Options.options.__dict__ = env.options
			return

		return execute_method(self)
Example #6
def rsync_and_ssh(task):

	# remove a warning
	task.uid_ = id(task)

	bld = task.generator.bld

	task.env.user, _, _ = task.env.login.partition('@')
	task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
	task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
	task.env.local_dir = bld.srcnode.abspath() + '/'

	task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
	task.env.build_dir = bld.bldnode.abspath()

	ret = task.exec_command(bld.make_mkdir_command(task))
	if ret:
		return ret
	ret = task.exec_command(bld.make_send_command(task))
	if ret:
		return ret
	ret = task.exec_command(bld.make_exec_command(task))
	if ret:
		return ret
	ret = task.exec_command(bld.make_save_command(task))
	if ret:
		return ret
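The remote working directory is derived from a hash of the project path and the variant, so repeated runs of the same project land in the same place. A standalone sketch with hypothetical values (md5/hex stand in for Utils.h_list and Utils.to_hex):

import hashlib

project_path = '/home/user/project'   # hypothetical
variant = 'linux_debug'               # hypothetical
user = 'user'                         # hypothetical
hdir = hashlib.md5(repr((project_path, variant)).encode()).hexdigest()
remote_dir = '~%s/wafremote/%s' % (user, hdir)
print(remote_dir)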
Example #7
File: Build.py Project: zsx/waf
	def hash_env_vars(self, env, vars_lst):
		"""hash environment variables
		['CXX', ..] -> [env['CXX'], ..] -> md5()

		cached by build context
		"""

		if not env.table:
			env = env.parent
			if not env:
				return Utils.SIG_NIL

		idx = str(id(env)) + str(vars_lst)
		try:
			cache = self.cache_env
		except AttributeError:
			cache = self.cache_env = {}
		else:
			try:
				return self.cache_env[idx]
			except KeyError:
				pass

		lst = [str(env[a]) for a in vars_lst]
		ret = Utils.h_list(lst)
		Logs.debug('envhash: %r %r', ret, lst)

		cache[idx] = ret

		return ret
Example #8
	def hash_aux_nodes(self):
		try:
			self.aux_nodes
		except AttributeError:
			try:
				self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
			except IOError:
				return None
		return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
Example #9
def run_c_code(self,*k,**kw):
	lst=[str(v)for(p,v)in kw.items()if p!='env']
	h=Utils.h_list(lst)
	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
	try:
		os.makedirs(dir)
	except OSError:
		pass
	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r'%dir)
	cachemode=getattr(Options.options,'confcache',None)
	if cachemode==CACHE_RESULTS:
		try:
			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
		except OSError:
			pass
		else:
			ret=proj['cache_run_c_code']
			if isinstance(ret,str)and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar=0
	bld.targets='*'
	if kw['compile_filename']:
		node=bld.srcnode.make_node(kw['compile_filename'])
		node.write(kw['code'])
	bld.logger=self.logger
	bld.all_envs.update(self.all_envs)
	bld.env=kw['env']
	o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
	for k,v in kw.items():
		setattr(o,k,v)
	if not kw.get('quiet',None):
		self.to_log("==>\n%s\n<=="%kw['code'])
	bld.targets='*'
	ret=-1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret='Test does not build: %s'%Utils.ex_stack()
			self.fatal(ret)
		else:
			ret=getattr(bld,'retval',0)
	finally:
		proj=ConfigSet.ConfigSet()
		proj['cache_run_c_code']=ret
		proj.store(os.path.join(dir,'cache_run_c_code'))
	return ret
Example #10
	def sig_vars(self):
		bld=self.generator.bld
		env=self.env
		upd=self.m.update
		vars=self.generator.bld.raw_deps.get(self.uid(),[])
		act_sig=bld.hash_env_vars(env,vars)
		upd(act_sig)
		lst=[getattr(self.generator,x,'')for x in vars]
		upd(Utils.h_list(lst))
		return self.m.digest()
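These raw_deps variables typically come from waf's subst feature: the scanner records the template variables, and sig_vars then hashes both the matching environment values and the task generator attributes. A hedged usage sketch (the attribute name is illustrative):

def build(bld):
    # changing VERSION changes the generator attribute hashed by sig_vars,
    # which re-triggers the substitution task
    bld(features='subst', source='config.h.in', target='config.h',
        VERSION='1.2.3')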
Example #11
	def post_recurse(self, node):
		"""
		Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`

		:param node: script
		:type node: :py:class:`waflib.Node.Node`
		"""
		super(ConfigurationContext, self).post_recurse(node)
		self.hash = Utils.h_list((self.hash, node.read('rb')))
		self.files.append(node.abspath())
def run_build(self,*k,**kw):
	lst=[str(v)for(p,v)in kw.items()if p!='env']
	h=Utils.h_list(lst)
	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
	try:
		os.makedirs(dir)
	except OSError:
		pass
	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r'%dir)
	cachemode=getattr(Options.options,'confcache',None)
	if cachemode==1:
		try:
			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build'))
		except OSError:
			pass
		except IOError:
			pass
		else:
			ret=proj['cache_run_build']
			if isinstance(ret,str)and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar=0
	bld.targets='*'
	bld.logger=self.logger
	bld.all_envs.update(self.all_envs)
	bld.env=kw['env']
	bld.kw=kw
	bld.conf=self
	kw['build_fun'](bld)
	ret=-1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret='Test does not build: %s'%Utils.ex_stack()
			self.fatal(ret)
		else:
			ret=getattr(bld,'retval',0)
	finally:
		if cachemode==1:
			proj=ConfigSet.ConfigSet()
			proj['cache_run_build']=ret
			proj.store(os.path.join(dir,'cache_run_build'))
		else:
			shutil.rmtree(dir)
	return ret
Example #13
def run_c_code(self, *k, **kw):
    lst = [str(v) for (p, v) in kw.items() if p != "env"]
    h = Utils.h_list(lst)
    dir = self.bldnode.abspath() + os.sep + (sys.platform != "win32" and "." or "") + "conf_check_" + Utils.to_hex(h)
    try:
        os.makedirs(dir)
    except:
        pass
    try:
        os.stat(dir)
    except:
        self.fatal("cannot use the configuration test folder %r" % dir)
    cachemode = getattr(Options.options, "confcache", None)
    if cachemode == CACHE_RESULTS:
        try:
            proj = ConfigSet.ConfigSet(os.path.join(dir, "cache_run_c_code"))
            ret = proj["cache_run_c_code"]
        except:
            pass
        else:
            if isinstance(ret, str) and ret.startswith("Test does not build"):
                self.fatal(ret)
            return ret
    bdir = os.path.join(dir, "testbuild")
    if not os.path.exists(bdir):
        os.makedirs(bdir)
    self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
    bld.init_dirs()
    bld.progress_bar = 0
    bld.targets = "*"
    if kw["compile_filename"]:
        node = bld.srcnode.make_node(kw["compile_filename"])
        node.write(kw["code"])
    bld.logger = self.logger
    bld.all_envs.update(self.all_envs)
    bld.env = kw["env"]
    o = bld(features=kw["features"], source=kw["compile_filename"], target="testprog")
    for k, v in kw.items():
        setattr(o, k, v)
    self.to_log("==>\n%s\n<==" % kw["code"])
    bld.targets = "*"
    ret = -1
    try:
        try:
            bld.compile()
        except Errors.WafError:
            ret = "Test does not build: %s" % Utils.ex_stack()
            self.fatal(ret)
        else:
            ret = getattr(bld, "retval", 0)
    finally:
        proj = ConfigSet.ConfigSet()
        proj["cache_run_c_code"] = ret
        proj.store(os.path.join(dir, "cache_run_c_code"))
    return ret
Example #14
	def sig_vars(self):
		bld=self.generator.bld
		env=self.env
		upd=self.m.update
		if getattr(self.generator,'subst_fun',None):
			upd(Utils.h_fun(self.generator.subst_fun).encode())
		vars=self.generator.bld.raw_deps.get(self.uid(),[])
		act_sig=bld.hash_env_vars(env,vars)
		upd(act_sig)
		lst=[getattr(self.generator,x,'')for x in vars]
		upd(Utils.h_list(lst))
		return self.m.digest()
Example #15
	def run(self):
		env=self.env
		if not env['PROMPT_LATEX']:
			env.append_value('LATEXFLAGS','-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
			env.append_value('XELATEXFLAGS','-interaction=batchmode')
		fun=self.texfun
		node=self.inputs[0]
		srcfile=node.abspath()
		texinputs=self.env.TEXINPUTS or''
		self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep\
		  +node.parent.get_src().abspath()+os.pathsep\
		  +self.generator.path.get_src().abspath()+os.pathsep\
		  +self.generator.path.get_bld().abspath()+os.pathsep\
		  +texinputs+os.pathsep
		self.cwd=self.inputs[0].parent.get_bld().abspath()
		Logs.warn('first pass on %s'%self.__class__.__name__)
		self.env.env={}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS':self.TEXINPUTS,'TEXPICTS':self.TEXINPUTS})
		self.env.SRCFILE=srcfile
		self.env.LOF_FILES=node.change_ext(".lof").abspath()
		print "LOF_FILES:",self.env.LOF_FILES
		#print fun
		#print self.env
		fun_clean=self.texfun_clean
		fun_clean()
		self.check_status('error when calling latex',fun())
		fun_clean()
		self.aux_nodes=self.scan_aux(node.change_ext('.aux'))
		self.idx_node=node.change_ext('.idx')
		self.bibtopic()
		self.bibfile()
		self.bibunits()
		self.makeindex()
		hash=''
		for i in range(10):
			prev_hash=hash
			try:
				hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes]
				hash=Utils.h_list(hashes)
			except(OSError,IOError):
				Logs.error('could not read aux.h')
				pass
			if hash and hash==prev_hash:
				break
			Logs.warn('calling %s'%self.__class__.__name__)
			self.env.env={}
			self.env.env.update(os.environ)
			self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
			self.env.SRCFILE=srcfile
			#fun_clean()
			self.check_status('error when calling %s'%self.__class__.__name__,fun())
Example #16
	def hash_aux_nodes(self):
		"""
		Returns a hash of the .aux file contents

		:rtype: string or bytes
		"""
		try:
			self.aux_nodes
		except AttributeError:
			try:
				self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
			except IOError:
				return None
		return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
Example #17
 def run(self):
     env = self.env
     if not env["PROMPT_LATEX"]:
         env.append_value("LATEXFLAGS", "-interaction=batchmode")
         env.append_value("PDFLATEXFLAGS", "-interaction=batchmode")
         env.append_value("XELATEXFLAGS", "-interaction=batchmode")
     fun = self.texfun
     node = self.inputs[0]
     srcfile = node.abspath()
     texinputs = self.env.TEXINPUTS or ""
     self.TEXINPUTS = (
         node.parent.get_bld().abspath()
         + os.pathsep
         + node.parent.get_src().abspath()
         + os.pathsep
         + texinputs
         + os.pathsep
     )
     self.cwd = self.inputs[0].parent.get_bld().abspath()
     Logs.warn("first pass on %s" % self.__class__.__name__)
     self.env.env = {}
     self.env.env.update(os.environ)
     self.env.env.update({"TEXINPUTS": self.TEXINPUTS})
     self.env.SRCFILE = srcfile
     self.check_status("error when calling latex", fun())
     self.aux_nodes = self.scan_aux(node.change_ext(".aux"))
     self.idx_node = node.change_ext(".idx")
     self.bibfile()
     self.bibunits()
     self.makeindex()
     hash = ""
     for i in range(10):
         prev_hash = hash
         try:
             hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
             hash = Utils.h_list(hashes)
         except (OSError, IOError):
             Logs.error("could not read aux.h")
             pass
         if hash and hash == prev_hash:
             break
         Logs.warn("calling %s" % self.__class__.__name__)
         self.env.env = {}
         self.env.env.update(os.environ)
         self.env.env.update({"TEXINPUTS": self.TEXINPUTS})
         self.env.SRCFILE = srcfile
         self.check_status("error when calling %s" % self.__class__.__name__, fun())
Example #18
def run_c_code(self, *k, **kw):
	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	dir = self.bldnode.abspath() + os.sep + '.conf_check_' + Utils.to_hex(h)

	try:
		os.makedirs(dir)
	except:
		pass

	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r' % dir)

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir) # keep the temporary build context on an attribute for debugging
	bld.load() # configuration test cache
	bld.targets = '*'

	if kw['compile_filename']:
		node = bld.srcnode.make_node(kw['compile_filename'])
		node.write(kw['code'])

	bld.logger = self.logger
	bld.all_envs.update(self.all_envs)
	bld.all_envs['default'] = kw['env']

	o = bld(features=kw['features'], source=kw['compile_filename'], target='testprog')

	for k, v in kw.items():
		setattr(o, k, v)

	self.to_log("==>\n%s\n<==" % kw['code'])

	# compile the program
	bld.targets = '*'
	try:
		bld.compile()
	except Errors.WafError:
		self.fatal('Test does not build: %s' % Utils.ex_stack())

	return getattr(bld, 'retval', 0)
Example #19
	def execute(self):
		"""
		Wraps :py:func:`waflib.Context.Context.execute` on the context class
		"""
		if not Configure.autoconfig:
			return execute_method(self)

		env = ConfigSet.ConfigSet()
		do_config = False
		try:
			env.load(os.path.join(Context.top_dir, Options.lockfile))
		except EnvironmentError:
			Logs.warn('Configuring the project')
			do_config = True
		else:
			if env.run_dir != Context.run_dir:
				do_config = True
			else:
				h = 0
				for f in env.files:
					try:
						h = Utils.h_list((h, Utils.readf(f, 'rb')))
					except EnvironmentError:
						do_config = True
						break
				else:
					do_config = h != env.hash

		if do_config:
			cmd = env.config_cmd or 'configure'
			if Configure.autoconfig == 'clobber':
				tmp = Options.options.__dict__
				launch_dir_tmp = Context.launch_dir
				if env.options:
					Options.options.__dict__ = env.options
				Context.launch_dir = env.launch_dir
				try:
					run_command(cmd)
				finally:
					Options.options.__dict__ = tmp
					Context.launch_dir = launch_dir_tmp
			else:
				run_command(cmd)
			run_command(self.cmd)
		else:
			return execute_method(self)
Example #20
	def get_bld_sig(self):
		try:
			cache=self.ctx.cache_sig
		except AttributeError:
			cache=self.ctx.cache_sig={}
		try:
			ret=cache[self]
		except KeyError:
			p=self.abspath()
			try:
				ret=cache[self]=self.h_file()
			except EnvironmentError:
				if self.isdir():
					st=os.stat(p)
					ret=cache[self]=Utils.h_list([p,st.st_ino,st.st_mode])
					return ret
				raise
		return ret
Example #21
	def hash_env_vars(self,env,vars_lst):
		if not env.table:
			env=env.parent
			if not env:
				return Utils.SIG_NIL
		idx=str(id(env))+str(vars_lst)
		try:
			cache=self.cache_env
		except AttributeError:
			cache=self.cache_env={}
		else:
			try:
				return self.cache_env[idx]
			except KeyError:
				pass
		lst=[env[a]for a in vars_lst]
		cache[idx]=ret=Utils.h_list(lst)
		Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst)
		return ret
Example #22
    def sig_vars(self):
        """
        Compute a hash (signature) of the variables used in the substitution
        """
        bld = self.generator.bld
        env = self.env
        upd = self.m.update

        # raw_deps: persistent custom values returned by the scanner
        vars = self.generator.bld.raw_deps.get(self.uid(), [])

        # hash both env vars and task generator attributes
        act_sig = bld.hash_env_vars(env, vars)
        upd(act_sig)

        lst = [getattr(self.generator, x, '') for x in vars]
        upd(Utils.h_list(lst))

        return self.m.digest()
Example #23
 def hash_env_vars(self, env, vars_lst):
     if not env.table:
         env = env.parent
         if not env:
             return Utils.SIG_NIL
     idx = str(id(env)) + str(vars_lst)
     try:
         cache = self.cache_env
     except AttributeError:
         cache = self.cache_env = {}
     else:
         try:
             return self.cache_env[idx]
         except KeyError:
             pass
     lst = [env[a] for a in vars_lst]
     ret = Utils.h_list(lst)
     Logs.debug('envhash: %r %r', ret, lst)
     cache[idx] = ret
     return ret
 def execute(self):
     if not Configure.autoconfig:
         return execute_method(self)
     env = ConfigSet.ConfigSet()
     do_config = False
     try:
         env.load(os.path.join(Context.top_dir, Options.lockfile))
     except EnvironmentError:
         Logs.warn('Configuring the project')
         do_config = True
     else:
         if env.run_dir != Context.run_dir:
             do_config = True
         else:
             h = 0
             for f in env.files:
                 try:
                     h = Utils.h_list((h, Utils.readf(f, 'rb')))
                 except EnvironmentError:
                     do_config = True
                     break
             else:
                 do_config = h != env.hash
     if do_config:
         cmd = env.config_cmd or 'configure'
         if Configure.autoconfig == 'clobber':
             tmp = Options.options.__dict__
             launch_dir_tmp = Context.launch_dir
             if env.options:
                 Options.options.__dict__ = env.options
             Context.launch_dir = env.launch_dir
             try:
                 run_command(cmd)
             finally:
                 Options.options.__dict__ = tmp
                 Context.launch_dir = launch_dir_tmp
         else:
             run_command(cmd)
         run_command(self.cmd)
     else:
         return execute_method(self)
Example #25
	def run(self):
		env=self.env
		if not env['PROMPT_LATEX']:
			env.append_value('LATEXFLAGS','-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
			env.append_value('XELATEXFLAGS','-interaction=batchmode')
		fun=self.texfun
		node=self.inputs[0]
		srcfile=node.abspath()
		texinputs=self.env.TEXINPUTS or''
		self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep
		self.cwd=self.inputs[0].parent.get_bld().abspath()
		Logs.warn('first pass on %s'%self.__class__.__name__)
		self.env.env={}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
		self.env.SRCFILE=srcfile
		self.check_status('error when calling latex',fun())
		self.aux_nodes=self.scan_aux(node.change_ext('.aux'))
		self.idx_node=node.change_ext('.idx')
		self.bibfile()
		self.bibunits()
		self.makeindex()
		hash=''
		for i in range(10):
			prev_hash=hash
			try:
				hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes]
				hash=Utils.h_list(hashes)
			except(OSError,IOError):
				Logs.error('could not read aux.h')
				pass
			if hash and hash==prev_hash:
				break
			Logs.warn('calling %s'%self.__class__.__name__)
			self.env.env={}
			self.env.env.update(os.environ)
			self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
			self.env.SRCFILE=srcfile
			self.check_status('error when calling %s'%self.__class__.__name__,fun())
Example #26
def apply_subst(self):
    Utils.def_attrs(self, fun=subst_func)
    lst = self.to_list(self.source)
    self.meths.remove('process_source')

    self.dict = getattr(self, 'dict', {})

    for filename in lst:
        node = self.path.find_resource(filename)
        if not node:
            raise Errors.WafError('cannot find input file %s for processing' %
                                  filename)

        if self.target:
            newnode = self.path.find_or_declare(self.target)
        else:
            newnode = node.change_ext('')

        try:
            self.dict = self.dict.get_merged_dict()
        except AttributeError:
            pass

        if self.dict and not self.env['DICT_HASH']:
            self.env = self.env.derive()
            keys = list(self.dict.keys())
            keys.sort()
            lst = [self.dict[x] for x in keys]
            self.env['DICT_HASH'] = str(Utils.h_list(lst))

        tsk = self.create_task('copy', node, newnode)
        tsk.fun = self.fun
        tsk.dict = self.dict
        tsk.dep_vars = ['DICT_HASH']
        tsk.chmod = getattr(self, 'chmod', Utils.O644)

        if not tsk.env:
            tsk.debug()
            raise Errors.WafError('task without an environment')
Example #27
    def sig_vars(self):
        """
		Compute a hash (signature) of the variables used in the substitution
		"""
        bld = self.generator.bld
        env = self.env
        upd = self.m.update

        if getattr(self.generator, 'subst_fun', None):
            upd(Utils.h_fun(self.generator.subst_fun).encode())

        # raw_deps: persistent custom values returned by the scanner
        vars = self.generator.bld.raw_deps.get(self.uid(), [])

        # hash both env vars and task generator attributes
        act_sig = bld.hash_env_vars(env, vars)
        upd(act_sig)

        lst = [getattr(self.generator, x, '') for x in vars]
        upd(Utils.h_list(lst))

        return self.m.digest()
Example #28
def apply_subst(self):
    Utils.def_attrs(self, fun=subst_func)
    lst = self.to_list(self.source)
    self.meths.remove("process_source")

    self.dict = getattr(self, "dict", {})

    for filename in lst:
        node = self.path.find_resource(filename)
        if not node:
            raise Errors.WafError("cannot find input file %s for processing" % filename)

        if self.target:
            newnode = self.path.find_or_declare(self.target)
        else:
            newnode = node.change_ext("")

        try:
            self.dict = self.dict.get_merged_dict()
        except AttributeError:
            pass

        if self.dict and not self.env["DICT_HASH"]:
            self.env = self.env.derive()
            keys = list(self.dict.keys())
            keys.sort()
            lst = [self.dict[x] for x in keys]
            self.env["DICT_HASH"] = str(Utils.h_list(lst))

        tsk = self.create_task("copy", node, newnode)
        tsk.fun = self.fun
        tsk.dict = self.dict
        tsk.dep_vars = ["DICT_HASH"]
        tsk.chmod = getattr(self, "chmod", Utils.O644)

        if not tsk.env:
            tsk.debug()
            raise Errors.WafError("task without an environment")
Example #29
 def execute(self):
     if not Configure.autoconfig:
         return execute_method(self)
     env = ConfigSet.ConfigSet()
     do_config = False
     try:
         env.load(os.path.join(Context.top_dir, Options.lockfile))
     except Exception:
         Logs.warn('Configuring the project')
         do_config = True
     else:
         if env.run_dir != Context.run_dir:
             do_config = True
         else:
             h = 0
             for f in env['files']:
                 h = Utils.h_list((h, Utils.readf(f, 'rb')))
             do_config = h != env.hash
     if do_config:
         Options.commands.insert(0, self.cmd)
         Options.commands.insert(0, 'configure')
         return
     return execute_method(self)
Example #30
    def hash_env_vars(self, env, vars_lst):
        """
		Hash configuration set variables::

			def build(bld):
				bld.hash_env_vars(bld.env, ['CXX', 'CC'])

		:param env: Configuration Set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param vars_lst: list of variables
		:type vars_lst: list of string
		"""

        if not env.table:
            env = env.parent
            if not env:
                return Utils.SIG_NIL

        idx = str(id(env)) + str(vars_lst)
        try:
            cache = self.cache_env
        except AttributeError:
            cache = self.cache_env = {}
        else:
            try:
                return self.cache_env[idx]
            except KeyError:
                pass

        lst = [env[a] for a in vars_lst]
        ret = Utils.h_list(lst)
        Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)

        cache[idx] = ret

        return ret
 def uid(self):
     lst = self.inputs + self.outputs + [
         self.link, self.generator.path.abspath()
     ]
     return Utils.h_list(lst)
Example #32
def run_build(self, *k, **kw):
	"""
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes, and you should not rely
	on it too much (read the note on the cache below).
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.

	This function also provides a limited cache. To use it, provide the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""
	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

	try:
		os.makedirs(dir)
	except OSError:
		pass

	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r' % dir)

	cachemode = getattr(Options.options, 'confcache', None)
	if cachemode == 1:
		try:
			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
		except EnvironmentError:
			pass
		else:
			ret = proj['cache_run_build']
			if isinstance(ret, str) and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
	self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar = 0
	bld.targets = '*'

	bld.logger = self.logger
	bld.all_envs.update(self.all_envs) # not really necessary
	bld.env = kw['env']

	bld.kw = kw
	bld.conf = self
	kw['build_fun'](bld)
	ret = -1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret = 'Test does not build: %s' % traceback.format_exc()
			self.fatal(ret)
		else:
			ret = getattr(bld, 'retval', 0)
	finally:
		if cachemode == 1:
			# cache the results each time
			proj = ConfigSet.ConfigSet()
			proj['cache_run_build'] = ret
			proj.store(os.path.join(dir, 'cache_run_build'))
		else:
			shutil.rmtree(dir)
	return ret
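A hedged usage sketch for this helper from a configuration method; the callback name is arbitrary, and only the build_fun and env parameters used in the code above are relied upon:

def check_toolchain(conf):
    def build_fun(bld):
        # the callback describes the throwaway build; it writes its own source file
        bld.srcnode.make_node('main.c').write('int main(void) { return 0; }\n')
        bld(features='c cprogram', source='main.c', target='testprog')
    # conf.run_build(build_fun=build_fun, env=conf.env.derive())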
Example #33
    def run(self):
        """
		Runs the TeX build process.

		It may require multiple passes, depending on the usage of cross-references,
		bibliographies, content susceptible of needing such passes.
		The appropriate TeX compiler is called until the *.aux* files stop changing.

		Makeindex and bibtex are called if necessary.
		"""
        env = self.env

        if not env['PROMPT_LATEX']:
            env.append_value('LATEXFLAGS', '-interaction=batchmode')
            env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
            env.append_value('XELATEXFLAGS', '-interaction=batchmode')

        fun = self.texfun

        node = self.inputs[0]
        srcfile = node.abspath()

        texinputs = self.env.TEXINPUTS or ''
        self.TEXINPUTS = node.parent.get_bld().abspath(
        ) + os.pathsep + node.parent.get_src().abspath(
        ) + os.pathsep + texinputs + os.pathsep

        self.aux_node = node.change_ext(
            '.aux')  # TODO waf 1.7 remove (left for compatibility)

        # important, set the cwd for everybody
        self.cwd = self.inputs[0].parent.get_bld().abspath()

        warn('first pass on %s' % self.__class__.__name__)

        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        self.check_status('error when calling latex', fun())

        self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
        self.idx_node = node.change_ext('.idx')

        self.bibfile()
        self.bibunits()
        self.makeindex()

        hash = ''
        for i in range(10):
            # prevent against infinite loops - one never knows

            # watch the contents of file.aux and stop if file.aux does not change anymore
            prev_hash = hash
            try:
                hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
                hash = Utils.h_list(hashes)
            except (OSError, IOError):
                error('could not read aux.h')
                pass
            if hash and hash == prev_hash:
                break

            # run the command
            warn('calling %s' % self.__class__.__name__)

            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
            self.env.SRCFILE = srcfile
            self.check_status(
                'error when calling %s' % self.__class__.__name__, fun())
Example #34
	def run(self):
		"""
		Runs the TeX build process.

		It may require multiple passes, depending on the usage of cross-references,
		bibliographies, content susceptible of needing such passes.
		The appropriate TeX compiler is called until the *.aux* files stop changing.

		Makeindex and bibtex are called if necessary.
		"""
		env = self.env

		if not env['PROMPT_LATEX']:
			env.append_value('LATEXFLAGS', '-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
			env.append_value('XELATEXFLAGS', '-interaction=batchmode')

		fun = self.texfun

		node = self.inputs[0]
		srcfile = node.abspath()

		texinputs = self.env.TEXINPUTS or ''
		self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep

		# important, set the cwd for everybody
		self.cwd = self.inputs[0].parent.get_bld().abspath()

		Logs.warn('first pass on %s' % self.__class__.__name__)

		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
		self.env.SRCFILE = srcfile
		self.check_status('error when calling latex', fun())

		self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
		self.idx_node = node.change_ext('.idx')

		self.bibtopic()
		self.bibfile()
		self.bibunits()
		self.makeindex()

		hash = ''
		for i in range(10):
			# prevent against infinite loops - one never knows

			# watch the contents of file.aux and stop if file.aux does not change anymore
			prev_hash = hash
			try:
				hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
				hash = Utils.h_list(hashes)
			except (OSError, IOError):
				Logs.error('could not read aux.h')
				pass
			if hash and hash == prev_hash:
				break

			# run the command
			Logs.warn('calling %s' % self.__class__.__name__)

			self.env.env = {}
			self.env.env.update(os.environ)
			self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
			self.env.SRCFILE = srcfile
			self.check_status('error when calling %s' % self.__class__.__name__, fun())
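The rerun logic is a small fixed-point loop: keep recompiling until the auxiliary files stop changing, with a hard cap of ten passes. A standalone sketch of the same idea (md5 stands in for Utils.h_file/Utils.h_list):

import hashlib

def aux_hash(paths):
    digests = []
    for p in paths:
        with open(p, 'rb') as f:
            digests.append(hashlib.md5(f.read()).digest())
    return hashlib.md5(repr(digests).encode()).digest()

def run_until_stable(compile_once, aux_paths, max_passes=10):
    prev = None
    for _ in range(max_passes):
        compile_once()
        cur = aux_hash(aux_paths)
        if cur == prev:
            break      # the .aux files stopped changing
        prev = cur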
Example #35
def process_rule(self):
    if not getattr(self, 'rule', None):
        return
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}
    chmod = getattr(self, 'chmod', None)
    shell = getattr(self, 'shell', True)
    color = getattr(self, 'color', 'BLUE')
    scan = getattr(self, 'scan', None)
    _vars = getattr(self, 'vars', [])
    cls_str = getattr(self, 'cls_str', None)
    cls_keyword = getattr(self, 'cls_keyword', None)
    use_cache = getattr(self, 'cache_rule', 'True')
    scan_val = has_deps = hasattr(self, 'deps')
    if scan:
        scan_val = id(scan)
    key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str,
                        cls_keyword, scan_val, _vars))
    cls = None
    if use_cache:
        try:
            cls = cache[key]
        except KeyError:
            pass
    if not cls:
        rule = self.rule
        if chmod is not None:

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), tsk.generator.chmod)

            if isinstance(rule, tuple):
                rule = list(rule)
                rule.append(chmod_fun)
                rule = tuple(rule)
            else:
                rule = (rule, chmod_fun)
        cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
        if cls_str:
            setattr(cls, '__str__', self.cls_str)
        if cls_keyword:
            setattr(cls, 'keyword', self.cls_keyword)
        if scan:
            cls.scan = self.scan
        elif has_deps:

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan
        if use_cache:
            cache[key] = cls
    tsk = self.create_task(name)
    for x in ('after', 'before', 'ext_in', 'ext_out'):
        setattr(tsk, x, getattr(self, x, []))
    if hasattr(self, 'stdout'):
        tsk.stdout = self.stdout
    if hasattr(self, 'stderr'):
        tsk.stderr = self.stderr
    if getattr(self, 'timeout', None):
        tsk.timeout = self.timeout
    if getattr(self, 'always', None):
        tsk.always_run = True
    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir()
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.install_task = self.add_install_files(
                install_to=self.install_path,
                install_from=tsk.outputs,
                chmod=getattr(self, 'chmod', Utils.O644))
    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        self.source = []
    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd
    if isinstance(tsk.run, functools.partial):
        tsk.run = functools.partial(tsk.run, tsk)
Example #36
    def execute(self):
        """
        Wraps :py:func:`waflib.Context.Context.execute` on the context class
        """
        if not Configure.autoconfig:
            return execute_method(self)

        # Disable autoconfig so waf's own version doesn't run (and we don't end up in a loop of bad configure runs)
        Configure.autoconfig = False

        if self.variant == '':
            raise Errors.WafError('The project is badly configured: run "waf configure" again!')

        env = ConfigSet.ConfigSet()
        do_config = False

        try:
            p = os.path.join(Context.out_dir, Build.CACHE_DIR, self.variant + Build.CACHE_SUFFIX)
            env.load(p)
        except EnvironmentError:
            raise Errors.WafError('The project is not configured for board {0}: run "waf configure --board {0} [...]" first!'.format(self.variant))

        lock_env = ConfigSet.ConfigSet()

        try:
            lock_env.load(os.path.join(Context.top_dir, Options.lockfile))
        except EnvironmentError:
            Logs.warn('Configuring the project')
            do_config = True
        else:
            if lock_env.run_dir != Context.run_dir:
                do_config = True
            else:
                h = 0

                for f in env.CONFIGURE_FILES:
                    try:
                        h = Utils.h_list((h, Utils.readf(f, 'rb')))
                    except EnvironmentError:
                        do_config = True
                        break
                else:
                    do_config = h != env.CONFIGURE_HASH

        if do_config:
            cmd = lock_env.config_cmd or 'configure'
            tmp = Options.options.__dict__

            if env.OPTIONS and sorted(env.OPTIONS.keys()) == sorted(tmp.keys()):
                Options.options.__dict__ = env.OPTIONS
            else:
                raise Errors.WafError('The project configure options have changed: run "waf configure" again!')

            try:
                run_command(cmd)
            finally:
                Options.options.__dict__ = tmp

            run_command(self.cmd)
        else:
            return execute_method(self)
Example #37
 def uid(self):
     lst = [self.__class__.__name__, self.generator.outdir.abspath()]
     for x in self.srcdir:
         lst.append(x.abspath())
     return Utils.h_list(lst)
Example #38
	def uid(self):
		"""Returns a unique identifier for the task"""
		lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()]
		return Utils.h_list(lst)
Example #39
 def uid(self):
     """Identify java tasks by input&output folder"""
     lst = [self.__class__.__name__, self.generator.outdir.abspath()]
     for x in self.srcdir:
         lst.append(x.abspath())
     return Utils.h_list(lst)
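The identifier depends only on stable values (the class name and the folders being processed), so the task keeps the same uid across runs even though the individual class files differ. A standalone sketch with hypothetical paths:

import hashlib

def task_uid(cls_name, paths):
    # md5 of the repr mirrors Utils.h_list
    return hashlib.md5(repr([cls_name] + list(paths)).encode()).digest()

print(task_uid('javac', ['/prj/src', '/prj/build/classes']).hex())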
Example #40
def process_rule(self):
    """
	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
    if not getattr(self, 'rule', None):
        return

    # create the task class
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))

    # or we can put the class in a cache for performance reasons
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}

    chmod = getattr(self, 'chmod', None)
    shell = getattr(self, 'shell', True)
    color = getattr(self, 'color', 'BLUE')
    scan = getattr(self, 'scan', None)
    _vars = getattr(self, 'vars', [])
    cls_str = getattr(self, 'cls_str', None)
    cls_keyword = getattr(self, 'cls_keyword', None)
    use_cache = getattr(self, 'cache_rule', 'True')

    scan_val = has_deps = hasattr(self, 'deps')
    if scan:
        scan_val = id(scan)

    key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str,
                        cls_keyword, scan_val, _vars))

    cls = None
    if use_cache:
        try:
            cls = cache[key]
        except KeyError:
            pass
    if not cls:
        rule = self.rule
        if chmod is not None:

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), tsk.generator.chmod)

            if isinstance(rule, tuple):
                rule = list(rule)
                rule.append(chmod_fun)
                rule = tuple(rule)
            else:
                rule = (rule, chmod_fun)

        cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

        if cls_str:
            setattr(cls, '__str__', self.cls_str)

        if cls_keyword:
            setattr(cls, 'keyword', self.cls_keyword)

        if scan:
            cls.scan = self.scan
        elif has_deps:

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan

        # TODO use these values in the cache key if provided
        # (may cause excessive caching)
        for x in ('after', 'before', 'ext_in', 'ext_out'):
            setattr(cls, x, getattr(self, x, []))

        if use_cache:
            cache[key] = cls

    # now create one instance
    tsk = self.create_task(name)

    if getattr(self, 'timeout', None):
        tsk.timeout = self.timeout

    if getattr(self, 'always', None):
        tsk.always_run = True

    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir(
                )  # if a node was given, create the required folders
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.install_task = self.add_install_files(
                install_to=self.install_path,
                install_from=tsk.outputs,
                chmod=getattr(self, 'chmod', Utils.O644))

    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        # bypass the execution of process_source by setting the source to an empty list
        self.source = []

    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd

    if isinstance(tsk.run, functools.partial):
        # Python documentation says: "partial objects defined in classes
        # behave like static methods and do not transform into bound
        # methods during instance attribute look-up."
        tsk.run = functools.partial(tsk.run, tsk)
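A hedged usage sketch for the rule processing above; deps and chmod are the attributes handled in this code, and the file names are hypothetical:

def build(bld):
    # deps feeds the generated scan() above; chmod installs the chmod_fun hook
    bld(rule='cp ${SRC} ${TGT}', source='wscript', target='copy.txt',
        deps=['extra.cfg'], chmod=0o755)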
Example #41
def run_build(self, *k, **kw):
    """
	Create a temporary build context to execute a build. A temporary reference to that build
	context is kept on self.test_bld for debugging purposes.
	The arguments to this function are passed to a single task generator for that build.
	Only three parameters are mandatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: input file contents
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.

	The temporary build creates a temporary folder; the name of that folder is calculated
	by hashing input arguments to this function, with the exception of :py:class:`waflib.ConfigSet.ConfigSet`
	objects which are used for both reading and writing values.

	This function also features a cache which is disabled by default; that cache relies
	on the hash value calculated as indicated above::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""
    buf = []
    for key in sorted(kw.keys()):
        v = kw[key]
        if isinstance(v, ConfigSet.ConfigSet):
            # values are being written to, so they are excluded from contributing to the hash
            continue
        elif hasattr(v, '__call__'):
            buf.append(Utils.h_fun(v))
        else:
            buf.append(str(v))
    h = Utils.h_list(buf)
    dir = self.bldnode.abspath() + os.sep + (
        not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

    cachemode = kw.get('confcache', getattr(Options.options, 'confcache',
                                            None))

    if not cachemode and os.path.exists(dir):
        shutil.rmtree(dir)

    try:
        os.makedirs(dir)
    except OSError:
        pass

    try:
        os.stat(dir)
    except OSError:
        self.fatal('cannot use the configuration test folder %r' % dir)

    if cachemode == 1:
        try:
            proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
        except EnvironmentError:
            pass
        else:
            ret = proj['cache_run_build']
            if isinstance(ret, str) and ret.startswith('Test does not build'):
                self.fatal(ret)
            return ret

    bdir = os.path.join(dir, 'testbuild')

    if not os.path.exists(bdir):
        os.makedirs(bdir)

    cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls',
                                                  'build')
    self.test_bld = bld = Context.create_context(cls_name,
                                                 top_dir=dir,
                                                 out_dir=bdir)
    bld.init_dirs()
    bld.progress_bar = 0
    bld.targets = '*'

    bld.logger = self.logger
    bld.all_envs.update(self.all_envs)  # not really necessary
    bld.env = kw['env']

    bld.kw = kw
    bld.conf = self
    kw['build_fun'](bld)
    ret = -1
    try:
        try:
            bld.compile()
        except Errors.WafError:
            ret = 'Test does not build: %s' % traceback.format_exc()
            self.fatal(ret)
        else:
            ret = getattr(bld, 'retval', 0)
    finally:
        if cachemode:
            # cache the results each time
            proj = ConfigSet.ConfigSet()
            proj['cache_run_build'] = ret
            proj.store(os.path.join(dir, 'cache_run_build'))
        else:
            shutil.rmtree(dir)
    return ret
Example #42
	def run(self):
		"""
		Runs the TeX build process.

		It may require multiple passes, depending on the usage of cross-references,
		bibliographies, content susceptible of needing such passes.
		The appropriate TeX compiler is called until the *.aux* files stop changing.

		Makeindex and bibtex are called if necessary.
		"""
		env = self.env

		if not env['PROMPT_LATEX']:
			env.append_value('LATEXFLAGS', '-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
			env.append_value('XELATEXFLAGS', '-interaction=batchmode')

		fun = self.texfun

		node = self.inputs[0]
		srcfile = node.abspath()

		texinputs = self.env.TEXINPUTS or ''
		self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep

		# important, set the cwd for everybody
		self.cwd = self.inputs[0].parent.get_bld().abspath()

		Logs.warn('first pass on %s' % self.__class__.__name__)

		hash = ''
		try:
			self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
			hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
			hash = Utils.h_list(hashes)
		except (OSError, IOError):
			pass
		hash_bcf = ''
		try:
			self.bcf_node = node.change_ext('.bcf')
			hashes = [Utils.h_file(self.bcf_node.abspath())]
			hash_bcf = Utils.h_list(hashes)
		except (OSError, IOError):
			pass
		hash_bbl = ''
		try:
			self.bbl_node = node.change_ext('.bbl')
			hashes = [Utils.h_file(self.bbl_node.abspath())]
			hash_bbl = Utils.h_list(hashes)
		except (OSError, IOError):
			pass
		hash_idx = ''
		try:
			self.idx_node = node.change_ext('.idx')
			hashes = [Utils.h_file(self.idx_node.abspath())]
			hash_idx = Utils.h_list(hashes)
		except (OSError, IOError):
			pass
		hash_ind = ''
		try:
			self.ind_node = node.change_ext('.ind')
			hashes = [Utils.h_file(self.ind_node.abspath())]
			hash_ind = Utils.h_list(hashes)
		except (OSError, IOError):
			pass
		hash_nlo = ''
		try:
			self.nlo_node = node.change_ext('.nlo')
			hashes = [Utils.h_file(self.nlo_node.abspath())]
			hash_nlo = Utils.h_list(hashes)
		except (OSError, IOError):
			pass
		hash_nls = ''
		self.nls_node = node.change_ext('.nls')
		try:
			hashes = [Utils.h_file(self.nls_node.abspath())]
			hash_nls = Utils.h_list(hashes)
		except (OSError, IOError):
			pass

		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
		self.env.SRCFILE = srcfile
		self.check_status('error when calling latex', fun())

		self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
		self.idx_node = node.change_ext('.idx')

		self.bibtopic()
		self.bibfile()

		prev_bcf_hash = hash_bcf
		try:
			self.bcf_node = node.change_ext('.bcf')
			hashes = [Utils.h_file(self.bcf_node.abspath())]
			hash_bcf = Utils.h_list(hashes)
		except (OSError, IOError):
			Logs.error('could not read bcf.h')
			pass
		if hash_bcf and hash_bcf != prev_bcf_hash:
			self.bibunits()
		else:
			Logs.warn('%s unchanged, not calling bibliography engine' % (self.bcf_node))

		prev_idx_hash = hash_idx
		try:
			self.idx_node = node.change_ext('.idx')
			hashes = [Utils.h_file(self.idx_node.abspath())]
			hash_idx = Utils.h_list(hashes)
		except (OSError, IOError):
			Logs.error('could not read idx.h')
			pass
		if hash_idx and hash_idx != prev_idx_hash:
			self.makeindex()
		else:
			Logs.warn('%s unchanged, not calling indexing engine' % (self.idx_node))

		prev_nlo_hash = hash_nlo
		try:
			self.nlo_node = node.change_ext('.nlo')
			hashes = [Utils.h_file(self.nlo_node.abspath())]
			hash_nlo = Utils.h_list(hashes)
		except (OSError, IOError):
			Logs.error('could not read nlo.h')
			pass
		if hash_nlo and hash_nlo != prev_nlo_hash:
			self.makenomen()
		else:
			Logs.warn('%s unchanged, not calling nomenclature engine' % (self.nlo_node))

		for i in range(10):
			# prevent against infinite loops - one never knows

			# watch the contents of file.aux and stop if file.aux does not change anymore
			prev_hash = hash
			try:
				hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
				hash = Utils.h_list(hashes)
			except (OSError, IOError):
				Logs.error('could not read aux.h')
				pass
			prev_hash_bbl = hash_bbl
			try:
				hashes = [Utils.h_file(self.bbl_node.abspath())]
				hash_bbl = Utils.h_list(hashes)
			except (OSError, IOError):
				Logs.error('could not read bbl.h')
				pass
			prev_hash_ind = hash_ind
			try:
				hashes = [Utils.h_file(self.ind_node.abspath())]
				hash_ind = Utils.h_list(hashes)
			except (OSError, IOError):
				Logs.error('could not read ind.h')
				pass
			prev_hash_nls = hash_nls
			try:
				hashes = [Utils.h_file(self.nls_node.abspath())]
				hash_nls = Utils.h_list(hashes)
			except (OSError, IOError):
				Logs.error('could not read nls.h')
				pass
			if hash and hash == prev_hash:
				Logs.warn('.aux files unchanged')
			if hash_bbl and hash_bbl == prev_hash_bbl:
				Logs.warn('%s unchanged' % (self.bcf_node))
			if hash_ind and hash_ind == prev_hash_ind:
				Logs.warn('%s unchanged' % (self.ind_node))
			if hash_nls and hash_nls == prev_hash_nls:
				Logs.warn('%s unchanged' % (self.nls_node))
			if hash and hash == prev_hash and hash_bbl and hash_bbl == prev_hash_bbl and hash_ind and hash_ind == prev_hash_ind and hash_nls and hash_nls == prev_hash_nls:
				Logs.warn('Breaking loop now.')
				break

			# run the command
			Logs.warn('calling %s' % self.__class__.__name__)

			self.env.env = {}
			self.env.env.update(os.environ)
			self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
			self.env.SRCFILE = srcfile
			self.check_status('error when calling %s' % self.__class__.__name__, fun())
Example #43
	def uid(self):
		lst=self.inputs+self.outputs+[self.link,self.generator.path.abspath()]
		return Utils.h_list(lst)
Example #44
def process_rule(self):
    """
	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')

	Main attributes processed:

	* rule: command to execute, it can be a tuple of strings for multiple commands
	* chmod: permissions for the resulting files (integer value such as Utils.O755)
	* shell: set to False to execute the command directly (default is True to use a shell)
	* scan: scanner function
	* vars: list of variables to trigger rebuilds, such as CFLAGS
	* cls_str: string to display when executing the task
	* cls_keyword: label to display when executing the task
	* cache_rule: by default, try to re-use similar classes, set to False to disable
	* source: list of Node or string objects representing the source files required by this task
	* target: list of Node or string objects representing the files that this task creates
	* cwd: current working directory (Node or string)
	* stdout: standard output, set to None to prevent waf from capturing the text
	* stderr: standard error, set to None to prevent waf from capturing the text
	* timeout: timeout for command execution (Python 3)
	* always: whether to always run the command (False by default)
	* deep_inputs: whether the task must depend on the input file tasks too (False by default)
	"""
    if not getattr(self, 'rule', None):
        return

    # create the task class
    name = str(
        getattr(self, 'name', None) or self.target
        or getattr(self.rule, '__name__', self.rule))

    # or we can put the class in a cache for performance reasons
    try:
        cache = self.bld.cache_rule_attr
    except AttributeError:
        cache = self.bld.cache_rule_attr = {}

    chmod = getattr(self, 'chmod', None)
    shell = getattr(self, 'shell', True)
    color = getattr(self, 'color', 'BLUE')
    scan = getattr(self, 'scan', None)
    _vars = getattr(self, 'vars', [])
    cls_str = getattr(self, 'cls_str', None)
    cls_keyword = getattr(self, 'cls_keyword', None)
    use_cache = getattr(self, 'cache_rule', 'True')
    deep_inputs = getattr(self, 'deep_inputs', False)

    scan_val = has_deps = hasattr(self, 'deps')
    if scan:
        scan_val = id(scan)

    key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str,
                        cls_keyword, scan_val, _vars, deep_inputs))

    cls = None
    if use_cache:
        try:
            cls = cache[key]
        except KeyError:
            pass
    if not cls:
        rule = self.rule
        if chmod is not None:

            def chmod_fun(tsk):
                for x in tsk.outputs:
                    os.chmod(x.abspath(), tsk.generator.chmod)

            if isinstance(rule, tuple):
                rule = list(rule)
                rule.append(chmod_fun)
                rule = tuple(rule)
            else:
                rule = (rule, chmod_fun)

        cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

        if cls_str:
            setattr(cls, '__str__', self.cls_str)

        if cls_keyword:
            setattr(cls, 'keyword', self.cls_keyword)

        if deep_inputs:
            Task.deep_inputs(cls)

        if scan:
            cls.scan = self.scan
        elif has_deps:

            def scan(self):
                nodes = []
                for x in self.generator.to_list(
                        getattr(self.generator, 'deps', None)):
                    node = self.generator.path.find_resource(x)
                    if not node:
                        self.generator.bld.fatal(
                            'Could not find %r (was it declared?)' % x)
                    nodes.append(node)
                return [nodes, []]

            cls.scan = scan

        if use_cache:
            cache[key] = cls

    # now create one instance
    tsk = self.create_task(name)

    for x in ('after', 'before', 'ext_in', 'ext_out'):
        setattr(tsk, x, getattr(self, x, []))

    if hasattr(self, 'stdout'):
        tsk.stdout = self.stdout

    if hasattr(self, 'stderr'):
        tsk.stderr = self.stderr

    if getattr(self, 'timeout', None):
        tsk.timeout = self.timeout

    if getattr(self, 'always', None):
        tsk.always_run = True

    if getattr(self, 'target', None):
        if isinstance(self.target, str):
            self.target = self.target.split()
        if not isinstance(self.target, list):
            self.target = [self.target]
        for x in self.target:
            if isinstance(x, str):
                tsk.outputs.append(self.path.find_or_declare(x))
            else:
                x.parent.mkdir(
                )  # if a node was given, create the required folders
                tsk.outputs.append(x)
        if getattr(self, 'install_path', None):
            self.install_task = self.add_install_files(
                install_to=self.install_path,
                install_from=tsk.outputs,
                chmod=getattr(self, 'chmod', Utils.O644))

    if getattr(self, 'source', None):
        tsk.inputs = self.to_nodes(self.source)
        # bypass the execution of process_source by setting the source to an empty list
        self.source = []

    if getattr(self, 'cwd', None):
        tsk.cwd = self.cwd

    if isinstance(tsk.run, functools.partial):
        # Python documentation says: "partial objects defined in classes
        # behave like static methods and do not transform into bound
        # methods during instance attribute look-up."
        tsk.run = functools.partial(tsk.run, tsk)
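
Building on the attributes documented in the docstring above, a hedged wscript usage sketch (file names and commands are illustrative only):

def build(bld):
	# single shell command; ${SRC} and ${TGT} are substituted by waf
	bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')

	# rebuild when CFLAGS changes, always run, and track an extra dependency via 'deps'
	bld(rule='cat ${SRC} > ${TGT}',
		source='a.txt b.txt',
		target='out.txt',
		vars=['CFLAGS'],
		always=True,
		deps='extra.cfg')
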
Beispiel #45
0
def run_c_code(self, *k, **kw):
    """
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes.
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for an example.

	This function also provides a limited cache. To use it, provide the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""

    lst = [str(v) for (p, v) in kw.items() if p != 'env']
    h = Utils.h_list(lst)
    dir = self.bldnode.abspath() + os.sep + (
        not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

    try:
        os.makedirs(dir)
    except:
        pass

    try:
        os.stat(dir)
    except:
        self.fatal('cannot use the configuration test folder %r' % dir)

    cachemode = getattr(Options.options, 'confcache', None)
    if cachemode == CACHE_RESULTS:
        try:
            proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_c_code'))
            ret = proj['cache_run_c_code']
        except:
            pass
        else:
            if isinstance(ret, str) and ret.startswith('Test does not build'):
                self.fatal(ret)
            return ret

    bdir = os.path.join(dir, 'testbuild')

    if not os.path.exists(bdir):
        os.makedirs(bdir)

    self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
    bld.init_dirs()
    bld.progress_bar = 0
    bld.targets = '*'

    if kw['compile_filename']:
        node = bld.srcnode.make_node(kw['compile_filename'])
        node.write(kw['code'])

    bld.logger = self.logger
    bld.all_envs.update(self.all_envs)  # not really necessary
    bld.env = kw['env']

    o = bld(features=kw['features'],
            source=kw['compile_filename'],
            target='testprog')

    for k, v in kw.items():
        setattr(o, k, v)

    self.to_log("==>\n%s\n<==" % kw['code'])

    # compile the program
    bld.targets = '*'

    ret = -1
    try:
        try:
            bld.compile()
        except Errors.WafError:
            ret = 'Test does not build: %s' % Utils.ex_stack()
            self.fatal(ret)
        else:
            ret = getattr(bld, 'retval', 0)
    finally:
        # cache the results each time
        proj = ConfigSet.ConfigSet()
        proj['cache_run_c_code'] = ret
        proj.store(os.path.join(dir, 'cache_run_c_code'))

    return ret
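
As the docstring notes, the temporary build may store a retval attribute on the build context to change the value returned here. A minimal sketch of that mechanism, loosely modelled on the test_exec feature (the function name record_answer is hypothetical):

def record_answer(tsk):
	# any task executed inside the temporary build can store a value on the
	# build context; run_c_code then returns getattr(bld, 'retval', 0) instead of 0
	tsk.generator.bld.retval = 42
	return 0
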
Beispiel #46
0
def run_c_code(self, *k, **kw):
	"""
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes, and you should not rely
	on it too much (read the note on the cache below).
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for an example.

	This function also provides a limited cache. To use it, provide the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""

	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

	try:
		os.makedirs(dir)
	except:
		pass

	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r' % dir)

	cachemode = getattr(Options.options, 'confcache', None)
	if cachemode == CACHE_RESULTS:
		try:
			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_c_code'))
			ret = proj['cache_run_c_code']
		except:
			pass
		else:
			if isinstance(ret, str) and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar = 0
	bld.targets = '*'

	if kw['compile_filename']:
		node = bld.srcnode.make_node(kw['compile_filename'])
		node.write(kw['code'])

	bld.logger = self.logger
	bld.all_envs.update(self.all_envs) # not really necessary
	bld.env = kw['env']

	o = bld(features=kw['features'], source=kw['compile_filename'], target='testprog')

	for k, v in kw.items():
		setattr(o, k, v)

	self.to_log("==>\n%s\n<==" % kw['code'])

	# compile the program
	bld.targets = '*'

	ret = -1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret = 'Test does not build: %s' % Utils.ex_stack()
			self.fatal(ret)
		else:
			ret = getattr(bld, 'retval', 0)
	finally:
		# cache the results each time
		proj = ConfigSet.ConfigSet()
		proj['cache_run_c_code'] = ret
		proj.store(os.path.join(dir, 'cache_run_c_code'))

	return ret
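
This helper is rarely called directly; the configuration checks in waflib.Tools.c_config build on it and pass the env/features/code keywords through. A hedged configure-time sketch, assuming a C compiler tool can be loaded:

def configure(conf):
	conf.load('compiler_c')
	# builds a small program in a conf_check_* folder through the machinery above;
	# pass --confcache on the command line to reuse cached results
	conf.check(features='c cprogram',
		fragment='int main(void) { return 0; }\n',
		msg='Checking that the C compiler can link a program')
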
Beispiel #47
0
def run_build(self, *k, **kw):
    buf = []
    for key in sorted(kw.keys()):
        v = kw[key]
        if isinstance(v, ConfigSet.ConfigSet):
            continue
        elif hasattr(v, '__call__'):
            buf.append(Utils.h_fun(v))
        else:
            buf.append(str(v))
    h = Utils.h_list(buf)
    dir = self.bldnode.abspath() + os.sep + (
        not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
    cachemode = kw.get('confcache', getattr(Options.options, 'confcache',
                                            None))
    if not cachemode and os.path.exists(dir):
        shutil.rmtree(dir)
    try:
        os.makedirs(dir)
    except OSError:
        pass
    try:
        os.stat(dir)
    except OSError:
        self.fatal('cannot use the configuration test folder %r' % dir)
    if cachemode == 1:
        try:
            proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
        except EnvironmentError:
            pass
        else:
            ret = proj['cache_run_build']
            if isinstance(ret, str) and ret.startswith('Test does not build'):
                self.fatal(ret)
            return ret
    bdir = os.path.join(dir, 'testbuild')
    if not os.path.exists(bdir):
        os.makedirs(bdir)
    cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls',
                                                  'build')
    self.test_bld = bld = Context.create_context(cls_name,
                                                 top_dir=dir,
                                                 out_dir=bdir)
    bld.init_dirs()
    bld.progress_bar = 0
    bld.targets = '*'
    bld.logger = self.logger
    bld.all_envs.update(self.all_envs)
    bld.env = kw['env']
    bld.kw = kw
    bld.conf = self
    kw['build_fun'](bld)
    ret = -1
    try:
        try:
            bld.compile()
        except Errors.WafError:
            ret = 'Test does not build: %s' % traceback.format_exc()
            self.fatal(ret)
        else:
            ret = getattr(bld, 'retval', 0)
    finally:
        if cachemode:
            proj = ConfigSet.ConfigSet()
            proj['cache_run_build'] = ret
            proj.store(os.path.join(dir, 'cache_run_build'))
        else:
            shutil.rmtree(dir)
    return ret
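
run_build can also be driven directly with a custom build function, although conf.check() is the usual front end. A hedged sketch, assuming a configured C toolchain (my_build_fun is hypothetical):

def my_build_fun(bld):
	# write a throwaway source file into the temporary project and compile it
	bld.srcnode.make_node('test.c').write('int main(void) { return 0; }\n')
	bld(features='c cprogram', source='test.c', target='testprog')

def configure(conf):
	conf.load('compiler_c')
	ret = conf.run_build(build_fun=my_build_fun, env=conf.env)
	conf.to_log('run_build returned %r\n' % ret)
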
Beispiel #48
0
	def uid(self):
		"""Returns a unique identifier for the task"""
		lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()]
		return Utils.h_list(lst)
Beispiel #49
0
	def post_recurse(self,node):
		# record each processed wscript file and hash its contents, so that
		# 'autoconfig' can detect changes and trigger a re-configuration
		super(ConfigurationContext,self).post_recurse(node)
		self.hash=Utils.h_list((self.hash,node.read('rb')))
		self.files.append(node.abspath())
Beispiel #50
0
	def uid(self):
		lst = [self.dest, self.path] + self.source
		return Utils.h_list(repr(lst))
Beispiel #51
0
def process_rule(self):
	"""
	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')

	Main attributes processed:

	* rule: command to execute; it can be a tuple of strings for multiple commands
	* chmod: permissions for the resulting files (integer value such as Utils.O755)
	* shell: set to False to execute the command directly (default is True to use a shell)
	* scan: scanner function
	* vars: list of variables to trigger rebuilds, such as CFLAGS
	* cls_str: string to display when executing the task
	* cls_keyword: label to display when executing the task
	* cache_rule: by default, try to re-use similar classes, set to False to disable
	* source: list of Node or string objects representing the source files required by this task
	* target: list of Node or string objects representing the files that this task creates
	* cwd: current working directory (Node or string)
	* stdout: standard output, set to None to prevent waf from capturing the text
	* stderr: standard error, set to None to prevent waf from capturing the text
	* timeout: timeout for command execution (Python 3)
	* always: whether to always run the command (False by default)
	* deep_inputs: whether the task must depend on the input file tasks too (False by default)
	"""
	if not getattr(self, 'rule', None):
		return

	# create the task class
	name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule))

	# or we can put the class in a cache for performance reasons
	try:
		cache = self.bld.cache_rule_attr
	except AttributeError:
		cache = self.bld.cache_rule_attr = {}

	chmod = getattr(self, 'chmod', None)
	shell = getattr(self, 'shell', True)
	color = getattr(self, 'color', 'BLUE')
	scan = getattr(self, 'scan', None)
	_vars = getattr(self, 'vars', [])
	cls_str = getattr(self, 'cls_str', None)
	cls_keyword = getattr(self, 'cls_keyword', None)
	use_cache = getattr(self, 'cache_rule', 'True')
	deep_inputs = getattr(self, 'deep_inputs', False)

	scan_val = has_deps = hasattr(self, 'deps')
	if scan:
		scan_val = id(scan)

	key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs))

	cls = None
	if use_cache:
		try:
			cls = cache[key]
		except KeyError:
			pass
	if not cls:
		rule = self.rule
		if chmod is not None:
			def chmod_fun(tsk):
				for x in tsk.outputs:
					os.chmod(x.abspath(), tsk.generator.chmod)
			if isinstance(rule, tuple):
				rule = list(rule)
				rule.append(chmod_fun)
				rule = tuple(rule)
			else:
				rule = (rule, chmod_fun)

		cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

		if cls_str:
			setattr(cls, '__str__', self.cls_str)

		if cls_keyword:
			setattr(cls, 'keyword', self.cls_keyword)

		if deep_inputs:
			Task.deep_inputs(cls)

		if scan:
			cls.scan = self.scan
		elif has_deps:
			def scan(self):
				nodes = []
				for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
					node = self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
					nodes.append(node)
				return [nodes, []]
			cls.scan = scan

		if use_cache:
			cache[key] = cls

	# now create one instance
	tsk = self.create_task(name)

	for x in ('after', 'before', 'ext_in', 'ext_out'):
		setattr(tsk, x, getattr(self, x, []))

	if hasattr(self, 'stdout'):
		tsk.stdout = self.stdout

	if hasattr(self, 'stderr'):
		tsk.stderr = self.stderr

	if getattr(self, 'timeout', None):
		tsk.timeout = self.timeout

	if getattr(self, 'always', None):
		tsk.always_run = True

	if getattr(self, 'target', None):
		if isinstance(self.target, str):
			self.target = self.target.split()
		if not isinstance(self.target, list):
			self.target = [self.target]
		for x in self.target:
			if isinstance(x, str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir() # if a node was given, create the required folders
				tsk.outputs.append(x)
		if getattr(self, 'install_path', None):
			self.install_task = self.add_install_files(install_to=self.install_path,
				install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))

	if getattr(self, 'source', None):
		tsk.inputs = self.to_nodes(self.source)
		# bypass the execution of process_source by setting the source to an empty list
		self.source = []

	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd

	if isinstance(tsk.run, functools.partial):
		# Python documentation says: "partial objects defined in classes
		# behave like static methods and do not transform into bound
		# methods during instance attribute look-up."
		tsk.run = functools.partial(tsk.run, tsk)
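
As the docstring notes, the rule may also be a tuple of commands, and a chmod value appends a permission step after the commands have run. A hedged sketch of both forms (file names are illustrative):

from waflib import Utils

def build(bld):
	# two shell commands executed in sequence by the same task
	bld(rule=('echo first > ${TGT}', 'echo second >> ${TGT}'), target='generated.txt')

	# copy a script and make the result executable through the chmod_fun step above
	bld(rule='cp ${SRC} ${TGT}', source='run.sh.in', target='run.sh', chmod=Utils.O755)
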
Beispiel #52
0
	def post_recurse(self,node):
		super(ConfigurationContext,self).post_recurse(node)
		self.hash=Utils.h_list((self.hash,node.read('rb')))
		self.files.append(node.abspath())
Beispiel #53
0
	def uid(self):
		return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
Beispiel #54
0
	def uid(self):
		lst = [self.dest, self.path] + self.source
		return Utils.h_list(repr(lst))
Beispiel #55
0
	def uid(self):
		"""Since this task has no inputs, return a unique id derived from the commands."""
		return Utils.h_list([self.__class__.__name__] + self.commands)
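
Such a task usually keeps a list of command strings on the instance; with no input nodes, its identity must come from those commands. A minimal hypothetical sketch of a task class using this uid (run_commands is not part of waf):

from waflib import Task, Utils

class run_commands(Task.Task):
	# hypothetical task that executes a stored list of shell commands
	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		self.commands = []

	def uid(self):
		# no inputs, so identify the task by its class name and its commands
		return Utils.h_list([self.__class__.__name__] + self.commands)

	def run(self):
		for cmd in self.commands:
			ret = self.exec_command(cmd)
			if ret:
				return ret
		return 0
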