Example #1
	def process(self):
		m=self.generator.bld.producer
		try:
			del self.generator.bld.task_sigs[self.uid()]
		except KeyError:
			pass
		try:
			ret=self.run()
		except Exception:
			self.err_msg=Utils.ex_stack()
			self.hasrun=EXCEPTION
			m.error_handler(self)
			return
		if ret:
			self.err_code=ret
			self.hasrun=CRASHED
		else:
			try:
				self.post_run()
			except Errors.WafError:
				pass
			except Exception:
				self.err_msg=Utils.ex_stack()
				self.hasrun=EXCEPTION
			else:
				self.hasrun=SUCCESS
		if self.hasrun!=SUCCESS:
			m.error_handler(self)
Example #2
 def process(self):
     m = self.master
     if m.stop:
         m.out.put(self)
         return
     try:
         del self.generator.bld.task_sigs[self.uid()]
     except:
         pass
     try:
         self.generator.bld.returned_tasks.append(self)
         self.log_display(self.generator.bld)
         ret = self.run()
     except Exception:
         self.err_msg = Utils.ex_stack()
         self.hasrun = EXCEPTION
         m.error_handler(self)
         m.out.put(self)
         return
     if ret:
         self.err_code = ret
         self.hasrun = CRASHED
     else:
         try:
             self.post_run()
         except Errors.WafError:
             pass
         except Exception:
             self.err_msg = Utils.ex_stack()
             self.hasrun = EXCEPTION
         else:
             self.hasrun = SUCCESS
     if self.hasrun != SUCCESS:
         m.error_handler(self)
     m.out.put(self)
Example #3
File: Task.py Project: SjB/waf
	def process(self):
		"""
		Assume that the task has had a new attribute ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`.
		Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing).
		"""
		m = self.master
		if m.stop:
			m.out.put(self)
			return

		# TODO remove the task signature immediately before it is executed
		# in case of failure the task will be executed again
		#try:
		#	del self.generator.bld.task_sigs[self.uid()]
		#except:
		#	pass

		self.generator.bld.returned_tasks.append(self)
		self.log_display(self.generator.bld)

		try:
			ret = self.run()
		except Exception as e:
			self.err_msg = Utils.ex_stack()
			self.hasrun = EXCEPTION

			# TODO cleanup
			m.error_handler(self)
			m.out.put(self)
			return

		if ret:
			self.err_code = ret
			self.hasrun = CRASHED
		else:
			try:
				self.post_run()
			except Errors.WafError:
				pass
			except Exception:
				self.err_msg = Utils.ex_stack()
				self.hasrun = EXCEPTION
			else:
				self.hasrun = SUCCESS
		if self.hasrun != SUCCESS:
			m.error_handler(self)

		m.out.put(self)
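
The process() variants above all follow the same producer/consumer contract: run the task, record the outcome in hasrun, report failures to the master's error_handler, and put the task back on the out queue. The following standalone sketch only mirrors that flow for illustration; Producer and DemoTask are hypothetical stand-ins, not part of waflib, and the status constants are simplified placeholders.

import queue

# simplified stand-ins for the waflib Task status constants
SUCCESS, CRASHED, EXCEPTION = 'success', 'crashed', 'exception'

class Producer:
	# hypothetical stand-in for the scheduler: owns the out queue and collects errors
	def __init__(self):
		self.out = queue.Queue()
		self.stop = False
		self.errors = []
	def error_handler(self, tsk):
		self.errors.append(tsk)

class DemoTask:
	# hypothetical task following the run/post_run/error_handler/out-queue flow shown above
	def __init__(self, master, exit_code=0):
		self.master = master
		self.exit_code = exit_code
		self.hasrun = None
	def run(self):
		return self.exit_code  # non-zero means the command failed
	def post_run(self):
		pass  # a real task would verify its outputs here
	def process(self):
		m = self.master
		if m.stop:
			m.out.put(self)
			return
		try:
			ret = self.run()
		except Exception:
			self.hasrun = EXCEPTION
			m.error_handler(self)
			m.out.put(self)
			return
		if ret:
			self.err_code = ret
			self.hasrun = CRASHED
		else:
			try:
				self.post_run()
			except Exception:
				self.hasrun = EXCEPTION
			else:
				self.hasrun = SUCCESS
		if self.hasrun != SUCCESS:
			m.error_handler(self)
		m.out.put(self)

m = Producer()
for code in (0, 1):
	DemoTask(m, exit_code=code).process()
print(len(m.errors), m.out.qsize())  # -> 1 2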
Example #4
	def load(self,input,tooldir=None,funs=None,with_sys_path=True,cache=False):
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			if cache:
				mag=(tool,id(self.env),tooldir,funs)
				if mag in self.tool_cache:
					self.to_log('(tool %s is already loaded, skipping)'%tool)
					continue
				self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path)
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
			if funs is not None:
				self.eval_rules(funs)
			else:
				func=getattr(module,'configure',None)
				if func:
					if type(func)is type(Utils.readf):func(self)
					else:self.eval_rules(func)
			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
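
For context, the load() method shown above is what a project's wscript calls during configuration. A minimal sketch follows; 'compiler_c' is a standard Waf tool name, while 'mytool' and the 'tools' directory are hypothetical placeholders.

# wscript (sketch)
def options(opt):
	opt.load('compiler_c')

def configure(conf):
	conf.load('compiler_c')               # import the tool and run its configure() function
	conf.load('mytool', tooldir='tools')  # load a project-local tool from ./tools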
Example #5
	def task_status(self, tsk):
		"""
		Obtains the task status to decide whether to run it immediately or not.

		:return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
		:rtype: integer
		"""
		try:
			return tsk.runnable_status()
		except Exception:
			self.processed += 1
			tsk.err_msg = Utils.ex_stack()
			if not self.stop and self.bld.keep:
				self.skip(tsk)
				if self.bld.keep == 1:
					# if -k stop at the first exception, if -kk try to go as far as possible
					if Logs.verbose > 1 or not self.error:
						self.error.append(tsk)
					self.stop = True
				else:
					if Logs.verbose > 1:
						self.error.append(tsk)
				return Task.EXCEPTION
			tsk.hasrun = Task.EXCEPTION

			self.error_handler(tsk)
			return Task.EXCEPTION
Example #6
 def load(self, input, tooldir=None, funs=None, download=True):
     tools = Utils.to_list(input)
     if tooldir:
         tooldir = Utils.to_list(tooldir)
     for tool in tools:
         mag = (tool, id(self.env), funs)
         if mag in self.tool_cache:
             self.to_log("(tool %s is already loaded, skipping)" % tool)
             continue
         self.tool_cache.append(mag)
         module = None
         try:
             module = Context.load_tool(tool, tooldir)
         except ImportError as e:
             if Options.options.download:
                 module = download_tool(tool, ctx=self)
                 if not module:
                     self.fatal(
                         "Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s"
                         % (tool, sys.path, e)
                     )
             else:
                 self.fatal(
                     "Could not load the Waf tool %r from %r (try the --download option?):\n%s" % (tool, sys.path, e)
                 )
         except Exception as e:
             self.to_log("imp %r (%r & %r)" % (tool, tooldir, funs))
             self.to_log(Utils.ex_stack())
             raise
Example #7
	def start(self, node, env):
		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

		self.env = env
		bld = node.ctx
		try:
			self.parse_cache = bld.parse_cache
		except AttributeError:
			bld.parse_cache = {}
			self.parse_cache = bld.parse_cache

		self.addlines(node)

		# macros may be defined on the command-line, so they must be parsed as if they were part of the file
		if env['DEFINES']:
			lst = ['%s %s' % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
			self.lines = [('define', x) for x in lst] + self.lines

		while self.lines:
			(kind, line) = self.lines.pop(0)
			if kind == POPFILE:
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind, line)
			except Exception as e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
Example #8
	def load(self,input,tooldir=None,funs=None,download=True):
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			mag=(tool,id(self.env),funs)
			if mag in self.tool_cache:
				self.to_log('(tool %s is already loaded, skipping)'%tool)
				continue
			self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir)
			except ImportError as e:
				if Options.options.download:
					module=download_tool(tool,ctx=self)
					if not module:
						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
				else:
					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
			if funs is not None:
				self.eval_rules(funs)
			else:
				func=getattr(module,'configure',None)
				if func:
					if type(func)is type(Utils.readf):func(self)
					else:self.eval_rules(func)
			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
Example #9
	def run(self):
		dest = self.inputs[0]
		f = open(dest.abspath(), 'rb')
		content = f.read()
		content = filter(lambda x: x.isalpha(), content)
		f.close()
		content = content.decode()
		bld = self.generator.bld
		if content.find('BIGenDianSyS') >= 0:
			bld.retval = 'big'
		if content.find('LiTTleEnDian') >= 0:
			if getattr(bld, 'retval', None):
				# finding both strings is unlikely to happen, but who knows?
				bld.fatal('Unable to determine the byte order\n%s'% Utils.ex_stack())
			else:
				bld.retval = 'little'
		if not hasattr(bld, 'retval') or bld.retval not in ('big', 'little'):
			bld.fatal('Unable to determine the byte order\n%s'% Utils.ex_stack())
Example #10
def run_c_code(self,*k,**kw):
	lst=[str(v)for(p,v)in kw.items()if p!='env']
	h=Utils.h_list(lst)
	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
	try:
		os.makedirs(dir)
	except OSError:
		pass
	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r'%dir)
	cachemode=getattr(Options.options,'confcache',None)
	if cachemode==CACHE_RESULTS:
		try:
			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
		except OSError:
			pass
		else:
			ret=proj['cache_run_c_code']
			if isinstance(ret,str)and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar=0
	bld.targets='*'
	if kw['compile_filename']:
		node=bld.srcnode.make_node(kw['compile_filename'])
		node.write(kw['code'])
	bld.logger=self.logger
	bld.all_envs.update(self.all_envs)
	bld.env=kw['env']
	o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
	for k,v in kw.items():
		setattr(o,k,v)
	if not kw.get('quiet',None):
		self.to_log("==>\n%s\n<=="%kw['code'])
	bld.targets='*'
	ret=-1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret='Test does not build: %s'%Utils.ex_stack()
			self.fatal(ret)
		else:
			ret=getattr(bld,'retval',0)
	finally:
		proj=ConfigSet.ConfigSet()
		proj['cache_run_c_code']=ret
		proj.store(os.path.join(dir,'cache_run_c_code'))
	return ret
Example #11
def run_c_code(self, *k, **kw):
    lst = [str(v) for (p, v) in kw.items() if p != "env"]
    h = Utils.h_list(lst)
    dir = self.bldnode.abspath() + os.sep + (sys.platform != "win32" and "." or "") + "conf_check_" + Utils.to_hex(h)
    try:
        os.makedirs(dir)
    except:
        pass
    try:
        os.stat(dir)
    except:
        self.fatal("cannot use the configuration test folder %r" % dir)
    cachemode = getattr(Options.options, "confcache", None)
    if cachemode == CACHE_RESULTS:
        try:
            proj = ConfigSet.ConfigSet(os.path.join(dir, "cache_run_c_code"))
            ret = proj["cache_run_c_code"]
        except:
            pass
        else:
            if isinstance(ret, str) and ret.startswith("Test does not build"):
                self.fatal(ret)
            return ret
    bdir = os.path.join(dir, "testbuild")
    if not os.path.exists(bdir):
        os.makedirs(bdir)
    self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
    bld.init_dirs()
    bld.progress_bar = 0
    bld.targets = "*"
    if kw["compile_filename"]:
        node = bld.srcnode.make_node(kw["compile_filename"])
        node.write(kw["code"])
    bld.logger = self.logger
    bld.all_envs.update(self.all_envs)
    bld.env = kw["env"]
    o = bld(features=kw["features"], source=kw["compile_filename"], target="testprog")
    for k, v in kw.items():
        setattr(o, k, v)
    self.to_log("==>\n%s\n<==" % kw["code"])
    bld.targets = "*"
    ret = -1
    try:
        try:
            bld.compile()
        except Errors.WafError:
            ret = "Test does not build: %s" % Utils.ex_stack()
            self.fatal(ret)
        else:
            ret = getattr(bld, "retval", 0)
    finally:
        proj = ConfigSet.ConfigSet()
        proj["cache_run_c_code"] = ret
        proj.store(os.path.join(dir, "cache_run_c_code"))
    return ret
def run_build(self,*k,**kw):
	lst=[str(v)for(p,v)in kw.items()if p!='env']
	h=Utils.h_list(lst)
	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
	try:
		os.makedirs(dir)
	except OSError:
		pass
	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r'%dir)
	cachemode=getattr(Options.options,'confcache',None)
	if cachemode==1:
		try:
			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build'))
		except OSError:
			pass
		except IOError:
			pass
		else:
			ret=proj['cache_run_build']
			if isinstance(ret,str)and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar=0
	bld.targets='*'
	bld.logger=self.logger
	bld.all_envs.update(self.all_envs)
	bld.env=kw['env']
	bld.kw=kw
	bld.conf=self
	kw['build_fun'](bld)
	ret=-1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret='Test does not build: %s'%Utils.ex_stack()
			self.fatal(ret)
		else:
			ret=getattr(bld,'retval',0)
	finally:
		if cachemode==1:
			proj=ConfigSet.ConfigSet()
			proj['cache_run_build']=ret
			proj.store(os.path.join(dir,'cache_run_build'))
		else:
			shutil.rmtree(dir)
	return ret
Example #13
	def process(self):
		"""
		Assume that the task has had a ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`.
		Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing).

		:return: 0 or None if everything is fine
		:rtype: integer
		"""
		# remove the task signature immediately before it is executed
		# in case of failure the task will be executed again
		m = self.generator.bld.producer
		try:
			# TODO another place for this?
			del self.generator.bld.task_sigs[self.uid()]
		except KeyError:
			pass

		try:
			ret = self.run()
		except Exception:
			self.err_msg = Utils.ex_stack()
			self.hasrun = EXCEPTION

			# TODO cleanup
			m.error_handler(self)
			return

		if ret:
			self.err_code = ret
			self.hasrun = CRASHED
		else:
			try:
				self.post_run()
			except Errors.WafError:
				pass
			except Exception:
				self.err_msg = Utils.ex_stack()
				self.hasrun = EXCEPTION
			else:
				self.hasrun = SUCCESS
		if self.hasrun != SUCCESS:
			m.error_handler(self)
Example #14
    def load(self, input, tooldir=None, funs=None, download=True):
        """
		Load Waf tools, which will be imported whenever a build is started.

		:param input: waf tools to import
		:type input: list of string
		:param tooldir: paths for the imports
		:type tooldir: list of string
		:param funs: functions to execute from the waf tools
		:type funs: list of string
		:param download: whether to download the tool from the waf repository
		:type download: bool
		"""

        tools = Utils.to_list(input)
        if tooldir: tooldir = Utils.to_list(tooldir)
        for tool in tools:
            # avoid loading the same tool more than once with the same functions
            # used by composite projects

            mag = (tool, id(self.env), funs)
            if mag in self.tool_cache:
                self.to_log('(tool %s is already loaded, skipping)' % tool)
                continue
            self.tool_cache.append(mag)

            module = None
            try:
                module = Context.load_tool(tool, tooldir)
            except ImportError as e:
                if Options.options.download:
                    module = download_tool(tool, ctx=self)
                    if not module:
                        self.fatal(
                            'Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'
                            % (tool, sys.path, e))
                else:
                    self.fatal(
                        'Could not load the Waf tool %r from %r (try the --download option?):\n%s'
                        % (tool, sys.path, e))
            except Exception as e:
                self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
                self.to_log(Utils.ex_stack())
                raise

            if funs is not None:
                self.eval_rules(funs)
            else:
                func = getattr(module, 'configure', None)
                if func:
                    if type(func) is type(Utils.readf): func(self)
                    else: self.eval_rules(func)

            self.tools.append({'tool': tool, 'tooldir': tooldir, 'funs': funs})
def run_build(self,*k,**kw):
	lst=[str(v)for(p,v)in kw.items()if p!='env']
	h=Utils.h_list(lst)
	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
	try:
		os.makedirs(dir)
	except OSError:
		pass
	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r'%dir)
	cachemode=getattr(Options.options,'confcache',None)
	if cachemode==1:
		try:
			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build'))
		except EnvironmentError:
			pass
		else:
			ret=proj['cache_run_build']
			if isinstance(ret,str)and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret
	bdir=os.path.join(dir,'testbuild')
	if not os.path.exists(bdir):
		os.makedirs(bdir)
	self.test_bld=bld=Context.create_context('build',top_dir=dir,out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar=0
	bld.targets='*'
	bld.logger=self.logger
	bld.all_envs.update(self.all_envs)
	bld.env=kw['env']
	bld.kw=kw
	bld.conf=self
	kw['build_fun'](bld)
	ret=-1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret='Test does not build: %s'%Utils.ex_stack()
			self.fatal(ret)
		else:
			ret=getattr(bld,'retval',0)
	finally:
		if cachemode==1:
			proj=ConfigSet.ConfigSet()
			proj['cache_run_build']=ret
			proj.store(os.path.join(dir,'cache_run_build'))
		else:
			shutil.rmtree(dir)
	return ret
Example #16
    def load(self, input, tooldir=None, funs=None, download=True):
        """
        Load Waf tools, which will be imported whenever a build is started.

        :param input: waf tools to import
        :type input: list of string
        :param tooldir: paths for the imports
        :type tooldir: list of string
        :param funs: functions to execute from the waf tools
        :type funs: list of string
        :param download: whether to download the tool from the waf repository
        :type download: bool
        """

        tools = Utils.to_list(input)
        if tooldir:
            tooldir = Utils.to_list(tooldir)
        for tool in tools:
            # avoid loading the same tool more than once with the same functions
            # used by composite projects

            mag = (tool, id(self.env), funs)
            if mag in self.tool_cache:
                self.to_log('(tool %s is already loaded, skipping)' % tool)
                continue
            self.tool_cache.append(mag)

            module = None
            try:
                module = Context.load_tool(tool, tooldir)
            except ImportError as e:
                if Options.options.download:
                    module = download_tool(tool, ctx=self)
                    if not module:
                        self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
                else:
                    self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
            except Exception as e:
                self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
                self.to_log(Utils.ex_stack())
                raise

            if funs is not None:
                self.eval_rules(funs)
            else:
                func = getattr(module, 'configure', None)
                if func:
                    if type(func) is type(Utils.readf):
                        func(self)
                    else:
                        self.eval_rules(func)

            self.tools.append({'tool': tool, 'tooldir': tooldir, 'funs': funs})
Example #17
def get_msvc_version(conf, compiler, version, target, vcvars):
    Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
    try:
        conf.msvc_cnt += 1
    except AttributeError:
        conf.msvc_cnt = 1
    batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
    batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars, target))
    sout = conf.cmd_and_log(
        ['cmd.exe', '/E:on', '/V:on', '/C',
         batfile.abspath()])
    lines = sout.splitlines()
    if not lines[0]:
        lines.pop(0)
    MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
    for line in lines:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
    if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
        conf.fatal(
            'msvc: Could not find a valid architecture for building (get_msvc_version_3)'
        )
    env = dict(os.environ)
    env.update(PATH=path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
    if 'CL' in env:
        del (env['CL'])
    try:
        conf.cmd_and_log(cxx + ['/help'], env=env)
    except UnicodeError:
        st = Utils.ex_stack()
        if conf.logger:
            conf.logger.error(st)
        conf.fatal('msvc: Unicode error - check the code page?')
    except Exception as e:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler,
                   version, target, str(e))
        conf.fatal(
            'msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)'
        )
Example #18
	def start(self):
		self.total=self.bld.total()
		while not self.stop:
			self.refill_task_list()
			tsk=self.get_next_task()
			if not tsk:
				if self.count:
					continue
				else:
					break
			if tsk.hasrun:
				self.processed+=1
				continue
			if self.stop:
				break
			try:
				st=tsk.runnable_status()
			except Exception:
				self.processed+=1
				tsk.err_msg=Utils.ex_stack()
				if not self.stop and self.bld.keep:
					tsk.hasrun=Task.SKIPPED
					if self.bld.keep==1:
						if Logs.verbose>1 or not self.error:
							self.error.append(tsk)
						self.stop=True
					else:
						if Logs.verbose>1:
							self.error.append(tsk)
					continue
				tsk.hasrun=Task.EXCEPTION
				self.error_handler(tsk)
				continue
			if st==Task.ASK_LATER:
				self.postpone(tsk)
			elif st==Task.SKIP_ME:
				self.processed+=1
				tsk.hasrun=Task.SKIPPED
				self.add_more_tasks(tsk)
			else:
				tsk.position=(self.processed,self.total)
				self.count+=1
				tsk.master=self
				self.processed+=1
				if self.numjobs==1:
					tsk.process()
				else:
					self.add_task(tsk)
		while self.error and self.count:
			self.get_out()
		assert(self.count==0 or self.stop)
		self.free_task_pool()
Example #19
 def start(self):
     self.total = self.bld.total()
     while not self.stop:
         self.refill_task_list()
         tsk = self.get_next_task()
         if not tsk:
             if self.count:
                 continue
             else:
                 break
         if tsk.hasrun:
             self.processed += 1
             continue
         if self.stop:
             break
         try:
             st = tsk.runnable_status()
         except Exception:
             self.processed += 1
             tsk.err_msg = Utils.ex_stack()
             if not self.stop and self.bld.keep:
                 tsk.hasrun = Task.SKIPPED
                 if self.bld.keep == 1:
                     if Logs.verbose > 1 or not self.error:
                         self.error.append(tsk)
                     self.stop = True
                 else:
                     if Logs.verbose > 1:
                         self.error.append(tsk)
                 continue
             tsk.hasrun = Task.EXCEPTION
             self.error_handler(tsk)
             continue
         if st == Task.ASK_LATER:
             self.postpone(tsk)
         elif st == Task.SKIP_ME:
             self.processed += 1
             tsk.hasrun = Task.SKIPPED
             self.add_more_tasks(tsk)
         else:
             tsk.position = (self.processed, self.total)
             self.count += 1
             tsk.master = self
             self.processed += 1
             if self.numjobs == 1:
                 tsk.process()
             else:
                 self.add_task(tsk)
     while self.error and self.count:
         self.get_out()
     assert (self.count == 0 or self.stop)
     self.free_task_pool()
Example #20
def get_ifort_version_win32(conf,compiler,version,target,vcvars):
	try:
		conf.msvc_cnt+=1
	except AttributeError:
		conf.msvc_cnt=1
	batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt)
	batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
"""%(vcvars,target))
	sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()])
	batfile.delete()
	lines=sout.splitlines()
	if not lines[0]:
		lines.pop(0)
	MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
	for line in lines:
		if line.startswith('PATH='):
			path=line[5:]
			MSVC_PATH=path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR=[i for i in line[8:].split(';')if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
	if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
		conf.fatal('msvc: Could not find a valid architecture for building (get_ifort_version_win32)')
	env=dict(os.environ)
	env.update(PATH=path)
	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
	fc=conf.find_program(compiler_name,path_list=MSVC_PATH)
	if'CL'in env:
		del(env['CL'])
	try:
		try:
			conf.cmd_and_log(fc+['/help'],env=env)
		except UnicodeError:
			st=Utils.ex_stack()
			if conf.logger:
				conf.logger.error(st)
			conf.fatal('msvc: Unicode error - check the code page?')
		except Exception as e:
			debug('msvc: get_ifort_version: %r %r %r -> failure %s'%(compiler,version,target,str(e)))
			conf.fatal('msvc: cannot run the compiler in get_ifort_version (run with -v to display errors)')
		else:
			debug('msvc: get_ifort_version: %r %r %r -> OK',compiler,version,target)
	finally:
		conf.env[compiler_name]=''
	return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
Example #21
File: Runner.py Project: zsx/waf
def process_task(tsk):
    m = tsk.master
    if m.stop:
        m.out.put(tsk)
        return

    try:
        tsk.generator.bld.to_log(tsk.display())
        if tsk.__class__.stat:
            ret = tsk.__class__.stat(tsk)
        # actual call to task's run() function
        else:
            ret = tsk.call_run()
    except Exception as e:
        tsk.err_msg = Utils.ex_stack()
        tsk.hasrun = Task.EXCEPTION

        # TODO cleanup
        m.error_handler(tsk)
        m.out.put(tsk)
        return

    if ret:
        tsk.err_code = ret
        tsk.hasrun = Task.CRASHED
    else:
        try:
            tsk.post_run()
        except Errors.WafError:
            pass
        except Exception:
            tsk.err_msg = Utils.ex_stack()
            tsk.hasrun = Task.EXCEPTION
        else:
            tsk.hasrun = Task.SUCCESS
    if tsk.hasrun != Task.SUCCESS:
        m.error_handler(tsk)

    m.out.put(tsk)
Example #22
	def process(self):
		m=self.master
		if m.stop:
			m.out.put(self)
			return
		try:
			ret=self.run()
		except Exception as e:
			self.err_msg=Utils.ex_stack()
			self.hasrun=EXCEPTION
			m.error_handler(self)
			m.out.put(self)
			return
Example #23
def process(tsk):
    m = tsk.master
    if m.stop:
        m.out.put(tsk)
        return

    try:
        tsk.generator.bld.printout(tsk.display())
        if tsk.__class__.stat:
            ret = tsk.__class__.stat(tsk)
            # actual call to task's run() function
        else:
            ret = tsk.call_run()
    except Exception as e:
        tsk.err_msg = Utils.ex_stack()
        tsk.hasrun = EXCEPTION

        # TODO cleanup
        m.error_handler(tsk)
        m.out.put(tsk)
        return

    if ret:
        tsk.err_code = ret
        tsk.hasrun = CRASHED
    else:
        try:
            tsk.post_run()
        except Errors.WafError:
            pass
        except Exception:
            tsk.err_msg = Utils.ex_stack()
            tsk.hasrun = EXCEPTION
        else:
            tsk.hasrun = SUCCESS
    if tsk.hasrun != SUCCESS:
        m.error_handler(tsk)

    m.out.put(tsk)
Example #24
	def process(self):
		"""
		process a task and then put it back in the queue "master.out"
		TODO find a better name for this method
		"""
		m = self.master
		if m.stop:
			m.out.put(self)
			return

		try:
			ret = self.run()
		except Exception as e:
			self.err_msg = Utils.ex_stack()
			self.hasrun = EXCEPTION

			# TODO cleanup
			m.error_handler(self)
			m.out.put(self)
			return

		if ret:
			self.err_code = ret
			self.hasrun = CRASHED
		else:
			try:
				self.post_run()
			except Errors.WafError:
				pass
			except Exception:
				self.err_msg = Utils.ex_stack()
				self.hasrun = EXCEPTION
			else:
				self.hasrun = SUCCESS
		if self.hasrun != SUCCESS:
			m.error_handler(self)

		m.out.put(self)
Example #25
def process(tsk):
	m = tsk.master
	if m.stop:
		m.out.put(tsk)
		return

	tsk.master.set_running(1, id(Utils.threading.current_thread()), tsk)

	try:
		tsk.generator.bld.to_log(tsk.display())
		ret = tsk.run()
	except Exception as e:
		tsk.err_msg = Utils.ex_stack()
		tsk.hasrun = Task.EXCEPTION

		m.error_handler(tsk)
		m.out.put(tsk)
		return

	if ret:
		tsk.err_code = ret
		tsk.hasrun = Task.CRASHED
	else:
		try:
			tsk.post_run()
		except Errors.WafError:
			pass
		except Exception:
			tsk.err_msg = Utils.ex_stack()
			tsk.hasrun = Task.EXCEPTION
		else:
			tsk.hasrun = Task.SUCCESS
	if tsk.hasrun != Task.SUCCESS:
		m.error_handler(tsk)

	tsk.master.set_running(-1, id(Utils.threading.current_thread()), tsk)
	m.out.put(tsk)
Example #26
	def load(self, input, tooldir=None, funs=None, with_sys_path=True, cache=False):
		"""
		Load Waf tools, which will be imported whenever a build is started.

		:param input: waf tools to import
		:type input: list of string
		:param tooldir: paths for the imports
		:type tooldir: list of string
		:param funs: functions to execute from the waf tools
		:type funs: list of string
		:param cache: whether to prevent the tool from running twice
		:type cache: bool
		"""

		tools = Utils.to_list(input)
		if tooldir:
			tooldir = Utils.to_list(tooldir)
		for tool in tools:
			# avoid loading the same tool more than once with the same functions
			# used by composite projects

			if cache:
				mag = (tool, id(self.env), tooldir, funs)
				if mag in self.tool_cache:
					self.to_log('(tool %s is already loaded, skipping)' % tool)
					continue
				self.tool_cache.append(mag)

			module = None
			try:
				module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
				self.to_log(Utils.ex_stack())
				raise

			if funs is not None:
				self.eval_rules(funs)
			else:
				func = getattr(module, 'configure', None)
				if func:
					if type(func) is type(Utils.readf):
						func(self)
					else:
						self.eval_rules(func)

			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
Example #27
 def process(self):
     m = self.master
     if m.stop:
         m.out.put(self)
         return
     self.generator.bld.returned_tasks.append(self)
     self.log_display(self.generator.bld)
     try:
         ret = self.run()
     except Exception as e:
         self.err_msg = Utils.ex_stack()
         self.hasrun = EXCEPTION
         m.error_handler(self)
         m.out.put(self)
         return
Example #28
	def start(self):
		self.total=self.bld.total()
		while not self.stop:
			self.refill_task_list()
			tsk=self.get_next_task()
			if not tsk:
				if self.count:
					continue
				else:
					break
			if tsk.hasrun:
				self.processed+=1
				continue
			try:
				st=tsk.runnable_status()
			except Exception:
				self.processed+=1
				if self.stop and not self.bld.keep:
					tsk.hasrun=Task.SKIPPED
					continue
				tsk.err_msg=Utils.ex_stack()
				tsk.hasrun=Task.EXCEPTION
				self.error_handler(tsk)
				continue
			if st==Task.ASK_LATER:
				self.postpone(tsk)
				if self.outstanding:
					for x in tsk.run_after:
						if x in self.outstanding:
							self.outstanding.remove(x)
							self.outstanding.insert(0,x)
			elif st==Task.SKIP_ME:
				self.processed+=1
				tsk.hasrun=Task.SKIPPED
				self.add_more_tasks(tsk)
			else:
				tsk.position=(self.processed,self.total)
				self.count+=1
				tsk.master=self
				self.processed+=1
				if self.numjobs==1:
					tsk.process()
				else:
					self.add_task(tsk)
		while self.error and self.count:
			self.get_out()
		assert(self.count==0 or self.stop)
		self.free_task_pool()
Example #29
def run_c_code(self, *k, **kw):
	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	dir = self.bldnode.abspath() + os.sep + '.conf_check_' + Utils.to_hex(h)

	try:
		os.makedirs(dir)
	except:
		pass

	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r' % dir)

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir) # keep the temporary build context on an attribute for debugging
	bld.load() # configuration test cache
	bld.targets = '*'

	if kw['compile_filename']:
		node = bld.srcnode.make_node(kw['compile_filename'])
		node.write(kw['code'])

	bld.logger = self.logger
	bld.all_envs.update(self.all_envs)
	bld.all_envs['default'] = kw['env']

	o = bld(features=kw['features'], source=kw['compile_filename'], target='testprog')

	for k, v in kw.items():
		setattr(o, k, v)

	self.to_log("==>\n%s\n<==" % kw['code'])

	# compile the program
	bld.targets = '*'
	try:
		bld.compile()
	except Errors.WafError:
		self.fatal('Test does not build: %s' % Utils.ex_stack())

	return getattr(bld, 'retval', 0)
Example #30
    def load(self, input, tooldir=None, funs=None):
        """
		Load Waf tools, which will be imported whenever a build is started.

		:param input: waf tools to import
		:type input: list of string
		:param tooldir: paths for the imports
		:type tooldir: list of string
		:param funs: functions to execute from the waf tools
		:type funs: list of string
		"""

        tools = Utils.to_list(input)
        if tooldir:
            tooldir = Utils.to_list(tooldir)
        for tool in tools:
            # avoid loading the same tool more than once with the same functions
            # used by composite projects

            mag = (tool, id(self.env), tooldir, funs)
            if mag in self.tool_cache:
                self.to_log("(tool %s is already loaded, skipping)" % tool)
                continue
            self.tool_cache.append(mag)

            module = None
            try:
                module = Context.load_tool(tool, tooldir, ctx=self)
            except ImportError as e:
                self.fatal("Could not load the Waf tool %r from %r\n%s" % (tool, sys.path, e))
            except Exception as e:
                self.to_log("imp %r (%r & %r)" % (tool, tooldir, funs))
                self.to_log(Utils.ex_stack())
                raise

            if funs is not None:
                self.eval_rules(funs)
            else:
                func = getattr(module, "configure", None)
                if func:
                    if type(func) is type(Utils.readf):
                        func(self)
                    else:
                        self.eval_rules(func)

            self.tools.append({"tool": tool, "tooldir": tooldir, "funs": funs})
Example #31
	def load(self, input, tooldir=None, funs=None, with_sys_path=True, cache=False):
		"""
		Load Waf tools, which will be imported whenever a build is started.

		:param input: waf tools to import
		:type input: list of string
		:param tooldir: paths for the imports
		:type tooldir: list of string
		:param funs: functions to execute from the waf tools
		:type funs: list of string
		:param cache: whether to prevent the tool from running twice
		:type cache: bool
		"""

		tools = Utils.to_list(input)
		if tooldir: tooldir = Utils.to_list(tooldir)
		for tool in tools:
			# avoid loading the same tool more than once with the same functions
			# used by composite projects

			if cache:
				mag = (tool, id(self.env), tooldir, funs)
				if mag in self.tool_cache:
					self.to_log('(tool %s is already loaded, skipping)' % tool)
					continue
				self.tool_cache.append(mag)

			module = None
			try:
				module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
				self.to_log(Utils.ex_stack())
				raise

			if funs is not None:
				self.eval_rules(funs)
			else:
				func = getattr(module, 'configure', None)
				if func:
					if type(func) is type(Utils.readf): func(self)
					else: self.eval_rules(func)

			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
Example #32
	def load(self,input,tooldir=None,funs=None):
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			mag=(tool,id(self.env),funs)
			if mag in self.tool_cache:
				self.to_log('(tool %s is already loaded, skipping)'%tool)
				continue
			self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir,ctx=self)
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
Example #33
 def task_status(self, tsk):
     try:
         return tsk.runnable_status()
     except Exception:
         self.processed += 1
         tsk.err_msg = Utils.ex_stack()
         if not self.stop and self.bld.keep:
             self.skip(tsk)
             if self.bld.keep == 1:
                 if Logs.verbose > 1 or not self.error:
                     self.error.append(tsk)
                 self.stop = True
             else:
                 if Logs.verbose > 1:
                     self.error.append(tsk)
             return Task.EXCEPTION
         tsk.hasrun = Task.EXCEPTION
         self.error_handler(tsk)
         return Task.EXCEPTION
Example #34
	def process(self):
		m=self.master
		if m.stop:
			m.out.put(self)
			return
		try:
			del self.generator.bld.task_sigs[self.uid()]
		except:
			pass
		self.generator.bld.returned_tasks.append(self)
		self.log_display(self.generator.bld)
		try:
			ret=self.run()
		except Exception as e:
			self.err_msg=Utils.ex_stack()
			self.hasrun=EXCEPTION
			m.error_handler(self)
			m.out.put(self)
			return
Example #35
	def task_status(self,tsk):
		try:
			return tsk.runnable_status()
		except Exception:
			self.processed+=1
			tsk.err_msg=Utils.ex_stack()
			if not self.stop and self.bld.keep:
				self.skip(tsk)
				if self.bld.keep==1:
					if Logs.verbose>1 or not self.error:
						self.error.append(tsk)
					self.stop=True
				else:
					if Logs.verbose>1:
						self.error.append(tsk)
				return Task.EXCEPTION
			tsk.hasrun=Task.EXCEPTION
			self.error_handler(tsk)
			return Task.EXCEPTION
Example #36
	def load(self,input,tooldir=None,funs=None,download=True):
		tools=Utils.to_list(input)
		if tooldir:tooldir=Utils.to_list(tooldir)
		for tool in tools:
			mag=(tool,id(self.env),funs)
			if mag in self.tool_cache:
				self.to_log('(tool %s is already loaded, skipping)'%tool)
				continue
			self.tool_cache.append(mag)
			module=None
			try:
				module=Context.load_tool(tool,tooldir)
			except ImportError as e:
				if Options.options.download:
					module=download_tool(tool,ctx=self)
					if not module:
						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
				else:
					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
			except Exception as e:
				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
				self.to_log(Utils.ex_stack())
				raise
Example #37
def get_ifort_version_win32(conf, compiler, version, target, vcvars):
	# FIXME hack
	try:
		conf.msvc_cnt += 1
	except AttributeError:
		conf.msvc_cnt = 1
	batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
	batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars,target))
	sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
	batfile.delete()
	lines = sout.splitlines()

	if not lines[0]:
		lines.pop(0)

	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
	for line in lines:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
		conf.fatal('msvc: Could not find a valid architecture for building (get_ifort_version_win32)')

	# Check if the compiler is usable at all.
	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
	env = dict(os.environ)
	env.update(PATH = path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	fc = conf.find_program(compiler_name, path_list=MSVC_PATH)

	# delete CL if it exists, because it could contain parameters which can change cl's behaviour rather catastrophically.
	if 'CL' in env:
		del(env['CL'])

	try:
		try:
			conf.cmd_and_log(fc + ['/help'], env=env)
		except UnicodeError:
			st = Utils.ex_stack()
			if conf.logger:
				conf.logger.error(st)
			conf.fatal('msvc: Unicode error - check the code page?')
		except Exception as e:
			debug('msvc: get_ifort_version: %r %r %r -> failure %s' % (compiler, version, target, str(e)))
			conf.fatal('msvc: cannot run the compiler in get_ifort_version (run with -v to display errors)')
		else:
			debug('msvc: get_ifort_version: %r %r %r -> OK', compiler, version, target)
	finally:
		conf.env[compiler_name] = ''

	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
Example #38
def get_msvc_version(conf, compiler, version, target, vcvars):
	"""
	Checks that an installed compiler actually runs and uses vcvars to obtain the
	environment needed by the compiler.

	:param compiler: compiler type, for looking up the executable name
	:param version: compiler version, for debugging only
	:param target: target architecture
	:param vcvars: batch file to run to check the environment
	:return: the location of the compiler executable, the location of include dirs, and the library paths
	:rtype: tuple of strings
	"""
	Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)

	try:
		conf.msvc_cnt += 1
	except AttributeError:
		conf.msvc_cnt = 1
	batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
	batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars,target))
	sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
	lines = sout.splitlines()

	if not lines[0]:
		lines.pop(0)

	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
	for line in lines:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
		conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')

	# Check if the compiler is usable at all.
	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
	env = dict(os.environ)
	env.update(PATH = path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)

	# delete CL if it exists, because it could contain parameters which can change cl's behaviour rather catastrophically.
	if 'CL' in env:
		del(env['CL'])

	try:
		conf.cmd_and_log(cxx + ['/help'], env=env)
	except UnicodeError:
		st = Utils.ex_stack()
		if conf.logger:
			conf.logger.error(st)
		conf.fatal('msvc: Unicode error - check the code page?')
	except Exception as e:
		Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
		conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
	else:
		Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
	finally:
		conf.env[compiler_name] = ''

	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
Example #39
def run_c_code(self, *k, **kw):
    """
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes.
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.

	This function also provides a limited cache. To use it, provide the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""

    lst = [str(v) for (p, v) in kw.items() if p != 'env']
    h = Utils.h_list(lst)
    dir = self.bldnode.abspath() + os.sep + (
        not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

    try:
        os.makedirs(dir)
    except:
        pass

    try:
        os.stat(dir)
    except:
        self.fatal('cannot use the configuration test folder %r' % dir)

    cachemode = getattr(Options.options, 'confcache', None)
    if cachemode == CACHE_RESULTS:
        try:
            proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_c_code'))
            ret = proj['cache_run_c_code']
        except:
            pass
        else:
            if isinstance(ret, str) and ret.startswith('Test does not build'):
                self.fatal(ret)
            return ret

    bdir = os.path.join(dir, 'testbuild')

    if not os.path.exists(bdir):
        os.makedirs(bdir)

    self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
    bld.init_dirs()
    bld.progress_bar = 0
    bld.targets = '*'

    if kw['compile_filename']:
        node = bld.srcnode.make_node(kw['compile_filename'])
        node.write(kw['code'])

    bld.logger = self.logger
    bld.all_envs.update(self.all_envs)  # not really necessary
    bld.env = kw['env']

    o = bld(features=kw['features'],
            source=kw['compile_filename'],
            target='testprog')

    for k, v in kw.items():
        setattr(o, k, v)

    self.to_log("==>\n%s\n<==" % kw['code'])

    # compile the program
    bld.targets = '*'

    ret = -1
    try:
        try:
            bld.compile()
        except Errors.WafError:
            ret = 'Test does not build: %s' % Utils.ex_stack()
            self.fatal(ret)
        else:
            ret = getattr(bld, 'retval', 0)
    finally:
        # cache the results each time
        proj = ConfigSet.ConfigSet()
        proj['cache_run_c_code'] = ret
        proj.store(os.path.join(dir, 'cache_run_c_code'))

    return ret
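
As a usage illustration, run_c_code is normally reached indirectly through a configuration check rather than called directly; the minimal sketch below assumes a standard C toolchain setup, and the fragment and message strings are just examples.

# wscript (sketch)
def configure(conf):
	conf.load('compiler_c')
	conf.check(fragment='int main(void) { return 0; }',
		features='c cprogram',
		msg='Checking that a trivial C program builds')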
Example #40
class TaskBase(evil):
    color = 'GREEN'
    ext_in = []
    ext_out = []
    before = []
    after = []
    hcode = ''

    def __init__(self, *k, **kw):
        self.hasrun = NOT_RUN
        try:
            self.generator = kw['generator']
        except KeyError:
            self.generator = self

    def __repr__(self):
        return '\n\t{task %r: %s %s}' % (self.__class__.__name__, id(self),
                                         str(getattr(self, 'fun', '')))

    def __str__(self):
        if hasattr(self, 'fun'):
            return 'executing: %s\n' % self.fun.__name__
        return self.__class__.__name__ + '\n'

    def __hash__(self):
        return id(self)

    def exec_command(self, cmd, **kw):
        bld = self.generator.bld
        try:
            if not kw.get('cwd', None):
                kw['cwd'] = bld.cwd
        except AttributeError:
            bld.cwd = kw['cwd'] = bld.variant_dir
        return bld.exec_command(cmd, **kw)

    def runnable_status(self):
        return RUN_ME

    def process(self):
        m = self.master
        if m.stop:
            m.out.put(self)
            return
        try:
            del self.generator.bld.task_sigs[self.uid()]
        except:
            pass
        self.generator.bld.returned_tasks.append(self)
        self.log_display(self.generator.bld)
        try:
            ret = self.run()
        except Exception as e:
            self.err_msg = Utils.ex_stack()
            self.hasrun = EXCEPTION
            m.error_handler(self)
            m.out.put(self)
            return
        if ret:
            self.err_code = ret
            self.hasrun = CRASHED
        else:
            try:
                self.post_run()
            except Errors.WafError:
                pass
            except Exception:
                self.err_msg = Utils.ex_stack()
                self.hasrun = EXCEPTION
            else:
                self.hasrun = SUCCESS
        if self.hasrun != SUCCESS:
            m.error_handler(self)
        m.out.put(self)
Example #41
    def start(self, node, env):
        """
		Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
		and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.

		:param node: source file
		:type node: :py:class:`waflib.Node.Node`
		:param env: config set containing additional defines to take into account
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		"""

        debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

        bld = node.ctx
        try:
            self.parse_cache = bld.parse_cache
        except AttributeError:
            bld.parse_cache = {}
            self.parse_cache = bld.parse_cache

        self.addlines(node)

        # macros may be defined on the command-line, so they must be parsed as if they were part of the file
        if env['DEFINES']:
            try:
                lst = [
                    '%s %s' % (x[0], trimquotes('='.join(x[1:])))
                    for x in [y.split('=') for y in env['DEFINES']]
                ]
                lst.reverse()
                self.lines.extend([('define', x) for x in lst])
            except AttributeError:
                # if the defines are invalid the compiler will tell the user
                pass

        while self.lines:
            (token, line) = self.lines.pop()
            if token == POPFILE:
                self.count_files -= 1
                self.currentnode_stack.pop()
                continue

            try:
                ve = Logs.verbose
                if ve:
                    debug('preproc: line is %s - %s state is %s', token, line,
                          self.state)
                state = self.state

                # make certain we define the state if we are about to enter in an if block
                if token[:2] == 'if':
                    state.append(undefined)
                elif token == 'endif':
                    state.pop()

                # skip lines when in a dead 'if' branch, wait for the endif
                if token[0] != 'e':
                    if skipped in self.state or ignored in self.state:
                        continue

                if token == 'if':
                    ret = eval_macro(tokenize(line), self.defs)
                    if ret: state[-1] = accepted
                    else: state[-1] = ignored
                elif token == 'ifdef':
                    m = re_mac.match(line)
                    if m and m.group(0) in self.defs: state[-1] = accepted
                    else: state[-1] = ignored
                elif token == 'ifndef':
                    m = re_mac.match(line)
                    if m and m.group(0) in self.defs: state[-1] = ignored
                    else: state[-1] = accepted
                elif token == 'include' or token == 'import':
                    (kind, inc) = extract_include(line, self.defs)
                    if inc in self.ban_includes:
                        continue
                    if token == 'import': self.ban_includes.add(inc)
                    if ve:
                        debug('preproc: include found %s    (%s) ', inc, kind)
                    if kind == '"' or not strict_quotes:
                        self.tryfind(inc)
                elif token == 'elif':
                    if state[-1] == accepted:
                        state[-1] = skipped
                    elif state[-1] == ignored:
                        if eval_macro(tokenize(line), self.defs):
                            state[-1] = accepted
                elif token == 'else':
                    if state[-1] == accepted: state[-1] = skipped
                    elif state[-1] == ignored: state[-1] = accepted
                elif token == 'define':
                    try:
                        self.defs[define_name(line)] = line
                    except Exception:
                        raise PreprocError("Invalid define line %s" % line)
                elif token == 'undef':
                    m = re_mac.match(line)
                    if m and m.group(0) in self.defs:
                        self.defs.__delitem__(m.group(0))
                        #print "undef %s" % name
                elif token == 'pragma':
                    if re_pragma_once.match(line.lower()):
                        self.ban_includes.add(self.curfile)
            except Exception as e:
                if Logs.verbose:
                    debug('preproc: line parsing failed (%s): %s %s', e, line,
                          Utils.ex_stack())
Example #42
 def start(self, node, env):
     Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
     self.current_file = node
     self.addlines(node)
     if env.DEFINES:
         lst = format_defines(env.DEFINES)
         lst.reverse()
         self.lines.extend([('define', x) for x in lst])
     while self.lines:
         (token, line) = self.lines.pop()
         if token == POPFILE:
             self.count_files -= 1
             self.currentnode_stack.pop()
             continue
         try:
             state = self.state
             if token[:2] == 'if':
                 state.append(undefined)
             elif token == 'endif':
                 state.pop()
             if token[0] != 'e':
                 if skipped in self.state or ignored in self.state:
                     continue
             if token == 'if':
                 ret = eval_macro(tokenize(line), self.defs)
                 if ret:
                     state[-1] = accepted
                 else:
                     state[-1] = ignored
             elif token == 'ifdef':
                 m = re_mac.match(line)
                 if m and m.group() in self.defs:
                     state[-1] = accepted
                 else:
                     state[-1] = ignored
             elif token == 'ifndef':
                 m = re_mac.match(line)
                 if m and m.group() in self.defs:
                     state[-1] = ignored
                 else:
                     state[-1] = accepted
             elif token == 'include' or token == 'import':
                 (kind, inc) = extract_include(line, self.defs)
                 if kind == '"' or not strict_quotes:
                     self.current_file = self.tryfind(inc)
                     if token == 'import':
                         self.ban_includes.add(self.current_file)
             elif token == 'elif':
                 if state[-1] == accepted:
                     state[-1] = skipped
                 elif state[-1] == ignored:
                     if eval_macro(tokenize(line), self.defs):
                         state[-1] = accepted
             elif token == 'else':
                 if state[-1] == accepted:
                     state[-1] = skipped
                 elif state[-1] == ignored:
                     state[-1] = accepted
             elif token == 'define':
                 try:
                     self.defs[self.define_name(line)] = line
                 except AttributeError:
                     raise PreprocError('Invalid define line %r' % line)
             elif token == 'undef':
                 m = re_mac.match(line)
                 if m and m.group() in self.defs:
                     self.defs.__delitem__(m.group())
             elif token == 'pragma':
                 if re_pragma_once.match(line.lower()):
                     self.ban_includes.add(self.current_file)
         except Exception as e:
             if Logs.verbose:
                 Logs.debug('preproc: line parsing failed (%s): %s %s', e,
                            line, Utils.ex_stack())
Beispiel #43
0
	def start(self,node,env):
		debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
		bld=node.ctx
		try:
			self.parse_cache=bld.parse_cache
		except AttributeError:
			self.parse_cache=bld.parse_cache={}
		self.current_file=node
		self.addlines(node)
		if env['DEFINES']:
			try:
				lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
				lst.reverse()
				self.lines.extend([('define',x)for x in lst])
			except AttributeError:
				pass
		while self.lines:
			(token,line)=self.lines.pop()
			if token==POPFILE:
				self.count_files-=1
				self.currentnode_stack.pop()
				continue
			try:
				ve=Logs.verbose
				if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
				state=self.state
				if token[:2]=='if':
					state.append(undefined)
				elif token=='endif':
					state.pop()
				if token[0]!='e':
					if skipped in self.state or ignored in self.state:
						continue
				if token=='if':
					ret=eval_macro(tokenize(line),self.defs)
					if ret:state[-1]=accepted
					else:state[-1]=ignored
				elif token=='ifdef':
					m=re_mac.match(line)
					if m and m.group(0)in self.defs:state[-1]=accepted
					else:state[-1]=ignored
				elif token=='ifndef':
					m=re_mac.match(line)
					if m and m.group(0)in self.defs:state[-1]=ignored
					else:state[-1]=accepted
				elif token=='include'or token=='import':
					(kind,inc)=extract_include(line,self.defs)
					if ve:debug('preproc: include found %s    (%s) ',inc,kind)
					if kind=='"'or not strict_quotes:
						self.current_file=self.tryfind(inc)
						if token=='import':
							self.ban_includes.add(self.current_file)
				elif token=='elif':
					if state[-1]==accepted:
						state[-1]=skipped
					elif state[-1]==ignored:
						if eval_macro(tokenize(line),self.defs):
							state[-1]=accepted
				elif token=='else':
					if state[-1]==accepted:state[-1]=skipped
					elif state[-1]==ignored:state[-1]=accepted
				elif token=='define':
					try:
						self.defs[define_name(line)]=line
					except Exception:
						raise PreprocError("Invalid define line %s"%line)
				elif token=='undef':
					m=re_mac.match(line)
					if m and m.group(0)in self.defs:
						self.defs.__delitem__(m.group(0))
				elif token=='pragma':
					if re_pragma_once.match(line.lower()):
						self.ban_includes.add(self.current_file)
			except Exception as e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
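
A self-contained sketch of how the env['DEFINES'] entries above are reshaped before being pushed onto the line stack as ('define', ...) tuples; trimquotes_sketch is an assumed stand-in for the trimquotes helper used in the real code.

def trimquotes_sketch(s):
    # strip one level of surrounding quotes, if present
    if len(s) >= 2 and s[0] == s[-1] and s[0] in ('"', "'"):
        return s[1:-1]
    return s

defines = ['FOO=1', 'BAR="baz"', 'NDEBUG']
lst = ['%s %s' % (x[0], trimquotes_sketch('='.join(x[1:])))
       for x in [y.split('=') for y in defines]]
print(lst)            # ['FOO 1', 'BAR baz', 'NDEBUG ']
lst.reverse()         # reversed so the defines are popped in their original order
lines = [('define', x) for x in lst]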
Beispiel #44
0
    def start(self):
        """
        Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
        If only one job is used, then execute the tasks one by one, without consumers.
        """

        file_filter_list = []
        if self.bld.options.file_filter != "":
            file_filter_list = self.bld.options.file_filter.split(";")

        self.total = self.bld.total()
        if self.total == 0:
            self.stop = True

        while not self.stop:

            self.refill_task_list()

            # consider the next task
            tsk = self.get_next_task()
            if not tsk:
                if self.count:
                    # tasks may add new ones after they are run
                    continue
                else:
                    # no tasks to run, no tasks running, time to exit
                    break

            if tsk.hasrun:
                # if the task is marked as "run", just skip it
                self.processed += 1
                continue

            if self.stop:  # stop immediately after a failure was detected
                break

            try:
                if not file_filter_list:
                    st = tsk.runnable_status()  # No file filter, execute all tasks
                else:
                    # File filter, check if we should compile this task
                    bExecuteTask = False
                    st = Task.SKIP_ME
                    # check if the file is used in this task.  If so, we must execute this task
                    for input in tsk.inputs:
                        if input.abspath() in file_filter_list:
                            bExecuteTask = True
                            break

                    # this task is included in the filter
                    if bExecuteTask:
                        # a task may require other tasks run first.  These may have been skipped earlier.
                        if not hasattr(tsk, 'required_tasks'):
                            tsk.required_tasks = []

                            def add_dependent_tasks(depends, tsk):
                                for t in tsk.run_after:
                                    if t.hasrun == Task.NOT_RUN or t.hasrun == Task.SKIPPED:
                                        add_dependent_tasks(depends, t)
                                        depends.append(t)

                            # can't run a task until the run_after list is completed for all tasks in the dependency chain
                            # recurse and create a list of everything that needs to be considered
                            add_dependent_tasks(tsk.required_tasks, tsk)

                        if tsk.required_tasks:
                            # process the run_after tasks first.  postpone the current task similar to ASK_LATER handling
                            self.postpone(tsk)
                            # grab a prereq and replace the task under consideration.  These tasks may have been skipped earlier
                            tsk = tsk.required_tasks.pop(0)
                            st = tsk.runnable_status()
                            # fallout, do normal task processing
                        else:
                            # prerequisites already handled, so the task must be runnable now;
                            # compute the status only for its side effects
                            st = tsk.runnable_status()
                            assert (st != Task.ASK_LATER)
                            st = Task.RUN_ME  # but force the task to run anyway

                            # override the inputs for special handling
                            for input in tsk.inputs:
                                if input.abspath() in file_filter_list:
                                    # patch output file to handle special commands
                                    override_output_file = self.bld.is_option_true(
                                        'show_preprocessed_file'
                                    ) or self.bld.is_option_true(
                                        'show_disassembly')
                                    if override_output_file == True:

                                        # Get file extension
                                        if self.bld.is_option_true(
                                                'show_disassembly'):
                                            file_ext = '.diasm'
                                        elif self.bld.is_option_true(
                                                'show_preprocessed_file'):
                                            file_ext = '.i'
                                        else:
                                            self.bld.fatal(
                                                "Command option file extension output file implementation missing."
                                            )

                                        # Set output file
                                        out_file = input.change_ext(file_ext)
                                        tsk.outputs[0] = out_file

                                        # Add post build message to allow VS user to open the file
                                        if getattr(self.bld.options,
                                                   'execsolution', ""):
                                            self.bld.post_build_msg_warning.append(
                                                '%s(0): warning: %s.' %
                                                (out_file.abspath(),
                                                 "Click here to open output file"
                                                 ))
                            # fallout, resume normal task processing with the overrides
            except Exception:
                self.processed += 1
                # TODO waf 1.7 this piece of code should go in the error_handler
                tsk.err_msg = Utils.ex_stack()
                if not self.stop and self.bld.keep:
                    tsk.hasrun = Task.SKIPPED
                    if self.bld.keep == 1:
                        # if -k stop at the first exception, if -kk try to go as far as possible
                        if Logs.verbose > 1 or not self.error:
                            self.error.append(tsk)
                        self.stop = True
                    else:
                        if Logs.verbose > 1:
                            self.error.append(tsk)
                    continue
                tsk.hasrun = Task.EXCEPTION
                self.error_handler(tsk)
                continue

            if st == Task.ASK_LATER:
                self.postpone(tsk)
            elif st == Task.SKIP_ME:
                self.processed += 1
                tsk.hasrun = Task.SKIPPED
                self.add_more_tasks(tsk)
            else:
                # run me: put the task in ready queue
                tsk.position = (self.processed, self.total)
                self.count += 1
                tsk.master = self
                self.processed += 1

                if self.numjobs == 1 or self.bld.options.file_filter != '':
                    tsk.process()
                else:
                    self.add_task(tsk)

        # self.count represents the tasks that have been made available to the consumer threads
        # collect all the tasks after an error else the message may be incomplete
        while self.error and self.count:
            self.get_out()

        #print loop
        assert (self.count == 0 or self.stop)

        # free the task pool, if any
        self.free_task_pool()
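
A minimal sketch (with made-up paths, not tied to a real build) of the file-filter test used above: a task is only forced through when one of its input nodes' absolute paths appears in the ';'-separated --file-filter value.

file_filter = '/src/a.cpp;/src/b.cpp'
file_filter_list = file_filter.split(';') if file_filter else []

def task_matches(input_paths, filter_list):
    # an empty filter means every task is considered normally
    if not filter_list:
        return True
    return any(p in filter_list for p in input_paths)

print(task_matches(['/src/a.cpp', '/src/a.h'], file_filter_list))   # True
print(task_matches(['/src/z.cpp'], file_filter_list))               # False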
Beispiel #45
0
    def start(self):
        """
        Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
        If only one job is used, then execute the tasks one by one, without consumers.
        """

        self.total = self.bld.total()
        if self.total == 0:
            self.stop = True

        while not self.stop:

            self.refill_task_list()

            # consider the next task
            tsk = self.get_next_task()
            if not tsk:
                if self.count:
                    # tasks may add new ones after they are run
                    continue
                else:
                    # no tasks to run, no tasks running, time to exit
                    break

            if tsk.hasrun:
                # if the task is marked as "run", just skip it
                self.processed += 1
                continue

            if self.stop:  # stop immediately after a failure was detected
                break

            try:
                if self.bld.options.file_filter == "":
                    st = tsk.runnable_status()  # No file filter, execute all tasks
                else:  # File filter, check if we should compile this task

                    st = Task.SKIP_ME
                    if not hasattr(self, 'required_tasks'):
                        self.required_tasks = []

                    # Check if we need to execute this task
                    bExecuteTask = False
                    file_filter_list = self.bld.options.file_filter.split(";")

                    for input in tsk.inputs:
                        if input.abspath() in file_filter_list:
                            bExecuteTask = True
                            break

                    if tsk in self.required_tasks:
                        bExecuteTask = True

                    if bExecuteTask:
                        st = tsk.runnable_status()

                        if st == Task.ASK_LATER:
                            for t in tsk.run_after:
                                self.required_tasks += [t]
                        else:
                            for input in tsk.inputs:
                                if input.abspath() in file_filter_list:
                                    st = Task.RUN_ME

                                    # patch output file to handle special commands
                                    override_output_file = self.bld.is_option_true(
                                        'show_preprocessed_file'
                                    ) or self.bld.is_option_true(
                                        'show_disassembly')
                                    if override_output_file == True:

                                        # Get file extension
                                        if self.bld.is_option_true(
                                                'show_disassembly'):
                                            file_ext = '.diasm'
                                        elif self.bld.is_option_true(
                                                'show_preprocessed_file'):
                                            file_ext = '.i'
                                        else:
                                            self.bld.fatal(
                                                "Command option file extension output file implementation missing."
                                            )

                                        # Set output file
                                        out_file = input.change_ext(file_ext)
                                        tsk.outputs[0] = out_file

                                        # Add post build message to allow VS user to open the file
                                        if getattr(self.bld.options,
                                                   'execsolution', ""):
                                            self.bld.post_build_msg_warning.append(
                                                '%s(0): warning: %s.' %
                                                (out_file.abspath(),
                                                 "Click here to open output file"
                                                 ))
                                    break

            except Exception:
                self.processed += 1
                # TODO waf 1.7 this piece of code should go in the error_handler
                tsk.err_msg = Utils.ex_stack()
                if not self.stop and self.bld.keep:
                    tsk.hasrun = Task.SKIPPED
                    if self.bld.keep == 1:
                        # if -k stop at the first exception, if -kk try to go as far as possible
                        if Logs.verbose > 1 or not self.error:
                            self.error.append(tsk)
                        self.stop = True
                    else:
                        if Logs.verbose > 1:
                            self.error.append(tsk)
                    continue
                tsk.hasrun = Task.EXCEPTION
                self.error_handler(tsk)
                continue

            if st == Task.ASK_LATER:
                self.postpone(tsk)
            elif st == Task.SKIP_ME:
                self.processed += 1
                tsk.hasrun = Task.SKIPPED
                self.add_more_tasks(tsk)
            else:
                # run me: put the task in ready queue
                tsk.position = (self.processed, self.total)
                self.count += 1
                tsk.master = self
                self.processed += 1

                if self.numjobs == 1 or self.bld.options.file_filter != '':
                    tsk.process()
                else:
                    self.add_task(tsk)

        # self.count represents the tasks that have been made available to the consumer threads
        # collect all the tasks after an error else the message may be incomplete
        while self.error and self.count:
            self.get_out()

        #print loop
        assert (self.count == 0 or self.stop)

        # free the task pool, if any
        self.free_task_pool()
Beispiel #46
0
def run_c_code(self, *k, **kw):
	"""
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes, and you should not rely
	on it too much (read the note on the cache below).
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.

	This function also provides a limited cache. To use it, provide the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""

	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

	try:
		os.makedirs(dir)
	except:
		pass

	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r' % dir)

	cachemode = getattr(Options.options, 'confcache', None)
	if cachemode == CACHE_RESULTS:
		try:
			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_c_code'))
			ret = proj['cache_run_c_code']
		except:
			pass
		else:
			if isinstance(ret, str) and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar = 0
	bld.targets = '*'

	if kw['compile_filename']:
		node = bld.srcnode.make_node(kw['compile_filename'])
		node.write(kw['code'])

	bld.logger = self.logger
	bld.all_envs.update(self.all_envs) # not really necessary
	bld.env = kw['env']

	o = bld(features=kw['features'], source=kw['compile_filename'], target='testprog')

	for k, v in kw.items():
		setattr(o, k, v)

	self.to_log("==>\n%s\n<==" % kw['code'])

	# compile the program
	bld.targets = '*'

	ret = -1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret = 'Test does not build: %s' % Utils.ex_stack()
			self.fatal(ret)
		else:
			ret = getattr(bld, 'retval', 0)
	finally:
		# cache the results each time
		proj = ConfigSet.ConfigSet()
		proj['cache_run_c_code'] = ret
		proj.store(os.path.join(dir, 'cache_run_c_code'))

	return ret
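
A hedged usage sketch: calling run_c_code from a wscript configure() with the three obligatory parameters named in the docstring. In practice conf.check() assembles these keyword arguments itself, and the exact entry point varies between waf versions, so treat this as illustrative only.

def configure(conf):
    conf.load('compiler_c')
    ret = conf.run_c_code(
        code='int main(void) { return 0; }',   # written into the test file
        compile_filename='test.c',             # file created for the compilation
        features='c cprogram',                 # features for the single task generator
        env=conf.env.derive())                 # isolated copy of the current ConfigSet
    # a failed build raises conf.fatal() inside run_c_code; on success the
    # return value is 0 unless the build set a 'retval' attribute
    conf.to_log('run_c_code returned %r' % ret)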
Beispiel #47
0
 def start(self, node, env):
     debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
     bld = node.ctx
     try:
         self.parse_cache = bld.parse_cache
     except AttributeError:
         bld.parse_cache = {}
         self.parse_cache = bld.parse_cache
     self.addlines(node)
     if env['DEFINES']:
         try:
             lst = [
                 '%s %s' % (x[0], trimquotes('='.join(x[1:])))
                 for x in [y.split('=') for y in env['DEFINES']]
             ]
             lst.reverse()
             self.lines.extend([('define', x) for x in lst])
         except AttributeError:
             pass
     while self.lines:
         (token, line) = self.lines.pop()
         if token == POPFILE:
             self.count_files -= 1
             self.currentnode_stack.pop()
             continue
         try:
             ve = Logs.verbose
             if ve:
                 debug('preproc: line is %s - %s state is %s', token, line,
                       self.state)
             state = self.state
             if token[:2] == 'if':
                 state.append(undefined)
             elif token == 'endif':
                 state.pop()
             if token[0] != 'e':
                 if skipped in self.state or ignored in self.state:
                     continue
             if token == 'if':
                 ret = eval_macro(tokenize(line), self.defs)
                 if ret: state[-1] = accepted
                 else: state[-1] = ignored
             elif token == 'ifdef':
                 m = re_mac.match(line)
                 if m and m.group(0) in self.defs: state[-1] = accepted
                 else: state[-1] = ignored
             elif token == 'ifndef':
                 m = re_mac.match(line)
                 if m and m.group(0) in self.defs: state[-1] = ignored
                 else: state[-1] = accepted
             elif token == 'include' or token == 'import':
                 (kind, inc) = extract_include(line, self.defs)
                 if inc in self.ban_includes:
                     continue
                 if token == 'import': self.ban_includes.add(inc)
                 if ve:
                     debug('preproc: include found %s    (%s) ', inc, kind)
                 if kind == '"' or not strict_quotes:
                     self.tryfind(inc)
             elif token == 'elif':
                 if state[-1] == accepted:
                     state[-1] = skipped
                 elif state[-1] == ignored:
                     if eval_macro(tokenize(line), self.defs):
                         state[-1] = accepted
             elif token == 'else':
                 if state[-1] == accepted: state[-1] = skipped
                 elif state[-1] == ignored: state[-1] = accepted
             elif token == 'define':
                 try:
                     self.defs[define_name(line)] = line
                 except:
                     raise PreprocError("Invalid define line %s" % line)
             elif token == 'undef':
                 m = re_mac.match(line)
                 if m and m.group(0) in self.defs:
                     self.defs.__delitem__(m.group(0))
             elif token == 'pragma':
                 if re_pragma_once.match(line.lower()):
                     self.ban_includes.add(self.curfile)
         except Exception as e:
             if Logs.verbose:
                 debug('preproc: line parsing failed (%s): %s %s', e, line,
                       Utils.ex_stack())
Beispiel #48
0
    def start(self):
        """
        Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
        If only one job is used, then execute the tasks one by one, without consumers.
        """

        self.total = self.bld.total()

        while not self.stop:

            self.refill_task_list()

            # consider the next task
            tsk = self.get_next_task()
            if not tsk:
                if self.count:
                    # tasks may add new ones after they are run
                    continue
                else:
                    # no tasks to run, no tasks running, time to exit
                    break

            if tsk.hasrun:
                # if the task is marked as "run", just skip it
                self.processed += 1
                continue

            if self.stop:  # stop immediately after a failure was detected
                break

            try:
                st = tsk.runnable_status()
            except Exception:
                self.processed += 1
                if not self.stop and self.bld.keep:
                    tsk.hasrun = Task.SKIPPED
                    if self.bld.keep == 1:
                        # if -k stop at the first exception, if -kk try to go as far as possible
                        self.stop = True
                    continue
                tsk.err_msg = Utils.ex_stack()
                tsk.hasrun = Task.EXCEPTION
                self.error_handler(tsk)
                continue

            if st == Task.ASK_LATER:
                self.postpone(tsk)
                # TODO optimize this
                if self.outstanding:
                    for x in tsk.run_after:
                        if x in self.outstanding:
                            self.outstanding.remove(x)
                            self.outstanding.insert(0, x)
            elif st == Task.SKIP_ME:
                self.processed += 1
                tsk.hasrun = Task.SKIPPED
                self.add_more_tasks(tsk)
            else:
                # run me: put the task in ready queue
                tsk.position = (self.processed, self.total)
                self.count += 1
                tsk.master = self
                self.processed += 1

                if self.numjobs == 1:
                    tsk.process()
                else:
                    self.add_task(tsk)

        # self.count represents the tasks that have been made available to the consumer threads
        # collect all the tasks after an error else the message may be incomplete
        while self.error and self.count:
            self.get_out()

        #print loop
        assert (self.count == 0 or self.stop)

        # free the task pool, if any
        self.free_task_pool()
Beispiel #49
0
    def start(self):
        self.total = self.bld.total()

        while not self.stop:

            self.refill_task_list()

            # consider the next task
            tsk = self.get_next_task()
            if not tsk:
                if self.count:
                    # tasks may add new ones after they are run
                    continue
                else:
                    # no tasks to run, no tasks running, time to exit
                    break

            if tsk.hasrun:
                # if the task is marked as "run", just skip it
                self.processed += 1
                continue

            if self.stop:  # stop immediately after a failure was detected
                break

            try:
                st = tsk.runnable_status()
            except Exception:
                self.processed += 1
                # TODO waf 1.7 this piece of code should go in the error_handler
                tsk.err_msg = Utils.ex_stack()
                if not self.stop and self.bld.keep:
                    tsk.hasrun = Task.SKIPPED
                    if self.bld.keep == 1:
                        # if -k stop at the first exception, if -kk try to go as far as possible
                        if Logs.verbose > 1 or not self.error:
                            self.error.append(tsk)
                        self.stop = True
                    else:
                        if Logs.verbose > 1:
                            self.error.append(tsk)
                    continue
                tsk.hasrun = Task.EXCEPTION
                self.error_handler(tsk)
                continue

            if st == Task.ASK_LATER:
                self.postpone(tsk)
            elif st == Task.SKIP_ME:
                self.processed += 1
                tsk.hasrun = Task.SKIPPED
                self.add_more_tasks(tsk)

                # shrinking sets
                try:
                    ws = tsk.waiting_sets
                except AttributeError:
                    pass
                else:
                    for k in ws:
                        try:
                            k.remove(tsk)
                        except KeyError:
                            pass

            else:
                # run me: put the task in ready queue
                tsk.position = (self.processed, self.total)
                self.count += 1
                tsk.master = self
                self.processed += 1

                if self.numjobs == 1:
                    tsk.process()
                else:
                    self.add_task(tsk)

        # self.count represents the tasks that have been made available to the consumer threads
        # collect all the tasks after an error else the message may be incomplete
        while self.error and self.count:
            self.get_out()

        #print loop
        assert (self.count == 0 or self.stop)

        # free the task pool, if any
        self.free_task_pool()
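
A plain-Python sketch (not waf code) of the -k / -kk policy implemented in the exception handler above: with keep == 1 the first failure is recorded and the runner stops handing out work, while keep > 1 keeps going and only records additional failures when the log is verbose.

class MiniRunner:
    def __init__(self, keep, verbose=0):
        self.keep = keep
        self.verbose = verbose
        self.stop = False
        self.error = []
        self.processed = 0

    def handle_status_error(self, tsk):
        # mirrors the except-branch around runnable_status() above
        self.processed += 1
        if not self.stop and self.keep:
            tsk['hasrun'] = 'SKIPPED'
            if self.keep == 1:
                if self.verbose > 1 or not self.error:
                    self.error.append(tsk)
                self.stop = True
            elif self.verbose > 1:
                self.error.append(tsk)
            return
        tsk['hasrun'] = 'EXCEPTION'
        self.error.append(tsk)

r = MiniRunner(keep=1)
r.handle_status_error({'name': 'a.o'})
print(r.stop, [t['name'] for t in r.error])   # True ['a.o']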
Beispiel #50
0
Datei: msvc.py Projekt: ralic/waf
def get_msvc_version(conf, compiler, version, target, vcvars):
    """
    Checks that an installed compiler actually runs and uses vcvars to obtain the
    environment needed by the compiler.

    :param compiler: compiler type, for looking up the executable name
    :param version: compiler version, for debugging only
    :param target: target architecture
    :param vcvars: batch file to run to check the environment
    :return: the location of the compiler executable, the location of include dirs, and the library paths
    :rtype: tuple of strings
    """
    Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)

    try:
        conf.msvc_cnt += 1
    except AttributeError:
        conf.msvc_cnt = 1
    batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
    batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars, target))
    sout = conf.cmd_and_log(
        ['cmd.exe', '/E:on', '/V:on', '/C',
         batfile.abspath()])
    lines = sout.splitlines()

    if not lines[0]:
        lines.pop(0)

    MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
    for line in lines:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
    if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
        conf.fatal(
            'msvc: Could not find a valid architecture for building (get_msvc_version_3)'
        )

    # Check if the compiler is usable at all.
    # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
    env = dict(os.environ)
    env.update(PATH=path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)

    # delete CL if it exists, because it could contain parameters that change cl's behaviour rather catastrophically.
    if 'CL' in env:
        del (env['CL'])

    try:
        conf.cmd_and_log(cxx + ['/help'], env=env)
    except UnicodeError:
        st = Utils.ex_stack()
        if conf.logger:
            conf.logger.error(st)
        conf.fatal('msvc: Unicode error - check the code page?')
    except Exception as e:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler,
                   version, target, str(e))
        conf.fatal(
            'msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)'
        )
    else:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version,
                   target)
    finally:
        conf.env[compiler_name] = ''

    return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
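
A self-contained sketch of how the vcvars output captured above is parsed: stdout from the batch file is scanned for PATH=/INCLUDE=/LIB= lines and split into per-entry lists (the sample output below is made up for illustration).

sout = """PATH=C:\\VC\\bin;C:\\Windows
INCLUDE=C:\\VC\\include;
LIB=C:\\VC\\lib;C:\\SDK\\lib;
"""
MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
for line in sout.splitlines():
    if line.startswith('PATH='):
        MSVC_PATH = line[5:].split(';')
    elif line.startswith('INCLUDE='):
        MSVC_INCDIR = [i for i in line[8:].split(';') if i]
    elif line.startswith('LIB='):
        MSVC_LIBDIR = [i for i in line[4:].split(';') if i]

print(MSVC_PATH)     # ['C:\\VC\\bin', 'C:\\Windows']
print(MSVC_INCDIR)   # ['C:\\VC\\include']
print(MSVC_LIBDIR)   # ['C:\\VC\\lib', 'C:\\SDK\\lib']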