Example #1
File: ccroot.py Project: ventosus/lv2
	def exec_mf(self):
		"""
		Create manifest files for VS-like compilers (msvc, ifort, ...)
		"""
		if not self.env.MT:
			return 0

		manifest = None
		for out_node in self.outputs:
			if out_node.name.endswith('.manifest'):
				manifest = out_node.abspath()
				break
		else:
			# Should never get here.  If we do, it means the manifest file was
			# never added to the outputs list, thus we don't have a manifest file
			# to embed, so we just return.
			return 0

		# embedding mode. Different for EXE's and DLL's.
		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
		mode = ''
		for x in Utils.to_list(self.generator.features):
			if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
				mode = 1
			elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
				mode = 2

		Logs.debug('msvc: embedding manifest in mode %r', mode)

		lst = [] + self.env.MT
		lst.extend(Utils.to_list(self.env.MTFLAGS))
		lst.extend(['-manifest', manifest])
		lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))

		return super(link_task, self).exec_command(lst)
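
A hedged illustration of what this assembles: for a hypothetical executable foo.exe linked in mode 1, with MT set to mt.exe and MTFLAGS to ['-nologo'] (assumed values, not taken from the example), the final command list would resemble:

# illustrative values only
lst = ['mt.exe', '-nologo', '-manifest', 'foo.exe.manifest',
       '-outputresource:foo.exe;1']
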
Example #2
File: Build.py Project: blablack/ams-lv2
	def hash_env_vars(self, env, vars_lst):
		"""
		Hashes configuration set variables::

			def build(bld):
				bld.hash_env_vars(bld.env, ['CXX', 'CC'])

		This method uses an internal cache.

		:param env: Configuration Set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param vars_lst: list of variables
		:type vars_lst: list of string
		"""

		if not env.table:
			env = env.parent
			if not env:
				return Utils.SIG_NIL

		idx = str(id(env)) + str(vars_lst)
		try:
			cache = self.cache_env
		except AttributeError:
			cache = self.cache_env = {}
		else:
			try:
				return self.cache_env[idx]
			except KeyError:
				pass

		lst = [env[a] for a in vars_lst]
		cache[idx] = ret = Utils.h_list(lst)
		Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
		return ret
Example #3
File: Task.py Project: jgoppert/mavsim
 def can_retrieve_cache(self):
     if not getattr(self, "outputs", None):
         return None
     sig = self.signature()
     ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
     dname = os.path.join(self.generator.bld.cache_global, ssig)
     try:
         t1 = os.stat(dname).st_mtime
     except OSError:
         return None
     for node in self.outputs:
         orig = os.path.join(dname, node.name)
         try:
             shutil.copy2(orig, node.abspath())
             os.utime(orig, None)
         except (OSError, IOError):
             Logs.debug("task: failed retrieving file")
             return None
     try:
         t2 = os.stat(dname).st_mtime
     except OSError:
         return None
     if t1 != t2:
         return None
     for node in self.outputs:
         node.sig = sig
         if self.generator.bld.progress_bar < 1:
             self.generator.bld.to_log("restoring from cache %r\n" % node.abspath())
     self.cached = True
     return True
Example #4
File: hscript.py Project: atlas-org/lcg
def configure(ctx):
    msg.debug('[configure] package name: '+PACKAGE['name'])
    ctx.load('find_root')
    ctx.load('find_xrootd')
    ctx.load('find_gccxml')

    if ctx.hwaf_enabled_tag("STANDALONE"):
        ctx.load('find_root')
        ctx.find_root()
        return

    macro = ctx.hwaf_declare_macro
    
    macro("ROOT_native_version", "${ROOT_config_version}")
    macro("ROOT_base", "${LCG_releases}/ROOT/${ROOT_native_version}")

    macro("ROOT_home", "${ROOT_base}/${LCG_platform}/root")

    macro("ROOTSYS", "${ROOT_home}")
    ctx.hwaf_declare_runtime_env("ROOTSYS")
    
    macro("ROOT_HOME", "${ROOT_home}")
    

    macro("ROOT_export_paths", "${ROOTSYS}")
    ctx.lcg_declare_external_package('ROOT', path='${ROOTSYS}')

    path = ctx.hwaf_subst_vars("${ROOTSYS}")
    binpath = ctx.hwaf_subst_vars("${ROOTSYS}/bin")
    ctx.find_root(path_list=[binpath], extra_paths=[path])
    
    return
Example #5
File: Build.py Project: blablack/ams-lv2
	def clean(self):
		"""
		Remove most files from the build directory, and reset all caches.

		Custom lists of files to clean can be declared as `bld.clean_files`.
		For example, exclude `build/program/myprogram` from getting removed::

			def build(bld):
				bld.clean_files = bld.bldnode.ant_glob('**',
					excl='.lock* config.log c4che/* config.h program/myprogram',
					quiet=True, generator=True)
		"""
		Logs.debug('build: clean called')

		if hasattr(self, 'clean_files'):
			for n in self.clean_files:
				n.delete()
		elif self.bldnode != self.srcnode:
			# would lead to a disaster if top == out
			lst = []
			for env in self.all_envs.values():
				lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES])
			for n in self.bldnode.ant_glob('**/*', excl='.lock* *conf_check_*/** config.log c4che/*', quiet=True):
				if n in lst:
					continue
				n.delete()
		self.root.children = {}

		for v in SAVED_ATTRS:
			if v == 'root':
				continue
			setattr(self, v, {})
Example #6
File: Context.py Project: RunarFreyr/waz
	def cmd_and_log(self, cmd, **kw):
		"""
		execute a command, return the stdout
		this method should be used whenever possible for proper logging

		to obtain stdout+stderr, pass output=BOTH in the arguments (or output=0)
		to obtain just stderr, pass output=STDERR in the arguments (or output=-1)

		@param cmd: args for subprocess.Popen
		@param kw: keyword arguments for subprocess.Popen
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)

		if 'quiet' in kw:
			quiet = kw['quiet']
			del kw['quiet']
		else:
			quiet = None

		if 'output' in kw:
			to_ret = kw['output']
			del kw['output']
		else:
			to_ret = STDOUT

		kw['stdout'] = kw['stderr'] = subprocess.PIPE
		if not quiet:
			self.to_log(cmd)
		try:
			p = subprocess.Popen(cmd, **kw)
			(out, err) = p.communicate()
		except Exception as e:
			try:
				self.to_log(str(err))
			except:
				pass
			raise Errors.WafError('Execution failure', ex=e)

		if not isinstance(out, str):
			out = out.decode('utf-8')
		if not isinstance(err, str):
			err = err.decode('utf-8')

		if out and quiet != STDOUT and quiet != BOTH:
			self.to_log('out: %s' % out)
		if err and quiet != STDERR and quiet != BOTH:
			self.to_log('err: %s' % err)

		if p.returncode:
			e = Errors.WafError('command %r returned %r' % (cmd, p.returncode))
			e.returncode = p.returncode
			raise e

		if to_ret == BOTH:
			return (out, err)
		elif to_ret == STDERR:
			return err
		return out
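
A minimal usage sketch for this method inside a hypothetical wscript; gcc being installed and the import line are assumptions, not part of the example above:

from waflib.Context import BOTH, STDERR

def configure(conf):
    out = conf.cmd_and_log(['gcc', '--version'])                   # stdout only (default)
    out, err = conf.cmd_and_log(['gcc', '--version'], output=BOTH)
    err_only = conf.cmd_and_log(['gcc', '--version'], output=STDERR)
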
Example #7
File: Task.py Project: jgoppert/mavsim
 def runnable_status(self):
     for t in self.run_after:
         if not t.hasrun:
             return ASK_LATER
     bld = self.generator.bld
     try:
         new_sig = self.signature()
     except Errors.TaskNotReady:
         return ASK_LATER
     key = self.uid()
     try:
         prev_sig = bld.task_sigs[key]
     except KeyError:
         Logs.debug("task: task %r must run as it was never run before or the task code changed" % self)
         return RUN_ME
     for node in self.outputs:
         try:
             if node.sig != new_sig:
                 return RUN_ME
         except AttributeError:
             Logs.debug("task: task %r must run as the output nodes do not exist" % self)
             return RUN_ME
     if new_sig != prev_sig:
         return RUN_ME
     return SKIP_ME
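
The three return values (ASK_LATER, RUN_ME, SKIP_ME) drive the scheduler. A common pattern built on this method is a task class that can never be skipped; a minimal sketch, where always_run_task is a hypothetical name:

from waflib import Task

class always_run_task(Task.Task):
    def runnable_status(self):
        # run the signature-based checks first, then refuse to skip
        ret = super(always_run_task, self).runnable_status()
        if ret == Task.SKIP_ME:
            return Task.RUN_ME
        return ret
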
Example #8
def configure(cfg):
    msg.debug('orch: CONFIG CALLED')

    if not cfg.options.orch_config:
        raise RuntimeError('No Orchestration configuration file given (--orch-config)')
    orch_config = []
    for lst in util.string2list(cfg.options.orch_config):
        lst = lst.strip()
        orch_config += glob(lst)
    okay = True
    for maybe in orch_config:
        if os.path.exists(maybe):
            continue
        msg.error('No such file: %s' % maybe)
        okay = False
    if not okay or not orch_config:
        raise ValueError('missing configuration files')
            
    cfg.msg('Orch configuration files', '"%s"' % '", "'.join(orch_config))

    extra = dict(cfg.env)
    extra['top'] = context.top_dir
    extra['out'] = context.out_dir # usually {top}/tmp
    extra['DESTDIR'] = getattr(cfg.options, 'destdir', '')
    suite = pkgconf.load(orch_config, start = cfg.options.orch_start, **extra)

    envmunge.decompose(cfg, suite)

    cfg.msg('Orch configure envs', '"%s"' % '", "'.join(cfg.all_envs.keys()))
    bind_functions(cfg)
    return
Example #9
File: Context.py Project: RunarFreyr/waz
	def exec_command(self, cmd, **kw):
		"""
		execute a command, return the exit status
		if the context has the attribute 'log', capture and log the process stderr/stdout

		this method should be used whenever possible for proper logging

		@param cmd: args for subprocess.Popen
		@param kw: keyword arguments for subprocess.Popen
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)

		try:
			if self.logger:
				# warning: may deadlock with a lot of output (subprocess limitation)

				self.logger.info(cmd)

				kw['stdout'] = kw['stderr'] = subprocess.PIPE
				p = subprocess.Popen(cmd, **kw)
				(out, err) = p.communicate()
				if out:
					self.logger.debug('out: %s' % out.decode('utf-8'))
				if err:
					self.logger.error('err: %s' % err.decode('utf-8'))
				return p.returncode
			else:
				p = subprocess.Popen(cmd, **kw)
				return p.wait()
		except OSError:
			return -1
Example #10
	def load(self, filename):
		tbl = self.table
		code = Utils.readf(filename, m='rU')
		for m in re_imp.finditer(code):
			g = m.group
			tbl[g(2)] = eval(g(3))
		Logs.debug('env: %s' % str(self.table))
Example #11
 def run(self):
     env = self.env
     gen = self.generator
     path = gen.path
     bld = gen.bld
     bjam = gen.bld.root.find_dir(env.BJAM_SRC)
     if not bjam:
         Logs.error("Can not find bjam source")
         return -1
     bjam_exe_relpath = "bin." + env.BJAM_UNAME + "/bjam"
     bjam_exe = bjam.find_resource(bjam_exe_relpath)
     if bjam_exe:
         env.BJAM = bjam_exe.srcpath()
         return 0
     bjam_cmd = ["./build.sh"]
     Logs.debug("runner: " + bjam.srcpath() + "> " + str(bjam_cmd))
     result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
     if not result == 0:
         Logs.error("bjam failed")
         return -1
     bjam_exe = bjam.find_resource(bjam_exe_relpath)
     if bjam_exe:
         env.BJAM = bjam_exe.srcpath()
         return 0
     Logs.error("bjam failed")
     return -1
Example #12
 def restore(self):
     try:
         env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, "build.config.py"))
     except (IOError, OSError):
         pass
     else:
         if env["version"] < Context.HEXVERSION:
             raise Errors.WafError("Version mismatch! reconfigure the project")
         for t in env["tools"]:
             self.setup(**t)
     f = None
     try:
         try:
             f = open(os.path.join(self.variant_dir, Context.DBFILE), "rb")
         except (IOError, EOFError):
             Logs.debug("build: could not load the build cache (missing)")
         else:
             try:
                 waflib.Node.pickle_lock.acquire()
                 waflib.Node.Nod3 = self.node_class
                 try:
                     data = cPickle.load(f)
                 except Exception as e:
                     Logs.debug("build: could not load the build cache %r" % e)
                 else:
                     for x in SAVED_ATTRS:
                         setattr(self, x, data[x])
Example #13
		def parse_node(node):
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			global re_tex
			for match in re_tex.finditer(code):
				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						for k in exts_deps_tex:
							Logs.debug('tex: trying %s%s' % (path, k))
							found = node.parent.find_resource(path + k)

							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										parse_node(found)
										break
							# no break, people are crazy
						if add_name:
							names.append(path)
Example #14
File: javaw.py Project: PseudoSky/voodoo
	def run(self):
		env=self.env
		gen=self.generator
		bld=gen.bld
		wd=bld.bldnode.abspath()
		def to_list(xx):
			if isinstance(xx,str):return[xx]
			return xx
		cmd=[]
		cmd.extend(to_list(env['JAVAC']))
		cmd.extend(['-classpath'])
		cmd.extend(to_list(env['CLASSPATH']))
		cmd.extend(['-d'])
		cmd.extend(to_list(env['OUTDIR']))
		cmd.extend(to_list(env['JAVACFLAGS']))
		files=[a.path_from(bld.bldnode)for a in self.inputs]
		tmp=None
		try:
			if len(str(files))+len(str(cmd))>8192:
				(fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath())
				try:
					os.write(fd,'\n'.join(files))
				finally:
					if tmp:
						os.close(fd)
				if Logs.verbose:
					Logs.debug('runner: %r'%(cmd+files))
				cmd.append('@'+tmp)
			else:
				cmd+=files
			ret=self.exec_command(cmd,cwd=wd,env=env.env or None)
		finally:
			if tmp:
				os.remove(tmp)
		return ret
Example #15
	def restore(self):
		"""
		Load the data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`
		"""
		try:
			env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
		except (IOError, OSError):
			pass
		else:
			if env['version'] < Context.HEXVERSION:
				raise Errors.WafError('Version mismatch! reconfigure the project')
			for t in env['tools']:
				self.setup(**t)

		dbfn = os.path.join(self.variant_dir, Context.DBFILE)
		try:
			data = Utils.readf(dbfn, 'rb')
		except (IOError, EOFError):
			# handle missing file/empty file
			Logs.debug('build: Could not load the build cache %s (missing)' % dbfn)
		else:
			try:
				waflib.Node.pickle_lock.acquire()
				waflib.Node.Nod3 = self.node_class
				try:
					data = cPickle.loads(data)
				except Exception as e:
					Logs.debug('build: Could not pickle the build cache %s: %r' % (dbfn, e))
				else:
					for x in SAVED_ATTRS:
						setattr(self, x, data[x])
			finally:
				waflib.Node.pickle_lock.release()

		self.init_dirs()
Example #16
File: fc.py Project: ETLin/ns3-h264-svc
	def scan(self):
		tmp=fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task=self
		tmp.start(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names))
		return(tmp.nodes,tmp.names)
Example #17
def configure(conf):
    """
    Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
    """
    try:
        test_for_compiler = conf.options.check_fc
    except AttributeError:
        conf.fatal("Add options(opt): opt.load('compiler_fc')")
    for compiler in test_for_compiler.split():
        conf.env.stash()
        conf.start_msg("Checking for %r (fortran compiler)" % compiler)
        try:
            conf.load(compiler)
        except conf.errors.ConfigurationError as e:
            conf.env.revert()
            conf.end_msg(False)
            Logs.debug("compiler_fortran: %r" % e)
        else:
            if conf.env["FC"]:
                conf.end_msg(conf.env.get_flat("FC"))
                conf.env.COMPILER_FORTRAN = compiler
                break
            conf.end_msg(False)
    else:
        conf.fatal("could not configure a fortran compiler!")
Example #18
File: waffle.py Project: hwaf/waffle
def configure(ctx):
    msg.debug('configure...')
    import os
    import os.path

    ctx.load('hep-waftools-base',   tooldir='hep-waftools')
    ctx.load('hep-waftools-system', tooldir='hep-waftools')
    
    g_module = waflib.Context.g_module

    # taken from hepwaf: PREFIX
    # taken from hepwaf: HEPWAF_PROJECT_NAME
    # taken from hepwaf: CMTCFG
    # taken from hepwaf: CMTPKGS
    # taken from hepwaf: INSTALL_AREA
    
    ctx.env.VERSION = g_module.VERSION

    ctx.load('hwaf', tooldir='hep-waftools')
    ctx.hepwaf_configure()

    #print ctx.env.CPPFLAGS
    if waflib.Options.options.usrcfg:
        # store the configuration...
        ctx.env.store(WAFFLE_CFG)
        pass

    #ctx.setenv(ctx.env.CMTCFG, env=ctx.env)
    return
Example #19
File: Build.py Project: janbre/NUTS
	def restore(self):
		try:
			env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
		except(IOError,OSError):
			pass
		else:
			if env['version']<Context.HEXVERSION:
				raise Errors.WafError('Version mismatch! reconfigure the project')
			for t in env['tools']:
				self.setup(**t)
		f=None
		try:
			dbfn=os.path.join(self.variant_dir,Context.DBFILE)
			try:
				f=open(dbfn,'rb')
			except(IOError,EOFError):
				Logs.debug('build: could not load the build cache %s (missing)'%dbfn)
			else:
				try:
					waflib.Node.pickle_lock.acquire()
					waflib.Node.Nod3=self.node_class
					try:
						data=cPickle.load(f)
					except Exception as e:
						Logs.debug('build: could not pickle the build cache %s: %r'%(dbfn,e))
					else:
						for x in SAVED_ATTRS:
							setattr(self,x,data[x])
Example #20
File: sas.py Project: DigitalDan05/waf
	def run(task):
		command = 'SAS'
		fun = sas_fun

		node = task.inputs[0]
		logfilenode = node.change_ext('.log')
		lstfilenode = node.change_ext('.lst')

		# set the cwd
		task.cwd = task.inputs[0].parent.get_src().abspath()
		Logs.debug('runner: %s on %s' % (command, node.abspath()))

		SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
		task.env.env = {'SASINPUTS': SASINPUTS}

		task.env.SRCFILE = node.abspath()
		task.env.LOGFILE = logfilenode.abspath()
		task.env.LSTFILE = lstfilenode.abspath()
		ret = fun(task)
		if ret:
			Logs.error('Running %s on %r returned a non-zero exit' % (command, node))
			Logs.error('SRCFILE = %r' % node)
			Logs.error('LOGFILE = %r' % logfilenode)
			Logs.error('LSTFILE = %r' % lstfilenode)
		return ret
Example #21
def configure(conf):
	"""
	Detects a suitable D compiler

	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
	"""
	try:
		test_for_compiler = conf.options.check_d_compiler or default_compilers()
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_d')")

	for compiler in re.split('[ ,]+', test_for_compiler):
		conf.env.stash()
		conf.start_msg('Checking for %r (D compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r', e)
		else:
			if conf.env.D:
				conf.end_msg(conf.env.get_flat('D'))
				conf.env.COMPILER_D = compiler
				conf.env.commit()
				break
			conf.env.revert()
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a D compiler!')
Example #22
	def cmd_and_log(self,cmd,**kw):
		subprocess=Utils.subprocess
		kw['shell']=isinstance(cmd,str)
		Logs.debug('runner: %r'%cmd)
		if'quiet'in kw:
			quiet=kw['quiet']
			del kw['quiet']
		else:
			quiet=None
		if'output'in kw:
			to_ret=kw['output']
			del kw['output']
		else:
			to_ret=STDOUT
		kw['stdout']=kw['stderr']=subprocess.PIPE
		if quiet is None:
			self.to_log(cmd)
		try:
			p=subprocess.Popen(cmd,**kw)
			(out,err)=p.communicate()
		except Exception as e:
			try:
				self.to_log(str(err))
			except:
				pass
			raise Errors.WafError('Execution failure',ex=e)
Example #23
def hash_env_vars(self, env, vars_lst):
    # reimplement so that the resulting hash does not depend on local paths
    if not env.table:
        env = env.parent
        if not env:
            return Utils.SIG_NIL

    idx = str(id(env)) + str(vars_lst)
    try:
        cache = self.cache_env
    except AttributeError:
        cache = self.cache_env = {}
    else:
        try:
            return self.cache_env[idx]
        except KeyError:
            pass

    v = str([env[a] for a in vars_lst])
    v = v.replace(self.srcnode.abspath().__repr__()[:-1], "")
    m = Utils.md5()
    m.update(v.encode())
    ret = m.digest()

    Logs.debug("envhash: %r %r", ret, v)

    cache[idx] = ret

    return ret
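
A small standalone demonstration (plain Python, not waf itself) of why the source path is stripped before hashing: the digest stays identical when the same project is checked out under two different roots:

import hashlib

def relocatable_hash(values, srcpath):
    # mirror the trick above: drop the repr of the source root (minus the
    # closing quote) from the stringified values before hashing
    v = str(values).replace(repr(srcpath)[:-1], '')
    return hashlib.md5(v.encode()).digest()

a = relocatable_hash(['/home/a/proj/include'], '/home/a/proj')
b = relocatable_hash(['/home/b/proj/include'], '/home/b/proj')
assert a == b  # equal despite the different checkout locations
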
Example #24
File: hwaf-runtime.py Project: hwaf/hwaf
    def execute(self):

        self.init_dirs()
        self.cachedir = self.bldnode.find_node(waflib.Build.CACHE_DIR)
        if not self.cachedir:
            self.fatal(
                "no CACHE_DIR (%s). run 'hwaf configure' first" %
                osp.join(self.bldnode.abspath(), waflib.Build.CACHE_DIR))
            pass

        msg.debug("hwaf: run-cmd options: %s" % waflib.Options.commands)
        if not waflib.Options.commands:
            self.fatal('%s expects at least one package name. got: %s' %
                       (self.cmd, waflib.Options.commands))

        args = []
        while waflib.Options.commands:
            arg = waflib.Options.commands.pop(0)
            args.append(arg)
            pass

        msg.debug("hwaf: run-cmd args: %s" % args)

        self.env.load(self.cachedir.find_node("_cache.py").abspath())
        self.env.HWAF_ENABLE_INSTALL_AREA = '1'
        self.hwaf_setup_runtime()
        ret = hwaf_run_cmd_with_runtime_env(self, args)
        return ret
Example #25
File: winres.py Project: AliZafar120/ns3
	def addlines(self,node):
		self.currentnode_stack.append(node.parent)
		filepath=node.abspath()
		self.count_files+=1
		if self.count_files>c_preproc.recursion_limit:
			raise c_preproc.PreprocError("recursion limit exceeded")
		pc=self.parse_cache
		Logs.debug('preproc: reading file %r',filepath)
		try:
			lns=pc[filepath]
		except KeyError:
			pass
		else:
			self.lines.extend(lns)
			return
		try:
			lines=self.filter_comments(filepath)
			lines.append((c_preproc.POPFILE,''))
			lines.reverse()
			pc[filepath]=lines
			self.lines.extend(lines)
		except IOError:
			raise c_preproc.PreprocError("could not read the file %s"%filepath)
		except Exception:
			if Logs.verbose>0:
				Logs.error("parsing %s failed"%filepath)
				traceback.print_exc()
Example #26
	def scan(self):
		parser = FyppIncludeParser(self.generator.includes_nodes)
		nodes, names = parser.parse(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved: %r' 
				% (self.inputs, nodes, names))
		return (nodes, names)
Example #27
File: Build.py Project: Gnurou/glmark2
	def restore(self):
		try:
			env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
		except EnvironmentError:
			pass
		else:
			if env.version<Context.HEXVERSION:
				raise Errors.WafError('Version mismatch! reconfigure the project')
			for t in env.tools:
				self.setup(**t)
		dbfn=os.path.join(self.variant_dir,Context.DBFILE)
		try:
			data=Utils.readf(dbfn,'rb')
		except(EnvironmentError,EOFError):
			Logs.debug('build: Could not load the build cache %s (missing)',dbfn)
		else:
			try:
				Node.pickle_lock.acquire()
				Node.Nod3=self.node_class
				try:
					data=cPickle.loads(data)
				except Exception as e:
					Logs.debug('build: Could not pickle the build cache %s: %r',dbfn,e)
				else:
					for x in SAVED_ATTRS:
						setattr(self,x,data[x])
			finally:
				Node.pickle_lock.release()
		self.init_dirs()
Example #28
def compile_fun_shell(line):
	extr=[]
	def repl(match):
		g=match.group
		if g('dollar'):return"$"
		elif g('backslash'):return'\\\\'
		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
		return None
	line=reg_act.sub(repl,line)or line
	parm=[]
	dvars=[]
	app=parm.append
	for(var,meth)in extr:
		if var=='SRC':
			if meth:app('tsk.inputs%s'%meth)
			else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])')
		elif var=='TGT':
			if meth:app('tsk.outputs%s'%meth)
			else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])')
		elif meth:
			if meth.startswith(':'):
				app('" ".join([env[%r] %% x for x in env[%r]])'%(var,meth[1:]))
				dvars.extend([var,meth[1:]])
			else:
				app('%s%s'%(var,meth))
		else:
			if not var in dvars:dvars.append(var)
			app("p('%s')"%var)
	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
	else:parm=''
	c=COMPILE_TEMPLATE_SHELL%(line,parm)
	Logs.debug('action: %s'%c)
	return(funex(c),dvars)
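
A simplified sketch of the substitution idea (an assumption-laden miniature: waf's real reg_act also handles $$, backslashes and ${VAR:OP} forms, while this stripped-down version only covers plain ${VAR}):

import re

reg_simple = re.compile(r'\$\{(?P<var>\w+)\}')

def to_format_string(line):
    seen = []
    def repl(match):
        seen.append(match.group('var'))   # record the variable name
        return '%s'                       # leave a format placeholder behind
    return reg_simple.sub(repl, line), seen

fmt, dvars = to_format_string('${CC} ${CFLAGS} ${SRC} -o ${TGT}')
# fmt   == '%s %s %s -o %s'
# dvars == ['CC', 'CFLAGS', 'SRC', 'TGT']
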
Example #29
def put_files_cache(self):
	if not Task.push_addr:
		return
	if not self.outputs:
		return
	if getattr(self, 'cached', None):
		return

	#print "called put_files_cache", id(self)
	bld = self.generator.bld
	sig = self.signature()
	ssig = Utils.to_hex(self.uid() + sig)

	conn = None
	cnt = 0
	try:
		for node in self.outputs:
			# We could re-create the signature of the task with the signature of the outputs
			# in practice, this means hashing the output files
			# this is unnecessary
			try:
				if not conn:
					conn = get_connection(push=True)
				sock_send(conn, ssig, cnt, node.abspath())
			except Exception as e:
				Logs.debug("netcache: could not push the files %r" % e)

				# broken connection? remove this one
				close_connection(conn)
				conn = None
			cnt += 1
	finally:
		release_connection(conn, push=True)

	bld.task_sigs[self.uid()] = self.cache_sig
Example #30
File: compiler_fc.py Project: jrossi/waf
def configure(conf):
	"""
	Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	try: test_for_compiler = conf.options.check_fc
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')")
	orig = conf.env
	for compiler in test_for_compiler.split():
		try:
			conf.start_msg('Checking for %r (fortran compiler)' % compiler)
			conf.env = orig.derive()
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r' % e)
		else:
			if conf.env['FC']:
				orig.table = conf.env.get_merged_dict()
				conf.env = orig
				conf.end_msg(True)
				conf.env.COMPILER_FORTRAN = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a fortran compiler!')
Example #31
	def cmd_and_log(self, cmd, **kw):
		"""
		Executes a process and returns stdout/stderr if the execution is successful.
		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
		will be bound to the WafError object::

			def configure(conf):
				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
				try:
					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
				except Exception as e:
					print(e.stdout, e.stderr)

		:param cmd: args for subprocess.Popen
		:type cmd: list or string
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		:type kw: dict
		:returns: the contents of stdout, stderr or both, depending on the *output* parameter
		:rtype: string or tuple
		:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
		:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r', cmd)

		if 'quiet' in kw:
			quiet = kw['quiet']
			del kw['quiet']
		else:
			quiet = None

		if 'output' in kw:
			to_ret = kw['output']
			del kw['output']
		else:
			to_ret = STDOUT

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError('Program %r not found!' % cmd[0])

		kw['stdout'] = kw['stderr'] = subprocess.PIPE
		if quiet is None:
			self.to_log(cmd)

		cargs = {}
		if 'timeout' in kw:
			if sys.hexversion >= 0x3030000:
				cargs['timeout'] = kw['timeout']
				if not 'start_new_session' in kw:
					kw['start_new_session'] = True
			del kw['timeout']
		if 'input' in kw:
			if kw['input']:
				cargs['input'] = kw['input']
				kw['stdin'] = subprocess.PIPE
			del kw['input']

		if 'cwd' in kw:
			if not isinstance(kw['cwd'], str):
				kw['cwd'] = kw['cwd'].abspath()

		try:
			ret, out, err = Utils.run_process(cmd, kw, cargs)
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if not isinstance(out, str):
			out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
		if not isinstance(err, str):
			err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')

		if out and quiet != STDOUT and quiet != BOTH:
			self.to_log('out: %s' % out)
		if err and quiet != STDERR and quiet != BOTH:
			self.to_log('err: %s' % err)

		if ret:
			e = Errors.WafError('Command %r returned %r' % (cmd, ret))
			e.returncode = ret
			e.stderr = err
			e.stdout = out
			raise e

		if to_ret == BOTH:
			return (out, err)
		elif to_ret == STDERR:
			return err
		return out
Example #32
def load_user_settings(ctx):
    """ Apply all loaded options if they are different that the default value, and no cmd line value is presented """
    global user_settings

    _load_default_settings_file(ctx)

    write_user_settings = False
    user_settings = ConfigParser.ConfigParser()
    user_setting_file = ctx.get_user_settings_node().abspath()
    new_options = {}

    # Load existing user settings
    if not os.path.exists(user_setting_file):
        write_user_settings = True  # No file, hence we need to write it
    else:
        user_settings.read([user_setting_file])

    Logs.debug('default_settings: sys.argv = {}'.format(sys.argv))

    # Load settings and check for newly set ones
    for section_name, settings_list in ctx.default_settings.items():

        # Add not already present sections
        if not user_settings.has_section(section_name):
            user_settings.add_section(section_name)
            write_user_settings = True

        # Iterate over all options in this group
        for settings in settings_list:
            option_name = settings['attribute']
            default_value = settings.get('default_value', '')

            # Load the value from user settings if it is already present
            if user_settings.has_option(section_name, option_name):
                value = user_settings.get(section_name, settings['attribute'])
                LOADED_OPTIONS[option_name] = value
            else:
                # Add info about newly added option
                if section_name not in new_options:
                    new_options[section_name] = []

                new_options[section_name].append(option_name)

                # Load value for current option and stringify it
                value = settings.get('default_value', '')
                if getattr(ctx.options, option_name) != value:
                    value = getattr(ctx.options, option_name)

                if not isinstance(value, str):
                    value = str(value)

                if ATTRIBUTE_CALLBACKS.get(option_name, None):
                    value = ATTRIBUTE_CALLBACKS[option_name](
                        ctx, section_name, settings['attribute'], value)

                (isValid, warning,
                 error) = ctx.verify_settings_option(option_name, value)

                # Add option
                if isValid:
                    user_settings.set(section_name, settings['attribute'],
                                      str(value))
                    LOADED_OPTIONS[option_name] = value
                    write_user_settings = True

            # Check for settings provided by the cmd line
            long_form = settings['long_form']
            short_form = settings.get('short_form', None)

            # Settings on cmdline should have priority
            # explicitly search for either the long form or short form argument, make sure to handle both --option=<SomeThing> and --option <Something> cases
            bOptionSetOnCmdLine = False
            for arg in sys.argv:
                arg_tokens = arg.split('=')
                if (arg_tokens[0]
                        == long_form) or (short_form
                                          and arg_tokens[0] == short_form):
                    Logs.debug(
                        'default_settings: found either long_form, "{}", or short_form, "{}", argument in command line param {}'
                        .format(long_form, short_form, arg))
                    bOptionSetOnCmdLine = True
                    value = getattr(ctx.options, option_name)
                    break

            # Remember option for internal processing
            if bOptionSetOnCmdLine:
                LOADED_OPTIONS[option_name] = value
            elif user_settings.has_option(
                    section_name, option_name
            ):  # Load all settings not coming form the cmd line from the config file
                setattr(ctx.options, option_name,
                        user_settings.get(section_name, option_name))

    # Write user settings
    if write_user_settings:
        ctx.save_user_settings(user_settings)

    # If use_incredibuild option was set but did not pass the ib validation, turn it off
    if ctx.is_option_true(
            'use_incredibuild'
    ) and not internal_validate_incredibuild_registry_settings(ctx):
        setattr(ctx.options, 'use_incredibuild', 'False')

    # If max_cores option was set to < 0, default it to a good hardware value
    if int(ctx.options.max_cores) <= 0:
        max_cores = 8  # fallback value
        try:
            max_cores = multiprocessing.cpu_count()
        except:
            Logs.warn(
                'unable to query hardware for number of hw threads, using "%d"'
                % max_cores)
        setattr(ctx.options, 'max_cores', max_cores)

    return user_settings, new_options
Example #33
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
	"""
	Runs the preprocessor to determine the gcc/icc/clang version

	The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*

	:raise: :py:class:`waflib.Errors.ConfigurationError`
	"""
	cmd = cc + ['-dM', '-E', '-']
	env = conf.env.env or None
	try:
		out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
	except Errors.WafError:
		conf.fatal('Could not determine the compiler version %r' % cmd)

	if gcc:
		if out.find('__INTEL_COMPILER') >= 0:
			conf.fatal('The intel compiler pretends to be gcc')
		if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
			conf.fatal('Could not determine the compiler type')

	if icc and out.find('__INTEL_COMPILER') < 0:
		conf.fatal('Not icc/icpc')

	if clang and out.find('__clang__') < 0:
		conf.fatal('Not clang/clang++')
	if not clang and out.find('__clang__') >= 0:
		conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')

	k = {}
	if icc or gcc or clang:
		out = out.splitlines()
		for line in out:
			lst = shlex.split(line)
			if len(lst)>2:
				key = lst[1]
				val = lst[2]
				k[key] = val

		def isD(var):
			return var in k

		# Some documentation is available at http://predef.sourceforge.net
		# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
		if not conf.env.DEST_OS:
			conf.env.DEST_OS = ''
		for i in MACRO_TO_DESTOS:
			if isD(i):
				conf.env.DEST_OS = MACRO_TO_DESTOS[i]
				break
		else:
			if isD('__APPLE__') and isD('__MACH__'):
				conf.env.DEST_OS = 'darwin'
			elif isD('__unix__'): # unix must be tested last as it's a generic fallback
				conf.env.DEST_OS = 'generic'

		if isD('__ELF__'):
			conf.env.DEST_BINFMT = 'elf'
		elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
			conf.env.DEST_BINFMT = 'pe'
			if not conf.env.IMPLIBDIR:
				conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
			conf.env.LIBDIR = conf.env.BINDIR
		elif isD('__APPLE__'):
			conf.env.DEST_BINFMT = 'mac-o'

		if not conf.env.DEST_BINFMT:
			# Infer the binary format from the os name.
			conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

		for i in MACRO_TO_DEST_CPU:
			if isD(i):
				conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
				break

		Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
		if icc:
			ver = k['__INTEL_COMPILER']
			conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
		else:
			if isD('__clang__') and isD('__clang_major__'):
				conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
			else:
				# older clang versions and gcc
				conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
	return k
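
The same probe can be reproduced outside waf; a standalone sketch, assuming gcc is on the PATH:

import shlex, subprocess

p = subprocess.Popen(['gcc', '-dM', '-E', '-'],
                     stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out = p.communicate(b'\n')[0].decode()

k = {}
for line in out.splitlines():
    lst = shlex.split(line)      # e.g. ['#define', '__GNUC__', '9']
    if len(lst) > 2:
        k[lst[1]] = lst[2]
print(k.get('__GNUC__'), k.get('__GNUC_MINOR__'), k.get('__GNUC_PATCHLEVEL__'))
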
Example #34
File: c_preproc.py Project: faddat/lib
 def start(self, node, env):
     Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
     self.current_file = node
     self.addlines(node)
     if env.DEFINES:
         lst = format_defines(env.DEFINES)
         lst.reverse()
         self.lines.extend([('define', x) for x in lst])
     while self.lines:
         (token, line) = self.lines.pop()
         if token == POPFILE:
             self.count_files -= 1
             self.currentnode_stack.pop()
             continue
         try:
             ve = Logs.verbose
             if ve:
                 Logs.debug('preproc: line is %s - %s state is %s', token,
                            line, self.state)
             state = self.state
             if token[:2] == 'if':
                 state.append(undefined)
             elif token == 'endif':
                 state.pop()
             if token[0] != 'e':
                 if skipped in self.state or ignored in self.state:
                     continue
             if token == 'if':
                 ret = eval_macro(tokenize(line), self.defs)
                 if ret: state[-1] = accepted
                 else: state[-1] = ignored
             elif token == 'ifdef':
                 m = re_mac.match(line)
                 if m and m.group() in self.defs: state[-1] = accepted
                 else: state[-1] = ignored
             elif token == 'ifndef':
                 m = re_mac.match(line)
                 if m and m.group() in self.defs: state[-1] = ignored
                 else: state[-1] = accepted
             elif token == 'include' or token == 'import':
                 (kind, inc) = extract_include(line, self.defs)
                 if ve:
                     Logs.debug('preproc: include found %s    (%s) ', inc,
                                kind)
                 if kind == '"' or not strict_quotes:
                     self.current_file = self.tryfind(inc)
                     if token == 'import':
                         self.ban_includes.add(self.current_file)
             elif token == 'elif':
                 if state[-1] == accepted:
                     state[-1] = skipped
                 elif state[-1] == ignored:
                     if eval_macro(tokenize(line), self.defs):
                         state[-1] = accepted
             elif token == 'else':
                 if state[-1] == accepted: state[-1] = skipped
                 elif state[-1] == ignored: state[-1] = accepted
             elif token == 'define':
                 try:
                     self.defs[self.define_name(line)] = line
                 except AttributeError:
                     raise PreprocError('Invalid define line %r' % line)
             elif token == 'undef':
                 m = re_mac.match(line)
                 if m and m.group() in self.defs:
                     self.defs.__delitem__(m.group())
             elif token == 'pragma':
                 if re_pragma_once.match(line.lower()):
                     self.ban_includes.add(self.current_file)
         except Exception as e:
             if Logs.verbose:
                 Logs.debug('preproc: line parsing failed (%s): %s %s', e,
                            line, Utils.ex_stack())
Example #35
def copy_tree2(src,
               dst,
               overwrite_existing_file=False,
               pattern_paths=None,
               is_pattern_required=False,
               fail_on_error=True):
    """
    Copy a tree from a source folder to a destination folder. If the destination does not exist, it is
    created automatically. If a destination file does exist, it is overwritten either unconditionally (per the
    overwrite_existing_file parameter) or only when the file differs (currently only the file size is checked)

    :param src:     The source tree to copy from
    :param dst:     The target tree to copy to
    :param overwrite_existing_file:     Flag to always overwrite (otherwise follow the copy rule)
    :param pattern_paths:    Any particular files/patterns to match; these are required paths when
     is_pattern_required is True, and ignore paths when it is False
    :param is_pattern_required: Tells the function how to interpret pattern_paths (see above)
    :param fail_on_error:    Raise the copy error instead of only warning about it
    :return:  The number of files actually copied
    """

    if os.path.isdir(src) is False:
        Logs.warn(
            '[WARN] Unable to copy {} to destination {} using copy_tree2. {} is not a directory.'
            .format(src, dst, src))
        return 0

    src = os.path.normpath(src)
    dst = os.path.normpath(dst)

    # get all non ignored paths/files in dir based on ignore path input
    def _get_non_ignored_paths_in_dir_recursively(src, ignore_paths):
        non_ignored_paths = []
        paths = os.listdir(src)

        for item in paths:
            ignore = False
            src_path = os.path.join(src, item)

            if any(path in src_path for path in ignore_paths):
                continue

            if os.path.isdir(src_path):
                non_ignored_paths.extend(
                    _get_non_ignored_paths_in_dir_recursively(
                        src_path, ignore_paths))
            else:
                non_ignored_paths.append(src_path)
        return non_ignored_paths

    # copy everything if pattern_path is none
    paths_to_copy = os.listdir(src)
    copied_files = 0

    if pattern_paths is not None:
        filtered_paths = []
        if is_pattern_required is True:
            for path in pattern_paths:
                filtered_paths.extend(glob.glob(os.path.join(src, path)))

        else:
            filtered_paths = _get_non_ignored_paths_in_dir_recursively(
                src, pattern_paths)

        # sanitize the paths in filtered_paths for further consumption (we only want relative path from src)
        for idx, item in enumerate(filtered_paths):
            item = os.path.normpath(item)
            sep = src + os.path.sep
            filtered_paths[idx] = item.replace(sep, "")

        paths_to_copy = filtered_paths

    # now we copy all files specified from the paths in paths_to_copy
    for path in paths_to_copy:
        srcname = os.path.join(src, path)
        dstname = os.path.join(dst, path)

        if os.path.isdir(srcname):
            # if we encounter a srcname that is a folder, we assume that we want the entire folder
            # pattern_paths to None tells this function that we want to copy the entire folder
            copied_files += copy_tree2(srcname,
                                       dstname,
                                       overwrite_existing_file,
                                       None,
                                       fail_on_error=fail_on_error)

        else:
            # check to see if we should copy the file
            copy = overwrite_existing_file or should_overwrite_file(
                srcname, dstname)
            if copy is False:
                continue
            file_copied = 0

            Logs.debug('lumberyard: Copying file {} to {}'.format(
                srcname, dstname))
            try:
                # In case the file is readonly, we'll remove the existing file first
                if os.path.exists(dstname):
                    os.chmod(dstname, stat.S_IWRITE)

                # In the case where the path doesn't exist
                elif os.path.exists(os.path.dirname(dstname)) is False:
                    try:
                        os.makedirs(os.path.dirname(dstname))
                    except:
                        pass
                    if not os.path.exists(dst):
                        raise shutil.Error(
                            "Unable to create target folder `{}`".format(dst))

                try:
                    file_copied = fast_copy2(srcname, dstname)
                except:
                    # Second try with detail hash check
                    file_copied = fast_copy2(srcname, dstname, True)

            except Exception as err:
                if fail_on_error:
                    raise err
                else:
                    Logs.warn(
                        '[WARN] Unable to copy {} to destination {}.  Check the file permissions or any process that may be locking it.'
                        .format(srcname, dstname))
            copied_files += file_copied

    return copied_files
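
Two hedged usage sketches (all folder names and patterns are hypothetical):

# copy everything under Assets except logs and temp files
copy_tree2('Assets', 'Build/Assets',
           pattern_paths=['.log', '.tmp'], is_pattern_required=False)

# copy only the DLLs, always overwriting the destination
copy_tree2('Bin64', 'Deploy/Bin64', overwrite_existing_file=True,
           pattern_paths=['*.dll'], is_pattern_required=True)
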
Example #36
def get_project_settings_map(ctx):
    """
    Get the map of all project settings that were found in the root folder
    Util function to load the <engine root>/<project name>/project.json file and cache it within the build context.
    :param ctx:
    :return:
    """

    try:
        return ctx.project_settings_map
    except AttributeError:
        pass

    if not hasattr(ctx, 'engine_path'):
        ctx.calculate_engine_path()

    # Warn on a legacy projects file
    if os.path.exists(os.path.join(ctx.engine_path, '_WAF_', 'projects.json')):
        Logs.warn(
            'projects.json file is deprecated.  Please follow the migration step listed in the release notes.'
        )

    projects_settings = {}
    projects_settings_node_list = ctx.engine_node.ant_glob(
        '*/{}'.format(PROJECT_SETTINGS_FILE))

    # If we are an external project, search for project paths in the external project path as well
    if os.path.normcase(ctx.path.abspath()) != os.path.normcase(
            ctx.engine_path):
        external_settings_node_list = ctx.path.ant_glob(
            '*/{}'.format(PROJECT_SETTINGS_FILE))
        for external_settings_node in external_settings_node_list:
            if external_settings_node not in projects_settings_node_list:
                projects_settings_node_list.append(external_settings_node)

    # Build update the map of project settings from the globbing for the project.json file
    for project_settings_node in projects_settings_node_list:

        Logs.debug('lumberyard: Parsing project file {}'.format(
            project_settings_node.abspath()))
        project_json = ctx.parse_json_file(project_settings_node)
        project_name = project_json.get(PROJECT_NAME_FIELD,
                                        '').encode('ASCII', 'ignore')
        if not project_name:
            ctx.fatal(
                "Project settings file '{}' missing attribute '{}'".format(
                    project_settings_node.abspath(), PROJECT_NAME_FIELD))

        if project_name in projects_settings:
            ctx.fatal(
                'Another project named "%s" has been detected:\n%s\n%s' %
                (project_name, project_settings_node.parent.abspath(),
                 projects_settings[project_name]['project_node'].abspath()))

        project_json['project_node'] = project_settings_node.parent
        projects_settings[project_name] = project_json
        Logs.debug('lumberyard: project file %s is for project %s' %
                   (project_settings_node.abspath(), project_name))

    ctx.project_settings_map = projects_settings
    return ctx.project_settings_map
Example #37
def add_game_projects_to_specs(self):
    """Add game projects to specs that have them defined"""

    specs_to_include = self.loaded_specs()

    project_settings_map = self.get_project_settings_map()

    for spec_name in specs_to_include:

        # Get the defined game project per spec and only add game projects
        # to specs that have them defined
        game_projects = self.spec_game_projects(spec_name)

        if len(game_projects) == 0 and not self.spec_disable_games(spec_name):
            # Handle the legacy situation where the game projects is set in the enabled_game_projects in user_settings.options
            game_projects = split_comma_delimited_string(
                self.options.enabled_game_projects, True)

        # Skip if there are no game projects for this spec
        if len(game_projects) == 0:
            continue

        # Add both the game modules for the project (legacy) and all the launchers
        spec_dict = self.loaded_specs_dict[spec_name]
        if 'modules' not in spec_dict:
            spec_dict['modules'] = []
        if 'projects' not in spec_dict:
            spec_dict['projects'] = []
        spec_list = spec_dict['modules']
        spec_proj = spec_dict['projects']

        for project in game_projects:

            if project not in project_settings_map:
                continue

            spec_proj.append(project)

            # Add any additional modules from the project's project.json configuration
            for module in project_modules(self, project):
                if not module in spec_list:
                    spec_list.append(module)
                    Logs.debug(
                        "lumberyard: Added module to spec list: %s for %s" %
                        (module, spec_name))

            # if we have game projects, also allow the building of the launcher from templates:
            available_launchers = self.get_available_launchers(project)

            for available_launcher_spec in available_launchers:
                if available_launcher_spec not in spec_dict:
                    spec_dict[available_launcher_spec] = []
                spec_list_to_append_to = spec_dict[available_launcher_spec]
                available_spec_list = available_launchers[
                    available_launcher_spec]
                for module in available_spec_list:
                    launcher_name = project + module
                    Logs.debug(
                        "lumberyard: Added launcher %s for %s (to %s spec in in %s sub_spec)"
                        % (launcher_name, project, spec_name,
                           available_launcher_spec))
                    spec_list_to_append_to.append(launcher_name)
Example #38
def get_cc_version(conf, cc, gcc=False, icc=False):
    """
	Run the preprocessor to determine the compiler version

	The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*
	"""
    cmd = cc + ['-dM', '-E', '-']
    env = conf.env.env or None
    try:
        p = Utils.subprocess.Popen(cmd,
                                   stdin=Utils.subprocess.PIPE,
                                   stdout=Utils.subprocess.PIPE,
                                   stderr=Utils.subprocess.PIPE,
                                   env=env)
        p.stdin.write('\n'.encode())
        out = p.communicate()[0]
    except Exception:
        conf.fatal('Could not determine the compiler version %r' % cmd)

    if not isinstance(out, str):
        out = out.decode(sys.stdout.encoding or 'iso8859-1')

    if gcc:
        if out.find('__INTEL_COMPILER') >= 0:
            conf.fatal('The intel compiler pretends to be gcc')
        if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
            conf.fatal('Could not determine the compiler type')

    if icc and out.find('__INTEL_COMPILER') < 0:
        conf.fatal('Not icc/icpc')

    k = {}
    if icc or gcc:
        out = out.splitlines()
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

        def isD(var):
            return var in k

        def isT(var):
            return var in k and k[var] != '0'

        # Some documentation is available at http://predef.sourceforge.net
        # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
        if not conf.env.DEST_OS:
            conf.env.DEST_OS = ''
        for i in MACRO_TO_DESTOS:
            if isD(i):
                conf.env.DEST_OS = MACRO_TO_DESTOS[i]
                break
        else:
            if isD('__APPLE__') and isD('__MACH__'):
                conf.env.DEST_OS = 'darwin'
            elif isD('__unix__'
                     ):  # unix must be tested last as it's a generic fallback
                conf.env.DEST_OS = 'generic'

        if isD('__ELF__'):
            conf.env.DEST_BINFMT = 'elf'
        elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
            conf.env.DEST_BINFMT = 'pe'
            conf.env.LIBDIR = conf.env.BINDIR
        elif isD('__APPLE__'):
            conf.env.DEST_BINFMT = 'mac-o'

        if not conf.env.DEST_BINFMT:
            # Infer the binary format from the os name.
            conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)

        for i in MACRO_TO_DEST_CPU:
            if isD(i):
                conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
                break

        Logs.debug('ccroot: dest platform: ' + ' '.join([
            conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')
        ]))
        if icc:
            ver = k['__INTEL_COMPILER']
            conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
        else:
            if isD('__clang__'):
                conf.env['CC_VERSION'] = (k['__clang_major__'],
                                          k['__clang_minor__'],
                                          k['__clang_patchlevel__'])
            else:
                try:
                    conf.env['CC_VERSION'] = (k['__GNUC__'],
                                              k['__GNUC_MINOR__'],
                                              k['__GNUC_PATCHLEVEL__'])
                except KeyError:
                    conf.env['CC_VERSION'] = (k['__GNUC__'],
                                              k['__GNUC_MINOR__'], 0)
    return k
Example #39
def compile_fun_noshell(line):
    """
	Create a compiled function to execute a process without the shell
	WARNING: this method may disappear anytime, so use compile_fun instead
	"""
    extr = []

    def repl(match):
        g = match.group
        if g('dollar'): return "$"
        elif g('backslash'): return '\\'
        elif g('subst'):
            extr.append((g('var'), g('code')))
            return "<<|@|>>"
        return None

    line2 = reg_act.sub(repl, line)
    params = line2.split('<<|@|>>')
    assert (extr)

    buf = []
    dvars = []
    app = buf.append
    for x in range(len(extr)):
        params[x] = params[x].strip()
        if params[x]:
            app("lst.extend(%r)" % params[x].split())
        (var, meth) = extr[x]
        if var == 'SRC':
            if meth: app('lst.append(tsk.inputs%s)' % meth)
            else: app("lst.extend([a.path_from(cwdx) for a in tsk.inputs])")
        elif var == 'TGT':
            if meth: app('lst.append(tsk.outputs%s)' % meth)
            else: app("lst.extend([a.path_from(cwdx) for a in tsk.outputs])")
        elif meth:
            if meth.startswith(':'):
                m = meth[1:]
                if m == 'SRC':
                    m = '[a.path_from(cwdx) for a in tsk.inputs]'
                elif m == 'TGT':
                    m = '[a.path_from(cwdx) for a in tsk.outputs]'
                # re_novar matches both 'SRC...' and 'TGT...'; distinguish by
                # prefix (two identical elif tests would leave dead code)
                elif re_novar.match(m) and m.startswith('SRC'):
                    m = '[tsk.inputs%s]' % m[3:]
                elif re_novar.match(m) and m.startswith('TGT'):
                    m = '[tsk.outputs%s]' % m[3:]
                elif m[:3] not in ('tsk', 'gen', 'bld'):
                    dvars.extend([var, m])
                    m = '%r' % m
                app('lst.extend(tsk.colon(%r, %s))' % (var, m))
            else:
                app('lst.extend(gen.to_list(%s%s))' % (var, meth))
        else:
            app('lst.extend(to_list(env[%r]))' % var)
            if not var in dvars: dvars.append(var)

    if extr:
        if params[-1]:
            app("lst.extend(%r)" % params[-1].split())
    fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
    Logs.debug('action: %s' % fun.strip().splitlines())
    return (funex(fun), dvars)
Example #40
	def exec_command(self, cmd, **kw):
		"""
		Runs an external process and returns the exit status::

			def run(tsk):
				ret = tsk.generator.bld.exec_command('touch foo.txt')
				return ret

		If the context has the attribute 'log', then captures and logs the process stderr/stdout.
		Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
		stdout/stderr values captured.

		:param cmd: command argument for subprocess.Popen
		:type cmd: string or list
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		:type kw: dict
		:returns: process exit status
		:rtype: integer
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r', cmd)
		Logs.debug('runner_env: kw=%s', kw)

		if self.logger:
			self.logger.info(cmd)

		if 'stdout' not in kw:
			kw['stdout'] = subprocess.PIPE
		if 'stderr' not in kw:
			kw['stderr'] = subprocess.PIPE

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError('Program %s not found!' % cmd[0])

		cargs = {}
		if 'timeout' in kw:
			if sys.hexversion >= 0x3030000:
				cargs['timeout'] = kw['timeout']
				if not 'start_new_session' in kw:
					kw['start_new_session'] = True
			del kw['timeout']
		if 'input' in kw:
			if kw['input']:
				cargs['input'] = kw['input']
				kw['stdin'] = subprocess.PIPE
			del kw['input']

		if 'cwd' in kw:
			if not isinstance(kw['cwd'], str):
				kw['cwd'] = kw['cwd'].abspath()

		try:
			ret, out, err = Utils.run_process(cmd, kw, cargs)
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if out:
			if not isinstance(out, str):
				out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
			if self.logger:
				self.logger.debug('out: %s', out)
			else:
				Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
		if err:
			if not isinstance(err, str):
				err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
			if self.logger:
				self.logger.error('err: %s' % err)
			else:
				Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

		return ret
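A standalone toy of the keyword handling documented above: 'input' and 'timeout' are meant for communicate(), not Popen, so they are split off before the process starts (plain subprocess here; Utils.run_process does the equivalent inside waf):

import subprocess
import sys

kw = {'stdout': subprocess.PIPE, 'input': b'hello\n', 'timeout': 5}
cargs = {}
for key in ('input', 'timeout'):
    if key in kw:
        cargs[key] = kw.pop(key)
if 'input' in cargs:
    kw['stdin'] = subprocess.PIPE

p = subprocess.Popen([sys.executable, '-c', 'print(input())'], **kw)
out, _ = p.communicate(**cargs)
print(out.decode().strip())  # hello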
Example #41
0
    def post(self):
        if getattr(self, 'posted', None):
            return False
        self.posted = True
        keys = set(self.meths)
        keys.update(feats['*'])
        self.features = Utils.to_list(self.features)
        for x in self.features:
            st = feats[x]
            if st:
                keys.update(st)
            elif not x in Task.classes:
                Logs.warn(
                    'feature %r does not exist - bind at least one method to it?',
                    x)
        prec = {}
        prec_tbl = self.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x:
                    break
            else:
                tmp.append(a)
        tmp.sort(reverse=True)
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys:
                out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                for x in nlst:
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)
                        tmp.sort(reverse=True)
        if prec:
            buf = ['Cycle detected in the method execution:']
            for k, v in prec.items():
                buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
            raise Errors.WafError('\n'.join(buf))
        self.meths = out
        Logs.debug('task_gen: posting %s %d', self, id(self))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)', x, id(self))
            v()
        Logs.debug('task_gen: posted %s', self.name)
        return True
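The while-loop above is a topological sort over the precedence table. A self-contained sketch of the same ordering idea, with prec mapping a method to the methods that must run after it (simplified: released successors are assumed to be valid methods):

def order_methods(keys, prec):
    # seeds: methods that never appear as someone's successor
    tmp = sorted((k for k in keys
                  if not any(k in v for v in prec.values())), reverse=True)
    out = []
    while tmp:
        e = tmp.pop()
        out.append(e)
        for x in prec.pop(e, ()):
            # release x once no remaining constraint points at it
            if not any(x in v for v in prec.values()):
                tmp.append(x)
                tmp.sort(reverse=True)
    if prec:
        raise ValueError('cycle detected: %r' % prec)
    return out

print(order_methods({'a', 'b', 'c'}, {'a': ['b'], 'b': ['c']}))  # ['a', 'b', 'c']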
Example #42
0
    def sig_implicit_deps(self):
        """
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes node signatures obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`).

		The exception :py:class:`waflib.Errors.TaskRescan` is thrown
		when a file has changed. When this occurs, :py:meth:`waflib.Task.Task.signature` is called
		once again, and this method will be executed once again, this time calling :py:meth:`waflib.Task.Task.scan`
		for searching the dependencies.

		:rtype: hash value
		"""

        bld = self.generator.bld

        # get the task signatures from previous runs
        key = self.uid()
        prev = bld.task_sigs.get((key, 'imp'), [])

        # for issue #379
        if prev:
            try:
                if prev == self.compute_sig_implicit_deps():
                    return prev
            except Errors.TaskNotReady:
                raise
            except EnvironmentError:
                # when a file was renamed (IOError usually), remove the stale nodes (headers in folders without source files)
                # this will break the order calculation for headers created during the build in the source directory (should be uncommon)
                # the behaviour will differ when top != out
                for x in bld.node_deps.get(self.uid(), []):
                    if not x.is_bld():
                        try:
                            os.stat(x.abspath())
                        except OSError:
                            try:
                                del x.parent.children[x.name]
                            except KeyError:
                                pass
            del bld.task_sigs[(key, 'imp')]
            raise Errors.TaskRescan('rescan')

        # no previous run or the signature of the dependencies has changed, rescan the dependencies
        (nodes, names) = self.scan()
        if Logs.verbose:
            Logs.debug('deps: scanner for %s returned %s %s' %
                       (str(self), str(nodes), str(names)))

        # store the dependencies in the cache
        bld.node_deps[key] = nodes
        bld.raw_deps[key] = names

        # might happen
        self.are_implicit_nodes_ready()

        # recompute the signature and return it
        try:
            bld.task_sigs[(key,
                           'imp')] = sig = self.compute_sig_implicit_deps()
        except Exception:
            if Logs.verbose:
                for k in bld.node_deps.get(self.uid(), []):
                    try:
                        k.get_bld_sig()
                    except Exception:
                        Logs.warn(
                            'Missing signature for node %r (may cause rebuilds)'
                            % k)
        else:
            return sig
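A toy illustration of the rescan protocol from the docstring: on a stale signature the cached entry is dropped and TaskRescan is raised, and the caller simply calls again, which triggers the fresh scan (local stand-ins, not waflib classes):

class TaskRescan(Exception):
    pass

cache = {'imp': 'stale-sig'}

def sig_implicit_deps():
    if cache.get('imp') == 'stale-sig':
        del cache['imp']            # drop the stale entry...
        raise TaskRescan('rescan')  # ...and ask the caller to retry
    cache['imp'] = 'fresh-sig'      # pretend scan() + rehash happened here
    return cache['imp']

try:
    sig_implicit_deps()
except TaskRescan:
    print(sig_implicit_deps())  # fresh-sig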
Example #43
0
    def post_run(self):
        if self.env.CC_NAME not in supported_compilers:
            return super(derived_class, self).post_run()

        if getattr(self, 'cached', None):
            return Task.Task.post_run(self)

        bld = self.generator.bld
        unresolved_names = []
        resolved_nodes = []

        lowercase = self.generator.msvcdeps_drive_lowercase
        correct_case_path = bld.path.abspath()
        correct_case_path_len = len(correct_case_path)
        correct_case_path_norm = os.path.normcase(correct_case_path)

        # Dynamically bind to the cache
        try:
            cached_nodes = bld.cached_nodes
        except AttributeError:
            cached_nodes = bld.cached_nodes = {}

        for path in self.msvcdeps_paths:
            node = None
            if os.path.isabs(path):
                # Force drive letter to match conventions of main source tree
                drive, tail = os.path.splitdrive(path)

                if os.path.normcase(path[:correct_case_path_len]
                                    ) == correct_case_path_norm:
                    # Path is in the sandbox, force it to be correct.  MSVC sometimes returns a lowercase path.
                    path = correct_case_path + path[correct_case_path_len:]
                else:
                    # Check the drive letter
                    if lowercase and (drive != drive.lower()):
                        path = drive.lower() + tail
                    elif (not lowercase) and (drive != drive.upper()):
                        path = drive.upper() + tail
                node = path_to_node(bld.root, path, cached_nodes)
            else:
                base_node = bld.bldnode
                # when calling find_resource, make sure the path does not begin by '..'
                path = [k for k in Utils.split_path(path) if k and k != '.']
                while path[0] == '..':
                    path = path[1:]
                    base_node = base_node.parent

                node = path_to_node(base_node, path, cached_nodes)

            if not node:
                raise ValueError('could not find %r for %r' % (path, self))
            else:
                if not c_preproc.go_absolute:
                    if not (node.is_child_of(bld.srcnode)
                            or node.is_child_of(bld.bldnode)):
                        # System library
                        Logs.debug('msvcdeps: Ignoring system include %r' %
                                   node)
                        continue

                if id(node) == id(self.inputs[0]):
                    # Self-dependency
                    continue

                resolved_nodes.append(node)

        bld.node_deps[self.uid()] = resolved_nodes
        bld.raw_deps[self.uid()] = unresolved_names

        # Free memory (200KB for each file in CryEngine, without UberFiles, this accumulates to 1 GB)
        del self.msvcdeps_paths

        try:
            del self.cache_sig
        except AttributeError:
            pass

        Task.Task.post_run(self)
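A standalone sketch of the drive-letter normalization above, using ntpath so it behaves identically on any OS (hypothetical paths):

import ntpath

def normalize_drive(path, lowercase):
    # force the drive letter to one case, leaving the tail untouched
    drive, tail = ntpath.splitdrive(path)
    if lowercase and drive != drive.lower():
        return drive.lower() + tail
    if not lowercase and drive != drive.upper():
        return drive.upper() + tail
    return path

print(normalize_drive('c:\\Proj\\a.h', lowercase=False))  # C:\Proj\a.h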
Example #44
0
def check_python_version(conf, minver=None):
    assert minver is None or isinstance(minver, tuple)
    pybin = conf.env['PYTHON']
    if not pybin:
        conf.fatal('could not find the python executable')
    cmd = pybin + [
        '-c', 'import sys\nfor x in sys.version_info: print(str(x))'
    ]
    Logs.debug('python: Running python command %r' % cmd)
    lines = conf.cmd_and_log(cmd).split()
    assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines),
                                                                lines)
    pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3],
                   int(lines[4]))
    result = (minver is None) or (pyver_tuple >= minver)
    if result:
        pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
        conf.env['PYTHON_VERSION'] = pyver
        if 'PYTHONDIR' in conf.env:
            pydir = conf.env['PYTHONDIR']
        elif 'PYTHONDIR' in conf.environ:
            pydir = conf.environ['PYTHONDIR']
        else:
            if Utils.is_win32:
                (python_LIBDEST, pydir) = conf.get_python_variables([
                    "get_config_var('LIBDEST') or ''",
                    "get_python_lib(standard_lib=0) or ''"
                ])
            else:
                python_LIBDEST = None
                (pydir, ) = conf.get_python_variables(
                    ["get_python_lib(standard_lib=0) or ''"])
            if python_LIBDEST is None:
                if conf.env['LIBDIR']:
                    python_LIBDEST = os.path.join(conf.env['LIBDIR'],
                                                  "python" + pyver)
                else:
                    python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib",
                                                  "python" + pyver)
        if 'PYTHONARCHDIR' in conf.env:
            pyarchdir = conf.env['PYTHONARCHDIR']
        elif 'PYTHONARCHDIR' in conf.environ:
            pyarchdir = conf.environ['PYTHONARCHDIR']
        else:
            (pyarchdir, ) = conf.get_python_variables(
                ["get_python_lib(plat_specific=1, standard_lib=0) or ''"])
            if not pyarchdir:
                pyarchdir = pydir
        if hasattr(conf, 'define'):
            conf.define('PYTHONDIR', pydir)
            conf.define('PYTHONARCHDIR', pyarchdir)
        conf.env['PYTHONDIR'] = pydir
        conf.env['PYTHONARCHDIR'] = pyarchdir
    pyver_full = '.'.join(map(str, pyver_tuple[:3]))
    if minver is None:
        conf.msg('Checking for python version', pyver_full)
    else:
        minver_str = '.'.join(map(str, minver))
        conf.msg('Checking for python version >= %s' % minver_str, pyver_full,
                 result and 'GREEN' or 'YELLOW')
    if not result:
        conf.fatal('The python version is too old, expecting %r' % (minver, ))
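The version probe above can be reproduced standalone; the five printed fields map directly onto sys.version_info:

import subprocess
import sys

out = subprocess.check_output(
    [sys.executable, '-c', 'import sys\nfor x in sys.version_info: print(str(x))'])
lines = out.decode().split()
pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
print(pyver_tuple >= (2, 4, 2))  # True on any current interpreter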
Example #45
0
	def cmd_and_log(self, cmd, **kw):
		"""
		Execute a command and return stdout if the execution is successful.
		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
		will be bound to the WafError object::

			def configure(conf):
				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
				try:
					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
				except Exception as e:
					print(e.stdout, e.stderr)

		:param cmd: args for subprocess.Popen
		:param kw: keyword arguments for subprocess.Popen
		"""
		subprocess = Utils.subprocess
		kw['shell'] = isinstance(cmd, str)
		Logs.debug('runner: %r' % cmd)

		if 'quiet' in kw:
			quiet = kw['quiet']
			del kw['quiet']
		else:
			quiet = None

		if 'output' in kw:
			to_ret = kw['output']
			del kw['output']
		else:
			to_ret = STDOUT

		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
			raise Errors.WafError("Program %s not found!" % cmd[0])

		kw['stdout'] = kw['stderr'] = subprocess.PIPE
		if quiet is None:
			self.to_log(cmd)
		try:
			p = subprocess.Popen(cmd, **kw)
			(out, err) = p.communicate()
		except Exception as e:
			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

		if not isinstance(out, str):
			out = out.decode(sys.stdout.encoding or 'iso8859-1')
		if not isinstance(err, str):
			err = err.decode(sys.stdout.encoding or 'iso8859-1')

		if out and quiet != STDOUT and quiet != BOTH:
			self.to_log('out: %s' % out)
		if err and quiet != STDERR and quiet != BOTH:
			self.to_log('err: %s' % err)

		if p.returncode:
			e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
			e.returncode = p.returncode
			e.stderr = err
			e.stdout = out
			raise e

		if to_ret == BOTH:
			return (out, err)
		elif to_ret == STDERR:
			return err
		return out
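A toy model of the error enrichment at the end of cmd_and_log: the exception carries returncode, stdout and stderr, which is what makes the docstring's except-clause example work (a plain Exception subclass stands in for waflib.Errors.WafError):

class WafError(Exception):
    def __init__(self, msg):
        super(WafError, self).__init__(msg)
        self.stdout = self.stderr = self.returncode = None

def fail(cmd, out, err, code):
    # attach the captured streams before raising, as cmd_and_log does
    e = WafError('Command %r returned %r' % (cmd, code))
    e.stdout, e.stderr, e.returncode = out, err, code
    raise e

try:
    fail(['which', 'someapp'], '', 'someapp not found\n', 1)
except WafError as e:
    print(e, '|', e.returncode, '|', e.stderr.strip())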
Example #46
0
def configure(conf):  # pylint: disable=too-many-statements,too-many-branches
    """configuration step of the miniconda environment tool"""
    try:
        conf.env.PYTHON[0]
    except IndexError:
        conf.load("python")
    if conf.options.IGNORE_ENV_CHECK:
        conf.env.ENV_CHECK = False
        return

    # check that all python packages are available
    path_list = [os.path.join(i, "Scripts") for i in conf.options.CONDA_BASE_ENV]
    conf.find_program("conda", mandatory=False, path_list=path_list)
    try:
        conf.env.CONDA[0]
    except IndexError:
        conf.fatal(
            "A conda base installation is required at "
            f"{conf.options.CONDA_BASE_ENV}. \nAlternatively you can specify "
            "an installed conda base environment by passing it via "
            "'--conda-base-env'."
        )

    cmd = Utils.subst_vars("${CONDA} env list --json", conf.env).split()

    try:
        std = conf.cmd_and_log(cmd, output=Context.BOTH)
        available_conda_envs = json.loads(std[0])
    except Errors.WafError as env_search:
        Logs.error(env_search.msg.strip())
        conf.fatal("Searching for conda environments failed.")

    conda_env_spec_file = conf.path.find_node(conf.options.CONDA_ENV_FILE)
    with open(conda_env_spec_file.abspath(), "r") as stream:
        try:
            conda_spec = yaml.load(stream, Loader=yaml.Loader)
        except yaml.YAMLError as exc:
            conf.fatal(exc)
    env = None
    for env in available_conda_envs["envs"]:
        if env.lower().endswith(conda_spec["name"]):
            conf.env.CONDA_DEVEL_ENV = conda_spec["name"]
            break
    if not conf.env.CONDA_DEVEL_ENV:
        conf.fatal(f"Development environment '{conda_spec['name']}' not found.")

    correct_env = False
    # At this point shutil.which() has returned at least a string (we found
    # *something*) and conf.env.PYTHON[0] exists, so the following
    # comparisons are safe from a type perspective.
    if Utils.is_win32:
        if (
            # NTFS on Windows is case insensitive, so don't be too
            # strict on string comparison when we check paths
            sys.executable.lower() == conf.env.PYTHON[0].lower()
            and sys.executable.lower() == os.path.join(env.lower(), "python.exe")
        ):
            correct_env = True
    else:
        if sys.executable == conf.env.PYTHON[0] and sys.executable == os.path.join(
            env.lower(), "python"
        ):
            correct_env = True
    if not correct_env:
        Logs.error("The development environment %s is not active." % conda_spec["name"])
        conf.fatal(
            "Run 'conda activate %s' and configure the project again."
            % conda_spec["name"]
        )

    cmd = Utils.subst_vars(
        "${CONDA} env export -n ${CONDA_DEVEL_ENV}", conf.env
    ).split()
    try:
        std = conf.cmd_and_log(
            cmd, output=Context.BOTH, quiet=Context.BOTH, input="\n".encode()
        )
    except Errors.WafError as env_export:
        Logs.error(env_export.msg.strip())
        conf.fatal(
            f"Could not export dependencies from environment {conda_spec['name']}"
        )
    conda_env_yaml = conf.path.get_bld().make_node(
        f"{conf.env.CONDA_DEVEL_ENV}_environment.yaml"
    )
    conda_env_yaml.write(std[0])
    with open(conda_env_yaml.abspath(), "r") as stream:
        try:
            current_conda_env = yaml.load(stream, Loader=yaml.Loader)
        except yaml.YAMLError as exc:
            conf.fatal(exc)

    pips = []  # guard: stays empty if the export has no 'pip' section
    for i in current_conda_env["dependencies"]:
        if isinstance(i, dict):
            pips = i["pip"]

    pkg_error = False
    for _pkg in conda_spec["dependencies"]:
        if isinstance(_pkg, str):
            if _pkg in current_conda_env["dependencies"]:
                Logs.debug(f"Found {_pkg.split('=')}")
            else:
                Logs.warn(f"Could not find {_pkg.split('=')}")
                pkg_error = True
        elif isinstance(_pkg, dict):
            for pip_pkg in _pkg["pip"]:
                if pip_pkg in pips:
                    Logs.debug(f"Found {pip_pkg.split('==')}")
                else:
                    Logs.warn(f"Could not find {pip_pkg.split('==')}")
                    pkg_error = True
    if pkg_error:
        conf.fatal("There are package errors.")
Example #47
0
File: msvc.py Project: van-de-bugger/waf
def get_msvc_version(conf, compiler, version, target, vcvars):
    """
	Checks that an installed compiler actually runs and uses vcvars to obtain the
	environment needed by the compiler.

	:param compiler: compiler type, for looking up the executable name
	:param version: compiler version, for debugging only
	:param target: target architecture
	:param vcvars: batch file to run to check the environment
	:return: the location of the compiler executable, the location of include dirs, and the library paths
	:rtype: tuple of strings
	"""
    Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)

    try:
        conf.msvc_cnt += 1
    except AttributeError:
        conf.msvc_cnt = 1
    batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
    batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
""" % (vcvars, target))
    sout = conf.cmd_and_log(
        ['cmd.exe', '/E:on', '/V:on', '/C',
         batfile.abspath()])
    lines = sout.splitlines()

    if not lines[0]:
        lines.pop(0)

    MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
    for line in lines:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
    if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
        conf.fatal(
            'msvc: Could not find a valid architecture for building (get_msvc_version_3)'
        )

    # Check if the compiler is usable at all.
    # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
    env = dict(os.environ)
    env.update(PATH=path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)

    # delete CL if it exists, because it could contain parameters that change cl's behaviour rather catastrophically
    if 'CL' in env:
        del env['CL']

    try:
        conf.cmd_and_log(cxx + ['/help'], env=env)
    except UnicodeError:
        st = traceback.format_exc()
        if conf.logger:
            conf.logger.error(st)
        conf.fatal('msvc: Unicode error - check the code page?')
    except Exception as e:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler,
                   version, target, str(e))
        conf.fatal(
            'msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)'
        )
    else:
        Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version,
                   target)
    finally:
        conf.env[compiler_name] = ''

    return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
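The batch file above only echoes the environment; the parsing loop can be exercised with canned output:

sout = ("PATH=C:\\VC\\bin;C:\\Windows\n"
        "INCLUDE=C:\\VC\\include;\n"
        "LIB=C:\\VC\\lib;;C:\\Kits\\lib\n")
MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
for line in sout.splitlines():
    if line.startswith('PATH='):
        MSVC_PATH = line[5:].split(';')
    elif line.startswith('INCLUDE='):
        MSVC_INCDIR = [i for i in line[8:].split(';') if i]
    elif line.startswith('LIB='):
        MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
print(MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)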
Example #48
0
    def exec_response_command(self, cmd, **kw):
        # exec_response_command() is only called from inside msvc.py anyway
        assert self.env.CC_NAME in supported_compilers

        # Only bother adding '/showIncludes' to compile tasks
        #if isinstance(self, (c.c, cxx.cxx, msvcdeps.pch_msvc)):
        if True:
            try:
                # The Visual Studio IDE adds an environment variable that causes
                # the MS compiler to send its textual output directly to the
                # debugging window rather than normal stdout/stderr.
                #
                # This is unrecoverably bad for this tool because it will cause
                # all the dependency scanning to see an empty stdout stream and
                # assume that the file being compiled uses no headers.
                #
                # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
                #
                # Attempting to repair the situation by deleting the offending
                # envvar at this point in tool execution will not be good enough--
                # its presence poisons the 'waf configure' step earlier. We just
                # want to put a sanity check here in order to help developers
                # quickly diagnose the issue if an otherwise-good Waf tree
                # is then executed inside the MSVS IDE.
                # Note seems something changed, and this env var cannot be found anymore
                #assert 'VS_UNICODE_OUTPUT' not in kw['env']

                tmp = None

                # This block duplicated from Waflib's msvc.py
                if sys.platform.startswith('win') and isinstance(
                        cmd, list) and len(' '.join(cmd)) >= 8192:
                    tmp_files_folder = self.generator.bld.get_bintemp_folder_node(
                    ).make_node('TempFiles')
                    program = cmd[0]
                    cmd = [self.quote_response_command(x) for x in cmd]
                    (fd,
                     tmp) = tempfile.mkstemp(dir=tmp_files_folder.abspath())
                    os.write(
                        fd,
                        '\r\n'.join(i.replace('\\', '\\\\')
                                    for i in cmd[1:]).encode())
                    os.close(fd)
                    cmd = [program, '@' + tmp]
                # ... end duplication

                self.msvcdeps_paths = []

                kw['env'] = kw.get('env', os.environ.copy())
                kw['cwd'] = kw.get('cwd', os.getcwd())
                kw['quiet'] = Context.STDOUT
                kw['output'] = Context.STDOUT

                out = []
                try:
                    raw_out = self.generator.bld.cmd_and_log(cmd, **kw)
                    ret = 0
                except Errors.WafError as e:
                    try:
                        # Get error output if failed compilation
                        raw_out = e.stdout
                        ret = e.returncode
                    except:
                        # Fallback (eg the compiler itself is not found)
                        raw_out = str(e)
                        ret = -1

                for line in raw_out.splitlines():
                    if line.startswith(INCLUDE_PATTERN):
                        inc_path = line[len(INCLUDE_PATTERN):].strip()
                        Logs.debug('msvcdeps: Regex matched %s' % inc_path)
                        self.msvcdeps_paths.append(inc_path)
                        if self.generator.bld.is_option_true('show_includes'):
                            out.append(line)
                    else:
                        out.append(line)

                # Pipe through the remaining stdout content (not related to /showIncludes)
                show_output = False
                for i in out:
                    if ' ' in i:
                        show_output = True
                        break
                # Don't show outputs immediately when we have an error; defer those till we have better information
                if show_output:
                    if self.generator.bld.logger:
                        self.generator.bld.logger.debug('out: %s' %
                                                        '\n'.join(out))
                    else:
                        sys.stdout.write(os.linesep.join(out) + '\n')

            finally:
                if tmp:
                    try:
                        os.remove(tmp)
                    except OSError:
                        pass

            # Create a custom error message for improved readability
            if ret != 0:
                self.err_msg = '<++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++>\n'
                self.err_msg += "Compilation failed - File: %r, Module: %r, Configuration: '%s|%s', error code %d\n" % (
                    os.path.basename(self.outputs[0].abspath()),
                    self.generator.target, self.generator.bld.env['PLATFORM'],
                    self.generator.bld.env['CONFIGURATION'], ret)
                self.err_msg += "\tInput Files:   '%s'\n" % ', '.join(
                    i.abspath() for i in self.inputs)
                self.err_msg += "\tOutput Files:  '%s'\n" % (', ').join(
                    i.abspath() for i in self.outputs)
                self.err_msg += "\tCommand:       '%s'\n" % ' '.join(
                    self.last_cmd)
                out_merged = ''
                for line in out:
                    out_merged += '\t\t' + line + '\n'
                self.err_msg += "\tOutput:\n%s" % out_merged
                self.err_msg += "<++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++>\n"

            return ret
        else:
            # Use base class's version of this method for linker tasks
            return super(derived_class, self).exec_response_command(cmd, **kw)
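The response-file workaround duplicated above can be shown in isolation: once the command line nears the ~8k limit, everything but the program name goes into a temporary file passed as '@file' (hypothetical cl.exe arguments; nothing is executed):

import os
import tempfile

cmd = ['cl.exe'] + ['/I%d' % i for i in range(3000)]  # hypothetical arguments
if len(' '.join(cmd)) >= 8192:
    fd, tmp = tempfile.mkstemp()
    os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
    os.close(fd)
    cmd = [cmd[0], '@' + tmp]
print(cmd[:2])  # ['cl.exe', '@/tmp/tmp...']
os.remove(cmd[1][1:])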
Example #49
0
def check_python_version(conf, minver=None):
	"""
	Check if the python interpreter is found matching a given minimum version.
	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
	(eg. '2.4') of the actual python version found, and PYTHONDIR is
	defined, pointing to the site-packages directory appropriate for
	this python version, where modules/packages/extensions should be
	installed.

	:param minver: minimum version
	:type minver: tuple of int
	"""
	assert minver is None or isinstance(minver, tuple)
	pybin = conf.env['PYTHON']
	if not pybin:
		conf.fatal('could not find the python executable')

	# Get python version string
	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
	Logs.debug('python: Running python command %r' % cmd)
	lines = conf.cmd_and_log(cmd).split()
	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

	# compare python version with the minimum required
	result = (minver is None) or (pyver_tuple >= minver)

	if result:
		# define useful environment variables
		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
		conf.env['PYTHON_VERSION'] = pyver

		if 'PYTHONDIR' in conf.env:
			# Check if --pythondir was specified
			pydir = conf.env['PYTHONDIR']
		elif 'PYTHONDIR' in conf.environ:
			# Check environment for PYTHONDIR
			pydir = conf.environ['PYTHONDIR']
		else:
			# Finally, try to guess
			if Utils.is_win32:
				(python_LIBDEST, pydir) = conf.get_python_variables(
					  ["get_config_var('LIBDEST') or ''",
					   "get_python_lib(standard_lib=0) or ''"])
			else:
				python_LIBDEST = None
				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix='%s') or ''" % conf.env['PREFIX']])
			if python_LIBDEST is None:
				if conf.env['LIBDIR']:
					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
				else:
					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)

		if 'PYTHONARCHDIR' in conf.env:
			# Check if --pythonarchdir was specified
			pyarchdir = conf.env['PYTHONARCHDIR']
		elif 'PYTHONARCHDIR' in conf.environ:
			# Check environment for PYTHONARCHDIR
			pyarchdir = conf.environ['PYTHONARCHDIR']
		else:
			# Finally, try to guess
			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix='%s') or ''" % conf.env['PREFIX']])
			if not pyarchdir:
				pyarchdir = pydir

		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
			conf.define('PYTHONDIR', pydir)
			conf.define('PYTHONARCHDIR', pyarchdir)

		conf.env['PYTHONDIR'] = pydir
		conf.env['PYTHONARCHDIR'] = pyarchdir

	# Feedback
	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
	if minver is None:
		conf.msg('Checking for python version', pyver_full)
	else:
		minver_str = '.'.join(map(str, minver))
		conf.msg('Checking for python version', pyver_tuple, ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW')

	if not result:
		conf.fatal('The python version is too old, expecting %r' % (minver,))
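The PYTHONDIR/PYTHONARCHDIR guesses above go through distutils' get_python_lib; sysconfig offers a close standalone analogue (printed paths depend on the installation):

import sysconfig

print(sysconfig.get_path('purelib'))  # pure-Python site-packages, cf. PYTHONDIR
print(sysconfig.get_path('platlib'))  # platform-specific site-packages, cf. PYTHONARCHDIR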
Example #50
0
    def exec_command(self, cmd, **kw):
        """
		Execute a command and return the exit status. If the context has the attribute 'log',
		capture and log the process stderr/stdout for logging purposes::

			def run(tsk):
				ret = tsk.generator.bld.exec_command('touch foo.txt')
				return ret

		This method captures the standard/error outputs (Issue 1101), but it does not return the values
		unlike :py:meth:`waflib.Context.Context.cmd_and_log`

		:param cmd: command argument for subprocess.Popen
		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
		"""
        subprocess = Utils.subprocess
        kw['shell'] = isinstance(cmd, str)
        Logs.debug('runner: %r' % (cmd, ))
        Logs.debug('runner_env: kw=%s' % kw)

        if self.logger:
            self.logger.info(cmd)

        if 'stdout' not in kw:
            kw['stdout'] = subprocess.PIPE
        if 'stderr' not in kw:
            kw['stderr'] = subprocess.PIPE

        if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError("Program %s not found!" % cmd[0])

        wargs = {}
        if 'timeout' in kw:
            if kw['timeout'] is not None:
                wargs['timeout'] = kw['timeout']
            del kw['timeout']
        if 'input' in kw:
            if kw['input']:
                wargs['input'] = kw['input']
                kw['stdin'] = Utils.subprocess.PIPE
            del kw['input']

        try:
            if kw['stdout'] or kw['stderr']:
                p = subprocess.Popen(cmd, **kw)
                (out, err) = p.communicate(**wargs)
                ret = p.returncode
            else:
                out, err = (None, None)
                ret = subprocess.Popen(cmd, **kw).wait(**wargs)
        except Exception as e:
            raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

        if out:
            if not isinstance(out, str):
                out = out.decode(sys.stdout.encoding or 'iso8859-1')
            if self.logger:
                self.logger.debug('out: %s' % out)
            else:
                Logs.info(out, extra={'stream': sys.stdout, 'c1': ''})
        if err:
            if not isinstance(err, str):
                err = err.decode(sys.stdout.encoding or 'iso8859-1')
            if self.logger:
                self.logger.error('err: %s' % err)
            else:
                Logs.info(err, extra={'stream': sys.stderr, 'c1': ''})

        return ret
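The wargs dictionary above is forwarded to wait()/communicate(); the equivalent plain-subprocess call looks like this:

import subprocess
import sys

p = subprocess.Popen([sys.executable, '-c', 'print("hi")'],
                     stdout=subprocess.PIPE)
out, err = p.communicate(timeout=10)  # 'timeout' goes to communicate, not Popen
print(p.returncode, out.decode().strip())  # 0 hi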
Example #51
0
File: c_config.py Project: sky4D/mavsim
def get_cc_version(conf, cc, gcc=False, icc=False):
    cmd = cc + ['-dM', '-E', '-']
    try:
        p = Utils.subprocess.Popen(cmd,
                                   stdin=Utils.subprocess.PIPE,
                                   stdout=Utils.subprocess.PIPE,
                                   stderr=Utils.subprocess.PIPE)
        p.stdin.write(b'\n')
        out = p.communicate()[0]
    except Exception:
        conf.fatal('could not determine the compiler version %r' % cmd)
    if not isinstance(out, str):
        out = out.decode(sys.stdout.encoding or 'iso8859-1')
    if gcc:
        if out.find('__INTEL_COMPILER') >= 0:
            conf.fatal('The intel compiler pretends to be gcc')
        if out.find('__GNUC__') < 0:
            conf.fatal('Could not determine the compiler type')
    if icc and out.find('__INTEL_COMPILER') < 0:
        conf.fatal('Not icc/icpc')
    k = {}
    if icc or gcc:
        out = out.split('\n')
        import shlex
        for line in out:
            lst = shlex.split(line)
            if len(lst) > 2:
                key = lst[1]
                val = lst[2]
                k[key] = val

        def isD(var):
            return var in k

        def isT(var):
            return var in k and k[var] != '0'

        if not conf.env.DEST_OS:
            conf.env.DEST_OS = ''
        for i in MACRO_TO_DESTOS:
            if isD(i):
                conf.env.DEST_OS = MACRO_TO_DESTOS[i]
                break
        else:
            if isD('__APPLE__') and isD('__MACH__'):
                conf.env.DEST_OS = 'darwin'
            elif isD('__unix__'):
                conf.env.DEST_OS = 'generic'
        if isD('__ELF__'):
            conf.env.DEST_BINFMT = 'elf'
        elif isD('__WINNT__') or isD('__CYGWIN__'):
            conf.env.DEST_BINFMT = 'pe'
            conf.env.LIBDIR = conf.env['PREFIX'] + '/bin'
        elif isD('__APPLE__'):
            conf.env.DEST_BINFMT = 'mac-o'
        if not conf.env.DEST_BINFMT:
            conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
        for i in MACRO_TO_DEST_CPU:
            if isD(i):
                conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
                break
        Logs.debug('ccroot: dest platform: ' + ' '.join([
            conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')
        ]))
        if icc:
            ver = k['__INTEL_COMPILER']
            conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
        else:
            conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'],
                                      k['__GNUC_PATCHLEVEL__'])
    return k
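The macro dump parsed above comes from running the compiler with '-dM -E -'; a runnable sketch (assuming a gcc-like 'cc' on PATH; substitute 'gcc' or 'clang' as available):

import shlex
import subprocess

out = subprocess.run(['cc', '-dM', '-E', '-'], input=b'\n',
                     capture_output=True).stdout.decode()
k = {}
for line in out.split('\n'):
    # lines look like: #define __GNUC__ 13
    lst = shlex.split(line)
    if len(lst) > 2:
        k[lst[1]] = lst[2]
print(k.get('__GNUC__') or k.get('__clang_major__'))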
Example #52
0
def NxoBuilder(self, outputfile, firmware_elfname, header_elfname,
               taglist_elfname):

    cmd = [
        self.env.get_flat('STRIP'), firmware_elfname, '-g', '--strip-unneeded',
        '-o', firmware_elfname + '.stripped'
    ]
    result = self.generator.bld.cmd_and_log(cmd, output=STDOUT, quiet=STDOUT)

    fw_elf = Elffile(firmware_elfname + '.stripped')
    fw_sections = fw_elf.parse_sections()
    symbol_table = fw_sections['.symtab']
    success = True

    # Check symbol table for undefined / common symbols which are not allowed in NXO's
    fw_symbols = fw_elf.parse_symbol_table(symbol_table, fw_sections['.strtab']
                                           or None)

    # Sort sections by idx, so we can index them via st_shndx
    sections_byidx = [
        fw_sections[x]
        for x in sorted(fw_sections, key=lambda x: fw_sections[x].idx)
    ]

    Logs.debug('nxo: Checking %u symbols for undefined references' %
               (len(fw_symbols) - 1))

    # First symbol is always undefined and can safely be ignored
    for symbol in fw_symbols[1:]:
        if symbol.st_shndx == Elffile.Elf32Section.SHN_UNDEF:
            # Found undefined symbol which is not allowed in NXO
            Logs.pprint('RED',
                        'ERROR: Found undefined symbol %r' % symbol.name)
            success = False
        elif symbol.st_shndx == Elffile.Elf32Section.SHN_COMMON:
            # Found COMMON symbol which is not allowed in NXO
            Logs.pprint('RED', 'ERROR: Found COMMON symbol %r' % symbol.name)
            success = False

    if success:
        # Check for unsupported relocation types
        Logs.debug('nxo: Checking for unsupported relocation types')
        log_relocationtypes = {}

        supported_relocations = [
            Elffile.ARM32Relocations.R_ARM_NONE,
            Elffile.ARM32Relocations.R_ARM_ABS32,
            Elffile.ARM32Relocations.R_ARM_REL32,
            Elffile.ARM32Relocations.R_ARM_CALL,
            Elffile.ARM32Relocations.R_ARM_JUMP24,
            Elffile.ARM32Relocations.R_ARM_V4BX,
            Elffile.ARM32Relocations.R_ARM_PLT32,
            Elffile.ARM32Relocations.R_ARM_PC24,
            Elffile.ARM32Relocations.R_ARM_XPC25,
            Elffile.ARM32Relocations.R_ARM_PREL31,
            Elffile.ARM32Relocations.R_ARM_THM_CALL,
            Elffile.ARM32Relocations.R_ARM_THM_JUMP24,
            Elffile.ARM32Relocations.R_ARM_THM_XPC22,
            Elffile.ARM32Relocations.R_ARM_THM_PC11,
        ]

        for section_name in fw_sections:
            section = fw_sections[section_name]
            if section.sh_type == section.SHT_RELA:
                # RELA relocation sections are not supported in NXO files
                Logs.pprint(
                    'RED',
                    'ERROR: Found unsupported RELA relocation section %r' %
                    section_name)
                success = False
            elif section.sh_type == section.SHT_REL:
                # Check REL section for supported relocation types
                relocs = section.parse_rel_section()
                for _reloc in relocs:
                    if _reloc.r_sym >= len(fw_symbols):
                        # Invalid symbol reference in relocation; check this
                        # before indexing fw_symbols to avoid an IndexError
                        Logs.pprint(
                            'RED',
                            'ERROR: Found relocation with invalid symbol reference %d in section %s at r_offset %#010x'
                            % (_reloc.r_sym, section_name, _reloc.r_offset))
                        success = False
                        continue

                    ref_symbol = fw_symbols[_reloc.r_sym]

                    _log_reloc_name = _reloc_name = Elffile.ARM32Relocations.get_name(
                        _reloc.r_type)

                    if (ref_symbol.st_info & 0xf) == ref_symbol.STT_SECTION:
                        ref_symbol_name = sections_byidx[
                            ref_symbol.st_shndx].name
                    else:
                        ref_symbol_name = ref_symbol.name

                    if _reloc.r_type not in supported_relocations:
                        # Unsupported relocation
                        Logs.pprint(
                            'RED',
                            'ERROR: Found unsupported relocation type %s in section %s for symbol %s'
                            % (_reloc_name, section_name, ref_symbol_name))
                        success = False
                    else:
                        # Check relocations for other unsupported stuff (e.g. unallowed ARM<-->Thumb transitions, which would need veneers)
                        if _reloc.r_type in [
                                Elffile.ARM32Relocations.R_ARM_CALL,
                                Elffile.ARM32Relocations.R_ARM_JUMP24,
                                Elffile.ARM32Relocations.R_ARM_PLT32,
                                Elffile.ARM32Relocations.R_ARM_PC24,
                                Elffile.ARM32Relocations.R_ARM_XPC25,
                        ]:
                            # Check for unallowed transitions from ARM-->Thumb
                            if ref_symbol.st_value & 1:
                                # Mark this as a ARM-->Thumb transition for the log
                                _log_reloc_name = _log_reloc_name + " (ARM->Thumb)"
                        elif _reloc.r_type in [
                                Elffile.ARM32Relocations.R_ARM_THM_CALL,
                                Elffile.ARM32Relocations.R_ARM_THM_JUMP24,
                                Elffile.ARM32Relocations.R_ARM_THM_XPC22,
                        ]:
                            if (ref_symbol.st_value & 1) == 0:
                                # Mark this as a Thumb-->ARM transition for the log
                                _log_reloc_name = _log_reloc_name + " (Thumb-->ARM)"

                    if _log_reloc_name not in log_relocationtypes:
                        log_relocationtypes[_log_reloc_name] = 1
                    else:
                        log_relocationtypes[_log_reloc_name] += 1

        Logs.debug('nxo: Following relocations have been checked:')
        for _tmp in log_relocationtypes:
            Logs.debug('nxo:   * %s (Count=%u)' %
                       (_tmp, log_relocationtypes[_tmp]))

    if success:
        fileheader_data = ".NXO" + ('\0' * 60)

        # Create NXO file
        header_binname = header_elfname.change_ext('.bin')

        # Use Objcopy to generate binary file
        cmd = [
            self.env.get_flat('OBJCOPY'), '-O', 'binary',
            header_elfname.abspath(),
            header_binname.abspath()
        ]
        result = self.generator.bld.cmd_and_log(cmd,
                                                output=STDOUT,
                                                quiet=STDOUT)

        tmp = io.open(header_binname.abspath(), "rb")
        fileheader_data += tmp.read()
        tmp.close()

        tmp = io.open(firmware_elfname + '.stripped', "rb")
        elf_data = tmp.read()
        tmp.close()

        taglist_data = None
        if taglist_elfname:
            # Check Taglist object file
            # It must not contain multiple data sections starting all at offset 0, which results in an unusable file produced by OBJCOPY
            taglist_elf = Elffile(taglist_elfname.abspath())
            taglist_sections = taglist_elf.parse_sections()

            # Dict offset -> section name, with sections being included in bin built by OBJCOPY
            _tagsections_in_image = {}

            for _section_name, _tag_section in taglist_sections.items():
                if (_tag_section.sh_size != 0) and (_tag_section.sh_flags
                                                    & _tag_section.SHF_ALLOC):
                    Logs.debug(
                        'nxo: Checking section %r at memory offset %#010x' %
                        (_section_name, _tag_section.sh_addr))
                    if _tag_section.sh_addr in _tagsections_in_image:
                        Logs.pprint(
                            'RED',
                            'ERROR: Taglist contains overlapping sections %r and %r at offset %#010x. Make sure to only provide a single instance for the taglist structure!'
                            % (_section_name,
                               _tagsections_in_image[_tag_section.sh_addr],
                               _tag_section.sh_addr))
                        return False
                    else:
                        _tagsections_in_image[
                            _tag_section.sh_addr] = _section_name

            taglist_binname = taglist_elfname.change_ext('.bin')
            cmd = [
                self.env.get_flat('OBJCOPY'), '-O', 'binary',
                taglist_elfname.abspath(),
                taglist_binname.abspath()
            ]
            result = self.generator.bld.cmd_and_log(cmd,
                                                    output=STDOUT,
                                                    quiet=STDOUT)

            tmp = io.open(taglist_binname.abspath(), "rb")
            taglist_data = tmp.read()
            tmp.close()

            # Append EndOfList Marker, if it does not exist yet. Some firmwares rely on the linker (taglist.ld)
            # to add the missing 64 Bit zeros, but as we extract the taglist from an object the EOL is missing
            # in this case
            _offset = 0
            _eolfound = False

            while _offset < len(taglist_data):
                _tagtype = unpack_from("<L", taglist_data, _offset)[0]
                _taglength = unpack_from("<L", taglist_data, _offset + 4)[0]

                _eolfound = False
                if (_tagtype == 0) and (_taglength == 0):
                    _eolfound = True

                _offset += 8 + _taglength

                # new tags always start at DWORD aligned offsets
                if (_taglength % 4) != 0:
                    _offset += 4 - _taglength % 4

            if not _eolfound:
                Logs.pprint(
                    'YELLOW',
                    "[NXO] '%s': Adding missing END_OF_LIST marker to taglist"
                    % outputfile)
                taglist_data += pack("<Q", 0)

        offset_common_header_data_size = 72
        offset_common_header_data_offs = 76
        offset_common_header_taglist_size = 100
        offset_common_header_taglist_offs = 104
        offset_common_header_taglist_size_max = 108

        # Modify Common Header to include correct data and tag list size / offset
        fileheader_data = fileheader_data[:offset_common_header_data_size] + pack(
            "<L",
            len(elf_data)) + fileheader_data[offset_common_header_data_size +
                                             4:]
        fileheader_data = fileheader_data[:offset_common_header_data_offs] + pack(
            "<L", len(fileheader_data)
        ) + fileheader_data[offset_common_header_data_offs + 4:]
        if taglist_data:
            fileheader_data = fileheader_data[:offset_common_header_taglist_size] + pack(
                "<L", len(taglist_data)
            ) + fileheader_data[offset_common_header_taglist_size + 4:]
            fileheader_data = fileheader_data[:offset_common_header_taglist_size_max] + pack(
                "<L", len(taglist_data)
            ) + fileheader_data[offset_common_header_taglist_size_max + 4:]
            fileheader_data = fileheader_data[:offset_common_header_taglist_offs] + pack(
                "<L",
                len(fileheader_data) + len(elf_data)
            ) + fileheader_data[offset_common_header_taglist_offs + 4:]

        nxo_file = io.open(outputfile, "wb")
        nxo_file.write(fileheader_data + elf_data)
        if taglist_data:
            nxo_file.write(taglist_data)
        size = nxo_file.tell()

        if size % 4:
            success = False
            Logs.pprint(
                'RED',
                "ERROR: [NXO] '%s': Resulting filesize is not a multiple of UINT32 (size=%r)"
                % (outputfile, size))

        nxo_file.close()

        if success:
            # Update NXO checksums
            nxupdate_fn(outputfile, outputfile)

    return success
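The end-of-list scan above walks (type, length) tag headers with 4-byte payload alignment; here is a self-contained version over a synthetic taglist:

from struct import pack, unpack_from

# one tag (type=1, length=5, payload 'hello' padded to 4 bytes), then EOL marker
taglist = pack('<LL', 1, 5) + b'hello' + b'\0' * 3 + pack('<Q', 0)
offset, eol = 0, False
while offset < len(taglist):
    tagtype = unpack_from('<L', taglist, offset)[0]
    taglen = unpack_from('<L', taglist, offset + 4)[0]
    eol = (tagtype == 0 and taglen == 0)
    offset += 8 + taglen
    if taglen % 4:  # tags start at DWORD-aligned offsets
        offset += 4 - taglen % 4
print(eol)  # True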
Example #53
0
def compile_fun_noshell(line):
    """
	Creates a compiled function to execute a process without a sub-shell
	"""
    buf = []
    dvars = []
    merge = False
    app = buf.append

    def replc(m):
        # performs substitutions and populates dvars
        if m.group('and'):
            return ' and '
        elif m.group('or'):
            return ' or '
        else:
            x = m.group('var')
            if x not in dvars:
                dvars.append(x)
            return 'env[%r]' % x

    for m in reg_act_noshell.finditer(line):
        if m.group('space'):
            merge = False
            continue
        elif m.group('text'):
            app('[%r]' % m.group('text'))
        elif m.group('subst'):
            var = m.group('var')
            code = m.group('code')
            if var == 'SRC':
                if code:
                    app('[tsk.inputs%s]' % code)
                else:
                    app('[a.path_from(cwdx) for a in tsk.inputs]')
            elif var == 'TGT':
                if code:
                    app('[tsk.outputs%s]' % code)
                else:
                    app('[a.path_from(cwdx) for a in tsk.outputs]')
            elif code:
                if code.startswith(':'):
                    # a composed variable ${FOO:OUT}
                    if not var in dvars:
                        dvars.append(var)
                    m = code[1:]
                    if m == 'SRC':
                        m = '[a.path_from(cwdx) for a in tsk.inputs]'
                    elif m == 'TGT':
                        m = '[a.path_from(cwdx) for a in tsk.outputs]'
                    elif re_novar.match(m):
                        # matches both 'SRC...' and 'TGT...'; pick the list by
                        # prefix (a repeated identical test left TGT unreachable)
                        if m.startswith('SRC'):
                            m = '[tsk.inputs%s]' % m[3:]
                        else:
                            m = '[tsk.outputs%s]' % m[3:]
                    elif m[:3] not in ('tsk', 'gen', 'bld'):
                        dvars.append(m)
                        m = '%r' % m
                    app('tsk.colon(%r, %s)' % (var, m))
                elif code.startswith('?'):
                    # In A?B|C output env.A if one of env.B or env.C is non-empty
                    expr = re_cond.sub(replc, code[1:])
                    app('to_list(env[%r] if (%s) else [])' % (var, expr))
                else:
                    # plain code such as ${tsk.inputs[0].abspath()}
                    app('gen.to_list(%s%s)' % (var, code))
            else:
                # a plain variable such as ${AR}
                app('to_list(env[%r])' % var)
                if not var in dvars:
                    dvars.append(var)
        if merge:
            tmp = 'merge(%s, %s)' % (buf[-2], buf[-1])
            del buf[-1]
            buf[-1] = tmp
        merge = True  # next turn

    buf = ['lst.extend(%s)' % x for x in buf]
    fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
    Logs.debug('action: %s', fun.strip().splitlines())
    return (funex(fun), dvars)
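A sketch of the A?B|C conditional handled above: env.A is emitted only when env.B or env.C is non-empty, mirroring the generated 'to_list(env[...] if ... else [])' line (stand-in data, not waf itself):

env = {'A': ['-flto'], 'B': ['1'], 'C': []}

def to_list(v):
    return v or []

lst = []
lst.extend(to_list(env['A'] if (env['B'] or env['C']) else []))
print(lst)  # ['-flto']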
Example #54
0
def exec_command(self, cmd, **kw):
    if self.env.CC_NAME not in supported_compilers:
        return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)

    if not 'cwd' in kw:
        kw['cwd'] = self.get_cwd()

    if self.env.PATH:
        env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
        env['PATH'] = self.env.PATH if isinstance(
            self.env.PATH, str) else os.pathsep.join(self.env.PATH)

    # The Visual Studio IDE adds an environment variable that causes
    # the MS compiler to send its textual output directly to the
    # debugging window rather than normal stdout/stderr.
    #
    # This is unrecoverably bad for this tool because it will cause
    # all the dependency scanning to see an empty stdout stream and
    # assume that the file being compiled uses no headers.
    #
    # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
    #
    # Attempting to repair the situation by deleting the offending
    # envvar at this point in tool execution will not be good enough--
    # its presence poisons the 'waf configure' step earlier. We just
    # want to put a sanity check here in order to help developers
    # quickly diagnose the issue if an otherwise-good Waf tree
    # is then executed inside the MSVS IDE.
    assert 'VS_UNICODE_OUTPUT' not in kw['env']

    cmd, args = self.split_argfile(cmd)
    try:
        (fd, tmp) = tempfile.mkstemp()
        os.write(fd, '\r\n'.join(args).encode())
        os.close(fd)

        self.msvcdeps_paths = []
        kw['env'] = kw.get('env', os.environ.copy())
        kw['cwd'] = kw.get('cwd', os.getcwd())
        kw['quiet'] = Context.STDOUT
        kw['output'] = Context.STDOUT

        out = []
        if Logs.verbose:
            Logs.debug('argfile: @%r -> %r', tmp, args)
        try:
            raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
            ret = 0
        except Errors.WafError as e:
            # Use e.msg if e.stdout is not set
            raw_out = getattr(e, 'stdout', e.msg)

            # Return non-zero error code even if we didn't
            # get one from the exception object
            ret = getattr(e, 'returncode', 1)

        Logs.debug('msvcdeps: Running for: %s' % self.inputs[0])
        for line in raw_out.splitlines():
            if line.startswith(INCLUDE_PATTERN):
                # Only strip whitespace after log to preserve
                # dependency structure in debug output
                inc_path = line[len(INCLUDE_PATTERN):]
                Logs.debug('msvcdeps: Regex matched %s', inc_path)
                self.msvcdeps_paths.append(inc_path.strip())
            else:
                out.append(line)

        # Pipe through the remaining stdout content (not related to /showIncludes)
        if self.generator.bld.logger:
            self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
        else:
            sys.stdout.write(os.linesep.join(out) + os.linesep)

        return ret
    finally:
        try:
            os.remove(tmp)
        except OSError:
            # anti-virus and indexers can keep files open -_-
            pass
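A standalone sketch of the '/showIncludes' parsing above, fed with canned compiler output (INCLUDE_PATTERN is assumed to be 'Note: including file:', which is what an English-locale MSVC prints):

INCLUDE_PATTERN = 'Note: including file:'
raw_out = ("Note: including file: C:\\proj\\a.h\n"
           "main.cpp\n"
           "Note: including file:  C:\\proj\\inc\\b.h\n")
deps, out = [], []
for line in raw_out.splitlines():
    if line.startswith(INCLUDE_PATTERN):
        deps.append(line[len(INCLUDE_PATTERN):].strip())
    else:
        out.append(line)  # everything else is passed through
print(deps)  # ['C:\\proj\\a.h', 'C:\\proj\\inc\\b.h']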
Example #55
0
def post_run(self):
    # The following code is executed by threads; it is not safe, so a lock is needed...

    if self.env.CC_NAME not in supported_compilers:
        return self.no_gccdeps_post_run()

    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)

    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule:
    #
    #   /path/to/file.o: /path/to/dep1.h \
    #                    /path/to/dep2.h \
    #                    /path/to/dep3.h \
    #                    ...
    #
    # or as many individual rules:
    #
    #   /path/to/file.o: /path/to/dep1.h
    #   /path/to/file.o: /path/to/dep2.h
    #   /path/to/file.o: /path/to/dep3.h
    #   ...
    #
    # So the first step is to sanitize the input by stripping out the left-
    # hand side of all these lines. After that, whatever remains are the
    # implicit dependencies of task.outputs[0]
    txt = '\n'.join(
        [remove_makefile_rule_lhs(line) for line in txt.splitlines()])

    # Now join all the lines together
    txt = txt.replace('\\\n', '')

    val = txt.strip()
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

    nodes = []
    bld = self.generator.bld

    for x in val:

        node = None
        if os.path.isabs(x):
            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            path = bld.bldnode
            x = [k for k in Utils.split_path(x) if k and k != '.']
            while x and x[0] == '..':
                x = x[1:]
                path = path.parent

            # when calling find_resource, make sure the path does not begin by '..'
            try:
                lock.acquire()
                node = path.find_resource(x)
            finally:
                lock.release()

        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            if not c_preproc.go_absolute:
                if not (node.is_child_of(bld.srcnode)
                        or node.is_child_of(bld.bldnode)):
                    continue

            if id(node) == id(self.inputs[0]):
                # ignore the source file, it is already in the dependencies
                # this way, successful config tests may be retrieved from the cache
                continue

            nodes.append(node)

    Logs.debug('deps: real scanner for %s returned %s' %
               (str(self), str(nodes)))

    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []

    try:
        del self.cache_sig
    except AttributeError:
        pass

    Task.Task.post_run(self)
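The helper remove_makefile_rule_lhs used above is not part of this excerpt. A plausible implementation, assuming its only job is to drop the target side of each Makefile rule line without mis-splitting the colon of a Windows drive letter:

def remove_makefile_rule_lhs(line):
    # Split on ': ' rather than a bare ':' so the colon in Windows
    # drive letters (e.g. 'C:\\foo.h') is left alone.
    sep_idx = line.find(': ')
    if sep_idx >= 0:
        return line[sep_idx + 2:]
    return line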
Example #56
0
    def start(self, node, env):
        """
		Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
		and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.

		:param node: source file
		:type node: :py:class:`waflib.Node.Node`
		:param env: config set containing additional defines to take into account
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		"""
        Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

        self.current_file = node
        self.addlines(node)

        # macros may be defined on the command-line, so they must be parsed as if they were part of the file
        if env.DEFINES:
            lst = format_defines(env.DEFINES)
            lst.reverse()
            self.lines.extend([('define', x) for x in lst])

        while self.lines:
            (token, line) = self.lines.pop()
            if token == POPFILE:
                self.count_files -= 1
                self.currentnode_stack.pop()
                continue

            try:
                state = self.state

                # push a new state if we are about to enter an #if block
                if token[:2] == 'if':
                    state.append(undefined)
                elif token == 'endif':
                    state.pop()

                # skip lines when in a dead 'if' branch, wait for the endif
                if token[0] != 'e':
                    if skipped in self.state or ignored in self.state:
                        continue

                if token == 'if':
                    ret = eval_macro(tokenize(line), self.defs)
                    if ret:
                        state[-1] = accepted
                    else:
                        state[-1] = ignored
                elif token == 'ifdef':
                    m = re_mac.match(line)
                    if m and m.group() in self.defs:
                        state[-1] = accepted
                    else:
                        state[-1] = ignored
                elif token == 'ifndef':
                    m = re_mac.match(line)
                    if m and m.group() in self.defs:
                        state[-1] = ignored
                    else:
                        state[-1] = accepted
                elif token == 'include' or token == 'import':
                    (kind, inc) = extract_include(line, self.defs)
                    self.current_file = self.tryfind(inc, kind, env)
                    if token == 'import':
                        self.ban_includes.add(self.current_file)
                elif token == 'elif':
                    if state[-1] == accepted:
                        state[-1] = skipped
                    elif state[-1] == ignored:
                        if eval_macro(tokenize(line), self.defs):
                            state[-1] = accepted
                elif token == 'else':
                    if state[-1] == accepted:
                        state[-1] = skipped
                    elif state[-1] == ignored:
                        state[-1] = accepted
                elif token == 'define':
                    try:
                        self.defs[self.define_name(line)] = line
                    except AttributeError:
                        raise PreprocError('Invalid define line %r' % line)
                elif token == 'undef':
                    m = re_mac.match(line)
                    if m and m.group() in self.defs:
                        del self.defs[m.group()]
                elif token == 'pragma':
                    if re_pragma_once.match(line.lower()):
                        self.ban_includes.add(self.current_file)
            except Exception as e:
                if Logs.verbose:
                    Logs.debug('preproc: line parsing failed (%s): %s %s', e,
                               line, traceback.format_exc())
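The accepted/ignored/skipped values above form a small per-#if state machine: ignored means no branch of the chain has matched yet (a later #elif or #else may still fire), while skipped means an earlier branch already won. A toy illustration of the same three-state transitions, independent of waf (the names and input are invented):

ACCEPTED, IGNORED, SKIPPED = 'accepted', 'ignored', 'skipped'

def branch_states(events):
    # events: ('if'|'elif'|'else', condition) tuples for one #if chain
    state = None
    out = []
    for kind, cond in events:
        if kind == 'if':
            state = ACCEPTED if cond else IGNORED
        elif state == ACCEPTED:
            state = SKIPPED  # a branch already won, skip the rest
        elif state == IGNORED:
            # nothing taken yet: 'else' always fires, 'elif' needs its condition
            if kind == 'else' or cond:
                state = ACCEPTED
        out.append(state)
    return out

# '#if 0 / #elif 1 / #else' -> [IGNORED, ACCEPTED, SKIPPED]
assert branch_states([('if', False), ('elif', True), ('else', True)]) == [
    IGNORED, ACCEPTED, SKIPPED]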
Example #57
0
def get_solution_overrides(self):

    if self.cmd == 'generate_uber_files' or self.cmd == 'msvs':
        return {}

    # Only perform on VS-executed builds
    try:
        sln_file = self.options.execsolution
    except AttributeError:
        return {}

    if not sln_file:
        return {}

    if Utils.unversioned_sys_platform() != 'win32':
        return {}

    # Open the .sln file
    try:
        sln = open(sln_file)
    except Exception as e:
        Logs.debug(
            'warning: Unable to parse .sln file to extract configuration overrides: [File:%s] [Exception:%s,%s]'
            % (sln_file, sys.exc_info()[0], e))
        return {}

    ret_vs_project_override = {}
    vs_spec = self.convert_waf_spec_to_vs_spec(self.options.project_spec)
    vs_platform = self.convert_waf_platform_to_vs_platform(
        self.env['PLATFORM'])
    vs_configuration = self.convert_waf_configuration_to_vs_configuration(
        self.env['CONFIGURATION'])

    vs_build_configuration = '[%s] %s|%s' % (
        vs_spec, vs_configuration, vs_platform)  # Example: [Hunt] Debug|x64
    vs_project_identifier = 'Project("{8BC9CEB8'  # C++ project: 8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942

    # Iterate over the basic project information
    # Example:
    #   Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Cry3DEngine", "e:\P4\CE_STREAMS\Solutions\.depproj\Cry3DEngine.vcxproj", "{60178AE3-57FD-488C-9A53-4AE4F66419AA}"
    project_guid_to_name = {}
    file_iter = iter(sln)
    for line in file_iter:
        stripped_line = line.lstrip()
        if stripped_line.startswith(vs_project_identifier):
            project_info = stripped_line[51:].split(
                ','
            )  # skip the first 51 characters: 'Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") ='
            project_name = project_info[0].strip()[
                1:-1]  # trim left and right and remove '"'
            project_path = project_info[1].strip()[1:-1]
            project_guid = project_info[2].strip()[2:-2]

            # Store project GUID and Name pair
            project_guid_to_name[project_guid] = project_name

        elif stripped_line.startswith('Global'):
            next(file_iter)
            break

    # Skip to beginning of project configurations information
    for line in file_iter:
        if line.lstrip().startswith(
                'GlobalSection(ProjectConfigurationPlatforms) = postSolution'):
            next(file_iter)
            break

    # Loop over all project configuration entries
    # Example:
    # {60178AE3-57FD-488C-9A53-4AE4F66419AA}.[Hunt] Debug|x64.ActiveCfg = [GameSDK and Tools] Debug|x64
    # or
    # {60178AE3-57FD-488C-9A53-4AE4F66419AA}.[Hunt] Debug|x64.Build.0 = [GameSDK and Tools] Debug|x64
    for line in file_iter:
        stripped_line = line.strip()

        # Reached end of section
        if stripped_line.startswith('EndGlobalSection'):
            break

        if stripped_line[39:].startswith(vs_build_configuration):
            project_build_info = stripped_line.split('.')

            starts_of_override_configuration = project_build_info[-1].find('[')
            project_build_info[-1] = project_build_info[-1][
                starts_of_override_configuration:]  # remove anything prior to [xxx] e.g. "ActiveCfg = "

            vs_project_configuration = project_build_info[1]
            vs_project_override_configuration = project_build_info[-1]

            # Check for the no-override condition
            if vs_project_configuration == vs_project_override_configuration:
                continue

            project_guid = project_build_info[0][
                1:-1]  # remove surrounding '{' and '}'

            try:
                project_name = project_guid_to_name[project_guid]
            except KeyError:
                # Continue if the project GUID is not in the list.
                # Since only C++ projects are stored in the list, this project
                # is most likely of another type, e.g. C#, which the user will
                # have added to the solution manually.
                Logs.debug(
                    'Warning: Override Handling: Unsupported project id "%s" found. Project is most likely not a C++ project'
                    % project_guid)
                continue

            # Check that the spec is the same
            vs_override_spec_end = vs_project_override_configuration.find(']')
            vs_override_spec = vs_project_override_configuration[
                1:vs_override_spec_end]
            if vs_spec != vs_override_spec:
                self.cry_error(
                    'Project "%s": Invalid override spec is of type "%s" when it should be "%s"'
                    % (project_name, vs_override_spec, vs_spec))

            # Get the WAF configuration from the VS project configuration
            # e.g. [Hunt] Debug|x64 -> debug
            vs_project_configuration_end = vs_project_override_configuration.rfind(
                '|')
            vs_project_configuration_start = vs_project_override_configuration.rfind(
                ']', 0, vs_project_configuration_end) + 2
            vs_project_configuration = vs_project_override_configuration[
                vs_project_configuration_start:vs_project_configuration_end]
            waf_configuration = self.convert_vs_configuration_to_waf_configuration(
                vs_project_configuration)

            # Store override
            ret_vs_project_override[project_name] = waf_configuration

    sln.close()
    return ret_vs_project_override
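For reference, here is the slicing logic above applied by hand to one hypothetical ProjectConfigurationPlatforms entry (the GUID, spec, and configuration names are invented for the example):

line = '{60178AE3-57FD-488C-9A53-4AE4F66419AA}.[Hunt] Debug|x64.ActiveCfg = [GameSDK and Tools] Debug|x64'
parts = line.split('.')
guid = parts[0][1:-1]    # '60178AE3-57FD-488C-9A53-4AE4F66419AA'
solution_cfg = parts[1]  # '[Hunt] Debug|x64'
override = parts[-1]
override = override[override.find('['):]  # drop 'ActiveCfg = ' -> '[GameSDK and Tools] Debug|x64'

# the configuration name sits between the closing ']' and the '|'
end = override.rfind('|')
start = override.rfind(']', 0, end) + 2
assert override[start:end] == 'Debug'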
Example #58
0
def compile_fun_shell(line):
    """
	Creates a compiled function to execute a process through a sub-shell
	"""
    extr = []

    def repl(match):
        g = match.group
        if g('dollar'):
            return "$"
        elif g('backslash'):
            return '\\\\'
        elif g('subst'):
            extr.append((g('var'), g('code')))
            return "%s"
        return None

    line = reg_act.sub(repl, line) or line

    def replc(m):
        # performs substitutions and populates dvars
        if m.group('and'):
            return ' and '
        elif m.group('or'):
            return ' or '
        else:
            x = m.group('var')
            if x not in dvars:
                dvars.append(x)
            return 'env[%r]' % x

    parm = []
    dvars = []
    app = parm.append
    for (var, meth) in extr:
        if var == 'SRC':
            if meth: app('tsk.inputs%s' % meth)
            else: app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
        elif var == 'TGT':
            if meth: app('tsk.outputs%s' % meth)
            else: app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
        elif meth:
            if meth.startswith(':'):
                if var not in dvars:
                    dvars.append(var)
                m = meth[1:]
                if m == 'SRC':
                    m = '[a.path_from(cwdx) for a in tsk.inputs]'
                elif m == 'TGT':
                    m = '[a.path_from(cwdx) for a in tsk.outputs]'
                # SRC/TGT-prefixed expressions such as 'SRC[0].parent'
                elif m.startswith('SRC') and re_novar.match(m):
                    m = '[tsk.inputs%s]' % m[3:]
                elif m.startswith('TGT') and re_novar.match(m):
                    m = '[tsk.outputs%s]' % m[3:]
                elif m[:3] not in ('tsk', 'gen', 'bld'):
                    dvars.append(meth[1:])
                    m = '%r' % m
                app('" ".join(tsk.colon(%r, %s))' % (var, m))
            elif meth.startswith('?'):
                # In A?B|C output env.A if one of env.B or env.C is non-empty
                expr = re_cond.sub(replc, meth[1:])
                app('p(%r) if (%s) else ""' % (var, expr))
            else:
                app('%s%s' % (var, meth))
        else:
            if var not in dvars:
                dvars.append(var)
            app("p('%s')" % var)
    if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
    else: parm = ''

    c = COMPILE_TEMPLATE_SHELL % (line, parm)
    Logs.debug('action: %s', c.strip().splitlines())
    return (funex(c), dvars)
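The first pass above (reg_act.sub with repl) rewrites ${...} expressions in a rule string into '%s' slots while recording what was matched, and the recorded pairs then drive the code generation. A stripped-down illustration of that extraction pass, using a simplified pattern instead of waf's reg_act (which also handles escaped dollars, backslashes, and ${VAR:...} forms):

import re

simple_act = re.compile(r'\$\{(?P<var>\w+)\}')  # plain ${VAR} only

def extract_slots(line):
    extr = []
    def repl(match):
        extr.append(match.group('var'))
        return '%s'
    return simple_act.sub(repl, line), extr

template, slots = extract_slots('${CC} ${CFLAGS} ${SRC} -o ${TGT}')
assert template == '%s %s %s -o %s'
assert slots == ['CC', 'CFLAGS', 'SRC', 'TGT']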
Example #59
0
    def post(self):
        """
        Create task objects. The following operations are performed:

        #. The body of this method is called only once and sets the attribute ``posted``
        #. The attribute ``features`` is used to add more methods in ``self.meths``
        #. The methods are sorted by the precedence table ``self.prec`` or :py:attr:`waflib.TaskGen.task_gen.prec`
        #. The methods are then executed in order
        #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
        """

        # we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
        if getattr(self, 'posted', None):
            #error("OBJECT ALREADY POSTED" + str( self))
            return False
        self.posted = True

        keys = set(self.meths)

        # add the methods listed in the features
        self.features = Utils.to_list(self.features)
        for x in self.features + ['*']:
            st = feats[x]
            if not st:
                if x not in Task.classes:
                    Logs.warn(
                        'feature %r does not exist - bind at least one method to it'
                        % x)
            keys.update(list(st))  # ironpython 2.7 wants the cast to list

        # copy the precedence table
        prec = {}
        prec_tbl = self.prec or task_gen.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]

        # elements disconnected
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x: break
            else:
                tmp.append(a)

        # TODO waf 1.7
        #tmp.sort()

        # topological sort
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys: out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                for x in nlst:
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)

        if prec:
            raise Errors.WafError('Cycle detected in the method execution %r' %
                                  prec)
        out.reverse()
        self.meths = out

        # then we run the methods in order
        Logs.debug('task_gen: posting %s %d', self, id(self))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)', x, id(self))
            v()

        Logs.debug('task_gen: posted %s', self.name)
        return True
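The worklist loop above is a reverse Kahn-style topological sort: methods that are not listed as a predecessor of anything seed the list, popping a method releases its predecessors, and the final reverse() puts predecessors first. The same shape on a tiny made-up precedence table (the method names are real waf generator methods, but the table itself is invented):

# prec[m] lists the methods that must run *before* m
prec = {'apply_link': ['process_source'], 'process_source': ['process_rule']}
keys = {'process_rule', 'process_source', 'apply_link', 'unrelated'}

# seed with methods that precede nothing
tmp = [k for k in keys if not any(k in v for v in prec.values())]

out = []
while tmp:
    e = tmp.pop()
    if e in keys:
        out.append(e)
    for x in prec.pop(e, []):
        # release x once no remaining entry lists it as a predecessor
        if not any(x in v for v in prec.values()):
            tmp.append(x)

out.reverse()
# e.g. ['process_rule', 'process_source', 'apply_link', 'unrelated'];
# 'unrelated' may land anywhere since nothing constrains it
assert out.index('process_rule') < out.index('process_source') < out.index('apply_link')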
Example #60
0
		def add_paths(var, lst):
			# Add list of paths to a variable, lst can contain strings or nodes
			lst = [ str(n) for n in lst ]
			Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
			self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
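A typical caller in the waf_unit_test-style harness this helper belongs to would use it to make freshly built libraries visible to the spawned test, along these lines (a hypothetical sketch; lib_paths would come from the dependencies of the test task):

# hypothetical usage next to the helper above
lib_paths = ['/tmp/build/lib']  # placeholder for the computed library dirs
if Utils.is_win32:
    add_paths('PATH', lib_paths)
elif Utils.unversioned_sys_platform() == 'darwin':
    add_paths('DYLD_LIBRARY_PATH', lib_paths)
    add_paths('LD_LIBRARY_PATH', lib_paths)
else:
    add_paths('LD_LIBRARY_PATH', lib_paths)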