Пример #1
0
def libname_msvc(self,libname,is_static=False):
	"""Map a library name to the MSVC .lib file to link against.

	Searches lt_path (found via libtool .la files) plus self.libpaths for
	the static or dynamic import-library naming variants of *libname*.
	Returns the library filename (or a full path for libtool static
	libraries), or None when nothing usable was found or when the library
	needs no linking ('m').
	"""
	lib=libname.lower()
	# raw strings: '\.' in a plain literal is a deprecated escape sequence
	lib=re.sub(r'\.lib$','',lib)
	if lib in g_msvc_systemlibs:
		# system libraries are linked by bare name
		return lib+'.lib'
	lib=re.sub(r'^lib','',lib)
	if lib=='m':
		# math functions are part of the MSVC runtime; nothing to link
		return None
	(lt_path,lt_libname,lt_static)=find_lt_names_msvc(self,lib,is_static)
	if lt_path is not None and lt_libname is not None:
		if lt_static:
			# libtool knows the exact static archive; use it directly
			return os.path.join(lt_path,lt_libname)
	if lt_path is not None:
		_libpaths=[lt_path]+self.libpaths
	else:
		_libpaths=self.libpaths
	static_libs=['%ss.lib'%lib,'lib%ss.lib'%lib,'%s.lib'%lib,'lib%s.lib'%lib,]
	dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
	libnames=static_libs
	if not is_static:
		# dynamic import-library variants take precedence over static ones
		libnames=dynamic_libs+static_libs
	for path in _libpaths:
		for libn in libnames:
			candidate=os.path.join(path,libn)
			if os.path.exists(candidate):
				debug('msvc: lib found: %s'%candidate)
				return libn
	return None
Пример #2
0
def compile_fun(name,line):
	"""Compile an action string such as '${CC} ${SRC} -o ${TGT}' into a
	python function that runs the command through a shell.

	Returns (function, dvars) where dvars lists the environment variable
	names the command depends on (used for task signatures)."""
	extr=[]
	def repl(match):
		# substitution callback: record each ${VAR...} occurrence and
		# leave a '%s' placeholder in the command template
		g=match.group
		if g('dollar'):return"$"
		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
		return None
	line=reg_act.sub(repl,line)
	parm=[]
	dvars=[]
	app=parm.append
	for(var,meth)in extr:
		if var=='SRC':
			# SRC expands to the task input paths (or an attribute of them)
			if meth:app('task.inputs%s'%meth)
			else:app('" ".join([a.srcpath(env) for a in task.inputs])')
		elif var=='TGT':
			# TGT expands to the task output paths
			if meth:app('task.outputs%s'%meth)
			else:app('" ".join([a.bldpath(env) for a in task.outputs])')
		else:
			# other variables come from the environment; remember them so
			# the task signature changes when their values do
			if not var in dvars:dvars.append(var)
			app("p('%s')"%var)
	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
	else:parm=''
	c='''
def f(task):
	env = task.env
	p = env.get_flat
	cmd = "%s" %s
	return task.generator.bld.exec_command(cmd)
'''%(line,parm)
	debug('action: %s'%c)
	return(funex(c),dvars)
Пример #3
0
def apply_msvc_obj_vars(self):
	"""Translate the abstract link variables (LIBPATH, LIB, STATICLIB...)
	into msvc-specific LINKFLAGS entries."""
	debug('msvc: apply_msvc_obj_vars called for msvc')
	env=self.env
	app=env.append_unique
	cpppath_st=env['CPPPATH_ST']
	lib_st=env['LIB_ST']
	staticlib_st=env['STATICLIB_ST']
	libpath_st=env['LIBPATH_ST']
	staticlibpath_st=env['STATICLIBPATH_ST']
	# emit the search paths twice: first with the dynamic-path pattern,
	# then with the static-path pattern (flag order matters to the linker)
	for pth in env['LIBPATH']:
		app('LINKFLAGS',libpath_st%pth)
		if pth not in self.libpaths:
			self.libpaths.append(pth)
	for pth in env['LIBPATH']:
		app('LINKFLAGS',staticlibpath_st%pth)
		if pth not in self.libpaths:
			self.libpaths.append(pth)
	if not env['FULLSTATIC']:
		if env['STATICLIB'] or env['LIB']:
			app('LINKFLAGS',env['SHLIB_MARKER'])
	if env['STATICLIB']:
		app('LINKFLAGS',env['STATICLIB_MARKER'])
		for name in env['STATICLIB']:
			debug('msvc: libname: %s'%name)
			fixed=libname_msvc(self,name,True)
			debug('msvc: libnamefixed: %s'%fixed)
			if fixed is not None:
				app('LINKFLAGS',fixed)
	if self.env['LIB']:
		for name in env['LIB']:
			debug('msvc: libname: %s'%name)
			fixed=libname_msvc(self,name)
			debug('msvc: libnamefixed: %s'%fixed)
			if fixed is not None:
				app('LINKFLAGS',fixed)
Пример #4
0
	def listdir_src(self, parent_node, path):
		"""Scan one source folder: refresh file signatures and drop nodes
		whose files have disappeared.

		@param parent_node [Node]: parent node of path to scan.
		@param path [string]: path to folder to scan."""

		listed_files = set(Utils.listdir(path))
		self.cache_dir_contents[parent_node.id] = listed_files
		debug('build: folder contents '+str(listed_files))

		# names of the FILE children currently attached to the node
		node_names = set([x.name for x in parent_node.childs.values() if x.id & 3 == Node.FILE])
		cache = self.node_sigs[0]

		# files still on disk: refresh their content signature
		for name in listed_files & node_names:
			node = parent_node.childs[name]
			try:
				# join the path manually (avoids the cost of node.abspath)
				cache[node.id] = Utils.h_file(path + os.sep + node.name)
			except IOError:
				raise Utils.WafError("The file %s is not readable or has become a dir" % node.abspath())

		# files gone from disk: forget both the node and its signature
		gone = node_names - listed_files
		if gone:
			# infrequent scenario
			cache = self.node_sigs[0]
			for name in gone:
				nd = parent_node.childs[name]
				if nd.id in cache:
					del cache[nd.id]
				del parent_node.childs[name]
Пример #5
0
def exec_mf(self):
    """Embed the side-by-side manifest produced next to the first input
    (if any) into the binary using mt.exe.

    Returns mt.exe's exit status, or 0 when there is no manifest file or
    no manifest tool is configured."""
    env = self.env
    outfile = self.inputs[0].bldpath(env)
    manifest = outfile + ".manifest"
    # BUGFIX: 'ret' was only assigned inside the branch below, so a
    # missing manifest file raised UnboundLocalError at the final return
    ret = 0
    if os.path.exists(manifest):
        debug("msvc: manifesttool")
        mtool = env["MT"]
        if not mtool:
            return 0

        # embedding mode. Different for EXE's and DLL's.
        # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
        mode = ""
        if "cprogram" in self.generator.features:
            mode = "1"
        elif "cshlib" in self.generator.features:
            mode = "2"

        debug("msvc: embedding manifest")

        lst = []
        lst.extend(Utils.to_list(env["MT"]))
        lst.extend(Utils.to_list(env["MTFLAGS"]))
        lst.extend(Utils.to_list("-manifest"))
        lst.extend(Utils.to_list(manifest))
        lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))

        # pass the argument list as-is (no shell)
        lst = [lst]
        ret = self.exec_command(*lst)

    return ret
Пример #6
0
	def clean(self):
		"""Remove build outputs, keeping the files created at configuration
		time (dep_files); also reset the caches when cleaning everything."""
		debug('build: clean called')

		def clean_rec(node):
			# depth-first walk, removing BUILD files and their nodes
			for name in list(node.childs.keys()):
				child = node.childs[name]
				kind = child.id & 3
				if kind == Node.DIR:
					clean_rec(child)
				elif kind == Node.BUILD:
					for env in self.all_envs.values():
						pt = child.abspath(env)
						# never delete configuration outputs
						if pt in env['waf_config_files']:
							continue
						try:
							os.remove(pt)
						except OSError:
							pass
					del node.childs[name]

		# take the node of current dir, or the root node if failed to get the current
		node = self.srcnode
		clean_rec(node)

		# when cleaning all, remove cache, dependencies and signatures
		if node == self.srcnode:
			for attr in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
				setattr(self, attr, {})
Пример #7
0
    def start(self, node, env):
        """Preprocess *node*: parse it (and its includes) line by line,
        maintaining the conditional-compilation state."""
        debug("preproc: scanning %s (in %s)", node.name, node.parent.name)

        self.env = env
        variant = node.variant(env)
        bld = node.__class__.bld
        # share one parse cache across all scans of this build context
        try:
            self.parse_cache = bld.parse_cache
        except AttributeError:
            bld.parse_cache = {}
            self.parse_cache = bld.parse_cache

        self.addlines(node)
        # command-line defines are processed before the file contents
        if env["DEFLINES"]:
            self.lines = [("define", x) for x in env["DEFLINES"]] + self.lines

        while self.lines:
            (kind, line) = self.lines.pop(0)
            # POPFILE marks the end of an included file
            if kind == POPFILE:
                self.currentnode_stack.pop()
                continue
            try:
                self.process_line(kind, line)
            except Exception, e:
                # parsing errors are not fatal; log them in verbose mode
                if Logs.verbose:
                    debug("preproc: line parsing failed (%s): %s %s", e, line, Utils.ex_stack())
Пример #8
0
def detect(conf):
    """
	for each compiler for the platform, try to configure the compiler
	in theory the tools should raise a configuration error if the compiler
	pretends to be something it is not (setting CC=icc and trying to configure gcc)
	"""
    try:
        test_for_compiler = Options.options.check_c_compiler
    except AttributeError:
        conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
    orig = conf.env
    # try each candidate compiler on a copy of the environment so a
    # failed attempt does not pollute the real configuration
    for compiler in test_for_compiler.split():
        conf.env = orig.copy()
        try:
            conf.check_tool(compiler)
        except Configure.ConfigurationError, e:
            # this candidate failed to configure; log it and try the next
            debug('compiler_cc: %r' % e)
        else:
            if conf.env['CC']:
                # success: merge the detected values back and stop looking
                orig.table = conf.env.get_merged_dict()
                conf.env = orig
                conf.check_message(compiler, '', True)
                conf.env['COMPILER_CC'] = compiler
                break
            conf.check_message(compiler, '', False)
            break
Пример #9
0
def scan(self):
	"""Scan a D source file for its dependencies.

	Returns (nodes, names): the nodes found by the parser and the names
	that could not be resolved to nodes."""
	env=self.env
	parser=d_parser(env,env['INC_PATHS'])
	parser.start(self.inputs[0])
	if Logs.verbose:
		debug('deps: nodes found for %s: %s %s'%(str(self.inputs[0]),str(parser.nodes),str(parser.names)))
	return (parser.nodes, parser.names)
Пример #10
0
	def sig_implicit_deps(self):
		"""Compute the signature of the implicit dependencies (headers...).

		Reuses the signature from the previous run when it is still valid;
		otherwise drops the stale data and raises ValueError('rescan') so
		the caller restarts the computation, or rescans directly when no
		previous run exists."""
		bld=self.generator.bld
		key=self.unique_id()
		prev_sigs=bld.task_sigs.get(key,())
		if prev_sigs:
			try:
				# quick path: previous dependency signature still valid
				if prev_sigs[2]==self.compute_sig_implicit_deps():
					return prev_sigs[2]
			except(KeyError,OSError):
				pass
			# stale data: drop it and ask the caller to rescan
			del bld.task_sigs[key]
			raise ValueError('rescan')
		(nodes,names)=self.scan()
		if Logs.verbose:
			debug('deps: scanner for %s returned %s %s',str(self),str(nodes),str(names))
		bld.node_deps[key]=nodes
		bld.raw_deps[key]=names
		try:
			sig=self.compute_sig_implicit_deps()
		except KeyError:
			# best-effort: list the nodes lacking signatures for the error message
			try:
				nodes=[]
				# reuse 'key' (was recomputed via unique_id() before)
				for k in bld.node_deps.get(key,[]):
					if k.id&3==2:
						if not k.id in bld.node_sigs[0]:
							nodes.append(k)
					else:
						if not k.id in bld.node_sigs[self.env.variant()]:
							nodes.append(k)
			except Exception:
				# was a bare 'except:' which also swallowed SystemExit/KeyboardInterrupt
				nodes='?'
			raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)'%(nodes,self))
		return sig
Пример #11
0
 def load(self):
     """Restore the saved build state: tool configuration from the cache
     dir, then the pickled node/signature data from the build dir."""
     try:
         cfg = Environment.Environment(
             os.path.join(self.cachedir, 'build.config.py'))
     except (IOError, OSError):
         pass
     else:
         if cfg['version'] < HEXVERSION:
             raise Utils.WafError(
                 'Version mismatch! reconfigure the project')
         for tool in cfg['tools']:
             self.setup(**tool)
     try:
         # unpickling many small objects is much faster with gc off
         gc.disable()
         f = data = None
         Node.Nodu = self.node_class
         try:
             f = open(os.path.join(self.bdir, DBFILE), 'rb')
         except (IOError, EOFError):
             pass
         try:
             if f:
                 data = cPickle.load(f)
         except AttributeError:
             # incompatible cache contents; ignore unless verbose debugging
             if Logs.verbose > 1:
                 raise
         if data:
             for attr in SAVED_ATTRS:
                 setattr(self, attr, data[attr])
         else:
             debug('build: Build cache loading failed')
     finally:
         if f:
             f.close()
         gc.enable()
Пример #12
0
def check_syslib_dependencies(bld, t):
    '''Report which system libraries target *t* should declare, based on
    its still-unsatisfied symbols.'''

    if bld.get_tgen_by_name(t.sname + ".objlist"):
        return

    sname = real_name(t.sname)
    remaining = set()

    # python symbols are provided by the pyembed/pyext features
    features = TO_LIST(t.features)
    if 'pyembed' in features or 'pyext' in features:
        if 'python' in bld.env.public_symbols:
            t.unsatisfied_symbols = t.unsatisfied_symbols.difference(bld.env.public_symbols['python'])

    # group the unsatisfied symbols by the system library providing them
    needed = {}
    for sym in t.unsatisfied_symbols:
        if sym not in bld.env.symbol_map:
            remaining.add(sym)
            continue
        dep = bld.env.symbol_map[sym][0]
        # symbols from the C library need no explicit dependency
        if dep == 'c':
            continue
        needed.setdefault(dep, set()).add(sym)

    for dep in needed:
        Logs.info("Target '%s' should add syslib dep '%s' for symbols %s" % (sname, dep, " ".join(needed[dep])))

    if remaining:
        debug("deps: Target '%s' has unsatisfied symbols: %s" % (sname, " ".join(remaining)))
Пример #13
0
def exec_mf(self):
    """Embed the manifest output node (if one exists) into the binary
    with mt.exe; returns the tool's exit status, or 0 when skipped."""
    env = self.env
    if not env['MT']:
        return 0
    self.do_manifest = False
    outfile = self.outputs[0].bldpath(env)
    # locate the generated .manifest among the outputs
    manifest = None
    for node in self.outputs:
        if node.name.endswith('.manifest'):
            manifest = node.bldpath(env)
            break
    if manifest is None:
        return 0
    # embedding mode differs between programs (1) and shared libs (2)
    if 'cprogram' in self.generator.features:
        mode = '1'
    elif 'cshlib' in self.generator.features:
        mode = '2'
    else:
        mode = ''
    debug('msvc: embedding manifest')
    cmd = [env['MT']]
    cmd.extend(Utils.to_list(env['MTFLAGS']))
    cmd.extend(Utils.to_list("-manifest"))
    cmd.extend(Utils.to_list(manifest))
    cmd.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
    return self.exec_command(*[cmd])
Пример #14
0
def exec_mf(self):
    """Embed the side-by-side manifest (produced next to the first input)
    into the binary with mt.exe.

    Returns mt.exe's exit status, or 0 when there is no manifest file or
    no manifest tool is configured."""
    env = self.env
    outfile = self.inputs[0].bldpath(env)
    manifest = outfile + '.manifest'
    # BUGFIX: 'ret' was only bound inside the branch below, so a missing
    # manifest file raised UnboundLocalError at the final return
    ret = 0
    if os.path.exists(manifest):
        debug('msvc: manifesttool')
        mtool = env['MT']
        if not mtool:
            return 0

        # embedding mode. Different for EXE's and DLL's.
        # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
        mode = ''
        if 'cprogram' in self.generator.features:
            mode = '1'
        elif 'cshlib' in self.generator.features:
            mode = '2'

        debug('msvc: embedding manifest')

        lst = []
        lst.extend(Utils.to_list(env['MT']))
        lst.extend(Utils.to_list(env['MTFLAGS']))
        lst.extend(Utils.to_list("-manifest"))
        lst.extend(Utils.to_list(manifest))
        lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))

        # pass the argument list as-is (no shell)
        lst = [lst]
        ret = self.exec_command(*lst)

    return ret
Пример #15
0
def replace_grouping_libraries(bld, tgt_list):
    '''replace dependencies based on grouping libraries

    If a library is marked as a grouping library, then any target that
    depends on a subsystem that is part of that grouping library gets
    that dependency replaced with a dependency on the grouping library
    '''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # map each grouped subsystem to the grouping library containing it
    grouping = {}
    for t in tgt_list:
        if not getattr(t, 'grouping_library', False):
            continue
        for dep in t.samba_deps_extended:
            bld.ASSERT(dep in targets, "grouping library target %s not declared in %s" % (dep, t.sname))
            if targets[dep] == 'SUBSYSTEM':
                grouping[dep] = t.sname

    # rewrite dependencies in place (a grouping library never depends on itself)
    for t in tgt_list:
        for idx, dep in enumerate(t.samba_deps_extended):
            if dep in grouping and t.sname != grouping[dep]:
                debug("deps: target %s: replacing dependency %s with grouping library %s" % (t.sname, dep, grouping[dep]))
                t.samba_deps_extended[idx] = grouping[dep]
Пример #16
0
 def install(self):
     """Flush pending tasks and, when uninstalling, remove the recorded
     files' now-empty parent directories (deepest first)."""
     debug("build: install called")
     self.flush()
     if self.is_install >= 0:
         return
     # collect the distinct parent directories of the uninstalled files
     dirs = []
     for path in self.uninstall:
         d = os.path.dirname(path)
         if d not in dirs:
             dirs.append(d)
     dirs.sort()
     dirs.reverse()
     # add every ancestor directory too; very short paths (<= 4 chars)
     # are left alone as they are presumably near the filesystem root
     candidates = []
     for d in dirs:
         cur = d
         while len(cur) > 4:
             if cur not in candidates:
                 candidates.append(cur)
             cur = os.path.dirname(cur)
     candidates.sort()
     candidates.reverse()
     # rmdir only succeeds on empty directories; ignore the rest
     for d in candidates:
         try:
             os.rmdir(d)
         except OSError:
             pass
Пример #17
0
def compile_fun_noshell(name,line):
	"""Compile an action string into a python function that executes the
	command directly (no shell), building an argument list 'lst'.

	Returns (function, dvars) where dvars lists the environment variable
	names the command depends on (used for task signatures)."""
	extr=[]
	def repl(match):
		# substitution callback: record each ${VAR...} occurrence and
		# replace it with a split marker
		g=match.group
		if g('dollar'):return"$"
		elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
		return None
	line2=reg_act.sub(repl,line)
	params=line2.split('<<|@|>>')
	buf=[]
	dvars=[]
	app=buf.append
	# interleave the literal command words with the variable expansions
	for x in range(len(extr)):
		params[x]=params[x].strip()
		if params[x]:
			app("lst.extend(%r)"%params[x].split())
		(var,meth)=extr[x]
		if var=='SRC':
			if meth:app('lst.append(task.inputs%s)'%meth)
			else:app("lst.extend([a.srcpath(env) for a in task.inputs])")
		elif var=='TGT':
			if meth:app('lst.append(task.outputs%s)'%meth)
			else:app("lst.extend([a.bldpath(env) for a in task.outputs])")
		else:
			app('lst.extend(to_list(env[%r]))'%var)
			# remember the variable so the task signature tracks it
			if not var in dvars:dvars.append(var)
	# trailing literal text after the last substitution
	if params[-1]:
		app("lst.extend(%r)"%shlex.split(params[-1]))
	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
	debug('action: %s',fun)
	return(funex(fun),dvars)
Пример #18
0
	def process_line(self, token, line):
		"""Dispatch one preprocessor directive (*token*) with its argument
		text (*line*), updating the conditional-compilation state stack.

		WARNING: a new state must be added for if* because the endif
		"""
		ve = Logs.verbose
		if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
		state = self.state

		# make certain we define the state if we are about to enter in an if block
		if token in ['ifdef', 'ifndef', 'if']:
			state.append(undefined)
		elif token == 'endif':
			state.pop()

		# skip lines when in a dead 'if' branch, wait for the endif
		if not token in ['else', 'elif', 'endif']:
			if skipped in self.state or ignored in self.state:
				return

		if token == 'if':
			# evaluate the condition against the known defines
			ret = eval_macro(tokenize(line), self.defs)
			if ret: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifdef':
			m = re_mac.match(line)
			if m and m.group(0) in self.defs: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifndef':
			m = re_mac.match(line)
			if m and m.group(0) in self.defs: state[-1] = ignored
			else: state[-1] = accepted
		elif token == 'include' or token == 'import':
			(kind, inc) = extract_include(line, self.defs)
			# banned includes: '#pragma once' files and '#import'ed ones
			if inc in self.ban_includes: return
			if token == 'import': self.ban_includes.add(inc)
			if ve: debug('preproc: include found %s    (%s) ', inc, kind)
			# angle-bracket includes are only followed in non-strict mode
			if kind == '"' or not strict_quotes:
				self.tryfind(inc)
		elif token == 'elif':
			# once a branch was accepted, every later branch is skipped
			if state[-1] == accepted:
				state[-1] = skipped
			elif state[-1] == ignored:
				if eval_macro(tokenize(line), self.defs):
					state[-1] = accepted
		elif token == 'else':
			if state[-1] == accepted: state[-1] = skipped
			elif state[-1] == ignored: state[-1] = accepted
		elif token == 'define':
			try:
				self.defs[define_name(line)] = line
			except:
				raise PreprocError("invalid define line %s" % line)
		elif token == 'undef':
			m = re_mac.match(line)
			if m and m.group(0) in self.defs:
				self.defs.__delitem__(m.group(0))
				#print "undef %s" % name
		elif token == 'pragma':
			# '#pragma once': never re-include the current file
			if re_pragma_once.match(line.lower()):
				self.ban_includes.add(self.curfile)
Пример #19
0
def compile_fun_shell(name,line):
	"""Compile an action string into a python function that runs the
	command through a shell.

	Returns (function, dvars) where dvars lists the environment variable
	names the command depends on (used for task signatures)."""
	extr=[]
	def repl(match):
		# substitution callback: keep literal '$', escape backslashes,
		# and turn ${VAR...} occurrences into '%s' placeholders
		g=match.group
		if g('dollar'):return"$"
		elif g('backslash'):return'\\\\'
		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
		return None
	line=reg_act.sub(repl,line)
	parm=[]
	dvars=[]
	app=parm.append
	for(var,meth)in extr:
		if var=='SRC':
			# SRC expands to the task input paths
			if meth:app('task.inputs%s'%meth)
			else:app('" ".join([a.srcpath(env) for a in task.inputs])')
		elif var=='TGT':
			# TGT expands to the task output paths
			if meth:app('task.outputs%s'%meth)
			else:app('" ".join([a.bldpath(env) for a in task.outputs])')
		else:
			# other variables come from the environment; record them so
			# the task signature tracks their values
			if not var in dvars:dvars.append(var)
			app("p('%s')"%var)
	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
	else:parm=''
	c=COMPILE_TEMPLATE_SHELL%(line,parm)
	debug('action: %s',c)
	return(funex(c),dvars)
Пример #20
0
	def load_dirs(self, srcdir, blddir, load_cache=1):
		"""Set up the source and build directory nodes; this is the entry
		point for everything else."""

		assert os.path.isabs(srcdir)
		assert os.path.isabs(blddir)

		self.cachedir = os.path.join(blddir, CACHE_DIR)

		if srcdir == blddir:
			raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))

		self.bdir = blddir

		# try to load the cache file, if it does not exist, nothing happens
		self.load()

		if not self.root:
			Node.Nodu = self.node_class
			self.root = Node.Nodu('', None, Node.DIR)

		if not self.srcnode:
			self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
		debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)

		self.path = self.srcnode

		# the build dir may not exist yet; ignore the error if it does
		try:
			os.makedirs(blddir)
		except OSError:
			pass

		if not self.bldnode:
			self.bldnode = self.root.ensure_dir_node_from_path(blddir)

		self.init_variants()
Пример #21
0
	def runnable_status(self):
		"""Decide whether this task must run: ASK_LATER while producer
		tasks are pending, RUN_ME when signatures differ or outputs are
		missing, SKIP_ME otherwise."""
		if self.inputs and not self.outputs:
			if not getattr(self.__class__, 'quiet', None):
				warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r"%self)
		# wait until every predecessor task has run
		for dep in self.run_after:
			if not dep.hasrun:
				return ASK_LATER
		env = self.env
		bld = self.generator.bld
		new_sig = self.signature()
		key = self.unique_id()
		try:
			prev_sig = bld.task_sigs[key][0]
		except KeyError:
			debug("task: task %r must run as it was never run before or the task code changed", self)
			return RUN_ME
		# all outputs must exist and carry the current signature
		for node in self.outputs:
			variant = node.variant(env)
			try:
				if bld.node_sigs[variant][node.id] != new_sig:
					return RUN_ME
			except KeyError:
				debug("task: task %r must run as the output nodes do not exist", self)
				return RUN_ME
		if Logs.verbose:
			self.debug_why(bld.task_sigs[key])
		if new_sig != prev_sig:
			return RUN_ME
		return SKIP_ME
Пример #22
0
    def can_retrieve_cache(self):
        """Try to copy this task's outputs from the global cache.

        Each cache file is touched on use so rarely-used entries can be
        cleaned out by age; be careful when overriding. Returns 1 on
        success, None when caching is disabled or a file is unavailable."""
        if not Options.cache_global:
            return None
        if Options.options.nocache:
            return None
        if not self.outputs:
            return None

        env = self.env
        sig = self.signature()

        for cnt, node in enumerate(self.outputs):
            variant = node.variant(env)
            ssig = sig.encode('hex')
            cached = os.path.join(Options.cache_global,
                                  '%s_%d_%s' % (ssig, cnt, node.name))
            try:
                shutil.copy2(cached, node.abspath(env))
                # mark the cache file as used recently (modified)
                os.utime(cached, None)
            except (OSError, IOError):
                debug('task: failed retrieving file')
                return None

        # every file was restored: record the signatures and report it
        for node in self.outputs:
            self.generator.bld.node_sigs[variant][node.id] = sig
            self.generator.bld.printout('restoring from cache %r\n' %
                                        node.bldpath(env))

        return 1
Пример #23
0
    def sig_implicit_deps(self):
        """Return the signature of the implicit dependencies.

        The signature obtained may not be the one if the files have
        changed, so it is computed in two steps."""

        bld = self.generator.bld

        # reuse the signature from a previous run when it is still valid
        key = self.unique_id()
        prev_sigs = bld.task_sigs.get(key, ())
        if prev_sigs:
            try:
                # for issue #379
                if prev_sigs[2] == self.compute_sig_implicit_deps():
                    return prev_sigs[2]
            except (KeyError, OSError):
                pass

        # first run, or the dependencies changed: rescan them
        nodes, names = self.scan()
        if Logs.verbose:
            debug('deps: scanner for %s returned %s %s' %
                  (str(self), str(nodes), str(names)))

        # cache the scan results, then recompute the signature
        bld.node_deps[key] = nodes
        bld.raw_deps[key] = names
        return self.compute_sig_implicit_deps()
Пример #24
0
 def can_retrieve_cache(self):
     """Copy this task's outputs back from the global cache; returns 1
     on success, None when caching is off or a file is unavailable."""
     if not Options.cache_global:
         return None
     if Options.options.nocache:
         return None
     if not self.outputs:
         return None
     env = self.env
     sig = self.signature()
     for cnt, node in enumerate(self.outputs):
         variant = node.variant(env)
         ssig = sig.encode('hex')
         cached = os.path.join(Options.cache_global,
                               '%s_%d_%s' % (ssig, cnt, node.name))
         try:
             shutil.copy2(cached, node.abspath(env))
             # touch the cache entry so age-based cleaning keeps it
             os.utime(cached, None)
         except (OSError, IOError):
             debug('task: failed retrieving file')
             return None
     # all files restored: record the signatures and report it
     for node in self.outputs:
         self.generator.bld.node_sigs[variant][node.id] = sig
         self.generator.bld.printout('restoring from cache %r\n' %
                                     node.bldpath(env))
     return 1
Пример #25
0
    def start(self, node, env):
        """Preprocess *node*: parse it (and its includes) line by line,
        tracking the conditional-compilation state."""
        debug('preproc: scanning %s (in %s)' % (node.name, node.parent.name))

        self.env = env
        variant = node.variant(env)
        bld = node.__class__.bld
        # share one parse cache across all scans of this build context
        try:
            self.parse_cache = bld.parse_cache
        except AttributeError:
            bld.parse_cache = {}
            self.parse_cache = bld.parse_cache

        self.addlines(node)
        # command-line defines are processed before the file contents
        if env['DEFLINES']:
            self.lines = [('define', x) for x in env['DEFLINES']] + self.lines

        while self.lines:
            (kind, line) = self.lines.pop(0)
            # POPFILE marks the end of an included file
            if kind == POPFILE:
                self.currentnode_stack.pop()
                continue
            try:
                self.process_line(kind, line)
            except Exception, e:
                # parsing errors are not fatal; log them in verbose mode
                if Logs.verbose:
                    debug('preproc: line parsing failed (%s): %s %s' %
                          (e, line, Utils.ex_stack()))
Пример #26
0
	def can_retrieve_cache(self):
		"""Try to restore this task's outputs from the global cache dir.

		Each cache file is touched on use so least-recently-used entries
		can be cleaned out; be careful when overriding. Returns 1 on
		success, None when caching is off or a file cannot be copied."""
		if not Options.cache_global:
			return None
		if Options.options.nocache:
			return None
		if not self.outputs:
			return None

		env = self.env
		sig = self.signature()

		for cnt, node in enumerate(self.outputs):
			variant = node.variant(env)
			ssig = sig.encode('hex')
			cached = os.path.join(Options.cache_global, '%s_%d_%s' % (ssig, cnt, node.name))
			try:
				shutil.copy2(cached, node.abspath(env))
				# mark the cache file as used recently (modified)
				os.utime(cached, None)
			except (OSError, IOError):
				debug('task: failed retrieving file')
				return None

		# every file was restored: record the signatures and report it
		for node in self.outputs:
			self.generator.bld.node_sigs[variant][node.id] = sig
			self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))

		return 1
Пример #27
0
	def sig_implicit_deps(self):
		"""Return the signature of the implicit dependencies; computed in
		two steps because the files may have changed since the last run."""

		bld = self.generator.bld

		# reuse the signature from a previous run when still valid (issue #379)
		key = self.unique_id()
		prev_sigs = bld.task_sigs.get(key, ())
		if prev_sigs:
			try:
				if prev_sigs[2] == self.compute_sig_implicit_deps():
					return prev_sigs[2]
			except (KeyError, OSError):
				pass

		# first run, or the dependencies changed: rescan them
		nodes, names = self.scan()
		if Logs.verbose:
			debug('deps: scanner for %s returned %s %s' % (str(self), str(nodes), str(names)))

		# cache the scan results, then recompute the signature
		bld.node_deps[key] = nodes
		bld.raw_deps[key] = names
		return self.compute_sig_implicit_deps()
Пример #28
0
def exec_mf(self):
	"""Embed the manifest (the last output node) into the binary with
	mt.exe; returns its exit status, or 0 when no manifest tool is set."""
	env = self.env
	if not env['MT']:
		return 0

	self.do_manifest = False

	outfile = self.outputs[0].bldpath(env)
	manifest = self.outputs[-1].bldpath(env)

	# embedding mode. Different for EXE's and DLL's.
	# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
	if 'cprogram' in self.generator.features:
		mode = '1'
	elif 'cshlib' in self.generator.features:
		mode = '2'
	else:
		mode = ''

	debug('msvc: embedding manifest')

	cmd = []
	cmd.extend(Utils.to_list(env['MT']))
	cmd.extend(Utils.to_list(env['MTFLAGS']))
	cmd.extend(Utils.to_list("-manifest"))
	cmd.extend(Utils.to_list(manifest))
	cmd.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))

	# the command is passed as a single argument list (no shell)
	return self.exec_command(*[cmd])
Пример #29
0
	def listdir_src(self,parent_node):
		"""Refresh the signatures of the files under *parent_node* and
		remove nodes whose files have disappeared from disk."""
		parent_path=parent_node.abspath()
		try:
			on_disk=set(Utils.listdir(parent_path))
		except OSError:
			# the folder itself vanished: tolerate this only when no
			# source FILE node would be lost
			if not parent_node.childs:
				raise
			for x in parent_node.childs.values():
				if x.id&3==Node.FILE:
					raise
			on_disk=set([])
		self.cache_dir_contents[parent_node.id]=on_disk
		debug('build: folder contents %r'%on_disk)
		known=set([x.name for x in parent_node.childs.values() if x.id&3 in (Node.FILE,Node.DIR)])
		cache=self.node_sigs[0]
		# entries still present on disk: refresh the file signatures
		for name in on_disk&known:
			node=parent_node.childs[name]
			if node.id&3==Node.DIR:
				continue
			try:
				cache[node.id]=Utils.h_file(parent_path+os.sep+node.name)
			except IOError:
				raise Utils.WafError("The file %s is not readable or has become a dir"%node.abspath())
		# entries gone from disk: remove them, but keep directory nodes
		# that contain no FILE children
		for name in known-on_disk:
			nd=parent_node.childs[name]
			if nd.id&3==Node.DIR and not any(x.id&3==Node.FILE for x in nd.childs.values()):
				continue
			self.remove_node(nd)
Пример #30
0
    def compile(self):
        """Run the build: flush pending task generators, then drive the
        parallel task executor from inside the build directory."""
        debug('build: compile called')
        self.flush()
        self.generator = Runner.Parallel(self, Options.options.jobs)

        def toggle_cursor(on=True):
            # hide/show the terminal cursor while the progress bar runs
            if Options.options.progress_bar:
                if on:
                    sys.stderr.write(Logs.colors.cursor_on)
                else:
                    sys.stderr.write(Logs.colors.cursor_off)

        debug('build: executor starting')
        back = os.getcwd()
        os.chdir(self.bldnode.abspath())
        try:
            try:
                toggle_cursor(on=False)
                self.generator.start()
            except KeyboardInterrupt:
                # save progress before propagating the interrupt
                toggle_cursor()
                self.save()
                raise
            except Exception:
                toggle_cursor()
                raise
            else:
                toggle_cursor()
                self.save()
            if self.generator.error:
                raise BuildError(self, self.task_manager.tasks_done)
        finally:
            # always restore the original working directory
            os.chdir(back)
Пример #31
0
 def load(self):
     """Restore the build state: tool setup from the cache directory,
     then the pickled node and signature data from the build dir."""
     try:
         cfg = Environment.Environment(os.path.join(self.cachedir, "build.config.py"))
     except (IOError, OSError):
         pass
     else:
         if cfg["version"] < HEXVERSION:
             raise Utils.WafError("Version mismatch! reconfigure the project")
         for tool in cfg["tools"]:
             self.setup(**tool)
     try:
         # unpickling many small objects is much faster with gc off
         gc.disable()
         f = data = None
         Node.Nodu = self.node_class
         try:
             f = open(os.path.join(self.bdir, DBFILE), "rb")
         except (IOError, EOFError):
             pass
         try:
             if f:
                 data = cPickle.load(f)
         except AttributeError:
             # incompatible cache contents; ignore unless verbose debugging
             if Logs.verbose > 1:
                 raise
         if data:
             for attr in SAVED_ATTRS:
                 setattr(self, attr, data[attr])
         else:
             debug("build: Build cache loading failed")
     finally:
         if f:
             f.close()
         gc.enable()
Пример #32
0
def exec_mf(self):
	"""Embed the generated .manifest (if among the outputs) into the
	binary via mt.exe; returns its exit status, or 0 when skipped."""
	env=self.env
	if not env['MT']:
		return 0
	self.do_manifest=False
	outfile=self.outputs[0].bldpath(env)
	# locate the manifest node among the outputs
	manifest=None
	for node in self.outputs:
		if node.name.endswith('.manifest'):
			manifest=node.bldpath(env)
			break
	if manifest is None:
		return 0
	# embedding mode: 1 = executable, 2 = shared library
	if 'cprogram' in self.generator.features:
		mode='1'
	elif 'cshlib' in self.generator.features:
		mode='2'
	else:
		mode=''
	debug('msvc: embedding manifest')
	cmd=[env['MT']]
	cmd.extend(Utils.to_list(env['MTFLAGS']))
	cmd.extend(Utils.to_list("-manifest"))
	cmd.extend(Utils.to_list(manifest))
	cmd.extend(Utils.to_list("-outputresource:%s;%s"%(outfile,mode)))
	return self.exec_command(*[cmd])
Пример #33
0
def check_python_version(conf, minver=None):
    """Check the version of the configured Python interpreter.

    Runs conf.env['PYTHON'] to read its sys.version_info, stores
    PYTHON_VERSION and PYTHONDIR in conf.env, and fails the configuration
    when *minver* (a version tuple) is not satisfied.
    """
    assert minver is None or isinstance(minver, tuple)
    python = conf.env['PYTHON']
    assert python, ("python is %r !" % (python, ))
    # ask the interpreter itself: one component of sys.version_info per line
    cmd = [
        python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"
    ]
    debug('python: Running python command %r' % cmd)
    proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
    lines = proc.communicate()[0].split()
    assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines),
                                                                lines)
    # (major, minor, micro, releaselevel, serial)
    pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3],
                   int(lines[4]))
    result = (minver is None) or (pyver_tuple >= minver)
    if result:
        pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
        conf.env['PYTHON_VERSION'] = pyver
        # an explicit PYTHONDIR in the environment takes precedence
        if 'PYTHONDIR' in conf.environ:
            pydir = conf.environ['PYTHONDIR']
        else:
            # otherwise query distutils for the site-packages path under PREFIX
            if sys.platform == 'win32':
                (
                    python_LIBDEST, pydir
                ) = _get_python_variables(python, [
                    "get_config_var('LIBDEST')",
                    "get_python_lib(standard_lib=0, prefix=%r)" %
                    conf.env['PREFIX']
                ], [
                    'from distutils.sysconfig import get_config_var, get_python_lib'
                ])
            else:
                python_LIBDEST = None
                (pydir, ) = _get_python_variables(python, [
                    "get_python_lib(standard_lib=0, prefix=%r)" %
                    conf.env['PREFIX']
                ], [
                    'from distutils.sysconfig import get_config_var, get_python_lib'
                ])
            if python_LIBDEST is None:
                # fall back to <libdir>/pythonX.Y when distutils gave no LIBDEST
                if conf.env['LIBDIR']:
                    python_LIBDEST = os.path.join(conf.env['LIBDIR'],
                                                  "python" + pyver)
                else:
                    python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib",
                                                  "python" + pyver)
        if hasattr(conf, 'define'):
            conf.define('PYTHONDIR', pydir)
        conf.env['PYTHONDIR'] = pydir
    pyver_full = '.'.join(map(str, pyver_tuple[:3]))
    if minver is None:
        conf.check_message_custom('Python version', '', pyver_full)
    else:
        minver_str = '.'.join(map(str, minver))
        conf.check_message('Python version',
                           ">= %s" % (minver_str, ),
                           result,
                           option=pyver_full)
    if not result:
        conf.fatal("Python too old.")
Пример #34
0
 def can_retrieve_cache(self):
     """Restore this task's outputs from the global cache; return 1 on success,
     None when caching is disabled or the entry is missing/corrupt."""
     if not Options.cache_global or Options.options.nocache or not self.outputs:
         return None
     env = self.env
     sig = self.signature()
     ssig = sig.encode('hex')
     # the cache entry is a folder named after the task signature
     dname = os.path.join(Options.cache_global, ssig)
     try:
         t1 = os.stat(dname).st_mtime
     except OSError:
         return None
     for node in self.outputs:
         variant = node.variant(env)
         orig = os.path.join(dname, node.name)
         try:
             shutil.copy2(orig, node.abspath(env))
             # touch the cached file so LRU cleanup keeps recently used entries
             os.utime(orig, None)
         except (OSError, IOError):
             debug('task: failed retrieving file')
             return None
     # if the folder mtime changed during the copy, another process may have
     # rewritten the entry: treat the copied data as corrupt
     try:
         t2 = os.stat(dname).st_mtime
     except OSError:
         return None
     if t1 != t2:
         return None
     for node in self.outputs:
         self.generator.bld.node_sigs[variant][node.id] = sig
         self.generator.bld.printout('restoring from cache %r\n' %
                                     node.bldpath(env))
     self.cached = True
     return 1
Пример #35
0
def compile_fun_shell(name, line):
    """Compile a ${VAR}-style rule string into a function that runs the
    command through a shell; return (function, list of env vars it reads)."""
    extr = []

    def repl(match):
        # substitute rule placeholders: $$ -> $, backslash escapes, ${..} -> %s
        g = match.group
        if g('dollar'): return "$"
        elif g('backslash'): return '\\\\'
        elif g('subst'):
            extr.append((g('var'), g('code')))
            return "%s"
        return None

    line = reg_act.sub(repl, line)
    # build the python expressions that fill each %s placeholder at run time
    parm = []
    dvars = []
    app = parm.append
    for (var, meth) in extr:
        if var == 'SRC':
            if meth: app('task.inputs%s' % meth)
            else: app('" ".join([a.srcpath(env) for a in task.inputs])')
        elif var == 'TGT':
            if meth: app('task.outputs%s' % meth)
            else: app('" ".join([a.bldpath(env) for a in task.outputs])')
        else:
            # plain variables are read from the task environment when executed
            if not var in dvars: dvars.append(var)
            app("p('%s')" % var)
    if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
    else: parm = ''
    c = COMPILE_TEMPLATE_SHELL % (line, parm)
    debug('action: %s', c)
    return (funex(c), dvars)
Пример #36
0
    def install(self):
        "this function is called for both install and uninstall"
        debug('build: install called')

        self.flush()

        # after an uninstall, prune the directories left empty
        if self.is_install < 0:
            # directories that contained the removed files
            dirs = []
            for path in self.uninstall:
                parent = os.path.dirname(path)
                if parent not in dirs:
                    dirs.append(parent)
            dirs.sort()
            dirs.reverse()

            # add every ancestor directory (very short paths are left alone)
            candidates = []
            for base in dirs:
                cur = base
                while len(cur) > 4:
                    if cur not in candidates:
                        candidates.append(cur)
                    cur = os.path.dirname(cur)

            # deepest first, so children go before their parents
            candidates.sort()
            candidates.reverse()
            for folder in candidates:
                try:
                    os.rmdir(folder)
                except OSError:
                    pass
Пример #37
0
    def clean(self):
        """Remove the build files, except those generated at configuration
        time (CFG_FILES), then reset the dependency caches."""
        debug('build: clean called')
        # configuration-generated files must survive the clean
        precious = set([])
        for env in self.all_envs.values():
            for x in env[CFG_FILES]:
                node = self.srcnode.find_resource(x)
                if node:
                    precious.add(node.id)

        def clean_rec(node):
            # depth-first walk removing BUILD nodes and their files
            for x in list(node.childs.keys()):
                nd = node.childs[x]
                tp = nd.id & 3
                if tp == Node.DIR:
                    clean_rec(nd)
                elif tp == Node.BUILD:
                    if nd.id in precious: continue
                    for env in self.all_envs.values():
                        try:
                            os.remove(nd.abspath(env))
                        except OSError:
                            pass
                    node.childs.__delitem__(x)

        clean_rec(self.srcnode)
        # drop the signature/dependency caches so the next build starts fresh
        for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split(
        ):
            setattr(self, v, {})
Пример #38
0
    def compile(self):
        """Run the build: post the task generators and execute the tasks in
        parallel, saving the build state even on interruption."""
        debug("build: compile called")
        self.flush()
        self.generator = Runner.Parallel(self, Options.options.jobs)

        def dw(on=True):
            # toggle the terminal cursor when the progress bar is active
            if Options.options.progress_bar:
                if on:
                    sys.stderr.write(Logs.colors.cursor_on)
                else:
                    sys.stderr.write(Logs.colors.cursor_off)

        debug("build: executor starting")
        # tasks run with the build directory as the working directory
        back = os.getcwd()
        os.chdir(self.bldnode.abspath())
        try:
            try:
                dw(on=False)
                self.generator.start()
            except KeyboardInterrupt:
                dw()
                # persist what was built so far, but only if workers actually ran
                if Runner.TaskConsumer.consumers:
                    self.save()
                raise
            except Exception:
                dw()
                raise
            else:
                dw()
                if Runner.TaskConsumer.consumers:
                    self.save()
            if self.generator.error:
                raise BuildError(self, self.task_manager.tasks_done)
        finally:
            os.chdir(back)
Пример #39
0
 def load_dirs(self, srcdir, blddir, load_cache=1):
     """Set up the source/build root nodes; *srcdir* and *blddir* must be
     absolute paths and must differ.  Loads the build cache as a side effect."""
     assert (os.path.isabs(srcdir))
     assert (os.path.isabs(blddir))
     self.cachedir = os.path.join(blddir, CACHE_DIR)
     if srcdir == blddir:
         raise Utils.WafError(
             "build dir must be different from srcdir: %s <-> %s " %
             (srcdir, blddir))
     self.bdir = blddir
     # try to load the cache file; if it does not exist, nothing happens
     self.load()
     if not self.root:
         Node.Nodu = self.node_class
         self.root = Node.Nodu('', None, Node.DIR)
     if not self.srcnode:
         self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
     # pass format arguments lazily so the message is only built when debug
     # logging is enabled (consistent with the other debug() call sites)
     debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
     self.path = self.srcnode
     # create the build dir if necessary
     try:
         os.makedirs(blddir)
     except OSError:
         pass
     if not self.bldnode:
         self.bldnode = self.root.ensure_dir_node_from_path(blddir)
     self.init_variants()
Пример #40
0
	def load_dirs(self, srcdir, blddir, load_cache=1):
		"""Set up the source and build directory nodes; this should be the
		start of everything.  Both paths must be absolute and must differ."""

		assert(os.path.isabs(srcdir))
		assert(os.path.isabs(blddir))

		self.cachedir = os.path.join(blddir, CACHE_DIR)

		if srcdir == blddir:
			raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))

		self.bdir = blddir

		# try to load the cache file, if it does not exist, nothing happens
		self.load()

		if not self.root:
			# the file system root node, shared by all builds
			Node.Nodu = self.node_class
			self.root = Node.Nodu('', None, Node.DIR)

		if not self.srcnode:
			self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
		debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)

		self.path = self.srcnode

		# create this build dir if necessary
		try: os.makedirs(blddir)
		except OSError: pass

		if not self.bldnode:
			self.bldnode = self.root.ensure_dir_node_from_path(blddir)

		self.init_variants()
Пример #41
0
	def process_line(self, token, line):
		"""
		Process one preprocessor directive: update the if/else state stack,
		record #define macros, and queue files brought in by #include.

		WARNING: a new state must be added for if* because the endif
		"""
		ve = Logs.verbose
		if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
		state = self.state

		# make certain we define the state if we are about to enter in an if block
		if token in ['ifdef', 'ifndef', 'if']:
			state.append(undefined)
		elif token == 'endif':
			state.pop()

		# skip lines when in a dead 'if' branch, wait for the endif
		if not token in ['else', 'elif', 'endif']:
			if skipped in self.state or ignored in self.state:
				return

		if token == 'if':
			ret = eval_macro(tokenize(line), self.defs)
			if ret: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifdef':
			m = re_mac.match(line)
			if m and m.group(0) in self.defs: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifndef':
			m = re_mac.match(line)
			if m and m.group(0) in self.defs: state[-1] = ignored
			else: state[-1] = accepted
		elif token == 'include' or token == 'import':
			(kind, inc) = extract_include(line, self.defs)
			if inc in self.ban_includes: return
			if token == 'import': self.ban_includes.add(inc)
			if ve: debug('preproc: include found %s    (%s) ', inc, kind)
			if kind == '"' or not strict_quotes:
				self.tryfind(inc)
		elif token == 'elif':
			if state[-1] == accepted:
				state[-1] = skipped
			elif state[-1] == ignored:
				if eval_macro(tokenize(line), self.defs):
					state[-1] = accepted
		elif token == 'else':
			if state[-1] == accepted: state[-1] = skipped
			elif state[-1] == ignored: state[-1] = accepted
		elif token == 'define':
			try:
				self.defs[define_name(line)] = line
			except Exception:
				# narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
				# are not swallowed and turned into a PreprocError
				raise PreprocError("invalid define line %s" % line)
		elif token == 'undef':
			m = re_mac.match(line)
			if m and m.group(0) in self.defs:
				self.defs.__delitem__(m.group(0))
				#print "undef %s" % name
		elif token == 'pragma':
			if re_pragma_once.match(line.lower()):
				self.ban_includes.add(self.curfile)
Пример #42
0
Файл: d.py Проект: NKSG/ns3
def scan(self):
	"""Scan a D source file for imports; return (nodes found, unresolved names)."""
	env = self.env
	parser = d_parser(env, env['INC_PATHS'])
	parser.start(self.inputs[0])
	if Logs.verbose:
		debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(parser.nodes), str(parser.names)))
	return (parser.nodes, parser.names)
Пример #43
0
	def install(self):
		"this function is called for both install and uninstall"
		debug('build: install called')

		self.flush()

		# remove empty folders after uninstalling
		if self.is_install < 0:
			# directories that contained the uninstalled files
			lst = []
			for x in self.uninstall:
				dir = os.path.dirname(x)
				if not dir in lst: lst.append(dir)
			lst.sort()
			lst.reverse()

			# add every ancestor directory (paths of 4 chars or fewer are
			# assumed to be near the filesystem root and are left alone)
			nlst = []
			for y in lst:
				x = y
				while len(x) > 4:
					if not x in nlst: nlst.append(x)
					x = os.path.dirname(x)

			# deepest first, so children are removed before their parents
			nlst.sort()
			nlst.reverse()
			for x in nlst:
				try: os.rmdir(x)
				except OSError: pass
Пример #44
0
 def addlines(self, node):
     """Queue the preprocessed lines of *node*, reusing the shared parse
     cache so the same file is never filtered twice."""
     self.currentnode_stack.append(node.parent)
     filepath = node.abspath(self.env)
     self.count_files += 1
     if self.count_files > recursion_limit:
         raise PreprocError("recursion limit exceeded")
     cache = self.parse_cache
     debug('preproc: reading file %r', filepath)
     if filepath in cache:
         # cache hit: reuse the previously filtered lines
         self.lines = cache[filepath] + self.lines
         return
     try:
         fresh = filter_comments(filepath)
         fresh.append((POPFILE, ''))
         cache[filepath] = fresh
         self.lines = fresh + self.lines
     except IOError:
         raise PreprocError("could not read the file %s" % filepath)
     except Exception:
         # tolerate parse failures; report them only in verbose mode
         if Logs.verbose > 0:
             error("parsing %s failed" % filepath)
             traceback.print_exc()
Пример #45
0
 def can_retrieve_cache(self):
     """Try to restore the task outputs from the global cache; return 1 on
     success, None when disabled or when the entry is missing/corrupt."""
     if not Options.cache_global or Options.options.nocache or not self.outputs:
         return None
     env = self.env
     sig = self.signature()
     # the cache entry is a folder named after the hex task signature
     dname = os.path.join(Options.cache_global, sig.encode("hex"))
     try:
         before = os.stat(dname).st_mtime
     except OSError:
         return None
     for node in self.outputs:
         variant = node.variant(env)
         source = os.path.join(dname, node.name)
         try:
             shutil.copy2(source, node.abspath(env))
             # touch the cached file so LRU cleanup keeps recent entries
             os.utime(source, None)
         except (OSError, IOError):
             debug("task: failed retrieving file")
             return None
     # a changed folder mtime means another process rewrote the entry
     # while we were copying: treat the data as corrupt
     try:
         after = os.stat(dname).st_mtime
     except OSError:
         return None
     if before != after:
         return None
     for node in self.outputs:
         self.generator.bld.node_sigs[variant][node.id] = sig
         self.generator.bld.printout("restoring from cache %r\n" % node.bldpath(env))
     self.cached = True
     return 1
Пример #46
0
	def sig_implicit_deps(self):
		"""Return the signature of the implicit (scanned) dependencies.

		Reuses the signature stored in the build cache when it is still
		valid; otherwise drops the stale entry and raises ValueError so the
		caller rescans.  Raises Utils.WafError when a dependency node has
		no signature.
		"""
		bld=self.generator.bld
		key=self.unique_id()
		prev_sigs=bld.task_sigs.get(key,())
		if prev_sigs:
			try:
				# if the stored implicit-deps signature is still valid, keep it
				if prev_sigs[2]==self.compute_sig_implicit_deps():
					return prev_sigs[2]
			except(KeyError,OSError):
				pass
			# stale cache entry: remove it and ask the caller to rescan
			del bld.task_sigs[key]
			raise ValueError('rescan')
		(nodes,names)=self.scan()
		if Logs.verbose:
			debug('deps: scanner for %s returned %s %s',str(self),str(nodes),str(names))
		bld.node_deps[key]=nodes
		bld.raw_deps[key]=names
		try:
			sig=self.compute_sig_implicit_deps()
		except KeyError:
			# a dependency node has no signature: collect the culprits for the error
			try:
				nodes=[]
				for k in bld.node_deps.get(self.unique_id(),[]):
					if k.id&3==2:
						if not k.id in bld.node_sigs[0]:
							nodes.append(k)
					else:
						if not k.id in bld.node_sigs[self.env.variant()]:
							nodes.append(k)
			except Exception:
				# narrowed from a bare 'except:' so KeyboardInterrupt is not swallowed
				nodes='?'
			raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)'%(nodes,self))
		return sig
Пример #47
0
	def addlines(self, node):
		"""Queue the preprocessed lines of *node* for parsing, using the
		shared parse cache to avoid filtering the same file twice."""

		self.currentnode_stack.append(node.parent)
		filepath = node.abspath(self.env)

		self.count_files += 1
		if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
		pc = self.parse_cache
		debug('preproc: reading file %r', filepath)
		try:
			lns = pc[filepath]
		except KeyError:
			pass
		else:
			# cache hit: reuse the previously filtered lines
			self.lines.extend(lns)
			return

		try:
			lines = filter_comments(filepath)
			lines.append((POPFILE, ''))
			# reversed so lines can be consumed with pop() from the end
			lines.reverse()
			pc[filepath] = lines # cache the lines filtered
			self.lines.extend(lines)
		except IOError:
			raise PreprocError("could not read the file %s" % filepath)
		except Exception:
			# tolerate parse failures; report them only in verbose mode
			if Logs.verbose > 0:
				error("parsing %s failed" % filepath)
				traceback.print_exc()
def libname_msvc(self,libname,is_static=False,mandatory=False):
	"""Map a library name to the name the MSVC linker expects.

	System libraries and libtool-described static libraries are resolved
	specially; otherwise the LIBPATH directories are searched for the usual
	static/import library file name patterns.  The '.lib' suffix is
	stripped from the returned name.  With mandatory=True a missing
	library aborts the configuration.
	"""
	lib=libname.lower()
	# raw strings: '\.' is an invalid escape in a plain string literal
	lib=re.sub(r'\.lib$','',lib)
	if lib in g_msvc_systemlibs:
		return lib
	lib=re.sub(r'^lib','',lib)
	if lib=='m':
		# the math library is part of the MS C runtime
		return None
	(lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
	if lt_path!=None and lt_libname!=None:
		if lt_static==True:
			return os.path.join(lt_path,lt_libname)
	if lt_path!=None:
		_libpaths=[lt_path]+self.env['LIBPATH']
	else:
		_libpaths=self.env['LIBPATH']
	static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
	dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
	libnames=static_libs
	if not is_static:
		# prefer import libraries for a dynamic link, fall back to static names
		libnames=dynamic_libs+static_libs
	for path in _libpaths:
		for libn in libnames:
			if os.path.exists(os.path.join(path,libn)):
				debug('msvc: lib found: %s'%os.path.join(path,libn))
				return re.sub(r'\.lib$','',libn)
	if mandatory:
		self.fatal("The library %r could not be found"%libname)
	return re.sub(r'\.lib$','',libname)
Пример #49
0
	def start(self, node, env):
		"""Preprocess *node*: feed its lines (plus the DEFLINES macros)
		through process_line() until all queued lines are consumed."""
		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

		self.env = env
		variant = node.variant(env)
		bld = node.__class__.bld
		# the parse cache is shared by all scans of one build
		try:
			self.parse_cache = bld.parse_cache
		except AttributeError:
			bld.parse_cache = {}
			self.parse_cache = bld.parse_cache

		self.addlines(node)
		if env['DEFLINES']:
			# command-line macros are queued last so they are processed first
			lst = [('define', x) for x in env['DEFLINES']]
			lst.reverse()
			self.lines.extend(lst)

		while self.lines:
			(kind, line) = self.lines.pop()
			if kind == POPFILE:
				# end of an included file: return to the parent directory
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind, line)
			except Exception, e:
				# parsing failures are tolerated; log them in verbose mode
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
Пример #50
0
 def runnable_status(self):
     """Decide whether the task must run: ASK_LATER while predecessors are
     pending, RUN_ME when signatures or outputs are stale, else SKIP_ME."""
     if self.inputs and (not self.outputs):
         if not getattr(self.__class__, 'quiet', None):
             warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
     # wait until every task we depend on has run
     for dep in self.run_after:
         if not dep.hasrun:
             return ASK_LATER
     env = self.env
     bld = self.generator.bld
     new_sig = self.signature()
     key = self.unique_id()
     try:
         prev_sig = bld.task_sigs[key][0]
     except KeyError:
         debug("task: task %r must run as it was never run before or the task code changed", self)
         return RUN_ME
     # any missing or stale output forces a rebuild
     for node in self.outputs:
         variant = node.variant(env)
         try:
             if bld.node_sigs[variant][node.id] != new_sig:
                 return RUN_ME
         except KeyError:
             debug("task: task %r must run as the output nodes do not exist", self)
             return RUN_ME
     if Logs.verbose:
         self.debug_why(bld.task_sigs[key])
     return RUN_ME if new_sig != prev_sig else SKIP_ME
Пример #51
0
    def clean(self):
        """Remove build files, keeping the configuration-generated ones,
        then reset the dependency caches."""
        debug("build: clean called")
        # nodes listed in CFG_FILES must survive the clean
        keep = set()
        for env in self.all_envs.values():
            for cfg in env[CFG_FILES]:
                node = self.srcnode.find_resource(cfg)
                if node:
                    keep.add(node.id)

        def recurse(node):
            # depth-first walk removing BUILD nodes and their files
            for name in list(node.childs.keys()):
                child = node.childs[name]
                kind = child.id & 3
                if kind == Node.DIR:
                    recurse(child)
                elif kind == Node.BUILD and child.id not in keep:
                    for env in self.all_envs.values():
                        try:
                            os.remove(child.abspath(env))
                        except OSError:
                            pass
                    del node.childs[name]

        recurse(self.srcnode)
        # reset the signature/dependency caches
        for attr in "node_sigs node_deps task_sigs raw_deps cache_node_abspath".split():
            setattr(self, attr, {})
Пример #52
0
def compile_fun_noshell(name, line):
    """Compile a ${VAR}-style rule string into a function that executes the
    command as an argument list (no shell); return (function, env vars used)."""
    extr = []

    def repl(match):
        # replace each ${..} with a marker so the line can be split on it
        g = match.group
        if g('dollar'): return "$"
        elif g('subst'):
            extr.append((g('var'), g('code')))
            return "<<|@|>>"
        return None

    line2 = reg_act.sub(repl, line)
    params = line2.split('<<|@|>>')
    # generate code appending the literal words and the substituted values
    buf = []
    dvars = []
    app = buf.append
    for x in range(len(extr)):
        params[x] = params[x].strip()
        if params[x]:
            app("lst.extend(%r)" % params[x].split())
        (var, meth) = extr[x]
        if var == 'SRC':
            if meth: app('lst.append(task.inputs%s)' % meth)
            else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
        elif var == 'TGT':
            if meth: app('lst.append(task.outputs%s)' % meth)
            else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
        else:
            app('lst.extend(to_list(env[%r]))' % var)
            if not var in dvars: dvars.append(var)
    if params[-1]:
        # trailing literal text after the last substitution
        app("lst.extend(%r)" % shlex.split(params[-1]))
    fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
    debug('action: %s', fun)
    return (funex(fun), dvars)
Пример #53
0
	def apply(self):
		"""order the methods to execute using self.prec or task_gen.prec

		Collects the methods bound to the requested features, topologically
		sorts them with the precedence constraints, then calls each in turn.
		"""
		keys = set(self.meths)

		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = task_gen.traits[x]
			if not st:
				warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(st)

		# copy the precedence table, keeping only the methods selected above
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected: methods with no predecessor in the graph
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)

		# topological sort over the precedence edges
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys: out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)

		# a non-empty remaining table means a dependency cycle
		if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
		out.reverse()
		self.meths = out

		# then we run the methods in order
		debug('task_gen: posting %s %d', self, id(self))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
			debug('task_gen: -> %s (%d)', x, id(self))
			v()
Пример #54
0
def build_direct_deps(bld, tgt_list):
    '''build the direct_objects and direct_libs sets for each target'''

    targets  = LOCAL_CACHE(bld, 'TARGET_TYPE')
    syslib_deps  = LOCAL_CACHE(bld, 'SYSLIB_DEPS')

    global_deps = bld.env.GLOBAL_DEPENDENCIES
    global_deps_exclude = set()
    for dep in global_deps:
        t = bld.get_tgen_by_name(dep)
        for d in t.samba_deps:
            # prevent loops from the global dependencies list
            global_deps_exclude.add(d)
            global_deps_exclude.add(d + '.objlist')

    for t in tgt_list:
        # classify every declared dependency into objects/libs/syslibs
        t.direct_objects = set()
        t.direct_libs = set()
        t.direct_syslibs = set()
        deps = t.samba_deps_extended[:]
        if getattr(t, 'samba_use_global_deps', False) and not t.sname in global_deps_exclude:
            deps.extend(global_deps)
        for d in deps:
            # self-dependencies are silently ignored
            if d == t.sname: continue
            if not d in targets:
                Logs.error("Unknown dependency '%s' in '%s'" % (d, t.sname))
                sys.exit(1)
            if targets[d] in [ 'EMPTY', 'DISABLED' ]:
                continue
            if targets[d] == 'PYTHON' and targets[t.sname] != 'PYTHON' and t.sname.find('.objlist') == -1:
                # this check should be more restrictive, but for now we have pidl-generated python
                # code that directly depends on other python modules
                Logs.error('ERROR: Target %s has dependency on python module %s' % (t.sname, d))
                sys.exit(1)
            if targets[d] == 'SYSLIB':
                t.direct_syslibs.add(d)
                if d in syslib_deps:
                    # a system library may imply further objects/libs to pull in
                    for implied in TO_LIST(syslib_deps[d]):
                        if BUILTIN_LIBRARY(bld, implied):
                            t.direct_objects.add(implied)
                        elif targets[implied] == 'SYSLIB':
                            t.direct_syslibs.add(implied)
                        elif targets[implied] in ['LIBRARY', 'MODULE']:
                            t.direct_libs.add(implied)
                        else:
                            Logs.error('Implied dependency %s in %s is of type %s' % (
                                implied, t.sname, targets[implied]))
                            sys.exit(1)
                continue
            t2 = bld.get_tgen_by_name(d)
            if t2 is None:
                Logs.error("no task %s of type %s in %s" % (d, targets[d], t.sname))
                sys.exit(1)
            if t2.samba_type in [ 'LIBRARY', 'MODULE' ]:
                t.direct_libs.add(d)
            elif t2.samba_type in [ 'SUBSYSTEM', 'ASN1', 'PYTHON' ]:
                t.direct_objects.add(d)
    debug('deps: built direct dependencies')
Пример #55
0
def flush(self):
	"""Replacement for the flush method: when --bf is given, post only what
	is needed to produce the named build files and mark every unrelated
	task as skipped; otherwise defer to the original implementation."""
	if not Options.options.bf:
		return old(self)

	# the target file names requested on the command line
	lst = Options.options.bf.split(',')

	self.ini = datetime.datetime.now()
	# force the initialization of the mapping name->object in flush
	# name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
	self.task_gen_cache_names = {}
	self.name_to_obj('', self.env)

	debug('build: delayed operation TaskGen.flush() called')

	ln = self.srcnode

	# post every task generator below the source root
	for i in xrange(len(self.task_manager.groups)):
		g = self.task_manager.groups[i]
		self.task_manager.current_group = i
		for tg in g.tasks_gen:
			if not tg.path.is_child_of(ln):
				continue
			tg.post()

	# find the nodes corresponding to the names given
	nodes = []
	alltasks = []
	for i in xrange(len(self.task_manager.groups)):
		g = self.task_manager.groups[i]
		self.task_manager.current_group = i
		for t in g.tasks:
			alltasks.append(t)
			for k in t.outputs:
				if k.name in lst:
					nodes.append(k)
					break

	# and now we must perform a search over all tasks to find what might generate the nodes from the above
	while True:
		newnodes = []
		skipped = []
		for t in alltasks:
			for x in nodes:
				if x in t.outputs:
					newnodes.extend(t.inputs)
					break
			else:
				skipped.append(t)
		alltasks = skipped

		if newnodes:
			nodes = nodes + newnodes
		else:
			break

	# the tasks that need not be executed remain
	for x in alltasks:
		x.hasrun = Constants.SKIPPED
Пример #56
0
    def can_retrieve_cache(self):
        """
		Retrieve build nodes from the cache
		update the file timestamps to help cleaning the least used entries from the cache
		additionally, set an attribute 'cached' to avoid re-creating the same cache files

		suppose there are files in cache/dir1/file1 and cache/dir2/file2
		first, read the timestamp of dir1
		then try to copy the files
		then look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
		should an exception occur, ignore the data
		"""
        if not Options.cache_global or Options.options.nocache or not self.outputs:
            return None

        env = self.env
        sig = self.signature()
        ssig = sig.encode('hex')

        # first try to access the cache folder for the task
        dname = os.path.join(Options.cache_global, ssig)
        try:
            t1 = os.stat(dname).st_mtime
        except OSError:
            return None

        # cached files carry their output index as a prefix to avoid name clashes
        i = 0
        for node in self.outputs:
            variant = node.variant(env)

            orig = os.path.join(dname, str(i) + node.name)
            try:
                shutil.copy2(orig, node.abspath(env))
                # mark the cache file as used recently (modified)
                os.utime(orig, None)
            except (OSError, IOError):
                debug('task: failed retrieving file')
                return None
            i += 1

        # is it the same folder?
        try:
            t2 = os.stat(dname).st_mtime
        except OSError:
            return None

        if t1 != t2:
            return None

        for node in self.outputs:
            self.generator.bld.node_sigs[variant][node.id] = sig
            if Options.options.progress_bar < 1:
                self.generator.bld.printout('restoring from cache %r\n' %
                                            node.bldpath(env))

        self.cached = True
        return 1
Пример #57
0
def get_msvc_version(conf, compiler, version, target, vcvars):
    """Run the vcvars batch file of a Visual Studio installation and collect
    the PATH/INCLUDE/LIB values it sets.

    Returns (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) for the requested
    compiler/version/target, or calls conf.fatal() when the environment
    cannot be set up or the compiler cannot be executed.
    """
    debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
    # write a throwaway batch file that calls vcvars and echoes the result
    batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
    f = open(batfile, 'w')
    try:
        f.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
""" % (vcvars, target))
    finally:
        # always release the file handle, even if the write fails
        f.close()
    sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
    lines = sout.splitlines()
    # the first line tells whether vcvars accepted the target architecture
    for x in ('Setting environment', 'Setting SDK environment',
              'Intel(R) C++ Compiler'):
        if lines[0].find(x) != -1:
            break
    else:
        debug('msvc: get_msvc_version: %r %r %r -> not found', compiler,
              version, target)
        conf.fatal(
            'msvc: Impossible to find a valid architecture for building (in get_msvc_version)'
        )
    for line in lines[1:]:
        if line.startswith('PATH='):
            path = line[5:]
            MSVC_PATH = path.split(';')
        elif line.startswith('INCLUDE='):
            MSVC_INCDIR = [i for i in line[8:].split(';') if i]
        elif line.startswith('LIB='):
            MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
    env = {}
    env.update(os.environ)
    env.update(PATH=path)
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
    # an inherited CL variable would inject extra flags into the probe below;
    # 'in' replaces the Python-2-only dict.has_key()
    if 'CL' in env:
        del env['CL']
    try:
        p = pproc.Popen([cxx, '/help'],
                        env=env,
                        stdout=pproc.PIPE,
                        stderr=pproc.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            raise Exception('return code: %r: %r' % (p.returncode, err))
    except Exception as e:
        debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version,
              target)
        debug(str(e))
        conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
    else:
        debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version,
              target)
    return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
Пример #58
0
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache:
        previous = cache[target]
        # a target may only be re-declared while it is still a placeholder
        if previous != 'EMPTY':
            Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, previous))
            sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True