def execute(self):
    """Replacement for ``BuildContext.execute`` implementing autoconfiguration.

    If :py:data:`Configure.autoconfig` is enabled and the lock file is
    missing, refers to a different directory, or its recorded input files
    changed, the ``configure`` command is queued before the current command;
    otherwise the original ``execute`` runs unchanged.
    """
    if not Configure.autoconfig:
        return execute_method(self)
    env = ConfigSet.ConfigSet()
    do_config = False
    try:
        env.load(os.path.join(Context.top_dir, Options.lockfile))
    except Exception:
        # no usable lock file -> must configure
        Logs.warn("Configuring the project")
        do_config = True
    else:
        if env.run_dir != Context.run_dir:
            do_config = True
        else:
            # re-hash the configuration input files to detect changes
            # NOTE(review): builtin hash() is not stable across processes when
            # hash randomization is on — confirm env.hash is produced by the
            # same code path, otherwise this always forces a reconfigure
            h = 0
            for f in env["files"]:
                h = hash((h, Utils.readf(f, "rb")))
            do_config = h != env.hash
    if do_config:
        # run "configure" first, then retry the current command
        Options.commands.insert(0, self.cmd)
        Options.commands.insert(0, "configure")
        return
    return execute_method(self)
def setup_private_ssh_key(self):
    """
    When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory
    Make sure that the ssh key does not prompt for a password
    """
    key = os.environ.get('WAF_SSH_KEY', '')
    if not key:
        return
    if not os.path.isfile(key):
        self.fatal('Key in WAF_SSH_KEY must point to a valid file')
    self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
    self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
    self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
    self.ssh_config = os.path.join(self.ssh_dir, 'config')
    for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
        if not os.path.isfile(x):
            if not os.path.isdir(self.ssh_dir):
                os.makedirs(self.ssh_dir)
            # copy the key into the build tree and restrict its permissions (448 == 0o700)
            Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
            os.chmod(self.ssh_key, 448)
            Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
            # BUGFIX: the original chmod'ed ssh_key a second time here and
            # never restricted the known_hosts file
            os.chmod(self.ssh_hosts, 448)
            Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
            os.chmod(self.ssh_config, 448)
    self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
    self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
def load_module(path):
    """
    Load a source file as a python module.

    :param path: file path
    :type path: string
    :return: Loaded Python module
    :rtype: module
    """
    try:
        return cache_modules[path]
    except KeyError:
        pass
    module = imp.new_module(WSCRIPT_FILE)
    try:
        code = Utils.readf(path, m='rU')
    except (IOError, OSError):
        raise Errors.WafError('Could not read the file %r' % path)
    module_dir = os.path.dirname(path)
    sys.path.insert(0, module_dir)
    # Inject a dummy symbol to allow executing wscripts using Path
    code = 'def Path(path):\n pass\n' + 'def Settings(path):\n pass\n' + code
    try:
        exec(compile(code, path, 'exec'), module.__dict__)
    finally:
        # BUGFIX: always undo the sys.path change, even when the wscript raises
        sys.path.remove(module_dir)
    cache_modules[path] = module
    return module
def scan(task):
    """Scan a Go source file for import dependencies.

    :param task: the compilation task whose inputs are parsed
    :return: tuple ``(dependency nodes, None)`` as expected by the waf scanner API
    """
    try:
        incn = task.generator.includes_nodes
    except AttributeError:
        raise WafError('%r is missing a feature such as "go" or "includes": ' % task.generator)
    # filter out nodes that are not in the project directory, we don't care
    # about these
    nodepaths = [x for x in incn if x.is_child_of(x.ctx.bldnode) or x.is_child_of(x.ctx.srcnode)]
    bldnode = task.generator.bld.bldnode
    deps = []
    for input in task.inputs:
        file = Utils.readf(input.abspath())
        try:
            gp = GoParser(file)
            gp.parse()
        except ParserError:
            # NOTE(review): parse errors are swallowed and the partially
            # parsed statements are still used below; if GoParser() itself
            # raised, "gp" would be unbound — confirm this is intended
            pass
        for s in gp.statements:
            if not isinstance(s, ImportSpec):
                continue
            # TODO: global paths should be treated as local too, but
            # no one uses them?
            if s.path.startswith("./") or s.path.startswith("../"):
                node = find_local(s.path, bldnode)
            else:
                node = find_global(s.path, nodepaths)
            if node:
                deps.append(node)
    return (deps, None)
def restore(self):
    """Load the data from a previous run (Python 2 syntax variant).

    Restores the configured tools from ``build.config.py`` and the pickled
    build cache from the variant directory.
    NOTE(review): this block is truncated in the source — the body of the
    final ``for`` loop over SAVED_ATTRS is missing.
    """
    try:
        env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
    except(IOError,OSError):
        pass
    else:
        if env['version']<Context.HEXVERSION:
            raise Errors.WafError('Version mismatch! reconfigure the project')
        for t in env['tools']:
            self.setup(**t)
    dbfn=os.path.join(self.variant_dir,Context.DBFILE)
    try:
        data=Utils.readf(dbfn,'rb')
    except(IOError,EOFError):
        # a missing/empty cache simply means a full build
        Logs.debug('build: Could not load the build cache %s (missing)'%dbfn)
    else:
        try:
            # the lock serializes the Nod3 class swap used while unpickling
            waflib.Node.pickle_lock.acquire()
            waflib.Node.Nod3=self.node_class
            try:
                data=cPickle.loads(data)
            except Exception ,e:
                Logs.debug('build: Could not pickle the build cache %s: %r'%(dbfn,e))
            else:
                for x in SAVED_ATTRS:
def subst_func(tsk):
    """Substitutes variables in a .in file.

    Reads the first input, replaces each ``@name@`` occurrence with the value
    from ``tsk.dict`` (or, when the dict is empty, from the environment) and
    writes the result to the first output, applying ``tsk.chmod`` if set.
    """
    m4_re = re.compile(r'@(\w+)@', re.M)  # raw string: avoid invalid-escape warning
    env = tsk.env
    infile = tsk.inputs[0].abspath()
    outfile = tsk.outputs[0].abspath()
    code = Utils.readf(infile)
    # replace all % by %% to prevent errors by % signs in the input file while string formatting
    code = code.replace('%', '%%')
    s = m4_re.sub(r'%(\1)s', code)
    di = tsk.dict or {}
    if not di:
        names = m4_re.findall(code)
        for i in names:
            di[i] = env.get_flat(i) or env.get_flat(i.upper())
    # use a context manager so the handle is closed even on error,
    # and avoid shadowing the builtin "file"
    with open(outfile, 'w') as fp:
        fp.write(s % di)
    if tsk.chmod:
        os.chmod(outfile, tsk.chmod)
def load_module(path):
    """
    Load a source file as a python module.

    :param path: file path
    :type path: string
    :return: Loaded Python module
    :rtype: module
    """
    try:
        return cache_modules[path]
    except KeyError:
        pass
    module = imp.new_module(WSCRIPT_FILE)
    try:
        code = Utils.readf(path, m="rU")
    except (IOError, OSError):
        raise Errors.WafError("Could not read the file %r" % path)
    module_dir = os.path.dirname(path)
    sys.path.insert(0, module_dir)
    try:
        exec(compile(code, path, "exec"), module.__dict__)
    finally:
        # BUGFIX: restore sys.path even when the module body raises
        sys.path.remove(module_dir)
    cache_modules[path] = module
    return module
def load_module(file_path):
    """
    Load a Python source file containing user code.

    @type file_path: string
    @param file_path: file path
    @return: Loaded Python module
    """
    try:
        return cache_modules[file_path]
    except KeyError:
        pass
    module = imp.new_module(WSCRIPT_FILE)
    try:
        code = Utils.readf(file_path, m='rU')
    except (IOError, OSError):
        raise Errors.WafError('Could not read the file %r' % file_path)
    module_dir = os.path.dirname(file_path)
    sys.path.insert(0, module_dir)
    try:
        exec(compile(code, file_path, 'exec'), module.__dict__)
    finally:
        # BUGFIX: restore sys.path even when the module body raises
        sys.path.remove(module_dir)
    cache_modules[file_path] = module
    return module
def bibunitscan(self):
    """
    Parse the inputs and try to find the *bibunit* dependencies

    :return: list of bibunit files
    :rtype: list of :py:class:`waflib.Node.Node`
    """
    node = self.inputs[0]
    env = self.env
    nodes = []
    if not node:
        return nodes
    code = Utils.readf(node.abspath())
    for match in re_bibunit.finditer(code):
        path = match.group('file')
        if path:
            for k in ['', '.bib']:
                # add another loop for the tex include paths?
                debug('tex: trying %s%s' % (path, k))
                fi = node.parent.find_resource(path + k)
                if fi:
                    nodes.append(fi)
                # no break, people are crazy
            else:
                # NOTE(review): for-else with no break always runs, so this
                # "could not find" message fires on every iteration — confirm
                # whether that is the intended behavior
                debug('tex: could not find %s' % path)
    debug("tex: found the following bibunit files: %s" % nodes)
    return nodes
def execute(self):
    """Autoconfiguration wrapper around ``execute``.

    Queues "configure" before the current command when the cache directory
    or lock file is missing, the project directory moved, or one of the
    configuration input files changed.
    """
    if not Configure.autoconfig:
        return execute_method(self)
    env = ConfigSet.ConfigSet()
    do_config = False
    if self.root.find_node(self.cache_dir) == None:
        # no cache directory at all -> must configure
        do_config = True
    else:
        try:
            env.load(os.path.join(Context.lock_dir, Options.lockfile))
        except Exception:
            Logs.warn('Configuring the project')
            do_config = True
        else:
            if env.run_dir != Context.run_dir:
                do_config = True
            else:
                h = 0
                for f in env['files']:
                    try:
                        h = hash((h, Utils.readf(f, 'rb')))
                    except (IOError, EOFError):
                        pass # ignore missing files (will cause a rerun cause of the changed hash)
                do_config = h != env.hash
    if do_config:
        # run "configure" first, then retry the current command
        Options.commands.insert(0, self.cmd)
        Options.commands.insert(0, 'configure')
        self.skip_finish_message = True
        return
    return execute_method(self)
def load(self, filename):
    """Read a serialized ConfigSet and merge its variables into the table."""
    table = self.table
    text = Utils.readf(filename, m='rU')
    for match in re_imp.finditer(text):
        group = match.group
        # each matched line is "name = <python literal>"; eval rebuilds the value
        table[group(2)] = eval(group(3))
    Logs.debug('env: %s' % str(self.table))
def _parse_group_doc(self, group_node):
    ''' parse the doc of a package group and return (name, description) to be used in the .pc file '''
    name = group_node.name
    doc_node = group_node.make_node(['doc', name + '.txt'])
    try:
        doc = Utils.readf(doc_node.abspath())
        purpose = None
        mnemonic = None
        for line in doc.split('\n'):
            if line.startswith('@PURPOSE'):
                purpose = line.split(':')[1].strip()
            elif line.startswith('@MNEMONIC'):
                mnemonic = line.split(':')[1].strip()
            if purpose and mnemonic:
                return (mnemonic, purpose)
    except Exception:
        # BUGFIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; only real errors (missing/garbled doc file)
        # should fall through to the default
        pass
    return (name, 'N/A')
def filter_comments(filename):
    """Return the preprocessor-relevant (token, line) pairs from a C source file."""
    code = Utils.readf(filename)
    if use_trigraphs:
        for (a, b) in trig_def:
            # BUGFIX: the original was "code.split(a).join(b)" — list objects
            # have no join(), so this raised AttributeError whenever trigraph
            # support was enabled; replace each sequence "a" by "b"
            code = b.join(code.split(a))
    code = re_nl.sub('', code)
    code = re_cpp.sub(repl, code)
    return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
def execute(self):
    """Autoconfiguration wrapper around the original ``execute``.

    Re-runs the stored configuration command when the lock file is missing,
    the project directory moved, or a configuration input file changed; in
    "clobber" mode the options saved at configure time are restored for the
    duration of the configure run.
    """
    if not Configure.autoconfig:
        return execute_method(self)
    env=ConfigSet.ConfigSet()
    do_config=False
    try:
        env.load(os.path.join(Context.top_dir,Options.lockfile))
    except Exception:
        Logs.warn('Configuring the project')
        do_config=True
    else:
        if env.run_dir!=Context.run_dir:
            do_config=True
        else:
            # stable hash over the configuration input files
            h=0
            for f in env['files']:
                h=Utils.h_list((h,Utils.readf(f,'rb')))
            do_config=h!=env.hash
    if do_config:
        cmd=env['config_cmd']or'configure'
        if Configure.autoconfig=='clobber':
            # replay configure with the options recorded at configure time,
            # then restore the current ones
            tmp=Options.options.__dict__
            Options.options.__dict__=env.options
            try:
                run_command(cmd)
            finally:
                Options.options.__dict__=tmp
        else:
            run_command(cmd)
        run_command(self.cmd)
    else:
        return execute_method(self)
def configure(self):
    """Detect KDE4 via kde4-config and the KDELibsDependencies cmake module."""
    kdeconfig=self.find_program('kde4-config')
    prefix=self.cmd_and_log(kdeconfig+['--prefix']).strip()
    fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
    try:
        os.stat(fname)
    except OSError:
        # older/newer layouts store the module under share/kde4
        fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
        try:
            os.stat(fname)
        except OSError:
            self.fatal('could not open %s'%fname)
    try:
        txt=Utils.readf(fname)
    except(OSError,IOError):
        self.fatal('could not read %s'%fname)
    # join cmake continuation lines, then strip comments
    txt=txt.replace('\\\n','\n')
    fu=re.compile('#(.*)\n')
    txt=fu.sub('',txt)
    # BUGFIX: raw string — the original non-raw pattern relied on invalid
    # escape sequences (\s, \() which are deprecation errors in new Pythons
    setregexp=re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
    found=setregexp.findall(txt)
    for(_,key,val)in found:
        self.env[key]=val
    self.env['LIB_KDECORE']=['kdecore']
    self.env['LIB_KDEUI']=['kdeui']
    self.env['LIB_KIO']=['kio']
    self.env['LIB_KHTML']=['khtml']
    self.env['LIB_KPARTS']=['kparts']
    self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR]
    self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']]
    self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE'])
    self.find_program('msgfmt',var='MSGFMT')
def load_module(path, encoding=None):
    """
    Loads a wscript file as a python module. This method caches results in
    :py:attr:`waflib.Context.cache_modules`

    :param path: file path
    :type path: string
    :return: Loaded Python module
    :rtype: module
    """
    cached = cache_modules.get(path)
    if cached is not None:
        return cached
    module = imp.new_module(WSCRIPT_FILE)
    try:
        source = Utils.readf(path, m="rU", encoding=encoding)
    except EnvironmentError:
        raise Errors.WafError("Could not read the file %r" % path)
    wscript_dir = os.path.dirname(path)
    sys.path.insert(0, wscript_dir)
    try:
        # run the wscript body inside the fresh module namespace
        exec(compile(source, path, "exec"), module.__dict__)
    finally:
        sys.path.remove(wscript_dir)
    cache_modules[path] = module
    return module
def configure(self):
    """Detect KDE4 via kde4-config and the KDELibsDependencies cmake module."""
    kdeconfig = self.find_program("kde4-config")
    prefix = self.cmd_and_log(kdeconfig + ["--prefix"]).strip()
    fname = "%s/share/apps/cmake/modules/KDELibsDependencies.cmake" % prefix
    try:
        os.stat(fname)
    except OSError:
        # alternative layout under share/kde4
        fname = "%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake" % prefix
        try:
            os.stat(fname)
        except OSError:
            self.fatal("could not open %s" % fname)
    try:
        txt = Utils.readf(fname)
    except EnvironmentError:
        self.fatal("could not read %s" % fname)
    # join cmake continuation lines and drop comments
    txt = txt.replace("\\\n", "\n")
    fu = re.compile("#(.*)\n")
    txt = fu.sub("", txt)
    # BUGFIX: raw string — the non-raw pattern relied on invalid escape
    # sequences (\s, \() which newer Pythons reject
    setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+"([^"]+)"\)')
    found = setregexp.findall(txt)
    for (_, key, val) in found:
        self.env[key] = val
    self.env["LIB_KDECORE"] = ["kdecore"]
    self.env["LIB_KDEUI"] = ["kdeui"]
    self.env["LIB_KIO"] = ["kio"]
    self.env["LIB_KHTML"] = ["khtml"]
    self.env["LIB_KPARTS"] = ["kparts"]
    self.env["LIBPATH_KDECORE"] = [
        os.path.join(self.env.KDE4_LIB_INSTALL_DIR, "kde4", "devel"),
        self.env.KDE4_LIB_INSTALL_DIR,
    ]
    self.env["INCLUDES_KDECORE"] = [self.env["KDE4_INCLUDE_INSTALL_DIR"]]
    self.env.append_value("INCLUDES_KDECORE", [self.env["KDE4_INCLUDE_INSTALL_DIR"] + os.sep + "KDE"])
    self.find_program("msgfmt", var="MSGFMT")
def fatal(self, msg, ex=None):
    """
    Prints an error message in red and stops command execution; this is
    usually used in the configuration section::

        def configure(conf):
            conf.fatal('a requirement is missing')

    :param msg: message to display
    :type msg: string
    :param ex: optional exception object
    :type ex: exception
    :raises: :py:class:`waflib.Errors.ConfigurationError`
    """
    if self.logger:
        self.logger.info('from %s: %s' % (self.path.abspath(), msg))
    try:
        logfile = self.logger.handlers[0].baseFilename
    except AttributeError:
        # no logger or no file handler: keep the message unchanged
        logfile = None
    if logfile is not None:
        if os.environ.get('WAF_PRINT_FAILURE_LOG'):
            # see #1930
            msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
        else:
            msg = '%s\n(complete log in %s)' % (msg, logfile)
    raise self.errors.ConfigurationError(msg, ex=ex)
def load_module(path, encoding=None):
    """
    Load a source file as a python module.

    :param path: file path
    :type path: string
    :return: Loaded Python module
    :rtype: module
    """
    if path in cache_modules:
        return cache_modules[path]
    module = imp.new_module(WSCRIPT_FILE)
    try:
        code = Utils.readf(path, m='rU', encoding=encoding)
    except EnvironmentError:
        raise Errors.WafError('Could not read the file %r' % path)
    module_dir = os.path.dirname(path)
    sys.path.insert(0, module_dir)
    try:
        exec(compile(code, path, 'exec'), module.__dict__)
    finally:
        # undo the sys.path change no matter what the script did
        sys.path.remove(module_dir)
    cache_modules[path] = module
    return module
def restore(self):
    """Load the data from a previous run.

    Restores the configured tools from ``build.config.py`` and the pickled
    node/task state from the variant directory database file.
    """
    try:
        env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
    except EnvironmentError:
        pass
    else:
        if env.version<Context.HEXVERSION:
            raise Errors.WafError('Version mismatch! reconfigure the project')
        for t in env.tools:
            self.setup(**t)
    dbfn=os.path.join(self.variant_dir,Context.DBFILE)
    try:
        data=Utils.readf(dbfn,'rb')
    except(EnvironmentError,EOFError):
        # a missing or empty cache simply means a full build
        Logs.debug('build: Could not load the build cache %s (missing)',dbfn)
    else:
        try:
            # the lock serializes the Nod3 class swap used while unpickling
            Node.pickle_lock.acquire()
            Node.Nod3=self.node_class
            try:
                data=cPickle.loads(data)
            except Exception as e:
                Logs.debug('build: Could not pickle the build cache %s: %r',dbfn,e)
            else:
                for x in SAVED_ATTRS:
                    setattr(self,x,data[x])
        finally:
            Node.pickle_lock.release()
    self.init_dirs()
def load_module(file_path):
    """
    Add global pushing to WSCRIPT when it loads

    Loads the wscript as a module, injecting the globals declared in
    ``Context.wscript_vars`` and a default ``src`` helper beforehand.
    """
    try:
        return Context.cache_modules[file_path]
    except KeyError:
        pass
    module = imp.new_module(Context.WSCRIPT_FILE)
    try:
        code = Utils.readf(file_path, m='rU')
    except (IOError, OSError):
        raise Errors.WafError('Could not read the file %r' % file_path)
    module_dir = os.path.dirname(file_path)
    sys.path.insert(0, module_dir)
    # push the shared globals into the module before executing it
    for k, v in Context.wscript_vars.items():
        setattr(module, k, v)
    if not hasattr(module, 'src'):
        def src(x):
            import os.path
            return os.path.abspath(x)
        module.src = src
    Context.g_module = module
    try:
        exec(compile(code, file_path, 'exec'), module.__dict__)
    finally:
        # BUGFIX: restore sys.path even when the wscript raises
        sys.path.remove(module_dir)
    if not hasattr(module, 'root_path'):
        module.root_path = file_path
    Context.cache_modules[file_path] = module
    return module
def restore(self):
    """Restore the attributes listed in SAVED_ATTRS from the build cache."""
    try:
        env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, "build.config.py"))
    except (IOError, OSError):
        pass
    else:
        if env["version"] < Context.HEXVERSION:
            raise Errors.WafError("Version mismatch! reconfigure the project")
        for tool in env["tools"]:
            self.setup(**tool)
    dbfn = os.path.join(self.variant_dir, Context.DBFILE)
    try:
        raw = Utils.readf(dbfn, "rb")
    except (IOError, EOFError):
        # missing/empty cache -> full build
        Logs.debug("build: Could not load the build cache %s (missing)" % dbfn)
    else:
        waflib.Node.pickle_lock.acquire()
        try:
            # the lock protects the temporary Nod3 class swap
            waflib.Node.Nod3 = self.node_class
            try:
                cached = cPickle.loads(raw)
            except Exception as e:
                Logs.debug("build: Could not pickle the build cache %s: %r" % (dbfn, e))
            else:
                for attr in SAVED_ATTRS:
                    setattr(self, attr, cached[attr])
        finally:
            waflib.Node.pickle_lock.release()
    self.init_dirs()
def restore(self):
    """
    Load the data from a previous run, sets the attributes listed in
    :py:const:`waflib.Build.SAVED_ATTRS`
    """
    try:
        env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
    except (IOError, OSError):
        pass
    else:
        if env['version'] < Context.HEXVERSION:
            raise Errors.WafError('Version mismatch! reconfigure the project')
        for t in env['tools']:
            self.setup(**t)
    dbfn = os.path.join(self.variant_dir, Context.DBFILE)
    try:
        data = Utils.readf(dbfn, 'rb')
    except (IOError, EOFError):
        # handle missing file/empty file
        Logs.debug('build: Could not load the build cache %s (missing)' % dbfn)
    else:
        try:
            # the lock serializes the Nod3 class swap used while unpickling
            waflib.Node.pickle_lock.acquire()
            waflib.Node.Nod3 = self.node_class
            try:
                data = cPickle.loads(data)
            except Exception as e:
                Logs.debug('build: Could not pickle the build cache %s: %r' % (dbfn, e))
            else:
                for x in SAVED_ATTRS:
                    setattr(self, x, data[x])
        finally:
            waflib.Node.pickle_lock.release()
    self.init_dirs()
def load_module(path):
    """
    Load a source file as a python module.

    :param path: file path
    :type path: string
    :return: Loaded Python module
    :rtype: module
    """
    try:
        return cache_modules[path]
    except KeyError:
        pass
    module = imp.new_module(
        # Script files may have periods. Python gets confused by module names with periods.
        WSCRIPT_FILES.intersection(os.listdir('.')).pop().split('.')[0]
    )
    try:
        code = Utils.readf(path, m='rU')
    except (IOError, OSError):
        raise Errors.WafError('Could not read the file %r' % path)
    module_dir = os.path.dirname(path)
    sys.path.insert(0, module_dir)
    try:
        exec(compile(code, path, 'exec'), module.__dict__)
    finally:
        # BUGFIX: restore sys.path even when the script raises
        sys.path.remove(module_dir)
    cache_modules[path] = module
    return module
def execute(self):
    """Autoconfiguration wrapper: queue "configure" before the current
    command when the lock file is missing, stale, or its inputs changed."""
    if not Configure.autoconfig:
        return execute_method(self)
    env = ConfigSet.ConfigSet()
    do_config = False
    try:
        env.load(os.path.join(Context.top_dir, Options.lockfile))
    except Exception:
        Logs.warn('Configuring the project')
        do_config = True
    else:
        if env.run_dir != Context.run_dir:
            do_config = True
        else:
            # stable hash over the configuration input files
            h = 0
            for f in env['files']:
                h = Utils.h_list((h, Utils.readf(f, 'rb')))
            do_config = h != env.hash
    if do_config:
        Options.commands.insert(0, self.cmd)
        Options.commands.insert(0, 'configure')
        if Configure.autoconfig == 'clobber':
            # replay the configure run with the options saved at configure time
            Options.options.__dict__ = env.options
        return
    return execute_method(self)
def load(self, filename):
    "Retrieve the variables from a file"
    contents = Utils.readf(filename)
    for match in re_imp.finditer(contents):
        # group 2 is the variable name, group 3 its repr()-style value
        key, value = match.group(2), match.group(3)
        self.table[key] = eval(value)
    Logs.debug('env: %s' % str(self.table))
def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
    """Return the parsed constraints for a package version, caching the result."""
    key = (pkgname, pkgver)
    if key in self.cache_constraints:
        return self.cache_constraints[key]
    path = os.path.join(get_distnet_cache(), pkgname, pkgver, requires)
    parsed = parse_constraints(Utils.readf(path))
    self.cache_constraints[key] = parsed
    return parsed
def load_pickle(self, filename):
    """Load a pickled ConfigSet, falling back to the original text format."""
    try:
        data = pickle.loads(Utils.readf(filename, m='rb'))
    except pickle.UnpicklingError:
        # not a pickle file: defer to the original loader below
        data = None
    if data is None:
        data = load_orig(self, filename)
    if data:
        self.table.update(data)
    Logs.debug('env: %s' % str(self.table))
def gather_intel_composer_versions(conf,versions):
    """Probe the Windows registry for Intel Composer (icl) suites and append
    ('intel NN', targets) entries to *versions*.

    NOTE(review): Windows-only — relies on Utils.winreg and WindowsError.
    """
    version_pattern=re.compile('^...?.?\...?.?.?')
    try:
        all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
    except WindowsError:
        try:
            all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
        except WindowsError:
            return
    index=0
    while 1:
        try:
            version=Utils.winreg.EnumKey(all_versions,index)
        except WindowsError:
            break
        index=index+1
        if not version_pattern.match(version):
            continue
        targets=[]
        for target,arch in all_icl_platforms:
            try:
                if target=='intel64':targetDir='EM64T_NATIVE'
                else:targetDir=target
                try:
                    defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
                except WindowsError:
                    # older suites registered intel64 under the EM64T key
                    if targetDir=='EM64T_NATIVE':
                        defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
                    else:
                        raise WindowsError
                uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
                Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
                icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
                path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
                batch_file=os.path.join(path,'bin','iclvars.bat')
                if os.path.isfile(batch_file):
                    try:
                        targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file))))
                    except conf.errors.ConfigurationError:
                        pass
                # warn once about the Intel 13 / VS Express 2012 batch file issue
                compilervars_warning_attr='_compilervars_warning_key'
                if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
                    setattr(conf,compilervars_warning_attr,False)
                    patch_url='http://software.intel.com/en-us/forums/topic/328487'
                    compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
                    for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'):
                        if vscomntool in os.environ:
                            vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe'
                            dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe'
                            if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
                                Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
            except WindowsError:
                pass
        major=version[0:2]
        versions.append(('intel '+major,targets))
def load(self, filename):
    """
    Retrieve the :py:class:`ConfigSet` data from a file. See
    :py:meth:`ConfigSet.store` for writing such files

    :param filename: file to use
    :type filename: string
    """
    serialized = Utils.readf(filename, m='rU')
    for entry in re_imp.finditer(serialized):
        # group 2 holds the variable name, group 3 its repr()-style value
        self.table[entry.group(2)] = eval(entry.group(3))
    Logs.debug('env: %s' % str(self.table))
def abi_process_file(fname, version, symmap):
    '''process one ABI file, adding new symbols to the symmap'''
    text = Utils.readf(fname)
    for line in text.splitlines():
        symname = line.split(":")[0]
        # keep the first version a symbol appeared in; never overwrite
        symmap.setdefault(symname, version)
# NOTE(review): this try/except is the tail of a definition whose beginning is
# not visible in this chunk — "tool", "ctx" and "x" are bound upstream.
    try:
        Configure.download_tool(tool,force=True,ctx=ctx)
    except Errors.WafError:
        Logs.warn('Could not find the tool %s in the remote repository'%x)

def autoconfigure(execute_method):
    """Decorator wrapping ``BuildContext.execute`` with lock-file based
    autoconfiguration (Python 2 syntax variant)."""
    def execute(self):
        if not Configure.autoconfig:
            return execute_method(self)
        env=ConfigSet.ConfigSet()
        do_config=False
        try:
            env.load(os.path.join(Context.top_dir,Options.lockfile))
        except Exception ,e:
            Logs.warn('Configuring the project')
            do_config=True
        else:
            if env.run_dir!=Context.run_dir:
                do_config=True
            else:
                # re-hash the configuration input files to detect changes
                h=0
                for f in env['files']:
                    h=hash((h,Utils.readf(f,'rb')))
                do_config=h!=env.hash
        if do_config:
            # run "configure" first, then retry the current command
            Options.commands.insert(0,self.cmd)
            Options.commands.insert(0,'configure')
            return
        return execute_method(self)
    return execute
Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute)
def gather_intel_composer_versions(conf, versions):
    """
    Checks ICL compilers that are part of Intel Composer Suites

    :param versions: list to modify
    :type versions: list
    """
    version_pattern = re.compile(r"^...?.?\...?.?.?")
    try:
        all_versions = Utils.winreg.OpenKey(
            Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Intel\\Suites"
        )
    except OSError:
        try:
            all_versions = Utils.winreg.OpenKey(
                Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Intel\\Suites"
            )
        except OSError:
            return
    index = 0
    while 1:
        try:
            version = Utils.winreg.EnumKey(all_versions, index)
        except OSError:
            # end of the registry key enumeration
            break
        index += 1
        if not version_pattern.match(version):
            continue
        targets = {}
        for target, arch in all_icl_platforms:
            if target == "intel64":
                targetDir = "EM64T_NATIVE"
            else:
                targetDir = target
            try:
                try:
                    defaults = Utils.winreg.OpenKey(
                        all_versions, version + "\\Defaults\\C++\\" + targetDir
                    )
                except OSError:
                    # older suites registered intel64 under the EM64T key
                    if targetDir == "EM64T_NATIVE":
                        defaults = Utils.winreg.OpenKey(
                            all_versions, version + "\\Defaults\\C++\\EM64T"
                        )
                    else:
                        raise
                uid, type = Utils.winreg.QueryValueEx(defaults, "SubKey")
                Utils.winreg.OpenKey(
                    all_versions, version + "\\" + uid + "\\C++\\" + targetDir
                )
                icl_version = Utils.winreg.OpenKey(
                    all_versions, version + "\\" + uid + "\\C++"
                )
                path, type = Utils.winreg.QueryValueEx(icl_version, "ProductDir")
            except OSError:
                pass
            else:
                batch_file = os.path.join(path, "bin", "iclvars.bat")
                if os.path.isfile(batch_file):
                    targets[target] = target_compiler(
                        conf, "intel", arch, version, target, batch_file
                    )
                # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
                # http://software.intel.com/en-us/forums/topic/328487
                compilervars_warning_attr = "_compilervars_warning_key"
                if version[0:2] == "13" and getattr(conf, compilervars_warning_attr, True):
                    setattr(conf, compilervars_warning_attr, False)
                    patch_url = "http://software.intel.com/en-us/forums/topic/328487"
                    compilervars_arch = os.path.join(path, "bin", "compilervars_arch.bat")
                    for vscomntool in ("VS110COMNTOOLS", "VS100COMNTOOLS"):
                        if vscomntool in os.environ:
                            vs_express_path = (
                                os.environ[vscomntool] + r"..\IDE\VSWinExpress.exe"
                            )
                            dev_env_path = os.environ[vscomntool] + r"..\IDE\devenv.exe"
                            if (
                                r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'
                                in Utils.readf(compilervars_arch)
                                and not os.path.exists(vs_express_path)
                                and not os.path.exists(dev_env_path)
                            ):
                                Logs.warn(
                                    (
                                        "The Intel compilervar_arch.bat only checks for one Visual Studio SKU "
                                        "(VSWinExpress.exe) but it does not seem to be installed at %r. "
                                        "The intel command line set up will fail to configure unless the file %r"
                                        "is patched. See: %s"
                                    )
                                    % (vs_express_path, compilervars_arch, patch_url)
                                )
        major = version[0:2]
        versions["intel " + major] = targets
def parse_strace_deps(self, path, cwd):
    """Parse an strace log and record the files touched as task dependencies.

    :param path: strace output file (always removed after reading)
    :param cwd: initial working directory of the traced process
    """
    # uncomment the following line to disable the dependencies and force a file scan
    # return
    try:
        cnt = Utils.readf(path)
    finally:
        try:
            os.remove(path)
        except OSError:
            pass
    if not isinstance(cwd, str):
        cwd = cwd.abspath()
    nodes = []
    bld = self.generator.bld
    try:
        cache = bld.strace_cache
    except AttributeError:
        cache = bld.strace_cache = {}
    # chdir and relative paths
    pid_to_cwd = {}
    global BANNED
    done = set()
    for m in re.finditer(re_lines, cnt):
        # scraping the output of strace
        pid = m.group('pid')
        if m.group('npid'):
            # fork/clone: the child starts with the parent's cwd
            npid = m.group('npid')
            pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
            continue
        p = m.group('path').replace('\\"', '"')
        if p == '.' or m.group().find('= -1 ENOENT') > -1:
            # just to speed it up a bit
            continue
        if not os.path.isabs(p):
            p = os.path.join(pid_to_cwd.get(pid, cwd), p)
        call = m.group('call')
        if call == 'chdir':
            pid_to_cwd[pid] = p
            continue
        if p in done:
            continue
        done.add(p)
        for x in BANNED:
            if p.startswith(x):
                break
        else:
            # not banned: skip directories, then resolve (with a lock around
            # the shared node cache)
            if p.endswith('/') or os.path.isdir(p):
                continue
            try:
                node = cache[p]
            except KeyError:
                strace_lock.acquire()
                try:
                    cache[p] = node = bld.root.find_node(p)
                    if not node:
                        continue
                finally:
                    strace_lock.release()
            nodes.append(node)
    # record the dependencies then force the task signature recalculation for next time
    if Logs.verbose:
        Logs.debug('deps: real scanner for %r returned %r', self, nodes)
    bld = self.generator.bld
    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []
    try:
        del self.cache_sig
    except AttributeError:
        pass
    self.signature()
def filter_comments(filename):
    """Strip string literals and comments from a D source file.

    Returns the list of remaining code fragments (each comment is replaced
    by a single space so token boundaries survive).
    """
    txt = Utils.readf(filename)
    i = 0
    buf = []
    max = len(txt)
    begin = 0
    while i < max:
        c = txt[i]
        if c == '"' or c == "'":
            # string literal: skip to the matching unescaped delimiter
            buf.append(txt[begin:i])
            delim = c
            i += 1
            while i < max:
                c = txt[i]
                if c == delim:
                    break
                elif c == '\\':
                    # escape sequence: also skip the escaped character
                    i += 1
                i += 1
            i += 1
            begin = i
        elif c == '/':
            buf.append(txt[begin:i])
            i += 1
            if i == max:
                break
            c = txt[i]
            if c == '+':
                # D nesting comment /+ ... +/
                i += 1
                nesting = 1
                c = None
                while i < max:
                    prev = c
                    c = txt[i]
                    if prev == '/' and c == '+':
                        nesting += 1
                        c = None
                    elif prev == '+' and c == '/':
                        nesting -= 1
                        if nesting == 0:
                            break
                        c = None
                    i += 1
            elif c == '*':
                # block comment /* ... */
                i += 1
                c = None
                while i < max:
                    prev = c
                    c = txt[i]
                    if prev == '*' and c == '/':
                        break
                    i += 1
            elif c == '/':
                # line comment // ...
                i += 1
                while i < max and txt[i] != '\n':
                    i += 1
            else:
                # plain '/' operator, not a comment: rewind one character
                begin = i - 1
                continue
            i += 1
            begin = i
            buf.append(' ')
        else:
            i += 1
    buf.append(txt[begin:])
    return buf
def post_run(self):
    """Parse the compiler-generated .d file and record the dependency nodes.

    The following code is executed by threads, it is not safe, so a lock is needed...
    """
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)
    # the .d format is "target: dep1 dep2 ..." with backslash continuations
    txt = txt.replace('\\\n', '')
    lst = txt.strip().split(':')
    val = ":".join(lst[1:])
    val = val.split()
    nodes = []
    bld = self.generator.bld
    for x in val:
        node = None
        if os.path.isabs(x):
            if not c_preproc.go_absolute:
                continue
            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            path = bld.bldnode
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # BUGFIX: loop on the path components being consumed (was
            # "while lst and ...", which could raise IndexError once x
            # is exhausted since lst is always truthy)
            while x and x[0] == '..':
                x = x[1:]
                path = path.parent
            # when calling find_resource, make sure the path does not begin by '..'
            try:
                lock.acquire()
                node = path.find_resource(x)
            finally:
                lock.release()
        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            if id(node) == id(self.inputs[0]):
                # ignore the source file, it is already in the dependencies
                # this way, successful config tests may be retrieved from the cache
                continue
            nodes.append(node)
    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []
    try:
        del self.cache_sig
    except AttributeError:
        # BUGFIX: was a bare "except:", which also swallowed KeyboardInterrupt;
        # also removed the unused compiled regex "f" the original created
        pass
    Task.Task.post_run(self)
def compute_needed_tgs(self):
    """Mark task generators as stale or needed by walking the 'use' graph in
    both directions, then restore the build trees of the needed ones."""
    # assume the 'use' keys are not modified during the build phase
    dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
    Logs.debug('rev_use: Loading %s', dbfn)
    try:
        data = Utils.readf(dbfn, 'rb')
    except (EnvironmentError, EOFError):
        Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
        self.f_deps = {}
        self.f_tstamps = {}
    else:
        try:
            self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
        except Exception as e:
            Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
            self.f_deps = {}
            self.f_tstamps = {}
        else:
            Logs.debug('rev_use: Loaded %s', dbfn)
    # 1. obtain task generators that contain rebuilds
    # 2. obtain the 'use' graph and its dual
    stales = set()
    reverse_use_map = Utils.defaultdict(list)
    use_map = Utils.defaultdict(list)
    for g in self.groups:
        for tg in g:
            if tg.is_stale():
                stales.add(tg)
            try:
                lst = tg.use = Utils.to_list(tg.use)
            except AttributeError:
                pass
            else:
                for x in lst:
                    try:
                        xtg = self.get_tgen_by_name(x)
                    except Errors.WafError:
                        pass
                    else:
                        use_map[tg].append(xtg)
                        reverse_use_map[xtg].append(tg)
    Logs.debug('rev_use: found %r stale tgs', len(stales))
    # 3. dfs to post downstream tg as stale
    visited = set()
    def mark_down(tg):
        if tg in visited:
            return
        visited.add(tg)
        Logs.debug('rev_use: marking down %r as stale', tg.name)
        tg.staleness = DIRTY
        for x in reverse_use_map[tg]:
            mark_down(x)
    for tg in stales:
        mark_down(tg)
    # 4. dfs to find ancestors tg to mark as needed
    self.needed_tgs = needed_tgs = set()
    def mark_needed(tg):
        if tg in needed_tgs:
            return
        needed_tgs.add(tg)
        if tg.staleness == DONE:
            Logs.debug('rev_use: marking up %r as needed', tg.name)
            tg.staleness = NEEDED
        for x in use_map[tg]:
            mark_needed(x)
    for xx in visited:
        mark_needed(xx)
    # so we have the whole tg trees to post in the set "needed"
    # load their build trees
    for tg in needed_tgs:
        tg.bld.restore()
        tg.bld.fix_tg_path(tg)
    # the stale ones should be fully build, while the needed ones
    # may skip a few tasks, see create_compiled_task and apply_link_after below
    Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs))
def gather_intel_composer_versions(conf, versions):
	"""
	Scan the Windows registry for Intel Composer (icl) installations and append
	an ('intel NN', targets) entry to *versions* for each release found.

	:param conf: configuration context (provides get_msvc_version and errors)
	:param versions: list collecting (compiler name, targets) tuples (mutated)
	"""
	version_pattern = re.compile('^...?.?\...?.?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
	except WindowsError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
		except WindowsError:
			# no Intel suite installed at all
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except WindowsError:
			# no more subkeys to enumerate
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		targets = []
		for target, arch in all_icl_platforms:
			try:
				if target == 'intel64':
					targetDir = 'EM64T_NATIVE'
				else:
					targetDir = target
				try:
					defaults = Utils.winreg.OpenKey(all_versions, version + '\\Defaults\\C++\\' + targetDir)
				except WindowsError:
					# older releases use the plain EM64T key for 64-bit targets
					if targetDir == 'EM64T_NATIVE':
						defaults = Utils.winreg.OpenKey(all_versions, version + '\\Defaults\\C++\\EM64T')
					else:
						raise WindowsError
				# '_' replaces the unused value-type results that shadowed builtin 'type'
				uid, _ = Utils.winreg.QueryValueEx(defaults, 'SubKey')
				Utils.winreg.OpenKey(all_versions, version + '\\' + uid + '\\C++\\' + targetDir)
				icl_version = Utils.winreg.OpenKey(all_versions, version + '\\' + uid + '\\C++')
				path, _ = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
				batch_file = os.path.join(path, 'bin', 'iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file))))
					# BUGFIX: 'except X, e:' is Python-2-only syntax; the bound
					# exception was unused anyway
					except conf.errors.ConfigurationError:
						pass
				# Intel's compilervars_arch.bat for the 13.x releases only probes for
				# one Visual Studio SKU; warn once if that SKU is absent
				compilervars_warning_attr = '_compilervars_warning_key'
				if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
					setattr(conf, compilervars_warning_attr, False)
					patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
					compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
					for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'):
						if vscomntool in os.environ:
							vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe'
							dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe'
							if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch)
									and not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
								Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
									'(VSWinExpress.exe) but it does not seem to be installed at %r. '
									'The intel command line set up will fail to configure unless the file %r'
									'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
			except WindowsError:
				# registry layout did not match for this target; try the next one
				pass
		# BUGFIX: the collected targets were previously discarded and the
		# 'versions' parameter was never updated, making the whole scan a no-op
		major = version[0:2]
		versions.append(('intel ' + major, targets))
def post_run(self):
	"""
	Parse the compiler-emitted ``.d`` Makefile-fragment next to the object file
	and record its header dependencies in ``bld.node_deps`` (raw_deps is cleared),
	then run the regular :py:meth:`Task.post_run`.

	Falls through to the parent class when gccdeps is not enabled for this
	task class. Raises ValueError when a dependency path cannot be resolved
	to a node.
	"""
	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
		return super(self.derived_gccdeps, self).post_run()

	# the .d file sits next to the first output, with the object suffix replaced
	name = self.outputs[0].abspath()
	name = re_o.sub(".d", name)
	try:
		txt = Utils.readf(name)
	except OSError:
		Logs.error(
			"Could not find a .d dependency file, are cflags/cxxflags overwritten?"
		)
		raise
	# os.remove(name)

	# Compilers have the choice to either output the file's dependencies
	# as one large Makefile rule:
	#
	# /path/to/file.o: /path/to/dep1.h \
	# /path/to/dep2.h \
	# /path/to/dep3.h \
	# ...
	#
	# or as many individual rules:
	#
	# /path/to/file.o: /path/to/dep1.h
	# /path/to/file.o: /path/to/dep2.h
	# /path/to/file.o: /path/to/dep3.h
	# ...
	#
	# So the first step is to sanitize the input by stripping out the left-
	# hand side of all these lines. After that, whatever remains are the
	# implicit dependencies of task.outputs[0]
	txt = "\n".join(
		[remove_makefile_rule_lhs(line) for line in txt.splitlines()])

	# Now join all the lines together
	txt = txt.replace("\\\n", "")

	val = txt.strip()
	# split on whitespace, undoing the Makefile escaping of spaces
	val = [x.replace("\\ ", " ") for x in re_splitter.split(val) if x]

	nodes = []
	bld = self.generator.bld

	# Dynamically bind to the cache
	try:
		cached_nodes = bld.cached_nodes
	except AttributeError:
		cached_nodes = bld.cached_nodes = {}

	for x in val:
		node = None
		if os.path.isabs(x):
			node = path_to_node(bld.root, x, cached_nodes)
		else:
			# TODO waf 1.9 - single cwd value
			path = getattr(bld, "cwdx", bld.bldnode)
			# when calling find_resource, make sure the path does not contain '..'
			x = [k for k in Utils.split_path(x) if k and k != "."]
			while ".." in x:
				idx = x.index("..")
				if idx == 0:
					x = x[1:]
					path = path.parent
				else:
					del x[idx]
					del x[idx - 1]
			node = path_to_node(path, x, cached_nodes)

		if not node:
			raise ValueError(f"could not find {x!r} for {self!r}")
		if id(node) == id(self.inputs[0]):
			# ignore the source file, it is already in the dependencies
			# this way, successful config tests may be retrieved from the cache
			continue
		nodes.append(node)

	Logs.debug("deps: gccdeps for %s returned %s", self, nodes)
	bld.node_deps[self.uid()] = nodes
	bld.raw_deps[self.uid()] = []

	# drop the memoized signature so it is recomputed with the new deps
	try:
		del self.cache_sig
	except AttributeError:
		pass
	Task.Task.post_run(self)
def post_run(self):
	"""
	Parse the compiler-generated ``.d`` file and store the discovered header
	dependencies in ``bld.node_deps`` before running the regular
	:py:meth:`Task.post_run`.

	Skips the dependency scan for unsupported compilers, cached tasks, and
	disassembly/preprocessed outputs. Raises ValueError when a dependency
	path cannot be resolved to a node.
	"""
	# The following code is executed by threads, it is not safe, so a lock is needed...
	if self.env.CC_NAME not in supported_compilers:
		return self.no_gccdeps_post_run()

	if getattr(self, 'cached', None):
		return Task.Task.post_run(self)

	# Do not check dependencies for disassembly and preprocessed files as this is already the final output
	bld = self.generator.bld
	if bld.is_option_true('show_preprocessed_file') or bld.is_option_true(
			'show_disassembly'):
		return Task.Task.post_run(self)

	# the .d file sits next to the first output, with the object suffix replaced
	name = self.outputs[0].abspath()
	name = re_o.sub('.d', name)
	txt = Utils.readf(name)
	#os.remove(name)

	# Compilers have the choice to either output the file's dependencies
	# as one large Makefile rule:
	#
	# /path/to/file.o: /path/to/dep1.h \
	# /path/to/dep2.h \
	# /path/to/dep3.h \
	# ...
	#
	# or as many individual rules:
	#
	# /path/to/file.o: /path/to/dep1.h
	# /path/to/file.o: /path/to/dep2.h
	# /path/to/file.o: /path/to/dep3.h
	# ...
	#
	# So the first step is to sanitize the input by stripping out the left-
	# hand side of all these lines. After that, whatever remains are the
	# implicit dependencies of task.outputs[0]
	txt = '\n'.join(
		[remove_makefile_rule_lhs(line) for line in txt.splitlines()])

	# Now join all the lines together
	txt = txt.replace('\\\n', '')

	val = txt.strip()
	# NOTE(review): 'lst' is only ever used as a truthiness guard in the '..'
	# loop below; str.split always returns a non-empty list, so the guard
	# looks vacuous — confirm before simplifying
	lst = val.split(':')
	val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
	nodes = []

	# Dynamically bind to the cache
	try:
		cached_nodes = bld.cached_nodes
	except AttributeError:
		cached_nodes = bld.cached_nodes = {}

	for x in val:
		node = None
		# Remove leading and tailing double quotes
		if x[0] == '"':
			x = x[1:]
		if x[len(x) - 1] == '"':
			x = x[:len(x) - 1]
		if os.path.isabs(x):
			# HACK: for reasons unknown, some of the android library includes have a ':' appended to the path
			# causing the following to fail
			if 'android' in self.env['PLATFORM']:
				if x[-1] == ':':
					x = x[:-1]
			node = path_to_node(bld.root, x, cached_nodes,
				self.env['PLATFORM'] == 'orbis')
		else:
			path = bld.bldnode
			# when calling find_resource, make sure the path does not begin by '..'
			x = [k for k in Utils.split_path(x) if k and k != '.']
			while lst and x[0] == '..':
				x = x[1:]
				path = path.parent
			node = path_to_node(path, x, cached_nodes,
				self.env['PLATFORM'] == 'orbis')

		if not node:
			raise ValueError('could not find %r for %r' % (x, self))
		else:
			if not c_preproc.go_absolute:
				# outside-project dependencies are ignored unless go_absolute is set
				if not (node.is_child_of(bld.srcnode)
						or node.is_child_of(bld.bldnode)):
					continue
		if id(node) == id(self.inputs[0]):
			# ignore the source file, it is already in the dependencies
			# this way, successful config tests may be retrieved from the cache
			continue
		nodes.append(node)

	Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
	bld.node_deps[self.uid()] = nodes
	bld.raw_deps[self.uid()] = []

	# drop the memoized signature so it is recomputed with the new deps
	try:
		del self.cache_sig
	except:
		pass
	Task.Task.post_run(self)
def filter_comments(filename):
	"""
	Strip D-style comments and string/character literals from a file.

	Comments (``//``, ``/* */`` and nesting ``/+ +/``) are replaced by a single
	space; string and character literal bodies are dropped.

	:param filename: d file name
	:type filename: string
	:rtype: list
	:return: a list of code fragments (strings)
	"""
	txt = Utils.readf(filename)
	i = 0
	buf = []
	end = len(txt)  # renamed from 'max', which shadowed the builtin
	begin = 0
	while i < end:
		c = txt[i]
		if c == '"' or c == "'":
			# skip a string or character literal
			buf.append(txt[begin:i])
			delim = c
			i += 1
			while i < end:
				c = txt[i]
				if c == delim:
					break
				elif c == "\\":
					# skip the character following backslash
					i += 1
				i += 1
			i += 1
			begin = i
		elif c == "/":
			# try to replace a comment with whitespace
			buf.append(txt[begin:i])
			i += 1
			if i == end:
				break
			c = txt[i]
			if c == "+":
				# eat nesting /+ +/ comment
				i += 1
				nesting = 1
				c = None
				while i < end:
					prev = c
					c = txt[i]
					if prev == "/" and c == "+":
						nesting += 1
						c = None
					elif prev == "+" and c == "/":
						nesting -= 1
						if nesting == 0:
							break
						c = None
					i += 1
			elif c == "*":
				# eat /* */ comment
				i += 1
				c = None
				while i < end:
					prev = c
					c = txt[i]
					if prev == "*" and c == "/":
						break
					i += 1
			elif c == "/":
				# eat // comment
				i += 1
				while i < end and txt[i] != "\n":
					i += 1
			else:
				# no comment: keep the slash and resume scanning from it
				begin = i - 1
				continue
			i += 1
			begin = i
			buf.append(" ")
		else:
			i += 1
	buf.append(txt[begin:])
	return buf
def execute(self):
	"""
	Wraps :py:func:`waflib.Context.Context.execute` on the context class

	When autoconfig is enabled, compares the lockfile and the hash of the
	configure-time files against the stored configuration for this board
	variant and re-runs 'configure' (then the original command) when they
	differ; otherwise executes the command directly.
	"""
	if not Configure.autoconfig:
		return execute_method(self)

	# Disable autoconfig so waf's version doesn't run (and don't end up on loop of bad configure)
	Configure.autoconfig = False

	if self.variant == '':
		raise Errors.WafError(
			'The project is badly configured: run "waf configure" again!')

	env = ConfigSet.ConfigSet()
	do_config = False

	# per-board configuration cache; missing means this board was never configured
	try:
		p = os.path.join(Context.out_dir, Build.CACHE_DIR,
			self.variant + Build.CACHE_SUFFIX)
		env.load(p)
	except EnvironmentError:
		raise Errors.WafError(
			'The project is not configured for board {0}: run "waf configure --board {0} [...]" first!'
			.format(self.variant))

	lock_env = ConfigSet.ConfigSet()
	try:
		lock_env.load(os.path.join(Context.top_dir, Options.lockfile))
	except EnvironmentError:
		Logs.warn('Configuring the project')
		do_config = True
	else:
		if lock_env.run_dir != Context.run_dir:
			# project moved since last configure
			do_config = True
		else:
			# hash all files recorded at configure time; any change (or a
			# missing file) forces a reconfigure
			h = 0
			for f in env.CONFIGURE_FILES:
				try:
					h = Utils.h_list((h, Utils.readf(f, 'rb')))
				except EnvironmentError:
					do_config = True
					break
			else:
				# for-else: only reached when no file was missing
				do_config = h != env.CONFIGURE_HASH

	if do_config:
		cmd = lock_env.config_cmd or 'configure'
		# temporarily restore the configure-time options for the re-run
		tmp = Options.options.__dict__
		if env.OPTIONS and sorted(env.OPTIONS.keys()) == sorted(
				tmp.keys()):
			Options.options.__dict__ = env.OPTIONS
		else:
			raise Errors.WafError(
				'The project configure options have changed: run "waf configure" again!'
			)
		try:
			run_command(cmd)
		finally:
			Options.options.__dict__ = tmp

		run_command(self.cmd)
	else:
		return execute_method(self)
def post_run(self):
	"""
	Collect the header dependencies written by the compiler into the ``.d``
	file, resolve them to nodes and record them in ``bld.node_deps``, then
	run the regular :py:meth:`Task.post_run`.
	"""
	# gccdeps disabled for this task class: plain behaviour
	if self.__class__.__name__ not in self.env.ENABLE_GCCDEPS:
		return self.no_gccdeps_post_run()

	# the .d file lives next to the first output
	dep_file = re_o.sub('.d', self.outputs[0].abspath())
	makefile_txt = Utils.readf(dep_file)

	# The compiler may emit one big rule with continuations or many small
	# "target: dep" rules. Drop every left-hand side, then undo the "\<newline>"
	# continuations; what remains is the flat dependency list of outputs[0].
	stripped = [remove_makefile_rule_lhs(ln) for ln in makefile_txt.splitlines()]
	flat = '\n'.join(stripped).replace('\\\n', '')
	tokens = [t.replace('\\ ', ' ') for t in re_splitter.split(flat.strip()) if t]

	bld = self.generator.bld

	# lazily create the shared node cache on the build context
	if not hasattr(bld, 'cached_nodes'):
		bld.cached_nodes = {}
	cached_nodes = bld.cached_nodes

	resolved = []
	for tok in tokens:
		if os.path.isabs(tok):
			node = path_to_node(bld.root, tok, cached_nodes)
		else:
			# TODO waf 1.9 - single cwd value
			base = getattr(bld, 'cwdx', bld.bldnode)
			# normalize away '.' and '..' so find_resource never sees them
			parts = [k for k in Utils.split_path(tok) if k and k != '.']
			while '..' in parts:
				idx = parts.index('..')
				if idx:
					del parts[idx]
					del parts[idx - 1]
				else:
					parts = parts[1:]
					base = base.parent
			node = path_to_node(base, parts, cached_nodes)
			tok = parts

		if not node:
			raise ValueError('could not find %r for %r' % (tok, self))
		# skip the source file itself: keeping it out of the deps lets
		# successful config tests be retrieved from the cache
		if id(node) != id(self.inputs[0]):
			resolved.append(node)

	Logs.debug('deps: gccdeps for %s returned %s', self, resolved)
	bld.node_deps[self.uid()] = resolved
	bld.raw_deps[self.uid()] = []

	# force the signature to be recomputed with the new dependencies
	try:
		del self.cache_sig
	except AttributeError:
		pass
	Task.Task.post_run(self)
def restore(self):
	"""
	Load the data from a previous run, sets the attributes listed in
	:py:const:`waflib.Build.SAVED_ATTRS`

	When the artifacts cache is enabled, the local build cache is merged with
	the global one so that include dependencies are known in advance in a
	clean build; local entries take precedence over cached ones.
	"""
	try:
		env = ConfigSet.ConfigSet(
			os.path.join(self.cache_dir, 'build.config.py'))
	except (IOError, OSError):
		# no build.config.py: nothing to set up
		pass
	else:
		if env['version'] < Context.HEXVERSION:
			raise Errors.WafError('Version mismatch! reconfigure the project')
		for t in env['tools']:
			self.setup(**t)

	dbfn = os.path.join(self.variant_dir, Context.DBFILE)
	Node.Nod3 = self.node_class
	local_data = None
	cache_data = None
	data = None

	# load the local (per-project) build cache
	try:
		local_data_str = Utils.readf(dbfn, 'rb')
		try:
			local_data = cPickle.loads(local_data_str)
		except cPickle.UnpicklingError:
			Logs.debug(
				'build: Could not unpickle the data from local build cache {}'.
				format(dbfn))
	except (IOError, EOFError):
		# handle missing file/empty file
		Logs.debug(
			'build: Could not load the local build cache {} (missing)'.format(
				dbfn))

	if local_data:
		data = local_data

	# If artifacts cache is enabled, try to load the artifacts cache, this ensures that the task's include dependencies can be known in advance in a clean build
	if self.artifacts_cache and self.is_option_true('artifacts_cache_restore'):
		try:
			dbfn = os.path.join(self.artifacts_cache, 'wafpickle', self.cmd,
				Context.DBFILE)
			cache_data_str = Utils.readf(dbfn, 'rb')
			try:
				cache_data = cPickle.loads(cache_data_str)
			except cPickle.UnpicklingError:
				Logs.debug(
					'build: Could not unpickle the data from global build cache {}'
					.format(dbfn))
		except (IOError, EOFError):
			# handle missing file/empty file
			Logs.debug(
				'build: Could not load the global build cache {} (missing)'.
				format(dbfn))

	if cache_data:
		if not local_data:
			data = cache_data
		else:
			# merge the two caches: local entries win, and dict-valued entries
			# present in both are combined with local values overriding cached ones
			merged_data = {}
			for x in local_data:
				if x not in cache_data:
					# BUGFIX: was 'merged_data[x] = local_data', which stored the
					# whole dict as every merged value instead of the entry itself
					merged_data[x] = local_data[x]
			for x in cache_data:
				if x not in local_data:
					merged_data[x] = cache_data[x]
				else:
					if isinstance(local_data[x], dict):
						cache_data[x].update(local_data[x])
						merged_data[x] = cache_data[x]
					else:
						merged_data[x] = local_data[x]
			data = merged_data
		# these two paths always come from the artifacts cache
		data['cached_engine_path'] = cache_data['cached_engine_path']
		data['cached_tp_root_path'] = cache_data['cached_tp_root_path']

	if data:
		try:
			Node.pickle_lock.acquire()
			for x in Build.SAVED_ATTRS:
				if x in data:
					setattr(self, x, data[x])
				else:
					Logs.debug(
						"build: SAVED_ATTRS key {} missing from cache".format(
							x))
		finally:
			Node.pickle_lock.release()

	self.init_dirs()
def read(self,flags='r',encoding='ISO8859-1'):
	"""
	Read and return the contents of the file represented by this node.

	:param flags: mode for opening the file, e.g. 'r' or 'rb'
	:param encoding: text encoding used when reading in text mode
	:return: file contents (str, or bytes when opened in binary mode)
	"""
	return Utils.readf(self.abspath(),flags,encoding)
def read(self, flags='r'):
	"""
	Read and return the contents of the file represented by this node.

	:param flags: mode for opening the file, e.g. 'r' or 'rb'
	:return: file contents (str, or bytes when opened in binary mode)
	"""
	return Utils.readf(self.abspath(), flags)
def post_run(self):
	"""
	Parse the compiler-emitted ``.d`` file, resolve the listed headers to
	nodes under a lock, record them in ``bld.node_deps`` and run the regular
	:py:meth:`Task.post_run`.

	Falls back to the plain implementation for unsupported compilers and
	skips the scan entirely for cached tasks. Raises ValueError when a
	dependency path cannot be resolved to a node.
	"""
	# The following code is executed by threads, it is not safe, so a lock is needed...
	if self.env.CC_NAME not in supported_compilers:
		return self.no_gccdeps_post_run()

	if getattr(self, 'cached', None):
		return Task.Task.post_run(self)

	# the .d file sits next to the first output
	name = self.outputs[0].abspath()
	name = re_o.sub('.d', name)
	txt = Utils.readf(name)
	#os.unlink(name)

	# Compilers have the choice to either output the file's dependencies
	# as one large Makefile rule:
	#
	# /path/to/file.o: /path/to/dep1.h \
	# /path/to/dep2.h \
	# /path/to/dep3.h \
	# ...
	#
	# or as many individual rules:
	#
	# /path/to/file.o: /path/to/dep1.h
	# /path/to/file.o: /path/to/dep2.h
	# /path/to/file.o: /path/to/dep3.h
	# ...
	#
	# So the first step is to sanitize the input by stripping out the left-
	# hand side of all these lines. After that, whatever remains are the
	# implicit dependencies of task.outputs[0]
	txt = '\n'.join(
		[remove_makefile_rule_lhs(line) for line in txt.splitlines()])

	# Now join all the lines together
	txt = txt.replace('\\\n', '')

	val = txt.strip()
	# NOTE(review): str.split always returns a non-empty list, so 'lst' is
	# always truthy in the guard below — kept for fidelity, confirm before removing
	lst = val.split(':')
	val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

	nodes = []
	bld = self.generator.bld
	for x in val:
		node = None
		if os.path.isabs(x):
			# 'with lock' replaces the manual acquire/try/finally release
			with lock:
				node = bld.root.find_resource(x)
		else:
			path = bld.bldnode
			# when calling find_resource, make sure the path does not begin by '..'
			x = [k for k in Utils.split_path(x) if k and k != '.']
			# the extra 'x and' guards against an IndexError on an empty path
			while lst and x and x[0] == '..':
				x = x[1:]
				path = path.parent
			with lock:
				node = path.find_resource(x)

		if not node:
			raise ValueError('could not find %r for %r' % (x, self))
		else:
			if not c_preproc.go_absolute:
				# ignore dependencies outside the project unless go_absolute is set
				if not (node.is_child_of(bld.srcnode)
						or node.is_child_of(bld.bldnode)):
					continue
		if id(node) == id(self.inputs[0]):
			# ignore the source file, it is already in the dependencies
			# this way, successful config tests may be retrieved from the cache
			continue
		nodes.append(node)

	Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
	bld.node_deps[self.uid()] = nodes
	bld.raw_deps[self.uid()] = []

	# narrowed from a bare 'except:' to match the sibling gccdeps variants;
	# only the missing-attribute case is expected here
	try:
		del self.cache_sig
	except AttributeError:
		pass
	Task.Task.post_run(self)