def process_lib(self): """ Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`. """ node = None names = [x % self.name for x in lib_patterns[self.lib_type]] for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS: if not isinstance(x, Node.Node): x = self.bld.root.find_node(x) or self.path.find_node(x) if not x: continue for y in names: node = x.find_node(y) if node: try: Utils.h_file(node.abspath()) except EnvironmentError: raise ValueError('Could not read %r' % y) break else: continue break else: raise Errors.WafError('could not find library %r' % self.name) self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node]) self.target = self.name
def dynamic_post(self):
    if not getattr(self, 'dynamic_source', None):
        return
    self.source = Utils.to_list(self.source)
    src = self.bld.path.get_bld().ant_glob(Utils.to_list(self.dynamic_source))
    for cc in src:
        # Signature for the source
        cc.sig = Utils.h_file(cc.abspath())
        # Signature for the header
        h = cc.change_ext('.h')
        h.sig = Utils.h_file(h.abspath())
    self.source.extend(src)
def load_envs(self):
    """load the data from the project directory into self.all_envs"""
    try:
        lst = Utils.listdir(self.cache_dir)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise Errors.WafError('The project was not configured: run "waf configure" first!')
        else:
            raise
    if not lst:
        raise Errors.WafError('The cache directory is empty: reconfigure the project')
    for fname in lst:
        if fname.endswith(CACHE_SUFFIX):
            env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, fname))
            name = fname[:-len(CACHE_SUFFIX)]
            self.all_envs[name] = env
            for f in env[CFG_FILES]:
                newnode = self.path.find_or_declare(f)
                try:
                    h = Utils.h_file(newnode.abspath())
                except (IOError, AttributeError):
                    Logs.error('cannot find %r' % f)
                    h = Utils.SIG_NIL
                newnode.sig = h
def runnable_status(self):
    for t in self.run_after:
        if not t.hasrun:
            return Task.ASK_LATER
    for x in self.outputs:
        x.sig = Utils.h_file(x.abspath())
    return Task.SKIP_ME
def h_file(self):
    try:
        ret = getxattr(self)
    except OSError:
        if HASH_CACHE:
            st = os.stat(self.abspath())
            mtime = st.st_mtime
            size = st.st_size
    else:
        if len(ret) == 16:
            # for build directory files
            return ret
        if HASH_CACHE:
            # check if mtime and size match to avoid re-hashing
            st = os.stat(self.abspath())
            mtime, size = ret[16:].split(SEP)
            if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size):
                return ret[:16]
    ret = Utils.h_file(self.abspath())
    if HASH_CACHE:
        val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size))
        try:
            setxattr(self, val)
        except PermissionError:
            os.chmod(self.abspath(), st.st_mode | 128)
            setxattr(self, val)
    return ret
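# The snippet above relies on module-level getxattr/setxattr helpers and the
# HASH_CACHE/SEP/TEMPLATE constants, which are not shown. A minimal sketch of
# what they might look like on Linux, using os.getxattr/os.setxattr; the
# attribute key 'user.waf.sig' is an assumption, not taken from the original.
import os

HASH_CACHE = True
SEP = b','
TEMPLATE = b'%s%d' + SEP + b'%d'  # 16-byte digest, then mtime in ms, SEP, size

def getxattr(node):
    # read the cached value from the file's extended attributes
    return os.getxattr(node.abspath(), 'user.waf.sig')  # hypothetical key

def setxattr(node, val):
    # store digest + mtime + size in the extended attributes
    os.setxattr(node.abspath(), 'user.waf.sig', val)  # hypothetical key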
def runnable_status(self):
    status = old_runnable_status(self)
    if status != RUN_ME:
        return status
    try:
        bld = self.generator.bld
        prev_sig = bld.task_sigs[self.uid()]
        if prev_sig == self.signature():
            for x in self.outputs:
                if not x.is_child_of(bld.bldnode):
                    x.sig = Utils.h_file(x.abspath())
                if not x.sig or bld.task_sigs[x.abspath()] != self.uid():
                    return RUN_ME
            return SKIP_ME
    except (OSError, IOError, KeyError, IndexError, AttributeError):
        pass
    return RUN_ME
def runnable_status(self):
    status = old_runnable_status(self)
    if status != RUN_ME:
        return status
    try:
        # by default, we check that the output nodes have the signature of the task
        # perform a second check, returning 'SKIP_ME' as we are expecting that
        # the signatures do not match
        bld = self.generator.bld
        prev_sig = bld.task_sigs[self.uid()]
        if prev_sig == self.signature():
            for x in self.outputs:
                if not x.is_child_of(bld.bldnode):
                    # special case of files created in the source directory
                    # hash them here for convenience -_-
                    x.sig = Utils.h_file(x.abspath())
                if not x.sig or bld.task_sigs[x.abspath()] != self.uid():
                    return RUN_ME
            return SKIP_ME
    except (OSError, IOError, KeyError, IndexError, AttributeError):
        pass
    return RUN_ME
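# Overrides like the two runnable_status variants above follow waf's usual
# monkey-patching idiom: keep a reference to the original method, then rebind
# the attribute on the Task class. A sketch, assuming one of the overrides
# above is defined in the same module:
from waflib import Task

old_runnable_status = Task.Task.runnable_status

def runnable_status(self):
    status = old_runnable_status(self)
    # ... extra output-signature checks as in the snippets above ...
    return status

# rebind so that every task instance picks up the override
Task.Task.runnable_status = runnable_status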
def process_lib(self):
    """
    Find the location of a foreign library. Used by
    :py:class:`waflib.Tools.ccroot.read_shlib` and
    :py:class:`waflib.Tools.ccroot.read_stlib`.
    """
    node = None
    names = [x % self.name for x in lib_patterns[self.lib_type]]
    for x in self.lib_paths + [self.path, '/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']:
        if not isinstance(x, Node.Node):
            x = self.bld.root.find_node(x) or self.path.find_node(x)
            if not x:
                continue
        for y in names:
            node = x.find_node(y)
            if node:
                node.sig = Utils.h_file(node.abspath())
                break
        else:
            continue
        break
    else:
        raise Errors.WafError('could not find library %r' % self.name)
    self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
    self.target = self.name
def load_envs(self):
    """
    The configuration command creates files of the form ``build/c4che/NAMEcache.py``.
    This method creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each
    ``NAME`` by reading those files. The config sets are then stored in the dict
    :py:attr:`waflib.Build.BuildContext.all_envs`.
    """
    node = self.root.find_node(self.cache_dir)
    if not node:
        raise Errors.WafError('The project was not configured: run "waf configure" first!')
    lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
    if not lst:
        raise Errors.WafError('The cache directory is empty: reconfigure the project')
    for x in lst:
        name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
        env = ConfigSet.ConfigSet(x.abspath())
        self.all_envs[name] = env
        for f in env[CFG_FILES]:
            newnode = self.root.find_resource(f)
            try:
                h = Utils.h_file(newnode.abspath())
            except (IOError, AttributeError):
                Logs.error('cannot find %r' % f)
                h = Utils.SIG_NIL
            newnode.sig = h
def load_envs(self):
    """
    The configuration command creates files of the form ``build/c4che/NAMEcache.py``.
    This method creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each
    ``NAME`` by reading those files. The config sets are then stored in the dict
    :py:attr:`waflib.Build.BuildContext.all_envs`.
    """
    try:
        lst = Utils.listdir(self.cache_dir)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise Errors.WafError('The project was not configured: run "waf configure" first!')
        else:
            raise
    if not lst:
        raise Errors.WafError('The cache directory is empty: reconfigure the project')
    for fname in lst:
        if fname.endswith(CACHE_SUFFIX):
            env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, fname))
            name = fname[:-len(CACHE_SUFFIX)]
            self.all_envs[name] = env
            for f in env[CFG_FILES]:
                newnode = self.root.find_resource(f)
                try:
                    h = Utils.h_file(newnode.abspath())
                except (IOError, AttributeError):
                    Logs.error('cannot find %r' % f)
                    h = Utils.SIG_NIL
                newnode.sig = h
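# To make the mechanism concrete: each c4che file is a flat key/value dump
# that ConfigSet can load back directly. A hedged example of reading one by
# hand; the path corresponds to the default (unnamed) variant and is
# illustrative only.
from waflib import ConfigSet

env = ConfigSet.ConfigSet('build/c4che/_cache.py')
print(env.CFG_FILES)  # files registered at configure time, re-hashed above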
def scan(self):
    py = self.generator.bld.py
    xfiles, xany = list(), list()
    for pat in Utils.to_list(self.xtra):
        xfiles.extend(py.ant_glob(pat, remove=False))
    for x in xfiles:
        x.sig = Utils.h_file(x.abspath())
    return xfiles, xany
def h_file(self):
    """
    See :py:func:`waflib.Utils.h_file`

    :return: a hash representing the file contents
    :rtype: string or bytes
    """
    return Utils.h_file(self.abspath())
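# For reference, waflib.Utils.h_file amounts to hashing the file contents in
# fixed-size blocks; a simplified sketch (the real implementation has
# platform-specific variants, so treat this as an approximation):
import hashlib

def h_file(fname):
    # md5 digest over the raw bytes, read in chunks to bound memory use
    m = hashlib.md5()
    with open(fname, 'rb') as f:
        while True:
            chunk = f.read(200000)
            if not chunk:
                break
            m.update(chunk)
    return m.digest()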
def post_run(self):
    nodes = self.output_dir.ant_glob('**/*', quiet=True)
    for x in nodes:
        x.sig = Utils.h_file(x.abspath())
    self.outputs += nodes
    if getattr(self.generator, 'install_path', None):
        if not getattr(self.generator, 'doxy_tar', None):
            self.generator.bld.install_files(self.generator.install_path, self.outputs, postpone=False)
    return Task.Task.post_run(self)
def post_run(self):
    html = self.generator
    nodes = html.output_dir.ant_glob('**/*', quiet=True)
    print(nodes[0:10])
    for x in nodes:
        x.sig = Utils.h_file(x.abspath())
    self.outputs += nodes
    html.bld.install_files(html.env.MDOC_OUTPUT, nodes, cwd=html.output_dir, relative_trick=True, postpone=False)
    return Task.Task.post_run(self)
def get_bld_sig_win32(self):
    try:
        return self.ctx.hash_cache[id(self)]
    except KeyError:
        pass
    except AttributeError:
        self.ctx.hash_cache = {}
    self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
    return ret
def get_bld_sig(self):
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        return Utils.h_file(self.abspath())
    try:
        # add the modification time to the signature
        return self.sig + str(os.stat(self.abspath()).st_mtime)
    except AttributeError:
        return None
def get_bld_sig(self):
    try:
        return self.cache_sig
    except AttributeError:
        pass
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        self.sig = Utils.h_file(self.abspath())
    self.cache_sig = ret = self.sig
    return ret
def hash_aux_nodes(self):
    try:
        self.aux_nodes
    except AttributeError:
        try:
            self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
        except IOError:
            return None
    return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
def run(self):
    env = self.env
    if not env['PROMPT_LATEX']:
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')
    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()
    texinputs = self.env.TEXINPUTS or ''
    self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep \
        + node.parent.get_src().abspath() + os.pathsep \
        + self.generator.path.get_src().abspath() + os.pathsep \
        + self.generator.path.get_bld().abspath() + os.pathsep \
        + texinputs + os.pathsep
    self.cwd = self.inputs[0].parent.get_bld().abspath()
    Logs.warn('first pass on %s' % self.__class__.__name__)
    self.env.env = {}
    self.env.env.update(os.environ)
    self.env.env.update({'TEXINPUTS': self.TEXINPUTS, 'TEXPICTS': self.TEXINPUTS})
    self.env.SRCFILE = srcfile
    self.env.LOF_FILES = node.change_ext('.lof').abspath()
    print('LOF_FILES:', self.env.LOF_FILES)
    #print fun
    #print self.env
    fun_clean = self.texfun_clean
    fun_clean()
    self.check_status('error when calling latex', fun())
    fun_clean()
    self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
    self.idx_node = node.change_ext('.idx')
    self.bibtopic()
    self.bibfile()
    self.bibunits()
    self.makeindex()
    hash = ''
    for i in range(10):
        prev_hash = hash
        try:
            hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
            hash = Utils.h_list(hashes)
        except (OSError, IOError):
            Logs.error('could not read the .aux files')
        if hash and hash == prev_hash:
            break
        Logs.warn('calling %s' % self.__class__.__name__)
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        #fun_clean()
        self.check_status('error when calling %s' % self.__class__.__name__, fun())
def get_bld_sig(self):
    try:
        if id(self) in self.ctx.hash_cache:
            return self.sig
    except AttributeError:
        self.ctx.hash_cache = {}
    if not self.is_bld():
        self.sig = Utils.h_file(self.abspath())
    ret = self.sig
    self.ctx.hash_cache[id(self)] = True
    return ret
def post_run(self):
    """
    The -verbose flag gives us the files created, so we have to parse the output
    """
    for x in re_classes.findall(self.out):
        if os.path.isabs(x):
            n = self.generator.bld.root.find_node(x)
        else:
            n = self.generator.bld.bldnode.find_node(x)
        if not n:
            raise ValueError('cannot find %r in %r' % (x, self.generator.bld.bldnode.abspath()))
        n.sig = Utils.h_file(n.abspath())
    self.generator.bld.task_sigs[self.uid()] = self.cache_sig
def get_bld_sig(self):
    try:
        ret = self.ctx.hash_cache[id(self)]
    except KeyError:
        pass
    except AttributeError:
        self.ctx.hash_cache = {}
    else:
        return ret
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        self.sig = Utils.h_file(self.abspath())
    self.ctx.hash_cache[id(self)] = ret = self.sig
    return ret
def scan(self):
    env = self.env
    gen = self.generator
    bld = gen.bld
    zpy = bld.zpy
    py = bld.py
    l4sh = '*/l4sh/*/Setup'
    self.inputs = bld.bldnode.ant_glob(l4sh, remove=False)
    for i in self.inputs:
        i.sig = Utils.h_file(i.abspath())
    return tuple(), tuple()
def get_bld_sig(self):
    try:
        return self.cache_sig
    except AttributeError:
        pass
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        self.sig = Utils.h_file(self.abspath())
        self.cache_sig = ret = self.sig
    else:
        # add the modification time to the signature
        self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
    return ret
def post_run(self):
    for x in re_classes.findall(self.out):
        if os.path.isabs(x):
            n = self.generator.bld.root.find_node(x)
        else:
            n = self.generator.bld.bldnode.find_node(x)
        if not n:
            raise ValueError('cannot find %r in %r' % (x, self.generator.bld.bldnode.abspath()))
        n.sig = Utils.h_file(n.abspath())
    self.generator.bld.task_sigs[self.uid()] = self.cache_sig
    out = re_verbose.sub('', self.out).strip()
    if out:
        self.generator.bld.to_log(out + '\n')
def get_bld_sig(self):
    """
    Node signature, assuming the file is in the build directory
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        self.sig = Utils.h_file(self.abspath())
    self.cache_sig = ret = self.sig
    return ret
def get_bld_sig(self, tg=None):
    """
    Node signature, assuming the file is in the build directory

    NOTE: Never do anything in this function, it is extremely performance sensitive!
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass
    if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
        self.sig = Utils.h_file(self.abspath(), tg)
    self.cache_sig = ret = self.sig
    return ret
def apply_java(self):
    Utils.def_attrs(self, jarname='', jaropts='', classpath='',
                    sourcepath='.', srcdir='.', source_re='**/*.java',
                    jar_mf_attributes={}, jar_mf_classpath=[])
    nodes_lst = []
    if not self.classpath:
        if not self.env['CLASSPATH']:
            self.env['CLASSPATH'] = '..' + os.pathsep + '.'
    else:
        self.env['CLASSPATH'] = self.classpath
    srcdir_node = self.path.find_dir(self.srcdir)
    if not srcdir_node:
        raise Errors.WafError('could not find srcdir %r' % self.srcdir)
    src_nodes = srcdir_node.ant_glob(self.source_re)
    bld_nodes = [x.change_ext('.class') for x in src_nodes]
    for x in src_nodes:
        x.sig = Utils.h_file(x.abspath())
    self.env['OUTDIR'] = [srcdir_node.get_bld().abspath()]
    tsk = self.create_task('javac')
    tsk.set_inputs(src_nodes)
    tsk.set_outputs(bld_nodes)
    if getattr(self, 'compat', None):
        tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
    if hasattr(self, 'sourcepath'):
        fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
        names = os.pathsep.join([x.srcpath() for x in fold])
    else:
        names = srcdir_node.srcpath()
    if names:
        tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
    if self.jarname:
        tsk = self.create_task('jar_create')
        tsk.set_outputs(self.path.find_or_declare(self.jarname))
        if not self.env['JAROPTS']:
            if self.jaropts:
                self.env['JAROPTS'] = self.jaropts
            else:
                dirs = '.'
                self.env['JAROPTS'] = ['-C', ''.join(self.env['OUTDIR']), dirs]
def hash_aux_nodes(self):
    """
    Returns a hash of the .aux file contents

    :rtype: string or bytes
    """
    try:
        self.aux_nodes
    except AttributeError:
        try:
            self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
        except IOError:
            return None
    return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
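# Utils.h_list combines the per-file digests from hash_aux_nodes into a single
# value; it is roughly equivalent to the following sketch:
import hashlib

def h_list(lst):
    # hash the textual representation of the list of digests
    return hashlib.md5(repr(lst).encode()).digest()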
def zpy_update(gen):
    env = gen.env
    bld = gen.bld
    zpy = bld.zpy
    py = bld.py
    # map patches to requirements
    patches = dict()
    for reqs in bld.path.ant_glob('extern/patches/*.requirements.txt'):
        node = reqs.change_ext('', ext_in='.requirements.txt')
        patches[node] = reqs.read().splitlines()
    task_last = None
    subst = dict(zpy)
    targets = Utils.to_list(gen.target)
    for req_name in targets:
        dist = bld.zippy_dist_get(req_name)
        dist_node = bld.bldnode.make_node(str(dist.key))
        for node, reqs in patches.items():
            for req in reqs:
                if dist.matches_requirement(req):
                    task = gen.create_task('ZPyTask_Patch', node)
                    task.cwd = dist_node.abspath()
                    task.dist = dist
                    node.sig = Utils.h_file(node.abspath())
                    if task_last:
                        task.set_run_after(task_last)
                    task_last = task
                    # check other patches, not additional reqs to the current patch
                    break
    for req_name in targets:
        dist = bld.zippy_dist_get(req_name)
        dist_node = bld.bldnode.make_node(str(dist.key))
        for req, rules in Task.classes['ZPyTask_Update'].x.items():
            if dist.matches_requirement(req):
                for path, (fun, kwds) in rules.items():
                    realpath = path % subst
                    finder = getattr(dist_node, kwds.get('finder', 'find_node'))
                    node = finder(realpath)
                    if node:
                        task = gen.create_task('ZPyTask_Update', node)
                        task.fun = fun
                        task.kwds = kwds
                        task.dist = dist
                        node.sig = task.uid()
                        # ...updates should run after the last patch, if any
                        if task_last:
                            task.set_run_after(task_last)
def post_run(self):
    """
    Update the output nodes' signatures while capturing a list of all output (.class) files
    """
    classes = self.generator.outdir.ant_glob('**/*.class', quiet=True)
    output_parent = self.generator.outdir.make_node('..')
    output_log = output_parent.make_node('classes.txt')
    try:
        with open(output_log.abspath(), 'w') as output_file:
            for class_node in classes:
                output_file.write('{}\n'.format(class_node.path_from(output_parent)))
                class_node.sig = Utils.h_file(class_node.abspath())  # careful with this
    except:
        self.bld.fatal('[ERROR] Unable to write output log for javac task')
    self.generator.bld.task_sigs[self.uid()] = self.cache_sig
def process_lib(self):
    node = None
    names = [x % self.name for x in lib_patterns[self.lib_type]]
    for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
        if not isinstance(x, Node.Node):
            x = self.bld.root.find_node(x) or self.path.find_node(x)
            if not x:
                continue
        for y in names:
            node = x.find_node(y)
            if node:
                node.sig = Utils.h_file(node.abspath())
                break
        else:
            continue
        break
    else:
        raise Errors.WafError('could not find library %r' % self.name)
    self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
    self.target = self.name
def run(self):
    env = self.env
    bld = self.generator.bld
    if not env['PROMPT_LATEX']:
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')
    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()
    self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
    self.aux_node = node.change_ext('.aux')
    self.idx_node = node.change_ext('.idx')
    self.cwd = self.inputs[0].parent.get_bld().abspath()
    warn('first pass on %s' % self.__class__.__name__)
    self.env.env = {}
    self.env.env.update(os.environ)
    self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
    self.env.SRCFILE = srcfile
    self.check_status('error when calling latex', fun())
    self.bibfile()
    self.bibunits()
    self.makeindex()
    hash = ''
    for i in range(10):
        prev_hash = hash
        try:
            hash = Utils.h_file(self.aux_node.abspath())
        except (OSError, IOError):
            error('could not read the aux file -> %s' % self.aux_node.abspath())
        if hash and hash == prev_hash:
            break
        warn('calling %s' % self.__class__.__name__)
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        self.check_status('error when calling %s' % self.__class__.__name__, fun())
def scan(self):
    bld = self.generator.bld
    zpy = bld.zpy
    ins = set()
    excl = [
        pth.join('bin', 'python'),
        pth.join('bin', zpy.py_ver1),
    ]
    nodes = bld.o.ant_glob('bin/**', excl=excl)
    for node in nodes:
        path = node.abspath()
        ins.add(node.path_from(bld.o))
        if not getattr(node, 'sig', None):
            node.sig = Utils.h_file(path)
    self.inputs = nodes
    # ...save the config for `install`
    zpy.ins = sorted(ins)
    zpy_file = bld.variant + Build.CACHE_SUFFIX
    zpy.store(pth.join(bld.cache_dir, zpy_file))
    if bld.cmd.endswith('install'):
        self.more_tasks = get_module('zippy.install').install(bld, False)
    return tuple(), tuple()
def load_envs(self):
    node = self.root.find_node(self.cache_dir)
    if not node:
        raise Errors.WafError('The project was not configured: run "waf configure" first!')
    lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
    if not lst:
        raise Errors.WafError('The cache directory is empty: reconfigure the project')
    for x in lst:
        name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
        env = ConfigSet.ConfigSet(x.abspath())
        self.all_envs[name] = env
        for f in env[CFG_FILES]:
            newnode = self.root.find_resource(f)
            try:
                h = Utils.h_file(newnode.abspath())
            except (IOError, AttributeError):
                Logs.error('cannot find %r' % f)
                h = Utils.SIG_NIL
            newnode.sig = h
def h_file(self):
    # similar to md5_tstamp.py, but with a 2-layer cache
    # global_cache for the build context, common for all task generators
    # local_cache for the build context proxy (one per task generator)
    #
    # the global cache is not persistent
    # the local cache is persistent and meant for partial builds
    #
    # assume all calls are made from a single thread
    #
    filename = self.abspath()
    st = os.stat(filename)
    global_cache = self.ctx.bld.hashes_md5_tstamp
    local_cache = self.ctx.hashes_md5_tstamp
    if filename in global_cache:
        # value already calculated in this build
        cval = global_cache[filename]
        # the value in the global cache is assumed to be calculated once;
        # re-verifying it could cause task generators to get distinct
        # tstamp values, thus missing rebuilds
        local_cache[filename] = cval
        return cval[1]
    if filename in local_cache:
        cval = local_cache[filename]
        if cval[0] == st.st_mtime:
            # correct value from a previous build,
            # put it in the global cache
            global_cache[filename] = cval
            return cval[1]
    ret = Utils.h_file(filename)
    local_cache[filename] = global_cache[filename] = (st.st_mtime, ret)
    return ret
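# Replacements like the one above are typically installed the same way as
# waf's bundled md5_tstamp extra: by rebinding Node.h_file. A sketch, assuming
# the function above is defined at module level:
from waflib import Node

h_file.__doc__ = Node.Node.h_file.__doc__
Node.Node.h_file = h_file  # every node signature now goes through the cache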
def get_bld_sig(self):
    """
    Node signature, assuming the file is in the build directory

    NOTE: Never do anything in this function, it is extremely performance sensitive!
    """
    try:
        return self.cache_sig
    except AttributeError:
        pass

    def should_use_parent_task_sig():
        if not self.is_bld() and hasattr(self, 'sig'):
            # If the node is outside of the bld folder and is generated, use the parent task sig
            return True
        if os.path.splitext(self.name)[1] in ['.o', '.obj']:
            # If the node is an object file, use the parent task sig if it exists
            return hasattr(self, 'sig')

    if should_use_parent_task_sig():
        self.cache_sig = self.sig
    else:
        self.cache_sig = Utils.h_file(self.abspath())
    return self.cache_sig
def h_file(self):
    return Utils.h_file(self.abspath())
def post_run(self):
    nodes = self.generator.javadoc_output.ant_glob('**')
    for x in nodes:
        x.sig = Utils.h_file(x.abspath())
    self.generator.bld.task_sigs[self.uid()] = self.cache_sig
def post_run(self):
    for n in self.generator.outdir.ant_glob('**/*.class'):
        n.sig = Utils.h_file(n.abspath())
    self.generator.bld.task_sigs[self.uid()] = self.cache_sig
def post_run():
    original_post_run()
    firmware.sig = firmware.cache_sig = Utils.h_file(firmware.abspath())
def post_run(self):
    old_post_run(self)
    for node in self.outputs:
        node.sig = node.cache_sig = Utils.h_file(node.abspath())
        self.generator.bld.task_sigs[node.abspath()] = self.uid()  # issue #1017
def run(self):
    src_dir = self.generator.bld.path
    source = self.inputs
    target = self.outputs[0].change_ext('')
    #print ("--> %s" % self.outputs)
    #print ('++> %s' % self.outputs[1])
    bld_dir = self.outputs[1]
    bld_dir.mkdir()
    obj_dir = bld_dir.make_node('_obj')
    obj_dir.mkdir()
    bld_srcs = []
    for s in source:
        # FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go
        # -> for the time being replace '/' with '_'...
        #b = bld_dir.make_node(s.path_from(src_dir))
        b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep, '_'))
        b.parent.mkdir()
        #print ('++> %s' % (s.path_from(src_dir),))
        try:
            try:
                os.remove(b.abspath())
            except Exception:
                pass
            os.symlink(s.abspath(), b.abspath())
        except Exception:
            # if there is no support for symlinks, copy the file from src
            b.write(s.read())
        bld_srcs.append(b)
        #print("--|> [%s]" % b.abspath())
        b.sig = Utils.h_file(b.abspath())
    #self.set_inputs(bld_srcs)
    #self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs
    makefile_node = bld_dir.make_node("Makefile")
    makefile_tmpl = '''\
# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

include $(GOROOT)/src/Make.inc

TARG=%(target)s
GCIMPORTS= %(gcimports)s

CGOFILES=\\
\t%(source)s

CGO_CFLAGS= %(cgo_cflags)s
CGO_LDFLAGS= %(cgo_ldflags)s

include $(GOROOT)/src/Make.pkg

%%: install %%.go
\t$(GC) $*.go
\t$(LD) -o $@ $*.$O
''' % {
        'gcimports': ' '.join(l for l in self.env['GOCFLAGS']),
        'cgo_cflags': ' '.join(l for l in self.env['GOCFLAGS']),
        'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']),
        'target': target.path_from(obj_dir),
        'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs]),
    }
    makefile_node.write(makefile_tmpl)
    #print ("::makefile: %s" % makefile_node.abspath())
    cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip()
    o = self.outputs[0].change_ext('.gomake.log')
    fout_node = bld_dir.find_or_declare(o.name)
    fout = open(fout_node.abspath(), 'w')
    rc = self.generator.bld.exec_command(
        cmd,
        stdout=fout,
        stderr=fout,
        cwd=bld_dir.abspath(),
    )
    if rc != 0:
        import waflib.Logs as msg
        msg.error('** error running [%s] (cgo-%s)' % (cmd, target))
        msg.error(fout_node.read())
        return rc
    self.generator.bld.read_stlib(
        target,
        paths=[obj_dir.abspath()],
    )
    tgt = self.outputs[0]
    if tgt.parent != obj_dir:
        install_dir = os.path.join('${LIBDIR}', tgt.parent.path_from(obj_dir))
    else:
        install_dir = '${LIBDIR}'
    #print('===> %s (%s)' % (tgt.abspath(), install_dir))
    self.generator.bld.install_files(
        install_dir,
        tgt.abspath(),
        relative_trick=False,
        postpone=False,
    )
    return rc
def sig_hook(self, node):
    node.sig = Utils.h_file(node.abspath())
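# Hooks like sig_hook are normally registered for specific file suffixes with
# waf's extension decorator, so the hash is set when the task generator
# processes matching sources. The '.bin' suffix below is an example, not taken
# from the original:
from waflib import Utils
from waflib.TaskGen import extension

@extension('.bin')
def sig_hook(self, node):
    # store the content hash so the file is treated as up to date
    node.sig = Utils.h_file(node.abspath())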
def post_run(self):
    old_post_run(self)
    for node in self.outputs:
        node.sig = Utils.h_file(node.abspath())
def post_run(self):
    nodes = self.output_dir.ant_glob('**/*', quiet=True)
    for x in nodes:
        x.sig = Utils.h_file(x.abspath())
    self.outputs += nodes
    return Task.Task.post_run(self)
def process_aar(self):
    """
    Find the Android library and unpack it so its resources can be used by other modules
    """
    if self.bld.env['PLATFORM'] not in ('android_armv7_clang', 'android_armv7_gcc', 'project_generator'):
        Logs.debug('android_library: Skipping the reading of the aar')
        return

    Utils.def_attrs(
        self,
        manifest=None,
        package='',
        classpath=[],
        native_libs=[],
        aapt_assets=None,
        aapt_resources=None,
    )

    lib_node = None
    search_paths = []

    if self.is_android_support_lib:
        android_api_level = str(self.env['ANDROID_SDK_VERSION_NUMBER'])
        Logs.debug('android_library: Searching for support library - %s - built with API %s', self.name, android_api_level)
        for path in self.lib_paths:
            if os.path.exists(path):
                entries = os.listdir(path)
                Logs.debug('android_library: All API versions installed {}'.format(entries))
                api_versions = sorted([entry for entry in entries if entry.startswith(android_api_level)])
                Logs.debug('android_library: Found versions {}'.format(api_versions))
                highest_useable_version = api_versions[-1]
                search_paths.append(os.path.join(path, highest_useable_version))
                self.android_studio_name = 'com.android.support:{}:{}'.format(self.name, highest_useable_version)
                lib_name = '{}-{}.aar'.format(self.name, highest_useable_version)
                break
        else:
            raise Errors.WafError('Unable to detect a valid useable version for Android support library %r' % self.name)
    else:
        lib_name = '{}.aar'.format(self.name)
        search_paths = self.lib_paths + [self.path]

    for path in search_paths:
        Logs.debug('android_library: Searching path {}'.format(path))
        if not isinstance(path, Node.Node):
            path = self.bld.root.find_node(path) or self.path.find_node(path)
            if not path:
                Logs.debug('android_library: Unable to find node for path')
                continue
        lib_node = path.find_node(lib_name)
        if lib_node:
            lib_node.sig = Utils.h_file(lib_node.abspath())
            break
    else:
        raise Errors.WafError('Could not find Android library %r' % self.name)

    android_cache = self.bld.get_android_cache_node()
    extraction_node = android_cache.make_node(['aar', self.name])
    if os.path.exists(extraction_node.abspath()):
        extraction_node.delete()
    extraction_node.mkdir()

    aar_file = zipfile.ZipFile(file=lib_node.abspath())
    aar_file.extractall(path=extraction_node.abspath())
    Logs.debug('android_library: AAR contents = {}'.format(aar_file.namelist()))

    # required entries from the aar
    main_jar_file = extraction_node.find_node('classes.jar')
    if not main_jar_file:
        self.bld.fatal('[ERROR] Unable to find the required classes.jar from {}'.format(lib_name))

    self.manifest = extraction_node.find_node('AndroidManifest.xml')
    if not self.manifest:
        self.bld.fatal('[ERROR] Unable to find the required AndroidManifest.xml from {}'.format(lib_name))

    self.package = get_package_name(self.manifest.abspath())
    if not self.package:
        self.bld.fatal('[ERROR] Failed to extract the package name from AndroidManifest.xml in {}'.format(lib_name))

    self.aapt_resources = extraction_node.find_dir('res')
    if not self.aapt_resources:
        self.bld.fatal('[ERROR] Unable to find the required resources directory - res - from {}'.format(lib_name))

    # optional entries from the aar
    self.aapt_assets = extraction_node.find_dir('assets')

    java_libs = extraction_node.find_dir('libs')
    if java_libs:
        self.classpath = java_libs.ant_glob('**/*.jar')

    native_lib_path = 'jni/{}'.format(self.bld.env['ANDROID_ARCH'])
    native_libs = extraction_node.find_dir(native_lib_path)
    if native_libs:
        self.native_libs_root = native_libs
        self.native_libs = native_libs.ant_glob('**/*.so')

    # create the fake tasks
    self.jar_task = self.create_task('fake_jar', [], main_jar_file)
    self.aar_task = self.create_task('fake_aar', [], lib_node)

    # task chaining
    self.aar_task.set_run_after(self.jar_task)
def post_run(self):
    if hasattr(self, 'cached'):
        # Also add the raw output path
        self.generator.env.append_unique('INCPATHS', get_azcg_output_dir_node(self.generator).abspath())
        # Also add paths we stored from prior builds
        azcg_paths = self.azcg_get('AZCG_INCPATHS', [])
        self.propagate_azcg_incpaths(azcg_paths)
        # link_inputs is a list of nodes that need to be added to the link each time
        for link_node in self.azcg_get('link_inputs', []):
            if not self.add_link_task(link_node):
                return Task.EXCEPTION
        self.generator.source += self.outputs
    else:
        # Register output files generated by the code gen execution
        self.process_generated_output()
    bld = self.generator.bld
    dep_node = None
    resolved_nodes = []
    # Resolve registered dependencies we got into dependency nodes
    for path in self.registered_dependencies:
        dep_node = self.get_node_from_dependency_path(path)
        if dep_node:
            if not (dep_node.is_child_of(bld.srcnode) or dep_node.is_child_of(bld.bldnode)):
                # System library
                continue
            if dep_node in self.inputs:
                # Self-dependency
                continue
            if dep_node in self.outputs:
                # Circular dependency
                continue
            append_to_unique_list(resolved_nodes, dep_node)
        else:
            Logs.error('az_code_gen: Unable to find dependency file as node: {}'.format(path))
    # Add azcg_deps and script nodes as dependencies
    for dep_node in itertools.chain(self.azcg_deps, self.script_nodes):
        append_to_unique_list(resolved_nodes, dep_node)
    bld.node_deps[self.uid()] = resolved_nodes
    # force waf to recompute a full signature for this task
    # (we may have new/deleted dependencies we need it to account for)
    try:
        del self.cache_sig
    except:
        pass
    self.azcg_set('AZCG_OUTPUTS', self.outputs)
    Task.Task.post_run(self)
    # Due to #includes of code generator header files, we can have an output node which is also an input node.
    # In addition, we are taking nodes that are not originally build nodes (e.g. header files) and building them,
    # which alters the signature flow in Node.get_bld_sig().
    # Task.post_run() default behavior is to set the Node.sig to the task signature, which will change our computed
    # task signature because our outputs are our inputs in some cases.
    # To mitigate this, we must restore the original signature for any file that had a non-build signature previously.
    # However, we do not want to alter the signature for files that will be consumed by later tasks.
    # Therefore, we should restore signatures on any node that is not being added to the build
    # (any output nodes not in link_task).
    for output in self.outputs:
        if output not in self.azcg_get('link_inputs', []):
            output.sig = output.cache_sig = Utils.h_file(output.abspath())
def run(self):
    """
    Runs the TeX build process.

    It may require multiple passes, depending on the usage of cross-references,
    bibliographies, or other content susceptible of needing such passes.
    The appropriate TeX compiler is called until the *.aux* files stop changing.

    Makeindex and bibtex are called if necessary.
    """
    env = self.env
    if not env['PROMPT_LATEX']:
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')
    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()
    texinputs = self.env.TEXINPUTS or ''
    self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep
    self.aux_node = node.change_ext('.aux')  # TODO waf 1.7 remove (left for compatibility)

    # important, set the cwd for everybody
    self.cwd = self.inputs[0].parent.get_bld().abspath()

    warn('first pass on %s' % self.__class__.__name__)
    self.env.env = {}
    self.env.env.update(os.environ)
    self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
    self.env.SRCFILE = srcfile
    self.check_status('error when calling latex', fun())

    self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
    self.idx_node = node.change_ext('.idx')
    self.bibfile()
    self.bibunits()
    self.makeindex()

    hash = ''
    for i in range(10):
        # prevent against infinite loops - one never knows
        # watch the contents of file.aux and stop if file.aux does not change anymore
        prev_hash = hash
        try:
            hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
            hash = Utils.h_list(hashes)
        except (OSError, IOError):
            error('could not read the .aux files')
        if hash and hash == prev_hash:
            break

        # run the command
        warn('calling %s' % self.__class__.__name__)
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        self.check_status('error when calling %s' % self.__class__.__name__, fun())
def runnable_status(self):
    for x in self.outputs:
        x.sig = Utils.h_file(x.abspath())
    return Task.SKIP_ME
def post_run(self):
    # collect headers and add them to deps
    # this is ported from msvcdeps.py
    try:
        cached_nodes = self.bld.cached_nodes
    except:
        cached_nodes = self.bld.cached_nodes = {}
    bld = self.generator.bld
    lowercase = False
    if Utils.is_win32:
        (drive, _) = os.path.splitdrive(bld.srcnode.abspath())
        lowercase = drive == drive.lower()
    correct_case_path = bld.path.abspath()
    correct_case_path_len = len(correct_case_path)
    correct_case_path_norm = os.path.normcase(correct_case_path)
    dep_node = None
    resolved_nodes = []
    for path in self.azcg_headers:
        if os.path.isabs(path):
            if Utils.is_win32:
                # Force drive letter to match conventions of main source tree
                drive, tail = os.path.splitdrive(path)
                if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
                    # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
                    path = correct_case_path + path[correct_case_path_len:]
                else:
                    # Check the drive letter
                    if lowercase and (drive != drive.lower()):
                        path = drive.lower() + tail
                    elif (not lowercase) and (drive != drive.upper()):
                        path = drive.upper() + tail
            dep_node = path_to_node(bld.root, path, cached_nodes)
        else:
            base_node = bld.bldnode
            # when calling find_resource, make sure the path does not begin with '..'
            path = [k for k in Utils.split_path(path) if k and k != '.']
            while path[0] == '..':
                path = path[1:]
                base_node = base_node.parent
            dep_node = path_to_node(base_node, path, cached_nodes)
        if dep_node:
            if not (dep_node.is_child_of(bld.srcnode) or dep_node.is_child_of(bld.bldnode)):
                # System library
                Logs.debug('az_code_gen: Ignoring system include %r' % dep_node)
                continue
            if dep_node in self.inputs:
                # Self-dependency
                continue
            if dep_node in self.outputs:
                # Circular dependency
                continue
            resolved_nodes.append(dep_node)
        else:
            Logs.error('az_code_gen: Unable to find dependency file as node: {}'.format(path))
    bld.node_deps[self.uid()] = resolved_nodes
    # force waf to recompute a full signature for this task
    # (we may have new/deleted dependencies we need it to account for)
    try:
        del self.cache_sig
    except:
        pass
    self.azcg_set('AZCG_OUTPUTS', self.outputs)
    Task.Task.post_run(self)
    # Due to #includes of code generator header files, we can have an output node which is also an input node.
    # In addition, we are taking nodes that are not originally build nodes (e.g. header files) and building them,
    # which alters the signature flow in Node.get_bld_sig().
    # Task.post_run() default behavior is to set the Node.sig to the task signature, which will change our computed
    # task signature because our outputs are our inputs in some cases.
    # To mitigate this, we must restore the original signature for any file that had a non-build signature previously.
    # However, we do not want to alter the signature for files that will be consumed by later tasks.
    # Therefore, we should restore signatures on any node that is not being added to the build
    # (any output nodes not in link_task).
    for output in self.outputs:
        if output not in self.azcg_get('link_inputs', []):
            output.sig = output.cache_sig = Utils.h_file(output.abspath())
def process_py(self, node):
    node.sig = Utils.h_file(node.abspath())
def process_aar(self):
    '''
    Find the Android library and unpack it so its resources can be used by other modules
    '''
    def _could_not_find_lib_error():
        raise Errors.WafError('[ERROR] Could not find Android library %r' % self.name)

    bld = self.bld
    platform = bld.env['PLATFORM']

    # the android studio project generation also requires this to run in order for the
    # aar dependencies to get added to gradle correctly
    if not (bld.is_android_platform(platform) or bld.cmd == 'android_studio'):
        Logs.debug('android_library: Skipping the reading of the aar')
        return

    Utils.def_attrs(
        self,
        manifest=None,
        package='',
        classpath=[],
        native_libs=[],
        aapt_assets=None,
        aapt_resources=None,
    )

    group = self.group_id
    version = self.version
    search_paths = self.paths

    android_cache = bld.get_android_cache_node()
    aar_cache = android_cache.make_node('aar')
    aar_cache.mkdir()

    Logs.debug('android_library: Processing Android library %s', self.name)
    lib_node = None
    if search_paths:
        aar_filename = '{}.aar'.format(self.name)
        for path in search_paths:
            if not isinstance(path, Node.Node):
                path = bld.root.find_node(path) or self.path.find_node(path)
                if not path:
                    Logs.debug('android_library: Unable to find node for path %s', path)
                    continue
            Logs.debug('android_library: Searching path {}'.format(path.abspath()))
            lib_node = path.find_node(aar_filename)
            if lib_node:
                break
        else:
            _could_not_find_lib_error()
        self.android_studio_name = 'file:{}'.format(lib_node.abspath()).replace('\\', '/')
    else:
        file_url, aar_filename = search_maven_repos(bld, self.name, group, version)
        if not (file_url and aar_filename):
            _could_not_find_lib_error()
        if file_url.startswith('file:'):
            local_path = file_url[5:]
            lib_node = bld.root.find_node(local_path)
            if not lib_node:
                _could_not_find_lib_error()
        else:
            lib_node = aar_cache.find_node(aar_filename)
            if not lib_node:
                lib_node = aar_cache.make_node(aar_filename)
                Logs.debug('android_library: Downloading %s => %s', file_url, lib_node.abspath())
                try:
                    url_opener = urllib.FancyURLopener()
                    url_opener.retrieve(file_url, filename=lib_node.abspath())
                except:
                    bld.fatal('[ERROR] Failed to download Android library {} from {}.'.format(self.name, file_url))
        if not version:
            version = os.path.splitext(aar_filename)[0].split('-')[-1]
        self.android_studio_name = '{}:{}:{}'.format(group, self.name, version)

    lib_node.sig = Utils.h_file(lib_node.abspath())

    folder_name = os.path.splitext(aar_filename)[0]
    extraction_node = aar_cache.make_node(folder_name)
    if os.path.exists(extraction_node.abspath()):
        extraction_node.delete()
    extraction_node.mkdir()

    aar_file = zipfile.ZipFile(file=lib_node.abspath())
    aar_file.extractall(path=extraction_node.abspath())
    Logs.debug('android_library: AAR contents = {}'.format(aar_file.namelist()))

    # required entries from the aar
    main_jar_file = extraction_node.find_node('classes.jar')
    if not main_jar_file:
        self.bld.fatal('[ERROR] Unable to find the required classes.jar from {}'.format(aar_filename))

    self.manifest = extraction_node.find_node('AndroidManifest.xml')
    if not self.manifest:
        self.bld.fatal('[ERROR] Unable to find the required AndroidManifest.xml from {}'.format(aar_filename))

    self.package = get_package_name(self.manifest.abspath())
    if not self.package:
        self.bld.fatal('[ERROR] Failed to extract the package name from AndroidManifest.xml in {}'.format(aar_filename))

    self.aapt_resources = extraction_node.find_dir('res')
    if not self.aapt_resources:
        self.bld.fatal('[ERROR] Unable to find the required resources directory - res - from {}'.format(aar_filename))

    # optional entries from the aar
    self.aapt_assets = extraction_node.find_dir('assets')

    java_libs = extraction_node.find_dir('libs')
    if java_libs:
        self.classpath = java_libs.ant_glob('**/*.jar')

    native_lib_path = 'jni/{}'.format(self.bld.env['ANDROID_ARCH'])
    native_libs = extraction_node.find_dir(native_lib_path)
    if native_libs:
        self.native_libs_root = native_libs
        self.native_libs = native_libs.ant_glob('**/*.so')

    # create the fake tasks
    self.jar_task = self.create_task('fake_jar', [], main_jar_file)
    self.aar_task = self.create_task('fake_aar', [], lib_node)

    # task chaining
    self.aar_task.set_run_after(self.jar_task)
def run(self):
    """Runs the HALCoGen code generator

    #. Copy the HALCoGen sources to the build directory
    #. Run HALCoGen
    #. Get the CPU clock frequency that is set by HALCoGen from FreeRTOSConfig.h
       and copy it to our configuration file
    #. Copy the HALCoGen sources again to the output directory. This needs to
       be done, as HALCoGen re-writes the timestamp of the HALCoGen file when
       the tool is run. With this step the sources and build directory are
       synchronized.
    """
    for src, tgt in zip(self.inputs[:2], self.outputs[:2]):
        shutil.copy2(src.abspath(), tgt.abspath())
    cmd = Utils.subst_vars(
        "${HALCOGEN} ${HALCOGEN_SRC_INPUT} " + self.outputs[0].abspath(),
        self.generator.env,
    ).split()
    try:
        self.generator.bld.exec_command(cmd)
    except Errors.WafError:
        self.generator.bld.fatal("Could not generate HAL sources.")
    output_dir = self.outputs[0].parent
    generated_os_sources = [
        output_dir.find_node(i)
        for i in self.remove_files  # pylint: disable=no-member
    ]
    # get clock info from the generated source 'FreeRTOSConfig.h'
    freertos_config_file = self.remove_files.index(  # pylint: disable=no-member
        os.path.join("include", "FreeRTOSConfig.h"))
    if not freertos_config_file:
        self.generator.bld.fatal("Could not find 'FreeRTOSConfig.h'.")
    freertos_config = generated_os_sources[freertos_config_file].read()
    frequency = None
    for line in freertos_config.splitlines():
        mach = re.search(
            r"#define configCPU_CLOCK_HZ.*\( \( unsigned portLONG \) ([0-9]+) \)",
            line,
        )
        if mach:
            frequency = mach.group(1)
            break
    if not frequency:
        self.generator.bld.fatal("Could not determine clock frequency.")
    define_guard = (self.outputs[3].name.replace(self.outputs[3].suffix(), "").upper() + "_H_")
    self.outputs[3].write(f"#ifndef {define_guard}\n"
                          f"#define {define_guard}\n"
                          f"#define HALCOGEN_CPU_CLOCK_HZ ({frequency})\n"
                          f"#endif /* {define_guard} */\n")
    # remove unwanted generated sources
    for src in generated_os_sources:
        src.delete()
    startup_node = self.generator.bld.root.find_node(
        os.path.join(self.generator.path.get_bld().abspath(), "source", "HL_sys_startup.c"))
    if not startup_node:
        self.generator.bld.fatal("Could not find startup source.")
    hl_sys_startup_file = self.outputs.index(startup_node)  # pylint: disable=no-member
    if not hl_sys_startup_file:
        self.generator.bld.fatal("Could not find 'HL_sys_startup.c'.")
    generated_file_hash = binascii.hexlify(
        Utils.h_file(self.outputs[hl_sys_startup_file].abspath()))
    known_hash = bytes(self.generator.startup_hash.read().strip(), encoding="utf-8")
    if generated_file_hash != known_hash:
        self.generator.bld.fatal(
            "The auto-generated file 'HL_sys_startup.c' has changed due to "
            "a configuration change in the HALCoGen project.\nThe "
            f"expected hash is {known_hash} but the generated hash is "
            f"{generated_file_hash}.\nCompare '{startup_node}' with "
            "'fstartup.c' and see if changes need to be applied to "
            "'fstartup.c'. If everything is changed as needed, update "
            f"the hash in '{self.generator.startup_hash}' and build "
            "again.\nFor more information see the documentation "
            "(Configuration/HALCoGen).")
    # HALCoGen alters the timestamp hardcoded in the copied file after it
    # generated the sources; we do not want that, therefore overwrite the
    # altered HALCoGen files with the "original" ones
    for src, tgt in zip(self.inputs[:2], self.outputs[:2]):
        shutil.copy2(src.abspath(), tgt.abspath())
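# The startup-file guard above boils down to comparing an md5 of the generated
# file against a stored hex digest. The same check in isolation; both paths
# are examples, not taken from the original build:
import binascii
from waflib import Utils

generated = binascii.hexlify(Utils.h_file('build/source/HL_sys_startup.c'))
known = open('startup_hash.txt', 'rb').read().strip()
if generated != known:
    raise SystemExit('HL_sys_startup.c changed: review it and update the hash')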
def post_run(self):
    for x in self.outputs:
        x.sig = Utils.h_file(x.abspath())
def post_run(self):
    nodes = self.output_dir.ant_glob('**/*', quiet=True)
    for node in nodes:
        node.sig = Utils.h_file(node.abspath())
    self.add_install()
    return Task.Task.post_run(self)
class BuildContext(Context.Context):
    '''executes the build'''
    cmd = 'build'
    variant = ''

    def __init__(self, **kw):
        super(BuildContext, self).__init__(**kw)
        self.top_dir = kw.get('top_dir', Context.top_dir)
        self.run_dir = kw.get('run_dir', Context.run_dir)
        self.post_mode = POST_AT_ONCE
        self.out_dir = kw.get('out_dir', Context.out_dir)
        self.cache_dir = kw.get('cache_dir', None)
        if not self.cache_dir:
            self.cache_dir = self.out_dir + os.sep + CACHE_DIR
        self.all_envs = {}
        for v in 'task_sigs node_deps raw_deps'.split():
            setattr(self, v, {})
        self.cache_dir_contents = {}
        self.task_gen_cache_names = {}
        self.launch_dir = Context.launch_dir
        self.targets = Options.options.targets
        self.keep = Options.options.keep
        self.cache_global = Options.cache_global
        self.nocache = Options.options.nocache
        self.progress_bar = Options.options.progress_bar
        self.deps_man = Utils.defaultdict(list)
        self.current_group = 0
        self.groups = []
        self.group_names = {}

    def get_variant_dir(self):
        if not self.variant:
            return self.out_dir
        return os.path.join(self.out_dir, self.variant)
    variant_dir = property(get_variant_dir, None)

    def __call__(self, *k, **kw):
        kw['bld'] = self
        ret = TaskGen.task_gen(*k, **kw)
        self.task_gen_cache_names = {}
        self.add_to_group(ret, group=kw.get('group', None))
        return ret

    def __copy__(self):
        raise Errors.WafError('build contexts are not supposed to be copied')

    def install_files(self, *k, **kw):
        pass

    def install_as(self, *k, **kw):
        pass

    def symlink_as(self, *k, **kw):
        pass

    def load_envs(self):
        try:
            lst = Utils.listdir(self.cache_dir)
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise Errors.WafError('The project was not configured: run "waf configure" first!')
            else:
                raise
        if not lst:
            raise Errors.WafError('The cache directory is empty: reconfigure the project')
        for fname in lst:
            if fname.endswith(CACHE_SUFFIX):
                env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, fname))
                name = fname[:-len(CACHE_SUFFIX)]
                self.all_envs[name] = env
                for f in env[CFG_FILES]:
                    newnode = self.root.find_resource(f)
                    try:
                        h = Utils.h_file(newnode.abspath())
                    except (IOError, AttributeError):
                        Logs.error('cannot find %r' % f)
                        h = Utils.SIG_NIL
                    newnode.sig = h
def run(self):
    """
    Runs the TeX build process.

    It may require multiple passes, depending on the usage of cross-references,
    bibliographies, or other content susceptible of needing such passes.
    The appropriate TeX compiler is called until the *.aux* files stop changing.

    Makeindex and bibtex are called if necessary.
    """
    env = self.env
    if not env['PROMPT_LATEX']:
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')
    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()
    texinputs = self.env.TEXINPUTS or ''
    self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep

    # important, set the cwd for everybody
    self.cwd = self.inputs[0].parent.get_bld().abspath()

    Logs.warn('first pass on %s' % self.__class__.__name__)

    hash = ''
    try:
        self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
        hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
        hash = Utils.h_list(hashes)
    except (OSError, IOError):
        pass

    hash_bcf = ''
    try:
        self.bcf_node = node.change_ext('.bcf')
        hash_bcf = Utils.h_list([Utils.h_file(self.bcf_node.abspath())])
    except (OSError, IOError):
        pass

    hash_bbl = ''
    try:
        self.bbl_node = node.change_ext('.bbl')
        hash_bbl = Utils.h_list([Utils.h_file(self.bbl_node.abspath())])
    except (OSError, IOError):
        pass

    hash_idx = ''
    try:
        self.idx_node = node.change_ext('.idx')
        hash_idx = Utils.h_list([Utils.h_file(self.idx_node.abspath())])
    except (OSError, IOError):
        pass

    hash_ind = ''
    try:
        self.ind_node = node.change_ext('.ind')
        hash_ind = Utils.h_list([Utils.h_file(self.ind_node.abspath())])
    except (OSError, IOError):
        pass

    hash_nlo = ''
    try:
        self.nlo_node = node.change_ext('.nlo')
        hash_nlo = Utils.h_list([Utils.h_file(self.nlo_node.abspath())])
    except (OSError, IOError):
        pass

    hash_nls = ''
    self.nls_node = node.change_ext('.nls')
    try:
        hash_nls = Utils.h_list([Utils.h_file(self.nls_node.abspath())])
    except (OSError, IOError):
        pass

    self.env.env = {}
    self.env.env.update(os.environ)
    self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
    self.env.SRCFILE = srcfile
    self.check_status('error when calling latex', fun())

    self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
    self.idx_node = node.change_ext('.idx')
    self.bibtopic()
    self.bibfile()

    prev_bcf_hash = hash_bcf
    try:
        self.bcf_node = node.change_ext('.bcf')
        hash_bcf = Utils.h_list([Utils.h_file(self.bcf_node.abspath())])
    except (OSError, IOError):
        Logs.error('could not read the .bcf file')
    if hash_bcf and hash_bcf != prev_bcf_hash:
        self.bibunits()
    else:
        Logs.warn('%s unchanged, not calling bibliography engine' % (self.bcf_node))

    prev_idx_hash = hash_idx
    try:
        self.idx_node = node.change_ext('.idx')
        hash_idx = Utils.h_list([Utils.h_file(self.idx_node.abspath())])
    except (OSError, IOError):
        Logs.error('could not read the .idx file')
    if hash_idx and hash_idx != prev_idx_hash:
        self.makeindex()
    else:
        Logs.warn('%s unchanged, not calling indexing engine' % (self.idx_node))

    prev_nlo_hash = hash_nlo
    try:
        self.nlo_node = node.change_ext('.nlo')
        hash_nlo = Utils.h_list([Utils.h_file(self.nlo_node.abspath())])
    except (OSError, IOError):
        Logs.error('could not read the .nlo file')
    if hash_nlo and hash_nlo != prev_nlo_hash:
        self.makenomen()
    else:
        Logs.warn('%s unchanged, not calling nomenclature engine' % (self.nlo_node))

    for i in range(10):
        # prevent against infinite loops - one never knows
        # watch the contents of file.aux and stop if file.aux does not change anymore
        prev_hash = hash
        try:
            hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
            hash = Utils.h_list(hashes)
        except (OSError, IOError):
            Logs.error('could not read the .aux files')
        prev_hash_bbl = hash_bbl
        try:
            hash_bbl = Utils.h_list([Utils.h_file(self.bbl_node.abspath())])
        except (OSError, IOError):
            Logs.error('could not read the .bbl file')
        prev_hash_ind = hash_ind
        try:
            hash_ind = Utils.h_list([Utils.h_file(self.ind_node.abspath())])
        except (OSError, IOError):
            Logs.error('could not read the .ind file')
        prev_hash_nls = hash_nls
        try:
            hash_nls = Utils.h_list([Utils.h_file(self.nls_node.abspath())])
        except (OSError, IOError):
            Logs.error('could not read the .nls file')
        if hash and hash == prev_hash:
            Logs.warn('.aux files unchanged')
        if hash_bbl and hash_bbl == prev_hash_bbl:
            Logs.warn('%s unchanged' % (self.bbl_node))
        if hash_ind and hash_ind == prev_hash_ind:
            Logs.warn('%s unchanged' % (self.ind_node))
        if hash_nls and hash_nls == prev_hash_nls:
            Logs.warn('%s unchanged' % (self.nls_node))
        if (hash and hash == prev_hash
                and hash_bbl and hash_bbl == prev_hash_bbl
                and hash_ind and hash_ind == prev_hash_ind
                and hash_nls and hash_nls == prev_hash_nls):
            Logs.warn('Breaking loop now.')
            break

        # run the command
        Logs.warn('calling %s' % self.__class__.__name__)
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        self.check_status('error when calling %s' % self.__class__.__name__, fun())