def iapply_intltool_in_f(self):
    """Create intltool-merge tasks for each ``.in`` source file (legacy variant)."""
    # intltool handles the sources itself, so disable the default processing
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    # provide a sensible default locale directory when none is configured
    if not self.env.LOCALEDIR:
        self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
    for entry in self.to_list(self.source):
        node = self.path.find_resource(entry)
        podir = getattr(self, 'podir', 'po')
        podirnode = self.path.find_dir(podir)
        if not podirnode:
            error("could not find the podir %r" % podir)
            continue
        cache = getattr(self, 'intlcache', '.intlcache')
        self.env['INTLCACHE'] = os.path.join(self.path.bldpath(), podir, cache)
        self.env['INTLPODIR'] = podirnode.bldpath()
        self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
        task = self.create_task('intltool', node, node.change_ext(''))
        inst = getattr(self, 'install_path', '${LOCALEDIR}')
        if inst:
            self.bld.install_files(inst, task.outputs)
def addlines(self, node):
    """Push the preprocessed lines of a header onto the parse queue, caching the result."""
    # relative includes are resolved against this header's directory
    self.currentnode_stack.append(node.parent)
    filepath = node.abspath()

    self.count_files += 1
    if self.count_files > recursion_limit:
        raise PreprocError("recursion limit exceeded")

    cache = self.parse_cache
    debug('preproc: reading file %r', filepath)
    try:
        cached = cache[filepath]
    except KeyError:
        pass
    else:
        # cache hit: reuse the lines filtered previously
        self.lines.extend(cached)
        return

    try:
        filtered = filter_comments(filepath)
        filtered.append((POPFILE, ''))
        filtered.reverse()
        cache[filepath] = filtered
        self.lines.extend(filtered)
    except IOError:
        raise PreprocError("could not read the file %s" % filepath)
    except Exception:
        # a header that cannot be parsed is only reported in verbose mode
        if Logs.verbose > 0:
            error("parsing %s failed" % filepath)
            traceback.print_exc()
def bibfile(self):
    """
    Parse the *.aux* file to find a bibfile to process.
    If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    try:
        contents = self.aux_node.read()
    except (OSError, IOError):
        error('error bibtex scan')
        return
    if not g_bibtex_re.findall(contents):
        # no bibliography data in the .aux file
        return
    warn('calling bibtex')
    environ = {}
    environ.update(os.environ)
    environ.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
    self.env.env = environ
    self.env.SRCFILE = self.aux_node.name[:-4]
    self.check_status('error when calling bibtex', self.bibtex_fun())
def apply_intltool_in_f(self):
    """Create intltool-merge tasks to translate the ``.in`` source files."""
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass

    self.ensure_localedir()

    podir = getattr(self, 'podir', '.')
    podirnode = self.path.find_dir(podir)
    if not podirnode:
        error("could not find the podir %r" % podir)
        return

    cache = getattr(self, 'intlcache', '.intlcache')
    self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
    self.env.INTLPODIR = podirnode.bldpath()
    self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))

    # waf adds the cache option itself; a user-supplied -c would conflict
    if '-c' in self.env.INTLFLAGS:
        self.bld.fatal('Redundant -c flag in intltool task %r' % self)

    style = getattr(self, 'style', None)
    if style:
        try:
            style_flag = _style_flags[style]
        except KeyError:
            self.bld.fatal('intltool_in style "%s" is not valid' % style)
        self.env.append_unique('INTLFLAGS', [style_flag])

    for entry in self.to_list(self.source):
        node = self.path.find_resource(entry)
        task = self.create_task('intltool', node, node.change_ext(''))
        inst = getattr(self, 'install_path', None)
        if inst:
            self.add_install_files(install_to=inst, install_from=task.outputs)
def install_pyfile(self, node, install_from=None):
    """
    Install a python file and manage its byte-compiled companions (.pyc/.pyo).

    :param node: python source to install
    :param install_from: base node to compute the relative install path (defaults to the parent)
    """
    from_node = install_from or node.parent
    tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
    path = tsk.get_install_path()
    if self.bld.is_install < 0:
        # uninstall: drop the byte-compiled files alongside the source
        info("+ removing byte compiled python files")
        for x in 'co':
            try:
                os.remove(path + x)
            except OSError:
                pass
    if self.bld.is_install > 0:
        try:
            st1 = os.stat(path)
        except OSError:
            # bugfix: this was a bare 'except' that only logged, then fell
            # through and crashed on the unbound 'st1' below
            error('The python file is missing, this should not happen')
            return
        for x in ['c', 'o']:
            do_inst = self.env['PY' + x.upper()]
            try:
                st2 = os.stat(path + x)
            except OSError:
                pass
            else:
                # skip recompilation when the compiled file is up to date
                if st1.st_mtime <= st2.st_mtime:
                    do_inst = False
            if do_inst:
                lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
                (a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
                argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
                info('+ byte compiling %r' % (path + x))
                env = self.env.env or None
                ret = Utils.subprocess.Popen(argv, env=env).wait()
                if ret:
                    raise Errors.WafError('py%s compilation failed %r' % (x, path))
def install_pyfile(self, node, install_from=None):
    """
    Install a python file and manage its byte-compiled companions (.pyc/.pyo).

    :param node: python source to install
    :param install_from: base node to compute the relative install path (defaults to the parent)
    """
    from_node = install_from or node.parent
    tsk = self.bld.install_as(self.install_path + "/" + node.path_from(from_node), node, postpone=False)
    path = tsk.get_install_path()
    if self.bld.is_install < 0:
        # uninstall: drop the byte-compiled files alongside the source
        info("+ removing byte compiled python files")
        for x in "co":
            try:
                os.remove(path + x)
            except OSError:
                pass
    if self.bld.is_install > 0:
        try:
            st1 = os.stat(path)
        except OSError:
            # bugfix: this was a bare 'except' that only logged, then fell
            # through and crashed on the unbound 'st1' below
            error("The python file is missing, this should not happen")
            return
        for x in ["c", "o"]:
            do_inst = self.env["PY" + x.upper()]
            try:
                st2 = os.stat(path + x)
            except OSError:
                pass
            else:
                # skip recompilation when the compiled file is up to date
                if st1.st_mtime <= st2.st_mtime:
                    do_inst = False
            if do_inst:
                lst = (x == "o") and [self.env["PYFLAGS_OPT"]] or []
                (a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
                argv = self.env["PYTHON"] + lst + ["-c", INST, a, b, c]
                info("+ byte compiling %r" % (path + x))
                ret = Utils.subprocess.Popen(argv).wait()
                if ret:
                    raise Errors.WafError("py%s compilation failed %r" % (x, path))
def apply_intltool_in_f(self):
    """Translate ``.in`` sources with intltool-merge and install the results."""
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    self.ensure_localedir()

    folder = getattr(self, 'podir', '.')
    folder_node = self.path.find_dir(folder)
    if not folder_node:
        error("could not find the podir %r" % folder)
        return

    cache_name = getattr(self, 'intlcache', '.intlcache')
    self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), folder, cache_name)]
    self.env.INTLPODIR = folder_node.bldpath()
    self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
    # waf passes the cache flag itself; a duplicate -c is a hard error
    if '-c' in self.env.INTLFLAGS:
        self.bld.fatal('Redundant -c flag in intltool task %r' % self)

    mode = getattr(self, 'style', None)
    if mode:
        try:
            mode_flag = _style_flags[mode]
        except KeyError:
            self.bld.fatal('intltool_in style "%s" is not valid' % mode)
        self.env.append_unique('INTLFLAGS', [mode_flag])

    for src in self.to_list(self.source):
        node = self.path.find_resource(src)
        task = self.create_task('intltool', node, node.change_ext(''))
        dest = getattr(self, 'install_path', None)
        if dest:
            self.bld.install_files(dest, task.outputs)
def bibfile(self):
    """
    Parse the *.aux* files to find a bibfile to process.
    If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    need_bibtex = False
    try:
        for aux in self.aux_nodes:
            if g_bibtex_re.findall(aux.read()):
                need_bibtex = True
                break
    except (OSError, IOError):
        error('error bibtex scan')
        return
    # only the main .aux file needs to be processed
    if not need_bibtex:
        return
    warn('calling bibtex')
    self.env.env = {}
    self.env.env.update(os.environ)
    self.env.env.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
    self.env.SRCFILE = self.aux_nodes[0].name[:-4]
    self.check_status('error when calling bibtex', self.bibtex_fun())
def addlines(self, node):
    """
    Add the lines of a header to the list of preprocessor lines to parse.

    :param node: header file
    :type node: :py:class:`waflib.Node.Node`
    """
    # relative includes found later are resolved against this directory
    self.currentnode_stack.append(node.parent)
    filepath = node.abspath()

    self.count_files += 1
    if self.count_files > recursion_limit:
        raise PreprocError("recursion limit exceeded")

    pc = self.parse_cache
    debug('preproc: reading file %r', filepath)
    try:
        lns = pc[filepath]
    except KeyError:
        pass
    else:
        # cache hit: reuse the lines filtered previously
        self.lines.extend(lns)
        return

    try:
        lines = filter_comments(filepath)
        lines.append((POPFILE, ''))
        lines.reverse()
        # cache the filtered lines for the next inclusion of this header
        pc[filepath] = lines
        self.lines.extend(lines)
    except IOError:
        raise PreprocError("could not read the file %s" % filepath)
    except Exception:
        # unparsable headers are only reported in verbose mode
        if Logs.verbose > 0:
            error("parsing %s failed" % filepath)
            traceback.print_exc()
def add_moc_tasks(self):
    """
    Create moc tasks for the ``.moc`` files recorded in ``bld.raw_deps[self.uid()]``.
    """
    node = self.inputs[0]
    bld = self.generator.bld
    try:
        # compute the signature once to know whether there are moc files to create
        self.signature()
    except KeyError:
        # the moc file may be referenced somewhere else
        pass
    else:
        # the signature must be recomputed after the moc tasks are added
        delattr(self, 'cache_sig')
    moctasks = []
    mocfiles = []
    try:
        tmp_lst = bld.raw_deps[self.uid()]
        bld.raw_deps[self.uid()] = []
    except KeyError:
        tmp_lst = []
    for d in tmp_lst:
        if not d.endswith('.moc'):
            continue
        if d in mocfiles:
            # process each base.moc only once
            error("paranoia owns")
            continue
        mocfiles.append(d)
        ext = ''
        try:
            ext = Options.options.qt_header_ext
        except AttributeError:
            pass
        if not ext:
            # probe the known header extensions for a matching header
            base2 = d[:-4]
            for exth in MOC_H:
                k = node.parent.find_node(base2 + exth)
                if k:
                    break
            else:
                raise Errors.WafError('no header found for %r which is a moc file' % d)
        # NOTE(review): when qt_header_ext is set, 'base2'/'exth' appear to come
        # from a previous iteration or be undefined -- confirm intended
        h_node = node.parent.find_resource(base2 + exth)
        m_node = h_node.change_ext('.moc')
        bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node
        # create the moc task and inject it into the build pipeline
        task = Task.classes['moc'](env=self.env, generator=self.generator)
        task.set_inputs(h_node)
        task.set_outputs(m_node)
        gen = bld.producer
        gen.outstanding.insert(0, task)
        gen.total += 1
        moctasks.append(task)
    # keep only the moc files in the raw deps to save space
    tmp_lst = bld.raw_deps[self.uid()] = mocfiles
    lst = bld.node_deps.get(self.uid(), ())
    for d in lst:
        name = d.name
        if name.endswith('.moc'):
            task = Task.classes['moc'](env=self.env, generator=self.generator)
            task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(), name)])
            task.set_outputs(d)
            gen = bld.producer
            gen.outstanding.insert(0, task)
            gen.total += 1
            moctasks.append(task)
    self.run_after.update(set(moctasks))
    self.moc_done = 1
def make_package(bld, name, use=''):
    """
    Declare the build for a package laid out as inc/, src/, dict/, test(s)/ and apps/.

    :param bld: the build context
    :param name: package name, also used as the library/target name
    :param use: extra 'use' dependencies; 'ROOTSYS' is always appended
    """
    use = to_list(use) + ['ROOTSYS']
    includes = []
    headers = []
    source = []
    incdir = bld.path.find_dir('inc')
    srcdir = bld.path.find_dir('src')
    dictdir = bld.path.find_dir('dict')
    # unit tests may live in either test/ or tests/
    testsrc = bld.path.ant_glob('test/test_*.cxx') + bld.path.ant_glob('tests/test_*.cxx')
    appsdir = bld.path.find_dir('apps')
    if incdir:
        headers += incdir.ant_glob(name + '/*.h')
        includes += ['inc']
        bld.env['INCLUDES_'+name] = [incdir.abspath()]
    if headers:
        bld.install_files('${PREFIX}/include/%s' % name, headers)
    if srcdir:
        source += srcdir.ant_glob('*.cxx')
    if dictdir:
        # the ROOT dictionary is generated from the public headers and LinkDef.h
        if not headers:
            error('No header files for ROOT dictionary "%s"' % name)
        linkdef = dictdir.find_resource('LinkDef.h')
        bld.gen_rootcling_dict(name, linkdef, headers = headers, includes = includes, use = use)
        source.append(name+'Dict.cxx')
    if incdir and srcdir:
        # the shared library for the package itself
        bld(features = 'cxx cxxshlib', name = name, source = source, target = name, includes = 'inc', export_includes = 'inc', use=use)
    if testsrc:
        # one test program per test_*.cxx; not installed
        for test_main in testsrc:
            bld.program(features = 'test', source = [test_main], target = test_main.name.replace('.cxx',''), install_path = None, includes = 'inc', use = use + [name])
    if appsdir:
        # one installed program per source file in apps/
        for app in appsdir.ant_glob('*.cxx'):
            bld.program(source = [app], target = app.name.replace('.cxx',''), includes = 'inc', use = use + [name])
def apply_incpaths_ml(self):
    """Resolve the include directory names into nodes for the ocaml build."""
    resolved = self.incpaths_lst
    for dirname in self.includes.split():
        node = self.path.find_dir(dirname)
        if not node:
            error("node not found: " + str(dirname))
            continue
        if node not in resolved:
            resolved.append(node)
        self.bld_incpaths_lst.append(node)
def bibfile(self):
    """Scan the *.aux* file and run bibtex when bibliography data is present."""
    try:
        text = self.aux_node.read()
    except (OSError, IOError):
        error('error bibtex scan')
    else:
        hits = g_bibtex_re.findall(text)
        if hits:
            warn('calling bibtex')
            # bibtex needs BIBINPUTS/BSTINPUTS on top of the regular environment
            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
            self.env.SRCFILE = self.aux_node.name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())
def run(task):
    """
    Execute the SAS interpreter on the task's input file.

    :param task: the task to execute; its input is the .sas source
    :return: the exit status of the SAS run (non-zero on failure)
    """
    command = 'SAS'
    fun = sas_fun

    node = task.inputs[0]
    logfilenode = node.change_ext('.log')
    lstfilenode = node.change_ext('.lst')

    # set the cwd
    task.cwd = task.inputs[0].parent.get_src().abspath()
    # bugfix: node.abspath is a method; it was logged uncalled, printing a
    # bound-method repr instead of the actual path
    debug('runner: %s on %s' % (command, node.abspath()))

    SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
    task.env.env = {'SASINPUTS': SASINPUTS}

    task.env.SRCFILE = node.abspath()
    task.env.LOGFILE = logfilenode.abspath()
    task.env.LSTFILE = lstfilenode.abspath()
    ret = fun(task)
    if ret:
        error('Running %s on %r returned a non-zero exit' % (command, node))
        error('SRCFILE = %r' % node)
        error('LOGFILE = %r' % logfilenode)
        error('LSTFILE = %r' % lstfilenode)
    return ret
def bibunits(self):
    """Run bibtex once for each bibunit produced by the document."""
    try:
        units = bibunitscan(self)
    except FSError:
        error('error bibunitscan')
        return
    if not units:
        return
    names = ['bu' + str(i) for i in xrange(1, len(units) + 1)]
    if names:
        warn('calling bibtex on bibunits')
    for f in names:
        self.env.env = {'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS}
        self.env.SRCFILE = f
        self.check_status('error when calling bibtex', self.bibtex_fun())
def bibunits(self):
    """Run bibtex on every bibunit file generated by the document."""
    try:
        found = bibunitscan(self)
    except FSError:
        error("error bibunitscan")
    else:
        if found:
            unit_names = ["bu" + str(i) for i in xrange(1, len(found) + 1)]
            if unit_names:
                warn("calling bibtex on bibunits")
                for unit in unit_names:
                    # each run gets a minimal environment with the TeX search paths
                    self.env.env = {"BIBINPUTS": self.TEXINPUTS, "BSTINPUTS": self.TEXINPUTS}
                    self.env.SRCFILE = unit
                    self.check_status("error when calling bibtex", self.bibtex_fun())
def apply_intltool_in_f(self):
    """
    Create tasks to translate files by intltool-merge::

        def build(bld):
            bld(
                features     = "intltool_in",
                podir        = "../po",
                flags        = ["-d", "-q", "-u", "-c"],
                source       = 'kupfer.desktop.in',
                install_path = "${DATADIR}/applications",
            )

    :param podir: location of the .po files
    :type podir: string
    :param source: source files to process
    :type source: list of string
    :param flags: compilation flags ("-quc" by default)
    :type flags: list of string
    :param install_path: installation path
    :type install_path: string
    """
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    # default the locale directory when the configuration did not set one
    if not self.env.LOCALEDIR:
        self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
    for entry in self.to_list(self.source):
        node = self.path.find_resource(entry)
        podir = getattr(self, 'podir', 'po')
        podirnode = self.path.find_dir(podir)
        if not podirnode:
            error("could not find the podir %r" % podir)
            continue
        cache = getattr(self, 'intlcache', '.intlcache')
        self.env['INTLCACHE'] = os.path.join(self.path.bldpath(), podir, cache)
        self.env['INTLPODIR'] = podirnode.bldpath()
        self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
        task = self.create_task('intltool', node, node.change_ext(''))
        inst = getattr(self, 'install_path', '${LOCALEDIR}')
        if inst:
            self.bld.install_files(inst, task.outputs)
def iapply_intltool_in_f(self):
    """Set up intltool-merge tasks for the ``.in`` source files (legacy API)."""
    try:
        self.meths.remove("process_source")
    except ValueError:
        pass
    for entry in self.to_list(self.source):
        node = self.path.find_resource(entry)
        folder = getattr(self, "podir", "po")
        folder_node = self.path.find_dir(folder)
        if not folder_node:
            error("could not find the podir %r" % folder)
            continue
        cache_name = getattr(self, "intlcache", ".intlcache")
        self.env["INTLCACHE"] = os.path.join(self.path.bldpath(self.env), folder, cache_name)
        self.env["INTLPODIR"] = folder_node.srcpath(self.env)
        self.env["INTLFLAGS"] = getattr(self, "flags", ["-q", "-u", "-c"])
        task = self.create_task("intltool", node, node.change_ext(""))
        # the task carries its own install destination in this API
        task.install_path = self.install_path
def bibfile(self):
    """Scan all *.aux* files and run bibtex once when any references a bibliography."""
    wants_bibtex = False
    try:
        for aux in self.aux_nodes:
            if g_bibtex_re.findall(aux.read()):
                wants_bibtex = True
                break
    except (OSError, IOError):
        error('error bibtex scan')
        return
    if wants_bibtex:
        warn('calling bibtex')
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
        # bibtex operates on the main .aux file, without the extension
        self.env.SRCFILE = self.aux_nodes[0].name[:-4]
        self.check_status('error when calling bibtex', self.bibtex_fun())
def run(self):
    """Run the TeX tool in several passes until the .aux checksums settle."""
    env = self.env
    if not env['PROMPT_LATEX']:
        # batch mode prevents the tools from prompting on errors
        for flagvar in ('LATEXFLAGS', 'PDFLATEXFLAGS', 'XELATEXFLAGS'):
            env.append_value(flagvar, '-interaction=batchmode')

    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()
    texinputs = self.env.TEXINPUTS or ''
    self.TEXINPUTS = (node.parent.get_bld().abspath() + os.pathsep
        + node.parent.get_src().abspath() + os.pathsep
        + texinputs + os.pathsep)
    self.aux_node = node.change_ext('.aux')
    self.cwd = self.inputs[0].parent.get_bld().abspath()

    def refresh_env():
        # every pass needs a fresh environment carrying TEXINPUTS and the source
        fresh = {}
        fresh.update(os.environ)
        fresh.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.env = fresh
        self.env.SRCFILE = srcfile

    warn('first pass on %s' % self.__class__.__name__)
    refresh_env()
    self.check_status('error when calling latex', fun())

    self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
    self.idx_node = node.change_ext('.idx')
    self.bibfile()
    self.bibunits()
    self.makeindex()

    checksum = ''
    for _ in range(10):
        previous = checksum
        try:
            checksum = Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
        except (OSError, IOError):
            error('could not read aux.h')
        # stop once the .aux contents no longer change between passes
        if checksum and checksum == previous:
            break
        warn('calling %s' % self.__class__.__name__)
        refresh_env()
        self.check_status('error when calling %s' % self.__class__.__name__, fun())
def bibunits(self):
    """
    Parse the *.aux* file to find bibunit files. If there are bibunit files,
    execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
    """
    try:
        scanned = bibunitscan(self)
    except FSError:
        error('error bibunitscan')
        return
    if not scanned:
        return
    unit_names = ['bu' + str(i) for i in xrange(1, len(scanned) + 1)]
    if unit_names:
        warn('calling bibtex on bibunits')
    for unit in unit_names:
        self.env.env = {'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS}
        self.env.SRCFILE = unit
        self.check_status('error when calling bibtex', self.bibtex_fun())
def iapply_intltool_in_f(self):
    """Create intltool-merge tasks for each ``.in`` file (legacy env-based API)."""
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    for src in self.to_list(self.source):
        node = self.path.find_resource(src)

        folder = getattr(self, 'podir', 'po')
        folder_node = self.path.find_dir(folder)
        if not folder_node:
            error("could not find the podir %r" % folder)
            continue

        cache_name = getattr(self, 'intlcache', '.intlcache')
        self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), folder, cache_name)
        self.env['INTLPODIR'] = folder_node.srcpath(self.env)
        self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])

        task = self.create_task('intltool', node, node.change_ext(''))
        # installation destination is carried on the task in this API
        task.install_path = self.install_path
def run(self):
    """Drive the TeX build: one first pass, then repeat until the .aux files stabilise."""
    env = self.env
    if not env['PROMPT_LATEX']:
        # never prompt interactively during a build
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
        env.append_value('XELATEXFLAGS', '-interaction=batchmode')

    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()

    extra_inputs = self.env.TEXINPUTS or ''
    self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + extra_inputs + os.pathsep
    self.aux_node = node.change_ext('.aux')
    self.cwd = self.inputs[0].parent.get_bld().abspath()

    warn('first pass on %s' % self.__class__.__name__)
    self.env.env = {}
    self.env.env.update(os.environ)
    self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
    self.env.SRCFILE = srcfile
    self.check_status('error when calling latex', fun())

    self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
    self.idx_node = node.change_ext('.idx')
    self.bibfile()
    self.bibunits()
    self.makeindex()

    digest = ''
    for _ in range(10):
        last_digest = digest
        try:
            digest = Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
        except (OSError, IOError):
            error('could not read aux.h')
        # converged: the aux files did not change during the last pass
        if digest and digest == last_digest:
            break
        warn('calling %s' % self.__class__.__name__)
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
        self.env.SRCFILE = srcfile
        self.check_status('error when calling %s' % self.__class__.__name__, fun())
def bibfile(self):
    """
    Parse the *.aux* file to find a bibfile to process.
    If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    try:
        data = self.aux_node.read()
    except (OSError, IOError):
        error('error bibtex scan')
    else:
        # a match means the .aux file references bibliography data
        if g_bibtex_re.findall(data):
            warn('calling bibtex')
            merged = {}
            merged.update(os.environ)
            merged.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
            self.env.env = merged
            self.env.SRCFILE = self.aux_node.name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())
def run(self):
    """Locate a bjam binary, building it with build.sh when necessary.

    Returns 0 on success, -1 when bjam cannot be found or built.
    """
    env = self.env
    bjam = self.generator.bld.root.find_dir(env.BJAM_SRC)
    if not bjam:
        error('Can not find bjam source')
        return -1
    exe_rel = 'bin.' + env.BJAM_UNAME + '/bjam'

    # a previously built binary may already be present
    exe = bjam.find_resource(exe_rel)
    if exe:
        env.BJAM = exe.srcpath()
        return 0

    cmd = ['./build.sh']
    debug('runner: ' + bjam.srcpath() + '> ' + str(cmd))
    if self.exec_command(cmd, cwd=bjam.srcpath()) != 0:
        error('bjam failed')
        return -1

    # the build should have produced the binary now
    exe = bjam.find_resource(exe_rel)
    if exe:
        env.BJAM = exe.srcpath()
        return 0
    error('bjam failed')
    return -1
def apply_intltool_in_f(self):
    """Translate ``.in`` files with intltool-merge; a redundant ``-c`` flag is dropped with a warning."""
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    self.ensure_localedir()
    for src in self.to_list(self.source):
        node = self.path.find_resource(src)

        folder = getattr(self, 'podir', 'po')
        folder_node = self.path.find_dir(folder)
        if not folder_node:
            error("could not find the podir %r" % folder)
            continue

        cache_name = getattr(self, 'intlcache', '.intlcache')
        self.env.INTLCACHE = [os.path.join(self.path.bldpath(), folder, cache_name)]
        self.env.INTLPODIR = folder_node.bldpath()
        self.env.INTLFLAGS = getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT)
        if '-c' in self.env.INTLFLAGS:
            # the cache flag is added by waf; strip a user-supplied duplicate
            Logs.warn('Redundant -c flag in intltool task %r' % self)
            self.env.INTLFLAGS.remove('-c')

        task = self.create_task('intltool', node, node.change_ext(''))
        dest = getattr(self, 'install_path', '${LOCALEDIR}')
        if dest:
            self.bld.install_files(dest, task.outputs)
def run(self):
    """Run the TeX tool repeatedly until the single .aux file's checksum stabilises."""
    env = self.env
    bld = self.generator.bld
    if not env["PROMPT_LATEX"]:
        # batch mode: never stop to prompt during a build
        env.append_value("LATEXFLAGS", "-interaction=batchmode")
        env.append_value("PDFLATEXFLAGS", "-interaction=batchmode")
        env.append_value("XELATEXFLAGS", "-interaction=batchmode")

    fun = self.texfun
    node = self.inputs[0]
    srcfile = node.abspath()

    self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
    self.aux_node = node.change_ext(".aux")
    self.idx_node = node.change_ext(".idx")
    self.cwd = self.inputs[0].parent.get_bld().abspath()

    def prepare_env():
        # each pass gets a fresh environment with the TeX search paths
        self.env.env = {}
        self.env.env.update(os.environ)
        self.env.env.update({"TEXINPUTS": self.TEXINPUTS})
        self.env.SRCFILE = srcfile

    warn("first pass on %s" % self.__class__.__name__)
    prepare_env()
    self.check_status("error when calling latex", fun())

    self.bibfile()
    self.bibunits()
    self.makeindex()

    digest = ""
    for _ in range(10):
        previous = digest
        try:
            digest = Utils.h_file(self.aux_node.abspath())
        except (OSError, IOError):
            error("could not read aux.h -> %s" % self.aux_node.abspath())
        # stop when the .aux file no longer changes between passes
        if digest and digest == previous:
            break
        warn("calling %s" % self.__class__.__name__)
        prepare_env()
        self.check_status("error when calling %s" % self.__class__.__name__, fun())
def apply_intltool_in_f(self):
    """Create intltool-merge tasks for the ``.in`` sources and schedule their installation."""
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    if not self.env.LOCALEDIR:
        # fall back to the conventional locale directory under the prefix
        self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
    for src in self.to_list(self.source):
        node = self.path.find_resource(src)
        folder = getattr(self, 'podir', 'po')
        folder_node = self.path.find_dir(folder)
        if not folder_node:
            error("could not find the podir %r" % folder)
            continue
        cache_name = getattr(self, 'intlcache', '.intlcache')
        self.env['INTLCACHE'] = os.path.join(self.path.bldpath(), folder, cache_name)
        self.env['INTLPODIR'] = folder_node.bldpath()
        self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
        task = self.create_task('intltool', node, node.change_ext(''))
        dest = getattr(self, 'install_path', '${LOCALEDIR}')
        if dest:
            self.bld.install_files(dest, task.outputs)
def bibfile(self):
    """Inspect every scanned *.aux* file and invoke bibtex if any needs it."""
    found = False
    try:
        for aux in self.aux_nodes:
            text = aux.read()
            if g_bibtex_re.findall(text):
                found = True
                break
    except (OSError, IOError):
        error('error bibtex scan')
    else:
        if found:
            warn('calling bibtex')
            self.env.env = {}
            self.env.env.update(os.environ)
            self.env.env.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
            # bibtex is given the main .aux file name without its extension
            self.env.SRCFILE = self.aux_nodes[0].name[:-4]
            self.check_status('error when calling bibtex', self.bibtex_fun())
def bibunits(self):
    """
    Parse the *.aux* file to find bibunit files. If there are bibunit files,
    execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
    """
    try:
        found = bibunitscan(self)
    except FSError:
        error('error bibunitscan')
    else:
        if found:
            names = ['bu' + str(i) for i in xrange(1, len(found) + 1)]
            if names:
                warn('calling bibtex on bibunits')
            for unit in names:
                # minimal environment: just the TeX search paths
                self.env.env = {'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS}
                self.env.SRCFILE = unit
                self.check_status('error when calling bibtex', self.bibtex_fun())
def run(self):
    """Invoke bjam to build the requested boost libraries and collect the outputs."""
    env = self.env
    gen = self.generator
    path = gen.path
    bld = gen.bld
    # an explicit 'root' attribute overrides the default build directory
    build_root = path.find_node(gen.root) if hasattr(gen, 'root') else path

    jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
    if jam:
        debug('bjam: Using jam configuration from ' + jam.srcpath())
        jam_rel = jam.relpath_gen(build_root)
    else:
        warn('No build configuration in build_config/user-config.jam. Using default')
        jam_rel = None

    bjam_exe = bld.srcnode.find_node(env.BJAM)
    if not bjam_exe:
        error('env.BJAM is not set')
        return -1
    bjam_exe_rel = bjam_exe.relpath_gen(build_root)

    cmd = [bjam_exe_rel]
    if jam_rel:
        cmd.append('--user-config=' + jam_rel)
    cmd.append('--stagedir=' + path.get_bld().path_from(build_root))
    cmd.append('--debug-configuration')
    cmd.extend('--with-' + lib for lib in self.generator.target)
    if env.BJAM_TOOLSET:
        cmd.append('toolset=' + env.BJAM_TOOLSET)
    cmd.append('link=' + 'shared')
    cmd.append('variant=' + 'release')

    debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
    ret = self.exec_command(cmd, cwd=build_root.srcpath())
    if ret != 0:
        return ret
    self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
    return 0
def addlines(self, node):
    """
    Add the lines from a header in the list of preprocessor lines to parse

    :param node: header
    :type node: :py:class:`waflib.Node.Node`
    """
    # relative includes found later are resolved against this directory
    self.currentnode_stack.append(node.parent)
    filepath = node.abspath()

    self.count_files += 1
    if self.count_files > recursion_limit:
        # issue #812
        raise PreprocError("recursion limit exceeded")
    pc = self.parse_cache
    debug('preproc: reading file %r', filepath)
    try:
        lns = pc[filepath]
    except KeyError:
        pass
    else:
        # cache hit: reuse the lines filtered on a previous inclusion
        self.lines.extend(lns)
        return
    try:
        lines = filter_comments(filepath)
        lines.append((POPFILE, ''))
        lines.reverse()
        pc[filepath] = lines # cache the lines filtered
        self.lines.extend(lines)
    except IOError:
        raise PreprocError("could not read the file %s" % filepath)
    except Exception:
        # unparsable headers are only reported in verbose mode
        if Logs.verbose > 0:
            error("parsing %s failed" % filepath)
            traceback.print_exc()
def bibfile(self):
    """
    Parse the *.aux* files to find a bibfile to process.
    If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
    """
    run_bibtex = False
    try:
        for aux in self.aux_nodes:
            contents = aux.read()
            if g_bibtex_re.findall(contents):
                run_bibtex = True
                break
    except (OSError, IOError):
        error('error bibtex scan')
        return
    # only the main .aux file needs to be processed
    if run_bibtex:
        warn('calling bibtex')
        merged = {}
        merged.update(os.environ)
        merged.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
        self.env.env = merged
        self.env.SRCFILE = self.aux_nodes[0].name[:-4]
        self.check_status('error when calling bibtex', self.bibtex_fun())
def run(self):
    """Run bjam for the requested boost libraries, staging the artefacts in the build dir."""
    env = self.env
    gen = self.generator
    path = gen.path
    bld = gen.bld
    if hasattr(gen, 'root'):
        build_root = path.find_node(gen.root)
    else:
        build_root = path

    jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
    if jam:
        debug('bjam: Using jam configuration from ' + jam.srcpath())
        jam_rel = jam.relpath_gen(build_root)
    else:
        warn('No build configuration in build_config/user-config.jam. Using default')
        jam_rel = None

    bjam_exe = bld.srcnode.find_node(env.BJAM)
    if not bjam_exe:
        error('env.BJAM is not set')
        return -1
    bjam_exe_rel = bjam_exe.relpath_gen(build_root)

    # assemble the bjam command line piece by piece
    cmd = [bjam_exe_rel]
    if jam_rel:
        cmd.append('--user-config=' + jam_rel)
    cmd.append('--stagedir=' + path.get_bld().path_from(build_root))
    cmd.append('--debug-configuration')
    for lib in self.generator.target:
        cmd.append('--with-' + lib)
    if env.BJAM_TOOLSET:
        cmd.append('toolset=' + env.BJAM_TOOLSET)
    cmd.append('link=' + 'shared')
    cmd.append('variant=' + 'release')

    debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
    ret = self.exec_command(cmd, cwd=build_root.srcpath())
    if ret != 0:
        return ret
    self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
    return 0
def apply_intltool_in_f(self):
    """
    Creates tasks to translate files by intltool-merge::

        def build(bld):
            bld(
                features     = "intltool_in",
                podir        = "../po",
                style        = "desktop",
                flags        = ["-u"],
                source       = 'kupfer.desktop.in',
                install_path = "${DATADIR}/applications",
            )

    :param podir: location of the .po files
    :type podir: string
    :param source: source files to process
    :type source: list of string
    :param style: the intltool-merge mode of operation, can be one of the following values:
      ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
      See the ``intltool-merge`` man page for more information about supported modes of operation.
    :type style: string
    :param flags: compilation flags ("-quc" by default)
    :type flags: list of string
    :param install_path: installation path
    :type install_path: string
    """
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    self.ensure_localedir()

    folder = getattr(self, 'podir', '.')
    folder_node = self.path.find_dir(folder)
    if not folder_node:
        error("could not find the podir %r" % folder)
        return

    cache_name = getattr(self, 'intlcache', '.intlcache')
    self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), folder, cache_name)]
    self.env.INTLPODIR = folder_node.bldpath()
    self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
    # the cache flag is supplied by waf; a user-provided -c is an error
    if '-c' in self.env.INTLFLAGS:
        self.bld.fatal('Redundant -c flag in intltool task %r' % self)

    mode = getattr(self, 'style', None)
    if mode:
        try:
            mode_flag = _style_flags[mode]
        except KeyError:
            self.bld.fatal('intltool_in style "%s" is not valid' % mode)
        self.env.append_unique('INTLFLAGS', [mode_flag])

    for src in self.to_list(self.source):
        node = self.path.find_resource(src)
        task = self.create_task('intltool', node, node.change_ext(''))
        dest = getattr(self, 'install_path', None)
        if dest:
            self.add_install_files(install_to=dest, install_from=task.outputs)
def apply_intltool_in_f(self):
    """
    Create tasks to translate files by intltool-merge::

        def build(bld):
            bld(
                features     = "intltool_in",
                podir        = "../po",
                style        = "desktop",
                flags        = ["-u"],
                source       = 'kupfer.desktop.in',
                install_path = "${DATADIR}/applications",
            )

    :param podir: location of the .po files
    :type podir: string
    :param source: source files to process
    :type source: list of string
    :param style: the intltool-merge mode of operation, can be one of the following values:
      ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
      See the ``intltool-merge`` man page for more information about supported modes of operation.
    :type style: string
    :param flags: compilation flags ("-quc" by default)
    :type flags: list of string
    :param install_path: installation path
    :type install_path: string
    """
    try:
        self.meths.remove('process_source')
    except ValueError:
        pass
    self.ensure_localedir()

    po_folder = getattr(self, 'podir', '.')
    po_node = self.path.find_dir(po_folder)
    if not po_node:
        error("could not find the podir %r" % po_folder)
        return

    intl_cache = getattr(self, 'intlcache', '.intlcache')
    self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), po_folder, intl_cache)]
    self.env.INTLPODIR = po_node.bldpath()
    self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
    if '-c' in self.env.INTLFLAGS:
        # waf adds the cache flag itself
        self.bld.fatal('Redundant -c flag in intltool task %r' % self)

    merge_style = getattr(self, 'style', None)
    if merge_style:
        try:
            merge_flag = _style_flags[merge_style]
        except KeyError:
            self.bld.fatal('intltool_in style "%s" is not valid' % merge_style)
        self.env.append_unique('INTLFLAGS', [merge_flag])

    for src in self.to_list(self.source):
        node = self.path.find_resource(src)
        task = self.create_task('intltool', node, node.change_ext(''))
        dest = getattr(self, 'install_path', None)
        if dest:
            self.bld.install_files(dest, task.outputs)
def add_moc_tasks(self):
    """
    Create moc tasks for the ``.moc`` files recorded in ``bld.raw_deps[self.uid()]``.
    """
    node = self.inputs[0]
    bld = self.generator.bld
    try:
        # compute the signature once to know whether there are moc files to create
        self.signature()
    except KeyError:
        # the moc file may be referenced somewhere else
        pass
    else:
        # the signature must be recomputed after the moc tasks are added
        delattr(self, 'cache_sig')
    moctasks = []
    mocfiles = []
    try:
        tmp_lst = bld.raw_deps[self.uid()]
        bld.raw_deps[self.uid()] = []
    except KeyError:
        tmp_lst = []
    for d in tmp_lst:
        if not d.endswith('.moc'):
            continue
        if d in mocfiles:
            # process each base.moc only once
            error("paranoia owns")
            continue
        mocfiles.append(d)
        # locate the header corresponding to this moc file
        h_node = None
        try:
            ext = Options.options.qt_header_ext.split()
        except AttributeError:
            pass
        # NOTE(review): 'ext' looks unbound on the first iteration when
        # qt_header_ext is absent -- confirm intended
        if not ext:
            ext = MOC_H
        base2 = d[:-4]
        # search the input's own directory first, then the include paths
        for x in [node.parent] + self.generator.includes_nodes:
            for e in ext:
                h_node = x.find_node(base2 + e)
                if h_node:
                    break
            else:
                continue
            break
        else:
            raise Errors.WafError('no header found for %r which is a moc file' % d)
        m_node = h_node.change_ext('.moc')
        bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node
        # create the moc task and inject it into the build pipeline
        task = Task.classes['moc'](env=self.env, generator=self.generator)
        task.set_inputs(h_node)
        task.set_outputs(m_node)
        gen = bld.producer
        gen.outstanding.insert(0, task)
        gen.total += 1
        moctasks.append(task)
    # keep only the moc files in the raw deps to save space
    tmp_lst = bld.raw_deps[self.uid()] = mocfiles
    lst = bld.node_deps.get(self.uid(), ())
    for d in lst:
        name = d.name
        if name.endswith('.moc'):
            task = Task.classes['moc'](env=self.env, generator=self.generator)
            task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(), name)])
            task.set_outputs(d)
            gen = bld.producer
            gen.outstanding.insert(0, task)
            gen.total += 1
            moctasks.append(task)
    self.run_after.update(set(moctasks))
    self.moc_done = 1
def add_moc_tasks(self):
	"""
	Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
	"""
	node = self.inputs[0]
	bld = self.generator.bld
	try:
		# compute the signature once to know if there is a moc file to create
		self.signature()
	except KeyError:
		# the moc file may be referenced somewhere else
		pass
	else:
		# remove the signature, it must be recomputed with the moc task
		delattr(self, 'cache_sig')
	moctasks = []
	mocfiles = []
	try:
		tmp_lst = bld.raw_deps[self.uid()]
		bld.raw_deps[self.uid()] = []
	except KeyError:
		tmp_lst = []
	for d in tmp_lst:
		if not d.endswith('.moc'):
			continue
		# paranoid check
		if d in mocfiles:
			error("paranoia owns")
			continue
		# process that base.moc only once
		mocfiles.append(d)
		# find the extension - this search is done only once
		h_node = None
		# bugfix: initialize 'ext' -- when qt_header_ext is absent, the
		# AttributeError handler left it unbound and 'if not ext' raised NameError
		ext = None
		try:
			ext = Options.options.qt_header_ext.split()
		except AttributeError:
			pass
		if not ext:
			ext = MOC_H
		base2 = d[:-4]
		for x in [node.parent] + self.generator.includes_nodes:
			for e in ext:
				h_node = x.find_node(base2 + e)
				if h_node:
					break
			else:
				continue
			break
		else:
			raise Errors.WafError('no header found for %r which is a moc file' % d)
		# next time we will not search for the extension (look at the 'for' loop below)
		m_node = h_node.change_ext('.moc')
		bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node
		# create the task
		task = Task.classes['moc'](env=self.env, generator=self.generator)
		task.set_inputs(h_node)
		task.set_outputs(m_node)
		# direct injection in the build phase (safe because called from the main thread)
		gen = bld.producer
		gen.outstanding.insert(0, task)
		gen.total += 1
		moctasks.append(task)
	# remove raw deps except the moc files to save space (optimization)
	tmp_lst = bld.raw_deps[self.uid()] = mocfiles
	# look at the file inputs, it is set right above
	lst = bld.node_deps.get(self.uid(), ())
	for d in lst:
		name = d.name
		if name.endswith('.moc'):
			task = Task.classes['moc'](env=self.env, generator=self.generator)
			task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(), name)])  # 1st element in a tuple
			task.set_outputs(d)
			gen = bld.producer
			gen.outstanding.insert(0, task)
			gen.total += 1
			moctasks.append(task)
	# simple scheduler dependency: run the moc task before others
	self.run_after.update(set(moctasks))
	self.moc_done = 1
def smplpkg(bld, name, use='', app_use='', test_use=''):
	"""
	Declare the build for one "simple package" laid out in the conventional
	inc/, src/, dict/, apps/ and test/ subdirectories of the current wscript
	directory: a shared library, optional ROOT dictionary, programs and tests.

	:param bld: the waf build context
	:param name: package/library name; also the header subdirectory under inc/
	:param use: dependencies for the library (and inherited by apps/tests)
	:param app_use: extra dependencies for the programs in apps/
	:param test_use: extra dependencies for the tests in test/
	"""
	# normalize and de-duplicate the three 'use' lists; apps and tests
	# inherit the library dependencies
	use = list(set(to_list(use)))
	app_use = list(set(use + to_list(app_use)))
	test_use = list(set(use + to_list(test_use)))
	includes = []
	headers = []
	source = []
	# conventional package layout, each part optional
	incdir = bld.path.find_dir('inc')
	srcdir = bld.path.find_dir('src')
	dictdir = bld.path.find_dir('dict')
	testsrc = bld.path.ant_glob('test/test_*.cxx')
	test_scripts = bld.path.ant_glob('test/test_*.sh') + bld.path.ant_glob('test/test_*.py')
	test_jsonnets = bld.path.ant_glob('test/test*.jsonnet')
	appsdir = bld.path.find_dir('apps')
	if incdir:
		# headers live under inc/<name>/ and are exported to dependents
		headers += incdir.ant_glob(name + '/*.h')
		includes += ['inc']
		bld.env['INCLUDES_' + name] = [incdir.abspath()]
	if headers:
		bld.install_files('${PREFIX}/include/%s' % name, headers)
	if srcdir:
		source += srcdir.ant_glob('*.cxx')
		source += srcdir.ant_glob('*.cu')  # cuda
	# fixme: I should move this out of here.
	# root dictionary
	if dictdir:
		if not headers:
			# NOTE(review): reports the problem but falls through and still tries
			# to build the dictionary -- confirm this is intentional
			error('No header files for ROOT dictionary "%s"' % name)
		#print 'Building ROOT dictionary: %s using %s' % (name,use)
		if 'ROOTSYS' in use:
			linkdef = dictdir.find_resource('LinkDef.h')
			bld.gen_rootcling_dict(name, linkdef, headers=headers, includes=includes, use=use)
			source.append(bld.path.find_or_declare(name + 'Dict.cxx'))
		else:
			warn('No ROOT dictionary will be generated for "%s" unless "ROOTSYS" added to "use"' % name)

	def get_rpath(uselst, local=True):
		# collect run-time library search paths: the installed lib dir, the
		# LIBPATH of every dependency, and (when local) the sibling WireCell
		# package build directories
		ret = set([bld.env["PREFIX"] + "/lib"])
		for one in uselst:
			libpath = bld.env["LIBPATH_" + one]
			for l in libpath:
				ret.add(l)
			if local:
				if one.startswith("WireCell"):
					# "WireCellFoo" builds into the "foo" subdirectory
					sd = one[8:].lower()
					blddir = bld.path.find_or_declare(bld.out_dir)
					pkgdir = blddir.find_or_declare(sd).abspath()
					#print pkgdir
					ret.add(pkgdir)
		ret = list(ret)
		return ret
	# the library
	if incdir and srcdir:
		#print "Building library: %s using %s"%(name, use)
		bld(features='cxx cxxshlib',
			name=name,
			source=source,
			target=name,
			rpath=get_rpath(use),
			includes='inc',
			export_includes='inc',
			use=use)
	if appsdir:
		# one program per apps/*.cxx, linked against the package library
		for app in appsdir.ant_glob('*.cxx'):
			#print 'Building %s app: %s using %s' % (name, app, app_use)
			bld.program(source=[app],
						target=app.name.replace('.cxx', ''),
						includes='inc',
						rpath=get_rpath(app_use + [name], local=False),
						use=app_use + [name])
	if (testsrc or test_scripts) and not bld.options.no_tests:
		# compiled tests: one program per test/test_*.cxx
		for test_main in testsrc:
			#print 'Building %s test: %s using %s' % (name, test_main, test_use)
			rpath = get_rpath(test_use + [name])
			#print rpath
			bld.program(features='test',
						source=[test_main],
						ut_cwd=bld.path,
						target=test_main.name.replace('.cxx', ''),
						install_path=None,
						rpath=rpath,
						includes=['inc', 'test', 'tests'],
						use=test_use + [name])
		# script tests: run shell scripts with bash, python scripts with python
		for test_script in test_scripts:
			interp = "${BASH}"
			if test_script.abspath().endswith(".py"):
				interp = "${PYTHON}"
			#print 'Building %s test %s script: %s using %s' % (name, interp, test_script, test_use)
			bld(features="test_scripts",
				ut_cwd=bld.path,
				test_scripts_source=test_script,
				test_scripts_template="pwd && " + interp + " ${SCRIPT}")
	if test_jsonnets and not bld.options.no_tests:
		# print ("testing %d jsonnets in %s" % (len(test_jsonnets), bld.path ))
		for test_jsonnet in test_jsonnets:
			bld(features="test_scripts",
				ut_cwd=bld.path,
				test_scripts_source=test_jsonnet,
				test_scripts_template="pwd && wcsonnet ${SCRIPT}")
def run(self):
	"""
	Runs the TeX build process.

	It may require multiple passes, depending on the usage of cross-references,
	bibliographies, content susceptible of needing such passes.
	The appropriate TeX compiler is called until the *.aux* files stop changing.

	Makeindex and bibtex are called if necessary.
	"""
	env = self.env

	# suppress interactive prompts unless explicitly requested
	if not env['PROMPT_LATEX']:
		env.append_value('LATEXFLAGS', '-interaction=batchmode')
		env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
		env.append_value('XELATEXFLAGS', '-interaction=batchmode')

	fun = self.texfun  # the compiler function (latex/pdflatex/xelatex)

	node = self.inputs[0]
	srcfile = node.abspath()

	# TEXINPUTS: build dir, source dir, then any user-provided paths
	texinputs = self.env.TEXINPUTS or ''
	self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep

	self.aux_node = node.change_ext('.aux')  # TODO waf 1.7 remove (left for compatibility)

	# important, set the cwd for everybody
	self.cwd = self.inputs[0].parent.get_bld().abspath()

	# first compiler pass; the environment is rebuilt from os.environ each time
	warn('first pass on %s' % self.__class__.__name__)
	self.env.env = {}
	self.env.env.update(os.environ)
	self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
	self.env.SRCFILE = srcfile
	self.check_status('error when calling latex', fun())

	# the first pass produced .aux files; process bibliographies and index
	self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
	self.idx_node = node.change_ext('.idx')
	self.bibfile()
	self.bibunits()
	self.makeindex()

	hash = ''
	for i in range(10):
		# prevent against infinite loops - one never knows
		# watch the contents of file.aux and stop if file.aux does not change anymore
		prev_hash = hash
		try:
			hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
			hash = Utils.h_list(hashes)
		except (OSError, IOError):
			# NOTE(review): message says "aux.h" but this hashes the .aux files
			error('could not read aux.h')
			pass
		if hash and hash == prev_hash:
			break

		# run the command
		warn('calling %s' % self.__class__.__name__)
		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
		self.env.SRCFILE = srcfile
		self.check_status('error when calling %s' % self.__class__.__name__, fun())
def add_moc_tasks(self): node = self.inputs[0] bld = self.generator.bld try: self.signature() except KeyError: pass else: delattr(self, "cache_sig") moctasks = [] mocfiles = [] try: tmp_lst = bld.raw_deps[self.uid()] bld.raw_deps[self.uid()] = [] except KeyError: tmp_lst = [] for d in tmp_lst: if not d.endswith(".moc"): continue if d in mocfiles: error("paranoia owns") continue mocfiles.append(d) h_node = None try: ext = Options.options.qt_header_ext.split() except AttributeError: pass if not ext: ext = MOC_H base2 = d[:-4] for x in [node.parent] + self.generator.includes_nodes: for e in ext: h_node = x.find_node(base2 + e) if h_node: break else: continue break else: raise Errors.WafError("no header found for %r which is a moc file" % d) m_node = h_node.change_ext(".moc") bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node task = Task.classes["moc"](env=self.env, generator=self.generator) task.set_inputs(h_node) task.set_outputs(m_node) gen = bld.producer gen.outstanding.insert(0, task) gen.total += 1 moctasks.append(task) tmp_lst = bld.raw_deps[self.uid()] = mocfiles lst = bld.node_deps.get(self.uid(), ()) for d in lst: name = d.name if name.endswith(".moc"): task = Task.classes["moc"](env=self.env, generator=self.generator) task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(), name)]) task.set_outputs(d) gen = bld.producer gen.outstanding.insert(0, task) gen.total += 1 moctasks.append(task) self.run_after.update(set(moctasks)) self.moc_done = 1
def run(self):
	"""
	Runs the TeX build process.

	It may require multiple passes, depending on the usage of cross-references,
	bibliographies, content susceptible of needing such passes.
	The appropriate TeX compiler is called until the *.aux* file ceases changing.

	Makeindex and bibtex are called if necessary.
	"""
	env = self.env
	bld = self.generator.bld  # NOTE(review): unused in this method; confirm before removing

	# suppress interactive prompts unless explicitly requested
	if not env['PROMPT_LATEX']:
		env.append_value('LATEXFLAGS', '-interaction=batchmode')
		env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
		env.append_value('XELATEXFLAGS', '-interaction=batchmode')

	fun = self.texfun  # the compiler function (latex/pdflatex/xelatex)

	node = self.inputs[0]
	srcfile = node.abspath()

	# TEXINPUTS: build dir first, then source dir
	self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
	self.aux_node = node.change_ext('.aux')
	self.idx_node = node.change_ext('.idx')

	# important, set the cwd for everybody
	self.cwd = self.inputs[0].parent.get_bld().abspath()

	# first compiler pass; the environment is rebuilt from os.environ each time
	warn('first pass on %s' % self.__class__.__name__)
	self.env.env = {}
	self.env.env.update(os.environ)
	self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
	self.env.SRCFILE = srcfile
	self.check_status('error when calling latex', fun())

	# the first pass produced the .aux file; process bibliography and index
	self.bibfile()
	self.bibunits()
	self.makeindex()

	hash = ''
	for i in range(10):
		# prevent against infinite loops - one never knows
		# watch the contents of file.aux and stop if file.aux does not change anymore
		prev_hash = hash
		try:
			hash = Utils.h_file(self.aux_node.abspath())
		except (OSError, IOError):
			error('could not read aux.h -> %s' % self.aux_node.abspath())
			pass
		if hash and hash == prev_hash:
			break

		# run the command
		warn('calling %s' % self.__class__.__name__)
		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
		self.env.SRCFILE = srcfile
		self.check_status('error when calling %s' % self.__class__.__name__, fun())
def tex_build(task, command='LATEX'):
	"""
	Build a (La)TeX document: run the compiler, then bibtex and makeindex when
	their input files exist, and repeat the compiler until the *.aux* file
	stops changing (at most 10 passes).

	:param task: the task to execute (provides env, inputs and generator)
	:param command: 'LATEX' or 'PDFLATEX', selecting the compiler function
	:return: a non-zero exit status on failure, None on success
	"""
	env = task.env
	bld = task.generator.bld
	# suppress interactive prompts unless explicitly requested
	if not env['PROMPT_LATEX']:
		env.append_value('LATEXFLAGS', '-interaction=batchmode')
		env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
	fun = latex_fun
	if command == 'PDFLATEX':
		fun = pdflatex_fun

	node = task.inputs[0]
	srcfile = node.abspath()
	# search path: build dir first, then source dir
	sr2 = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep

	aux_node = node.change_ext('.aux')
	idx_node = node.change_ext('.idx')

	nm = aux_node.name
	docuname = nm[:len(nm) - 4]  # 4 is the size of ".aux"

	# important, set the cwd for everybody
	task.cwd = task.inputs[0].parent.get_bld().abspath()

	warn('first pass on %s' % command)
	task.env.env = {'TEXINPUTS': sr2}
	task.env.SRCFILE = srcfile
	ret = fun(task)
	if ret:
		return ret

	# look in the .aux file if there is a bibfile to process
	try:
		ct = Utils.readf(aux_node.abspath())
	except (OSError, IOError):
		error('error bibtex scan')
	else:
		fo = g_bibtex_re.findall(ct)
		# there is a .aux file to process
		if fo:
			warn('calling bibtex')
			task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
			task.env.SRCFILE = docuname
			ret = bibtex_fun(task)
			if ret:
				error('error when calling bibtex %s' % docuname)
				return ret

	# look on the filesystem if there is a .idx file to process
	try:
		idx_path = idx_node.abspath()
		os.stat(idx_path)
	except OSError:
		error('error file.idx scan')
	else:
		warn('calling makeindex')
		task.env.SRCFILE = idx_node.name
		task.env.env = {}
		ret = makeindex_fun(task)
		if ret:
			error('error when calling makeindex %s' % idx_path)
			return ret

	hash = ''
	i = 0
	while i < 10:
		# prevent against infinite loops - one never knows
		i += 1
		# watch the contents of file.aux; stop if it does not change anymore
		prev_hash = hash
		try:
			hash = Utils.h_file(aux_node.abspath())
		except (OSError, IOError):
			# bugfix: reading/hashing a missing file raises OSError/IOError,
			# not KeyError (the run() variants in this file catch these too);
			# the old 'except KeyError' let the exception escape instead of
			# reporting the error below
			error('could not read aux.h -> %s' % aux_node.abspath())
			pass
		if hash and hash == prev_hash:
			break

		# run the command
		warn('calling %s' % command)
		task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
		task.env.SRCFILE = srcfile
		ret = fun(task)
		if ret:
			# NOTE(review): latex_compile_cmd is not defined in this function;
			# confirm it exists at module level, otherwise this error path
			# raises NameError
			error('error when calling %s %s' % (command, latex_compile_cmd))
			return ret

	return None  # ok
def tex_build(task, command='LATEX'):
	"""
	Build a (La)TeX document: run the compiler, then bibtex and makeindex when
	their input files exist, and repeat the compiler until the *.aux* file
	stops changing (at most 10 passes).

	:param task: the task to execute (provides env, inputs and generator)
	:param command: 'LATEX' or 'PDFLATEX', selecting the compiler function
	:return: a non-zero exit status on failure, None on success
	"""
	env = task.env
	bld = task.generator.bld
	# suppress interactive prompts unless explicitly requested
	if not env['PROMPT_LATEX']:
		env.append_value('LATEXFLAGS', '-interaction=batchmode')
		env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
	fun = latex_fun
	if command == 'PDFLATEX':
		fun = pdflatex_fun
	node = task.inputs[0]
	srcfile = node.abspath()
	# search path: build dir first, then source dir
	sr2 = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
	aux_node = node.change_ext('.aux')
	idx_node = node.change_ext('.idx')
	nm = aux_node.name
	docuname = nm[:len(nm) - 4]  # strip ".aux"
	# set the cwd for all spawned commands
	task.cwd = task.inputs[0].parent.get_bld().abspath()
	warn('first pass on %s' % command)
	task.env.env = {'TEXINPUTS': sr2}
	task.env.SRCFILE = srcfile
	ret = fun(task)
	if ret:
		return ret
	# look in the .aux file if there is a bibfile to process
	try:
		ct = Utils.readf(aux_node.abspath())
	except (OSError, IOError):
		error('error bibtex scan')
	else:
		fo = g_bibtex_re.findall(ct)
		if fo:
			warn('calling bibtex')
			task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
			task.env.SRCFILE = docuname
			ret = bibtex_fun(task)
			if ret:
				error('error when calling bibtex %s' % docuname)
				return ret
	# look on the filesystem if there is a .idx file to process
	try:
		idx_path = idx_node.abspath()
		os.stat(idx_path)
	except OSError:
		error('error file.idx scan')
	else:
		warn('calling makeindex')
		task.env.SRCFILE = idx_node.name
		task.env.env = {}
		ret = makeindex_fun(task)
		if ret:
			error('error when calling makeindex %s' % idx_path)
			return ret
	hash = ''
	i = 0
	while i < 10:
		# prevent against infinite loops - one never knows
		i += 1
		# watch the contents of file.aux; stop if it does not change anymore
		prev_hash = hash
		try:
			hash = Utils.h_file(aux_node.abspath())
		except (OSError, IOError):
			# bugfix: reading/hashing a missing file raises OSError/IOError,
			# not KeyError (the run() variants in this file catch these too);
			# the old 'except KeyError' let the exception escape instead of
			# reporting the error below
			error('could not read aux.h -> %s' % aux_node.abspath())
			pass
		if hash and hash == prev_hash:
			break
		warn('calling %s' % command)
		task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
		task.env.SRCFILE = srcfile
		ret = fun(task)
		if ret:
			# NOTE(review): latex_compile_cmd is not defined in this function;
			# confirm it exists at module level, otherwise this error path
			# raises NameError
			error('error when calling %s %s' % (command, latex_compile_cmd))
			return ret
	return None
def add_moc_tasks(self):
	"""
	Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
	"""
	node = self.inputs[0]
	bld = self.generator.bld
	try:
		# compute the signature once to know if there is a moc file to create
		self.signature()
	except KeyError:
		# the moc file may be referenced somewhere else
		pass
	else:
		# remove the signature, it must be recomputed with the moc task
		delattr(self, 'cache_sig')
	moctasks=[]
	mocfiles=[]
	try:
		tmp_lst = bld.raw_deps[self.uid()]
		bld.raw_deps[self.uid()] = []
	except KeyError:
		tmp_lst = []
	for d in tmp_lst:
		if not d.endswith('.moc'):
			continue
		# paranoid check
		if d in mocfiles:
			error("paranoia owns")
			continue
		# process that base.moc only once
		mocfiles.append(d)
		# find the extension - this search is done only once
		ext = ''
		try:
			ext = Options.options.qt_header_ext
		except AttributeError:
			pass
		if not ext:
			base2 = d[:-4]
			for exth in MOC_H:
				k = node.parent.find_node(base2+exth)
				if k:
					break
			else:
				raise Errors.WafError('no header found for %r which is a moc file' % d)
		# next time we will not search for the extension (look at the 'for' loop below)
		# NOTE(review): when qt_header_ext IS set, 'base2' and 'exth' are never
		# assigned and the next line raises NameError -- confirm whether the
		# option path was ever exercised
		h_node = node.parent.find_resource(base2 + exth)
		m_node = h_node.change_ext('.moc')
		bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node
		# create the task
		task = Task.classes['moc'](env=self.env, generator=self.generator)
		task.set_inputs(h_node)
		task.set_outputs(m_node)
		# direct injection in the build phase (safe because called from the main thread)
		gen = bld.producer
		gen.outstanding.insert(0, task)
		gen.total += 1
		moctasks.append(task)
	# remove raw deps except the moc files to save space (optimization)
	tmp_lst = bld.raw_deps[self.uid()] = mocfiles
	# look at the file inputs, it is set right above
	lst = bld.node_deps.get(self.uid(), ())
	for d in lst:
		name = d.name
		if name.endswith('.moc'):
			task = Task.classes['moc'](env=self.env, generator=self.generator)
			task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(), name)])  # 1st element in a tuple
			task.set_outputs(d)
			gen = bld.producer
			gen.outstanding.insert(0, task)
			gen.total += 1
			moctasks.append(task)
	# simple scheduler dependency: run the moc task before others
	self.run_after.update(set(moctasks))
	self.moc_done = 1