def reconstructMetaIndexedFlags(self):
    """Re-derive the indexed/unindexed flags in meta from the search engine.

    A fresh meta is obtained via loadMeta() (not getMeta()) on purpose,
    so the flags are rebuilt against a freshly loaded copy.
    Dirty-index entries are intentionally left untouched.
    """
    meta = self.loadMeta()
    # Entries currently flagged as indexed: clear the flag when the search
    # engine has no index for them, or has one at a different version.
    for entry in meta.getIndexedList():
        engine_version = self.searchEngine.getIndexedVersion(entry['kitab'])
        if not engine_version or engine_version != metaVrr(entry):
            meta.setIndexedFlags(entry['uri'], 0)
    # Entries currently flagged as unindexed: set the flag when the search
    # engine does hold an index at exactly the same version.
    for entry in meta.getUnindexedList():
        engine_version = self.searchEngine.getIndexedVersion(entry['kitab'])
        if engine_version and engine_version == metaVrr(entry):
            meta.setIndexedFlags(entry['uri'])
def _view(self, ki, m, i, d='#', s=""):
    """Build the view-response dict for node *i* of kitab *ki*.

    d and s are the prefix and suffix wrapped around generated "_i<N>"
    links; the result is a copy of self._emptyViewResp filled with
    content, children links, prev/next/up navigation and breadcrumbs.
    """
    resp = self._emptyViewResp.copy()
    node, prev_node, up_node, next_node, children, crumbs = \
        ki.toc.getNodePrevUpNextChildrenBreadcrumbs(i)
    # upper bound for content rendering: global order of the next node
    upper = next_node.globalOrder if next_node else -1
    if not node or i == "_i0":
        # root/missing node: show the kitab title as a heading instead
        resp['content'] = "<h1>%s</h1>" % escape(prettyId(m['kitab']))
    else:
        resp['content'] = node.toHtml(upper).replace('\n\n', '\n</p><p>\n')
    if children:
        items = []
        for child in children:
            items.append('<li><a href="%s">%s</a></li>\n' % (
                d + "_i" + str(child.idNum) + s, escape(child.getContent())))
        resp['childrenLinks'] = "<ul>\n" + ''.join(items) + "</ul>"
    else:
        resp['childrenLinks'] = ''
    if next_node:
        resp['nextUrl'] = d + '_i' + str(next_node.idNum) + s
        resp['nextTitle'] = escape(next_node.getContent())
    if prev_node:
        resp['prevUrl'] = d + '_i' + str(prev_node.idNum) + s
        resp['prevTitle'] = escape(prev_node.getContent())
    if up_node:
        resp['upUrl'] = d + '_i' + str(up_node.idNum) + s
        resp['upTitle'] = escape(up_node.getContent())
    if crumbs:
        links = []
        for crumb_id, crumb_title in crumbs:
            links.append(("<a href='" + d + "_i%i" + s + "'>%s</a>")
                         % (crumb_id, escape(crumb_title)))
        resp['breadcrumbs'] = " > ".join(links)
    vrr = metaVrr(ki.meta)  # kept for the related-content hook below
    #self.th.searchEngine.related(m['kitab'], vrr, node.idNum)
    return resp
def indexKitab(self, name):
    """Create the search index for the kitab called *name*.

    Callers must wrap this between indexingStart() and indexingEnd().
    The meta indexed flag is set to 1 before the traversal and to 2
    once the whole tree has been visited (presumably in-progress /
    complete markers — TODO confirm against setIndexedFlags).
    """
    kitab = self.th.getKitab(name)
    self.th.getMeta().setIndexedFlags(kitab.uri, 1)
    version = metaVrr(kitab.meta)
    indexing_state = self.__IIX()
    # walk the whole kitab tree, feeding every node to the private
    # start/end indexing callbacks
    kitab.root.traverser(3, self.__ix_nodeStart, self.__ix_nodeEnd,
                         name, version, indexing_state)
    self.th.getMeta().setIndexedFlags(kitab.uri, 2)
def indexKitab(self, name): """ create search index for a given Kitab name NOTE: you need to call indexingStart() before this and indexingEnd() after it """ #print "creating index for kitab with name:", name ki = self.th.getKitab(name) self.th.getMeta().setIndexedFlags(ki.uri, 1) vrr = metaVrr(ki.meta) iix = self.__IIX() ki.root.traverser(3, self.__ix_nodeStart, self.__ix_nodeEnd, name, vrr, iix) self.th.getMeta().setIndexedFlags(ki.uri, 2)
def dropOld(self):
    """
    drop index for all indexed kutub that got updated

    this is useful if followed by indexNew

    no need to call indexingStart()/indexingEnd() around this,
    it handles them itself
    """
    self.indexingStart()
    m=self.th.getMeta()
    for n in self.th.getKitabList():
        vr=self.getIndexedVersion(n)
        # NOTE(review): with GNU strverscmp semantics, > 0 means the indexed
        # version is NEWER than the latest kitab in meta, which reads like a
        # downgrade check rather than "got updated" as the docstring says —
        # confirm the intended direction (compare dropChanged, which drops
        # on any version mismatch).
        if vr and strverscmp(vr,metaVrr(m.getLatestKitab(n)))>0:
            self.dropKitabIndex(n)
    self.indexingEnd()
def dropChanged(self):
    """Drop the index of every kitab whose indexed version no longer
    matches the latest available version (updated or downgraded).

    Useful right before indexNew. Handles indexingStart()/indexingEnd()
    itself, so callers need not wrap it.
    """
    self.indexingStart()
    meta = self.th.getMeta()
    for kitab_name in self.th.getKitabList():
        indexed_version = self.getIndexedVersion(kitab_name)
        if not indexed_version:
            continue  # never indexed: nothing to drop
        if indexed_version != metaVrr(meta.getLatestKitab(kitab_name)):
            self.dropKitabIndex(kitab_name)
    self.indexingEnd()
def dropOld(self):
    """Drop the index of every kitab whose indexed version compares
    greater than the latest kitab version in meta (see dropChanged for
    the any-difference variant).

    Useful right before indexNew. Handles indexingStart()/indexingEnd()
    itself, so callers need not wrap it.
    """
    self.indexingStart()
    meta = self.th.getMeta()
    for kitab_name in self.th.getKitabList():
        indexed_version = self.getIndexedVersion(kitab_name)
        if not indexed_version:
            continue  # never indexed: nothing to drop
        latest_version = metaVrr(meta.getLatestKitab(kitab_name))
        if strverscmp(indexed_version, latest_version) > 0:
            self.dropKitabIndex(kitab_name)
    self.indexingEnd()
def dropChanged(self):
    """
    drop index for all indexed kutub that got changed (updated or downgraded)

    this is useful if followed by indexNew

    no need to call indexingStart()/indexingEnd() around this,
    it handles them itself
    """
    self.indexingStart()
    m = self.th.getMeta()
    for n in self.th.getKitabList():
        vr = self.getIndexedVersion(n)
        # drop when an index exists and its version differs in either
        # direction from the latest copy of the kitab in meta
        if vr and vr != metaVrr(m.getLatestKitab(n)):
            self.dropKitabIndex(n)
    self.indexingEnd()
def _view(self, ki, m, i, d='#', s=""):
    """Build the view-response dict for node *i* of kitab *ki*.

    d and s are the prefix and suffix wrapped around generated "_i<N>"
    links; the result is a copy of self._emptyViewResp filled with
    content, children links, prev/next/up navigation and breadcrumbs.

    FIX: an autoformatter had inserted spaces *inside* the HTML string
    literals ('<a href = "%s">', "<a href = '"), changing the emitted
    markup; the attribute syntax is restored to href="..." / href='...'.
    The Python-2-only tuple-unpacking lambda in the breadcrumbs is also
    replaced with an equivalent loop.
    """
    r = self._emptyViewResp.copy()
    node, p, u, n, c, b = ki.toc.getNodePrevUpNextChildrenBreadcrumbs(i)
    # upper bound for content rendering: global order of the next node
    ub = n.globalOrder if n else -1
    if not node or i == "_i0":
        # root/missing node: show the kitab title as a heading instead
        r['content'] = "<h1>%s</h1>" % escape(prettyId(m['kitab']))
    else:
        r['content'] = node.toHtml(ub).replace('\n\n', '\n</p><p>\n')
    if c:
        cLinks = ''.join('<li><a href="%s">%s</a></li>\n' % (
            d + "_i" + str(cc.idNum) + s, escape(cc.getContent())) for cc in c)
        r['childrenLinks'] = "<ul>\n" + cLinks + "</ul>"
    else:
        r['childrenLinks'] = ''
    if n:
        r['nextUrl'] = d + '_i' + str(n.idNum) + s
        r['nextTitle'] = escape(n.getContent())
    if p:
        r['prevUrl'] = d + '_i' + str(p.idNum) + s
        r['prevTitle'] = escape(p.getContent())
    if u:
        r['upUrl'] = d + '_i' + str(u.idNum) + s
        r['upTitle'] = escape(u.getContent())
    if b:
        crumb_links = []
        for crumb_id, crumb_title in b:
            crumb_links.append(("<a href='" + d + "_i%i" + s + "'>%s</a>")
                               % (crumb_id, escape(crumb_title)))
        r['breadcrumbs'] = " > ".join(crumb_links)
    vrr = metaVrr(ki.meta)  # kept for the related-content hook below
    #self.th.searchEngine.related(m['kitab'], vrr, node.idNum)
    return r