Example #1
	def Re_Index(self, feedlist=[], entrylist=[]):
		if len(feedlist) == 0 and len(entrylist) == 0:
			return
			
		def reindex_interrupt():
			indexModifier.close()
			self._index_lock.release()
			self._interrupt()
			return
			
		self._index_lock.acquire()
		db = self._get_db()
		c = db.cursor()
					
		analyzer = StandardAnalyzer()
		indexModifier = IndexModifier(self._storeDir, analyzer, False)
		# let any IndexModifier failure here (most likely an index lock error) propagate
		
		feedlist = utils.uniquer(feedlist)
		entrylist = utils.uniquer(entrylist)
		
		feed_addition = []
		entry_addition = []
	
		for feed_id in feedlist:
			if self._quitting:
				return reindex_interrupt()
			try:
				c.execute(u"""SELECT title, description FROM feeds WHERE id=?""",(feed_id,))
				title, description = c.fetchone()
				feed_addition.append((feed_id, title, description))
			except TypeError:
				pass # fetchone() returned nothing; the feed was deleted, so it is not re-added

		for entry_id in entrylist:
			if self._quitting:
				return reindex_interrupt()
			try:
				c.execute(u"""SELECT feed_id, title, description, fakedate FROM entries WHERE id=?""",(entry_id,))
				feed_id, title, description, fakedate = c.fetchone()
				entry_addition.append((entry_id, feed_id, title, description, fakedate))
			except TypeError:
				pass # the entry was deleted; skip re-indexing it
				
		c.close()
		db.close()
		
		entry_addition = utils.uniquer(entry_addition)
				
		if self._quitting:
			return reindex_interrupt()
		#first delete anything deleted or changed
		for feed_id in feedlist:
			try:
				indexModifier.deleteDocuments(Term("feed_id",str(feed_id)))
			except Exception, e:
				print "Failed deleting feed:", e
Example #2
	def Re_Index(self, feedlist=[], entrylist=[]):
		if len(feedlist) == 0 and len(entrylist) == 0:
			return
			
		def reindex_interrupt():
			self._indexing = False
			self._index_lock.release()
			self._interrupt()
			#logging.debug("Reindex interrupted")
			return
			
		#logging.debug("Xapian reindexing: %i, %i" % (len(feedlist), len(entrylist)))
			
		self._index_lock.acquire()
		self._indexing = True
		db = self._get_db()
		c = db.cursor()
					
		database = xapian.WritableDatabase(self._storeDir, xapian.DB_CREATE_OR_OPEN)
		indexer = xapian.TermGenerator()
		stemmer = xapian.Stem("english")
		indexer.set_stemmer(stemmer)
		
		#feedlist = utils.uniquer(feedlist)
		entrylist = utils.uniquer(entrylist)
		
		#feed_addition = []
		entry_addition = []
	
		#for feed_id in feedlist:
		#	if self._quitting:
		#		del database
		#		return reindex_interrupt()
		#	try:
		#		c.execute(u"""SELECT title, description FROM feeds WHERE id=?""",(feed_id,))
		#		title, description = c.fetchone()
		#		feed_addition.append((feed_id, title, description))
		#	except TypeError:
		#		pass #it won't be readded.  Assumption is we have deleted this feed

		for entry_id in entrylist:
			if self._quitting:
				del database
				return reindex_interrupt()
			try:
				c.execute(u"""SELECT feed_id, title, description, fakedate FROM entries WHERE id=?""",(entry_id,))
				feed_id, title, description, fakedate = c.fetchone()
				entry_addition.append((entry_id, feed_id, title, description, fakedate))
			except TypeError:
				pass # the entry was deleted; skip re-indexing it
				
		c.close()
		db.close()
		
		entry_addition = utils.uniquer(entry_addition)
				
		if self._quitting:
			del database
			return reindex_interrupt()
		#first delete anything deleted or changed
		#for feed_id in feedlist:
		#	try:
		#		database.delete_document("f"+str(feed_id))
		#	except Exception, e:
		#		logging.error("Failed deleting feed: %s" % str(e))
				
		for entry_id in entrylist:
			try:
				database.delete_document("e"+str(entry_id))
			except Exception, e:
				logging.error("Failed deleting entry: %s" % str(e))
Example #3
					else: # meaning "entry"
						title    = doc.get_value(ENTRY_TITLE)
						fakedate = float(doc.get_value(DATE)) / 1000.0
						if fakedate > since:
							entry_results.append((int(entry_id),title, fakedate, feed_id))
				#else:
				#	print "excluding:"+doc.get("title")
			except Exception, e:
				print e
				print feed_id
				print blacklist

		for entry in entry_results:
			feed_results.append(entry[3])
			
		feed_results = utils.uniquer(feed_results)
		entry_results = utils.uniquer(entry_results)	
		#need to re-sort because we merged two lists together; newest first
		entry_results.sort(key=lambda x: x[2], reverse=True)
		#for e in entry_results:
		#	print e[2],e[1]
		return (feed_results, entry_results)
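
	# Sketch only: one way the documents iterated over above could be produced
	# with Xapian's query API. The method name, parameters, and defaults are
	# illustrative assumptions, not this class's actual interface.
	def _search_sketch(self, querystring, maxhits=50):
		database = xapian.Database(self._storeDir)
		parser = xapian.QueryParser()
		parser.set_stemmer(xapian.Stem("english"))
		parser.set_database(database)
		parser.set_stemming_strategy(xapian.QueryParser.STEM_SOME)
		enquire = xapian.Enquire(database)
		enquire.set_query(parser.parse_query(querystring))
		matches = enquire.get_mset(0, maxhits)
		# each match carries the stored document, whose values (ENTRY_TITLE,
		# DATE, ...) are what the loop above reads with doc.get_value()
		return [match.document for match in matches]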
		
	def merge(self, l1, l2):
		"""merges two sorted lists"""
		if len(l1) > len(l2):
			l1, l2 = l2, l1  # make l1 the shorter list
		i = -1
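
The listing ends just as merge() gets started. A minimal sketch of how a merge of two such lists might be completed, assuming (as the sort above suggests) that both inputs are entry tuples already ordered by fakedate, newest first; the real implementation may differ:

def merge_sorted(l1, l2, key=lambda item: item[2]):
	"""Merge two lists that are each sorted in descending key order."""
	result = []
	i = j = 0
	while i < len(l1) and j < len(l2):
		if key(l1[i]) >= key(l2[j]):
			result.append(l1[i])
			i += 1
		else:
			result.append(l2[j])
			j += 1
	# one list is exhausted; the remainder of the other is already in order
	result.extend(l1[i:])
	result.extend(l2[j:])
	return result

For ascending inputs, the standard library's heapq.merge does the same job without building the result list up front.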