def handle_index(instance, isunindex=False):
    """Index, unindex or schedule *instance* for reindexing.

    When ``ASYNCHRONOUS_INDEXING`` is enabled in the configuration the
    object is only scheduled for a later reindex; otherwise it is
    (un)indexed synchronously right away.
    """
    # Imports are deferred on purpose to dodge circular imports at
    # module load time.
    from sesql import config
    from sesql.index import unindex, index, schedule_reindex
    if getattr(config, 'ASYNCHRONOUS_INDEXING', False):
        return schedule_reindex(instance)
    if isunindex:
        return unindex(instance)
    return index(instance)
def process_chunk(self): """ Process a chunk """ cursor = connection.cursor() cursor.execute("""SELECT classname, objid FROM sesql_reindex_schedule ORDER BY scheduled_at ASC LIMIT %d""" % self.chunk) rows = cursor.fetchall() if not rows: transaction.rollback() return self.log.info("Found %d row(s) to reindex" % len(rows)) done = set() for row in rows: try: row = tuple(row) if not row in done: self.log.info("Reindexing %s:%d" % row) done.add(row) try: obj = results.SeSQLResultSet.load(row) index.index(obj) except config.orm.not_found: self.log.info("%s:%d doesn't exist anymore, undexing" % row) index.unindex(row) cursor.execute( """DELETE FROM sesql_reindex_schedule WHERE classname=%s AND objid=%s""", row) except Exception, e: self.log.error('Error in row %s:%s : %s' % (row[0], row[1], e)) if cmd["debug"]: import pdb pdb.post_mortem()
def delete_cb(self, mapper, connection, target):
    """Deletion hook: remove *target* from the search index.

    Import is deferred to avoid circular imports at module load time.
    """
    from sesql.index import unindex
    return unindex(target)