def reindex(index):
    """Walk every document reachable through the index's search providers
    and (re)index each one, including documents already present in the
    search index.  Note that this may take a lot of time.

    :param index: key of the search index to rebuild, looked up in
        ``IResourceManager.get_search_indexes()``.
    """
    from xappy import errors
    from inyoka.core.resource import IResourceManager
    from inyoka.core.search import create_search_document

    search_index = IResourceManager.get_search_indexes()[index]
    with search_index.indexer_connection() as indexer:
        # every provider contributes its own set of documents
        for provider in search_index.providers.itervalues():
            for doc_id, obj in provider.prepare_all():
                document = create_search_document(
                    '%s-%s' % (provider.name, doc_id), obj)
                try:
                    # optimistic path: assume the document is new
                    indexer.add(document)
                except errors.IndexerError:
                    # an entry with that id already exists -- overwrite it
                    indexer.replace(document)
        # persist everything in one go after all providers were processed
        indexer.flush()
def run(self, index, provider, doc_id, **kwargs):
    """Index (or de-index) a single document identified by *doc_id*.

    Fetches the current database state of the document from *provider*;
    a missing row aborts silently, ``None`` deletes the search entry,
    anything else is (re)written to the index.  A locked Xapian database
    causes the task to retry itself in 30 seconds.
    """
    search_id = '%s-%s' % (provider, doc_id)
    index = get_index_implementation(index)

    try:
        # load the document's data from the database
        obj = index.providers[provider].prepare([doc_id]).next()
    except StopIteration:
        # no document matches doc_id -- nothing to do
        return

    try:
        with index.indexer_connection() as indexer:
            if obj is None:
                # row vanished from the database; drop the stale
                # search index entry as well
                indexer.delete(search_id)
            else:
                document = create_search_document(search_id, obj)
                try:
                    # optimistic path: assume the document is new
                    indexer.add(document)
                except errors.IndexerError:
                    # an entry with that id already exists -- overwrite it
                    indexer.replace(document)
    except errors.XapianDatabaseLockError as exc:
        # database is locked by another writer; retry in 30 seconds
        self.retry([index, provider, doc_id], kwargs, countdown=30, exc=exc)
def reindex(index):
    """Rebuild the given search index from scratch.

    Iterates over all documents all registered providers can supply --
    even those already indexed -- and writes each into the search
    index.  Note that this may take a lot of time.

    :param index: key of the search index in
        ``IResourceManager.get_search_indexes()``.
    """
    from xappy import errors
    from inyoka.core.resource import IResourceManager
    from inyoka.core.search import create_search_document

    search_index = IResourceManager.get_search_indexes()[index]
    with search_index.indexer_connection() as indexer:
        # collect the data of every search provider
        for provider in search_index.providers.itervalues():
            for doc_id, obj in provider.prepare_all():
                document = create_search_document(
                    '%s-%s' % (provider.name, doc_id), obj)
                try:
                    # first attempt: treat the document as new
                    indexer.add(document)
                except errors.IndexerError:
                    # already indexed -- replace the existing entry
                    indexer.replace(document)
        # single flush once all providers have been walked
        indexer.flush()
def run(self, index, provider, doc_id, **kwargs):
    """Synchronise one document between the database and the search index.

    The document is fetched via *provider*; if the id is unknown the
    task returns immediately, a ``None`` payload removes the search
    entry, and any other payload is added (or replaces an existing
    entry).  When the Xapian database is locked the task reschedules
    itself 30 seconds later.
    """
    search_id = '%s-%s' % (provider, doc_id)
    index = get_index_implementation(index)

    try:
        # pull the document's current data out of the database
        obj = index.providers[provider].prepare([doc_id]).next()
    except StopIteration:
        # unknown doc_id: abort without touching the index
        return

    try:
        with index.indexer_connection() as indexer:
            if obj is None:
                # deleted in the database -> delete from the index too
                indexer.delete(search_id)
            else:
                document = create_search_document(search_id, obj)
                try:
                    # first attempt: treat the document as new
                    indexer.add(document)
                except errors.IndexerError:
                    # already indexed -- replace the existing entry
                    indexer.replace(document)
    except errors.XapianDatabaseLockError as exc:
        # another writer holds the lock; retry this task in 30 seconds
        self.retry([index, provider, doc_id], kwargs, countdown=30, exc=exc)