def load_fixtures(self, file_name, process_documents=True):
    """Load a fixture file into the database and the search index.

    :param file_name: name of the fixture file, resolved via
        ``self.get_fixture_path``.
    :param process_documents: when True, every document in the database is
        run through analysis and the search index is optimized afterwards.
    """
    filepath = self.get_fixture_path(file_name)
    # NOTE: this method shadows a module-level ``load_fixtures`` helper;
    # the inner call below is to that helper, which inserts the records
    # parsed by ``loaders.load`` into the database session.
    load_fixtures(db, loaders.load(filepath))
    db.session.commit()
    # Entities are pushed to the search index unconditionally.
    reindex_entities()
    if process_documents:
        # Analyze every document (synchronously), then compact the index.
        for doc in Document.all():
            analyze_document(doc)
        optimize_search()
def index(foreign_id=None):
    """Index documents in the given collection (or throughout)."""
    query = Document.all_ids()
    if foreign_id:
        # Restrict to documents belonging to the requested collection.
        collection = Collection.by_foreign_id(foreign_id)
        if collection is None:
            raise ValueError("No such collection: %r" % foreign_id)
        membership = Document.collections.any(Collection.id == collection.id)
        query = query.filter(membership)
    # Each document is indexed asynchronously via a queued task.
    for (document_id,) in query:
        index_document_id.delay(document_id)
    # A full (unscoped) run also refreshes the entity index.
    if foreign_id is None:
        reindex_entities()
def index(foreign_id=None):
    """Index documents in the given collection (or throughout)."""
    # re-index newest document first.
    query = Document.all_ids().order_by(Document.id.desc())
    if foreign_id:
        collection = Collection.by_foreign_id(foreign_id)
        if collection is None:
            raise ValueError("No such collection: %r" % foreign_id)
        query = query.filter(Document.collection_id == collection.id)
    # Stream IDs in batches to keep memory bounded on large tables,
    # queueing one indexing task per document.
    for (document_id,) in query.yield_per(10000):
        index_document_id.delay(document_id)
    # A full (unscoped) run also refreshes the entity index.
    if foreign_id is None:
        reindex_entities()
def index(foreign_id=None):
    """Index documents in the given collection (or throughout)."""
    # re-index newest document first.
    query = Document.all_ids().order_by(Document.id.desc())
    if foreign_id:
        collection = Collection.by_foreign_id(foreign_id)
        if collection is None:
            raise ValueError("No such collection: %r" % foreign_id)
        query = query.filter(Document.collection_id == collection.id)
    # Stream IDs in batches to keep memory bounded; queue one indexing
    # task per document and log progress every 1000 documents.
    count = 0
    for (document_id,) in query.yield_per(5000):
        index_document_id.delay(document_id)
        count += 1
        if count % 1000 == 0:
            log.info("Index: %s documents...", count)
    # A full (unscoped) run also refreshes the entity index.
    if foreign_id is None:
        reindex_entities()
def indexentities():
    """Re-index all the entities."""
    # Thin wrapper: delegates entirely to the project's reindex helper.
    reindex_entities()
def indexentities(foreign_id=None):
    """Re-index all the entities."""
    # NOTE(review): ``foreign_id`` is accepted but never used — the call
    # below always re-indexes all entities. Confirm whether per-collection
    # entity re-indexing was intended here.
    reindex_entities()