def load_fixtures(self, file_name, process_documents=True):
    """Load a named fixture file into the database and search index.

    Commits the loaded records, re-indexes entities and — unless
    ``process_documents`` is False — runs document analysis followed by
    a search-index optimize pass.
    """
    fixture_path = self.get_fixture_path(file_name)
    # NOTE(review): this presumably calls a module-level `load_fixtures`
    # import, not this method recursively — confirm against the imports.
    load_fixtures(db, loaders.load(fixture_path))
    db.session.commit()
    reindex_entities()
    if not process_documents:
        return
    for doc_id, in Document.all_ids():
        analyze_document(doc_id)
    optimize_search()
def index(foreign_id=None):
    """Index documents in the given source (or throughout)."""
    query = Document.all_ids()
    if foreign_id:
        # Restrict the document set to a single source.
        source = Source.by_foreign_id(foreign_id)
        if source is None:
            raise ValueError("No such source: %r" % foreign_id)
        query = query.filter(Document.source_id == source.id)
    for doc_id, in query:
        index_document.delay(doc_id)
    if foreign_id is None:
        # Full run: also rebuild the entity index.
        reindex_entities()
def index(foreign_id=None):
    """Index documents in the given source (or throughout)."""
    query = Document.all_ids()
    if not foreign_id:
        # Full reindex: start from a freshly initialised search index.
        delete_index()
        init_search()
    else:
        source = Source.by_foreign_id(foreign_id)
        if source is None:
            raise ValueError("No such source: %r" % foreign_id)
        query = query.filter(Document.source_id == source.id)
    for doc_id, in query:
        index_document.delay(doc_id)
    reindex_entities()
def index(foreign_id=None, immediate=False):
    """Index documents in the given source (or throughout).

    :param foreign_id: restrict indexing to the source with this foreign
        ID; when ``None``, every document is indexed and entities are
        re-indexed afterwards.
    :param immediate: when True, index each document synchronously in
        this process instead of dispatching to the task queue.
    :raises ValueError: if ``foreign_id`` names no known source.
    """
    q = Document.all_ids()
    if foreign_id:
        source = Source.by_foreign_id(foreign_id)
        if source is None:
            raise ValueError("No such source: %r" % foreign_id)
        q = q.filter(Document.source_id == source.id)
    for doc_id, in q:
        # Progress feedback for long-running CLI runs.
        print('indexing %s' % doc_id)
        if immediate:
            # Bypass the queue and run the indexing task in-process.
            index_document(doc_id)
        else:
            index_document.delay(doc_id)
    if foreign_id is None:
        reindex_entities()
def indexentities(foreign_id=None):
    """Re-index all the entities."""
    # ``foreign_id`` is accepted for signature parity with the other
    # commands but is not used: entity re-indexing is always global.
    reindex_entities()