def load_fixtures(self):
    from ..data.fixtures.dataset001 import load_fixtures as load_dataset001
    with self.app.app_context():
        from app import db
        load_dataset001(db)

        index_name = DocumentFacade.get_index_name()
        print("Reindexing", index_name)
        # remove all records
        self.app.elasticsearch.indices.delete(index=index_name, ignore=[400, 404])

        from app.models import Document
        from app.api.search import SearchIndexManager
        for doc in Document.query.all():
            f_obj = DocumentFacade("", doc)
            for data in f_obj.get_data_to_index_when_added():
                SearchIndexManager.add_to_index(index=index_name, id=doc.id, payload=data)
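# A minimal sketch (an assumption, not the project's app.api.search implementation) of what an
# add_to_index(index, id, payload) helper like the one called above could look like with the
# elasticsearch-py client; the class name SketchSearchIndexManager and its `es` attribute are
# hypothetical.
class SketchSearchIndexManager:

    es = None  # an elasticsearch.Elasticsearch client injected by the app factory (assumption)

    @classmethod
    def add_to_index(cls, index, id, payload):
        if cls.es is None:
            return
        # Elasticsearch.index() creates or replaces the document identified by `id`
        # in `index`; `body` is the JSON document to store.
        cls.es.index(index=index, id=id, body=payload)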
def get_document_resource_identifiers(self):
    from app.api.document.facade import DocumentFacade
    return [] if self.obj.placenames_having_roles is None else [
        DocumentFacade.make_resource_identifier(c_h_r.document.id, DocumentFacade.TYPE)
        for c_h_r in self.obj.placenames_having_roles
    ]
def get_document_resources(self):
    from app.api.document.facade import DocumentFacade
    return [] if self.obj.placenames_having_roles is None else [
        DocumentFacade(self.url_prefix, c.document,
                       self.with_relationships_links,
                       self.with_relationships_data).resource
        for c in self.obj.placenames_having_roles
    ]
def test_doc_attribute_change(self):
    r, s, res = self.api_patch("documents/1", data={
        "data": {
            "id": 1,
            "type": "document",
            "attributes": {
                "title": "Document TestIndexation"
            }
        }
    })
    self.assert200(r)
    self.reindex_document(1)

    r, status, resource = self.api_get("documents/1")
    self.assert200(r)
    self.assertEqual("Document TestIndexation", resource["data"]["attributes"]["title"])

    resource = self.search(DocumentFacade.get_index_name(), "TestIndexation")
    pprint.pprint(resource)
    self.assertEqual(1, resource["meta"]["total-count"])
    self.assertEqual(1, resource["data"][0]["id"])
def make_collection(self, doc):
    f_obj, errors, kwargs = DocumentFacade.get_facade('', doc)
    collection_url = f_obj.get_iiif_collection_url()
    collection = dict(self.collection_template)

    manifest_urls = []
    for witness in sorted(doc.witnesses, key=attrgetter('num')):
        f_obj, errors, kwargs = WitnessFacade.get_facade('', witness)
        manifest_url = f_obj.get_iiif_manifest_url()
        if manifest_url is not None and (manifest_url, witness) not in manifest_urls:
            manifest_urls.append((manifest_url, witness))

    collection["@id"] = collection_url
    collection["manifests"] = []
    for url, witness in manifest_urls:
        manifest = {
            "@id": url,
            "@type": "sc:Manifest",
            "label": witness.content
        }
        collection["manifests"].append(manifest)

    return collection, collection_url
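# Hedged illustration (an assumption, not taken from the source): a minimal IIIF Presentation 2.x
# collection_template that make_collection() above could copy before filling in "@id" and
# "manifests"; the real template is defined on the class and may carry additional fields.
EXAMPLE_COLLECTION_TEMPLATE = {
    "@context": "http://iiif.io/api/presentation/2/context.json",
    "@type": "sc:Collection",
    "@id": None,      # overwritten with the document facade's get_iiif_collection_url()
    "label": "",
    "manifests": [],  # overwritten with one {"@id", "@type", "label"} entry per witness
}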
def reindex_document(self, id):
    from app.api.search import SearchIndexManager

    f_obj, errors, kwargs = DocumentFacade.get_resource_facade("", id)
    index_name = DocumentFacade.get_index_name()
    for data in f_obj.get_data_to_index_when_added():
        SearchIndexManager.add_to_index(index=index_name, id=id, payload=data)
import json
import sys

from flask import current_app
from flask_testing import TestCase  # provides self.app, assert200(), etc.

from app import create_app, db
from app.api.document.facade import DocumentFacade

if sys.version_info < (3, 6):
    # json.loads() only accepts str before Python 3.6, so decode bytes first
    def json_loads(s):
        return json.loads(s.decode("utf-8") if isinstance(s, bytes) else s)
else:
    json_loads = json.loads

_app = create_app("test")

with _app.app_context():
    if hasattr(current_app, "elasticsearch"):
        print("DELETE AND CREATE SEARCH INDEXES")
        # delete the index
        _app.elasticsearch.indices.delete(
            index=DocumentFacade.get_index_name(), ignore=[400, 404])
        # create the index
        _app.elasticsearch.indices.create(
            index=DocumentFacade.get_index_name(), ignore=[400, 404])


class TestBaseServer(TestCase):

    def setUp(self):
        with self.app.app_context():
            self.clear_data()
            self.load_fixtures()

    def create_app(self):
        with _app.app_context():
            db.create_all()
        # flask_testing.TestCase expects create_app() to return the app instance
        return _app