def reverse_func(apps, schema_editor):
    # Reverse migration: rebuild the resource_relations index with the legacy
    # "all" mapping type in place of "_doc", copying documents out to a temp
    # index and back because ES mappings cannot be renamed in place.
    se = SearchEngineFactory().create()
    prefix = settings.ELASTICSEARCH_PREFIX
    if (se.es.indices.exists(index="%s_resource_relations" % prefix)):
        index_settings = prepare_resource_relations_index(create=False)
        # rename the "_doc" mapping type back to the legacy "all" type
        index_settings['mappings']['all'] = index_settings['mappings']['_doc']
        index_settings['mappings'].pop('_doc', None)
        se.create_index(index='resource_relations_temp', body=index_settings)
        # copy existing docs into the temp index under the "all" type
        doc = {
            "source": {
                "index": "%s_resource_relations" % prefix,
                "type": "_doc"
            },
            "dest": {
                "index": "%s_resource_relations_temp" % prefix,
                "type": "all"
            }
        }
        se.es.reindex(body=doc, refresh=True, wait_for_completion=True)
        # recreate the real index with the legacy mapping and copy docs back
        se.delete_index(index='resource_relations')
        se.create_index(index='resource_relations', body=index_settings)
        doc = {
            "source": {
                "index": "%s_resource_relations_temp" % prefix
            },
            "dest": {
                "index": "%s_resource_relations" % prefix,
                "type": "all"
            }
        }
        se.es.reindex(body=doc, refresh=True, wait_for_completion=True)
        # NOTE(review): resource_relations_temp is never deleted afterwards and
        # is left behind in the cluster — confirm whether that is intentional.
def index_resources():
    """
    Deletes any existing indicies from elasticsearch related to resources
    and then indexes all resources from the database
    """

    # NOTE: Python 2 code (print statements, dict.iteritems)
    result_summary = {}
    se = SearchEngineFactory().create()

    # clear existing indexes
    for index_type in ['resource_relations', 'entity', 'resource', 'maplayers']:
        se.delete_index(index=index_type)
    # drop term docs that have no backing concept (missing value.options.conceptid)
    se.delete(index='term', body='{"query":{"bool":{"must":[{"constant_score":{"filter":{"missing":{"field":"value.options.conceptid"}}}}],"must_not":[],"should":[]}}}')
    Resource().prepare_term_index(create=True)

    # collect every entity type flagged as a resource and prepare its index
    cursor = connection.cursor()
    cursor.execute("""select entitytypeid from data.entity_types where isresource = TRUE""")
    resource_types = cursor.fetchall()
    Resource().prepare_resource_relations_index(create=True)

    for resource_type in resource_types:
        Resource().prepare_search_index(resource_type[0], create=True)

    # does the actual indexing; populates result_summary per resource type
    index_resources_by_type(resource_types, result_summary)

    # refresh so the counts below see everything just indexed
    se.es.indices.refresh(index='entity')
    for resource_type in resource_types:
        result_summary[resource_type[0]]['indexed'] = se.es.count(index="entity", doc_type=resource_type[0])['count']

    # report database vs indexed counts for each resource type
    print '\nResource Index Results:'
    for k, v in result_summary.iteritems():
        status = 'Passed' if v['database'] == v['indexed'] else 'failed'
        print "Status: {0}, Resource Type: {1}, In Database: {2}, Indexed: {3}".format(status, k, v['database'], v['indexed'])
def setUpClass(cls):
    """Reset the concept search indices and import the v4 test resource graph."""
    search_engine = SearchEngineFactory().create()
    index_names = ('concept_labels', 'term')
    for name in index_names:
        search_engine.delete_index(index=name)
    for name in index_names:
        search_engine.create_index(index=name)
    management.call_command(
        'packages',
        operation='import_json',
        source='tests/fixtures/resource_graphs/archesv4_resource.json')
def setUpClass(cls):
    """Recreate empty concept indices, then load the v4 resource graph fixture."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index="concept_labels")
    engine.delete_index(index="term")
    engine.create_index(index="concept_labels")
    engine.create_index(index="term")
    graph_fixture = "tests/fixtures/resource_graphs/archesv4_resource.json"
    management.call_command("packages", operation="import_json", source=graph_fixture)
def index_concepts():
    """
    Collects all concepts and indexes both concepts and concept_labels
    """

    # NOTE: Python 2 code (print statements)
    se = SearchEngineFactory().create()
    se.delete_index(index='concept_labels')
    # drop term docs that ARE concept-backed (must_not missing conceptid);
    # they get rebuilt by the concept indexing below
    se.delete(
        index='term',
        body=
        '{"query":{"bool":{"must_not":[{"constant_score":{"filter":{"missing":{"field":"value.options.conceptid"}}}}],"must":[],"should":[]}}}'
    )
    Resource().prepare_term_index(create=True)

    print 'indexing concepts'
    start = datetime.now()
    cursor = connection.cursor()
    cursor.execute("""select conceptid from concepts.concepts""")
    conceptids = cursor.fetchall()
    # index every non-core concept together with its labels and subconcepts
    for c in conceptids:
        if c[0] not in CORE_CONCEPTS:
            concept = Concept().get(id=c[0],
                                    include_subconcepts=True,
                                    include_parentconcepts=False,
                                    include=['label'])
            concept.index()
    end = datetime.now()
    duration = end - start
    print 'indexing concepts required', duration.seconds, 'seconds'

    # verify: compare what is in the database view against what got indexed
    cursor = connection.cursor()
    sql = """
        select conceptid, conceptlabel from concepts.vw_concepts where conceptid not in ('%s')
    """ % ("','".join(CORE_CONCEPTS))
    cursor.execute(sql)
    concepts = cursor.fetchall()
    concept_index_results = {'count': len(concepts), 'passed': 0, 'failed': 0}
    for conceptid, conceptvalue in concepts:
        result = get_indexed_concepts(se, conceptid, conceptvalue)
        if result != 'passed':
            concept_index_results['failed'] += 1
        else:
            concept_index_results['passed'] += 1

    status = 'Passed' if concept_index_results['failed'] == 0 else 'Failed'
    print '\nConcept Index Results:'
    print "Status: {0}, In Database: {1}, Indexed: {2}".format(
        status, concept_index_results['count'],
        concept_index_results['passed'])
def index_concepts():
    """
    Collects all concepts and indexes both concepts and concept_labels
    """

    # NOTE: Python 2 code (print statements)
    se = SearchEngineFactory().create()
    se.delete_index(index='concept_labels')
    # drop concept-backed term docs; they are rebuilt by the indexing below
    se.delete(index='term', body='{"query":{"bool":{"must_not":[{"constant_score":{"filter":{"missing":{"field":"value.options.conceptid"}}}}],"must":[],"should":[]}}}')
    Resource().prepare_term_index(create=True)

    print 'indexing concepts'
    start = datetime.now()
    cursor = connection.cursor()
    cursor.execute("""select conceptid from concepts.concepts""")
    conceptids = cursor.fetchall()
    # index every non-core concept with its labels and subconcepts
    for c in conceptids:
        if c[0] not in CORE_CONCEPTS:
            concept = Concept().get(id=c[0], include_subconcepts=True, include_parentconcepts=False, include=['label'])
            concept.index()
    end = datetime.now()
    duration = end - start
    print 'indexing concepts required', duration.seconds, 'seconds'

    # verify: compare the database view against what is actually indexed
    cursor = connection.cursor()
    sql = """
        select conceptid, conceptlabel from concepts.vw_concepts where conceptid not in ('%s')
    """ % ("','".join(CORE_CONCEPTS))
    cursor.execute(sql)
    concepts = cursor.fetchall()
    concept_index_results = {'count':len(concepts), 'passed':0, 'failed':0}
    for conceptid, conceptvalue in concepts:
        result = get_indexed_concepts(se, conceptid, conceptvalue)
        if result != 'passed':
            concept_index_results['failed'] += 1
        else:
            concept_index_results['passed'] += 1

    status = 'Passed' if concept_index_results['failed'] == 0 else 'Failed'
    print '\nConcept Index Results:'
    print "Status: {0}, In Database: {1}, Indexed: {2}".format(status, concept_index_results['count'], concept_index_results['passed'])
def tearDownTestPackage():
    """
    see https://nose.readthedocs.io/en/latest/writing_tests.html#test-packages
    this is called from __init__.py
    """
    engine = SearchEngineFactory().create()
    for index_name in ("terms", "concepts", "resources", "resource_relations"):
        engine.delete_index(index=index_name)
def delete_resource_relations_index():
    """Drop the 'resource_relations' index from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='resource_relations')
def delete_term_index():
    """Drop the 'strings' index from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='strings')
def tearDownClass(cls):
    """Leave behind a fresh, empty 'strings' index for subsequent test classes."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='strings')
    engine.create_index(index='strings')
class BaseIndex(object):
    """Base class for custom Elasticsearch indexes; subclasses must implement
    get_documents_to_index() and set self.index_metadata."""

    def __init__(self, index_name=None):
        if index_name is None or index_name == "":
            raise SearchIndexError("Index name is not defined")

        self.se = SearchEngineFactory().create()
        self.index_metadata = None
        self.index_name = index_name

    def prepare_index(self):
        """
        Defines the Elastic Search mapping and settings for an index

        Arguments:
        None

        Keyword Arguments:
        None

        Return: None
        """

        if self.index_metadata is not None:
            self.se.create_index(index=self.index_name, body=self.index_metadata)
        else:
            raise SearchIndexError("No index metadata defined.")

    def get_documents_to_index(self, resourceinstance, tiles):
        """
        Gets a document to index into Elastic Search

        Arguments:
        resourceinstance -- resource instance object
        tiles -- list of tiles that make up the resource instance

        Keyword Arguments:
        None

        Return: tuple of (document, document id)
        """

        raise NotImplementedError

    def index_document(self, document=None, id=None):
        """
        Indexes a document into Elastic Search

        Arguments:
        None

        Keyword Arguments:
        document -- the document to index
        id -- the id of the document

        Return: None
        """

        if document is not None and id is not None:
            self.se.index_data(index=self.index_name, body=document, id=id)

    def index_resources(self, resources=None, batch_size=settings.BULK_IMPORT_BATCH_SIZE, quiet=False):
        """
        Indexes a list of resources in bulk to Elastic Search

        Keyword Arguments:
        resources -- the list of resource instances to index
        batch_size -- the number of records to index as a group, the larger the number to more memory required
        quiet -- Silences the status bar output during certain operations, use in celery operations for example

        Return: None
        """

        start = datetime.now()
        q = Query(se=self.se)
        self.se.refresh(index=self.index_name)
        # snapshot the doc count so "indexed" below reflects only this run
        count_before = self.se.count(index=self.index_name, body=q.dsl)
        result_summary = {"database": len(resources), "indexed": 0}
        if quiet is False:
            bar = pyprind.ProgBar(len(resources), bar_char="█") if len(resources) > 1 else None
        with self.se.BulkIndexer(batch_size=batch_size, refresh=True) as indexer:
            for resource in resources:
                if quiet is False and bar is not None:
                    bar.update(item_id=resource)
                tiles = list(models.TileModel.objects.filter(resourceinstance=resource))
                document, doc_id = self.get_documents_to_index(resource, tiles)
                # BUGFIX: the original tested `id is not None`, which refers to
                # the *builtin* id function (always non-None), so documents with
                # a None doc_id were still submitted to the bulk indexer.
                if document is not None and doc_id is not None:
                    indexer.add(index=self.index_name, id=doc_id, data=document)

        self.se.refresh(index=self.index_name)
        result_summary["indexed"] = self.se.count(index=self.index_name, body=q.dsl) - count_before
        status = "Passed" if result_summary["database"] == result_summary["indexed"] else "Failed"
        print(f"Custom Index - {settings.ELASTICSEARCH_PREFIX}_{self.index_name}")
        print(
            f"    Status: {status}, In Database: {result_summary['database']}, Indexed: {result_summary['indexed']}, Took: {(datetime.now() - start).seconds} seconds"
        )

    def delete_resources(self, resources=None):
        """
        Deletes documents from an index based on the passed in list of resources
        Delete by query, so this is a single operation

        Keyword Arguments:
        resources -- a single resource instance or a list of resource instances
        """

        q = Query(se=self.se)
        if not isinstance(resources, list):
            resourcelist = [resources]
        else:
            resourcelist = resources
        list_of_ids_to_delete = []
        for resource in resourcelist:
            list_of_ids_to_delete.append(resource.pk)
        ids_query = Ids(ids=list_of_ids_to_delete)
        q.add_query(ids_query)
        q.delete(index=self.index_name)

    def delete_index(self):
        """
        Deletes this index from Elastic Search

        Arguments:
        None

        Keyword Arguments:
        None

        Return: None
        """

        self.se.delete_index(index=self.index_name)

    def reindex(self, graphids=None, clear_index=True, batch_size=settings.BULK_IMPORT_BATCH_SIZE, quiet=False):
        """
        Reindexes the index.  By default this does nothing, it needs to be implemented in a subclass.
        By default you can pass in a list of graph ids to trigger the reindex.  This will loop through all resource instances of each graph type.

        Example subclass command:
        def reindex(self, clear_index=True):
            PARCEL_GRAPHID = "e3c35dca-5e72-11ea-a2d3-dca90488358a"
            super(CustomIndexName, self).reindex(graphids=[PARCEL_GRAPHID], clear_index=clear_index)

        Keyword Arguments:
        graphids -- list of graphs ids to trigger the reindex on, will get all resource instances of each graph id supplied
        clear_index -- True(default) to clear all documents out of the index before reindexing begins
        batch_size -- the number of records to index as a group, the larger the number to more memory required

        Return: None
        """

        if graphids is not None:
            if clear_index:
                self.delete_index()
                self.prepare_index()

            for graphid in graphids:
                resources = Resource.objects.filter(graph_id=graphid)
                self.index_resources(resources=resources, batch_size=batch_size, quiet=quiet)
        else:
            raise NotImplementedError
class BaseIndex(object):
    """Base class for custom Elasticsearch indexes; subclasses must implement
    get_documents_to_index() and set self.index_metadata."""

    def __init__(self, index_name=None):
        # BUGFIX: was `index_name is ""` -- identity comparison against a str
        # literal is implementation-dependent (SyntaxWarning on Python 3.8+);
        # use equality instead.
        if index_name is None or index_name == "":
            raise SearchIndexError("Index name is not defined")

        self.se = SearchEngineFactory().create()
        self.index_metadata = None
        self.index_name = index_name

    def prepare_index(self):
        """
        Defines the Elastic Search mapping and settings for an index

        Arguments:
        None

        Keyword Arguments:
        None

        Return: None
        """

        if self.index_metadata is not None:
            self.se.create_index(index=self.index_name, body=self.index_metadata)
        else:
            raise SearchIndexError("No index metadata defined.")

    def get_documents_to_index(self, resourceinstance, tiles):
        """
        Gets a document to index into Elastic Search

        Arguments:
        resourceinstance -- resource instance object
        tiles -- list of tiles that make up the resource instance

        Keyword Arguments:
        None

        Return: tuple of (document, document id)
        """

        raise NotImplementedError

    def index_document(self, document=None, id=None):
        """
        Indexes a document into Elastic Search

        Arguments:
        None

        Keyword Arguments:
        document -- the document to index
        id -- the id of the document

        Return: None
        """

        if document is not None and id is not None:
            self.se.index_data(index=self.index_name, body=document, id=id)

    def bulk_index(self, resources=None, resource_type=None, graph_name=None, clear_index=True):
        """
        Indexes a list of documents in bulk to Elastic Search

        Arguments:
        None

        Keyword Arguments:
        resources -- the list of resource instances to index
        resource_type -- the type of resources being indexed
        graph_name -- the name of the graph model that represents the resources being indexed
        clear_index -- True(default) to remove all index records of type "resource_type" before indexing,
            assumes that a field called "graph_id" exists on the indexed documents

        Return: None
        """

        start = datetime.now()
        q = Query(se=self.se)
        if clear_index:
            # remove existing documents for this graph before re-indexing
            term = Term(field="graph_id", term=str(resource_type))
            q.add_query(term)
            q.delete(index=self.index_name, refresh=True)

        q = Query(se=self.se)
        # snapshot the doc count so "indexed" below reflects only this run
        count_before = self.se.count(index=self.index_name, body=q.dsl)

        result_summary = {"database": len(resources), "indexed": 0}
        with self.se.BulkIndexer(batch_size=settings.BULK_IMPORT_BATCH_SIZE, refresh=True) as indexer:
            for resource in resources:
                tiles = list(models.TileModel.objects.filter(resourceinstance=resource))
                document, doc_id = self.get_documents_to_index(resource, tiles)
                # BUGFIX: the original tested `id is not None`, which refers to
                # the *builtin* id function (always non-None), so documents with
                # a None doc_id were still submitted to the bulk indexer.
                if document is not None and doc_id is not None:
                    indexer.add(index=self.index_name, id=doc_id, data=document)

        result_summary["indexed"] = self.se.count(index=self.index_name, body=q.dsl) - count_before
        status = "Passed" if result_summary["database"] == result_summary["indexed"] else "Failed"
        print("Custom Index - %s:" % self.index_name)
        print(
            "    Status: {0}, Resource Type: {1}, In Database: {2}, Indexed: {3}, Took: {4} seconds"
            .format(status, graph_name, result_summary["database"],
                    result_summary["indexed"], (datetime.now() - start).seconds))

    def delete_index(self):
        """
        Deletes this index from Elastic Search

        Arguments:
        None

        Keyword Arguments:
        None

        Return: None
        """

        self.se.delete_index(index=self.index_name)
def delete_concepts_index():
    """Drop the concepts index (named by CONCEPTS_INDEX) from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index=CONCEPTS_INDEX)
def delete_terms_index():
    """Drop the terms index (named by TERMS_INDEX) from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index=TERMS_INDEX)
def setUpClass(cls):
    # reset the search indices so the class starts from a clean slate
    se = SearchEngineFactory().create()
    se.delete_index(index='strings')
    se.delete_index(index='resource')

    cls.client = Client()
    cls.client.login(username='******', password='******')

    # remove pre-existing resource instances, then import the test graph
    models.ResourceInstance.objects.all().delete()
    with open(os.path.join('tests/fixtures/resource_graphs/Search Test Model.json'), 'rU') as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile['graph'])

    # node/nodegroup ids from the imported Search Test Model graph
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_destruction_date_nodeid = 'e771b8a1-65fe-11e7-9163-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'
    cls.search_model_sensitive_info_nodeid = '57446fae-65ff-11e7-b63a-14109fd34195'
    cls.search_model_geom_nodeid = '3ebc6785-fa61-11e6-8c85-14109fd34195'

    # a 'Guest' user denied access to the destruction-date nodegroup,
    # used to exercise permission filtering
    cls.user = User.objects.create_user('test', '*****@*****.**', 'test')
    cls.user.save()
    cls.user.groups.add(Group.objects.get(name='Guest'))

    nodegroup = models.NodeGroup.objects.get(pk=cls.search_model_destruction_date_nodeid)
    assign_perm('no_access_to_nodegroup', cls.user, nodegroup)

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [
            {
                "values": [
                    {
                        "value": "Mock concept",
                        "language": "en-US",
                        "category": "label",
                        "type": "prefLabel",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1950",
                        "language": "en-US",
                        "category": "note",
                        "type": "min_year",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1980",
                        "language": "en-US",
                        "category": "note",
                        "type": "max_year",
                        "id": "",
                        "conceptid": ""
                    }
                ],
                "relationshiptype": "hasTopConcept",
                "nodetype": "Concept",
                "id": "",
                "legacyoid": "",
                "subconcepts": [],
                "parentconcepts": [],
                "relatedconcepts": []
            }
        ]
    }

    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    # save the mock concept through the concept view so it is also indexed
    response = cls.client.post(reverse('concept', kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}), post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json['subconcepts'][0]['values'][0]['id']
    cls.conceptid = response_json['subconcepts'][0]['id']

    # add resource instance with only a cultural period defined
    cls.cultural_period_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]}, nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.cultural_period_resource.tiles.append(tile)
    cls.cultural_period_resource.save()

    # add resource instance with a creation and destruction date defined
    cls.date_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'}, nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_destruction_date_nodeid: '1948-01-01'}, nodegroup_id=cls.search_model_destruction_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_name_nodeid: 'testing 123'}, nodegroup_id=cls.search_model_name_nodeid)
    cls.date_resource.tiles.append(tile)
    cls.date_resource.save()

    # add resource instance with a creation date and a cultural period defined
    cls.date_and_cultural_period_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1942-01-01'}, nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_and_cultural_period_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]}, nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.date_and_cultural_period_resource.tiles.append(tile)
    cls.date_and_cultural_period_resource.save()

    # add resource instance with with no dates or periods defined
    cls.name_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_name_nodeid: 'some test name'}, nodegroup_id=cls.search_model_name_nodeid)
    cls.name_resource.tiles.append(tile)
    geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }]
    }
    tile = Tile(data={cls.search_model_geom_nodeid: geom}, nodegroup_id=cls.search_model_geom_nodeid)
    cls.name_resource.tiles.append(tile)
    cls.name_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def setUpClass(cls):
    """Recreate empty terms/concepts indices before this class's tests run."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='terms,concepts')
    engine.create_index(index='terms,concepts')
def delete_index(index=None):
    """Delete the named index from the search backend.

    Keyword Arguments:
    index -- the name of the index to delete
    """
    # removed a dead trailing `pass` statement (unreachable no-op)
    se = SearchEngineFactory().create()
    se.delete_index(index=index)
def setUpModule():
    """Module-level setup: make sure no stale 'test' index is left over."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='test')
def tearDownModule():
    """Module-level teardown: drop the indices created by these tests."""
    engine = SearchEngineFactory().create()
    for name in ('strings', 'resource'):
        engine.delete_index(index=name)
def delete_search_index():
    """Drop the resources index (named by RESOURCES_INDEX) from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index=RESOURCES_INDEX)
def delete_resource_relations_index():
    """Drop the resource-relations index (named by RESOURCE_RELATIONS_INDEX)."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index=RESOURCE_RELATIONS_INDEX)
def setUpClass(cls):
    # reset the search indices so the class starts from a clean slate
    se = SearchEngineFactory().create()
    se.delete_index(index='strings')
    se.delete_index(index='resource')

    cls.client = Client()
    cls.client.login(username='******', password='******')

    # remove pre-existing resource instances, then import the test graph
    models.ResourceInstance.objects.all().delete()
    with open(os.path.join('tests/fixtures/resource_graphs/Search Test Model.json'), 'rU') as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile['graph'])

    # node/nodegroup ids from the imported Search Test Model graph
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [
            {
                "values": [
                    {
                        "value": "ANP TEST",
                        "language": "en-US",
                        "category": "label",
                        "type": "prefLabel",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1950",
                        "language": "en-US",
                        "category": "note",
                        "type": "min_year",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1980",
                        "language": "en-US",
                        "category": "note",
                        "type": "max_year",
                        "id": "",
                        "conceptid": ""
                    }
                ],
                "relationshiptype": "hasTopConcept",
                "nodetype": "Concept",
                "id": "",
                "legacyoid": "",
                "subconcepts": [],
                "parentconcepts": [],
                "relatedconcepts": []
            }
        ]
    }

    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    # save the concept through the concept view so it is also indexed
    response = cls.client.post(reverse('concept', kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}), post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json['subconcepts'][0]['values'][0]['id']

    # add resource instance with only a cultural period defined
    cls.cultural_period_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]}, nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.cultural_period_resource.tiles.append(tile)
    cls.cultural_period_resource.save()

    # add resource instance with only a creation date defined
    cls.date_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'}, nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_name_nodeid: 'testing 123'}, nodegroup_id=cls.search_model_name_nodeid)
    cls.date_resource.tiles.append(tile)
    cls.date_resource.save()

    # add resource instance with with no dates or periods defined
    cls.name_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_name_nodeid: 'some test name'}, nodegroup_id=cls.search_model_name_nodeid)
    cls.name_resource.tiles.append(tile)
    cls.name_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def delete_search_index():
    """Drop the 'resource' index from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='resource')
def setUpClass(cls):
    """Make sure no stale 'test' index exists before the class's tests run."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='test')
def delete_index(index=None):
    """Delete the given index from the search backend.

    Keyword Arguments:
    index -- name of the index to delete
    """
    engine = SearchEngineFactory().create()
    engine.delete_index(index=index)
def tearDownModule():
    """Module-level teardown: remove every index these tests may have created."""
    engine = SearchEngineFactory().create()
    for name in ("terms,concepts", "resources", "resource_relations"):
        engine.delete_index(index=name)
def tearDownClass(cls):
    """Leave fresh, empty concept indices behind for subsequent test classes."""
    engine = SearchEngineFactory().create()
    names = ('concept_labels', 'term')
    for name in names:
        engine.delete_index(index=name)
    for name in names:
        engine.create_index(index=name)
def tearDownClass(cls):
    """Recreate empty concept_labels/term indices after this class's tests."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index="concept_labels")
    engine.delete_index(index="term")
    engine.create_index(index="concept_labels")
    engine.create_index(index="term")
def tearDownClass(cls):
    """Drop the scratch indices used by this test class."""
    engine = SearchEngineFactory().create()
    for name in ("test", "bulk"):
        engine.delete_index(index=name)
def tearDownClass(cls):
    """Remove the 'test' and 'bulk' indices created during this class's tests."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index='test')
    engine.delete_index(index='bulk')
def setUpClass(cls):
    # reset the search indices so the class starts from a clean slate
    se = SearchEngineFactory().create()
    se.delete_index(index='terms,concepts')
    se.delete_index(index='resources')

    cls.client = Client()
    cls.client.login(username='******', password='******')

    # remove pre-existing resource instances, then import the test graph
    models.ResourceInstance.objects.all().delete()
    with open(
            os.path.join(
                'tests/fixtures/resource_graphs/Resource Test Model.json'),
            'rU') as f:
        archesfile = JSONDeserializer().deserialize(f)
    resource_graph_importer(archesfile['graph'])

    # node/nodegroup ids from the imported Resource Test Model graph
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_destruction_date_nodeid = 'e771b8a1-65fe-11e7-9163-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'
    cls.search_model_sensitive_info_nodeid = '57446fae-65ff-11e7-b63a-14109fd34195'
    cls.search_model_geom_nodeid = '3ebc6785-fa61-11e6-8c85-14109fd34195'

    # a 'Guest' user denied access to the destruction-date nodegroup,
    # used to exercise permission filtering
    cls.user = User.objects.create_user('test', '*****@*****.**', 'test')
    cls.user.save()
    cls.user.groups.add(Group.objects.get(name='Guest'))

    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm('no_access_to_nodegroup', cls.user, nodegroup)

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [{
                "value": "Mock concept",
                "language": "en-US",
                "category": "label",
                "type": "prefLabel",
                "id": "",
                "conceptid": ""
            }, {
                "value": "1950",
                "language": "en-US",
                "category": "note",
                "type": "min_year",
                "id": "",
                "conceptid": ""
            }, {
                "value": "1980",
                "language": "en-US",
                "category": "note",
                "type": "max_year",
                "id": "",
                "conceptid": ""
            }],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": []
        }]
    }

    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    # save the mock concept through the concept view so it is also indexed
    response = cls.client.post(
        reverse(
            'concept',
            kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json['subconcepts'][0]['values'][0]['id']
    cls.conceptid = response_json['subconcepts'][0]['id']

    # Add resource with Name, Cultural Period, Creation Date and Geometry
    cls.test_resource = Resource(graph_id=cls.search_model_graphid)

    # Add Name
    tile = Tile(data={cls.search_model_name_nodeid: 'Test Name 1'},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Cultural Period
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Creation Date
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Geometry
    cls.geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }]
    }
    tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.test_resource.tiles.append(tile)

    cls.test_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def tearDownClass(cls):
    """Recreate empty terms/concepts indices after this class's tests."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index="terms,concepts")
    engine.create_index(index="terms,concepts")
def delete_terms_index():
    """Drop the 'terms' index from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index="terms")
def delete_index(self, index):
    """Delete *index* from the search backend.

    Arguments:
    index -- name of the index to delete
    """
    engine = SearchEngineFactory().create()
    engine.delete_index(index=index)
def delete_concepts_index():
    """Drop the 'concepts' index from the search backend."""
    engine = SearchEngineFactory().create()
    engine.delete_index(index="concepts")