def delete(self):
    """Remove this relation's document from the resource_relations index,
    then delete the database row itself."""
    from arches.app.search.search_engine_factory import SearchEngineFactory

    search_engine = SearchEngineFactory().create()
    search_engine.delete(index='resource_relations', doc_type='all', id=self.resourcexid)
    super(ResourceXResource, self).delete()
def delete(self, *args, **kwargs):
    """Delete this tile (and its child tiles) plus its term-index documents.

    Keyword Arguments:
    request -- the originating HttpRequest; used for permissions and edit logging
    provisional_edit_log_details -- extra detail recorded with the edit log entry

    Non-reviewer users who do not own the provisional data get a provisional
    "delete" edit recorded instead of an actual delete.
    """
    se = SearchEngineFactory().create()
    request = kwargs.pop("request", None)
    provisional_edit_log_details = kwargs.pop("provisional_edit_log_details", None)
    # delete child tiles first so their index entries are cleaned up as well
    for tile in self.tiles:
        tile.delete(*args, request=request, **kwargs)
    try:
        user = request.user
        user_is_reviewer = user_is_resource_reviewer(user)
    except AttributeError:  # no user (request is None or has no .user)
        user = None
        # request-less (system) deletes proceed as if done by a reviewer
        user_is_reviewer = True
    if user_is_reviewer is True or self.user_owns_provisional(user):
        # purge every term-index document generated from this tile
        query = Query(se)
        bool_query = Bool()
        bool_query.filter(Terms(field="tileid", terms=[self.tileid]))
        query.add_query(bool_query)
        results = query.search(index="terms")["hits"]["hits"]
        for result in results:
            se.delete(index="terms", id=result["_id"])
        self.__preDelete(request)
        self.save_edit(
            user=request.user,
            edit_type="tile delete",
            old_value=self.data,
            provisional_edit_log_details=provisional_edit_log_details
        )
        super(Tile, self).delete(*args, **kwargs)
        # re-index the parent resource so search reflects the removed tile
        resource = Resource.objects.get(resourceinstanceid=self.resourceinstance.resourceinstanceid)
        resource.index()
    else:
        # provisional users only log the delete; the tile itself is kept
        self.apply_provisional_edit(user, data={}, action="delete")
        super(Tile, self).save(*args, **kwargs)
def __init__(self, index_name=None):
    """Bind this index wrapper to *index_name* and create a search-engine client.

    Raises:
        SearchIndexError: when no index name is supplied.
    """
    if index_name in (None, ""):
        raise SearchIndexError("Index name is not defined")
    self.se = SearchEngineFactory().create()
    self.index_metadata = None
    self.index_name = index_name
def reverse_func(apps, schema_editor):
    """Migration rollback: reload the base ontology, strip ontology URIs back to
    bare class/property names, and remove the seeded Arches concept index data."""
    extensions = [os.path.join(settings.ONTOLOGY_PATH, x) for x in settings.ONTOLOGY_EXT]
    management.call_command(
        'load_ontology',
        source=os.path.join(settings.ONTOLOGY_PATH, settings.ONTOLOGY_BASE),
        version=settings.ONTOLOGY_BASE_VERSION,
        ontology_name=settings.ONTOLOGY_BASE_NAME,
        id=settings.ONTOLOGY_BASE_ID,
        extensions=','.join(extensions),
        verbosity=0)
    Node = apps.get_model("models", "Node")
    Edge = apps.get_model("models", "Edge")
    for node in Node.objects.all():
        # keep only the fragment after the last '/' (drop the namespace URI)
        node.ontologyclass = str(node.ontologyclass).split('/')[-1]
        node.save()
    for edge in Edge.objects.all():
        edge.ontologyproperty = str(edge.ontologyproperty).split('/')[-1]
        edge.save()
    # remove index for base Arches concept
    se = SearchEngineFactory().create()
    query = Query(se, start=0, limit=10000)
    query.add_query(Term(field='conceptid', term='00000000-0000-0000-0000-000000000001'))
    query.delete(index='concepts')
    try:
        DValueType = apps.get_model("models", "DValueType")
        DValueType.objects.get(valuetype='identifier').delete()
    except Exception:
        # best-effort cleanup; narrowed from a bare `except:` which also
        # swallowed SystemExit/KeyboardInterrupt
        pass
def delete(self):
    """
    Deletes a single resource and any related indexed data
    """
    se = SearchEngineFactory().create()
    # drop every relationship row (each delete also cleans its index entry)
    related_resources = self.get_related_resources(lang="en-US", start=0, limit=15)
    for relationship in related_resources['resource_relationships']:
        models.ResourceXResource.objects.get(pk=relationship['resourcexid']).delete()
    # remove this resource's term documents from the strings index
    term_query = Query(se)
    term_filter = Bool()
    term_filter.filter(Terms(field='resourceinstanceid', terms=[self.resourceinstanceid]))
    term_query.add_query(term_filter)
    for hit in term_query.search(index='strings', doc_type='term')['hits']['hits']:
        se.delete(index='strings', doc_type='term', id=hit['_id'])
    # finally remove the resource document itself and the database row
    se.delete(index='resource', doc_type=str(self.graph_id), id=self.resourceinstanceid)
    super(Resource, self).delete()
def get_preflabel_from_conceptid(conceptid, lang):
    """Return the preferred-label document for *conceptid*.

    Preference order: exact language match, then same base language
    (e.g. 'en' for 'en-US'), then settings.LANGUAGE_CODE, then the last
    label seen (the running default).
    """
    ret = None
    default = {
        "category": "",
        "conceptid": "",
        "language": "",
        "value": "",
        "type": "",
        "id": ""
    }
    se = SearchEngineFactory().create()
    query = Query(se)
    terms = Terms(field='conceptid', terms=[conceptid])
    match = Match(field='type', query='preflabel', type='phrase')
    query.add_filter(terms)
    query.add_query(match)
    preflabels = query.search(index='concept_labels')['hits']['hits']
    for preflabel in preflabels:
        default = preflabel['_source']
        # get the label in the preferred language, otherwise get the label in the default language
        if preflabel['_source']['language'] == lang:
            return preflabel['_source']
        if preflabel['_source']['language'].split('-')[0] == lang.split('-')[0]:
            ret = preflabel['_source']
        # fixed: `== None` replaced with the `is None` identity check
        if preflabel['_source']['language'] == settings.LANGUAGE_CODE and ret is None:
            ret = preflabel['_source']
    return default if ret is None else ret
def get(self, request, resourceid=None):
    """Return display descriptors for a resource instance as JSON, or 404
    when the resource is missing, is the system-settings resource, or the
    lookup fails."""
    visible = Resource.objects.filter(pk=resourceid).exclude(
        pk=settings.SYSTEM_SETTINGS_RESOURCE_ID).exists()
    if visible:
        try:
            resource = Resource.objects.get(pk=resourceid)
            se = SearchEngineFactory().create()
            document = se.search(index=RESOURCES_INDEX, id=resourceid)
            source = document["_source"]
            payload = {
                "graphid": source["graph_id"],
                "graph_name": resource.graph.name,
                "displaydescription": source["displaydescription"],
                "map_popup": source["map_popup"],
                "displayname": source["displayname"],
                "geometries": source["geometries"],
                "permissions": source["permissions"],
                "userid": request.user.id,
            }
            return JSONResponse(payload)
        except Exception:
            # boundary handler: log with traceback, fall through to 404
            logger.exception(_("Failed to fetch resource instance descriptors"))
    return HttpResponseNotFound()
def prepare_resource_relations_index(create=False):
    """
    Creates the settings and mappings in Elasticsearch to support related resources
    """
    keyword_fields = (
        'resourcexid',
        'relationshiptype',
        'resourceinstanceidfrom',
        'resourceinstanceidto',
        'created',
        'modified',
    )
    properties = {name: {'type': 'keyword'} for name in keyword_fields}
    properties['notes'] = {'type': 'text'}
    index_settings = {'mappings': {'_doc': {'properties': properties}}}
    if create:
        se = SearchEngineFactory().create()
        se.create_index(index='resource_relations', body=index_settings)
    return index_settings
def index(self):
    """
    Indexes all the necessary documents related to resources to support
    the map, search, and reports
    """
    se = SearchEngineFactory().create()
    search_documents = self.prepare_documents_for_search_index()
    for document in search_documents:
        se.index_data('entity', self.entitytypeid, document, id=self.entityid)
        # report documents are derived from this search document's geometries
        report_documents = self.prepare_documents_for_report_index(geom_entities=document['geometries'])
        for report_document in report_documents:
            se.index_data('resource', self.entitytypeid, report_document, id=self.entityid)
        # map-layer documents use their own 'id' field rather than entityid
        geojson_documents = self.prepare_documents_for_map_index(geom_entities=document['geometries'])
        for geojson in geojson_documents:
            se.index_data('maplayers', self.entitytypeid, geojson, idfield='id')
    # index individual search terms for type-ahead support
    # NOTE(review): 'ewstatus' key looks unusual -- confirm it matches the
    # dict shape produced by prepare_terms_for_search_index
    for term in self.prepare_terms_for_search_index():
        se.index_term(term['term'], term['entityid'], term['context'], term['ewstatus'], term['options'])
def delete(self, *args, **kwargs):
    """Delete this tile and its child tiles, removing related term-index docs.

    Keyword Arguments:
    request -- the originating HttpRequest; used for permissions and edit logging
    provisional_edit_log_details -- extra detail recorded with the edit log entry

    Non-reviewer users who do not own the provisional data get a provisional
    "delete" edit recorded instead of an actual delete.
    """
    se = SearchEngineFactory().create()
    request = kwargs.pop('request', None)
    provisional_edit_log_details = kwargs.pop('provisional_edit_log_details', None)
    # delete child tiles first so their index entries are removed as well
    for tile in self.tiles:
        tile.delete(*args, request=request, **kwargs)
    try:
        user = request.user
        user_is_reviewer = request.user.groups.filter(name='Resource Reviewer').exists()
    except AttributeError:  # no user
        user = None
        # BUGFIX: user_is_reviewer was never assigned on this path, so the
        # check below raised UnboundLocalError; default to True so
        # request-less (system) deletes proceed as reviewer deletes
        user_is_reviewer = True
    if user_is_reviewer is True or self.user_owns_provisional(user):
        # purge the term-index documents generated from this tile
        query = Query(se)
        bool_query = Bool()
        bool_query.filter(Terms(field='tileid', terms=[self.tileid]))
        query.add_query(bool_query)
        results = query.search(index='terms')['hits']['hits']
        for result in results:
            se.delete(index='terms', id=result['_id'])
        self.__preDelete(request)
        self.save_edit(
            user=request.user,
            edit_type='tile delete',
            old_value=self.data,
            provisional_edit_log_details=provisional_edit_log_details)
        super(Tile, self).delete(*args, **kwargs)
        # re-index the parent resource so search reflects the removed tile
        resource = Resource.objects.get(resourceinstanceid=self.resourceinstance.resourceinstanceid)
        resource.index()
    else:
        # provisional users only log the delete; the tile itself is kept
        self.apply_provisional_edit(user, data={}, action='delete')
        super(Tile, self).save(*args, **kwargs)
def get_preflabel_from_conceptid(conceptid, lang):
    """Return the preferred-label document for *conceptid*.

    Preference order: exact language match, then same base language
    (e.g. 'en' for 'en-US'), then settings.LANGUAGE_CODE, then the last
    label seen (the running default).
    """
    ret = None
    default = {
        "category": "",
        "conceptid": "",
        "language": "",
        "value": "",
        "type": "",
        "id": ""
    }
    se = SearchEngineFactory().create()
    query = Query(se)
    terms = Terms(field='conceptid', terms=[conceptid])
    # NOTE: a Match on type=prefLabel is intentionally NOT applied until
    # ElasticSearch is reindexed -- the Arabic labels are currently indexed
    # as altLabels and would be filtered out
    query.add_filter(terms)
    preflabels = query.search(index='concept_labels')['hits']['hits']
    for preflabel in preflabels:
        default = preflabel['_source']
        # get the label in the preferred language, otherwise fall back
        if preflabel['_source']['language'] == lang:
            return preflabel['_source']
        if preflabel['_source']['language'].split('-')[0] == lang.split('-')[0]:
            ret = preflabel['_source']
        # BUGFIX: this fallback compared against `lang` again, which is dead
        # code (an exact match already returned above); fall back to the
        # site default language instead, matching the sibling implementations
        if preflabel['_source']['language'] == settings.LANGUAGE_CODE and ret is None:
            ret = preflabel['_source']
    return default if ret is None else ret
def delete(self, request, resourceid=None): lang = request.GET.get('lang', settings.LANGUAGE_CODE) se = SearchEngineFactory().create() req = dict(request.GET) ids_to_delete = req['resourcexids[]'] root_resourceinstanceid = req['root_resourceinstanceid'] for resourcexid in ids_to_delete: try: ret = models.ResourceXResource.objects.get( pk=resourcexid).delete() except: print 'resource relation does not exist' start = request.GET.get('start', 0) se.es.indices.refresh(index=se._add_prefix("resource_relations")) resource = Resource.objects.get(pk=root_resourceinstanceid[0]) page = 1 if request.GET.get('page') == '' else int( request.GET.get('page', 1)) related_resources = resource.get_related_resources(lang=lang, start=start, limit=1000, page=page) ret = [] if related_resources is not None: ret = self.paginate_related_resources(related_resources, page, request) return JSONResponse(ret, indent=4)
def search_terms(request):
    """Type-ahead term search: returns matching term and concept suggestions
    from the 'terms' and 'concepts' indexes, grouped by exact value, for the
    quick-search dropdown."""
    lang = request.GET.get('lang', settings.LANGUAGE_CODE)
    se = SearchEngineFactory().create()
    searchString = request.GET.get('q', '')
    user_is_reviewer = request.user.groups.filter(name='Resource Reviewer').exists()
    i = 0  # running suggestion id across both indexes
    ret = {}
    for index in ['terms', 'concepts']:
        # limit=0: only the aggregations are needed, not the hits themselves
        query = Query(se, start=0, limit=0)
        boolquery = Bool()
        # prefix matches on the raw and ascii-folded value, plus a fuzzy match
        boolquery.should(Match(field='value', query=searchString.lower(), type='phrase_prefix'))
        boolquery.should(Match(field='value.folded', query=searchString.lower(), type='phrase_prefix'))
        boolquery.should(Match(field='value.folded', query=searchString.lower(), fuzziness='AUTO', prefix_length=settings.SEARCH_TERM_SENSITIVITY))
        if user_is_reviewer is False and index == 'terms':
            # non-reviewers never see provisional terms
            boolquery.filter(Terms(field='provisional', terms=['false']))
        query.add_query(boolquery)
        # bucket suggestions by exact value, best-scoring values first
        base_agg = Aggregation(name='value_agg', type='terms', field='value.raw', size=settings.SEARCH_DROPDOWN_LENGTH, order={"max_score": "desc"})
        nodegroupid_agg = Aggregation(name='nodegroupid', type='terms', field='nodegroupid')
        top_concept_agg = Aggregation(name='top_concept', type='terms', field='top_concept')
        conceptid_agg = Aggregation(name='conceptid', type='terms', field='conceptid')
        max_score_agg = MaxAgg(name='max_score', script='_score')
        top_concept_agg.add_aggregation(conceptid_agg)
        base_agg.add_aggregation(max_score_agg)
        base_agg.add_aggregation(top_concept_agg)
        base_agg.add_aggregation(nodegroupid_agg)
        query.add_aggregation(base_agg)
        ret[index] = []
        results = query.search(index=index)
        for result in results['aggregations']['value_agg']['buckets']:
            if len(result['top_concept']['buckets']) > 0:
                # concept-backed values: one suggestion per (top concept, concept)
                for top_concept in result['top_concept']['buckets']:
                    top_concept_id = top_concept['key']
                    top_concept_label = get_preflabel_from_conceptid(top_concept['key'], lang)['value']
                    for concept in top_concept['conceptid']['buckets']:
                        ret[index].append({
                            'type': 'concept',
                            'context': top_concept_id,
                            'context_label': top_concept_label,
                            'id': i,
                            'text': result['key'],
                            'value': concept['key']
                        })
                        i = i + 1
            else:
                # free-text term with no concept hierarchy
                ret[index].append({
                    'type': 'term',
                    'context': '',
                    'context_label': get_resource_model_label(result),
                    'id': i,
                    'text': result['key'],
                    'value': result['key']
                })
                i = i + 1
    return JSONResponse(ret)
def get_preflabel_from_conceptid(conceptid, lang):
    """Return the preferred-label document for *conceptid*, favouring an exact
    language match, then the same base language (e.g. 'en' for 'en-US'),
    then the site default language, then the last label seen."""
    fallback = None
    default = {
        "category": "",
        "conceptid": "",
        "language": "",
        "value": "",
        "type": "",
        "id": ""
    }
    se = SearchEngineFactory().create()
    query = Query(se)
    label_query = Bool()
    label_query.must(Match(field="type", query="prefLabel", type="phrase"))
    label_query.filter(Terms(field="conceptid", terms=[conceptid]))
    query.add_query(label_query)
    hits = query.search(index="concepts")["hits"]["hits"]
    for hit in hits:
        source = hit["_source"]
        default = source
        if source["language"] == lang:
            return source
        if source["language"].split("-")[0] == lang.split("-")[0]:
            fallback = source
        if source["language"] == settings.LANGUAGE_CODE and fallback is None:
            fallback = source
    return default if fallback is None else fallback
def index(self):
    """
    Indexes all the necessary item values of a resource to support search
    """
    # the system-settings resource is deliberately never indexed
    if str(self.graph_id) != str(settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID):
        se = SearchEngineFactory().create()
        datatype_factory = DataTypeFactory()
        # nodeid -> datatype lookup for every node, used when serializing tiles
        node_datatypes = {
            str(nodeid): datatype
            for nodeid, datatype in models.Node.objects.values_list("nodeid", "datatype")
        }
        document, terms = self.get_documents_to_index(
            datatype_factory=datatype_factory, node_datatypes=node_datatypes)
        document["root_ontology_class"] = self.get_root_ontology()
        doc = JSONSerializer().serializeToPython(document)
        se.index_data(index="resources", body=doc, id=self.pk)
        for term in terms:
            se.index_data("terms", body=term["_source"], id=term["_id"])
        # let any custom indexes registered in settings index this resource too
        for index in settings.ELASTICSEARCH_CUSTOM_INDEXES:
            es_index = import_class_from_string(index["module"])(index["name"])
            document, doc_id = es_index.get_documents_to_index(self, document["tiles"])
            es_index.index_document(document=document, id=doc_id)
def prepare_search_index(self, resource_type_id, create=False):
    """
    Creates the settings and mappings in Elasticsearch to support resource search
    """
    index_settings = super(Resource, self).prepare_search_index(resource_type_id, create=False)
    # extend the base mapping with the date_groups property
    index_settings['mappings'][resource_type_id]['properties']['date_groups'] = {
        'properties': {
            'conceptid': {
                'type': 'string',
                'index': 'not_analyzed'
            }
        }
    }
    if create:
        se = SearchEngineFactory().create()
        try:
            se.create_index(index='entity', body=index_settings)
        except Exception:
            # narrowed from a bare `except:`; if the index already exists,
            # just add the mapping for this resource type instead
            index_settings = index_settings['mappings']
            se.create_mapping(index='entity', doc_type=resource_type_id, body=index_settings)
def clear_resources():
    """Removes all resource instances from your db and elasticsearch resource index"""
    se = SearchEngineFactory().create()
    wipe = Query(se)
    for index_name in ("terms", "resources", "resource_relations"):
        wipe.delete(index=index_name)
    # the system settings resource instance is always preserved
    doomed = Resource.objects.exclude(resourceinstanceid=settings.RESOURCE_INSTANCE_ID)
    print("deleting", doomed.count(), "resources")
    doomed.delete()
    remaining = Resource.objects.exclude(resourceinstanceid=settings.RESOURCE_INSTANCE_ID).count()
    print(remaining, "resources remaining")
    print("deleting", models.ResourceXResource.objects.count(), "resource relationships")
    # TRUNCATE is used instead of .delete() to bypass per-row signals/cascades
    cursor = connection.cursor()
    cursor.execute("TRUNCATE public.resource_x_resource CASCADE;")
    print(models.ResourceXResource.objects.count(), "resource relationships remaining")
def get_related_resources(resourceid, lang, limit=1000, start=0):
    """Return the relationships touching *resourceid* plus the related
    resource documents themselves.

    Keyword Arguments:
    limit -- maximum number of relationship documents to return
    start -- offset into the relationship result set
    """
    ret = {
        'resource_relationships': [],
        'related_resources': []
    }
    se = SearchEngineFactory().create()
    query = Query(se, limit=limit, start=start)
    # match relations where this resource is on either end
    query.add_filter(Terms(field='entityid1', terms=resourceid).dsl, operator='or')
    query.add_filter(Terms(field='entityid2', terms=resourceid).dsl, operator='or')
    resource_relations = query.search(index='resource_relations', doc_type='all')
    ret['total'] = resource_relations['hits']['total']
    entityids = set()
    for relation in resource_relations['hits']['hits']:
        relation['_source']['preflabel'] = get_preflabel_from_valueid(relation['_source']['relationshiptype'], lang)
        ret['resource_relationships'].append(relation['_source'])
        entityids.add(relation['_source']['entityid1'])
        entityids.add(relation['_source']['entityid2'])
    if len(entityids) > 0:
        # BUGFIX: remove() raised KeyError when resourceid wasn't among the
        # collected ids (e.g. an id-format mismatch); discard() is safe and
        # identical when the id is present
        entityids.discard(resourceid)
        related_resources = se.search(index='entity', doc_type='_all', id=list(entityids))
        if related_resources:
            for resource in related_resources['docs']:
                ret['related_resources'].append(resource['_source'])
    return ret
def reverse_func(apps, schema_editor):
    """Migration rollback: convert the resource_relations index mapping from
    the '_doc' doc_type back to the legacy 'all' doc_type by round-tripping
    the documents through a temporary index."""
    se = SearchEngineFactory().create()
    prefix = settings.ELASTICSEARCH_PREFIX
    if (se.es.indices.exists(index="%s_resource_relations" % prefix)):
        index_settings = prepare_resource_relations_index(create=False)
        # rename the mapping type '_doc' -> 'all'
        index_settings['mappings']['all'] = index_settings['mappings']['_doc']
        index_settings['mappings'].pop('_doc', None)
        se.create_index(index='resource_relations_temp', body=index_settings)
        # copy the existing documents into the temp index under the 'all' type
        doc = {
            "source": {
                "index": "%s_resource_relations" % prefix,
                "type": "_doc"
            },
            "dest": {
                "index": "%s_resource_relations_temp" % prefix,
                "type": "all"
            }
        }
        se.es.reindex(body=doc, refresh=True, wait_for_completion=True)
        # recreate the real index with the legacy mapping and copy back
        se.delete_index(index='resource_relations')
        se.create_index(index='resource_relations', body=index_settings)
        doc = {
            "source": {
                "index": "%s_resource_relations_temp" % prefix
            },
            "dest": {
                "index": "%s_resource_relations" % prefix,
                "type": "all"
            }
        }
        se.es.reindex(body=doc, refresh=True, wait_for_completion=True)
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """Create/read/update/delete view for a resource instance and its forms.

    GET renders the requested form; POST saves form data and redirects;
    DELETE removes the resource, its index entries, and its relationships.
    """
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})
    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']
    form = resource.get_form(form_id)
    if request.method == 'DELETE':
        resource.delete_index()
        se = SearchEngineFactory().create()
        # local typo fixed: realtionship(s) -> relationship(s)
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            se.delete(index='resource_relations', doc_type='all', id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})
    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.update(data, request.FILES)
        with transaction.atomic():
            if resourceid != '':
                # drop stale index entries before re-saving and re-indexing
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            return redirect('resource_manager', resourcetypeid=resourcetypeid, form_id=form_id, resourceid=resourceid)
    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))
    if request.method == 'GET':
        if form is not None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            form.load(lang)
            return render(request, 'resource-manager.htm', {
                'form': form,
                'formdata': JSONSerializer().serialize(form.data),
                'form_template': 'views/forms/' + form_id + '.htm',
                'form_id': form_id,
                'resourcetypeid': resourcetypeid,
                'resourceid': resourceid,
                'main_script': 'resource-manager',
                'active_page': 'ResourceManger',
                'resource': resource,
                'resource_name': resource.get_primary_name(),
                'resource_type_name': resource.get_type_name(),
                'form_groups': resource.form_groups,
                'min_date': min_max_dates['val__min'].year if min_max_dates['val__min'] is not None else 0,
                # BUGFIX: max_date previously guarded on val__min; guard on val__max
                'max_date': min_max_dates['val__max'].year if min_max_dates['val__max'] is not None else 1,
                'timefilterdata': JSONSerializer().serialize(Concept.get_time_filter_data()),
            })
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def delete(self, request, resourceid=None):
    """Delete the requested resource relationships, then return the refreshed,
    paginated related-resources listing for the root resource instance."""
    lang = request.GET.get("lang", request.LANGUAGE_CODE)
    se = SearchEngineFactory().create()
    params = dict(request.GET)
    root_resourceinstanceid = params["root_resourceinstanceid"]
    for resourcexid in params["resourcexids[]"]:
        try:
            ret = models.ResourceXResource.objects.get(pk=resourcexid).delete()
        except ObjectDoesNotExist:
            logger.exception(_("Unable to delete. Relationship does not exist"))
    start = request.GET.get("start", 0)
    # flush the deletions so the follow-up query sees them
    se.es.indices.refresh(index=se._add_prefix("resource_relations"))
    resource = Resource.objects.get(pk=root_resourceinstanceid[0])
    raw_page = request.GET.get("page", 1)
    page = 1 if request.GET.get("page") == "" else int(raw_page)
    related_resources = resource.get_related_resources(
        lang=lang, start=start, limit=1000, page=page, user=request.user)
    ret = []
    if related_resources is not None:
        ret = self.paginate_related_resources(related_resources, page, request)
    return JSONResponse(ret, indent=4)
def delete_index(self):
    """Remove every concept_labels document for this concept, plus its
    indexed search terms."""
    se = SearchEngineFactory().create()
    label_query = Query(se, start=0, limit=10000)
    label_query.add_query(Match(field='id', query=self.id, type='phrase'))
    label_query.delete(index='concept_labels')
    se.delete_terms(self.id)
def index_resources(clear_index=True, index_name=None, batch_size=settings.BULK_IMPORT_BATCH_SIZE):
    """
    Indexes all resources from the database

    Keyword Arguments:
    clear_index -- set to True to remove all the resources from the index before the reindexing operation
    index_name -- only applies to custom indexes and if given will try and just refresh the data in that index
    batch_size -- the number of records to index as a group, the larger the number to more memory required
    """
    se = SearchEngineFactory().create()
    if clear_index and index_name is None:
        Query(se=se).delete(index="terms")
    # every resource graph except the system-settings model
    resource_types = (
        models.GraphModel.objects.filter(isresource=True)
        .exclude(graphid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID)
        .values_list("graphid", flat=True)
    )
    index_resources_by_type(resource_types, clear_index=clear_index, index_name=index_name, batch_size=batch_size)
def get_scheme_id(self):
    """Look this concept up in the concept_labels index; return a Concept
    built from the hit's doc type, or None when the id is not indexed."""
    se = SearchEngineFactory().create()
    hit = se.search(index='concept_labels', id=self.id)
    if not hit['found']:
        return None
    return Concept(hit['_type'])
def search_results(request):
    """Main search endpoint: builds the ES query from the registered search
    filters, runs it against the resources index, lets each filter
    post-process the hits, and returns the combined payload as JSON."""
    se = SearchEngineFactory().create()
    search_results_object = {"query": Query(se)}
    include_provisional = get_provisional_type(request)
    permitted_nodegroups = get_permitted_nodegroups(request.user)
    search_filter_factory = SearchFilterFactory(request)
    try:
        # every GET param maps to a search filter; the 'search-results'
        # filter is always applied last
        for filter_type, querystring in list(request.GET.items()) + [("search-results", "")]:
            search_filter = search_filter_factory.get_filter(filter_type)
            if search_filter:
                search_filter.append_dsl(search_results_object, permitted_nodegroups, include_provisional)
    except Exception as err:
        return JSONResponse(err, status=500)
    dsl = search_results_object.pop("query", None)
    # restrict the returned _source fields to what the search UI needs
    dsl.include("graph_id")
    dsl.include("root_ontology_class")
    dsl.include("resourceinstanceid")
    dsl.include("points")
    dsl.include("geometries")
    dsl.include("displayname")
    dsl.include("displaydescription")
    dsl.include("map_popup")
    dsl.include("provisional_resource")
    if request.GET.get("tiles", None) is not None:
        dsl.include("tiles")
    results = dsl.search(index="resources")
    ret = {}
    if results is not None:
        # allow filters to modify the results
        for filter_type, querystring in list(request.GET.items()) + [("search-results", "")]:
            search_filter = search_filter_factory.get_filter(filter_type)
            if search_filter:
                search_filter.post_search_hook(search_results_object, results, permitted_nodegroups)
        ret["results"] = results
        # merge anything the filters stashed on the results object (e.g. facets)
        for key, value in list(search_results_object.items()):
            ret[key] = value
        ret["reviewer"] = request.user.groups.filter(name="Resource Reviewer").exists()
        ret["timestamp"] = datetime.now()
        ret["total_results"] = dsl.count(index="resources")
        return JSONResponse(ret)
    else:
        ret = {"message": _("There was an error retrieving the search results")}
        return JSONResponse(ret, status=500)
def get_scheme_id(self):
    """Fetch this concept's document from the strings index and return a
    Concept for its top concept, or None when the document is not found."""
    se = SearchEngineFactory().create()
    doc = se.search(index='strings', doc_type='concept', id=self.id)
    if not doc['found']:
        return None
    # NOTE(review): reads 'top_concept' from the response root rather than
    # doc['_source'] -- confirm against the strings-index document shape
    return Concept(doc['top_concept'])
def index_resources_by_type(resource_types, result_summary):
    """
    Collects and indexes all resources

    result_summary is mutated in place: one entry per resource type with
    'database' (row count) and 'indexed' counts, and is also returned.
    """
    for resource_type in resource_types:
        resources = archesmodels.Entities.objects.filter(entitytypeid=resource_type)
        print "Indexing {0} {1} resources".format(len(resources), resource_type[0])
        result_summary[resource_type[0]] = {'database': len(resources), 'indexed': 0}
        errors = []
        for resource in resources:
            try:
                resource = Resource().get(resource.entityid)
                resource.index()
            except Exception as e:
                # NOTE(review): `e not in errors` compares exception instances,
                # which are never equal to each other, so every error is
                # appended -- confirm whether dedup by message was intended
                if e not in errors:
                    errors.append(e)
        if len(errors) > 0:
            print errors[0], ':', len(errors)
    # index the related-resource records as well
    se = SearchEngineFactory().create()
    related_resource_records = archesmodels.RelatedResource.objects.all()
    for related_resource_record in related_resource_records:
        se.index_data(index='resource_relations', doc_type='all',
                      body=model_to_dict(related_resource_record), idfield='resourcexid')
    return result_summary
def delete_concept_values_index(concepts_to_delete):
    """Delete every strings-index 'concept' document belonging to the given
    concepts (a dict of conceptid -> Concept)."""
    se = SearchEngineFactory().create()
    for concept in concepts_to_delete.itervalues():
        purge = Query(se, start=0, limit=10000)
        purge.add_query(Term(field='conceptid', term=concept.id))
        purge.delete(index='strings', doc_type='concept')
def index(self):
    """
    Indexes all the necessary item values of a resource to support search
    """
    # the system-settings resource is deliberately never indexed
    if unicode(self.graph_id) != unicode(settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID):
        se = SearchEngineFactory().create()
        datatype_factory = DataTypeFactory()
        # nodeid -> datatype lookup used while serializing the tile data
        node_datatypes = {
            str(nodeid): datatype
            for nodeid, datatype in models.Node.objects.values_list('nodeid', 'datatype')
        }
        document, terms = self.get_documents_to_index(
            datatype_factory=datatype_factory, node_datatypes=node_datatypes)
        document['root_ontology_class'] = self.get_root_ontology()
        se.index_data('resource', self.graph_id, JSONSerializer().serializeToPython(document), id=self.pk)
        for term in terms:
            se.index_data('strings', 'term', term['_source'], id=term['_id'])
def find_overlapping(request):
    '''This function queries ES when called via Ajax when a new geometry is
    created in the Location tab. If pre-existing resources are found within
    the perimeter of the polygon (or the buffered zone around a
    point/line/polygon), an alert is raised.'''
    geomString = request.GET.get('geom', '')
    geom = GEOSGeometry(geomString, srid=4326)
    mindistance = settings.METER_RADIUS
    if not mindistance:
        mindistance = 1000  # if settings.METER_RADIUS isn't set, default to 1Km
    # buffer in web-mercator meters (3857), then project back to lon/lat for ES
    geom.transform(3857)
    buffered_geom = geom.buffer(mindistance)
    buffered_geom.transform(4326)
    # NOTE(review): debug print left in; consider replacing with logging
    print geom, buffered_geom
    se = SearchEngineFactory().create()
    query = Query(se)
    boolfilter = Bool()
    # geo_shape query nested under the 'geometries' path of the entity docs
    geoshape = GeoShape(field='geometries.value', type=buffered_geom.geom_type, coordinates=buffered_geom.coords)
    nested = Nested(path='geometries', query=geoshape)
    boolfilter.must(nested)
    query.add_filter(boolfilter)
    results = query.search(index='entity', doc_type='')
    overlaps = []
    for hit in results['hits']['hits']:
        overlaps.append({
            'id': hit['_id'],
            'type': hit['_type'],
            'primaryname': hit['_source']['primaryname']
        })
    return JSONResponse(overlaps)