def get(self, request, format=None):
    """Perform a basic search and return serialised hits plus per-entity aggregations."""
    query_serializer = BasicSearchQuerySerializer(data=request.query_params)
    query_serializer.is_valid(raise_exception=True)
    params = query_serializer.validated_data

    # Combine the fields excluded for every entity with any view-specific ones.
    excluded_fields = (
        *SHARED_FIELDS_TO_EXCLUDE,
        *(self.fields_to_exclude or ()),
    )
    permission_filters = dict(_get_global_search_permission_filters(request))

    search_query = get_basic_search_query(
        entity=params['entity'],
        term=params['term'],
        permission_filters_by_entity=permission_filters,
        offset=params['offset'],
        limit=params['limit'],
        fields_to_exclude=excluded_fields,
    )
    search_results = execute_search_query(search_query)

    aggregations = [
        {'count': bucket['doc_count'], 'entity': bucket['key']}
        for bucket in search_results.aggregations['count_by_type']['buckets']
    ]
    return Response(data={
        'count': search_results.hits.total,
        'results': [hit.to_dict() for hit in search_results.hits],
        'aggregations': aggregations,
    })
def test_contact_auto_sync_to_opensearch(opensearch_with_signals):
    """Tests if contact gets synced to OpenSearch."""
    searched_name = 'very_hard_to_find_contact'
    ContactFactory(first_name=searched_name)
    opensearch_with_signals.indices.refresh()

    search_result = get_basic_search_query(Contact, searched_name).execute()

    assert search_result.hits.total.value == 1
def test_offset_near_max_results(offset, limit, expected_size):
    """Tests limit clipping when near max_results."""
    search_query = get_basic_search_query(
        mock.Mock(),
        'test',
        offset=offset,
        limit=limit,
    )

    as_dict = search_query.to_dict()
    assert (as_dict['from'], as_dict['size']) == (offset, expected_size)
def test_company_auto_sync_to_es(setup_es):
    """Tests if company gets synced to Elasticsearch."""
    searched_name = 'very_hard_to_find_company'
    CompanyFactory(name=searched_name)
    setup_es.indices.refresh()

    search_result = get_basic_search_query(Company, searched_name).execute()

    assert search_result.hits.total == 1
def test_contact_auto_sync_to_es(setup_es):
    """Tests if contact gets synced to Elasticsearch."""
    searched_name = 'very_hard_to_find_contact'
    ContactFactory(first_name=searched_name)
    setup_es.indices.refresh()

    search_result = get_basic_search_query(searched_name, entities=(Contact, )).execute()

    assert search_result.hits.total == 1
def test_company_auto_updates_to_es(setup_es):
    """Tests if company gets updated in Elasticsearch."""
    original_name = 'very_hard_to_find_company_international'
    company = CompanyFactory(name=original_name)

    # Rename the company; saving should re-sync the document.
    updated_name = 'very_hard_to_find_company_local'
    company.name = updated_name
    company.save()

    setup_es.indices.refresh()
    search_result = get_basic_search_query(Company, updated_name).execute()

    assert search_result.hits.total == 1
    assert search_result.hits[0].id == str(company.id)
def test_company_auto_updates_to_opensearch(opensearch_with_signals):
    """Tests if company gets updated in OpenSearch."""
    original_name = 'very_hard_to_find_company_international'
    company = CompanyFactory(name=original_name)

    # Rename the company; saving should re-sync the document.
    updated_name = 'very_hard_to_find_company_local'
    company.name = updated_name
    company.save()

    opensearch_with_signals.indices.refresh()
    search_result = get_basic_search_query(Company, updated_name).execute()

    assert search_result.hits.total.value == 1
    assert search_result.hits[0].id == str(company.id)
def test_contact_auto_updates_to_opensearch(opensearch_with_signals):
    """Tests if contact gets updated in OpenSearch."""
    original_name = 'very_hard_to_find_contact_ii'
    contact = ContactFactory(first_name=original_name)
    # Extra save mirrors the original test: triggers an additional sync.
    contact.save()

    updated_name = 'very_hard_to_find_contact_v'
    contact.first_name = updated_name
    contact.save()

    opensearch_with_signals.indices.refresh()
    search_result = get_basic_search_query(Contact, updated_name).execute()

    assert search_result.hits.total.value == 1
    assert search_result.hits[0].id == str(contact.id)
def test_contact_auto_updates_to_es(setup_es):
    """Tests if contact gets updated in Elasticsearch."""
    original_name = 'very_hard_to_find_contact_ii'
    contact = ContactFactory(first_name=original_name)
    # Extra save mirrors the original test: triggers an additional sync.
    contact.save()

    updated_name = 'very_hard_to_find_contact_v'
    contact.first_name = updated_name
    contact.save()

    setup_es.indices.refresh()
    search_result = get_basic_search_query(updated_name, entities=(Contact, )).execute()

    assert search_result.hits.total == 1
    assert search_result.hits[0].id == str(contact.id)
def get(self, request, format=None):
    """Performs basic search.

    Validates the query parameters by hand, builds a basic search query
    scoped to the requested entity and returns the serialised hits with
    per-entity aggregations.

    Raises:
        ValidationError: if ``term`` is missing, ``entity`` is not a known
            entity name, ``sortby`` refers to an unsupported field, or
            ``offset``/``limit`` are not valid integers.
    """
    if 'term' not in request.query_params:
        raise ValidationError('Missing required "term" field.')
    term = request.query_params['term']

    entity = request.query_params.get('entity', self.DEFAULT_ENTITY)
    if entity not in self.entity_by_name:
        raise ValidationError(
            f'Entity is not one of {", ".join(self.entity_by_name)}',
        )

    sortby = request.query_params.get('sortby')
    if sortby:
        # Only the field part (before any ':asc'/':desc' suffix) is validated.
        field = sortby.rsplit(':')[0]
        if field not in self.SORT_BY_FIELDS:
            raise ValidationError(
                f'"sortby" field is not one of {self.SORT_BY_FIELDS}.')

    # FIX: int() on unvalidated input previously raised an unhandled
    # ValueError (HTTP 500) for non-numeric values; report it as a
    # validation error (HTTP 400) like the other parameter checks.
    try:
        offset = int(request.query_params.get('offset', 0))
        limit = int(request.query_params.get('limit', 100))
    except ValueError:
        raise ValidationError('"offset" and "limit" must be integers.')

    query = get_basic_search_query(
        term=term,
        entities=(self.entity_by_name[entity].model, ),
        permission_filters_by_entity=dict(
            _get_permission_filters(request)),
        ordering=sortby,
        ignored_entities=self.IGNORED_ENTITIES,
        offset=offset,
        limit=limit,
    )
    results = _execute_search_query(query)

    response = {
        'count': results.hits.total,
        'results': [result.to_dict() for result in results.hits],
        'aggregations': [{
            'count': x['doc_count'],
            'entity': x['key'],
        } for x in results.aggregations['count_by_type']['buckets']],
    }
    return Response(data=response)
def test_get_basic_search_query():
    """Tests basic search query."""
    query = get_basic_search_query(ESContact, 'test', offset=5, limit=5)

    # The generated Elasticsearch DSL must match exactly: a boosted
    # keyword-name match plus a cross-fields multi-match over a fixed field
    # list (presumably the union of all registered apps' searchable fields —
    # not visible here), post-filtered to the contact `_type`, with a
    # count-by-type aggregation, pagination and score/id sorting.
    assert query.to_dict() == {
        'query': {
            'bool': {
                'should': [
                    {
                        'match': {
                            'name.keyword': {
                                'query': 'test',
                                'boost': 2,
                            },
                        },
                    },
                    {
                        'multi_match': {
                            'query': 'test',
                            'fields': [
                                'address.country.name.trigram',
                                'address.postcode.trigram',
                                'address_country.name.trigram',
                                'address_postcode_trigram',
                                'company.name',
                                'company.name.trigram',
                                'company_number',
                                'contact.name',
                                'contact.name.trigram',
                                'contacts.name',
                                'contacts.name.trigram',
                                'dit_participants.adviser.name',
                                'dit_participants.adviser.name.trigram',
                                'dit_participants.team.name',
                                'dit_participants.team.name.trigram',
                                'email',
                                'email_alternative',
                                'event.name',
                                'event.name.trigram',
                                'id',
                                'investor_company.name',
                                'investor_company.name.trigram',
                                'name',
                                'name.trigram',
                                'organiser.name.trigram',
                                'project_code_trigram',
                                'reference_code',
                                'reference_trigram',
                                'registered_address.country.name.trigram',
                                'registered_address.postcode.trigram',
                                'related_programmes.name',
                                'related_programmes.name.trigram',
                                'subject_english',
                                'subtotal_cost_string',
                                'teams.name',
                                'teams.name.trigram',
                                'total_cost_string',
                                'trading_names',
                                'trading_names_trigram',
                                'uk_company.name',
                                'uk_company.name.trigram',
                                'uk_region.name.trigram',
                            ],
                            'type': 'cross_fields',
                            'operator': 'and',
                        },
                    },
                ],
            },
        },
        'post_filter': {
            'bool': {
                'should': [
                    {
                        'term': {
                            '_type': 'contact',
                        },
                    },
                ],
            },
        },
        'aggs': {
            'count_by_type': {
                'terms': {
                    'field': '_type',
                },
            },
        },
        'from': 5,
        'size': 5,
        'sort': [
            '_score',
            'id',
        ],
    }
def test_get_basic_search_query(mocked_get_global_search_apps_as_mapping):
    """Test for get_basic_search_query."""
    # Restrict the registered global-search apps to a single simple app so
    # the expected field list and filters are deterministic.
    search_app = SimpleModelSearchApp
    mocked_get_global_search_apps_as_mapping.return_value = {
        search_app.name: search_app,
    }
    query = get_basic_search_query(
        search_app.search_model,
        'test',
        # One permission filter: only documents with name == 'perm' are
        # visible for this entity.
        permission_filters_by_entity={
            search_app.name: [('name', 'perm')],
        },
        offset=2,
        limit=3,
    )

    # Expected DSL: boosted keyword-name match + cross-fields multi-match,
    # wrapped with a permission filter clause, post-filtered to the app's
    # `_document_type`, with aggregation, sorting, total-hit tracking and
    # pagination.
    assert query.to_dict() == {
        'query': {
            'bool': {
                'should': [
                    {
                        'match': {
                            'name.keyword': {
                                'query': 'test',
                                'boost': 2,
                            },
                        },
                    },
                    {
                        'multi_match': {
                            'query': 'test',
                            'fields': [
                                'address.trigram',
                                'country.trigram',
                                'name',
                                'name.trigram',
                            ],
                            'type': 'cross_fields',
                            'operator': 'and',
                        },
                    },
                ],
                # Permission filter: documents must belong to this app's
                # document type AND satisfy the name == 'perm' term.
                'filter': [
                    {
                        'bool': {
                            'should': [
                                {
                                    'bool': {
                                        'should': [
                                            {
                                                'term': {
                                                    'name': 'perm',
                                                },
                                            },
                                        ],
                                        'must': [
                                            {
                                                'term': {
                                                    '_document_type': search_app.name,
                                                },
                                            },
                                        ],
                                        'minimum_should_match': 1,
                                    },
                                },
                            ],
                        },
                    },
                ],
                'minimum_should_match': 1,
            },
        },
        'post_filter': {
            'bool': {
                'should': [
                    {
                        'term': {
                            '_document_type': search_app.name,
                        },
                    },
                ],
            },
        },
        'aggs': {
            'count_by_type': {
                'terms': {
                    'field': '_document_type',
                },
            },
        },
        'sort': [
            '_score',
            'id',
        ],
        'track_total_hits': True,
        'from': 2,
        'size': 3,
    }
def test_get_basic_search_query():
    """Tests basic search query."""
    # Expected OpenSearch DSL: a boosted keyword-name match plus a
    # cross-fields multi-match over a fixed field list (presumably the union
    # of all registered apps' searchable fields — not visible here),
    # post-filtered to the contact `_document_type`, with a count-by-type
    # aggregation, pagination, score/id sorting and total-hit tracking.
    expected_query = {
        'query': {
            'bool': {
                'should': [
                    {
                        'match': {
                            'name.keyword': {
                                'query': 'test',
                                'boost': 2,
                            },
                        },
                    },
                    {
                        'multi_match': {
                            'query': 'test',
                            'fields': [
                                'address.area.name.trigram',
                                'address.country.name.trigram',
                                'address.county.trigram',
                                'address.line_1.trigram',
                                'address.line_2.trigram',
                                'address.postcode',
                                'address.town.trigram',
                                'address.trigram',
                                'address_country.name.trigram',
                                'address_postcode',
                                'companies.name',
                                'companies.name.trigram',
                                'company.name',
                                'company.name.trigram',
                                'company_number',
                                'contact.name',
                                'contact.name.trigram',
                                'contacts.name',
                                'contacts.name.trigram',
                                'country.trigram',
                                'dit_participants.adviser.name',
                                'dit_participants.adviser.name.trigram',
                                'dit_participants.team.name',
                                'dit_participants.team.name.trigram',
                                'email',
                                'event.name',
                                'event.name.trigram',
                                'event_type.name',
                                'event_type.name.trigram',
                                'full_telephone_number',
                                'id',
                                'investor_company.name',
                                'investor_company.name.trigram',
                                'job_title',
                                'job_title.trigram',
                                'name',
                                'name.trigram',
                                'name_with_title',
                                'name_with_title.trigram',
                                'organiser.name.trigram',
                                'project_code',
                                'reference.trigram',
                                'reference_code',
                                'registered_address.area.name.trigram',
                                'registered_address.country.name.trigram',
                                'registered_address.county.trigram',
                                'registered_address.line_1.trigram',
                                'registered_address.line_2.trigram',
                                'registered_address.postcode',
                                'registered_address.town.trigram',
                                'related_programmes.name',
                                'related_programmes.name.trigram',
                                'sector.name',
                                'service.name',
                                'service.name.trigram',
                                'simpleton.name',
                                'subject.english',
                                'subtotal_cost.keyword',
                                'teams.name',
                                'teams.name.trigram',
                                'total_cost.keyword',
                                'trading_names',
                                'trading_names.trigram',
                                'uk_company.name',
                                'uk_company.name.trigram',
                                'uk_region.name',
                                'uk_region.name.trigram',
                            ],
                            'type': 'cross_fields',
                            'operator': 'and',
                        },
                    },
                ],
            },
        },
        'post_filter': {
            'bool': {
                'should': [
                    {
                        'term': {
                            '_document_type': 'contact',
                        },
                    },
                ],
            },
        },
        'aggs': {
            'count_by_type': {
                'terms': {
                    'field': '_document_type',
                },
            },
        },
        'from': 5,
        'size': 5,
        'sort': [
            '_score',
            'id',
        ],
        'track_total_hits': True,
    }
    query = get_basic_search_query(SearchContact, 'test', offset=5, limit=5)
    assert query.to_dict() == expected_query