def test_collector(monkeypatch, opensearch_with_signals):
    """
    Test that the collector collects and deletes all the django objects deleted.
    """
    instance = SimpleModel.objects.create()
    sync_object(SimpleModelSearchApp, str(instance.pk))
    opensearch_with_signals.indices.refresh()
    expected_doc = SearchSimpleModel.to_document(
        instance,
        include_index=False,
        include_source=False,
    )
    assert SimpleModel.objects.count() == 1

    collector = Collector()

    # The collector should have picked up the pre/post_delete receivers
    # registered for SimpleModel, as those are what it needs to disable.
    receivers_for_model = [
        candidate
        for candidate in collector.signal_receivers_to_disable
        if candidate.sender is SimpleModel
    ]
    assert receivers_for_model
    assert {candidate.signal for candidate in receivers_for_model} == {
        post_delete,
        pre_delete,
    }

    # Swap enable/disable for mocks so the calls can be asserted on below.
    for candidate in collector.signal_receivers_to_disable:
        monkeypatch.setattr(candidate, 'enable', mock.Mock())
        monkeypatch.setattr(candidate, 'disable', mock.Mock())

    collector.connect()

    # Connecting must disable (and not re-enable) the existing receivers.
    for candidate in collector.signal_receivers_to_disable:
        assert candidate.disable.called
        assert not candidate.enable.called

    instance.delete()

    collector.disconnect()

    # Disconnecting must re-enable the receivers that were disabled.
    for candidate in collector.signal_receivers_to_disable:
        assert candidate.enable.called

    assert collector.deletions == {
        SimpleModel: [expected_doc],
    }

    read_alias = SearchSimpleModel.get_read_alias()

    # The database row is gone but the search document still exists...
    assert SimpleModel.objects.count() == 0
    assert opensearch_with_signals.count(index=read_alias)['count'] == 1

    # ...until the collector deletes it from OpenSearch.
    collector.delete_from_opensearch()
    opensearch_with_signals.indices.refresh()
    assert opensearch_with_signals.count(index=read_alias)['count'] == 0
def test_response_body(self, setup_es):
    """Tests the response body of a search query."""
    ch_company = CompaniesHouseCompanyFactory(
        name='Pallas',
        company_number='111',
        incorporation_date=dateutil_parse('2012-09-12T00:00:00Z'),
        company_status='jumping',
    )
    sync_object(CompaniesHouseCompanySearchApp, ch_company.pk)
    setup_es.indices.refresh()

    response = self.api_client.post(reverse('api-v3:search:companieshousecompany'))

    assert response.status_code == status.HTTP_200_OK
    # The single indexed company should be returned with all of its fields.
    expected_result = {
        'id': str(ch_company.pk),
        'name': ch_company.name,
        'company_category': ch_company.company_category,
        'incorporation_date': ch_company.incorporation_date.date().isoformat(),
        'company_number': ch_company.company_number,
        'company_status': ch_company.company_status,
        'registered_address_1': ch_company.registered_address_1,
        'registered_address_2': ch_company.registered_address_2,
        'registered_address_town': ch_company.registered_address_town,
        'registered_address_county': ch_company.registered_address_county,
        'registered_address_postcode': ch_company.registered_address_postcode,
        'registered_address_country': {
            'id': str(ch_company.registered_address_country.id),
            'name': ch_company.registered_address_country.name,
        },
        'sic_code_1': ch_company.sic_code_1,
        'sic_code_2': ch_company.sic_code_2,
        'sic_code_3': ch_company.sic_code_3,
        'sic_code_4': ch_company.sic_code_4,
        'uri': ch_company.uri,
    }
    assert response.json() == {
        'count': 1,
        'results': [expected_result],
    }
def sync_object_task(search_app_name, pk):
    """
    Syncs a single object to the search index.

    On error the task is retried automatically with an exponential back-off:
    the wait between attempts is approximately 2 ** attempt_num seconds (with
    some jitter added).

    This task is named sync_object_task so that it does not clash with the
    sync_object function it delegates to.
    """
    # Imported lazily to avoid a circular import at module load time.
    from datahub.search.sync_object import sync_object

    sync_object(get_search_app(search_app_name), pk)
def test_indexed_doc(opensearch):
    """Test the OpenSearch data of an indexed company."""
    company = CompanyFactory(trading_names=['a', 'b'])
    sync_object(CompanySearchApp, company.pk)
    opensearch.indices.refresh()

    indexed = opensearch.get(
        index=CompanySearchApp.search_model.get_write_index(),
        id=company.pk,
    )

    assert indexed['_id'] == str(company.pk)
    # The document should contain exactly this set of fields (alphabetical).
    expected_fields = {
        '_document_type',
        'address',
        'archived',
        'archived_by',
        'archived_on',
        'archived_reason',
        'business_type',
        'company_number',
        'created_on',
        'description',
        'duns_number',
        'employee_range',
        'export_experience_category',
        'export_segment',
        'export_sub_segment',
        'export_to_countries',
        'future_interest_countries',
        'global_headquarters',
        'headquarter_type',
        'id',
        'latest_interaction_date',
        'modified_on',
        'name',
        'one_list_group_global_account_manager',
        'reference_code',
        'registered_address',
        'sector',
        'trading_names',
        'turnover_range',
        'uk_address_postcode',
        'uk_based',
        'uk_region',
        'uk_registered_address_postcode',
        'vat_number',
        'website',
    }
    assert indexed['_source'].keys() == expected_fields
def test_update_es_after_deletions(setup_es):
    """
    Test that the context manager update_es_after_deletions collects and deletes
    all the django objects deleted.
    """
    instance = SimpleModel.objects.create()
    sync_object(SimpleModelSearchApp, str(instance.pk))
    setup_es.indices.refresh()
    read_alias = ESSimpleModel.get_read_alias()

    # One row in the database and one matching document in the index.
    assert SimpleModel.objects.count() == 1
    assert setup_es.count(read_alias, doc_type=SimpleModelSearchApp.name)['count'] == 1

    with update_es_after_deletions():
        instance.delete()

    setup_es.indices.refresh()
    # The document should have been removed along with the database row.
    assert setup_es.count(read_alias, doc_type=SimpleModelSearchApp.name)['count'] == 0
def test_ignored_models_excluded_from_aggregations(self, setup_es):
    """Test that companieshousecompany is not included in aggregations."""
    ch_company = CompaniesHouseCompanyFactory()
    sync_object(CompaniesHouseCompanySearchApp, ch_company.pk)
    setup_es.indices.refresh()

    response = self.api_client.get(
        reverse('api-v3:search:basic'),
        data={'term': ''},
    )

    assert response.status_code == status.HTTP_200_OK
    payload = response.json()
    # The Companies House company must not appear in results or aggregations.
    assert payload['count'] == 0
    assert payload['aggregations'] == []
def test_update_opensearch_after_deletions(opensearch_with_signals):
    """
    Test that the context manager update_opensearch_after_deletions collects and
    deletes all the django objects deleted.
    """
    assert SimpleModel.objects.count() == 0

    instance = SimpleModel.objects.create()
    sync_object(SimpleModelSearchApp, str(instance.pk))
    opensearch_with_signals.indices.refresh()
    read_alias = SearchSimpleModel.get_read_alias()

    # One row in the database and one matching document in the index.
    assert SimpleModel.objects.count() == 1
    assert opensearch_with_signals.count(index=read_alias)['count'] == 1

    with update_opensearch_after_deletions():
        instance.delete()

    opensearch_with_signals.indices.refresh()
    # The document should have been removed along with the database row.
    assert opensearch_with_signals.count(index=read_alias)['count'] == 0
def test_creates_user_event_log_entries(self, es_with_collector):
    """Tests that when an export is performed, a user event is recorded."""
    user = create_test_user(permission_codenames=['view_simplemodel'])
    api_client = self.create_api_client(user=user)

    record = SimpleModel(name='test')
    record.save()
    sync_object(SimpleModelSearchApp, record.pk)
    es_with_collector.flush_and_refresh()

    frozen_time = datetime.datetime(2018, 1, 2, 12, 30, 50, tzinfo=utc)
    # Freeze time so the recorded event timestamp can be asserted exactly.
    with freeze_time(frozen_time):
        response = api_client.post(
            reverse('api-v3:search:simplemodel-export'),
            data={'name': 'test'},
        )

    assert response.status_code == status.HTTP_200_OK
    assert UserEvent.objects.count() == 1

    user_event = UserEvent.objects.first()
    assert user_event.adviser == user
    assert user_event.type == UserEventType.SEARCH_EXPORT
    assert user_event.timestamp == frozen_time
    assert user_event.api_url_path == '/v3/search/simplemodel/export'
    assert user_event.data == {
        'args': {
            'limit': 100,
            'name': 'test',
            'offset': 0,
            'original_query': '',
            'sortby': None,
        },
        'num_results': 1,
    }
def test_sorting(self, setup_es):
    """Test to demonstrate how NormalizedKeyword sorts."""
    names = [
        'Alice',
        'Barbara',
        'barbara 2',
        'Álice 2',
        'alice 3',
    ]
    # Shuffle the insertion order to show sorting is done by the index,
    # not by creation order.
    shuffle(names)

    for name in names:
        record = SimpleModel(name=name)
        record.save()
        sync_object(SimpleModelSearchApp, record.pk)

    setup_es.indices.refresh()

    user = create_test_user(permission_codenames=['view_simplemodel'])
    api_client = self.create_api_client(user=user)

    response = api_client.post(
        reverse('api-v3:search:simplemodel'),
        data={'sortby': 'name'},
    )

    # Case- and accent-insensitive alphabetical ordering is expected.
    expected_order = [
        'Alice',
        'Álice 2',
        'alice 3',
        'Barbara',
        'barbara 2',
    ]
    results = response.json()['results']
    assert [item['name'] for item in results] == expected_order
def setup_data(setup_es):
    """Sets up data for the tests."""
    # (name, company_number, incorporation_date, postcode, company_status)
    company_specs = (
        ('Pallas', '111', '2012-09-12T00:00:00Z', 'SW1A 1AA', 'jumping'),
        ('Jaguarundi', '222', '2015-09-12T00:00:00Z', 'E1 6JE', 'sleeping'),
        ('Cheetah', '333', '2016-09-12T00:00:00Z', 'SW1A 0PW', 'purring'),
        ('Pallas Second', '444', '2019-09-12T00:00:00Z', 'WC1B 3DG', 'crying'),
    )

    for name, number, incorporated_on, postcode, company_status in company_specs:
        company = CompaniesHouseCompanyFactory(
            name=name,
            company_number=number,
            incorporation_date=dateutil_parse(incorporated_on),
            registered_address_postcode=postcode,
            company_status=company_status,
        )
        sync_object(CompaniesHouseCompanySearchApp, company.pk)

    setup_es.indices.refresh()