def test_filter_by_dit_participant(self, setup_es, dit_participant_field):
    """Test filtering interaction by DIT participant adviser and team IDs."""
    all_interactions = CompanyInteractionFactory.create_batch(10, dit_participants=[])
    for current_interaction in all_interactions:
        InteractionDITParticipantFactory.create_batch(2, interaction=current_interaction)
    setup_es.indices.refresh()

    # Pick one interaction at random, and one of its participants to filter on.
    target_interaction = choice(all_interactions)
    participant = target_interaction.dit_participants.order_by('?').first()
    filter_value = getattr(participant, dit_participant_field).id
    request_data = {f'dit_participants__{dit_participant_field}': filter_value}

    url = reverse('api-v3:search:interaction')
    response = self.api_client.post(url, request_data)
    assert response.status_code == status.HTTP_200_OK

    # Only the interaction containing that participant should match.
    response_data = response.json()
    assert response_data['count'] == 1
    matches = response_data['results']
    assert len(matches) == 1
    assert matches[0]['id'] == str(target_interaction.pk)
def company_with_multiple_participant_interaction_factory():
    """Factory for a company with an interaction that has multiple participants."""
    new_company = CompanyFactory()
    new_interaction = CompanyInteractionFactory(
        company=new_company,
        dit_participants=[],
    )
    # Attach two participants so the interaction exercises multi-participant paths.
    InteractionDITParticipantFactory.create_batch(2, interaction=new_interaction)
    return new_company
def test_can_replace_all_participants(self):
    """Test that all existing participants can be replaced with different ones."""
    # Start with three participants, none of which appear in the PATCH payload.
    interaction = CompanyInteractionFactory(dit_participants=[])
    InteractionDITParticipantFactory.create_batch(
        3,
        interaction=interaction,
    )
    new_advisers = AdviserFactory.create_batch(2)
    # Sort by pk so the expected list lines up with the sorted response below.
    new_advisers.sort(key=attrgetter('pk'))
    request_data = {
        'dit_participants': [
            {
                'adviser': {
                    'id': adviser.pk,
                },
            } for adviser in new_advisers
        ],
    }
    url = reverse('api-v3:interaction:item', kwargs={'pk': interaction.pk})
    response = self.api_client.patch(url, data=request_data)
    assert response.status_code == status.HTTP_200_OK
    response_data = response.json()
    response_data['dit_participants'].sort(
        key=lambda dit_participant: dit_participant['adviser']['id'],
    )
    # Each replacement participant is expected to be stored with the
    # corresponding adviser's own team.
    expected_advisers_and_teams = [(adviser, adviser.dit_team) for adviser in new_advisers]
    assert response_data['dit_participants'] == [
        {
            'adviser': {
                'id': str(adviser.pk),
                'first_name': adviser.first_name,
                'last_name': adviser.last_name,
                'name': adviser.name,
            },
            'team': {
                'id': str(team.pk),
                'name': team.name,
            },
        }
        for adviser, team in expected_advisers_and_teams
    ]
def test_interaction_synced_when_dit_participant_added(opensearch_with_signals):
    """Test that interactions are synced to OpenSearch if their DIT participants change."""
    interaction = CompanyInteractionFactory(dit_participants=[])
    opensearch_with_signals.indices.refresh()

    read_alias = InteractionSearchApp.search_model.get_read_alias()

    # The freshly created interaction is indexed with no participants.
    initial_doc = opensearch_with_signals.get(index=read_alias, id=interaction.pk)
    assert initial_doc['_source']['dit_participants'] == []

    participant = InteractionDITParticipantFactory(interaction=interaction)
    opensearch_with_signals.indices.refresh()

    # Adding a participant should have re-synced the document.
    refreshed_doc = opensearch_with_signals.get(index=read_alias, id=interaction.pk)
    synced_participants = refreshed_doc['_source']['dit_participants']
    assert len(synced_participants) == 1
    assert synced_participants[0]['adviser']['id'] == str(participant.adviser.pk)
    assert synced_participants[0]['team']['id'] == str(participant.team.pk)
def test_ignores_interactions_with_existing_participants(
        self, monkeypatch, caplog):
    """Test that the task does not modify interactions with an existing DIT participant."""
    caplog.set_level('INFO', 'datahub')
    # Wrap the real task so calls pass through while remaining countable.
    populate_interaction_dit_participant_mock = Mock(
        wraps=populate_interaction_dit_participant,
    )
    monkeypatch.setattr(
        'datahub.dbmaintenance.tasks.populate_interaction_dit_participant',
        populate_interaction_dit_participant_mock,
    )
    interactions = CompanyInteractionFactory.create_batch(
        5,
        dit_participants=[],
    )
    # Every interaction already has a participant, so the task should skip all of them.
    for interaction in interactions:
        InteractionDITParticipantFactory(interaction=interaction)

    result = populate_interaction_dit_participant_mock.apply_async(
        kwargs={'batch_size': 100},
    )

    assert result.successful()
    assert populate_interaction_dit_participant_mock.apply_async.call_count == 1

    for obj in interactions:
        obj.refresh_from_db()

    # These objects should not have been modified. (Previously built a throwaway
    # list inside all(); a generator expression short-circuits instead.)
    assert all(
        obj.dit_participants.filter(adviser=obj.dit_adviser).count() == 0
        for obj in interactions
    )
    assert len(caplog.records) == 1
    # Was an f-string with no placeholders — a plain literal is what was intended.
    assert '0 InteractionDITParticipant many-to-many objects created' in caplog.text
def test_intelligent_homepage(self, setup_es):
    """Intelligent homepage."""
    # Create one interaction and one contact per year, with creation time frozen,
    # so the "latest five" ordering is deterministic.
    datetimes = [datetime(year, 1, 1) for year in range(2015, 2030)]
    interactions = []
    contacts = []
    for creation_datetime in datetimes:
        with freeze_time(creation_datetime):
            interaction = CompanyInteractionFactory(dit_participants=[])
            InteractionDITParticipantFactory(interaction=interaction)
            # The current user participates, so the interaction shows on their homepage.
            InteractionDITParticipantFactory(interaction=interaction, adviser=self.user)
            interactions.append(interaction)
            contacts.append(ContactFactory(created_by=self.user))
    setup_es.indices.refresh()

    url = reverse('dashboard:intelligent-homepage')
    response = self.api_client.get(url)
    assert response.status_code == status.HTTP_200_OK

    response_data = response.json()

    actual_contacts = response_data['contacts']
    actual_contact_ids = [contact['id'] for contact in actual_contacts]
    # Latest 5 contacts, most recent first
    expected_contact_ids = [
        str(contact.id) for contact in contacts[:-6:-1]
    ]
    assert actual_contact_ids == expected_contact_ids

    actual_interactions = response_data['interactions']
    actual_interaction_ids = [
        interaction['id'] for interaction in actual_interactions
    ]
    # Latest 5 interactions, most recent first
    expected_interaction_ids = [
        str(interaction.id) for interaction in interactions[:-6:-1]
    ]
    assert actual_interaction_ids == expected_interaction_ids

    # Consistency fix: use the parsed JSON (response_data) throughout instead of
    # switching to response.data for this final check.
    actual_first_interaction = response_data['interactions'][0]
    assert isinstance(actual_first_interaction['company'], dict)
    assert actual_first_interaction['company']['name'] == interactions[-1].company.name
def test_can_replace_some_participants(self):
    """Test that a subset of existing DIT participants can be replaced."""
    interaction = CompanyInteractionFactory(dit_participants=[])
    dit_participants = InteractionDITParticipantFactory.create_batch(
        3,
        interaction=interaction,
    )
    # Change the first adviser's team so that we can check that the participant's team is
    # unchanged after the update.
    dit_participants[0].adviser.dit_team = TeamFactory()
    dit_participants[0].adviser.save()
    # Retain the first existing participant's adviser and add one brand-new adviser.
    new_advisers = [
        dit_participants[0].adviser,
        AdviserFactory(),
    ]
    request_data = {
        'dit_participants': [
            {
                'adviser': {
                    'id': adviser.pk,
                },
            } for adviser in new_advisers
        ],
    }
    url = reverse('api-v3:interaction:item', kwargs={'pk': interaction.pk})
    response = self.api_client.patch(url, data=request_data)
    assert response.status_code == status.HTTP_200_OK
    response_data = response.json()
    response_data['dit_participants'].sort(
        key=lambda dit_participant: dit_participant['adviser']['id'],
    )
    # The retained participant keeps its originally stored team (not the adviser's
    # updated team); the new participant gets the new adviser's own team.
    expected_advisers_and_teams = [
        (new_advisers[0], dit_participants[0].team),
        (new_advisers[1], new_advisers[1].dit_team),
    ]
    expected_advisers_and_teams.sort(key=lambda adviser_and_team: adviser_and_team[0].pk)
    assert response_data['dit_participants'] == [
        {
            'adviser': {
                'id': str(adviser.pk),
                'first_name': adviser.first_name,
                'last_name': adviser.last_name,
                'name': adviser.name,
            },
            'team': {
                'id': str(team.pk),
                'name': team.name,
            },
        }
        for adviser, team in expected_advisers_and_teams
    ]
def test_null_team(api_client):
    """
    Test that we can handle dit_participant.team being None
    """
    delivery = EventServiceDeliveryFactory(dit_participants=[])
    # A participant with no team must not break activity-stream serialization.
    InteractionDITParticipantFactory(interaction=delivery, team=None)

    url = get_url('api-v3:activity-stream:interactions')
    response = hawk.get(api_client, url)
    assert response.status_code == status.HTTP_200_OK
def test_dit_participant_ordering(api_client):
    """
    Test that dit_participants are ordered by `pk`
    """
    interaction = CompanyInteractionFactory(dit_participants=[])
    InteractionDITParticipantFactory.create_batch(5, interaction=interaction)

    response = hawk.get(api_client, get_url('api-v3:activity-stream:interactions'))
    assert response.status_code == status.HTTP_200_OK

    # Expected order: participants sorted by pk, rendered as adviser IDs.
    participants_by_pk = sorted(
        interaction.dit_participants.all(),
        key=lambda obj: obj.pk,
    )
    sorted_participant_ids = [
        f'dit:DataHubAdviser:{participant.adviser.pk}'
        for participant in participants_by_pk
    ]

    attributed_to = response.json()['orderedItems'][0]['object']['attributedTo']
    response_participant_ids = [
        entry['id']
        for entry in attributed_to
        if entry['type'] == ['Person', 'dit:Adviser']
    ]
    assert sorted_participant_ids == response_participant_ids
def test_null_adviser(api_client):
    """
    Test that we can handle dit_participant.adviser being None
    """
    interaction = CompanyInteractionFactory(dit_participants=[])
    # A participant with a team but no adviser must not break serialization.
    InteractionDITParticipantFactory(
        interaction=interaction,
        adviser=None,
        team=TeamFactory(),
    )

    url = get_url('api-v3:activity-stream:interactions')
    response = hawk.get(api_client, url)
    assert response.status_code == status.HTTP_200_OK
def test_null_adviser(api_client):
    """
    Test that we can handle dit_participant.adviser being None
    """
    with freeze_time() as frozen_datetime:
        interaction = CompanyInteractionFactory(dit_participants=[])
        # A participant with a team but no adviser must not break serialization.
        InteractionDITParticipantFactory(
            interaction=interaction,
            adviser=None,
            team=TeamFactory(),
        )

        # Advance the frozen clock so the request happens after the fixtures.
        frozen_datetime.tick(datetime.timedelta(seconds=1, microseconds=1))
        response = hawk.get(api_client, get_url('api-v3:activity-stream:interactions'))
        assert response.status_code == status.HTTP_200_OK
def test_export(
    self,
    opensearch_with_collector,
    request_sortby,
    orm_ordering,
    requests_mock,
    accepts_dit_email_marketing,
):
    """Test export of contact search results."""
    ArchivedContactFactory()
    ContactWithOwnAddressFactory()
    ContactFactory()
    ContactWithOwnAreaFactory()

    # These are to test date of and team of latest interaction a bit more thoroughly
    CompanyInteractionFactory.create_batch(2)
    CompanyInteractionFactory(contacts=ContactFactory.create_batch(2))
    interaction_with_multiple_teams = CompanyInteractionFactory()
    InteractionDITParticipantFactory.create_batch(
        2,
        interaction=interaction_with_multiple_teams,
    )
    opensearch_with_collector.flush_and_refresh()

    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    url = reverse('api-v3:search:contact-export')

    # Freeze time so the timestamp embedded in the filename is predictable.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == ('text/csv', {
        'charset': 'utf-8'
    })
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment', {
            'filename': 'Data Hub - Contacts - 2018-01-01-11-12-13.csv'
        },
    )

    # Mirror the view's ordering so expected rows line up with the CSV rows.
    sorted_contacts = Contact.objects.annotate(
        computed_address_country_name=Coalesce(
            'address_country__name',
            'company__address_country__name',
        ),
    ).order_by(
        orm_ordering,
        'pk',
    )

    # Stub the consent service lookup used to populate the email-marketing column.
    matcher = requests_mock.get(
        f'{settings.CONSENT_SERVICE_BASE_URL}'
        f'{CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
        text=generate_hawk_response({
            'results': [{
                'email': contact.email,
                'consents': [
                    CONSENT_SERVICE_EMAIL_CONSENT_TYPE,
                ] if accepts_dit_email_marketing else [],
            } for contact in sorted_contacts],
        }),
        status_code=status.HTTP_200_OK,
    )

    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

    assert reader.fieldnames == list(
        SearchContactExportAPIView.field_titles.values())

    expected_row_data = format_csv_data([{
        'Name': contact.name,
        'Job title': contact.job_title,
        'Date created': contact.created_on,
        'Archived': contact.archived,
        'Link':
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["contact"]}/{contact.pk}',
        'Company': get_attr_or_none(contact, 'company.name'),
        'Company sector': get_attr_or_none(contact, 'company.sector.name'),
        'Company link':
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}/{contact.company.pk}',
        'Company UK region': get_attr_or_none(contact, 'company.uk_region.name'),
        # Address fields fall back to the company's address when the contact
        # uses the same address as its company.
        'Area':
            (contact.company.address_area and contact.company.address_area.name)
            if contact.address_same_as_company
            else (contact.address_area and contact.address_area.name),
        'Country':
            contact.company.address_country.name
            if contact.address_same_as_company
            else contact.address_country.name,
        'Postcode':
            contact.company.address_postcode
            if contact.address_same_as_company
            else contact.address_postcode,
        'Phone number': contact.full_telephone_number,
        'Email address': contact.email,
        'Accepts DIT email marketing': accepts_dit_email_marketing,
        'Date of latest interaction':
            max(contact.interactions.all(), key=attrgetter('date')).date
            if contact.interactions.all() else None,
        'Teams of latest interaction': _format_interaction_team_names(
            max(contact.interactions.all(), key=attrgetter('date')),
        ) if contact.interactions.exists() else None,
        'Created by team': get_attr_or_none(contact, 'created_by.dit_team.name'),
    } for contact in sorted_contacts])

    actual_row_data = [dict(row) for row in reader]
    # Compare row by row for more useful failure messages.
    assert len(actual_row_data) == len(expected_row_data)
    for index, row in enumerate(actual_row_data):
        assert row == expected_row_data[index]

    # The consent service should have been queried exactly once, for all contacts.
    assert matcher.call_count == 1
    assert matcher.last_request.query == urllib.parse.urlencode(
        {'email': [c.email for c in sorted_contacts]},
        doseq=True,
    )
def test_export(
    self,
    es_with_collector,
    request_sortby,
    orm_ordering,
):
    """Test export of contact search results."""
    ArchivedContactFactory()
    ContactWithOwnAddressFactory()
    ContactFactory()

    # These are to test date of and team of latest interaction a bit more thoroughly
    CompanyInteractionFactory.create_batch(2)
    CompanyInteractionFactory(contacts=ContactFactory.create_batch(2))
    interaction_with_multiple_teams = CompanyInteractionFactory()
    InteractionDITParticipantFactory.create_batch(
        2,
        interaction=interaction_with_multiple_teams,
    )
    es_with_collector.flush_and_refresh()

    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    url = reverse('api-v3:search:contact-export')

    # Freeze time so the timestamp embedded in the filename is predictable.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == ('text/csv', {
        'charset': 'utf-8'
    })
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment', {
            'filename': 'Data Hub - Contacts - 2018-01-01-11-12-13.csv'
        },
    )

    # Mirror the view's ordering so expected rows line up with the CSV rows.
    sorted_contacts = Contact.objects.annotate(
        computed_address_country_name=Coalesce(
            'address_country__name',
            'company__address_country__name',
        ),
    ).order_by(
        orm_ordering,
        'pk',
    )

    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

    assert reader.fieldnames == list(
        SearchContactExportAPIView.field_titles.values())

    # E123 is ignored as there are seemingly unresolvable indentation errors in the dict below
    expected_row_data = [  # noqa: E123
        {
            'Name': contact.name,
            'Job title': contact.job_title,
            'Date created': contact.created_on,
            'Archived': contact.archived,
            'Link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["contact"]}/{contact.pk}',
            'Company': get_attr_or_none(contact, 'company.name'),
            'Company sector': get_attr_or_none(contact, 'company.sector.name'),
            'Company link':
                f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}/{contact.company.pk}',
            'Company UK region': get_attr_or_none(contact, 'company.uk_region.name'),
            # Address fields fall back to the company's address when the contact
            # uses the same address as its company.
            'Country':
                contact.company.address_country.name
                if contact.address_same_as_company
                else contact.address_country.name,
            'Postcode':
                contact.company.address_postcode
                if contact.address_same_as_company
                else contact.address_postcode,
            'Phone number': ' '.join(
                (contact.telephone_countrycode, contact.telephone_number)),
            'Email address': contact.email,
            'Accepts DIT email marketing': contact.accepts_dit_email_marketing,
            'Date of latest interaction':
                max(contact.interactions.all(), key=attrgetter('date')).date
                if contact.interactions.all() else None,
            'Teams of latest interaction': _format_interaction_team_names(
                max(contact.interactions.all(), key=attrgetter('date')),
            ) if contact.interactions.exists() else None,
            'Created by team': get_attr_or_none(contact, 'created_by.dit_team.name'),
        }
        for contact in sorted_contacts
    ]

    actual_row_data = [dict(row) for row in reader]
    assert actual_row_data == format_csv_data(expected_row_data)
def test_interaction_export(
    self,
    es_with_collector,
    request_sortby,
    orm_ordering,
):
    """
    Test export of interaction search results with a policy feedback user.

    Checks that all interaction kinds except for policy feedback are included
    in the export.
    """
    # Faker generates job titles containing commas which complicates comparisons,
    # so all contact job titles are explicitly set
    company = CompanyFactory()
    interaction = CompanyInteractionFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Engineer'),
            ContactFactory(company=company, job_title=None),
            ContactFactory(company=company, job_title=''),
        ],
    )
    # Cover participants with a missing team and a missing adviser.
    InteractionDITParticipantFactory.create_batch(2, interaction=interaction)
    InteractionDITParticipantFactory(interaction=interaction, team=None)
    InteractionDITParticipantFactory(
        interaction=interaction,
        adviser=None,
        team=factory.SubFactory(TeamFactory),
    )
    EventServiceDeliveryFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Managing director'),
        ],
    )
    InvestmentProjectInteractionFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Exports manager'),
        ],
    )
    ServiceDeliveryFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Sales director'),
        ],
    )
    CompanyInteractionFactoryWithPolicyFeedback(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Business development manager'),
        ],
        policy_areas=PolicyArea.objects.order_by('?')[:2],
        policy_issue_types=PolicyIssueType.objects.order_by('?')[:2],
    )
    es_with_collector.flush_and_refresh()

    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    url = reverse('api-v3:search:interaction-export')

    # Freeze time so the timestamp embedded in the filename is predictable.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == ('text/csv', {
        'charset': 'utf-8'
    })
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment', {
            'filename': 'Data Hub - Interactions - 2018-01-01-11-12-13.csv'
        },
    )

    # Mirror the view's ordering so expected rows line up with the CSV rows.
    sorted_interactions = Interaction.objects.order_by(
        orm_ordering,
        'pk',
    )
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

    assert reader.fieldnames == list(
        SearchInteractionExportAPIView.field_titles.values())

    expected_row_data = [{
        'Date': interaction.date,
        'Type': interaction.get_kind_display(),
        'Service': get_attr_or_none(interaction, 'service.name'),
        'Subject': interaction.subject,
        'Link':
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["interaction"]}'
            f'/{interaction.pk}',
        'Company': get_attr_or_none(interaction, 'company.name'),
        'Company link':
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
            f'/{interaction.company.pk}',
        'Company country': get_attr_or_none(
            interaction,
            'company.address_country.name',
        ),
        'Company UK region': get_attr_or_none(interaction, 'company.uk_region.name'),
        'Company sector': get_attr_or_none(interaction, 'company.sector.name'),
        'Contacts': _format_expected_contacts(interaction),
        'Advisers': _format_expected_advisers(interaction),
        'Event': get_attr_or_none(interaction, 'event.name'),
        'Communication channel':
            get_attr_or_none(interaction, 'communication_channel.name'),
        'Service delivery status': get_attr_or_none(
            interaction,
            'service_delivery_status.name',
        ),
        'Net company receipt': interaction.net_company_receipt,
        'Policy issue types': join_attr_values(
            interaction.policy_issue_types.order_by('name'),
        ),
        'Policy areas': join_attr_values(
            interaction.policy_areas.order_by('name'),
            separator='; ',
        ),
        'Policy feedback notes': interaction.policy_feedback_notes,
    } for interaction in sorted_interactions]

    # DictReader uses OrderedDicts, we convert them to normal dicts to get better errors
    # when the assertion fails
    actual_row_data = [dict(item) for item in reader]
    assert actual_row_data == format_csv_data(expected_row_data)