def get_expected_data_from_interaction(interaction):
    """
    Return the expected API response dictionary for an interaction.

    Mirrors the serialised form: UUIDs are rendered as strings, dates via
    format_date_or_datetime(), optional relations as None, and to-many
    relations as lists of ids/names (or None when empty).

    :param interaction: an Interaction model instance
    :return: dict of expected field values
    """
    return {
        'adviser_ids': [
            str(x.adviser_id)
            for x in interaction.dit_participants.all().order_by('pk')
        ],
        'communication_channel__name': get_attr_or_none(
            interaction,
            'communication_channel.name',
        ),
        'company_id': str(interaction.company.id),
        'contact_ids': [str(x.id) for x in interaction.contacts.all().order_by('pk')],
        'created_by_id': str(interaction.created_by_id),
        'created_on': format_date_or_datetime(interaction.created_on),
        'date': format_date_or_datetime(interaction.date),
        'event_id': (
            str(interaction.event_id) if interaction.event_id is not None else None
        ),
        'grant_amount_offered': (
            float(interaction.grant_amount_offered)
            if interaction.grant_amount_offered is not None
            else None
        ),
        'id': str(interaction.id),
        'interaction_link': interaction.get_absolute_url(),
        'investment_project_id': (
            str(interaction.investment_project_id)
            if interaction.investment_project is not None
            else None
        ),
        'kind': interaction.kind,
        'modified_on': format_date_or_datetime(interaction.modified_on),
        'net_company_receipt': (
            float(interaction.net_company_receipt)
            if interaction.net_company_receipt is not None
            else None
        ),
        'notes': interaction.notes,
        'policy_area_names': (
            [x.name for x in interaction.policy_areas.all()]
            if interaction.policy_areas.exists()
            else None
        ),
        'policy_feedback_notes': interaction.policy_feedback_notes,
        'policy_issue_type_names': (
            [x.name for x in interaction.policy_issue_types.all()]
            # Fixed: the guard previously checked policy_areas.exists()
            # (copy-paste bug), giving the wrong value whenever only one of
            # the two related sets was populated.
            if interaction.policy_issue_types.exists()
            else None
        ),
        'sector': get_attr_or_none(interaction, 'company.sector.name'),
        'service_delivery_status__name': get_attr_or_none(
            interaction,
            'service_delivery_status.name',
        ),
        'service_delivery': get_attr_or_none(interaction, 'service.name'),
        'subject': interaction.subject,
        'theme': interaction.theme,
        'were_countries_discussed': interaction.were_countries_discussed,
    }
def get_expected_data_from_company_referral(referral):
    """Return the expected dictionary representation of a company referral."""
    # Optional relation: only rendered as a string when an interaction is linked.
    if referral.interaction_id is None:
        interaction_id = None
    else:
        interaction_id = str(referral.interaction_id)
    return {
        'company_id': str(referral.company_id),
        # Left as the raw attribute value (not stringified) — via the helper,
        # which yields None when absent.
        'completed_by_id': get_attr_or_none(referral, 'completed_by_id'),
        'completed_on': format_date_or_datetime(referral.completed_on),
        'contact_id': str(referral.contact_id),
        'created_by_id': str(referral.created_by_id),
        'created_on': format_date_or_datetime(referral.created_on),
        'id': str(referral.id),
        'interaction_id': interaction_id,
        'notes': referral.notes,
        'recipient_id': str(referral.recipient_id),
        'status': str(referral.status),
        'subject': referral.subject,
    }
def get_expected_data_from_pipeline_item(item):
    """Return the expected dictionary representation of a pipeline item."""
    contact_ids = [str(contact.id) for contact in item.contacts.all()]
    if item.expected_win_date:
        expected_win_date = format_date_or_datetime(item.expected_win_date)
    else:
        expected_win_date = None
    return {
        'adviser_id': str(item.adviser_id),
        'archived': item.archived,
        'company_id': str(item.company_id),
        # An item with no contacts is represented as None rather than [].
        'contact_ids': contact_ids if contact_ids else None,
        'created_on': format_date_or_datetime(item.created_on),
        'expected_win_date': expected_win_date,
        'id': str(item.id),
        'likelihood_to_win': item.likelihood_to_win,
        'modified_on': format_date_or_datetime(item.modified_on),
        'name': item.name,
        'potential_value': item.potential_value,
        'sector_name': get_attr_or_none(item, 'sector.name'),
        'status': item.status,
    }
def get_expected_data_from_contact(contact):
    """Return the expected dictionary representation of the given contact."""
    # Optional FK: rendered as a string UUID only when set.
    if contact.company_id is None:
        company_id = None
    else:
        company_id = str(contact.company_id)
    expected = {
        'accepts_dit_email_marketing': contact.accepts_dit_email_marketing,
        'address_1': contact.address_1,
        'address_2': contact.address_2,
        'address_country__name': get_attr_or_none(contact, 'address_country.name'),
        'address_county': contact.address_county,
        'address_postcode': contact.address_postcode,
        'address_same_as_company': contact.address_same_as_company,
        'address_town': contact.address_town,
        'archived': contact.archived,
        'archived_on': format_date_or_datetime(contact.archived_on),
        'company_id': company_id,
        'created_on': format_date_or_datetime(contact.created_on),
        'email': contact.email,
        'email_alternative': contact.email_alternative,
        'id': str(contact.id),
        'job_title': contact.job_title,
        'modified_on': format_date_or_datetime(contact.modified_on),
        'name': contact.name,
        'notes': contact.notes,
        'primary': contact.primary,
        'telephone_alternative': contact.telephone_alternative,
        'telephone_number': contact.telephone_number,
    }
    return expected
def get_expected_data_from_order(order):
    """Return the expected dictionary representation of the given order."""
    # Refund-derived values are computed up front; both are None when the
    # order has no refunds at all.
    has_refunds = order.refunds.exists()
    if has_refunds:
        latest_refund = order.refunds.latest('created_on')
        refund_created = format_date_or_datetime(latest_refund.created_on)
        refund_total_amount = sum(
            refund.total_amount for refund in order.refunds.all()
        )
    else:
        refund_created = None
        refund_total_amount = None
    return {
        'cancellation_reason__name': get_attr_or_none(order, 'cancellation_reason.name'),
        'cancelled_on': format_date_or_datetime(order.cancelled_on),
        'company_id': str(order.company_id),
        'completed_on': format_date_or_datetime(order.completed_on),
        'contact_id': str(order.contact_id),
        'created_by__dit_team_id': str(order.created_by.dit_team_id),
        'created_on': format_date_or_datetime(order.created_on),
        'delivery_date': format_date_or_datetime(order.delivery_date),
        'id': str(order.id),
        'invoice__subtotal_cost': get_attr_or_none(order, 'invoice.subtotal_cost'),
        'paid_on': format_date_or_datetime(order.paid_on),
        'primary_market__name': get_attr_or_none(order, 'primary_market.name'),
        'quote__accepted_on': format_date_or_datetime(
            get_attr_or_none(order, 'quote.accepted_on'),
        ),
        'quote__created_on': format_date_or_datetime(
            get_attr_or_none(order, 'quote.created_on'),
        ),
        'reference': order.reference,
        'refund_created': refund_created,
        'refund_total_amount': refund_total_amount,
        'sector_name': get_attr_or_none(order, 'sector.name'),
        'services': join_attr_values(order.service_types.order_by('name')),
        'status': order.status,
        'subtotal_cost': order.subtotal_cost,
        'total_cost': order.total_cost,
        'uk_region__name': order.uk_region.name,
        'vat_cost': order.vat_cost,
    }
def test_export(
    self,
    setup_es,
    request_sortby,
    orm_ordering,
):
    """
    Test export of company search results.

    Creates a mix of ordinary and HQ companies, requests the CSV export
    (optionally with a sortby), and checks headers, field names and every row
    against the ORM data ordered by the equivalent ORM ordering.
    """
    CompanyFactory.create_batch(3)
    CompanyFactory.create_batch(2, hq=True)
    setup_es.indices.refresh()

    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    url = reverse('api-v3:search:company-export')
    # freeze_time pins the timestamp embedded in the download filename below.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == ('text/csv', {'charset': 'utf-8'})
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Companies - 2018-01-01-11-12-13.csv'},
    )

    # 'pk' is a tiebreaker so the expected ordering is deterministic.
    sorted_company = Company.objects.order_by(orm_ordering, 'pk')
    # Decoded as utf-8-sig so any leading BOM in the export is stripped.
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

    assert reader.fieldnames == list(SearchCompanyExportAPIView.field_titles.values())
    expected_row_data = [
        {
            'Name': company.name,
            'Link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}/{company.pk}',
            'Sector': get_attr_or_none(company, 'sector.name'),
            'Country': get_attr_or_none(company, 'registered_address_country.name'),
            'UK region': get_attr_or_none(company, 'uk_region.name'),
            'Archived': company.archived,
            'Date created': company.created_on,
            'Number of employees': get_attr_or_none(company, 'employee_range.name'),
            'Annual turnover': get_attr_or_none(company, 'turnover_range.name'),
            # Missing headquarter type is rendered as an empty string.
            'Headquarter type': (get_attr_or_none(company, 'headquarter_type.name') or '').upper(),
        }
        for company in sorted_company
    ]

    assert list(dict(row) for row in reader) == format_csv_data(expected_row_data)
def test_export(self, es_with_collector, request_sortby, orm_ordering):
    """
    Test export large capital investor profile search results.

    Creates two profiles with distinct capital figures (one inside a frozen
    timestamp so created_on-based orderings are exercised), requests the CSV
    export and compares every row against the ORM data.
    """
    url = reverse('api-v4:search:large-investor-profile-export')
    CompleteLargeCapitalInvestorProfileFactory(
        investable_capital=10000,
        global_assets_under_management=20000,
    )
    # Second profile created at a fixed time so date-based sort orders differ.
    with freeze_time('2018-01-01 11:12:13'):
        LargeCapitalInvestorProfileFactory(
            investable_capital=300,
            global_assets_under_management=200,
        )
    es_with_collector.flush_and_refresh()
    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    # freeze_time pins the timestamp embedded in the download filename below.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {
            'filename': 'Data Hub - Large capital profiles - 2018-01-01-11-12-13.csv',
        },
    )
    # 'pk' is a tiebreaker so the expected ordering is deterministic.
    sorted_profiles = LargeCapitalInvestorProfile.objects.order_by(
        orm_ordering, 'pk')
    # Decoded as utf-8-sig so any leading BOM in the export is stripped.
    response_text = response.getvalue().decode('utf-8-sig')
    reader = DictReader(StringIO(response_text))
    assert reader.fieldnames == list(
        SearchLargeInvestorProfileExportAPIView.field_titles.values(),
    )
    expected_row_data = [
        {
            'Date created': profile.created_on,
            'Global assets under management': profile.global_assets_under_management,
            'Investable capital': profile.investable_capital,
            'Investor company': get_attr_or_none(
                profile,
                'investor_company.name',
            ),
            'Investor description': profile.investor_description,
            'Notes on locations': profile.notes_on_locations,
            'Investor type': get_attr_or_none(
                profile,
                'investor_type.name',
            ),
            'Required checks conducted': get_attr_or_none(
                profile,
                'required_checks_conducted.name',
            ),
            'Minimum return rate': get_attr_or_none(
                profile,
                'minimum_return_rate.name',
            ),
            'Minimum equity percentage': get_attr_or_none(
                profile,
                'minimum_equity_percentage.name',
            ),
            'Date last modified': profile.modified_on,
            # Many-to-many fields are joined in name order to match the export.
            'UK regions of interest': join_attr_values(
                profile.uk_region_locations.order_by('name'),
            ),
            'Restrictions': join_attr_values(
                profile.restrictions.order_by('name'),
            ),
            'Time horizons': join_attr_values(
                profile.time_horizons.order_by('name'),
            ),
            'Investment types': join_attr_values(
                profile.investment_types.order_by('name'),
            ),
            'Deal ticket sizes': join_attr_values(
                profile.deal_ticket_sizes.order_by('name'),
            ),
            'Desired deal roles': join_attr_values(
                profile.desired_deal_roles.order_by('name'),
            ),
            'Required checks conducted by': get_attr_or_none(
                profile,
                'required_checks_conducted_by.name',
            ),
            'Required checks conducted on': profile.required_checks_conducted_on,
            'Other countries being considered': join_attr_values(
                profile.other_countries_being_considered.order_by('name'),
            ),
            'Construction risks': join_attr_values(
                profile.construction_risks.order_by('name'),
            ),
            'Data Hub profile reference': str(profile.pk),
            'Asset classes of interest': join_attr_values(
                profile.asset_classes_of_interest.order_by('name'),
            ),
            'Data Hub link': (
                f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
                f'/{profile.investor_company.pk}/investments/large-capital-profile'
            ),
        }
        for profile in sorted_profiles
    ]
    expected_rows = format_csv_data(expected_row_data)
    # item is an ordered dict so is cast to a dict to make the comparison easier to
    # interpret in the event of the assert actual_rows == expected_rows failing.
    actual_rows = [dict(item) for item in reader]
    assert actual_rows == expected_rows
def test_export(
    self,
    opensearch_with_collector,
    request_sortby,
    orm_ordering,
):
    """
    Test export of company search results.

    Creates companies both with and without turnover/employee figures, plus
    export-country records in both statuses, so the export's fallback logic
    (exact figures vs ranges) and the country-list columns are exercised.
    """
    companies_1 = CompanyFactory.create_batch(
        3,
        turnover=None,
        is_turnover_estimated=None,
        number_of_employees=None,
        is_number_of_employees_estimated=None,
    )
    companies_2 = CompanyFactory.create_batch(
        2,
        hq=True,
        turnover=100,
        is_turnover_estimated=True,
        number_of_employees=95,
        is_number_of_employees_estimated=True,
    )
    # Each company gets three export-country records covering both statuses.
    for company in (*companies_1, *companies_2):
        CompanyExportCountryFactory.create_batch(
            3,
            company=company,
            country=factory.Iterator(
                Country.objects.order_by('?'),
            ),
            status=factory.Iterator(
                [
                    CompanyExportCountry.Status.CURRENTLY_EXPORTING,
                    CompanyExportCountry.Status.FUTURE_INTEREST,
                    CompanyExportCountry.Status.CURRENTLY_EXPORTING,
                ],
            ),
        )
    opensearch_with_collector.flush_and_refresh()
    data = {}
    if request_sortby:
        data['sortby'] = request_sortby
    url = reverse('api-v4:search:company-export')
    # freeze_time pins the timestamp embedded in the download filename below.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)
    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == (
        'text/csv',
        {'charset': 'utf-8'},
    )
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Companies - 2018-01-01-11-12-13.csv'},
    )
    # 'pk' is a tiebreaker so the expected ordering is deterministic.
    sorted_company = Company.objects.order_by(orm_ordering, 'pk')
    # Decoded as utf-8-sig so any leading BOM in the export is stripped.
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))
    assert reader.fieldnames == list(
        SearchCompanyExportAPIView().field_titles.values())
    expected_row_data = [
        {
            'Name': company.name,
            'Link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}/{company.pk}',
            'Sector': get_attr_or_none(company, 'sector.name'),
            'Area': get_attr_or_none(company, 'address_area.name'),
            'Country': get_attr_or_none(company, 'address_country.name'),
            'UK region': get_attr_or_none(company, 'uk_region.name'),
            'Countries exported to': ', '.join([
                e.country.name
                for e in company.export_countries.filter(
                    status=CompanyExportCountry.Status.CURRENTLY_EXPORTING,
                ).order_by('country__name')
            ]),
            'Countries of interest': ', '.join([
                e.country.name
                for e in company.export_countries.filter(
                    status=CompanyExportCountry.Status.FUTURE_INTEREST,
                ).order_by('country__name')
            ]),
            'Archived': company.archived,
            'Date created': company.created_on,
            # Exact figure takes precedence; the range name is the fallback.
            'Number of employees': (
                company.number_of_employees
                if company.number_of_employees is not None
                else get_attr_or_none(company, 'employee_range.name')
            ),
            'Annual turnover': (
                f'${company.turnover}'
                if company.turnover is not None
                else get_attr_or_none(company, 'turnover_range.name')
            ),
            # Missing headquarter type is rendered as an empty string.
            'Headquarter type': (
                get_attr_or_none(company, 'headquarter_type.name') or ''
            ).upper(),
        }
        for company in sorted_company
    ]
    assert list(dict(row) for row in reader) == format_csv_data(expected_row_data)
def get_expected_data_from_company(company):
    """
    Return the expected dictionary representation of the given company.

    UUIDs are rendered as strings, dates via format_date_or_datetime(),
    optional relations as None.

    :param company: a Company model instance
    :return: dict of expected field values
    """
    return {
        'address_1': company.address_1,
        'address_2': company.address_2,
        'address_county': company.address_county,
        'address_country__name': company.address_country.name,
        'address_postcode': company.address_postcode,
        'address_town': company.address_town,
        'archived': company.archived,
        'archived_on': format_date_or_datetime(company.archived_on),
        'archived_reason': company.archived_reason,
        'business_type__name': get_attr_or_none(company, 'business_type.name'),
        'company_number': company.company_number,
        'created_by_id': (
            str(company.created_by_id) if company.created_by is not None else None
        ),
        'created_on': format_date_or_datetime(company.created_on),
        'description': company.description,
        'duns_number': company.duns_number,
        'export_experience_category__name': get_attr_or_none(
            company,
            'export_experience_category.name',
        ),
        'global_headquarters_id': (
            str(company.global_headquarters_id)
            if company.global_headquarters_id is not None
            else None
        ),
        'global_ultimate_duns_number': company.global_ultimate_duns_number,
        # Fixed: the literal string 'company' was previously passed instead of
        # the company object, so the lookup was performed on a str and never
        # reflected the record's actual headquarter type.
        'headquarter_type__name': get_attr_or_none(
            company,
            'headquarter_type.name',
        ),
        'id': str(company.id),
        'is_number_of_employees_estimated': company.is_number_of_employees_estimated,
        'is_turnover_estimated': company.is_turnover_estimated,
        'modified_on': format_date_or_datetime(company.modified_on),
        'name': company.name,
        'number_of_employees': company.number_of_employees,
        'one_list_tier__name': get_attr_or_none(company, 'one_list_tier.name'),
        'one_list_account_owner_id': company.one_list_account_owner_id,
        'reference_code': company.reference_code,
        'registered_address_1': company.registered_address_1,
        'registered_address_2': company.registered_address_2,
        'registered_address_country__name': get_attr_or_none(
            company,
            'registered_address_country.name',
        ),
        'registered_address_county': company.registered_address_county,
        'registered_address_postcode': company.registered_address_postcode,
        'registered_address_town': company.registered_address_town,
        'sector_name': get_attr_or_none(company, 'sector.name'),
        'trading_names': company.trading_names,
        'turnover': company.turnover,
        'uk_region__name': get_attr_or_none(company, 'uk_region.name'),
        'vat_number': company.vat_number,
        'website': company.website,
    }
def get_expected_data_from_project(project):
    """
    Return the expected dictionary representation of the given investment project.

    UUIDs are rendered as strings, dates via format_date_or_datetime(), and
    to-many relations as name/id lists ordered by name (or None when empty,
    except where noted below).

    :param project: an InvestmentProject model instance
    :return: dict of expected field values
    """
    # Evaluated once up front — the original looked this attribute path up
    # twice (once for the truthiness guard, once for the value).
    gva_multiplier = get_attr_or_none(project, 'gva_multiplier.multiplier')
    return {
        'actual_land_date': format_date_or_datetime(project.actual_land_date),
        'actual_uk_region_names': (
            [region.name for region in project.actual_uk_regions.order_by('name')]
            if project.actual_uk_regions.exists()
            else None
        ),
        'address_1': project.address_1,
        'address_2': project.address_2,
        'address_town': project.address_town,
        'address_postcode': project.address_postcode,
        'anonymous_description': project.anonymous_description,
        'associated_non_fdi_r_and_d_project_id': str_or_none(
            project.associated_non_fdi_r_and_d_project_id,
        ),
        'average_salary__name': get_attr_or_none(project, 'average_salary.name'),
        'business_activity_names': (
            [activity.name for activity in project.business_activities.order_by('name')]
            if project.business_activities.exists()
            else None
        ),
        'client_relationship_manager_id': str_or_none(project.client_relationship_manager_id),
        'client_requirements': project.client_requirements,
        # NOTE: unlike most to-many fields here, an empty set yields [None]
        # rather than None — presumably mirrors the serialised output; verify
        # against the consumer before changing.
        'competing_countries': (
            [country.name for country in project.competitor_countries.order_by('name')]
            if project.competitor_countries.exists()
            else [None]
        ),
        'created_by_id': str_or_none(project.created_by_id),
        'created_on': format_date_or_datetime(project.created_on),
        'delivery_partner_names': (
            [partner.name for partner in project.delivery_partners.order_by('name')]
            if project.delivery_partners.exists()
            else None
        ),
        'description': project.description,
        'estimated_land_date': format_date_or_datetime(project.estimated_land_date),
        'export_revenue': project.export_revenue,
        'fdi_type__name': get_attr_or_none(project, 'fdi_type.name'),
        'fdi_value__name': get_attr_or_none(project, 'fdi_value.name'),
        'foreign_equity_investment': (
            float(project.foreign_equity_investment)
            if project.foreign_equity_investment
            else None
        ),
        'government_assistance': project.government_assistance,
        'gross_value_added': project.gross_value_added,
        'gva_multiplier__multiplier': (
            float(gva_multiplier) if gva_multiplier else None
        ),
        'id': str(project.pk),
        'investment_type__name': get_attr_or_none(project, 'investment_type.name'),
        'investor_company_id': str_or_none(project.investor_company_id),
        'investor_company_sector': get_attr_or_none(
            project,
            'investor_company.sector.name',
        ),
        'investor_type__name': get_attr_or_none(project, 'investor_type.name'),
        'level_of_involvement_name': get_attr_or_none(project, 'level_of_involvement.name'),
        'likelihood_to_land__name': get_attr_or_none(project, 'likelihood_to_land.name'),
        'modified_by_id': str_or_none(project.modified_by_id),
        'modified_on': format_date_or_datetime(project.modified_on),
        'name': project.name,
        'new_tech_to_uk': project.new_tech_to_uk,
        'non_fdi_r_and_d_budget': project.non_fdi_r_and_d_budget,
        'number_new_jobs': project.number_new_jobs,
        'number_safeguarded_jobs': project.number_safeguarded_jobs,
        'other_business_activity': project.other_business_activity,
        'project_arrived_in_triage_on': format_date_or_datetime(
            project.project_arrived_in_triage_on,
        ),
        'project_assurance_adviser_id': str_or_none(project.project_assurance_adviser_id),
        'project_manager_id': str_or_none(project.project_manager_id),
        'project_reference': project.project_code,
        'proposal_deadline': format_date_or_datetime(project.proposal_deadline),
        'r_and_d_budget': project.r_and_d_budget,
        'referral_source_activity__name': get_attr_or_none(
            project,
            'referral_source_activity.name',
        ),
        'referral_source_activity_marketing__name': get_attr_or_none(
            project,
            'referral_source_activity_marketing.name',
        ),
        'referral_source_activity_website__name': get_attr_or_none(
            project,
            'referral_source_activity_website.name',
        ),
        'sector_name': get_attr_or_none(project, 'sector.name'),
        'specific_programme__name': get_attr_or_none(project, 'specific_programme.name'),
        'stage__name': get_attr_or_none(project, 'stage.name'),
        'status': project.status,
        'strategic_driver_names': (
            [driver.name for driver in project.strategic_drivers.order_by('name')]
            if project.strategic_drivers.exists()
            else None
        ),
        # Like competing_countries, an empty set yields [None], not None.
        'team_member_ids': (
            [
                str(team_member.adviser_id)
                for team_member in project.team_members.order_by('id')
            ]
            if project.team_members.exists()
            else [None]
        ),
        'total_investment': (
            float(project.total_investment) if project.total_investment else None
        ),
        'uk_company_id': str_or_none(project.uk_company_id),
        'uk_company_sector': get_attr_or_none(project, 'uk_company.sector.name'),
        'uk_region_location_names': (
            [region.name for region in project.uk_region_locations.order_by('name')]
            if project.uk_region_locations.exists()
            else None
        ),
    }
def test_export(self, setup_es, request_sortby, orm_ordering):
    """
    Test export of investment project search results.

    Creates projects in various states (including a won project with team
    members and a subsidiary project with a One List global HQ), requests the
    CSV export and compares every row against the ORM data.
    """
    url = reverse('api-v3:search:investment_project-export')

    InvestmentProjectFactory()
    InvestmentProjectFactory(cdms_project_code='cdms-code')
    VerifyWinInvestmentProjectFactory()
    won_project = WonInvestmentProjectFactory()
    InvestmentProjectTeamMemberFactory.create_batch(3, investment_project=won_project)
    # Subsidiary investor whose global HQ is on the One List — exercises the
    # global account manager column.
    InvestmentProjectFactory(
        name='project for subsidiary',
        investor_company=CompanyFactory(
            global_headquarters=CompanyFactory(
                one_list_tier_id=OneListTier.objects.first().id,
                one_list_account_owner=AdviserFactory(),
            ),
        ),
    )
    setup_es.indices.refresh()
    data = {}
    if request_sortby:
        data['sortby'] = request_sortby
    # freeze_time pins the timestamp embedded in the download filename below.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)
    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Investment projects - 2018-01-01-11-12-13.csv'},
    )
    # 'pk' is a tiebreaker so the expected ordering is deterministic.
    sorted_projects = InvestmentProject.objects.order_by(orm_ordering, 'pk')
    # Decoded as utf-8-sig so any leading BOM in the export is stripped.
    response_text = response.getvalue().decode('utf-8-sig')
    reader = DictReader(StringIO(response_text))
    assert reader.fieldnames == list(SearchInvestmentExportAPIView.field_titles.values())
    expected_row_data = [
        {
            'Date created': project.created_on,
            'Project reference': project.project_code,
            'Project name': project.name,
            'Investor company': project.investor_company.name,
            'Investor company town or city': project.investor_company.address_town,
            'Country of origin': get_attr_or_none(
                project, 'investor_company.address_country.name'),
            'Investment type': get_attr_or_none(project, 'investment_type.name'),
            'Status': project.get_status_display(),
            'Stage': get_attr_or_none(project, 'stage.name'),
            'Link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["investmentproject"]}'
                    f'/{project.pk}',
            'Actual land date': project.actual_land_date,
            'Estimated land date': project.estimated_land_date,
            'FDI value': get_attr_or_none(project, 'fdi_value.name'),
            'Sector': get_attr_or_none(project, 'sector.name'),
            'Date of latest interaction': None,
            'Project manager': get_attr_or_none(project, 'project_manager.name'),
            'Client relationship manager': get_attr_or_none(
                project, 'client_relationship_manager.name'),
            'Global account manager': self._get_global_account_manager_name(project),
            'Project assurance adviser': get_attr_or_none(
                project, 'project_assurance_adviser.name'),
            'Other team members': join_attr_values(project.team_members.all(), 'adviser.name'),
            'Delivery partners': join_attr_values(project.delivery_partners.all()),
            'Possible UK regions': join_attr_values(project.uk_region_locations.all()),
            'Actual UK regions': join_attr_values(project.actual_uk_regions.all()),
            'Specific investment programme': get_attr_or_none(
                project, 'specific_programme.name'),
            'Referral source activity': get_attr_or_none(
                project, 'referral_source_activity.name'),
            'Referral source activity website': get_attr_or_none(
                project, 'referral_source_activity_website.name'),
            'Total investment': project.total_investment,
            'New jobs': project.number_new_jobs,
            'Average salary of new jobs': get_attr_or_none(project, 'average_salary.name'),
            'Safeguarded jobs': project.number_safeguarded_jobs,
            'Level of involvement': get_attr_or_none(project, 'level_of_involvement.name'),
            'Likelihood to land': get_attr_or_none(project, 'likelihood_to_land.name'),
            'R&D budget': project.r_and_d_budget,
            'Associated non-FDI R&D project': project.non_fdi_r_and_d_budget,
            'New to world tech': project.new_tech_to_uk,
            'FDI type': project.fdi_type,
            'Foreign equity investment': project.foreign_equity_investment,
            'GVA multiplier': get_attr_or_none(project, 'gva_multiplier.multiplier'),
            'GVA': project.gross_value_added,
        }
        for project in sorted_projects
    ]
    expected_rows = format_csv_data(expected_row_data)
    # item is an ordered dict so is cast to a dict to make the comparison easier to
    # interpret in the event of the assert actual_rows == expected_rows failing.
    actual_rows = [dict(item) for item in reader]
    # Support for ordering was added to StringAgg in Django 2.2. However, it is not
    # currently used due to https://code.djangoproject.com/ticket/30315. While that
    # remains the case, our StringAgg fields are unordered and we use this workaround to
    # compare them.
    unordered_fields = (
        'Other team members',
        'Delivery partners',
        'Possible UK regions',
        'Actual UK regions',
    )
    for row in chain(actual_rows, expected_rows):
        for field in unordered_fields:
            row[field] = frozenset(row[field].split(', '))
    assert actual_rows == expected_rows
def _get_global_account_manager_name(self, project):
    """Return the One List global account manager's name for the project's investor company, or None."""
    account_manager = (
        project.investor_company.get_one_list_group_global_account_manager()
    )
    return get_attr_or_none(account_manager, 'name')
def test_export(
    self,
    es_with_collector,
    request_sortby,
    orm_ordering,
):
    """
    Test export of order search results.

    Creates orders in every lifecycle state (plus one order with multiple
    approved refunds), requests the CSV export and compares every row —
    including the net refund aggregation — against the ORM data.
    """
    # One order in each state/shape the export has to handle.
    factories = (
        OrderCancelledFactory,
        OrderCompleteFactory,
        OrderFactory,
        OrderPaidFactory,
        OrderSubscriberFactory,
        OrderWithAcceptedQuoteFactory,
        OrderWithCancelledQuoteFactory,
        OrderWithOpenQuoteFactory,
        OrderWithoutAssigneesFactory,
        OrderWithoutLeadAssigneeFactory,
        ApprovedRefundFactory,
        RequestedRefundFactory,
    )
    # Several approved refunds on one order, to exercise the refund sum.
    order_with_multiple_refunds = OrderPaidFactory()
    ApprovedRefundFactory(
        order=order_with_multiple_refunds,
        requested_amount=order_with_multiple_refunds.total_cost / 5,
    )
    ApprovedRefundFactory(
        order=order_with_multiple_refunds,
        requested_amount=order_with_multiple_refunds.total_cost / 4,
    )
    ApprovedRefundFactory(
        order=order_with_multiple_refunds,
        requested_amount=order_with_multiple_refunds.total_cost / 3,
    )
    for factory_ in factories:
        factory_()
    es_with_collector.flush_and_refresh()
    data = {}
    if request_sortby:
        data['sortby'] = request_sortby
    url = reverse('api-v3:search:order-export')
    # freeze_time pins the timestamp embedded in the download filename below.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)
    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == (
        'text/csv',
        {'charset': 'utf-8'},
    )
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Orders - 2018-01-01-11-12-13.csv'},
    )
    # 'pk' is a tiebreaker so the expected ordering is deterministic.
    sorted_orders = Order.objects.order_by(orm_ordering, 'pk')
    # Decoded as utf-8-sig so any leading BOM in the export is stripped.
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))
    assert reader.fieldnames == list(
        SearchOrderExportAPIView.field_titles.values())
    # Only approved refunds contribute to the net refund column.
    sorted_orders_and_refunds = (
        (order, order.refunds.filter(status=RefundStatus.approved))
        for order in sorted_orders
    )
    expected_row_data = [
        {
            'Order reference': order.reference,
            # Monetary amounts are stored in pence; the export shows pounds.
            'Net price': Decimal(order.subtotal_cost) / 100,
            'Net refund': Decimal(
                sum(refund.net_amount for refund in refunds),
            ) / 100 if refunds else None,
            'Status': order.get_status_display(),
            'Link': order.get_datahub_frontend_url(),
            'Sector': order.sector.name,
            'Market': order.primary_market.name,
            'UK region': order.uk_region.name,
            'Company': order.company.name,
            'Company country': order.company.address_country.name,
            'Company UK region': get_attr_or_none(order, 'company.uk_region.name'),
            'Company link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
                            f'/{order.company.pk}',
            'Contact': order.contact.name,
            'Contact job title': order.contact.job_title,
            'Contact link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["contact"]}'
                            f'/{order.contact.pk}',
            'Lead adviser': get_attr_or_none(order.get_lead_assignee(), 'adviser.name'),
            'Created by team': get_attr_or_none(order, 'created_by.dit_team.name'),
            'Date created': order.created_on,
            'Delivery date': order.delivery_date,
            'Date quote sent': get_attr_or_none(order, 'quote.created_on'),
            'Date quote accepted': get_attr_or_none(order, 'quote.accepted_on'),
            'Date payment received': order.paid_on,
            'Date completed': order.completed_on,
        }
        for order, refunds in sorted_orders_and_refunds
    ]
    assert list(dict(row) for row in reader) == format_csv_data(expected_row_data)
def test_export(
    self,
    es_with_collector,
    request_sortby,
    orm_ordering,
):
    """
    Test export of contact search results.

    Creates contacts with varying address setups and interaction histories,
    requests the CSV export and compares every row against the ORM data,
    including the derived latest-interaction columns.
    """
    ArchivedContactFactory()
    ContactWithOwnAddressFactory()
    ContactFactory()

    # These are to test date of and team of latest interaction a bit more thoroughly
    CompanyInteractionFactory.create_batch(2)
    CompanyInteractionFactory(contacts=ContactFactory.create_batch(2))
    interaction_with_multiple_teams = CompanyInteractionFactory()
    InteractionDITParticipantFactory.create_batch(
        2,
        interaction=interaction_with_multiple_teams,
    )
    es_with_collector.flush_and_refresh()
    data = {}
    if request_sortby:
        data['sortby'] = request_sortby
    url = reverse('api-v3:search:contact-export')
    # freeze_time pins the timestamp embedded in the download filename below.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)
    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == (
        'text/csv',
        {'charset': 'utf-8'},
    )
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Contacts - 2018-01-01-11-12-13.csv'},
    )
    # The annotation mirrors the export's country fallback (contact address
    # country, else the company's) so orm_ordering can sort on it.
    sorted_contacts = Contact.objects.annotate(
        computed_address_country_name=Coalesce(
            'address_country__name',
            'company__address_country__name',
        ),
    ).order_by(
        orm_ordering,
        'pk',
    )
    # Decoded as utf-8-sig so any leading BOM in the export is stripped.
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))
    assert reader.fieldnames == list(
        SearchContactExportAPIView.field_titles.values())
    # E123 is ignored as there are seemingly unresolvable indentation errors in the dict below
    expected_row_data = [  # noqa: E123
        {
            'Name': contact.name,
            'Job title': contact.job_title,
            'Date created': contact.created_on,
            'Archived': contact.archived,
            'Link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["contact"]}/{contact.pk}',
            'Company': get_attr_or_none(contact, 'company.name'),
            'Company sector': get_attr_or_none(contact, 'company.sector.name'),
            'Company link':
                f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}/{contact.company.pk}',
            'Company UK region': get_attr_or_none(contact, 'company.uk_region.name'),
            # Address fields fall back to the company's when the contact uses
            # the company address.
            'Country':
                contact.company.address_country.name
                if contact.address_same_as_company
                else contact.address_country.name,
            'Postcode':
                contact.company.address_postcode
                if contact.address_same_as_company
                else contact.address_postcode,
            'Phone number':
                ' '.join((contact.telephone_countrycode, contact.telephone_number)),
            'Email address': contact.email,
            'Accepts DIT email marketing': contact.accepts_dit_email_marketing,
            'Date of latest interaction':
                max(contact.interactions.all(), key=attrgetter('date')).date
                if contact.interactions.all() else None,
            'Teams of latest interaction':
                _format_interaction_team_names(
                    max(contact.interactions.all(), key=attrgetter('date')),
                )
                if contact.interactions.exists()
                else None,
            'Created by team': get_attr_or_none(contact, 'created_by.dit_team.name'),
        }
        for contact in sorted_contacts
    ]
    actual_row_data = [dict(row) for row in reader]
    assert actual_row_data == format_csv_data(expected_row_data)
def test_interaction_export(
    self,
    setup_es,
    request_sortby,
    orm_ordering,
):
    """
    Test export of interaction search results with a policy feedback user.

    Checks that all interaction kinds except for policy feedback are included
    in the export.

    Parametrised (presumably via the enclosing class) with:
        request_sortby -- `sortby` value sent to the API (may be falsy for default ordering)
        orm_ordering -- equivalent Django ORM ordering used to build the expected rows
    """
    # Faker generates job titles containing commas which complicates comparisons,
    # so all contact job titles are explicitly set
    company = CompanyFactory()
    # One interaction of each kind, all against the same company.
    CompanyInteractionFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Engineer'),
            # None and '' exercise the empty-job-title formatting paths.
            ContactFactory(company=company, job_title=None),
            ContactFactory(company=company, job_title=''),
        ],
    )
    EventServiceDeliveryFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Managing director'),
        ],
    )
    InvestmentProjectInteractionFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Exports manager'),
        ],
    )
    ServiceDeliveryFactory(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Sales director'),
        ],
    )
    CompanyInteractionFactoryWithPolicyFeedback(
        company=company,
        contacts=[
            ContactFactory(company=company, job_title='Business development manager'),
        ],
        # Random selections so the test doesn't depend on specific metadata rows.
        policy_areas=PolicyArea.objects.order_by('?')[:2],
        policy_issue_types=PolicyIssueType.objects.order_by('?')[:2],
    )

    # Make the new documents searchable before hitting the export endpoint.
    setup_es.indices.refresh()

    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    url = reverse('api-v3:search:interaction-export')

    # Frozen time fixes the timestamp embedded in the download filename.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == ('text/csv', {'charset': 'utf-8'})
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Interactions - 2018-01-01-11-12-13.csv'},
    )

    sorted_interactions = Interaction.objects.all().order_by(
        orm_ordering,
        'pk',
    )
    # utf-8-sig strips the BOM the export prepends for spreadsheet tools.
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

    assert reader.fieldnames == list(SearchInteractionExportAPIView.field_titles.values())

    expected_row_data = [
        {
            'Date': interaction.date,
            'Type': interaction.get_kind_display(),
            'Service': get_attr_or_none(interaction, 'service.name'),
            'Subject': interaction.subject,
            'Link':
                f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["interaction"]}'
                f'/{interaction.pk}',
            'Company': get_attr_or_none(interaction, 'company.name'),
            'Company link':
                f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
                f'/{interaction.company.pk}',
            'Company country': get_attr_or_none(
                interaction,
                'company.address_country.name',
            ),
            'Company UK region': get_attr_or_none(interaction, 'company.uk_region.name'),
            'Company sector': get_attr_or_none(interaction, 'company.sector.name'),
            'Contacts': _format_expected_contacts(interaction),
            'Adviser': get_attr_or_none(interaction, 'dit_adviser.name'),
            'Service provider': get_attr_or_none(interaction, 'dit_team.name'),
            'Event': get_attr_or_none(interaction, 'event.name'),
            'Communication channel':
                get_attr_or_none(interaction, 'communication_channel.name'),
            'Service delivery status': get_attr_or_none(
                interaction,
                'service_delivery_status.name',
            ),
            'Net company receipt': interaction.net_company_receipt,
            'Policy issue types': join_attr_values(interaction.policy_issue_types.all()),
            'Policy areas': join_attr_values(interaction.policy_areas.all(), separator='; '),
            'Policy feedback notes': interaction.policy_feedback_notes,
        }
        for interaction in sorted_interactions
    ]

    actual_row_data = [_format_actual_csv_row(row) for row in reader]
    assert actual_row_data == format_csv_data(expected_row_data)
def test_export(
    self,
    opensearch_with_collector,
    request_sortby,
    orm_ordering,
    requests_mock,
    accepts_dit_email_marketing,
):
    """
    Test export of contact search results.

    Parametrised (presumably via the enclosing class) with:
        request_sortby -- `sortby` value sent to the API (may be falsy for default ordering)
        orm_ordering -- equivalent Django ORM ordering used to build the expected rows
        accepts_dit_email_marketing -- expected consent-service answer for every contact
    Also verifies the consent service is queried exactly once with every
    contact's email address.
    """
    # A varied set of contacts: archived, own address, own area, company-address-based.
    ArchivedContactFactory()
    ContactWithOwnAddressFactory()
    ContactFactory()
    ContactWithOwnAreaFactory()

    # These are to test date of and team of latest interaction a bit more thoroughly
    CompanyInteractionFactory.create_batch(2)
    CompanyInteractionFactory(contacts=ContactFactory.create_batch(2))
    interaction_with_multiple_teams = CompanyInteractionFactory()
    InteractionDITParticipantFactory.create_batch(
        2,
        interaction=interaction_with_multiple_teams,
    )
    # Make the new documents searchable before hitting the export endpoint.
    opensearch_with_collector.flush_and_refresh()

    data = {}
    if request_sortby:
        data['sortby'] = request_sortby

    url = reverse('api-v3:search:contact-export')

    # Frozen time fixes the timestamp embedded in the download filename.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)

    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Type')) == ('text/csv', {'charset': 'utf-8'})
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {'filename': 'Data Hub - Contacts - 2018-01-01-11-12-13.csv'},
    )

    # Mirror the view's country fallback (own address country, else company's)
    # so orm_ordering can reference computed_address_country_name.
    sorted_contacts = Contact.objects.annotate(
        computed_address_country_name=Coalesce(
            'address_country__name',
            'company__address_country__name',
        ),
    ).order_by(
        orm_ordering,
        'pk',
    )

    # NOTE(review): the matcher is registered AFTER the POST but BEFORE
    # response.getvalue() below — presumably the CSV body is generated
    # lazily (streaming response) and the consent lookup happens when the
    # body is read; confirm against the view before reordering.
    matcher = requests_mock.get(
        f'{settings.CONSENT_SERVICE_BASE_URL}'
        f'{CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
        text=generate_hawk_response({
            'results': [
                {
                    'email': contact.email,
                    'consents': [
                        CONSENT_SERVICE_EMAIL_CONSENT_TYPE,
                    ] if accepts_dit_email_marketing else [],
                }
                for contact in sorted_contacts
            ],
        }),
        status_code=status.HTTP_200_OK,
    )

    # utf-8-sig strips the BOM the export prepends for spreadsheet tools.
    reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

    assert reader.fieldnames == list(SearchContactExportAPIView.field_titles.values())

    expected_row_data = format_csv_data([
        {
            'Name': contact.name,
            'Job title': contact.job_title,
            'Date created': contact.created_on,
            'Archived': contact.archived,
            'Link': f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["contact"]}/{contact.pk}',
            'Company': get_attr_or_none(contact, 'company.name'),
            'Company sector': get_attr_or_none(contact, 'company.sector.name'),
            'Company link':
                f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}/{contact.company.pk}',
            'Company UK region': get_attr_or_none(contact, 'company.uk_region.name'),
            # Address fields fall back to the company's address when the
            # contact uses the company address; `and` yields None for a
            # missing area rather than raising.
            'Area':
                (contact.company.address_area and contact.company.address_area.name)
                if contact.address_same_as_company
                else (contact.address_area and contact.address_area.name),
            'Country':
                contact.company.address_country.name
                if contact.address_same_as_company
                else contact.address_country.name,
            'Postcode':
                contact.company.address_postcode
                if contact.address_same_as_company
                else contact.address_postcode,
            'Phone number': contact.full_telephone_number,
            'Email address': contact.email,
            'Accepts DIT email marketing': accepts_dit_email_marketing,
            'Date of latest interaction':
                max(contact.interactions.all(), key=attrgetter('date')).date
                if contact.interactions.all()
                else None,
            'Teams of latest interaction':
                _format_interaction_team_names(
                    max(contact.interactions.all(), key=attrgetter('date')),
                )
                if contact.interactions.exists()
                else None,
            'Created by team': get_attr_or_none(contact, 'created_by.dit_team.name'),
        }
        for contact in sorted_contacts
    ])

    actual_row_data = [dict(row) for row in reader]
    assert len(actual_row_data) == len(expected_row_data)
    # Row-by-row comparison gives a more precise failure than one big assert.
    for index, row in enumerate(actual_row_data):
        assert row == expected_row_data[index]

    # The consent service should be hit once, with every contact's email.
    assert matcher.call_count == 1
    assert matcher.last_request.query == urllib.parse.urlencode(
        {'email': [c.email for c in sorted_contacts]},
        doseq=True,
    )
def _build_expected_export_response(opportunity):
    """
    Return the CSV row (as a dict keyed by column title) expected for
    *opportunity* in the large capital opportunity export.
    """
    # Local shorthands so each column below reads as a single expression.
    def dotted(path):
        # Nested attribute lookup that tolerates missing intermediate objects.
        return get_attr_or_none(opportunity, path)

    def joined(related_manager):
        # Name-ordered, joined names of a to-many relation.
        return join_attr_values(related_manager.order_by('name'))

    data_hub_link = (
        f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["largecapitalopportunity"]}'
        f'/{opportunity.pk}/investments/large-capital-opportunity'
    )

    return {
        'Date created': opportunity.created_on,
        'Created by': dotted('created_by.name'),
        'Data Hub opportunity reference': str(opportunity.pk),
        'Data Hub link': data_hub_link,
        'Name': opportunity.name,
        'Description': opportunity.description,
        'Type': dotted('type.name'),
        'Status': dotted('status.name'),
        'UK region locations': joined(opportunity.uk_region_locations),
        'Promoters': joined(opportunity.promoters),
        'Lead DIT relationship manager': opportunity.lead_dit_relationship_manager.name,
        'Other DIT contacts': dotted('other_dit_contacts.name'),
        'Required checks conducted': dotted('required_checks_conducted.name'),
        'Required checks conducted by': dotted('required_checks_conducted_by.name'),
        'Required checks conducted on': opportunity.required_checks_conducted_on,
        'Asset classes': joined(opportunity.asset_classes),
        'Opportunity value type': dotted('opportunity_value_type.name'),
        'Opportunity value': opportunity.opportunity_value,
        'Construction risks': joined(opportunity.construction_risks),
        'Total investment sought': opportunity.total_investment_sought,
        'Current investment secured': opportunity.current_investment_secured,
        'Investment types': joined(opportunity.investment_types),
        'Estimated return rate': dotted('estimated_return_rate.name'),
        'Time horizons': joined(opportunity.time_horizons),
        'Sources of funding': joined(opportunity.sources_of_funding),
        'DIT support provided': opportunity.dit_support_provided,
        'Funding supporting details': opportunity.funding_supporting_details,
        'Reasons for abandonment': joined(opportunity.reasons_for_abandonment),
        'Why abandoned': opportunity.why_abandoned,
        'Why suspended': opportunity.why_suspended,
        'Date last modified': opportunity.modified_on,
    }