def get_expected_data_from_order(order):
    """Returns expected dictionary based on given order"""
    return {
        'cancellation_reason__name': get_attr_or_none(order, 'cancellation_reason.name'),
        'cancelled_on': format_date_or_datetime(order.cancelled_on),
        'company_id': str(order.company_id),
        'completed_on': format_date_or_datetime(order.completed_on),
        'contact_id': str(order.contact_id),
        'created_by__dit_team_id': str(order.created_by.dit_team_id),
        'created_on': format_date_or_datetime(order.created_on),
        'delivery_date': format_date_or_datetime(order.delivery_date),
        'id': str(order.id),
        'invoice__subtotal_cost': get_attr_or_none(order, 'invoice.subtotal_cost'),
        'paid_on': format_date_or_datetime(order.paid_on),
        'primary_market__name': get_attr_or_none(order, 'primary_market.name'),
        'quote__accepted_on': format_date_or_datetime(
            get_attr_or_none(order, 'quote.accepted_on'),
        ),
        'quote__created_on': format_date_or_datetime(
            get_attr_or_none(order, 'quote.created_on'),
        ),
        'reference': order.reference,
        'refund_created': (
            format_date_or_datetime(order.refunds.latest('created_on').created_on)
            if order.refunds.exists() else None
        ),
        'refund_total_amount': (
            sum(refund.total_amount for refund in order.refunds.all())
            if order.refunds.exists() else None
        ),
        'sector_name': get_attr_or_none(order, 'sector.name'),
        'services': join_attr_values(order.service_types.order_by('name')),
        'status': order.status,
        'subtotal_cost': order.subtotal_cost,
        'total_cost': order.total_cost,
        'uk_region__name': order.uk_region.name,
        'vat_cost': order.vat_cost,
    }
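

# The examples above and below rely on a few shared test helpers that are not part of
# this excerpt. A minimal sketch of their assumed behaviour follows (the names come from
# the snippets; the signatures and bodies here are illustrative, not the project's
# actual implementations):
from functools import reduce


def get_attr_or_none(obj, attr_path):
    """Follow a dotted attribute path, returning None if any attribute is missing."""
    try:
        return reduce(getattr, attr_path.split('.'), obj)
    except AttributeError:
        return None


def format_date_or_datetime(value):
    """Format a date or datetime the way the API serialises it; None passes through."""
    return value.isoformat() if value is not None else None


def join_attr_values(iterable, attr='name', separator=', '):
    """Join an attribute (possibly a dotted path) of each item into a single string."""
    return separator.join(str(get_attr_or_none(item, attr)) for item in iterable)
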
    def test_export(self, es_with_collector, request_sortby, orm_ordering):
        """Test export large capital investor profile search results."""
        url = reverse('api-v4:search:large-investor-profile-export')

        CompleteLargeCapitalInvestorProfileFactory(
            investable_capital=10000,
            global_assets_under_management=20000,
        )
        with freeze_time('2018-01-01 11:12:13'):
            LargeCapitalInvestorProfileFactory(
                investable_capital=300,
                global_assets_under_management=200,
            )

        es_with_collector.flush_and_refresh()

        data = {}
        if request_sortby:
            data['sortby'] = request_sortby

        with freeze_time('2018-01-01 11:12:13'):
            response = self.api_client.post(url, data=data)

        assert response.status_code == status.HTTP_200_OK
        assert parse_header(response.get('Content-Disposition')) == (
            'attachment',
            {
                'filename': 'Data Hub - Large capital profiles - 2018-01-01-11-12-13.csv',
            },
        )

        sorted_profiles = LargeCapitalInvestorProfile.objects.order_by(
            orm_ordering, 'pk')
        response_text = response.getvalue().decode('utf-8-sig')
        reader = DictReader(StringIO(response_text))

        assert reader.fieldnames == list(
            SearchLargeInvestorProfileExportAPIView.field_titles.values(),
        )

        expected_row_data = [
            {
                'Date created': profile.created_on,
                'Global assets under management': profile.global_assets_under_management,
                'Investable capital': profile.investable_capital,
                'Investor company': get_attr_or_none(profile, 'investor_company.name'),
                'Investor description': profile.investor_description,
                'Notes on locations': profile.notes_on_locations,
                'Investor type': get_attr_or_none(profile, 'investor_type.name'),
                'Required checks conducted': get_attr_or_none(
                    profile, 'required_checks_conducted.name',
                ),
                'Minimum return rate': get_attr_or_none(profile, 'minimum_return_rate.name'),
                'Minimum equity percentage': get_attr_or_none(
                    profile, 'minimum_equity_percentage.name',
                ),
                'Date last modified': profile.modified_on,
                'UK regions of interest': join_attr_values(
                    profile.uk_region_locations.order_by('name'),
                ),
                'Restrictions': join_attr_values(profile.restrictions.order_by('name')),
                'Time horizons': join_attr_values(profile.time_horizons.order_by('name')),
                'Investment types': join_attr_values(profile.investment_types.order_by('name')),
                'Deal ticket sizes': join_attr_values(
                    profile.deal_ticket_sizes.order_by('name'),
                ),
                'Desired deal roles': join_attr_values(
                    profile.desired_deal_roles.order_by('name'),
                ),
                'Required checks conducted by': get_attr_or_none(
                    profile, 'required_checks_conducted_by.name',
                ),
                'Required checks conducted on': profile.required_checks_conducted_on,
                'Other countries being considered': join_attr_values(
                    profile.other_countries_being_considered.order_by('name'),
                ),
                'Construction risks': join_attr_values(
                    profile.construction_risks.order_by('name'),
                ),
                'Data Hub profile reference': str(profile.pk),
                'Asset classes of interest': join_attr_values(
                    profile.asset_classes_of_interest.order_by('name'),
                ),
                'Data Hub link': (
                    f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
                    f'/{profile.investor_company.pk}/investments/large-capital-profile'
                ),
            }
            for profile in sorted_profiles
        ]

        expected_rows = format_csv_data(expected_row_data)

        # Each item is an OrderedDict, so it is cast to a dict to make the comparison
        # easier to interpret if the assert actual_rows == expected_rows below fails.
        actual_rows = [dict(item) for item in reader]

        assert actual_rows == expected_rows
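

# request_sortby and orm_ordering are supplied to the test above by a
# pytest.mark.parametrize decorator that is not included in this excerpt. A hypothetical
# example of such a pairing, mapping an API sort key to the equivalent ORM ordering used
# to build the expected rows (the sort keys below are illustrative only):
import pytest


@pytest.mark.parametrize(
    'request_sortby,orm_ordering',
    (
        (None, '-created_on'),
        ('modified_on:desc', '-modified_on'),
    ),
)
def test_sort_key_pairing_example(request_sortby, orm_ordering):
    """Placeholder showing how the two parameters would be paired and supplied."""
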
Example #3
    def test_interaction_export(
        self,
        setup_es,
        request_sortby,
        orm_ordering,
    ):
        """
        Test export of interaction search results with a policy feedback user.

        Checks that all interaction kinds except for policy feedback are included in the export.
        """
        # Faker generates job titles containing commas which complicates comparisons,
        # so all contact job titles are explicitly set
        company = CompanyFactory()
        CompanyInteractionFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Engineer'),
                ContactFactory(company=company, job_title=None),
                ContactFactory(company=company, job_title=''),
            ],
        )
        EventServiceDeliveryFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Managing director'),
            ],
        )
        InvestmentProjectInteractionFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Exports manager'),
            ],
        )
        ServiceDeliveryFactory(
            company=company,
            contacts=[
                ContactFactory(company=company, job_title='Sales director'),
            ],
        )
        CompanyInteractionFactoryWithPolicyFeedback(
            company=company,
            contacts=[
                ContactFactory(company=company,
                               job_title='Business development manager'),
            ],
            policy_areas=PolicyArea.objects.order_by('?')[:2],
            policy_issue_types=PolicyIssueType.objects.order_by('?')[:2],
        )

        setup_es.indices.refresh()

        data = {}
        if request_sortby:
            data['sortby'] = request_sortby

        url = reverse('api-v3:search:interaction-export')

        with freeze_time('2018-01-01 11:12:13'):
            response = self.api_client.post(url, data=data)

        assert response.status_code == status.HTTP_200_OK
        assert parse_header(response.get('Content-Type')) == ('text/csv', {'charset': 'utf-8'})
        assert parse_header(response.get('Content-Disposition')) == (
            'attachment',
            {'filename': 'Data Hub - Interactions - 2018-01-01-11-12-13.csv'},
        )

        sorted_interactions = Interaction.objects.all().order_by(
            orm_ordering,
            'pk',
        )
        reader = DictReader(StringIO(response.getvalue().decode('utf-8-sig')))

        assert reader.fieldnames == list(
            SearchInteractionExportAPIView.field_titles.values())

        expected_row_data = [
            {
                'Date': interaction.date,
                'Type': interaction.get_kind_display(),
                'Service': get_attr_or_none(interaction, 'service.name'),
                'Subject': interaction.subject,
                'Link': (
                    f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["interaction"]}'
                    f'/{interaction.pk}'
                ),
                'Company': get_attr_or_none(interaction, 'company.name'),
                'Company link': (
                    f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["company"]}'
                    f'/{interaction.company.pk}'
                ),
                'Company country': get_attr_or_none(
                    interaction, 'company.address_country.name',
                ),
                'Company UK region': get_attr_or_none(interaction, 'company.uk_region.name'),
                'Company sector': get_attr_or_none(interaction, 'company.sector.name'),
                'Contacts': _format_expected_contacts(interaction),
                'Adviser': get_attr_or_none(interaction, 'dit_adviser.name'),
                'Service provider': get_attr_or_none(interaction, 'dit_team.name'),
                'Event': get_attr_or_none(interaction, 'event.name'),
                'Communication channel': get_attr_or_none(
                    interaction, 'communication_channel.name',
                ),
                'Service delivery status': get_attr_or_none(
                    interaction, 'service_delivery_status.name',
                ),
                'Net company receipt': interaction.net_company_receipt,
                'Policy issue types': join_attr_values(interaction.policy_issue_types.all()),
                'Policy areas': join_attr_values(interaction.policy_areas.all(), separator='; '),
                'Policy feedback notes': interaction.policy_feedback_notes,
            }
            for interaction in sorted_interactions
        ]

        actual_row_data = [_format_actual_csv_row(row) for row in reader]
        assert actual_row_data == format_csv_data(expected_row_data)
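

# format_csv_data is another shared helper not shown in these excerpts. A minimal sketch
# of its assumed behaviour: it coerces each expected value into the string form found in
# the CSV body (dates and datetimes to ISO format, None to an empty string, anything
# else via str()). Illustrative only, not the project's actual implementation.
from datetime import date, datetime


def format_csv_data(rows):
    """Return the rows with every value coerced to its expected CSV string form."""
    def _format_value(value):
        if value is None:
            return ''
        if isinstance(value, (date, datetime)):
            return value.isoformat()
        return str(value)

    return [{key: _format_value(value) for key, value in row.items()} for row in rows]
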
Example #4
    def test_export(self, setup_es, request_sortby, orm_ordering):
        """Test export of investment project search results."""
        url = reverse('api-v3:search:investment_project-export')

        InvestmentProjectFactory()
        InvestmentProjectFactory(cdms_project_code='cdms-code')
        VerifyWinInvestmentProjectFactory()
        won_project = WonInvestmentProjectFactory()
        InvestmentProjectTeamMemberFactory.create_batch(3, investment_project=won_project)

        InvestmentProjectFactory(
            name='project for subsidiary',
            investor_company=CompanyFactory(
                global_headquarters=CompanyFactory(
                    one_list_tier_id=OneListTier.objects.first().id,
                    one_list_account_owner=AdviserFactory(),
                ),
            ),
        )

        setup_es.indices.refresh()

        data = {}
        if request_sortby:
            data['sortby'] = request_sortby

        with freeze_time('2018-01-01 11:12:13'):
            response = self.api_client.post(url, data=data)

        assert response.status_code == status.HTTP_200_OK
        assert parse_header(response.get('Content-Disposition')) == (
            'attachment', {'filename': 'Data Hub - Investment projects - 2018-01-01-11-12-13.csv'},
        )

        sorted_projects = InvestmentProject.objects.order_by(orm_ordering, 'pk')
        response_text = response.getvalue().decode('utf-8-sig')
        reader = DictReader(StringIO(response_text))

        assert reader.fieldnames == list(SearchInvestmentExportAPIView.field_titles.values())

        expected_row_data = [
            {
                'Date created': project.created_on,
                'Project reference': project.project_code,
                'Project name': project.name,
                'Investor company': project.investor_company.name,
                'Investor company town or city': project.investor_company.address_town,
                'Country of origin':
                    get_attr_or_none(project, 'investor_company.address_country.name'),
                'Investment type': get_attr_or_none(project, 'investment_type.name'),
                'Status': project.get_status_display(),
                'Stage': get_attr_or_none(project, 'stage.name'),
                'Link':
                    f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["investmentproject"]}'
                    f'/{project.pk}',
                'Actual land date': project.actual_land_date,
                'Estimated land date': project.estimated_land_date,
                'FDI value': get_attr_or_none(project, 'fdi_value.name'),
                'Sector': get_attr_or_none(project, 'sector.name'),
                'Date of latest interaction': None,
                'Project manager': get_attr_or_none(project, 'project_manager.name'),
                'Client relationship manager':
                    get_attr_or_none(project, 'client_relationship_manager.name'),
                'Global account manager': self._get_global_account_manager_name(project),
                'Project assurance adviser':
                    get_attr_or_none(project, 'project_assurance_adviser.name'),
                'Other team members': join_attr_values(project.team_members.all(), 'adviser.name'),
                'Delivery partners': join_attr_values(project.delivery_partners.all()),
                'Possible UK regions': join_attr_values(project.uk_region_locations.all()),
                'Actual UK regions': join_attr_values(project.actual_uk_regions.all()),
                'Specific investment programme':
                    get_attr_or_none(project, 'specific_programme.name'),
                'Referral source activity':
                    get_attr_or_none(project, 'referral_source_activity.name'),
                'Referral source activity website':
                    get_attr_or_none(project, 'referral_source_activity_website.name'),
                'Total investment': project.total_investment,
                'New jobs': project.number_new_jobs,
                'Average salary of new jobs': get_attr_or_none(project, 'average_salary.name'),
                'Safeguarded jobs': project.number_safeguarded_jobs,
                'Level of involvement': get_attr_or_none(project, 'level_of_involvement.name'),
                'Likelihood to land': get_attr_or_none(project, 'likelihood_to_land.name'),
                'R&D budget': project.r_and_d_budget,
                'Associated non-FDI R&D project': project.non_fdi_r_and_d_budget,
                'New to world tech': project.new_tech_to_uk,
                'FDI type': project.fdi_type,
                'Foreign equity investment': project.foreign_equity_investment,
                'GVA multiplier': get_attr_or_none(project, 'gva_multiplier.multiplier'),
                'GVA': project.gross_value_added,
            }
            for project in sorted_projects
        ]

        expected_rows = format_csv_data(expected_row_data)

        # Each item is an OrderedDict, so it is cast to a dict to make the comparison
        # easier to interpret if the assert actual_rows == expected_rows below fails.
        actual_rows = [dict(item) for item in reader]

        # Support for ordering was added to StringAgg in Django 2.2. However, it is not
        # currently used due to https://code.djangoproject.com/ticket/30315. While that
        # remains the case, our StringAgg fields are unordered and we use this workaround to
        # compare them.
        unordered_fields = (
            'Other team members',
            'Delivery partners',
            'Possible UK regions',
            'Actual UK regions',
        )

        for row in chain(actual_rows, expected_rows):
            for field in unordered_fields:
                row[field] = frozenset(row[field].split(', '))

        assert actual_rows == expected_rows
Example #5
def _build_expected_export_response(opportunity):
    return {
        'Date created': opportunity.created_on,
        'Created by': get_attr_or_none(opportunity, 'created_by.name'),
        'Data Hub opportunity reference': str(opportunity.pk),
        'Data Hub link': (
            f'{settings.DATAHUB_FRONTEND_URL_PREFIXES["largecapitalopportunity"]}'
            f'/{opportunity.pk}/investments/large-capital-opportunity'
        ),
        'Name': opportunity.name,
        'Description': opportunity.description,
        'Type': get_attr_or_none(
            opportunity, 'type.name',
        ),
        'Status': get_attr_or_none(
            opportunity, 'status.name',
        ),
        'UK region locations': join_attr_values(
            opportunity.uk_region_locations.order_by('name'),
        ),
        'Promoters': join_attr_values(
            opportunity.promoters.order_by('name'),
        ),
        'Lead DIT relationship manager': opportunity.lead_dit_relationship_manager.name,
        'Other DIT contacts': get_attr_or_none(
            opportunity, 'other_dit_contacts.name',
        ),
        'Required checks conducted': get_attr_or_none(
            opportunity, 'required_checks_conducted.name',
        ),
        'Required checks conducted by': get_attr_or_none(
            opportunity, 'required_checks_conducted_by.name',
        ),
        'Required checks conducted on': opportunity.required_checks_conducted_on,
        'Asset classes': join_attr_values(
            opportunity.asset_classes.order_by('name'),
        ),
        'Opportunity value type': get_attr_or_none(
            opportunity, 'opportunity_value_type.name',
        ),
        'Opportunity value': opportunity.opportunity_value,
        'Construction risks': join_attr_values(
            opportunity.construction_risks.order_by('name'),
        ),
        'Total investment sought': opportunity.total_investment_sought,
        'Current investment secured': opportunity.current_investment_secured,
        'Investment types': join_attr_values(
            opportunity.investment_types.order_by('name'),
        ),
        'Estimated return rate': get_attr_or_none(
            opportunity, 'estimated_return_rate.name',
        ),
        'Time horizons': join_attr_values(
            opportunity.time_horizons.order_by('name'),
        ),
        'Sources of funding': join_attr_values(
            opportunity.sources_of_funding.order_by('name'),
        ),
        'DIT support provided': opportunity.dit_support_provided,
        'Funding supporting details': opportunity.funding_supporting_details,
        'Reasons for abandonment': join_attr_values(
            opportunity.reasons_for_abandonment.order_by('name'),
        ),
        'Why abandoned': opportunity.why_abandoned,
        'Why suspended': opportunity.why_suspended,
        'Date last modified': opportunity.modified_on,
    }
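

# A hypothetical usage of the helper above, following the same export-test pattern as
# the earlier examples (sorted_opportunities and reader are assumed to be set up in the
# same way as in those tests):
#
#     expected_rows = format_csv_data(
#         [_build_expected_export_response(opportunity) for opportunity in sorted_opportunities],
#     )
#     actual_rows = [dict(row) for row in reader]
#     assert actual_rows == expected_rows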