def test_updated_large_capital_opportunity_synced(opensearch_with_signals):
    """Test that saving a change to a large capital opportunity syncs it to OpenSearch."""
    large_capital_opportunity = LargeCapitalOpportunityFactory()
    large_capital_opportunity.total_investment_sought = 12345
    large_capital_opportunity.save()
    # Force a refresh so the signal-driven update is visible to the search index.
    opensearch_with_signals.indices.refresh()
    document = _get_documents(opensearch_with_signals, large_capital_opportunity.pk)
    assert document['_source']['total_investment_sought'] == 12345
def test_autocomplete_large_capital_opportunities(self):
    """Test the autocomplete behaviour of the opportunities viewset."""
    for opportunity_name in ('Apple', 'Auburn', 'Boom', 'Crush'):
        LargeCapitalOpportunityFactory(name=opportunity_name)
    url = reverse(viewname='api-v4:large-capital-opportunity:collection')
    response = self.api_client.get(url, data={'autocomplete': 'A'})
    assert response.status_code == status.HTTP_200_OK
    # Only the two names starting with 'A' should be returned, in order.
    returned_names = [result['name'] for result in response.json()['results']]
    assert returned_names == ['Apple', 'Auburn']
def test_delete_from_opensearch(opensearch_with_signals):
    """
    Test that deleting a large capital opportunity from the db triggers
    delete_document to remove it from OpenSearch.
    """
    opportunity = LargeCapitalOpportunityFactory()
    opensearch_with_signals.indices.refresh()
    # Sanity check: the document exists before deletion.
    assert _get_documents(opensearch_with_signals, opportunity.pk)
    delete_document_path = 'datahub.search.large_capital_opportunity.signals.delete_document'
    with mock.patch(delete_document_path) as delete_document_mock:
        opportunity.delete()
        opensearch_with_signals.indices.refresh()
        assert delete_document_mock.called is True
def test_raises_error_when_required_fields_not_provided(self):
    """Tests an integrity error is raised when any of the required fields are missing."""
    with pytest.raises(IntegrityError):
        LargeCapitalOpportunityFactory(
            dit_support_provided=None,
            lead_dit_relationship_manager=None,
        )
def test_large_capital_opportunity_dbmodels_to_documents(self, opensearch):
    """Tests that db models convert to the same number of OpenSearch documents."""
    opportunities = LargeCapitalOpportunityFactory.create_batch(2)
    documents = SearchLargeCapitalOpportunity.db_objects_to_documents(opportunities)
    assert len(list(documents)) == len(opportunities)
def test_patch_large_capital_opportunity(self):
    """Test updating a large capital opportunity via PATCH."""
    expected_value = 5
    opportunity = LargeCapitalOpportunityFactory()
    url = reverse('api-v4:large-capital-opportunity:item', kwargs={'pk': opportunity.pk})
    response = self.api_client.patch(url, data={'opportunity_value': expected_value})
    response_data = response.json()
    assert response.status_code == status.HTTP_200_OK, response_data
    # The API serialises the decimal value as a string.
    assert response_data['opportunity_value'] == str(expected_value)
    opportunity.refresh_from_db()
    assert opportunity.opportunity_value == expected_value
    # Setting the value removes it from the incomplete-details list.
    assert 'opportunity_value' not in response_data['incomplete_details_fields']
def test_indexed_doc(opensearch):
    """Test the OpenSearch data of a large capital opportunity."""
    # The relationship manager is cleared so the indexed field is None below.
    opportunity = LargeCapitalOpportunityFactory(
        lead_dit_relationship_manager=None,
    )
    doc = SearchLargeCapitalOpportunity.to_document(opportunity)
    # Index the single document directly (bypassing signals).
    opensearch_client.bulk(actions=(doc, ), chunk_size=1)
    opensearch.indices.refresh()
    indexed_large_capital_opportunity = opensearch.get(
        index=SearchLargeCapitalOpportunity.get_write_index(),
        id=opportunity.pk,
    )
    assert indexed_large_capital_opportunity['_id'] == str(opportunity.pk)
    # Full comparison against the expected document shape.
    # NOTE(review): the fixed timestamps below presumably rely on time being
    # frozen to 2019-01-01 by a fixture/decorator outside this view — confirm.
    assert indexed_large_capital_opportunity['_source'] == {
        '_document_type': LargeCapitalOpportunitySearchApp.name,
        'id': str(opportunity.pk),
        'type': {
            'id': str(OpportunityTypeConstant.large_capital.value.id),
            'name': OpportunityTypeConstant.large_capital.value.name,
        },
        'status': {
            'id': str(OpportunityStatusConstant.abandoned.value.id),
            'name': OpportunityStatusConstant.abandoned.value.name,
        },
        'created_by': None,
        'uk_region_locations': [],
        'promoters': [],
        'required_checks_conducted': None,
        'required_checks_conducted_by': None,
        'lead_dit_relationship_manager': None,
        'other_dit_contacts': [],
        'asset_classes': [],
        'opportunity_value': None,
        'opportunity_value_type': None,
        'investment_types': [],
        'construction_risks': [],
        'estimated_return_rate': None,
        'time_horizons': [],
        'investment_projects': [],
        'sources_of_funding': [],
        'reasons_for_abandonment': [],
        'total_investment_sought': None,
        'current_investment_secured': None,
        'modified_on': '2019-01-01T00:00:00+00:00',
        'created_on': '2019-01-01T00:00:00+00:00',
        'description': '',
        'required_checks_conducted_on': None,
        'name': '',
        'dit_support_provided': False,
    }
def test_audit_log_view(self):
    """Test retrieval of audit log."""
    # First revision: create the opportunity.
    initial_datetime = now()
    with reversion.create_revision():
        opportunity = LargeCapitalOpportunityFactory(
            name='This amazing opportunity',
        )
        reversion.set_comment('Initial')
        reversion.set_date_created(initial_datetime)
        reversion.set_user(self.user)
    # Second revision: rename the opportunity.
    changed_datetime = now()
    with reversion.create_revision():
        opportunity.name = 'That amazing opportunity'
        opportunity.save()
        reversion.set_comment('Changed')
        reversion.set_date_created(changed_datetime)
        reversion.set_user(self.user)
    # Versions are returned newest-first, so index 0 is the 'Changed' revision.
    versions = Version.objects.get_for_object(opportunity)
    version_id = versions[0].id
    url = reverse('api-v4:large-capital-opportunity:audit-item',
                  kwargs={'pk': opportunity.pk})
    response = self.api_client.get(url)
    response_data = response.json()['results']
    # No need to test the whole response
    assert len(response_data) == 1
    entry = response_data[0]
    assert entry['id'] == version_id
    assert entry['user']['name'] == self.user.name
    assert entry['comment'] == 'Changed'
    assert entry['timestamp'] == format_date_or_datetime(changed_datetime)
    # The diff records the old and new name values.
    assert entry['changes']['name'] == [
        'This amazing opportunity', 'That amazing opportunity'
    ]
    # Base-model bookkeeping fields must not appear in the change list.
    assert not set(EXCLUDED_BASE_MODEL_FIELDS) & entry['changes'].keys()
def test_edit_promoter_syncs_large_capital_opportunity_in_opensearch(
        opensearch_with_signals):
    """
    Tests that renaming a promoter company also updates the related
    large capital opportunity document.
    """
    promoter = CompanyFactory()
    opportunity = LargeCapitalOpportunityFactory(promoters=[promoter])
    opensearch_with_signals.indices.refresh()
    # Renaming the promoter should cascade to the opportunity's document.
    promoter.name = 'SYNC TEST'
    promoter.save()
    document = _get_documents(opensearch_with_signals, opportunity.pk)
    assert document['_source']['promoters'][0]['name'] == 'SYNC TEST'
def test_export(self, opensearch_with_collector, request_sortby, orm_ordering):
    """Test export large capital opportunity search results.

    request_sortby/orm_ordering are presumably supplied by a pytest
    parametrisation outside this view — verify against the class decorator.
    """
    url = reverse('api-v4:search:large-capital-opportunity-export')
    CompleteLargeCapitalOpportunityFactory()
    with freeze_time('2018-01-01 11:12:13'):
        LargeCapitalOpportunityFactory()
    opensearch_with_collector.flush_and_refresh()
    data = {}
    if request_sortby:
        data['sortby'] = request_sortby
    # Freeze time again so the generated filename timestamp is predictable.
    with freeze_time('2018-01-01 11:12:13'):
        response = self.api_client.post(url, data=data)
    assert response.status_code == status.HTTP_200_OK
    assert parse_header(response.get('Content-Disposition')) == (
        'attachment',
        {
            'filename': 'Data Hub - Large capital opportunities - 2018-01-01-11-12-13.csv',
        },
    )
    # 'pk' is a tie-breaker for deterministic ordering.
    sorted_opportunities = LargeCapitalOpportunity.objects.order_by(orm_ordering, 'pk')
    # utf-8-sig strips the BOM emitted for spreadsheet compatibility.
    response_text = response.getvalue().decode('utf-8-sig')
    reader = DictReader(StringIO(response_text))
    assert reader.fieldnames == list(
        SearchLargeCapitalOpportunityExportAPIView.field_titles.values(),
    )
    expected_row_data = [
        _build_expected_export_response(opportunity)
        for opportunity in sorted_opportunities
    ]
    expected_rows = format_csv_data(expected_row_data)
    # item is an ordered dict so is cast to a dict to make the comparison easier to
    # interpret in the event of the assert actual_rows == expected_rows failing.
    actual_rows = [dict(item) for item in reader]
    assert actual_rows == expected_rows
def test_patch_large_capital_opportunity_all_requirements_fields(self):
    """Test updating the requirements fields for a large capital opportunity."""
    direct_investment_equity_id = (
        LargeCapitalInvestmentTypesConstant.direct_investment_in_project_equity.value.id)
    opportunity = LargeCapitalOpportunityFactory()
    url = reverse('api-v4:large-capital-opportunity:item', kwargs={'pk': opportunity.pk})
    # Payload covering every field counted towards 'requirements' completeness.
    request_data = {
        'total_investment_sought': 10,
        'current_investment_secured': 1,
        'investment_types': [{'id': direct_investment_equity_id}],
        'estimated_return_rate': ReturnRateConstant.up_to_five_percent.value.id,
        'time_horizons': [
            {
                'id': TimeHorizonConstant.up_to_five_years.value.id,
            },
            {
                'id': TimeHorizonConstant.five_to_nine_years.value.id,
            },
        ],
    }
    response = self.api_client.patch(url, data=request_data)
    response_data = response.json()
    assert response.status_code == status.HTTP_200_OK, response_data
    # With all requirements fields set, nothing should remain incomplete.
    assert response_data['incomplete_requirements_fields'] == []
    # Decimal fields are serialised as strings by the API.
    assert response_data['total_investment_sought'] == '10'
    assert response_data['current_investment_secured'] == '1'
    assert (response_data['investment_types'][0]['id'] == str(
        direct_investment_equity_id))
    assert (response_data['estimated_return_rate']['id'] == str(
        ReturnRateConstant.up_to_five_percent.value.id))
    # Compare as a set: time horizon order is not significant.
    assert (set(time_horizon['id']
                for time_horizon in response_data['time_horizons']) == {
        str(TimeHorizonConstant.up_to_five_years.value.id),
        str(TimeHorizonConstant.five_to_nine_years.value.id),
    })
def test_large_capital_opportunity_dbmodel_to_dict(self, opensearch):
    """Tests that a db model converts to a dict with exactly the expected keys."""
    opportunity = LargeCapitalOpportunityFactory()
    converted = SearchLargeCapitalOpportunity.db_object_to_dict(opportunity)
    # Expected keys, listed alphabetically for ease of maintenance.
    expected_keys = {
        '_document_type',
        'asset_classes',
        'construction_risks',
        'created_by',
        'created_on',
        'current_investment_secured',
        'description',
        'dit_support_provided',
        'estimated_return_rate',
        'id',
        'investment_projects',
        'investment_types',
        'lead_dit_relationship_manager',
        'modified_on',
        'name',
        'opportunity_value',
        'opportunity_value_type',
        'other_dit_contacts',
        'promoters',
        'reasons_for_abandonment',
        'required_checks_conducted',
        'required_checks_conducted_by',
        'required_checks_conducted_on',
        'sources_of_funding',
        'status',
        'time_horizons',
        'total_investment_sought',
        'type',
        'uk_region_locations',
    }
    assert set(converted.keys()) == expected_keys
def test_patch_large_capital_opportunity_all_details_fields(self):
    """Test updating the details fields for a large capital opportunity."""
    promoters = CompanyFactory.create_batch(2)
    lead_dit_relationship_manager = AdviserFactory()
    required_checks_conducted_by = AdviserFactory()
    opportunity = LargeCapitalOpportunityFactory()
    url = reverse('api-v4:large-capital-opportunity:item', kwargs={'pk': opportunity.pk})
    # Payload covering every field counted towards 'details' completeness.
    request_data = {
        'description': 'Lorem ipsum',
        'uk_region_locations': [
            {'id': UKRegionConstant.north_east.value.id},
            {'id': UKRegionConstant.north_west.value.id},
        ],
        'promoters': [{'id': promoter.pk} for promoter in promoters],
        'required_checks_conducted': {
            'id': RequiredChecksConductedConstant.cleared.value.id,
        },
        'lead_dit_relationship_manager': lead_dit_relationship_manager.pk,
        'asset_classes': [
            {'id': AssetClassInterestConstant.biofuel.value.id},
            {'id': AssetClassInterestConstant.biomass.value.id},
        ],
        'opportunity_value': 5,
        'construction_risks': [
            {
                'id': ConstructionRiskConstant.greenfield.value.id,
            },
            {
                'id': ConstructionRiskConstant.brownfield.value.id,
            },
        ],
        'required_checks_conducted_on': '2019-01-05',
        'required_checks_conducted_by': required_checks_conducted_by.id,
    }
    response = self.api_client.patch(url, data=request_data)
    response_data = response.json()
    assert response.status_code == status.HTTP_200_OK, response_data
    # With all details fields set, nothing should remain incomplete.
    assert response_data['incomplete_details_fields'] == []
    expected_uk_region_locations = {
        str(UKRegionConstant.north_east.value.id),
        str(UKRegionConstant.north_west.value.id),
    }
    # Many-to-many fields are compared as sets: response order is not significant.
    assert (set(region['id']
                for region in response_data['uk_region_locations'])
            == expected_uk_region_locations)
    assert (response_data['required_checks_conducted']['id'] == str(
        RequiredChecksConductedConstant.cleared.value.id))
    assert (response_data['lead_dit_relationship_manager']['id'] == str(
        lead_dit_relationship_manager.pk))
    assert (set(asset['id'] for asset in response_data['asset_classes']) == {
        str(AssetClassInterestConstant.biofuel.value.id),
        str(AssetClassInterestConstant.biomass.value.id),
    })
    # The decimal value is serialised as a string by the API.
    assert response_data['opportunity_value'] == '5'
    assert (set(
        construction_risk['id']
        for construction_risk in response_data['construction_risks']) == {
        str(ConstructionRiskConstant.greenfield.value.id),
        str(ConstructionRiskConstant.brownfield.value.id),
    })
def test_new_large_capital_opportunity_synced(opensearch_with_signals):
    """Test that a newly created large capital opportunity appears in OpenSearch."""
    large_capital_opportunity = LargeCapitalOpportunityFactory()
    opensearch_with_signals.indices.refresh()
    assert _get_documents(opensearch_with_signals, large_capital_opportunity.pk)
def get_opportunity_for_search():
    """Sets up search list test by adding many opportunities and returning an opportunity."""
    # Background noise: extra opportunities the search must filter through.
    LargeCapitalOpportunityFactory.create_batch(5)
    opportunity_with_project = LargeCapitalOpportunityFactory(
        investment_projects=[InvestmentProjectFactory()],
    )
    yield opportunity_with_project
def test_retrieve_large_capital_opportunity(self):
    """Test retrieving a large capital opportunity."""
    opportunity = LargeCapitalOpportunityFactory()
    url = reverse('api-v4:large-capital-opportunity:item', kwargs={'pk': opportunity.pk})
    response = self.api_client.get(url)
    response_data = response.json()
    assert response.status_code == status.HTTP_200_OK, response_data
    # Full expected serialised representation of a bare factory-created opportunity.
    # NOTE(review): the fixed timestamps presumably rely on time being frozen
    # to 2019-05-01 by a fixture/decorator outside this view — confirm.
    expected_data = {
        'id': str(opportunity.pk),
        'created_on': '2019-05-01T00:00:00Z',
        'modified_on': '2019-05-01T00:00:00Z',
        'type': {
            'name': opportunity.type.name,
            'id': str(opportunity.type.pk),
        },
        'status': {
            'name': opportunity.status.name,
            'id': str(opportunity.status.pk),
        },
        'name': opportunity.name,
        'description': opportunity.description,
        'dit_support_provided': opportunity.dit_support_provided,
        # Fields the factory leaves unset are reported as incomplete.
        'incomplete_details_fields': [
            'description',
            'uk_region_locations',
            'promoters',
            'required_checks_conducted',
            'asset_classes',
            'opportunity_value',
            'construction_risks',
        ],
        'incomplete_requirements_fields': [
            'total_investment_sought',
            'current_investment_secured',
            'investment_types',
            'estimated_return_rate',
            'time_horizons',
        ],
        'opportunity_value_type': None,
        'opportunity_value': None,
        'required_checks_conducted': None,
        'investment_projects': [],
        'reasons_for_abandonment': [],
        'promoters': [],
        'lead_dit_relationship_manager': {
            'name': opportunity.lead_dit_relationship_manager.name,
            'id': str(opportunity.lead_dit_relationship_manager.pk),
        },
        'other_dit_contacts': [],
        'total_investment_sought': None,
        'current_investment_secured': None,
        'required_checks_conducted_on': None,
        'required_checks_conducted_by': None,
        'investment_types': [],
        'estimated_return_rate': None,
        'time_horizons': [],
        'construction_risks': [],
        'sources_of_funding': [],
        'asset_classes': [],
        'uk_region_locations': [],
    }
    assert response_data == expected_data
def setup_data(opensearch_with_collector):
    """Sets up data for the tests.

    Creates opportunities at several frozen creation dates with distinct
    names, promoters, values and filterable attributes, then yields the full
    list after flushing them to OpenSearch.
    """
    promoter = CompanyFactory(name='promoter')
    capital_expenditure = OpportunityValueTypeConstant.capital_expenditure.value.id
    gross_development_value = OpportunityValueTypeConstant.gross_development_value.value.id
    # An opportunity with a much older created_on, for date-based filtering/sorting.
    with freeze_time('2010-02-01'):
        frozen_created_on_opportunity = LargeCapitalOpportunityFactory(
            promoters=[CompanyFactory(
                name='Frozen promoter',
            )],
            name='Frozen project',
            description='frozen in 2010',
            construction_risks=[
                ConstructionRiskConstant.greenfield.value.id,
            ],
            total_investment_sought=1000,
            current_investment_secured=15,
            uk_region_locations=[
                UKRegionConstant.north_west.value.id,
                UKRegionConstant.east_of_england.value.id,
            ],
        )
    with freeze_time('2018-01-01 10:00:00'):
        south_project = LargeCapitalOpportunityFactory(
            promoters=[CompanyFactory(
                name='Distinct promoter',
            )],
            name='South project',
            description='South project',
            investment_types=[
                InvestmentTypesConstant.direct_investment_in_project_equity.value.id,
            ],
            total_investment_sought=6000,
            current_investment_secured=1500,
        )
    # Created an hour later than south_project so timestamps are distinct.
    with freeze_time('2018-01-01 11:00:00'):
        north_project = LargeCapitalOpportunityFactory(
            total_investment_sought=20,
            current_investment_secured=7,
            promoters=[CompanyFactory(
                name='Another promoter',
            )],
            name='North project',
            description='North project',
            uk_region_locations=[
                UKRegionConstant.north_west.value.id,
                UKRegionConstant.north_east.value.id,
            ],
        )
    # The remaining opportunities all share the same promoter and created_on.
    with freeze_time('2019-01-01'):
        opportunities = [
            LargeCapitalOpportunityFactory(
                name='Railway',
                description='Railway',
                promoters=[promoter],
                total_investment_sought=950,
                construction_risks=[
                    ConstructionRiskConstant.operational.value.id,
                ],
                estimated_return_rate_id=ReturnRateConstant.up_to_five_percent.value.id,
                time_horizons=[
                    TimeHorizonConstant.up_to_five_years.value.id,
                    TimeHorizonConstant.five_to_nine_years.value.id,
                ],
            ),
            LargeCapitalOpportunityFactory(
                name='Skyscraper',
                description='Skyscraper',
                promoters=[promoter],
                total_investment_sought=950,
                opportunity_value_type_id=capital_expenditure,
                opportunity_value=200,
                construction_risks=[
                    ConstructionRiskConstant.brownfield.value.id,
                ],
                time_horizons=[
                    TimeHorizonConstant.up_to_five_years.value.id,
                ],
            ),
            frozen_created_on_opportunity,
            LargeCapitalOpportunityFactory(
                name='Business centre',
                description='Business centre',
                promoters=[promoter],
                total_investment_sought=9500,
                estimated_return_rate_id=ReturnRateConstant.up_to_five_percent.value.id,
                opportunity_value_type_id=capital_expenditure,
                opportunity_value=250,
                construction_risks=[
                    ConstructionRiskConstant.brownfield.value.id,
                    ConstructionRiskConstant.greenfield.value.id,
                ],
                asset_classes=[
                    AssetClassInterestConstant.biomass.value.id,
                ],
            ),
            LargeCapitalOpportunityFactory(
                name='Restaurant',
                description='Restaurant',
                promoters=[promoter],
                total_investment_sought=9500,
                opportunity_value_type_id=gross_development_value,
                opportunity_value=200,
                asset_classes=[
                    AssetClassInterestConstant.biofuel.value.id,
                ],
                time_horizons=[
                    TimeHorizonConstant.five_to_nine_years.value.id,
                ],
            ),
            north_project,
            south_project,
        ]
    # Make everything above visible to search before the tests run.
    opensearch_with_collector.flush_and_refresh()
    yield opportunities