def test_audit_log(s3_stubber):
    """Test that an audit log (reversion revision) is created by the sector migration."""
    target_sector = SectorFactory()
    project = InvestmentProjectFactory()
    original_sector = project.sector
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector,new_sector
{project.id},{original_sector.id},{target_sector.id}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector', bucket, object_key)

    project.refresh_from_db()
    assert project.sector == target_sector

    # Exactly one revision, carrying the migration comment, should exist.
    versions = Version.objects.get_for_object(project)
    assert len(versions) == 1
    assert versions[0].revision.get_comment() == 'Sector migration.'
def test_audit_log(s3_stubber):
    """Test that the audit log is created by the created-on migration command."""
    project = InvestmentProjectFactory()
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,createdon
{project.id},2015-09-29 11:03:20.000
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_created_on', bucket, object_key)

    project.refresh_from_db()
    assert project.created_on == datetime(2015, 9, 29, 11, 3, 20, tzinfo=utc)

    # A single revision with the migration comment should have been recorded.
    versions = Version.objects.get_for_object(project)
    assert len(versions) == 1
    assert versions[0].revision.get_comment() == 'Created On migration.'
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created for updated rows only."""
    unchanged_project = InvestmentProjectFactory(
        status=InvestmentProject.STATUSES.ongoing)
    changed_project = InvestmentProjectFactory(
        status=InvestmentProject.STATUSES.ongoing)
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,status
{unchanged_project.pk},ongoing
{changed_project.pk},delayed
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_status', bucket, object_key)

    # The row whose status already matched must not produce a revision.
    assert Version.objects.get_for_object(unchanged_project).count() == 0

    changed_versions = Version.objects.get_for_object(changed_project)
    assert changed_versions.count() == 1
    assert changed_versions[0].revision.get_comment() == 'Bulk status update.'
def test_no_project_code(): """Tests that None is returned when a project code is not set.""" # cdms_project_code is set and removed to avoid a DH project code # being generated project = InvestmentProjectFactory(cdms_project_code='P-79661656') project.cdms_project_code = None assert project.project_code is None
def test_associated_advisers_team_members():
    """Tests that get_associated_advisers() includes team members."""
    project = InvestmentProjectFactory()
    team_member = AdviserFactory()
    InvestmentProjectTeamMemberFactory(investment_project=project, adviser=team_member)

    associated_advisers = tuple(project.get_associated_advisers())
    assert team_member in associated_advisers
def test_investment_project_interaction_changed_sync_to_opensearch(
        opensearch_with_signals):
    """
    Test projects get synced to OpenSearch when an interaction's project is changed.

    When an interaction's project is switched to another project, both the old and
    new project should be updated in OpenSearch.
    """
    investment_project_a = InvestmentProjectFactory(name='alpha')
    investment_project_b = InvestmentProjectFactory(name='beta')
    with reversion.create_revision():
        interaction = InvestmentProjectInteractionFactory(
            investment_project=investment_project_a,
        )
    opensearch_with_signals.indices.refresh()
    # Initially only project "alpha" has an interaction indexed against it.
    for project_name, has_interaction in [('alpha', True), ('beta', False)]:
        assert_project_search_latest_interaction(
            has_interaction=has_interaction,
            name=project_name,
        )
    # Move the interaction from "alpha" to "beta" and save inside a revision.
    interaction.investment_project = investment_project_b
    with reversion.create_revision():
        interaction.save()
    opensearch_with_signals.indices.refresh()
    # Both projects should now be re-synced: "alpha" lost the interaction and
    # "beta" gained it.
    for project_name, has_interaction in [('alpha', False), ('beta', True)]:
        assert_project_search_latest_interaction(
            has_interaction=has_interaction,
            name=project_name,
        )
def test_audit_log(s3_stubber):
    """Test that the audit log is created by the companies data migration."""
    project = InvestmentProjectFactory()
    companies = CompanyFactory.create_batch(3)
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,investor_company_id,intermediate_company_id,uk_company_id,uk_company_decided
{project.id},{companies[0].pk},{companies[1].pk},{companies[2].pk},1
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_company', bucket, object_key)

    project.refresh_from_db()
    investor, intermediate, uk_company = companies
    assert project.investor_company == investor
    assert project.intermediate_company == intermediate
    assert project.uk_company == uk_company
    assert project.uk_company_decided is True

    # A single revision with the migration comment should have been recorded.
    versions = Version.objects.get_for_object(project)
    assert len(versions) == 1
    assert versions[0].revision.get_comment() == 'Companies data migration.'
def test_sector_descends_filter(self, hierarchical_sectors, setup_es, sector_level):
    """Test the sector_descends filter."""
    num_sectors = len(hierarchical_sectors)
    hierarchy_pks = [sector.pk for sector in hierarchical_sectors]
    # One project per sector in the hierarchy under test.
    matching_projects = InvestmentProjectFactory.create_batch(
        num_sectors,
        sector_id=factory.Iterator(hierarchy_pks),
    )
    # Noise: projects whose sectors are outside the hierarchy.
    InvestmentProjectFactory.create_batch(
        3,
        sector=factory.LazyFunction(lambda: random_obj_for_queryset(
            Sector.objects.exclude(pk__in=hierarchy_pks),
        )),
    )
    setup_es.indices.refresh()

    url = reverse('api-v3:search:investment_project')
    response = self.api_client.post(
        url,
        {'sector_descends': hierarchical_sectors[sector_level].pk},
    )

    assert response.status_code == status.HTTP_200_OK
    response_data = response.json()
    # Filtering at sector_level should match that sector and its descendants.
    assert response_data['count'] == num_sectors - sector_level
    expected_ids = {project.pk for project in matching_projects[sector_level:]}
    assert {UUID(result['id']) for result in response_data['results']} == expected_ids
def test_updating_gva_multiplier_value(self, get_gva_multiplier):
    """Test updating GVA Multiplier value updates any associated investment projects."""
    gva_multiplier = get_gva_multiplier
    # Create a project forced to resolve to this multiplier, so editing the
    # multiplier later should cascade to the project's gross_value_added.
    with mock.patch(
        'datahub.investment.project.gva_utils.'
        'GrossValueAddedCalculator._get_gva_multiplier',
    ) as mock_get_multiplier:
        mock_get_multiplier.return_value = gva_multiplier
        project = InvestmentProjectFactory(
            foreign_equity_investment=1000,
            investment_type_id=constants.InvestmentType.fdi.value.id,
        )
    url = reverse('admin:investment_gvamultiplier_change', args=(gva_multiplier.pk,))
    response = self.client.get(url, follow=True)
    assert response.status_code == status.HTTP_200_OK
    # Change the multiplier value via the admin change view.
    # NOTE(review): the financial year is mocked to a far-future value —
    # presumably to avoid clashing with real multiplier records; confirm.
    with mock.patch(
        'datahub.investment.project.gva_utils.'
        'GrossValueAddedCalculator._get_gva_multiplier_financial_year',
    ) as mock_get_financial_year:
        mock_get_financial_year.return_value = 3010
        data = {
            'multiplier': 3,
        }
        response = self.client.post(url, data, follow=True)
        assert response.status_code == status.HTTP_200_OK
    gva_multiplier.refresh_from_db()
    assert gva_multiplier.multiplier == 3
    # GVA is recalculated: foreign_equity_investment (1000) * multiplier (3).
    project.refresh_from_db()
    assert project.gross_value_added == 3000
def test_global_restricted_user_with_no_team_cannot_see_projects(self, setup_es):
    """
    Checks that a restricted user that doesn't have a team cannot view projects
    associated with other advisers that don't have teams.
    """
    teamless_adviser = AdviserFactory(dit_team_id=None)
    request_user = create_test_user(
        permission_codenames=['view_associated_investmentproject'],
    )
    api_client = self.create_api_client(user=request_user)

    # Neither project is associated with the requesting user.
    InvestmentProjectFactory()
    InvestmentProjectFactory(created_by=teamless_adviser)
    setup_es.indices.refresh()

    url = reverse('api-v3:search:basic')
    response = api_client.get(
        url,
        data={'term': '', 'entity': 'investment_project'},
    )

    assert response.status_code == status.HTTP_200_OK
    assert response.json()['count'] == 0
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created only for corrected rows."""
    unchanged_project = InvestmentProjectFactory(actual_land_date=date(2017, 2, 2))
    changed_project = InvestmentProjectFactory(actual_land_date=date(2017, 2, 2))
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_actual_land_date,new_actual_land_date
{unchanged_project.pk},2017-02-24,
{changed_project.pk},2017-02-02,2016-08-24
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_actual_land_date', bucket, object_key)

    assert len(Version.objects.get_for_object(unchanged_project)) == 0

    changed_versions = Version.objects.get_for_object(changed_project)
    assert len(changed_versions) == 1
    expected_comment = 'Actual land date migration correction.'
    assert changed_versions[0].revision.get_comment() == expected_comment
def test_if_assigning_project_manager_second_time_doesnt_update_related_columns(self):
    """
    Test that the assignment of project manager for the second time, doesn't update
    who and when made an assignment.
    """
    # Project already has a manager and recorded "first assigned" audit values.
    investment_project = InvestmentProjectFactory(
        project_manager=AdviserFactory(),
        project_manager_first_assigned_on=datetime(2010, 1, 2, 0, 0, tzinfo=utc),
        project_manager_first_assigned_by=AdviserFactory(),
    )
    url = reverse('admin:investment_investmentproject_change', args=(investment_project.pk,))
    data = {}
    # Populate data with required field values so the admin form validates.
    admin_form = InvestmentProjectAdmin(InvestmentProject, site).get_form(mock.Mock())
    for field_name, field in admin_form.base_fields.items():
        if field.required:
            field_value = getattr(investment_project, field_name)
            data[field_name] = field.prepare_value(field_value)
    # Assign a different project manager (the second assignment).
    project_manager = AdviserFactory()
    data['project_manager'] = project_manager.pk
    response = self.client.post(url, data, follow=True)
    assert response.status_code == status.HTTP_200_OK
    investment_project.refresh_from_db()
    assert investment_project.project_manager == project_manager
    # The "first assigned" audit columns must keep their original values rather
    # than being overwritten with the current time/user.
    assert investment_project.project_manager_first_assigned_on != now()
    assert investment_project.project_manager_first_assigned_by != self.user
def test_search_sort_desc_with_null_values(self, setup_es, setup_data):
    """Tests placement of null values in sorted results when order is descending."""
    InvestmentProjectFactory(name='Ether 1', total_investment=1000)
    InvestmentProjectFactory(name='Ether 2')
    setup_es.indices.refresh()

    url = reverse('api-v3:search:investment_project')
    response = self.api_client.post(
        url,
        data={
            'original_query': 'Ether',
            'sortby': 'total_investment:desc',
        },
    )

    assert response.status_code == status.HTTP_200_OK
    assert response.data['count'] == 2
    results = [
        (project['name'], project['total_investment'])
        for project in response.data['results']
    ]
    # The project without a total_investment value sorts after the one with one.
    assert results == [('Ether 1', 1000), ('Ether 2', None)]
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created only for updated rows."""
    delivery_partners = list(InvestmentDeliveryPartner.objects.all()[:10])
    changed_project = InvestmentProjectFactory(delivery_partners=[])
    unchanged_project = InvestmentProjectFactory(delivery_partners=delivery_partners[0:1])
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,delivery_partners
{changed_project.pk},{delivery_partners[2].pk}
{unchanged_project.pk},{delivery_partners[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_delivery_partners', bucket, object_key)

    assert Version.objects.get_for_object(unchanged_project).count() == 0

    changed_versions = Version.objects.get_for_object(changed_project)
    assert changed_versions.count() == 1
    expected_comment = 'Investment delivery partners migration.'
    assert changed_versions[0].revision.get_comment() == expected_comment
def _company_factory(
        num_interactions=0,
        num_contacts=0,
        num_investment_projects=0,
        num_orders=0,
        num_referrals=0,
        num_company_list_items=0,
        num_pipeline_items=0,
):
    """
    Factory for a company that has companies, interactions, investment projects and
    OMIS orders.
    """
    company = CompanyFactory()
    ContactFactory.create_batch(num_contacts, company=company)
    CompanyInteractionFactory.create_batch(num_interactions, company=company)
    CompanyReferralFactory.create_batch(num_referrals, company=company, contact=None)
    OrderFactory.create_batch(num_orders, company=company)
    CompanyListItemFactory.create_batch(num_company_list_items, company=company)
    PipelineItemFactory.create_batch(num_pipeline_items, company=company)

    # Take the first num_investment_projects company-link fields (cycling caps
    # this at the number of distinct fields) and set each of them to the company
    # on the created projects.
    project_fields = islice(cycle(INVESTMENT_PROJECT_COMPANY_FIELDS), num_investment_projects)
    InvestmentProjectFactory.create_batch(
        num_investment_projects,
        **{field: company for field in project_fields},
    )
    return company
def test_refresh_gross_value_added(
    self,
    investment_type,
    sector,
    business_activities,
    multiplier_value,
):
    """Test populating Gross value added data."""
    # Suppress the GVA signal handler so the project starts without a
    # multiplier — the command under test must populate it from scratch.
    with mock.patch(
        'datahub.investment.project.signals.set_gross_value_added_for_investment_project',
    ) as mock_update_gva:
        mock_update_gva.return_value = None
        project = InvestmentProjectFactory(
            sector_id=sector,
            business_activities=business_activities,
            investment_type_id=investment_type,
            foreign_equity_investment=1000,
        )
    assert not project.gva_multiplier
    self._run_command()
    project.refresh_from_db()
    if not multiplier_value:
        # This parameter combination is not expected to resolve a multiplier.
        assert not project.gva_multiplier
    else:
        assert project.gva_multiplier.multiplier == Decimal(multiplier_value)
def test_restricted_user_can_only_list_associated_interactions(self):
    """
    Test that a restricted user can only list interactions for associated
    investment projects.
    """
    creator = AdviserFactory()
    requester = create_test_user(
        permission_codenames=[InteractionPermission.view_associated_investmentproject],
        dit_team=creator.dit_team,
    )
    api_client = self.create_api_client(user=requester)

    company = CompanyFactory()
    non_associated_project = InvestmentProjectFactory()
    associated_project = InvestmentProjectFactory(created_by=creator)

    # Interactions the requester should not see.
    CompanyInteractionFactory.create_batch(3, company=company)
    CompanyInteractionFactory.create_batch(
        3,
        investment_project=non_associated_project,
    )
    # Interactions on the associated project — the only ones visible.
    visible_interactions = CompanyInteractionFactory.create_batch(
        2,
        investment_project=associated_project,
    )

    url = reverse('api-v3:interaction:collection')
    response = api_client.get(url)

    assert response.status_code == status.HTTP_200_OK
    response_data = response.json()
    assert response_data['count'] == 2
    assert {result['id'] for result in response_data['results']} == {
        str(interaction.id) for interaction in visible_interactions
    }
def test_non_restricted_user_can_see_all_projects(self, setup_es, permissions):
    """Test that normal users can see all projects."""
    own_team = TeamFactory()
    other_team = TeamFactory()
    adviser_1 = AdviserFactory(dit_team_id=own_team.id)
    adviser_2 = AdviserFactory(dit_team_id=other_team.id)
    request_user = create_test_user(
        permission_codenames=permissions,
        dit_team=own_team,
    )
    api_client = self.create_api_client(user=request_user)

    project_1 = InvestmentProjectFactory()
    project_2 = InvestmentProjectFactory()
    InvestmentProjectTeamMemberFactory(adviser=adviser_1, investment_project=project_1)
    InvestmentProjectTeamMemberFactory(adviser=adviser_2, investment_project=project_2)
    setup_es.indices.refresh()

    url = reverse('api-v3:search:investment_project')
    response = api_client.post(url, {})

    assert response.status_code == status.HTTP_200_OK
    response_data = response.json()
    assert response_data['count'] == 2
    result_ids = {result['id'] for result in response_data['results']}
    assert result_ids == {str(project_1.pk), str(project_2.pk)}
def test_associated_advisers_specific_roles(field):
    """Tests that get_associated_advisers() includes advisers in specific roles."""
    adviser = AdviserFactory()
    # The parametrised field name determines which role the adviser holds.
    project = InvestmentProjectFactory(**{field: adviser})
    assert adviser in tuple(project.get_associated_advisers())
def test_simulate(s3_stubber, caplog):
    """Test that the command simulates updates if --simulate is passed in."""
    caplog.set_level('ERROR')
    regions = list(UKRegion.objects.all())
    investment_projects = [
        InvestmentProjectFactory(actual_uk_regions=[]),
        InvestmentProjectFactory(actual_uk_regions=[]),
        InvestmentProjectFactory(actual_uk_regions=regions[0:1]),
        InvestmentProjectFactory(actual_uk_regions=regions[1:2]),
        InvestmentProjectFactory(actual_uk_regions=[]),
    ]
    bucket = 'test_bucket'
    object_key = 'test_key'
    # Row 1 references a non-existent project; row 2 has an invalid UUID value;
    # the remaining rows are valid.
    csv_content = f"""id,actual_uk_regions
00000000-0000-0000-0000-000000000000,
{investment_projects[0].pk},invalid-uuid
{investment_projects[1].pk},{regions[2].pk}
{investment_projects[2].pk},"{regions[3].pk},{regions[4].pk}"
{investment_projects[3].pk},
{investment_projects[4].pk},"{regions[3].pk},{regions[4].pk}"
"""
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )
    call_command('update_investment_project_actual_uk_regions', bucket, object_key, simulate=True)
    for project in investment_projects:
        project.refresh_from_db()
    # Bad rows are still validated and logged even in simulate mode.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert 'Must be a valid UUID.' in caplog.text
    assert len(caplog.records) == 2
    # No project should actually have been modified.
    assert [
        list(project.actual_uk_regions.all()) for project in investment_projects
    ] == [
        [],
        [],
        regions[0:1],
        regions[1:2],
        [],
    ]
def test_active_ongoing_or_delayed_projects_only(self, adviser, mock_create_reminder):
    """
    A reminder should only be sent for active ongoing or active delayed projects.
    """
    days = 30
    subscription = UpcomingEstimatedLandDateSubscriptionFactory(
        adviser=adviser,
        reminder_days=[days],
        email_reminders_enabled=True,
    )
    estimated_land_date = self.current_date + relativedelta(months=1)
    # These two (active stage + ongoing/delayed status) should get reminders.
    active_ongoing_project = ActiveInvestmentProjectFactory(
        project_manager=adviser,
        estimated_land_date=estimated_land_date,
        status=InvestmentProject.Status.ONGOING,
    )
    active_delayed_project = ActiveInvestmentProjectFactory(
        project_manager=adviser,
        estimated_land_date=estimated_land_date,
        status=InvestmentProject.Status.DELAYED,
    )
    # Ongoing but at the "verify win" stage — should not get a reminder.
    InvestmentProjectFactory(
        project_manager=adviser,
        estimated_land_date=estimated_land_date,
        stage_id=InvestmentProjectStage.verify_win.value.id,
        status=InvestmentProject.Status.ONGOING,
    )
    # Ongoing but at the "won" stage — should not get a reminder.
    InvestmentProjectFactory(
        project_manager=adviser,
        estimated_land_date=estimated_land_date,
        stage_id=InvestmentProjectStage.won.value.id,
        status=InvestmentProject.Status.ONGOING,
    )
    # Active stage but abandoned status — should not get a reminder.
    ActiveInvestmentProjectFactory(
        project_manager=adviser,
        estimated_land_date=estimated_land_date,
        status=InvestmentProject.Status.ABANDONED,
    )
    generate_estimated_land_date_reminders_for_subscription(
        subscription=subscription,
        current_date=self.current_date,
    )
    # Reminders are created for the active ongoing and active delayed projects.
    mock_create_reminder.assert_has_calls([
        call(
            project=project,
            adviser=adviser,
            days_left=days,
            send_email=True,
            current_date=self.current_date,
        )
        for project in [active_ongoing_project, active_delayed_project]
    ], any_order=True)
def test_run(s3_stubber, caplog):
    """Test that the command updates the specified records (ignoring ones with errors)."""
    caplog.set_level('ERROR')
    regions = list(UKRegion.objects.all())
    investment_projects = [
        InvestmentProjectFactory(actual_uk_regions=[]),
        InvestmentProjectFactory(actual_uk_regions=[]),
        InvestmentProjectFactory(actual_uk_regions=regions[0:1]),
        InvestmentProjectFactory(actual_uk_regions=regions[1:2]),
        InvestmentProjectFactory(actual_uk_regions=[]),
    ]
    bucket = 'test_bucket'
    object_key = 'test_key'
    # The first row references a non-existent project and should be logged as
    # an error; the rest cover empty, single-region and multi-region values.
    csv_content = f"""id,actual_uk_regions
00000000-0000-0000-0000-000000000000,
{investment_projects[0].pk},
{investment_projects[1].pk},{regions[2].pk}
{investment_projects[2].pk},"{regions[3].pk},{regions[4].pk}"
{investment_projects[3].pk},
{investment_projects[4].pk},"{regions[3].pk},{regions[4].pk}"
"""
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )
    call_command('update_investment_project_actual_uk_regions', bucket, object_key)
    for project in investment_projects:
        project.refresh_from_db()
    # Only the missing-project row should have produced an error.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    # Projects that already had regions keep them; empty ones with non-empty
    # CSV values are populated.
    assert [
        list(project.actual_uk_regions.all()) for project in investment_projects
    ] == [
        [],
        regions[2:3],
        regions[0:1],
        regions[1:2],
        regions[3:5],
    ]
def unrelated_objects():
    """
    Create some objects not related to a known company.

    This is used in tests below to make sure objects unrelated to the company
    being merged do not affect the counts of objects that will be affected by
    the merge.
    """
    unrelated_factories = (
        ContactFactory,
        CompanyInteractionFactory,
        OrderFactory,
        InvestmentProjectFactory,
    )
    for factory_cls in unrelated_factories:
        factory_cls.create_batch(2)
def setup_data(setup_es, project_with_max_gross_value_added):
    """Sets up data for the tests."""
    investment_projects = [
        # Ongoing US-investor FDI project with two UK regions and a past
        # estimated/actual land date.
        InvestmentProjectFactory(
            investment_type_id=constants.InvestmentType.fdi.value.id,
            name='abc defg',
            description='investmentproject1',
            estimated_land_date=datetime.date(2011, 6, 13),
            actual_land_date=datetime.date(2010, 8, 13),
            investor_company=CompanyFactory(
                address_country_id=constants.Country.united_states.value.id,
            ),
            status=InvestmentProject.STATUSES.ongoing,
            uk_region_locations=[
                constants.UKRegion.east_midlands.value.id,
                constants.UKRegion.isle_of_man.value.id,
            ],
            level_of_involvement_id=Involvement.hq_and_post_only.value.id,
            likelihood_to_land_id=LikelihoodToLand.high.value.id,
            foreign_equity_investment=100000,
        ),
        # Delayed Japan-investor project originating from Ireland with a
        # far-future land date and assigned project advisers.
        InvestmentProjectFactory(
            investment_type_id=constants.InvestmentType.fdi.value.id,
            name='delayed project',
            description='investmentproject2',
            estimated_land_date=datetime.date(2057, 6, 13),
            actual_land_date=datetime.date(2047, 8, 13),
            country_investment_originates_from_id=constants.Country.ireland.value.id,
            investor_company=CompanyFactory(
                address_country_id=constants.Country.japan.value.id,
            ),
            project_manager=AdviserFactory(),
            project_assurance_adviser=AdviserFactory(),
            fdi_value_id=constants.FDIValue.higher.value.id,
            status=InvestmentProject.STATUSES.delayed,
            uk_region_locations=[
                constants.UKRegion.north_west.value.id,
            ],
            level_of_involvement_id=Involvement.no_involvement.value.id,
            likelihood_to_land_id=LikelihoodToLand.medium.value.id,
        ),
        # Fixture-supplied project with the maximum gross value added.
        project_with_max_gross_value_added,
        # Minimal project with no estimated land date or involvement level.
        InvestmentProjectFactory(
            name='new project',
            description='investmentproject4',
            country_investment_originates_from_id=constants.Country.canada.value.id,
            estimated_land_date=None,
            level_of_involvement_id=None,
            likelihood_to_land_id=LikelihoodToLand.low.value.id,
        ),
    ]
    setup_es.indices.refresh()
    yield investment_projects
def test_load_investment_projects():
    """Tests that investment projects are loaded to FDIDashboard table."""
    InvestmentProjectFactory.create_batch(10)

    etl = ETLInvestmentProjects(destination=MIInvestmentProject)
    updated, created = etl.load()
    # Nothing pre-existed, so all 10 projects are created and none updated.
    assert (updated, created) == (0, 10)

    # Every dashboard row must match the corresponding source row.
    for row in MIInvestmentProject.objects.values(*etl.COLUMNS).all():
        assert etl.get_rows().get(pk=row['dh_fdi_project_id']) == row
def setup_data():
    """Sets up the data and makes the ES client available."""
    # Two contacts with distinct companies, used for contact/company search.
    ContactFactory(
        first_name='abc',
        last_name='defg',
        company__name='name0',
        company__trading_names=['trading0'],
    )
    ContactFactory(
        first_name='first',
        last_name='last',
        company__name='name1',
        company__trading_names=['trading1'],
    )
    # Two investment projects with fully-populated adviser roles and no
    # client contacts; one matching the 'abc defg' search term.
    InvestmentProjectFactory(
        name='abc defg',
        description='investmentproject1',
        estimated_land_date=datetime.datetime(2011, 6, 13, 9, 44, 31, 62870),
        project_manager=AdviserFactory(first_name='name 0', last_name='surname 0'),
        project_assurance_adviser=AdviserFactory(first_name='name 1', last_name='surname 1'),
        investor_company=CompanyFactory(name='name3', trading_names=['trading3']),
        client_relationship_manager=AdviserFactory(first_name='name 2', last_name='surname 2'),
        referral_source_adviser=AdviserFactory(first_name='name 3', last_name='surname 3'),
        client_contacts=[],
    )
    InvestmentProjectFactory(
        description='investmentproject2',
        estimated_land_date=datetime.datetime(2057, 6, 13, 9, 44, 31, 62870),
        project_manager=AdviserFactory(first_name='name 4', last_name='surname 4'),
        project_assurance_adviser=AdviserFactory(first_name='name 5', last_name='surname 5'),
        investor_company=CompanyFactory(name='name4', trading_names=['trading4']),
        client_relationship_manager=AdviserFactory(first_name='name 6', last_name='surname 6'),
        referral_source_adviser=AdviserFactory(first_name='name 7', last_name='surname 7'),
        client_contacts=[],
    )
    # Two similarly-named companies differing only by address country.
    country_uk = constants.Country.united_kingdom.value.id
    country_us = constants.Country.united_states.value.id
    CompanyFactory(
        name='abc defg ltd',
        trading_names=['abc defg trading ltd'],
        address_1='1 Fake Lane',
        address_town='Downtown',
        address_country_id=country_uk,
    )
    CompanyFactory(
        name='abc defg us ltd',
        trading_names=['abc defg us trading ltd'],
        address_1='1 Fake Lane',
        address_town='Downtown',
        address_country_id=country_us,
        registered_address_country_id=country_us,
    )
def test_run(s3_stubber):
    """Test that the command updates the relevant records ignoring ones with errors."""
    sectors = SectorFactory.create_batch(5)
    # NOTE: the first CSV row below references a non-existent project, so that
    # row should fail without affecting the others.
    investment_projects = [
        # sector should get updated
        InvestmentProjectFactory(sector_id=sectors[0].id),
        # sector should get updated
        InvestmentProjectFactory(sector_id=None),
        # sector should not get updated (old and new values are identical)
        InvestmentProjectFactory(sector_id=None),
        # should be ignored (not present in the CSV)
        InvestmentProjectFactory(sector_id=sectors[3].id),
        # should be skipped because of an error (invalid sector IDs in the CSV)
        InvestmentProjectFactory(sector_id=sectors[4].id),
    ]
    new_sectors = SectorFactory.create_batch(5)
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector,new_sector
00000000-0000-0000-0000-000000000000,NULL,NULL
{investment_projects[0].id},{sectors[0].id},{new_sectors[0].id}
{investment_projects[1].id},NULL,{new_sectors[1].id}
{investment_projects[2].id},{new_sectors[2].id},{new_sectors[2].id}
{investment_projects[4].id},invalid_id,another_invalid_id
"""
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(bytes(csv_content, encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )
    call_command('update_investment_project_sector', bucket, object_key)
    for investment_project in investment_projects:
        investment_project.refresh_from_db()
    assert investment_projects[0].sector == new_sectors[0]
    assert investment_projects[1].sector == new_sectors[1]
    # Project 2 keeps its (None) sector: its CSV row was a no-op.
    assert investment_projects[2].sector is None
    # Projects 3 and 4 are untouched (absent from CSV / errored row).
    assert investment_projects[3].sector == sectors[3]
    assert investment_projects[4].sector == sectors[4]
def test_run_mi_investment_project_etl_pipeline():
    """Tests that run_mi_investment_project_etl_pipeline copy data to MIInvestmentProject table."""
    # Create projects with two different actual land dates.
    InvestmentProjectFactory.create_batch(5, actual_land_date=date(2018, 4, 1))
    InvestmentProjectFactory.create_batch(5, actual_land_date=date(2018, 3, 31))

    updated, created = run_mi_investment_project_etl_pipeline()
    # Nothing pre-existed, so all 10 projects are created and none updated.
    assert (updated, created) == (0, 10)

    # Every dashboard row must match the corresponding source row.
    etl = ETLInvestmentProjects(destination=MIInvestmentProject)
    dashboard_rows = MIInvestmentProject.objects.values(
        *ETLInvestmentProjects.COLUMNS).all()
    for row in dashboard_rows:
        assert etl.get_rows().get(pk=row['dh_fdi_project_id']) == row
def test_investment_project_auto_updates_to_es(es_with_signals):
    """Tests if investment project gets synced to Elasticsearch."""
    project = InvestmentProjectFactory()
    updated_name = 'even_harder_to_find_investment_project'
    project.name = updated_name
    project.save()

    es_with_signals.indices.refresh()

    # Searching by the updated name should find exactly one document.
    results = get_search_by_entity_query(
        InvestmentProject,
        term='',
        filter_data={'name': updated_name},
    ).execute()
    assert results.hits.total == 1
def test_run(s3_stubber):
    """Test that the command updates the relevant records ignoring ones with errors."""
    # Note: the first CSV row below references a non-existent project.
    projects = [
        InvestmentProjectFactory(),  # valid row — created_on should be updated
        InvestmentProjectFactory(),  # absent from the CSV — untouched
        InvestmentProjectFactory(),  # row has an invalid date — untouched
    ]
    original_created_on = [project.created_on for project in projects]
    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,createdon
00000000-0000-0000-0000-000000000000,2016-09-29 14:03:20.000
{projects[0].id},2015-09-29 11:03:20.000
{projects[2].id},invalid_date
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_created_on', bucket, object_key)

    for project in projects:
        project.refresh_from_db()

    assert projects[0].created_on == datetime(2015, 9, 29, 11, 3, 20, tzinfo=utc)
    assert projects[1].created_on == original_created_on[1]
    assert projects[2].created_on == original_created_on[2]