def test_sector_descends_filter(self, hierarchical_sectors, setup_es, sector_level):
    """Test the sector_descends filter."""
    hierarchy_pks = [node.pk for node in hierarchical_sectors]
    matching_projects = InvestmentProjectFactory.create_batch(
        len(hierarchical_sectors),
        sector_id=factory.Iterator(hierarchy_pks),
    )
    # Noise projects whose sectors fall outside the hierarchy under test.
    InvestmentProjectFactory.create_batch(
        3,
        sector=factory.LazyFunction(lambda: random_obj_for_queryset(
            Sector.objects.exclude(pk__in=hierarchy_pks),
        )),
    )
    setup_es.indices.refresh()

    search_url = reverse('api-v3:search:investment_project')
    response = self.api_client.post(
        search_url,
        {'sector_descends': hierarchical_sectors[sector_level].pk},
    )

    assert response.status_code == status.HTTP_200_OK
    payload = response.json()
    assert payload['count'] == len(hierarchical_sectors) - sector_level
    returned_pks = {UUID(result['id']) for result in payload['results']}
    assert returned_pks == {project.pk for project in matching_projects[sector_level:]}
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created."""
    projects = InvestmentProjectFactory.create_batch(2)

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""Id,comments
{projects[0].id},Comment 1
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_comments', bucket, object_key)

    # Only the project listed in the CSV should have a revision recorded.
    updated_versions = Version.objects.get_for_object(projects[0])
    assert len(updated_versions) == 1
    assert updated_versions[0].revision.get_comment() == 'Comments migration.'

    untouched_versions = Version.objects.get_for_object(projects[1])
    assert len(untouched_versions) == 0
def test_simulate(s3_stubber):
    """Test that the command only simulates the actions if --simulate is passed in."""
    projects = InvestmentProjectFactory.create_batch(2)
    original_comments = [project.comments for project in projects]

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""Id,comments
{projects[0].id},Comment 1
{projects[1].id},Comment 2
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(bytes(csv_content, encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_comments', bucket, object_key, simulate=True)

    for project in projects:
        project.refresh_from_db()

    # In simulation mode no comment should have been persisted.
    assert projects[0].comments == original_comments[0]
    assert projects[1].comments == original_comments[1]
def test_investment_project_dbmodels_to_es_documents(setup_es):
    """Tests conversion of db models to Elasticsearch documents."""
    db_projects = InvestmentProjectFactory.create_batch(2)

    documents = ESInvestmentProject.db_objects_to_es_documents(db_projects)

    # One ES document is produced per database object.
    assert len(list(documents)) == len(db_projects)
def test_run(s3_stubber, caplog):
    """
    Test that the command updates the specified records, checking if current
    business activities match the old business activities in the CSV.
    """
    caplog.set_level('ERROR')

    activities = list(InvestmentBusinessActivity.objects.all())
    initial_activity_sets = [[], [], activities[0:2], activities[2:3]]
    projects = InvestmentProjectFactory.create_batch(4)
    for project, activity_set in zip(projects, initial_activity_sets):
        project.business_activities.set(activity_set)

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_business_activities,new_business_activities
00000000-0000-0000-0000-000000000000,null,null
{projects[0].pk},null,null
{projects[1].pk},null,{activities[2].pk}
{projects[2].pk},"{activities[0].pk},{activities[1].pk}","{activities[0].pk},{activities[1].pk}"
{projects[3].pk},{activities[5].pk},"{activities[0].pk},{activities[1].pk},{activities[2].pk},{activities[3].pk}"
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_business_activities', bucket, object_key)

    for project in projects:
        project.refresh_from_db()

    # The non-existent project ID is the only error logged.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1

    assert [
        list(project.business_activities.all()) for project in projects
    ] == [
        [],
        activities[2:3],
        activities[0:2],
        activities[2:3],  # Old business activities did not match
    ]
def test_simulate(s3_stubber):
    """Test that the command only simulates the actions if --simulate is passed in."""
    activities = ReferralSourceActivityFactory.create_batch(2)
    marketings = ReferralSourceMarketingFactory.create_batch(2)
    projects = InvestmentProjectFactory.create_batch(
        2,
        referral_source_activity_id=factory.Iterator(
            [activity.id for activity in activities],
        ),
        referral_source_activity_marketing=factory.Iterator(marketings),
    )
    replacement_activity = ReferralSourceActivityFactory()
    replacement_marketing = ReferralSourceMarketingFactory()

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,referral_source_activity_id,referral_source_activity_marketing_id
{projects[0].id},{replacement_activity.id},{replacement_marketing.id}
{projects[1].id},{replacement_activity.id},NULL
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(bytes(csv_content, encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command(
        'update_investment_project_referral_source_activity_marketing',
        bucket,
        object_key,
        simulate=True,
    )

    for project in projects:
        project.refresh_from_db()

    # Nothing should change in simulation mode.
    for project, activity, marketing in zip(projects, activities, marketings):
        assert project.referral_source_activity == activity
        assert project.referral_source_activity_marketing == marketing
def test_run(s3_stubber, caplog):
    """Test that the command updates the specified records (ignoring ones with errors)."""
    caplog.set_level('ERROR')

    blank_flags = [False, False, True, True]
    land_dates = [date(2016, 2, 20), None, None, date(2016, 8, 23)]
    projects = InvestmentProjectFactory.create_batch(
        4,
        allow_blank_estimated_land_date=factory.Iterator(blank_flags),
        estimated_land_date=factory.Iterator(land_dates),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,allow_blank_estimated_land_date,estimated_land_date
00000000-0000-0000-0000-000000000000,true,null
{projects[0].pk},true,null
{projects[1].pk},false,2018-01-01
{projects[2].pk},false,2017-01-05
{projects[3].pk},true,2016-08-23
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_estimated_land_date', bucket, object_key)

    for project in projects:
        project.refresh_from_db()

    # The non-existent project ID is the only error logged.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1

    assert [project.allow_blank_estimated_land_date for project in projects] == [
        True,
        False,
        False,
        True,
    ]
    assert [project.estimated_land_date for project in projects] == [
        None,
        date(2018, 1, 1),
        date(2017, 1, 5),
        date(2016, 8, 23),
    ]
def test_simulate(s3_stubber, caplog):
    """Test that the command simulates updates if --simulate is passed in."""
    caplog.set_level('ERROR')

    original_dates = [date(2016, 2, 20), None, date(2013, 6, 13), date(2016, 8, 23)]
    projects = InvestmentProjectFactory.create_batch(
        4,
        actual_land_date=factory.Iterator(original_dates),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_actual_land_date,new_actual_land_date
00000000-0000-0000-0000-000000000000,2016-01-20,null
{projects[0].pk},2016-02-20,null
{projects[1].pk},2016-02-28,2016-03-28
{projects[2].pk},2013-06-13,
{projects[3].pk},2016-08-23,2016-08-24
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_actual_land_date', bucket, object_key, simulate=True)

    for project in projects:
        project.refresh_from_db()

    # Errors are still reported in simulation mode, but nothing is saved.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    assert [project.actual_land_date for project in projects] == original_dates
def test_simulate(s3_stubber):
    """Test that the command only simulates the actions if --simulate is passed in."""
    original_companies = CompanyFactory.create_batch(3)
    projects = InvestmentProjectFactory.create_batch(
        2,
        investor_company=original_companies[0],
        intermediate_company=original_companies[1],
        uk_company=original_companies[2],
        uk_company_decided=True,
    )
    csv_companies = CompanyFactory.create_batch(3)

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,investor_company_id,intermediate_company_id,uk_company_id,uk_company_decided
{projects[0].id},{csv_companies[0].pk},{csv_companies[1].pk},{csv_companies[2].pk},1
{projects[1].id},{csv_companies[0].pk},{csv_companies[1].pk},{csv_companies[2].pk},1
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(bytes(csv_content, encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_company', bucket, object_key, simulate=True)

    for project in projects:
        project.refresh_from_db()

    # All company links should be untouched in simulation mode.
    for project in projects:
        assert project.investor_company == original_companies[0]
        assert project.intermediate_company == original_companies[1]
        assert project.uk_company == original_companies[2]
        assert project.uk_company_decided is True
def test_run(s3_stubber, caplog):
    """Test that the command updates the specified records (ignoring ones with errors)."""
    caplog.set_level('WARNING')

    stage_ids = [
        InvestmentProjectStage.prospect.value.id,
        InvestmentProjectStage.assign_pm.value.id,
        InvestmentProjectStage.active.value.id,
        InvestmentProjectStage.verify_win.value.id,
        InvestmentProjectStage.won.value.id,  # won projects should not be updated
    ]
    original_dates = [
        date(2016, 2, 20),
        None,
        date(2013, 6, 13),
        date(2016, 8, 23),
        date(2016, 8, 23),
    ]
    projects = InvestmentProjectFactory.create_batch(
        5,
        stage_id=factory.Iterator(stage_ids),
        actual_land_date=factory.Iterator(original_dates),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_actual_land_date,new_actual_land_date
00000000-0000-0000-0000-000000000000,2016-01-20,null
{projects[0].pk},2016-02-20,null
{projects[1].pk},2016-02-28,2016-03-28
{projects[2].pk},2013-06-13,
{projects[3].pk},2016-08-23,2016-08-24
{projects[4].pk},2016-08-23,2016-08-24
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_actual_land_date', bucket, object_key)

    for project in projects:
        project.refresh_from_db()

    # Exactly two warnings: the missing project and the skipped won-stage project.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert 'Not updating project in Won stage' in caplog.text
    assert len(caplog.records) == 2

    assert [project.actual_land_date for project in projects] == [
        None,
        None,
        None,
        date(2016, 8, 24),
        date(2016, 8, 23),
    ]
def test_run_ignore_old_regions(s3_stubber, caplog):
    """
    Test that the command updates the specified records (ignoring the old
    regions column).
    """
    caplog.set_level('ERROR')

    regions = list(UKRegion.objects.all())
    initial_blank_flags = [False, True, False, True]
    initial_region_sets = [[], [], regions[0:2], regions[2:3]]
    projects = InvestmentProjectFactory.create_batch(
        4,
        allow_blank_possible_uk_regions=factory.Iterator(initial_blank_flags),
    )
    for project, region_set in zip(projects, initial_region_sets):
        project.uk_region_locations.set(region_set)

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,allow_blank_possible_uk_regions,uk_region_locations
00000000-0000-0000-0000-000000000000,true,null
{projects[0].pk},true,null
{projects[1].pk},false,{regions[2].pk}
{projects[2].pk},false,"{regions[0].pk},{regions[1].pk}"
{projects[3].pk},true,"{regions[0].pk},{regions[1].pk},{regions[2].pk},{regions[3].pk}"
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command(
        'update_investment_project_possible_uk_regions',
        bucket,
        object_key,
        ignore_old_regions=True,
    )

    for project in projects:
        project.refresh_from_db()

    # The non-existent project ID is the only error logged.
    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1

    assert [
        project.allow_blank_possible_uk_regions for project in projects
    ] == [
        True,
        False,
        False,
        True,
    ]
    assert [
        list(project.uk_region_locations.all()) for project in projects
    ] == [
        [],
        regions[2:3],
        regions[0:2],
        regions[0:4],
    ]