def test_audit_log(s3_stubber):
    """Test that reversion revisions are created."""
    unchanged_sector = SectorFactory(segment='sector_1')
    changed_sector = SectorFactory(segment='sector_2')

    bucket = 'test_bucket'
    object_key = 'test_key'
    # One row keeps the segment as-is, the other renames it.
    rows = [
        'id,old_sector_segment,new_sector_segment',
        f'{unchanged_sector.pk},{unchanged_sector.segment},{unchanged_sector.segment}',
        f'{changed_sector.pk},{changed_sector.segment},sector_new',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_segment', bucket, object_key)

    # No revision for the untouched sector, exactly one for the updated one.
    assert Version.objects.get_for_object(unchanged_sector).count() == 0
    changed_versions = Version.objects.get_for_object(changed_sector)
    assert changed_versions.count() == 1
    assert changed_versions[0].revision.get_comment() == 'Sector segment correction.'
def test_no_change(s3_stubber, caplog):
    """
    Test that the command ignores records that haven't changed or records
    with incorrect current values.
    """
    caplog.set_level('WARNING')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([s.pk for s in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Row 1: genuine change. Row 2: old == new. Row 3: wrong current value.
    rows = [
        'id,old_sector_id,new_sector_id',
        f'{pipeline_items[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}',
        f'{pipeline_items[1].pk},{old_sectors[1].pk},{old_sectors[1].pk}',
        f'{pipeline_items[2].pk},00000000-0000-0000-0000-000000000000,{new_sectors[2].pk}',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    for pipeline_item in pipeline_items:
        pipeline_item.refresh_from_db()

    message_template = 'Not updating PipelineItem {} as its sector has not changed'
    assert message_template.format(pipeline_items[1]) in caplog.text
    assert message_template.format(pipeline_items[2]) in caplog.text
    assert len(caplog.records) == 2

    # Only the first item was actually updated.
    assert [item.sector for item in pipeline_items] == [
        new_sectors[0],
        old_sectors[1],
        old_sectors[2],
    ]
def test_non_existent_sector(s3_stubber, caplog):
    """Test that the command logs an error when the sector PK does not exist."""
    caplog.set_level('ERROR')

    # Fixed: consistent segment naming ('sector_3' rather than the 'section_3'
    # typo) and no shadowing of the segment list by the created Sector objects.
    segments = ['sector_1', 'sector_2', 'sector_3']
    old_parent_segments = [
        'sector_1_parent_old', 'sector_2_parent_old', 'sector_3_parent_old',
    ]
    new_parent_segments = [
        'sector_1_parent_new', 'sector_2_parent_new', 'sector_3_parent_new',
    ]
    old_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_parent_segments),
    )
    new_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(new_parent_segments),
    )
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(segments),
        parent=factory.Iterator(old_parents),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # The last row references a sector PK that does not exist.
    csv_content = f"""id,old_parent_id,new_parent_id
{sectors[0].pk},{old_parents[0].pk},{new_parents[0].pk}
{sectors[1].pk},{old_parents[1].pk},{new_parents[1].pk}
00000000-0000-0000-0000-000000000000,{old_parents[2].pk},{new_parents[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_sector_parent', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    assert 'Sector matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    # The missing-PK row is skipped; the other two sectors get new parents.
    assert [sector.parent.pk for sector in sectors] == [
        new_parents[0].pk,
        new_parents[1].pk,
        old_parents[2].pk,
    ]
def test_sector_name_level_two():
    """Test the sector path for a sector two levels deep."""
    root = SectorFactory()
    middle = SectorFactory(parent=root)
    leaf = SectorFactory(parent=middle)

    # The name is the full path of segments joined by ' : '.
    expected = f'{root.segment} : {middle.segment} : {leaf.segment}'
    assert leaf.name == expected
def test_sector_name_level_recursive_unsaved():
    """
    Test that accessing the path of a sector raises an exception when its
    parent points at itself.
    """
    sector = SectorFactory()
    # Make the (unsaved) hierarchy self-referential.
    sector.parent = sector

    with pytest.raises(DataHubError):
        sector.name
def test_sector_name_recursive_via_parent_unsaved():
    """
    Test that accessing the path of a sector raises an exception when it's
    part of a recursive hierarchy.
    """
    parent = SectorFactory()
    sector = SectorFactory(parent=parent)
    # Close the loop (unsaved): parent -> sector -> parent.
    parent.parent = sector

    with pytest.raises(DataHubError):
        sector.name
def test_non_existent_sector(s3_stubber, caplog):
    """Test that the command logs an error when the sector PK does not exist."""
    caplog.set_level('ERROR')

    SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector1', 'sector2', 'sector3']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    header = 'sector_id,sector,fdi_sic_grouping_id,fdi_sic_grouping_name'
    # An unsaved Sector still gets a default PK, so this id is absent from the DB.
    missing_sector_id = Sector(segment='does not exist').id
    row = f'{missing_sector_id},path1,{fdi_sic_groupings[0].pk},name1'
    csv_content = f'{header}\n{row}'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    count_before = InvestmentSector.objects.count()

    call_command('create_investment_sector', bucket, object_key)

    # Nothing was created and the failure was logged once.
    assert InvestmentSector.objects.count() == count_before
    assert len(caplog.records) == 1
    assert 'Sector matching query does not exist' in caplog.text
def test_annotated_sector_name_level_one():
    """Test the sector name annotation for a sector one level deep."""
    parent = SectorFactory()
    child = SectorFactory(parent=parent)

    annotated = (
        Sector.objects
        .annotate(name_annotation=get_sector_name_subquery())
        .get(pk=child.pk)
    )

    assert annotated.name_annotation == child.name
def test_non_existent_project(s3_stubber, caplog):
    """Test that the command logs an error when the investment project PK does not exist."""
    caplog.set_level('ERROR')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    projects = InvestmentProjectFactory.create_batch(
        3,
        sector_id=factory.Iterator([s.pk for s in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # The last row references a project PK that does not exist.
    rows = [
        'id,old_sector_id,new_sector_id',
        f'{projects[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}',
        f'{projects[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}',
        f'00000000-0000-0000-0000-000000000000,{old_sectors[2].pk},{new_sectors[2].pk}',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector_disabled_signals', bucket, object_key)

    for project in projects:
        project.refresh_from_db()

    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    # The unknown-PK row is skipped; the other two projects are updated.
    assert [project.sector for project in projects] == [
        new_sectors[0],
        new_sectors[1],
        old_sectors[2],
    ]
def test_duplicate_sector(s3_stubber, caplog):
    """Test that the command logs an error when the sector PK already exists."""
    caplog.set_level('ERROR')

    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    segments = ['segment_1', 'segment_2', 'segment_3']
    clusters = SectorClusterFactory.create_batch(
        3,
        name=factory.Iterator(['cluster_1', 'cluster_2', 'cluster_3']),
    )
    parent_sector = SectorFactory()
    # The third PK is already taken before the command runs.
    duplicate_sector = SectorFactory(id=sector_pks[2])

    bucket = 'test_bucket'
    object_key = 'test_key'
    rows = [
        'id,segment,sector_cluster_id,parent_id',
        f'{sector_pks[0]},{segments[0]},{clusters[0].pk},{parent_sector.pk}',
        f'{sector_pks[1]},{segments[1]},{clusters[1].pk},{parent_sector.pk}',
        f'{duplicate_sector.pk},{segments[2]},{clusters[2].pk},{parent_sector.pk}',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('create_sector', bucket, object_key)

    sectors = Sector.objects.filter(pk__in=sector_pks).order_by('pk')
    assert len(sectors) == 3
    assert f'Key (id)=({duplicate_sector.pk}) already exists' in caplog.text
    assert len(caplog.records) == 1

    # The two new rows were created; the duplicate was left untouched.
    assert [str(s.pk) for s in sectors] == sector_pks
    assert [sectors[0].segment, sectors[1].segment] == segments[:2]
    assert [sectors[0].sector_cluster, sectors[1].sector_cluster] == clusters[:2]
    assert [sectors[0].parent, sectors[1].parent] == [parent_sector, parent_sector]
def test_annotated_sector_name_via_relation():
    """Test the sector name annotation via a relation."""
    grandparent = SectorFactory()
    parent = SectorFactory(parent=grandparent)
    sector = SectorFactory(parent=parent)

    # Annotate the parent's name via the 'parent' relation.
    annotated = (
        Sector.objects
        .annotate(parent_name_annotation=get_sector_name_subquery('parent'))
        .get(pk=sector.pk)
    )

    assert annotated.parent_name_annotation == parent.name
def test_annotated_sector_name_level_two():
    """Test the sector name annotation for a sector two levels deep."""
    root = SectorFactory()
    middle = SectorFactory(parent=root)
    leaf = SectorFactory(parent=middle)

    annotated = (
        Sector.objects
        .annotate(name_annotation=get_sector_name_subquery())
        .get(pk=leaf.pk)
    )

    assert annotated.name_annotation == leaf.name
def test_root_node_to_child_node(s3_stubber):
    """Test that the command changes a root node to a child node if parent is provided."""
    # Fixed: consistent segment naming ('sector_3' rather than the 'section_3'
    # typo) and no shadowing of the segment list by the created Sector objects.
    segments = ['sector_1', 'sector_2', 'sector_3']
    old_parent_segments = [
        'sector_1_parent_old', 'sector_2_parent_old', 'sector_3_parent_old',
    ]
    new_parent_segments = [
        'sector_1_parent_new', 'sector_2_parent_new', 'sector_3_parent_new',
    ]
    # Only two old parents are created; the third sector starts as a root
    # node (parent=None).
    old_parents = SectorFactory.create_batch(
        2,
        segment=factory.Iterator(old_parent_segments),
    )
    old_parents.append(None)
    new_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(new_parent_segments),
    )
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(segments),
        parent=factory.Iterator(old_parents),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # The third row has an empty old_parent_id: a root node gaining a parent.
    csv_content = f"""id,old_parent_id,new_parent_id
{sectors[0].pk},{old_parents[0].pk},{new_parents[0].pk}
{sectors[1].pk},{old_parents[1].pk},{new_parents[1].pk}
{sectors[2].pk},,{new_parents[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_sector_parent', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    # All three sectors, including the former root node, have new parents.
    assert [sector.parent.pk for sector in sectors] == [parent.pk for parent in new_parents]
def test_run(s3_stubber):
    """Test that the command updates the relevant records ignoring ones with errors."""
    sectors = SectorFactory.create_batch(5)
    investment_projects = [
        # sector should get updated
        InvestmentProjectFactory(sector_id=sectors[0].id),
        # sector should get updated (old value is NULL in the CSV)
        InvestmentProjectFactory(sector_id=None),
        # sector should not get updated (current value doesn't match the CSV)
        InvestmentProjectFactory(sector_id=None),
        # not in the CSV, so should be ignored
        InvestmentProjectFactory(sector_id=sectors[3].id),
        # should be skipped because its CSV row has invalid sector ids
        InvestmentProjectFactory(sector_id=sectors[4].id),
    ]
    new_sectors = SectorFactory.create_batch(5)

    bucket = 'test_bucket'
    object_key = 'test_key'
    # The first data row references a project that doesn't exist, so it fails.
    rows = [
        'id,old_sector,new_sector',
        '00000000-0000-0000-0000-000000000000,NULL,NULL',
        f'{investment_projects[0].id},{sectors[0].id},{new_sectors[0].id}',
        f'{investment_projects[1].id},NULL,{new_sectors[1].id}',
        f'{investment_projects[2].id},{new_sectors[2].id},{new_sectors[2].id}',
        f'{investment_projects[4].id},invalid_id,another_invalid_id',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector', bucket, object_key)

    for investment_project in investment_projects:
        investment_project.refresh_from_db()

    assert investment_projects[0].sector == new_sectors[0]
    assert investment_projects[1].sector == new_sectors[1]
    assert investment_projects[2].sector is None
    assert investment_projects[3].sector == sectors[3]
    assert investment_projects[4].sector == sectors[4]
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    # Fixed: consistent segment naming ('sector_3' rather than the 'section_3'
    # typo) and no shadowing of the segment list by the created Sector objects.
    segments = ['sector_1', 'sector_2', 'sector_3']
    old_parent_segments = [
        'sector_1_parent_old', 'sector_2_parent_old', 'sector_3_parent_old',
    ]
    new_parent_segments = [
        'sector_1_parent_new', 'sector_2_parent_new', 'sector_3_parent_new',
    ]
    old_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_parent_segments),
    )
    new_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(new_parent_segments),
    )
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(segments),
        parent=factory.Iterator(old_parents),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_parent_id,new_parent_id
{sectors[0].pk},{old_parents[0].pk},{new_parents[0].pk}
{sectors[1].pk},{old_parents[1].pk},{new_parents[1].pk}
{sectors[2].pk},{old_parents[2].pk},{new_parents[2].pk}
"""

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    call_command('update_sector_parent', bucket, object_key, simulate=True)

    for sector in sectors:
        sector.refresh_from_db()

    # In simulate mode nothing is persisted: parents are unchanged.
    assert [sector.parent.pk for sector in sectors] == [parent.pk for parent in old_parents]
def test_non_existent_order(s3_stubber, caplog):
    """Test that the command logs an error when the order PK does not exist."""
    caplog.set_level('ERROR')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    orders = OrderFactory.create_batch(
        3,
        reference=factory.Iterator(['order_1', 'order_2', 'order_3']),
        sector_id=factory.Iterator([s.pk for s in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # The last row references an order PK that does not exist.
    rows = [
        'id,old_sector_id,new_sector_id',
        f'{orders[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}',
        f'{orders[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}',
        f'00000000-0000-0000-0000-000000000000,{old_sectors[2].pk},{new_sectors[2].pk}',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_order_sector', bucket, object_key)

    for order in orders:
        order.refresh_from_db()

    assert 'Order matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    # The unknown-PK row is skipped; the other two orders are updated.
    assert [order.sector for order in orders] == [
        new_sectors[0],
        new_sectors[1],
        old_sectors[2],
    ]
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    projects = InvestmentProjectFactory.create_batch(
        3,
        sector_id=factory.Iterator([s.pk for s in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    rows = ['id,old_sector_id,new_sector_id'] + [
        f'{project.pk},{old.pk},{new.pk}'
        for project, old, new in zip(projects, old_sectors, new_sectors)
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command(
        'update_investment_project_sector_disabled_signals',
        bucket,
        object_key,
        simulate=True,
    )

    for project in projects:
        project.refresh_from_db()

    # In simulate mode nothing is persisted: sectors are unchanged.
    assert [project.sector for project in projects] == old_sectors
def test_get_sector_cluster_from_project_sector(sector_cluster_id, expected):
    """Tests that sector cluster can be mapped from sector."""
    parent_sector = SectorFactory(segment='Cats', sector_cluster_id=sector_cluster_id)
    child_sector = SectorFactory(segment='Rockets', parent=parent_sector)
    InvestmentProjectFactory(sector_id=child_sector.id)

    query = InvestmentProject.objects.annotate(
        sector_name=get_sector_name_subquery('sector'),
        sector_cluster=get_sector_cluster_expression('sector'),
    ).values('sector_cluster')

    # The cluster comes from the sector hierarchy, not the project itself.
    for row in query.all():
        assert row['sector_cluster'] == expected
def test_happy_path(s3_stubber):
    """Test that the command updates the specified records."""
    old_segments = ['sector_1_old', 'sector_2_old', 'sector_3_old']
    new_segments = ['sector_1_new', 'sector_2_new', 'sector_3_new']
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_segments),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    rows = ['id,old_sector_segment,new_sector_segment'] + [
        f'{sector.pk},{old},{new}'
        for sector, old, new in zip(sectors, old_segments, new_segments)
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_segment', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    assert [sector.segment for sector in sectors] == new_segments
def test_audit_log(s3_stubber):
    """Test that audit log is being created."""
    new_sector = SectorFactory()
    investment_project = InvestmentProjectFactory()
    old_sector = investment_project.sector

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = (
        'id,old_sector,new_sector\n'
        f'{investment_project.id},{old_sector.id},{new_sector.id}\n'
    )

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector', bucket, object_key)

    investment_project.refresh_from_db()
    assert investment_project.sector == new_sector

    # Exactly one revision with the migration comment is recorded.
    versions = Version.objects.get_for_object(investment_project)
    assert len(versions) == 1
    assert versions[0].revision.get_comment() == 'Sector migration.'
def test_get_unreferenced_objects_query(get_unreferenced_objects_query, s3_stubber):
    """Test that the get_unreferenced_objects_query function is only called once per file."""
    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = '\n'.join(['id', *(str(sector.pk) for sector in sectors)]) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # The query helper must be evaluated once, not once per row.
    assert get_unreferenced_objects_query.call_count == 1
def test_simulate(s3_stubber):
    """Test that the command simulates deletes if --simulate is passed in."""
    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = '\n'.join(['id', *(str(sector.pk) for sector in sectors)]) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key, simulate=True)

    # Nothing is deleted in simulate mode.
    remaining = Sector.objects.filter(pk__in=sector_pks)
    assert [str(sector.pk) for sector in remaining] == sector_pks
def test_happy_path(s3_stubber):
    """Test that the command deletes the specified records."""
    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = '\n'.join(['id', *(str(sector.pk) for sector in sectors)]) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # All three sectors are gone.
    assert not Sector.objects.filter(pk__in=sector_pks)
def test_simulate(s3_stubber):
    """Test that the command only simulates the actions if --simulate is passed in."""
    new_sectors = SectorFactory.create_batch(5)
    investment_projects = InvestmentProjectFactory.create_batch(2)
    old_sectors = [project.sector for project in investment_projects]

    bucket = 'test_bucket'
    object_key = 'test_key'
    rows = [
        'id,old_sector,new_sector',
        f'{investment_projects[0].id},{old_sectors[0].id},{new_sectors[0].id}',
        f'{investment_projects[1].id},{old_sectors[1].id},{new_sectors[1].id}',
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector', bucket, object_key, simulate=True)

    for investment_project in investment_projects:
        investment_project.refresh_from_db()

    # In simulate mode the sectors are untouched.
    assert investment_projects[0].sector == old_sectors[0]
    assert investment_projects[1].sector == old_sectors[1]
def test_sector_cluster(sector_cluster_id, expected):
    """Tests that sector cluster has correct default value."""
    parent_sector = SectorFactory(segment='Cats', sector_cluster_id=sector_cluster_id)
    child_sector = SectorFactory(segment='Rockets', parent=parent_sector)
    InvestmentProjectFactory(sector_id=child_sector.id)

    etl = ETLInvestmentProjects(destination=MIInvestmentProject)

    # The single source project results in one created and zero updated rows.
    updated, created = etl.load()
    assert (updated, created) == (0, 1)

    mi_investment_project = MIInvestmentProject.objects.values(*etl.COLUMNS).first()
    assert mi_investment_project['sector_cluster'] == expected
def test_entry_already_exists_for_sector(s3_stubber, caplog):
    """
    Test that the command ignores records whose sector_ids already exist
    in the InvestmentSector table.
    """
    caplog.set_level('ERROR')

    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector1', 'sector2', 'sector3']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )
    # A mapping for the first sector already exists before the command runs.
    InvestmentSector(
        sector=sectors[0],
        fdi_sic_grouping=fdi_sic_groupings[0],
    ).save()

    bucket = 'test_bucket'
    object_key = 'test_key'
    header = 'sector_id,sector,fdi_sic_grouping_id,fdi_sic_grouping_name'
    row = f'{sectors[0].pk},path1,{fdi_sic_groupings[1].pk},name1'
    csv_content = f'{header}\n{row}'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    count_before = InvestmentSector.objects.count()

    call_command('create_investment_sector', bucket, object_key)

    # The duplicate row was rejected and logged once.
    assert InvestmentSector.objects.count() == count_before
    assert len(caplog.records) == 1
    assert f'InvestmentSector for sector_id: {sectors[0].pk} already exists' in caplog.text
def test_happy_path(s3_stubber):
    """Test that the command updates the specified records."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([s.pk for s in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    rows = ['id,old_sector_id,new_sector_id'] + [
        f'{item.pk},{old.pk},{new.pk}'
        for item, old, new in zip(pipeline_items, old_sectors, new_sectors)
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    for pipeline_item in pipeline_items:
        pipeline_item.refresh_from_db()

    assert [item.sector for item in pipeline_items] == new_sectors
def test_sector_with_children(s3_stubber, caplog):
    """Test that the command logs a warning if the sector has children."""
    caplog.set_level('WARNING')

    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )
    # Give the third sector a child so it cannot be deleted.
    SectorFactory(parent=sectors[2])

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = '\n'.join(['id', *(str(sector.pk) for sector in sectors)]) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # Only the sector with a child survives, and the skip was logged once.
    remaining = Sector.objects.filter(pk__in=sector_pks)
    assert f'Not deleting sector {remaining[0]} as it is referenced by another object' in caplog.text
    assert len(caplog.records) == 1
    assert len(remaining) == 1
    assert str(remaining[0].pk) == sector_pks[2]
def test_annotated_sector_name_root_node():
    """Test the sector name annotation for a sector at root level."""
    root = SectorFactory(parent=None)

    annotated = (
        Sector.objects
        .annotate(name_annotation=get_sector_name_subquery())
        .get(pk=root.pk)
    )

    assert annotated.name_annotation == root.name
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    orders = OrderFactory.create_batch(
        3,
        reference=factory.Iterator(['order_1', 'order_2', 'order_3']),
        sector_id=factory.Iterator([s.pk for s in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    rows = ['id,old_sector_id,new_sector_id'] + [
        f'{order.pk},{old.pk},{new.pk}'
        for order, old, new in zip(orders, old_sectors, new_sectors)
    ]
    csv_content = '\n'.join(rows) + '\n'

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode('utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_order_sector', bucket, object_key, simulate=True)

    for order in orders:
        order.refresh_from_db()

    # In simulate mode nothing is persisted: sectors are unchanged.
    assert [order.sector for order in orders] == old_sectors