def test_non_existent_sector(s3_stubber, caplog):
    """Test that the command logs an error when the sector PK does not exist."""
    caplog.set_level('ERROR')

    segments = ['sector_1', 'sector_2', 'section_3']
    old_parent_segments = [
        'sector_1_parent_old', 'sector_2_parent_old', 'sector_3_parent_old',
    ]
    new_parent_segments = [
        'sector_1_parent_new', 'sector_2_parent_new', 'sector_3_parent_new',
    ]
    old_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_parent_segments),
    )
    new_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(new_parent_segments),
    )
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(segments),
        parent=factory.Iterator(old_parents),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Third row references a sector PK that does not exist.
    csv_content = f"""id,old_parent_id,new_parent_id
{sectors[0].pk},{old_parents[0].pk},{new_parents[0].pk}
{sectors[1].pk},{old_parents[1].pk},{new_parents[1].pk}
00000000-0000-0000-0000-000000000000,{old_parents[2].pk},{new_parents[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_parent', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    assert 'Sector matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    # The first two rows were applied; the bad row left sector 3 untouched.
    assert [sector.parent.pk for sector in sectors] == [
        new_parents[0].pk,
        new_parents[1].pk,
        old_parents[2].pk,
    ]
def test_no_change(s3_stubber, caplog):
    """Test that the command ignores records that haven't changed
    or records with incorrect current values.
    """
    caplog.set_level('WARNING')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Row 2: old == new (no change); row 3: old sector id doesn't match the record.
    csv_content = f"""id,old_sector_id,new_sector_id
{pipeline_items[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{pipeline_items[1].pk},{old_sectors[1].pk},{old_sectors[1].pk}
{pipeline_items[2].pk},00000000-0000-0000-0000-000000000000,{new_sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    for item in pipeline_items:
        item.refresh_from_db()

    assert (
        f'Not updating PipelineItem {pipeline_items[1]} as its sector has not changed'
    ) in caplog.text
    assert (
        f'Not updating PipelineItem {pipeline_items[2]} as its sector has not changed'
    ) in caplog.text
    assert len(caplog.records) == 2
    assert [item.sector for item in pipeline_items] == [
        new_sectors[0],
        old_sectors[1],
        old_sectors[2],
    ]
def test_non_existent_sector(s3_stubber, caplog):
    """Test that the command logs an error when the sector PK does not exist."""
    caplog.set_level('ERROR')

    SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector1', 'sector2', 'sector3']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    headers = [
        'sector_id',
        'sector',
        'fdi_sic_grouping_id',
        'fdi_sic_grouping_name',
    ]
    data = [
        (
            # An unsaved Sector instance yields a fresh UUID that isn't in the DB.
            Sector(segment='does not exist').id,
            'path1',
            fdi_sic_groupings[0].pk,
            'name1',
        ),
    ]
    csv_content = ','.join(headers)
    for row in data:
        csv_content += '\n' + ','.join([str(col) for col in row])

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    # Use count() rather than len(objects.all()) so the DB counts the rows
    # instead of loading every record into memory.
    n_investment_sectors_before = InvestmentSector.objects.count()

    call_command('create_investment_sector', bucket, object_key)

    # No rows should have been created for the unknown sector.
    assert InvestmentSector.objects.count() == n_investment_sectors_before
    assert len(caplog.records) == 1
    assert 'Sector matching query does not exist' in caplog.text
def test_non_existent_project(s3_stubber, caplog):
    """Test that the command logs an error when the investment project PK does not exist."""
    caplog.set_level('ERROR')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    projects = InvestmentProjectFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Final row uses a project PK that doesn't exist in the database.
    csv_content = f"""id,old_sector_id,new_sector_id
{projects[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{projects[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
00000000-0000-0000-0000-000000000000,{old_sectors[2].pk},{new_sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector_disabled_signals', bucket, object_key)

    for project in projects:
        project.refresh_from_db()

    assert 'InvestmentProject matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    assert [project.sector for project in projects] == [
        new_sectors[0],
        new_sectors[1],
        old_sectors[2],
    ]
def test_root_node_to_child_node(s3_stubber):
    """Test that the command changes a root node to a child node if parent is provided."""
    segments = ['sector_1', 'sector_2', 'section_3']
    old_parent_segments = [
        'sector_1_parent_old', 'sector_2_parent_old', 'sector_3_parent_old',
    ]
    new_parent_segments = [
        'sector_1_parent_new', 'sector_2_parent_new', 'sector_3_parent_new',
    ]
    # Only two old parents are created; the third sector starts as a root node.
    old_parents = SectorFactory.create_batch(
        2,
        segment=factory.Iterator(old_parent_segments),
    )
    old_parents.append(None)
    new_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(new_parent_segments),
    )
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(segments),
        parent=factory.Iterator(old_parents),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Third row has an empty old_parent_id because the sector is a root node.
    csv_content = f"""id,old_parent_id,new_parent_id
{sectors[0].pk},{old_parents[0].pk},{new_parents[0].pk}
{sectors[1].pk},{old_parents[1].pk},{new_parents[1].pk}
{sectors[2].pk},,{new_parents[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_parent', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    assert [sector.parent.pk for sector in sectors] == [parent.pk for parent in new_parents]
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    segments = ['sector_1', 'sector_2', 'section_3']
    old_parent_segments = [
        'sector_1_parent_old', 'sector_2_parent_old', 'sector_3_parent_old',
    ]
    new_parent_segments = [
        'sector_1_parent_new', 'sector_2_parent_new', 'sector_3_parent_new',
    ]
    old_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_parent_segments),
    )
    new_parents = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(new_parent_segments),
    )
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(segments),
        parent=factory.Iterator(old_parents),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_parent_id,new_parent_id
{sectors[0].pk},{old_parents[0].pk},{new_parents[0].pk}
{sectors[1].pk},{old_parents[1].pk},{new_parents[1].pk}
{sectors[2].pk},{old_parents[2].pk},{new_parents[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_parent', bucket, object_key, simulate=True)

    for sector in sectors:
        sector.refresh_from_db()

    # Nothing should have been written to the database in simulate mode.
    assert [sector.parent.pk for sector in sectors] == [parent.pk for parent in old_parents]
def test_run(s3_stubber):
    """Test that the command updates the relevant records ignoring ones with errors."""
    sectors = SectorFactory.create_batch(5)
    investment_projects = [
        # sector should get updated
        InvestmentProjectFactory(sector_id=sectors[0].id),
        # sector should get updated
        InvestmentProjectFactory(sector_id=None),
        # sector should not get updated
        InvestmentProjectFactory(sector_id=None),
        # should be ignored
        InvestmentProjectFactory(sector_id=sectors[3].id),
        # should be skipped because of an error
        InvestmentProjectFactory(sector_id=sectors[4].id),
    ]
    new_sectors = SectorFactory.create_batch(5)

    bucket = 'test_bucket'
    object_key = 'test_key'
    # First data row: investment project in CSV doesn't exist so row should fail.
    csv_content = f"""id,old_sector,new_sector
00000000-0000-0000-0000-000000000000,NULL,NULL
{investment_projects[0].id},{sectors[0].id},{new_sectors[0].id}
{investment_projects[1].id},NULL,{new_sectors[1].id}
{investment_projects[2].id},{new_sectors[2].id},{new_sectors[2].id}
{investment_projects[4].id},invalid_id,another_invalid_id
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(bytes(csv_content, encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector', bucket, object_key)

    for investment_project in investment_projects:
        investment_project.refresh_from_db()

    assert investment_projects[0].sector == new_sectors[0]
    assert investment_projects[1].sector == new_sectors[1]
    assert investment_projects[2].sector is None
    assert investment_projects[3].sector == sectors[3]
    assert investment_projects[4].sector == sectors[4]
def test_non_existent_order(s3_stubber, caplog):
    """Test that the command logs an error when the order PK does not exist."""
    caplog.set_level('ERROR')

    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    orders = OrderFactory.create_batch(
        3,
        reference=factory.Iterator(['order_1', 'order_2', 'order_3']),
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Last row references an order PK that doesn't exist.
    csv_content = f"""id,old_sector_id,new_sector_id
{orders[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{orders[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
00000000-0000-0000-0000-000000000000,{old_sectors[2].pk},{new_sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_order_sector', bucket, object_key)

    for order in orders:
        order.refresh_from_db()

    assert 'Order matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    assert [order.sector for order in orders] == [
        new_sectors[0],
        new_sectors[1],
        old_sectors[2],
    ]
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    projects = InvestmentProjectFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{projects[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{projects[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
{projects[2].pk},{old_sectors[2].pk},{new_sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command(
        'update_investment_project_sector_disabled_signals',
        bucket,
        object_key,
        simulate=True,
    )

    for project in projects:
        project.refresh_from_db()

    # In simulate mode the sectors must be unchanged.
    assert [project.sector for project in projects] == old_sectors
def test_simulate(s3_stubber):
    """Test that the command simulates deletes if --simulate is passed in."""
    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id
{sectors[0].pk}
{sectors[1].pk}
{sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key, simulate=True)

    # All sectors must survive a simulated delete.
    sectors = Sector.objects.filter(pk__in=sector_pks)
    assert [str(sector.pk) for sector in sectors] == sector_pks
def test_happy_path(s3_stubber):
    """Test that the command deletes the specified records."""
    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id
{sectors[0].pk}
{sectors[1].pk}
{sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # Every listed sector should be gone.
    sectors = Sector.objects.filter(pk__in=sector_pks)
    assert not sectors
def test_simulate(s3_stubber):
    """Test that the command only simulates the actions if --simulate is passed in."""
    new_sectors = SectorFactory.create_batch(5)
    investment_projects = InvestmentProjectFactory.create_batch(2)
    old_sectors = [ip.sector for ip in investment_projects]

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector,new_sector
{investment_projects[0].id},{old_sectors[0].id},{new_sectors[0].id}
{investment_projects[1].id},{old_sectors[1].id},{new_sectors[1].id}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(bytes(csv_content, encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_investment_project_sector', bucket, object_key, simulate=True)

    for investment_project in investment_projects:
        investment_project.refresh_from_db()

    # Simulated run: sectors keep their original values.
    assert investment_projects[0].sector == old_sectors[0]
    assert investment_projects[1].sector == old_sectors[1]
def test_happy_path(s3_stubber):
    """Test that the command updates the specified records."""
    old_sectors = ['sector_1_old', 'sector_2_old', 'sector_3_old']
    new_sectors = ['sector_1_new', 'sector_2_new', 'sector_3_new']
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_sectors),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_segment,new_sector_segment
{sectors[0].pk},{old_sectors[0]},{new_sectors[0]}
{sectors[1].pk},{old_sectors[1]},{new_sectors[1]}
{sectors[2].pk},{old_sectors[2]},{new_sectors[2]}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_segment', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    assert [sector.segment for sector in sectors] == new_sectors
def test_get_unreferenced_objects_query(get_unreferenced_objects_query, s3_stubber):
    """Test that the get_unreferenced_objects_query function is only called once per file."""
    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id
{sectors[0].pk}
{sectors[1].pk}
{sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # The expensive query should be built once, not once per row.
    assert get_unreferenced_objects_query.call_count == 1
def test_entry_already_exists_for_sector(s3_stubber, caplog):
    """
    Test that the command ignores records with sector_ids that already
    exist in the InvestmentSector table.
    """
    caplog.set_level('ERROR')

    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector1', 'sector2', 'sector3']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )
    # Pre-existing row for sectors[0]; the CSV tries to create a duplicate.
    investment_sector = InvestmentSector(
        sector=sectors[0],
        fdi_sic_grouping=fdi_sic_groupings[0],
    )
    investment_sector.save()

    bucket = 'test_bucket'
    object_key = 'test_key'
    headers = [
        'sector_id',
        'sector',
        'fdi_sic_grouping_id',
        'fdi_sic_grouping_name',
    ]
    data = [
        (sectors[0].pk, 'path1', fdi_sic_groupings[1].pk, 'name1'),
    ]
    csv_content = ','.join(headers)
    for row in data:
        csv_content += '\n' + ','.join([str(col) for col in row])

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    # Use count() rather than len(objects.all()) so the DB counts the rows
    # instead of loading every record into memory.
    n_investment_sectors_before = InvestmentSector.objects.count()

    call_command('create_investment_sector', bucket, object_key)

    # No new rows: the duplicate was rejected and logged.
    assert InvestmentSector.objects.count() == n_investment_sectors_before
    assert len(caplog.records) == 1
    assert f'InvestmentSector for sector_id: {sectors[0].pk} already exists' in caplog.text
def test_happy_path(s3_stubber):
    """Test that the command updates the specified records."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    pipeline_items = PipelineItemFactory.create_batch(
        3,
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{pipeline_items[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{pipeline_items[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
{pipeline_items[2].pk},{old_sectors[2].pk},{new_sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_pipeline_item_sector', bucket, object_key)

    for item in pipeline_items:
        item.refresh_from_db()

    assert [item.sector for item in pipeline_items] == new_sectors
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    old_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_old', 'sector_2_old', 'sector_3_old']),
    )
    orders = OrderFactory.create_batch(
        3,
        reference=factory.Iterator(['order_1', 'order_2', 'order_3']),
        sector_id=factory.Iterator([sector.pk for sector in old_sectors]),
    )
    new_sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1_new', 'sector_2_new', 'sector_3_new']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id,old_sector_id,new_sector_id
{orders[0].pk},{old_sectors[0].pk},{new_sectors[0].pk}
{orders[1].pk},{old_sectors[1].pk},{new_sectors[1].pk}
{orders[2].pk},{old_sectors[2].pk},{new_sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_order_sector', bucket, object_key, simulate=True)

    for order in orders:
        order.refresh_from_db()

    # Simulated run: all orders keep their original sectors.
    assert [order.sector for order in orders] == old_sectors
def test_happy_path(s3_stubber):
    """Test that command creates specified investment sectors"""
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector1', 'sector2', 'sector3']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    headers = [
        'sector_id',
        'sector',
        'fdi_sic_grouping_id',
        'fdi_sic_grouping_name',
    ]
    data = [
        (sectors[0].pk, 'path1', fdi_sic_groupings[0].pk, 'name1'),
        (sectors[1].pk, 'path2', fdi_sic_groupings[0].pk, 'name1'),
        (sectors[2].pk, 'path3', fdi_sic_groupings[1].pk, 'name2'),
    ]
    csv_content = ','.join(headers)
    for row in data:
        csv_content += '\n' + ','.join([str(col) for col in row])

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    # Use count() rather than len(objects.all()) so the DB counts the rows
    # instead of loading every record into memory.
    n_investment_sectors_before = InvestmentSector.objects.count()

    call_command('create_investment_sector', bucket, object_key)

    # One InvestmentSector per CSV row should have been created.
    assert InvestmentSector.objects.count() == n_investment_sectors_before + 3

    for d in data:
        matches = InvestmentSector.objects.filter(sector_id=d[0])
        assert len(matches) == 1
        investment_sector = matches[0]
        assert investment_sector.fdi_sic_grouping_id == d[2]
def test_simulate(s3_stubber):
    """Test that the command simulates updates if --simulate is passed in."""
    sectors = SectorFactory.create_batch(
        2,
        segment=factory.Iterator(['sector1', 'sector2']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )
    investment_sectors = InvestmentSectorFactory.create_batch(
        2,
        sector=factory.Iterator([sectors[0], sectors[1]]),
        fdi_sic_grouping=factory.Iterator(
            [fdi_sic_groupings[0], fdi_sic_groupings[1]],
        ),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    headers = [
        'sector_id',
        'fdi_sic_grouping_id',
    ]
    data = [
        (sectors[0].pk, fdi_sic_groupings[0].pk),
        (sectors[1].pk, fdi_sic_groupings[1].pk),
    ]
    csv_content = ','.join(headers)
    for row in data:
        csv_content += '\n' + ','.join([str(col) for col in row])

    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(csv_content.encode(encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )

    # Use count() rather than len(objects.all()) so the DB counts the rows
    # instead of loading every record into memory.
    n_investment_sectors_before = InvestmentSector.objects.count()

    call_command('delete_investment_sector', bucket, object_key, simulate=True)

    # Simulated run: nothing deleted, both rows still present.
    assert InvestmentSector.objects.count() == n_investment_sectors_before
    assert InvestmentSector.objects.filter(sector=sectors[0]).count() == 1
    assert InvestmentSector.objects.filter(sector=sectors[1]).count() == 1
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created."""
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector1', 'sector2', 'sector3']),
    )
    fdi_sic_groupings = FDISICGroupingFactory.create_batch(
        2,
        name=factory.Iterator(['fdi_sic_grouping1', 'fdi_sic_grouping2']),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    headers = [
        'sector_id',
        'sector',
        'fdi_sic_grouping_id',
        'fdi_sic_grouping_name',
    ]
    data = [
        (sectors[0].pk, 'path1', fdi_sic_groupings[0].pk, 'name1'),
        (sectors[1].pk, 'path2', fdi_sic_groupings[0].pk, 'name1'),
        (sectors[2].pk, 'path3', fdi_sic_groupings[1].pk, 'name2'),
    ]
    rows = [','.join(headers)]
    for row in data:
        rows.append(','.join([str(col) for col in row]))
    csv_content = '\n'.join(rows)

    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('create_investment_sector', bucket, object_key)

    new_investment_sectors = InvestmentSector.objects.filter(
        sector_id__in=[d[0] for d in data],
    )
    # Each created row should carry exactly one reversion version.
    for investment_sector in new_investment_sectors:
        versions = Version.objects.get_for_object(investment_sector)
        assert versions.count() == 1
        assert versions[0].revision.get_comment() == 'InvestmentSector creation.'
def test_no_change(s3_stubber, caplog):
    """Test that the command ignores records that haven't changed
    or records with incorrect current values.
    """
    caplog.set_level('WARNING')

    old_sectors = ['sector_1_old', 'sector_2_old', 'sector_3_old']
    new_sectors = ['sector_1_new', 'sector_2_new', 'sector_3_new']
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(old_sectors),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Row 2: old == new; row 3: old segment doesn't match the record ('bla').
    csv_content = f"""id,old_sector_segment,new_sector_segment
{sectors[0].pk},{old_sectors[0]},{new_sectors[0]}
{sectors[1].pk},{old_sectors[1]},{old_sectors[1]}
{sectors[2].pk},bla,{new_sectors[2]}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_sector_segment', bucket, object_key)

    for sector in sectors:
        sector.refresh_from_db()

    assert f'Not updating sector {sectors[1]} as its segment has not changed' in caplog.text
    assert f'Not updating sector {sectors[2]} as its segment has not changed' in caplog.text
    assert len(caplog.records) == 2
    assert [sector.segment for sector in sectors] == [
        new_sectors[0],
        old_sectors[1],
        old_sectors[2],
    ]
def test_sector_with_children(s3_stubber, caplog):
    """Test that the command logs a warning if the sector has children."""
    caplog.set_level('WARNING')

    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )
    # Create a child belonging to sector 3
    SectorFactory(parent=sectors[2])

    bucket = 'test_bucket'
    object_key = 'test_key'
    csv_content = f"""id
{sectors[0].pk}
{sectors[1].pk}
{sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # Only the referenced sector (the one with a child) should remain.
    sectors = Sector.objects.filter(pk__in=sector_pks)
    assert f'Not deleting sector {sectors[0]} as it is referenced by another object' in caplog.text
    assert len(caplog.records) == 1
    assert len(sectors) == 1
    assert str(sectors[0].pk) == sector_pks[2]
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created."""
    sectors = SectorFactory.create_batch(
        3,
        segment=factory.Iterator(['sector_1', 'sector_2', 'sector_3']),
    )
    order_without_change = OrderFactory(
        reference='order_1',
        sector_id=sectors[0].pk,
    )
    order_with_change = OrderFactory(
        reference='order_2',
        sector_id=sectors[1].pk,
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # First row is a no-op (old == new); second row changes the sector.
    csv_content = f"""id,old_sector_id,new_sector_id
{order_without_change.pk},{sectors[0].pk},{sectors[0].pk}
{order_with_change.pk},{sectors[1].pk},{sectors[2].pk}
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('update_order_sector', bucket, object_key)

    # No revision for the unchanged order, exactly one for the updated order.
    assert Version.objects.get_for_object(order_without_change).count() == 0
    versions = Version.objects.get_for_object(order_with_change)
    assert versions.count() == 1
    assert versions[0].revision.get_comment() == 'Order sector correction.'
def test_non_existent_sector(s3_stubber, caplog):
    """Test that the command logs an error when PK does not exist."""
    caplog.set_level('ERROR')

    sector_pks = [
        '00000000-0000-0000-0000-000000000001',
        '00000000-0000-0000-0000-000000000002',
        '00000000-0000-0000-0000-000000000003',
    ]
    sectors = SectorFactory.create_batch(
        3,
        id=factory.Iterator(sector_pks),
    )

    bucket = 'test_bucket'
    object_key = 'test_key'
    # Last row references a sector PK that doesn't exist.
    csv_content = f"""id
{sectors[0].pk}
{sectors[1].pk}
00000000-0000-0000-0000-000000000000
"""
    s3_stubber.add_response(
        'get_object',
        {'Body': BytesIO(csv_content.encode(encoding='utf-8'))},
        expected_params={'Bucket': bucket, 'Key': object_key},
    )

    call_command('delete_sector', bucket, object_key)

    # The two valid rows were deleted; only the third sector remains.
    sectors = Sector.objects.filter(pk__in=sector_pks)
    assert 'Sector matching query does not exist' in caplog.text
    assert len(caplog.records) == 1
    assert len(sectors) == 1
    assert str(sectors[0].pk) == sector_pks[2]