def test_update_id_indices_new_entry_in_cuboid_index(self):
    """
    Test adding ids to new cuboids in the s3 cuboid index.
    """
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = 20
    bytes[2] = 20
    bytes[5] = 55
    bytes[8] = 1000
    bytes[9] = 55
    expected = ['20', '55', '1000']
    key = 'hash_coll_exp_chan_key'
    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1

    # Method under test.
    self.obj_ind.update_id_indices(resource, resolution, [key], [bytes], version)

    response = self.dynamodb.get_item(
        TableName=self.object_store_config["s3_index_table"],
        Key={'object-key': {'S': key},
             'version-node': {'N': "{}".format(version)}},
        ConsistentRead=True,
        ReturnConsumedCapacity='NONE')

    self.assertIn('Item', response)
    self.assertIn('id-set', response['Item'])
    self.assertIn('NS', response['Item']['id-set'])
    self.assertCountEqual(expected, response['Item']['id-set']['NS'])
def test_too_many_cuboids_for_id_index(self):
    """
    Test error handling when the number of cuboids that contain an id
    exceeds the limits allowed by DynamoDB.  This test writes 7651 cuboids,
    which causes DynamoDB throttling, so we normally skip this test.
    """
    version = 0
    resolution = 0
    time_sample = 0
    resource = BossResourceBasic(data=get_anno_dict())
    y = 0
    z = 0
    obj_keys = []
    cubes = []

    for x in range(0, 7651):
        mortonid = XYZMorton([x, y, z])
        obj_keys.append(self.obj_store.generate_object_key(
            resource, resolution, time_sample, mortonid))
        # Just need one non-zero number to represent each cuboid.
        cubes.append(np.ones(1, dtype='uint64'))

    with self.assertRaises(SpdbError) as ex:
        self.obj_ind.update_id_indices(
            resource, resolution, obj_keys, cubes, version)
    self.assertEqual(ErrorCodes.OBJECT_STORE_ERROR, ex.exception.error_code)
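# --- Illustrative sketch (not part of the test suite) -----------------------
# One way a caller could bound the per-request load that the test above
# provokes is to chunk the key/cube lists before each update_id_indices()
# call.  The chunk size and the _chunks() helper are assumptions for
# illustration only, not spdb or DynamoDB API.
def _chunks(seq, size=100):
    """Yield successive slices of seq containing at most `size` elements."""
    for i in range(0, len(seq), size):
        yield seq[i:i + size]

# Hypothetical usage:
#   for key_batch, cube_batch in zip(_chunks(obj_keys), _chunks(cubes)):
#       obj_ind.update_id_indices(resource, resolution, key_batch, cube_batch, version)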
def test_basic_resource_from_json_annotation(self):
    """Test basic to_json()/from_json() round trip for an annotation resource

    Returns:
        None
    """
    setup_data = get_anno_dict()
    resource1 = BossResourceBasic(setup_data)

    resource2 = BossResourceBasic()
    resource2.from_json(resource1.to_json())

    # Check Collection
    col = resource2.get_collection()
    assert col.name == setup_data['collection']['name']
    assert col.description == setup_data['collection']['description']

    # Check coord frame
    coord = resource2.get_coord_frame()
    assert coord.name == setup_data['coord_frame']['name']
    assert coord.description == setup_data['coord_frame']['description']
    assert coord.x_start == setup_data['coord_frame']['x_start']
    assert coord.x_stop == setup_data['coord_frame']['x_stop']
    assert coord.y_start == setup_data['coord_frame']['y_start']
    assert coord.y_stop == setup_data['coord_frame']['y_stop']
    assert coord.z_start == setup_data['coord_frame']['z_start']
    assert coord.z_stop == setup_data['coord_frame']['z_stop']
    assert coord.x_voxel_size == setup_data['coord_frame']['x_voxel_size']
    assert coord.y_voxel_size == setup_data['coord_frame']['y_voxel_size']
    assert coord.z_voxel_size == setup_data['coord_frame']['z_voxel_size']
    assert coord.voxel_unit == setup_data['coord_frame']['voxel_unit']

    # Check exp
    exp = resource2.get_experiment()
    assert exp.name == setup_data['experiment']['name']
    assert exp.description == setup_data['experiment']['description']
    assert exp.num_hierarchy_levels == setup_data['experiment']['num_hierarchy_levels']
    assert exp.hierarchy_method == setup_data['experiment']['hierarchy_method']
    assert exp.num_time_samples == setup_data['experiment']['num_time_samples']
    assert exp.time_step == setup_data['experiment']['time_step']
    assert exp.time_step_unit == setup_data['experiment']['time_step_unit']

    # Check channel
    channel = resource2.get_channel()
    assert channel.is_image() is False
    assert channel.name == setup_data['channel']['name']
    assert channel.description == setup_data['channel']['description']
    assert channel.datatype == setup_data['channel']['datatype']
    assert channel.base_resolution == setup_data['channel']['base_resolution']
    assert channel.sources == setup_data['channel']['sources']
    assert channel.related == setup_data['channel']['related']
    assert channel.default_time_sample == setup_data['channel']['default_time_sample']

    # Check keys
    assert resource2.get_lookup_key() == setup_data['lookup_key']
    assert resource2.get_boss_key() == setup_data['boss_key']
def test_basic_resource_get_iso_level_anisotropic(self):
    """Test get iso level anisotropic

    Returns:
        None
    """
    setup_data = get_anno_dict()
    resource = BossResourceBasic(setup_data)

    self.assertEqual(resource.get_isotropic_level(), 3)
def test_factory(self):
    """Test the Cube factory in Cube"""
    data = get_anno_dict()
    resource = BossResourceBasic(data)

    c = Cube.create_cube(resource, [30, 20, 13], [0, 15])

    assert isinstance(c, AnnotateCube64) is True
    # create_cube() takes extents as [x, y, z]; cube_size is stored reversed
    # as [z, y, x], matching the cube data's (t, z, y, x) array layout.
    assert c.cube_size == [13, 20, 30]
    assert c.is_time_series is True
    assert c.time_range == [0, 15]
def test_basic_resource_get_iso_level_isotropic(self):
    """Test get iso level isotropic

    Returns:
        None
    """
    setup_data = get_anno_dict()
    setup_data["experiment"]['hierarchy_method'] = "isotropic"
    resource = BossResourceBasic(setup_data)

    self.assertEqual(resource.get_isotropic_level(), 0)
def test_get_cuboids(self):
    resource = BossResourceBasic(data=get_anno_dict())
    id = 22222
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = id
    resolution = 1
    key = self.obj_store.generate_object_key(resource, resolution, 0, 56)
    version = 0

    new_bytes = np.zeros(4, dtype='uint64')
    new_bytes[0] = id    # Pre-existing id.
    new_key = self.obj_store.generate_object_key(resource, resolution, 0, 59)

    self.obj_ind.update_id_indices(
        resource, resolution, [key, new_key], [bytes, new_bytes], version)

    # Method under test.
    actual = self.obj_ind.get_cuboids(resource, resolution, id)

    expected = [key, new_key]
    self.assertCountEqual(expected, actual)
def test_basic_resource_get_downsampled_extent_dims_anisotropic(self):
    """Test downsample extent anisotropic

    Returns:
        None
    """
    setup_data = get_anno_dict()
    resource = BossResourceBasic(setup_data)

    extent_dims = resource.get_downsampled_extent_dims()
    self.assertEqual(len(extent_dims), setup_data["experiment"]['num_hierarchy_levels'])
    self.assertEqual(extent_dims[0], [2000, 5000, 200])
    self.assertEqual(extent_dims[4], [125, 313, 200])
def test_basic_resource_get_downsampled_voxel_dims_anisotropic(self):
    """Test downsample voxel dims anisotropic

    Returns:
        None
    """
    setup_data = get_anno_dict()
    resource = BossResourceBasic(setup_data)

    voxel_dims = resource.get_downsampled_voxel_dims()
    self.assertEqual(len(voxel_dims), setup_data["experiment"]['num_hierarchy_levels'])
    self.assertEqual(voxel_dims[0], [4, 4, 35])
    self.assertEqual(voxel_dims[4], [64, 64, 35])
def test_too_many_ids_in_cuboid(self):
    """
    Test error handling when a cuboid has more unique ids than DynamoDB
    can support.
    """
    version = 0
    resolution = 0
    time_sample = 0
    resource = BossResourceBasic(data=get_anno_dict())
    mortonid = XYZMorton([0, 0, 0])
    obj_keys = [self.obj_store.generate_object_key(
        resource, resolution, time_sample, mortonid)]
    cubes = [np.random.randint(2000000, size=(16, 512, 512), dtype='uint64')]

    # If too many ids, the index is skipped, logged, and False is returned
    # to the caller.
    result = self.obj_ind.update_id_indices(
        resource, resolution, obj_keys, cubes, version)
    self.assertFalse(result)
def test_basic_resource_get_downsampled_voxel_dims_anisotropic_iso(self):
    """Test downsample voxel dims anisotropic with iso flag

    Returns:
        None
    """
    setup_data = get_anno_dict()
    resource = BossResourceBasic(setup_data)

    voxel_dims = resource.get_downsampled_voxel_dims(iso=True)
    self.assertEqual(len(voxel_dims), setup_data["experiment"]['num_hierarchy_levels'])
    self.assertEqual(voxel_dims[0], [4, 4, 35])
    self.assertEqual(voxel_dims[1], [8, 8, 35])
    self.assertEqual(voxel_dims[2], [16, 16, 35])
    self.assertEqual(voxel_dims[3], [32, 32, 35])
    self.assertEqual(voxel_dims[4], [64, 64, 70])
    self.assertEqual(voxel_dims[5], [128, 128, 140])
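# --- Illustrative sketch (not part of the test suite) -----------------------
# The expected values in the voxel-dims tests above follow a simple rule;
# this restates it as plain Python (a reading of the tests, not spdb's
# implementation).  Anisotropic downsampling doubles the x/y voxel size at
# each level and leaves z fixed; with iso=True, z also starts doubling one
# level past the isotropic level (level 3 for this 4x4x35 coord frame).
def downsampled_voxel_dims(base=(4, 4, 35), levels=8, iso=False, iso_level=3):
    dims = []
    for n in range(levels):
        z_doublings = max(0, n - iso_level) if iso else 0
        dims.append([base[0] * 2**n, base[1] * 2**n, base[2] * 2**z_doublings])
    return dims

assert downsampled_voxel_dims()[4] == [64, 64, 35]
assert downsampled_voxel_dims(iso=True)[4] == [64, 64, 70]
assert downsampled_voxel_dims(iso=True)[5] == [128, 128, 140]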
def test_basic_resource_get_downsampled_voxel_dims_isotropic(self):
    """Test downsample voxel dims isotropic

    Returns:
        None
    """
    setup_data = get_anno_dict()
    setup_data['coord_frame']['x_voxel_size'] = 6
    setup_data['coord_frame']['y_voxel_size'] = 6
    setup_data['coord_frame']['z_voxel_size'] = 6
    setup_data['experiment']['hierarchy_method'] = "isotropic"
    resource = BossResourceBasic(setup_data)

    voxel_dims = resource.get_downsampled_voxel_dims()
    self.assertEqual(len(voxel_dims), setup_data["experiment"]['num_hierarchy_levels'])
    self.assertEqual(voxel_dims[0], [6, 6, 6])
    self.assertEqual(voxel_dims[3], [48, 48, 48])
def test_basic_resource_annotation_no_time(self):
    """Test the basic channel getter for an annotation channel (no time series)

    Returns:
        None
    """
    setup_data = get_anno_dict()
    resource = BossResourceBasic(setup_data)

    channel = resource.get_channel()
    assert channel.is_image() is False
    assert channel.name == setup_data['channel']['name']
    assert channel.description == setup_data['channel']['description']
    assert channel.datatype == setup_data['channel']['datatype']
    assert channel.base_resolution == setup_data['channel']['base_resolution']
    assert channel.sources == setup_data['channel']['sources']
    assert channel.related == setup_data['channel']['related']
    assert channel.default_time_sample == setup_data['channel']['default_time_sample']
def test_update_id_indices_replaces_existing_entry_in_cuboid_index(self):
    """
    Test that calling update_id_indices() replaces an existing id set in
    the s3 cuboid index.

    The id set should be replaced because the entire cuboid is rewritten
    to s3 before this method is called.  Thus, the ids in the cuboid data
    are the only ids that should exist in the index for that cuboid.
    """
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = 20
    bytes[2] = 20
    bytes[5] = 55
    bytes[8] = 1000
    bytes[9] = 55
    key = 'hash_coll_exp_chan_key_existing'
    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1

    # Place initial ids for the cuboid.
    self.obj_ind.update_id_indices(resource, resolution, [key], [bytes], version)

    new_bytes = np.zeros(4, dtype='uint64')
    new_bytes[0] = 1000
    new_bytes[1] = 4444
    new_bytes[3] = 55

    # Rewrite the cuboid's index entry; 4444 is the only new id.
    self.obj_ind.update_id_indices(resource, resolution, [key], [new_bytes], version)

    response = self.dynamodb.get_item(
        TableName=self.object_store_config["s3_index_table"],
        Key={'object-key': {'S': key},
             'version-node': {'N': "{}".format(version)}},
        ConsistentRead=True,
        ReturnConsumedCapacity='NONE')

    self.assertIn('Item', response)
    self.assertIn('id-set', response['Item'])
    self.assertIn('NS', response['Item']['id-set'])

    # Id 20 should no longer be present.
    expected = ['55', '1000', '4444']
    self.assertCountEqual(expected, response['Item']['id-set']['NS'])
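# --- Illustrative sketch (not part of the test suite) -----------------------
# The replacement semantics asserted above, restated in plain Python set
# terms (an expectation of the test, not spdb's code): rewriting a cuboid
# replaces its id set outright rather than unioning it with the old set.
old_ids = {20, 55, 1000}
new_ids = {1000, 4444, 55}
index_after_rewrite = new_ids          # replaced, not old_ids | new_ids
assert 20 not in index_after_rewrite
assert index_after_rewrite == {55, 1000, 4444}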
def test_basic_resource_get_downsampled_extent_dims_isotropic(self):
    """Test downsample extent isotropic

    Returns:
        None
    """
    setup_data = get_anno_dict()
    setup_data['coord_frame']['x_voxel_size'] = 6
    setup_data['coord_frame']['y_voxel_size'] = 6
    setup_data['coord_frame']['z_voxel_size'] = 6
    setup_data['experiment']['hierarchy_method'] = "isotropic"
    resource = BossResourceBasic(setup_data)

    extent_dims = resource.get_downsampled_extent_dims()
    self.assertEqual(len(extent_dims), setup_data["experiment"]['num_hierarchy_levels'])
    self.assertEqual(extent_dims[0], [2000, 5000, 200])
    self.assertEqual(extent_dims[1], [1000, 2500, 100])
    self.assertEqual(extent_dims[3], [250, 625, 25])
def test_get_cuboids(self):
    id = 22222
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = id
    key = 'hash_coll_exp_chan_key_cuboids'
    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1

    new_bytes = np.zeros(4, dtype='uint64')
    new_bytes[0] = id    # Pre-existing id.
    new_key = 'hash_coll_exp_chan_key_cuboids2'

    self.obj_ind.update_id_indices(
        resource, resolution, [key, new_key], [bytes, new_bytes], version)

    # Method under test.
    actual = self.obj_ind.get_cuboids(resource, resolution, id)

    expected = [key, new_key]
    self.assertCountEqual(expected, actual)
def test_too_many_ids_in_cuboid(self):
    """
    Test error handling when a cuboid has more unique ids than DynamoDB
    can support.
    """
    version = 0
    resolution = 0
    time_sample = 0
    resource = BossResourceBasic(data=get_anno_dict())
    mortonid = XYZMorton([0, 0, 0])
    obj_keys = [
        self.obj_store.generate_object_key(resource, resolution, time_sample, mortonid)
    ]
    cubes = [
        np.random.randint(2000000, size=(16, 512, 512), dtype='uint64')
    ]

    with self.assertRaises(SpdbError) as ex:
        self.obj_ind.update_id_indices(resource, resolution, obj_keys, cubes, version)
    self.assertEqual(ErrorCodes.OBJECT_STORE_ERROR, ex.exception.error_code)
def test_too_many_ids_in_cuboid(self):
    """
    Test error handling when a cuboid has more unique ids than DynamoDB
    can support.
    """
    version = 0
    resolution = 0
    time_sample = 0
    resource = BossResourceBasic(data=get_anno_dict())
    mortonid = XYZMorton([0, 0, 0])
    obj_keys = [
        AWSObjectStore.generate_object_key(resource, resolution, time_sample, mortonid)
    ]
    cubes = [
        np.random.randint(2000000, size=(16, 512, 512), dtype='uint64')
    ]

    # If too many ids, the index is skipped, logged, and False is returned
    # to the caller.
    result = self.obj_ind.update_id_indices(
        resource, resolution, obj_keys, cubes, version)
    self.assertFalse(result)
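# --- Illustrative sketch (not part of the test suite) -----------------------
# Why ~2 million random ids in a 16x512x512 cuboid overflow a single
# DynamoDB item: a rough, self-contained size estimate.  The 400 KB item
# cap is DynamoDB's documented limit; the bytes-per-id figure is an
# approximation, not an exact wire size.
import numpy as np

cube = np.random.randint(2000000, size=(16, 512, 512), dtype='uint64')
unique_ids = np.unique(cube)            # on the order of 2 million distinct ids
approx_bytes = unique_ids.size * 8      # roughly 8 bytes per number-set member
assert approx_bytes > 400 * 1024        # far beyond the 400 KB item cap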
def test_legacy_cuboids_in_id_index(self):
    """Test to verify that legacy and "new" cuboid entries in the id index
    table both work

    Returns:
        None
    """
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = 222
    bytes[2] = 222
    bytes[5] = 555
    bytes[8] = 1001
    expected_ids = ['222', '555', '1001', '12345']

    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1
    time_sample = 0
    morton_id = 2000
    object_key = self.obj_store.generate_object_key(
        resource, resolution, time_sample, morton_id)

    # Write a legacy index entry.
    self.dynamodb.update_item(
        TableName=self.object_store_config["id_index_table"],
        Key={'channel-id-key': {'S': self.obj_ind.generate_channel_id_key(resource, resolution, 12345)},
             'version': {'N': "{}".format(version)}},
        UpdateExpression='ADD #cuboidset :objkey',
        ExpressionAttributeNames={'#cuboidset': 'cuboid-set'},
        ExpressionAttributeValues={':objkey': {'SS': [object_key]}},
        ReturnConsumedCapacity='NONE')

    # Add new-style index values.
    self.obj_ind.update_id_indices(resource, resolution, [object_key], [bytes], version)

    # Confirm each id has the object_key in its cuboid-set attribute.
    for id in expected_ids:
        cuboid_object_keys = self.obj_ind.get_cuboids(resource, resolution, id)
        self.assertEqual(cuboid_object_keys[0], object_key)
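# --- Illustrative sketch (not part of the test suite) -----------------------
# The legacy/new distinction the test above exercises, restated (a reading
# of these tests, not spdb documentation): legacy cuboid-set members store
# the full object key, while new-style members store only the key's final
# '&'-delimited field, the morton id.  The example key layout below is
# hypothetical.
object_key = 'a1b2c3d4&1&1&100&1&0&2000'   # hypothetical hash&ids&res&t&morton layout
legacy_member = object_key                 # legacy style: full object key
new_member = object_key.split('&')[-1]     # new style: morton id only
assert new_member == '2000'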
def test_update_id_indices_new_entry_for_id_index(self):
    """
    Test adding new ids to the id index.
    """
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = 20
    bytes[2] = 20
    bytes[5] = 55
    bytes[8] = 1000
    bytes[9] = 55
    expected_ids = ['20', '55', '1000']
    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1
    time_sample = 0
    morton_id = 20
    object_key = self.obj_store.generate_object_key(
        resource, resolution, time_sample, morton_id)

    # Method under test.
    self.obj_ind.update_id_indices(resource, resolution, [object_key], [bytes], version)

    # Confirm each id has the object_key's morton id in its cuboid-set attribute.
    for id in expected_ids:
        key = self.obj_ind.generate_channel_id_key(resource, resolution, id)

        response = self.dynamodb.get_item(
            TableName=self.object_store_config["id_index_table"],
            Key={'channel-id-key': {'S': key},
                 'version': {'N': "{}".format(version)}},
            ConsistentRead=True,
            ReturnConsumedCapacity='NONE')

        self.assertIn('Item', response)
        self.assertIn('cuboid-set', response['Item'])
        self.assertIn('SS', response['Item']['cuboid-set'])
        self.assertIn(object_key.split("&")[-1], response['Item']['cuboid-set']['SS'])
def test_update_id_indices_add_new_cuboids_to_existing_ids(self):
    """
    Test that new cuboid object keys are added to the cuboid-set attributes
    of pre-existing ids.
    """
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = 20
    bytes[2] = 20
    bytes[5] = 55
    bytes[8] = 1000
    bytes[9] = 55
    expected_ids = ['20', '55', '1000']
    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1
    time_sample = 0
    morton_id = 20
    object_key = self.obj_store.generate_object_key(
        resource, resolution, time_sample, morton_id)

    self.obj_ind.update_id_indices(resource, resolution, [object_key], [bytes], version)

    new_bytes = np.zeros(4, dtype='uint64')
    new_bytes[0] = 1000     # Pre-existing id.
    new_bytes[1] = 4444
    new_bytes[3] = 55       # Pre-existing id.

    new_morton_id = 90
    new_object_key = self.obj_store.generate_object_key(
        resource, resolution, time_sample, new_morton_id)

    # Method under test.
    self.obj_ind.update_id_indices(resource, resolution, [new_object_key], [new_bytes], version)

    # Confirm cuboids for id 55.
    key55 = self.obj_ind.generate_channel_id_key(resource, resolution, 55)

    response = self.dynamodb.get_item(
        TableName=self.object_store_config["id_index_table"],
        Key={'channel-id-key': {'S': key55},
             'version': {'N': '{}'.format(version)}},
        ConsistentRead=True,
        ReturnConsumedCapacity='NONE')

    self.assertIn('Item', response)
    self.assertIn('cuboid-set', response['Item'])
    self.assertIn('SS', response['Item']['cuboid-set'])
    self.assertIn(object_key, response['Item']['cuboid-set']['SS'])
    self.assertIn(new_object_key, response['Item']['cuboid-set']['SS'])

    # Confirm cuboids for id 1000.
    key1000 = self.obj_ind.generate_channel_id_key(resource, resolution, 1000)

    response2 = self.dynamodb.get_item(
        TableName=self.object_store_config["id_index_table"],
        Key={'channel-id-key': {'S': key1000},
             'version': {'N': '{}'.format(version)}},
        ConsistentRead=True,
        ReturnConsumedCapacity='NONE')

    self.assertIn('Item', response2)
    self.assertIn('cuboid-set', response2['Item'])
    self.assertIn('SS', response2['Item']['cuboid-set'])
    self.assertIn(object_key, response2['Item']['cuboid-set']['SS'])
    self.assertIn(new_object_key, response2['Item']['cuboid-set']['SS'])
def test_get_tight_bounding_box_multi_cuboids_z_axis(self):
    """
    Get the tight bounding box for an object that spans two cuboids along
    the z axis.
    """
    resolution = 0
    [x_cube_dim, y_cube_dim, z_cube_dim] = CUBOIDSIZE[resolution]

    id = 33333
    # Customize the resource so it writes to its own channel and uses a
    # coord frame large enough to encompass the data written.  This is
    # important for proper loose bounding box calculations.
    data = get_anno_dict(boss_key='col1&exp1&ch100', lookup_key='1&1&100')
    data['coord_frame']['x_stop'] = 10000
    data['coord_frame']['y_stop'] = 10000
    data['coord_frame']['z_stop'] = 10000
    resource = BossResourceBasic(data)
    time_sample = 0
    version = 0
    x_rng = [0, x_cube_dim]
    y_rng = [0, y_cube_dim]
    z_rng = [0, z_cube_dim]
    t_rng = [0, 1]

    cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
    cube1 = Cube.create_cube(resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.zeros()
    cube1.data[0][14][509][508] = id
    cube1.data[0][15][510][509] = id
    cube1.data[0][15][510][510] = id
    cube1.data[0][14][511][511] = id
    pos1 = [10*self.x_dim, 15*self.y_dim, 2*self.z_dim]
    cube1.morton_id = XYZMorton(pos1)

    cube2 = Cube.create_cube(resource, [self.x_dim, self.y_dim, self.z_dim])
    cube2.zeros()
    cube2.data[0][0][509][508] = id
    cube2.data[0][0][510][509] = id
    cube2.data[0][1][510][510] = id
    cube2.data[0][2][511][511] = id
    pos2 = [10*self.x_dim, 15*self.y_dim, 3*self.z_dim]
    cube2.morton_id = XYZMorton(pos2)

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    sp.write_cuboid(resource, pos1, resolution, cube1.data, time_sample_start=0)
    sp.write_cuboid(resource, pos2, resolution, cube2.data, time_sample_start=0)

    # Make sure the cuboid writes completed correctly.
    actual_cube = sp.cutout(resource, pos1, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube1.data, actual_cube.data)
    actual_cube2 = sp.cutout(resource, pos2, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube2.data, actual_cube2.data)

    del cube1
    del actual_cube
    del cube2
    del actual_cube2

    # Method under test.
    actual = sp.get_bounding_box(resource, resolution, id, bb_type='tight')

    expected = {
        'x_range': [pos1[0]+508, pos2[0]+512],
        'y_range': [pos1[1]+509, pos2[1]+512],
        'z_range': [pos1[2]+14, pos2[2]+3],
        't_range': t_rng
    }
    self.assertEqual(expected, actual)
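# --- Illustrative sketch (not part of the test suite) -----------------------
# A tight bounding box computed directly from voxel data with numpy, for
# comparison with sp.get_bounding_box(..., bb_type='tight') above.  The
# function is illustrative, not SpatialDB API; ranges use exclusive upper
# bounds, matching the expected values in the test.
import numpy as np

def tight_bbox(vol, obj_id):
    """vol is indexed (z, y, x); returns per-axis [min, max + 1] voxel ranges."""
    z, y, x = np.nonzero(vol == obj_id)
    return {'x_range': [int(x.min()), int(x.max()) + 1],
            'y_range': [int(y.min()), int(y.max()) + 1],
            'z_range': [int(z.min()), int(z.max()) + 1]}

vol = np.zeros((4, 4, 4), dtype='uint64')
vol[1, 2, 3] = 9
assert tight_bbox(vol, 9) == {'x_range': [3, 4], 'y_range': [2, 3], 'z_range': [1, 2]}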
def test_update_id_indices_add_new_cuboids_to_existing_ids(self):
    """
    Test that new cuboid object keys are added to the cuboid-set attributes
    of pre-existing ids.
    """
    bytes = np.zeros(10, dtype='uint64')
    bytes[1] = 20
    bytes[2] = 20
    bytes[5] = 55
    bytes[8] = 1000
    bytes[9] = 55
    expected_ids = ['20', '55', '1000']
    version = 0
    resource = BossResourceBasic(data=get_anno_dict())
    resolution = 1
    time_sample = 0
    morton_id = 20
    object_key = self.obj_store.generate_object_key(
        resource, resolution, time_sample, morton_id)

    self.obj_ind.update_id_indices(resource, resolution, [object_key], [bytes], version)

    new_bytes = np.zeros(4, dtype='uint64')
    new_bytes[0] = 1000     # Pre-existing id.
    new_bytes[1] = 4444
    new_bytes[3] = 55       # Pre-existing id.

    new_morton_id = 90
    new_object_key = self.obj_store.generate_object_key(
        resource, resolution, time_sample, new_morton_id)

    # Method under test.
    self.obj_ind.update_id_indices(resource, resolution, [new_object_key], [new_bytes], version)

    # Confirm cuboids for id 55.
    key55 = self.obj_ind.generate_channel_id_key(resource, resolution, 55)

    response = self.dynamodb.get_item(
        TableName=self.object_store_config["id_index_table"],
        Key={'channel-id-key': {'S': key55},
             'version': {'N': '{}'.format(version)}},
        ConsistentRead=True,
        ReturnConsumedCapacity='NONE')

    self.assertIn('Item', response)
    self.assertIn('cuboid-set', response['Item'])
    self.assertIn('SS', response['Item']['cuboid-set'])
    # Check that the morton ids are present, since the "new" index style is in use.
    self.assertIn(object_key.split("&")[-1], response['Item']['cuboid-set']['SS'])
    self.assertIn(new_object_key.split("&")[-1], response['Item']['cuboid-set']['SS'])

    # Confirm cuboids for id 1000.
    key1000 = self.obj_ind.generate_channel_id_key(resource, resolution, 1000)

    response2 = self.dynamodb.get_item(
        TableName=self.object_store_config["id_index_table"],
        Key={'channel-id-key': {'S': key1000},
             'version': {'N': '{}'.format(version)}},
        ConsistentRead=True,
        ReturnConsumedCapacity='NONE')

    self.assertIn('Item', response2)
    self.assertIn('cuboid-set', response2['Item'])
    self.assertIn('SS', response2['Item']['cuboid-set'])
    # Check that the morton ids are present, since the "new" index style is in use.
    self.assertIn(object_key.split("&")[-1], response2['Item']['cuboid-set']['SS'])
    self.assertIn(new_object_key.split("&")[-1], response2['Item']['cuboid-set']['SS'])
def get_anno64_dict(self):
    """Method to get the config dictionary for a uint64 annotation resource"""
    data = get_anno_dict()
    return data