예제 #1
0
    def test_basic_resource_get_bit_depth(self):
        """Verify an 8-bit image channel reports a bit depth of 8.

        Returns:
            None

        """
        res = BossResourceBasic(get_image_dict())

        self.assertEqual(res.get_bit_depth(), 8)
예제 #2
0
    def test_basic_resource_numpy_data_type(self):
        """Test basic get numpy data type interface.

        An 8-bit image channel should map to numpy's uint8 type.

        Returns:
            None

        """
        setup_data = get_image_dict()
        resource = BossResourceBasic(setup_data)

        assert resource.get_numpy_data_type() == np.uint8
예제 #3
0
    def test_basic_resource_get_lookup_key(self):
        """Check the lookup key round-trips through the resource.

        Returns:
            None

        """
        cfg = get_image_dict()
        res = BossResourceBasic(cfg)

        self.assertEqual(res.get_lookup_key(), cfg['lookup_key'])
예제 #4
0
    def test_basic_resource_get_data_type(self):
        """Check the datatype string round-trips through the resource.

        Returns:
            None

        """
        cfg = get_image_dict()
        res = BossResourceBasic(cfg)

        self.assertEqual(res.get_data_type(), cfg['channel']['datatype'])
예제 #5
0
    def test_basic_resource_get_iso_level_anisotropic(self):
        """For an anisotropic annotation channel the isotropic level is 3.

        Returns:
            None

        """
        res = BossResourceBasic(get_anno_dict())

        assert res.get_isotropic_level() == 3
예제 #6
0
    def test_basic_resource_get_iso_level_isotropic(self):
        """For an isotropic hierarchy the isotropic level is the base level 0.

        Returns:
            None

        """
        cfg = get_anno_dict()
        cfg["experiment"]['hierarchy_method'] = "isotropic"
        res = BossResourceBasic(cfg)

        assert res.get_isotropic_level() == 0
예제 #7
0
    def test_basic_resource_get_downsampled_voxel_dims_anisotropic(self):
        """Anisotropic downsampling doubles x/y voxel size per level; z is fixed.

        Returns:
            None

        """
        cfg = get_anno_dict()
        res = BossResourceBasic(cfg)

        dims = res.get_downsampled_voxel_dims()
        num_levels = cfg["experiment"]['num_hierarchy_levels']
        self.assertEqual(num_levels, len(dims))
        # Spot-check the base level and a mid-hierarchy level.
        self.assertEqual([4, 4, 35], dims[0])
        self.assertEqual([64, 64, 35], dims[4])
예제 #8
0
    def test_basic_resource_get_downsampled_extent_dims_anisotropic(self):
        """Anisotropic downsampling halves x/y extents per level; z is fixed.

        Returns:
            None

        """
        cfg = get_anno_dict()
        res = BossResourceBasic(cfg)

        extents = res.get_downsampled_extent_dims()
        num_levels = cfg["experiment"]['num_hierarchy_levels']
        self.assertEqual(num_levels, len(extents))
        # Spot-check the base level and a mid-hierarchy level.
        self.assertEqual([2000, 5000, 200], extents[0])
        self.assertEqual([125, 313, 200], extents[4])
예제 #9
0
    def test_basic_resource_col(self):
        """The collection object should mirror the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict()
        col = BossResourceBasic(cfg).get_collection()
        col_cfg = cfg['collection']

        self.assertEqual(col.name, col_cfg['name'])
        self.assertEqual(col.description, col_cfg['description'])
예제 #10
0
    def test_is_downsampled(self):
        """is_downsampled() reflects the channel's downsample_status field.

        Returns:
            None

        """
        cfg = get_image_dict()

        # Default status: not yet downsampled.
        self.assertFalse(BossResourceBasic(cfg).is_downsampled())

        # Flip the status and rebuild the resource.
        cfg['channel']['downsample_status'] = "DOWNSAMPLED"
        self.assertTrue(BossResourceBasic(cfg).is_downsampled())
예제 #11
0
    def test_basic_resource_get_downsampled_voxel_dims_anisotropic_iso(self):
        """With iso=True, z starts scaling once x/y catch up to the z size.

        Returns:
            None

        """
        cfg = get_anno_dict()
        res = BossResourceBasic(cfg)

        dims = res.get_downsampled_voxel_dims(iso=True)
        self.assertEqual(cfg["experiment"]['num_hierarchy_levels'], len(dims))
        expected = [
            [4, 4, 35],
            [8, 8, 35],
            [16, 16, 35],
            [32, 32, 35],
            [64, 64, 70],
            [128, 128, 140],
        ]
        for level, want in enumerate(expected):
            self.assertEqual(want, dims[level])
예제 #12
0
    def test_basic_resource_get_downsampled_voxel_dims_isotropic(self):
        """Isotropic downsampling doubles every axis at each level.

        Returns:
            None

        """
        cfg = get_anno_dict()
        frame = cfg['coord_frame']
        frame['x_voxel_size'] = 6
        frame['y_voxel_size'] = 6
        frame['z_voxel_size'] = 6
        cfg['experiment']['hierarchy_method'] = "isotropic"
        res = BossResourceBasic(cfg)

        dims = res.get_downsampled_voxel_dims()
        self.assertEqual(cfg["experiment"]['num_hierarchy_levels'], len(dims))
        self.assertEqual([6, 6, 6], dims[0])
        self.assertEqual([48, 48, 48], dims[3])
예제 #13
0
    def test_basic_resource_experiment(self):
        """The experiment object should mirror the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict()
        exp = BossResourceBasic(cfg).get_experiment()
        exp_cfg = cfg['experiment']

        self.assertEqual(exp.name, exp_cfg['name'])
        self.assertEqual(exp.description, exp_cfg['description'])
        self.assertEqual(exp.num_hierarchy_levels, exp_cfg['num_hierarchy_levels'])
        self.assertEqual(exp.hierarchy_method, exp_cfg['hierarchy_method'])
        self.assertEqual(exp.num_time_samples, exp_cfg['num_time_samples'])
        self.assertEqual(exp.time_step, exp_cfg['time_step'])
        self.assertEqual(exp.time_step_unit, exp_cfg['time_step_unit'])
예제 #14
0
    def test_basic_resource_get_downsampled_extent_dims_isotropic(self):
        """Isotropic downsampling halves every extent axis at each level.

        Returns:
            None

        """
        cfg = get_anno_dict()
        frame = cfg['coord_frame']
        frame['x_voxel_size'] = 6
        frame['y_voxel_size'] = 6
        frame['z_voxel_size'] = 6
        cfg['experiment']['hierarchy_method'] = "isotropic"
        res = BossResourceBasic(cfg)

        extents = res.get_downsampled_extent_dims()
        self.assertEqual(cfg["experiment"]['num_hierarchy_levels'], len(extents))
        self.assertEqual([2000, 5000, 200], extents[0])
        self.assertEqual([1000, 2500, 100], extents[1])
        self.assertEqual([250, 625, 25], extents[3])
예제 #15
0
    def test_basic_resource_annotation_no_time(self):
        """Test basic get channel interface for an annotation channel.

        An annotation channel is not an image channel, and its properties
        should mirror the setup dictionary.

        Returns:
            None

        """
        setup_data = get_anno_dict()
        resource = BossResourceBasic(setup_data)

        channel = resource.get_channel()
        assert channel.is_image() is False
        assert channel.name == setup_data['channel']['name']
        assert channel.description == setup_data['channel']['description']
        assert channel.datatype == setup_data['channel']['datatype']
        assert channel.base_resolution == setup_data['channel']['base_resolution']
        assert channel.sources == setup_data['channel']['sources']
        assert channel.related == setup_data['channel']['related']
        assert channel.default_time_sample == setup_data['channel']['default_time_sample']
예제 #16
0
    def setUpClass(cls):
        """Setup the redis client at the start of the test"""
        cls.data = get_image_dict()
        cls.resource = BossResourceBasic(cls.data)

        # Redis test DB 1, raw (byte) responses.
        redis_kwargs = {'host': 'localhost',
                        'port': 6379,
                        'db': 1,
                        'decode_responses': False}
        cls.state_client = redis.StrictRedis(**redis_kwargs)

        cls.config_data = {"state_client": cls.state_client}
예제 #17
0
    def test_basic_resource_to_dict(self):
        """Round-trip the resource through to_dict and compare sections.

        Returns:
            None

        """
        cfg = get_image_dict()
        serialized = BossResourceBasic(cfg).to_dict()

        for section in ('channel', 'collection', 'experiment'):
            self.assertEqual(serialized[section], cfg[section])

        self.assertEqual(serialized['lookup_key'], '4&3&2')
        self.assertEqual(serialized['boss_key'], 'col1&exp1&ch1')
예제 #18
0
    def test_factory(self):
        """Cube.create_cube builds a time-series ImageCube8 for uint8 data."""

        res = BossResourceBasic(get_image_dict())

        cube = Cube.create_cube(res, [30, 20, 13], [0, 15])
        self.assertIs(isinstance(cube, ImageCube8), True)
        # The factory stores dimensions in z/y/x order.
        self.assertEqual(cube.cube_size, [13, 20, 30])
        self.assertIs(cube.is_time_series, True)
        self.assertEqual(cube.time_range, [0, 15])
예제 #19
0
    def setUpClass(cls):
        """Setup the redis client at the start of the test"""
        cls.data = get_image_dict()
        cls.resource = BossResourceBasic(cls.data)

        cls.config = configuration.BossConfig()

        # Cache redis client: test DB 1, raw (byte) responses.
        cache_host = cls.config["aws"]["cache"]
        cls.cache_client = redis.StrictRedis(host=cache_host,
                                             port=6379,
                                             db=1,
                                             decode_responses=False)

        cls.config_data = {"cache_client": cls.cache_client, "read_timeout": 86400}
예제 #20
0
    def test_basic_resource_to_dict(self):
        """Serialize a resource with to_dict and verify each section survives.

        Returns:
            None

        """
        cfg = get_image_dict()
        serialized = BossResourceBasic(cfg).to_dict()

        self.assertEqual(serialized['channel'], cfg['channel'])
        self.assertEqual(serialized['collection'], cfg['collection'])
        self.assertEqual(serialized['experiment'], cfg['experiment'])

        self.assertEqual(serialized['lookup_key'], '4&3&2')
        self.assertEqual(serialized['boss_key'], 'col1&exp1&ch1')
예제 #21
0
    def test_basic_resource_channel_with_related(self):
        """A channel with related channels mirrors the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict()
        cfg['channel']['related'] = ["ch_2", "ch_3"]
        channel = BossResourceBasic(cfg).get_channel()
        chan_cfg = cfg['channel']

        self.assertIs(channel.is_image(), True)
        for attr in ('name', 'description', 'datatype', 'base_resolution',
                     'sources', 'related', 'default_time_sample'):
            self.assertEqual(getattr(channel, attr), chan_cfg[attr])
예제 #22
0
    def test_basic_resource_channel_no_time(self):
        """An image channel's properties mirror the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict()
        channel = BossResourceBasic(cfg).get_channel()
        chan_cfg = cfg['channel']

        self.assertIs(channel.is_image(), True)
        for attr in ('name', 'description', 'datatype', 'base_resolution',
                     'sources', 'related', 'default_time_sample',
                     'downsample_status'):
            self.assertEqual(getattr(channel, attr), chan_cfg[attr])
예제 #23
0
    def test_factory_no_time(self):
        """Without a time range, the factory yields a non-time-series
        AnnotateCube64 for annotation data."""

        res = BossResourceBasic(get_anno_dict())

        cube = Cube.create_cube(res, [30, 20, 13])
        self.assertIs(isinstance(cube, AnnotateCube64), True)
        # Dimensions are stored in z/y/x order.
        self.assertEqual(cube.cube_size, [13, 20, 30])
        self.assertIs(cube.is_time_series, False)
        self.assertEqual(cube.time_range, [0, 1])
예제 #24
0
    def test_factory_no_time(self):
        """Without a time range, the factory yields a non-time-series
        ImageCube16 for uint16 image data."""

        cfg = get_image_dict()
        cfg['channel']['datatype'] = 'uint16'
        res = BossResourceBasic(cfg)

        cube = Cube.create_cube(res, [30, 20, 13])
        self.assertIs(isinstance(cube, ImageCube16), True)
        # Dimensions are stored in z/y/x order.
        self.assertEqual(cube.cube_size, [13, 20, 30])
        self.assertIs(cube.is_time_series, False)
        self.assertEqual(cube.time_range, [0, 1])
예제 #25
0
    def test_get_tight_bounding_box_single_cuboid(self):
        """
        Get the tight bounding box for an object that exists within a single cuboid.

        Writes four voxels of a single annotation id into one cuboid, flushes
        it through SpatialDB, then checks that the tight bounding box exactly
        encloses those voxels.
        """
        resolution = 0

        # NOTE: renamed from `id` to avoid shadowing the builtin.
        obj_id = 33333
        id_as_str = '33333'
        # Customize resource with so it writes to its own channel and uses a
        # coord frame large enough to encompass the data written.  This is
        # important for proper loose bounding box calculations.
        data = get_anno_dict(boss_key='col1&exp1&ch50', lookup_key='1&1&50')
        data['coord_frame']['x_stop'] = 10000
        data['coord_frame']['y_stop'] = 10000
        data['coord_frame']['z_stop'] = 10000
        resource = BossResourceBasic(data)
        t_rng = [0, 1]

        cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
        cube1 = Cube.create_cube(resource, [self.x_dim, self.y_dim, self.z_dim])
        cube1.zeros()
        # Voxels indexed as [t][z][y][x]; span x 104-105, y 500-503, z 14-15.
        cube1.data[0][14][500][104] = obj_id
        cube1.data[0][15][501][105] = obj_id
        cube1.data[0][15][502][104] = obj_id
        cube1.data[0][14][503][105] = obj_id

        pos1 = [10*self.x_dim, 15*self.y_dim, 2*self.z_dim]
        cube1.morton_id = XYZMorton(pos1)

        sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
        sp.write_cuboid(resource, pos1, resolution, cube1.data, time_sample_start=0)

        # Make sure cube write complete and correct.
        actual_cube = sp.cutout(resource, pos1, cube_dim_tuple, resolution)
        np.testing.assert_array_equal(cube1.data, actual_cube.data)

        # Method under test.
        actual = sp.get_bounding_box(resource, resolution, id_as_str, bb_type='tight')

        # Ranges are half-open: [min, max + 1).
        expected = {
            'x_range': [pos1[0]+104, pos1[0]+106],
            'y_range': [pos1[1]+500, pos1[1]+504],
            'z_range': [pos1[2]+14, pos1[2]+16],
            't_range': t_rng
        }

        self.assertEqual(expected, actual)
예제 #26
0
    def test_update_id_indices_replaces_existing_entry_in_cuboid_index(self):
        """
        Test calling update_id_indices() replaces existing id set in the s3 cuboid index.

        Id set should be replaced because the entire cuboid is rewritten to s3
        before this method is called.  Thus, the ids in the cuboid data are the
        only ids that should exist in the index for that cuboid.
        """
        # NOTE: renamed from `bytes` to avoid shadowing the builtin.
        initial_ids = np.zeros(10, dtype='uint64')
        initial_ids[1] = 20
        initial_ids[2] = 20
        initial_ids[5] = 55
        initial_ids[8] = 1000
        initial_ids[9] = 55
        key = 'hash_coll_exp_chan_key_existing'
        version = 0
        resource = BossResourceBasic(data=get_anno_dict())
        resolution = 1

        # Place initial ids for cuboid.
        self.obj_ind.update_id_indices(resource, resolution, [key], [initial_ids],
                                       version)

        new_bytes = np.zeros(4, dtype='uint64')
        new_bytes[0] = 1000
        new_bytes[1] = 4444
        new_bytes[3] = 55

        # Test adding one new id to the index.
        self.obj_ind.update_id_indices(resource, resolution, [key],
                                       [new_bytes], version)

        response = self.dynamodb.get_item(
            TableName=self.object_store_config["s3_index_table"],
            Key={
                'object-key': {
                    'S': key
                },
                'version-node': {
                    'N': "{}".format(version)
                }
            },
            ConsistentRead=True,
            ReturnConsumedCapacity='NONE')

        self.assertIn('Item', response)
        self.assertIn('id-set', response['Item'])
        self.assertIn('NS', response['Item']['id-set'])

        # Id 20 should no longer be present.
        expected = ['55', '1000', '4444']
        self.assertCountEqual(expected, response['Item']['id-set']['NS'])
예제 #27
0
    def test_basic_resource_experiment(self):
        """The experiment object mirrors the setup dictionary's fields.

        Returns:
            None

        """
        cfg = get_image_dict()
        exp = BossResourceBasic(cfg).get_experiment()
        exp_cfg = cfg['experiment']

        for attr in ('name', 'description', 'num_hierarchy_levels',
                     'hierarchy_method', 'num_time_samples', 'time_step',
                     'time_step_unit'):
            self.assertEqual(getattr(exp, attr), exp_cfg[attr])
예제 #28
0
    def setUpClass(cls):
        """Setup the redis client at the start of the test"""
        cls.data = get_image_dict()
        cls.resource = BossResourceBasic(cls.data)

        cls.config = load_test_config_file()

        # Cache-state redis client: test DB 1, raw (byte) responses.
        state_host = cls.config["aws"]["cache-state"]
        cls.state_client = redis.StrictRedis(host=state_host,
                                             port=6379,
                                             db=1,
                                             decode_responses=False)

        cls.config_data = {"state_client": cls.state_client}
예제 #29
0
    def test_get_cuboids(self):
        """get_cuboids() returns every cuboid key whose index contains the id."""
        resource = BossResourceBasic(data=get_anno_dict())
        # NOTE: renamed from `id`/`bytes` to avoid shadowing builtins; also
        # removed a redundant second construction of the same resource.
        obj_id = 22222
        cuboid_ids = np.zeros(10, dtype='uint64')
        cuboid_ids[1] = obj_id
        resolution = 1
        key = AWSObjectStore.generate_object_key(resource, resolution, 0, 56)
        version = 0

        new_cuboid_ids = np.zeros(4, dtype='uint64')
        new_cuboid_ids[0] = obj_id  # Pre-existing id.
        new_key = AWSObjectStore.generate_object_key(resource, resolution, 0,
                                                     59)

        self.obj_ind.update_id_indices(resource, resolution, [key, new_key],
                                       [cuboid_ids, new_cuboid_ids], version)

        # Method under test.
        actual = self.obj_ind.get_cuboids(resource, resolution, obj_id)

        expected = [key, new_key]
        self.assertCountEqual(expected, actual)
예제 #30
0
    def test_legacy_cuboids_in_id_index(self):
        """Test to verify that legacy and "new" cuboid indices in the ID index table both work

        Returns:
            None

        """
        # NOTE: renamed from `bytes` to avoid shadowing the builtin.
        id_data = np.zeros(10, dtype='uint64')
        id_data[1] = 222
        id_data[2] = 222
        id_data[5] = 555
        id_data[8] = 1001
        expected_ids = ['222', '555', '1001', '12345']
        version = 0
        resource = BossResourceBasic(data=get_anno_dict())
        resolution = 1
        time_sample = 0
        morton_id = 2000
        object_key = AWSObjectStore.generate_object_key(
            resource, resolution, time_sample, morton_id)

        # Write a legacy index
        self.dynamodb.update_item(
            TableName=self.object_store_config["id_index_table"],
            Key={
                'channel-id-key': {
                    'S':
                    self.obj_ind.generate_channel_id_key(
                        resource, resolution, 12345)
                },
                'version': {
                    'N': "{}".format(version)
                }
            },
            UpdateExpression='ADD #cuboidset :objkey',
            ExpressionAttributeNames={'#cuboidset': 'cuboid-set'},
            ExpressionAttributeValues={':objkey': {
                'SS': [object_key]
            }},
            ReturnConsumedCapacity='NONE')

        # Add new index values
        self.obj_ind.update_id_indices(resource, resolution, [object_key],
                                       [id_data], version)

        # Confirm each id has the object_key in its cuboid-set attribute.
        for obj_id in expected_ids:
            cuboid_object_keys = self.obj_ind.get_cuboids(
                resource, resolution, obj_id)
            self.assertEqual(cuboid_object_keys[0], object_key)
예제 #31
0
    def test_basic_resource_coord_frame(self):
        """The coordinate frame object mirrors the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict()
        coord = BossResourceBasic(cfg).get_coord_frame()
        frame_cfg = cfg['coord_frame']

        for attr in ('name', 'description',
                     'x_start', 'x_stop', 'y_start', 'y_stop',
                     'z_start', 'z_stop',
                     'x_voxel_size', 'y_voxel_size', 'z_voxel_size',
                     'voxel_unit'):
            self.assertEqual(getattr(coord, attr), frame_cfg[attr])
예제 #32
0
    def test_basic_resource_coord_frame(self):
        """Every coordinate frame attribute matches the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict()
        res = BossResourceBasic(cfg)
        frame_cfg = cfg['coord_frame']

        coord = res.get_coord_frame()

        self.assertEqual(coord.name, frame_cfg['name'])
        self.assertEqual(coord.description, frame_cfg['description'])
        self.assertEqual(coord.x_start, frame_cfg['x_start'])
        self.assertEqual(coord.x_stop, frame_cfg['x_stop'])
        self.assertEqual(coord.y_start, frame_cfg['y_start'])
        self.assertEqual(coord.y_stop, frame_cfg['y_stop'])
        self.assertEqual(coord.z_start, frame_cfg['z_start'])
        self.assertEqual(coord.z_stop, frame_cfg['z_stop'])
        self.assertEqual(coord.x_voxel_size, frame_cfg['x_voxel_size'])
        self.assertEqual(coord.y_voxel_size, frame_cfg['y_voxel_size'])
        self.assertEqual(coord.z_voxel_size, frame_cfg['z_voxel_size'])
        self.assertEqual(coord.voxel_unit, frame_cfg['voxel_unit'])
예제 #33
0
    def test_basic_resource_channel_cloudvolume_uint16(self):
        """A uint16 cloudvolume-backed channel mirrors the setup dictionary.

        Returns:
            None

        """
        cfg = get_image_dict(datatype="uint16", storage_type='cloudvol')
        channel = BossResourceBasic(cfg).get_channel()
        chan_cfg = cfg['channel']

        self.assertIs(channel.is_image(), True)
        self.assertIs(channel.is_cloudvolume(), True)
        for attr in ('name', 'description', 'datatype', 'base_resolution',
                     'sources', 'related', 'default_time_sample',
                     'downsample_status', 'storage_type', 'bucket', 'cv_path'):
            self.assertEqual(getattr(channel, attr), chan_cfg[attr])
    def setUp(self):
        # Suppress ResourceWarning messages about unclosed connections.
        warnings.simplefilter('ignore')

        spdb = SpatialDB(self.kvio_config, self.state_config,
                         self.object_store_config)
        self.dead_letter = DeadLetterDaemon('foo')
        self.dead_letter.set_spatialdb(spdb)

        self.setup_helper = SetupTests()
        self.data = self.setup_helper.get_image8_dict()
        self.resource = BossResourceBasic(self.data)

        # Make the daemon look at the flush queue so we don't need to create
        # a deadletter queue for testing.
        flush_queue = self.object_store_config['s3_flush_queue']
        self.dead_letter.dead_letter_queue = flush_queue
예제 #35
0
    def test_generate_object_keys_iso_isotropic(self):
        """Test to create object key when asking for isotropic data, in an isotropic channel"""
        data = self.setup_helper.get_image8_dict()
        data['experiment']['hierarchy_method'] = "isotropic"
        data['coord_frame']['z_voxel_size'] = 4
        resource = BossResourceBasic(data)

        # NOTE: renamed the local from `os` to avoid shadowing the os module.
        obj_store = AWSObjectStore(self.object_store_config)
        object_keys = obj_store.generate_object_key(resource, 0, 2, 56, iso=True)
        assert object_keys == '631424bf68302b683a0be521101c192b&4&3&2&0&2&56'

        object_keys = obj_store.generate_object_key(resource, 3, 2, 56, iso=True)
        assert object_keys == 'cf934dccf1764290fd3db83b9b46b07b&4&3&2&3&2&56'

        object_keys = obj_store.generate_object_key(resource, 5, 2, 56, iso=True)
        assert object_keys == '831adead1bc05b24d0799206ee9fe832&4&3&2&5&2&56'
예제 #36
0
    def test_update_id_indices_new_entry_for_id_index(self):
        """
        Test adding new ids to the id index.
        """
        # NOTE: renamed from `bytes` to avoid shadowing the builtin.
        id_data = np.zeros(10, dtype='uint64')
        id_data[1] = 20
        id_data[2] = 20
        id_data[5] = 55
        id_data[8] = 1000
        id_data[9] = 55
        expected_ids = ['20', '55', '1000']
        version = 0
        resource = BossResourceBasic(data=get_anno_dict())
        resolution = 1
        time_sample = 0
        morton_id = 20
        object_key = AWSObjectStore.generate_object_key(
            resource, resolution, time_sample, morton_id)

        # Method under test.
        self.obj_ind.update_id_indices(resource, resolution, [object_key],
                                       [id_data], version)

        # Confirm each id has the object_key in its cuboid-set attribute.
        for obj_id in expected_ids:
            key = self.obj_ind.generate_channel_id_key(resource, resolution,
                                                       obj_id)

            response = self.dynamodb.get_item(
                TableName=self.object_store_config["id_index_table"],
                Key={
                    'channel-id-key': {
                        'S': key
                    },
                    'version': {
                        'N': "{}".format(version)
                    }
                },
                ConsistentRead=True,
                ReturnConsumedCapacity='NONE')

            self.assertIn('Item', response)
            self.assertIn('cuboid-set', response['Item'])
            self.assertIn('SS', response['Item']['cuboid-set'])
            self.assertIn(
                object_key.split("&")[-1],
                response['Item']['cuboid-set']['SS'])
예제 #37
0
    def setUp(self):
        """ Copy params from the Layer setUpClass
        """
        # Setup Data
        #self.data = self.layer.setup_helper.get_anno64_dict()
        self.data = get_anno_dict()

        # Make the coord frame extra large for this test suite.
        for axis_stop in ('x_stop', 'y_stop', 'z_stop'):
            self.data['coord_frame'][axis_stop] = 10000
        self.resource = BossResourceBasic(self.data)

        # Setup config
        layer = self.layer
        self.kvio_config = layer.kvio_config
        self.state_config = layer.state_config
        self.object_store_config = layer.object_store_config
    def setUpParams(self):
        """Build resource and service configuration for integration tests.

        Creates an 8-bit image resource and the kvio/state/object-store
        config dictionaries, using real (non-mocked) AWS resources with
        uniquely-named integration-test tables, bucket, and queue.
        """
        self.setup_helper = SetupTests()
        # Don't use mock Amazon resources.
        self.setup_helper.mock = False

        self.data = self.setup_helper.get_image8_dict()
        self.resource = BossResourceBasic(self.data)

        self.config = configuration.BossConfig()

        # kvio settings, 1 is the test DB.
        self.kvio_config = {
            "cache_host": self.config['aws']['cache'],
            "cache_db": 1,
            "read_timeout": 86400
        }

        # state settings, 1 is the test DB.
        self.state_config = {
            "cache_state_host": self.config['aws']['cache-state'],
            "cache_state_db": 1
        }

        # object store settings
        # Strip the bucket name, keeping only the domain suffix, for the
        # SQS queue name (dots are not allowed in queue names).
        _, self.domain = self.config['aws']['cuboid_bucket'].split('.', 1)
        self.s3_flush_queue_name = "intTest.S3FlushQueue.{}".format(
            self.domain).replace('.', '-')
        self.object_store_config = {
            "s3_flush_queue":
            '',  # This will get updated after the queue is created.
            "cuboid_bucket":
            "intTest{}.{}".format(random.randint(0, 9999),
                                  self.config['aws']['cuboid_bucket']),
            "page_in_lambda_function":
            self.config['lambda']['page_in_function'],
            "page_out_lambda_function":
            self.config['lambda']['flush_function'],
            "s3_index_table":
            "intTest.{}".format(self.config['aws']['s3-index-table']),
            "id_index_table":
            "intTest.{}".format(self.config['aws']['id-index-table']),
            "id_count_table":
            "intTest.{}".format(self.config['aws']['id-count-table'])
        }
예제 #39
0
    def test_get_cuboids(self):
        """get_cuboids() returns all cuboid keys indexed for a given id."""
        # NOTE: renamed from `id`/`bytes` to avoid shadowing builtins.
        obj_id = 22222
        cuboid_ids = np.zeros(10, dtype='uint64')
        cuboid_ids[1] = obj_id
        key = 'hash_coll_exp_chan_key_cuboids'
        version = 0
        resource = BossResourceBasic(data=get_anno_dict())
        resolution = 1

        new_cuboid_ids = np.zeros(4, dtype='uint64')
        new_cuboid_ids[0] = obj_id  # Pre-existing id.
        new_key = 'hash_coll_exp_chan_key_cuboids2'

        self.obj_ind.update_id_indices(resource, resolution, [key, new_key],
                                       [cuboid_ids, new_cuboid_ids], version)

        # Method under test.
        actual = self.obj_ind.get_cuboids(resource, resolution, obj_id)

        expected = [key, new_key]
        self.assertCountEqual(expected, actual)
예제 #40
0
    def test_is_downsampled(self):
        """is_downsampled() tracks the channel's downsample_status value.

        Returns:
            None

        """
        cfg = get_image_dict()
        res = BossResourceBasic(cfg)

        # Fresh channel: not downsampled yet.
        self.assertFalse(res.is_downsampled())

        # Mark the channel downsampled and rebuild.
        cfg['channel']['downsample_status'] = "DOWNSAMPLED"
        res = BossResourceBasic(cfg)

        self.assertTrue(res.is_downsampled())
예제 #41
0
    def setUpClass(cls):
        """ Create a diction of configuration values for the test resource. """
        # Create resource
        cls.setup_helper = SetupTests()
        cls.data = cls.setup_helper.get_image8_dict()
        cls.resource = BossResourceBasic(cls.data)

        # Load config
        cls.config = configuration.BossConfig()
        # Static object-store config; table/bucket names are test fixtures
        # backed by the mocked AWS resources created below.
        cls.object_store_config = {"s3_flush_queue": 'https://mytestqueue.com',
                                   "cuboid_bucket": "test_bucket",
                                   "page_in_lambda_function": "page_in.test.boss",
                                   "page_out_lambda_function": "page_out.test.boss",
                                   "s3_index_table": "test_table",
                                   "id_index_table": "test_id_table",
                                   "id_count_table": "test_count_table",
                                   }

        # Create AWS Resources needed for tests
        # Mocking must start before creating the table/bucket so the calls
        # hit the mock backend rather than real AWS.
        cls.setup_helper.start_mocking()
        cls.setup_helper.create_index_table(cls.object_store_config["s3_index_table"], cls.setup_helper.DYNAMODB_SCHEMA)
        cls.setup_helper.create_cuboid_bucket(cls.object_store_config["cuboid_bucket"])
예제 #42
0
    def test_too_many_ids_in_cuboid(self):
        """
        Test error handling when a cuboid has more unique ids than DynamoDB
        can support.

        Expectation: the index update is skipped, logged, and False is
        returned to the caller (no exception).
        """
        version = 0
        resolution = 0
        time_sample = 0
        resource = BossResourceBasic(data=get_anno_dict())
        morton = XYZMorton([0, 0, 0])
        obj_keys = [
            AWSObjectStore.generate_object_key(resource, resolution,
                                               time_sample, morton)
        ]
        # Random uint64 values in [0, 2000000) over 16*512*512 voxels —
        # far more unique ids than DynamoDB allows in one item.
        cubes = [
            np.random.randint(2000000, size=(16, 512, 512), dtype='uint64')
        ]

        result = self.obj_ind.update_id_indices(resource, resolution,
                                                obj_keys, cubes, version)
        self.assertFalse(result)
예제 #43
0
 def test_too_many_ids_in_cuboid(self):
     """
     Test error handling when a cuboid has more unique ids than DynamoDB
     can support.

     This variant expects update_id_indices() to raise an SpdbError with
     error code OBJECT_STORE_ERROR (contrast with the variant elsewhere in
     this file that expects a False return instead).
     """
     version = 0
     resolution = 0
     time_sample = 0
     resource = BossResourceBasic(data=get_anno_dict())
     mortonid = XYZMorton([0, 0, 0])
     obj_keys = [
         self.obj_store.generate_object_key(resource, resolution,
                                            time_sample, mortonid)
     ]
     # Random uint64 values in [0, 2000000) over 16*512*512 voxels — far
     # more unique ids than a single DynamoDB item can hold.
     cubes = [
         np.random.randint(2000000, size=(16, 512, 512), dtype='uint64')
     ]
     with self.assertRaises(SpdbError) as ex:
         self.obj_ind.update_id_indices(resource, resolution, obj_keys,
                                        cubes, version)
     self.assertEqual(ErrorCodes.OBJECT_STORE_ERROR,
                      ex.exception.error_code)
예제 #44
0
    def setUp(self, fake_get_region):
        """Set everything up for testing.

        Args:
            fake_get_region: mock injected by a @patch decorator (outside
                this snippet); its return value is forced to 'us-east-1'.
        """
        fake_get_region.return_value = 'us-east-1'
        self.setup_helper = SetupTests()
        self.setup_helper.mock = True

        self.data = self.setup_helper.get_image16_dict()
        self.resource = BossResourceBasic(self.data)

        # Cache (kvio) settings.
        self.cache_client = redis.StrictRedis(
            host='https://mytestcache.com', port=6379, db=1,
            decode_responses=False)
        self.kvio_config = {"cache_client": self.cache_client,
                            "read_timeout": 86400}

        # Cache-state settings.
        self.state_client = redis.StrictRedis(
            host='https://mytestcache2.com', port=6379, db=1,
            decode_responses=False)
        self.state_config = {"state_client": self.state_client}

        # Object store settings.
        self.object_store_config = {
            "s3_flush_queue": 'https://mytestqueue.com',
            "cuboid_bucket": "test_bucket",
            "page_in_lambda_function": "page_in.test.boss",
            "page_out_lambda_function": "page_out.test.boss",
            "s3_index_table": "test_table",
            "id_index_table": "test_id_table",
            "id_count_table": "test_count_table",
        }

        # Create mocked AWS resources needed for the tests; the setup
        # helpers resolve the region themselves, so patch it there too.
        self.setup_helper.start_mocking()
        with patch('spdb.spatialdb.test.setup.get_region') as region_mock:
            region_mock.return_value = 'us-east-1'
            self.setup_helper.create_index_table(
                self.object_store_config["s3_index_table"],
                self.setup_helper.DYNAMODB_SCHEMA)
            self.setup_helper.create_cuboid_bucket(
                self.object_store_config["cuboid_bucket"])
예제 #45
0
    def test_cutout_no_time_multi_unaligned_hit_iso_above(self):
        """Test write_cuboid and cutout methods - no time - multi - unaligned - hit - isotropic, above iso fork"""
        data = self.data
        data["channel"]["base_resolution"] = 5
        resource = BossResourceBasic(data)

        # Random source cube at an unaligned corner.
        cube1 = Cube.create_cube(resource, [400, 400, 8])
        cube1.random()
        cube1.morton_id = 0

        sp = SpatialDB(self.kvio_config, self.state_config,
                       self.object_store_config)
        sp.write_cuboid(resource, (200, 600, 3), 5, cube1.data, iso=True)

        # Read twice: the second cutout should be served from cache and
        # must return identical data.
        for _ in range(2):
            cube2 = sp.cutout(resource, (200, 600, 3), (400, 400, 8), 5,
                              iso=True)
            np.testing.assert_array_equal(cube1.data, cube2.data)
예제 #46
0
    def setUp(self):
        """Per-test setup: pull shared config from the nose2 layer and
        flush both Redis databases so every test starts clean."""
        # Data/resource for this test, from the layer-based setup.
        self.data = self.layer.setup_helper.get_image8_dict()
        self.resource = BossResourceBasic(self.data)

        # Shared configuration built once by the layer.
        self.kvio_config = self.layer.kvio_config
        self.state_config = self.layer.state_config
        self.object_store_config = self.layer.object_store_config

        # Flush the cache and cache-state Redis instances (same db/port).
        for host in (self.kvio_config['cache_host'],
                     self.state_config['cache_state_host']):
            redis.StrictRedis(host=host, port=6379, db=1,
                              decode_responses=False).flushdb()
예제 #47
0
    def test_update_id_indices_new_entry_in_cuboid_index(self):
        """
        Test adding ids to new cuboids in the s3 cuboid index.

        The non-zero ids in the cuboid data should appear, deduplicated,
        in the DynamoDB item's 'id-set' number-set attribute.
        """
        # Renamed from `bytes` to avoid shadowing the Python builtin.
        id_data = np.zeros(10, dtype='uint64')
        id_data[1] = 20
        id_data[2] = 20
        id_data[5] = 55
        id_data[8] = 1000
        id_data[9] = 55
        # Duplicates collapse to one entry; zeros are not indexed.
        expected = ['20', '55', '1000']
        key = 'hash_coll_exp_chan_key'
        version = 0
        resource = BossResourceBasic(data=get_anno_dict())
        resolution = 1

        # Method under test.
        self.obj_ind.update_id_indices(resource, resolution, [key], [id_data],
                                       version)

        response = self.dynamodb.get_item(
            TableName=self.object_store_config["s3_index_table"],
            Key={
                'object-key': {
                    'S': key
                },
                'version-node': {
                    'N': "{}".format(version)
                }
            },
            ConsistentRead=True,
            ReturnConsumedCapacity='NONE')

        self.assertIn('Item', response)
        self.assertIn('id-set', response['Item'])
        self.assertIn('NS', response['Item']['id-set'])
        self.assertCountEqual(expected, response['Item']['id-set']['NS'])
예제 #48
0
    def test_reserve_id_wrong_type(self):
        """Reserving ids on a non-annotation (image) channel must raise SpdbError."""
        resource = BossResourceBasic(self.setup_helper.get_image8_dict())

        with self.assertRaises(SpdbError):
            self.obj_ind.reserve_ids(resource, 10)
예제 #49
0
    def test_basic_resource_from_json_annotation(self):
        """Round-trip an annotation resource through to_json()/from_json()
        and verify every field survives.

        Returns:
            None

        """
        setup_data = get_anno_dict()
        resource2 = BossResourceBasic()
        resource2.from_json(BossResourceBasic(setup_data).to_json())

        # Check Collection
        col = resource2.get_collection()
        assert col.name == setup_data['collection']['name']
        assert col.description == setup_data['collection']['description']

        # Check coord frame
        coord = resource2.get_coord_frame()
        for attr in ('name', 'description', 'x_start', 'x_stop', 'y_start',
                     'y_stop', 'z_start', 'z_stop', 'x_voxel_size',
                     'y_voxel_size', 'z_voxel_size', 'voxel_unit'):
            assert getattr(coord, attr) == setup_data['coord_frame'][attr]

        # Check exp
        exp = resource2.get_experiment()
        for attr in ('name', 'description', 'num_hierarchy_levels',
                     'hierarchy_method', 'num_time_samples', 'time_step',
                     'time_step_unit'):
            assert getattr(exp, attr) == setup_data['experiment'][attr]

        # Check channel (annotation channel, so not an image)
        channel = resource2.get_channel()
        assert channel.is_image() is False
        for attr in ('name', 'description', 'datatype', 'base_resolution',
                     'sources', 'related', 'default_time_sample'):
            assert getattr(channel, attr) == setup_data['channel'][attr]

        # check keys
        assert resource2.get_lookup_key() == setup_data['lookup_key']
        assert resource2.get_boss_key() == setup_data['boss_key']
예제 #50
0
        # Load the message body
        flush_msg_data = json.loads(flush_msg_data['Body'])

        print("Message: {}".format(flush_msg_data))

        # Setup SPDB instance
        sp = SpatialDB(flush_msg_data["config"]["kv_config"],
                       flush_msg_data["config"]["state_config"],
                       flush_msg_data["config"]["object_store_config"])

        # Get the write-cuboid key to flush
        write_cuboid_key = flush_msg_data['write_cuboid_key']
        print("Flushing {} to S3".format(write_cuboid_key))

        # Create resource instance
        resource = BossResourceBasic()
        resource.from_dict(flush_msg_data["resource"])
    else:
        # Nothing to flush. Exit.
        print("No flush message available")
        sys.exit(0)

    # Check if cuboid is in S3
    object_keys = sp.objectio.write_cuboid_to_object_keys([write_cuboid_key])
    cache_key = sp.kvio.write_cuboid_key_to_cache_key(write_cuboid_key)
    exist_keys, missing_keys = sp.objectio.cuboids_exist(cache_key)

    print("write key: {}".format(write_cuboid_key))
    print("object key: {}".format(object_keys[0]))
    print("cache key: {}".format(cache_key))
예제 #51
0
def handler(event, context):
    """Lambda entry point: ingest one chunk of uploaded tiles into cuboids.

    Fetches the chunk's tiles from the tile bucket, stacks them into a 3D
    array, slices that into cuboids, writes the cuboids to S3 plus the
    cuboid index, then asynchronously invokes lambdas to delete the tiles
    and the tile-index entry.

    Args:
        event: Lambda event. Either an SQS-triggered payload (has
            'Records') or a direct async invoke carrying 'chunk_key' and
            'ingest_job'.
        context: Lambda context; context.function_name is used to derive
            the names of the delete lambdas.
    """
    # Load settings
    SETTINGS = BossSettings.load()

    # Used as a guard against trying to delete the SQS message when lambda is
    # triggered by SQS.
    sqs_triggered = 'Records' in event and len(event['Records']) > 0

    if sqs_triggered :
        # Lambda invoked by an SQS trigger.
        msg_data = json.loads(event['Records'][0]['body'])
        # Load the project info from the chunk key you are processing
        chunk_key = msg_data['chunk_key']
        proj_info = BossIngestProj.fromSupercuboidKey(chunk_key)
        proj_info.job_id = msg_data['ingest_job']
    else:
        # Standard async invoke of this lambda.

        # Load the project info from the chunk key you are processing
        proj_info = BossIngestProj.fromSupercuboidKey(event["chunk_key"])
        proj_info.job_id = event["ingest_job"]

        # Get message from SQS ingest queue, try for ~2 seconds
        rx_cnt = 0
        msg_data = None
        msg_id = None
        msg_rx_handle = None
        while rx_cnt < 6:
            ingest_queue = IngestQueue(proj_info)
            msg = [x for x in ingest_queue.receiveMessage()]
            if msg:
                # receiveMessage yields (id, receipt_handle, body) tuples.
                msg = msg[0]
                print("MESSAGE: {}".format(msg))
                print(len(msg))
                msg_id = msg[0]
                msg_rx_handle = msg[1]
                msg_data = json.loads(msg[2])
                print("MESSAGE DATA: {}".format(msg_data))
                break
            else:
                rx_cnt += 1
                print("No message found. Try {} of 6".format(rx_cnt))
                time.sleep(1)

        if not msg_id:
            # No tiles ready to ingest.
            print("No ingest message available")
            return

        # Get the chunk key of the tiles to ingest.
        chunk_key = msg_data['chunk_key']


    tile_error_queue = TileErrorQueue(proj_info)

    print("Ingesting Chunk {}".format(chunk_key))
    # Expected tile count is encoded as the second '&'-separated field.
    tiles_in_chunk = int(chunk_key.split('&')[1])

    # Setup SPDB instance
    sp = SpatialDB(msg_data['parameters']["KVIO_SETTINGS"],
                   msg_data['parameters']["STATEIO_CONFIG"],
                   msg_data['parameters']["OBJECTIO_CONFIG"])

    # Get tile list from Tile Index Table
    tile_index_db = BossTileIndexDB(proj_info.project_name)
    # tile_index_result (dict): keys are S3 object keys of the tiles comprising the chunk.
    tile_index_result = tile_index_db.getCuboid(msg_data["chunk_key"], int(msg_data["ingest_job"]))
    if tile_index_result is None:
        # If chunk_key is gone, another lambda uploaded the cuboids and deleted the chunk_key afterwards.
        if not sqs_triggered:
            # Remove message so it's not redelivered.
            ingest_queue.deleteMessage(msg_id, msg_rx_handle)

        print("Aborting due to chunk key missing from tile index table")
        return

    # Sort the tile keys
    print("Tile Keys: {}".format(tile_index_result["tile_uploaded_map"]))
    tile_key_list = [x.rsplit("&", 2) for x in tile_index_result["tile_uploaded_map"].keys()]
    if len(tile_key_list) < tiles_in_chunk:
        print("Not a full set of 16 tiles. Assuming it has handled already, tiles: {}".format(len(tile_key_list)))
        if not sqs_triggered:
            ingest_queue.deleteMessage(msg_id, msg_rx_handle)
        return
    # Sort by z-slice index (second-to-last '&'-separated field) so slices
    # stack in order.
    tile_key_list = sorted(tile_key_list, key=lambda x: int(x[1]))
    tile_key_list = ["&".join(x) for x in tile_key_list]
    print("Sorted Tile Keys: {}".format(tile_key_list))

    # Augment Resource JSON data so it will instantiate properly that was pruned due to S3 metadata size limits
    resource_dict = msg_data['parameters']['resource']
    _, exp_name, ch_name = resource_dict["boss_key"].split("&")

    resource_dict["channel"]["name"] = ch_name
    resource_dict["channel"]["description"] = ""
    resource_dict["channel"]["sources"] = []
    resource_dict["channel"]["related"] = []
    resource_dict["channel"]["default_time_sample"] = 0
    resource_dict["channel"]["downsample_status"] = "NOT_DOWNSAMPLED"

    resource_dict["experiment"]["name"] = exp_name
    resource_dict["experiment"]["description"] = ""
    resource_dict["experiment"]["num_time_samples"] = 1
    resource_dict["experiment"]["time_step"] = None
    resource_dict["experiment"]["time_step_unit"] = None

    resource_dict["coord_frame"]["name"] = "cf"
    # NOTE(review): this second assignment overwrites "name" with "" — it
    # was likely meant to set "description"; confirm intended behavior.
    resource_dict["coord_frame"]["name"] = ""
    resource_dict["coord_frame"]["x_start"] = 0
    resource_dict["coord_frame"]["x_stop"] = 100000
    resource_dict["coord_frame"]["y_start"] = 0
    resource_dict["coord_frame"]["y_stop"] = 100000
    resource_dict["coord_frame"]["z_start"] = 0
    resource_dict["coord_frame"]["z_stop"] = 100000
    resource_dict["coord_frame"]["voxel_unit"] = "nanometers"

    # Setup the resource
    resource = BossResourceBasic()
    resource.from_dict(resource_dict)
    dtype = resource.get_numpy_data_type()

    # read all tiles from bucket into a slab
    tile_bucket = TileBucket(proj_info.project_name)
    data = []
    num_z_slices = 0
    for tile_key in tile_key_list:
        try:
            image_data, message_id, receipt_handle, metadata = tile_bucket.getObjectByKey(tile_key)
        except KeyError:
            print('Key: {} not found in tile bucket, assuming redelivered SQS message and aborting.'.format(
                tile_key))
            if not sqs_triggered:
                # Remove message so it's not redelivered.
                ingest_queue.deleteMessage(msg_id, msg_rx_handle)
            print("Aborting due to missing tile in bucket")
            return

        image_bytes = BytesIO(image_data)
        image_size = image_bytes.getbuffer().nbytes

        # Get tiles size from metadata, need to shape black tile if actual tile is corrupt.
        if 'x_size' in metadata:
            tile_size_x = metadata['x_size']
        else:
            print('MetadataMissing: x_size not in tile metadata:  using 1024.')
            tile_size_x = 1024

        if 'y_size' in metadata:
            tile_size_y = metadata['y_size']
        else:
            print('MetadataMissing: y_size not in tile metadata:  using 1024.')
            tile_size_y = 1024

        if image_size == 0:
            # Zero-byte tile: substitute an all-black tile and record the error.
            print('TileError: Zero length tile, using black instead: {}'.format(tile_key))
            error_msg = 'Zero length tile'
            enqueue_tile_error(tile_error_queue, tile_key, chunk_key, error_msg)
            # NOTE(review): shape is (tile_size_x, tile_size_y), but image
            # arrays from PIL are (rows, cols) = (y, x) — confirm ordering.
            tile_img = np.zeros((tile_size_x, tile_size_y), dtype=dtype)
        else:
            try:
                tile_img = np.asarray(Image.open(image_bytes), dtype=dtype)
            except TypeError as te:
                # PIL raises TypeError on truncated/incomplete image data.
                print('TileError: Incomplete tile, using black instead (tile_size_in_bytes, tile_key): {}, {}'
                      .format(image_size, tile_key))
                error_msg = 'Incomplete tile'
                enqueue_tile_error(tile_error_queue, tile_key, chunk_key, error_msg)
                tile_img = np.zeros((tile_size_x, tile_size_y), dtype=dtype)
            except OSError as oe:
                # PIL raises OSError when the image cannot be identified/read.
                print('TileError: OSError, using black instead (tile_size_in_bytes, tile_key): {}, {} ErrorMessage: {}'
                      .format(image_size, tile_key, oe))
                error_msg = 'OSError: {}'.format(oe)
                enqueue_tile_error(tile_error_queue, tile_key, chunk_key, error_msg)
                tile_img = np.zeros((tile_size_x, tile_size_y), dtype=dtype)

        data.append(tile_img)
        num_z_slices += 1


    # Make 3D array of image data. It should be in XYZ at this point
    chunk_data = np.array(data)
    del data
    tile_dims = chunk_data.shape

    # Break into Cube instances
    print("Tile Dims: {}".format(tile_dims))
    print("Num Z Slices: {}".format(num_z_slices))
    num_x_cuboids = int(math.ceil(tile_dims[2] / CUBOIDSIZE[proj_info.resolution][0]))
    num_y_cuboids = int(math.ceil(tile_dims[1] / CUBOIDSIZE[proj_info.resolution][1]))

    print("Num X Cuboids: {}".format(num_x_cuboids))
    print("Num Y Cuboids: {}".format(num_y_cuboids))

    chunk_key_parts = BossUtil.decode_chunk_key(chunk_key)
    t_index = chunk_key_parts['t_index']
    for x_idx in range(0, num_x_cuboids):
        for y_idx in range(0, num_y_cuboids):
            # TODO: check time series support
            cube = Cube.create_cube(resource, CUBOIDSIZE[proj_info.resolution])
            cube.zeros()

            # Compute Morton ID
            # TODO: verify Morton indices correct!
            print(chunk_key_parts)
            morton_x_ind = x_idx + (chunk_key_parts["x_index"] * num_x_cuboids)
            morton_y_ind = y_idx + (chunk_key_parts["y_index"] * num_y_cuboids)
            print("Morton X: {}".format(morton_x_ind))
            print("Morton Y: {}".format(morton_y_ind))
            morton_index = XYZMorton([morton_x_ind, morton_y_ind, int(chunk_key_parts['z_index'])])

            # Insert sub-region from chunk_data into cuboid
            x_start = x_idx * CUBOIDSIZE[proj_info.resolution][0]
            x_end = x_start + CUBOIDSIZE[proj_info.resolution][0]
            x_end = min(x_end, tile_dims[2])
            y_start = y_idx * CUBOIDSIZE[proj_info.resolution][1]
            y_end = y_start + CUBOIDSIZE[proj_info.resolution][1]
            y_end = min(y_end, tile_dims[1])
            z_end = CUBOIDSIZE[proj_info.resolution][2]
            # TODO: get sub-array w/o making a copy.
            print("Yrange: {}".format(y_end - y_start))
            print("Xrange: {}".format(x_end - x_start))
            print("X start: {}".format(x_start))
            print("X stop: {}".format(x_end))
            # Cube data is (t, z, y, x); copy the chunk's (z, y, x) window.
            cube.data[0, 0:num_z_slices, 0:(y_end - y_start), 0:(x_end - x_start)] = chunk_data[0:num_z_slices,
                                                                                 y_start:y_end, x_start:x_end]

            # Create object key
            object_key = sp.objectio.generate_object_key(resource, proj_info.resolution, t_index, morton_index)
            print("Object Key: {}".format(object_key))

            # Put object in S3
            sp.objectio.put_objects([object_key], [cube.to_blosc()])

            # Add object to index
            sp.objectio.add_cuboid_to_index(object_key, ingest_job=int(msg_data["ingest_job"]))

            # Update id indices if this is an annotation channel
            # We no longer index during ingest.
            #if resource.data['channel']['type'] == 'annotation':
            #   try:
            #       sp.objectio.update_id_indices(
            #           resource, proj_info.resolution, [object_key], [cube.data])
            #   except SpdbError as ex:
            #       sns_client = boto3.client('sns')
            #       topic_arn = msg_data['parameters']["OBJECTIO_CONFIG"]["prod_mailing_list"]
            #       msg = 'During ingest:\n{}\nCollection: {}\nExperiment: {}\n Channel: {}\n'.format(
            #           ex.message,
            #           resource.data['collection']['name'],
            #           resource.data['experiment']['name'],
            #           resource.data['channel']['name'])
            #       sns_client.publish(
            #           TopicArn=topic_arn,
            #           Subject='Object services misuse',
            #           Message=msg)

    lambda_client = boto3.client('lambda', region_name=SETTINGS.REGION_NAME)

    names = AWSNames.create_from_lambda_name(context.function_name)

    delete_tiles_data = {
        'tile_key_list': tile_key_list,
        'region': SETTINGS.REGION_NAME,
        'bucket': tile_bucket.bucket.name
    }

    # Delete tiles from tile bucket.
    lambda_client.invoke(
        FunctionName=names.delete_tile_objs_lambda,
        InvocationType='Event',
        Payload=json.dumps(delete_tiles_data).encode()
    )

    delete_tile_entry_data = {
        'tile_index': tile_index_db.table.name,
        'region': SETTINGS.REGION_NAME,
        'chunk_key': chunk_key,
        'task_id': msg_data['ingest_job']
    }

    # Delete entry from tile index.
    lambda_client.invoke(
        FunctionName=names.delete_tile_index_entry_lambda,
        InvocationType='Event',
        Payload=json.dumps(delete_tile_entry_data).encode()
    )

    if not sqs_triggered:
        # Delete message since it was processed successfully
        ingest_queue.deleteMessage(msg_id, msg_rx_handle)