def test_cutout_no_time_single_aligned_miss(self):
    """Test the get_cubes method - no time - single - miss"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (1, 0, 0), 0, cube1.data)

    cube2 = sp.cutout(self.resource, (1, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)

    # Make sure data is the same
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # Delete everything in the cache
    sp.kvio.cache_client.flushdb()

    # Get the data again
    cube3 = sp.cutout(self.resource, (1, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)

    # Make sure the data is the same
    np.testing.assert_array_equal(cube1.data, cube2.data)
    np.testing.assert_array_equal(cube1.data, cube3.data)
def test_filtered_cutout(self):
    """Test cutout with a filter_ids list applied"""
    time_axis = [1]
    cube_dim = [self.x_dim, self.y_dim, self.z_dim]
    cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
    cube1 = Cube.create_cube(self.resource, cube_dim)
    cube1.data = np.ones(time_axis + [cube_dim[2], cube_dim[1], cube_dim[0]], dtype='uint64')
    cube1.morton_id = 0
    corner = (0, 0, 0)
    expected = np.zeros(time_axis + [cube_dim[2], cube_dim[1], cube_dim[0]], dtype='uint64')

    # Will filter by these ids.
    id1 = 55555
    id2 = 66666
    cube1.data[0][0][40][0] = id1
    cube1.data[0][0][50][0] = id2
    expected[0][0][40][0] = id1
    expected[0][0][50][0] = id2

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    resolution = 0
    sp.write_cuboid(self.resource, corner, resolution, cube1.data, time_sample_start=0)

    # Make sure cube written correctly.
    actual_cube = sp.cutout(self.resource, corner, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube1.data, actual_cube.data)

    # Method under test.
    actual_filtered = sp.cutout(self.resource, corner, cube_dim_tuple, resolution,
                                filter_ids=[id1, id2])
    np.testing.assert_array_equal(expected, actual_filtered.data)
def test_page_in_multi_cuboids_y_dir(self):
    """Test paging in multiple cuboids stacked along the y direction"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim * 2, self.z_dim])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (0, 0, 0), 0, cube1.data)

    cube2 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim * 2, self.z_dim), 0)

    # Make sure data is the same
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # Delete everything in the cache
    sp.kvio.cache_client.flushdb()

    # Force use of lambda function.
    sp.read_lambda_threshold = 0

    # Get the data again, which should trigger lambda page in.
    cube3 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim * 2, self.z_dim), 0)

    # Make sure the data is the same
    np.testing.assert_array_equal(cube1.data, cube3.data)
def test_cutout_iso_not_present(self):
    """Test write_cuboid and cutout methods with iso option, testing iso is stored in parallel"""
    data = self.data
    data["channel"]["base_resolution"] = 5
    resource = BossResourceBasic(data)

    # Generate random data
    cube1 = Cube.create_cube(resource, [400, 400, 8])
    cube1.random()
    cube1.morton_id = 0

    cubez = Cube.create_cube(resource, [400, 400, 8])
    cubez.zeros()
    cubez.morton_id = 0

    # Write at 5, not iso, and verify
    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(resource, (200, 600, 3), 5, cube1.data, iso=False)
    cube2 = sp.cutout(resource, (200, 600, 3), (400, 400, 8), 5, iso=False)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # Get at res 5 iso, which should be blank
    cube2 = sp.cutout(resource, (200, 600, 3), (400, 400, 8), 5, iso=True)
    np.testing.assert_array_equal(cubez.data, cube2.data)
def test_get_ids_in_region_multiple_cubes_and_x_partials(self):
    """
    Region has some full cuboids and some partial cuboids along the x axis.
    """
    cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.zeros()
    cube1.data[0][0][40][105] = 55555
    cube1.data[0][0][50][105] = 66666
    pos1 = [7*self.x_dim, 5*self.y_dim, 2*self.z_dim]
    cube1.morton_id = XYZMorton(pos1)

    cube2 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube2.zeros()
    cube2.data[0][0][40][105] = 55555
    cube2.data[0][0][50][105] = 77777
    pos2 = [8*self.x_dim, 5*self.y_dim, 2*self.z_dim]
    cube2.morton_id = XYZMorton(pos2)

    cube3 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube3.zeros()
    cube3.data[0][0][0][105] = 88888
    pos3 = [9*self.x_dim, 5*self.y_dim, 2*self.z_dim]
    cube3.morton_id = XYZMorton(pos3)

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    resolution = 0
    sp.write_cuboid(self.resource, pos1, resolution, cube1.data, time_sample_start=0)
    sp.write_cuboid(self.resource, pos2, resolution, cube2.data, time_sample_start=0)
    sp.write_cuboid(self.resource, pos3, resolution, cube3.data, time_sample_start=0)

    # Make sure cube write complete and correct.
    actual_cube = sp.cutout(self.resource, pos1, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube1.data, actual_cube.data)
    actual_cube = sp.cutout(self.resource, pos2, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube2.data, actual_cube.data)
    actual_cube = sp.cutout(self.resource, pos3, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube3.data, actual_cube.data)

    corner = (7*self.x_dim+100, 5*self.y_dim, 2*self.z_dim)
    extent = (2*self.x_dim+self.x_dim//2, self.y_dim, self.z_dim)
    t_range = [0, 1]
    version = 0
    expected = ['55555', '66666', '77777', '88888']

    # Method under test.
    actual = sp.get_ids_in_region(
        self.resource, resolution, corner, extent, t_range, version)

    self.assertIn('ids', actual)
    self.assertCountEqual(expected, actual['ids'])
def test_cutout_no_time_single_aligned_zero(self):
    """Test the get_cubes method - no time - single"""
    db = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    cube = db.cutout(self.resource, (7, 88, 243), (self.x_dim, self.y_dim, self.z_dim), 0)

    np.testing.assert_array_equal(np.sum(cube.data), 0)
def test_get_ids_in_region_single_cube(self):
    """Test single cuboid using DynamoDB index."""
    cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.zeros()
    cube1.data[0][0][40][0] = 55555
    cube1.data[0][0][50][0] = 66666000000000
    pos1 = [2*self.x_dim, 3*self.y_dim, 2*self.z_dim]
    cube1.morton_id = XYZMorton(pos1)

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    resolution = 0
    sp.write_cuboid(self.resource, pos1, resolution, cube1.data, time_sample_start=0)

    # Make sure cube write complete and correct.
    actual_cube = sp.cutout(self.resource, pos1, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube1.data, actual_cube.data)

    corner = (2*self.x_dim, 3*self.y_dim, 2*self.z_dim)
    extent = (self.x_dim, self.y_dim, self.z_dim)
    t_range = [0, 1]
    version = 0
    expected = ['55555', '66666000000000']

    # Method under test.
    actual = sp.get_ids_in_region(
        self.resource, resolution, corner, extent, t_range, version)

    self.assertIn('ids', actual)
    self.assertCountEqual(expected, actual['ids'])
def test_cutout_no_time_single_aligned_zero_access_mode_cache(self, fake_get_region):
    """Test the get_cubes method - no time - single - DO NOT bypass cache"""
    db = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    cube = db.cutout(self.resource, (7, 88, 243), (self.x_dim, self.y_dim, self.z_dim), 0,
                     access_mode="cache")

    np.testing.assert_array_equal(np.sum(cube.data), 0)
def test_filtered_cutout_bad_id_list(self):
    """A non-numeric entry in filter_ids should raise SpdbError"""
    time_axis = [1]
    cube_dim = [self.x_dim, self.y_dim, self.z_dim]
    cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
    cube1 = Cube.create_cube(self.resource, cube_dim)
    cube1.data = np.ones(time_axis + [cube_dim[2], cube_dim[1], cube_dim[0]], dtype='uint64')
    cube1.morton_id = 0
    corner = (6*self.x_dim, 6*self.y_dim, 2*self.z_dim)

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    resolution = 0
    sp.write_cuboid(self.resource, corner, resolution, cube1.data, time_sample_start=0)

    # Method under test.
    with self.assertRaises(SpdbError):
        sp.cutout(self.resource, corner, cube_dim_tuple, resolution, filter_ids=['foo', 55555])
def test_cutout_no_time_multi_unaligned_hit(self):
    """Test the get_cubes method - no time - multi - unaligned - hit"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [400, 400, 8])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (200, 600, 3), 0, cube1.data)

    cube2 = sp.cutout(self.resource, (200, 600, 3), (400, 400, 8), 0)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # do it again...should be in cache
    cube2 = sp.cutout(self.resource, (200, 600, 3), (400, 400, 8), 0)
    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_cutout_no_time_multi_unaligned_hit_iso_below(self):
    """Test write_cuboid and cutout methods - no time - multi - unaligned - hit - isotropic, below iso fork"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [400, 400, 8])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (200, 600, 3), 0, cube1.data, iso=True)

    cube2 = sp.cutout(self.resource, (200, 600, 3), (400, 400, 8), 0, iso=True)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # do it again...should be in cache
    cube2 = sp.cutout(self.resource, (200, 600, 3), (400, 400, 8), 0, iso=True)
    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_get_tight_bounding_box_single_cuboid(self):
    """
    Get the tight bounding box for an object that exists within a single cuboid.
    """
    resolution = 0
    [x_cube_dim, y_cube_dim, z_cube_dim] = CUBOIDSIZE[resolution]

    id = 33333
    id_as_str = '33333'
    # Customize resource so it writes to its own channel and uses a
    # coord frame large enough to encompass the data written.  This is
    # important for proper loose bounding box calculations.
    data = get_anno_dict(boss_key='col1&exp1&ch50', lookup_key='1&1&50')
    data['coord_frame']['x_stop'] = 10000
    data['coord_frame']['y_stop'] = 10000
    data['coord_frame']['z_stop'] = 10000
    resource = BossResourceBasic(data)
    time_sample = 0
    version = 0
    x_rng = [0, x_cube_dim]
    y_rng = [0, y_cube_dim]
    z_rng = [0, z_cube_dim]
    t_rng = [0, 1]

    cube_dim_tuple = (self.x_dim, self.y_dim, self.z_dim)
    cube1 = Cube.create_cube(resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.zeros()
    cube1.data[0][14][500][104] = id
    cube1.data[0][15][501][105] = id
    cube1.data[0][15][502][104] = id
    cube1.data[0][14][503][105] = id
    pos1 = [10*self.x_dim, 15*self.y_dim, 2*self.z_dim]
    cube1.morton_id = XYZMorton(pos1)

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    sp.write_cuboid(resource, pos1, resolution, cube1.data, time_sample_start=0)

    # Make sure cube write complete and correct.
    actual_cube = sp.cutout(resource, pos1, cube_dim_tuple, resolution)
    np.testing.assert_array_equal(cube1.data, actual_cube.data)

    # Method under test.
    actual = sp.get_bounding_box(resource, resolution, id_as_str, bb_type='tight')

    expected = {
        'x_range': [pos1[0]+104, pos1[0]+106],
        'y_range': [pos1[1]+500, pos1[1]+504],
        'z_range': [pos1[2]+14, pos1[2]+16],
        't_range': t_rng
    }

    self.assertEqual(expected, actual)
def test_cutout_time_offset_single_aligned_hit(self):
    """Test the get_cubes method - w/ time offset - single - hit"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim], time_range=[0, 3])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (0, 0, 0), 0, cube1.data, time_sample_start=6)

    cube2 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0,
                      time_sample_range=[6, 9])

    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_cutout_no_time_single_aligned_hit(self):
    """Test the get_cubes method - no time - single - hit"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (0, 0, 0), 0, cube1.data)

    start = time.time()
    cube2 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)
    cutout1_time = time.time() - start
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # The second read should be served from the cache and therefore be faster.
    start = time.time()
    cube2 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)
    cutout2_time = time.time() - start
    np.testing.assert_array_equal(cube1.data, cube2.data)

    assert cutout2_time < cutout1_time
def test_cutout_no_time_single_aligned_hit_shifted(self):
    """Test the get_cubes method - no time - single - hit - shifted into a different location"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (self.x_dim, self.y_dim, 0), 0, cube1.data)

    cube2 = sp.cutout(self.resource, (self.x_dim, self.y_dim, 0), (self.x_dim, self.y_dim, self.z_dim), 0)

    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_cutout_no_time_multi_unaligned_hit_iso_above(self):
    """Test write_cuboid and cutout methods - no time - multi - unaligned - hit - isotropic, above iso fork"""
    data = self.data
    data["channel"]["base_resolution"] = 5
    resource = BossResourceBasic(data)

    # Generate random data
    cube1 = Cube.create_cube(resource, [400, 400, 8])
    cube1.random()
    cube1.morton_id = 0

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(resource, (200, 600, 3), 5, cube1.data, iso=True)

    cube2 = sp.cutout(resource, (200, 600, 3), (400, 400, 8), 5, iso=True)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # do it again...should be in cache
    cube2 = sp.cutout(resource, (200, 600, 3), (400, 400, 8), 5, iso=True)
    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_cutout_iso_below_fork(self):
    """Test write_cuboid and cutout methods with iso option, testing iso is equal below the res fork"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [400, 400, 8])
    cube1.random()
    cube1.morton_id = 0

    cubez = Cube.create_cube(self.resource, [400, 400, 8])
    cubez.zeros()
    cubez.morton_id = 0

    # Write at res 0, not iso, and verify
    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (200, 600, 3), 0, cube1.data, iso=False)
    cube2 = sp.cutout(self.resource, (200, 600, 3), (400, 400, 8), 0, iso=False)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # Get at res 0 iso, which should be equal to the non-iso call
    cube2 = sp.cutout(self.resource, (200, 600, 3), (400, 400, 8), 0, iso=True)
    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_cutout_no_time_single_aligned_existing_hit(self):
    """Test the get_cubes method - no time - aligned - existing data - hit"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.random()

    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    sp.write_cuboid(self.resource, (0, 0, 0), 0, cube1.data)
    cube2 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    del cube1
    del cube2

    # Now write to the cuboid again
    cube3 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube3.random()
    sp.write_cuboid(self.resource, (0, 0, 0), 0, cube3.data)

    cube4 = sp.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)
    np.testing.assert_array_equal(cube3.data, cube4.data)
def test_cutout_no_time_single_aligned_miss(self, fake_get_region):
    """Test the get_cubes method - no time - single - miss"""
    # Generate random data
    cube1 = Cube.create_cube(self.resource, [self.x_dim, self.y_dim, self.z_dim])
    cube1.random()
    cube1.morton_id = 0

    db = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    # Populate dummy data in S3 only, so the cutout is a cache miss
    self.write_test_cube(db, self.resource, 0, cube1, cache=False, s3=True)

    cube2 = db.cutout(self.resource, (0, 0, 0), (self.x_dim, self.y_dim, self.z_dim), 0)

    np.testing.assert_array_equal(cube1.data, cube2.data)
def test_delayed_write_daemon_simple(self):
    """Test handling delayed writes"""
    sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
    dwd = DelayedWriteDaemon("boss-delayedwrited-test.pid")

    # Create a single delayed write
    cube1 = Cube.create_cube(self.resource, [512, 512, 16])
    cube1.random()
    cube1.morton_id = 0
    res = 0
    time_sample = 0

    write_cuboid_base = "WRITE-CUBOID&{}&{}".format(self.resource.get_lookup_key(), 0)
    write_cuboid_key = sp.kvio.insert_cube_in_write_buffer(write_cuboid_base, res, cube1.morton_id,
                                                           cube1.to_blosc_by_time_index(time_sample))

    sp.cache_state.add_to_delayed_write(write_cuboid_key,
                                        self.resource.get_lookup_key(),
                                        res,
                                        cube1.morton_id,
                                        time_sample,
                                        self.resource.to_json())

    # Use daemon to handle writes
    dwd.process(sp)
    time.sleep(30)

    # Make sure they went through
    cube2 = sp.cutout(self.resource, (0, 0, 0), (512, 512, 16), 0)
    np.testing.assert_array_equal(cube1.data, cube2.data)

    # Make sure delay key got deleted
    keys = sp.cache_state.get_all_delayed_write_keys()
    assert not keys
class IntegrationTestPrefetchDaemon(unittest.TestCase):
    layer = AWSSetupLayer

    def setUp(self):
        # Setup Data
        self.data = self.layer.setup_helper.get_image8_dict()
        self.resource = BossResourceBasic(self.data)

        # Setup config
        self.kvio_config = self.layer.kvio_config
        self.state_config = self.layer.state_config
        self.object_store_config = self.layer.object_store_config

        client = redis.StrictRedis(host=self.kvio_config['cache_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()
        client = redis.StrictRedis(host=self.state_config['cache_state_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()

        # Suppress ResourceWarning messages about unclosed connections.
        warnings.simplefilter('ignore')

        self.prefetch = PrefetchDaemon('foo')
        self.sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
        self.prefetch.set_spatialdb(self.sp)

    def tearDown(self):
        """Clean kv store in between tests"""
        client = redis.StrictRedis(host=self.kvio_config['cache_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()
        client = redis.StrictRedis(host=self.state_config['cache_state_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()

    def test_add_to_prefetch(self):
        cuboid_dims = CUBOIDSIZE[0]
        # Cuboid dimensions.
        x_dim = cuboid_dims[0]
        y_dim = cuboid_dims[1]
        z_dim = cuboid_dims[2]

        cube_above = Cube.create_cube(self.resource, [x_dim, y_dim, z_dim])
        cube_above.random()

        # Write a cuboid offset vertically from the origin.
        self.sp.write_cuboid(self.resource, (0, 0, z_dim * 2), 0, cube_above.data)

        cube_above.morton_id = ndlib.XYZMorton([0, 0, z_dim * 2 // z_dim])

        cube_above_cache_key = self.sp.kvio.generate_cached_cuboid_keys(
            self.resource, 0, [0], [cube_above.morton_id])

        # Make sure cuboid saved.
        cube_act = self.sp.cutout(self.resource, (0, 0, z_dim * 2), (x_dim, y_dim, z_dim), 0)
        np.testing.assert_array_equal(cube_above.data, cube_act.data)

        # Clear cache so we can test prefetch.
        self.sp.kvio.cache_client.flushdb()
        # Also clear cache state before running test.
        self.sp.cache_state.status_client.flushdb()

        obj_keys = self.sp.objectio.cached_cuboid_to_object_keys(cube_above_cache_key)

        # Place a cuboid in the prefetch queue.
        self.sp.cache_state.status_client.rpush('PRE-FETCH', obj_keys[0])

        # This is the system under test.
        self.prefetch.process()

        # Wait for cube to be prefetched.
        i = 0
        while not self.sp.kvio.cube_exists(cube_above_cache_key[0]) and i < 30:
            time.sleep(1)
            i += 1

        # Confirm cuboid now in cache.
        self.assertTrue(self.sp.kvio.cube_exists(cube_above_cache_key[0]))

        cube_act = self.sp.cutout(self.resource, (0, 0, z_dim * 2), (x_dim, y_dim, z_dim), 0)
        np.testing.assert_array_equal(cube_above.data, cube_act.data)
def test_cutout_no_time_single_aligned_zero_access_mode_invalid(self, fake_get_region):
    """Test the get_cubes method - no time - single - Raise error due to invalid access_mode"""
    db = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)

    with self.assertRaises(SpdbError):
        db.cutout(self.resource, (7, 88, 243), (self.x_dim, self.y_dim, self.z_dim), 0,
                  access_mode="wrong")
class IntegrationTestCacheMissDaemon(unittest.TestCase):
    layer = AWSSetupLayer

    def setUp(self):
        # Get data from nose2 layer based setup
        # Setup Data
        self.data = self.layer.setup_helper.get_image8_dict()
        self.resource = BossResourceBasic(self.data)

        # Setup config
        self.kvio_config = self.layer.kvio_config
        self.state_config = self.layer.state_config
        self.object_store_config = self.layer.object_store_config

        client = redis.StrictRedis(host=self.kvio_config['cache_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()
        client = redis.StrictRedis(host=self.state_config['cache_state_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()

        self.cache_miss = CacheMissDaemon('foo')
        self.sp = SpatialDB(self.kvio_config, self.state_config, self.object_store_config)
        self.cache_miss.set_spatialdb(self.sp)

    def tearDown(self):
        """Clean kv store in between tests"""
        client = redis.StrictRedis(host=self.kvio_config['cache_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()
        client = redis.StrictRedis(host=self.state_config['cache_state_host'],
                                   port=6379, db=1, decode_responses=False)
        client.flushdb()

    def test_add_to_prefetch(self):
        cuboid_dims = CUBOIDSIZE[0]
        # Cuboid dimensions.
        x_dim = cuboid_dims[0]
        y_dim = cuboid_dims[1]
        z_dim = cuboid_dims[2]

        cube = Cube.create_cube(self.resource, [x_dim, y_dim, z_dim])
        cube.random()
        cube_above = Cube.create_cube(self.resource, [x_dim, y_dim, z_dim])
        cube_above.random()
        cube_below = Cube.create_cube(self.resource, [x_dim, y_dim, z_dim])
        cube_below.random()

        # Write 3 cuboids that are stacked vertically.
        self.sp.write_cuboid(self.resource, (0, 0, 0), 0, cube_below.data)
        self.sp.write_cuboid(self.resource, (0, 0, z_dim), 0, cube.data)
        self.sp.write_cuboid(self.resource, (0, 0, z_dim * 2), 0, cube_above.data)

        cube.morton_id = ndlib.XYZMorton([0, 0, z_dim // z_dim])
        cube_below.morton_id = ndlib.XYZMorton([0, 0, 0])
        cube_above.morton_id = ndlib.XYZMorton([0, 0, z_dim * 2 // z_dim])
        print('mortons: {}, {}, {}'.format(
            cube_below.morton_id, cube.morton_id, cube_above.morton_id))

        cube_below_cache_key, cube_cache_key, cube_above_cache_key = self.sp.kvio.generate_cached_cuboid_keys(
            self.resource, 0, [0],
            [cube_below.morton_id, cube.morton_id, cube_above.morton_id])

        # Make sure cuboids saved.
        cube_act = self.sp.cutout(self.resource, (0, 0, 0), (x_dim, y_dim, z_dim), 0)
        np.testing.assert_array_equal(cube_below.data, cube_act.data)
        cube_act = self.sp.cutout(self.resource, (0, 0, z_dim), (x_dim, y_dim, z_dim), 0)
        np.testing.assert_array_equal(cube.data, cube_act.data)
        cube_act = self.sp.cutout(self.resource, (0, 0, z_dim * 2), (x_dim, y_dim, z_dim), 0)
        np.testing.assert_array_equal(cube_above.data, cube_act.data)

        # Clear cache so we can get a cache miss.
        self.sp.kvio.cache_client.flushdb()
        # Also clear CACHE-MISS before running the test.
        self.sp.cache_state.status_client.flushdb()

        # Get middle cube again.  This should trigger a cache miss.
        cube_act = self.sp.cutout(self.resource, (0, 0, z_dim), (x_dim, y_dim, z_dim), 0)

        # Confirm there is a cache miss.
        misses = self.sp.cache_state.status_client.lrange('CACHE-MISS', 0, 10)
        print('misses:')
        print(misses)
        miss_actual = self.sp.cache_state.status_client.lindex('CACHE-MISS', 0)
        self.assertEqual(cube_cache_key, str(miss_actual, 'utf-8'))

        # This is the system under test.
        self.cache_miss.process()

        # Confirm PRE-FETCH has the object keys for the cube above and below.
        fetch_actual1 = self.sp.cache_state.status_client.lindex('PRE-FETCH', 0)
        fetch_actual2 = self.sp.cache_state.status_client.lindex('PRE-FETCH', 1)
        obj_keys = self.sp.objectio.cached_cuboid_to_object_keys(
            [cube_above_cache_key, cube_below_cache_key])
        self.assertEqual(obj_keys[0], str(fetch_actual1, 'utf-8'))
        self.assertEqual(obj_keys[1], str(fetch_actual2, 'utf-8'))