# Imports assumed by the snippets below (CloudVolume / igneous /
# python-task-queue era test code; exact module layout varies by version).
# delete_layer() and create_layer() are test helpers defined elsewhere
# in the suite.
import json
import time

import numpy as np
import tinybrain

from cloudvolume import CloudVolume, EmptyVolumeException, chunks, lib
from cloudvolume.lib import Vec, Bbox, xyzrange, min2
from taskqueue import TaskQueue, LocalTaskQueue, MockTaskQueue

import igneous.task_creation as tc
from igneous.task_creation import create_downsampling_tasks
from igneous import downsample


def test_luminance_levels_task():
    directory = '/tmp/removeme/luminance_levels/'
    layer_path = 'file://' + directory
    delete_layer(layer_path)

    storage, imgd = create_layer(
        size=(256, 256, 128, 1), offset=(0, 0, 0),
        layer_type="image", layer_name='luminance_levels'
    )

    tq = MockTaskQueue()
    tasks = tc.create_luminance_levels_tasks(
        layer_path=layer_path, coverage_factor=0.01,
        shape=None, offset=(0, 0, 0), mip=0
    )
    tq.insert_all(tasks)

    # Ground-truth histogram of the z=0 slice.
    gt = [0] * 256
    for x, y, z in lib.xyzrange((0, 0, 0), list(imgd.shape[:2]) + [1]):
        gt[imgd[x, y, 0, 0]] += 1

    with open('/tmp/removeme/luminance_levels/levels/0/0', 'rt') as f:
        levels = f.read()

    levels = json.loads(levels)
    assert levels['coverage_ratio'] == 1.0
    assert levels['levels'] == gt
def test_downsample_w_missing():
    delete_layer()
    storage, data = create_layer(size=(512, 512, 128, 1), offset=(3, 7, 11))
    cv = CloudVolume(storage.layer_path)
    assert len(cv.scales) == 4
    assert len(cv.available_mips) == 4
    delete_layer()
    cv.commit_info()

    # With fill_missing=False, downsampling the deleted layer should raise.
    try:
        create_downsampling_tasks(
            MockTaskQueue(), storage.layer_path,
            mip=0, num_mips=3, fill_missing=False
        )
    except EmptyVolumeException:
        pass

    create_downsampling_tasks(
        MockTaskQueue(), storage.layer_path,
        mip=0, num_mips=3, fill_missing=True
    )

    cv.refresh_info()
    assert len(cv.available_mips) == 4
    assert np.array_equal(cv.mip_volume_size(0), [512, 512, 128])
    assert np.array_equal(cv.mip_volume_size(1), [256, 256, 128])
    assert np.array_equal(cv.mip_volume_size(2), [128, 128, 128])
    assert np.array_equal(cv.mip_volume_size(3), [64, 64, 128])
    assert np.all(cv.mip_voxel_offset(3) == (0, 0, 11))

    cv.mip = 0
    cv.fill_missing = True
    assert np.count_nonzero(cv[3:67, 7:71, 11:75]) == 0
def downsample_dataset(dataset_name, from_mip=-1, num_mips=1, local=False,
                       n_download_workers=1, n_threads=32):
    if dataset_name == "pinky":
        ws_path = "gs://neuroglancer/svenmd/pinky40_v11/watershed/"
    elif dataset_name == "basil":
        ws_path = "gs://neuroglancer/svenmd/basil_4k_oldnet_cg/watershed/"
    elif dataset_name == "pinky100":
        ws_path = "gs://neuroglancer/nkem/pinky100_v0/ws/lost_no-random/bbox1_0/"
    else:
        raise Exception("Dataset unknown")

    if local:
        if n_threads == 1:
            with MockTaskQueue() as task_queue:
                tc.create_downsampling_tasks(
                    task_queue, ws_path, mip=from_mip, fill_missing=True,
                    num_mips=num_mips, n_download_workers=n_download_workers,
                    preserve_chunk_size=True
                )
        else:
            with LocalTaskQueue(parallel=n_threads) as task_queue:
                tc.create_downsampling_tasks(
                    task_queue, ws_path, mip=from_mip, fill_missing=True,
                    num_mips=num_mips, n_download_workers=n_download_workers,
                    preserve_chunk_size=True
                )
    else:
        with TaskQueue(queue_server='sqs',
                       qurl="https://sqs.us-east-1.amazonaws.com/098703261575/nkem-igneous") as task_queue:
            tc.create_downsampling_tasks(
                task_queue, ws_path, mip=from_mip, fill_missing=True,
                num_mips=num_mips, n_download_workers=n_download_workers,
                preserve_chunk_size=True
            )
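# Hypothetical invocation (not in the original script): downsample three
# additional mips of the pinky100 watershed locally across 8 threads. With
# local=False, the same tasks would instead land on the hardcoded SQS queue
# above for distributed workers to consume.
if __name__ == '__main__':
    downsample_dataset("pinky100", from_mip=0, num_mips=3, local=True, n_threads=8)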
def test_downsample_with_offset():
    delete_layer()
    storage, data = create_layer(size=(512, 512, 128, 1), offset=(3, 7, 11))
    cv = CloudVolume(storage.layer_path)
    assert len(cv.scales) == 4
    assert len(cv.available_mips) == 4

    cv.commit_info()
    create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=3)
    cv.refresh_info()

    assert len(cv.available_mips) == 4
    assert np.array_equal(cv.mip_volume_size(0), [512, 512, 128])
    assert np.array_equal(cv.mip_volume_size(1), [256, 256, 128])
    assert np.array_equal(cv.mip_volume_size(2), [128, 128, 128])
    assert np.array_equal(cv.mip_volume_size(3), [64, 64, 128])
    assert np.all(cv.mip_voxel_offset(3) == (0, 0, 11))

    cv.mip = 0
    assert np.all(cv[3:67, 7:71, 11:75] == data[0:64, 0:64, 0:64])

    data_ds1 = downsample.downsample_with_averaging(data, factor=[2, 2, 1, 1])
    cv.mip = 1
    assert np.all(cv[1:33, 3:35, 11:75] == data_ds1[0:32, 0:32, 0:64])

    data_ds2 = downsample.downsample_with_averaging(data_ds1, factor=[2, 2, 1, 1])
    cv.mip = 2
    assert np.all(cv[0:16, 1:17, 11:75] == data_ds2[0:16, 0:16, 0:64])

    data_ds3 = downsample.downsample_with_averaging(data_ds2, factor=[2, 2, 1, 1])
    cv.mip = 3
    assert np.all(cv[0:8, 0:8, 11:75] == data_ds3[0:8, 0:8, 0:64])
def test_downsample_no_offset(compression_method):
    delete_layer()
    storage, data = create_layer(size=(1024, 1024, 128, 1), offset=(0, 0, 0))
    cv = CloudVolume(storage.layer_path)
    assert len(cv.scales) == 1
    assert len(cv.available_mips) == 1

    cv.commit_info()
    tq = MockTaskQueue()
    tasks = create_downsampling_tasks(
        storage.layer_path, mip=0, num_mips=4, compress=compression_method
    )
    tq.insert_all(tasks)
    cv.refresh_info()

    assert len(cv.available_mips) == 5
    assert np.array_equal(cv.mip_volume_size(0), [1024, 1024, 128])
    assert np.array_equal(cv.mip_volume_size(1), [512, 512, 128])
    assert np.array_equal(cv.mip_volume_size(2), [256, 256, 128])
    assert np.array_equal(cv.mip_volume_size(3), [128, 128, 128])
    assert np.array_equal(cv.mip_volume_size(4), [64, 64, 128])

    slice64 = np.s_[0:64, 0:64, 0:64]

    cv.mip = 0
    assert np.all(cv[slice64] == data[slice64])

    # tinybrain returns a list of mip levels, hence the single-element unpacks.
    data_ds1, = tinybrain.downsample_with_averaging(data, factor=[2, 2, 1, 1])
    cv.mip = 1
    assert np.all(cv[slice64] == data_ds1[slice64])

    data_ds2, = tinybrain.downsample_with_averaging(data, factor=[4, 4, 1, 1])
    cv.mip = 2
    assert np.all(cv[slice64] == data_ds2[slice64])

    data_ds3, = tinybrain.downsample_with_averaging(data, factor=[8, 8, 1, 1])
    cv.mip = 3
    assert np.all(cv[slice64] == data_ds3[slice64])

    data_ds4, = tinybrain.downsample_with_averaging(data, factor=[16, 16, 1, 1])
    cv.mip = 4
    assert np.all(cv[slice64] == data_ds4[slice64])
def test_downsample_higher_mip():
    delete_layer()
    storage, data = create_layer(size=(512, 512, 64, 1), offset=(3, 7, 11))
    cv = CloudVolume(storage.layer_path)
    cv.info['scales'] = cv.info['scales'][:1]
    cv.commit_info()

    create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=2)
    cv.refresh_info()
    assert len(cv.available_mips) == 3

    create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=1, num_mips=2)
    cv.refresh_info()
    assert len(cv.available_mips) == 4

    cv.mip = 3
    assert cv[:, :, :].shape == (64, 64, 64, 1)
def test_skeletonization_task():
    directory = '/tmp/removeme/skeleton/'
    layer_path = 'file://' + directory
    delete_layer(layer_path)

    img = np.ones((256, 256, 256), dtype=np.uint64)
    img[:, :, :] = 2
    cv = CloudVolume.from_numpy(
        img,
        layer_type='segmentation',
        vol_path=layer_path,
    )

    tq = MockTaskQueue()
    tasks = tc.create_skeletonizing_tasks(
        layer_path, mip=0,
        teasar_params={
            'scale': 10,
            'const': 10,
        }
    )
    tq.insert_all(tasks)
def test_downsample_no_offset_2x2x2():
    delete_layer()
    cf, data = create_layer(size=(512, 512, 512, 1), offset=(0, 0, 0))
    cv = CloudVolume(cf.cloudpath)
    assert len(cv.scales) == 1
    assert len(cv.available_mips) == 1

    cv.commit_info()
    tq = MockTaskQueue()
    tasks = create_downsampling_tasks(
        cf.cloudpath, mip=0, num_mips=3,
        compress=None, factor=(2, 2, 2)
    )
    tq.insert_all(tasks)
    cv.refresh_info()

    assert len(cv.available_mips) == 4
    assert np.array_equal(cv.mip_volume_size(0), [512, 512, 512])
    assert np.array_equal(cv.mip_volume_size(1), [256, 256, 256])
    assert np.array_equal(cv.mip_volume_size(2), [128, 128, 128])
    assert np.array_equal(cv.mip_volume_size(3), [64, 64, 64])

    slice64 = np.s_[0:64, 0:64, 0:64]

    cv.mip = 0
    assert np.all(cv[slice64] == data[slice64])

    data_ds1, = tinybrain.downsample_with_averaging(data, factor=[2, 2, 2, 1])
    cv.mip = 1
    assert np.all(cv[slice64] == data_ds1[slice64])

    data_ds2, = tinybrain.downsample_with_averaging(data, factor=[4, 4, 4, 1])
    cv.mip = 2
    assert np.all(cv[slice64] == data_ds2[slice64])

    data_ds3, = tinybrain.downsample_with_averaging(data, factor=[8, 8, 8, 1])
    cv.mip = 3
    assert np.all(cv[slice64] == data_ds3[slice64])
def mesh(opt):
    gs_path = opt.gs_output

    # Mesh
    if opt.mesh:
        assert opt.vol_type == 'segmentation'

        # Create mesh
        with LocalTaskQueue(parallel=opt.parallel) as tq:
            tasks = tc.create_meshing_tasks(gs_path, mip=opt.mesh_mip)
            tq.insert_all(tasks)

        # Manifest
        with MockTaskQueue() as tq:
            tasks = tc.create_mesh_manifest_tasks(gs_path)
            tq.insert_all(tasks)
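# A minimal sketch of the options object `mesh` expects: any namespace with
# these five attributes works, e.g. parsed argparse arguments. The bucket
# path and values below are hypothetical placeholders, not from the original.
from argparse import Namespace

opt = Namespace(
    gs_output='gs://example-bucket/segmentation',  # hypothetical layer path
    vol_type='segmentation',
    mesh=True,
    mesh_mip=2,    # mip level to mesh at
    parallel=8,    # worker processes for LocalTaskQueue
)
mesh(opt)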
def test_downsample_no_offset():
    delete_layer()
    storage, data = create_layer(size=(1024, 1024, 128, 1), offset=(0, 0, 0))
    cv = CloudVolume(storage.layer_path)
    assert len(cv.scales) == 5
    assert len(cv.available_mips) == 5

    cv.commit_info()
    create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=4)
    cv.refresh_info()

    assert len(cv.available_mips) == 5
    assert np.array_equal(cv.mip_volume_size(0), [1024, 1024, 128])
    assert np.array_equal(cv.mip_volume_size(1), [512, 512, 128])
    assert np.array_equal(cv.mip_volume_size(2), [256, 256, 128])
    assert np.array_equal(cv.mip_volume_size(3), [128, 128, 128])
    assert np.array_equal(cv.mip_volume_size(4), [64, 64, 128])

    slice64 = np.s_[0:64, 0:64, 0:64]

    cv.mip = 0
    assert np.all(cv[slice64] == data[slice64])

    data_ds1 = downsample.downsample_with_averaging(data, factor=[2, 2, 1, 1])
    cv.mip = 1
    assert np.all(cv[slice64] == data_ds1[slice64])

    data_ds2 = downsample.downsample_with_averaging(data_ds1, factor=[2, 2, 1, 1])
    cv.mip = 2
    assert np.all(cv[slice64] == data_ds2[slice64])

    data_ds3 = downsample.downsample_with_averaging(data_ds2, factor=[2, 2, 1, 1])
    cv.mip = 3
    assert np.all(cv[slice64] == data_ds3[slice64])

    data_ds4 = downsample.downsample_with_averaging(data_ds3, factor=[2, 2, 1, 1])
    cv.mip = 4
    assert np.all(cv[slice64] == data_ds4[slice64])
def test_downsample_higher_mip():
    delete_layer()
    cf, data = create_layer(size=(512, 512, 64, 1), offset=(3, 7, 11))
    cv = CloudVolume(cf.cloudpath)
    cv.info['scales'] = cv.info['scales'][:1]
    tq = MockTaskQueue()
    cv.commit_info()

    tasks = create_downsampling_tasks(cf.cloudpath, mip=0, num_mips=2)
    tq.insert_all(tasks)
    cv.refresh_info()
    assert len(cv.available_mips) == 3

    tasks = create_downsampling_tasks(cf.cloudpath, mip=1, num_mips=2)
    tq.insert_all(tasks)
    cv.refresh_info()
    assert len(cv.available_mips) == 4

    cv.mip = 3
    assert cv[:, :, :].shape == (64, 64, 64, 1)
def test_contrast_normalization_task():
    directory = '/tmp/removeme/contrast_normalization/'
    src_path = 'file://' + directory
    dest_path = src_path[:-1] + '2'
    delete_layer(src_path)
    delete_layer(dest_path)

    cf, imgd = create_layer(
        size=(300, 300, 129, 1), offset=(0, 0, 0),
        layer_type="image", layer_name='contrast_normalization'
    )

    tq = MockTaskQueue()
    tasks = tc.create_luminance_levels_tasks(
        layer_path=src_path, coverage_factor=0.01,
        shape=None, offset=(0, 0, 0), mip=0
    )
    tq.insert_all(tasks)

    tasks = tc.create_contrast_normalization_tasks(
        src_path=src_path,
        dest_path=dest_path,
        levels_path=None,
        shape=None,
        mip=0,
        clip_fraction=0.01,
        fill_missing=False,
        translate=(0, 0, 0),
        minval=None,
        maxval=None,
        bounds=None,
        bounds_mip=0,
    )
    tq.insert_all(tasks)
# The original def line was truncated; the function name and leading
# `storage` parameter below are assumptions reconstructed from the body.
def upload_build_chunks(storage, volume, offset=[0, 0, 0],
                        build_chunk_size=[1024, 1024, 128]):
    offset = Vec(*offset)
    shape = Vec(*volume.shape[:3])
    build_chunk_size = Vec(*build_chunk_size)

    # Tile the volume into build-sized chunks and upload each one.
    for spt in xyzrange((0, 0, 0), shape, build_chunk_size):
        ept = min2(spt + build_chunk_size, shape)
        bbox = Bbox(spt, ept)
        chunk = volume[bbox.to_slices()]
        bbox += offset
        filename = 'build/{}'.format(bbox.to_filename())
        storage.put_file(filename, chunks.encode_npz(chunk))
    storage.wait()

def cascade(tq, fnlist):
    for fn in fnlist:
        fn(tq)
        # Poll the queue until this batch of tasks drains.
        N = tq.enqueued
        while N > 0:
            N = tq.enqueued
            print('\r {} remaining'.format(N), end='')
            time.sleep(2)

if __name__ == '__main__':
    with MockTaskQueue() as task_queue:
        pass
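# A hypothetical driver tying the helpers above together (the original
# __main__ body is empty, so this is purely illustrative). It uploads raw
# chunks under the build/ prefix, then lets cascade() poll the queue until
# the resulting batch of tasks drains. All arguments are caller-supplied
# assumptions, not from the original source.
def run_build_pipeline(storage, volume, layer_path, task_queue):
    upload_build_chunks(storage, volume, offset=(0, 0, 0))
    cascade(task_queue, [
        lambda q: tc.create_downsampling_tasks(q, layer_path, mip=0, num_mips=3),
    ])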
def tq():
    return MockTaskQueue()
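# Assumption: in a pytest suite this helper would normally be registered as a
# fixture so tests can request `tq` by argument name. A minimal sketch:
import pytest

@pytest.fixture(name='tq')
def tq_fixture():
    return MockTaskQueue()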