def test_transfer():
  # Bbox version
  delete_layer()
  cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
  img = cv[:]

  cv.transfer_to('file:///tmp/removeme/transfer/', cv.bounds)

  ls = os.listdir('/tmp/removeme/transfer/1_1_1/')
  assert '0-64_0-64_0-64.gz' in ls
  assert len(ls) == 2

  assert os.path.exists('/tmp/removeme/transfer/info')
  assert os.path.exists('/tmp/removeme/transfer/provenance')

  dcv = CloudVolume("file:///tmp/removeme/transfer")
  dcv.info["dont_touch_me_bro"] = True
  dcv.commit_info()

  cv.transfer_to('file:///tmp/removeme/transfer/', cv.bounds)

  dcv.refresh_info()
  assert 'dont_touch_me_bro' in dcv.info
  assert np.all(img == dcv[:])

def test_downsample_with_offset():
  delete_layer()
  storage, data = create_layer(size=(512,512,128,1), offset=(3,7,11))
  cv = CloudVolume(storage.layer_path)
  assert len(cv.scales) == 4
  assert len(cv.available_mips) == 4

  cv.commit_info()

  create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=3)

  cv.refresh_info()

  assert len(cv.available_mips) == 4
  assert np.array_equal(cv.mip_volume_size(0), [ 512, 512, 128 ])
  assert np.array_equal(cv.mip_volume_size(1), [ 256, 256, 128 ])
  assert np.array_equal(cv.mip_volume_size(2), [ 128, 128, 128 ])
  assert np.array_equal(cv.mip_volume_size(3), [  64,  64, 128 ])

  assert np.all(cv.mip_voxel_offset(3) == (0,0,11))

  cv.mip = 0
  assert np.all(cv[3:67, 7:71, 11:75] == data[0:64, 0:64, 0:64])

  data_ds1 = downsample.downsample_with_averaging(data, factor=[2, 2, 1, 1])
  cv.mip = 1
  assert np.all(cv[1:33, 3:35, 11:75] == data_ds1[0:32, 0:32, 0:64])

  data_ds2 = downsample.downsample_with_averaging(data_ds1, factor=[2, 2, 1, 1])
  cv.mip = 2
  assert np.all(cv[0:16, 1:17, 11:75] == data_ds2[0:16, 0:16, 0:64])

  data_ds3 = downsample.downsample_with_averaging(data_ds2, factor=[2, 2, 1, 1])
  cv.mip = 3
  assert np.all(cv[0:8, 0:8, 11:75] == data_ds3[0:8, 0:8, 0:64])

def test_downsample_w_missing():
  delete_layer()
  storage, data = create_layer(size=(512,512,128,1), offset=(3,7,11))
  cv = CloudVolume(storage.layer_path)
  assert len(cv.scales) == 4
  assert len(cv.available_mips) == 4
  delete_layer()

  cv.commit_info()

  try:
    create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=3, fill_missing=False)
  except EmptyVolumeException:
    pass

  create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=3, fill_missing=True)

  cv.refresh_info()

  assert len(cv.available_mips) == 4
  assert np.array_equal(cv.mip_volume_size(0), [ 512, 512, 128 ])
  assert np.array_equal(cv.mip_volume_size(1), [ 256, 256, 128 ])
  assert np.array_equal(cv.mip_volume_size(2), [ 128, 128, 128 ])
  assert np.array_equal(cv.mip_volume_size(3), [  64,  64, 128 ])

  assert np.all(cv.mip_voxel_offset(3) == (0,0,11))

  # the underlying chunks were deleted above, so fill_missing reads back zeros
  cv.mip = 0
  cv.fill_missing = True
  assert np.count_nonzero(cv[3:67, 7:71, 11:75]) == 0

def test_downsample_no_offset(compression_method):
  delete_layer()
  storage, data = create_layer(size=(1024, 1024, 128, 1), offset=(0, 0, 0))
  cv = CloudVolume(storage.layer_path)
  assert len(cv.scales) == 1
  assert len(cv.available_mips) == 1

  cv.commit_info()

  tq = MockTaskQueue()
  tasks = create_downsampling_tasks(storage.layer_path, mip=0, num_mips=4, compress=compression_method)
  tq.insert_all(tasks)

  cv.refresh_info()

  assert len(cv.available_mips) == 5
  assert np.array_equal(cv.mip_volume_size(0), [1024, 1024, 128])
  assert np.array_equal(cv.mip_volume_size(1), [ 512,  512, 128])
  assert np.array_equal(cv.mip_volume_size(2), [ 256,  256, 128])
  assert np.array_equal(cv.mip_volume_size(3), [ 128,  128, 128])
  assert np.array_equal(cv.mip_volume_size(4), [  64,   64, 128])

  slice64 = np.s_[0:64, 0:64, 0:64]

  cv.mip = 0
  assert np.all(cv[slice64] == data[slice64])

  data_ds1, = tinybrain.downsample_with_averaging(data, factor=[2, 2, 1, 1])
  cv.mip = 1
  assert np.all(cv[slice64] == data_ds1[slice64])

  data_ds2, = tinybrain.downsample_with_averaging(data, factor=[4, 4, 1, 1])
  cv.mip = 2
  assert np.all(cv[slice64] == data_ds2[slice64])

  data_ds3, = tinybrain.downsample_with_averaging(data, factor=[8, 8, 1, 1])
  cv.mip = 3
  assert np.all(cv[slice64] == data_ds3[slice64])

  data_ds4, = tinybrain.downsample_with_averaging(data, factor=[16, 16, 1, 1])
  cv.mip = 4
  assert np.all(cv[slice64] == data_ds4[slice64])

def test_downsample_higher_mip():
  delete_layer()
  storage, data = create_layer(size=(512,512,64,1), offset=(3,7,11))
  cv = CloudVolume(storage.layer_path)
  cv.info['scales'] = cv.info['scales'][:1]
  cv.commit_info()

  create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=2)
  cv.refresh_info()
  assert len(cv.available_mips) == 3

  create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=1, num_mips=2)
  cv.refresh_info()
  assert len(cv.available_mips) == 4

  cv.mip = 3
  assert cv[:,:,:].shape == (64,64,64,1)

def test_transfer_task_subset(tq, src_cv, transfer_data):
  dest_cv = CloudVolume(destpath, info=copy.deepcopy(src_cv.info))
  dest_cv.scale["size"] = [256, 256, 64]
  dest_cv.commit_info()

  tasks = tc.create_transfer_tasks(
    src_cv.cloudpath, destpath,
    chunk_size=(64, 64, 64),
    translate=(-128, -128, -64),
  )
  tq.insert_all(tasks)

  dest_cv.refresh_info()

  assert len(dest_cv.scales) == 3
  assert np.all(src_cv[128:128 + 256, 128:128 + 256, 64:64 + 64] == dest_cv[:])

  rmsrc()
  rmdest()

def test_downsample_higher_mip():
  delete_layer()
  cf, data = create_layer(size=(512,512,64,1), offset=(3,7,11))
  cv = CloudVolume(cf.cloudpath)
  cv.info['scales'] = cv.info['scales'][:1]
  tq = MockTaskQueue()

  cv.commit_info()

  tasks = create_downsampling_tasks(cf.cloudpath, mip=0, num_mips=2)
  tq.insert_all(tasks)
  cv.refresh_info()
  assert len(cv.available_mips) == 3

  tasks = create_downsampling_tasks(cf.cloudpath, mip=1, num_mips=2)
  tq.insert_all(tasks)
  cv.refresh_info()
  assert len(cv.available_mips) == 4

  cv.mip = 3
  assert cv[:,:,:].shape == (64,64,64,1)

def test_downsample_no_offset():
  delete_layer()
  storage, data = create_layer(size=(1024,1024,128,1), offset=(0,0,0))
  cv = CloudVolume(storage.layer_path)
  assert len(cv.scales) == 5
  assert len(cv.available_mips) == 5

  cv.commit_info()

  create_downsampling_tasks(MockTaskQueue(), storage.layer_path, mip=0, num_mips=4)

  cv.refresh_info()

  assert len(cv.available_mips) == 5
  assert np.array_equal(cv.mip_volume_size(0), [ 1024, 1024, 128 ])
  assert np.array_equal(cv.mip_volume_size(1), [  512,  512, 128 ])
  assert np.array_equal(cv.mip_volume_size(2), [  256,  256, 128 ])
  assert np.array_equal(cv.mip_volume_size(3), [  128,  128, 128 ])
  assert np.array_equal(cv.mip_volume_size(4), [   64,   64, 128 ])

  slice64 = np.s_[0:64, 0:64, 0:64]

  cv.mip = 0
  assert np.all(cv[slice64] == data[slice64])

  data_ds1 = downsample.downsample_with_averaging(data, factor=[2, 2, 1, 1])
  cv.mip = 1
  assert np.all(cv[slice64] == data_ds1[slice64])

  data_ds2 = downsample.downsample_with_averaging(data_ds1, factor=[2, 2, 1, 1])
  cv.mip = 2
  assert np.all(cv[slice64] == data_ds2[slice64])

  data_ds3 = downsample.downsample_with_averaging(data_ds2, factor=[2, 2, 1, 1])
  cv.mip = 3
  assert np.all(cv[slice64] == data_ds3[slice64])

  data_ds4 = downsample.downsample_with_averaging(data_ds3, factor=[2, 2, 1, 1])
  cv.mip = 4
  assert np.all(cv[slice64] == data_ds4[slice64])

def test_downsample_no_offset_2x2x2():
  delete_layer()
  cf, data = create_layer(size=(512,512,512,1), offset=(0,0,0))
  cv = CloudVolume(cf.cloudpath)
  assert len(cv.scales) == 1
  assert len(cv.available_mips) == 1

  cv.commit_info()

  tq = MockTaskQueue()
  tasks = create_downsampling_tasks(
    cf.cloudpath, mip=0, num_mips=3,
    compress=None, factor=(2,2,2)
  )
  tq.insert_all(tasks)

  cv.refresh_info()

  assert len(cv.available_mips) == 4
  assert np.array_equal(cv.mip_volume_size(0), [ 512, 512, 512 ])
  assert np.array_equal(cv.mip_volume_size(1), [ 256, 256, 256 ])
  assert np.array_equal(cv.mip_volume_size(2), [ 128, 128, 128 ])
  assert np.array_equal(cv.mip_volume_size(3), [  64,  64,  64 ])

  slice64 = np.s_[0:64, 0:64, 0:64]

  cv.mip = 0
  assert np.all(cv[slice64] == data[slice64])

  data_ds1, = tinybrain.downsample_with_averaging(data, factor=[2, 2, 2, 1])
  cv.mip = 1
  assert np.all(cv[slice64] == data_ds1[slice64])

  data_ds2, = tinybrain.downsample_with_averaging(data, factor=[4, 4, 4, 1])
  cv.mip = 2
  assert np.all(cv[slice64] == data_ds2[slice64])

  data_ds3, = tinybrain.downsample_with_averaging(data, factor=[8, 8, 8, 1])
  cv.mip = 3
  assert np.all(cv[slice64] == data_ds3[slice64])

def test_redirects():
  info = CloudVolume.create_new_info(
    num_channels=1, # Increase this number when we add more tests for RGB
    layer_type='image',
    data_type='uint8',
    encoding='raw',
    resolution=[1, 1, 1],
    voxel_offset=[0, 0, 0],
    volume_size=[128, 128, 64],
    mesh='mesh',
    chunk_size=[64, 64, 64],
  )

  vol = CloudVolume('file:///tmp/cloudvolume/redirects_0', mip=0, info=info)
  vol.commit_info()
  vol.refresh_info()

  vol.info['redirect'] = 'file:///tmp/cloudvolume/redirects_0'
  vol.commit_info()
  vol.refresh_info()

  del vol.info['redirect']

  # build a redirect chain: redirects_0 -> redirects_1 -> ... -> redirects_9
  for i in range(0, 10):
    info['redirect'] = 'file:///tmp/cloudvolume/redirects_' + str(i + 1)
    vol = CloudVolume('file:///tmp/cloudvolume/redirects_' + str(i), mip=0, info=info)
    vol.commit_info()
  else:
    del vol.info['redirect']
    vol.commit_info()

  vol = CloudVolume('file:///tmp/cloudvolume/redirects_0', mip=0)

  assert vol.cloudpath == 'file:///tmp/cloudvolume/redirects_9'

  # extending the chain past the redirect limit should raise
  info['redirect'] = 'file:///tmp/cloudvolume/redirects_10'
  vol = CloudVolume('file:///tmp/cloudvolume/redirects_9', mip=0, info=info)
  vol.commit_info()

  try:
    CloudVolume('file:///tmp/cloudvolume/redirects_0', mip=0)
    assert False
  except exceptions.TooManyRedirects:
    pass

  vol = CloudVolume('file:///tmp/cloudvolume/redirects_9', max_redirects=0)
  del vol.info['redirect']
  vol.commit_info()

  # a cycle between redirects_5 and redirects_1 must be detected
  vol = CloudVolume('file:///tmp/cloudvolume/redirects_5', max_redirects=0)
  vol.info['redirect'] = 'file:///tmp/cloudvolume/redirects_1'
  vol.commit_info()

  try:
    vol = CloudVolume('file:///tmp/cloudvolume/redirects_5')
    assert False
  except exceptions.CyclicRedirect:
    pass

  vol.info['redirect'] = 'file:///tmp/cloudvolume/redirects_6'
  vol.commit_info()

  vol = CloudVolume('file:///tmp/cloudvolume/redirects_1')

  # volumes opened through a redirect are read-only
  try:
    vol[:, :, :] = 1
    assert False
  except exceptions.ReadOnlyException:
    pass

  for i in range(0, 10):
    delete_layer('/tmp/cloudvolume/redirects_' + str(i))

def create_contrast_normalization_tasks(
    src_path, dest_path, levels_path=None,
    shape=None, mip=0, clip_fraction=0.01,
    fill_missing=False, translate=(0, 0, 0),
    minval=None, maxval=None, bounds=None
  ):
  srcvol = CloudVolume(src_path, mip=mip)

  try:
    dvol = CloudVolume(dest_path, mip=mip)
  except Exception: # no info file
    info = copy.deepcopy(srcvol.info)
    dvol = CloudVolume(dest_path, mip=mip, info=info)
    dvol.info['scales'] = dvol.info['scales'][:mip + 1]
    dvol.commit_info()

  if bounds is None:
    bounds = srcvol.bounds.clone()

  if shape is None:
    shape = Bbox((0, 0, 0), (2048, 2048, 64))
    shape = shape.shrink_to_chunk_size(dvol.underlying).size3()
    shape = Vec.clamp(shape, (1, 1, 1), bounds.size3())

  shape = Vec(*shape)

  create_downsample_scales(dest_path, mip=mip, ds_shape=shape, preserve_chunk_size=True)
  dvol.refresh_info()

  bounds = get_bounds(srcvol, bounds, shape, mip)

  class ContrastNormalizationTaskIterator(object):
    def __len__(self):
      return int(reduce(operator.mul, np.ceil(bounds.size3() / shape)))
    def __iter__(self):
      for startpt in xyzrange(bounds.minpt, bounds.maxpt, shape):
        task_shape = min2(shape.clone(), srcvol.bounds.maxpt - startpt)
        yield ContrastNormalizationTask(
          src_path=src_path,
          dest_path=dest_path,
          levels_path=levels_path,
          shape=task_shape,
          offset=startpt.clone(),
          clip_fraction=clip_fraction,
          mip=mip,
          fill_missing=fill_missing,
          translate=translate,
          minval=minval,
          maxval=maxval,
        )

  dvol.provenance.processing.append({
    'method': {
      'task': 'ContrastNormalizationTask',
      'src_path': src_path,
      'dest_path': dest_path,
      'shape': Vec(*shape).tolist(),
      'clip_fraction': clip_fraction,
      'mip': mip,
      'translate': Vec(*translate).tolist(),
      'minval': minval,
      'maxval': maxval,
      'bounds': [ bounds.minpt.tolist(), bounds.maxpt.tolist() ],
    },
    'by': OPERATOR_CONTACT,
    'date': strftime('%Y-%m-%d %H:%M %Z'),
  })
  dvol.commit_provenance()

  return ContrastNormalizationTaskIterator()

def create_contrast_normalization_tasks(
    src_path, dest_path, levels_path=None,
    shape=None, mip=0, clip_fraction=0.01,
    fill_missing=False, translate=(0, 0, 0),
    minval=None, maxval=None, bounds=None,
    bounds_mip=0
  ):
  """
  Use the output of luminance levels to contrast correct the image
  by stretching the histogram to cover the full range of the data type.
  """
  srcvol = CloudVolume(src_path, mip=mip)

  try:
    dvol = CloudVolume(dest_path, mip=mip)
  except Exception: # no info file
    info = copy.deepcopy(srcvol.info)
    dvol = CloudVolume(dest_path, mip=mip, info=info)
    dvol.info['scales'] = dvol.info['scales'][:mip + 1]
    dvol.commit_info()

  if bounds is None:
    bounds = srcvol.bounds.clone()

  if shape is None:
    shape = Bbox((0, 0, 0), (2048, 2048, 64))
    shape = shape.shrink_to_chunk_size(dvol.underlying).size3()
    shape = Vec.clamp(shape, (1, 1, 1), bounds.size3())

  shape = Vec(*shape)

  downsample_scales.create_downsample_scales(
    dest_path, mip=mip, ds_shape=shape, preserve_chunk_size=True
  )
  dvol.refresh_info()

  bounds = get_bounds(srcvol, bounds, mip, bounds_mip=bounds_mip)

  class ContrastNormalizationTaskIterator(FinelyDividedTaskIterator):
    def task(self, shape, offset):
      return ContrastNormalizationTask(
        src_path=src_path,
        dest_path=dest_path,
        levels_path=levels_path,
        shape=shape.clone(),
        offset=offset.clone(),
        clip_fraction=clip_fraction,
        mip=mip,
        fill_missing=fill_missing,
        translate=translate,
        minval=minval,
        maxval=maxval,
      )
    def on_finish(self):
      dvol.provenance.processing.append({
        'method': {
          'task': 'ContrastNormalizationTask',
          'src_path': src_path,
          'dest_path': dest_path,
          'shape': Vec(*shape).tolist(),
          'clip_fraction': clip_fraction,
          'mip': mip,
          'translate': Vec(*translate).tolist(),
          'minval': minval,
          'maxval': maxval,
          'bounds': [ bounds.minpt.tolist(), bounds.maxpt.tolist() ],
        },
        'by': operator_contact(),
        'date': strftime('%Y-%m-%d %H:%M %Z'),
      })
      dvol.commit_provenance()

  return ContrastNormalizationTaskIterator(bounds, shape)

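# A minimal usage sketch, not part of the library: it reuses the MockTaskQueue
# pattern from the tests above and uses placeholder file:// paths; the function
# and argument names shown are only the ones defined in this file. It illustrates
# how the returned task iterator is meant to be consumed.
def example_contrast_normalization_usage():
  tq = MockTaskQueue()
  tasks = create_contrast_normalization_tasks(
    src_path='file:///tmp/example/image',      # hypothetical source layer
    dest_path='file:///tmp/example/image_cn',  # hypothetical destination layer
    levels_path=None,     # None: fall back to the default levels location (assumption)
    mip=0,
    clip_fraction=0.01,   # clip 1% from each tail of the histogram
  )
  tq.insert_all(tasks)    # executes each ContrastNormalizationTask in-process
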
def create_contrast_normalization_tasks(
    task_queue, src_path, dest_path,
    shape=None, mip=0, clip_fraction=0.01,
    fill_missing=False, translate=(0, 0, 0)
  ):
  srcvol = CloudVolume(src_path, mip=mip)

  try:
    dvol = CloudVolume(dest_path, mip=mip)
  except Exception: # no info file
    info = copy.deepcopy(srcvol.info)
    dvol = CloudVolume(dest_path, mip=mip, info=info)
    dvol.info['scales'] = dvol.info['scales'][:mip + 1]
    dvol.commit_info()

  if shape is None:
    shape = Bbox((0, 0, 0), (2048, 2048, 64))
    shape = shape.shrink_to_chunk_size(dvol.underlying).size3()

  shape = Vec(*shape)

  create_downsample_scales(dest_path, mip=mip, ds_shape=shape, preserve_chunk_size=True)
  dvol.refresh_info()

  bounds = srcvol.bounds.clone()
  for startpt in tqdm(xyzrange(bounds.minpt, bounds.maxpt, shape), desc="Inserting Contrast Normalization Tasks"):
    task_shape = min2(shape.clone(), srcvol.bounds.maxpt - startpt)
    task = ContrastNormalizationTask(
      src_path=src_path,
      dest_path=dest_path,
      shape=task_shape,
      offset=startpt.clone(),
      clip_fraction=clip_fraction,
      mip=mip,
      fill_missing=fill_missing,
      translate=translate,
    )
    task_queue.insert(task)
  task_queue.wait('Uploading Contrast Normalization Tasks')

  dvol.provenance.processing.append({
    'method': {
      'task': 'ContrastNormalizationTask',
      'src_path': src_path,
      'dest_path': dest_path,
      'shape': Vec(*shape).tolist(),
      'clip_fraction': clip_fraction,
      'mip': mip,
      'translate': Vec(*translate).tolist(),
    },
    'by': USER_EMAIL,
    'date': strftime('%Y-%m-%d %H:%M %Z'),
  })
  dvol.commit_provenance()

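# A hedged usage sketch for the older queue-first signature above. Paths are
# placeholders, and it assumes the queue object provides the insert()/wait()
# interface this function calls, as MockTaskQueue does elsewhere in these tests.
def example_contrast_normalization_usage_queue_first():
  create_contrast_normalization_tasks(
    MockTaskQueue(),                           # tasks run as they are inserted
    src_path='file:///tmp/example/image',      # hypothetical source layer
    dest_path='file:///tmp/example/image_cn',  # hypothetical destination layer
    mip=0,
    clip_fraction=0.01,
  )
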