def test_aligned_read():
    """Chunk-aligned reads return correct shapes and contents, with and without green threads."""
    for use_green in (False, True):
        print("green", use_green)

        # Volume that is exactly one chunk: a full read round-trips the data.
        delete_layer()
        cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
        cv.green_threads = use_green
        full = cv[0:50, 0:50, 0:50]
        assert full.shape == (50, 50, 50, 1)  # trailing axis is the channel count
        assert np.all(full == data)

        # Two chunks along x: read just the first chunk.
        delete_layer()
        cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
        cv.green_threads = use_green
        first = cv[0:64, 0:64, 0:64]
        assert first.shape == (64, 64, 64, 1)
        assert np.all(first == data[:64, :64, :64, :])

        # Offset volume: read each of the two chunks through global coordinates.
        delete_layer()
        cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
        cv.green_threads = use_green
        cutout = cv[10:74, 20:84, 0:64]
        assert cutout.shape == (64, 64, 64, 1)
        assert np.all(cutout == data[:64, :64, :64, :])

        # Second chunk along x.
        cutout2 = cv[74:138, 20:84, 0:64]
        assert cutout2.shape == (64, 64, 64, 1)
        assert np.all(cutout2 == data[64:128, :64, :64, :])

        # A single-point index still yields a 4D, single-voxel array.
        assert cv[25, 25, 25].shape == (1, 1, 1, 1)
def test_non_aligned_read():
    """Reads crossing chunk boundaries, single-voxel reads, and strided slices."""
    # Read spanning a chunk edge along x.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    cutout = cv[31:65, 0:64, 0:64]
    assert cutout.shape == (34, 64, 64, 1)  # trailing axis is the channel count
    assert np.all(cutout == data[31:65, :64, :64, :])

    # Single-voxel read.
    delete_layer()
    cv, data = create_layer(size=(64, 64, 64, 1), offset=(0, 0, 0))
    voxel = cv[22:23, 22:23, 22:23]
    assert voxel.shape == (1, 1, 1, 1)
    assert np.all(voxel == data[22:23, 22:23, 22:23, :])

    # Positive step slices must equal stepping a full read (negative steps unsupported).
    strided = cv[::2, ::2, ::2, :]
    stepped_full = cv[:, :, :, :][::2, ::2, ::2, :]
    assert np.array_equal(strided, stepped_full)

    # Strided read on a volume with a nonzero voxel offset.
    delete_layer()
    cv, data = create_layer(size=(256, 256, 64, 1), offset=(3, 7, 11))
    cutout = cv[22:77:2, 22:197:3, 22:32]
    expected = data[19:74:2, 15:190:3, 11:21, :]
    assert cutout.shape == (28, 59, 10, 1)
    assert expected.shape == (28, 59, 10, 1)
    assert np.all(cutout == expected)
def test_aligned_read(green, encoding):
    """Parametrized aligned reads compared against the source data per encoding."""
    # Single-chunk volume round-trips exactly.
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0), encoding=encoding)
    cv.green_threads = green
    cutout = cv[0:50, 0:50, 0:50]
    assert cutout.shape == (50, 50, 50, 1)  # trailing axis is the channel count
    assert image_equal(cutout, data, encoding)

    # First chunk of a two-chunk volume.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0), encoding=encoding)
    cv.green_threads = green
    cutout = cv[0:64, 0:64, 0:64]
    assert cutout.shape == (64, 64, 64, 1)
    assert image_equal(cutout, data[:64, :64, :64, :], encoding)

    # Offset volume: read both chunks through global coordinates.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0), encoding=encoding)
    cv.green_threads = green
    cutout = cv[10:74, 20:84, 0:64]
    assert cutout.shape == (64, 64, 64, 1)
    assert image_equal(cutout, data[:64, :64, :64, :], encoding)

    cutout2 = cv[74:138, 20:84, 0:64]
    assert cutout2.shape == (64, 64, 64, 1)
    assert image_equal(cutout2, data[64:128, :64, :64, :], encoding)

    # A single-point index still yields a 4D, single-voxel array.
    assert cv[25, 25, 25].shape == (1, 1, 1, 1)
def test_write():
    """Full-volume writes round-trip; out-of-bounds and non-aligned writes are rejected."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))

    # Overwrite the whole volume with zeros, then with random data.
    payload = np.zeros(shape=(50, 50, 50, 1), dtype=np.uint8)
    cv[0:50, 0:50, 0:50] = payload
    assert np.all(cv[0:50, 0:50, 0:50] == payload)

    payload = np.random.randint(255, size=(50, 50, 50, 1), dtype=np.uint8)
    cv[0:50, 0:50, 0:50] = payload
    assert np.all(cv[0:50, 0:50, 0:50] == payload)

    # Writing past the volume bounds is rejected.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
    with pytest.raises(ValueError):
        cv[74:150, 20:84, 0:64] = np.ones(shape=(64, 64, 64, 1), dtype=np.uint8)

    # Writes not aligned to chunk boundaries are rejected.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
    with pytest.raises(ValueError):
        cv[21:85, 0:64, 0:64] = np.ones(shape=(64, 64, 64, 1), dtype=np.uint8)

    # Bounds check must handle a short (non-divisible) boundary chunk.
    delete_layer()
    cv, data = create_layer(size=(25, 25, 25, 1), offset=(1, 3, 5))
    cv.info['scales'][0]['chunk_sizes'] = [[11, 11, 11]]
    cv[:] = np.ones(shape=(25, 25, 25, 1), dtype=np.uint8)
def test_non_aligned_write():
    """Non-aligned writes fail unless non_aligned_writes is enabled, then land correctly.

    Uses pytest.raises for the expected AlignmentError instead of the
    try/assert False/except pattern, consistent with the other write tests.
    """
    delete_layer()
    offset = Vec(5, 7, 13)
    cv, _ = create_layer(size=(1024, 1024, 5, 1), offset=offset)
    cv[:] = np.zeros(shape=cv.shape, dtype=cv.dtype)

    # Write inside a single chunk: rejected by default.
    onepx = Bbox((10, 200, 15), (11, 201, 16))
    with pytest.raises(AlignmentError):
        cv[onepx.to_slices()] = np.ones(shape=onepx.size3(), dtype=cv.dtype)

    # Allowed once the flag is enabled; the voxel lands at onepx minus the offset.
    cv.non_aligned_writes = True
    cv[onepx.to_slices()] = np.ones(shape=onepx.size3(), dtype=cv.dtype)
    answer = np.zeros(shape=cv.shape, dtype=cv.dtype)
    answer[5, 193, 2] = 1
    assert np.all(cv[:] == answer)

    # Write across multiple chunks.
    cv[:] = np.zeros(shape=cv.shape, dtype=cv.dtype)
    cv.non_aligned_writes = True
    middle = Bbox((512 - 10, 512 - 11, 0), (512 + 10, 512 + 11, 5)) + offset
    cv[middle.to_slices()] = np.ones(shape=middle.size3(), dtype=cv.dtype)
    answer = np.zeros(shape=cv.shape, dtype=cv.dtype)
    answer[502:522, 501:523, :] = 1
    assert np.all(cv[:] == answer)

    # The same write is rejected again once the flag is turned back off.
    cv.non_aligned_writes = False
    with pytest.raises(AlignmentError):
        cv[middle.to_slices()] = np.ones(shape=middle.size3(), dtype=cv.dtype)

    # Big inner shell spanning many chunks.
    delete_layer()
    cv, _ = create_layer(size=(1024, 1024, 5, 1), offset=offset)
    cv[:] = np.zeros(shape=cv.shape, dtype=cv.dtype)
    middle = Bbox((512 - 150, 512 - 150, 0), (512 + 150, 512 + 150, 5)) + offset
    with pytest.raises(AlignmentError):
        cv[middle.to_slices()] = np.ones(shape=middle.size3(), dtype=cv.dtype)

    cv.non_aligned_writes = True
    cv[middle.to_slices()] = np.ones(shape=middle.size3(), dtype=cv.dtype)
    answer = np.zeros(shape=cv.shape, dtype=cv.dtype)
    answer[362:662, 362:662, :] = 1
    assert np.all(cv[:] == answer)
def test_delete_black_uploads():
    """delete_black_uploads removes chunk files whose contents equal the background color."""
    for parallel in (1, 2):
        delete_layer()
        cv, _ = create_layer(size=(256, 256, 256, 1), offset=(0, 0, 0))
        # 256^3 volume in 64^3 chunks -> 4*4*4 = 64 chunk files on disk.
        ls = os.listdir('/tmp/removeme/layer/1_1_1/')
        assert len(ls) == 64
        cv.parallel = parallel
        cv.delete_black_uploads = True
        # Zeroing an aligned 128^3 region should delete its 2*2*2 = 8 chunk files.
        cv[64:64 + 128, 64:64 + 128, 64:64 + 128] = 0
        ls = os.listdir('/tmp/removeme/layer/1_1_1/')
        assert len(ls) == (64 - 8)
        # Re-zeroing the same region leaves the file count unchanged.
        cv[64:64 + 128, 64:64 + 128, 64:64 + 128] = 0
        ls = os.listdir('/tmp/removeme/layer/1_1_1/')
        assert len(ls) == (64 - 8)
        # With background_color = 1, writing all ones deletes every chunk file...
        cv.image.background_color = 1
        cv[:] = 1
        ls = os.listdir('/tmp/removeme/layer/1_1_1/')
        assert len(ls) == 0
        # ...and writing zeros (now non-background) re-creates all 64 files.
        cv[:] = 0
        ls = os.listdir('/tmp/removeme/layer/1_1_1/')
        assert len(ls) == 64
def test_slices_to_global_coords():
    """slices_to_global_coords maps slices at the current mip into mip 0 coordinates."""
    delete_layer()
    cv, _ = create_layer(size=(1024, 1024, 5, 1), offset=(7, 0, 0))

    # Register a second, 2x2x1-downsampled scale.
    downsampled = copy.deepcopy(cv.info['scales'][0])
    downsampled['voxel_offset'] = [3, 0, 0]
    downsampled['volume_size'] = [512, 512, 5]
    downsampled['resolution'] = [2, 2, 1]
    downsampled['key'] = '2_2_1'
    cv.info['scales'].append(downsampled)
    cv.commit_info()
    assert len(cv.available_mips) == 2

    # At mip 1, x and y double when mapped into global (mip 0) space.
    cv.mip = 1
    mapped = Bbox.from_slices(cv.slices_to_global_coords(Bbox((100, 100, 1), (500, 512, 2))))
    assert mapped == Bbox((200, 200, 1), (1000, 1024, 2))

    # At mip 0, the mapping is the identity.
    cv.mip = 0
    mapped = Bbox.from_slices(cv.slices_to_global_coords(Bbox((100, 100, 1), (500, 512, 2))))
    assert mapped == Bbox((100, 100, 1), (500, 512, 2))
def test_autocropped_read():
    """With autocrop on and bounded off, reads clamp to the physical volume."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
    cv.autocrop = True
    cv.bounded = False

    # Request hangs off the low side: only the intersection is returned.
    img = cv[-25:25, -25:25, -25:25]
    assert img.shape == (25, 25, 25, 1)
    assert np.all(img == data[:25, :25, :25])

    # Request hangs off the high side.
    img = cv[40:60, 40:60, 40:60]
    assert img.shape == (10, 10, 10, 1)
    assert np.all(img == data[40:, 40:, 40:])

    # Request fully contains the volume: clipped to the whole dataset.
    img = cv[-100:100, -100:100, -100:100]
    assert img.shape == (50, 50, 50, 1)
    assert np.all(img == data)

    # Request fully inside the volume: returned as-is.
    img = cv[10:20, 10:20, 10:20]
    assert img.shape == (10, 10, 10, 1)
    assert np.all(img == data[10:20, 10:20, 10:20])

    # No intersection at all: an empty result.
    img = cv[100:120, 100:120, 100:120]
    assert img.shape == (0, 0, 0, 1)
    assert np.all(img == data[0:0, 0:0, 0:0])
def test_transfer():
    """transfer_to copies chunks + metadata and preserves the destination's info edits."""
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    img = cv[:]

    cv.transfer_to('file:///tmp/removeme/transfer/', cv.bounds)

    listing = os.listdir('/tmp/removeme/transfer/1_1_1/')
    assert '0-64_0-64_0-64.gz' in listing
    assert len(listing) == 2
    assert os.path.exists('/tmp/removeme/transfer/info')
    assert os.path.exists('/tmp/removeme/transfer/provenance')

    # Mark the destination's info, transfer again, and verify the mark survives.
    dcv = CloudVolume("file:///tmp/removeme/transfer")
    dcv.info["dont_touch_me_bro"] = True
    dcv.commit_info()

    cv.transfer_to('file:///tmp/removeme/transfer/', cv.bounds)

    dcv.refresh_info()
    assert 'dont_touch_me_bro' in dcv.info
    assert np.all(img == dcv[:])
def test_autocropped_write():
    """With autocrop enabled, writes are clipped to the volume bounds."""
    delete_layer()
    cv, _ = create_layer(size=(100, 100, 100, 1), offset=(0, 0, 0))
    cv.autocrop = True
    cv.bounded = False

    # Oversized write covering the whole volume: only the in-bounds core lands.
    block = np.ones(shape=(300, 300, 300, 1), dtype=np.uint8)
    cv[-100:200, -100:200, -100:200] = block
    assert np.all(cv[:, :, :] == block[0:100, 0:100, 0:100])

    block = np.random.randint(255, size=(100, 100, 100, 1), dtype=np.uint8)

    # Hangs off the low corner: the high part of the source is written.
    cv[-50:50, -50:50, -50:50] = block
    assert np.all(cv[0:50, 0:50, 0:50] == block[50:, 50:, 50:])

    # Hangs off the high corner: the low part of the source is written.
    cv[50:150, 50:150, 50:150] = block
    assert np.all(cv[50:, 50:, 50:] == block[:50, :50, :50])

    # Fully inside the volume: written verbatim.
    cv[0:50, 0:50, 0:50] = block[:50, :50, :50]
    assert np.all(cv[0:50, 0:50, 0:50] == block[:50, :50, :50])

    # Entirely outside the volume: nothing is written.
    block = np.ones(shape=(100, 100, 100, 1), dtype=np.uint8)
    cv[:] = block + 1
    cv[100:200, 100:200, 100:200] = block
    assert np.all(cv[:, :, :] != 1)
def test_bounds():
    """Bounded volumes reject out-of-range reads; unbounded ones zero-fill them.

    The original try/except ValueError/else: assert False pattern is replaced
    with pytest.raises, consistent with the other tests in this file. The
    semantics are preserved: the block passes if any of the three reads raises.
    """
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(100, 100, 100))
    cv.bounded = True

    # At least one of these out-of-range reads must raise ValueError.
    with pytest.raises(ValueError):
        cv[0:, 0:, 0:, :]
        cv[100:229, 100:165, 100:165, 0]
        cv[99:228, 100:164, 100:164, 0]

    # An exactly in-bounds read must succeed ("don't die").
    cutout = cv[100:228, 100:164, 100:164, 0]

    # With bounding disabled, the full read works; the region outside the
    # data (beyond the 128x64x64 extent at offset 100) reads back as zeros.
    cv.bounded = False
    cutout = cv[0:, 0:, 0:, :]
    assert cutout.shape == (228, 164, 164, 1)
    assert np.count_nonzero(cutout) != 0
    cutout[100:, 100:, 100:, :] = 0
    assert np.count_nonzero(cutout) == 0
def test_multiprocess():
    """CloudVolume instances must be usable from ProcessPoolExecutor workers."""
    from concurrent.futures import ProcessPoolExecutor, as_completed

    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    cv.commit_info()

    # "The ProcessPoolExecutor class has known (unfixable)
    # problems on Python 2 and should not be relied on
    # for mission critical work."
    # https://pypi.org/project/futures/
    if sys.version_info[0] < 3:
        print(yellow("External multiprocessing not supported in Python 2."))
        return

    with ProcessPoolExecutor(max_workers=4) as ppe:
        futures = [ppe.submit(cv.refresh_info) for _ in range(0, 5)]
        for future in as_completed(futures):
            # Any error raised inside a worker is re-raised here.
            future.result()

    delete_layer()
def test_provenance():
    """Provenance serialization, commit/refresh round-trip, and input validation.

    Replaces the bare `except: pass` (which would also swallow SystemExit and
    KeyboardInterrupt) with pytest.raises(Exception).
    """
    delete_layer()
    cv, data = create_layer(size=(64, 64, 64, 1), offset=(0, 0, 0))

    # A fresh layer has an empty provenance document.
    provobj = json.loads(cv.provenance.serialize())
    assert provobj == {"sources": [], "owners": [], "processing": [], "description": ""}

    # Sources survive a commit/refresh round trip.
    cv.provenance.sources.append('*****@*****.**')
    cv.commit_provenance()
    cv.refresh_provenance()
    assert cv.provenance.sources == ['*****@*****.**']

    # Well-formed (partial) provenance dicts are accepted — should not die.
    cv = CloudVolume(cv.layer_cloudpath, provenance={})
    cv = CloudVolume(cv.layer_cloudpath, provenance={'sources': []})
    cv = CloudVolume(cv.layer_cloudpath, provenance={'owners': []})
    cv = CloudVolume(cv.layer_cloudpath, provenance={'processing': []})
    cv = CloudVolume(cv.layer_cloudpath, provenance={'description': ''})

    # Malformed provenance (sources must be a list) should die.
    with pytest.raises(Exception):
        cv = CloudVolume(cv.layer_cloudpath, provenance={'sources': 3})

    # JSON strings are accepted as provenance too.
    cv = CloudVolume(cv.layer_cloudpath, provenance="""{ "sources": [ "wow" ] }""")
    assert cv.provenance.sources[0] == 'wow'
def test_write_image_shard():
    """Sharded writes round-trip; an incompatible shard spec makes writes fail.

    Uses pytest.raises instead of try/assert False/except, consistent with the
    other tests in this file.
    """
    delete_layer()
    cv, data = create_layer(size=(256, 256, 256, 1), offset=(0, 0, 0))

    spec = {
        "@type": "neuroglancer_uint64_sharded_v1",
        "data_encoding": "gzip",
        "hash": "murmurhash3_x86_128",
        "minishard_bits": 1,
        "minishard_index_encoding": "raw",
        "preshift_bits": 3,
        "shard_bits": 0,
    }
    cv.scale['sharding'] = spec

    # With shard_bits = 0 the full-volume write round-trips.
    cv[:] = data
    sharded_data = cv[:]
    assert np.all(data == sharded_data)

    # With shard_bits = 1 the same full-volume write is no longer
    # shard-aligned and must be rejected.
    spec['shard_bits'] = 1
    with pytest.raises(exceptions.AlignmentError):
        cv[:] = data
def test_delete():
    """cv.delete removes chunk files for Bbox and slice arguments; OOB queries raise.

    Uses pytest.raises instead of try/except/else: assert False.
    """
    # Bbox version
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    defexists = Bbox((0, 0, 0), (128, 64, 64))

    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64'] == True
    assert results['1_1_1/64-128_0-64_0-64'] == True

    cv.delete(defexists)
    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64'] == False
    assert results['1_1_1/64-128_0-64_0-64'] == False

    # Slice version
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    defexists = np.s_[0:128, :, :]

    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64'] == True
    assert results['1_1_1/64-128_0-64_0-64'] == True

    cv.delete(defexists)
    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64'] == False
    assert results['1_1_1/64-128_0-64_0-64'] == False

    # Check errors: queries outside the dataset bounds must raise.
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    with pytest.raises(exceptions.OutOfBoundsError):
        cv.exists(np.s_[1:129, :, :])
def test_bbox_read():
    """Reading with a Bbox index behaves like the equivalent slice read."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))

    box = Bbox((0, 1, 2), (48, 49, 50))
    cutout = cv[box]
    assert cutout.shape == (48, 48, 48, 1)  # trailing axis is the channel count
    assert np.all(cutout == data[0:48, 1:49, 2:50])
def test_write():
    """Writes via slices and Bbox, with bounds/alignment errors, under both thread modes."""
    for use_green in (False, True):
        print("green:", use_green)
        delete_layer()
        cv, _ = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
        cv.green_threads = use_green

        # Full overwrite with zeros, then random data, via slice indexing.
        payload = np.zeros(shape=(50, 50, 50, 1), dtype=np.uint8)
        cv[0:50, 0:50, 0:50] = payload
        assert np.all(cv[0:50, 0:50, 0:50] == payload)

        payload = np.random.randint(255, size=(50, 50, 50, 1), dtype=np.uint8)
        cv[0:50, 0:50, 0:50] = payload
        assert np.all(cv[0:50, 0:50, 0:50] == payload)

        # Bbox indexing writes and reads the same region.
        payload = np.random.randint(255, size=(50, 50, 50, 1), dtype=np.uint8)
        bbx = Bbox((0, 0, 0), (50, 50, 50))
        cv[bbx] = payload
        assert np.all(cv[bbx] == payload)

        # Out-of-bounds writes are rejected.
        delete_layer()
        cv, _ = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
        cv.green_threads = use_green
        with pytest.raises(ValueError):
            cv[74:150, 20:84, 0:64] = np.ones(shape=(64, 64, 64, 1), dtype=np.uint8)

        # Non-aligned writes are rejected.
        delete_layer()
        cv, _ = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
        cv.green_threads = use_green
        with pytest.raises(ValueError):
            cv[21:85, 0:64, 0:64] = np.ones(shape=(64, 64, 64, 1), dtype=np.uint8)

        # Bounds check must handle a short boundary chunk.
        delete_layer()
        cv, _ = create_layer(size=(25, 25, 25, 1), offset=(1, 3, 5))
        cv.green_threads = use_green
        cv.info['scales'][0]['chunk_sizes'] = [[11, 11, 11]]
        cv[:] = np.ones(shape=(25, 25, 25, 1), dtype=np.uint8)
def test_cdn_cache_control():
    """cdn_cache_control maps None/bool/int/str inputs to Cache-Control headers.

    Uses pytest.raises instead of try/except/else: assert False.
    """
    delete_layer()
    create_layer(size=(128, 10, 10, 1), offset=(0, 0, 0))

    # None and True mean the default 1-hour TTL; falsy values disable caching.
    assert cdn_cache_control(None) == 'max-age=3600, s-max-age=3600'
    assert cdn_cache_control(0) == 'no-cache'
    assert cdn_cache_control(False) == 'no-cache'
    assert cdn_cache_control(True) == 'max-age=3600, s-max-age=3600'
    # Integers set a custom TTL in seconds.
    assert cdn_cache_control(1337) == 'max-age=1337, s-max-age=1337'
    # Strings pass through untouched.
    assert cdn_cache_control('private, must-revalidate') == 'private, must-revalidate'

    # Negative TTLs are invalid.
    with pytest.raises(ValueError):
        cdn_cache_control(-1)
def test_pickling():
    """CloudVolume objects survive a pickle round trip."""
    import pickle

    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    restored = pickle.loads(pickle.dumps(cv))
    assert restored.cloudpath == cv.cloudpath
    assert restored.mip == cv.mip
def test_parallel_write():
    """Parallel uploads write the same data a serial upload would."""
    delete_layer()
    cv, data = create_layer(size=(512, 512, 128, 1), offset=(0, 0, 0))
    cv.parallel = 2

    cv[:] = np.zeros(shape=(512, 512, 128, 1), dtype=cv.dtype) + 5

    data = cv[:]
    assert np.all(data == 5)
    # Drop the read result, then release the shared memory the
    # parallel operation allocated.
    del data
    cv.unlink_shared_memory()
def test_save_images():
    """save_images writes one zero-padded PNG per z-slice, in order."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))

    img = cv[:]
    directory = img.save_images()
    # Filenames are 000.png, 001.png, ... matching the z index.
    for z, fname in enumerate(sorted(os.listdir(directory))):
        assert fname == '%03d.png' % z
    shutil.rmtree(directory)
def test_number_type_read():
    """Scalar indices of any numeric type select a single voxel."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))

    numeric_types = (
        np.uint8, np.uint16, np.uint32, np.uint64,
        np.int8, np.int16, np.int32, np.int64,
        np.float16, np.float32, np.float64,
        int, float,
    )
    for numeric_type in numeric_types:
        idx = numeric_type(5)
        voxel = cv[idx, idx, idx]
        assert voxel.shape == (1, 1, 1, 1)  # trailing axis is the channel count
        assert np.all(voxel == data[5, 5, 5])
def test_exists():
    """cv.exists reports per-chunk presence for Bbox and slice queries."""
    # Bbox version
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    query = Bbox((0, 0, 0), (128, 64, 64))

    presence = cv.exists(query)
    assert len(presence) == 2
    assert presence['1_1_1/0-64_0-64_0-64'] == True
    assert presence['1_1_1/64-128_0-64_0-64'] == True

    # Delete the second chunk's file directly and re-query.
    fpath = os.path.join(cv.layer_cloudpath, cv.key, '64-128_0-64_0-64')
    fpath = fpath.replace('file://', '') + '.gz'
    os.remove(fpath)

    presence = cv.exists(query)
    assert len(presence) == 2
    assert presence['1_1_1/0-64_0-64_0-64'] == True
    assert presence['1_1_1/64-128_0-64_0-64'] == False

    # Slice version
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    query = np.s_[0:128, :, :]

    presence = cv.exists(query)
    assert len(presence) == 2
    assert presence['1_1_1/0-64_0-64_0-64'] == True
    assert presence['1_1_1/64-128_0-64_0-64'] == True

    fpath = os.path.join(cv.layer_cloudpath, cv.key, '64-128_0-64_0-64')
    fpath = fpath.replace('file://', '') + '.gz'
    os.remove(fpath)

    presence = cv.exists(query)
    assert len(presence) == 2
    assert presence['1_1_1/0-64_0-64_0-64'] == True
    assert presence['1_1_1/64-128_0-64_0-64'] == False
def test_download_upload_file(green):
    """download_to_file / upload_from_file round-trip a volume through disk."""
    delete_layer()
    cv, _ = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
    cv.green_threads = green

    mkdir('/tmp/file/')
    cv.download_to_file('/tmp/file/test', cv.bounds)

    # Upload the on-disk file into a fresh volume and compare contents.
    cv2 = CloudVolume('file:///tmp/file/test2/', info=cv.info)
    cv2.upload_from_file('/tmp/file/test', cv.bounds)
    assert np.all(cv2[:] == cv[:])

    shutil.rmtree('/tmp/file/')
def test_aligned_read():
    """Chunk-aligned reads return the expected shapes and contents."""
    # A single-chunk volume round-trips exactly.
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
    cutout = cv[0:50, 0:50, 0:50]
    assert cutout.shape == (50, 50, 50, 1)  # trailing axis is the channel count
    assert np.all(cutout == data)

    # First chunk of a two-chunk volume.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    cutout = cv[0:64, 0:64, 0:64]
    assert cutout.shape == (64, 64, 64, 1)
    assert np.all(cutout == data[:64, :64, :64, :])

    # Offset volume: read both chunks through global coordinates.
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
    cutout = cv[10:74, 20:84, 0:64]
    assert cutout.shape == (64, 64, 64, 1)
    assert np.all(cutout == data[:64, :64, :64, :])

    second = cv[74:138, 20:84, 0:64]
    assert second.shape == (64, 64, 64, 1)
    assert np.all(second == data[64:128, :64, :64, :])
def test_transfer():
    """transfer_to copies chunk files, info, and provenance to the destination."""
    # Bbox version
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    cv.transfer_to('file:///tmp/removeme/transfer/', cv.bounds)

    listing = os.listdir('/tmp/removeme/transfer/1_1_1/')
    assert '0-64_0-64_0-64' in listing
    assert len(listing) == 2
    assert os.path.exists('/tmp/removeme/transfer/info')
    assert os.path.exists('/tmp/removeme/transfer/provenance')
def test_mip_locking():
    """Locked mips reject writes; unlocking restores them; locks are per-mip.

    The four try/assert False/except blocks are replaced with pytest.raises,
    consistent with the other tests in this file.
    """
    delete_layer()
    cv, _ = create_layer(size=(1024, 1024, 2, 1), offset=(0, 0, 0))

    # lock_mips accepts either an int or a list and is idempotent.
    cv.meta.lock_mips(0)
    cv.meta.lock_mips([0])
    with pytest.raises(ReadOnlyException):
        cv[:, :, :] = 0

    # Unlocking restores writability.
    cv.meta.unlock_mips(0)
    cv.meta.unlock_mips([0])
    cv[:, :, :] = 0

    # Locking or unlocking a mip that does not exist yet is an error.
    with pytest.raises(ValueError):
        cv.meta.lock_mips(1)
    with pytest.raises(ValueError):
        cv.meta.unlock_mips(1)

    # Add mip 1, write through it, then lock both mips and verify
    # writes fail at each mip independently.
    cv.add_scale((2, 2, 1))
    cv.commit_info()
    cv.mip = 1
    cv[:] = 1
    cv.meta.lock_mips([0, 1])
    with pytest.raises(ReadOnlyException):
        cv[:, :, :] = 1
    cv.mip = 0
    with pytest.raises(ReadOnlyException):
        cv[:, :, :] = 1
def test_has_data():
    """has_data accepts a mip index or a resolution triple; unknown scales raise.

    Uses pytest.raises instead of try/assert False/except.
    """
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
    cv.add_scale((2, 2, 1))  # registered in the info but never written

    assert cv.image.has_data(0) == True
    assert cv.image.has_data(1) == False
    # Resolution-triple form of the same queries.
    assert cv.image.has_data([1, 1, 1]) == True
    assert cv.image.has_data([2, 2, 1]) == False

    # Mip 2 was never registered at all.
    with pytest.raises(exceptions.ScaleUnavailableError):
        cv.image.has_data(2)
def test_parallel_shared_memory_write():
    """upload_from_shared_memory: serial/parallel uploads, cutout bboxes, autocrop, C order."""
    delete_layer()
    cv, _ = create_layer(size=(256, 256, 128, 1), offset=(0, 0, 0))

    shm_location = 'cloudvolume-test-shm-parallel-write'
    # Allocate a shared-memory-backed ndarray; mmapfh must be closed and the
    # segment unlinked at the end of the test.
    mmapfh, shareddata = shm.ndarray(shape=(256, 256, 128), dtype=np.uint8, location=shm_location)

    # Serial upload of the whole buffer.
    shareddata[:] = 1
    cv.parallel = 1
    cv.upload_from_shared_memory(shm_location, Bbox((0, 0, 0), (256, 256, 128)))
    assert np.all(cv[:] == 1)

    # Parallel upload of the whole buffer.
    shareddata[:] = 2
    cv.parallel = 2
    cv.upload_from_shared_memory(shm_location, Bbox((0, 0, 0), (256, 256, 128)))
    assert np.all(cv[:] == 2)

    # Upload only a cutout of the buffer; the rest of the volume keeps its
    # previous contents.
    shareddata[:, :, :64] = 3
    cv.upload_from_shared_memory(shm_location, bbox=Bbox((0, 0, 0), (256, 256, 128)),
        cutout_bbox=Bbox((0, 0, 0), (256, 256, 64)))
    assert np.all(cv[:, :, :64] == 3)
    assert np.all(cv[:, :, 64:128] == 2)

    # With autocrop, a bbox hanging off the volume's low corner is clipped
    # before uploading.
    shareddata[:, :, :69] = 4
    cv.autocrop = True
    cv.upload_from_shared_memory(shm_location, bbox=Bbox((-5, -5, -5), (251, 251, 123)),
        cutout_bbox=Bbox((-5, -5, -5), (128, 128, 64)))
    assert np.all(cv[:128, :128, :63] == 4)
    assert np.all(cv[128:, 128:, :64] == 3)
    assert np.all(cv[:, :, 64:128] == 2)

    # Upload interpreting the buffer in C order: the x-axis line of ones in
    # the buffer must land along z in the volume.
    shareddata[:] = 0
    shareddata[:, 0, 0] = 1
    cv.upload_from_shared_memory(shm_location, bbox=Bbox((0, 0, 0), (256, 256, 128)), order='C')
    assert np.all(cv[0, 0, :] == 1)
    assert np.all(cv[1, 0, :] == 0)

    # Release the shared-memory resources.
    mmapfh.close()
    shm.unlink(shm_location)
def test_numpy_memmap():
    """A numpy memmap can be assigned into a CloudVolume directly."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))

    # Dump the layer's data to disk in Fortran order, then memmap it back.
    mkdir('/tmp/file/test/')
    with open("/tmp/file/test/chunk.data", "wb") as f:
        f.write(data.tobytes("F"))
    mapped = np.memmap("/tmp/file/test/chunk.data", dtype=data.dtype, mode='r',
                       shape=(50, 50, 50, 1), order='F')
    cv[:] = mapped[:]

    shutil.rmtree('/tmp/file/')