def test_transform():
  skelv = Skeleton(
    [(0, 0, 0), (1, 0, 0), (1, 1, 0), (1, 1, 3), (2, 1, 3), (2, 2, 3)],
    edges=[(1, 0), (1, 2), (2, 3), (3, 4), (5, 4)],
    radii=[1, 2, 3, 4, 5, 6],
    vertex_types=[1, 2, 3, 4, 5, 6],
    segid=1337,
    transform=np.array([
      [2, 0, 0, 0],
      [0, 2, 0, 0],
      [0, 0, 2, 0],
    ])
  )

  # A uniform 2x scaling transform doubles every coordinate on the way
  # to physical space; voxel_space() inverts it exactly.
  skelp = skelv.physical_space()
  assert np.all(skelp.vertices == skelv.vertices * 2)
  assert np.all(skelv.vertices == skelp.voxel_space().vertices)

  # A pure translation: the fourth column of the 3x4 affine shifts
  # x, y, z by 1, 2, 3 respectively.
  skelv.transform = [
    [1, 0, 0, 1],
    [0, 1, 0, 2],
    [0, 0, 1, 3],
  ]
  skelp = skelv.physical_space()

  tmpskel = skelv.clone()
  tmpskel.vertices[:, 0] += 1
  tmpskel.vertices[:, 1] += 2
  tmpskel.vertices[:, 2] += 3

  assert np.all(skelp.vertices == tmpskel.vertices)
  assert np.all(skelp.voxel_space().vertices == skelv.vertices)
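# For reference, a minimal sketch of the mapping physical_space() is tested
# against above: each voxel-space vertex v becomes A @ [v, 1] for the 3x4
# affine A. apply_affine is a hypothetical helper, not part of cloud-volume.
def apply_affine(vertices, transform):
  transform = np.asarray(transform, dtype=np.float32)
  # Append a homogeneous 1 to each (x, y, z) row: (N, 3) -> (N, 4).
  homogeneous = np.hstack([vertices, np.ones((len(vertices), 1), dtype=np.float32)])
  return homogeneous @ transform.T  # (N, 4) @ (4, 3) -> (N, 3)

# e.g. apply_affine(skelv.vertices, skelv.transform) should match
# skelv.physical_space().vertices for the transforms used in test_transform.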
def test_sharded():
  skel = Skeleton(
    [
      (0, 0, 0), (1, 0, 0), (2, 0, 0),
      (0, 1, 0), (0, 2, 0), (0, 3, 0),
    ],
    edges=[(0, 1), (1, 2), (3, 4), (4, 5), (3, 5)],
    segid=1,
    extra_attributes=[{
      "id": "radius",
      "data_type": "float32",
      "num_components": 1,
    }]
  ).physical_space()

  # Ten copies of the same skeleton under different labels.
  skels = {}
  for i in range(10):
    sk = skel.clone()
    sk.id = i
    skels[i] = sk.to_precomputed()

  # info and skel_info are the dataset and skeleton info dicts defined
  # elsewhere in this test module.
  mkdir('/tmp/removeme/skeletons/sharded/skeletons')
  with open('/tmp/removeme/skeletons/sharded/info', 'wt') as f:
    f.write(jsonify(info))

  # Exercise every combination of minishard index and data encodings.
  for idxenc in ('raw', 'gzip'):
    for dataenc in ('raw', 'gzip'):
      spec = ShardingSpecification(
        'neuroglancer_uint64_sharded_v1',
        preshift_bits=1,
        hash='murmurhash3_x86_128',
        minishard_bits=2,
        shard_bits=1,
        minishard_index_encoding=idxenc,
        data_encoding=dataenc,
      )
      skel_info['sharding'] = spec.to_dict()
      with open('/tmp/removeme/skeletons/sharded/skeletons/info', 'wt') as f:
        f.write(jsonify(skel_info))

      # Pack the encoded skeletons into shard files and write them out.
      files = spec.synthesize_shards(skels)
      for fname in files.keys():
        with open('/tmp/removeme/skeletons/sharded/skeletons/' + fname, 'wb') as f:
          f.write(files[fname])

      cv = CloudVolume('file:///tmp/removeme/skeletons/sharded/')
      assert cv.skeleton.meta.mip == 3

      # Every skeleton should round trip through the sharded reader.
      for i in range(10):
        sk = cv.skeleton.get(i).physical_space()
        sk.id = 1
        assert sk == skel

      # All ten labels should be listed across the shard files.
      labels = []
      for fname in files.keys():
        lbls = cv.skeleton.reader.list_labels(fname, path='skeletons')
        labels += list(lbls)
      labels.sort()
      assert labels == list(range(10))

      # Every shard should disassemble into decodable skeleton binaries.
      for filename, shard in files.items():
        decoded_skels = cv.skeleton.reader.disassemble_shard(shard)
        for label, binary in decoded_skels.items():
          Skeleton.from_precomputed(binary)

      # exists() maps each label to the shard file containing it
      # (or None for the nonexistent label 10).
      exists = cv.skeleton.reader.exists(list(range(11)), path='skeletons')
      assert exists == {
        0: 'skeletons/0.shard',
        1: 'skeletons/0.shard',
        2: 'skeletons/0.shard',
        3: 'skeletons/0.shard',
        4: 'skeletons/0.shard',
        5: 'skeletons/0.shard',
        6: 'skeletons/0.shard',
        7: 'skeletons/0.shard',
        8: 'skeletons/1.shard',
        9: 'skeletons/1.shard',
        10: None,
      }

  shutil.rmtree('/tmp/removeme/skeletons')
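# For reference, a minimal sketch of how the neuroglancer_uint64_sharded_v1
# format routes a label to a shard file. shard_location is a hypothetical
# helper, and it substitutes the spec's 'identity' hash for brevity; the test
# above uses hash='murmurhash3_x86_128', so the shard assignments asserted
# there come from the murmur-mixed bits instead.
def shard_location(label, preshift_bits=1, minishard_bits=2, shard_bits=1):
  hashed = label >> preshift_bits  # 'identity' hash: no mixing step
  minishard = hashed & ((1 << minishard_bits) - 1)
  shard = (hashed >> minishard_bits) & ((1 << shard_bits) - 1)
  return ('%x.shard' % shard, minishard)  # filenames are the hex shard number

# e.g. shard_location(9) == ('1.shard', 0)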