def get_skeletons(self, folder):
    # Fetch all skeleton binaries in a single batch request, keyed by filename.
    skeleton_filenames = [str(skeleton_id) for skeleton_id in self.skeleton_ids]
    cf = CloudFiles(folder)
    skeleton_files = cf.get(skeleton_filenames)
    skeletons = {}
    for skeleton_file in skeleton_files:
        # cf.get on a list yields dicts with 'path' and 'content' entries.
        skeleton_id_str = skeleton_file["path"]
        skeleton = Skeleton.from_precomputed(skeleton_file["content"])
        skeletons[skeleton_id_str] = skeleton
    return skeletons
Example #2
def test_skeleton_fidelity():
    # Fetch the same skeleton through the sharded reader and decode it directly
    # from its flat (unsharded) precomputed binary; the two must match.
    segid = 1822975381
    cv = CloudVolume('gs://seunglab-test/sharded')
    sharded_skel = cv.skeleton.get(segid)

    with SimpleStorage('gs://seunglab-test/sharded') as stor:
        binary = stor.get_file('skeletons/' + str(segid))

    unsharded_skel = Skeleton.from_precomputed(
        binary,
        segid=segid,
        vertex_attributes=cv.skeleton.meta.info['vertex_attributes'])

    assert sharded_skel == unsharded_skel
def test_sharded():
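    # Shard ten copies of a small two-branch skeleton under every encoding
    # combination and verify CloudVolume reads back exactly what was written.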
    skel = Skeleton([
        (0, 0, 0),
        (1, 0, 0),
        (2, 0, 0),
        (0, 1, 0),
        (0, 2, 0),
        (0, 3, 0),
    ],
                    edges=[(0, 1), (1, 2), (3, 4), (4, 5), (3, 5)],
                    segid=1,
                    extra_attributes=[{
                        "id": "radius",
                        "data_type": "float32",
                        "num_components": 1,
                    }]).physical_space()

    # Serialize ten clones of the skeleton under labels 0..9; these are the
    # per-label binaries that get packed into shard files below.
    skels = {}
    for i in range(10):
        sk = skel.clone()
        sk.id = i
        skels[i] = sk.to_precomputed()

    # mkdir and jsonify are cloud-volume helpers (cloudvolume.lib); 'info' and
    # 'skel_info' are the dataset and skeleton info dicts defined elsewhere
    # in the test module.
    mkdir('/tmp/removeme/skeletons/sharded/skeletons')
    with open('/tmp/removeme/skeletons/sharded/info', 'wt') as f:
        f.write(jsonify(info))

    # Exercise every combination of minishard index and data encodings.
    for idxenc in ('raw', 'gzip'):
        for dataenc in ('raw', 'gzip'):

            spec = ShardingSpecification(
                'neuroglancer_uint64_sharded_v1',
                preshift_bits=1,
                hash='murmurhash3_x86_128',
                minishard_bits=2,
                shard_bits=1,
                minishard_index_encoding=idxenc,
                data_encoding=dataenc,
            )
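            # Record this sharding spec in the skeleton info file so the
            # reader knows how to locate labels inside the shard files.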
            skel_info['sharding'] = spec.to_dict()

            with open('/tmp/removeme/skeletons/sharded/skeletons/info',
                      'wt') as f:
                f.write(jsonify(skel_info))

            # synthesize_shards packs the serialized skeletons into shard
            # files (a mapping of shard filename -> binary).
            files = spec.synthesize_shards(skels)
            for fname in files.keys():
                with open('/tmp/removeme/skeletons/sharded/skeletons/' + fname,
                          'wb') as f:
                    f.write(files[fname])

            cv = CloudVolume('file:///tmp/removeme/skeletons/sharded/')
            assert cv.skeleton.meta.mip == 3

            # Every label should round-trip to the original skeleton once the
            # per-clone id is reset.
            for i in range(10):
                sk = cv.skeleton.get(i).physical_space()
                sk.id = 1
                assert sk == skel

            # list_labels should report all ten labels across the shard files.
            labels = []
            for fname in files.keys():
                lbls = cv.skeleton.reader.list_labels(fname, path='skeletons')
                labels += list(lbls)

            labels.sort()
            assert labels == list(range(10))

            # Each shard should disassemble into per-label binaries that
            # decode as valid skeletons.
            for filename, shard in files.items():
                decoded_skels = cv.skeleton.reader.disassemble_shard(shard)
                for label, binary in decoded_skels.items():
                    Skeleton.from_precomputed(binary)

            # exists() maps each queried label to the shard file that holds it,
            # or None when the label was never written (label 10 here).
            exists = cv.skeleton.reader.exists(list(range(11)),
                                               path='skeletons')
            assert exists == {
                0: 'skeletons/0.shard',
                1: 'skeletons/0.shard',
                2: 'skeletons/0.shard',
                3: 'skeletons/0.shard',
                4: 'skeletons/0.shard',
                5: 'skeletons/0.shard',
                6: 'skeletons/0.shard',
                7: 'skeletons/0.shard',
                8: 'skeletons/1.shard',
                9: 'skeletons/1.shard',
                10: None,
            }

    shutil.rmtree('/tmp/removeme/skeletons')  # clean up the on-disk test data
def get_skeleton(src_path, skeleton_id_str):
    # CloudFiles.get with a single path returns the file's contents as bytes.
    cf = CloudFiles(src_path)
    return Skeleton.from_precomputed(cf.get(skeleton_id_str))
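A minimal usage sketch for the helper above (the bucket path is a placeholder,
and the imports assume the standard cloud-files and cloud-volume packages):

from cloudfiles import CloudFiles
from cloudvolume import Skeleton

# Hypothetical precomputed skeleton directory and segment id.
skel = get_skeleton('gs://my-bucket/segmentation/skeletons', '1822975381')
print(len(skel.vertices), len(skel.edges))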