def _create_multiset_block(blocking, block_id, ds_in, ds_out):
    """Compute the label multiset for one block of ds_in and write its
    serialization to the corresponding chunk of ds_out."""
    fu.log("start processing block %i" % block_id)
    this_block = blocking.getBlock(block_id)
    roi = vu.block_to_bb(this_block)

    labels = ds_in[roi]

    # we can't encode the paintra ignore label
    paintera_ignore_label = 18446744073709551615
    ignore_mask = labels == paintera_ignore_label
    if ignore_mask.any():
        labels[ignore_mask] = 0

    # all-background blocks are skipped entirely (no chunk is written)
    if not labels.any():
        fu.log("block %i is empty" % block_id)
        fu.log_block_success(block_id)
        return

    # compute multiset from input labels and serialize it
    serialized = serialize_multiset(create_multiset_from_labels(labels))

    chunk_id = tuple(
        beg // ch for beg, ch in zip(this_block.begin, ds_out.chunks)
    )
    ds_out.write_chunk(chunk_id, serialized, True)
    fu.log_block_success(block_id)
# Beispiel #2
# 0
# NOTE(review): the two lines above are paste artifacts from a code-example
# aggregator; commented out so the module parses.
 def test_serialization(self):
     """Round-trip check: serializing a multiset and deserializing the
     bytes must reproduce an equivalent multiset."""
     from elf.label_multiset import (create_multiset_from_labels,
                                     serialize_multiset,
                                     deserialize_multiset)
     shape = (32, 32, 32)
     labels = np.random.randint(0, 2000, size=shape, dtype='uint64')
     original = create_multiset_from_labels(labels)
     serialized = serialize_multiset(original)
     roundtripped = deserialize_multiset(serialized, shape)
     self.check_multisets(original, roundtripped)
    def check_expected(self, key, key_expected):
        """Serialize the multiset computed from dataset `key` and compare
        it against the reference serialization stored under `key_expected`."""
        from elf.label_multiset import (create_multiset_from_labels,
                                        serialize_multiset)
        data = z5py.File(self.path)[key][:]
        multiset = create_multiset_from_labels(data)
        self.assertEqual(multiset.shape, data.shape)
        ser = serialize_multiset(multiset)

        ref_file = z5py.File(self.expected_path)
        expected_ser = ref_file[key_expected].read_chunk((0, 0, 0))
        self.check_serializations(ser, expected_ser, data.shape)
    def check_downscale(self, key, key_expected):
        """Downsample the multiset of dataset `key` by a factor of 2 per
        axis and compare its serialization against the reference stored
        under `key_expected`."""
        from elf.label_multiset import (create_multiset_from_labels,
                                        downsample_multiset,
                                        serialize_multiset)
        data = z5py.File(self.path)[key][:]
        multiset = create_multiset_from_labels(data)
        self.assertEqual(multiset.shape, data.shape)

        multiset = downsample_multiset(multiset, [2, 2, 2], -1)
        halved_shape = tuple(sh // 2 for sh in data.shape)
        self.assertEqual(multiset.shape, halved_shape)
        ser = serialize_multiset(multiset)

        ref_file = z5py.File(self.expected_path)
        expected_ser = ref_file[key_expected].read_chunk((0, 0, 0))
        self.check_serializations(ser, expected_ser, data.shape)
def _downscale_multiset_block(blocking, block_id, ds_in, ds_out, blocking_prev,
                              scale_factor, restrict_set,
                              effective_pixel_size):
    """Merge the previous-scale multisets overlapping this block, downsample
    the merged multiset by `scale_factor` and write it to ds_out."""
    fu.log("start processing block %i" % block_id)
    block = blocking.getBlock(block_id)
    ndim = ds_in.ndim

    # project this block's roi into the previous (finer) scale level
    prev_begin = [b * sc for b, sc in zip(block.begin, scale_factor)]
    prev_end = [min(e * sc, sh)
                for e, sc, sh in zip(block.end, scale_factor, ds_in.shape)]
    prev_shape = tuple(pe - pb for pb, pe in zip(prev_begin, prev_end))
    prev_roi = tuple(slice(pb, pe) for pb, pe in zip(prev_begin, prev_end))

    # chunk and block ids of the previous level that overlap the roi
    prev_block_shape = blocking_prev.blockShape
    prev_chunk_ids = list(chunks_overlapping_roi(prev_roi, prev_block_shape))
    multi_index = np.array(
        [[cid[d] for cid in prev_chunk_ids] for d in range(ndim)],
        dtype='int')
    prev_block_ids = np.ravel_multi_index(multi_index,
                                          blocking_prev.blocksPerAxis)
    prev_blocks = [blocking_prev.getBlock(bid) for bid in prev_block_ids]

    serializations = [ds_in.read_chunk(cid) for cid in prev_chunk_ids]
    # no chunk was written at the previous level -> nothing to do here
    if not any(ser is not None for ser in serializations):
        fu.log_block_success(block_id)
        return

    # missing chunks stand for background; substitute a background
    # multiset of the matching block shape for them
    multisets = []
    for ser, prev_block in zip(serializations, prev_blocks):
        if ser is None:
            multisets.append(
                background_multiset(prev_block.shape, effective_pixel_size))
        else:
            multisets.append(deserialize_multiset(ser, prev_block.shape))

    prev_chunk_ids = normalize_chunks(prev_chunk_ids)
    merged = merge_multisets(multisets, prev_chunk_ids, prev_shape,
                             blocking_prev.blockShape)

    # downsample the merged multiset and serialize the result
    downsampled = downsample_multiset(merged, scale_factor, restrict_set)
    ser = serialize_multiset(downsampled)

    chunk_id = tuple(b // c for b, c in zip(block.begin, ds_out.chunks))
    ds_out.write_chunk(chunk_id, ser, True)
    fu.log_block_success(block_id)