Code example #1
    def test_to_ndarray(self):

        a = daisy.Array(
            np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))

        # not within ROI of a and no fill value provided
        with self.assertRaises(AssertionError):
            a.to_ndarray(daisy.Roi((0, 0), (5, 5)))

        b = a.to_ndarray(daisy.Roi((0, 0), (1, 5)))
        compare = np.array([[0, 1, 2, 3, 4]])

        np.testing.assert_array_equal(b, compare)

        b = a.to_ndarray(daisy.Roi((1, 0), (1, 5)))
        compare = np.array([[5, 6, 7, 8, 9]])

        np.testing.assert_array_equal(b, compare)

        b = a.to_ndarray(daisy.Roi((0, 0), (2, 2)))
        compare = np.array([[0, 1], [5, 6]])

        np.testing.assert_array_equal(b, compare)

        b = a.to_ndarray(daisy.Roi((0, 0), (5, 5)), fill_value=1)
        compare = np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [1, 1, 1, 1, 1],
                            [1, 1, 1, 1, 1], [1, 1, 1, 1, 1]])

        np.testing.assert_array_equal(b, compare)
Code example #2
    def test_intersect(self):

        a = daisy.Array(
            np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))

        b = a.intersect(daisy.Roi((1, 1), (10, 10)))

        assert b.roi == daisy.Roi((1, 1), (1, 4))
        np.testing.assert_array_equal(b.to_ndarray(), [[6, 7, 8, 9]])
Code example #3
File: resampled_array.py  Project: pattonw/dacapo
def __getitem__(self, roi: Roi) -> np.ndarray:
    snapped_roi = roi.snap_to_grid(self._source_array.voxel_size, mode="grow")
    resampled_array = daisy.Array(
        rescale(
            self._source_array[snapped_roi].astype(np.float32),
            self.scale,
            order=self.interp_order,
            anti_aliasing=self.interp_order != 0,
        ).astype(self.dtype),
        roi=snapped_roi,
        voxel_size=self.voxel_size,
    )
    return resampled_array.to_ndarray(roi)
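
The snap_to_grid call above matters because the requested roi need not be aligned to the source array's voxel grid: mode="grow" expands the roi outward to the enclosing grid-aligned region, and the final to_ndarray(roi) crops the result back to the original request. A minimal sketch of the snapping alone (values invented for illustration):

import daisy

roi = daisy.Roi((3, 3), (5, 5))  # not aligned to a (4, 4) grid
snapped = roi.snap_to_grid(daisy.Coordinate((4, 4)), mode="grow")
assert snapped == daisy.Roi((0, 0), (8, 8))  # grown outward to cover roi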
Code example #4
def visualize_npy(npy_file: Path, voxel_size):
    voxel_size = daisy.Coordinate(voxel_size)

    viewer = neuroglancer.Viewer()
    with viewer.txn() as s:
        v = np.load(npy_file)
        m = daisy.Array(
            v,
            daisy.Roi(daisy.Coordinate([0, 0, 0]), daisy.Coordinate(v.shape)),
            daisy.Coordinate([1, 1, 1]),
        )
        add_layer(s, m, f"npy array")
    print(viewer)
    input("Hit ENTER to quit!")
Code example #5
    def test_materialize(self):

        a = daisy.Array(
            np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))

        b = a[daisy.Roi((0, 0), (2, 2))]

        # underlying data did not change
        assert a.data.shape == b.data.shape

        assert b.shape == (2, 2)
        b.materialize()
        assert b.shape == (2, 2)

        assert b.data.shape == (2, 2)
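
For reference outside the test class, the same behaviour as a standalone snippet (a minimal sketch; the output comments restate what the assertions above check):

import numpy as np
import daisy

a = daisy.Array(
    np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))
b = a[daisy.Roi((0, 0), (2, 2))]

print(b.data.shape)  # (2, 5): the view still shares the parent's buffer
b.materialize()
print(b.data.shape)  # (2, 2): now a copy restricted to b's ROI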
Code example #6
File: gaussian_smoothing2.py  Project: funkelab/daisy
def smooth(block, dataset, output, sigma=5):
    logger.debug("Block: %s" % block)

    # read data in block.read_roi
    daisy_array = dataset[block.read_roi]
    data = daisy_array.to_ndarray()
    logger.debug("Got data of shape %s" % str(data.shape))

    # apply gaussian filter
    r = scipy.ndimage.gaussian_filter(
            data, sigma=sigma, mode='constant')

    # write result to output dataset in block.write_roi
    to_write = daisy.Array(
            data=r,
            roi=block.read_roi,
            voxel_size=dataset.voxel_size)
    output[block.write_roi] = to_write[block.write_roi]
    logger.debug("Done")
    return 0
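
A block worker like smooth() is normally driven by a scheduler rather than called directly. The following is a hedged sketch assuming daisy's classic run_blockwise API, with dataset and output assumed to have been opened beforehand (e.g. via daisy.open_ds and daisy.prepare_ds) and block/context sizes invented for illustration:

import functools

import daisy

# extra read margin so the Gaussian filter sees valid context at block edges
context = daisy.Coordinate((16, 16))
write_roi = daisy.Roi((0, 0), (128, 128))
read_roi = write_roi.grow(context, context)

daisy.run_blockwise(
    total_roi=dataset.roi,
    read_roi=read_roi,
    write_roi=write_roi,
    process_function=functools.partial(
        smooth, dataset=dataset, output=output, sigma=5),
    read_write_conflict=False,
    num_workers=4)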
Code example #7
File: parallel_fragments.py  Project: yajivunev/lsd
def get_mask_data_in_roi(mask, roi, target_voxel_size):

    assert mask.voxel_size.is_multiple_of(target_voxel_size), (
        "Can not upsample from %s to %s" % (mask.voxel_size, target_voxel_size))

    aligned_roi = roi.snap_to_grid(mask.voxel_size, mode='grow')
    aligned_data = mask.to_ndarray(aligned_roi, fill_value=0)

    if mask.voxel_size == target_voxel_size:
        return aligned_data

    factor = mask.voxel_size/target_voxel_size

    upsampled_aligned_data = upsample(aligned_data, factor)

    upsampled_aligned_mask = daisy.Array(
        upsampled_aligned_data,
        roi=aligned_roi,
        voxel_size=target_voxel_size)

    return upsampled_aligned_mask.to_ndarray(roi)
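
upsample() is a project helper that is not part of this excerpt. A hypothetical stand-in consistent with how it is used above (nearest-neighbour repetition by the integer factor mask.voxel_size/target_voxel_size):

import numpy as np

def upsample(data, factor):
    # repeat every voxel factor[d] times along each dimension d
    for d, f in enumerate(factor):
        data = np.repeat(data, f, axis=d)
    return data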
Code example #8
    def test_getitem(self):

        a = daisy.Array(
            np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))

        assert a[daisy.Coordinate((0, 0))] == 0
        assert a[daisy.Coordinate((0, 1))] == 1
        assert a[daisy.Coordinate((0, 2))] == 2
        assert a[daisy.Coordinate((1, 0))] == 5
        assert a[daisy.Coordinate((1, 1))] == 6
        with self.assertRaises(AssertionError):
            a[daisy.Coordinate((1, 5))]
        with self.assertRaises(AssertionError):
            a[daisy.Coordinate((2, 5))]
        with self.assertRaises(AssertionError):
            a[daisy.Coordinate((-1, 0))]
        with self.assertRaises(AssertionError):
            a[daisy.Coordinate((0, -1))]

        b = a[daisy.Roi((1, 1), (1, 4))]
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((0, 0))] == 0
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((0, 1))] == 1
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((0, 2))] == 2
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((1, 0))] == 5
        assert b[daisy.Coordinate((1, 1))] == 6
        assert b[daisy.Coordinate((1, 2))] == 7
        assert b[daisy.Coordinate((1, 3))] == 8
        assert b[daisy.Coordinate((1, 4))] == 9
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((1, 5))]
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((2, 5))]
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((-1, 0))]
        with self.assertRaises(AssertionError):
            b[daisy.Coordinate((0, -1))]
Code example #9
    def test_shape(self):

        # ROI fits data

        a1 = daisy.Array(np.zeros((10, )), daisy.Roi((0, ), (10, )), (1, ))
        a2 = daisy.Array(np.zeros((10, 10)), daisy.Roi((0, 0), (10, 10)),
                         (1, 1))
        a2_3 = daisy.Array(np.zeros((3, 10, 10)), daisy.Roi((0, 0), (10, 10)),
                           (1, 1))
        a5_3_2_1 = daisy.Array(
            np.zeros((1, 2, 3, 4, 4, 4, 4, 4)),
            daisy.Roi((0, 0, 0, 0, 0), (80, 80, 80, 80, 80)),
            (20, 20, 20, 20, 20))

        assert a1.shape == (10, )
        assert a2.shape == (10, 10)
        assert a2_3.shape == (3, 10, 10)
        assert a2_3.roi.dims() == 2
        assert a5_3_2_1.shape == (1, 2, 3, 4, 4, 4, 4, 4)

        # ROI subset of data

        a1 = daisy.Array(np.zeros((20, )), daisy.Roi((0, ), (10, )), (1, ))
        a2 = daisy.Array(np.zeros((20, 20)), daisy.Roi((0, 0), (10, 10)),
                         (1, 1))
        a2_3 = daisy.Array(np.zeros((3, 20, 20)), daisy.Roi((0, 0), (10, 10)),
                           (1, 1))
        a5_3_2_1 = daisy.Array(
            np.zeros((1, 2, 3, 5, 5, 5, 5, 5)),
            daisy.Roi((0, 0, 0, 0, 0), (80, 80, 80, 80, 80)),
            (20, 20, 20, 20, 20))

        assert a1.shape == (10, )
        assert a2.shape == (10, 10)
        assert a2_3.shape == (3, 10, 10)
        assert a2_3.roi.dims() == 2
        assert a5_3_2_1.shape == (1, 2, 3, 4, 4, 4, 4, 4)
Code example #10
File: dataset.py  Project: funkelab/synistereq
    def open_daisy(self):
        """
        Open this dataset as a daisy array.
        """
        data = daisy.open_ds(self.container, self.dataset)

        # Correct for datasets where the container does not have the voxel size
        if data.voxel_size != tuple(self.voxel_size):
            log.warning(
                "Container has different voxel size than dataset: "\
                f"{data.voxel_size} != {self.voxel_size}")
            orig_shape = data.roi.get_shape()
            data = daisy.Array(data.data,
                               daisy.Roi(
                                   data.roi.get_offset(), self.voxel_size *
                                   data.data.shape[-len(self.voxel_size):]),
                               self.voxel_size,
                               chunk_shape=data.chunk_shape)
            log.warning(
                "Reloaded container data with dataset voxel size, changing shape: "\
                f"{orig_shape} => {data.roi.get_shape()}")

        return data
Code example #11
    def test_minimal(self):

        labels = np.array([[[1, 1, 1, 2, 2, 3, 2, 2, 1, 140, 140, 0]]],
                          dtype=np.uint64)

        roi = daisy.Roi((0, 0, 0), labels.shape)
        voxel_size = (1, 1, 1)

        block_size = (1, 1, 2)

        with tempfile.TemporaryDirectory() as tmpdir:

            a = daisy.Array(labels, roi=roi, voxel_size=voxel_size)
            b = daisy.prepare_ds(os.path.join(tmpdir, 'array_out.zarr'),
                                 '/volumes/b',
                                 total_roi=roi,
                                 voxel_size=voxel_size,
                                 write_size=block_size,
                                 dtype=np.uint64)

            b.data[:] = 0

            segment.arrays.relabel_connected_components(a, b, block_size, 1)

            b = b.data[:].flatten()

            self.assertTrue(b[0] == b[1] == b[2])
            self.assertTrue(b[3] == b[4])
            self.assertTrue(b[6] == b[7])
            self.assertTrue(b[9] == b[10])
            self.assertTrue(b[2] != b[3])
            self.assertTrue(b[4] != b[5])
            self.assertTrue(b[5] != b[6])
            self.assertTrue(b[7] != b[8])
            self.assertTrue(b[8] != b[9])
            self.assertTrue(b[10] != b[11])
Code example #12
    labels_add = daisy.open_ds(filename, "volumes/labels_add")
    print(labels_add.roi)

    raw_fused = daisy.open_ds(filename, "volumes/raw_fused")
    print(raw_fused.roi)

    labels_fused = daisy.open_ds(filename, "volumes/labels_fused")
    print(labels_fused.roi)

    all_data = daisy.Array(
        data=np.array([
            x.to_ndarray()[0, :, :, :]
            if len(x.data.shape) == 4 else x.to_ndarray() for x in [
                raw_base,
                labels_base,
                raw_add,
                labels_add,
                raw_fused,
                labels_fused,
            ]
        ]),
        roi=daisy.Roi((0, ) + raw_base.roi.get_begin(),
                      (6, ) + raw_base.roi.get_shape()),
        voxel_size=(1, ) + raw_base.voxel_size,
    )

    inspect(all_data, all_data.roi)

    input()
Code example #13
    def test_setitem(self):

        # set entirely with numpy array

        a = daisy.Array(np.zeros((2, 5)), daisy.Roi((0, 0), (2, 5)), (1, 1))

        a[daisy.Roi((0, 0), (2, 5))] = np.arange(0, 10).reshape(2, 5)
        assert a[daisy.Coordinate((0, 0))] == 0
        assert a[daisy.Coordinate((0, 1))] == 1
        assert a[daisy.Coordinate((0, 2))] == 2
        assert a[daisy.Coordinate((1, 0))] == 5
        assert a[daisy.Coordinate((1, 1))] == 6
        assert a[daisy.Coordinate((1, 4))] == 9

        # set entirely with numpy array and channels

        a = daisy.Array(np.zeros((3, 2, 5)), daisy.Roi((0, 0), (2, 5)), (1, 1))

        a[daisy.Roi((0, 0), (2, 5))] = np.arange(0, 3 * 10).reshape(3, 2, 5)
        np.testing.assert_array_equal(a[daisy.Coordinate((0, 0))], [0, 10, 20])
        np.testing.assert_array_equal(a[daisy.Coordinate((0, 1))], [1, 11, 21])
        np.testing.assert_array_equal(a[daisy.Coordinate((1, 4))], [9, 19, 29])

        # set entirely with scalar

        a = daisy.Array(np.zeros((2, 5)), daisy.Roi((0, 0), (2, 5)), (1, 1))

        a[daisy.Roi((0, 0), (2, 5))] = 42
        assert a[daisy.Coordinate((0, 0))] == 42
        assert a[daisy.Coordinate((1, 4))] == 42

        # set partially with scalar and channels

        a = daisy.Array(
            np.arange(0, 3 * 10).reshape(3, 2, 5), daisy.Roi((0, 0), (2, 5)),
            (1, 1))

        a[daisy.Roi((0, 0), (2, 2))] = 42
        np.testing.assert_array_equal(a[daisy.Coordinate((0, 0))],
                                      [42, 42, 42])
        np.testing.assert_array_equal(a[daisy.Coordinate((0, 1))],
                                      [42, 42, 42])
        np.testing.assert_array_equal(a[daisy.Coordinate((0, 2))], [2, 12, 22])
        np.testing.assert_array_equal(a[daisy.Coordinate((1, 2))], [7, 17, 27])
        np.testing.assert_array_equal(a[daisy.Coordinate((1, 3))], [8, 18, 28])
        np.testing.assert_array_equal(a[daisy.Coordinate((1, 4))], [9, 19, 29])

        # set partially with Array

        a = daisy.Array(np.zeros((2, 5)), daisy.Roi((0, 0), (2, 5)), (1, 1))
        b = daisy.Array(
            np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))

        a[daisy.Roi((0, 0), (1, 5))] = b[daisy.Roi((0, 0), (1, 5))]
        assert a[daisy.Coordinate((0, 0))] == 0
        assert a[daisy.Coordinate((0, 1))] == 1
        assert a[daisy.Coordinate((0, 2))] == 2
        assert a[daisy.Coordinate((1, 0))] == 0
        assert a[daisy.Coordinate((1, 1))] == 0
        assert a[daisy.Coordinate((1, 4))] == 0

        a = daisy.Array(np.zeros((2, 5)), daisy.Roi((0, 0), (2, 5)), (1, 1))
        b = daisy.Array(
            np.arange(0, 10).reshape(2, 5), daisy.Roi((0, 0), (2, 5)), (1, 1))

        a[daisy.Roi((0, 0), (1, 5))] = b[daisy.Roi((1, 0), (1, 5))]
        assert a[daisy.Coordinate((0, 0))] == 5
        assert a[daisy.Coordinate((0, 1))] == 6
        assert a[daisy.Coordinate((0, 4))] == 9
        assert a[daisy.Coordinate((1, 0))] == 0
        assert a[daisy.Coordinate((1, 1))] == 0
        assert a[daisy.Coordinate((1, 2))] == 0

        a[daisy.Roi((1, 0), (1, 5))] = b[daisy.Roi((0, 0), (1, 5))]
        assert a[daisy.Coordinate((0, 0))] == 5
        assert a[daisy.Coordinate((0, 1))] == 6
        assert a[daisy.Coordinate((0, 4))] == 9
        assert a[daisy.Coordinate((1, 0))] == 0
        assert a[daisy.Coordinate((1, 1))] == 1
        assert a[daisy.Coordinate((1, 2))] == 2
Code example #14
def visualize_hdf5(hdf5_file: Path,
                   voxel_size,
                   mst=False,
                   maxima_for=None,
                   skip=None):
    path_list = str(hdf5_file.absolute()).split("/")
    setups_dir = Path("/", *path_list[:-3])
    setup_config = DEFAULT_CONFIG
    try:
        setup_config.update(
            json.load((setups_dir / path_list[-3] / "config.json").open()))
    except Exception:
        pass
    voxel_size = daisy.Coordinate(setup_config["VOXEL_SIZE"])
    coordinate_scale = (setup_config["COORDINATE_SCALE"] *
                        np.array(voxel_size) / max(voxel_size))
    dataset = h5py.File(hdf5_file)
    volumes = list(dataset.get("volumes", {}).keys())
    points = list(dataset.get("points", {}).keys())

    points = set([p.split("-")[0] for p in points])

    node_id = itertools.count(start=1)

    viewer = neuroglancer.Viewer()
    with viewer.txn() as s:
        for volume in volumes:
            if skip == volume:
                continue
            v = daisy.open_ds(str(hdf5_file.absolute()), f"volumes/{volume}")
            if len(v.shape) == 5:
                v.n_channel_dims -= 1
                v.materialize()
                v.data = v.data[0]
            if v.dtype == np.int64:
                v.materialize()
                v.data = v.data.astype(np.uint64)
            if volume == maxima_for:
                v.materialize()
                max_filtered = maximum_filter(v.data, (3, 10, 10))
                maxima = np.logical_and(max_filtered == v.data, v.data > 0.01)
                m = daisy.Array(maxima, v.roi, v.voxel_size)
                add_layer(s, m, f"{volume}-maxima")
            if volume == "embedding":
                offset = v.roi.get_offset()
                mst = get_embedding_mst(
                    v.data,
                    1,
                    coordinate_scale,
                    offset / voxel_size,
                    daisy.open_ds(str(hdf5_file.absolute()),
                                  "volumes/fg_maxima").to_ndarray(),
                )
                add_trees(s, mst, node_id, name="MST", visible=True)
                v.materialize()
                v.data = (v.data + 1) / 2
            add_layer(s, v, volume, visible=False)

        for point_set in points:
            node_ids = dataset["points"][f"{point_set}-ids"]
            locations = dataset["points"][f"{point_set}-locations"]
            edges = dataset["points"][f"{point_set}-edges"]
            components = build_trees(node_ids, locations, edges, voxel_size)
            add_trees(s, components, node_id, name=point_set, visible=False)
        if mst and False:
            emst = h5py.File(hdf5_file)["emst"]
            edges_u = h5py.File(hdf5_file)["edges_u"]
            edges_v = h5py.File(hdf5_file)["edges_v"]
            alpha = setup_config["ALPHA"]
            coordinate_scale = setup_config["COORDINATE_SCALE"]
            offset = daisy.open_ds(str(hdf5_file.absolute()),
                                   "volumes/gt_fg").roi.get_offset()
            mst_trees = build_trees_from_mst(emst, edges_u, edges_v, alpha,
                                             coordinate_scale, offset,
                                             voxel_size)
            add_trees(s, mst_trees, node_id, name="MST", visible=True)
    print(viewer)
    input("Hit ENTER to quit!")
Code example #15
File: parallel_fragments.py  Project: yajivunev/lsd
def watershed_in_block(
        affs,
        block,
        context,
        rag_provider,
        fragments_out,
        num_voxels_in_block,
        mask=None,
        fragments_in_xy=False,
        epsilon_agglomerate=0.0,
        filter_fragments=0.0,
        min_seed_distance=10,
        replace_sections=None):
    '''

    Args:

        filter_fragments (float):

            Filter fragments that have an average affinity lower than this
            value.

        min_seed_distance (int):

            Controls distance between seeds in the initial watershed. Reducing
            this value improves downsampled segmentation.
    '''

    total_roi = affs.roi

    logger.debug("reading affs from %s", block.read_roi)

    affs = affs.intersect(block.read_roi)
    affs.materialize()

    if affs.dtype == np.uint8:
        logger.info("Assuming affinities are in [0,255]")
        max_affinity_value = 255.0
        affs.data = affs.data.astype(np.float32)
    else:
        max_affinity_value = 1.0

    if mask is not None:

        logger.debug("reading mask from %s", block.read_roi)
        mask_data = get_mask_data_in_roi(mask, affs.roi, affs.voxel_size)
        logger.debug("masking affinities")
        affs.data *= mask_data

    # extract fragments
    fragments_data, _ = watershed_from_affinities(
        affs.data,
        max_affinity_value,
        fragments_in_xy=fragments_in_xy,
        min_seed_distance=min_seed_distance)

    if mask is not None:
        fragments_data *= mask_data.astype(np.uint64)

    if filter_fragments > 0:

        if fragments_in_xy:
            average_affs = np.mean(affs.data[0:2]/max_affinity_value, axis=0)
        else:
            average_affs = np.mean(affs.data/max_affinity_value, axis=0)

        filtered_fragments = []

        fragment_ids = np.unique(fragments_data)

        for fragment, mean in zip(
                fragment_ids,
                measurements.mean(
                    average_affs,
                    fragments_data,
                    fragment_ids)):
            if mean < filter_fragments:
                filtered_fragments.append(fragment)

        filtered_fragments = np.array(
            filtered_fragments,
            dtype=fragments_data.dtype)
        replace = np.zeros_like(filtered_fragments)
        replace_values(fragments_data, filtered_fragments, replace, inplace=True)

    if epsilon_agglomerate > 0:

        logger.info(
            "Performing initial fragment agglomeration until %f",
            epsilon_agglomerate)

        generator = waterz.agglomerate(
                affs=affs.data/max_affinity_value,
                thresholds=[epsilon_agglomerate],
                fragments=fragments_data,
                scoring_function='OneMinus<HistogramQuantileAffinity<RegionGraphType, 25, ScoreValue, 256, false>>',
                discretize_queue=256,
                return_merge_history=False,
                return_region_graph=False)
        fragments_data[:] = next(generator)

        # cleanup generator
        for _ in generator:
            pass

    if replace_sections:

        logger.info("Replacing sections...")

        block_begin = block.write_roi.get_begin()
        shape = block.write_roi.get_shape()

        z_context = context[0]/affs.voxel_size[0]
        logger.info("Z context: %i", z_context)

        mapping = {}

        voxel_offset = block_begin[0]/affs.voxel_size[0]

        for i, j in zip(
                range(fragments_data.shape[0]),
                range(shape[0])):
            mapping[i] = i
            mapping[j] = int(voxel_offset + i) \
                    if block_begin[0] == total_roi.get_begin()[0] \
                    else int(voxel_offset + (i - z_context))

        logger.info('Mapping: %s', mapping)

        replace = [k for k, v in mapping.items() if v in replace_sections]

        for r in replace:
            logger.info("Replacing mapped section %i with zero", r)
            fragments_data[r] = 0

    #todo add key value replacement option

    fragments = daisy.Array(fragments_data, affs.roi, affs.voxel_size)

    # crop fragments to write_roi
    fragments = fragments[block.write_roi]
    fragments.materialize()
    max_id = fragments.data.max()

    # ensure we don't have IDs larger than the number of voxels (that would
    # break uniqueness of IDs below)
    if max_id > num_voxels_in_block:
        logger.warning(
            "fragments in %s have max ID %d, relabelling...",
            block.write_roi, max_id)
        fragments.data, max_id = relabel(fragments.data)

        assert max_id < num_voxels_in_block

    # ensure unique IDs
    id_bump = block.block_id[1]*num_voxels_in_block
    logger.debug("bumping fragment IDs by %i", id_bump)
    fragments.data[fragments.data > 0] += id_bump
    fragment_ids = range(id_bump + 1, id_bump + 1 + int(max_id))

    # store fragments
    logger.debug("writing fragments to %s", block.write_roi)
    fragments_out[block.write_roi] = fragments

    # following only makes a difference if fragments were found
    if max_id == 0:
        return

    # get fragment centers
    fragment_centers = {
        fragment: block.write_roi.get_offset() + affs.voxel_size*daisy.Coordinate(center)
        for fragment, center in zip(
            fragment_ids,
            measurements.center_of_mass(fragments.data, fragments.data, fragment_ids))
        if not np.isnan(center[0])
    }

    # store nodes
    rag = rag_provider[block.write_roi]
    rag.add_nodes_from([
        (node, {
            'center_z': c[0],
            'center_y': c[1],
            'center_x': c[2]
            }
        )
        for node, c in fragment_centers.items()
    ])
    rag.write_nodes(block.write_roi)
Code example #16
def inspect(raw, roi):

    print("Reading raw data...")

    raw_data = raw.to_ndarray(roi=roi, fill_value=0)

    return spimagine.volshow(raw_data, stackUnits=raw.voxel_size[1:][::-1])


if __name__ == "__main__":

    filename = sys.argv[1]

    ch1 = daisy.open_ds(filename, 'volumes/ch1')

    a_ch1 = daisy.open_ds(filename, 'volumes/a_ch1')

    b_ch1 = daisy.open_ds(filename, 'volumes/b_ch1')

    soft_mask = daisy.open_ds(filename, 'volumes/soft_mask')

    fused = daisy.Array(data=np.array(
        [x.to_ndarray() for x in [ch1, a_ch1, b_ch1, soft_mask]]),
                        roi=daisy.Roi((0, ) + ch1.roi.get_begin(),
                                      (4, ) + ch1.roi.get_shape()),
                        voxel_size=(1, ) + ch1.voxel_size)

    inspect(fused, fused.roi)

    input()
Code example #17
    def test_dtype(self):

        for dtype in [np.float32, np.uint8, np.uint64]:
            assert daisy.Array(np.zeros((1, ), dtype=dtype),
                               daisy.Roi((0, ), (1, )), (1, )).dtype == dtype
Code example #18
File: parallel_fragments.py  Project: bentaculum/lsd
def watershed_in_block(affs, block, rag_provider, fragments_out,
                       fragments_in_xy, epsilon_agglomerate, mask):

    total_roi = affs.roi

    logger.debug("reading affs from %s", block.read_roi)
    affs = affs.intersect(block.read_roi)
    affs.materialize()

    if mask is not None:

        logger.debug("reading mask from %s", block.read_roi)
        mask_data = get_mask_data_in_roi(mask, affs.roi, affs.voxel_size)
        logger.debug("masking affinities")
        affs.data *= mask_data

    # extract fragments
    fragments_data, n = watershed_from_affinities(
        affs.data,
        fragments_in_xy=fragments_in_xy,
        epsilon_agglomerate=epsilon_agglomerate)
    if mask is not None:
        fragments_data *= mask_data.astype(np.uint64)
    fragments = daisy.Array(fragments_data, affs.roi, affs.voxel_size)

    # crop fragments to write_roi
    fragments = fragments[block.write_roi]
    fragments.materialize()

    # ensure we don't have IDs larger than the number of voxels (that would
    # break uniqueness of IDs below)
    max_id = fragments.data.max()
    if max_id > block.write_roi.size():
        logger.warning("fragments in %s have max ID %d, relabelling...",
                       block.write_roi, max_id)
        fragments.data, n = relabel(fragments.data)

    # ensure unique IDs
    size_of_voxel = daisy.Roi((0, ) * affs.roi.dims(), affs.voxel_size).size()
    num_voxels_in_block = block.requested_write_roi.size() // size_of_voxel
    id_bump = block.block_id * num_voxels_in_block
    logger.debug("bumping fragment IDs by %i", id_bump)
    fragments.data[fragments.data > 0] += id_bump
    fragment_ids = range(id_bump + 1, id_bump + 1 + n)

    # store fragments
    logger.debug("writing fragments to %s", block.write_roi)
    fragments_out[block.write_roi] = fragments

    # following only makes a difference if fragments were found
    if n == 0:
        return

    # get fragment centers
    fragment_centers = {
        fragment: block.write_roi.get_offset() + affs.voxel_size * center
        for fragment, center in zip(
            fragment_ids,
            center_of_mass(fragments.data, fragments.data, fragment_ids))
        if not np.isnan(center[0])
    }

    # store nodes
    rag = rag_provider[block.write_roi]
    rag.add_nodes_from([(node, {
        'center_z': c[0],
        'center_y': c[1],
        'center_x': c[2]
    }) for node, c in fragment_centers.items()])
    rag.write_nodes(block.write_roi)
Code example #19
    def test_constructor(self):

        data = np.zeros((10, 10, 10), dtype=np.float32)
        roi = daisy.Roi((0, 0, 0), (10, 10, 10))

        # consistent configurations
        daisy.Array(data, roi, (1, 1, 1))
        daisy.Array(data, roi, (1, 1, 2))
        daisy.Array(data, roi, (1, 5, 2))
        daisy.Array(data, roi, (10, 5, 2))
        roi = daisy.Roi((1, 1, 1), (10, 10, 10))
        daisy.Array(data, roi, (1, 1, 1), data_offset=(1, 1, 1))
        roi = daisy.Roi((0, 0, 0), (20, 20, 20))
        daisy.Array(data, roi, (2, 2, 2))

        # dims don't match
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (1, 1))

        # ROI not multiple of voxel size
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (1, 1, 3))
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (1, 1, 4))

        # ROI begin doesn't align with voxel size
        roi = daisy.Roi((1, 1, 1), (11, 11, 11))
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (1, 1, 2))

        # ROI shape doesn't align with voxel size
        roi = daisy.Roi((0, 0, 0), (11, 11, 11))
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (1, 1, 2))

        # ROI outside of provided data
        roi = daisy.Roi((0, 0, 0), (20, 20, 20))
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (1, 1, 1))
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (2, 2, 1))
        with self.assertRaises(AssertionError):
            daisy.Array(data, roi, (2, 2, 2), data_offset=(0, 0, 2))
Code example #20
File: zarr_array.py  Project: pattonw/dacapo
def __setitem__(self, roi: Roi, value: np.ndarray[Any, Any]):
    daisy.Array(self.data, self.roi, self.voxel_size)[roi] = value
Code example #21
File: zarr_array.py  Project: pattonw/dacapo
def __getitem__(self, roi: Roi) -> np.ndarray[Any, Any]:
    data: np.ndarray[Any, Any] = daisy.Array(
        self.data, self.roi, self.voxel_size).to_ndarray(roi=roi)
    return data
Code example #22
def segment_in_block(array_in, array_out, num_voxels_in_block, block, tmpdir,
                     segment_function):

    logger.debug("Segmenting in block %s", block)

    segmentation = segment_function(array_in, block.read_roi)

    print("========= block %d ====== " % block.block_id)
    print(segmentation)

    assert segmentation.dtype == np.uint64

    id_bump = block.block_id * num_voxels_in_block
    segmentation += id_bump
    segmentation[segmentation == id_bump] = 0

    logger.debug("Bumping segmentation IDs by %d", id_bump)

    # wrap segmentation into daisy array
    segmentation = daisy.Array(segmentation,
                               roi=block.read_roi,
                               voxel_size=array_in.voxel_size)

    # store segmentation in out array
    array_out[block.write_roi] = segmentation[block.write_roi]

    neighbor_roi = block.write_roi.grow(array_in.voxel_size,
                                        array_in.voxel_size)

    # clip segmentation to 1-voxel context
    segmentation = segmentation.to_ndarray(roi=neighbor_roi, fill_value=0)
    neighbors = array_out.to_ndarray(roi=neighbor_roi, fill_value=0)

    unique_pairs = []

    for d in range(3):

        slices_neg = tuple(
            slice(None) if dd != d else slice(0, 1) for dd in range(3))
        slices_pos = tuple(
            slice(None) if dd != d else slice(-1, None) for dd in range(3))

        pairs_neg = np.array([
            segmentation[slices_neg].flatten(),
            neighbors[slices_neg].flatten()
        ])
        pairs_neg = pairs_neg.transpose()

        pairs_pos = np.array([
            segmentation[slices_pos].flatten(),
            neighbors[slices_pos].flatten()
        ])
        pairs_pos = pairs_pos.transpose()

        unique_pairs.append(
            np.unique(np.concatenate([pairs_neg, pairs_pos]), axis=0))

    unique_pairs = np.concatenate(unique_pairs)
    zero_u = unique_pairs[:, 0] == 0
    zero_v = unique_pairs[:, 1] == 0
    non_zero_filter = np.logical_not(np.logical_or(zero_u, zero_v))

    logger.debug("Matching pairs with neighbors: %s", unique_pairs)

    edges = unique_pairs[non_zero_filter]
    nodes = np.unique(edges)

    logger.debug("Final edges: %s", edges)
    logger.debug("Final nodes: %s", nodes)

    np.savez_compressed(os.path.join(tmpdir, 'block_%d.npz' % block.block_id),
                        nodes=nodes,
                        edges=edges)
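
The per-block .npz files written above are meant to be merged in a later step. A hedged sketch (assuming only the 'block_%d.npz' layout used here) of collecting them into one global node and edge list:

import glob
import os

import numpy as np

nodes, edges = [], []
for path in glob.glob(os.path.join(tmpdir, 'block_*.npz')):
    with np.load(path) as block_data:
        nodes.append(block_data['nodes'])
        edges.append(block_data['edges'])

nodes = np.unique(np.concatenate(nodes))
edges = np.unique(np.concatenate(edges), axis=0)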