Example #1
def ds_dtype(request, tmp_path):
    dtype = request.param
    n5_path = str(tmp_path / "test.n5")
    ds_name = "ds" + dtype

    pyn5.create_dataset(n5_path, ds_name, DS_SIZE, BLOCKSIZE, dtype)
    yield pyn5.open(n5_path, ds_name, dtype, False), np.dtype(dtype.lower())
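
The body above uses pytest's request.param and a yield, which implies a parametrized @pytest.fixture decorator that the excerpt omits. A sketch of how the declaration might look, assuming N5-style dtype names (the real parameter list is not shown in the excerpt):

import pytest

# Hypothetical parametrization over N5 dtype names.
@pytest.fixture(params=["UINT8", "UINT16", "UINT32", "UINT64"])
def ds_dtype(request, tmp_path):
    ...  # body as in Example #1 above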
Example #2
    def setUp(self):
        self.root = "test.n5"
        self.dataset = "test"
        self.dtype = "UINT8"
        self.dataset_size = [10, 10, 10]
        self.block_size = [2, 2, 2]

        pyn5.create_dataset(self.root, self.dataset, self.dataset_size,
                            self.block_size, self.dtype)
        self.n5 = pyn5.open(self.root, self.dataset, self.dtype, False)
Example #3
    def setUp(self):
        self.root = "test.n5"
        self.dataset = "test_{}".format(self.dtype)
        self.dataset_size = [10, 10, 10]
        self.block_size = [2, 2, 2]
        if Path(self.root).is_dir():
            shutil.rmtree(str(Path(self.root).absolute()))
        pyn5.create_dataset(self.root, self.dataset, self.dataset_size,
                            self.block_size, self.dtype)
        self.n5 = pyn5.open(self.root, self.dataset, self.dtype, False)
Example #4
    def write_to_n5(self, folder, dataset):
        """
        Note: diluvian works in z,y,x order while pyn5 assumes x,y,z,
        so coordinates must be flipped when writing.
        """
        pyn5.create_dataset(
            folder, dataset, self.bounds[1], self.leaf_shape, str(self.dtype).upper()
        )
        ds = pyn5.open(folder, dataset)  # avoid shadowing the dataset-name argument
        for leaf in self.iter_leaves():
            pyn5.write(ds, leaf.bounds, leaf.data, self.dtype)
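
The docstring above flags the axis-order mismatch: diluvian indexes volumes as z,y,x while pyn5 expects x,y,z. A minimal sketch of what such a flip looks like with numpy, assuming bounds are (start, stop) coordinate triples (flip_zyx_to_xyz is a hypothetical helper, not part of pyn5):

import numpy as np

def flip_zyx_to_xyz(bounds_zyx, data_zyx):
    """Reverse axis order of bounds and data: (z, y, x) -> (x, y, z)."""
    start, stop = bounds_zyx
    return (start[::-1], stop[::-1]), data_zyx.transpose(2, 1, 0)

# e.g. a (4, 3, 2) z,y,x block becomes a (2, 3, 4) x,y,z block
bounds, data = flip_zyx_to_xyz(((0, 0, 0), (4, 3, 2)), np.zeros((4, 3, 2)))
assert bounds == ((0, 0, 0), (2, 3, 4)) and data.shape == (2, 3, 4)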
Example #5
def test_compression(tmp_path, compression_dict):
    root = tmp_path / "test.n5"

    data = np.arange(100, dtype=np.uint8).reshape((10, 10))

    pyn5.create_dataset(str(root), "ds", data.shape, (5, 5),
                        data.dtype.name.upper(), json.dumps(compression_dict))

    with open(root / "ds" / "attributes.json") as f:
        attrs = json.load(f)

    assert attrs["compression"] == compression_dict

    ds = pyn5.DatasetUINT8(str(root), "ds", True)
    ds.write_ndarray((0, 0), data, 0)
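
test_compression receives compression_dict from a fixture that the excerpt does not show. Assuming it parametrizes over N5's standard "compression" attribute format, a plausible (hypothetical) declaration would be:

import pytest

# Hypothetical parametrization mirroring N5's attributes.json "compression" field.
@pytest.fixture(params=[
    {"type": "raw"},
    {"type": "gzip", "level": 5},
])
def compression_dict(request):
    return request.param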
Example #6
def test_vs_z5_hash(tmp_path, z5_file):
    """Check that the block hashes differ from z5's"""
    root = tmp_path / "test.n5"

    z5_path = Path(z5_file.path)
    shape = (10, 20)
    data = np.arange(np.prod(shape)).reshape(shape)
    chunks = (6, 7)

    pyn5.create_dataset(str(root), "ds", shape, chunks,
                        data.dtype.name.upper(), json.dumps({"type": "raw"}))
    ds = pyn5.DatasetINT64(str(root), "ds", False)
    ds.write_ndarray((0, 0), data, 0)

    z5_file.create_dataset("ds", data=data, chunks=(6, 7), compression="raw")

    assert blocks_hash(root) != blocks_hash(z5_path)
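
blocks_hash is a helper the excerpt leaves undefined; presumably it digests the bytes of every block file under a root so two container trees can be compared. A sketch under that assumption (the exclusion of attributes.json and the choice of SHA-256 are guesses):

import hashlib
from pathlib import Path

def blocks_hash(root):
    """Hypothetical helper: a single digest over all block files under root."""
    digest = hashlib.sha256()
    for p in sorted(Path(root).rglob("*")):
        if p.is_file() and p.name != "attributes.json":
            digest.update(p.read_bytes())
    return digest.hexdigest()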
Example #7
def test_data_ordering(tmp_path):
    root = tmp_path / "test.n5"

    shape = (10, 20)
    chunks = (10, 10)

    pyn5.create_dataset(str(root), "ds", shape, chunks, "UINT8")
    ds = pyn5.DatasetUINT8(str(root), "ds", False)
    arr = np.array(ds.read_ndarray((0, 0), shape))
    arr += 1
    ds.write_ndarray((0, 0), arr, 0)

    ds_path = root / "ds"

    assert blocks_in(ds_path) == {"0", "0/0", "0/1"}

    with open(ds_path / "attributes.json") as f:
        attrs = json.load(f)

    assert list(shape) == attrs["dimensions"]
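
blocks_in is likewise not defined in the excerpt. Given the expected set {"0", "0/0", "0/1"}, it plausibly collects the relative paths of everything under the dataset directory except attributes.json; a sketch under that assumption:

from pathlib import Path

def blocks_in(ds_path):
    """Hypothetical helper: relative paths of block dirs/files under ds_path."""
    return {
        p.relative_to(ds_path).as_posix()
        for p in Path(ds_path).rglob("*")
        if p.name != "attributes.json"
    }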
Example #8
def test_vs_z5(tmp_path, z5_file):
    """Check that dimensions/blockSize differ from z5's while dtype/compression match"""
    root = tmp_path / "test.n5"

    z5_path = Path(z5_file.path)
    shape = (10, 20)
    data = np.arange(np.prod(shape)).reshape(shape)
    chunks = (6, 7)

    pyn5.create_dataset(str(root), "ds", shape, chunks,
                        data.dtype.name.upper())
    ds = pyn5.DatasetINT64(str(root), "ds", False)
    ds.write_ndarray((0, 0), data, 0)

    z5_file.create_dataset("ds",
                           data=data,
                           chunks=(6, 7),
                           compression="gzip",
                           level=-1)

    assert np.allclose(ds.read_ndarray((0, 0), shape), z5_file["ds"][:])
    assert blocks_in(root / "ds") != blocks_in(z5_path / "ds")

    attrs = attrs_in(root / "ds")
    z5_attrs = attrs_in(z5_path / "ds")
    for key in ("blockSize", "dimensions"):
        assert attrs[key] != z5_attrs[key]

    for key in ("dataType", "compression"):
        assert attrs[key] == z5_attrs[key]

    data2 = pyn5.DatasetINT64(str(z5_path), "ds", False).read_ndarray((0, 0),
                                                                      shape)
    data3 = z5py.N5File(str(root))["ds"][:]

    # The two libraries disagree on axis order, so at least one read-back differs.
    assert not all([np.array_equal(data, data2), np.array_equal(data, data3)])
Example #9
    def save_data_for_CATMAID(self, folder_path: Path):
        """
        Save the segmentation confidence scores
        """
        # folder_path is a Path, so build the root with "/" rather than "+"
        n5_path = str(folder_path / "segmentations.n5")
        pyn5.create_dataset(
            n5_path,
            "confidence",
            [int(x) for x in self.end_voxel],
            [int(x) for x in self.leaf_shape_voxels],
            "UINT8",
        )
        dataset = pyn5.open(n5_path, "confidence")
        for leaf in self.distances.iter_leaves():
            pyn5.write(
                dataset,
                leaf.bounds,
                (
                    255
                    * self._view_weighted_mask(
                        tuple(map(slice, leaf.bounds[0], leaf.bounds[1]))
                    )
                ).astype(np.uint8),
                np.uint8,
            )
Example #10
for file_path in (zarr_root / "volumes").iterdir():
    if not (file_path.name.startswith("affs")
            or file_path.name.startswith(".")):
        dataset_name = "volumes/{}".format(file_path.name)
        zarr_dataset = zarr_data[dataset_name]
        dtype = str(zarr_dataset.dtype).upper()
        if (n5_root / "volumes" / file_path.name).exists():
            shutil.rmtree(n5_root / "volumes" / file_path.name)
        all_data = zarr_dataset[:, :, :].transpose([2, 1, 0])  # z,y,x -> x,y,z for pyn5
        # if dtype == "UINT64":
        #     vals = list(set(all_data.flatten()))
        #     mapping = {v: i for i, v in enumerate(vals)}
        #     fv = np.vectorize(lambda x: mapping[x])
        #     all_data = fv(all_data)
        #     dtype = "UINT64"
        pyn5.create_dataset(
            str(n5_root.absolute()),
            dataset_name,
            zarr_dataset.shape[::-1],
            [64, 64, 64],
            dtype,
        )
        n5_dataset = pyn5.open(str(n5_root.absolute()), dataset_name)
        pyn5.write(
            n5_dataset,
            (np.array([0, 0, 0]), np.array(zarr_dataset.shape[::-1])),
            all_data,
            dtype,
        )
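
Because the zarr data is stored z,y,x and the N5 dataset above was created with the shape reversed, reading back through pyn5 and flipping the axes again should reproduce the zarr array. A minimal sanity check, assuming the loop variables above are still in scope:

# Hypothetical round-trip check for the last dataset written above.
read_back = np.asarray(
    n5_dataset.read_ndarray((0, 0, 0), zarr_dataset.shape[::-1])
).reshape(zarr_dataset.shape[::-1])
assert np.array_equal(read_back.transpose([2, 1, 0]), zarr_dataset[:, :, :])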
Example #11
def test_volume_transforms_n5_volume():
    # Create test n5 dataset
    test_dataset_path = Path("test.n5")
    if test_dataset_path.is_dir():
        shutil.rmtree(str(test_dataset_path.absolute()))
    pyn5.create_dataset("test.n5", "test", [10, 10, 10], [2, 2, 2], "UINT8")
    test_dataset = pyn5.open("test.n5", "test")

    test_data = np.zeros([10, 10, 10]).astype(int)
    x = np.linspace(0, 9, 10).reshape([10, 1, 1]).astype(int)
    test_data = test_data + x + x.transpose([1, 2, 0]) + x.transpose([2, 0, 1])

    # Enumerate all 5x5x5 block-grid offsets (see the itertools sketch after
    # this example).
    block_starts = [(i % 5, i // 5 % 5, i // 25 % 5) for i in range(5**3)]
    for block_start in block_starts:
        # Use a tuple of slices: indexing with a list of slices is an error in
        # modern NumPy.
        current_bound = tuple(
            map(slice, [2 * x for x in block_start],
                [2 * x + 2 for x in block_start]))
        flattened = test_data[current_bound].reshape(-1)
        try:
            test_dataset.write_block(block_start, flattened)
        except Exception as e:
            raise AssertionError(
                "Writing to n5 failed! Could not create test dataset.\nError: {}"
                .format(e))

    v = volumes.N5Volume("test.n5",
                         {"image": {
                             "path": "test",
                             "dtype": "UINT8"
                         }},
                         bounds=[10, 10, 10],
                         resolution=[1, 1, 1])
    pv = v.partition(
        [2, 1, 1],
        [1, 0, 0])  # Note axes are flipped after volume initialization
    dpv = pv.downsample((2, 2, 2))

    np.testing.assert_array_equal(
        dpv.local_coord_to_world(np.array([2, 2, 2])), np.array([9, 4, 4]))
    np.testing.assert_array_equal(
        dpv.world_coord_to_local(np.array([9, 4, 4])), np.array([2, 2, 2]))

    svb = volumes.SubvolumeBounds(np.array((5, 0, 0), dtype=np.int64),
                                  np.array((7, 2, 2), dtype=np.int64))
    sv = v.get_subvolume(svb)

    dpsvb = volumes.SubvolumeBounds(np.array((0, 0, 0), dtype=np.int64),
                                    np.array((1, 1, 1), dtype=np.int64))
    dpsv = dpv.get_subvolume(dpsvb)

    np.testing.assert_array_equal(
        dpsv.image,
        sv.image.reshape((1, 2, 1, 2, 1, 2)).mean(5).mean(3).mean(1))

    # sanity check that test.n5 contains varying data
    svb2 = volumes.SubvolumeBounds(np.array((5, 0, 1), dtype=np.int64),
                                   np.array((7, 2, 3), dtype=np.int64))
    sv2 = v.get_subvolume(svb2)
    assert not all(sv.image.flatten() == sv2.image.flatten())

    if test_dataset_path.is_dir():
        shutil.rmtree(str(test_dataset_path.absolute()))
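
The block_starts comprehension in the test above walks the 5x5x5 grid of block offsets with the first axis varying fastest. An equivalent, arguably clearer enumeration with itertools (a sketch, not part of the original test):

import itertools

# t[::-1] makes the first coordinate vary fastest, matching
# (i % 5, i // 5 % 5, i // 25 % 5) for i in range(5 ** 3).
block_starts = [t[::-1] for t in itertools.product(range(5), repeat=3)]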
Example #12
import pickle
import shutil
from pathlib import Path

import numpy as np
from tqdm import tqdm

import pyn5

INTERP = [True, False]
for interp in INTERP:
    root = "/home/pattonw/Work/Data/n5_datasets/L1-segmented/L1.n5"
    dataset = "volumes/segmentation_20"
    out_dataset = "volumes/segmentation_20_skeleton_mapped_{}".format(
        "interp" if interp else "no_interp")

    with open("mapping_{}.obj".format("interpolated" if interp else "plain"),
              "rb") as f:
        mapping = pickle.load(f)

    image_dataset = pyn5.open(root, dataset)
    image_data = image_dataset.read_ndarray((0, 0, 0), (1125, 1125, 80))

    for key, value in tqdm(mapping.items()):
        if len(value) == 1:
            image_data = np.where(image_data == key, value.pop(), image_data)
    try:
        shutil.rmtree(Path(root, out_dataset))
    except Exception as e:
        print(e)
    pyn5.create_dataset(root, out_dataset, (1125, 1125, 80), (125, 125, 10),
                        "UINT32")
    out_ds = pyn5.open(root, out_dataset)
    out_ds.write_ndarray(np.array((0, 0, 0)), image_data, 0)
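
The remapping loop above rescans the full volume once per mapping key. For large mappings, a vectorized lookup can apply every single-valued substitution in a fixed number of passes; a minimal sketch, assuming integer labels and the same len(value) == 1 guard as the loop (remap_labels is a hypothetical helper, not part of pyn5):

import numpy as np

def remap_labels(data, mapping):
    """Replace each key that maps to exactly one value, without per-key scans."""
    pairs = sorted((k, next(iter(v))) for k, v in mapping.items() if len(v) == 1)
    if not pairs:
        return data
    keys = np.array([k for k, _ in pairs])
    vals = np.array([v for _, v in pairs])
    idx = np.clip(np.searchsorted(keys, data), 0, keys.size - 1)
    hit = keys[idx] == data
    return np.where(hit, vals[idx], data)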