Example #1
0
def ds_dtype(request, tmp_path):
    """Parametrized fixture: build an n5 dataset for the requested dtype.

    Yields a tuple of (open pyn5 dataset handle, matching numpy dtype).
    """
    requested = request.param
    container = str(tmp_path / "test.n5")
    name = "ds" + requested

    pyn5.create_dataset(container, name, DS_SIZE, BLOCKSIZE, requested)
    yield pyn5.open(container, name, requested, False), np.dtype(requested.lower())
Example #2
0
 def test_writting_wrong_dtype(self):
     """Writing float values through a handle opened as "FLOAT64" when the
     underlying dataset has a different dtype must raise TypeError.
     """
     bad_n5 = pyn5.open(self.root, self.dataset, "FLOAT64")
     # assertRaises replaces the manual try / self.fail / except pattern
     # and reports a clearer message when no exception is raised.
     with self.assertRaises(TypeError):
         bad_n5.write_block([0, 0, 0],
                            [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8])
Example #3
0
    def setUp(self):
        """Create a fresh 10x10x10 UINT8 n5 dataset before each test."""
        self.root = "test.n5"
        self.dataset = "test"
        self.dtype = "UINT8"
        self.dataset_size = [10, 10, 10]
        self.block_size = [2, 2, 2]

        # Remove any container left behind by a previous (possibly failed)
        # run so create_dataset always starts from a clean slate.
        import shutil
        from pathlib import Path
        if Path(self.root).is_dir():
            shutil.rmtree(str(Path(self.root).absolute()))

        pyn5.create_dataset(self.root, self.dataset, self.dataset_size,
                            self.block_size, self.dtype)
        self.n5 = pyn5.open(self.root, self.dataset, self.dtype, False)
Example #4
0
 def setUp(self):
     """Recreate a clean n5 container and a dataset named for this dtype."""
     self.root = "test.n5"
     self.dataset = "test_{}".format(self.dtype)
     self.dataset_size = [10, 10, 10]
     self.block_size = [2, 2, 2]
     # Start from scratch: drop whatever an earlier run left behind.
     root_path = Path(self.root)
     if root_path.is_dir():
         shutil.rmtree(str(root_path.absolute()))
     pyn5.create_dataset(self.root, self.dataset, self.dataset_size,
                         self.block_size, self.dtype)
     self.n5 = pyn5.open(self.root, self.dataset, self.dtype, False)
Example #5
0
File: octrees.py — Project: pattonw/sarbor
 def write_to_n5(self, folder, dataset):
     """
     Write every leaf of this octree into an n5 dataset.

     Note: because diluvian works with z,y,x and pyn5 assumes x,y,z:
     when writing coordinates must be flipped
     """
     dtype_name = str(self.dtype).upper()
     pyn5.create_dataset(folder, dataset, self.bounds[1], self.leaf_shape, dtype_name)
     # Use a distinct local for the open handle instead of rebinding the
     # `dataset` parameter (which holds the dataset *name*).
     target = pyn5.open(folder, dataset)
     for leaf in self.iter_leaves():
         pyn5.write(target, leaf.bounds, leaf.data, self.dtype)
Example #6
0
 def save_data_for_CATMAID(self, folder_path: Path):
     """
     Save the segmentation confidence score
     """
     # BUG FIX: pathlib.Path does not support "+" concatenation with str;
     # the original `folder_path + "/segmentations.n5"` raised TypeError
     # whenever folder_path was a Path, as the annotation declares.
     n5_path = str(folder_path / "segmentations.n5")
     pyn5.create_dataset(
         n5_path,
         "confidence",
         [int(x) for x in self.end_voxel],
         [int(x) for x in self.leaf_shape_voxels],
         "UINT8",
     )
     dataset = pyn5.open(n5_path, "confidence")
     for leaf in self.distances.iter_leaves():
         # Scale the weighted mask (presumably in [0, 1] — TODO confirm)
         # up to the full uint8 range before writing.
         mask = self._view_weighted_mask(
             tuple(map(slice, leaf.bounds[0], leaf.bounds[1]))
         )
         pyn5.write(dataset, leaf.bounds, (255 * mask).astype(np.uint8), np.uint8)
    "minz": START[2] * RESOLUTION[2] + OFFSET[2],
    "maxx": END[0] * RESOLUTION[0] + OFFSET[0],
    "maxy": END[1] * RESOLUTION[1] + OFFSET[1],
    "maxz": END[2] * RESOLUTION[2] + OFFSET[2],
}
# POST the bounding-box query (voxel extents scaled by resolution, plus
# offset) to CATMAID and collect every skeleton intersecting it.
skeletons = client.fetch("/1/skeletons/in-bounding-box",
                         method="POST",
                         data=query)

print(len(skeletons))
root = "/home/pattonw/Work/Data/n5_datasets/L1-segmented/L1.n5"
dataset = "volumes/segmentation_20"
assert Path(root).is_dir(), "root is not a directory"
assert Path(root, dataset).is_dir(), "dataset is not a directory"

# Load the whole segmentation volume into memory.
image_dataset = pyn5.open(root, dataset)
image_data = image_dataset.read_ndarray((0, 0, 0), (1125, 1125, 80))

# plt.imshow(image_data[:, :, 0])
# plt.show()

for interp in INTERP:
    # Segment ids already present in the volume; consulted below to detect
    # id collisions when assigning new labels.
    previous_keys = set(np.unique(image_data))

    mapping = {}
    current_id = 0
    skeleton_graph = nx.DiGraph()
    skipped = 0
    for skeleton in tqdm(skeletons):
        # NOTE(review): the loop body is truncated in this excerpt; what is
        # visible counts ids that collide with existing segment labels.
        if current_id in previous_keys:
            skipped += 1
for file_path in (zarr_root / "volumes").iterdir():
    name = file_path.name
    # Skip affinity volumes and hidden entries.
    if name.startswith("affs") or name.startswith("."):
        continue
    dataset_name = "volumes/{}".format(name)
    zarr_dataset = zarr_data[dataset_name]
    dtype = "{}".format(zarr_dataset.dtype).upper()
    # Replace any previously converted copy of this dataset.
    target = n5_root / "volumes" / name
    if target.exists():
        shutil.rmtree(target)
    # Transpose to flip axis order and reverse the shape to match
    # (pyn5 assumes x,y,z while the zarr source is the reverse —
    # TODO confirm against the source layout).
    all_data = zarr_dataset[:, :, :].transpose([2, 1, 0])
    pyn5.create_dataset(
        str(n5_root.absolute()),
        dataset_name,
        zarr_dataset.shape[::-1],
        [64, 64, 64],
        dtype,
    )
    n5_dataset = pyn5.open(str(n5_root.absolute()), dataset_name)
    pyn5.write(
        n5_dataset,
        (np.array([0, 0, 0]), np.array(zarr_dataset.shape[::-1])),
        all_data,
        dtype,
    )
Example #9
0
def test_volume_transforms_n5_volume():
    """Round-trip test: write known data into an n5 volume, then verify
    partitioning, downsampling, and coordinate transforms on it."""
    # Create test n5 dataset
    test_dataset_path = Path("test.n5")
    if test_dataset_path.is_dir():
        shutil.rmtree(str(test_dataset_path.absolute()))
    pyn5.create_dataset("test.n5", "test", [10, 10, 10], [2, 2, 2], "UINT8")
    test_dataset = pyn5.open("test.n5", "test")

    # data[i, j, k] == i + j + k, so values vary along every axis.
    test_data = np.zeros([10, 10, 10]).astype(int)
    x = np.linspace(0, 9, 10).reshape([10, 1, 1]).astype(int)
    test_data = test_data + x + x.transpose([1, 2, 0]) + x.transpose([2, 0, 1])

    block_starts = [(i % 5, i // 5 % 5, i // 25 % 5) for i in range(5**3)]
    for block_start in block_starts:
        # BUG FIX: multi-dimensional indexing requires a *tuple* of slices;
        # indexing with a list of slices was deprecated in NumPy 1.15 and
        # is an error in current NumPy releases.
        current_bound = tuple(
            map(slice, [2 * x for x in block_start],
                [2 * x + 2 for x in block_start]))
        flattened = test_data[current_bound].reshape(-1)
        try:
            test_dataset.write_block(block_start, flattened)
        except Exception as e:
            raise AssertionError(
                "Writing to n5 failed! Could not create test dataset.\nError: {}"
                .format(e))

    v = volumes.N5Volume("test.n5",
                         {"image": {
                             "path": "test",
                             "dtype": "UINT8"
                         }},
                         bounds=[10, 10, 10],
                         resolution=[1, 1, 1])
    pv = v.partition(
        [2, 1, 1],
        [1, 0, 0])  # Note axes are flipped after volume initialization
    dpv = pv.downsample((2, 2, 2))

    np.testing.assert_array_equal(
        dpv.local_coord_to_world(np.array([2, 2, 2])), np.array([9, 4, 4]))
    np.testing.assert_array_equal(
        dpv.world_coord_to_local(np.array([9, 4, 4])), np.array([2, 2, 2]))

    svb = volumes.SubvolumeBounds(np.array((5, 0, 0), dtype=np.int64),
                                  np.array((7, 2, 2), dtype=np.int64))
    sv = v.get_subvolume(svb)

    dpsvb = volumes.SubvolumeBounds(np.array((0, 0, 0), dtype=np.int64),
                                    np.array((1, 1, 1), dtype=np.int64))
    dpsv = dpv.get_subvolume(dpsvb)

    # The downsampled subvolume must equal the mean of each 2x2x2 cell.
    np.testing.assert_array_equal(
        dpsv.image,
        sv.image.reshape((1, 2, 1, 2, 1, 2)).mean(5).mean(3).mean(1))

    # sanity check that test.n5 contains varying data
    svb2 = volumes.SubvolumeBounds(np.array((5, 0, 1), dtype=np.int64),
                                   np.array((7, 2, 3), dtype=np.int64))
    sv2 = v.get_subvolume(svb2)
    assert not all(sv.image.flatten() == sv2.image.flatten())

    if test_dataset_path.is_dir():
        shutil.rmtree(str(test_dataset_path.absolute()))
Example #10
0
File: octrees.py — Project: pattonw/sarbor
 def read_from_n5(self, folder, dataset, bounds):
     """Read `bounds` from an n5 dataset into this octree, in place."""
     img = pyn5.read(pyn5.open(folder, dataset), bounds, self.dtype)
     # Use a tuple of slices rather than a list: list-of-slices indexing was
     # deprecated in NumPy 1.15 and later removed (assumes the backing store
     # indexes like an ndarray — the project's other octree code already
     # passes tuple(map(slice, ...)) views).
     self[tuple(map(slice, bounds[0], bounds[1]))] = img
Example #11
0
from tqdm import tqdm
import shutil
from pathlib import Path

INTERP = [True, False]
# For both the interpolated and plain mappings: load the pickled label
# mapping, relabel the segmentation volume accordingly, and write the
# result out as a new n5 dataset.
# NOTE(review): pickle, np (numpy) and pyn5 are used below but imported
# elsewhere in the original file — this excerpt shows only part of it.
for interp in INTERP:
    root = "/home/pattonw/Work/Data/n5_datasets/L1-segmented/L1.n5"
    dataset = "volumes/segmentation_20"
    out_dataset = "volumes/segmentation_20_skeleton_mapped_{}".format(
        "interp" if interp else "no_interp")

    mapping = pickle.load(
        open("mapping_{}.obj".format("interpolated" if interp else "plain"),
             "rb"))

    # Load the whole segmentation volume into memory.
    image_dataset = pyn5.open(root, dataset)
    image_data = image_dataset.read_ndarray((0, 0, 0), (1125, 1125, 80))

    # Relabel: only unambiguous (single-target) mappings are applied.
    # Note value.pop() mutates the pickled mapping's containers in place.
    for key, value in tqdm(mapping.items()):
        if len(value) == 1:
            image_data = np.where(image_data == key, value.pop(), image_data)
    # Best-effort removal of a previous output dataset; failure (e.g. it
    # does not exist yet) is printed and ignored.
    try:
        shutil.rmtree(Path(root, out_dataset))
    except Exception as e:
        print(e)
        pass
    pyn5.create_dataset(root, out_dataset, (1125, 1125, 80), (125, 125, 10),
                        "UINT32")
    out_ds = pyn5.open(root, out_dataset)
    out_ds.write_ndarray(np.array((0, 0, 0)), image_data, 0)