def test_custom_initialization() -> None:

    assert Vec3Int.zeros() == Vec3Int(0, 0, 0)
    assert Vec3Int.ones() == Vec3Int(1, 1, 1)
    assert Vec3Int.full(4) == Vec3Int(4, 4, 4)

    assert Vec3Int.ones() - Vec3Int.ones() == Vec3Int.zeros()
    assert Vec3Int.full(4) == Vec3Int.ones() * 4
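# A small sketch of Vec3Int.from_vec_or_int, which the sharding helper in the
# next example relies on: based on its usage there, a plain int is broadcast to
# all three axes, while a 3-sequence is passed through unchanged.
assert Vec3Int.from_vec_or_int(4) == Vec3Int(4, 4, 4)
assert Vec3Int.from_vec_or_int((1, 2, 3)) == Vec3Int(1, 2, 3)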
Example #2
def _get_sharding_parameters(
    chunk_size: Optional[Union[Vec3IntLike, int]],
    chunks_per_shard: Optional[Union[Vec3IntLike, int]],
    block_len: Optional[int],
    file_len: Optional[int],
) -> Tuple[Optional[Vec3Int], Optional[Vec3Int]]:
    if chunk_size is not None:
        chunk_size = Vec3Int.from_vec_or_int(chunk_size)
    elif block_len is not None:
        warn_deprecated("block_len", "chunk_size")
        chunk_size = Vec3Int.full(block_len)

    if chunks_per_shard is not None:
        chunks_per_shard = Vec3Int.from_vec_or_int(chunks_per_shard)
    elif file_len is not None:
        warn_deprecated("file_len", "chunks_per_shard")
        chunks_per_shard = Vec3Int.full(file_len)

    return (chunk_size, chunks_per_shard)
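# Illustrative use of _get_sharding_parameters: plain ints are broadcast to
# Vec3Int, and the deprecated block_len / file_len arguments are mapped onto
# chunk_size / chunks_per_shard (emitting a deprecation warning).
assert _get_sharding_parameters(
    chunk_size=32, chunks_per_shard=None, block_len=None, file_len=32
) == (Vec3Int.full(32), Vec3Int.full(32))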
def test_annotation_from_zip_file() -> None:

    annotation = wk.Annotation.load(
        TESTDATA_DIR / "annotations" /
        "l4dense_motta_et_al_demo_v2__explorational__4a6356.zip")

    assert annotation.dataset_name == "l4dense_motta_et_al_demo_v2"
    assert annotation.organization_id == "scalable_minds"
    assert annotation.owner_name == "Philipp Otto"
    assert annotation.annotation_id == "61c20205010000cc004a6356"
    assert ("timestamp" in annotation.metadata  # pylint: disable=unsupported-membership-test
            )
    assert len(list(annotation.get_volume_layer_names())) == 1
    assert len(list(annotation.skeleton.flattened_trees())) == 1

    annotation.save(TESTOUTPUT_DIR / "test_dummy.zip")
    copied_annotation = wk.Annotation.load(TESTOUTPUT_DIR / "test_dummy.zip")

    assert copied_annotation.dataset_name == "l4dense_motta_et_al_demo_v2"
    assert copied_annotation.organization_id == "scalable_minds"
    assert copied_annotation.owner_name == "Philipp Otto"
    assert copied_annotation.annotation_id == "61c20205010000cc004a6356"
    assert ("timestamp" in copied_annotation.metadata  # pylint: disable=unsupported-membership-test
            )
    assert len(list(copied_annotation.get_volume_layer_names())) == 1
    assert len(list(copied_annotation.skeleton.flattened_trees())) == 1

    copied_annotation.add_volume_layer(name="new_volume_layer")
    assert len(list(copied_annotation.get_volume_layer_names())) == 2
    copied_annotation.delete_volume_layer(volume_layer_name="new_volume_layer")
    assert len(list(copied_annotation.get_volume_layer_names())) == 1

    with annotation.temporary_volume_layer_copy() as volume_layer:
        input_annotation_mag = volume_layer.get_finest_mag()
        voxel_id = input_annotation_mag.read(
            absolute_offset=Vec3Int(2830, 4356, 1792), size=Vec3Int.full(1)
        )

        assert voxel_id == 2504698
from webknossos.geometry import Vec3Int

DEFAULT_WKW_FILE_LEN = 32
DEFAULT_CHUNK_SIZE = Vec3Int.full(32)
DEFAULT_CHUNKS_PER_SHARD = Vec3Int.full(32)
DEFAULT_CHUNKS_PER_SHARD_ZARR = Vec3Int.full(1)
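# Sketch of how these defaults relate (Vec3Int multiplication is element-wise):
# with the WKW defaults, one shard (file on disk) spans
# chunk_size * chunks_per_shard = 1024 voxels per axis, which matches the
# legacy file length of 32 blocks of 32 voxels each.
assert DEFAULT_CHUNK_SIZE * DEFAULT_CHUNKS_PER_SHARD == Vec3Int.full(1024)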
def download_dataset(
    dataset_name: str,
    organization_id: str,
    sharing_token: Optional[str] = None,
    bbox: Optional[BoundingBox] = None,
    layers: Optional[List[str]] = None,
    mags: Optional[List[Mag]] = None,
    path: Optional[Union[PathLike, str]] = None,
    exist_ok: bool = False,
) -> Dataset:
    context = _get_context()
    client = context.generated_client

    dataset_info_response = dataset_info.sync_detailed(
        organization_name=organization_id,
        data_set_name=dataset_name,
        client=client,
        sharing_token=sharing_token,
    )
    assert dataset_info_response.status_code == 200, dataset_info_response
    parsed = dataset_info_response.parsed
    assert parsed is not None

    datastore_client = context.get_generated_datastore_client(
        parsed.data_store.url)
    optional_datastore_token = sharing_token or context.datastore_token

    actual_path = Path(dataset_name) if path is None else Path(path)
    if actual_path.exists():
        logger.warning(f"{actual_path} already exists, skipping download.")
        return Dataset.open(actual_path)

    voxel_size = cast(Tuple[float, float, float],
                      tuple(parsed.data_source.scale))
    data_layers = parsed.data_source.data_layers
    dataset = Dataset(actual_path,
                      name=parsed.name,
                      voxel_size=voxel_size,
                      exist_ok=exist_ok)
    for layer_name in layers or [i.name for i in data_layers]:

        response_layers = [i for i in data_layers if i.name == layer_name]
        assert (
            len(response_layers) > 0
        ), f"The provided layer name {layer_name} could not be found in the requested dataset."
        assert (
            len(response_layers) == 1
        ), f"The provided layer name {layer_name} was found multiple times in the requested dataset."
        response_layer = response_layers[0]
        category = cast(LayerCategoryType, response_layer.category)
        layer = dataset.add_layer(
            layer_name=layer_name,
            category=category,
            dtype_per_layer=response_layer.element_class,
            num_channels=3 if response_layer.element_class == "uint24" else 1,
            largest_segment_id=response_layer.additional_properties.get(
                "largestSegmentId", None),
        )

        default_view_configuration_dict = None
        if not isinstance(response_layer.default_view_configuration, Unset):
            default_view_configuration_dict = (
                response_layer.default_view_configuration.to_dict())

        if default_view_configuration_dict is not None:
            default_view_configuration = dataset_converter.structure(
                default_view_configuration_dict, LayerViewConfiguration)
            layer.default_view_configuration = default_view_configuration

        if bbox is None:
            response_bbox = response_layer.bounding_box
            layer.bounding_box = BoundingBox(
                response_bbox.top_left,
                (response_bbox.width, response_bbox.height,
                 response_bbox.depth),
            )
        else:
            assert isinstance(
                bbox, BoundingBox
            ), f"Expected a BoundingBox object for the bbox parameter but got {type(bbox)}"
            layer.bounding_box = bbox
        if mags is None:
            mags = [Mag(mag) for mag in response_layer.resolutions]
        for mag in mags:
            mag_view = layer.get_or_add_mag(
                mag,
                compress=True,
                chunk_size=Vec3Int.full(32),
                chunks_per_shard=_DOWNLOAD_CHUNK_SIZE // 32,
            )
            aligned_bbox = layer.bounding_box.align_with_mag(mag, ceil=True)
            download_chunk_size_in_mag = _DOWNLOAD_CHUNK_SIZE * mag.to_vec3_int()
            for chunk in track(
                    list(
                        aligned_bbox.chunk(download_chunk_size_in_mag,
                                           download_chunk_size_in_mag)),
                    description=f"Downloading layer={layer.name} mag={mag}",
            ):
                chunk_in_mag = chunk.in_mag(mag)
                response = dataset_download.sync_detailed(
                    organization_name=organization_id,
                    data_set_name=dataset_name,
                    data_layer_name=layer_name,
                    mag=mag.to_long_layer_name(),
                    client=datastore_client,
                    token=optional_datastore_token,
                    x=chunk.topleft.x,
                    y=chunk.topleft.y,
                    z=chunk.topleft.z,
                    width=chunk_in_mag.size.x,
                    height=chunk_in_mag.size.y,
                    depth=chunk_in_mag.size.z,
                )
                assert response.status_code == 200, response
                assert (
                    response.headers["missing-buckets"] == "[]"
                ), f"Download contained missing buckets {response.headers['missing-buckets']}."
                data = np.frombuffer(response.content,
                                     dtype=layer.dtype_per_channel).reshape(
                                         layer.num_channels,
                                         *chunk_in_mag.size,
                                         order="F")
                mag_view.write(data, absolute_offset=chunk.topleft)
    return dataset
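# A hypothetical invocation of download_dataset (the layer name and output path
# below are placeholders for illustration, not from the original source):
# dataset = download_dataset(
#     dataset_name="l4dense_motta_et_al_demo_v2",
#     organization_id="scalable_minds",
#     bbox=BoundingBox((2800, 4300, 1700), (100, 100, 100)),
#     layers=["color"],
#     mags=[Mag(1)],
#     path="l4dense_demo",
# )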
dataset_converter.register_unstructure_hook(BoundingBox, bbox_to_wkw)
dataset_converter.register_structure_hook(
    BoundingBox, lambda d, _: BoundingBox.from_wkw_dict(d))


def mag_unstructure(mag: Mag) -> List[int]:
    return mag.to_list()


dataset_converter.register_unstructure_hook(Mag, mag_unstructure)
dataset_converter.register_structure_hook(Mag, lambda d, _: Mag(d))

vec3int_to_array: Callable[[Vec3Int], List[int]] = lambda o: o.to_list()
dataset_converter.register_unstructure_hook(Vec3Int, vec3int_to_array)
dataset_converter.register_structure_hook(
    Vec3Int, lambda d, _: Vec3Int.full(d)
    if isinstance(d, int) else Vec3Int(d))
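# Effect of the Vec3Int hooks above (sketch): a bare int is broadcast via
# Vec3Int.full, a 3-element list is structured element-wise, and unstructuring
# goes back to a plain list.
assert dataset_converter.structure(2, Vec3Int) == Vec3Int.full(2)
assert dataset_converter.structure([1, 2, 3], Vec3Int) == Vec3Int(1, 2, 3)
assert dataset_converter.unstructure(Vec3Int(1, 2, 3)) == [1, 2, 3]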

dataset_converter.register_structure_hook_func(
    lambda d: d == LayerCategoryType, lambda d, _: str(d))

# Register (un-)structure hooks for attr-classes to bring the data into the expected format.
# The properties on disk (in datasource-properties.json) use camel case for the names of the attributes.
# However, we use snake case for the attribute names in python.
# This requires that the names of the attributes are renamed during (un-)structuring.
# Additionally, we only want to unstructure attributes which don't have the default value
# (e.g. Layer.default_view_configuration has many attributes which are all optional).
for cls in [
        DatasetProperties,
        MagViewProperties,
        DatasetViewConfiguration,
from ._array import ArrayInfo
from .layer_categories import LayerCategoryType
from .view import View


class InterpolationModes(Enum):
    MEDIAN = 0
    MODE = 1
    NEAREST = 2
    BILINEAR = 3
    BICUBIC = 4
    MAX = 5
    MIN = 6


DEFAULT_BUFFER_SHAPE = Vec3Int.full(256)


def determine_buffer_shape(array_info: ArrayInfo) -> Vec3Int:
    return min(DEFAULT_BUFFER_SHAPE, array_info.shard_size)
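# Sketch of determine_buffer_shape for the common cubic case, assuming Vec3Int
# compares like a tuple (as its use with min() above suggests): the buffer is
# capped at the shard size, but never exceeds the default of 256.
assert min(DEFAULT_BUFFER_SHAPE, Vec3Int.full(1024)) == DEFAULT_BUFFER_SHAPE
assert min(DEFAULT_BUFFER_SHAPE, Vec3Int.full(32)) == Vec3Int.full(32)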


def calculate_mags_to_downsample(
    from_mag: Mag,
    coarsest_mag: Mag,
    dataset_to_align_with: Optional["Dataset"],
    voxel_size: Optional[Tuple[float, float, float]],
) -> List[Mag]:
    assert np.all(from_mag.to_np() <= coarsest_mag.to_np())
    mags = []
    current_mag = from_mag
Example #8
    def upsample(
        self,
        from_mag: Mag,
        finest_mag: Mag = Mag(1),
        compress: bool = False,
        sampling_mode: Union[str, SamplingModes] = SamplingModes.ANISOTROPIC,
        align_with_other_layers: Union[bool, "Dataset"] = True,
        buffer_shape: Optional[Vec3Int] = None,
        buffer_edge_len: Optional[int] = None,
        args: Optional[Namespace] = None,
        *,
        min_mag: Optional[Mag] = None,
    ) -> None:
        """
        Upsamples the data starting from `from_mag` as long as the magnification is `>= finest_mag`.
        There are three different `sampling_modes`:
        - 'anisotropic' - The next magnification is chosen so that the width, height and depth of a voxel become as similar as possible. For example, if the z resolution is worse than the x/y resolution, z is not sampled in the first step(s). As a basis for this method, the voxel_size from the datasource-properties.json is used.
        - 'isotropic' - Each dimension is sampled equally.
        - 'constant_z' - The x and y dimensions are sampled equally, but the z dimension remains the same.

        `min_mag` is deprecated, please use `finest_mag` instead.
        """
        assert (
            from_mag in self.mags.keys()
        ), f"Failed to upsample data. The from_mag ({from_mag.to_layer_name()}) does not exist."

        if min_mag is not None:
            warn_deprecated("upsample(min_mag=…)", "upsample(finest_mag=…)")
            assert finest_mag == Mag(
                1
            ), "Cannot set both min_mag and finest_mag, please only use finest_mag."
            finest_mag = min_mag

        sampling_mode = SamplingModes.parse(sampling_mode)

        voxel_size: Optional[Tuple[float, float, float]] = None
        if sampling_mode == SamplingModes.ANISOTROPIC:
            voxel_size = self.dataset.voxel_size
        elif sampling_mode == SamplingModes.ISOTROPIC:
            voxel_size = None
        elif sampling_mode == SamplingModes.CONSTANT_Z:
            finest_mag_with_fixed_z = finest_mag.to_list()
            finest_mag_with_fixed_z[2] = from_mag.to_list()[2]
            finest_mag = Mag(finest_mag_with_fixed_z)
            voxel_size = None
        else:
            raise AttributeError(
                f"Upsampling failed: {sampling_mode} is not a valid UpsamplingMode ({SamplingModes.ANISOTROPIC}, {SamplingModes.ISOTROPIC}, {SamplingModes.CONSTANT_Z})"
            )

        if buffer_shape is None and buffer_edge_len is not None:
            buffer_shape = Vec3Int.full(buffer_edge_len)

        dataset_to_align_with = self._get_dataset_from_align_with_other_layers(
            align_with_other_layers)
        mags_to_upsample = calculate_mags_to_upsample(from_mag, finest_mag,
                                                      dataset_to_align_with,
                                                      voxel_size)

        for prev_mag, target_mag in zip([from_mag] + mags_to_upsample[:-1],
                                        mags_to_upsample):
            assert prev_mag > target_mag
            assert target_mag not in self.mags

            prev_mag_view = self.mags[prev_mag]

            mag_factors = [
                t / s
                for (t, s) in zip(target_mag.to_list(), prev_mag.to_list())
            ]

            # initialize the new mag
            target_mag_view = self._initialize_mag_from_other_mag(
                target_mag, prev_mag_view, compress)

            # Get target view
            target_view = target_mag_view.get_view()

            # perform upsampling
            with get_executor_for_args(args) as executor:

                if buffer_shape is None:
                    buffer_shape = determine_buffer_shape(prev_mag_view.info)
                func = named_partial(
                    upsample_cube_job,
                    mag_factors=mag_factors,
                    buffer_shape=buffer_shape,
                )
                prev_mag_view.get_view().for_zipped_chunks(
                    # this view is restricted to the bounding box specified in the properties
                    func,
                    target_view=target_view,
                    executor=executor,
                    progress_desc=f"Upsampling from Mag {prev_mag} to Mag {target_mag}",
                )
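# A hypothetical usage of upsample (illustrative, not from the original source):
# upsample an existing Mag 4 back to Mag 1 with anisotropic sampling.
# layer.upsample(from_mag=Mag(4), finest_mag=Mag(1), sampling_mode="anisotropic", compress=True)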
Example #9
    def add_mag(
            self,
            mag: Union[int, str, list, tuple, np.ndarray, Mag],
            chunk_size: Optional[Union[Vec3IntLike,
                                       int]] = None,  # DEFAULT_CHUNK_SIZE,
            chunks_per_shard: Optional[Union[
                int, Vec3IntLike]] = None,  # DEFAULT_CHUNKS_PER_SHARD,
            compress: bool = False,
            block_len: Optional[int] = None,  # deprecated
            file_len: Optional[int] = None,  # deprecated
    ) -> MagView:
        """
        Creates a new mag and adds it to the layer.
        The parameters `chunk_size`, `chunks_per_shard` and `compress` can be
        specified to adjust how the data is stored on disk.
        Note that writing compressed data which is not aligned with the blocks on disk may result in
        diminished performance, as full blocks will automatically be read to pad the write actions. Alternatively,
        you can call `mag.compress()` after all the data has been written.

        The return type is `webknossos.dataset.mag_view.MagView`.

        Raises an IndexError if the specified `mag` already exists.
        """
        self.dataset._ensure_writable()
        # normalize the name of the mag
        mag = Mag(mag)
        compression_mode = compress

        chunk_size, chunks_per_shard = _get_sharding_parameters(
            chunk_size=chunk_size,
            chunks_per_shard=chunks_per_shard,
            block_len=block_len,
            file_len=file_len,
        )
        if chunk_size is None:
            chunk_size = DEFAULT_CHUNK_SIZE
        if chunks_per_shard is None:
            if self.data_format == DataFormat.Zarr:
                chunks_per_shard = DEFAULT_CHUNKS_PER_SHARD_ZARR
            else:
                chunks_per_shard = DEFAULT_CHUNKS_PER_SHARD

        if chunk_size not in (Vec3Int.full(32), Vec3Int.full(64)):
            warnings.warn(
                "[INFO] `chunk_size` of `32, 32, 32` or `64, 64, 64` is recommended for optimal "
                + f"performance in webKnossos. Got {chunk_size}.")

        self._assert_mag_does_not_exist_yet(mag)
        self._create_dir_for_mag(mag)

        mag_view = MagView(
            self,
            mag,
            chunk_size=chunk_size,
            chunks_per_shard=chunks_per_shard,
            compression_mode=compression_mode,
            create=True,
        )

        mag_view._array.ensure_size(
            self.bounding_box.align_with_mag(mag).in_mag(mag).bottomright)

        self._mags[mag] = mag_view
        mag_array_info = mag_view.info
        self._properties.mags += [
            MagViewProperties(
                mag=Mag(mag_view.name),
                cube_length=(mag_array_info.shard_size.x
                             if mag_array_info.data_format == DataFormat.WKW
                             else None),
            )
        ]

        self.dataset._export_as_json()

        return self._mags[mag]
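# A hypothetical usage of add_mag (illustrative): create Mag 1 with the
# recommended chunk size and compression enabled.
# mag1 = layer.add_mag(1, chunk_size=Vec3Int.full(32), chunks_per_shard=32, compress=True)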