Example #1
def test_shape_detector_wrong_chunk_value():
    with pytest.raises(Exception):
        ShapeDetector((10, 10, 10), (10, 10, 10), (2, 10, 10))
Example #2
    def __init__(
        self,
        fs_map: str,
        mode: str = "r",
        shape=None,
        max_shape=None,
        dtype="float64",
        chunks=None,
        compressor=DEFAULT_COMPRESSOR,
    ):
        """Constructor
        Parameters
        ----------
        fs_map : MutableMap
            Maps filesystem to MutableMap
        mode : str
            Mode in which tensor is opened (default is "r"), can be used to overwrite or append
        shape : Tuple[int | None]
            Shape of tensor, (must be specified) can contains Nones meaning the shape might change
        max_shape: Tuple[int | None]
            Maximum possible shape of the tensor (must be specified)
        dtype : str
            Numpy analog dtype for this tensor
        chunks : Tuple[int] | True
            How to split the tensor into chunks (files) (default is True)
            If chunks=True then chunksize will automatically be detected

        """
        if shape is not None:
            # otherwise shape detector fails
            shapeDt = ShapeDetector(
                shape, max_shape, chunks, dtype, compressor=compressor
            )
            shape = shapeDt.shape
            max_shape = shapeDt.max_shape
            chunks = shapeDt.chunks
        elif "r" not in mode:
            raise TypeError("shape cannot be None")

        self.fs_map = fs_map
        exist_ = fs_map.get(".hub.dynamic_tensor")

        # if not exist_ and len(fs_map) > 0 and "w" in mode:
        #     raise OverwriteIsNotSafeException()
        exist = False if "w" in mode else exist_ is not None
        if "r" in mode and not exist:
            raise DynamicTensorNotFoundException()

        synchronizer = None
        # synchronizer = zarr.ThreadSynchronizer()
        # synchronizer = zarr.ProcessSynchronizer("~/activeloop/sync/example.sync")
        # if tensor exists and mode is read or append

        if ("r" in mode or "a" in mode) and exist:
            meta = json.loads(fs_map.get(".hub.dynamic_tensor").decode("utf-8"))
            shape = meta["shape"]
            self._dynamic_dims = get_dynamic_dims(shape)
            self._storage_tensor = zarr.open_array(
                store=fs_map, mode=mode, synchronizer=synchronizer
            )
            self._dynamic_tensor = (
                zarr.open_array(
                    NestedStore(fs_map, "--dynamic--"),
                    mode=mode,
                    synchronizer=synchronizer,
                )
                if self._dynamic_dims
                else None
            )
        # else we need to create or overwrite the tensor
        else:
            self._dynamic_dims = get_dynamic_dims(shape)
            self._storage_tensor = zarr.zeros(
                max_shape,
                dtype=dtype,
                chunks=chunks,
                store=fs_map,
                overwrite=("w" in mode),
                object_codec=numcodecs.Pickle(protocol=3)
                if str(dtype) == "object"
                else None,
                compressor=compressor,
                synchronizer=synchronizer,
            )
            self._dynamic_tensor = (
                zarr.zeros(
                    shape=(max_shape[0], len(self._dynamic_dims)),
                    mode=mode,
                    dtype=np.int32,
                    store=NestedStore(fs_map, "--dynamic--"),
                    synchronizer=synchronizer,
                    compressor=None,
                )
                if self._dynamic_dims
                else None
            )

            fs_map[".hub.dynamic_tensor"] = bytes(json.dumps({"shape": shape}), "utf-8")

        self.shape = shape
        self.max_shape = self._storage_tensor.shape
        self.chunks = self._storage_tensor.chunks
        self.dtype = self._storage_tensor.dtype

        if len(self.shape) != len(self.max_shape):
            raise DynamicTensorShapeException("length")
        for item in self.max_shape:
            if item is None:
                raise DynamicTensorShapeException("none")
        for dim, max_dim in zip(self.shape, self.max_shape):
            if dim is not None and dim != max_dim:
                raise DynamicTensorShapeException("not_equal")
        self._enabled_dynamicness = True
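
For reference, here is a minimal runnable sketch of the create-then-reopen zarr pattern this constructor implements. It assumes zarr 2.x, where any MutableMapping (a plain dict here) can serve as a store, and omits the hub-specific pieces (DEFAULT_COMPRESSOR, NestedStore, ShapeDetector, and the "--dynamic--" shape tensor); the ".hub.dynamic_tensor" key and metadata layout follow the code above.

import json

import zarr

# Plain dict as the MutableMapping store (stand-in for fs_map).
store = {}

# "w" path: allocate storage at max_shape and record the logical shape as metadata.
max_shape = (8, 10, 10)
shape = (8, None, 10)  # None marks a dynamic dimension
zarr.zeros(max_shape, dtype="float64", chunks=(2, 10, 10), store=store, overwrite=True)
store[".hub.dynamic_tensor"] = bytes(json.dumps({"shape": shape}), "utf-8")

# "r"/"a" path: read the logical shape back and reopen the same underlying array.
meta = json.loads(store[".hub.dynamic_tensor"].decode("utf-8"))
storage_tensor = zarr.open_array(store=store, mode="a")
print(meta["shape"], storage_tensor.shape, storage_tensor.chunks, storage_tensor.dtype)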
Example #3
def test_shape_detector_wrong_shape_2():
    with pytest.raises(AssertionError):
        ShapeDetector((10, 10, 10), 20)
Example #4
def test_shape_detector_wrong_shape_3():
    with pytest.raises(HubException):
        ShapeDetector((10, 10, None), (10, 10, None))
Example #5
def test_shape_detector_2():
    s = ShapeDetector((10, 10, 10), 10, compressor="png")
    assert str(s.dtype) == "float64"
    assert s.chunks[1:] == (10, 10)
Example #6
def test_shape_detector():
    s = ShapeDetector((10, 10, 10), 10)
    assert str(s.dtype) == "float64"
    assert s.chunks[1:] == (10, 10)
Example #7
def test_shape_detector_wrong_shape_3():
    try:
        ShapeDetector((10, 10, None), (10, 10, None))
    except HubException:
        return
    assert False
Example #8
def test_shape_detector_wrong_shape_2():
    try:
        ShapeDetector((10, 10, 10), 20)
    except AssertionError:
        return
    assert False
Example #9
def test_shape_detector_wrong_shape():
    try:
        ShapeDetector((10, 10, 10), (10, 10, 20))
    except HubException:
        return
    assert False
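
Taken together, these tests pin down the ShapeDetector contract relied on by the constructor in Example #2: it accepts a shape and a max_shape (a tuple, or a single int in the tests), plus optional chunks, dtype, and compressor, exposes resolved shape, max_shape, chunks, and dtype attributes, and raises HubException or AssertionError on inconsistent input. A brief hedged sketch; the import path below is an assumption and may differ between hub versions.

# Assumed import location; adjust if your hub version places ShapeDetector elsewhere.
from hub.store.shape_detector import ShapeDetector

s = ShapeDetector((10, 10, 10), 10)  # shape, max_shape; chunks/dtype/compressor are optional
print(s.shape, s.max_shape, s.chunks, s.dtype)  # dtype defaults to "float64"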