def test_exceptions():
    """Smoke test: every hub exception type can be constructed without error.

    Each constructor is invoked once, in the same order as before, covering
    the no-argument exceptions, the ones wrapping a ``Response``, the ones
    taking a message/name string, and every recognized
    ``DynamicTensorShapeException`` cause (plus one unrecognized cause).
    """
    # No-argument and Response-wrapping auth/HTTP exceptions.
    HubException()
    AuthenticationException()
    AuthorizationException(Response())
    AuthorizationException(Response(noerror=True))
    NotFoundException()
    BadRequestException(Response())
    BadRequestException(Response(noerror=True))
    OverLimitException()
    ServerException()
    BadGatewayException()
    GatewayTimeoutException()
    WaitTimeoutException()
    LockedException()
    # Exceptions carrying a message, name, or module string.
    HubDatasetNotFoundException("Hello")
    PermissionException("Hello")
    ShapeLengthException()
    ShapeArgumentNotFoundException()
    SchemaArgumentNotFoundException()
    ValueShapeError("Shape 1", "Shape 2")
    NoneValueException("Yahoo!")
    ModuleNotInstalledException("my_module")
    WrongUsernameException("usernameX")
    NotHubDatasetToOverwriteException()
    NotHubDatasetToAppendException()
    DynamicTensorNotFoundException()
    # Every documented DynamicTensorShapeException cause, and a fallback one.
    DynamicTensorShapeException("none")
    DynamicTensorShapeException("length")
    DynamicTensorShapeException("not_equal")
    DynamicTensorShapeException("another_cause")
def __init__(
    self,
    fs_map: str,
    mode: str = "r",
    shape=None,
    max_shape=None,
    dtype="float64",
    chunks=None,
    compressor=DEFAULT_COMPRESSOR,
):
    """Open an existing dynamic tensor or create/overwrite one in ``fs_map``.

    Parameters
    ----------
    fs_map : MutableMap
        Maps filesystem to MutableMap (used directly as the zarr store)
    mode : str
        Mode in which tensor is opened (default is "r"), can be used to
        overwrite ("w") or append ("a")
    shape : Tuple[int | None]
        Shape of tensor, (must be specified) can contains Nones meaning the
        shape might change along those dimensions
    max_shape: Tuple[int | None]
        Maximum possible shape of the tensor (must be specified)
    dtype : str
        Numpy analog dtype for this tensor
    chunks : Tuple[int] | True
        How to split the tensor into chunks (files).
        NOTE(review): the signature default is None, not True as previously
        documented; presumably ShapeDetector auto-detects the chunk size in
        that case — confirm against ShapeDetector.
        If chunks=True then chunksize will automatically be detected

    Raises
    ------
    TypeError
        If ``shape`` is None while not opening in read mode.
    DynamicTensorNotFoundException
        If opened in read mode and no tensor metadata exists in the store.
    DynamicTensorShapeException
        If the declared shape and max_shape are inconsistent (see checks at
        the end of this method).
    """
    # Normalize shape/max_shape/chunks through ShapeDetector whenever a
    # shape was given; skipping this when shape is None avoids a detector
    # failure ("otherwise shape detector fails").
    if not (shape is None):  # otherwise shape detector fails
        shapeDt = ShapeDetector(
            shape, max_shape, chunks, dtype, compressor=compressor
        )
        shape = shapeDt.shape
        max_shape = shapeDt.max_shape
        chunks = shapeDt.chunks
    elif "r" not in mode:
        # Creating/overwriting a tensor requires an explicit shape.
        raise TypeError("shape cannot be none")
    self.fs_map = fs_map
    # Metadata key marking that a dynamic tensor already lives in this store.
    exist_ = fs_map.get(".hub.dynamic_tensor")
    # if not exist_ and len(fs_map) > 0 and "w" in mode:
    #     raise OverwriteIsNotSafeException()
    # Write mode always treats the tensor as non-existent (overwrite).
    exist = False if "w" in mode else exist_ is not None
    if "r" in mode and not exist:
        raise DynamicTensorNotFoundException()
    # Zarr synchronizer is currently disabled; alternatives kept for reference.
    synchronizer = None
    # synchronizer = zarr.ThreadSynchronizer()
    # synchronizer = zarr.ProcessSynchronizer("~/activeloop/sync/example.sync")
    # if tensor exists and mode is read or append: open the stored arrays.
    if ("r" in mode or "a" in mode) and exist:
        # The stored metadata shape overrides any shape passed in.
        meta = json.loads(fs_map.get(".hub.dynamic_tensor").decode("utf-8"))
        shape = meta["shape"]
        self._dynamic_dims = get_dynamic_dims(shape)
        self._storage_tensor = zarr.open_array(
            store=fs_map, mode=mode, synchronizer=synchronizer
        )
        # The "--dynamic--" sub-store tracks per-sample sizes of the dynamic
        # dimensions; only needed when some dimension is dynamic (None).
        self._dynamic_tensor = (
            zarr.open_array(
                NestedStore(fs_map, "--dynamic--"),
                mode=mode,
                synchronizer=synchronizer,
            )
            if self._dynamic_dims
            else None
        )
    # else we need to create or overwrite the tensor
    else:
        self._dynamic_dims = get_dynamic_dims(shape)
        # Storage is allocated at max_shape; object dtypes need a pickle
        # codec since zarr cannot serialize arbitrary objects natively.
        self._storage_tensor = zarr.zeros(
            max_shape,
            dtype=dtype,
            chunks=chunks,
            store=fs_map,
            overwrite=("w" in mode),
            object_codec=numcodecs.Pickle(protocol=3)
            if str(dtype) == "object"
            else None,
            compressor=compressor,
            synchronizer=synchronizer,
        )
        # One int32 entry per (sample, dynamic dimension) records the actual
        # size of each dynamic dimension for that sample.
        self._dynamic_tensor = (
            zarr.zeros(
                shape=(max_shape[0], len(self._dynamic_dims)),
                mode=mode,
                dtype=np.int32,
                store=NestedStore(fs_map, "--dynamic--"),
                synchronizer=synchronizer,
                compressor=None,
            )
            if self._dynamic_dims
            else None
        )
        # Persist the (possibly None-containing) logical shape as metadata.
        fs_map[".hub.dynamic_tensor"] = bytes(json.dumps({"shape": shape}), "utf-8")
    self.shape = shape
    # max_shape/chunks/dtype reflect what zarr actually allocated.
    self.max_shape = self._storage_tensor.shape
    self.chunks = self._storage_tensor.chunks
    self.dtype = self._storage_tensor.dtype
    # Consistency checks between the logical shape and the storage shape.
    if len(self.shape) != len(self.max_shape):
        raise DynamicTensorShapeException("length")
    for item in self.max_shape:
        # max_shape must be fully concrete — no dynamic (None) entries.
        if item is None:
            raise DynamicTensorShapeException("none")
    for item in zip(self.shape, self.max_shape):
        # Every fixed (non-None) dimension must match its max exactly.
        if item[0] is not None:
            if item[0] != item[1]:
                raise DynamicTensorShapeException("not_equal")
    # Dynamic-shape bookkeeping is enabled by default.
    self._enabled_dynamicness = True