def test_getsize():
    store = dict()
    store['foo'] = b'aaa'
    store['bar'] = b'bbbb'
    store['baz/quux'] = b'ccccc'
    assert 7 == getsize(store)
    assert 5 == getsize(store, 'baz')
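
# The test above pins down the getsize() contract for a plain dict store:
# with no path, only values stored directly at the root level are counted
# (b'aaa' + b'bbbb' == 7 bytes; 'baz/quux' sits one level down), while
# getsize(store, 'baz') counts the direct children of 'baz' (5 bytes).
# A minimal sketch of that rule; _getsize_sketch is a hypothetical helper
# for illustration only, not part of zarr's API:
def _getsize_sketch(store, path=''):
    if path in store:
        # path names a single key: return that value's size
        return len(store[path])
    prefix = path + '/' if path else ''
    size = 0
    for key, value in store.items():
        # count only direct children of the prefix (non-recursive)
        if key.startswith(prefix) and '/' not in key[len(prefix):]:
            size += len(value)
    return size

# _getsize_sketch({'foo': b'aaa', 'bar': b'bbbb', 'baz/quux': b'ccccc'})  # -> 7
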
def nbytes_stored(self):
    """The total number of stored bytes of data for the array. This
    includes storage required for configuration metadata and user
    attributes."""
    m = getsize(self._store, self._path)
    if self._store == self._chunk_store:
        return m
    else:
        n = getsize(self._chunk_store, self._path)
        if m < 0 or n < 0:
            return -1
        else:
            return m + n
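
# nbytes_stored adds the metadata-store total to the chunk-store total when
# the two stores differ, and propagates -1 when either size is unknown.
# A quick usage sketch with the public zarr v2 API; the printed values are
# illustrative, since compression affects how many bytes are actually stored:
import zarr

z = zarr.zeros((100, 100), chunks=(10, 10), dtype='f8')
print(z.nbytes_stored)  # metadata only: no chunks written yet
z[:] = 42.0
print(z.nbytes_stored)  # metadata plus the compressed chunk data
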
def test_getsize(self):
    store = self.create_store()
    if isinstance(store, dict) or hasattr(store, 'getsize'):
        assert 0 == getsize(store)
        store['foo'] = b'x'
        assert 1 == getsize(store)
        assert 1 == getsize(store, 'foo')
        store['bar'] = b'yy'
        assert 3 == getsize(store)
        assert 2 == getsize(store, 'bar')
        store['baz'] = bytearray(b'zzz')
        assert 6 == getsize(store)
        assert 3 == getsize(store, 'baz')
        store['quux'] = array.array('B', b'zzzz')
        assert 10 == getsize(store)
        assert 4 == getsize(store, 'quux')
        store['spong'] = np.frombuffer(b'zzzzz', dtype='u1')
        assert 15 == getsize(store)
        assert 5 == getsize(store, 'spong')
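
# The test above relies on getsize() measuring any buffer-like value
# (bytes, bytearray, array.array, numpy arrays) by its byte length. A
# portable way to do that is via the buffer protocol; buffer_size_sketch
# is a hypothetical helper shown for illustration:
def buffer_size_sketch(v) -> int:
    # nbytes is the total size of the underlying buffer in bytes
    return memoryview(v).nbytes
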
def test_consolidate_metadata():
    # setup initial data
    store = MemoryStore()
    z = group(store)
    z.create_group('g1')
    g2 = z.create_group('g2')
    g2.attrs['hello'] = 'world'
    arr = g2.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8')
    assert 16 == arr.nchunks
    assert 0 == arr.nchunks_initialized
    arr.attrs['data'] = 1
    arr[:] = 1.0
    assert 16 == arr.nchunks_initialized

    # perform consolidation
    out = consolidate_metadata(store)
    assert isinstance(out, Group)
    assert '.zmetadata' in store
    for key in ['.zgroup',
                'g1/.zgroup',
                'g2/.zgroup',
                'g2/.zattrs',
                'g2/arr/.zarray',
                'g2/arr/.zattrs']:
        del store[key]

    # open consolidated
    z2 = open_consolidated(store)
    assert ['g1', 'g2'] == list(z2)
    assert 'world' == z2.g2.attrs['hello']
    assert 1 == z2.g2.arr.attrs['data']
    assert (z2.g2.arr[:] == 1.0).all()
    assert 16 == z2.g2.arr.nchunks
    assert 16 == z2.g2.arr.nchunks_initialized

    # test del/write on the store
    cmd = ConsolidatedMetadataStore(store)
    with pytest.raises(PermissionError):
        del cmd['.zgroup']
    with pytest.raises(PermissionError):
        cmd['.zgroup'] = None

    # test getsize on the store
    assert isinstance(getsize(cmd), Integral)

    # test new metadata are not writeable
    with pytest.raises(PermissionError):
        z2.create_group('g3')
    with pytest.raises(PermissionError):
        z2.create_dataset('spam', shape=42, chunks=7, dtype='i4')
    with pytest.raises(PermissionError):
        del z2['g2']

    # test consolidated metadata are not writeable
    with pytest.raises(PermissionError):
        z2.g2.attrs['hello'] = 'universe'
    with pytest.raises(PermissionError):
        z2.g2.arr.attrs['foo'] = 'bar'

    # test the data are writeable
    z2.g2.arr[:] = 2
    assert (z2.g2.arr[:] == 2).all()

    # test invalid modes
    with pytest.raises(ValueError):
        open_consolidated(store, mode='a')
    with pytest.raises(ValueError):
        open_consolidated(store, mode='w')

    # make sure keyword arguments are passed through without error
    open_consolidated(store, cache_attrs=True, synchronizer=None)
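
# The test above exercises the full consolidation round trip: write group
# and array metadata, consolidate it into a single '.zmetadata' key, then
# read everything back through open_consolidated() even after the original
# per-node metadata keys have been deleted. A minimal sketch of that
# workflow using the public zarr v2 convenience API:
import zarr
from zarr.storage import MemoryStore

store = MemoryStore()
root = zarr.group(store)
root.create_dataset('data', shape=(10,), chunks=(5,), dtype='i4')
zarr.consolidate_metadata(store)       # writes the '.zmetadata' key
root2 = zarr.open_consolidated(store)  # metadata reads hit '.zmetadata' only
print(list(root2))                     # -> ['data']
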
def test_getsize(self):
    # TODO: determine proper getsize() behavior for v3
    #       Currently returns the combined size of entries under
    #       meta/root/path and data/root/path.
    #       Any path not under meta/root/ or data/root/ (including zarr.json)
    #       returns size 0.
    store = self.create_store()
    if isinstance(store, dict) or hasattr(store, 'getsize'):
        assert 0 == getsize(store, 'zarr.json')
        store[meta_root + 'foo/a'] = b'x'
        assert 1 == getsize(store)
        assert 1 == getsize(store, 'foo')
        store[meta_root + 'foo/b'] = b'x'
        assert 2 == getsize(store, 'foo')
        assert 1 == getsize(store, 'foo/b')
        store[meta_root + 'bar/a'] = b'yy'
        assert 2 == getsize(store, 'bar')
        store[data_root + 'bar/a'] = b'zzz'
        assert 5 == getsize(store, 'bar')
        store[data_root + 'baz/a'] = b'zzz'
        assert 3 == getsize(store, 'baz')
        assert 10 == getsize(store)
        store[data_root + 'quux'] = array.array('B', b'zzzz')
        assert 14 == getsize(store)
        assert 4 == getsize(store, 'quux')
        store[data_root + 'spong'] = np.frombuffer(b'zzzzz', dtype='u1')
        assert 19 == getsize(store)
        assert 5 == getsize(store, 'spong')
    store.close()
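
# Per the TODO above, v3 getsize(store, path) is effectively the recursive
# sum of sizes under both the metadata and data roots for that path. A
# hedged sketch of that rule; _getsize_v3_sketch is hypothetical, assuming
# meta_root == 'meta/root/' and data_root == 'data/root/' as in zarr's
# experimental v3 store layout:
def _getsize_v3_sketch(store, path=''):
    size = 0
    for root in ('meta/root/', 'data/root/'):
        prefix = root + path
        for key, value in store.items():
            # a key counts if it equals the prefix exactly or lives below it
            if key == prefix or key.startswith(prefix.rstrip('/') + '/'):
                size += memoryview(value).nbytes
    return size
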
def test_consolidate_metadata(with_chunk_store, zarr_version, listable,
                              monkeypatch, stores_from_path):

    # setup initial data
    if stores_from_path:
        store = tempfile.mkdtemp()
        atexit.register(atexit_rmtree, store)
        if with_chunk_store:
            chunk_store = tempfile.mkdtemp()
            atexit.register(atexit_rmtree, chunk_store)
        else:
            chunk_store = None
        version_kwarg = {'zarr_version': zarr_version}
    else:
        if zarr_version == 2:
            store = MemoryStore()
            chunk_store = MemoryStore() if with_chunk_store else None
        elif zarr_version == 3:
            store = MemoryStoreV3()
            chunk_store = MemoryStoreV3() if with_chunk_store else None
        version_kwarg = {}
    path = 'dataset' if zarr_version == 3 else None
    z = group(store, chunk_store=chunk_store, path=path, **version_kwarg)

    # Reload the actual store implementation in case a path string was given
    store_to_copy = z.store

    z.create_group('g1')
    g2 = z.create_group('g2')
    g2.attrs['hello'] = 'world'
    arr = g2.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8')
    assert 16 == arr.nchunks
    assert 0 == arr.nchunks_initialized
    arr.attrs['data'] = 1
    arr[:] = 1.0
    assert 16 == arr.nchunks_initialized

    if stores_from_path:
        # get the actual store class for use with consolidate_metadata
        store_class = z._store
    else:
        store_class = store

    if zarr_version == 3:
        # error on v3 if path not provided
        with pytest.raises(ValueError):
            consolidate_metadata(store_class, path=None)
        with pytest.raises(ValueError):
            consolidate_metadata(store_class, path='')

    # perform consolidation
    out = consolidate_metadata(store_class, path=path)
    assert isinstance(out, Group)
    assert ['g1', 'g2'] == list(out)
    if not stores_from_path:
        if zarr_version == 2:
            assert isinstance(out._store, ConsolidatedMetadataStore)
            assert '.zmetadata' in store
            meta_keys = ['.zgroup',
                         'g1/.zgroup',
                         'g2/.zgroup',
                         'g2/.zattrs',
                         'g2/arr/.zarray',
                         'g2/arr/.zattrs']
        else:
            assert isinstance(out._store, ConsolidatedMetadataStoreV3)
            assert 'meta/root/consolidated/.zmetadata' in store
            meta_keys = ['zarr.json',
                         meta_root + 'dataset.group.json',
                         meta_root + 'dataset/g1.group.json',
                         meta_root + 'dataset/g2.group.json',
                         meta_root + 'dataset/g2/arr.array.json',
                         'meta/root/consolidated.group.json']
        for key in meta_keys:
            del store[key]

    # https://github.com/zarr-developers/zarr-python/issues/993
    # Make sure we can still open consolidated on an unlistable store:
    if not listable:
        fs_memory = pytest.importorskip("fsspec.implementations.memory")
        monkeypatch.setattr(fs_memory.MemoryFileSystem, "isdir",
                            lambda x, y: False)
        monkeypatch.delattr(fs_memory.MemoryFileSystem, "ls")
        fs = fs_memory.MemoryFileSystem()
        if zarr_version == 2:
            store_to_open = FSStore("", fs=fs)
        else:
            store_to_open = FSStoreV3("", fs=fs)
        # copy original store to new unlistable store
        store_to_open.update(store_to_copy)
    else:
        store_to_open = store

    # open consolidated
    z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path,
                           **version_kwarg)
    assert ['g1', 'g2'] == list(z2)
    assert 'world' == z2.g2.attrs['hello']
    assert 1 == z2.g2.arr.attrs['data']
    assert (z2.g2.arr[:] == 1.0).all()
    assert 16 == z2.g2.arr.nchunks
    if listable:
        assert 16 == z2.g2.arr.nchunks_initialized
    else:
        with pytest.raises(NotImplementedError):
            _ = z2.g2.arr.nchunks_initialized

    if stores_from_path:
        # a path string is not a BaseStore subclass, so it cannot be used to
        # initialize a ConsolidatedMetadataStore
        if zarr_version == 2:
            with pytest.raises(ValueError):
                cmd = ConsolidatedMetadataStore(store)
        elif zarr_version == 3:
            with pytest.raises(ValueError):
                cmd = ConsolidatedMetadataStoreV3(store)
    else:
        # test del/write on the store
        if zarr_version == 2:
            cmd = ConsolidatedMetadataStore(store)
            with pytest.raises(PermissionError):
                del cmd['.zgroup']
            with pytest.raises(PermissionError):
                cmd['.zgroup'] = None
        else:
            cmd = ConsolidatedMetadataStoreV3(store)
            with pytest.raises(PermissionError):
                del cmd[meta_root + 'dataset.group.json']
            with pytest.raises(PermissionError):
                cmd[meta_root + 'dataset.group.json'] = None

        # test getsize on the store
        assert isinstance(getsize(cmd), Integral)

    # test new metadata are not writeable
    with pytest.raises(PermissionError):
        z2.create_group('g3')
    with pytest.raises(PermissionError):
        z2.create_dataset('spam', shape=42, chunks=7, dtype='i4')
    with pytest.raises(PermissionError):
        del z2['g2']

    # test consolidated metadata are not writeable
    with pytest.raises(PermissionError):
        z2.g2.attrs['hello'] = 'universe'
    with pytest.raises(PermissionError):
        z2.g2.arr.attrs['foo'] = 'bar'

    # test the data are writeable
    z2.g2.arr[:] = 2
    assert (z2.g2.arr[:] == 2).all()

    # test invalid modes
    with pytest.raises(ValueError):
        open_consolidated(store, chunk_store=chunk_store, mode='a', path=path)
    with pytest.raises(ValueError):
        open_consolidated(store, chunk_store=chunk_store, mode='w', path=path)
    with pytest.raises(ValueError):
        open_consolidated(store, chunk_store=chunk_store, mode='w-', path=path)

    # make sure keyword arguments are passed through without error
    open_consolidated(
        store,
        chunk_store=chunk_store,
        path=path,
        cache_attrs=True,
        synchronizer=None,
        **version_kwarg,
    )
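
# The PermissionError checks above reflect that ConsolidatedMetadataStore
# (and its v3 counterpart) is a read-only view: metadata reads are served
# from the consolidated document, and every mutation is rejected. A minimal
# sketch of that pattern; ReadOnlyDict is hypothetical, for illustration:
class ReadOnlyDict(dict):
    def __setitem__(self, key, value):
        raise PermissionError('store is read-only')

    def __delitem__(self, key):
        raise PermissionError('store is read-only')
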