def create_store(self, normalize_keys=False, key_separator=".", **kwargs):
    # Since the user is passing key_separator, that will take priority.
    skip_if_nested_chunks(**kwargs)

    path = tempfile.mkdtemp()
    atexit.register(atexit_rmtree, path)
    return FSStoreV3(path, normalize_keys=normalize_keys, key_separator=key_separator)
def create_store(self, normalize_keys=False, dimension_separator=".", path=None, **kwargs):
    # create a temporary directory unless an explicit path was provided
    if path is None:
        path = tempfile.mkdtemp()
        atexit.register(atexit_rmtree, path)
    store = FSStoreV3(
        path,
        normalize_keys=normalize_keys,
        dimension_separator=dimension_separator,
        **kwargs)
    return store
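# Sketch (not part of the test suite): both create_store helpers above return an
# FSStoreV3 rooted at a temporary directory, one passing the legacy ``key_separator``
# keyword and the other the newer ``dimension_separator``. The standalone example below
# shows the equivalent direct construction; the path handling mirrors the helpers and
# the round-tripped key is illustrative only. Note that zarr-python 2.x treats the v3
# store API as experimental, so running it may additionally require
# ZARR_V3_EXPERIMENTAL_API=1 in the environment.
def _example_fsstore_v3_roundtrip():
    import atexit
    import shutil
    import tempfile

    from zarr._storage.v3 import FSStoreV3

    path = tempfile.mkdtemp()
    atexit.register(shutil.rmtree, path, True)  # best-effort cleanup at exit

    store = FSStoreV3(path, normalize_keys=False, dimension_separator="/")
    store['zarr.json'] = b'{}'   # 'zarr.json' is the v3 entry-point metadata key
    assert store['zarr.json'] == b'{}'
    store.close()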
def dest(self, request, tmpdir):
    # yield the destination hierarchy selected by the fixture parameter: an HDF5
    # file or a zarr v3 group backed by one of several store classes
    if request.param == 'hdf5':
        h5py = pytest.importorskip('h5py')
        fn = tmpdir.join('dest.h5')
        with h5py.File(str(fn), mode='w') as h5f:
            yield h5f
    elif request.param == 'zarr':
        yield group(path='group2', zarr_version=3)
    elif request.param == 'zarr_kvstore':
        store = KVStoreV3(dict())
        yield group(store, path='group2', zarr_version=3)
    elif request.param == 'zarr_fsstore':
        fn = tmpdir.join('dest.zr3')
        store = FSStoreV3(str(fn), auto_mkdir=True)
        yield group(store, path='group2', zarr_version=3)
    elif request.param == 'zarr_directorystore':
        fn = tmpdir.join('dest.zr3')
        store = DirectoryStoreV3(str(fn))
        yield group(store, path='group2', zarr_version=3)
    elif request.param == 'zarr_sqlitestore':
        fn = tmpdir.join('dest.db')
        store = SQLiteStoreV3(str(fn))
        yield group(store, path='group2', zarr_version=3)
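# Sketch (assumption, not copied from the test file): the dest fixture above dispatches
# on request.param, so its registration presumably looks roughly like this; the
# parameter names are taken from the branches above, but their order and any ids are
# guesses:
#
#     @pytest.fixture(params=['hdf5', 'zarr', 'zarr_kvstore', 'zarr_fsstore',
#                             'zarr_directorystore', 'zarr_sqlitestore'])
#     def dest(self, request, tmpdir):
#         ...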
def test_consolidate_metadata(with_chunk_store, zarr_version, listable, monkeypatch,
                              stores_from_path):

    # setup initial data
    if stores_from_path:
        store = tempfile.mkdtemp()
        atexit.register(atexit_rmtree, store)
        if with_chunk_store:
            chunk_store = tempfile.mkdtemp()
            atexit.register(atexit_rmtree, chunk_store)
        else:
            chunk_store = None
        version_kwarg = {'zarr_version': zarr_version}
    else:
        if zarr_version == 2:
            store = MemoryStore()
            chunk_store = MemoryStore() if with_chunk_store else None
        elif zarr_version == 3:
            store = MemoryStoreV3()
            chunk_store = MemoryStoreV3() if with_chunk_store else None
        version_kwarg = {}
    path = 'dataset' if zarr_version == 3 else None
    z = group(store, chunk_store=chunk_store, path=path, **version_kwarg)

    # Reload the actual store implementation in case a str path was given
    store_to_copy = z.store

    z.create_group('g1')
    g2 = z.create_group('g2')
    g2.attrs['hello'] = 'world'
    arr = g2.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8')
    assert 16 == arr.nchunks
    assert 0 == arr.nchunks_initialized
    arr.attrs['data'] = 1
    arr[:] = 1.0
    assert 16 == arr.nchunks_initialized

    if stores_from_path:
        # get the actual store class for use with consolidate_metadata
        store_class = z._store
    else:
        store_class = store

    if zarr_version == 3:
        # error on v3 if path not provided
        with pytest.raises(ValueError):
            consolidate_metadata(store_class, path=None)

        with pytest.raises(ValueError):
            consolidate_metadata(store_class, path='')

    # perform consolidation
    out = consolidate_metadata(store_class, path=path)
    assert isinstance(out, Group)
    assert ['g1', 'g2'] == list(out)

    if not stores_from_path:
        if zarr_version == 2:
            assert isinstance(out._store, ConsolidatedMetadataStore)
            assert '.zmetadata' in store
            meta_keys = ['.zgroup',
                         'g1/.zgroup',
                         'g2/.zgroup',
                         'g2/.zattrs',
                         'g2/arr/.zarray',
                         'g2/arr/.zattrs']
        else:
            assert isinstance(out._store, ConsolidatedMetadataStoreV3)
            assert 'meta/root/consolidated/.zmetadata' in store
            meta_keys = ['zarr.json',
                         meta_root + 'dataset.group.json',
                         meta_root + 'dataset/g1.group.json',
                         meta_root + 'dataset/g2.group.json',
                         meta_root + 'dataset/g2/arr.array.json',
                         'meta/root/consolidated.group.json']

        for key in meta_keys:
            del store[key]

    # https://github.com/zarr-developers/zarr-python/issues/993
    # Make sure we can still open consolidated on an unlistable store:
    if not listable:
        fs_memory = pytest.importorskip("fsspec.implementations.memory")
        monkeypatch.setattr(fs_memory.MemoryFileSystem, "isdir", lambda x, y: False)
        monkeypatch.delattr(fs_memory.MemoryFileSystem, "ls")
        fs = fs_memory.MemoryFileSystem()
        if zarr_version == 2:
            store_to_open = FSStore("", fs=fs)
        else:
            store_to_open = FSStoreV3("", fs=fs)

        # copy original store to new unlistable store
        store_to_open.update(store_to_copy)
    else:
        store_to_open = store

    # open consolidated
    z2 = open_consolidated(store_to_open, chunk_store=chunk_store, path=path, **version_kwarg)
    assert ['g1', 'g2'] == list(z2)
    assert 'world' == z2.g2.attrs['hello']
    assert 1 == z2.g2.arr.attrs['data']
    assert (z2.g2.arr[:] == 1.0).all()
    assert 16 == z2.g2.arr.nchunks
    if listable:
        assert 16 == z2.g2.arr.nchunks_initialized
    else:
        with pytest.raises(NotImplementedError):
            _ = z2.g2.arr.nchunks_initialized

    if stores_from_path:
        # path string is not a BaseStore subclass so cannot be used to
        # initialize a ConsolidatedMetadataStore.
        if zarr_version == 2:
            with pytest.raises(ValueError):
                cmd = ConsolidatedMetadataStore(store)
        elif zarr_version == 3:
            with pytest.raises(ValueError):
                cmd = ConsolidatedMetadataStoreV3(store)
    else:
        # tests del/write on the store
        if zarr_version == 2:
            cmd = ConsolidatedMetadataStore(store)
            with pytest.raises(PermissionError):
                del cmd['.zgroup']
            with pytest.raises(PermissionError):
                cmd['.zgroup'] = None
        else:
            cmd = ConsolidatedMetadataStoreV3(store)
            with pytest.raises(PermissionError):
                del cmd[meta_root + 'dataset.group.json']
            with pytest.raises(PermissionError):
                cmd[meta_root + 'dataset.group.json'] = None

        # test getsize on the store
        assert isinstance(getsize(cmd), Integral)

    # test new metadata are not writeable
    with pytest.raises(PermissionError):
        z2.create_group('g3')
    with pytest.raises(PermissionError):
        z2.create_dataset('spam', shape=42, chunks=7, dtype='i4')
    with pytest.raises(PermissionError):
        del z2['g2']

    # test consolidated metadata are not writeable
    with pytest.raises(PermissionError):
        z2.g2.attrs['hello'] = 'universe'
    with pytest.raises(PermissionError):
        z2.g2.arr.attrs['foo'] = 'bar'

    # test the data are writeable
    z2.g2.arr[:] = 2
    assert (z2.g2.arr[:] == 2).all()

    # test invalid modes
    with pytest.raises(ValueError):
        open_consolidated(store, chunk_store=chunk_store, mode='a', path=path)
    with pytest.raises(ValueError):
        open_consolidated(store, chunk_store=chunk_store, mode='w', path=path)
    with pytest.raises(ValueError):
        open_consolidated(store, chunk_store=chunk_store, mode='w-', path=path)

    # make sure keyword arguments are passed through without error
    open_consolidated(
        store,
        chunk_store=chunk_store,
        path=path,
        cache_attrs=True,
        synchronizer=None,
        **version_kwarg,
    )
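# Standalone sketch of the stable v2 consolidation workflow exercised by the test above
# (consolidate_metadata writes a single '.zmetadata' summary key; open_consolidated then
# reopens the hierarchy from that key). The group/array names, shapes and values below
# are illustrative only and not taken from the test suite.
def _example_consolidated_metadata_v2():
    import zarr
    from zarr.storage import MemoryStore

    store = MemoryStore()
    root = zarr.group(store=store)
    g = root.create_group('g2')
    g.attrs['hello'] = 'world'
    arr = g.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8')
    arr[:] = 1.0

    zarr.consolidate_metadata(store)       # writes the '.zmetadata' summary key
    assert '.zmetadata' in store

    # reopen with metadata served from '.zmetadata'; in the default mode the
    # metadata is read-only while the array data remains writeable
    z = zarr.open_consolidated(store)
    assert z.g2.attrs['hello'] == 'world'
    assert (z.g2.arr[:] == 1.0).all()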