def test_storage(self):
    """Attributes backed by a plain dict round-trip through JSON bytes."""
    backing = dict()
    attrs = Attributes(store=backing, key='attrs')
    # a fresh instance exposes no keys and an empty mapping
    assert 'foo' not in attrs
    assert 'bar' not in attrs
    eq(dict(), attrs.asdict())
    # writes are serialized to the backing store under the configured key
    attrs['foo'] = 'bar'
    attrs['baz'] = 42
    assert 'attrs' in backing
    assert isinstance(backing['attrs'], binary_type)
    decoded = json.loads(text_type(backing['attrs'], 'ascii'))
    eq(dict(foo='bar', baz=42), decoded)
def test_storage(self):
    """Setting attributes persists them as ASCII-encoded JSON bytes."""
    backing = dict()
    attrs = Attributes(store=backing, key='attrs')
    # a fresh instance exposes no keys and an empty mapping
    for missing in ('foo', 'bar'):
        assert missing not in attrs
    assert attrs.asdict() == dict()
    # writes are serialized to the backing store under the configured key
    attrs['foo'] = 'bar'
    attrs['baz'] = 42
    assert 'attrs' in backing
    raw = backing['attrs']
    assert isinstance(raw, bytes)
    assert json.loads(str(raw, 'ascii')) == dict(foo='bar', baz=42)
def __init__(self, store, path=None, read_only=False, chunk_store=None,
             cache_attrs=True, synchronizer=None):
    """Open an existing group at *path* within *store*.

    Raises ContainsArrayError if an array already occupies the path, and
    GroupNotFoundError if no group metadata document exists there.
    """
    self._store = store
    self._chunk_store = chunk_store
    self._path = normalize_storage_path(path)
    self._key_prefix = self._path + '/' if self._path else ''
    self._read_only = read_only
    self._synchronizer = synchronizer

    # guard condition: an array and a group may not share the same path
    if contains_array(store, path=self._path):
        raise ContainsArrayError(path)

    # load and decode the group metadata document
    meta_key = self._key_prefix + group_meta_key
    try:
        raw_meta = store[meta_key]
    except KeyError:
        raise GroupNotFoundError(path)
    else:
        self._meta = decode_group_metadata(raw_meta)

    # user attributes live alongside the metadata under their own key
    self._attrs = Attributes(store, key=self._key_prefix + attrs_key,
                             read_only=read_only, cache=cache_attrs,
                             synchronizer=synchronizer)

    # diagnostic info reporter
    self._info = InfoReporter(self)
def __init__(self, store, path=None, read_only=False, chunk_store=None,
             synchronizer=None, cache_metadata=True):
    """Open an object at *path* within an already-initialized *store*."""
    # N.B., expect at this point store is fully initialized with all
    # configuration metadata fully specified and normalized
    self._store = store
    self._path = normalize_storage_path(path)
    self._key_prefix = self._path + '/' if self._path else ''
    self._read_only = read_only
    # fall back to the main store for chunk data when no dedicated
    # chunk store is supplied
    self._chunk_store = store if chunk_store is None else chunk_store
    self._synchronizer = synchronizer
    self._cache_metadata = cache_metadata
    self._is_view = False

    # load metadata from the store
    self._load_metadata()

    # user attributes live under their own key next to the metadata
    self._attrs = Attributes(store, key=self._key_prefix + attrs_key,
                             read_only=read_only,
                             synchronizer=synchronizer)
def init_attributes(self, store, read_only=False):
    """Seed *store* with an empty attrs document and wrap it in Attributes.

    The instance is guarded by a ThreadSynchronizer.
    """
    key = 'attrs'
    # pre-populate an empty JSON mapping under the attributes key
    store[key] = json.dumps(dict()).encode('ascii')
    return Attributes(store, synchronizer=ThreadSynchronizer(),
                      key=key, read_only=read_only)
def init_attributes(self, store, read_only=False, cache=True):
    """Build an Attributes over *store* guarded by a thread synchronizer."""
    sync = ThreadSynchronizer()
    return Attributes(store, synchronizer=sync, key='attrs',
                      read_only=read_only, cache=cache)
def test_storage(self, store_from_dict):
    """A plain-dict store is normalized to KVStore and persists JSON bytes."""
    if store_from_dict:
        backing = dict()
    else:
        backing = KVStore(dict())
    attrs = Attributes(store=backing, key='attrs')
    # plain dicts are wrapped in a KVStore internally
    assert isinstance(attrs.store, KVStore)
    for missing in ('foo', 'bar'):
        assert missing not in attrs
    assert attrs.asdict() == dict()
    attrs['foo'] = 'bar'
    attrs['baz'] = 42
    assert 'attrs' in backing
    raw = backing['attrs']
    assert isinstance(raw, bytes)
    assert json.loads(str(raw, 'ascii')) == dict(foo='bar', baz=42)
def init_attributes(self, store, read_only=False, cache=True, zarr_version=2):
    """Create Attributes under the version-appropriate key prefix."""
    # v2 metadata keys are dot-prefixed; v3 keys live under meta_root
    prefix = '.z' if zarr_version == 2 else meta_root
    return Attributes(store, key=prefix + 'attrs',
                      read_only=read_only, cache=cache)
def init_attributes(self, store, read_only=False, cache=True):
    """Build Attributes guarded by a ProcessSynchronizer in a temp dir."""
    sync_dir = mkdtemp()
    # remove the synchronizer directory when the interpreter exits
    atexit.register(shutil.rmtree, sync_dir)
    return Attributes(store, synchronizer=ProcessSynchronizer(sync_dir),
                      key='attrs', read_only=read_only, cache=cache)
def init_attributes(self, store, read_only=False):
    """Seed an empty attrs document and wrap *store* in an Attributes
    instance guarded by a ProcessSynchronizer."""
    key = 'attrs'
    # pre-populate an empty JSON mapping under the attributes key
    store[key] = json.dumps(dict()).encode('ascii')
    sync_dir = mkdtemp()
    # remove the synchronizer directory when the interpreter exits
    atexit.register(shutil.rmtree, sync_dir)
    return Attributes(store, synchronizer=ProcessSynchronizer(sync_dir),
                      key=key, read_only=read_only)
def test_storage(self, zarr_version):
    """Attribute writes persist as JSON bytes under the version-specific key."""
    store = _init_store(zarr_version)
    prefix = '.z' if zarr_version == 2 else meta_root
    attrs_key = prefix + 'attrs'
    attrs = Attributes(store=store, key=attrs_key)
    assert isinstance(attrs.store, KVStore)
    for missing in ('foo', 'bar'):
        assert missing not in attrs
    assert attrs.asdict() == dict()
    attrs['foo'] = 'bar'
    attrs['baz'] = 42
    assert attrs_key in store
    raw = store[attrs_key]
    assert isinstance(raw, bytes)
    payload = json.loads(str(raw, 'ascii'))
    if zarr_version == 3:
        # v3 wraps user attributes in an 'attributes' envelope
        payload = payload['attributes']
    assert payload == dict(foo='bar', baz=42)
def init_attributes(self, store, read_only=False, cache=True,
                    zarr_version=zarr_version):
    """Build thread-synchronized Attributes under the version-specific key.

    NOTE(review): the default binds the enclosing-scope ``zarr_version``
    at definition time — presumably a parametrized module/class attribute;
    confirm against the surrounding file.
    """
    # v2 uses the dot-prefixed '.zattrs' key; v3 nests under meta_root
    key = '.zattrs' if zarr_version == 2 else meta_root + 'attrs'
    return Attributes(store, synchronizer=ThreadSynchronizer(), key=key,
                      read_only=read_only, cache=cache)
def init_attributes(self, store, read_only=False, cache=True,
                    zarr_version=zarr_version):
    """Build process-synchronized Attributes under the version-specific key.

    NOTE(review): the default binds the enclosing-scope ``zarr_version``
    at definition time — presumably a parametrized module/class attribute;
    confirm against the surrounding file.
    """
    # v2 uses the dot-prefixed '.zattrs' key; v3 nests under meta_root
    key = '.zattrs' if zarr_version == 2 else meta_root + 'attrs'
    sync_dir = mkdtemp()
    # remove the synchronizer directory when the interpreter exits
    atexit.register(shutil.rmtree, sync_dir)
    return Attributes(store, synchronizer=ProcessSynchronizer(sync_dir),
                      key=key, read_only=read_only, cache=cache)
def __init__(self, store, path=None, read_only=False, chunk_store=None,
             synchronizer=None):
    """Open an existing group at *path* within *store*.

    Signals an error (via err_contains_array / err_group_not_found) when
    an array occupies the path or no group metadata exists there.
    """
    self._store = store
    self._path = normalize_storage_path(path)
    self._key_prefix = self._path + '/' if self._path else ''
    self._read_only = read_only
    # fall back to the main store for chunk data when no dedicated
    # chunk store is supplied
    self._chunk_store = store if chunk_store is None else chunk_store
    self._synchronizer = synchronizer

    # guard condition: an array and a group may not share the same path
    if contains_array(store, path=self._path):
        err_contains_array(path)

    # load and decode the group metadata document
    meta_key = self._key_prefix + group_meta_key
    try:
        raw_meta = store[meta_key]
    except KeyError:
        err_group_not_found(path)
    else:
        self._meta = decode_group_metadata(raw_meta)

    # user attributes live alongside the metadata under their own key
    self._attrs = Attributes(store, key=self._key_prefix + attrs_key,
                             read_only=read_only,
                             synchronizer=synchronizer)
def init_attributes(self, store, read_only=False, cache=True):
    """Wrap *store* in an Attributes instance keyed at 'attrs'."""
    options = dict(key='attrs', read_only=read_only, cache=cache)
    return Attributes(store, **options)
def __init__(self, store, path=None, read_only=False, chunk_store=None,
             cache_attrs=True, synchronizer=None, zarr_version=None):
    """Open an existing group at *path* within *store* (zarr v2 or v3).

    Parameters are stored on the instance; *store* (and *chunk_store*, if
    given) is first normalized via ``_normalize_store_arg``. Raises
    ContainsArrayError if an array occupies the path, and
    GroupNotFoundError if neither explicit group metadata nor (for v3) an
    implicit group prefix is found there.
    """
    store: BaseStore = _normalize_store_arg(store, zarr_version=zarr_version)
    if zarr_version is None:
        # infer the protocol version from the store when not given explicitly
        zarr_version = getattr(store, '_store_version', DEFAULT_ZARR_VERSION)
    if chunk_store is not None:
        chunk_store: BaseStore = _normalize_store_arg(
            chunk_store, zarr_version=zarr_version)
    self._store = store
    self._chunk_store = chunk_store
    self._path = normalize_storage_path(path)
    if self._path:
        self._key_prefix = self._path + '/'
    else:
        self._key_prefix = ''
    self._read_only = read_only
    self._synchronizer = synchronizer
    self._version = zarr_version
    if self._version == 3:
        # v3 keeps chunk data under a separate data_root prefix and carries
        # hierarchy metadata / a metadata key suffix on the store
        self._data_key_prefix = data_root + self._key_prefix
        self._data_path = data_root + self._path
        self._hierarchy_metadata = _get_hierarchy_metadata(
            store=self._store)
        self._metadata_key_suffix = _get_metadata_suffix(store=self._store)

    # guard conditions: an array and a group may not share the same path
    if contains_array(store, path=self._path):
        raise ContainsArrayError(path)

    # initialize metadata
    try:
        mkey = _prefix_to_group_key(self._store, self._key_prefix)
        assert not mkey.endswith("root/.group")
        meta_bytes = store[mkey]
    except KeyError:
        if self._version == 2:
            # v2 has no notion of implicit groups: missing metadata is fatal
            raise GroupNotFoundError(path)
        else:
            implicit_prefix = meta_root + self._key_prefix
            if self._store.list_prefix(implicit_prefix):
                # implicit group does not have any metadata
                self._meta = None
            else:
                raise GroupNotFoundError(path)
    else:
        self._meta = self._store._metadata_class.decode_group_metadata(
            meta_bytes)

    # setup attributes
    if self._version == 2:
        akey = self._key_prefix + attrs_key
    else:
        # Note: mkey doesn't actually exist for implicit groups, but the
        # object can still be created.
        akey = mkey
    self._attrs = Attributes(store, key=akey, read_only=read_only,
                             cache=cache_attrs, synchronizer=synchronizer)

    # setup info
    self._info = InfoReporter(self)
def init_attributes(self, store, read_only=False):
    """Construct an Attributes view over *store* under the 'attrs' key."""
    attrs_key = 'attrs'
    return Attributes(store, key=attrs_key, read_only=read_only)