def test_normalize_order():
    """normalize_order() accepts 'C'/'F' in either case and rejects anything else."""
    cases = [('F', 'F'), ('C', 'C'), ('f', 'F'), ('c', 'C')]
    for arg, expected in cases:
        eq(expected, normalize_order(arg))
    # any other string is invalid
    with assert_raises(ValueError):
        normalize_order('foo')
def test_normalize_order():
    """normalize_order() upper-cases valid order codes and raises ValueError otherwise."""
    for arg, expected in [('F', 'F'), ('C', 'C'), ('f', 'F'), ('c', 'C')]:
        assert expected == normalize_order(arg)
    # any other string is invalid
    with pytest.raises(ValueError):
        normalize_order('foo')
def _init_array_metadata(store, shape, chunks=None, dtype=None,
                         compressor='default', fill_value=None,
                         order='C', overwrite=False, path=None,
                         chunk_store=None, filters=None):
    """Initialize array metadata and an empty attributes document in `store`.

    Normalizes the user-supplied shape/chunks/dtype/order/fill_value,
    resolves the compressor and filter configurations, then writes the
    encoded metadata under the array metadata key and an empty JSON
    attributes mapping under the attributes key (both relative to `path`).

    Raises
    ------
    ValueError
        If `dtype` is a datetime64 or timedelta64 dtype, which cannot be
        stored directly.
    """
    # guard conditions
    if overwrite:
        # attempt to delete any pre-existing items in store
        rmdir(store, path)
        if chunk_store is not None and chunk_store != store:
            rmdir(chunk_store, path)
    elif contains_array(store, path):
        err_contains_array(path)
    elif contains_group(store, path):
        err_contains_group(path)

    # normalize metadata
    shape = normalize_shape(shape)
    dtype = np.dtype(dtype)
    # dtype.kind 'm' (timedelta64) and 'M' (datetime64) are not supported
    # by the metadata encoding; fail early with a clear message
    if dtype.kind in 'mM':
        raise ValueError(
            'datetime64 and timedelta64 dtypes are not currently supported; '
            'please store the data using int64 instead')
    chunks = normalize_chunks(chunks, shape, dtype.itemsize)
    order = normalize_order(order)
    # normalize the fill value against the dtype so the stored metadata is
    # consistent regardless of how the caller spelled it
    fill_value = normalize_fill_value(fill_value, dtype)

    # compressor prep
    if shape == ():
        # no point in compressing a 0-dimensional array, only a single value
        compressor = None
    elif compressor == 'none':
        # compatibility
        compressor = None
    elif compressor == 'default':
        compressor = default_compressor

    # obtain compressor config
    compressor_config = None
    if compressor:
        try:
            compressor_config = compressor.get_config()
        except AttributeError:
            err_bad_compressor(compressor)

    # obtain filters config
    if filters:
        filters_config = [f.get_config() for f in filters]
    else:
        filters_config = None

    # initialize metadata
    meta = dict(shape=shape, chunks=chunks, dtype=dtype,
                compressor=compressor_config, fill_value=fill_value,
                order=order, filters=filters_config)
    key = _path_to_prefix(path) + array_meta_key
    store[key] = encode_array_metadata(meta)

    # initialize attributes
    key = _path_to_prefix(path) + attrs_key
    store[key] = json.dumps(dict()).encode('ascii')
def _init_array_metadata(store, shape, chunks=None, dtype=None,
                         compressor='default', fill_value=None,
                         order='C', overwrite=False, path=None,
                         chunk_store=None, filters=None):
    """Write array metadata plus an empty attributes document into `store`.

    Refuses to clobber an existing array or group at `path` unless
    `overwrite` is given, normalizes all user-supplied metadata, resolves
    the compressor and filter configurations, and finally stores the
    encoded metadata and an empty JSON attributes mapping under the keys
    derived from `path`.

    Raises
    ------
    ValueError
        If `dtype` is a datetime64 or timedelta64 dtype.
    """
    # guard conditions
    if overwrite:
        # attempt to delete any pre-existing items in store
        rmdir(store, path)
        if chunk_store is not None:
            rmdir(chunk_store, path)
    elif contains_array(store, path):
        err_contains_array(path)
    elif contains_group(store, path):
        err_contains_group(path)

    # normalize metadata
    shape = normalize_shape(shape)
    dtype = np.dtype(dtype)
    # kind 'm'/'M' covers timedelta64 and datetime64, neither of which
    # the metadata encoding supports
    if dtype.kind in 'mM':
        raise ValueError(
            'datetime64 and timedelta64 dtypes are not currently supported; '
            'please store the data using int64 instead')
    chunks = normalize_chunks(chunks, shape, dtype.itemsize)
    order = normalize_order(order)
    fill_value = normalize_fill_value(fill_value, dtype)

    # resolve the compressor: a 0-d array holds a single value, so
    # compression is pointless; 'none'/'default' are accepted for
    # compatibility
    if shape == ():
        compressor = None
    elif compressor == 'none':
        compressor = None
    elif compressor == 'default':
        compressor = default_compressor

    # obtain compressor config
    compressor_config = None
    if compressor:
        try:
            compressor_config = compressor.get_config()
        except AttributeError:
            err_bad_compressor(compressor)

    # obtain filters config
    filters_config = [f.get_config() for f in filters] if filters else None

    # initialize metadata and attributes under the path-derived keys
    metadata = dict(shape=shape, chunks=chunks, dtype=dtype,
                    compressor=compressor_config, fill_value=fill_value,
                    order=order, filters=filters_config)
    prefix = _path_to_prefix(path)
    store[prefix + array_meta_key] = encode_array_metadata(metadata)
    store[prefix + attrs_key] = json.dumps({}).encode('ascii')