Example #1
import pytest

from zarr.meta import encode_fill_value          # module paths assume zarr v2
from zarr.util import normalize_dtype, normalize_fill_value


def test_encode_fill_value(fill_value, dtype, object_codec, result):

    # normalize metadata (copied from _init_array_metadata)
    dtype, object_codec = normalize_dtype(dtype, object_codec)
    dtype = dtype.base
    fill_value = normalize_fill_value(fill_value, dtype)

    # test
    if result:
        encode_fill_value(fill_value, dtype, object_codec)
    else:
        with pytest.raises(ValueError):
            encode_fill_value(fill_value, dtype, object_codec)
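
The example above omits the pytest.mark.parametrize decorator that supplies
fill_value, dtype, object_codec and result (the flag saying whether encoding is
expected to succeed). A hypothetical parametrization, with illustrative cases
that are not taken from the original test suite, could look like this:

@pytest.mark.parametrize('fill_value,dtype,object_codec,result', [
    (0, 'i8', None, True),     # integer fill for an integer dtype encodes fine
    (0.5, 'f8', None, True),   # float fill for a float dtype encodes fine
    (b'x', 'S3', None, True),  # bytes fill for a fixed-length bytes dtype
    # a case expected to fail encoding would set result=False
])
def test_encode_fill_value(fill_value, dtype, object_codec, result):
    ...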
Example #2
import numpy as np
from nose.tools import eq_ as eq    # assumes the nose-style eq helper used by older zarr tests
from zarr.util import normalize_fill_value


def test_normalize_fill_value():
    eq(b'', normalize_fill_value(0, dtype=np.dtype('S1')))
    eq(b'',
       normalize_fill_value(0, dtype=np.dtype([('foo', 'i4'), ('bar', 'f8')])))
    eq('', normalize_fill_value(0, dtype=np.dtype('U1')))
Example #3
import numpy as np

from zarr.util import normalize_fill_value   # module path assumes zarr v2


def test_normalize_fill_value():
    assert b'' == normalize_fill_value(0, dtype=np.dtype('S1'))
    structured_dtype = np.dtype([('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')])
    expect = np.array((b'', 0, 0.), dtype=structured_dtype)[()]
    assert expect == normalize_fill_value(0, dtype=structured_dtype)
    assert '' == normalize_fill_value(0, dtype=np.dtype('U1'))
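
As a sketch of where the normalized value ends up, assuming the public zarr v2
API (zarr.create) rather than calling normalize_fill_value directly: chunks
that have never been written read back as the normalized fill value.

import numpy as np
import zarr

z = zarr.create(shape=(4, 4), chunks=(2, 2), dtype='S1', fill_value=0)
assert z.fill_value == b''     # 0 is normalized to b'' for an S1 dtype
assert np.all(z[:] == b'')     # uninitialized chunks read back as the fill value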
Example #4
# Taken from zarr.storage (zarr v2); the helpers used below (rmdir,
# contains_array, contains_group, err_*, normalize_*, default_compressor,
# _path_to_prefix, array_meta_key, attrs_key, encode_array_metadata) are
# defined or imported within that module.
def _init_array_metadata(store,
                         shape,
                         chunks=None,
                         dtype=None,
                         compressor='default',
                         fill_value=None,
                         order='C',
                         overwrite=False,
                         path=None,
                         chunk_store=None,
                         filters=None):
    """Write the encoded array metadata (array_meta_key, '.zarray') and an
    empty attributes document (attrs_key, '.zattrs') for a new array into
    ``store`` under ``path``, normalizing the user-supplied parameters
    along the way."""

    # guard conditions
    if overwrite:
        # attempt to delete any pre-existing items in store
        rmdir(store, path)
        if chunk_store is not None:
            rmdir(chunk_store, path)
    elif contains_array(store, path):
        err_contains_array(path)
    elif contains_group(store, path):
        err_contains_group(path)

    # normalize metadata
    shape = normalize_shape(shape)
    dtype = np.dtype(dtype)
    if dtype.kind in 'mM':
        raise ValueError(
            'datetime64 and timedelta64 dtypes are not currently supported; '
            'please store the data using int64 instead')
    chunks = normalize_chunks(chunks, shape, dtype.itemsize)
    order = normalize_order(order)
    fill_value = normalize_fill_value(fill_value, dtype)

    # compressor prep
    if shape == ():
        # no point in compressing a 0-dimensional array, only a single value
        compressor = None
    elif compressor == 'none':
        # compatibility
        compressor = None
    elif compressor == 'default':
        compressor = default_compressor

    # obtain compressor config
    compressor_config = None
    if compressor:
        try:
            compressor_config = compressor.get_config()
        except AttributeError:
            err_bad_compressor(compressor)

    # obtain filters config
    if filters:
        filters_config = [f.get_config() for f in filters]
    else:
        filters_config = None

    # initialize metadata
    meta = dict(shape=shape,
                chunks=chunks,
                dtype=dtype,
                compressor=compressor_config,
                fill_value=fill_value,
                order=order,
                filters=filters_config)
    key = _path_to_prefix(path) + array_meta_key
    store[key] = encode_array_metadata(meta)

    # initialize attributes
    key = _path_to_prefix(path) + attrs_key
    store[key] = json.dumps(dict()).encode('ascii')
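
A minimal usage sketch, assuming zarr v2's public init_array wrapper (in
zarr.storage) around this helper and a plain dict as the store; after the call
the store holds the encoded array metadata under the array metadata key:

import json

from zarr.storage import init_array

store = dict()
init_array(store, shape=(1000,), chunks=(100,), dtype='f8', fill_value=0)
meta = json.loads(store['.zarray'].decode('ascii'))
print(meta['chunks'], meta['fill_value'])   # expected: [100] 0.0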