Example #1
def test_bad_shape():
    """Negative test for bad data shapes"""

    with tempfile.TemporaryDirectory() as td:
        fp = os.path.join(td, 'outputs.h5')

        with Outputs(fp, 'w') as f:
            f.meta = meta
            f.time_index = time_index

        # 1-D data does not match the file's 2-D (time_index, meta) shape
        with pytest.raises(HandlerValueError):
            Outputs.add_dataset(fp, 'dset3', np.ones(10), None, float)

        # 2-D data with the wrong dimensions is rejected as well
        with pytest.raises(HandlerValueError):
            Outputs.add_dataset(fp, 'dset3', np.ones((10, 10)), None, float)
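
The module-level fixtures meta and time_index define the expected 2-D shape of every dataset: rows follow time_index and columns follow the sites in meta. The snippet below is a hypothetical stand-in for those fixtures (the 100-site meta and the hourly 2012 index are assumptions, not values from the source) to show what a valid shape looks like and why the arrays above are rejected.

import numpy as np
import pandas as pd

# Hypothetical stand-ins for the module-level test fixtures; the real ones
# are defined in the original test module.
meta = pd.DataFrame({'latitude': np.zeros(100), 'longitude': np.zeros(100)})
time_index = pd.date_range('2012-01-01', periods=8760, freq='h')

# Valid data is 2-D with shape (len(time_index), len(meta)); np.ones(10) and
# np.ones((10, 10)) above do not match it, hence the HandlerValueError.
good_arr = np.ones((len(time_index), len(meta)), dtype=np.float32)
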
Example #2
def test_add_dset():
    """Test the addition of datasets to a pre-existing h5 file"""

    with tempfile.TemporaryDirectory() as td:
        fp = os.path.join(td, 'outputs.h5')

        with Outputs(fp, 'w') as f:
            f.meta = meta
            f.time_index = time_index

        # Float data cannot be written as an int dtype without a scale factor
        with pytest.raises(HandlerRuntimeError):
            Outputs.add_dataset(fp,
                                'dset1',
                                arr1,
                                None,
                                int,
                                chunks=None,
                                unscale=True,
                                mode='a',
                                str_decode=True,
                                group=None)

        # A scale_factor of 1 performs no scaling, so float data still cannot
        # be stored with an int dtype
        with pytest.raises(HandlerRuntimeError):
            Outputs.add_dataset(fp,
                                'dset2',
                                arr2, {'scale_factor': 1},
                                int,
                                chunks=(None, 10),
                                unscale=True,
                                mode='a',
                                str_decode=True,
                                group=None)

        # Pre-casting the data to the requested int dtype is accepted
        Outputs.add_dataset(fp,
                            'dset1',
                            arr1.astype(int),
                            None,
                            int,
                            chunks=None,
                            unscale=True,
                            mode='a',
                            str_decode=True,
                            group=None)

        with h5py.File(fp, 'r') as f:
            assert 'dset1' in f
            data = f['dset1'][...]
            assert data.dtype == int
            assert np.allclose(arr1, data)

        # int16 data with a matching dtype and a unity scale factor is accepted
        Outputs.add_dataset(fp,
                            'dset2',
                            arr2.astype(np.int16), {'scale_factor': 1},
                            np.int16,
                            chunks=(None, 10),
                            unscale=True,
                            mode='a',
                            str_decode=True,
                            group=None)

        with h5py.File(fp, 'r') as f:
            assert 'dset1' in f
            assert 'dset2' in f
            assert f['dset1'].chunks is None
            assert f['dset2'].chunks == (8760, 10)
            assert np.allclose(f['dset2'][...], arr2)

        # Float data with scale_factor=100 is scaled by the handler and stored
        # as int32
        Outputs.add_dataset(fp,
                            'dset3',
                            arr3, {'scale_factor': 100},
                            np.int32,
                            chunks=(100, 25),
                            unscale=True,
                            mode='a',
                            str_decode=True,
                            group=None)

        with h5py.File(fp, 'r') as f:
            assert 'dset1' in f
            assert 'dset2' in f
            assert 'dset3' in f
            assert f['dset1'].chunks is None
            assert f['dset2'].chunks == (8760, 10)
            assert f['dset3'].chunks == (100, 25)
            assert f['dset3'].attrs['scale_factor'] == 100
            assert f['dset3'].dtype == np.int32
            assert np.allclose(f['dset3'][...], arr3 * 100)
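
The final asserts show that add_dataset stores the scaled values: with scale_factor=100 the float array arr3 lands on disk as arr3 * 100 cast to int32. Below is a minimal read-back sketch that manually undoes the scaling, assuming it runs inside the test where fp and arr3 are still in scope (the tolerance of one stored count is an assumption; the source only checks the scaled values).

import h5py
import numpy as np

# Divide the raw int32 values by the stored scale_factor to recover arr3,
# allowing up to one integer count (1 / scale) of rounding error.
with h5py.File(fp, 'r') as f:
    scale = f['dset3'].attrs['scale_factor']
    unscaled = f['dset3'][...].astype(np.float32) / scale
    assert np.allclose(unscaled, arr3, atol=1 / scale)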