Example no. 1
    def test_write_overwrite(self):
        """write(must_exist=True) overwrites the selected chunk in place and
        raises RuntimeError when the chunk index lies outside the dataset."""
        for ndim in range(1, 4):
            chunk_shape = random_int_tuple(1, 10, ndim)
            nchunks = random_int_tuple(1, 4, ndim)
            # full dataset shape covers every chunk exactly once
            full_shape = tuple(size * count for size, count in zip(chunk_shape, nchunks))

            memfile = BytesIO()
            dtype = random.choice((int, float))
            chunk_data = np.random.uniform(-10, 10, chunk_shape).astype(dtype)
            base_content = np.random.uniform(-10, 10, full_shape).astype(chunk_data.dtype)
            with h5.File(memfile, "w") as f:
                f.create_dataset("data", data=base_content, chunks=chunk_shape, maxshape=(None,) * ndim)

            buf = ChunkBuffer(memfile, "data", data=chunk_data)

            # valid indices: the selected chunk is replaced, everything else intact
            for index in product_range(nchunks):
                # reset the file so only the current chunk differs afterwards
                with h5.File(memfile, "a") as f:
                    f["data"][...] = base_content

                buf.select(index)
                buf.write(must_exist=True)
                expected = base_content.copy()
                expected[_chunk_slices(index, chunk_shape)] = chunk_data
                with h5.File(memfile, "r") as f:
                    np.testing.assert_allclose(f["data"][()], expected)

            # index out of bounds along one axis at a time
            for axis in range(ndim):
                index = [random.randint(0, count - 1) for count in nchunks]
                index[axis] = nchunks[axis] + random.randint(1, 10)
                buf.select(tuple(index))
                with self.assertRaises(RuntimeError):
                    buf.write(must_exist=True)
Example no. 2
    def test_real_files(self):
        """ChunkBuffer round-trips through an actual HDF5 file on disk:
        create, extend via stored/passed filename or dataset, and reject
        mismatched targets with ValueError."""
        with TemporaryDirectory() as tempdir:
            file_path = Path(tempdir) / "test_file.h5"
            chunk_shape = (1, 2, 3)
            data = np.random.uniform(-10, 10, chunk_shape)
            buf = ChunkBuffer(file_path, "data", data=data)
            buf.create_dataset(filemode="w")

            self.assertTrue(file_path.exists())
            with h5.File(file_path, "r") as f:
                np.testing.assert_allclose(f["data"][()], data)

            # extend dataset with stored filename
            data = np.random.uniform(-10, 10, chunk_shape)
            buf.select((1, 0, 0))
            buf.data[...] = data
            buf.write(must_exist=False)
            with h5.File(file_path, "r") as f:
                np.testing.assert_allclose(f["data"][1:, :, :], data)

            # extend dataset with passed in filename
            data = np.random.uniform(-10, 10, chunk_shape)
            buf.select((1, 1, 0))
            buf.data[...] = data
            buf.write(must_exist=False, file=file_path)
            with h5.File(file_path, "r") as f:
                np.testing.assert_allclose(f["data"][1:, 2:, :], data)

            # extend dataset with passed in dataset
            data = np.random.uniform(-10, 10, chunk_shape)
            buf.select((1, 0, 1))
            buf.data[...] = data
            with h5.File(file_path, "r+") as f:
                dset = f["data"]
                buf.write(must_exist=False, dataset=dset)
                np.testing.assert_allclose(dset[1:, :2, 3:], data)

            # wrong filename
            with self.assertRaises(ValueError):
                buf.write(must_exist=False, file="wrong_file.h5")

            # wrong dataset
            with h5.File(file_path, "a") as f:
                wrong_dset = f.create_dataset("wrong_data", (1,))
                with self.assertRaises(ValueError):
                    buf.write(must_exist=False, dataset=wrong_dset)
Example no. 3
    def test_write_extend(self):
        """write(must_exist=False) grows the dataset one chunk at a time while
        leaving all previously written chunks untouched."""
        for ndim in range(1, 4):
            chunk_shape = random_int_tuple(1, 10, ndim)
            nchunks = random_int_tuple(1, 5, ndim)
            written = []  # (slices, data) pairs for every chunk written so far

            memfile = BytesIO()
            with h5.File(memfile, "w") as f:
                f.create_dataset("data", shape=chunk_shape, dtype=float,
                                 chunks=chunk_shape, maxshape=(None,) * ndim)

            buf = ChunkBuffer(memfile, "data", shape=chunk_shape, dtype=float)
            for index in product_range(nchunks):
                fresh = np.random.uniform(-10, 10, chunk_shape)
                written.append((_chunk_slices(index, chunk_shape), fresh))
                buf.select(index)
                buf.data[...] = fresh
                buf.write(must_exist=False)

                # the file must now hold every chunk written so far
                with h5.File(memfile, "r") as f:
                    dataset = f["data"]
                    for slices, expected in written:
                        np.testing.assert_allclose(dataset[slices], expected)