def test_load(self):
        """Test ChunkBuffer.load for chunked datasets.

        Covers: loading every stored chunk by positive index, loading by
        negative index, and the error cases (chunk outside maxshape,
        chunk within maxshape but not stored, contiguous dataset).
        """
        for ndim in range(1, 4):
            chunk_shape = random_int_tuple(1, 10, ndim)
            nchunks = random_int_tuple(1, 4, ndim)
            # The dataset is an exact multiple of the chunk shape in every dimension.
            total_shape = tuple(n * c for n, c in zip(chunk_shape, nchunks))
            array = np.random.uniform(-10, 10, total_shape)

            stream = BytesIO()
            with h5.File(stream, "w") as h5f:
                h5f.create_dataset("data", data=array, chunks=chunk_shape)
            # valid, load all chunks, positive indices
            for chunk_index in product_range(nchunks):
                buffer = ChunkBuffer.load(stream, "data", chunk_index)
                np.testing.assert_allclose(buffer.data, array[_chunk_slices(chunk_index, chunk_shape)],
                                           err_msg=capture_variables(ndim=ndim,
                                                                     chunk_shape=chunk_shape,
                                                                     nchunks=nchunks,
                                                                     chunk_index=chunk_index))
            # negative index; (-1, ..., -1) must resolve to the last chunk in each dimension
            neg_index = (-1,) * ndim
            pos_index = tuple(n - 1 for n in nchunks)
            buffer = ChunkBuffer.load(stream, "data", neg_index)
            np.testing.assert_allclose(buffer.data, array[_chunk_slices(pos_index, chunk_shape)],
                                       err_msg=capture_variables(ndim=ndim,
                                                                 chunk_shape=chunk_shape,
                                                                 nchunks=nchunks,
                                                                 chunk_index=neg_index))

            # invalid, load non-existent chunk
            # outside of maxshape, discoverable through maxshape
            # ("data" was created without maxshape, so maxshape == total_shape)
            with self.assertRaises(IndexError):
                ChunkBuffer.load(stream, "data", nchunks)
            # outside of maxshape, not discoverable through maxshape
            with self.assertRaises(IndexError):
                ChunkBuffer.load(stream, "data", (nchunks[0] + 1,) + nchunks[1:])
            # within maxshape but not stored
            with h5.File(stream, "w") as h5f:
                h5f.create_dataset("partially_filled", shape=total_shape, chunks=chunk_shape,
                                   maxshape=tuple(n * 2 for n in total_shape))
            with self.assertRaises(IndexError):
                # Fix: use `nchunks` (the first chunk past the stored data in every
                # dimension), which always lies within maxshape = 2 * total_shape.
                # The previous index (nchunks[0] + 1, ...) fell OUTSIDE maxshape
                # whenever nchunks[0] == 1 and thus did not exercise this case.
                ChunkBuffer.load(stream, "partially_filled", nchunks)

        # invalid, contiguous dataset (no chunks to load)
        stream = BytesIO()
        with h5.File(stream, "w") as h5f:
            h5f.create_dataset("data", data=np.random.uniform(-10, 10, (5, 3)))
        with self.assertRaises(RuntimeError):
            ChunkBuffer.load(stream, "data", (0, 0))
    def test_dataset_creation(self):
        """Test ChunkBuffer.create_dataset.

        For every chunk index and partial fill level, the created dataset
        must have the expected shape, chunking, dtype, and maxshape, and
        the written chunk must round-trip through ChunkBuffer.load.
        """
        for ndim in range(1, 4):
            index_limits = random_int_tuple(1, 4, ndim)
            for index in product_range(index_limits):
                shape = random_int_tuple(1, 10, ndim)
                for fill_level in chain((None,), product_range((1,) * ndim, shape)):
                    # fill_level=None means the chunk is completely filled,
                    # which is the same as an effective fill of one full chunk.
                    fill = shape if fill_level is None else fill_level
                    expected_shape = tuple(c * i + f
                                           for c, i, f in zip(shape, index, fill))
                    data = np.random.uniform(-10, 10, shape).astype(random.choice((float, int)))

                    stream = BytesIO()
                    buffer = ChunkBuffer(stream, "data", data=data, maxshape=(None,) * ndim)
                    buffer.select(index)
                    # Exercise both ways of passing the target file.
                    target = stream if random.random() < 0.5 else None
                    buffer.create_dataset(target, filemode="w",
                                          write=True, fill_level=fill_level)

                    with h5.File(stream, "r") as h5f:
                        dataset = h5f["data"]
                        self.assertEqual(dataset.shape, expected_shape)
                        self.assertEqual(dataset.chunks, shape)
                        self.assertEqual(dataset.dtype, data.dtype)
                        self.assertEqual(dataset.maxshape, buffer.maxshape)
                        # Compare only the part of the chunk that was actually written.
                        written = (... if fill_level is None
                                   else tuple(slice(f) for f in fill_level))
                        np.testing.assert_allclose(
                            ChunkBuffer.load(h5f, "data", index).data[written],
                            data[written])