Пример #1
0
def _batcher_bs_100(
        dataset: Dataset,
        batch_size: int) -> Iterable[tuple[TensorType, TensorType]]:
    """Return an infinite iterator of shuffled minibatches over *dataset*.

    The data is shuffled with a fixed buffer of 100 elements, batched to
    ``batch_size``, and repeated indefinitely.
    """
    pipeline = (
        tf.data.Dataset.from_tensor_slices(dataset.astuple())
        .shuffle(100)
        .batch(batch_size)
        .repeat()
    )
    return iter(pipeline)
Пример #2
0
    def build_stacked_independent_objectives_model(
            data: Dataset) -> ModelStack:
        """Build a stack of two independent GPR models, one per objective.

        Each objective gets its own Matern-5/2 GPR fitted shape-wise to one
        column of ``data.observations``; the stack emits one output per model.
        """
        single_objective_models = []
        for output_idx in range(2):
            # Slice out this objective's observation column, keeping rank.
            objective_data = Dataset(
                data.query_points,
                tf.gather(data.observations, [output_idx], axis=1))
            signal_variance = tf.math.reduce_variance(
                objective_data.observations)
            matern = gpflow.kernels.Matern52(
                signal_variance, tf.constant([0.2, 0.2], tf.float64))
            gpr_model = gpflow.models.GPR(
                objective_data.astuple(), matern, noise_variance=1e-5)
            # Keep the (tiny) observation noise fixed during optimisation.
            gpflow.utilities.set_trainable(gpr_model.likelihood, False)
            single_objective_models.append(
                (GaussianProcessRegression(gpr_model), 1))

        return ModelStack(*single_objective_models)
Пример #3
0
def _batcher_2(dataset: Dataset, batch_size: int):
    return dataset.astuple()
Пример #4
0
 def _assert_data(self, dataset: Dataset) -> None:
     # Check that *dataset* equals the reference data sliced to this model's
     # output dimensions.
     qp, obs = dataset.astuple()
     # NOTE(review): `data` is not defined in this method's scope — presumably
     # a module/class-level fixture, or possibly a bug for `dataset`; confirm
     # against the enclosing file before relying on this.
     expected_obs = data.observations[..., self._output_dims]
     assert_datasets_allclose(dataset, Dataset(qp, expected_obs))
Пример #5
0
def _batcher_full_batch(dataset: Dataset,
                        batch_size: int) -> tuple[TensorType, TensorType]:
    return dataset.astuple()