def test_model_stack_missing_predict_y() -> None:
    x = tf.constant(np.arange(5).reshape(-1, 1), dtype=gpflow.default_float())
    model1 = _gpr(x, _3x_plus_10(x))
    model2 = _QuadraticModel([1.0], [2.0])
    stack = ModelStack((model1, 1), (model2, 1))
    x_predict = tf.constant([[0]], gpflow.default_float())
    with pytest.raises(NotImplementedError):
        stack.predict_y(x_predict)
def test_model_stack_predict_y() -> None:
    x = tf.constant(np.arange(5).reshape(-1, 1), dtype=gpflow.default_float())
    model1 = _gpr(x, _3x_plus_10(x))
    model2 = _sgpr(x, _2sin_x_over_3(x))
    stack = ModelStack((model1, 1), (model2, 1))
    mean, variance = stack.predict_y(x)
    npt.assert_allclose(mean[:, 0:1], model1.predict_y(x)[0])
    npt.assert_allclose(mean[:, 1:2], model2.predict_y(x)[0])
    npt.assert_allclose(variance[:, 0:1], model1.predict_y(x)[1])
    npt.assert_allclose(variance[:, 1:2], model2.predict_y(x)[1])
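# Illustrative sketch (not part of the original suite): the same concatenation
# behaviour is assumed to hold for ModelStack.predict, i.e. the stacked mean and
# variance are the per-model outputs joined along the last (output) axis. The
# test name below is hypothetical.
def test_model_stack_predict_concatenates_outputs() -> None:
    x = tf.constant(np.arange(5).reshape(-1, 1), dtype=gpflow.default_float())
    model1 = _gpr(x, _3x_plus_10(x))
    model2 = _sgpr(x, _2sin_x_over_3(x))
    stack = ModelStack((model1, 1), (model2, 1))
    mean, variance = stack.predict(x)
    # each sub-model contributes one output column, so both tensors have shape [5, 2]
    npt.assert_allclose(mean, tf.concat([model1.predict(x)[0], model2.predict(x)[0]], axis=-1))
    npt.assert_allclose(
        variance, tf.concat([model1.predict(x)[1], model2.predict(x)[1]], axis=-1)
    )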
def test_model_stack_training() -> None:
    class Model(GaussianProcess, TrainableProbabilisticModel):
        def __init__(
            self,
            mean_functions: Sequence[Callable[[TensorType], TensorType]],
            kernels: Sequence[tfp.math.psd_kernels.PositiveSemidefiniteKernel],
            output_dims: slice,
        ):
            super().__init__(mean_functions, kernels)
            self._output_dims = output_dims

        def _assert_data(self, dataset: Dataset) -> None:
            qp, obs = dataset.astuple()
            expected_obs = data.observations[..., self._output_dims]
            assert_datasets_allclose(dataset, Dataset(qp, expected_obs))

        optimize = _assert_data
        update = _assert_data

    rbf = tfp.math.psd_kernels.ExponentiatedQuadratic()
    model01 = Model([quadratic, quadratic], [rbf, rbf], slice(0, 2))
    model2 = Model([quadratic], [rbf], slice(2, 3))
    model3 = Model([quadratic], [rbf], slice(3, 4))

    stack = ModelStack((model01, 2), (model2, 1), (model3, 1))
    data = Dataset(tf.random.uniform([5, 7, 3]), tf.random.uniform([5, 7, 4]))
    stack.update(data)
    stack.optimize(data)
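# A minimal sketch of the slicing that the test above checks: with 4 observation
# outputs and sub-models covering 2, 1 and 1 outputs, each sub-model is expected
# to receive only its own slice of the observations. The helper name is
# illustrative and the slices mirror the `output_dims` used above.
def _split_observations_example() -> None:
    data = Dataset(tf.random.uniform([5, 7, 3]), tf.random.uniform([5, 7, 4]))
    obs01 = data.observations[..., 0:2]  # what `model01` should see
    obs2 = data.observations[..., 2:3]  # what `model2` should see
    obs3 = data.observations[..., 3:4]  # what `model3` should see
    assert obs01.shape[-1] == 2 and obs2.shape[-1] == 1 and obs3.shape[-1] == 1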
def _model_stack() -> tuple[ModelStack, tuple[TrainableProbabilisticModel, ...]]:
    class Model(GaussianProcess, PseudoTrainableProbModel):
        def __init__(self, mean_shifts: list[float], kernel_amplitudes: list[float]):
            super().__init__(
                [(lambda y: lambda x: quadratic(x) + y)(shift) for shift in mean_shifts],
                [tfp.math.psd_kernels.ExponentiatedQuadratic(x) for x in kernel_amplitudes],
            )

    model01 = Model([0.0, 0.5], [1.0, 0.3])
    model2 = Model([2.0], [2.0])
    model3 = Model([-1.0], [0.1])
    return ModelStack((model01, 2), (model2, 1), (model3, 1)), (model01, model2, model3)
def build_stacked_independent_objectives_model(data: Dataset) -> ModelStack:
    gprs = []

    for idx in range(2):
        single_obj_data = Dataset(
            data.query_points, tf.gather(data.observations, [idx], axis=1)
        )
        variance = tf.math.reduce_variance(single_obj_data.observations)
        kernel = gpflow.kernels.Matern52(variance, tf.constant([0.2, 0.2], tf.float64))
        gpr = gpflow.models.GPR(single_obj_data.astuple(), kernel, noise_variance=1e-5)
        gpflow.utilities.set_trainable(gpr.likelihood, False)
        gprs.append((GaussianProcessRegression(gpr), 1))

    return ModelStack(*gprs)
def build_stacked_independent_objectives_model(data: Dataset, num_output: int) -> ModelStack:
    gprs = []

    for idx in range(num_output):
        single_obj_data = Dataset(
            data.query_points, tf.gather(data.observations, [idx], axis=1)
        )
        variance = tf.math.reduce_variance(single_obj_data.observations)
        kernel = gpflow.kernels.Matern52(variance)
        gpr = gpflow.models.GPR(
            (single_obj_data.query_points, single_obj_data.observations),
            kernel,
            noise_variance=1e-5,
        )
        gpflow.utilities.set_trainable(gpr.likelihood, False)
        gprs.append(
            (
                create_model(
                    {
                        "model": gpr,
                        "optimizer": gpflow.optimizers.Scipy(),
                        "optimizer_args": {"minimize_args": {"options": dict(maxiter=100)}},
                    }
                ),
                1,
            )
        )

    return ModelStack(*gprs)
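# A usage sketch, assuming `data` holds two objectives observed at random query
# points. The query points and objective values below are synthetic placeholders
# (and the helper name is hypothetical), purely to show how the builder is called.
def _example_build_stack() -> ModelStack:
    query_points = tf.random.uniform([10, 2], dtype=tf.float64)
    observations = tf.random.uniform([10, 2], dtype=tf.float64)
    initial_data = Dataset(query_points, observations)
    # one independent GPR per objective, stacked into a single multi-output model
    return build_stacked_independent_objectives_model(initial_data, num_output=2)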
def _model_stack() -> tuple[ModelStack, tuple[TrainableProbabilisticModel, ...]]:
    model01 = _QuadraticModel([0.0, 0.5], [1.0, 0.3])
    model2 = _QuadraticModel([2.0], [2.0])
    model3 = _QuadraticModel([-1.0], [0.1])
    return ModelStack((model01, 2), (model2, 1), (model3, 1)), (model01, model2, model3)
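# Hedged example of how the `_model_stack` fixture might be consumed: the stacked
# prediction is assumed to have one column per output, i.e. 4 in total (2 from
# `model01`, 1 each from `model2` and `model3`). The test name is illustrative,
# not part of the original suite.
def test_model_stack_predict_has_one_column_per_output() -> None:
    stack, _ = _model_stack()
    query_points = tf.random.uniform([5, 1], dtype=gpflow.default_float())
    mean, variance = stack.predict(query_points)
    assert mean.shape == [5, 4]
    assert variance.shape == [5, 4]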