Example no. 1
def test_sparse_variational_optimize_with_defaults() -> None:
    x_observed = np.linspace(0, 100, 100).reshape((-1, 1))
    y_observed = _3x_plus_gaussian_noise(x_observed)
    data = x_observed, y_observed
    dataset = Dataset(*data)
    optimizer = create_optimizer(tf.optimizers.Adam(), dict(max_iter=20))
    model = SparseVariational(_svgp(x_observed[:10]), dataset, optimizer=optimizer)
    loss = model.model.training_loss(data)  # training loss before optimization
    model.optimize(dataset)
    # optimization should reduce the training loss
    assert model.model.training_loss(data) < loss
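The helpers _3x_plus_gaussian_noise and _svgp used above (and again in Examples no. 3 and no. 5) are defined elsewhere in the test module. A minimal sketch of what they might look like, assuming a noisy linear target and a standard GPflow SVGP with a Gaussian likelihood; the kernel choice, noise scale and num_data value are illustrative assumptions, not taken from the actual suite:

import numpy as np
import gpflow


def _3x_plus_gaussian_noise(x: np.ndarray) -> np.ndarray:
    # noisy linear target: y = 3x + eps, with eps ~ N(0, 1)
    return 3.0 * x + np.random.standard_normal(x.shape)


def _svgp(inducing_points: np.ndarray) -> gpflow.models.SVGP:
    # sparse variational GP built on the given inducing points
    return gpflow.models.SVGP(
        gpflow.kernels.Matern32(),
        gpflow.likelihoods.Gaussian(),
        inducing_points,
        num_data=100,  # assumed: the observation count used in these tests
    )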
Example no. 2
def test_gaussian_process_regression_optimize(
    optimizer: gpflow.optimizers.Scipy | tf.optimizers.Optimizer | None, gpr_interface_factory
) -> None:
    data = _mock_data()
    optimizer_wrapper = create_optimizer(optimizer, {})
    model = gpr_interface_factory(*data, optimizer=optimizer_wrapper)
    internal_model = model.model
    loss = internal_model.training_loss()
    model.optimize(Dataset(*data))
    assert internal_model.training_loss() < loss
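_mock_data is another small fixture not shown in these excerpts. Any tiny (x, y) pair of column tensors in GPflow's default float type would fit the way it is used; the values below are purely hypothetical:

import tensorflow as tf
import gpflow


def _mock_data() -> tuple[tf.Tensor, tf.Tensor]:
    # hypothetical stand-in for the fixture's data; the real values may differ
    return (
        tf.constant([[1.0], [2.0], [3.0], [4.0]], dtype=gpflow.default_float()),
        tf.constant([[1.0], [4.0], [9.0], [16.0]], dtype=gpflow.default_float()),
    )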
Example no. 3
def test_sparse_variational_optimize(batcher, compile: bool) -> None:
    x_observed = np.linspace(0, 100, 100).reshape((-1, 1))
    y_observed = _3x_plus_gaussian_noise(x_observed)
    data = x_observed, y_observed
    dataset = Dataset(*data)

    optimizer = create_optimizer(
        tf.optimizers.Adam(),
        dict(max_iter=20, batch_size=10, dataset_builder=batcher, compile=compile),
    )
    model = SparseVariational(_svgp(x_observed[:10]), dataset, optimizer=optimizer)
    loss = model.model.training_loss(data)
    model.optimize(dataset)
    assert model.model.training_loss(data) < loss
Example no. 4
def test_gaussian_process_regression_optimize(
    optimizer: gpflow.optimizers.Scipy | tf.optimizers.Optimizer,
    gpflow_interface_factory: ModelFactoryType,
    compile: bool,
) -> None:
    data = mock_data()
    optimizer_wrapper = create_optimizer(optimizer, dict(compile=compile))
    model, _ = gpflow_interface_factory(*data, optimizer=optimizer_wrapper)
    internal_model = model.model
    if isinstance(internal_model, SVGP):
        # SVGP does not store the data, so its training loss needs it passed explicitly
        args = {"data": data}
    else:
        args = {}
    loss = internal_model.training_loss(**args)
    model.optimize(Dataset(*data))
    assert internal_model.training_loss(**args) < loss
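The gpflow_interface_factory fixture (typed ModelFactoryType) is parametrized over several GPflow model wrappers and returns both a wrapped Trieste model and a callable that builds the corresponding unwrapped reference model. A rough, hypothetical sketch for the GPR case only; the wrapper import path, kernel and defaults are assumptions rather than the real fixture:

import gpflow
from trieste.models.gpflow import GaussianProcessRegression


def gpr_factory(x, y, optimizer=None):
    # hypothetical factory: wrap a plain GPR in the Trieste interface and expose
    # the raw GPflow constructor as the reference-model builder
    def build_gpr(x_, y_) -> gpflow.models.GPR:
        return gpflow.models.GPR((x_, y_), gpflow.kernels.Matern32())

    return GaussianProcessRegression(build_gpr(x, y), optimizer=optimizer), build_gpr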
Example no. 5
def test_vgp_optimize_with_and_without_natgrads(
    batcher: DatasetTransformer, compile: bool, use_natgrads: bool
) -> None:
    x_observed = np.linspace(0, 100, 100).reshape((-1, 1))
    y_observed = _3x_plus_gaussian_noise(x_observed)
    data = x_observed, y_observed
    dataset = Dataset(*data)

    optimizer = create_optimizer(
        tf.optimizers.Adam(),
        dict(max_iter=10, batch_size=10, dataset_builder=batcher, compile=compile),
    )
    model = VariationalGaussianProcess(
        vgp_model(x_observed[:10], y_observed[:10]), optimizer=optimizer, use_natgrads=use_natgrads
    )
    loss = model.model.training_loss()
    model.optimize(dataset)
    assert model.model.training_loss() < loss
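vgp_model, used only in this example, presumably builds a plain GPflow VGP on the given data. A minimal sketch, again with an assumed kernel and a Gaussian likelihood:

import gpflow


def vgp_model(x, y) -> gpflow.models.VGP:
    # variational GP on the given data; the Matern kernel is an illustrative choice
    return gpflow.models.VGP((x, y), gpflow.kernels.Matern32(), gpflow.likelihoods.Gaussian())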
Example no. 6
def test_gaussian_process_regression_ref_optimize(
    gpflow_interface_factory: ModelFactoryType,
) -> None:
    x = tf.constant(np.arange(5).reshape(-1, 1), dtype=gpflow.default_float())
    y = fnc_2sin_x_over_3(x)

    model, _reference_model = gpflow_interface_factory(
        x, y, optimizer=create_optimizer(gpflow.optimizers.Scipy(), {})
    )

    reference_model = _reference_model(x, y)
    model.optimize(Dataset(x, y))
    internal_model = model.model

    if isinstance(internal_model, SVGP):
        args = {"data": (x, y)}
    else:
        args = {}
        # re-store the reference model's data as non-trainable Variables with an
        # unconstrained leading dimension, mirroring how the Trieste wrapper holds
        # the internal model's data
        reference_model.data = (
            tf.Variable(
                reference_model.data[0],
                trainable=False,
                shape=[None, *reference_model.data[0].shape[1:]],
            ),
            tf.Variable(
                reference_model.data[1],
                trainable=False,
                shape=[None, *reference_model.data[1].shape[1:]],
            ),
        )

    gpflow.optimizers.Scipy().minimize(
        reference_model.training_loss_closure(**args, compile=False),
        reference_model.trainable_variables,
    )

    npt.assert_allclose(
        internal_model.training_loss(**args), reference_model.training_loss(**args), rtol=1e-6
    )
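Going by its name, fnc_2sin_x_over_3 is presumably just 2 * sin(x / 3); a sketch consistent with that reading:

import tensorflow as tf


def fnc_2sin_x_over_3(x: tf.Tensor) -> tf.Tensor:
    # 2 * sin(x / 3), matching the helper's name
    return 2.0 * tf.sin(x / 3.0)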
Example no. 7
def test_create_optimizer_tf_produces_correct_optimizer() -> None:
    optim = create_optimizer(tf.optimizers.Adam(), {})
    assert isinstance(optim, BatchOptimizer)
Example no. 8
def test_create_optimizer_raises_on_none() -> None:
    with pytest.raises(NotImplementedError):
        create_optimizer(None, {})
Example no. 9
def test_create_optimizer_scipy_produces_correct_optimizer() -> None:
    optim = create_optimizer(gpflow.optimizers.Scipy(), {})
    assert isinstance(optim, Optimizer) and not isinstance(optim, BatchOptimizer)
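Examples no. 7 to no. 9 together pin down the dispatch behaviour of create_optimizer: a TensorFlow optimizer is wrapped in a BatchOptimizer, a Scipy optimizer in a plain Optimizer, and None is rejected with NotImplementedError. One way such dispatch could be written, shown purely as an illustration using functools.singledispatch (Trieste's actual implementation may differ):

import functools

import gpflow
import tensorflow as tf
from trieste.models.optimizer import BatchOptimizer, Optimizer


@functools.singledispatch
def create_optimizer_sketch(optimizer, optimizer_args: dict) -> Optimizer:
    # fallback: unsupported optimizer types (including None) are rejected
    raise NotImplementedError(f"Unsupported optimizer type {type(optimizer)}")


@create_optimizer_sketch.register
def _(optimizer: gpflow.optimizers.Scipy, optimizer_args: dict) -> Optimizer:
    # full-batch optimization with Scipy; extra arguments are forwarded to the wrapper
    return Optimizer(optimizer, **optimizer_args)


@create_optimizer_sketch.register
def _(optimizer: tf.optimizers.Optimizer, optimizer_args: dict) -> BatchOptimizer:
    # gradient-based TensorFlow optimizers get mini-batching support
    return BatchOptimizer(optimizer, **optimizer_args)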