Example #1
def test_probability_of_feasibility_builder_builds_pof(threshold: float,
                                                       at: tf.Tensor) -> None:
    builder = ProbabilityOfFeasibility(threshold)
    acq = builder.prepare_acquisition_function(zero_dataset(),
                                               QuadraticWithUnitVariance())
    expected = probability_of_feasibility(QuadraticWithUnitVariance(),
                                          threshold, at)
    npt.assert_allclose(acq(at), expected)
Example #2
def test_probability_of_feasibility_builder_builds_pof(threshold: float,
                                                       at: tf.Tensor) -> None:
    builder = ProbabilityOfFeasibility(threshold)
    acq = builder.prepare_acquisition_function(empty_dataset([1], [1]),
                                               QuadraticMeanAndRBFKernel())
    expected = probability_of_feasibility(QuadraticMeanAndRBFKernel(),
                                          threshold, at)
    npt.assert_allclose(acq(at), expected)
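Examples #1 and #2 pin the builder's output to the direct formula: for a model with Gaussian predictive marginals, the probability of feasibility at a point is the normal CDF evaluated at the threshold. A minimal sketch of that quantity (an illustrative helper, not trieste's implementation):

import tensorflow as tf
import tensorflow_probability as tfp

def reference_pof(mean: tf.Tensor, variance: tf.Tensor, threshold: float) -> tf.Tensor:
    # P(f(x) <= threshold) under the Gaussian marginal N(mean, variance)
    normal = tfp.distributions.Normal(mean, tf.sqrt(variance))
    return normal.cdf(tf.cast(threshold, mean.dtype))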
Example #3
def test_expected_constrained_improvement_raises_for_invalid_batch_size(at: TensorType) -> None:
    pof = ProbabilityOfFeasibility(0.0).using("")
    builder = ExpectedConstrainedImprovement("", pof, tf.constant(0.0))
    initial_query_points = tf.constant([[-1.0]])
    initial_objective_function_values = tf.constant([[1.0]])
    data = {"": Dataset(initial_query_points, initial_objective_function_values)}

    eci = builder.prepare_acquisition_function(data, {"": QuadraticMeanAndRBFKernel()})

    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        eci(at)
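The invalid batch size here is carried by the at fixture: ExpectedConstrainedImprovement only evaluates query-point batches of size one, so inputs shaped [..., B, D] with B > 1 trip the debug-time shape check. A hedged illustration, with shapes inferred from this test rather than a spec:

eci(tf.constant([[[0.0]]]))  # shape [1, 1, 1], batch size one: accepted
with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
    eci(tf.constant([[[0.0], [1.0]]]))  # shape [1, 2, 1], batch size two: rejected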
Example #4
def test_expected_constrained_improvement_raises_for_non_scalar_min_pof() -> None:
    pof = ProbabilityOfFeasibility(0.0).using("")
    with pytest.raises(ValueError):
        ExpectedConstrainedImprovement("", pof, tf.constant([0.0]))
Example #5
def test_probability_of_feasibility_builder_raises_on_non_scalar_threshold(
    shape: ShapeLike,
) -> None:
    threshold = tf.ones(shape)
    with pytest.raises(ValueError):
        ProbabilityOfFeasibility(threshold)
Example #6
def test_optimizer_finds_minima_of_Gardners_Simulation_1(
    num_steps: int, acquisition_function_builder
) -> None:
    """
    Test that tests the covergence of constrained BO algorithms on the
    synthetic "simulation 1" experiment of :cite:`gardner14`.
    """
    search_space = Box([0, 0], [6, 6])

    def objective(input_data):
        x, y = input_data[..., -2], input_data[..., -1]
        z = tf.cos(2.0 * x) * tf.cos(y) + tf.sin(x)
        return z[:, None]

    def constraint(input_data):
        x, y = input_data[:, -2], input_data[:, -1]
        z = tf.cos(x) * tf.cos(y) - tf.sin(x) * tf.sin(y)
        return z[:, None]

    MINIMUM = -2.0
    MINIMIZER = [math.pi * 1.5, 0.0]

    OBJECTIVE = "OBJECTIVE"
    CONSTRAINT = "CONSTRAINT"

    def observer(query_points):  # observe both objective and constraint data
        return {
            OBJECTIVE: Dataset(query_points, objective(query_points)),
            CONSTRAINT: Dataset(query_points, constraint(query_points)),
        }

    num_initial_points = 5
    initial_data = observer(search_space.sample(num_initial_points))

    def build_model(data):
        variance = tf.math.reduce_variance(data.observations)
        kernel = gpflow.kernels.Matern52(variance, tf.constant([0.2, 0.2], tf.float64))
        gpr = gpflow.models.GPR((data.query_points, data.observations), kernel, noise_variance=1e-5)
        gpflow.utilities.set_trainable(gpr.likelihood, False)
        return GaussianProcessRegression(gpr)

    models = map_values(build_model, initial_data)

    pof = ProbabilityOfFeasibility(threshold=0.5)
    acq = acquisition_function_builder(OBJECTIVE, pof.using(CONSTRAINT))
    rule: EfficientGlobalOptimization[Box] = EfficientGlobalOptimization(acq)

    dataset = (
        BayesianOptimizer(observer, search_space)
        .optimize(num_steps, initial_data, models, rule)
        .try_get_final_datasets()[OBJECTIVE]
    )

    arg_min_idx = tf.squeeze(tf.argmin(dataset.observations, axis=0))

    best_y = dataset.observations[arg_min_idx]
    best_x = dataset.query_points[arg_min_idx]

    relative_minimizer_err = tf.abs(best_x - MINIMIZER)
    # these accuracies are the current best for the given number of optimization steps, which makes
    # this a regression test
    assert tf.reduce_all(relative_minimizer_err < 0.03, axis=-1)
    npt.assert_allclose(best_y, MINIMUM, rtol=0.03)
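A quick arithmetic check of the constants used above (plain Python, not part of the test): at the claimed minimizer x = 3π/2, y = 0, the objective is cos(3π)cos(0) + sin(3π/2) = −1 − 1 = −2, matching MINIMUM, and the constraint cos(x)cos(y) − sin(x)sin(y) = cos(x + y) gives cos(3π/2) ≈ 0, below the 0.5 feasibility threshold passed to ProbabilityOfFeasibility.

import math

x, y = math.pi * 1.5, 0.0
assert abs(math.cos(2.0 * x) * math.cos(y) + math.sin(x) - (-2.0)) < 1e-9  # objective == MINIMUM
assert math.cos(x) * math.cos(y) - math.sin(x) * math.sin(y) <= 0.5  # constraint is feasible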
Example #7
def test_expected_constrained_improvement_raises_for_out_of_range_min_pof() -> None:
    pof = ProbabilityOfFeasibility(0.0).using("")
    with pytest.raises(ValueError):
        ExpectedConstrainedImprovement("", pof, 1.5)
Example #8
def test_batch_monte_carlo_expected_improvement() -> None:
    xs = tf.random.uniform([3, 5, 7, 2], dtype=tf.float64)
    model = _dim_two_gp()
    mean, cov = model.predict_joint(xs)
    mvn = tfp.distributions.MultivariateNormalFullCovariance(
        tf.linalg.matrix_transpose(mean), cov
    )
    mvn_samples = mvn.sample(10_000)
    min_predictive_mean_at_known_points = 0.09
    # fmt: off
    expected = tf.reduce_mean(tf.reduce_max(tf.maximum(
        min_predictive_mean_at_known_points - mvn_samples, 0.0
    ), axis=-1), axis=0)
    # fmt: on

    builder = BatchMonteCarloExpectedImprovement(10_000)
    acq = builder.prepare_acquisition_function(mk_dataset([[0.3], [0.5]], [[0.09], [0.25]]), model)

    npt.assert_allclose(acq(xs), expected, rtol=0.05)
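Example #8 checks the acquisition value against the direct Monte-Carlo estimator of batch expected improvement: draw joint posterior samples over each candidate batch, take the largest improvement on the best observed mean within each sample, and average across samples. A toy standalone version of that estimator (a sketch with illustrative names, not trieste code):

import numpy as np

def batch_ei_monte_carlo(samples: np.ndarray, eta: float) -> np.ndarray:
    # samples: [num_samples, ..., batch_size] joint draws from the model posterior
    # eta: best (lowest) predictive mean at the already-observed points
    improvement = np.maximum(eta - samples, 0.0)  # pointwise improvement per draw
    return improvement.max(axis=-1).mean(axis=0)  # best in batch, averaged over draws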


@pytest.mark.parametrize(
    "function, function_repr",
    [
        (ExpectedImprovement(), "ExpectedImprovement()"),
        (NegativeLowerConfidenceBound(1.96), "NegativeLowerConfidenceBound(1.96)"),
        (NegativePredictiveMean(), "NegativePredictiveMean()"),
        (ProbabilityOfFeasibility(0.5), "ProbabilityOfFeasibility(0.5)"),
        (ExpectedHypervolumeImprovement(), "ExpectedHypervolumeImprovement()"),
        (
            BatchMonteCarloExpectedImprovement(10_000),
            f"BatchMonteCarloExpectedImprovement(10000, jitter={DEFAULTS.JITTER})",
        ),
    ],
)
def test_single_model_acquisition_function_builder_reprs(function, function_repr) -> None:
    assert repr(function) == function_repr
    assert repr(function.using("TAG")) == f"{function_repr} using tag 'TAG'"
    assert (
        repr(ExpectedConstrainedImprovement("TAG", function.using("TAG"), 0.0))
        == f"ExpectedConstrainedImprovement('TAG', {function_repr} using tag 'TAG', 0.0)"
    )
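The using wrapper exercised in this repr test is what connects the single-model builders above to the tagged multi-model interface: it selects one entry from the dataset and model mappings by tag. A hedged usage sketch, where the tag and the dataset/model values are illustrative:

pof = ProbabilityOfFeasibility(0.5).using("CONSTRAINT")
acq = pof.prepare_acquisition_function(
    {"CONSTRAINT": constraint_dataset},  # hypothetical Dataset of constraint observations
    {"CONSTRAINT": constraint_model},  # hypothetical ProbabilisticModel of the constraint
)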