Example #1
def test_expected_improvement_builder_builds_expected_improvement(
        query_at: tf.Tensor) -> None:
    dataset = Dataset(tf.constant([[-2.], [-1.], [0.], [1.], [2.]]),
                      tf.zeros([5, 1]))
    model = QuadraticWithUnitVariance()
    builder = ExpectedImprovement()
    acq_fn = builder.prepare_acquisition_function(dataset, model)
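    # All observations are zero, so the incumbent eta passed to expected_improvement below is 0.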
    expected = expected_improvement(model, tf.constant([0.]), query_at)
    npt.assert_array_almost_equal(acq_fn(query_at), expected)
Example #2
def test_expected_constrained_improvement_min_feasibility_probability_bound_is_inclusive(
) -> None:
    pof = tfp.bijectors.Sigmoid().forward

    class _Constraint(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
                self, datasets: Mapping[str, Dataset],
                models: Mapping[str,
                                ProbabilisticModel]) -> AcquisitionFunction:
            return pof

    models_ = {"foo": QuadraticMeanAndRBFKernel()}

    data = {
        "foo": Dataset(tf.constant([[1.1], [2.0]]), tf.constant([[1.21],
                                                                 [4.0]]))
    }
    eci = ExpectedConstrainedImprovement(
        "foo", _Constraint(),
        min_feasibility_probability=pof(1.0)).prepare_acquisition_function(
            data, models_)

    ei = ExpectedImprovement().using("foo").prepare_acquisition_function(
        data, models_)

    x = tf.constant([[1.5]])
    npt.assert_allclose(eci(x), ei(x) * pof(x))
Example #3
def test_batch_monte_carlo_expected_improvement_can_reproduce_ei() -> None:
    known_query_points = tf.random.uniform([5, 2], dtype=tf.float64)
    data = Dataset(known_query_points, quadratic(known_query_points))
    model = QuadraticMeanAndRBFKernel()
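    # xs holds batches of size one (axis -2), so batch Monte Carlo EI should agree with analytic EI up to sampling error.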
    batch_ei = BatchMonteCarloExpectedImprovement(10_000).prepare_acquisition_function(
        data, model
    )
    ei = ExpectedImprovement().prepare_acquisition_function(data, model)
    xs = tf.random.uniform([3, 5, 1, 2], dtype=tf.float64)
    npt.assert_allclose(batch_ei(xs), ei(tf.squeeze(xs, -2)), rtol=0.03)
Example #4
def test_expected_improvement_builder_builds_expected_improvement_using_best_from_model() -> None:
    dataset = Dataset(
        tf.constant([[-2.0], [-1.0], [0.0], [1.0], [2.0]]),
        tf.constant([[4.1], [0.9], [0.1], [1.1], [3.9]]),
    )
    model = QuadraticMeanAndRBFKernel()
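    # The incumbent is the minimum of the model's predicted mean over the data (0.0 at x = 0), not the best observation (0.1).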
    acq_fn = ExpectedImprovement().prepare_acquisition_function(dataset, model)
    xs = tf.linspace([-10.0], [10.0], 100)
    expected = expected_improvement(model, tf.constant([0.0]), xs)
    npt.assert_allclose(acq_fn(xs), expected)
Example #5
def test_reducers_on_ei(reducer):
    m = 6
    zero = tf.convert_to_tensor([0.0], dtype=tf.float64)
    model = QuadraticWithUnitVariance()
    acqs = [ExpectedImprovement().using("foo") for _ in range(m)]
    acq = reducer.type_class(*acqs)
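    # Reducing m copies of the same EI function should match applying the raw reduction to the individual EI values.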
    acq_fn = acq.prepare_acquisition_function({"foo": reducer.dataset}, {"foo": model})
    individual_ei = [expected_improvement(model, zero, reducer.query_point) for _ in range(m)]
    expected = reducer.raw_reduce_op(individual_ei)
    actual = acq_fn(reducer.query_point)
    np.testing.assert_array_almost_equal(actual, expected)
Example #6
def test_expected_constrained_improvement_is_relative_to_feasible_point() -> None:
    class _Constraint(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return lambda x: tf.cast(x >= 0, x.dtype)

    models_ = {"foo": QuadraticMeanAndRBFKernel()}

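    # Under the constraint x >= 0 only the point at 0.3 is feasible, so ECI should match EI built from that point alone.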
    eci_data = {"foo": Dataset(tf.constant([[-0.2], [0.3]]), tf.constant([[0.04], [0.09]]))}
    eci = ExpectedConstrainedImprovement("foo", _Constraint()).prepare_acquisition_function(
        eci_data, models_
    )

    ei_data = {"foo": Dataset(tf.constant([[0.3]]), tf.constant([[0.09]]))}
    ei = ExpectedImprovement().using("foo").prepare_acquisition_function(ei_data, models_)

    npt.assert_allclose(eci(tf.constant([[0.1]])), ei(tf.constant([[0.1]])))
Example #7
def test_expected_constrained_improvement_can_reproduce_expected_improvement() -> None:
    class _Certainty(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return tf.ones_like

    data = {"foo": Dataset(tf.constant([[0.5]]), tf.constant([[0.25]]))}
    models_ = {"foo": QuadraticMeanAndRBFKernel()}

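    # A constraint that is certain everywhere, with a zero feasibility threshold, should make ECI collapse to plain EI.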
    eci = ExpectedConstrainedImprovement("foo", _Certainty(), 0).prepare_acquisition_function(
        data, models_
    )

    ei = ExpectedImprovement().using("foo").prepare_acquisition_function(data, models_)

    at = tf.constant([[-0.1], [1.23], [-6.78]])
    npt.assert_allclose(eci(at), ei(at))
Example #8
def test_augmented_expected_improvement_builder_builds_expected_improvement_times_augmentation(
    observation_noise: float,
) -> None:
    dataset = Dataset(
        tf.constant([[-2.0], [-1.0], [0.0], [1.0], [2.0]]),
        tf.constant([[4.1], [0.9], [0.1], [1.1], [3.9]]),
    )

    model = QuadraticMeanAndRBFKernel(noise_variance=observation_noise)
    acq_fn = AugmentedExpectedImprovement().prepare_acquisition_function(
        dataset, model)

    xs = tf.linspace([[-10.0]], [[10.0]], 100)
    ei = ExpectedImprovement().prepare_acquisition_function(dataset, model)(xs)

    _, variance = model.predict(tf.squeeze(xs, -2))
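    # Augmented EI rescales EI by 1 - sqrt(noise variance) / sqrt(noise variance + predictive variance).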
    augmentation = 1.0 - (tf.math.sqrt(observation_noise)) / (
        tf.math.sqrt(observation_noise + variance))
    npt.assert_allclose(acq_fn(xs), ei * augmentation)
Example #9
def test_expected_improvement_builder_raises_for_empty_data() -> None:
    data = Dataset(tf.zeros([0, 1]), tf.ones([0, 1]))

    with pytest.raises(ValueError):
        ExpectedImprovement().prepare_acquisition_function(data, QuadraticMeanAndRBFKernel())
Example #10
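    # Monte Carlo estimate of batch EI: mean over samples of the largest improvement on the incumbent within each batch.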
    # fmt: off
    expected = tf.reduce_mean(tf.reduce_max(tf.maximum(
        min_predictive_mean_at_known_points - mvn_samples, 0.0
    ), axis=-1), axis=0)
    # fmt: on

    builder = BatchMonteCarloExpectedImprovement(10_000)
    acq = builder.prepare_acquisition_function(mk_dataset([[0.3], [0.5]], [[0.09], [0.25]]), model)

    npt.assert_allclose(acq(xs), expected, rtol=0.05)


@pytest.mark.parametrize(
    "function, function_repr",
    [
        (ExpectedImprovement(), "ExpectedImprovement()"),
        (NegativeLowerConfidenceBound(1.96), "NegativeLowerConfidenceBound(1.96)"),
        (NegativePredictiveMean(), "NegativePredictiveMean()"),
        (ProbabilityOfFeasibility(0.5), "ProbabilityOfFeasibility(0.5)"),
        (ExpectedHypervolumeImprovement(), "ExpectedHypervolumeImprovement()"),
        (
            BatchMonteCarloExpectedImprovement(10_000),
            f"BatchMonteCarloExpectedImprovement(10000, jitter={DEFAULTS.JITTER})",
        ),
    ],
)
def test_single_model_acquisition_function_builder_reprs(function, function_repr) -> None:
    assert repr(function) == function_repr
    assert repr(function.using("TAG")) == f"{function_repr} using tag 'TAG'"
    assert (
        repr(ExpectedConstrainedImprovement("TAG", function.using("TAG"), 0.0))