Example #1
def test_probability_of_feasibility_builder_builds_pof(threshold: float,
                                                       at: tf.Tensor) -> None:
    builder = ProbabilityOfFeasibility(threshold)
    acq = builder.prepare_acquisition_function(zero_dataset(),
                                               QuadraticWithUnitVariance())
    expected = probability_of_feasibility(QuadraticWithUnitVariance(),
                                          threshold, at)
    npt.assert_allclose(acq(at), expected)
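The helpers QuadraticWithUnitVariance and zero_dataset come from the test utilities (tests.util.model and tests.util.misc, see the imports in Example #24) and are not shown in this excerpt. The sketch below is a reconstruction consistent with how the tests use them (a model whose posterior mean is x -> sum(x ** 2) with unit variance everywhere, and a dataset holding a single zero point); it also assumes ProbabilisticModel only requires predict and sample.

import tensorflow as tf

from trieste.data import Dataset
from trieste.models import ProbabilisticModel


class QuadraticWithUnitVariance(ProbabilisticModel):
    """Sketch: posterior mean is sum(x ** 2) over features, variance is 1 everywhere."""

    def predict(self, query_points: tf.Tensor):
        mean = tf.reduce_sum(query_points ** 2, axis=-1, keepdims=True)
        return mean, tf.ones_like(mean)

    def sample(self, query_points: tf.Tensor, num_samples: int) -> tf.Tensor:
        mean, var = self.predict(query_points)
        noise = tf.random.normal([num_samples] + mean.shape.as_list(), dtype=mean.dtype)
        return mean[None, ...] + tf.sqrt(var)[None, ...] * noise


def zero_dataset() -> Dataset:
    """Sketch: a dataset with one query point and one observation, both zero."""
    return Dataset(tf.constant([[0.0]]), tf.constant([[0.0]]))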
Example #2
def test_single_builder_using_passes_on_correct_dataset_and_model() -> None:
    class _Mock(SingleModelAcquisitionBuilder):
        def prepare_acquisition_function(
            self, dataset: Dataset, model: ProbabilisticModel
        ) -> AcquisitionFunction:
            assert dataset is data["foo"]
            assert model is models["foo"]
            return lambda at: at

    builder = _Mock().using("foo")

    data = {"foo": zero_dataset(), "bar": zero_dataset()}
    models = {"foo": QuadraticWithUnitVariance(), "bar": QuadraticWithUnitVariance()}
    builder.prepare_acquisition_function(data, models)
Example #3
def test_trust_region_for_unsuccessful_local_to_global_trust_region_reduced(
) -> None:
    tr = TrustRegion(NegativeLowerConfidenceBound(0).using(OBJECTIVE))
    dataset = Dataset(tf.constant([[0.1, 0.2], [-0.1, -0.2]]),
                      tf.constant([[0.4], [0.5]]))
    lower_bound = tf.constant([-2.2, -1.0])
    upper_bound = tf.constant([1.3, 3.3])
    search_space = Box(lower_bound, upper_bound)

    eps = 0.5 * (search_space.upper - search_space.lower) / 10
    previous_y_min = dataset.observations[0]
    is_global = False
    acquisition_space = Box(dataset.query_points - eps,
                            dataset.query_points + eps)
    previous_state = TrustRegion.State(acquisition_space, eps, previous_y_min,
                                       is_global)

    query_point, current_state = tr.acquire(
        search_space, {OBJECTIVE: dataset},
        {OBJECTIVE: QuadraticWithUnitVariance()}, previous_state)

    npt.assert_array_less(
        current_state.eps,
        previous_state.eps)  # current TR smaller than previous
    assert current_state.is_global
    npt.assert_array_almost_equal(current_state.acquisition_space.lower,
                                  lower_bound)
Example #4
def test_ego(search_space: SearchSpace, expected_minimum: tf.Tensor) -> None:
    ego = EfficientGlobalOptimization(
        NegativeLowerConfidenceBound(0).using(OBJECTIVE))
    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))
    query_point, _ = ego.acquire(search_space, {OBJECTIVE: dataset},
                                 {OBJECTIVE: QuadraticWithUnitVariance()})
    npt.assert_array_almost_equal(query_point, expected_minimum, decimal=5)
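test_ego expects its search_space and expected_minimum arguments from a pytest parametrize decorator that lies outside this excerpt. A hypothetical parametrization, consistent with the quadratic test model whose mean is minimised at the origin, could look like the following; the concrete cases are assumptions, not the original test data.

import pytest
import tensorflow as tf

from trieste.space import Box, DiscreteSearchSpace, SearchSpace


@pytest.mark.parametrize(
    "search_space, expected_minimum",
    [
        # The origin lies inside this box, so EGO with NegativeLowerConfidenceBound(0)
        # should pick it: that is where the quadratic mean is smallest.
        (Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3])), tf.constant([[0.0, 0.0]])),
        # For a discrete space, the candidate closest to the origin wins.
        (
            DiscreteSearchSpace(tf.constant([[-0.5, 0.3], [0.0, 0.1], [1.0, 1.0]])),
            tf.constant([[0.0, 0.1]]),
        ),
    ],
)
def test_ego(search_space: SearchSpace, expected_minimum: tf.Tensor) -> None:
    ...  # body as in Example #4 above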
Example #5
def test_single_builder_raises_immediately_for_wrong_key() -> None:
    builder = _IdentitySingleBuilder().using("foo")

    with pytest.raises(KeyError):
        builder.prepare_acquisition_function(
            {"bar": zero_dataset()}, {"bar": QuadraticWithUnitVariance()}
        )
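_IdentitySingleBuilder is not defined in this excerpt. By analogy with the _Mock builder in Example #2, a plausible reconstruction is a SingleModelAcquisitionBuilder whose acquisition function is the identity; the import location of SingleModelAcquisitionBuilder is assumed here.

from trieste.acquisition.function import (
    AcquisitionFunction,
    SingleModelAcquisitionBuilder,
)
from trieste.data import Dataset
from trieste.models import ProbabilisticModel


class _IdentitySingleBuilder(SingleModelAcquisitionBuilder):
    """Sketch: builds an acquisition function that returns its input unchanged."""

    def prepare_acquisition_function(
        self, dataset: Dataset, model: ProbabilisticModel
    ) -> AcquisitionFunction:
        return lambda at: at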
Example #6
def test_expected_constrained_improvement_min_feasibility_probability_bound_is_inclusive(
) -> None:
    pof = tfp.bijectors.Sigmoid().forward

    class _Constraint(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
                self, datasets: Mapping[str, Dataset],
                models: Mapping[str,
                                ProbabilisticModel]) -> AcquisitionFunction:
            return pof

    models_ = {"foo": QuadraticWithUnitVariance()}

    data = {
        "foo": Dataset(tf.constant([[1.1], [2.0]]), tf.constant([[1.21],
                                                                 [4.0]]))
    }
    eci = ExpectedConstrainedImprovement(
        "foo", _Constraint(),
        min_feasibility_probability=pof(1.0)).prepare_acquisition_function(
            data, models_)

    ei = ExpectedImprovement().using("foo").prepare_acquisition_function(
        data, models_)

    x = tf.constant([[1.5]])
    npt.assert_allclose(eci(x), ei(x) * pof(x))
Example #7
def test_negative_lower_confidence_bound_builder_builds_negative_lower_confidence_bound(
) -> None:
    model = QuadraticWithUnitVariance()
    beta = 1.96
    acq_fn = NegativeLowerConfidenceBound(beta).prepare_acquisition_function(
        Dataset(tf.constant([[]]), tf.constant([[]])), model)
    query_at = tf.constant([[-3.], [-2.], [-1.], [0.], [1.], [2.], [3.]])
    expected = -lower_confidence_bound(model, beta, query_at)
    npt.assert_array_almost_equal(acq_fn(query_at), expected)
Example #8
def test_negative_lower_confidence_bound_builder_builds_negative_lower_confidence_bound() -> None:
    model = QuadraticWithUnitVariance()
    beta = 1.96
    acq_fn = NegativeLowerConfidenceBound(beta).prepare_acquisition_function(
        Dataset(tf.zeros([0, 1]), tf.zeros([0, 1])), model
    )
    query_at = tf.linspace([-10], [10], 100)
    expected = -lower_confidence_bound(model, beta, query_at)
    npt.assert_array_almost_equal(acq_fn(query_at), expected)
Example #9
def test_expected_improvement_builder_builds_expected_improvement(
        query_at: tf.Tensor) -> None:
    dataset = Dataset(tf.constant([[-2.], [-1.], [0.], [1.], [2.]]),
                      tf.zeros([5, 1]))
    model = QuadraticWithUnitVariance()
    builder = ExpectedImprovement()
    acq_fn = builder.prepare_acquisition_function(dataset, model)
    expected = expected_improvement(model, tf.constant([0.]), query_at)
    npt.assert_array_almost_equal(acq_fn(query_at), expected)
Example #10
def test_expected_improvement_builder_builds_expected_improvement_using_best_from_model() -> None:
    dataset = Dataset(
        tf.constant([[-2.0], [-1.0], [0.0], [1.0], [2.0]]),
        tf.constant([[4.1], [0.9], [0.1], [1.1], [3.9]]),
    )
    model = QuadraticWithUnitVariance()
    acq_fn = ExpectedImprovement().prepare_acquisition_function(dataset, model)
    xs = tf.linspace([-10.0], [10.0], 100)
    expected = expected_improvement(model, tf.constant([0.0]), xs)
    npt.assert_allclose(acq_fn(xs), expected)
Example #11
def test_product_reducer_multiplies_tensors(combination, inputs):
    combination_builder, expected_fn = combination
    inputs = [np.array(i) for i in inputs]
    expected = expected_fn(inputs)
    builders = [_InputIdentity(i) for i in inputs]
    reducer = combination_builder(*builders)
    data = Dataset(tf.zeros((1, 1)), tf.zeros((1, 1)))
    prepared_fn = reducer.prepare_acquisition_function(data, QuadraticWithUnitVariance())
    result = prepared_fn(tf.zeros(1))
    np.testing.assert_allclose(result, expected)
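_InputIdentity and the combination parametrization are not shown here. From the way the test uses it, _InputIdentity(i) plausibly builds an acquisition function that ignores its query points and returns the wrapped constant, so that a product reducer over several of them multiplies the constants; the base class chosen below is an assumption.

import tensorflow as tf

from trieste.acquisition.function import (
    AcquisitionFunction,
    SingleModelAcquisitionBuilder,
)
from trieste.data import Dataset
from trieste.models import ProbabilisticModel


class _InputIdentity(SingleModelAcquisitionBuilder):
    """Sketch: always returns the wrapped constant, whatever the query points are."""

    def __init__(self, constant) -> None:
        self._constant = tf.convert_to_tensor(constant)

    def prepare_acquisition_function(
        self, dataset: Dataset, model: ProbabilisticModel
    ) -> AcquisitionFunction:
        return lambda at: self._constant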
Example #12
def test_expected_improvement() -> None:
    def _ei(x: tf.Tensor) -> tf.Tensor:
        n = tfp.distributions.Normal(0, 1)
        return -x * n.cdf(-x) + n.prob(-x)

    query_at = tf.constant([[-2.0], [-1.5], [-1.0], [-0.5], [0.0], [0.5],
                            [1.0], [1.5], [2.0]])
    actual = expected_improvement(QuadraticWithUnitVariance(),
                                  tf.constant([0.]), query_at)
    npt.assert_array_almost_equal(actual, _ei(query_at**2))
Example #13
def test_batch_acquisition_rule_acquire() -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    num_query_points = 4
    rule = BatchAcquisitionRule(num_query_points,
                                _BatchModelMinusMeanMaximumSingleBuilder())
    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))
    query_point, _ = rule.acquire(search_space, {OBJECTIVE: dataset},
                                  {OBJECTIVE: QuadraticWithUnitVariance()})

    npt.assert_allclose(query_point, [[0.0, 0.0]] * num_query_points,
                        atol=1e-3)
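_BatchModelMinusMeanMaximumSingleBuilder is also defined outside this excerpt. Its name and the asserted optimum suggest a batch acquisition function equal to minus the largest model mean within each batch, which is maximised when every point in the batch sits at the quadratic mean's minimum, the origin. The base class, signature, and import locations in the sketch below are assumptions.

from typing import Mapping

import tensorflow as tf

from trieste.acquisition.function import BatchAcquisitionFunctionBuilder
from trieste.acquisition.rule import OBJECTIVE
from trieste.data import Dataset
from trieste.models import ProbabilisticModel


class _BatchModelMinusMeanMaximumSingleBuilder(BatchAcquisitionFunctionBuilder):
    """Sketch: batch acquisition is -(largest model mean within the batch)."""

    def prepare_acquisition_function(
        self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
    ):
        def acquisition(at: tf.Tensor) -> tf.Tensor:
            # at has shape [..., B, D]; the model mean has shape [..., B, 1].
            mean, _ = models[OBJECTIVE].predict(at)
            return -tf.reduce_max(mean, axis=-2)

        return acquisition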
Example #14
def test_reducers_on_lcb(reducer):
    m = 6
    beta = tf.convert_to_tensor(1.96, dtype=tf.float64)
    model = QuadraticWithUnitVariance()
    acqs = [NegativeLowerConfidenceBound(beta).using("foo") for _ in range(m)]
    acq = reducer.type_class(*acqs)
    acq_fn = acq.prepare_acquisition_function({"foo": reducer.dataset}, {"foo": model})
    individual_lcb = [-lower_confidence_bound(model, beta, reducer.query_point) for _ in range(m)]
    expected = reducer.raw_reduce_op(individual_lcb)
    desired = acq_fn(reducer.query_point)
    np.testing.assert_array_almost_equal(expected, desired)
Example #15
def test_reducers_on_ei(reducer):
    m = 6
    zero = tf.convert_to_tensor([0.0], dtype=tf.float64)
    model = QuadraticWithUnitVariance()
    acqs = [ExpectedImprovement().using("foo") for _ in range(m)]
    acq = reducer.type_class(*acqs)
    acq_fn = acq.prepare_acquisition_function({"foo": reducer.dataset}, {"foo": model})
    individual_ei = [expected_improvement(model, zero, reducer.query_point) for _ in range(m)]
    expected = reducer.raw_reduce_op(individual_ei)
    desired = acq_fn(reducer.query_point)
    np.testing.assert_array_almost_equal(desired, expected)
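Examples #14 and #15 rely on a reducer fixture that is not included in this excerpt. A plausible shape for it bundles the reducer class under test with a dataset, a query point, and a NumPy reference reduction. The namedtuple, its field names, and the import path of the Sum and Product reducers below are all assumptions.

import collections

import numpy as np
import pytest
import tensorflow as tf

from trieste.data import Dataset

# The module path of the Sum and Product reducers is assumed; adjust the import
# to wherever the installed trieste version defines them.
from trieste.acquisition.combination import Product, Sum

ReducerTestCase = collections.namedtuple(
    "ReducerTestCase", ["type_class", "dataset", "query_point", "raw_reduce_op"]
)

_DATASET = Dataset(
    tf.constant([[-1.0], [0.0], [1.0]], dtype=tf.float64),
    tf.constant([[1.0], [0.0], [1.0]], dtype=tf.float64),
)
_QUERY_POINT = tf.constant([[0.5]], dtype=tf.float64)


@pytest.fixture(
    params=[
        ReducerTestCase(Sum, _DATASET, _QUERY_POINT, lambda fs: np.sum(fs, axis=0)),
        ReducerTestCase(Product, _DATASET, _QUERY_POINT, lambda fs: np.prod(fs, axis=0)),
    ]
)
def reducer(request):
    return request.param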
Example #16
def test_expected_constrained_improvement_raises_for_empty_data() -> None:
    class _Constraint(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return lambda x: x

    data = {"foo": Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))}
    models_ = {"foo": QuadraticWithUnitVariance()}
    builder = ExpectedConstrainedImprovement("foo", _Constraint())

    with pytest.raises(ValueError):
        builder.prepare_acquisition_function(data, models_)
Example #17
def test_trust_region_for_default_state() -> None:
    tr = TrustRegion(NegativeLowerConfidenceBound(0).using(OBJECTIVE))
    dataset = Dataset(tf.constant([[0.1, 0.2]]), tf.constant([[0.012]]))
    lower_bound = tf.constant([-2.2, -1.0])
    upper_bound = tf.constant([1.3, 3.3])
    search_space = Box(lower_bound, upper_bound)

    query_point, state = tr.acquire(search_space, {OBJECTIVE: dataset},
                                    {OBJECTIVE: QuadraticWithUnitVariance()},
                                    None)

    npt.assert_array_almost_equal(query_point, tf.constant([[0.0, 0.0]]), 5)
    npt.assert_array_almost_equal(state.acquisition_space.lower, lower_bound)
    npt.assert_array_almost_equal(state.acquisition_space.upper, upper_bound)
    npt.assert_array_almost_equal(state.y_min, [0.012])
    assert state.is_global
Example #18
def test_expected_constrained_improvement_is_constraint_when_no_feasible_points() -> None:
    class _Constraint(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return lambda x: tf.cast(tf.logical_and(0.0 <= x, x < 1.0), x.dtype)

    data = {"foo": Dataset(tf.constant([[-2.0], [1.0]]), tf.constant([[4.0], [1.0]]))}
    models_ = {"foo": QuadraticWithUnitVariance()}
    eci = ExpectedConstrainedImprovement("foo", _Constraint()).prepare_acquisition_function(
        data, models_
    )

    constraint_fn = _Constraint().prepare_acquisition_function(data, models_)

    xs = tf.linspace([-10.0], [10.0], 100)  # shape [100, 1], spanning feasible and infeasible regions
    npt.assert_allclose(eci(xs), constraint_fn(xs))
Example #19
def test_expected_constrained_improvement_is_relative_to_feasible_point() -> None:
    class _Constraint(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return lambda x: tf.cast(x >= 0, x.dtype)

    models_ = {"foo": QuadraticWithUnitVariance()}

    eci_data = {"foo": Dataset(tf.constant([[-0.2], [0.3]]), tf.constant([[0.04], [0.09]]))}
    eci = ExpectedConstrainedImprovement("foo", _Constraint()).prepare_acquisition_function(
        eci_data, models_
    )

    ei_data = {"foo": Dataset(tf.constant([[0.3]]), tf.constant([[0.09]]))}
    ei = ExpectedImprovement().using("foo").prepare_acquisition_function(ei_data, models_)

    npt.assert_allclose(eci(tf.constant([[0.1]])), ei(tf.constant([[0.1]])))
Example #20
def test_expected_constrained_improvement_can_reproduce_expected_improvement() -> None:
    class _Certainty(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return tf.ones_like

    data = {"foo": Dataset(tf.constant([[0.5]]), tf.constant([[0.25]]))}
    models_ = {"foo": QuadraticWithUnitVariance()}

    eci = ExpectedConstrainedImprovement("foo", _Certainty(), 0).prepare_acquisition_function(
        data, models_
    )

    ei = ExpectedImprovement().using("foo").prepare_acquisition_function(data, models_)

    at = tf.constant([[-0.1], [1.23], [-6.78]])
    npt.assert_allclose(eci(at), ei(at))
Example #21
def test_probability_of_feasibility(threshold: float, at: tf.Tensor, expected: float) -> None:
    actual = probability_of_feasibility(QuadraticWithUnitVariance(), threshold, at)
    npt.assert_allclose(actual, expected, rtol=1e-4)
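The threshold, at, and expected arguments come from a parametrize decorator outside this excerpt. With mean x² and unit variance, the probability of feasibility is Φ(threshold − x²), where Φ is the standard normal CDF, so a hypothetical parametrization could be:

import pytest
import tensorflow as tf


@pytest.mark.parametrize(
    "threshold, at, expected",
    [
        (0.0, tf.constant([[0.0]]), 0.5),        # Phi(0)
        (2.0, tf.constant([[1.0]]), 0.841345),   # Phi(1)
        (2.0, tf.constant([[0.0]]), 0.977250),   # Phi(2)
        (0.0, tf.constant([[1.0]]), 0.158655),   # Phi(-1)
    ],
)
def test_probability_of_feasibility(threshold: float, at: tf.Tensor, expected: float) -> None:
    ...  # body as in Example #21 above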
Example #22
def test_probability_of_feasibility_raises_on_non_scalar_threshold(shape: ShapeLike) -> None:
    threshold = tf.ones(shape)
    with pytest.raises(ValueError):
        probability_of_feasibility(QuadraticWithUnitVariance(), threshold, tf.constant([[0.0]]))
Example #23
def test_probability_of_feasibility_raises_on_incorrect_at_shape(shape: ShapeLike) -> None:
    at = tf.ones(shape)
    with pytest.raises(ValueError):
        probability_of_feasibility(QuadraticWithUnitVariance(), 0.0, at)
Example #24
from typing import Dict

import pytest

from trieste.acquisition.rule import (  # assumed source module for these names
    OBJECTIVE,
    TrustRegion,
    BatchAcquisitionRule,
    BatchAcquisitionFunction,
)
from trieste.data import Dataset
from trieste.models import ProbabilisticModel
from trieste.space import SearchSpace, DiscreteSearchSpace, Box
from trieste.acquisition.function import AcquisitionFunction, BatchAcquisitionFunctionBuilder

from tests.util.misc import one_dimensional_range, random_seed, zero_dataset
from tests.util.model import QuadraticWithUnitVariance


@pytest.mark.parametrize("datasets", [{}, {"foo": zero_dataset()}])
@pytest.mark.parametrize("models", [{}, {
    "foo": QuadraticWithUnitVariance()
}, {
    OBJECTIVE: QuadraticWithUnitVariance()
}])
def test_trust_region_raises_for_missing_datasets_key(
        datasets: Dict[str, Dataset],
        models: Dict[str, ProbabilisticModel]) -> None:
    search_space = one_dimensional_range(-1, 1)
    rule = TrustRegion()
    with pytest.raises(KeyError):
        rule.acquire(search_space, datasets, models, None)


@pytest.mark.parametrize(
    "models",
    [
Example #25
def test_expected_improvement_builder_raises_for_empty_data() -> None:
    data = Dataset(tf.zeros([0, 1]), tf.ones([0, 1]))

    with pytest.raises(ValueError):
        ExpectedImprovement().prepare_acquisition_function(data, QuadraticWithUnitVariance())
Example #26
def test_independent_reparametrization_sampler_raises_for_negative_sample_size(
    sample_size: int,
) -> None:
    with pytest.raises((ValueError, tf.errors.InvalidArgumentError)):
        IndependentReparametrizationSampler(sample_size, QuadraticWithUnitVariance())
Example #27
def test_independent_reparametrization_sampler_samples_are_distinct_for_new_instances() -> None:
    sampler1 = IndependentReparametrizationSampler(100, QuadraticWithUnitVariance())
    sampler2 = IndependentReparametrizationSampler(100, QuadraticWithUnitVariance())
    xs = tf.linspace([-10.0], [10.0], 100)
    npt.assert_array_less(1e-9, tf.abs(sampler2.sample(xs) - sampler1.sample(xs)))
Example #28
def test_independent_reparametrization_sampler_sample_is_repeatable() -> None:
    sampler = IndependentReparametrizationSampler(100, QuadraticWithUnitVariance())
    xs = tf.linspace([-10.0], [10.0], 100)
    npt.assert_allclose(sampler.sample(xs), sampler.sample(xs))
Example #29
def test_independent_reparametrization_sampler_sample_is_continuous() -> None:
    sampler = IndependentReparametrizationSampler(100, QuadraticWithUnitVariance())
    xs = tf.linspace([-10.0], [10.0], 100)
    diff = tf.abs(sampler.sample(xs + 1e-9) - sampler.sample(xs))
    npt.assert_array_less(diff, 1e-9)
Example #30
def test_independent_reparametrization_sampler_sample_raises_for_invalid_at_shape() -> None:
    sampler = IndependentReparametrizationSampler(1, QuadraticWithUnitVariance())
    with pytest.raises((ValueError, tf.errors.InvalidArgumentError)):
        sampler.sample(tf.constant(0))