Example 1
def test_optimization_result_try_get_final_models_for_multiple_models() -> None:
    data = {"foo": empty_dataset([1], [1]), "bar": empty_dataset([2], [2])}
    models = {"foo": _PseudoTrainableQuadratic(), "bar": _PseudoTrainableQuadratic()}
    result: OptimizationResult[None] = OptimizationResult(Ok(Record(data, models, None)), [])
    assert result.try_get_final_models() is models
    with pytest.raises(ValueError):
        result.try_get_final_model()
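The `empty_dataset` and `_PseudoTrainableQuadratic` helpers are not defined in these snippets. A minimal sketch of what they are assumed to look like (a zero-row `Dataset` with the given per-point shapes, and a quadratic-mean model whose training is a no-op), assuming the same trieste imports as the surrounding tests:

def empty_dataset(query_point_shape: list[int], observation_shape: list[int]) -> Dataset:
    # zero rows, but query points and observations have the requested per-point shapes
    return Dataset(
        tf.zeros([0] + query_point_shape, tf.float64),
        tf.zeros([0] + observation_shape, tf.float64),
    )


class _PseudoTrainableQuadratic(QuadraticMeanAndRBFKernel, TrainableProbabilisticModel):
    # a quadratic-mean model whose "training" does nothing
    def update(self, dataset: Dataset) -> None:
        pass

    def optimize(self, dataset: Dataset) -> None:
        pass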
Example 2
def test_single_model_acquisition_builder_using_passes_on_correct_dataset_and_model() -> None:
    class Builder(SingleModelAcquisitionBuilder):
        def prepare_acquisition_function(
            self, dataset: Dataset, model: ProbabilisticModel
        ) -> AcquisitionFunction:
            assert dataset is data["foo"]
            assert model is models["foo"]
            return raise_exc

    data = {"foo": empty_dataset([1], [1]), "bar": empty_dataset([1], [1])}
    models = {"foo": QuadraticMeanAndRBFKernel(), "bar": QuadraticMeanAndRBFKernel()}
    Builder().using("foo").prepare_acquisition_function(data, models)
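`raise_exc` above is a stand-in acquisition function that the test never expects to be evaluated; a minimal sketch, assuming it simply raises if called:

def raise_exc(*args: object, **kwargs: object) -> NoReturn:
    # fail loudly if the acquisition function is ever evaluated
    raise Exception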
Example 3
def test_optimization_result_try_get_final_datasets_for_successful_optimization(
) -> None:
    data = {"foo": empty_dataset([1], [1])}
    result: OptimizationResult[None] = OptimizationResult(
        Ok(Record(data, {"foo": _PseudoTrainableQuadratic()}, None)), [])
    assert result.try_get_final_datasets() is data
    assert result.try_get_final_dataset() is data["foo"]
Example 4
def test_efficient_global_optimization(optimizer: AcquisitionOptimizer[Box]) -> None:
    class NegQuadratic(SingleModelAcquisitionBuilder):
        def __init__(self) -> None:
            self._updated = False

        def prepare_acquisition_function(
            self,
            model: ProbabilisticModel,
            dataset: Optional[Dataset] = None,
        ) -> AcquisitionFunction:
            return lambda x: -quadratic(tf.squeeze(x, -2) - 1)

        def update_acquisition_function(
            self,
            function: AcquisitionFunction,
            model: ProbabilisticModel,
            dataset: Optional[Dataset] = None,
        ) -> AcquisitionFunction:
            self._updated = True
            return function

    function = NegQuadratic()
    search_space = Box([-10], [10])
    ego = EfficientGlobalOptimization(function, optimizer)
    data, model = empty_dataset([1], [1]), QuadraticMeanAndRBFKernel(x_shift=1)
    query_point = ego.acquire_single(search_space, model, dataset=data)
    npt.assert_allclose(query_point, [[1]], rtol=1e-4)
    assert not function._updated
    query_point = ego.acquire(search_space, {OBJECTIVE: model})
    npt.assert_allclose(query_point, [[1]], rtol=1e-4)
    assert function._updated
Example 5
def test_sum_and_product_for_single_builder(reducer_class: type[Sum | Product]) -> None:
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = reducer_class(_Static(lambda x: x ** 2)).prepare_acquisition_function(
        models, datasets=data
    )
    xs = tf.random.uniform([3, 5, 1], minval=-1.0)
    npt.assert_allclose(acq(xs), xs ** 2)
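`_Static`, used here and in the reducer tests below, is assumed to be a trivial builder that ignores its data and models and returns a fixed acquisition function. A minimal sketch (the argument order is left open, since the builder interface differs between trieste versions):

class _Static(AcquisitionFunctionBuilder):
    def __init__(self, f: AcquisitionFunction) -> None:
        self._f = f

    def prepare_acquisition_function(self, *args: object, **kwargs: object) -> AcquisitionFunction:
        # datasets and models are ignored; the wrapped function is returned as-is
        return self._f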
Example 6
def test_optimization_result_try_get_final_models_for_successful_optimization(
) -> None:
    models = {"foo": _PseudoTrainableQuadratic()}
    result: OptimizationResult[None] = OptimizationResult(
        Ok(Record({"foo": empty_dataset([1], [1])}, models, None)), [])
    assert result.try_get_final_models() is models
    assert result.try_get_final_model() is models["foo"]
Example 7
def test_single_model_acquisition_builder_raises_immediately_for_wrong_key() -> None:
    builder = _ArbitrarySingleBuilder().using("foo")

    with pytest.raises(KeyError):
        builder.prepare_acquisition_function(
            {"bar": empty_dataset([1], [1])}, {"bar": QuadraticMeanAndRBFKernel()}
        )
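`_ArbitrarySingleBuilder` is assumed to be a placeholder single-model builder whose returned function is never meant to be used, along the lines of:

class _ArbitrarySingleBuilder(SingleModelAcquisitionBuilder):
    def prepare_acquisition_function(
        self, dataset: Dataset, model: ProbabilisticModel
    ) -> AcquisitionFunction:
        # the returned function should never actually be evaluated
        return raise_exc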
Example 8
def test_ehvi_builder_raises_for_empty_data() -> None:
    num_obj = 3
    dataset = empty_dataset([2], [num_obj])
    model = QuadraticMeanAndRBFKernel()

    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        ExpectedHypervolumeImprovement().prepare_acquisition_function(dataset, model)
Example 9
def test_probability_of_feasibility_builder_builds_pof(threshold: float,
                                                       at: tf.Tensor) -> None:
    builder = ProbabilityOfFeasibility(threshold)
    acq = builder.prepare_acquisition_function(empty_dataset([1], [1]),
                                               QuadraticMeanAndRBFKernel())
    expected = probability_of_feasibility(QuadraticMeanAndRBFKernel(),
                                          threshold, at)
    npt.assert_allclose(acq(at), expected)
Example 10
def test_single_builder_raises_immediately_for_wrong_key(
    single_builder: SingleModelAcquisitionBuilder
    | SingleModelBatchAcquisitionBuilder,
) -> None:
    builder = single_builder.using("foo")

    with pytest.raises(KeyError):
        builder.prepare_acquisition_function(
            {"bar": empty_dataset([1], [1])},
            {"bar": QuadraticMeanAndRBFKernel()})
Example 11
def test_reducer__reduce() -> None:
    class Mean(Reducer):
        def _reduce(self, inputs: Sequence[tf.Tensor]) -> tf.Tensor:
            return tf.reduce_mean(inputs, axis=0)

    mean = Mean(_Static(lambda x: -2.0 * x), _Static(lambda x: 3.0 * x))
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = mean.prepare_acquisition_function(models, datasets=data)
    xs = tf.random.uniform([3, 5, 1], minval=-1.0)
    npt.assert_allclose(acq(xs), 0.5 * xs)
Example 12
def test_product() -> None:
    prod = Product(_Static(lambda x: x + 1), _Static(lambda x: x + 2))
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = prod.prepare_acquisition_function(data, models)
    xs = tf.random.uniform([3, 5, 1], minval=-1.0, dtype=tf.float64)
    npt.assert_allclose(acq(xs), (xs + 1) * (xs + 2))
Example 13
def test_bayesian_optimizer_optimize_raises_for_invalid_rule_keys_and_default_acquisition(
) -> None:
    optimizer = BayesianOptimizer(lambda x: x[:1], Box([-1], [1]))
    data, models = {"foo": empty_dataset([1], [1])}, {"foo": _PseudoTrainableQuadratic()}
    with pytest.raises(ValueError):
        optimizer.optimize(3, data, models)
Example 14
def test_sum() -> None:
    sum_ = Sum(_Static(lambda x: x), _Static(lambda x: x**2),
               _Static(lambda x: x**3))
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = sum_.prepare_acquisition_function(data, models)
    xs = tf.random.uniform([3, 5, 1], minval=-1.0)
    npt.assert_allclose(acq(xs), xs + xs**2 + xs**3)
Example 15
def test_bayesian_optimizer_optimize_raises_for_negative_steps(
        num_steps: int) -> None:
    optimizer = BayesianOptimizer(_quadratic_observer, Box([-1], [1]))

    data, models = {"": empty_dataset([1], [1])}, {"": _PseudoTrainableQuadratic()}
    with pytest.raises(ValueError, match="num_steps"):
        optimizer.optimize(num_steps, data, models)
Example 16
def test_bayesian_optimizer_optimize_doesnt_track_state_if_told_not_to() -> None:
    class _UncopyableModel(_PseudoTrainableQuadratic):
        def __deepcopy__(self, memo: dict[int, object]) -> NoReturn:
            assert False

    data, models = {OBJECTIVE: empty_dataset([1], [1])}, {OBJECTIVE: _UncopyableModel()}
    history = (
        BayesianOptimizer(_quadratic_observer, Box([-1], [1]))
        .optimize(5, data, models, track_state=False)
        .history
    )
    assert len(history) == 0
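`_quadratic_observer`, used in the two optimizer tests above, is assumed to evaluate the quadratic objective at the query points and tag the result with the optimizer's default key, e.g.:

def _quadratic_observer(x: tf.Tensor) -> Mapping[str, Dataset]:
    # observe the quadratic objective at the given query points
    return {OBJECTIVE: Dataset(x, quadratic(x))}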
Example 17
def test_efficient_global_optimization(
        optimizer: AcquisitionOptimizer[Box]) -> None:
    class NegQuadratic(AcquisitionFunctionBuilder):
        def prepare_acquisition_function(
            self, datasets: Mapping[str, Dataset], models: Mapping[str, ProbabilisticModel]
        ) -> AcquisitionFunction:
            return lambda x: -quadratic(tf.squeeze(x, -2) - 1)

    search_space = Box([-10], [10])
    ego = EfficientGlobalOptimization(NegQuadratic(), optimizer)
    data, model = empty_dataset([1], [1]), QuadraticMeanAndRBFKernel(x_shift=1)
    query_point, _ = ego.acquire(search_space, {"": data}, {"": model})
    npt.assert_allclose(query_point, [[1]], rtol=1e-4)
Example 18
        )
        .astuple()
    )
    final_model = final_opt_state.unwrap().model

    if fit_initial_model:  # optimized at start and end of first BO step
        assert final_model._optimize_count == 2  # type: ignore
    else:  # optimized just at end of first BO step
        assert final_model._optimize_count == 1  # type: ignore


@pytest.mark.parametrize(
    "datasets, models",
    [
        ({}, {}),
        ({"foo": empty_dataset([1], [1])}, {}),
        ({"foo": empty_dataset([1], [1])}, {"bar": _PseudoTrainableQuadratic()}),
        (
            {"foo": empty_dataset([1], [1])},
            {"foo": _PseudoTrainableQuadratic(), "bar": _PseudoTrainableQuadratic()},
        ),
    ],
)
def test_bayesian_optimizer_optimize_raises_for_invalid_keys(
    datasets: dict[str, Dataset], models: dict[str, TrainableProbabilisticModel]
) -> None:
    search_space = Box([-1], [1])
    optimizer = BayesianOptimizer(lambda x: {"foo": Dataset(x, x)}, search_space)
    rule = FixedAcquisitionRule([[0.0]])
    with pytest.raises(ValueError):
        optimizer.optimize(10, datasets, models, rule)
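`FixedAcquisitionRule` is assumed to be an acquisition rule that always proposes the same query points, regardless of the data and models it is given; a minimal sketch (the exact `acquire` signature depends on the trieste version these tests target):

class FixedAcquisitionRule(AcquisitionRule):
    def __init__(self, query_points: TensorType) -> None:
        self._query_points = tf.constant(query_points, dtype=tf.float64)

    def acquire(
        self,
        search_space: Box,
        datasets: Mapping[str, Dataset],
        models: Mapping[str, ProbabilisticModel],
    ) -> TensorType:
        # ignore everything and return the fixed query points
        return self._query_points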
Example 19
@pytest.mark.parametrize(
    "models",
    [
        {},
        {"foo": QuadraticMeanAndRBFKernel()},
        {"foo": QuadraticMeanAndRBFKernel(), OBJECTIVE: QuadraticMeanAndRBFKernel()},
    ],
)
@pytest.mark.parametrize("datasets", [{}, {OBJECTIVE: empty_dataset([1], [1])}])
def test_thompson_sampling_raises_for_invalid_models_keys(
        datasets: dict[str, Dataset],
        models: dict[str, ProbabilisticModel]) -> None:
    search_space = Box([-1], [1])
    rule = ThompsonSampling(100, 10)
    with pytest.raises(ValueError):
        rule.acquire(search_space, datasets, models)


def test_efficient_global_optimization_raises_for_no_query_points() -> None:
    with pytest.raises(ValueError):
        EfficientGlobalOptimization(num_query_points=0)

Example 20
    observer = _CountingObserver()
    optimizer = BayesianOptimizer(observer, Box([-1], [1]))
    data = mk_dataset([[0.5]], [[0.25]])

    optimizer.optimize(steps, data,
                       _PseudoTrainableQuadratic()).final_result.unwrap()

    assert observer.call_count == steps
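`_CountingObserver` is assumed to record how many times the optimizer queries it while returning quadratic observations, e.g.:

class _CountingObserver:
    call_count = 0

    def __call__(self, x: tf.Tensor) -> Dataset:
        # count each call and observe the quadratic objective
        self.call_count += 1
        return Dataset(x, quadratic(x))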


@pytest.mark.parametrize(
    "datasets, models",
    [
        ({}, {}),
        ({"foo": empty_dataset([1], [1])}, {}),
        ({"foo": empty_dataset([1], [1])}, {"bar": _PseudoTrainableQuadratic()}),
        (
            {"foo": empty_dataset([1], [1])},
            {"foo": _PseudoTrainableQuadratic(), "bar": _PseudoTrainableQuadratic()},
        ),