Example #1
def test_rff_sampler_returns_correctly_shaped_samples(
    sample_min_value: bool, sample_size: int
) -> None:
    search_space = Box([0.0, 0.0], [1.0, 1.0])
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions

    x_range = tf.linspace(0.0, 1.0, 5)
    x_range = tf.cast(x_range, dtype=tf.float64)
    xs = tf.reshape(tf.stack(tf.meshgrid(x_range, x_range, indexing="ij"), axis=-1), (-1, 2))
    ys = quadratic(xs)
    dataset = Dataset(xs, ys)

    sampler = RandomFourierFeatureThompsonSampler(
        sample_size, model, dataset, num_features=100, sample_min_value=sample_min_value
    )

    query_points = search_space.sample(100)
    thompson_samples = sampler.sample(query_points)
    if sample_min_value:
        tf.debugging.assert_shapes([(thompson_samples, [sample_size, 1])])
    else:
        tf.debugging.assert_shapes([(thompson_samples, [sample_size, 2])])
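
The `pytest.mark.parametrize` decorators that supply `sample_min_value` and `sample_size` are stripped in this extract. A plausible reconstruction is sketched below; the concrete values are assumptions, not taken from the source.

@pytest.mark.parametrize("sample_min_value", [True, False])
@pytest.mark.parametrize("sample_size", [1, 10])
def test_rff_sampler_returns_correctly_shaped_samples(
    sample_min_value: bool, sample_size: int
) -> None:
    ...  # body as in Example #1 above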
Example #2
def test_async_keeps_track_of_pending_points(
    async_rule: AcquisitionRule[State[TensorType, AsynchronousRuleState], Box]
) -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))

    state_fn = async_rule.acquire_single(search_space, QuadraticMeanAndRBFKernel(), dataset=dataset)
    state, point1 = state_fn(None)
    state, point2 = state_fn(state)

    assert state is not None
    assert len(state.pending_points) == 2

    # pretend we saw observation for the first point
    new_observations = Dataset(
        query_points=point1,
        observations=tf.constant([[1]], dtype=tf.float32),
    )
    state_fn = async_rule.acquire_single(
        search_space,
        QuadraticMeanAndRBFKernel(),
        dataset=dataset + new_observations,
    )
    state, point3 = state_fn(state)

    assert state is not None
    assert len(state.pending_points) == 2

    # we saw the first point, so the pending points are
    # the second point and the new third point
    npt.assert_allclose(state.pending_points, tf.concat([point2, point3], axis=0))
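
The `async_rule` argument is likewise fed by a stripped decorator. A hedged sketch, assuming trieste's `AsynchronousOptimization` and `AsynchronousGreedy` rules and reusing the builder stubs from the other examples; the constructor arguments are illustrative, not from the source.

@pytest.mark.parametrize(
    "async_rule",
    [
        AsynchronousOptimization(_JointBatchModelMinusMeanMaximumSingleBuilder()),
        AsynchronousGreedy(_GreedyBatchModelMinusMeanMaximumSingleBuilder()),
    ],
)
def test_async_keeps_track_of_pending_points(async_rule) -> None:
    ...  # body and annotations as in Example #2 above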
Example #3
def test_greedy_batch_acquisition_rule_acquire(
    rule_fn: Callable[
        # callable input type(s)
        [_GreedyBatchModelMinusMeanMaximumSingleBuilder],
        # callable output type
        AcquisitionRule[TensorType, Box]
        | AcquisitionRule[State[TensorType, AsynchronousGreedy.State], Box],
    ],
    num_query_points: int,
) -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    acq = _GreedyBatchModelMinusMeanMaximumSingleBuilder()
    assert acq._update_count == 0
    acq_rule: AcquisitionRule[TensorType, Box] | AcquisitionRule[
        State[TensorType, AsynchronousGreedy.State], Box
    ] = rule_fn(acq)
    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))
    points_or_stateful = acq_rule.acquire_single(search_space, dataset, QuadraticMeanAndRBFKernel())
    if callable(points_or_stateful):
        _, query_points = points_or_stateful(None)
    else:
        query_points = points_or_stateful
    assert acq._update_count == num_query_points - 1
    npt.assert_allclose(query_points, [[0.0, 0.0]] * num_query_points, atol=1e-3)

    points_or_stateful = acq_rule.acquire_single(search_space, dataset, QuadraticMeanAndRBFKernel())
    if callable(points_or_stateful):
        _, query_points = points_or_stateful(None)
    else:
        query_points = points_or_stateful
    npt.assert_allclose(query_points, [[0.0, 0.0]] * num_query_points, atol=1e-3)
    assert acq._update_count == 2 * num_query_points - 1
Example #4
def test_async_ego_keeps_track_of_pending_points() -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    acq = _GreedyBatchModelMinusMeanMaximumSingleBuilder()
    async_rule: AsynchronousGreedy[Box] = AsynchronousGreedy(acq)
    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))

    state_fn = async_rule.acquire_single(search_space, dataset, QuadraticMeanAndRBFKernel())
    state, point1 = state_fn(None)
    state, point2 = state_fn(state)

    assert state is not None
    assert len(state.pending_points) == 2

    # let's pretend we saw observations for the first point
    new_observations = Dataset(
        query_points=point1,
        observations=tf.constant([[1]], dtype=tf.float32),
    )
    state_fn = async_rule.acquire_single(
        search_space, dataset + new_observations, QuadraticMeanAndRBFKernel()
    )
    state, point3 = state_fn(state)

    assert state is not None
    assert len(state.pending_points) == 2
    # the first point was observed, so the pending points are the
    # remaining point from the first batch and the new third point
    npt.assert_allclose(state.pending_points, tf.concat([point2, point3], axis=0))
Example #5
def test_rff_sampler_raises_for_invalid_number_of_features(
    num_features: int,
) -> None:
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions
    dataset = Dataset(
        tf.constant([[-2.0]], dtype=tf.float64), tf.constant([[4.1]], dtype=tf.float64)
    )
    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        RandomFourierFeatureThompsonSampler(1, model, dataset, num_features=num_features)
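
The stripped decorator presumably supplies non-positive feature counts; the values below are an assumption for illustration.

@pytest.mark.parametrize("num_features", [0, -2])
def test_rff_sampler_raises_for_invalid_number_of_features(num_features: int) -> None:
    ...  # body as in Example #5 above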
Example #6
def test_discrete_thompson_sampling_acquire_returns_correct_shape(
    num_fourier_features: int, num_query_points: int
) -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    ts = DiscreteThompsonSampling(100, num_query_points, num_fourier_features=num_fourier_features)
    dataset = Dataset(tf.zeros([1, 2], dtype=tf.float64), tf.zeros([1, 1], dtype=tf.float64))
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions
    query_points = ts.acquire_single(search_space, model, dataset=dataset)

    npt.assert_array_equal(query_points.shape, tf.constant([num_query_points, 2]))
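
Here too the parametrization is not shown; an assumed sketch with integer feature counts and batch sizes:

@pytest.mark.parametrize("num_fourier_features", [100, 500])
@pytest.mark.parametrize("num_query_points", [1, 10])
def test_discrete_thompson_sampling_acquire_returns_correct_shape(
    num_fourier_features: int, num_query_points: int
) -> None:
    ...  # body as in Example #6 above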
Example #7
def test_rff_sampler_sample_raises_for_invalid_at_shape(
    shape: ShapeLike,
) -> None:
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions
    dataset = Dataset(
        tf.constant([[-2.0]], dtype=tf.float64), tf.constant([[4.1]], dtype=tf.float64)
    )
    sampler = RandomFourierFeatureThompsonSampler(1, model, dataset, num_features=100)

    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        sampler.sample(tf.zeros(shape))
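
The invalid `shape` values are stripped. `sampler.sample` expects query points of shape `[N, D]`, so plausible offending shapes are wrong-rank ones (assumed values):

@pytest.mark.parametrize("shape", [[], [1], [1, 2, 3]])
def test_rff_sampler_sample_raises_for_invalid_at_shape(shape: ShapeLike) -> None:
    ...  # body as in Example #7 above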
Example #8
def test_rff_sampler_returns_trajectory_function_with_correct_shaped_output(num_evals: int) -> None:
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions
    dataset = Dataset(
        tf.constant([[-2.0]], dtype=tf.float64), tf.constant([[4.1]], dtype=tf.float64)
    )
    sampler = RandomFourierFeatureThompsonSampler(1, model, dataset, num_features=100)

    trajectory = sampler.get_trajectory()
    xs = tf.linspace([-10.0], [10.0], num_evals)

    tf.debugging.assert_shapes([(trajectory(xs), [num_evals, 1])])
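
`num_evals` is also parametrized in the source; any positive evaluation counts would work, for example (assumed values):

@pytest.mark.parametrize("num_evals", [1, 5, 100])
def test_rff_sampler_returns_trajectory_function_with_correct_shaped_output(num_evals: int) -> None:
    ...  # body as in Example #8 above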
Example #9
def test_sum_and_product_for_single_builder(reducer_class: type[Sum | Product]) -> None:
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = reducer_class(_Static(lambda x: x ** 2)).prepare_acquisition_function(
        models, datasets=data
    )
    xs = tf.random.uniform([3, 5, 1], minval=-1.0)
    npt.assert_allclose(acq(xs), xs ** 2)
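
Given the `type[Sum | Product]` annotation, the stripped decorator plausibly reads (reconstruction, not from the source):

@pytest.mark.parametrize("reducer_class", [Sum, Product])
def test_sum_and_product_for_single_builder(reducer_class: type[Sum | Product]) -> None:
    ...  # body as in Example #9 above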
Example #10
def test_exact_thompson_sampler_sample_raises_for_invalid_at_shape(
    shape: ShapeLike,
) -> None:
    sampler = ExactThompsonSampler(1, QuadraticMeanAndRBFKernel())

    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        sampler.sample(tf.zeros(shape))
Example #11
def test_trust_region_successful_global_to_global_trust_region_unchanged(
    rule: AcquisitionRule[TensorType, Box], expected_query_point: TensorType
) -> None:
    tr = TrustRegion(rule)
    dataset = Dataset(tf.constant([[0.1, 0.2], [-0.1, -0.2]]), tf.constant([[0.4], [0.3]]))
    lower_bound = tf.constant([-2.2, -1.0])
    upper_bound = tf.constant([1.3, 3.3])
    search_space = Box(lower_bound, upper_bound)

    eps = 0.5 * (search_space.upper - search_space.lower) / 10
    previous_y_min = dataset.observations[0]
    is_global = True
    previous_state = TrustRegion.State(search_space, eps, previous_y_min, is_global)

    current_state, query_point = tr.acquire(
        search_space,
        {OBJECTIVE: QuadraticMeanAndRBFKernel()},
        datasets={OBJECTIVE: dataset},
    )(previous_state)

    assert current_state is not None
    npt.assert_array_almost_equal(current_state.eps, previous_state.eps)
    assert current_state.is_global
    npt.assert_array_almost_equal(query_point, expected_query_point, 5)
    npt.assert_array_almost_equal(current_state.acquisition_space.lower, lower_bound)
    npt.assert_array_almost_equal(current_state.acquisition_space.upper, upper_bound)
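
The `rule` / `expected_query_point` pairs are not shown. Since `QuadraticMeanAndRBFKernel` has its minimum at the origin, a hedged reconstruction (the rule constructor and values are assumptions) could be:

@pytest.mark.parametrize(
    "rule, expected_query_point",
    [(EfficientGlobalOptimization(), [[0.0, 0.0]])],
)
def test_trust_region_successful_global_to_global_trust_region_unchanged(
    rule: AcquisitionRule[TensorType, Box], expected_query_point: TensorType
) -> None:
    ...  # body as in Example #11 above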
Example #12
def test_rff_sampler_raises_for_invalid_sample_size(
    sample_size: int,
) -> None:
    model = QuadraticMeanAndRBFKernel()
    dataset = Dataset(tf.constant([[-2.0]]), tf.constant([[4.1]]))
    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        RandomFourierFeatureThompsonSampler(sample_size, model, dataset)
Example #13
def test_independent_reparametrization_sampler_sample_raises_for_invalid_at_shape(
    shape: ShapeLike,
) -> None:
    sampler = IndependentReparametrizationSampler(1, QuadraticMeanAndRBFKernel())

    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        sampler.sample(tf.zeros(shape))
Example #14
def test_reparametrization_sampler_reprs(
    sampler: type[BatchReparametrizationSampler | IndependentReparametrizationSampler],
) -> None:
    assert (
        repr(sampler(20, QuadraticMeanAndRBFKernel()))
        == f"{sampler.__name__}(20, QuadraticMeanAndRBFKernel())"
    )
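
The `sampler` classes follow directly from the annotation; the stripped decorator presumably pairs the two reparametrization samplers:

@pytest.mark.parametrize(
    "sampler", [BatchReparametrizationSampler, IndependentReparametrizationSampler]
)
def test_reparametrization_sampler_reprs(
    sampler: type[BatchReparametrizationSampler | IndependentReparametrizationSampler],
) -> None:
    ...  # body as in Example #14 above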
Example #15
def test_efficient_global_optimization(optimizer: AcquisitionOptimizer[Box]) -> None:
    class NegQuadratic(SingleModelAcquisitionBuilder):
        def __init__(self) -> None:
            self._updated = False

        def prepare_acquisition_function(
            self,
            model: ProbabilisticModel,
            dataset: Optional[Dataset] = None,
        ) -> AcquisitionFunction:
            return lambda x: -quadratic(tf.squeeze(x, -2) - 1)

        def update_acquisition_function(
            self,
            function: AcquisitionFunction,
            model: ProbabilisticModel,
            dataset: Optional[Dataset] = None,
        ) -> AcquisitionFunction:
            self._updated = True
            return function

    function = NegQuadratic()
    search_space = Box([-10], [10])
    ego = EfficientGlobalOptimization(function, optimizer)
    data, model = empty_dataset([1], [1]), QuadraticMeanAndRBFKernel(x_shift=1)
    query_point = ego.acquire_single(search_space, model, dataset=data)
    npt.assert_allclose(query_point, [[1]], rtol=1e-4)
    assert not function._updated
    query_point = ego.acquire(search_space, {OBJECTIVE: model})
    npt.assert_allclose(query_point, [[1]], rtol=1e-4)
    assert function._updated
Example #16
def test_trust_region_for_unsuccessful_local_to_global_trust_region_reduced(
    rule: AcquisitionRule[TensorType, Box]
) -> None:
    tr = TrustRegion(rule)
    dataset = Dataset(tf.constant([[0.1, 0.2], [-0.1, -0.2]]), tf.constant([[0.4], [0.5]]))
    lower_bound = tf.constant([-2.2, -1.0])
    upper_bound = tf.constant([1.3, 3.3])
    search_space = Box(lower_bound, upper_bound)

    eps = 0.5 * (search_space.upper - search_space.lower) / 10
    previous_y_min = dataset.observations[0]
    is_global = False
    acquisition_space = Box(dataset.query_points[0] - eps, dataset.query_points[0] + eps)
    previous_state = TrustRegion.State(acquisition_space, eps, previous_y_min, is_global)

    current_state, _ = tr.acquire(
        search_space,
        {OBJECTIVE: QuadraticMeanAndRBFKernel()},
        datasets={OBJECTIVE: dataset},
    )(previous_state)

    assert current_state is not None
    npt.assert_array_less(current_state.eps, previous_state.eps)  # current TR smaller than previous
    assert current_state.is_global
    npt.assert_array_almost_equal(current_state.acquisition_space.lower, lower_bound)
Example #17
def test_joint_batch_acquisition_rule_acquire(
    rule_fn: Callable[
        # callable input type(s)
        [_JointBatchModelMinusMeanMaximumSingleBuilder, int],
        # callable output type
        AcquisitionRule[TensorType, Box]
        | AcquisitionRule[State[TensorType, AsynchronousRuleState], Box],
    ]
) -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    num_query_points = 4
    acq = _JointBatchModelMinusMeanMaximumSingleBuilder()
    acq_rule: AcquisitionRule[TensorType, Box] | AcquisitionRule[
        State[TensorType, AsynchronousRuleState], Box
    ] = rule_fn(acq, num_query_points)

    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))
    points_or_stateful = acq_rule.acquire_single(
        search_space, QuadraticMeanAndRBFKernel(), dataset=dataset
    )
    if callable(points_or_stateful):
        _, query_point = points_or_stateful(None)
    else:
        query_point = points_or_stateful

    npt.assert_allclose(query_point, [[0.0, 0.0]] * num_query_points, atol=1e-3)
Example #18
def test_rff_sampler_returns_deterministic_trajectory() -> None:
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions
    x_range = tf.linspace(0.0, 1.0, 5)
    x_range = tf.cast(x_range, dtype=tf.float64)
    xs = tf.reshape(tf.stack(tf.meshgrid(x_range, x_range, indexing="ij"), axis=-1), (-1, 2))
    ys = quadratic(xs)
    dataset = Dataset(xs, ys)

    sampler = RandomFourierFeatureThompsonSampler(1, model, dataset, num_features=100)
    trajectory = sampler.get_trajectory()

    trajectory_eval_1 = trajectory(xs)
    trajectory_eval_2 = trajectory(xs)

    npt.assert_allclose(trajectory_eval_1, trajectory_eval_2)
Example #19
def test_gumbel_samples_are_minima() -> None:
    search_space = Box([0, 0], [1, 1])

    x_range = tf.linspace(0.0, 1.0, 5)
    x_range = tf.cast(x_range, dtype=tf.float64)
    xs = tf.reshape(tf.stack(tf.meshgrid(x_range, x_range, indexing="ij"), axis=-1), (-1, 2))
    ys = quadratic(xs)
    dataset = Dataset(xs, ys)

    model = QuadraticMeanAndRBFKernel()
    gumbel_sampler = GumbelSampler(5, model)

    query_points = search_space.sample(100)
    query_points = tf.concat([dataset.query_points, query_points], 0)
    gumbel_samples = gumbel_sampler.sample(query_points)

    fmean, _ = model.predict(dataset.query_points)
    assert max(gumbel_samples) < min(fmean)
Example #20
def test_rff_sampler_returns_same_posterior_from_each_calculation_method() -> None:
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1.0, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions
    x_range = tf.linspace(0.0, 1.0, 5)
    x_range = tf.cast(x_range, dtype=tf.float64)
    xs = tf.reshape(tf.stack(tf.meshgrid(x_range, x_range, indexing="ij"), axis=-1), (-1, 2))
    ys = quadratic(xs)
    dataset = Dataset(xs, ys)

    sampler = RandomFourierFeatureThompsonSampler(1, model, dataset, num_features=100)
    sampler.get_trajectory()

    posterior_1 = sampler._prepare_theta_posterior_in_design_space()
    posterior_2 = sampler._prepare_theta_posterior_in_gram_space()

    npt.assert_allclose(posterior_1.loc, posterior_2.loc, rtol=0.02)
    npt.assert_allclose(posterior_1.scale_tril, posterior_2.scale_tril, rtol=0.02)
Example #21
def test_product() -> None:
    prod = Product(_Static(lambda x: x + 1), _Static(lambda x: x + 2))
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = prod.prepare_acquisition_function(data, models)
    xs = tf.random.uniform([3, 5, 1], minval=-1.0, dtype=tf.float64)
    npt.assert_allclose(acq(xs), (xs + 1) * (xs + 2))
Example #22
def test_reducer__reduce() -> None:
    class Mean(Reducer):
        def _reduce(self, inputs: Sequence[tf.Tensor]) -> tf.Tensor:
            return tf.reduce_mean(inputs, axis=0)

    mean = Mean(_Static(lambda x: -2.0 * x), _Static(lambda x: 3.0 * x))
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = mean.prepare_acquisition_function(models, datasets=data)
    xs = tf.random.uniform([3, 5, 1], minval=-1.0)
    npt.assert_allclose(acq(xs), 0.5 * xs)
Example #23
def test_sum() -> None:
    sum_ = Sum(_Static(lambda x: x), _Static(lambda x: x**2), _Static(lambda x: x**3))
    data, models = {"": empty_dataset([1], [1])}, {"": QuadraticMeanAndRBFKernel()}
    acq = sum_.prepare_acquisition_function(data, models)
    xs = tf.random.uniform([3, 5, 1], minval=-1.0)
    npt.assert_allclose(acq(xs), xs + xs**2 + xs**3)
Example #24
def test_joint_batch_acquisition_rule_acquire() -> None:
    search_space = Box(tf.constant([-2.2, -1.0]), tf.constant([1.3, 3.3]))
    num_query_points = 4
    acq = _JointBatchModelMinusMeanMaximumSingleBuilder()
    ego: EfficientGlobalOptimization[Box] = EfficientGlobalOptimization(
        acq, num_query_points=num_query_points
    )
    dataset = Dataset(tf.zeros([0, 2]), tf.zeros([0, 1]))
    query_point = ego.acquire_single(search_space, dataset, QuadraticMeanAndRBFKernel())

    npt.assert_allclose(query_point, [[0.0, 0.0]] * num_query_points, atol=1e-3)
Example #25
    def __init__(self, samples: TensorType):
        """
        :param samples: Samples of shape `[S, B, L]`, where `S` is the `sample_size`, `B` the
            number of points per batch, and `L` the dimension of the model's predictive
            distribution.
        """
        tf.debugging.assert_shapes(
            [(samples, ["S", "B", "L"])],
            message="This sampler takes samples of shape "
            "[sample_size, batch_points, output_dimension].",
        )
        self._samples = samples  # [S, B, L]
        super().__init__(1, QuadraticMeanAndRBFKernel())
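
For clarity, a standalone sketch (not part of the original class) of how `tf.debugging.assert_shapes` enforces the `[S, B, L]` contract described in the docstring:

import tensorflow as tf

samples = tf.zeros([7, 3, 2])  # S=7 samples, B=3 batch points, L=2 output dimensions
tf.debugging.assert_shapes([(samples, ["S", "B", "L"])])  # rank 3: passes

try:
    tf.debugging.assert_shapes([(tf.zeros([7, 3]), ["S", "B", "L"])])  # rank 2: rejected
except (ValueError, tf.errors.InvalidArgumentError):
    print("rank mismatch detected")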
Example #26
def test_exact_thompson_sampler_returns_correctly_shaped_samples(
    sample_min_value: bool, sample_size: int
) -> None:
    search_space = Box([0, 0], [1, 1])
    thompson_sampler = ExactThompsonSampler(
        sample_size, QuadraticMeanAndRBFKernel(), sample_min_value=sample_min_value
    )
    query_points = search_space.sample(500)
    thompson_samples = thompson_sampler.sample(query_points)
    if sample_min_value:
        tf.debugging.assert_shapes([(thompson_samples, [sample_size, 1])])
    else:
        tf.debugging.assert_shapes([(thompson_samples, [sample_size, 2])])
Example #27
def test_rff_thompson_samples_are_minima() -> None:
    search_space = Box([0.0, 0.0], [1.0, 1.0])
    model = QuadraticMeanAndRBFKernel(noise_variance=tf.constant(1e-5, dtype=tf.float64))
    model.kernel = (
        gpflow.kernels.RBF()
    )  # need a gpflow kernel object for random feature decompositions

    x_range = tf.linspace(0.0, 1.0, 5)
    x_range = tf.cast(x_range, dtype=tf.float64)
    xs = tf.reshape(tf.stack(tf.meshgrid(x_range, x_range, indexing="ij"), axis=-1), (-1, 2))
    ys = quadratic(xs)
    dataset = Dataset(xs, ys)

    sampler = RandomFourierFeatureThompsonSampler(
        1, model, dataset, num_features=100, sample_min_value=True
    )

    query_points = search_space.sample(100)
    query_points = tf.concat([dataset.query_points, query_points], 0)
    thompson_samples = sampler.sample(query_points)

    fmean, _ = model.predict(dataset.query_points)
    assert max(thompson_samples) < min(fmean)
Example #28
def test_trust_region_for_default_state(
    rule: AcquisitionRule[TensorType, Box], expected_query_point: TensorType
) -> None:
    tr = TrustRegion(rule)
    dataset = Dataset(tf.constant([[0.1, 0.2]]), tf.constant([[0.012]]))
    lower_bound = tf.constant([-2.2, -1.0])
    upper_bound = tf.constant([1.3, 3.3])
    search_space = Box(lower_bound, upper_bound)

    state, query_point = tr.acquire_single(search_space, dataset, QuadraticMeanAndRBFKernel())(None)

    assert state is not None
    npt.assert_array_almost_equal(query_point, expected_query_point, 5)
    npt.assert_array_almost_equal(state.acquisition_space.lower, lower_bound)
    npt.assert_array_almost_equal(state.acquisition_space.upper, upper_bound)
    npt.assert_array_almost_equal(state.y_min, [0.012])
    assert state.is_global
Example #29
def test_exact_thompson_sampler_raises_for_invalid_sample_size(
    sample_size: int,
) -> None:
    with pytest.raises(TF_DEBUGGING_ERROR_TYPES):
        ExactThompsonSampler(sample_size, QuadraticMeanAndRBFKernel())
Example #30
        (10, 50, 0),
        (10, 50, -2),
    ],
)
def test_discrete_thompson_sampling_raises_for_invalid_init_params(
    num_search_space_samples: int, num_query_points: int, num_fourier_features: int
) -> None:
    with pytest.raises(ValueError):
        DiscreteThompsonSampling(num_search_space_samples, num_query_points, num_fourier_features)


@pytest.mark.parametrize(
    "models",
    [
        {},
        {"foo": QuadraticMeanAndRBFKernel()},
        {"foo": QuadraticMeanAndRBFKernel(), OBJECTIVE: QuadraticMeanAndRBFKernel()},
    ],
)
@pytest.mark.parametrize("datasets", [{}, {OBJECTIVE: empty_dataset([1], [1])}])
def test_discrete_thompson_sampling_raises_for_invalid_models_keys(
    datasets: dict[str, Dataset], models: dict[str, ProbabilisticModel]
) -> None:
    search_space = Box([-1], [1])
    rule = DiscreteThompsonSampling(100, 10)
    with pytest.raises(ValueError):
        rule.acquire(search_space, models, datasets=datasets)


@pytest.mark.parametrize("models", [{}, {OBJECTIVE: QuadraticMeanAndRBFKernel()}])
@pytest.mark.parametrize(