Example #1
def test_every_iteration_model_updater_with_cost():
    """
    Tests that the model updater can use a different attribute from loop_state as the training targets
    """

    class MockModel(IModel):
        def optimize(self):
            pass

        def set_data(self, X: np.ndarray, Y: np.ndarray):
            self._X = X
            self._Y = Y

        @property
        def X(self):
            return self._X

        @property
        def Y(self):
            return self._Y

    mock_model = MockModel()
    updater = FixedIntervalUpdater(mock_model, 1, lambda loop_state: loop_state.cost)

    loop_state_mock = mock.create_autospec(LoopState)
    loop_state_mock.iteration = 1
    loop_state_mock.X = np.random.rand(5, 1)

    # The custom target extractor should feed loop_state.cost into the model as the targets (Y)
    cost = np.random.rand(5, 1)
    loop_state_mock.cost = cost
    updater.update(loop_state_mock)
    assert np.array_equiv(mock_model.Y, cost)
Example #2
def test_every_iteration_model_updater():
    mock_model = mock.create_autospec(IModel)
    mock_model.optimize.return_value = None
    updater = FixedIntervalUpdater(mock_model, 1)

    loop_state_mock = mock.create_autospec(LoopState)
    loop_state_mock.iteration = 1
    loop_state_mock.X = np.random.rand(5, 1)
    loop_state_mock.Y = np.random.rand(5, 1)
    updater.update(loop_state_mock)
    mock_model.optimize.assert_called_once()
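
Both tests exercise the same contract: on update, the updater pushes the loop state's inputs and targets into the model via set_data and calls optimize on the configured interval, and Example #1 additionally shows that a custom callable can redirect the targets to another LoopState attribute (here cost). The following is a rough sketch of that contract for orientation only, not emukit's actual implementation:

class SimpleFixedIntervalUpdater:
    """Sketch of the behaviour the tests above rely on (not emukit's code)."""

    def __init__(self, model, interval=1, targets_extractor_fn=None):
        self.model = model
        self.interval = interval
        # Default to loop_state.Y as targets; Example #1 swaps in loop_state.cost
        self.targets_extractor_fn = targets_extractor_fn or (lambda loop_state: loop_state.Y)

    def update(self, loop_state):
        targets = self.targets_extractor_fn(loop_state)
        self.model.set_data(loop_state.X, targets)
        if loop_state.iteration % self.interval == 0:
            self.model.optimize()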
Example #3
    def __init__(self, space: ParameterSpace,
                 X_init: np.ndarray, Y_init: np.ndarray, cost_init: np.ndarray,
                 s_min: float, s_max: float,
                 update_interval: int = 1,
                 num_eval_points: int = 2000,
                 marginalize_hypers: bool = True):
        """
        Implements FAst Bayesian Optimization for LArge DataSets as described in:

        Fast Bayesian hyperparameter optimization on large datasets
        A. Klein and S. Falkner and S. Bartels and P. Hennig and F. Hutter
        Electronic Journal of Statistics (2017)

        :param space: input space where the optimization is carried out.
        :param X_init: initial data points
        :param Y_init: initial function values
        :param cost_init: initial costs
        :param s_min: smallest possible dataset size
        :param s_max: highest possible dataset size
        :param update_interval: number of iterations between optimizations of the model hyper-parameters. Defaults to 1.
        :param num_eval_points: number of points at which to evaluate the acquisition function
        :param marginalize_hypers: if True, marginalize over the GP hyper-parameters
        """

        # Extend the input space with the dataset-size parameter s without
        # mutating the parameter list of the space passed in by the caller
        parameters = space.parameters + [ContinuousParameter("s", s_min, s_max)]
        extended_space = ParameterSpace(parameters)

        model_objective = FabolasModel(X_init=X_init, Y_init=Y_init, s_min=s_min, s_max=s_max)
        model_cost = FabolasModel(X_init=X_init, Y_init=cost_init[:, None], s_min=s_min, s_max=s_max)

        if marginalize_hypers:
            acquisition_generator = lambda model: ContinuousFidelityEntropySearch(
                model_objective, space=extended_space,
                target_fidelity_index=len(extended_space.parameters) - 1)
            entropy_search = IntegratedHyperParameterAcquisition(model_objective, acquisition_generator)
        else:
            entropy_search = ContinuousFidelityEntropySearch(
                model_objective, space=extended_space,
                target_fidelity_index=len(extended_space.parameters) - 1)

        acquisition = acquisition_per_expected_cost(entropy_search, model_cost)

        model_updater_objective = FixedIntervalUpdater(model_objective, update_interval)
        model_updater_cost = FixedIntervalUpdater(model_cost, update_interval, lambda state: state.cost)

        acquisition_optimizer = RandomSearchAcquisitionOptimizer(extended_space, num_eval_points=num_eval_points)
        candidate_point_calculator = SequentialPointCalculator(acquisition, acquisition_optimizer)

        loop_state = create_loop_state(model_objective.X, model_objective.Y, model_cost.Y)

        super(CostSensitiveBayesianOptimizationLoop, self).__init__(candidate_point_calculator,
                                                                    [model_updater_objective, model_updater_cost],
                                                                    loop_state)
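
A hedged usage sketch of this constructor. The class name FabolasLoop and its import path are assumptions (emukit ships a FABOLAS example loop, but the exact location may differ between versions), and the initial design below is random placeholder data rather than real evaluations:

import numpy as np
from emukit.core import ContinuousParameter, ParameterSpace
# Assumed name/path of the class whose __init__ is shown above
from emukit.examples.fabolas import FabolasLoop

# Hypothetical search space over two hyper-parameters (log learning rate, log regularisation)
space = ParameterSpace([ContinuousParameter("log_lr", -6.0, 0.0),
                        ContinuousParameter("log_reg", -3.0, 3.0)])

s_min, s_max = 100, 50000

# Placeholder initial design: each row of X_init is a configuration plus the
# dataset size s it was evaluated on (matching the space extended with "s" above)
X_configs = np.random.uniform(low=[-6.0, -3.0], high=[0.0, 3.0], size=(5, 2))
X_init = np.hstack([X_configs, np.random.uniform(s_min, s_max, (5, 1))])
Y_init = np.random.rand(5, 1)
cost_init = np.random.rand(5)  # 1-d; the constructor adds the trailing axis for the cost model

loop = FabolasLoop(space=space, X_init=X_init, Y_init=Y_init, cost_init=cost_init,
                   s_min=s_min, s_max=s_max, update_interval=1, marginalize_hypers=False)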
Example #4
def test_iteration_end_event():
    space = ParameterSpace([ContinuousParameter('x', 0, 1)])

    def user_function(x):
        return x

    x_test = np.linspace(0, 1)[:, None]
    y_test = user_function(x_test)

    x_init = np.linspace(0, 1, 5)[:, None]
    y_init = user_function(x_init)

    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    mse = []

    def compute_mse(loop, loop_state):
        # iteration_end_event handlers are called with the loop and its current loop state
        mse.append(np.mean(np.square(model.predict(x_test)[0] - y_test)))

    loop_state = create_loop_state(x_init, y_init)

    acquisition = ModelVariance(model)
    acquisition_optimizer = AcquisitionOptimizer(space)
    candidate_point_calculator = SequentialPointCalculator(
        acquisition, acquisition_optimizer)
    model_updater = FixedIntervalUpdater(model)

    loop = OuterLoop(candidate_point_calculator, model_updater, loop_state)
    loop.iteration_end_event.append(compute_mse)
    loop.run_loop(user_function, 5)

    assert len(mse) == 5
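
The iteration_end_event hook is not limited to MSE tracking: any callable with the same (loop, loop_state) signature as compute_mse above can be appended. A small additional example, reusing the loop built in this test, that records the running minimum of the collected function values:

best_so_far = []

def track_best(loop, loop_state):
    # loop_state.Y holds every function value collected so far
    best_so_far.append(loop_state.Y.min())

loop.iteration_end_event.append(track_best)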
Example #5
    def __init__(self,
                 model: VanillaBayesianQuadrature,
                 acquisition: Acquisition = None,
                 model_updater: ModelUpdater = None):
        """
        The loop for vanilla Bayesian Quadrature

        :param model: the vanilla Bayesian quadrature method
        :param acquisition: The acquisition function that is used to collect new points.
                            Defaults to IntegralVarianceReduction.
        :param model_updater: Defines how and when the quadrature model is updated if new data arrives.
                              Defaults to updating hyper-parameters every iteration.
        """

        self.model = model

        if acquisition is None:
            acquisition = IntegralVarianceReduction(self.model)

        if model_updater is None:
            model_updater = FixedIntervalUpdater(self.model, 1)

        space = ParameterSpace(self.model.integral_bounds.convert_to_list_of_continuous_parameters())
        acquisition_optimizer = AcquisitionOptimizer(space)
        candidate_point_calculator = SequentialPointCalculator(
            acquisition, acquisition_optimizer)
        loop_state = create_loop_state(self.model.X, self.model.Y)

        super().__init__(candidate_point_calculator, model_updater, loop_state)
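
A hedged usage sketch of this loop. Here vbq_model stands in for an already-constructed VanillaBayesianQuadrature instance and integrand for the function being integrated (both hypothetical names), and the class whose __init__ is shown above is assumed to be named VanillaBayesianQuadratureLoop:

# Defaults fill in IntegralVarianceReduction and a per-iteration FixedIntervalUpdater,
# as described in the docstring above
bq_loop = VanillaBayesianQuadratureLoop(model=vbq_model)

# Alternatively, re-optimize hyper-parameters only every 5 iterations
bq_loop = VanillaBayesianQuadratureLoop(model=vbq_model,
                                        model_updater=FixedIntervalUpdater(vbq_model, 5))

bq_loop.run_loop(integrand, 10)  # collect 10 new evaluations of the integrand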
Example #6
def test_multi_source_batch_experimental_design():
    objective, space = multi_fidelity_forrester_function()

    # Create initial data
    random_design = RandomDesign(space)
    x_init = random_design.get_samples(10)
    initial_results = objective.evaluate(x_init)
    y_init = np.array([res.Y for res in initial_results])

    # Create multi source acquisition optimizer
    acquisition_optimizer = GradientAcquisitionOptimizer(space)
    multi_source_acquisition_optimizer = MultiSourceAcquisitionOptimizer(
        acquisition_optimizer, space)

    # Create GP model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    # Create acquisition
    acquisition = ModelVariance(model)

    # Create batch candidate point calculator
    batch_candidate_point_calculator = GreedyBatchPointCalculator(
        model, acquisition, multi_source_acquisition_optimizer, batch_size=5)

    initial_loop_state = LoopState(initial_results)
    loop = OuterLoop(batch_candidate_point_calculator,
                     FixedIntervalUpdater(model, 1), initial_loop_state)

    loop.run_loop(objective, 10)
    # 10 initial points + 10 iterations x batch_size 5 = 60 evaluated points
    assert loop.loop_state.X.shape[0] == 60