class ExpectedImprovementPerCost(Acquisition):

    def __init__(self, model: Union[IModel, IDifferentiable], cost_model: Union[IModel, IDifferentiable],
                 jitter: np.float64 = np.float64(0)) -> None:
        """
        Cost-sensitive acquisition: expected improvement divided by the predicted
        evaluation cost, EI(x) / cost(x). For background on expected improvement see:

        Efficient Global Optimization of Expensive Black-Box Functions
        Jones, Donald R. and Schonlau, Matthias and Welch, William J.
        Journal of Global Optimization

        :param model: model of the objective, used to compute the improvement.
        :param cost_model: model of the evaluation cost; its predictive mean
                           divides the improvement.
        :param jitter: parameter to encourage extra exploration.
        """

        self.model = model
        self.cost_model = cost_model
        self.jitter = jitter

        # Plain EI over the objective model; this class only rescales it by cost.
        self.ei = ExpectedImprovement(model, jitter)

    def evaluate(self, x: np.ndarray) -> np.ndarray:
        """
        Computes the Expected Improvement per unit cost.

        :param x: points where the acquisition is evaluated.
        :return: EI(x) divided by the cost model's predictive mean at x.
        """
        improvement = self.ei.evaluate(x)

        cost_mean, _ = self.cost_model.predict(x)

        return improvement / cost_mean

    def evaluate_with_gradients(self, x: np.ndarray) -> Tuple:
        """
        Computes the Expected Improvement per unit cost and its derivative.

        :param x: locations where the evaluation with gradients is done.
        :return: tuple of (EI / cost_mean, gradient of that ratio w.r.t. x).
        """

        improvement, dimprovement_dx = self.ei.evaluate_with_gradients(x)

        cost_mean, _ = self.cost_model.predict(x)

        # Bug fix: the denominator is the COST model's mean, so its gradient
        # must come from the cost model as well (the original code queried the
        # objective model here, yielding a wrong gradient).
        dcost_dx, _ = self.cost_model.get_prediction_gradients(x)

        # Quotient rule: d(f/g) = (f' * g - g' * f) / g^2
        return improvement / cost_mean, (dimprovement_dx * cost_mean - dcost_dx * improvement) / (cost_mean ** 2)

    def has_gradients(self) -> bool:
        """Returns that this acquisition has gradients"""
        return True
# Example no. 2 (second, truncated snippet appended below by the extraction tool)
                                              Y_train,
                                              n_fidelities=2,
                                              kernels=kernels,
                                              verbose=True,
                                              optimization_restarts=1)
# Fix the Gaussian noise variance of every per-fidelity sub-model to 0
# (i.e. treat the observations as noiseless) before hyperparameter fitting.
for m in nonlin_mf_model.models:
    m.Gaussian_noise.variance.fix(0)

# Optimize the remaining (non-fixed) hyperparameters of the multi-fidelity model.
nonlin_mf_model.optimize()

# NOTE(review): mid-file import — should live at the top of the file per PEP 8.
from emukit.bayesian_optimization.acquisitions import ExpectedImprovement
# Expected improvement acquisition over the fitted multi-fidelity model.
ei = ExpectedImprovement(nonlin_mf_model)

# Single 2-D query point (shape (1, 2)); np.atleast_2d is redundant here since
# the literal is already 2-D, but harmless.
ei_locations = np.atleast_2d(np.array([[0.4232, 0.6761]]))

# Print the EI value at the query location.
print(ei.evaluate(ei_locations))

# ## Compute mean and variance predictions
#
# hf_mean_nonlin_mf_model, hf_var_nonlin_mf_model = nonlin_mf_model.predict(X_plot_high)
# hf_std_nonlin_mf_model = np.sqrt(hf_var_nonlin_mf_model)
#
# lf_mean_nonlin_mf_model, lf_var_nonlin_mf_model = nonlin_mf_model.predict(X_plot_low)
# lf_std_nonlin_mf_model = np.sqrt(lf_var_nonlin_mf_model)
#
#
# ## Plot posterior mean and variance of nonlinear multi-fidelity model
#
# plt.figure(figsize=(12,8))
# plt.fill_between(x_plot.flatten(), (lf_mean_nonlin_mf_model - 1.96*lf_std_nonlin_mf_model).flatten(),
#                  (lf_mean_nonlin_mf_model + 1.96*lf_std_nonlin_mf_model).flatten(), color='g', alpha=0.3)