Example #1
0
    def __init__(self, model, space, optimizer, cost_withGradients=None):
        """Initialize the entropy-weighted EI acquisition.

        Parameters
        ----------
        model : surrogate model used by the acquisition
        space : design space of the optimization problem
        optimizer : acquisition-function optimizer
        cost_withGradients : optional cost function; defaults to a
            constant cost when not supplied
        """
        super(EntropyWeightedEI, self).__init__(model, space, optimizer)

        # Inner EI acquisition whose values this class builds on.
        self.EI = AcquisitionEI(model, space, optimizer, cost_withGradients)

        # Fix: compare against None with `is`, not `==` — equality can be
        # overridden by operand classes and PEP 8 mandates identity here.
        if cost_withGradients is None:
            self.cost_withGradients = constant_cost_withGradients
        else:
            self.cost_withGradients = cost_withGradients
Example #2
0
    def __init__(self,
                 model,
                 space,
                 optimizer=None,
                 cost_withGradients=None,
                 par_a=1,
                 par_b=1,
                 num_samples=10):
        """Set up EI integrated over Beta(par_a, par_b) jitter samples.

        num_samples jitter values are drawn once here and reused for
        every acquisition evaluation.
        """
        super(jitter_integrated_EI, self).__init__(model, space, optimizer)

        # Beta-distribution parameters and Monte Carlo sample count.
        self.par_a = par_a
        self.par_b = par_b
        self.num_samples = num_samples
        # Pre-drawn jitter values, one per Monte Carlo sample.
        self.samples = beta(self.par_a, self.par_b, self.num_samples)
        # Underlying analytic EI evaluated at each jitter value.
        self.EI = AcquisitionEI(model, space, optimizer, cost_withGradients)
Example #3
0
    def __init__(self, api_config):
        """Build wrapper class to use optimizer in benchmark.

        Parameters
        ----------
        api_config : dict-like of dict-like
            Configuration of the optimization variables. See API description.
        """
        AbstractOptimizer.__init__(self, api_config)

        api_space = BoEI.api_manipulator(api_config)  # used for GPyOpt initialization

        self.space_x = JointSpace(api_config) # used for warping & unwarping of new suggestions & observations

        # Whether the space contains categorical variables, plus an
        # indicator vector (exact semantics defined by BoEI.is_cat).
        self.hasCat, self.cat_vec = BoEI.is_cat(api_config)

        # Dimensionality of the (warped) search space.
        self.dim = len(self.space_x.get_bounds())

        # Objective values are supplied externally by the benchmark,
        # so no callable is registered here.
        self.objective = GPyOpt.core.task.SingleObjective(None)

        self.space = GPyOpt.Design_space(api_space)

        self.model = GPyOpt.models.GPModel(optimize_restarts=5,verbose=False)

        # NOTE(review): "aquisition" is misspelled, but these attribute
        # names are presumably read by other methods of this class —
        # do not rename without updating all callers.
        self.aquisition_optimizer = GPyOpt.optimization.AcquisitionOptimizer(self.space)


        self.aquisition = AcquisitionEI(self.model, self.space, optimizer=self.aquisition_optimizer, cost_withGradients=None)

        # Unset until configured elsewhere — presumably by the suggest
        # path; TODO confirm against the rest of the class.
        self.batch_size = None
Example #4
0
class jitter_integrated_EI(AcquisitionBase):
    """Expected Improvement averaged over Beta-distributed jitter values.

    Draws ``num_samples`` jitter values from Beta(par_a, par_b) once at
    construction time, then evaluates the inner EI at each jitter and
    returns the Monte Carlo average.
    """

    # Gradients are available analytically via the inner EI.
    analytical_gradient_prediction = True

    def __init__(self,
                 model,
                 space,
                 optimizer=None,
                 cost_withGradients=None,
                 par_a=1,
                 par_b=1,
                 num_samples=10):
        super(jitter_integrated_EI, self).__init__(model, space, optimizer)

        self.par_a = par_a
        self.par_b = par_b
        self.num_samples = num_samples
        # One pre-drawn jitter value per Monte Carlo sample.
        self.samples = beta(self.par_a, self.par_b, self.num_samples)
        # Inner analytic EI re-evaluated at every jitter value.
        self.EI = AcquisitionEI(model, space, optimizer, cost_withGradients)

    def acquisition_function(self, x):
        """Return the EI value at ``x`` averaged over all jitter samples."""
        total = np.zeros((x.shape[0], 1))
        for jitter in self.samples:
            self.EI.jitter = jitter
            total += self.EI.acquisition_function(x)
        return total / self.num_samples

    def acquisition_function_withGradients(self, x):
        """Return jitter-averaged EI values and gradients at ``x``."""
        total = np.zeros((x.shape[0], 1))
        total_grad = np.zeros(x.shape)
        for jitter in self.samples:
            self.EI.jitter = jitter
            value, grad = self.EI.acquisition_function_withGradients(x)
            total += value
            total_grad += grad
        return total / self.num_samples, total_grad / self.num_samples
Example #5
0
class EntropyWeightedEI(AcquisitionBase):
    """Expected Improvement augmented with the model's predictive entropy.

    Adds the differential entropy of the Gaussian predictive distribution
    at each candidate point to the EI value, so uncertain regions score
    higher.
    """

    analytical_gradient_prediction = False

    def __init__(self, model, space, optimizer, cost_withGradients=None):
        """Initialize with a surrogate model, design space, and optimizer.

        cost_withGradients defaults to a constant cost when omitted.
        """
        super(EntropyWeightedEI, self).__init__(model, space, optimizer)

        # Inner EI acquisition providing the base utility values.
        self.EI = AcquisitionEI(model, space, optimizer, cost_withGradients)

        # Fix: compare against None with `is`, not `==` — equality can be
        # overridden by operand classes and PEP 8 mandates identity here.
        if cost_withGradients is None:
            self.cost_withGradients = constant_cost_withGradients
        else:
            self.cost_withGradients = cost_withGradients

    def _compute_acq(self, x):
        """Return EI(x) plus the Gaussian predictive entropy at x."""
        m, s = self.model.predict(x)
        acqu_x = self.EI.acquisition_function(x)

        # Differential entropy of N(m, s^2): 0.5 * log(2*pi*e*s^2).
        h = 0.5 * np.log(2 * math.pi * math.e * np.square(s))
        # Element-wise add kept as a loop to preserve the original
        # broadcasting behavior regardless of h's exact shape.
        for i in range(acqu_x.shape[0]):
            acqu_x[i] += h[i]
        return acqu_x
Example #6
0
class jitter_integrated_EI(AcquisitionBase):
    """EI averaged over randomly perturbed Beta-distributed jitter values.

    Like the plain jitter-integrated EI, but every evaluation perturbs
    each pre-drawn jitter sample with fresh Gaussian noise, so results
    are stochastic even for a fixed input. Also carries a constraint
    model and data accumulators (currently unused by the active code).
    """

    # Gradients are available analytically via the inner EI.
    analytical_gradient_prediction = True

    def __init__(self,
                 model,
                 space,
                 constraint_model,
                 optimizer=None,
                 cost_withGradients=None,
                 par_a=1,
                 par_b=1,
                 num_samples=10):
        super(jitter_integrated_EI, self).__init__(model, space, optimizer)

        self.par_a = par_a
        self.par_b = par_b
        self.num_samples = num_samples
        # Base jitter values; each acquisition call adds fresh noise.
        self.samples = beta(self.par_a, self.par_b, self.num_samples)
        self.EI = AcquisitionEI(model, space, optimizer, cost_withGradients)
        # Accumulators for constraint-model training data. Inputs appear
        # to be 13-dimensional — TODO confirm against the caller.
        self.xall = np.empty((1, 13))
        self.yall = np.empty((1, 1))
        self.constraint_model = constraint_model

    def acquisition_function(self, x):
        """Return EI at ``x`` averaged over noise-perturbed jitter samples.

        NOTE: fresh Gaussian noise is drawn per jitter sample on every
        call, so repeated calls with the same ``x`` differ.
        """
        total = np.zeros((x.shape[0], 1))
        for base_jitter in self.samples:
            self.EI.jitter = base_jitter + np.random.normal(size=1)
            total += self.EI.acquisition_function(x)
        return total / self.num_samples

    def acquisition_function_withGradients(self, x):
        """Return noise-perturbed, jitter-averaged EI values and gradients."""
        total = np.zeros((x.shape[0], 1))
        total_grad = np.zeros(x.shape)
        for base_jitter in self.samples:
            self.EI.jitter = base_jitter + np.random.normal(size=1)
            value, grad = self.EI.acquisition_function_withGradients(x)
            total += value
            total_grad += grad
        return total / self.num_samples, total_grad / self.num_samples