class Metamodel:
    """
    The Metamodel has four significant elements: the model, the surrogate,
    the relevator and the history of evaluations. At every step, the
    metamodel uses the relevator to decide whether to use the model or the
    surrogate, and afterwards updates the history and its components.

    All methods and attributes that start with '_' should be treated as
    private.
    """

    def __init__(self, metamodel_kwargs, model_kwargs, surrogate_kwargs,
                 relevator_kwargs, history_kwargs):
        """Initialize the Metamodel with the specified components."""
        self.step_index = 0
        self.model_evaluations = 0

        self.model = Model(model_kwargs)
        self.surrogate = Surrogate(self, surrogate_kwargs)
        self.relevator = Relevator(self, relevator_kwargs)
        self.history = History(self, history_kwargs)

        self._random_seed = metamodel_kwargs.get("random_seed", None)
        if self._random_seed is not None:
            self.model.set_random_seed(self._random_seed)
            self.surrogate.set_random_seed(self._random_seed)
            self.relevator.set_random_seed(self._random_seed)

    def evaluate(self, coords):
        """Evaluate the Metamodel at the given point."""
        # Determine relevance.
        relevance = self.relevator.evaluate(coords)
        is_relevant = self.relevator.is_relevant(relevance)

        # Decide which prediction to use: the expensive model for relevant
        # points, the cheap surrogate otherwise.
        if is_relevant:
            prediction = self.model.evaluate(coords)
            self.model_evaluations += 1
        else:
            prediction = self.surrogate.evaluate(coords)

        # Update history and components.
        self.history.update(coords, prediction, relevance, is_relevant)

        # Update index and return result.
        self.step_index += 1
        return prediction
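# Minimal usage sketch for the Metamodel above. The option names inside the
# kwargs dictionaries are hypothetical placeholders, not a documented API;
# adapt them to the actual Model/Surrogate/Relevator/History implementations
# in this repository.
metamodel = Metamodel(
    metamodel_kwargs={"random_seed": 42},
    model_kwargs={},
    surrogate_kwargs={},
    relevator_kwargs={},
    history_kwargs={},
)

for coords in [(0.0, 0.0), (0.5, 0.5), (1.0, 1.0)]:
    prediction = metamodel.evaluate(coords)

# Fraction of steps that fell back on the expensive model rather than the
# surrogate.
model_ratio = metamodel.model_evaluations / metamodel.step_index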
def get_toolbox(self, xtr, phi_failure, phi_parallel, global_failed_indices,
                skwargs, cfunc=None, cargs=(), ckwargs={}, verbose=True):
    '''
    Modify parent's toolbox method for single objective optimisation.

    Parameters
    ----------
    xtr (np.array): training decision vectors.
    phi_failure, phi_parallel: failure-handling models forwarded to the
        Surrogate.
    global_failed_indices: indices of failed evaluations across all runs;
        not used directly here (the surrogate tracks its own).
    skwargs (dict): keyword arguments for infill criterion; not used here.
    cfunc (function): cheap constraint function.
    cargs (tuple): arguments of cheap constraint function.
    ckwargs (dict): keyword arguments of cheap constraint function.
    verbose (bool): whether to print verbose comments.

    Returns a DEAP toolbox.
    '''
    ytr, xtr, xinds, local_failed_indices, success = \
        self.scalarise(xtr, kwargs=skwargs)
    self.current_hv = self.current_hpv()
    surr = Surrogate(xtr, ytr, phi_failure, phi_parallel,
                     local_failed_indices, self.kernel.copy(),
                     verbose=verbose)
    self.surr = surr
    self.surr.success = success
    if verbose:
        print("surr.global_failed_indices", self.surr.global_failed_indices)
        print("local_failed_indices", local_failed_indices)
    self.surr.xinds = xinds

    ########################
    # Get bounds (APR added)
    ########################
    lb = self.lower_bounds
    ub = self.upper_bounds
    xmean = self.surr.xbar
    xsd = self.surr.xstd
    ysd = self.surr.ystd
    bounds = self.get_bounds((lb - xmean) / xsd, (ub - xmean) / xsd)

    ##################################
    # Compute the gradient (APR added)
    ##################################
    # Lipschitz-style bound on the predictive mean, rescaled from the
    # normalised space back to the original units.
    L = self.max_predictive_gradient(surr, bounds) * (ysd / xsd)
    self.surr.L = L

    ######################################################################
    # Best value from the surrogate model, min(mean(x)), rather than the
    # rough approximation min_i{y_i} (APR added). Note: the dense grid
    # below assumes a one-dimensional decision space.
    ######################################################################
    maxfevals = 20000
    tx = np.linspace(lb, ub, maxfevals)[:, None]
    pred_y, pred_s = self.surr.predict(tx)
    self.surr.best = min(pred_y)

    return self.init_deap(surr.penalized_acquisition,
                          obj_sense=self.obj_sense[0],
                          cfunc=cfunc, cargs=cargs, ckwargs=ckwargs,
                          lb=self.lower_bounds, ub=self.upper_bounds)
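# The method above relies on `self.max_predictive_gradient(surr, bounds)`,
# which is not shown in this listing. Below is a minimal, standalone sketch
# of what such a routine might do: estimate the largest gradient norm of the
# surrogate's predictive mean by finite differences over random points in
# the box bounds. It assumes `bounds` is a (lb, ub) pair of 1-D arrays and
# that `surr.predict(x)` returns (mean, std), as used above; it is an
# illustrative stand-in, not the repository's implementation.
import numpy as np

def max_predictive_gradient_sketch(surr, bounds, n_samples=1000, eps=1e-6,
                                   seed=None):
    rng = np.random.default_rng(seed)
    lb = np.atleast_1d(bounds[0]).astype(float)
    ub = np.atleast_1d(bounds[1]).astype(float)
    dim = lb.shape[0]
    x = rng.uniform(lb, ub, size=(n_samples, dim))
    mu, _ = surr.predict(x)
    grad_sq = np.zeros(n_samples)
    for d in range(dim):
        # Forward difference along dimension d.
        xp = x.copy()
        xp[:, d] += eps
        mup, _ = surr.predict(xp)
        grad_sq += ((np.ravel(mup) - np.ravel(mu)) / eps) ** 2
    # Largest observed gradient norm: a sampled Lipschitz-style estimate.
    return np.sqrt(grad_sq).max()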
def get_toolbox(self, xtr, skwargs, cfunc=None,
                cargs=(), ckwargs={}, verbose=True):
    """
    Generate a DEAP toolbox for the infill criterion optimiser.

    Parameters
    ----------
    xtr (np.array): training decision vectors.
    skwargs (dict): options for infill criterion calculation; varies with
        technique.
    cfunc (function): cheap constraint function.
    cargs (tuple): arguments for constraint function.
    ckwargs (dict): keyword arguments for constraint function.
    verbose (bool): whether to print more comments.

    Returns a DEAP toolbox.
    """
    ytr = self.scalarise(xtr, kwargs=skwargs)
    self.current_hv = self.current_hpv()
    surr = Surrogate(xtr, ytr, self.kernel.copy(), verbose=verbose)

    #########################################
    # Add surrogate to base class (APR added)
    #########################################
    self.surr = surr

    ########################
    # Get bounds (APR added)
    ########################
    lb = self.lower_bounds
    ub = self.upper_bounds
    xmean = self.surr.xbar
    xsd = self.surr.xstd
    ysd = self.surr.ystd
    bounds = self.get_bounds((lb - xmean) / xsd, (ub - xmean) / xsd)

    ##################################
    # Compute the gradient (APR added)
    ##################################
    L = self.max_predictive_gradient(surr, bounds) * (ysd / xsd)
    self.surr.L = L

    #######################################################
    # Set best based on minimum observed so far (APR added)
    #######################################################
    self.surr.best = min(ytr)

    return self.init_deap(surr.penalized_acquisition,
                          obj_sense=self.obj_sense[0],
                          cfunc=cfunc, cargs=cargs, ckwargs=ckwargs,
                          lb=self.lower_bounds, ub=self.upper_bounds)
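# Usage sketch for the toolbox returned above. It assumes `opt` is an
# instance of the optimiser class that defines get_toolbox, and that
# init_deap registers the standard DEAP entries used below; those registered
# names are assumptions, not a documented API. Note the design difference
# from the failure-aware variant earlier: here `best` is the minimum
# *observed* scalarised value, min(ytr), rather than the minimum of the
# surrogate's predictive mean over a dense grid.
toolbox = opt.get_toolbox(xtr, skwargs={})
pop = toolbox.population(n=100)
# Evolve `pop` with a standard DEAP loop (e.g. deap.algorithms.eaSimple)
# against the penalised acquisition, then evaluate the best individual on
# the expensive objective and append it to the training set.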
def get_toolbox(self, xtr, skwargs, cfunc=None,
                cargs=(), ckwargs={}, verbose=True):
    """
    Generate a DEAP toolbox for the infill criterion optimiser.

    Parameters
    ----------
    xtr (np.array): training decision vectors.
    skwargs (dict): options for infill criterion calculation; varies with
        technique.
    cfunc (function): cheap constraint function.
    cargs (tuple): arguments for constraint function.
    ckwargs (dict): keyword arguments for constraint function.
    verbose (bool): whether to print more comments.

    Returns a DEAP toolbox.
    """
    ytr = self.scalarise(xtr, kwargs=skwargs)
    self.current_hv = self.current_hpv()
    surr = Surrogate(xtr, ytr, self.kernel.copy(), verbose=verbose)
    return self.init_deap(surr.expected_improvement, obj_sense=1,
                          cfunc=cfunc, cargs=cargs, ckwargs=ckwargs,
                          lb=self.lower_bounds, ub=self.upper_bounds)
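# For reference, a minimal sketch of the expected improvement criterion that
# the surrogate's `expected_improvement` method is assumed to compute, in
# its standard form for minimisation:
#     EI(x) = (y_best - mu(x)) * Phi(z) + s(x) * phi(z),
#     z = (y_best - mu(x)) / s(x),
# where Phi and phi are the standard normal CDF and PDF. This standalone
# version is illustrative, not the repository's implementation.
import numpy as np
from scipy.stats import norm

def expected_improvement_sketch(mu, s, y_best):
    """EI for minimisation given predictive mean `mu`, predictive std `s`,
    and the best observed value `y_best`."""
    mu = np.asarray(mu, dtype=float)
    s = np.asarray(s, dtype=float)
    with np.errstate(divide="ignore", invalid="ignore"):
        z = (y_best - mu) / s
        ei = (y_best - mu) * norm.cdf(z) + s * norm.pdf(z)
    # Zero improvement where the predictive uncertainty vanishes.
    return np.where(s > 0, ei, 0.0)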
def get_toolbox(self, xtr, skwargs, cfunc=None,
                cargs=(), ckwargs={}, verbose=True):
    '''
    Modify parent's toolbox method for single objective optimisation.

    Parameters
    ----------
    xtr (np.array): training decision vectors.
    skwargs (dict): keyword arguments for infill criterion; not used here.
    cfunc (function): cheap constraint function.
    cargs (tuple): arguments of cheap constraint function.
    ckwargs (dict): keyword arguments of cheap constraint function.
    verbose (bool): whether to print verbose comments.

    Returns a DEAP toolbox.
    '''
    ytr = self.scalarise(xtr, kwargs=skwargs)
    self.current_hv = self.current_hpv()
    surr = Surrogate(xtr, ytr, self.kernel.copy(), verbose=verbose)
    self.surr = surr
    return self.init_deap(surr.expected_improvement,
                          obj_sense=self.obj_sense[0],
                          cfunc=cfunc, cargs=cargs, ckwargs=ckwargs,
                          lb=self.lower_bounds, ub=self.upper_bounds)