def setup_bo_basics(self):
    """Instantiate the BO components: surrogate model(s), optional
    constraint surrogates, the acquisition function, its optimizer and —
    when trust-region search is enabled — the TuRBO state.
    """
    if self.num_objs == 1:
        self.surrogate_model = build_surrogate(func_str=self.surrogate_type,
                                               config_space=self.config_space,
                                               rng=self.rng,
                                               history_hpo_data=self.history_bo_data)
    else:
        # Multi-objective: one independent surrogate per objective.
        self.surrogate_model = [
            build_surrogate(func_str=self.surrogate_type,
                            config_space=self.config_space,
                            rng=self.rng,
                            history_hpo_data=self.history_bo_data)
            for _ in range(self.num_objs)
        ]

    if self.num_constraints > 0:
        # One surrogate per black-box constraint.
        self.constraint_models = [
            build_surrogate(func_str=self.constraint_surrogate_type,
                            config_space=self.config_space,
                            rng=self.rng)
            for _ in range(self.num_constraints)
        ]

    self.acquisition_function = build_acq_func(func_str=self.acq_type,
                                               model=self.surrogate_model,
                                               constraint_models=self.constraint_models,
                                               mc_times=self.mc_times,
                                               ref_point=self.ref_point)
    self.optimizer = build_optimizer(func_str=self.acq_optimizer_type,
                                     acq_func=self.acquisition_function,
                                     config_space=self.config_space,
                                     rng=self.rng)

    if self.use_trust_region:
        # TuRBO tracks a trust region over the continuous dimensions only
        # (get_types marks continuous hyperparameters with type 0).
        space_types, _space_bounds = get_types(self.config_space)
        num_cont_dims = np.sum(space_types == 0)
        self.turbo_state = TurboState(num_cont_dims)
def setup_bo_basics(self):
    """Prepare the basic BO components.

    Builds the surrogate model(s), constraint surrogates, acquisition
    function and acquisition optimizer, choosing multi-objective variants
    according to ``self.acq_type``.

    Returns
    -------
    An optimizer object.
    """
    use_single_surrogate = self.num_objs == 1 or self.acq_type == 'parego'
    if use_single_surrogate:
        # Single objective, or ParEGO which scalarizes the objectives.
        self.surrogate_model = build_surrogate(
            func_str=self.surrogate_type, config_space=self.config_space,
            rng=self.rng, history_hpo_data=self.history_bo_data)
    else:
        # Multi-objective: one independent surrogate per objective.
        self.surrogate_model = [
            build_surrogate(func_str=self.surrogate_type,
                            config_space=self.config_space,
                            rng=self.rng,
                            history_hpo_data=self.history_bo_data)
            for _ in range(self.num_objs)
        ]

    if self.num_constraints > 0:
        # One surrogate per black-box constraint.
        self.constraint_models = [
            build_surrogate(func_str=self.constraint_surrogate_type,
                            config_space=self.config_space,
                            rng=self.rng)
            for _ in range(self.num_constraints)
        ]

    # The entropy-search family needs the space encoding to draw samples.
    if self.acq_type in ['mesmo', 'mesmoc', 'mesmoc2', 'usemo']:
        types, bounds = get_types(self.config_space)
        self.acquisition_function = build_acq_func(
            func_str=self.acq_type, model=self.surrogate_model,
            constraint_models=self.constraint_models,
            types=types, bounds=bounds)
    else:
        self.acquisition_function = build_acq_func(
            func_str=self.acq_type, model=self.surrogate_model,
            constraint_models=self.constraint_models,
            ref_point=self.ref_point)

    # These acquisitions come with dedicated optimizers; override the
    # configured optimizer type accordingly.
    if self.acq_type == 'usemo':
        self.acq_optimizer_type = 'usemo_optimizer'
    elif self.acq_type.startswith('mesmo'):
        self.acq_optimizer_type = 'mesmo_optimizer'
    self.optimizer = build_optimizer(func_str=self.acq_optimizer_type,
                                     acq_func=self.acquisition_function,
                                     config_space=self.config_space,
                                     rng=self.rng)
def setup_bo_basics(self, acq_type='ei', acq_optimizer_type='local_random'):
    """Create the surrogate model, acquisition function and acquisition
    optimizer for a single-objective BO loop.

    Parameters
    ----------
    acq_type : str
        Name of the acquisition function to build (default 'ei').
    acq_optimizer_type : str
        Name of the acquisition optimizer (default 'local_random').
    """
    self.surrogate_model = build_surrogate(
        func_str=self.surrogate_type,
        config_space=self.config_space,
        rng=self.rng,
        history_hpo_data=self.history_bo_data)
    self.acquisition_function = build_acq_func(
        func_str=acq_type,
        model=self.surrogate_model)
    self.optimizer = build_optimizer(
        func_str=acq_optimizer_type,
        acq_func=self.acquisition_function,
        config_space=self.config_space,
        rng=self.rng)
def build_single_surrogate(self, X: np.ndarray, y: np.ndarray, normalize):
    """Fit a single surrogate model on the observations (X, y).

    Parameters
    ----------
    X : np.ndarray
        Encoded configuration array (one row per observation).
    y : np.ndarray
        Observed objective values; never modified in place.
    normalize : str
        Target transformation: 'standardize' (zero mean, unit variance),
        'scale' (min-max to [0, 1]) or 'none'.

    Returns
    -------
    The trained surrogate model.
    """
    assert normalize in ['standardize', 'scale', 'none']
    model = build_surrogate(self.surrogate_type, self.config_space,
                            np.random.RandomState(self.random_seed))
    if normalize != 'none':
        # BUGFIX: the original mutated the caller's array (`y[0] += 1e-4`).
        # Copy and cast to float64 so the perturbation is local and is not
        # silently truncated on integer input.
        y = np.array(y, dtype=np.float64)
        if (y == y[0]).all():
            # Perturb a constant target to avoid zero variance / zero range
            # in the normalizers below.
            y[0] += 1e-4
        if normalize == 'standardize':
            y, _, _ = zero_mean_unit_var_normalization(y)
        else:  # 'scale'
            y, _, _ = zero_one_normalization(y)
    model.train(X, y)
    return model
def train_regressor(self, X, y, is_top=False):
    """Train one regressor of the stacking ensemble.

    Base regressors after the first are fitted on the residuals of the
    current stacked prediction; the top-level regressor is always fitted
    on the raw targets.

    Parameters
    ----------
    X : array-like
        Training inputs.
    y : array-like
        Training targets.
    is_top : bool
        True when fitting the final (top-level) regressor.
    """
    seed_rng = np.random.RandomState(self.random_seed)
    regressor = build_surrogate(self.surrogate_type, self.config_space, seed_rng)

    n_samples = len(X)
    if is_top:
        self.final_num = n_samples
    else:
        self.num_configs.append(n_samples)

    # Base regressors beyond the first learn the residual of the stack.
    fit_on_residuals = (not is_top) and len(self.base_regressors) > 0
    if fit_on_residuals:
        stacked_mean, _stacked_std = self.calculate_stacked_results(X)
        regressor.train(X, y - stacked_mean)
    else:
        regressor.train(X, y)

    if is_top:
        self.final_regressor = regressor
    else:
        self.base_regressors.append(regressor)
def build_source_surrogates(self, normalize):
    """Train one base surrogate per source HPO task for transfer learning.

    Parameters
    ----------
    normalize : str
        Target transformation per task: 'standardize' or 'scale'
        (the latter rescales to [-1, 1]); anything else raises ValueError.
    """
    if self.source_hpo_data is None:
        self.logger.warning(
            'No history BO data provided, resort to naive BO optimizer without TL.'
        )
        return

    self.logger.info('Start to train base surrogates.')
    start_time = time.time()
    self.source_surrogates = list()
    for hpo_evaluation_data in self.source_hpo_data:
        print('.', end='')  # progress marker, one dot per source task
        model = build_surrogate(self.surrogate_type, self.config_space,
                                np.random.RandomState(self.random_seed))

        configs = list(hpo_evaluation_data.keys())
        perfs = list(hpo_evaluation_data.values())
        X = convert_configurations_to_array(configs)
        y = np.array(perfs, dtype=np.float64)

        # Optionally cap the number of trials used from each source task.
        if self.num_src_hpo_trial != -1:
            X = X[:self.num_src_hpo_trial]
            y = y[:self.num_src_hpo_trial]

        if normalize == 'standardize':
            if (y == y[0]).all():
                # Perturb a constant target to avoid zero variance.
                y[0] += 1e-4
            y, _, _ = zero_mean_unit_var_normalization(y)
        elif normalize == 'scale':
            if (y == y[0]).all():
                y[0] += 1e-4
            y, _, _ = zero_one_normalization(y)
            y = 2 * y - 1.  # map [0, 1] onto [-1, 1]
        else:
            raise ValueError('Invalid parameter in norm.')

        # Track each task's best (normalized) observed value.
        self.eta_list.append(np.min(y))
        model.train(X, y)
        self.source_surrogates.append(model)
    self.logger.info('Building base surrogates took %.3fs.' % (time.time() - start_time))