import unittest

import numpy as np
from scipy.optimize import fmin_l_bfgs_b

# Optimization and LBFGS_NAME are assumed to come from the project's own modules.


class TestOptimization(unittest.TestCase):

    def setUp(self):
        def f(x):
            return x ** 2

        def grad(x):
            return 2.0 * x

        self.bounds = [(-1, 1)]
        self.opt = Optimization(LBFGS_NAME, f, self.bounds, grad)
        self.opt_2 = Optimization(LBFGS_NAME, f, self.bounds, grad, minimize=False)

    def test_get_optimizer(self):
        assert Optimization._get_optimizer(LBFGS_NAME) == fmin_l_bfgs_b

    def test_optimize(self):
        # Minimizing x**2 on [-1, 1] gives x = 0, with value and gradient 0.
        opt = self.opt.optimize(np.array([0.9]))
        assert opt['solution'] == 0
        assert opt['optimal_value'] == 0
        assert opt['gradient'] == 0

        # Maximizing x**2 on [-1, 1] from 0.9 reaches the boundary x = 1.
        opt_2 = self.opt_2.optimize(np.array([0.9]))
        assert opt_2['solution'] == 1
        assert opt_2['optimal_value'] == 1
        assert opt_2['gradient'] == 2
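# A minimal standalone sketch (not part of the test suite) of what the
# L-BFGS-B branch above wraps: scipy.optimize.fmin_l_bfgs_b minimizing
# f(x) = x**2 over [-1, 1]. Only numpy/scipy names are used here.
import numpy as np
from scipy.optimize import fmin_l_bfgs_b

x_opt, f_opt, info = fmin_l_bfgs_b(
    lambda x: x[0] ** 2,          # objective
    np.array([0.9]),              # starting point
    fprime=lambda x: 2.0 * x,     # analytic gradient
    bounds=[(-1, 1)])
# x_opt ~ [0.], f_opt ~ 0.0. Maximization is presumably handled by negating
# the objective and its gradient, which is what minimize=False suggests.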
def iteration_algorithm(self, n_restarts=10, n_samples=10):
    """
    Chooses the next point to evaluate: optimizes the EI objective over the
    control domain, then picks the environment part (see get_environment).

    :param n_restarts: (int) number of random restarts for the optimizer
    :param n_samples: (int) number of samples of the random variable
    :return: np.array, concatenation of the optimal control and environment
    """
    if self.parameters is None:
        self.estimate_parameters_kernel()
    parameters = self.parameters

    samples = self.sample_variable(parameters, n_samples)

    bounds = [
        tuple(bound) for bound in [self.gp.bounds[i] for i in range(self.x_domain)]
    ]

    start = DomainService.get_points_domain(
        n_restarts, self.gp.bounds[0:self.x_domain],
        type_bounds=self.gp.type_bounds[0:self.x_domain])

    start_points = {}
    for i in range(n_restarts):
        start_points[i] = start[i]

    optimization = Optimization(
        NELDER,
        wrapper_ei_objective,
        bounds,
        None,
        hessian=None, tol=None,
        minimize=False)

    args = (False, None, True, 0, optimization, self, samples, parameters)

    sol = Parallel.run_function_different_arguments_parallel(
        wrapper_optimize, start_points, *args)

    solutions = []
    results_opt = []
    for i in range(n_restarts):
        if sol.get(i) is None:
            logger.info(
                "Error in computing optimum of a_{n+1} at one sample at point %d"
                % i)
            continue
        solutions.append(sol.get(i)['optimal_value'])
        results_opt.append(sol.get(i))

    ind_max = np.argmax(solutions)
    control = results_opt[ind_max]['solution']

    # TODO: do in parallel
    environment = self.get_environment(control, parameters)

    return np.concatenate((control, environment))
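# A compact, serial sketch of the multi-start pattern used above: optimize
# from several random starts with Nelder-Mead and keep the best result.
# toy_objective is illustrative, not part of the project.
import numpy as np
from scipy.optimize import minimize

def toy_objective(x):
    return -np.sum((x - 0.3) ** 2)  # maximum at x = 0.3

starts = np.random.uniform(-1.0, 1.0, size=(10, 2))
results = [minimize(lambda x: -toy_objective(x), s, method='Nelder-Mead')
           for s in starts]
best = max(results, key=lambda r: -r.fun)  # argmax over restarts
# best.x plays the role of results_opt[ind_max]['solution'] above.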
def get_environment(self, control, parameters_kernel, n_restarts=10):
    """
    Chooses the environment part of the next point by optimizing the
    squared-error criterion. See p.1142, eq. 15.

    :param control: np.array, control part of the point
    :param parameters_kernel: parameters of the kernel
    :param n_restarts: (int)
    :return: np.array, optimal environment part of the point
    """
    bounds = [
        tuple(bound) for bound in [self.gp.bounds[i] for i in self.w_domain]
    ]

    # Collapse each (possibly discrete) bound to a continuous [min, max] range.
    bounds_2 = []
    for bound in bounds:
        bounds_2.append([bound[0], bound[-1]])
    bounds = bounds_2

    start = DomainService.get_points_domain(n_restarts, bounds)

    start_points = {}
    for i in range(n_restarts):
        start_points[i] = start[i]

    optimization = Optimization(
        NELDER,
        wrapper_evaluate_squared_error,
        bounds,
        None,
        hessian=None, tol=None,
        minimize=False)

    run_parallel = True
    args = (False, None, run_parallel, 0, optimization, self, control,
            parameters_kernel)

    sol = Parallel.run_function_different_arguments_parallel(
        wrapper_optimize, start_points, *args)

    solutions = []
    results_opt = []
    for i in range(n_restarts):
        if sol.get(i) is None:
            logger.info(
                "Error in computing optimum of a_{n+1} at one sample at point %d"
                % i)
            continue
        solutions.append(sol.get(i)['optimal_value'])
        results_opt.append(sol.get(i))

    ind_max = np.argmax(solutions)
    environment = results_opt[ind_max]['solution']

    return environment
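# A tiny illustration of the bounds transformation above: a discrete bound
# such as (0, 1, 2, 3) is collapsed to the continuous range [0, 3], while a
# continuous pair like (-1, 1) is left as [-1, 1]. Values are made up.
raw_bounds = [(0, 1, 2, 3), (-1, 1)]
continuous = [[b[0], b[-1]] for b in raw_bounds]
assert continuous == [[0, 3], [-1, 1]]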
def estimate_parameters_kernel(self, n_restarts=10):
    """
    Estimates the kernel parameters by maximizing the log-posterior of the
    length scales, starting from samples of the parameter posterior.

    :param n_restarts: (int)
    :return: np.array, optimal kernel parameters
    """
    start = self.gp.sample_parameters_posterior(n_restarts)
    # Drop the first two entries of each sampled parameter vector; only the
    # remaining entries are optimized by the length-scale wrapper below.
    start = [sample[2:] for sample in start]

    dim = len(start[0])

    start_points = {}
    for i in xrange(n_restarts):
        start_points[i] = start[i]

    optimization = Optimization(
        NELDER,
        wrapper_log_posterior_distribution_length_scale,
        [(None, None) for i in range(dim)],
        None,
        hessian=None, tol=None,
        minimize=False)

    args = (False, None, True, 0, optimization, self)

    sol = Parallel.run_function_different_arguments_parallel(
        wrapper_optimize, start_points, *args)

    solutions = []
    results_opt = []
    for i in xrange(n_restarts):
        if sol.get(i) is None:
            logger.info(
                "Error in computing optimum of a_{n+1} at one sample at point %d"
                % i)
            continue
        solutions.append(sol.get(i)['optimal_value'])
        results_opt.append(sol.get(i))

    ind_max = np.argmax(solutions)

    self.parameters = results_opt[ind_max]['solution']

    return results_opt[ind_max]['solution']
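# A minimal sketch of the failure-tolerant collection pattern used above:
# workers that crashed leave a None entry, which is skipped before the argmax,
# so one bad restart does not abort the whole estimation. Values are made up.
import numpy as np

sol = {0: {'optimal_value': 1.2, 'solution': 'a'},
       1: None,                                    # this restart failed
       2: {'optimal_value': 3.4, 'solution': 'b'}}
results = [sol[i] for i in range(3) if sol.get(i) is not None]
best = results[int(np.argmax([r['optimal_value'] for r in results]))]
assert best['solution'] == 'b'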
def optimize(self, start=None, random_seed=None, parallel=True, n_restarts=10,
             n_best_restarts=0, n_samples_parameters=0, start_new_chain=False,
             maxepoch=11, **kwargs):
    """
    Optimizes the EI.

    :param start: np.array(n)
    :param random_seed: int
    :param parallel: boolean
    :param n_restarts: int
    :param n_best_restarts: (int) Chooses the best n_best_restarts starting
        points based on their EI values.
    :param n_samples_parameters: int
    :param start_new_chain: (boolean) If True, we start a new chain with
        n_samples_parameters samples of the parameters of the GP model.
    :param maxepoch: (int) maximum number of epochs for SGD
    :return: dict with the best optimization result
    """
    if random_seed is not None:
        np.random.seed(random_seed)

    if start_new_chain:
        if self.gp.name_model == BAYESIAN_QUADRATURE:
            self.gp.gp.start_new_chain()
            self.gp.gp.sample_parameters(DEFAULT_N_PARAMETERS)
        else:
            self.gp.start_new_chain()
            self.gp.sample_parameters(DEFAULT_N_PARAMETERS)

    bounds = self.gp.bounds

    if start is None:
        if self.gp.separate_tasks and self.gp.name_model == BAYESIAN_QUADRATURE:
            # Round n_restarts up to a multiple of n_tasks, then assign each
            # task index to an equal share of the shuffled restart slots.
            tasks = self.gp.tasks
            n_tasks = len(tasks)
            n_restarts = int(np.ceil(float(n_restarts) / n_tasks) * n_tasks)

            ind = [[i] for i in range(n_restarts)]
            np.random.shuffle(ind)

            task_chosen = np.zeros((n_restarts, 1))
            n_task_per_group = n_restarts // n_tasks

            for i in range(n_tasks):
                for j in range(n_task_per_group):
                    tk = ind[j + i * n_task_per_group]
                    task_chosen[tk, 0] = i

            start_points = DomainService.get_points_domain(
                n_restarts, bounds, type_bounds=self.gp.type_bounds,
                simplex_domain=self.simplex_domain)
            start_points = np.concatenate((start_points, task_chosen), axis=1)
        else:
            start_points = DomainService.get_points_domain(
                n_restarts, bounds, type_bounds=self.gp.type_bounds,
                simplex_domain=self.simplex_domain)

        start = np.array(start_points)

    if 0 < n_best_restarts < n_restarts:
        # Evaluate the EI at every starting point and keep the best ones.
        point_dict = {}
        for j in xrange(start.shape[0]):
            point_dict[j] = start[j, :]
        args = (False, None, True, 0, self, DEFAULT_N_PARAMETERS)

        ei_values = Parallel.run_function_different_arguments_parallel(
            wrapper_objective_acquisition_function, point_dict, *args)

        values = [ei_values[i] for i in ei_values]
        values_index = sorted(range(len(values)), key=lambda k: values[k])
        values_index = values_index[-n_best_restarts:]

        start = np.array([point_dict[j] for j in values_index])
        n_restarts = start.shape[0]

    bounds = [tuple(bound) for bound in self.bounds_opt]

    objective_function = wrapper_objective_acquisition_function
    grad_function = wrapper_gradient_acquisition_function

    if n_samples_parameters == 0:
        # TODO: CHECK THIS
        optimization = Optimization(
            LBFGS_NAME,
            objective_function,
            bounds,
            grad_function,
            minimize=False)

        args = (False, None, parallel, 0, optimization, self, n_samples_parameters)

        opt_method = wrapper_optimize

        point_dict = {}
        for j in xrange(n_restarts):
            point_dict[j] = start[j, :]
    else:
        # TODO: change wrapper_objective_voi, wrapper_grad_voi_sgd so that
        # TODO: max_{a_{n+1}} is not solved in parallel for the several starting points
        args_ = (self, DEFAULT_N_PARAMETERS)

        optimization = Optimization(
            SGD_NAME,
            objective_function,
            bounds,
            wrapper_evaluate_gradient_ei_sample_params,
            minimize=False,
            full_gradient=grad_function,
            args=args_,
            debug=True,
            simplex_domain=self.simplex_domain,
            **{'maxepoch': maxepoch})

        args = (False, None, parallel, 0, optimization, n_samples_parameters, self)

        # TODO: THINK ABOUT N_THREADS. Do we want to run it in parallel?
        opt_method = wrapper_sgd

        random_seeds = np.random.randint(0, 4294967295, n_restarts)
        point_dict = {}
        for j in xrange(n_restarts):
            point_dict[j] = [start[j, :], random_seeds[j]]

    optimal_solutions = Parallel.run_function_different_arguments_parallel(
        opt_method, point_dict, *args)

    maximum_values = []
    for j in xrange(n_restarts):
        maximum_values.append(optimal_solutions.get(j)['optimal_value'])

    ind_max = np.argmax(maximum_values)

    logger.info("Results of the optimization of the EI: ")
    logger.info(optimal_solutions.get(ind_max))

    self.optimization_results.append(optimal_solutions.get(ind_max))

    return optimal_solutions.get(ind_max)
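# A standalone sketch of the stratified start allocation above: round the
# number of restarts up to a multiple of the number of tasks, then assign
# each task to an equal share of shuffled restart slots. Values are made up.
import numpy as np

n_restarts, n_tasks = 10, 3
n_restarts = int(np.ceil(float(n_restarts) / n_tasks) * n_tasks)  # -> 12
ind = list(range(n_restarts))
np.random.shuffle(ind)
task_chosen = np.zeros((n_restarts, 1))
n_per_task = n_restarts // n_tasks
for i in range(n_tasks):
    for j in range(n_per_task):
        task_chosen[ind[j + i * n_per_task], 0] = i
# Each task index now appears exactly n_per_task times in task_chosen.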
def optimize_mean(self, n_restarts=10, candidate_solutions=None, candidate_values=None):
    """
    Optimizes the posterior mean of the GP model, and compares the result
    against any previously evaluated candidate solutions.

    :param n_restarts: (int)
    :param candidate_solutions: [np.array], optional points to compare against
    :param candidate_values: [float], values associated to the candidates
    :return: {'solution': np.array, 'optimal_value': [float]}
    """
    if self.parameters is None:
        self.estimate_parameters_kernel()
    parameters = self.parameters

    bounds = [
        tuple(bound) for bound in [self.gp.bounds[i] for i in range(self.x_domain)]
    ]

    start = DomainService.get_points_domain(
        n_restarts, self.gp.bounds[0:self.x_domain],
        type_bounds=self.gp.type_bounds[0:self.x_domain])

    start_points = {}
    for i in range(n_restarts):
        start_points[i] = start[i]

    optimization = Optimization(
        NELDER,
        wrapper_mean_objective,
        bounds,
        None,
        hessian=None, tol=None,
        minimize=False)

    args = (False, None, True, 0, optimization, self, parameters)

    sol = Parallel.run_function_different_arguments_parallel(
        wrapper_optimize, start_points, *args)

    solutions = []
    results_opt = []
    for i in range(n_restarts):
        if sol.get(i) is None:
            logger.info(
                "Error in computing optimum of a_{n+1} at one sample at point %d"
                % i)
            continue
        solutions.append(sol.get(i)['optimal_value'])
        results_opt.append(sol.get(i))

    ind_max = np.argmax(solutions)

    sol = results_opt[ind_max]
    sol['optimal_value'] = [sol['optimal_value']]

    if candidate_solutions is not None and len(candidate_solutions) > 0:
        # Evaluate the posterior mean at the candidates and keep the best
        # point overall.
        n = len(candidate_values)
        point_dict = {}
        args = (False, None, True, 0, self, parameters)
        for j in range(n):
            point_dict[j] = np.array(candidate_solutions[j])

        values = Parallel.run_function_different_arguments_parallel(
            wrapper_mean_objective, point_dict, *args)
        values_candidates = [values[j] for j in range(n)]

        ind_max_2 = np.argmax(values_candidates)
        if np.max(values_candidates) > sol['optimal_value'][0]:
            sol = {
                'optimal_value': [values_candidates[ind_max_2]],
                'solution': point_dict[ind_max_2],
            }

    return sol
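# A small sketch of the final comparison step above: if any previously
# evaluated candidate has a higher posterior-mean value than the optimizer's
# result, report that candidate instead. mean_at is a made-up stand-in for
# wrapper_mean_objective.
import numpy as np

def mean_at(x):
    return -float(np.sum((x - 0.5) ** 2))  # toy posterior mean

sol = {'solution': np.array([0.4]), 'optimal_value': [mean_at(np.array([0.4]))]}
candidates = [np.array([0.5]), np.array([0.9])]
values = [mean_at(c) for c in candidates]
best = int(np.argmax(values))
if values[best] > sol['optimal_value'][0]:
    sol = {'solution': candidates[best], 'optimal_value': [values[best]]}
assert np.allclose(sol['solution'], [0.5])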