def minimize_ackley_continuous_noisy():
    """
    SSRacos example of minimizing the ackley function under Gaussian noise.

    :return: no return value
    """
    ackley_noise_func = ackley_noise_creator(0, 0.1)
    dim_size = 100  # dimensions
    dim_regs = [[-1, 1]] * dim_size  # dimension range
    dim_tys = [True] * dim_size  # dimension type : real
    dim = Dimension(dim_size, dim_regs, dim_tys)  # form up the dimension object
    objective = Objective(ackley_noise_func, dim)  # form up the objective function
    budget = 20000  # 200 * dim_size; number of calls to the objective function
    # suppression=True means optimize with value suppression, a noise-handling method.
    # resampling=True (the commented-out alternative below) means optimize with
    # re-sampling, another commonly used noise-handling method.
    # non_update_allowed=200 and resample_times=50 mean that if the best solution
    # doesn't change for 200 budgets, it will be re-evaluated 50 times.
    # balance_rate is a parameter for the exponentially weighted average of several
    # evaluations of one sample.
    parameter = Parameter(budget=budget, noise_handling=True, suppression=True,
                          non_update_allowed=200, resample_times=50, balance_rate=0.5)
    # parameter = Parameter(budget=budget, noise_handling=True, resampling=True, resample_times=10)
    parameter.set_positive_size(5)
    ExpOpt.min(objective, parameter, repeat=5, plot=False,
               plot_file="img/ackley_continuous_noisy_figure.png")
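# For reference, `ackley_noise_creator` is imported from the simple_functions
# module. A minimal sketch of such a factory, assuming it simply adds Gaussian
# noise with the given mean and standard deviation to the noise-free ackley value:
from random import gauss

def ackley_noise_creator(mu, sigma):
    """Return an ackley objective perturbed by Gaussian(mu, sigma) noise."""
    return lambda solution: ackley(solution) + gauss(mu, sigma)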
def run_test(task_name, layers, in_budget, max_step, repeat, terminal_value=None):
    """
    Example of running direct policy search for a gym task.

    :param task_name: gym task name
    :param layers:
        layer information of the neural network,
        e.g., [2, 5, 1] means the input layer has 2 neurons, the (single) hidden
        layer has 5, and the output layer has 1
    :param in_budget: number of calls to the objective function
    :param max_step: max step in gym
    :param repeat: number of repetitions in a test
    :param terminal_value: early stop; the algorithm stops when this value is reached
    :return: no return value
    """
    gym_task = GymTask(task_name)  # choose a task by name
    gym_task.new_nnmodel(layers)  # construct a neural network
    gym_task.set_max_step(max_step)  # set max step in gym

    budget = in_budget  # number of calls to the objective function
    rand_probability = 0.95  # the probability of sampling from the learned model

    # set dimension
    dim_size = gym_task.get_w_size()
    dim_regs = [[-10, 10]] * dim_size
    dim_tys = [True] * dim_size
    dim = Dimension(dim_size, dim_regs, dim_tys)

    # form up the objective function
    objective = Objective(gym_task.sum_reward, dim)
    parameter = Parameter(budget=budget, terminal_value=terminal_value)
    parameter.set_probability(rand_probability)

    solution_list = ExpOpt.min(objective, parameter, repeat=repeat)
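# A hedged usage sketch of run_test: the task name, layer sizes, budget, and
# step limit below are illustrative choices, not values prescribed by the
# function itself. 'CartPole-v0' has a 4-dimensional observation and a single
# output action, hence layers=[4, 5, 1].
if __name__ == '__main__':
    run_test('CartPole-v0', layers=[4, 5, 1], in_budget=2000, max_step=500, repeat=3)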
def minimize_sphere_mixed():
    """
    Mixed optimization example of minimizing the sphere function,
    which has a mixed search space.

    :return: no return value
    """
    # set up the optimization problem
    dim_size = 100
    dim_regs = []
    dim_tys = []
    # In this example, the search space is continuous if the dimension index is
    # even; otherwise, it is discrete.
    for i in range(dim_size):
        if i % 2 == 0:
            dim_regs.append([0, 1])
            dim_tys.append(True)
        else:
            dim_regs.append([0, 100])
            dim_tys.append(False)
    dim = Dimension(dim_size, dim_regs, dim_tys)
    objective = Objective(sphere_mixed, dim)  # form up the objective function
    budget = 100 * dim_size  # number of calls to the objective function

    parameter = Parameter(budget=budget)
    solution_list = ExpOpt.min(objective, parameter, repeat=1, plot=True,
                               plot_file="img/sphere_mixed_figure.png")
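# `sphere_mixed` is imported from the simple_functions module. A minimal sketch,
# assuming it is the plain sphere value computed over all coordinates of a
# ZOOpt Solution, continuous and discrete alike:
def sphere_mixed(solution):
    x = solution.get_x()
    return sum(v * v for v in x)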
def test_sracos_performance(self):
    dim = 100  # dimension
    objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim))  # set up objective
    parameter = Parameter(budget=100 * dim)
    solution = Opt.min(objective, parameter)
    assert solution.get_value() < 0.2
def test_resample(self):
    ackley_noise_func = ackley_noise_creator(0, 0.1)
    dim_size = 100  # dimensions
    dim_regs = [[-1, 1]] * dim_size  # dimension range
    dim_tys = [True] * dim_size  # dimension type : real
    dim = Dimension(dim_size, dim_regs, dim_tys)  # form up the dimension object
    objective = Objective(ackley_noise_func, dim)  # form up the objective function
    budget = 20000  # 200 * dim_size; number of calls to the objective function
    # resampling=True means optimize with re-sampling, a commonly used
    # noise-handling method; resample_times=10 means each sampled solution
    # is evaluated 10 times.
    parameter = Parameter(budget=budget, noise_handling=True, resampling=True, resample_times=10)
    parameter.set_positive_size(5)
    sol = Opt.min(objective, parameter)
    assert sol.get_value() < 4
def test_performance(self):
    mse = SparseMSE('example/sparse_regression/sonar.arff')
    mse.set_sparsity(8)
    # set up objective
    objective = Objective(func=mse.loss, dim=mse.get_dim(), constraint=mse.constraint)
    parameter = Parameter(algorithm='poss',
                          budget=2 * exp(1) * (mse.get_sparsity() ** 2) * mse.get_dim().get_size())
    # perform sparse regression with constraint |w|_0 <= k
    solution = Opt.min(objective, parameter)
    assert solution.get_value()[0] < 0.6
def minimize_sphere_sre():
    """
    Example of minimizing the high-dimensional sphere function with
    sequential random embedding.

    :return: no return value
    """
    dim_size = 10000  # dimensions
    dim_regs = [[-1, 1]] * dim_size  # dimension range
    dim_tys = [True] * dim_size  # dimension type : real
    dim = Dimension(dim_size, dim_regs, dim_tys)  # form up the dimension object
    objective = Objective(sphere_sre, dim)  # form up the objective function

    # set up algorithm parameters
    budget = 2000  # number of calls to the objective function
    parameter = Parameter(budget=budget, high_dim_handling=True, reducedim=True, num_sre=5,
                          low_dimension=Dimension(10, [[-1, 1]] * 10, [True] * 10))
    solution_list = ExpOpt.min(objective, parameter, repeat=5, plot=False,
                               plot_file="img/minimize_sphere_sre.png")
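# `sphere_sre` is a sphere variant tailored to random embedding: only a few
# leading coordinates matter much, which is why a 10-dimensional embedding
# (low_dimension above) can recover a near-optimal solution. A sketch, assuming
# the first 10 dimensions dominate and the rest contribute a tiny residual:
def sphere_sre(solution):
    x = solution.get_x()
    effective = sum((v - 0.2) ** 2 for v in x[:10])  # effective dimensions
    residual = sum(v * v for v in x[10:]) / 1e6      # near-negligible tail
    return effective + residual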
def test_performance(self):
    dim_size = 10000  # dimensions
    dim_regs = [[-1, 1]] * dim_size  # dimension range
    dim_tys = [True] * dim_size  # dimension type : real
    dim = Dimension(dim_size, dim_regs, dim_tys)  # form up the dimension object
    objective = Objective(sphere_sre, dim)  # form up the objective function
    # set up algorithm parameters
    budget = 2000  # number of calls to the objective function
    parameter = Parameter(budget=budget, high_dim_handling=True, reducedim=True, num_sre=5,
                          low_dimension=Dimension(10, [[-1, 1]] * 10, [True] * 10))
    solution = Opt.min(objective, parameter)
    assert solution.get_value() < 0.3
def minimize_setcover_discrete():
    """
    Discrete optimization example of minimizing the set cover problem.

    :return: no return value
    """
    problem = SetCover()
    dim = problem.dim  # the dim is prepared by the class
    objective = Objective(problem.fx, dim)  # form up the objective function
    budget = 100 * dim.get_size()  # number of calls to the objective function

    # if autoset is False, you should define train_size, positive_size and
    # negative_size on your own
    parameter = Parameter(budget=budget, autoset=False)
    parameter.set_train_size(6)
    parameter.set_positive_size(1)
    parameter.set_negative_size(5)

    ExpOpt.min(objective, parameter, repeat=10, best_n=5, plot=True,
               plot_file="img/setcover_discrete_figure.png")
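# `SetCover` prepares the dimension and objective itself. A hedged sketch of
# the interface this example relies on, using made-up weights and subsets
# (the real instance lives in the SetCover class):
class TinySetCover:
    """Three candidate subsets covering a universe of two elements."""

    def __init__(self):
        self.weight = [0.5, 0.4, 0.3]           # cost of picking each subset
        self.subset = [[1, 0], [0, 1], [1, 1]]  # elements covered by each subset
        # one binary (unordered discrete) decision per subset
        self.dim = Dimension(3, [[0, 1]] * 3, [False] * 3)

    def fx(self, solution):
        x = solution.get_x()  # 0/1 pick per subset
        cost = sum(w for w, b in zip(self.weight, x) if b)
        covered = all(any(s[e] for s, b in zip(self.subset, x) if b)
                      for e in range(2))
        return cost if covered else cost + 10.0  # penalize incomplete covers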
def minimize_sphere_continuous():
    """
    Example of minimizing the sphere function.

    :return: no return value
    """
    dim_size = 100
    # form up the objective function
    objective = Objective(sphere, Dimension(dim_size, [[-1, 1]] * dim_size, [True] * dim_size))
    budget = 100 * dim_size
    # if intermediate_result is True, ZOOpt will report the intermediate best
    # solution every intermediate_freq calls to the objective function
    parameter = Parameter(budget=budget, intermediate_result=True, intermediate_freq=1000)
    ExpOpt.min(objective, parameter, repeat=1, plot=True,
               plot_file="img/sphere_continuous_figure.png")
def minimize_ackley_continuous():
    """
    Continuous optimization example of minimizing the ackley function.

    :return: no return value
    """
    dim_size = 100  # dimensions
    dim_regs = [[-1, 1]] * dim_size  # dimension range
    dim_tys = [True] * dim_size  # dimension type : real
    dim = Dimension(dim_size, dim_regs, dim_tys)  # form up the dimension object
    objective = Objective(ackley, dim)  # form up the objective function
    budget = 100 * dim_size  # number of calls to the objective function
    parameter = Parameter(budget=budget)
    solution_list = ExpOpt.min(objective, parameter, repeat=1, plot=True,
                               plot_file="img/ackley_continuous_figure.png")
def test_performance(self):
    # load data file
    mse = SparseMSE('example/sparse_regression/sonar.arff')
    mse.set_sparsity(8)
    # set up objective
    objective = Objective(func=mse.loss, dim=mse.get_dim(), constraint=mse.constraint)
    # ponss_theta and ponss_b are parameters of the PONSS algorithm and should be
    # provided by the user: ponss_theta is the noise threshold, and ponss_b limits
    # the number of solutions kept in the population.
    parameter = Parameter(algorithm='poss', noise_handling=True, ponss=True,
                          ponss_theta=0.5, ponss_b=mse.get_k(),
                          budget=2 * exp(1) * (mse.get_sparsity() ** 2) * mse.get_dim().get_size())
    # perform sparse regression with constraint |w|_0 <= k
    solution = Opt.min(objective, parameter)
    assert solution.get_value()[0] < 0.7
def minimize_sphere_discrete_order():
    """
    Discrete optimization example of minimizing the sphere function,
    which has an ordered search space.

    :return: no return value
    """
    dim_size = 100  # dimensions
    dim_regs = [[-10, 10]] * dim_size  # dimension range
    dim_tys = [False] * dim_size  # dimension type : integer
    dim_order = [True] * dim_size
    dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order)  # form up the dimension object
    objective = Objective(sphere_discrete_order, dim)  # form up the objective function

    # set up algorithm parameters
    budget = 10000  # number of calls to the objective function
    parameter = Parameter(budget=budget)
    ExpOpt.min(objective, parameter, repeat=1, plot=True,
               plot_file="img/sphere_discrete_order_figure.png")
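# `sphere_discrete_order` is imported from the simple_functions module. A
# minimal sketch, assuming the usual sphere value evaluated on the integer
# coordinates of the solution:
def sphere_discrete_order(solution):
    x = solution.get_x()  # integers in [-10, 10]
    return sum(v * v for v in x)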
""" An example of using PONSS to optimize a noisy subset selection problem. """ from zoopt.example.sparse_regression.sparse_mse import SparseMSE from zoopt.zoopt import Objective, Parameter, ExpOpt from math import exp if __name__ == '__main__': # load data file mse = SparseMSE('sonar.arff') mse.set_sparsity(8) # setup objective objective = Objective(func=mse.loss, dim=mse.get_dim(), constraint=mse.constraint) # ponss_theta and ponss_b are parameters used in PONSS algorithm and should be provided by users. ponss_theta stands # for the threshold. ponss_b limits the number of solutions in the population set. parameter = Parameter(algorithm='poss', noise_handling=True, ponss=True, ponss_theta=0.5, ponss_b=mse.get_k(), budget=2 * exp(1) * (mse.get_sparsity()**2) * mse.get_dim().get_size()) # perform sparse regression with constraint |w|_0 <= k solution_list = ExpOpt.min(objective, parameter, repeat=1, plot=True)
""" Training error. """ wrong = 0.0 for i in range(len(self.__data)): fx = self.calc_product(best, i) if fx * self.trans_label(i) <= 0: wrong += 1 rate = wrong / len(self.__data) return rate def dim(self): """ Construct dimension of this problem. """ return Dimension(self.__dim_size, [[-10, 10]] * self.__dim_size, [True] * self.__dim_size) if __name__ == '__main__': # read data loss = RampLoss('ionosphere.arff') objective = Objective(loss.eval, loss.dim()) budget = 100 * loss.get_dim_size() parameter = Parameter(budget=budget) solution_list = ExpOpt.min(objective, parameter, repeat=1, plot=True, plot_file="img/ramploss.png")
""" This file contains an example of how to optimize continuous ackley function. Author: Yu-Ren Liu, Xiong-Hui Chen """ from zoopt.zoopt import Dimension, Objective, Parameter, ExpOpt, Solution from zoopt.example.simple_functions.simple_function import ackley if __name__ == '__main__': dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim)) # setup objective parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=5, plot=False, plot_file="img/quick_start.png") for solution in solution_list: x = solution.get_x() value = solution.get_value() print(x, value)
def test_parameter_set(self):
    # smoke test: constructing a Parameter with noise handling should not raise
    par = Parameter(budget=1000, noise_handling=True, suppression=True)
    assert 1
def test_auto_set(self):
    par = Parameter(budget=50)
    assert par.get_train_size() == 4 and par.get_positive_size() == 1 and par.get_negative_size() == 3
    par = Parameter(budget=100)
    assert par.get_train_size() == 6 and par.get_positive_size() == 1 and par.get_negative_size() == 5
    par = Parameter(budget=1000)
    assert par.get_train_size() == 12 and par.get_positive_size() == 2 and par.get_negative_size() == 10
    par = Parameter(budget=1001)
    assert par.get_train_size() == 22 and par.get_positive_size() == 2 and par.get_negative_size() == 20
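# The autoset defaults checked above follow one invariant: the positive and
# negative sizes always partition the training set (4 == 1 + 3, 6 == 1 + 5,
# and so on). A quick check using only the getters already exercised above:
for b in (50, 100, 1000, 1001):
    p = Parameter(budget=b)
    assert p.get_train_size() == p.get_positive_size() + p.get_negative_size()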
""" An example of using POSS to optimize a subset selection problem. """ from zoopt.example.sparse_regression.sparse_mse import SparseMSE from zoopt.zoopt import Objective, Parameter, ExpOpt from math import exp if __name__ == '__main__': # load data file mse = SparseMSE('sonar.arff') mse.set_sparsity(8) # setup objective # print(mse.get_dim().get_size()) objective = Objective(func=mse.loss, dim=mse.get_dim(), constraint=mse.constraint) parameter = Parameter(algorithm='poss', budget=2 * exp(1) * (mse.get_sparsity()**2) * mse.get_dim().get_size()) # perform sparse regression with constraint |w|_0 <= k solution_list = ExpOpt.min(objective, parameter, repeat=1, plot=True)