Example #1
0
 def test_eval(self):
     """Evaluating ackley at the known optimum point should give ~0."""
     size = 100
     space = Dimension(size, [[-1, 1]] * size, [True] * size)
     objective = Objective(func=ackley, dim=space)
     point = Solution(x=[0.2] * size)
     assert abs(objective.eval(point)) <= 1e-7
def search(_dataset):
    '''
    Search the best hyper-parameters for the given dataset using ZOOpt.

    :param _dataset: the given dataset
    :return: (best hyper-parameters, performance of the best hyper-parameters)
    '''
    # the objective function reads the dataset through this module-level global
    global dataset
    dataset = _dataset
    # 19 hyper-parameters: the first 18 are discrete (False), the last one is
    # continuous (True) in [0.0001, 0.001]
    dim = Dimension(
        19, [[16, 32], [1, 8], [1, 1], [1, 1], [16, 32], [1, 8],
             [1, 1], [1, 1], [0, 1], [1, 8], [1, 10], [0, 1], [1, 8], [1, 10],
             [40, 50], [30, 40], [20, 30], [10, 20], [0.0001, 0.001]],
        [
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False, True
        ])

    # NOTE(review): `eval` presumably refers to a module-level evaluation
    # function defined elsewhere, which shadows the builtin `eval` -- confirm
    obj = Objective(eval, dim)
    # perform optimization
    # NOTE(review): this module-level counter shadows the builtin `round`
    global round
    round = 0
    solution = Opt.min(obj, Parameter(budget=BUDGET))
    # print result
    solution.print_solution()

    # save the convergence curve of the best-so-far values
    plt.plot(obj.get_history_bestsofar())
    plt.savefig('figure.png')
    return (solution.get_x(), solution.get_value())
Example #3
0
 def test_history_best_so_far(self):
     """get_history_bestsofar should return the running minimum of the history."""
     history = [0.5, 0.6, 0.4, 0.7, 0.3, 0.2]
     expected = [0.5, 0.5, 0.4, 0.4, 0.3, 0.2]
     obj = Objective()
     obj.set_history(history)
     assert obj.get_history_bestsofar() == expected
def minimize_sphere_sre():
    """
    Minimize a 10000-dimensional sphere function using sequential random embedding.

    :return: no return value
    """
    size = 10000  # dimensions
    # real-valued search space, each coordinate in [-1, 1]
    space = Dimension(size, [[-1, 1]] * size, [True] * size)
    objective = Objective(sphere_sre, space)

    # sequential random embedding: optimize in a 10-d embedded subspace,
    # repeated over 5 embeddings, with 2000 objective evaluations in total
    low_dim = Dimension(10, [[-1, 1]] * 10, [True] * 10)
    parameter = Parameter(budget=2000,
                          high_dim_handling=True,
                          reducedim=True,
                          num_sre=5,
                          low_dimension=low_dim)
    ExpOpt.min(objective,
               parameter,
               repeat=1,
               plot=True,
               plot_file="img/minimize_sphere_sre.png")
def minimize_ackley_continuous_noisy():
    """
    SSRacos example: minimize the ackley function under Gaussian noise.

    :return: no return value
    """
    noisy_ackley = ackley_noise_creator(0, 0.1)
    size = 100  # dimensions
    # real-valued search space, each coordinate in [-1, 1]
    space = Dimension(size, [[-1, 1]] * size, [True] * size)
    objective = Objective(noisy_ackley, space)

    # suppression=True enables value suppression, a noise handling method;
    # non_update_allowed=500 with resample_times=100 means that when the best
    # solution has not changed for 500 budgets it is re-evaluated 100 times;
    # balance_rate weights the exponential average of repeated evaluations
    parameter = Parameter(budget=200000,
                          noise_handling=True,
                          suppression=True,
                          non_update_allowed=500,
                          resample_times=100,
                          balance_rate=0.5)
    parameter.set_positive_size(5)

    ExpOpt.min(objective,
               parameter,
               repeat=2,
               plot=True,
               plot_file="img/ackley_continuous_noisy_figure.png")
Example #6
0
def run_test(task_name, layers, in_budget, max_step, repeat):
    """
    Optimize a gym task's neural-network controller with sequential RACOS.

    :param task_name: name of the gym task
    :param layers: layer sizes of the neural network controller
    :param in_budget: number of calls to the objective function
    :param max_step: max steps per gym episode
    :param repeat: number of independent optimization runs
    """
    gym_task = GymTask(task_name)  # choose a task by name
    gym_task.new_nnmodel(layers)  # construct a neural network
    gym_task.set_max_step(max_step)  # set max step in gym

    budget = in_budget  # number of calls to the objective function
    rand_probability = 0.95  # the probability of sample in model

    # set dimension: one real-valued weight per network parameter
    dim_size = gym_task.get_w_size()
    dim = Dimension(dim_size, [[-10, 10]] * dim_size, [True] * dim_size)

    objective = Objective(gym_task.sum_reward,
                          dim)  # form up the objective function
    # by default (autoset=True), the algorithm is sequential RACOS
    parameter = Parameter(budget=budget, autoset=True)
    parameter.set_probability(rand_probability)

    result = []
    total = 0  # renamed from `sum` -- it shadowed the builtin
    print('solved solution is:')
    for _ in range(repeat):  # loop index was unused
        ins = Opt.min(objective, parameter)
        result.append(ins.get_value())
        total += ins.get_value()
        ins.print_solution()
    print(result)  # results in repeat times
    print(total / len(result))  # average result
Example #7
0
def minimize_sphere_mixed():
    """
    Mixed optimization example: minimize the sphere function over a mixed
    search space (continuous on even indices, discrete on odd indices).

    :return: no return value
    """
    size = 100
    regs = []
    types = []
    for idx in range(size):
        if idx % 2 == 0:
            # even index: continuous dimension in [0, 1]
            regs.append([0, 1])
            types.append(True)
        else:
            # odd index: discrete dimension in [0, 100]
            regs.append([0, 100])
            types.append(False)
    objective = Objective(sphere_mixed, Dimension(size, regs, types))
    parameter = Parameter(budget=100 * size)

    ExpOpt.min(objective,
               parameter,
               repeat=1,
               plot=True,
               plot_file="img/sphere_mixed_figure.png")
Example #8
0
    def test_performance(self):
        """SSRacos should reach a value below 4 on the noisy 100-d ackley."""
        noisy_func = ackley_noise_creator(0, 0.1)
        size = 100
        one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6)
        dim = Dimension2([one_dim] * size)
        objective = Objective(noisy_func, dim)
        # value-suppression noise handling: re-evaluate the best solution 50
        # times whenever it has not improved for 200 budgets; balance_rate is
        # the exponential-average weight of repeated evaluations
        parameter = Parameter(budget=20000,
                              noise_handling=True,
                              suppression=True,
                              non_update_allowed=200,
                              resample_times=50,
                              balance_rate=0.5)
        parameter.set_positive_size(5)

        sol = Opt.min(objective, parameter)
        assert sol.get_value() < 4
Example #9
0
def run_test(task_name, layers, in_budget, max_step, repeat):
    """
    Optimize a gym task with sequential RACOS and compare the returned
    solution against the best stable solution tracked by the objective.

    :param task_name: name of the gym task
    :param layers: layer sizes of the neural network controller
    :param in_budget: number of calls to the objective function
    :param max_step: max steps per gym episode
    :param repeat: number of independent optimization runs
    """
    gym_task = GymTask(task_name)  # choose a task by name
    gym_task.new_nnmodel(layers)  # construct a neural network
    gym_task.set_max_step(max_step)  # set max step in gym

    budget = in_budget  # number of calls to the objective function
    rand_probability = 0.95  # the probability of sample in model

    # set dimension: one real-valued weight per network parameter
    dim_size = gym_task.get_w_size()
    dim = Dimension(dim_size, [[-10, 10]] * dim_size, [True] * dim_size)

    result = []
    total = 0  # renamed from `sum` -- it shadowed the builtin
    print('solved solution is:')
    for _ in range(repeat):
        objective = Objective(gym_task.sum_reward, dim)
        # by default (autoset=True) the algorithm is sequential RACOS
        parameter = Parameter(budget=budget, autoset=True)
        parameter.set_probability(rand_probability)
        ins = Opt.min(objective, parameter)
        result.append(ins.get_value())

        best_stable_ins = objective.get_best_stable_ins()
        if best_stable_ins is not None:  # was `!= None`
            best_stable_ins_val = best_stable_ins.get_value()
        else:
            best_stable_ins_val = float("inf")
        # re-evaluate the returned solution 100 times to estimate its true
        # value (the redundant `for i in range(1)` wrapper was removed)
        ins_rewards = [gym_task.sum_reward(ins) for _ in range(100)]
        if np.mean(ins_rewards) < best_stable_ins_val:
            print("last mean", np.mean(ins_rewards))
            print("last std", np.std(ins_rewards))
        else:
            print("stable mean", best_stable_ins.get_value())
            print("stable std", best_stable_ins.get_std())

        total += ins.get_value()
    print(result)  # results in repeat times
    print(total / len(result))  # average result
Example #10
0
 def test_sracos_performance(self):
     """Sequential RACOS should get the 100-d ackley below 0.2."""
     size = 100
     space = Dimension(size, [[-1, 1]] * size, [True] * size)
     objective = Objective(ackley, space)
     parameter = Parameter(budget=100 * size)
     assert Opt.min(objective, parameter).get_value() < 0.2
Example #11
0
 def test_asracos_performance(self):
     """Parallel (asynchronous) SRACOS on continuous, discrete and ordered problems."""
     # continuous: 100-d ackley
     size = 100
     objective = Objective(ackley,
                           Dimension(size, [[-1, 1]] * size,
                                     [True] * size))
     parameter = Parameter(budget=100 * size,
                           parallel=True,
                           server_num=2,
                           seed=2)
     for solution in ExpOpt.min(objective, parameter, repeat=1):
         assert solution.get_value() < 0.2
     # discrete: set cover -- the problem class prepares its own dimension
     problem = SetCover()
     cover_dim = problem.dim
     objective = Objective(problem.fx, cover_dim)
     parameter = Parameter(budget=100 * cover_dim.get_size(),
                           parallel=True,
                           server_num=2,
                           seed=777)
     sol = ExpOpt.min(objective, parameter, repeat=1)[0]
     assert sol.get_value() < 2
     # ordered discrete: 100-d integer sphere
     size = 100
     order_dim = Dimension(size, [[-10, 10]] * size, [False] * size,
                           order=[True] * size)
     objective = Objective(sphere_discrete_order, order_dim)
     parameter = Parameter(budget=10000,
                           parallel=True,
                           server_num=2,
                           uncertain_bits=1,
                           seed=1)
     sol = ExpOpt.min(objective, parameter)[0]
     assert sol.get_value() < 10
Example #12
0
def opt_var_ids(exs, maps):
    """
    Search binary variable assignments that maximize the consistent score.

    :param exs: examples; their flattened length gives the number of variables
    :param maps: mapping passed through to the scoring function
    :return: the ZOOpt solution object
    """
    n_vars = len(flatten(exs))  # hoisted: was recomputed three times
    dim = Dimension(n_vars, [[0, 1]] * n_vars, [False] * n_vars)
    # ZOOpt minimizes, so negate the score to maximize it
    obj = Objective(lambda v: -consistent_score(exs, v.get_x(), maps).score,
                    dim)
    param = Parameter(budget=100, autoset=True)
    solution = Opt.min(obj, param)

    return solution
Example #13
0
 def test_racos_performance2(self):
     """
     RACOS (non-sequential) performance on Dimension2-based problems.

     Fix: the 500-d ackley check previously asserted on the stale 100-d
     `solution` instead of the fresh `sol`.
     """
     # continuous
     dim = 100  # dimension
     one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6)
     dim_list = [one_dim] * dim
     objective = Objective(ackley, Dimension2(dim_list))  # setup objective
     parameter = Parameter(budget=100 * dim, sequential=False, seed=1)
     solution = ExpOpt.min(objective, parameter)[0]
     assert solution.get_value() < 0.2
     dim = 500
     dim_list = [one_dim] * dim
     objective = Objective(ackley, Dimension2(dim_list))  # setup objective
     parameter = Parameter(budget=10000, sequential=False, seed=1)
     sol = Opt.min(objective, parameter)
     sol.print_solution()
     assert sol.get_value() < 2  # was: solution.get_value() (stale result)
     # discrete
     # setcover
     problem = SetCover()
     dim_size = 20
     one_dim = (ValueType.DISCRETE, [0, 1], False)
     dim_list = [one_dim] * dim_size
     dim = Dimension2(dim_list)  # the dim is prepared by the class
     objective = Objective(problem.fx,
                           dim)  # form up the objective function
     budget = 100 * dim.get_size(
     )  # number of calls to the objective function
     parameter = Parameter(budget=budget, sequential=False, seed=777)
     sol = Opt.min(objective, parameter)
     sol.print_solution()
     assert sol.get_value() < 2
     # sphere
     dim_size = 100  # dimensions
     one_dim = (ValueType.DISCRETE, [-10, 10], True)
     dim_list = [one_dim] * dim_size
     dim = Dimension2(dim_list)  # form up the dimension object
     objective = Objective(sphere_discrete_order,
                           dim)  # form up the objective function
     parameter = Parameter(budget=10000, sequential=False, seed=77)
     sol = Opt.min(objective, parameter)
     sol.print_solution()
     assert sol.get_value() < 200
Example #14
0
 def test_racos_performance(self):
     """
     RACOS (non-sequential) performance on continuous and discrete problems.

     Fix: the 500-d ackley check previously asserted on the stale 100-d
     `solution` instead of the fresh `sol`.
     """
     # continuous
     dim = 100  # dimension
     objective = Objective(ackley,
                           Dimension(dim, [[-1, 1]] * dim,
                                     [True] * dim))  # setup objective
     parameter = Parameter(budget=100 * dim, sequential=False, seed=1)
     solution = ExpOpt.min(objective, parameter)[0]
     assert solution.get_value() < 0.2
     dim = 500
     objective = Objective(ackley,
                           Dimension(dim, [[-1, 1]] * dim,
                                     [True] * dim))  # setup objective
     parameter = Parameter(budget=10000, sequential=False, seed=1)
     sol = Opt.min(objective, parameter)
     sol.print_solution()
     assert sol.get_value() < 2  # was: solution.get_value() (stale result)
     # discrete
     # setcover
     problem = SetCover()
     dim = problem.dim  # the dim is prepared by the class
     objective = Objective(problem.fx,
                           dim)  # form up the objective function
     budget = 100 * dim.get_size(
     )  # number of calls to the objective function
     parameter = Parameter(budget=budget, sequential=False, seed=777)
     sol = Opt.min(objective, parameter)
     sol.print_solution()
     assert sol.get_value() < 2
     # sphere
     dim_size = 100  # dimensions
     dim_regs = [[-10, 10]] * dim_size  # dimension range
     dim_tys = [False] * dim_size  # dimension type : integer
     dim_order = [True] * dim_size
     dim = Dimension(dim_size, dim_regs, dim_tys,
                     order=dim_order)  # form up the dimension object
     objective = Objective(sphere_discrete_order,
                           dim)  # form up the objective function
     parameter = Parameter(budget=10000, sequential=False, seed=77)
     sol = Opt.min(objective, parameter)
     sol.print_solution()
     assert sol.get_value() < 200
def opt_var_ids_sets_chess_constraint(exs, mapping, constraint):
    """Search binary chess-variable assignments that maximize the consistent
    score under the given constraint; returns the ZOOpt solution."""
    num_chess = num_of_chess(exs)
    search_space = Dimension(num_chess, [[0, 1]] * num_chess,
                             [False] * num_chess)

    def negated_score(v):
        # ZOOpt minimizes, so negate the score to maximize it
        return -consistent_score_sets_chess(
            exs, [int(i) for i in v.get_x()], mapping)[0]

    objective = Objective(negated_score, dim=search_space,
                          constraint=constraint)
    return Opt.min(objective, Parameter(budget=100, autoset=True))
Example #16
0
def opt_var_ids_sets_constraint(exs, mapping, constraint):
    """
    Search binary variable assignments that maximize the consistent-sets
    score under the given constraint.

    :param exs: examples; their flattened length gives the number of variables
    :param mapping: mapping passed through to the scoring function
    :param constraint: constraint handed to the ZOOpt objective
    :return: the ZOOpt solution object
    """
    n_vars = len(flatten(exs))  # hoisted: was recomputed three times
    dim = Dimension(size=n_vars,
                    regs=[[0, 1]] * n_vars,
                    tys=[False] * n_vars)
    # ZOOpt minimizes, so negate the score to maximize it
    obj = Objective(lambda v: -consistent_score_sets(
        exs, [int(i) for i in v.get_x()], mapping)[0],
                    dim=dim,
                    constraint=constraint)
    param = Parameter(budget=100, autoset=True)
    solution = Opt.min(obj, param)

    return solution
Example #17
0
 def test_resample(self):
     """resample / resample_func should keep the value of an optimum near 0."""
     size = 100
     objective = Objective(func=ackley,
                           dim=Dimension(size, [[-1, 1]] * size,
                                         [True] * size))
     point = Solution(x=[0.2] * size)
     objective.eval(point)
     objective.resample(point, 3)
     assert abs(point.get_value()) <= 1e-7
     point.set_value(0)
     objective.resample_func(point, 3)
     assert abs(point.get_value()) <= 1e-7
    def test_performance(self):
        """Sequential random embedding should get the 10000-d sphere below 0.3."""
        size = 10000
        space = Dimension(size, [[-1, 1]] * size, [True] * size)
        objective = Objective(sphere_sre, size and space)

        # optimize within a 10-d random embedding, 5 sequential embeddings
        parameter = Parameter(budget=2000,
                              high_dim_handling=True,
                              reducedim=True,
                              num_sre=5,
                              low_dimension=Dimension(10, [[-1, 1]] * 10,
                                                      [True] * 10))
        assert Opt.min(objective, parameter).get_value() < 0.3
Example #19
0
def run_ss_test(task_name, layers, in_budget, max_step, repeat,
                terminal_value):
    """
    Optimize a gym task with suppression-based SSRacos and report step counts.

    :param task_name: name of the gym task
    :param layers: layer sizes of the neural-network controller
    :param in_budget: number of calls to the objective function
    :param max_step: max steps per gym episode
    :param repeat: number of independent optimization runs
    :param terminal_value: value passed to Parameter for early stopping
    """
    gym_task = GymTask(task_name)  # choose a task by name
    gym_task.new_nnmodel(layers)  # construct a neural network
    gym_task.set_max_step(max_step)  # set max step in gym

    budget = in_budget  # number of calls to the objective function
    rand_probability = 0.95  # the probability of sample in model

    # set dimension: one real-valued weight per network parameter
    dim_size = gym_task.get_w_size()
    dim_regs = [[-10, 10]] * dim_size
    dim_tys = [True] * dim_size
    dim = Dimension(dim_size, dim_regs, dim_tys)

    def resample_function(solution, iteration_num):
        # mean of `iteration_num` independent evaluations of the solution
        eval_list = []
        for i in range(iteration_num):
            eval_list.append(gym_task.sum_reward(solution))
        return sum(eval_list) * 1.0 / len(eval_list)

    # form up the objective function
    objective = Objective(gym_task.sum_reward,
                          dim,
                          re_sample_func=resample_function)
    # by default, the algorithm is sequential RACOS
    parameter = Parameter(budget=budget,
                          autoset=True,
                          suppression=True,
                          terminal_value=terminal_value)
    parameter.set_resample_times(70)
    parameter.set_probability(rand_probability)

    result = []
    total_sum = 0
    total_step = []
    print('solved solution is:')
    for i in range(repeat):
        ins = Opt.min(objective, parameter)
        result.append(ins.get_value())
        total_sum += ins.get_value()
        ins.print_solution()
        print("total step %s" % gym_task.total_step)
        # record then reset the step counter so each repeat is independent
        total_step.append(gym_task.total_step)
        gym_task.total_step = 0
    print(result)  # results in repeat times
    print(total_sum / len(result))  # average result
    print(total_step)
    print("------------------------avg total step %s" %
          (sum(total_step) / len(total_step)))
Example #20
0
    def generate_negative_data(self, dim_range):
        """
        Generate negative samples for this class by minimizing train_Dminus
        with SRACOS, and persist them to the negative-data file.

        :param dim_range: [low, high] range shared by every dimension
        """
        self.__negative_dataset = []
        dim_size = self.__dim_size  # dimensions
        dim_regs = [dim_range] * dim_size  # dimension range
        dim_tys = [True] * dim_size  # dimension type : real
        dim = Dimension(dim_size, dim_regs,
                        dim_tys)  # form up the dimension object

        budget = self.__Budget  # number of calls to the objective function
        # autoset=True: the remaining SRACOS parameters are set by default;
        # set autoset=False and use the setters below to tune them manually
        parameter = Parameter(algorithm="sracos", budget=budget, autoset=True)
        # parameter.set_train_size(6)
        # parameter.set_probability(0.95)
        # parameter.set_uncertain_bits(2)
        # parameter.set_positive_size(1)
        # parameter.set_negative_size(5)

        # Python 3 print() calls (this block used Python 2 print statements,
        # inconsistent with the rest of the file)
        print("generate negative sample of class:", self.__class_num)
        for i in range(self.__generate_size):
            # init the SRACOS randomly from the original data
            sample_list = random.sample(range(self.__original_data.shape[0]),
                                        self.__init_num)
            init_data = self.__original_data[sample_list]
            parameter.set_init_samples(init_data)

            objective = Objective(self.train_Dminus, dim)
            print('I have objective')
            solution = Opt.min(objective, parameter)
            print('Trying for solution')
            x_minus = solution.get_x()
            self.__negative_dataset.append(x_minus)
            print(x_minus)
            print("[ASG] class", self.__class_num,
                  ": Generating negative data, data size:",
                  len(self.__negative_dataset))
            print("**************************************************")
            isExists = os.path.exists(self.__gendir)

            # store the generated data (rewritten in full on every iteration)
            if not isExists:
                os.mkdir(self.__gendir)
            with open(self.__negative_filename, "w") as f:
                f.write("")
            with open(self.__negative_filename, "a") as f:
                for k in range(len(self.__negative_dataset)):
                    for t in range(len(self.__negative_dataset[k])):
                        f.write(str(self.__negative_dataset[k][t]) + ' ')
                    f.write("\n")
        return
Example #21
0
def minimize_sphere_continuous():
    """
    Minimize the 100-d sphere function, reporting intermediate results.

    :return: no return value
    """
    size = 100
    space = Dimension(size, [[-1, 1]] * size, [True] * size)
    objective = Objective(sphere, space)

    # report the intermediate best solution every 1000 evaluated budgets
    parameter = Parameter(budget=100 * size,
                          intermediate_result=True,
                          intermediate_freq=1000)
    ExpOpt.min(objective, parameter, repeat=1, plot=True,
               plot_file="img/sphere_continuous_figure.png")
Example #22
0
    def __init__(self, ss, sz, focusing_list, solver, _n_iter, best_approx):
        """
        Configure the optimization wrapper for the chosen backend solver.

        :param ss: initial settings, one value per degree of freedom
        :param sz: stored as-is; semantics defined by callers -- TODO confirm
        :param focusing_list: list whose length is cached in self.l
        :param solver: backend name: 'ZOOpt', 'BOBYQA', 'Bayesian', or other
        :param _n_iter: iteration/evaluation budget for the solver
        :param best_approx: best known approximation, stored for later use
        """
        self.dof = len(ss)  # degrees of freedom
        self.rew = 2000
        self.x_prev = ss
        self.sz = sz
        self.focusing_list = focusing_list
        self.l = len(self.focusing_list)
        self.solver = solver
        self._n_iter = _n_iter
        self.t0 = time.time()  # start time for elapsed-time bookkeeping
        self.timer = [0]
        self.values = [1000]
        # Spawn MAD-X process
        self.reset()
        self.best_approx = best_approx

        if self.solver == 'ZOOpt':
            dim = self.dof
            #dim_bounds = self.Bounds_maker()
            # each variable in [0, 628.3185] (628.3185 = 200*pi)
            dimobj = Dimension(dim, [[0, 628.3185]] * dim, [True] * dim)
            self.parameter = Parameter(budget=self._n_iter,
                                       init_samples=[ss],
                                       exploration_rate=0.25)
            # NOTE(review): this rebinds the bound method `self.step` to a
            # ZOOpt Objective wrapping it -- later accesses see the Objective
            self.step = Objective(self.step, dimobj)
        elif self.solver == 'BOBYQA':  # currently broken
            self.upper = np.multiply(np.ones((self.dof), ), 628.3185)
            self.lower = np.multiply(self.upper, 0)
        elif self.solver == 'Bayesian':  # currently unfinished
            dim = self.dof
            # named variables for BayesianOptimization; supports up to 40 dof
            x = [
                'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7', 'x8', 'x9', 'x10',
                'x11', 'x12', 'x13', 'x14', 'x15', 'x16', 'x17', 'x18', 'x19',
                'x20', 'x21', 'x22', 'x23', 'x24', 'x25', 'x26', 'x27', 'x28',
                'x29', 'x30', 'x31', 'x32', 'x33', 'x34', 'x35', 'x36', 'x37',
                'x38', 'x39', 'x40'
            ]  #,
            #'x41' ,'x42' ,'x43' ,'x44' ,'x45' ,'x46' ,'x47' ,'x48' ,'x49' , 'x50']
            bounds = {}
            for n in range(self.dof):
                bounds[x[n]] = (0, 628.3185)
            self.pbounds = bounds
            self.optimizer = BayesianOptimization(
                f=self.step2,
                pbounds=self.pbounds,
                random_state=2,
            )
        else:
            self.bounds = [[0, 628.3185]] * self.dof
Example #23
0
    def test_noisy(self):
        """With a fixed seed, two optimizations of the noisy ackley must agree."""
        noisy_func = ackley_noise_creator(0, 0.1)
        size = 100
        space = Dimension(size, [[-1, 1]] * size, [True] * size)
        objective = Objective(noisy_func, space)
        # suppression-based noise handling, seeded for determinism
        parameter = Parameter(budget=20000,
                              noise_handling=True,
                              suppression=True,
                              non_update_allowed=200,
                              resample_times=50,
                              balance_rate=0.5,
                              seed=1)
        parameter.set_positive_size(5)
        first = Opt.min(objective, parameter)
        second = Opt.min(objective, parameter)
        assert first.get_value() == second.get_value()
Example #24
0
    def test_performance(self):
        """POSS sparse regression on the sonar data should reach loss < 0.6."""
        mse = SparseMSE('example/sparse_regression/sonar.arff')
        mse.set_sparsity(8)

        # objective with sparsity constraint |w|_0 <= k
        objective = Objective(func=mse.loss,
                              dim=mse.get_dim(),
                              constraint=mse.constraint)
        # POSS budget: 2 * e * k^2 * n
        budget = 2 * exp(1) * (mse.get_sparsity() ** 2) * \
            mse.get_dim().get_size()
        parameter = Parameter(algorithm='poss', budget=budget)

        solution = Opt.min(objective, parameter)
        assert solution.get_value()[0] < 0.6
Example #25
0
def minimize_ackley_continuous():
    """
    Continuous optimization example: minimize the 100-d ackley function.

    :return: no return value
    """
    size = 100
    # real-valued search space, each coordinate in [-1, 1]
    space = Dimension(size, [[-1, 1]] * size, [True] * size)
    objective = Objective(ackley, space)

    parameter = Parameter(budget=100 * size)
    ExpOpt.min(objective, parameter, repeat=1, plot=True,
               plot_file="img/ackley_continuous_figure.png")
Example #26
0
def minimize_sphere_discrete_order():
    """
    Discrete optimization example: minimize the sphere function over an
    ordered integer search space.

    :return: no return value
    """
    size = 100
    space = Dimension(size, [[-10, 10]] * size, [False] * size,
                      order=[True] * size)
    objective = Objective(sphere_discrete_order, space)

    # ordered discrete space: perturb at most one uncertain bit per sample
    parameter = Parameter(budget=10000, uncertain_bits=1)

    ExpOpt.min(objective, parameter, repeat=1, plot=True,
               plot_file="img/sphere_discrete_order_figure.png")
    def fit(self,
            real_data,
            budget=10000,
            server_num=3,
            repeat=1,
            seed=1,
            plot=False,
            plot_file="optimize.png",
            intermediate_freq=100,
            init_samples=None,
            loss_ord=0):
        """
        Fit the model parameters with parallel RACOS and keep the best solution.

        :param real_data: observed data handed to the problem builder
        :param budget: number of objective evaluations
        :param server_num: number of parallel evaluation servers
        :param repeat: number of independent optimization runs
        :param seed: random seed for ZOOpt
        :param plot: whether to plot convergence
        :param plot_file: where the convergence plot is saved
        :param intermediate_freq: budget interval between progress reports
        :param init_samples: optional list of initial solutions
        :param loss_ord: loss-order flag forwarded to the problem builder
        :return: (best parameter vector, best objective value)
        """
        problem = problem_maker(self, real_data, self.training_date_end,
                                loss_ord)
        n_dim, raw_ranges, dim_type = self.get_dim()
        ranges = [[a[0], a[1]] for a in raw_ranges]
        print(ranges)
        objective = Objective(problem, Dimension(n_dim, ranges, dim_type))
        parameter = Parameter(algorithm='racos',
                              budget=budget,
                              intermediate_result=True,
                              intermediate_freq=intermediate_freq,
                              seed=seed,
                              parallel=True,
                              server_num=server_num,
                              init_samples=init_samples)
        parameter.set_probability(0.6)
        solutions = ExpOpt.min(objective,
                               parameter,
                               repeat=repeat,
                               plot=plot,
                               plot_file=plot_file)

        # keep the best (lowest-value) solution over all repeats
        f_min = np.inf
        x_min = None
        for candidate in solutions:
            if candidate.get_value() < f_min:
                f_min = candidate.get_value()
                x_min = candidate.get_x()

        self.set_param(x_min)

        return x_min, f_min
Example #28
0
    def __init__(self, dimension, parameter):
        """
        Initialization.

        :param dimension: instance of Dimension2 class
        :param parameter: instance of Parameter class
        """

        RacosCommon.__init__(self)
        self.clear()
        # the objective function is attached later; only the dimension is known
        objective = Objective(None, dimension)
        self.set_objective(objective)
        self.set_parameters(parameter)

        self.init_num = 0  # number of initial samples issued so far
        self.complete_num = 0  # number of completed evaluations
        self.semaphore = 1  # control init
        # uncertain bits: fall back to an automatic choice when not configured
        self.ub = self._parameter.get_uncertain_bits()
        if self.ub is None:
            self.ub = self.choose_ub(self.get_objective())
        return
Example #29
0
def minimize_setcover_discrete():
    """
    Discrete optimization example: minimize the set-cover problem.

    :return: no return value
    """
    problem = SetCover()
    space = problem.dim  # the problem class prepares its own dimension
    objective = Objective(problem.fx, space)
    # autoset=False: train/positive/negative sizes must be set explicitly
    parameter = Parameter(budget=100 * space.get_size(), autoset=False)
    parameter.set_train_size(6)
    parameter.set_positive_size(1)
    parameter.set_negative_size(5)

    ExpOpt.min(objective,
               parameter,
               repeat=10,
               best_n=5,
               plot=True,
               plot_file="img/setcover_discrete_figure.png")
Example #30
0
    def test_performance(self):
        """PONSS (noise-aware POSS) sparse regression should reach loss < 0.7."""
        # load data file
        mse = SparseMSE('example/sparse_regression/sonar.arff')
        mse.set_sparsity(8)

        # objective with sparsity constraint |w|_0 <= k
        objective = Objective(func=mse.loss,
                              dim=mse.get_dim(),
                              constraint=mse.constraint)
        # ponss_theta is the domination threshold; ponss_b bounds the number
        # of solutions kept in the population set
        parameter = Parameter(algorithm='poss',
                              noise_handling=True,
                              ponss=True,
                              ponss_theta=0.5,
                              ponss_b=mse.get_k(),
                              budget=2 * exp(1) * (mse.get_sparsity() ** 2) *
                              mse.get_dim().get_size())

        solution = Opt.min(objective, parameter)
        assert solution.get_value()[0] < 0.7