# Example #1
 def test_history_best_so_far(self):
     """Best-so-far history is the running minimum of the raw history."""
     raw_history = [0.5, 0.6, 0.4, 0.7, 0.3, 0.2]
     expected = [0.5, 0.5, 0.4, 0.4, 0.3, 0.2]
     objective = Objective()
     objective.set_history(raw_history)
     assert objective.get_history_bestsofar() == expected
def search(_dataset):
    '''
    Search the best hyper-parameters for the given dataset using ZOOpt.

    The dataset is published through the module-level ``dataset`` global
    because the objective callback (``eval``, defined elsewhere in this
    module) only receives a Solution object from ZOOpt.

    :param _dataset: the given dataset
    :return: (best hyper-parameters, performance of the best hyper-parameters)
    '''
    global dataset
    dataset = _dataset
    # 19-dimensional search space: the first 18 dimensions are marked False
    # (discrete integer ranges), the last True (continuous) -- presumably a
    # learning rate in [0.0001, 0.001]; TODO confirm against ``eval``.
    dim = Dimension(
        19, [[16, 32], [1, 8], [1, 1], [1, 1], [16, 32], [1, 8],
             [1, 1], [1, 1], [0, 1], [1, 8], [1, 10], [0, 1], [1, 8], [1, 10],
             [40, 50], [30, 40], [20, 30], [10, 20], [0.0001, 0.001]],
        [
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False, True
        ])

    # ``eval`` is a module-level objective function defined elsewhere in this
    # file; NOTE(review): it shadows the ``eval`` builtin.
    obj = Objective(eval, dim)
    # perform optimization; ``round`` is a module-level counter defined
    # elsewhere (it shadows the ``round`` builtin), reset before each search.
    global round
    round = 0
    solution = Opt.min(obj, Parameter(budget=BUDGET))
    # print result
    solution.print_solution()

    # Persist the convergence curve (best objective value seen so far).
    plt.plot(obj.get_history_bestsofar())
    plt.savefig('figure.png')
    return (solution.get_x(), solution.get_value())
import numpy as np
import matplotlib.pyplot as plt
from zoopt import Dimension, Objective, Parameter, Opt


def ackley(solution):
    """Evaluate the Ackley function, shifted so the optimum sits at 0.2.

    :param solution: a ZOOpt Solution; its ``get_x()`` vector is evaluated.
    :return: the Ackley value (0 at x == [0.2] * len(x)).
    """
    shifted = np.asarray(solution.get_x(), dtype=float) - 0.2
    count = len(shifted)
    sphere_term = -20 * np.exp(-0.2 * np.sqrt(np.sum(shifted * shifted) / count))
    cosine_term = -np.exp(np.sum(np.cos(2.0 * np.pi * shifted)) / count)
    return sphere_term + cosine_term + 20.0 + np.e


dim = 100  # dimension of the Ackley search space
# Minimize ``ackley`` over [-1, 1]^dim; the ``[True] * dim`` flags mark
# every dimension as continuous.
obj = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim))
# perform optimization with a budget of 100 * dim objective evaluations
solution = Opt.min(obj, Parameter(budget=100 * dim))
# print result (best x and its objective value)
solution.print_solution()

# Plot the best-so-far objective value per evaluation and save it to disk.
plt.plot(obj.get_history_bestsofar())
plt.savefig('figure.png')