Example 1
 def test_entropy_search(self):
     res = entropy_search(objective_function=objective,
                          lower=self.lower, upper=self.upper,
                          n_init=2,
                          num_iterations=3)
     assert len(res["x_opt"]) == 1
     assert np.array(res["x_opt"]) >= 0
     assert np.array(res["x_opt"]) <= 1
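For reference, here is a minimal, self-contained version of the call this test exercises. The import path robo.fmin.entropy_search and the toy 1-D objective are assumptions used for illustration; only the call signature, the bounds, and the "x_opt"/"f_opt" keys come from the examples on this page.

import numpy as np
from robo.fmin import entropy_search  # assumed RoBO entry point

# toy 1-D objective standing in for `objective` in the test above
def objective(x):
    return np.sin(3 * x[0]) * 4 * (x[0] - 1) * (x[0] + 2)

lower = np.array([0.0])
upper = np.array([1.0])

res = entropy_search(objective_function=objective,
                     lower=lower, upper=upper,
                     n_init=2, num_iterations=3)
print(res["x_opt"], res["f_opt"])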
Example 2
 def test_entropy_search(self):
     res = entropy_search(objective_function=objective,
                          lower=self.lower,
                          upper=self.upper,
                          n_init=2,
                          num_iterations=3)
     assert len(res["x_opt"]) == 1
     assert np.array(res["x_opt"]) >= 0
     assert np.array(res["x_opt"]) <= 1
Example 3
    def get_res(self):
        # run entropy search over the search-space bounds and return its result dict
        lower = self.space.bound[0]
        upper = self.space.bound[1]

        res = entropy_search(
            self.fn,
            lower,
            upper,
            maximizer=self.params.get('maximizer', 'scipy'),
            num_iterations=self.params['niter'],
            output_path=str(self.output_path),
            rng=self.rng,
        )

        return res
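Example 3 threads an explicit random state (self.rng) into the optimizer. A one-line sketch of constructing it so repeated runs are reproducible, assuming a NumPy RandomState is what gets passed as rng here; the seed value is arbitrary:

import numpy as np

rng = np.random.RandomState(42)  # a seeded RandomState passed as `rng=` makes repeated runs reproducible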
Example 4
def ent_search():
    """
    Entropy search.

    Relies on the module-level `objective_function`, `lower`, `upper` and the
    parsed command-line `args` defined by the surrounding script.
    """
    print('\n============= START Entropy Search OPTIMIZATION =============\n')
    print("""Optimization parameters:
                    - lower = {}
                    - upper = {}
                    - num_iter = {}
                    - maximizer = {}
                    - model_type = {} 
                    - n_init = {} """.format(lower, upper, args.num_iterations,
                                             args.maximizer, args.model_type,
                                             args.n_init))

    results = entropy_search(objective_function,
                             lower,
                             upper,
                             num_iterations=args.num_iterations,
                             maximizer=args.maximizer,
                             model=args.model_type)
    print(results["x_opt"])
    print(results["f_opt"])
    print('\n============= END OPTIMIZATION =============\n')
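The function above reads lower, upper, objective_function and args from module scope. A hedged sketch of the command-line setup it assumes; the argument names are inferred from the attributes used in the snippet, and the defaults are placeholders:

import argparse
import numpy as np

parser = argparse.ArgumentParser()
parser.add_argument("--num_iterations", type=int, default=50)
parser.add_argument("--maximizer", default="random")
parser.add_argument("--model_type", default="gp_mcmc")
parser.add_argument("--n_init", type=int, default=3)
args = parser.parse_args()

# search-space bounds; the dimensionality is an assumption
lower = np.array([0.0])
upper = np.array([1.0])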
Example 5
    output_path = "./experiments/fabolas/results/cnn_%s/entropy_search_%d" % (
        dataset, run_id)
elif dataset == "res_net":
    f = ResidualNeuralNetworkOnCIFAR10(rng=rng)
    num_iterations = 10
    output_path = "./experiments/fabolas/results/res_%s/entropy_search_%d" % (
        dataset, run_id)

os.makedirs(output_path, exist_ok=True)

info = f.get_meta_information()
bounds = np.array(info['bounds'])
results = entropy_search(f,
                         bounds[:, 0],
                         bounds[:, 1],
                         num_iterations=num_iterations,
                         n_init=2,
                         rng=rng,
                         output_path=output_path)

results["run_id"] = run_id
results['X'] = results['X'].tolist()
results['y'] = results['y'].tolist()

test_error = []
current_inc = None
current_inc_val = None

key = "incumbents"

for inc in results["incumbents"]:
Example 6
    f = SurrogateSVM(path="/ihome/kleinaa/devel/git/HPOlib/surrogates/")
elif benchmark == "cnn_cifar10":
    f = SurrogateCNN(path="/ihome/kleinaa/devel/git/HPOlib/surrogates/")
elif benchmark == "fcnet_mnist":
    f = SurrogateFCNet(path="/ihome/kleinaa/devel/git/HPOlib/surrogates/")
elif benchmark == "paramnet":
    dataset = sys.argv[4]
    f = SurrogateParamNet(dataset, "/ihome/kleinaa/devel/git/HPOlib/surrogates/")

    benchmark += "_" + dataset

info = f.get_meta_information()
bounds = np.array(info['bounds'])

if method == "entropy_search":
    results = entropy_search(f, bounds[:, 0], bounds[:, 1],
                             num_iterations=n_iters, n_init=n_init)
elif method == "gp_mcmc":
    results = bayesian_optimization(f, bounds[:, 0], bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init, model_type="gp_mcmc")
elif method == "gp":
    results = bayesian_optimization(f, bounds[:, 0], bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init, model_type="gp")
elif method == "rf":
    results = bayesian_optimization(f, bounds[:, 0], bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init, model_type="rf")
elif method == "random_search":
    results = random_search(f, bounds[:, 0], bounds[:, 1],
                            num_iterations=n_iters)
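The if/elif dispatch on method above can also be written as a lookup table of pre-configured callables. A minimal sketch under the same assumptions; the entropy_search, bayesian_optimization and random_search entry points and the f, bounds, n_iters and n_init variables all come from the snippet:

from functools import partial

# map each method name to a ready-to-call optimizer; the keyword arguments
# mirror the branches of the if/elif chain above
optimizers = {
    "entropy_search": partial(entropy_search,
                              num_iterations=n_iters, n_init=n_init),
    "gp_mcmc": partial(bayesian_optimization, num_iterations=n_iters,
                       n_init=n_init, model_type="gp_mcmc"),
    "gp": partial(bayesian_optimization, num_iterations=n_iters,
                  n_init=n_init, model_type="gp"),
    "rf": partial(bayesian_optimization, num_iterations=n_iters,
                  n_init=n_init, model_type="rf"),
    "random_search": partial(random_search, num_iterations=n_iters),
}

results = optimizers[method](f, bounds[:, 0], bounds[:, 1])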
Example 7
elif benchmark == "fcnet_mnist":
    f = SurrogateFCNet(path="/ihome/kleinaa/devel/git/HPOlib/surrogates/")
elif benchmark == "paramnet":
    dataset = sys.argv[4]
    f = SurrogateParamNet(dataset,
                          "/ihome/kleinaa/devel/git/HPOlib/surrogates/")

    benchmark += "_" + dataset

info = f.get_meta_information()
bounds = np.array(info['bounds'])

if method == "entropy_search":
    results = entropy_search(f,
                             bounds[:, 0],
                             bounds[:, 1],
                             num_iterations=n_iters,
                             n_init=n_init)
elif method == "gp_mcmc":
    results = bayesian_optimization(f,
                                    bounds[:, 0],
                                    bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init,
                                    model_type="gp_mcmc")
elif method == "gp":
    results = bayesian_optimization(f,
                                    bounds[:, 0],
                                    bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init,
Example 8
results_over_runs = dict()
#################################################
# Initial data setup
#################################################
version = 1
n_init_num = 5
budget_iter = 50
x_init_dir = home_dir + '/IBO_master/experiments_IBO/' + exp_name + '/initial_data/'
x_init_name = 'x_init_{}_v{}.pkl'.format(exp_name, version)
#################################################
# ES main function
#################################################
for it in range(n_runs):
    results_over_runs[it] = entropy_search(objective_function,
                                           lower,
                                           upper,
                                           n_init=n_init_num,
                                           init_data=x_init_dir + x_init_name,
                                           num_iterations=budget_iter)
#################################################
# Saving the results
#################################################
output_main_dir = home_dir + '/IBO_master/experiments_IBO/' + exp_name + '/output_main/'
pickle.dump(
    results_over_runs,
    open(
        output_main_dir + "results_{}_{}_init{}_budget{}_v{}.pkl".format(
            exp_name, method, n_init_num, budget_iter, version), "wb"))

print(results_over_runs[0])
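The pickled dictionary of runs can be read back with the mirror-image call; a minimal sketch that reuses the same path components as above:

import pickle

results_path = output_main_dir + "results_{}_{}_init{}_budget{}_v{}.pkl".format(
    exp_name, method, n_init_num, budget_iter, version)
with open(results_path, "rb") as fh:
    results_over_runs = pickle.load(fh)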
Example 9
    output_path = "./experiments/fabolas/results/cnn_%s/entropy_search_%d" % (dataset, run_id)
elif dataset == "svhn":
    f = ConvolutionalNeuralNetworkOnSVHN(rng=rng)
    num_iterations = 15
    output_path = "./experiments/fabolas/results/cnn_%s/entropy_search_%d" % (dataset, run_id)
elif dataset == "res_net":
    f = ResidualNeuralNetworkOnCIFAR10(rng=rng)
    num_iterations = 10
    output_path = "./experiments/fabolas/results/%s/entropy_search_%d" % (dataset, run_id)

os.makedirs(output_path, exist_ok=True)

info = f.get_meta_information()
bounds = np.array(info['bounds'])
results = entropy_search(f, bounds[:, 0], bounds[:, 1],
                         num_iterations=num_iterations, n_init=2,
                         rng=rng, output_path=output_path)

results["run_id"] = run_id
results['X'] = results['X'].tolist()
results['y'] = results['y'].tolist()

test_error = []
current_inc = None
current_inc_val = None

key = "incumbents"

for inc in results["incumbents"]:
    print(inc)
    if current_inc == inc:
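The example is cut off here. A hedged sketch of what such an incumbent loop typically does with the test_error / current_inc variables set up above; objective_function_test is an assumed method of the benchmark and is not confirmed by the snippet:

for inc in results["incumbents"]:
    if current_inc == inc:
        # incumbent unchanged since the last iteration: reuse the cached test error
        test_error.append(current_inc_val)
    else:
        # new incumbent: evaluate its test performance once and cache it
        current_inc = inc
        current_inc_val = f.objective_function_test(np.array(inc))["function_value"]
        test_error.append(current_inc_val)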