Ejemplo n.º 1
0
 def test_random_search(self):
     """Random search must return a single optimum inside the unit bounds."""
     result = random_search(objective_function=objective,
                            lower=self.lower, upper=self.upper,
                            num_iterations=3)
     assert len(result["x_opt"]) == 1
     x_opt = np.array(result["x_opt"])
     assert x_opt >= 0
     assert x_opt <= 1
Ejemplo n.º 2
0
 def optimization(self):
     """Run the optimizer selected by ``self.method`` and record its outcome.

     Dispatches on ``self.method`` ('bayesian_optimization' or
     'random_search'), prints the optimum, and stores the optimum, its
     objective value and the incumbent trajectory in ``self.results``.
     Any other method value is silently ignored (original behaviour).
     """
     if self.method == 'bayesian_optimization':
         results = bayesian_optimization(self.objective_function,
                                         lower=self.lower_bounds,
                                         upper=self.upper_bounds,
                                         num_iterations=self.num_iterations,
                                         acquisition_func='ei')
     elif self.method == 'random_search':
         results = random_search(self.objective_function,
                                 lower=self.lower_bounds,
                                 upper=self.upper_bounds,
                                 num_iterations=self.num_iterations)
     else:
         # Unrecognised method: keep the original do-nothing behaviour.
         return
     # Both optimizers return the same result schema, so the reporting and
     # bookkeeping are shared here instead of duplicated per branch.
     print(results['x_opt'], results['f_opt'])
     self.results = {
         'x_opt': results['x_opt'],
         'f_opt': results['f_opt'],
         'mse': results['incumbent_values']
     }
Ejemplo n.º 3
0
 def test_random_search(self):
     """The optimum found by a short random search stays within [0, 1]."""
     res = random_search(objective_function=objective,
                         lower=self.lower,
                         upper=self.upper,
                         num_iterations=3)
     x = np.array(res["x_opt"])
     assert len(res["x_opt"]) == 1
     assert x >= 0
     assert x <= 1
Ejemplo n.º 4
0
    def get_res(self):
        """Run random search over the search-space bounds and return its result dict."""
        lo, hi = self.space.bound[0], self.space.bound[1]
        return random_search(
            self.fn,
            lo,
            hi,
            num_iterations=self.params['niter'],
            output_path=str(self.output_path),
            rng=self.rng,
        )
Ejemplo n.º 5
0
def rand_search():
    """Run random search over the module-level bounds and report the optimum."""
    print('\n============= START Random Search OPTIMIZATION =============\n')
    banner = """Optimization parameters:
                    - lower = {}
                    - upper = {}
                    - num_iter = {}"""
    print(banner.format(lower, upper, args.num_iterations))

    results = random_search(objective_function,
                            lower,
                            upper,
                            num_iterations=args.num_iterations)
    # Echo the optimum and its objective value, in the original order.
    for key in ("x_opt", "f_opt"):
        print(results[key])
    print('\n============= END OPTIMIZATION =============\n')
Ejemplo n.º 6
0
 def optimization(self):
     """Run every configured optimizer and record per-method results.

     For each entry of ``self.optimization_methods`` this times the run,
     maps each tuned parameter name to its optimised value, prints the
     outcome and stores a result record under ``self.results[method]``.
     Unknown method names are skipped (original behaviour).
     """
     for optimization_method in self.optimization_methods:
         t_start = time.time()
         if optimization_method == 'bayesian_optimization':
             results = bayesian_optimization(
                 self.objective_function,
                 lower=self.lower_bounds,
                 upper=self.upper_bounds,
                 num_iterations=self.num_iterations,
                 acquisition_func='ei')
         elif optimization_method == 'random_search':
             results = random_search(self.objective_function,
                                     lower=self.lower_bounds,
                                     upper=self.upper_bounds,
                                     num_iterations=self.num_iterations)
             # The original code additionally echoed the raw optimum for
             # random search only; preserved for output compatibility.
             print(results['x_opt'])
         else:
             continue
         # Wall-clock duration in whole seconds.
         t = round(time.time() - t_start)
         # Map each tuned parameter name to its optimised value; both
         # optimizers return the same result schema, so the bookkeeping
         # below is shared instead of duplicated per branch.
         x_opt = {name: results['x_opt'][i]
                  for i, name in enumerate(self.tune_params)}
         print(x_opt, results['f_opt'])
         self.results[optimization_method] = {
             'method': optimization_method,
             'x_opt': x_opt,
             'f_opt': results['f_opt'],
             'mse': results['incumbent_values'],
             'time_consume': t
         }
Ejemplo n.º 7
0
import numpy as np

from robo.fmin import random_search


# The optimization function that we want to optimize.
# It gets a numpy array with shape (1,D) where D is the number of input dimensions
def objective_function(x):
    """Toy 1-D objective: sin(3*x0) * 4 * (x0 - 1) * (x0 + 2).

    Receives a numpy array whose first entry is the input value
    (RoBO passes shape (1, D) arrays) and returns the scalar objective.
    """
    x0 = x[0]
    return np.sin(3 * x0) * 4 * (x0 - 1) * (x0 + 2)


# Bounds of the one-dimensional input space.
lower = np.array([0])
upper = np.array([6])

# Optimize the objective function with plain random search.
results = random_search(objective_function, lower, upper, num_iterations=20)
for key in ("x_opt", "f_opt"):
    print(results[key])
Ejemplo n.º 8
0
    # NOTE(review): this excerpt opens mid `if`/`elif` dispatch on `method`;
    # the initial `if method == ...:` line lies outside the visible source.
    results = entropy_search(f, bounds[:, 0], bounds[:, 1],
                             num_iterations=n_iters, n_init=n_init)
elif method == "gp_mcmc":
    results = bayesian_optimization(f, bounds[:, 0], bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init, model_type="gp_mcmc")
elif method == "gp":
    results = bayesian_optimization(f, bounds[:, 0], bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init, model_type="gp")
elif method == "rf":
    results = bayesian_optimization(f, bounds[:, 0], bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init, model_type="rf")
elif method == "random_search":
    results = random_search(f, bounds[:, 0], bounds[:, 1],
                            num_iterations=n_iters)
elif method == "bohamiann":
    results = bohamiann(f, bounds[:, 0], bounds[:, 1],
                        num_iterations=n_iters,
                        n_init=n_init)

# Offline Evaluation
# Re-evaluate each incumbent on the benchmark's held-out test function.
test_error = []
cum_cost = 0

for i, inc in enumerate(results["incumbents"]):

    y = f.objective_function_test(np.array(inc))["function_value"]
    test_error.append(y)

    # Compute the time it would have taken to evaluate this configuration
    # NOTE(review): the remainder of this loop body is missing — a second,
    # separate example (its "Ejemplo n.º 9" header apparently lost during
    # extraction) is spliced in from here: an `elif` chain restarts at
    # module level, which is not valid Python inside this `for` loop.
    results = bayesian_optimization(f,
                                    bounds[:, 0],
                                    bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init,
                                    model_type="gp")
elif method == "rf":
    results = bayesian_optimization(f,
                                    bounds[:, 0],
                                    bounds[:, 1],
                                    num_iterations=n_iters,
                                    n_init=n_init,
                                    model_type="rf")
elif method == "random_search":
    results = random_search(f,
                            bounds[:, 0],
                            bounds[:, 1],
                            num_iterations=n_iters)
elif method == "bohamiann":
    results = bohamiann(f,
                        bounds[:, 0],
                        bounds[:, 1],
                        num_iterations=n_iters,
                        n_init=n_init)

# Offline Evaluation
test_error = []
cum_cost = 0

for i, inc in enumerate(results["incumbents"]):

    y = f.objective_function_test(np.array(inc))["function_value"]
Ejemplo n.º 10
0
import os
from fanova import fANOVA
import numpy as np
from robo.fmin import random_search
from hpolib.benchmarks.synthetic_functions import Branin
import fanova.visualizer

# Set up the Branin benchmark and pull its bounds and configuration space.
objective_function = Branin()
info = objective_function.get_meta_information()
bounds = np.array(info['bounds'])
config_space = objective_function.get_configuration_space()

# Optimize the benchmark with plain random search.
results = random_search(objective_function,
                        bounds[:, 0],
                        bounds[:, 1],
                        num_iterations=50)

# Fit a fANOVA model on the evaluated configurations and their values.
X = np.array(list(results['X']))
Y = np.array(list(results['y']))
f = fANOVA(X, Y)

print(f.quantify_importance((0, )))

# Visualization
os.makedirs("./plots", exist_ok=True)
vis = fanova.visualizer.Visualizer(f, config_space, "./plots/")
vis.plot_marginal(1)
Ejemplo n.º 11
0
from fanova import fANOVA
import numpy as np
from robo.fmin import random_search
from hpolib.benchmarks.synthetic_functions import Branin
import fanova.visualizer


# Set up the Branin benchmark and pull its bounds and configuration space.
objective_function = Branin()
info = objective_function.get_meta_information()
bounds = np.array(info['bounds'])
config_space = objective_function.get_configuration_space()

# Optimize the benchmark with plain random search.
results = random_search(objective_function, bounds[:, 0], bounds[:, 1], num_iterations=50)

# Fit a fANOVA model on the evaluated configurations and their values.
X = np.array(list(results['X']))
Y = np.array(list(results['y']))
f = fANOVA(X, Y)

print(f.quantify_importance((0, )))

# Visualization
vis = fanova.visualizer.Visualizer(f, config_space, "./plots/")
vis.plot_marginal(1)