class TestBayesianOptimization(unittest.TestCase):
    def setUp(self):
        lower = np.zeros([1])
        upper = np.ones([1])
        kernel = george.kernels.Matern52Kernel(np.array([1]), dim=1, ndim=1)
        model = GaussianProcess(kernel)
        lcb = LCB(model)
        maximizer = RandomSampling(lcb, lower, upper)
        self.solver = BayesianOptimization(objective_func, lower, upper, lcb,
                                           model, maximizer)

    def test_run(self):
        n_iters = 4
        inc, inc_val = self.solver.run(n_iters)

        assert len(inc) == 1
        assert np.array(inc) >= 0
        assert np.array(inc) <= 1
        assert len(self.solver.incumbents_values) == n_iters
        assert len(self.solver.incumbents) == n_iters
        assert len(self.solver.time_overhead) == n_iters
        assert len(self.solver.time_func_evals) == n_iters
        assert len(self.solver.runtime) == n_iters
        assert self.solver.X.shape[0] == n_iters
        assert self.solver.y.shape[0] == n_iters

    def test_choose_next(self):
        X = np.random.rand(10, 1)
        y = np.array([objective_func(x) for x in X])
        x_new = self.solver.choose_next(X, y)
        assert x_new.shape[0] == 1
        assert x_new >= 0
        assert x_new <= 1
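
The tests above reference a module-level objective_func that this excerpt does not include. A minimal stand-in, assuming any scalar-valued function on the unit interval will do (this particular quadratic is an illustrative assumption, not the original fixture):

import numpy as np

def objective_func(x):
    # x is a numpy array of shape (1,); return a scalar value to minimize.
    return float((x[0] - 0.5) ** 2)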

    def bo(self, model, seed):
        """Bayesian optimization for ABLR.

        Parameters
        ----------
        model : Net object
        seed : int
        """
        rng = np.random.RandomState(seed)
        model1 = model
        acq = EI(model1)
        max_func = SciPyOptimizer(acq, self.lower_x, self.upper_x)

        if model1.transfer:
            init = 1  # one initial point in the transfer case
        else:
            init = 5

        bo = BayesianOptimization(self.f.call,
                                  lower=self.lower_x,
                                  upper=self.upper_x,
                                  acquisition_func=acq,
                                  model=model1,
                                  maximize_func=max_func,
                                  initial_points=init,
                                  initial_design=init_latin_hypercube_sampling,
                                  rng=rng)

        bo.run(num_iterations=50)

        return bo.incumbents_values

class TestBayesianOptimization(unittest.TestCase):

    def setUp(self):
        lower = np.zeros([1])
        upper = np.ones([1])
        kernel = george.kernels.Matern52Kernel(np.array([1]), dim=1, ndim=1)
        model = GaussianProcess(kernel)
        lcb = LCB(model)
        maximizer = Direct(lcb, lower, upper, n_func_evals=10)
        self.solver = BayesianOptimization(objective_func, lower, upper,
                                           lcb, model, maximizer)

    def test_run(self):
        n_iters = 4
        inc, inc_val = self.solver.run(n_iters)

        assert len(inc) == 1
        assert np.array(inc) >= 0
        assert np.array(inc) <= 1
        assert len(self.solver.incumbents_values) == n_iters
        assert len(self.solver.incumbents) == n_iters
        assert len(self.solver.time_overhead) == n_iters
        assert len(self.solver.time_func_evals) == n_iters
        assert len(self.solver.runtime) == n_iters
        assert self.solver.X.shape[0] == n_iters
        assert self.solver.y.shape[0] == n_iters

    def test_choose_next(self):
        X = np.random.rand(10, 1)
        y = np.array([objective_func(x) for x in X])
        x_new = self.solver.choose_next(X, y)
        assert x_new.shape[0] == 1
        assert x_new >= 0
        assert x_new <= 1
def benchmark_function(
        function,
        seed,
        n_eval=20,
        n_initial_points=5,
        model_class=None,
        model_kwargs=None,
):
    lower = np.array([-10])
    upper = np.array([10])
    rng1 = np.random.RandomState(seed)
    rng2 = np.random.RandomState(seed)

    cov_amp = 2
    n_dims = lower.shape[0]

    initial_ls = np.ones([n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls,
                                               ndim=n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    if model_class is None:
        model = GaussianProcess(
            kernel,
            prior=prior,
            rng=rng1,
            normalize_output=True,
            normalize_input=True,
            lower=lower,
            upper=upper,
            noise=1e-3,
        )
    else:
        model = model_class(rng=rng1, **model_kwargs)

    acq = LogEI(model)
    max_func = SciPyOptimizer(acq, lower, upper, n_restarts=50, rng=rng2)

    bo = BayesianOptimization(
        objective_func=function,
        lower=lower,
        upper=upper,
        acquisition_func=acq,
        model=model,
        initial_points=n_initial_points,
        initial_design=init_latin_hypercube_sampling,
        rng=rng2,
        maximize_func=max_func
    )

    bo.run(n_eval)
    rval = np.minimum.accumulate(bo.y)

    return rval
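
A usage sketch for benchmark_function, assuming the RoBO imports used above are in scope; the quadratic objective and the seed are illustrative assumptions:

def quadratic(x):
    # Scalar objective over [-10, 10] with its minimum at x = 2.
    return float((x[0] - 2.0) ** 2)

# rval[i] is the best function value observed after i + 1 evaluations.
rval = benchmark_function(quadratic, seed=0, n_eval=20)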
Example #6
def fmin_task(task, num_iterations=30, model="GPy", maximizer="direct", kernel="Matern52", acquisition_fkt="EI"):

    if model == "GPy":
        k = get_gpy_kernel(kernel, task)
        if k is None:
            return None
        m = gpy_model(k, optimize=True, noise_variance=1e-4, num_restarts=10)
    elif model == "GPyMCMC":
        k = get_gpy_kernel(kernel, task)
        if k is None:
            return None
        m = GPyModelMCMC(k, optimize=True, noise_variance=1e-4, num_restarts=10)
    elif model == "pyGPs":
        k = get_pygps_kernel(kernel, task)
        if k is None:
            return None
        m = PyGPModel(k, optimize=True, num_restarts=10)
    else:
        print("ERROR: %s is not a valid model!" % model)
        return None

    if acquisition_fkt == "EI":
        a = EI(m, X_upper=task.X_upper, X_lower=task.X_lower, compute_incumbent=compute_incumbent, par=0.1)
    elif acquisition_fkt == "PI":
        a = PI(m, X_upper=task.X_upper, X_lower=task.X_lower, compute_incumbent=compute_incumbent, par=0.1)
    elif acquisition_fkt == "UCB":
        a = UCB(m, X_upper=task.X_upper, X_lower=task.X_lower, compute_incumbent=compute_incumbent)
    elif acquisition_fkt == "Entropy":
        a = Entropy(m, X_upper=task.X_upper, X_lower=task.X_lower, compute_incumbent=compute_incumbent, par=0.1)
    elif acquisition_fkt == "EntropyMC":
        a = EntropyMC(m, X_upper=task.X_upper, X_lower=task.X_lower, compute_incumbent=compute_incumbent, par=0.1)
    else:
        print("ERROR: %s is not a valid acquisition function!" % acquisition_fkt)
        return None

    if maximizer == "cmaes":
        max_fkt = cmaes.CMAES(a, task.X_lower, task.X_upper)
    elif maximizer == "direct":
        max_fkt = direct.Direct(a, task.X_lower, task.X_upper)
    elif maximizer == "stochastic_local_search":
        max_fkt = stochastic_local_search.StochasticLocalSearch(a, task.X_lower, task.X_upper)
    elif maximizer == "grid_search":
        max_fkt = grid_search.GridSearch(a, task.X_lower, task.X_upper)
    else:
        print(("ERROR: %s is not a valid function to maximize the acquisition function!" % (acquisition_fkt)))
        return None

    bo = BayesianOptimization(acquisition_func=a,
                              model=m,
                              maximize_func=max_fkt,
                              task=task)

    best_x, f_min = bo.run(num_iterations)
    return best_x, f_min

Example #7
    def Gp(self, seed):
        """
        Bayesian optimization with Gaussian process (MCMC).
        """
        lower = np.zeros((self.x.shape[2]))
        upper = np.ones((self.x.shape[2]))
        inc = np.ones((self.T, 50))
        for t in range(self.T):
            rng = np.random.RandomState(seed)  # this per task per random run
            cov_amp = 2
            n_dims = self.x.shape[2]
            initial_ls = np.ones([n_dims])
            exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=n_dims)
            kernel = cov_amp * exp_kernel
            prior = DefaultPrior(len(kernel) + 1)
            n_hypers = 3 * len(kernel)

            if n_hypers % 2 == 1:
                n_hypers += 1

            model = GaussianProcess(kernel,
                                    prior=prior,
                                    rng=rng,
                                    normalize_output=False,
                                    normalize_input=False,
                                    noise=1e-6)
            acq = EI(model)
            # for the initial design, initialize the class with x-task
            f = Objective_function(self.x[t], self.y[t], self.metadata[t])
            indes = InDesign(self.x[t])
            randdes = Rand_Design(self.x[t])
            max_func = RandomSampling(acq,
                                      lower,
                                      upper,
                                      randdes,
                                      n_samples=100,
                                      rng=rng)

            bo = BayesianOptimization(
                f.call,
                lower=lower,
                upper=upper,
                acquisition_func=acq,
                model=model,
                initial_design=randdes.initial_design_random,
                initial_points=3,
                rng=rng,
                maximize_func=max_func)

            bo.run(num_iterations=50)
            inc[t] = bo.incumbents_values

        return inc
Example #8
    def test_json_base_solver(self):
        task = Levy()
        kernel = george.kernels.Matern52Kernel([1.0], ndim=1)
        model = GaussianProcess(kernel)
        ei = EI(model, task.X_lower, task.X_upper)
        maximizer = Direct(ei, task.X_lower, task.X_upper)
        solver = BayesianOptimization(acquisition_func=ei,
                                      model=model,
                                      maximize_func=maximizer,
                                      task=task)
        solver.run(1, X=None, Y=None)
        iteration = 0
        data = solver.get_json_data(it=iteration)
        assert data['iteration'] == iteration
Example #9
    def test_json_base_solver(self):
        task = Levy()
        kernel = george.kernels.Matern52Kernel([1.0], ndim=1)
        model = GaussianProcess(kernel)
        ei = EI(model, task.X_lower, task.X_upper)
        maximizer = Direct(ei, task.X_lower, task.X_upper)
        solver = BayesianOptimization(acquisition_func=ei,
                                      model=model,
                                      maximize_func=maximizer,
                                      task=task)
        solver.run(1, X=None, Y=None)
        iteration = 0
        data = solver.get_json_data(it=iteration)
        assert data['iteration'] == iteration

Example #10
    def bo(self, fun, seed, model_net, indes, randdes):
        """Bayesian optimization for ABLR.

        Parameters
        ----------
        fun : Function_dataset object
            Mapping of the data.
        model_net : Net object
        seed : int
        indes : Initial_design object
        randdes : random design object (used for the initial design)
        """
        # BO for the network
        lower = np.zeros((self.x.shape[2]))
        upper = np.ones((self.x.shape[2]))
        rng = np.random.RandomState(seed)
        cov_amp = 2
        n_dims = self.x.shape[2]
        initial_ls = np.ones([n_dims])
        exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=n_dims)
        kernel = cov_amp * exp_kernel
        prior = DefaultPrior(len(kernel) + 1)
        n_hypers = 3 * len(kernel)

        if n_hypers % 2 == 1:
            n_hypers += 1

        model = model_net
        acq = EI(model)
        f = fun
        # max_func = RandomSampling(acq, lower, upper, indes, n_samples=300, rng=rng)
        max_func = RandomSampling(acq,
                                  lower,
                                  upper,
                                  randdes,
                                  n_samples=300,
                                  rng=rng)
        bo = BayesianOptimization(f.call,
                                  lower=lower,
                                  upper=upper,
                                  acquisition_func=acq,
                                  model=model,
                                  initial_design=randdes.initial_design_random,
                                  initial_points=3,
                                  rng=rng,
                                  maximize_func=max_func)

        bo.run(num_iterations=50)
        return bo.incumbents_values
Example #12
def build_optimizer(model, maximizer, acquisition_func):
    """
    General interface for Bayesian optimization for global black box
    optimization problems.

    Parameters
    ----------
    model: Model object
        The surrogate model of the objective function; must expose
        ``lower`` and ``upper`` bounds.
    maximizer: str
        The optimizer for the acquisition function.
        Can be one of ``{"random", "scipy", "differential_evolution"}``
    acquisition_func:
        The instantiated acquisition function

    Returns
    -------
        Optimizer

    """
    if maximizer == "random":
        max_func = RandomSampling(acquisition_func,
                                  model.lower,
                                  model.upper,
                                  rng=None)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(acquisition_func,
                                  model.lower,
                                  model.upper,
                                  rng=None)
    elif maximizer == "differential_evolution":
        max_func = DifferentialEvolution(acquisition_func,
                                         model.lower,
                                         model.upper,
                                         rng=None)
    else:
        raise ValueError("'{}' is not a valid function to maximize the "
                         "acquisition function".format(maximizer))

    # NOTE: Internal RNG of BO won't be used.
    # NOTE: Number of initial points won't be used within BO, but rather outside
    bo = BayesianOptimization(
        lambda: None,
        model.lower,
        model.upper,
        acquisition_func,
        model,
        max_func,
        initial_points=None,
        rng=None,
        initial_design=init_latin_hypercube_sampling,
        output_path=None,
    )

    return bo
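
A sketch of how this factory might be driven, assuming numpy, george, and the RoBO classes used elsewhere on this page (GaussianProcess, LogEI) are imported; the GP construction and the toy data are illustrative assumptions:

lower, upper = np.zeros(1), np.ones(1)
kernel = 2 * george.kernels.Matern52Kernel(np.ones(1), ndim=1)
model = GaussianProcess(kernel, lower=lower, upper=upper)

bo = build_optimizer(model, maximizer="random", acquisition_func=LogEI(model))

# The caller drives the loop itself: propose the next point from the
# observations collected so far.
X = np.random.rand(10, 1)                     # evaluated points, shape (N, D)
y = np.array([(x[0] - 0.3) ** 2 for x in X])  # observed values, shape (N,)
x_new = bo.choose_next(X, y)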
Example #13
def bohamiann(objective_function,
              lower,
              upper,
              num_iterations=30,
              acquisition_func="log_ei",
              n_init=3,
              rng=None):
    """
    General interface for Bayesian optimization for global black box optimization problems.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0]
    assert n_init <= num_iterations, "Number of initial design points has to be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    model = BayesianNeuralNetwork(sampling_method="sghmc",
                                  l_rate=np.sqrt(1e-4),
                                  mdecay=0.05,
                                  burn_in=3000,
                                  n_iters=50000,
                                  precondition=True,
                                  normalize_input=True,
                                  normalize_output=True)

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)

    else:
        print("ERROR: %s is not a valid acquisition function!" %
              acquisition_func)
        return

    max_func = Direct(a, lower, upper, verbose=False)

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              a,
                              model,
                              max_func,
                              initial_points=n_init,
                              rng=rng)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    return results
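
A minimal call sketch, assuming bohamiann is importable (e.g. from robo.fmin import bohamiann); the objective is an illustrative assumption:

import numpy as np

def objective(x):
    # x is a numpy array of shape (D,); return a scalar.
    return float(np.sin(3 * x[0]) * 4 * (x[0] - 1) * (x[0] + 2))

res = bohamiann(objective, lower=np.zeros(1), upper=np.ones(1),
                num_iterations=20, n_init=3)
print(res["x_opt"], res["f_opt"])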
Example #14
def entropy_search(objective_function,
                   lower,
                   upper,
                   num_iterations=30,
                   maximizer="random",
                   model="gp_mcmc",
                   n_init=3,
                   output_path=None,
                   rng=None):
    """
    Entropy search for global black box optimization problems. This is a reimplementation of the entropy search
    algorithm by Hennig and Schuler [1].

    [1] Entropy search for information-efficient global optimization.
        P. Hennig and C. Schuler.
        JMLR, (1), 2012.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    maximizer: {"random", "scipy", "differential_evolution"}
        Defines how the acquisition function is maximized.
    model: {"gp", "gp_mcmc"}
        The model for the objective function.
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0], "Dimension miss match"
    assert np.all(lower < upper), "Lower bound >= upper bound"
    assert n_init <= num_iterations, "Number of initial design point has to be <= than the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    cov_amp = 2
    n_dims = lower.shape[0]

    initial_ls = np.ones([n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1

    if model == "gp":
        gp = GaussianProcess(kernel,
                             prior=prior,
                             rng=rng,
                             normalize_output=False,
                             normalize_input=True,
                             lower=lower,
                             upper=upper)
    elif model == "gp_mcmc":
        gp = GaussianProcessMCMC(kernel,
                                 prior=prior,
                                 n_hypers=n_hypers,
                                 chain_length=200,
                                 burnin_steps=100,
                                 normalize_input=True,
                                 normalize_output=False,
                                 rng=rng,
                                 lower=lower,
                                 upper=upper)
    else:
        print("ERROR: %s is not a valid model!" % model)
        return

    a = InformationGain(gp, lower=lower, upper=upper, sampling_acquisition=EI)

    if model == "gp":
        acquisition_func = a
    elif model == "gp_mcmc":
        acquisition_func = MarginalizationGPMCMC(a)

    if maximizer == "random":
        max_func = RandomSampling(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "differential_evolution":
        max_func = DifferentialEvolution(acquisition_func,
                                         lower,
                                         upper,
                                         rng=rng)
    else:
        print(
            "ERROR: %s is not a valid function to maximize the acquisition function!"
            % maximizer)
        return

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              acquisition_func,
                              gp,
                              max_func,
                              initial_design=init_latin_hypercube_sampling,
                              initial_points=n_init,
                              rng=rng,
                              output_path=output_path)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = [y for y in bo.y]
    return results
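
A minimal call sketch, assuming entropy_search is importable (e.g. from robo.fmin import entropy_search); the objective is an illustrative assumption:

import numpy as np

def objective(x):
    # x is a numpy array of shape (D,); return a scalar.
    return float((x[0] - 0.3) ** 2)

res = entropy_search(objective, lower=np.zeros(1), upper=np.ones(1),
                     num_iterations=20, model="gp", maximizer="random")
print(res["x_opt"], res["f_opt"])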
Example #15
'''
Created on Mar 16, 2016

@author: Aaron Klein
'''

import george

from robo.maximizers.direct import Direct
from robo.models.gaussian_process import GaussianProcess
from robo.task.synthetic_functions.levy import Levy
from robo.acquisition.ei import EI
from robo.solver.bayesian_optimization import BayesianOptimization


task = Levy()
kernel = george.kernels.Matern52Kernel([1.0], ndim=1)


model = GaussianProcess(kernel)

ei = EI(model, task.X_lower, task.X_upper)

maximizer = Direct(ei, task.X_lower, task.X_upper)

bo = BayesianOptimization(acquisition_func=ei,
                          model=model,
                          maximize_func=maximizer,
                          task=task)

print(bo.run(10))
Example #16
def fmin(objective_func,
         X_lower,
         X_upper,
         num_iterations=30,
         maximizer="direct",
         acquisition="LogEI",
         initX=None,
         initY=None):

    assert X_upper.shape[0] == X_lower.shape[0]

    class Task(BaseTask):
        def __init__(self, X_lower, X_upper, objective_fkt):
            super(Task, self).__init__(X_lower, X_upper)
            self.objective_function = objective_fkt

    task = Task(X_lower, X_upper, objective_func)

    cov_amp = 2

    initial_ls = np.ones([task.n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=task.n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1
    model = GaussianProcessMCMC(kernel,
                                prior=prior,
                                n_hypers=n_hypers,
                                chain_length=200,
                                burnin_steps=100)

    if acquisition == "EI":
        a = EI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "LogEI":
        a = LogEI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "PI":
        a = PI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "UCB":
        a = LCB(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "InformationGain":
        a = InformationGain(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "InformationGainMC":
        a = InformationGainMC(
            model,
            X_upper=task.X_upper,
            X_lower=task.X_lower,
        )
    else:
        logger.error("ERROR: %s is not a"
                     "valid acquisition function!" % (acquisition))
        return None

    acquisition_func = IntegratedAcquisition(model, a, task.X_lower,
                                             task.X_upper)

    if maximizer == "cmaes":
        max_fkt = cmaes.CMAES(acquisition_func, task.X_lower, task.X_upper)
    elif maximizer == "direct":
        max_fkt = direct.Direct(acquisition_func, task.X_lower, task.X_upper)
    elif maximizer == "stochastic_local_search":
        max_fkt = stochastic_local_search.StochasticLocalSearch(
            acquisition_func, task.X_lower, task.X_upper)
    elif maximizer == "grid_search":
        max_fkt = grid_search.GridSearch(acquisition_func, task.X_lower,
                                         task.X_upper)
    else:
        logger.error("ERROR: %s is not a valid function"
                     "to maximize the acquisition function!" % (acquisition))
        return None

    bo = BayesianOptimization(acquisition_func=acquisition_func,
                              model=model,
                              maximize_func=max_fkt,
                              task=task)

    best_x, f_min = bo.run(num_iterations, X=initX, Y=initY)
    return task.retransform(best_x), f_min, model, acquisition_func, max_fkt
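
A call sketch for this fmin variant; it assumes the old RoBO task convention that the objective receives a (1, D) array and returns a (1, 1) array, and the objective itself is an illustrative assumption:

import numpy as np

def objective(x):
    # x has shape (1, D); return an array of shape (1, 1).
    return np.array([[float(np.sum(x ** 2))]])

X_lower = np.array([-5.0])
X_upper = np.array([5.0])
best_x, f_min, model, acq, max_fkt = fmin(objective, X_lower, X_upper,
                                          num_iterations=20)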
Example #17
def fmin(objective_fkt,
         X_lower,
         X_upper,
         num_iterations=30,
         maximizer="direct",
         acquisition_fkt="EI"):

    assert X_upper.shape[0] == X_lower.shape[0]

    class Task(BaseTask):
        def __init__(self, X_lower, X_upper, objective_fkt):
            super(Task, self).__init__(X_lower, X_upper)
            self.objective_function = objective_fkt

    task = Task(X_lower, X_upper, objective_fkt)

    noise = 1.0
    cov_amp = 2

    initial_ls = np.ones([task.n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=task.n_dims)
    noise_kernel = george.kernels.WhiteKernel(noise, ndim=task.n_dims)
    kernel = cov_amp * (exp_kernel + noise_kernel)

    prior = DefaultPrior(len(kernel))

    model = GaussianProcessMCMC(kernel,
                                prior=prior,
                                n_hypers=20,
                                chain_length=100,
                                burnin_steps=50)

    if acquisition_fkt == "EI":
        a = EI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "PI":
        a = PI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "UCB":
        a = LCB(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "Entropy":
        a = Entropy(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "EntropyMC":
        a = EntropyMC(
            model,
            X_upper=task.X_upper,
            X_lower=task.X_lower,
        )
    else:
        logger.error("ERROR: %s is not a"
                     "valid acquisition function!" % (acquisition_fkt))
        return None

    if maximizer == "cmaes":
        max_fkt = cmaes.CMAES(a, task.X_lower, task.X_upper)
    elif maximizer == "direct":
        max_fkt = direct.Direct(a, task.X_lower, task.X_upper)
    elif maximizer == "stochastic_local_search":
        max_fkt = stochastic_local_search.StochasticLocalSearch(
            a, task.X_lower, task.X_upper)
    elif maximizer == "grid_search":
        max_fkt = grid_search.GridSearch(a, task.X_lower, task.X_upper)
    else:
        logger.error("ERROR: %s is not a valid function"
                     "to maximize the acquisition function!" %
                     (acquisition_fkt))
        return None

    bo = BayesianOptimization(acquisition_func=a,
                              model=model,
                              maximize_func=max_fkt,
                              task=task)

    best_x, f_min = bo.run(num_iterations)
    return best_x, f_min
Example #18
def main(method):
    for i in range(1, 40):
        task = WithinModelComparison(seed=i + 10)
        #Entropy MC
        try:
            path = "/home/kleinaa/experiments/entropy_search/model_comparison/func_" + str(i) + "/"
            os.makedirs(path)
        except OSError:
            pass

        if method == "entropy_mc":
            save_dir = os.path.join(path, "entropy_mc/")
            kernel = GPy.kern.RBF(2, lengthscale=0.1, variance=1.0)
            model = GPyModel(kernel, optimize=False, noise_variance=1e-3)
            acq = EntropyMC(model, task.X_lower, task.X_upper, optimize_posterior_mean_and_std, Nb=50, Nf=800, Np=100)
            maximizer = Direct(acq, task.X_lower, task.X_upper)
            bo = BayesianOptimization(model=model, acquisition_fkt=acq,
                                      recommendation_strategy=optimize_posterior_mean_and_std,
                                      maximize_fkt=maximizer, task=task, save_dir=save_dir)
            bo.run(100)
        elif method == "entropy_mc_light":
            save_dir = os.path.join(path, "entropy_mc_light/")
            kernel = GPy.kern.RBF(2, lengthscale=0.1, variance=1.0)
            model = GPyModel(kernel, optimize=False, noise_variance=1e-3)
            acq = EntropyMC(model, task.X_lower, task.X_upper, optimize_posterior_mean_and_std, Nb=50, Nf=50, Np=100)
            maximizer = Direct(acq, task.X_lower, task.X_upper)
            bo = BayesianOptimization(model=model, acquisition_fkt=acq,
                                      recommendation_strategy=optimize_posterior_mean_and_std,
                                      maximize_fkt=maximizer, task=task, save_dir=save_dir)
            bo.run(100)

        #EI
        elif method == "ei":
            save_dir = os.path.join(path, "ei/")
            kernel = GPy.kern.RBF(2, lengthscale=0.1, variance=1.0)
            model = GPyModel(kernel, optimize=False, noise_variance=1e-3)
            acq = EI(model, task.X_lower, task.X_upper, compute_incumbent)
            maximizer = Direct(acq, task.X_lower, task.X_upper)
            bo = BayesianOptimization(model=model, acquisition_fkt=acq,
                                      recommendation_strategy=optimize_posterior_mean_and_std,
                                      maximize_fkt=maximizer, task=task, save_dir=save_dir)
            bo.run(100)
        #PI
        elif method == "pi":
            save_dir = os.path.join(path, "pi/")
            kernel = GPy.kern.RBF(2, lengthscale=0.1, variance=1.0)
            model = GPyModel(kernel, optimize=False, noise_variance=1e-3)
            acq = PI(model, task.X_lower, task.X_upper, compute_incumbent)
            maximizer = Direct(acq, task.X_lower, task.X_upper)
            bo = BayesianOptimization(model=model, acquisition_fkt=acq,
                                      recommendation_strategy=optimize_posterior_mean_and_std,
                                      maximize_fkt=maximizer, task=task, save_dir=save_dir)
            bo.run(100)
        #UCB
        elif method == "ucb":
            save_dir = os.path.join(path, "ucb/")
            kernel = GPy.kern.RBF(2, lengthscale=0.1, variance=1.0)
            model = GPyModel(kernel, optimize=False, noise_variance=1e-3)
            acq = UCB(model, task.X_lower, task.X_upper)
            maximizer = Direct(acq, task.X_lower, task.X_upper)
            bo = BayesianOptimization(model=model, acquisition_fkt=acq,
                                      recommendation_strategy=optimize_posterior_mean_and_std,
                                      maximize_fkt=maximizer, task=task, save_dir=save_dir)
            bo.run(100)
        #Entropy
        elif method == "entropy":
            save_dir = os.path.join(path, "entropy/")
            kernel = GPy.kern.RBF(2, lengthscale=0.1, variance=1.0)
            model = GPyModel(kernel, optimize=False, noise_variance=1e-3)
            acq = Entropy(model, task.X_lower, task.X_upper, compute_inc=optimize_posterior_mean_and_std, Nb=50)
            maximizer = Direct(acq, task.X_lower, task.X_upper)
            bo = BayesianOptimization(model=model, acquisition_fkt=acq,
                                      recommendation_strategy=optimize_posterior_mean_and_std,
                                      maximize_fkt=maximizer, task=task, save_dir=save_dir)

            bo.run(100)
Example #19
import GPy
from robo.models.gpy_model_mcmc import GPyModelMCMC
from robo.acquisition.ei import EI
from robo.acquisition.integrated_acquisition import IntegratedAcquisition
from robo.maximizers.direct import Direct
from robo.recommendation.incumbent import compute_incumbent
from robo.recommendation.optimize_posterior import optimize_posterior_mean_and_std
from robo.task.branin import Branin
from robo.solver.bayesian_optimization import BayesianOptimization


branin = Branin()

kernel = GPy.kern.Matern52(input_dim=branin.n_dims, ARD=True)
model = GPyModelMCMC(kernel, burnin=100, chain_length=100, n_hypers=20)

ei = EI(model, X_upper=branin.X_upper, X_lower=branin.X_lower, compute_incumbent=compute_incumbent, par=0.1)
acquisition_func = IntegratedAcquisition(model, ei)


maximizer = Direct(acquisition_func, branin.X_lower, branin.X_upper)
bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          recommendation_strategy=optimize_posterior_mean_and_std,
                          task=branin,
                          save_dir='/tmp'
                          )

bo.run(30)

Example #20
def bayesian_optimization(objective_function,
                          lower,
                          upper,
                          num_iterations=30,
                          maximizer="random",
                          acquisition_func="log_ei",
                          model_type="gp_mcmc",
                          n_init=3,
                          rng=None,
                          output_path=None):
    """
    General interface for Bayesian optimization for global black box
    optimization problems.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy
        array (D,) as input and returns the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    maximizer: {"direct", "cmaes", "random", "scipy"}
        The optimizer for the acquisition function. NOTE: "cmaes" only works in D > 1 dimensions
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    model_type: {"gp", "gp_mcmc", "rf"}
        The model for the objective function.
    n_init: int
        Number of points for the initial design. Make sure that it
        is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0], "Dimension miss match"
    assert np.all(lower < upper), "Lower bound >= upper bound"
    assert n_init <= num_iterations, "Number of initial design point has to be <= than the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    cov_amp = 2
    n_dims = lower.shape[0]

    initial_ls = np.ones([n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1

    if model_type == "gp":
        model = GaussianProcess(kernel,
                                prior=prior,
                                rng=rng,
                                normalize_output=False,
                                normalize_input=True,
                                lower=lower,
                                upper=upper)
    elif model_type == "gp_mcmc":
        model = GaussianProcessMCMC(kernel,
                                    prior=prior,
                                    n_hypers=n_hypers,
                                    chain_length=200,
                                    burnin_steps=100,
                                    normalize_input=True,
                                    normalize_output=True,
                                    rng=rng,
                                    lower=lower,
                                    upper=upper)

    elif model_type == "rf":
        model = RandomForest(rng=rng)

    else:
        raise ValueError("'{}' is not a valid model".format(model_type))

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)
    else:
        raise ValueError("'{}' is not a valid acquisition function".format(
            acquisition_func))

    if model_type == "gp_mcmc":
        acquisition_func = MarginalizationGPMCMC(a)
    else:
        acquisition_func = a

    if maximizer == "cmaes":
        max_func = CMAES(acquisition_func,
                         lower,
                         upper,
                         verbose=False,
                         rng=rng)
    elif maximizer == "direct":
        max_func = Direct(acquisition_func, lower, upper, verbose=True)
    elif maximizer == "random":
        max_func = RandomSampling(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(acquisition_func, lower, upper, rng=rng)

    else:
        raise ValueError("'{}' is not a valid function to maximize the "
                         "acquisition function".format(maximizer))

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              acquisition_func,
                              model,
                              max_func,
                              initial_points=n_init,
                              rng=rng,
                              output_path=output_path)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = [y for y in bo.y]
    return results
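
Per the docstring above, the objective receives a numpy array of shape (D,) and returns a scalar, so a call might look like this (the objective is an illustrative assumption):

import numpy as np

def objective(x):
    return float(np.sin(3 * x[0]) * 4 * (x[0] - 1) * (x[0] + 2))

res = bayesian_optimization(objective,
                            lower=np.array([0.0]),
                            upper=np.array([6.0]),
                            num_iterations=30,
                            model_type="gp",
                            maximizer="random")
print(res["x_opt"], res["f_opt"])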
Example #21
# Imports missing from the original snippet (module paths assumed from RoBO's layout).
import numpy as np
import george

from robo.models.gaussian_process_mcmc import GaussianProcessMCMC
from robo.acquisition.ei import EI
from robo.maximizers.direct import Direct
from robo.task.controlling_tasks.walker import Walker
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.priors.default_priors import DefaultPrior
from robo.acquisition.integrated_acquisition import IntegratedAcquisition

task = Walker()
test = '/test'

kernel = 1 * george.kernels.Matern52Kernel(np.ones([task.n_dims]),
                                           ndim=task.n_dims)
prior = DefaultPrior(len(kernel) + 1)
model = GaussianProcessMCMC(kernel,
                            prior=prior,
                            chain_length=100,
                            burnin_steps=200,
                            n_hypers=8)

ei = EI(model, task.X_lower, task.X_upper)
acquisition_func = IntegratedAcquisition(model, ei, task.X_lower, task.X_upper)

maximizer = Direct(acquisition_func, task.X_lower, task.X_upper)

bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=task,
                          save_dir=test)

print(bo.run(2))
Example #22
# Specifies the task object that defines the objective functions and
# the bounds of the input space
branin = Branin()

# Instantiate the random forest. Branin does not have any categorical
# values, thus we pass a zero vector here.
model = RandomForest(branin.types)

# Define the acquisition function
acquisition_func = EI(model,
                      X_upper=branin.X_upper,
                      X_lower=branin.X_lower,
                      par=0.1)

# Strategy of estimating the incumbent
rec = PosteriorMeanAndStdOptimization(model, branin.X_lower,
                                      branin.X_upper, with_gradients=False)

# Define the maximizer
maximizer = CMAES(acquisition_func, branin.X_lower, branin.X_upper)

# Now we defined everything we need to instantiate the solver
bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=branin,
                          incumbent_estimation=rec)

bo.run(100)
Example #23
def bayesian_optimization(objective_function,
                          lower,
                          upper,
                          num_iterations=30,
                          maximizer="direct",
                          acquisition_func="log_ei",
                          model="gp_mcmc",
                          n_init=3,
                          rng=None):
    """
    General interface for Bayesian optimization for global black box optimization problems.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    maximizer: {"direct", "cmaes"}
        Defines how the acquisition function is maximized. NOTE: "cmaes" only works in D > 1 dimensions
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    model: {"gp", "gp_mcmc"}
        The model for the objective function.
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0]
    assert n_init <= num_iterations, "Number of initial design points has to be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    cov_amp = 2
    n_dims = lower.shape[0]

    initial_ls = np.ones([n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1

    if model == "gp":
        gp = GaussianProcess(kernel,
                             prior=prior,
                             rng=rng,
                             normalize_output=True,
                             normalize_input=True,
                             lower=lower,
                             upper=upper)
    elif model == "gp_mcmc":
        gp = GaussianProcessMCMC(kernel,
                                 prior=prior,
                                 n_hypers=n_hypers,
                                 chain_length=200,
                                 burnin_steps=100,
                                 normalize_input=True,
                                 normalize_output=True,
                                 rng=rng,
                                 lower=lower,
                                 upper=upper)
    else:
        print("ERROR: %s is not a valid model!" % model)
        return

    if acquisition_func == "ei":
        a = EI(gp)
    elif acquisition_func == "log_ei":
        a = LogEI(gp)
    elif acquisition_func == "pi":
        a = PI(gp)
    elif acquisition_func == "lcb":
        a = LCB(gp)
    else:
        print("ERROR: %s is not a valid acquisition function!" %
              acquisition_func)
        return

    if model == "gp":
        acquisition_func = a
    elif model == "gp_mcmc":
        acquisition_func = MarginalizationGPMCMC(a)

    if maximizer == "cmaes":
        max_func = CMAES(acquisition_func,
                         lower,
                         upper,
                         verbose=False,
                         rng=rng)
    elif maximizer == "direct":
        max_func = Direct(acquisition_func, lower, upper, verbose=False)
    else:
        print(
            "ERROR: %s is not a valid function to maximize the acquisition function!"
            % maximizer)
        return

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              acquisition_func,
                              gp,
                              max_func,
                              initial_points=n_init,
                              rng=rng)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    return results
Example #24
def build_optimizer(model,
                    maximizer="random",
                    acquisition_func="log_ei",
                    maximizer_seed=1):
    """
    General interface for Bayesian optimization for global black box
    optimization problems.

    Parameters
    ----------
    maximizer: {"random", "scipy", "differential_evolution"}
        The optimizer for the acquisition function.
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    maximizer_seed: int
        Seed for random number generator of the acquisition function maximizer

    Returns
    -------
        Optimizer
    """

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)
    else:
        raise ValueError("'{}' is not a valid acquisition function".format(
            acquisition_func))

    if isinstance(model, GaussianProcessMCMC):
        acquisition_func = MarginalizationGPMCMC(a)
    else:
        acquisition_func = a

    maximizer_rng = numpy.random.RandomState(maximizer_seed)
    if maximizer == "random":
        max_func = RandomSampling(acquisition_func,
                                  model.lower,
                                  model.upper,
                                  rng=maximizer_rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(acquisition_func,
                                  model.lower,
                                  model.upper,
                                  rng=maximizer_rng)
    elif maximizer == "differential_evolution":
        max_func = DifferentialEvolution(acquisition_func,
                                         model.lower,
                                         model.upper,
                                         rng=maximizer_rng)
    else:
        raise ValueError("'{}' is not a valid function to maximize the "
                         "acquisition function".format(maximizer))

    # NOTE: Internal RNG of BO won't be used.
    # NOTE: Number of initial points won't be used within BO, but rather outside
    bo = BayesianOptimization(lambda: None,
                              model.lower,
                              model.upper,
                              acquisition_func,
                              model,
                              max_func,
                              initial_points=None,
                              rng=None,
                              initial_design=init_latin_hypercube_sampling,
                              output_path=None)

    return bo

Example #25
# numpy/GPy imports missing from the original snippet.
import numpy as np
import GPy

from robo.task.rembo import REMBO
from robo.task.synthetic_functions.branin import Branin
from robo.models.gpy_model import GPyModel
from robo.maximizers.cmaes import CMAES
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.acquisition.ei import EI


class BraninInBillionDims(REMBO):
    def __init__(self):
        self.b = Branin()
        X_lower = np.concatenate((self.b.X_lower, np.zeros([999998])))
        X_upper = np.concatenate((self.b.X_upper, np.ones([999998])))
        super(BraninInBillionDims, self).__init__(X_lower, X_upper, d=2)

    def objective_function(self, x):
        return self.b.objective_function(x[:, :2])

task = BraninInBillionDims()
kernel = GPy.kern.Matern52(input_dim=task.n_dims)
model = GPyModel(kernel, optimize=True, num_restarts=10)
acquisition_func = EI(model, task.X_lower, task.X_upper)
maximizer = CMAES(acquisition_func, task.X_lower, task.X_upper)
bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=task)

bo.run(500)
Example #26

import setup_logger

import GPy
from robo.models.gpy_model import GPyModel
from robo.acquisition.ei import EI
from robo.maximizers.cmaes import CMAES
from robo.task.synthetic_functions.branin import Branin
from robo.solver.bayesian_optimization import BayesianOptimization


branin = Branin()

kernel = GPy.kern.Matern52(input_dim=branin.n_dims)
model = GPyModel(kernel)

acquisition_func = EI(model,
                      X_upper=branin.X_upper,
                      X_lower=branin.X_lower,
                      par=0.1)

maximizer = CMAES(acquisition_func, branin.X_lower, branin.X_upper)

bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=branin)

bo.run(10)
Example #27
    def __init__(self,
                 objective_func,
                 X_lower,
                 X_upper,
                 maximizer="direct",
                 acquisition="LogEI",
                 par=None,
                 n_func_evals=4000,
                 n_iters=500):
        self.objective_func = objective_func
        self.X_lower = X_lower
        self.X_upper = X_upper

        assert self.X_upper.shape[0] == self.X_lower.shape[0]

        self.task = Task(self.X_lower, self.X_upper, self.objective_func)

        cov_amp = 2

        initial_ls = np.ones([self.task.n_dims])
        exp_kernel = george.kernels.Matern32Kernel(initial_ls,
                                                   ndim=self.task.n_dims)
        kernel = cov_amp * exp_kernel
        #kernel = GPy.kern.Matern52(input_dim=task.n_dims)

        prior = DefaultPrior(len(kernel) + 1)

        n_hypers = 3 * len(kernel)
        if n_hypers % 2 == 1:
            n_hypers += 1

        #self.model = GaussianProcessMCMC(kernel, prior=prior, n_hypers=n_hypers, chain_length=500, burnin_steps=100)
        self.model = GaussianProcess(kernel,
                                     prior=prior,
                                     dim=self.X_lower.shape[0],
                                     noise=1e-3)
        #self.model = GPyModel(kernel)

        # MAP estimate

        if acquisition == "EI":
            if par is not None:
                self.a = EI(self.model,
                            X_upper=self.task.X_upper,
                            X_lower=self.task.X_lower,
                            par=par)
            else:
                self.a = EI(self.model,
                            X_upper=self.task.X_upper,
                            X_lower=self.task.X_lower)
        elif acquisition == "LogEI":
            if par is not None:
                self.a = LogEI(self.model,
                               X_upper=self.task.X_upper,
                               X_lower=self.task.X_lower,
                               par=par)
            else:
                self.a = LogEI(self.model,
                               X_upper=self.task.X_upper,
                               X_lower=self.task.X_lower)
        elif acquisition == "PI":
            self.a = PI(self.model,
                        X_upper=self.task.X_upper,
                        X_lower=self.task.X_lower)
        elif acquisition == "UCB":
            if par is not None:
                self.a = LCB(self.model,
                             X_upper=self.task.X_upper,
                             X_lower=self.task.X_lower,
                             par=par)
            else:
                self.a = LCB(self.model,
                             X_upper=self.task.X_upper,
                             X_lower=self.task.X_lower)
        elif acquisition == "UCB_GP":
            if par is not None:
                self.a = LCB_GP(self.model,
                                X_upper=self.task.X_upper,
                                X_lower=self.task.X_lower,
                                par=par)
            else:
                self.a = LCB_GP(self.model,
                                X_upper=self.task.X_upper,
                                X_lower=self.task.X_lower)
        elif acquisition == "InformationGain":
            self.a = InformationGain(self.model,
                                     X_upper=self.task.X_upper,
                                     X_lower=self.task.X_lower)
        elif acquisition == "InformationGainMC":
            self.a = InformationGainMC(
                self.model,
                X_upper=self.task.X_upper,
                X_lower=self.task.X_lower,
            )
        else:
            logger.error("ERROR: %s is not a"
                         "valid acquisition function!" % (acquisition))
            return None

        #self.acquisition_func = IntegratedAcquisition(self.model, self.a, self.task.X_lower, self.task.X_upper)
        self.acquisition_func = self.a

        if maximizer == "cmaes":
            self.max_fkt = cmaes.CMAES(self.acquisition_func,
                                       self.task.X_lower, self.task.X_upper)
        elif maximizer == "direct":
            self.max_fkt = direct.Direct(
                self.acquisition_func,
                self.task.X_lower,
                self.task.X_upper,
                n_func_evals=n_func_evals,
                n_iters=n_iters)  #default is n_func_evals=400, n_iters=200
        elif maximizer == "stochastic_local_search":
            self.max_fkt = stochastic_local_search.StochasticLocalSearch(
                self.acquisition_func, self.task.X_lower, self.task.X_upper)
        elif maximizer == "grid_search":
            self.max_fkt = grid_search.GridSearch(self.acquisition_func,
                                                  self.task.X_lower,
                                                  self.task.X_upper)
        else:
            logger.error("ERROR: %s is not a valid function"
                         "to maximize the acquisition function!" %
                         (acquisition))
            return None

        self.bo = BayesianOptimization(acquisition_func=self.acquisition_func,
                                       model=self.model,
                                       maximize_func=self.max_fkt,
                                       task=self.task)
Example #28
def fmin(objective_func,
         X_lower,
         X_upper,
         num_iterations=30,
         maximizer="direct",
         acquisition="LogEI"):

    assert X_upper.shape[0] == X_lower.shape[0]

    class Task(BaseTask):

        def __init__(self, X_lower, X_upper, objective_fkt):
            super(Task, self).__init__(X_lower, X_upper)
            self.objective_function = objective_fkt

    task = Task(X_lower, X_upper, objective_func)

    cov_amp = 2

    initial_ls = np.ones([task.n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls,
                                               ndim=task.n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1
    model = GaussianProcessMCMC(kernel, prior=prior,
                                n_hypers=n_hypers,
                                chain_length=200,
                                burnin_steps=100)

    if acquisition == "EI":
        a = EI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "LogEI":
        a = LogEI(model, X_upper=task.X_upper, X_lower=task.X_lower)        
    elif acquisition == "PI":
        a = PI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "UCB":
        a = LCB(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "InformationGain":
        a = InformationGain(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition == "InformationGainMC":
        a = InformationGainMC(model, X_upper=task.X_upper, X_lower=task.X_lower,)
    else:
        logger.error("ERROR: %s is not a "
                     "valid acquisition function!" % acquisition)
        return None
        
    acquisition_func = IntegratedAcquisition(model, a,
                                             task.X_lower,
                                             task.X_upper)        

    if maximizer == "cmaes":
        max_fkt = cmaes.CMAES(acquisition_func, task.X_lower, task.X_upper)
    elif maximizer == "direct":
        max_fkt = direct.Direct(acquisition_func, task.X_lower, task.X_upper)
    elif maximizer == "stochastic_local_search":
        max_fkt = stochastic_local_search.StochasticLocalSearch(acquisition_func,
                                                    task.X_lower,
                                                    task.X_upper)
    elif maximizer == "grid_search":
        max_fkt = grid_search.GridSearch(acquisition_func,
                                         task.X_lower,
                                         task.X_upper)
    else:
        logger.error("ERROR: %s is not a valid function "
                     "to maximize the acquisition function!" % maximizer)
        return None

    bo = BayesianOptimization(acquisition_func=acquisition_func,
                              model=model,
                              maximize_func=max_fkt,
                              task=task)

    best_x, f_min = bo.run(num_iterations)
    return task.retransform(best_x), f_min
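
# Minimal usage sketch for the fmin interface above, under the assumption
# (matching RoBO's BaseTask convention) that the objective receives a
# (1, D) array and returns a (1, 1) array. The objective and bounds are
# illustrative only.
import numpy as np

def quadratic(x):
    return np.array([[float(np.sum(x ** 2))]])

best_x, f_min = fmin(quadratic,
                     X_lower=np.array([-2.0]),
                     X_upper=np.array([2.0]),
                     num_iterations=20,
                     acquisition="LogEI")
print(best_x, f_min)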
Example #29
# burnin and chain_length were undefined in the original snippet; the
# values below are illustrative MCMC settings. MyPrior is a user-defined
# prior class that is assumed to be defined elsewhere.
burnin = 100
chain_length = 200
n_hypers = 20

task = Branin()

cov_amp = 1.0
config_kernel = george.kernels.Matern52Kernel(np.ones([task.n_dims]),
                                              ndim=task.n_dims)

kernel = cov_amp * config_kernel

prior = MyPrior(len(kernel) + 1)

model = GaussianProcessMCMC(kernel, prior=prior, burnin_steps=burnin,
                            chain_length=chain_length, n_hypers=n_hypers)

ei = EI(model, X_upper=task.X_upper, X_lower=task.X_lower)

acquisition_func = IntegratedAcquisition(model, ei,
                                         task.X_lower, task.X_upper)

maximizer = Direct(acquisition_func, task.X_lower, task.X_upper)

bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=task)
bo.run(20)


Example #30
def bayesian_optimization(objective_function,
                          lower,
                          upper,
                          num_iterations=30,
                          X_init=None,
                          Y_init=None,
                          maximizer="random",
                          acquisition_func="log_ei",
                          model_type="gp_mcmc",
                          n_init=3,
                          rng=None,
                          output_path=None,
                          kernel=None,
                          sampling_method="origin",
                          distance="cosine",
                          replacement=True,
                          pool=None,
                          best=None):
    """
    General interface for Bayesian optimization for global black box
    optimization problems.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy
        array (D,) as input and returns the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    X_init: np.ndarray (N, D)
        Initial points to warmstart BO
    Y_init: np.ndarray (N, 1)
        Function values of the initial points
    maximizer: {"random", "scipy", "differential_evolution"}
        The optimizer for the acquisition function.
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    model_type: {"gp", "gp_mcmc", "rf", "bohamiann", "dngo"}
        The model for the objective function.
    n_init: int
        Number of points for the initial design. Make sure that it
        is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator
    kernel: george.kernels.Kernel
        The kernel for the Gaussian process. (The string options
        {"constant", "polynomial", "linear", "dotproduct", "exp",
        "expsquared", "matern32", "matern52", "rationalquadratic",
        "cosine", "expsine2", "heuristic"} belong to the commented-out
        string dispatch below; this version expects a kernel object.)
    sampling_method: {"origin", "approx", "exact"}
        Specify the method to choose next sample to update model.
        approx: choose the sample in the candidate pool that is closest (measured by distance
        arg) to the one returned from maximizing acquisition function.
        exact: evaluate all samples in the candidate pool on acquisition function
        and choose the one with maximum output.
    distance: {"cosine", "euclidean"}
        The distance measurement for approximation sampling.
    replacement: boolean
        Whether to sample from the pool with replacement.
    pool: np.ndarray (N, D)
        Candidate pool containing the possible inputs x
    best: float
        If given, stop the optimization as soon as this value is sampled.
    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0], "Dimension mismatch"
    assert np.all(lower < upper), "Lower bound >= upper bound"
    assert n_init <= num_iterations, "Number of initial design points must be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    cov_amp = 2
    #n_dims = lower.shape[0]

    #initial_ls = np.ones([n_dims])

    # if kernel == "constant":
    #     exp_kernel = george.kernels.ConstantKernel(1, ndim=n_dims)
    # elif kernel == "polynomial":
    #     exp_kernel = george.kernels.PolynomialKernel(log_sigma2=1, order=3, ndim=n_dims)
    # elif kernel == "linear":
    #     exp_kernel = george.kernels.LinearKernel(log_gamma2=1, order=3, ndim=n_dims)
    # elif kernel == "dotproduct":
    #     exp_kernel = george.kernels.DotProductKernel(ndim=n_dims)
    # elif kernel == "exp":
    #     exp_kernel = george.kernels.ExpKernel(initial_ls, ndim=n_dims)
    # elif kernel == "expsquared":
    #     exp_kernel = george.kernels.ExpSquaredKernel(initial_ls, ndim=n_dims)
    # elif kernel == "matern32":
    #     exp_kernel = george.kernels.Matern32Kernel(initial_ls, ndim=n_dims)
    # elif kernel == "matern52":
    #     exp_kernel = george.kernels.Matern52Kernel(initial_ls, ndim=n_dims)
    # elif kernel == "rationalquadratic":
    #     exp_kernel = george.kernels.RationalQuadraticKernel(log_alpha=1, metric=initial_ls, ndim=n_dims)
    # elif kernel == "cosine":
    #     exp_kernel = george.kernels.CosineKernel(4, ndim=n_dims)
    # elif kernel == "expsine2":
    #     exp_kernel = george.kernels.ExpSine2Kernel(1, 2, ndim=n_dims)
    # elif kernel == "heuristic":
    #     exp_kernel = george.kernels.PythonKernel(heuristic_kernel_function, ndim=n_dims)
    # else:
    #     raise ValueError("'{}' is not a valid kernel".format(kernel))

    kernel = cov_amp * kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1

    if model_type == "gp":
        model = GaussianProcess(kernel,
                                prior=prior,
                                rng=rng,
                                normalize_output=False,
                                normalize_input=True,
                                lower=lower,
                                upper=upper)
    elif model_type == "gp_mcmc":
        model = GaussianProcessMCMC(kernel,
                                    prior=prior,
                                    n_hypers=n_hypers,
                                    chain_length=200,
                                    burnin_steps=100,
                                    normalize_input=True,
                                    normalize_output=False,
                                    rng=rng,
                                    lower=lower,
                                    upper=upper)

    elif model_type == "rf":
        model = RandomForest(rng=rng)

    elif model_type == "bohamiann":
        model = WrapperBohamiann()

    elif model_type == "dngo":
        model = DNGO()

    else:
        raise ValueError("'{}' is not a valid model".format(model_type))

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)
    else:
        raise ValueError("'{}' is not a valid acquisition function".format(
            acquisition_func))

    if model_type == "gp_mcmc":
        acquisition_func = MarginalizationGPMCMC(a)
    else:
        acquisition_func = a

    if maximizer == "random":
        max_func = RandomSampling(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "differential_evolution":
        max_func = DifferentialEvolution(acquisition_func,
                                         lower,
                                         upper,
                                         rng=rng)
    else:
        raise ValueError("'{}' is not a valid function to maximize the "
                         "acquisition function".format(maximizer))

    if sampling_method == "exact":
        max_func = ExactSampling(acquisition_func,
                                 lower,
                                 upper,
                                 pool,
                                 replacement,
                                 rng=rng)
        init_design = init_exact_random
    elif sampling_method == "approx":
        max_func = ApproxSampling(acquisition_func,
                                  lower,
                                  upper,
                                  pool,
                                  replacement,
                                  distance,
                                  rng=rng)
        init_design = init_exact_random
    else:
        init_design = init_latin_hypercube_sampling

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              acquisition_func,
                              model,
                              max_func,
                              pool,
                              best,
                              sampling_method,
                              distance,
                              replacement,
                              initial_points=n_init,
                              rng=rng,
                              initial_design=init_design,
                              output_path=output_path)

    x_best, f_min = bo.run(num_iterations, X=X_init, y=Y_init)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = [y for y in bo.y]
    return results
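
# Minimal usage sketch for bayesian_optimization above; the objective,
# bounds and Matern52 kernel are illustrative. Note that this interface
# expects a george kernel object, since the string dispatch is commented
# out above.
import numpy as np
import george

def sphere(x):
    return float(np.sum(x ** 2))

lower = np.array([-1.0, -1.0])
upper = np.array([1.0, 1.0])
kernel = george.kernels.Matern52Kernel(np.ones(2), ndim=2)

results = bayesian_optimization(sphere, lower, upper,
                                num_iterations=15,
                                model_type="gp",
                                kernel=kernel)
print(results["x_opt"], results["f_opt"])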
Example #31
def entropy_search(objective_function, lower, upper, num_iterations=30,
                   maximizer="direct", model="gp_mcmc",
                   n_init=3, output_path=None, rng=None):
    """
    Entropy search for global black box optimization problems. This is a reimplementation of the entropy search
    algorithm by Hennig and Schuler [1].

    [1] Entropy search for information-efficient global optimization.
        P. Hennig and C. Schuler.
        JMLR, (1), 2012.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    maximizer: {"direct", "cmaes"}
        Defines how the acquisition function is maximized. NOTE: "cmaes" only works in D > 1 dimensions
    model: {"gp", "gp_mcmc"}
        The model for the objective function.
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0], "Dimension mismatch"
    assert np.all(lower < upper), "Lower bound >= upper bound"
    assert n_init <= num_iterations, "Number of initial design points must be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    cov_amp = 2
    n_dims = lower.shape[0]

    initial_ls = np.ones([n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls,
                                               ndim=n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1

    if model == "gp":
        gp = GaussianProcess(kernel, prior=prior, rng=rng,
                             normalize_output=False, normalize_input=True,
                             lower=lower, upper=upper)
    elif model == "gp_mcmc":
        gp = GaussianProcessMCMC(kernel, prior=prior,
                                 n_hypers=n_hypers,
                                 chain_length=200,
                                 burnin_steps=100,
                                 normalize_input=True,
                                 normalize_output=False,
                                 rng=rng, lower=lower, upper=upper)
    else:
        print("ERROR: %s is not a valid model!" % model)
        return

    a = InformationGain(gp, lower=lower, upper=upper, sampling_acquisition=EI)

    if model == "gp":
        acquisition_func = a
    elif model == "gp_mcmc":
        acquisition_func = MarginalizationGPMCMC(a)

    if maximizer == "cmaes":
        max_func = CMAES(acquisition_func, lower, upper, verbose=False, rng=rng)
    elif maximizer == "direct":
        max_func = Direct(acquisition_func, lower, upper)
    else:
        print("ERROR: %s is not a valid function to maximize the acquisition function!" % maximizer)
        return

    bo = BayesianOptimization(objective_function, lower, upper, acquisition_func, gp, max_func,
                              initial_points=n_init, rng=rng, output_path=output_path)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = [y for y in bo.y]
    return results
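
# Minimal usage sketch for entropy_search above; the 1-D objective and
# bounds are illustrative only.
import numpy as np

def cosine_bowl(x):
    return float(np.cos(3.0 * x[0]) + x[0] ** 2)

results = entropy_search(cosine_bowl,
                         lower=np.array([-1.0]),
                         upper=np.array([1.0]),
                         num_iterations=15,
                         model="gp")
print(results["x_opt"], results["f_opt"])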
Example #32
import george

from robo.models.gaussian_process_mcmc import GaussianProcessMCMC
from robo.acquisition.ei import EI
from robo.maximizers.direct import Direct
from robo.task.synthetic_functions.branin import Branin
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.priors.default_priors import DefaultPrior
from robo.acquisition.integrated_acquisition import IntegratedAcquisition


task = Branin()

cov_amp = 2
exp_kernel = george.kernels.Matern52Kernel([1.0, 1.0], ndim=2)
kernel = cov_amp * exp_kernel

prior = DefaultPrior(len(kernel) + 1)
model = GaussianProcessMCMC(kernel, prior=prior,
                            chain_length=100, burnin_steps=200, n_hypers=20)

ei = EI(model, task.X_lower, task.X_upper)
acquisition_func = IntegratedAcquisition(model, ei, task.X_lower, task.X_upper)

maximizer = Direct(acquisition_func, task.X_lower, task.X_upper)

bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=task)

print(bo.run(10))
Example #33
import numpy as np
import george

from robo.models.gaussian_process_mcmc import GaussianProcessMCMC
from robo.acquisition.ei import EI
from robo.maximizers.direct import Direct
from robo.task.controlling_tasks.walker import Walker
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.priors.default_priors import DefaultPrior
from robo.acquisition.integrated_acquisition import IntegratedAcquisition



task = Walker()
test = '/test'

kernel = 1 * george.kernels.Matern52Kernel(np.ones([task.n_dims]),ndim=task.n_dims)
prior = DefaultPrior(len(kernel) + 1)
model = GaussianProcessMCMC(kernel, prior=prior,
                            chain_length=100, burnin_steps=200, n_hypers=8)

ei = EI(model, task.X_lower, task.X_upper)
acquisition_func = IntegratedAcquisition(model, ei, task.X_lower, task.X_upper)

maximizer = Direct(acquisition_func, task.X_lower, task.X_upper)

bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=task,
                          save_dir=test)

print(bo.run(2))
Example #34
def fmin(objective_fkt,
         X_lower,
         X_upper,
         num_iterations=30,
         maximizer="direct",
         acquisition_fkt="EI"):

    assert X_upper.shape[0] == X_lower.shape[0]

    class Task(BaseTask):

        def __init__(self, X_lower, X_upper, objective_fkt):
            super(Task, self).__init__(X_lower, X_upper)
            self.objective_function = objective_fkt

    task = Task(X_lower, X_upper, objective_fkt)

    noise = 1.0
    cov_amp = 2

    initial_ls = np.ones([task.n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls,
                                               ndim=task.n_dims)
    noise_kernel = george.kernels.WhiteKernel(noise, ndim=task.n_dims)
    kernel = cov_amp * (exp_kernel + noise_kernel)

    prior = DefaultPrior(len(kernel))

    model = GaussianProcessMCMC(kernel, prior=prior,
                                n_hypers=20,
                                chain_length=100,
                                burnin_steps=50)

    if acquisition_fkt == "EI":
        a = EI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "PI":
        a = PI(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "UCB":
        a = LCB(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "Entropy":
        a = Entropy(model, X_upper=task.X_upper, X_lower=task.X_lower)
    elif acquisition_fkt == "EntropyMC":
        a = EntropyMC(model, X_upper=task.X_upper, X_lower=task.X_lower,)
    else:
        logger.error("ERROR: %s is not a"
                    "valid acquisition function!" % (acquisition_fkt))
        return None

    if maximizer == "cmaes":
        max_fkt = cmaes.CMAES(a, task.X_lower, task.X_upper)
    elif maximizer == "direct":
        max_fkt = direct.Direct(a, task.X_lower, task.X_upper)
    elif maximizer == "stochastic_local_search":
        max_fkt = stochastic_local_search.StochasticLocalSearch(a,
                                                    task.X_lower,
                                                    task.X_upper)
    elif maximizer == "grid_search":
        max_fkt = grid_search.GridSearch(a, task.X_lower, task.X_upper)
    else:
        logger.error("ERROR: %s is not a valid function "
                     "to maximize the acquisition function!" % maximizer)
        return None

    bo = BayesianOptimization(acquisition_func=a,
                              model=model,
                              maximize_func=max_fkt,
                              task=task)

    best_x, f_min = bo.run(num_iterations)
    return best_x, f_min
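
# Minimal usage sketch for this fmin variant, assuming (as with the
# earlier fmin) that the objective receives a (1, D) array and returns a
# (1, 1) array. Bounds, objective and the "UCB" choice are illustrative.
import numpy as np

best_x, f_min = fmin(lambda x: np.array([[float(np.sum((x - 0.5) ** 2))]]),
                     X_lower=np.zeros(2),
                     X_upper=np.ones(2),
                     num_iterations=20,
                     acquisition_fkt="UCB")
print(best_x, f_min)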
Example #35
'''
@author: Aaron Klein
'''

import setup_logger

import GPy
from robo.models.gpy_model import GPyModel
from robo.acquisition.ei import EI
from robo.maximizers.cmaes import CMAES
from robo.task.synthetic_functions.branin import Branin
from robo.solver.bayesian_optimization import BayesianOptimization

branin = Branin()

kernel = GPy.kern.Matern52(input_dim=branin.n_dims)
model = GPyModel(kernel)

acquisition_func = EI(model,
                      X_upper=branin.X_upper,
                      X_lower=branin.X_lower,
                      par=0.1)

maximizer = CMAES(acquisition_func, branin.X_lower, branin.X_upper)

bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=branin)

bo.run(10)
Example #36
# Imports added for completeness; the module paths for RandomForest and
# PosteriorMeanAndStdOptimization may differ between RoBO versions.
from robo.models.random_forest import RandomForest
from robo.acquisition.ei import EI
from robo.maximizers.cmaes import CMAES
from robo.incumbent.posterior_optimization import PosteriorMeanAndStdOptimization
from robo.task.synthetic_functions.branin import Branin
from robo.solver.bayesian_optimization import BayesianOptimization

# The Branin task; its X_lower/X_upper attributes define the bounds of
# the input space.
branin = Branin()

# Instantiate the random forest. Branin does not have any categorical
# values, thus branin.types is a zero vector.
model = RandomForest(branin.types)

# Define the acquisition function
acquisition_func = EI(model,
                      X_upper=branin.X_upper,
                      X_lower=branin.X_lower,
                      par=0.1)

# Strategy of estimating the incumbent
rec = PosteriorMeanAndStdOptimization(model,
                                      branin.X_lower,
                                      branin.X_upper,
                                      with_gradients=False)

# Define the maximizer
maximizer = CMAES(acquisition_func, branin.X_lower, branin.X_upper)

# Now we defined everything we need to instantiate the solver
bo = BayesianOptimization(acquisition_func=acquisition_func,
                          model=model,
                          maximize_func=maximizer,
                          task=branin,
                          incumbent_estimation=rec)

bo.run(100)
Example #37
'''
Created on June 5th, 2016

@author: Numair Mansur ([email protected])
'''

import george

from robo.maximizers.direct import Direct
from robo.models.gaussian_process import GaussianProcess
from robo.task.synthetic_functions.levy import Levy
from robo.acquisition.ei import EI
from robo.solver.bayesian_optimization import BayesianOptimization

task = Levy()
kernel = george.kernels.Matern52Kernel([1.0], ndim=1)

model = GaussianProcess(kernel)

ei = EI(model, task.X_lower, task.X_upper)

maximizer = Direct(ei, task.X_lower, task.X_upper)

bo = BayesianOptimization(acquisition_func=ei,
                          model=model,
                          maximize_func=maximizer,
                          task=task,
                          save_dir='../JsonDumps/')

print(bo.run(20))
Example #38
def bohamiann(objective_function,
              lower,
              upper,
              num_iterations=30,
              maximizer="random",
              acquisition_func="log_ei",
              n_init=3,
              output_path=None,
              rng=None):
    """
    Bohamiann uses Bayesian neural networks to model the objective function [1] inside Bayesian optimization.
    Bayesian neural networks usually scale better with the number of function evaluations and the number of dimensions
    than Gaussian processes.

    [1] Bayesian optimization with robust Bayesian neural networks
        J. T. Springenberg and A. Klein and S. Falkner and F. Hutter
        Advances in Neural Information Processing Systems 29

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    maximizer: {"direct", "cmaes", "random", "scipy"}
        The optimizer for the acquisition function. NOTE: "cmaes" only works in D > 1 dimensions
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0], "Dimension mismatch"
    assert n_init <= num_iterations, "Number of initial design points must be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    model = BayesianNeuralNetwork(sampling_method="sghmc",
                                  l_rate=np.sqrt(1e-4),
                                  mdecay=0.05,
                                  burn_in=3000,
                                  n_iters=50000,
                                  precondition=True,
                                  normalize_input=True,
                                  normalize_output=True)

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)
    else:
        print("ERROR: %s is not a valid acquisition function!" %
              acquisition_func)
        return

    if maximizer == "cmaes":
        max_func = CMAES(a, lower, upper, verbose=True, rng=rng)
    elif maximizer == "direct":
        max_func = Direct(a, lower, upper, verbose=True)
    elif maximizer == "random":
        max_func = RandomSampling(a, lower, upper, rng=rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(a, lower, upper, rng=rng)

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              a,
                              model,
                              max_func,
                              initial_points=n_init,
                              output_path=output_path,
                              rng=rng)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = [y for y in bo.y]
    return results
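
# Minimal usage sketch for bohamiann above; the objective and bounds are
# illustrative. Training the Bayesian neural network is comparatively
# expensive, so only a few iterations are used here.
import numpy as np

def shifted_quadratic(x):
    return float(np.sum((x - 0.3) ** 2))

results = bohamiann(shifted_quadratic,
                    lower=np.array([-1.0]),
                    upper=np.array([1.0]),
                    num_iterations=10)
print(results["x_opt"], results["f_opt"])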