Example #1
def fit_sinc(sampler, stepsize, data_seed, num_training_datapoints=20):
    """Fit a BNN with the given sampler to 1-d sinc data and return the
    predictions and data as JSON-serializable lists."""
    x_train = init_random_uniform(np.zeros(1),
                                  np.ones(1),
                                  num_points=num_training_datapoints,
                                  rng=np.random.RandomState(seed=data_seed))
    y_train = sinc(x_train)

    x_test = np.linspace(0, 1, 100)[:, None]
    y_test = sinc(x_test)

    if sampler == "SGHMC":
        # baseline: RoBO BNN with the configured SGHMC sampler
        model = Robo_BNN(sampling_method=SAMPLERS[sampler], l_rate=stepsize)
    else:
        # Keras-based BNN; only the KL divergence loss is actually used
        from keras.losses import kullback_leibler_divergence
        model = BayesianNeuralNetwork(
            optimizer=SAMPLERS[sampler],
            learning_rate=stepsize,
            hyperloss=lambda y_true, y_pred: kullback_leibler_divergence(
                y_true=y_true, y_pred=y_pred[:, 0]))

    model.train(x_train, y_train)
    prediction_mean, prediction_variance = model.predict(x_test)

    prediction_std = np.sqrt(prediction_variance)

    return {
        "prediction_mean": prediction_mean.tolist(),
        "prediction_std": prediction_std.tolist(),
        "x_train": x_train.tolist(),
        "y_train": y_train.tolist(),
        "x_test": x_test.tolist(),
        "y_test": y_test.tolist()
    }
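
A minimal driver for fit_sinc might look as follows; the sampler name, stepsize, and output path are illustrative values, and SAMPLERS is assumed to contain an "SGHMC" entry:

import json

# hypothetical driver; all argument values are example choices
results = fit_sinc(sampler="SGHMC", stepsize=1e-2, data_seed=1)

with open("sinc_fit.json", "w") as fh:
    json.dump(results, fh)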
Example #2
class Bohamiann(Optimizer):
    def __init__(self, config_space, burnin=3000, n_iters=10000):

        super(Bohamiann,
              self).__init__(sacred_space_to_configspace(config_space))
        # seed the random number generator from OS entropy
        self.rng = np.random.RandomState()
        self.n_dims = len(self.config_space.get_hyperparameters())

        # All inputs are mapped to be in [0, 1]^D
        self.lower = np.zeros([self.n_dims])
        self.upper = np.ones([self.n_dims])
        self.incumbents = []
        self.X = None
        self.y = None

        self.model = BayesianNeuralNetwork(sampling_method="sghmc",
                                           l_rate=np.sqrt(1e-4),
                                           mdecay=0.05,
                                           burn_in=burnin,
                                           n_iters=n_iters,
                                           precondition=True,
                                           normalize_input=True,
                                           normalize_output=True)

        self.acquisition_func = LogEI(self.model)

        self.maximizer = Direct(self.acquisition_func,
                                self.lower,
                                self.upper,
                                verbose=False)

    def suggest_configuration(self):

        if self.X is None and self.y is None:
            # No data points yet to train a model, just return a random configuration instead
            new_x = init_random_uniform(self.lower,
                                        self.upper,
                                        n_points=1,
                                        rng=self.rng)[0, :]

        else:
            # Train the model on all finished runs
            self.model.train(self.X, self.y)
            self.acquisition_func.update(self.model)

            # Maximize the acquisition function
            new_x = self.maximizer.maximize()

        # Maps from [0, 1]^D space back to original space
        next_config = Configuration(self.config_space, vector=new_x)

        # Transform to sacred configuration
        result = configspace_config_to_sacred(next_config)

        return result
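
A sketch of how this optimizer could be driven; config_space and the objective are assumptions, and the surrounding framework is expected to append finished observations to opt.X and opt.y between suggestions:

# hypothetical driver loop around Bohamiann.suggest_configuration()
opt = Bohamiann(config_space)

for _ in range(10):
    config = opt.suggest_configuration()
    loss = run_experiment(config)  # user-supplied objective, not part of the snippet
    # the framework is assumed to record (config vector, loss) in opt.X / opt.y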
Example #3
def fit_uci(sampler, stepsize, data_seed, burn_in_steps=5000,
            num_steps=15000, num_nets=100, batch_size=32, test_split=0.1):
    """Fit a BNN to each UCI regression dataset and return test predictions."""
    datasets = (BostonHousing, YachtHydrodynamics, Concrete, WineQualityRed)

    results = {}

    for dataset in datasets:
        train_data, (x_test, y_test) = dataset.load_data(
            test_split=test_split, seed=data_seed
        )
        # retrain from scratch until the sampler produces NaN-free predictions
        had_nans = True

        while had_nans:
            if sampler == "sghmc":
                model = Robo_BNN(
                    l_rate=stepsize,
                    sampling_method="sghmc", n_nets=num_nets, burn_in=burn_in_steps,
                    n_iters=num_steps, bsize=batch_size
                )
            elif sampler.startswith("SGHMCHD"):
                # SGHMCHD approaches with different kwargs

                model = KerasBayesianNeuralNetwork(
                    optimizer=SAMPLERS[sampler], learning_rate=stepsize,
                    train_callbacks=(TensorBoard(histogram_freq=1, batch_size=20),),
                    hyperloss=lambda y_true, y_pred: kullback_leibler_divergence(y_true=y_true, y_pred=y_pred[:, 0])
                )
            else:
                raise NotImplementedError()

            model.train(*train_data)
            prediction_mean, prediction_variance = model.predict(x_test)

            had_nans = np.isnan(prediction_mean).any() or np.isnan(prediction_variance).any()

        results[dataset.__name__] = {
            "x_test": x_test.tolist(),
            "y_test": y_test.tolist(),
            "prediction_mean": prediction_mean.tolist(),
            "prediction_variance": prediction_variance.tolist()
        }

    return results
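
As with fit_sinc above, a hedged driver for fit_uci; the argument values are illustrative and "sghmc" is assumed to be a valid sampler key:

import json

uci_results = fit_uci(sampler="sghmc", stepsize=1e-2, data_seed=0)

# one JSON file per UCI dataset
for name, result in uci_results.items():
    with open("uci_%s.json" % name, "w") as fh:
        json.dump(result, fh)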
Example #4
class TestBayesianNeuralNetwork(unittest.TestCase):
    def setUp(self):
        self.X = np.random.rand(10, 2)
        self.y = np.sinc(self.X * 10 - 5).sum(axis=1)
        self.model = BayesianNeuralNetwork(normalize_output=True,
                                           normalize_input=True)
        self.model.train(self.X, self.y)

    def test_predict(self):
        X_test = np.random.rand(10, 2)

        m, v = self.model.predict(X_test)

        assert len(m.shape) == 1
        assert m.shape[0] == X_test.shape[0]
        assert len(v.shape) == 1
        assert v.shape[0] == X_test.shape[0]

    def test_get_incumbent(self):
        inc, inc_val = self.model.get_incumbent()

        b = np.argmin(self.y)
        np.testing.assert_almost_equal(inc, self.X[b], decimal=5)
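
To run this test case standalone, the standard unittest entry point can be appended:

if __name__ == "__main__":
    unittest.main()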
Example #5
def bohamiann(objective_function,
              lower,
              upper,
              num_iterations=30,
              maximizer="random",
              acquisition_func="log_ei",
              n_init=3,
              output_path=None,
              rng=None):
    """
    Bohamiann uses a Bayesian neural network [1] to model the objective function inside Bayesian
    optimization. Bayesian neural networks usually scale better than Gaussian processes with the
    number of function evaluations and with the number of dimensions.

    [1] Bayesian optimization with robust Bayesian neural networks
        J. T. Springenberg and A. Klein and S. Falkner and F. Hutter
        Advances in Neural Information Processing Systems 29

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    maximizer: {"direct", "cmaes", "random", "scipy"}
        The optimizer for the acquisition function. NOTE: "cmaes" only works in D > 1 dimensions
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0]
    assert n_init <= num_iterations, "Number of initial design points has to be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    model = BayesianNeuralNetwork(sampling_method="sghmc",
                                  l_rate=np.sqrt(1e-4),
                                  mdecay=0.05,
                                  burn_in=3000,
                                  n_iters=50000,
                                  precondition=True,
                                  normalize_input=True,
                                  normalize_output=True)

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)

    else:
        raise ValueError("'%s' is not a valid acquisition function" %
                         acquisition_func)

    if maximizer == "cmaes":
        max_func = CMAES(a, lower, upper, verbose=True, rng=rng)
    elif maximizer == "direct":
        max_func = Direct(a, lower, upper, verbose=True)
    elif maximizer == "random":
        max_func = RandomSampling(a, lower, upper, rng=rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(a, lower, upper, rng=rng)

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              a,
                              model,
                              max_func,
                              initial_points=n_init,
                              output_path=output_path,
                              rng=rng)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = [y for y in bo.y]
    return results
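
A minimal sketch of calling bohamiann on a toy one-dimensional objective; the quadratic below is illustrative and not part of the source:

import numpy as np

def objective(x):
    # toy objective with its minimum at x = 0.5
    return float((x[0] - 0.5) ** 2)

results = bohamiann(objective,
                    lower=np.zeros(1),
                    upper=np.ones(1),
                    num_iterations=20,
                    maximizer="random")
print(results["x_opt"], results["f_opt"])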
Example #6
def bohamiann(objective_function,
              lower,
              upper,
              num_iterations=30,
              acquisition_func="log_ei",
              n_init=3,
              rng=None):
    """
    General interface for Bayesian optimization of global black-box optimization problems.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy array (D,) as input and returns
        the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function
    n_init: int
        Number of points for the initial design. Make sure that it is <= num_iterations.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
        dict with all results
    """
    assert upper.shape[0] == lower.shape[0]
    assert n_init <= num_iterations, "Number of initial design points has to be <= the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    model = BayesianNeuralNetwork(sampling_method="sghmc",
                                  l_rate=np.sqrt(1e-4),
                                  mdecay=0.05,
                                  burn_in=3000,
                                  n_iters=50000,
                                  precondition=True,
                                  normalize_input=True,
                                  normalize_output=True)

    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)

    else:
        raise ValueError("'%s' is not a valid acquisition function" %
                         acquisition_func)

    max_func = Direct(a, lower, upper, verbose=False)

    bo = BayesianOptimization(objective_function,
                              lower,
                              upper,
                              a,
                              model,
                              max_func,
                              initial_points=n_init,
                              rng=rng)

    x_best, f_min = bo.run(num_iterations)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    results["incumbents"] = [inc for inc in bo.incumbents]
    results["incumbent_values"] = [val for val in bo.incumbents_values]
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    return results
Example #7

import logging

import matplotlib.pyplot as plt
import numpy as np

# imports assuming the standard RoBO layout; adjust paths to your checkout
from robo.initial_design.init_random_uniform import init_random_uniform
from robo.models.bnn import BayesianNeuralNetwork


def f(x):
    # shifted 1-d sinc test function on [0, 1]
    return np.sinc(x * 10 - 5).sum(axis=1)

logging.basicConfig(level=logging.INFO)

rng = np.random.RandomState(42)

X = init_random_uniform(np.zeros(1), np.ones(1), 20, rng)
y = f(X)

model = BayesianNeuralNetwork(sampling_method="sgld",
                              l_rate=1e-4,
                              mdecay=0.05,
                              burn_in=3000,
                              n_iters=50000,
                              precondition=True,
                              normalize_input=True,
                              normalize_output=True)
model.train(X, y)

x = np.linspace(0, 1, 100)[:, None]

vals = f(x)

mean_pred, var_pred = model.predict(x)

std_pred = np.sqrt(var_pred)

plt.grid()
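
The snippet ends at plt.grid(); a plausible completion of the plot, assuming the figure should show the predictive mean, a two-standard-deviation band, the ground truth, and the training data:

plt.plot(x[:, 0], vals, color="black", label="sinc(x)")
plt.plot(x[:, 0], mean_pred, color="blue", label="SGLD predictive mean")
plt.fill_between(x[:, 0],
                 mean_pred + 2 * std_pred,
                 mean_pred - 2 * std_pred,
                 color="blue", alpha=0.2, label="2 std. dev.")
plt.scatter(X[:, 0], y, color="red", label="training data")
plt.legend()
plt.show()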