Example #1
def fit_sinc(sampler, stepsize, data_seed, num_training_datapoints=20):
    x_train = init_random_uniform(np.zeros(1),
                                  np.ones(1),
                                  n_points=num_training_datapoints,
                                  rng=np.random.RandomState(seed=data_seed))
    y_train = sinc(x_train)

    x_test = np.linspace(0, 1, 100)[:, None]
    y_test = sinc(x_test)

    if sampler == "SGHMC":
        # RoBO's Bayesian neural network, sampled with stochastic gradient HMC
        model = Robo_BNN(sampling_method=SAMPLERS[sampler], l_rate=stepsize)
    else:
        from keras.losses import kullback_leibler_divergence
        model = BayesianNeuralNetwork(
            optimizer=SAMPLERS[sampler],
            learning_rate=stepsize,
            hyperloss=lambda y_true, y_pred: kullback_leibler_divergence(
                y_true=y_true, y_pred=y_pred[:, 0]))

    model.train(x_train, y_train)
    prediction_mean, prediction_variance = model.predict(x_test)

    prediction_std = np.sqrt(prediction_variance)

    return {
        "prediction_mean": prediction_mean.tolist(),
        "prediction_std": prediction_std.tolist(),
        "x_train": x_train.tolist(),
        "y_train": y_train.tolist(),
        "x_test": x_test.tolist(),
        "y_test": y_test.tolist()
    }
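For reference, a minimal invocation might look like the sketch below; the "SGHMC" key and the stepsize value are illustrative assumptions, since SAMPLERS is defined outside this snippet:

results = fit_sinc(sampler="SGHMC", stepsize=1e-2, data_seed=1)
print(len(results["prediction_mean"]))  # 100 predictions over the test grid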
Example #2
class Bohamiann(Optimizer):
    def __init__(self, config_space, burnin=3000, n_iters=10000):

        super(Bohamiann,
              self).__init__(sacred_space_to_configspace(config_space))
        # np.random.seed() returns None, so passing it as a seed set nothing;
        # construct the RandomState from fresh OS entropy instead.
        self.rng = np.random.RandomState()
        self.n_dims = len(self.config_space.get_hyperparameters())

        # All inputs are mapped to be in [0, 1]^D
        self.lower = np.zeros([self.n_dims])
        self.upper = np.ones([self.n_dims])
        self.incumbents = []
        self.X = None
        self.y = None

        self.model = BayesianNeuralNetwork(sampling_method="sghmc",
                                           l_rate=np.sqrt(1e-4),
                                           mdecay=0.05,
                                           burn_in=burnin,
                                           n_iters=n_iters,
                                           precondition=True,
                                           normalize_input=True,
                                           normalize_output=True)

        self.acquisition_func = LogEI(self.model)

        self.maximizer = Direct(self.acquisition_func,
                                self.lower,
                                self.upper,
                                verbose=False)

    def suggest_configuration(self):

        if self.X is None and self.y is None:
            # No data points yet to train a model, just return a random configuration instead
            new_x = init_random_uniform(self.lower,
                                        self.upper,
                                        n_points=1,
                                        rng=self.rng)[0, :]

        else:
            # Train the model on all finished runs
            self.model.train(self.X, self.y)
            self.acquisition_func.update(self.model)

            # Maximize the acquisition function
            new_x = self.maximizer.maximize()

        # Maps from [0, 1]^D space back to original space
        next_config = Configuration(self.config_space, vector=new_x)

        # Transform to sacred configuration
        result = configspace_config_to_sacred(next_config)

        return result
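A sketch of the intended suggest/evaluate loop, assuming a sacred-style config_space and a user-supplied evaluate function (both hypothetical here); note that the class expects self.X and self.y to be filled in with finished runs between suggestions:

opt = Bohamiann(config_space)
for _ in range(10):
    config = opt.suggest_configuration()
    loss = evaluate(config)  # hypothetical objective
    # ...append the vector-encoded config to opt.X and the loss to opt.y...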
Example #3
def fit_uci(sampler, stepsize, data_seed, burn_in_steps=5000,
            num_steps=15000, num_nets=100, batch_size=32, test_split=0.1):
    datasets = (BostonHousing, YachtHydrodynamics, Concrete, WineQualityRed)

    results = {}

    for dataset in datasets:
        train_data, (x_test, y_test) = dataset.load_data(
            test_split=test_split, seed=data_seed
        )
        # Retrain from scratch until the sampler yields NaN-free predictions.
        had_nans = True

        while had_nans:
            if sampler == "sghmc":
                model = Robo_BNN(
                    l_rate=stepsize,
                    sampling_method="sghmc", n_nets=num_nets, burn_in=burn_in_steps,
                    n_iters=num_steps, bsize=batch_size
                )
            elif sampler.startswith("SGHMCHD"):
                # SGHMCHD approaches with different kwargs
                from keras.losses import kullback_leibler_divergence

                model = KerasBayesianNeuralNetwork(
                    optimizer=SAMPLERS[sampler], learning_rate=stepsize,
                    train_callbacks=(TensorBoard(histogram_freq=1, batch_size=20),),
                    hyperloss=lambda y_true, y_pred: kullback_leibler_divergence(
                        y_true=y_true, y_pred=y_pred[:, 0])
                )
            else:
                raise NotImplementedError()

            model.train(*train_data)
            prediction_mean, prediction_variance = model.predict(x_test)

            had_nans = np.isnan(prediction_mean).any() or np.isnan(prediction_variance).any()

        results[dataset.__name__] = {
            "x_test": x_test.tolist(),
            "y_test": y_test.tolist(),
            "prediction_mean": prediction_mean.tolist(),
            "prediction_variance": prediction_variance.tolist()
        }

    return results
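A call to this benchmark might look as follows; the "sghmc" key and the stepsize are illustrative assumptions:

results = fit_uci(sampler="sghmc", stepsize=1e-2, data_seed=0)
for name, res in results.items():
    print(name, len(res["prediction_mean"]))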
Example #4
class TestBayesianNeuralNetwork(unittest.TestCase):
    def setUp(self):
        self.X = np.random.rand(10, 2)
        self.y = np.sinc(self.X * 10 - 5).sum(axis=1)
        self.model = BayesianNeuralNetwork(normalize_output=True,
                                           normalize_input=True)
        self.model.train(self.X, self.y)

    def test_predict(self):
        X_test = np.random.rand(10, 2)

        m, v = self.model.predict(X_test)

        assert len(m.shape) == 1
        assert m.shape[0] == X_test.shape[0]
        assert len(v.shape) == 1
        assert v.shape[0] == X_test.shape[0]

    def test_get_incumbent(self):
        inc, inc_val = self.model.get_incumbent()

        b = np.argmin(self.y)
        np.testing.assert_almost_equal(inc, self.X[b], decimal=5)
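These cases run under the standard unittest runner; a typical entry point (not part of the original snippet) would be:

if __name__ == "__main__":
    unittest.main()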
Example #5
logging.basicConfig(level=logging.INFO)

rng = np.random.RandomState(42)

X = init_random_uniform(np.zeros(1), np.ones(1), 20, rng)
y = f(X)  # f: the objective function being modeled (defined elsewhere)

model = BayesianNeuralNetwork(sampling_method="sgld",
                              l_rate=1e-4,
                              mdecay=0.05,
                              burn_in=3000,
                              n_iters=50000,
                              precondition=True,
                              normalize_input=True,
                              normalize_output=True)
model.train(X, y)

x = np.linspace(0, 1, 100)[:, None]

vals = f(x)

mean_pred, var_pred = model.predict(x)

std_pred = np.sqrt(var_pred)

plt.grid()

plt.plot(x[:, 0], vals, label="true", color="black")
plt.plot(X[:, 0], y, "ro")

plt.plot(x[:, 0], mean_pred, label="SGLD", color="green")
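The snippet stops before rendering the figure. A minimal completion (an assumption, not part of the original) that shades the one-standard-deviation predictive band and shows the plot:

plt.fill_between(x[:, 0], mean_pred - std_pred, mean_pred + std_pred,
                 color="green", alpha=0.2, label="+/- 1 std")
plt.legend()
plt.show()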