Example #1
# assumed context for this snippet: numpy plus SubprocessQuery, solve_bayesopt,
# figure and show from pybo and its demo helpers (module paths not shown in the
# original excerpt)
import numpy as np
def main():
    """Run the demo."""
    # grab a test function
    f = SubprocessQuery("bc <<< 'scale=8; x={}; -((x-3)^2)'")
    bounds = [0, 8]
    x = np.linspace(bounds[0], bounds[1], 500)

    # solve the model
    xbest, model, info = solve_bayesopt(f, bounds, niter=30, verbose=True)
    mu, s2 = model.predict(x[:, None])

    # plot the final model
    ax = figure().gca()
    ax.plot_banded(x, mu, 2 * np.sqrt(s2))
    ax.axvline(xbest)
    ax.scatter(info.x.ravel(), info.y)
    ax.figure.canvas.draw()
    show()
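For intuition, the shell command handed to SubprocessQuery simply evaluates -(x - 3)^2, so the maximizer that solve_bayesopt should recover is x = 3. A pure-Python sketch of the same curve (numpy only; the name f_reference is illustrative):

# sanity check: the bc one-liner above encodes -(x - 3)**2, which peaks at x = 3
def f_reference(x):
    """Pure-Python stand-in for the shell objective."""
    return -(np.asarray(x, dtype=float) - 3.0) ** 2

print(f_reference([0, 3, 8]))  # -> [ -9.  -0. -25.]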
Example #2
# assumed context: os, sys, warnings, pickle and `from datetime import datetime`
# are imported, and load_data, objective, run_best_model and solve_bayesopt are
# defined elsewhere in the script
def main(debug=False):

    warnings.filterwarnings('ignore')
    # force line-buffered stdout so progress messages appear immediately
    # (Python 3 disallows fully unbuffered text streams, hence buffering=1)
    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', buffering=1)

    print('Initiating hyperopt at {}'.format(datetime.now()))

    global train_data, test_data, id1_train, id1_test, DEBUG, NUM_CORES, NUM_EPOCHS

    DEBUG = debug
    NUM_CORES = 48
    NUM_EPOCHS = 50
    train_data, test_data, id1_train, id1_test = load_data(debug=DEBUG)
    num_features = train_data.num_features

    bounds = [
        [2, 4],  # num_layers
        [10, num_features],  # hidden_size
        [0.001, 0.01],  # learning_rate
        [0.5, 1.0],  # keep_prob
        [1, 1],  # num_steps
        [0.0, 0.05],  # init_scale
        [15, 15]  # max_grad_norm
    ]

    xbest, model, info = solve_bayesopt(objective,
                                        bounds,
                                        niter=100,
                                        verbose=True)
    run_best_model(xbest)

    # drop the filename from the configured output path to get its directory
    save_path = os.path.dirname(train_data.config['OutputInSample'])
    with open(os.path.join(save_path, 'trials'), 'wb') as fh:
        pickle.dump(info, fh)
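Since the bounds list above is purely positional (note the degenerate ranges [1, 1] and [15, 15], which effectively pin num_steps and max_grad_norm), the returned xbest vector comes back in the same order. A small sketch of labelling it, with parameter names taken from the comments above:

    # illustrative only: attach the commented parameter names to xbest
    names = ['num_layers', 'hidden_size', 'learning_rate', 'keep_prob',
             'num_steps', 'init_scale', 'max_grad_norm']
    print(dict(zip(names, xbest)))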
Example #3
def main():
    """Run the demo."""
    # grab a test function; the original snippet omits the definition, so a
    # sinusoidal test objective (assumed to be in scope) stands in here,
    # matching the [0, 2*pi] bounds below
    f = Sinusoidal()
    bounds = [0, 2*np.pi]
    x = np.linspace(bounds[0], bounds[1], 500)

    # solve the model
    xbest, model, info = solve_bayesopt(f, bounds, niter=30, verbose=True)

    # make some predictions
    mu, s2 = model.predict(x[:, None])

    # plot the final model
    ax = figure().gca()
    ax.plot_banded(x, mu, 2*np.sqrt(s2))
    ax.axvline(xbest)
    ax.scatter(info.x.ravel(), info.y)
    ax.figure.canvas.draw()
    show()
Example #4
def main():
    """Run the demo."""
    # initialize interactive function and 1d bounds
    f = InteractiveQuery()
    bounds = [0, 1]
    x = np.linspace(bounds[0], bounds[1], 100)

    # optimize the model and get final predictions
    xbest, model, info = solve_bayesopt(f, bounds, niter=10)
    mu, s2 = model.predict(x[:, None])

    # plot the final model
    fig = figure()
    axs = fig.gca()
    axs.plot_banded(x, mu, 2*np.sqrt(s2))
    axs.axvline(xbest)
    axs.scatter(info.x.ravel(), info.y)
    fig.canvas.draw()
    show()
Example #5
# assumed imports for this excerpt (callback_1d is defined elsewhere)
import numpy as np
import pybo
from sklearn.datasets import make_hastie_10_2
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
class CV_RF(object):
    # class header assumed: the excerpt starts inside the class body, and the
    # full class presumably also defines `bounds` (used below) and a __call__
    # wrapper around _f
    xmax = 10  # FIXME - should this not be optional?

    @staticmethod
    def _f(x):
        # iris = load_iris()
        X, y = make_hastie_10_2(random_state=0)
        x = np.ravel(x)
        f = np.zeros(x.shape)
        for i in range(f.size):
            clf = RandomForestClassifier(n_estimators=1,
                                         min_samples_leaf=int(np.round(x[i])),
                                         random_state=0)
            # scores = cross_val_score(clf, iris.data, iris.target)
            scores = cross_val_score(clf, X, y, cv=5)
            f[i] = -scores.mean()
        return f.ravel()

if __name__ == '__main__':
    objective = CV_RF()

    info = pybo.solve_bayesopt(
        objective,
        objective.bounds,
        niter=25,
        noisefree=False,
        rng=0,
        init='uniform',
        callback=callback_1d)

    print('Finished')

    raw_input('Press enter to finish')
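As a usage note, _f maps each candidate min_samples_leaf value to the negative of the mean cross-validated accuracy, so lower is better. It can be probed directly; an illustrative call, assuming the sketch above:

    # probe the objective at two leaf sizes; each entry of the input vector is
    # one candidate min_samples_leaf setting
    print(CV_RF._f(np.array([1.0, 25.0])))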
Example #6
"""
Simplest demo performing Bayesian optimization on a one-dimensional test
function. This script also demonstrates user-defined visualization via a
callback function that is imported from the advanced demo.

The `pybo.solve_bayesopt()` function returns a numpy structured array, called
`info` below, which includes the observed input and output data, `info['x']` and
`info['y']`, respectively; and the recommendations made along the way in
`info['xbest']`.

The `callback` function plots the posterior with uncertainty bands, overlaid
onto the true function; below it we plot the acquisition function, and to the
right, the evolution of the recommendation over time.
"""

import pybo

# import callback from advanced demo
import os
import sys
sys.path.append(os.path.dirname(__file__))
from advanced import callback

if __name__ == '__main__':
    objective = pybo.functions.Sinusoidal()

    info = pybo.solve_bayesopt(objective,
                               objective.bounds,
                               noisefree=True,
                               rng=0,
                               callback=callback)
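A minimal sketch of inspecting the structured array described in the docstring; the field names are the documented ones, the shapes are assumed:

    # pull out the documented fields: observed data plus the per-iteration
    # recommendations, whose last entry is the final one
    print('observed inputs:', info['x'])
    print('observed outputs:', info['y'])
    print('final recommendation:', info['xbest'][-1])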
Example #7
    # excerpt: assumes numpy as np, pygp and pybo are imported, and that noise,
    # rng, objective and callback are defined earlier in the script
    bounds = objective.bounds                           # bounds of search space
    dim = bounds.shape[0]                               # dimension of space

    # prescribe initial hyperparameters
    sn = noise                                          # likelihood std dev
    sf = 1.0                                            # kernel amplitude
    ell = 0.25 * (bounds[:, 1] - bounds[:, 0])          # kernel length scale
    mu = 0.0                                            # prior mean

    # define model
    kernel = 'matern3'                                  # kernel family
    gp = pygp.BasicGP(sn, sf, ell, mu, kernel=kernel)   # initialize base GP
    prior = {                                           # hyperparameter priors
        'sn': pygp.priors.Horseshoe(0.1, min=1e-5),
        'sf': pygp.priors.LogNormal(np.log(sf), sigma=1., min=1e-6),
        'ell': pygp.priors.Uniform(ell / 100, ell * 2),
        'mu': pygp.priors.Gaussian(mu, sf)}
    model = pygp.meta.MCMC(gp, prior, n=10, rng=rng)    # meta-model for MCMC
                                                        # marginalization

    info = pybo.solve_bayesopt(
        objective,
        bounds,
        niter=30*dim,
        init='sobol',                                   # initialization policy
        policy='ei',                                    # exploration policy
        recommender='incumbent',                        # recommendation policy
        model=model,                                    # surrogate model
        rng=rng,
        callback=callback)
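To make the length-scale initialization above concrete, here is the same arithmetic on a made-up 2-d box (pure numpy; the bounds are illustrative):

    # toy check of `ell = 0.25 * (bounds[:, 1] - bounds[:, 0])`: on a 2-d box
    # each length scale starts at a quarter of the corresponding side length
    toy_bounds = np.array([[0., 8.], [0., 2.]])
    print(0.25 * (toy_bounds[:, 1] - toy_bounds[:, 0]))  # -> [2.  0.5]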
Example #8
# excerpt: assumes numpy as np, pybo and pylab as pl are imported, plus the
# generator_sigma_lmbda_objective and callback_sigma_lmbda helpers defined
# elsewhere in the script
if __name__ == '__main__':
    N = 200
    D = 2
    Z = np.random.randn(N, D)
    m = N
    num_folds = 5
    num_repetitions = 10
    lmbda = 0.0001
    sigma = 0.92

    objective = generator_sigma_lmbda_objective()

    info = pybo.solve_bayesopt(objective,
                               bounds=np.array([[-5, 5], [-20, 1]]),
                               noisefree=False,
                               callback=callback_sigma_lmbda,
                               niter=15)

    print("x")
    print(info['x'])

    print("y")
    print(info['y'])

    print("xbest")
    print(info['xbest'])

    pl.figure()
    pl.plot(info['x'], info['y'], 'o')
    pl.show()
Example #9
import numpy as np
import pygp
import pybo

# import callback from advanced demo
import os
import sys
sys.path.append(os.path.dirname(__file__))
from advanced import callback


if __name__ == '__main__':
    rng = 0                                             # random seed
    bounds = np.array([[3, 5]])                         # bounds of search space (one row per dim)
    dim = bounds.shape[0]                               # dimension of space

    # define a GP which we will sample an objective from.
    likelihood = pygp.likelihoods.Gaussian(sigma=1e-6)
    kernel = pygp.kernels.Periodic(1, 1, 0.5) + pygp.kernels.SE(1, 1)
    gp = pygp.inference.ExactGP(likelihood, kernel, mean=0.0)
    objective = pybo.functions.GPModel(bounds, gp, rng=rng)

    info = pybo.solve_bayesopt(
        objective,
        bounds,
        niter=30*dim,
        init='latin',                                   # initialization policy
        policy='thompson',                              # exploration policy
        recommender='observed',                         # recommendation policy
        noisefree=True,
        rng=rng,
        callback=callback)