Example #1
def init_model(f, bounds, ninit=None, design='latin', log=None, rng=None):
    """
    Initialize model and its hyperpriors using initial data.

    Arguments:
        f: function handle
        bounds: list of (xmin, xmax) float pairs, one per dimension.
        ninit: int, number of design points to initialize model with.
        design: string, corresponding to a function in `pybo.inits`, with
            'init_' stripped.
        log: string, path to file where the model is dumped.
        rng: int or random state.

    Returns:
        Initialized model.
    """
    rng = rstate(rng)
    bounds = np.array(bounds, dtype=float, ndmin=2)
    ninit = ninit if (ninit is not None) else 3*len(bounds)
    model, info = safe_load(log)

    if model is not None:
        # if we've already constructed a model return it right away
        return model
    elif len(info.x) == 0:
        # otherwise get the initial design
        design = getattr(inits, 'init_' + design)
        info.x.extend(design(bounds, ninit, rng))
        info.y.extend(np.nan for _ in range(ninit))

    # sample the initial data
    for i, x in enumerate(info.x):
        if np.isnan(info.y[i]):
            info.y[i] = f(x)
        # save progress
        safe_dump(None, info, filename=log)

    # define initial setting of the hyperparameters
    sn2 = 1e-6
    rho = max(info.y) - min(info.y) if (len(info.y) > 1) else 1.
    rho = 1. if (rho < 1e-1) else rho
    ell = 0.25 * (bounds[:, 1] - bounds[:, 0])
    bias = np.mean(info.y) if (len(info.y) > 0) else 0.

    # initialize the base model
    model = reggie.make_gp(sn2, rho, ell, bias)

    # define priors
    model.params['like.sn2'].set_prior('horseshoe', 0.1)
    model.params['kern.rho'].set_prior('lognormal', np.log(rho), 1.)
    model.params['kern.ell'].set_prior('uniform', ell / 100, ell * 10)
    model.params['mean.bias'].set_prior('normal', bias, rho)

    # initialize the MCMC inference meta-model and add data
    model.add_data(info.x, info.y)
    model = reggie.MCMC(model, n=10, burn=100, rng=rng)

    # save model
    safe_dump(model, info, filename=log)

    return model
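A minimal usage sketch for init_model above (an editorial addition, not from the source). The quadratic objective and bounds are illustrative, and it assumes init_model is importable and that safe_load/safe_dump tolerate log=None, i.e. no checkpoint file:

import numpy as np

def f(x):
    # hypothetical cheap objective, for illustration only
    return -np.sum((np.asarray(x) - 0.5) ** 2)

bounds = [(0., 1.), (0., 1.)]  # one (xmin, xmax) pair per dimension
model = init_model(f, bounds, ninit=6, design='latin', log=None, rng=0)
mu, s2 = model.predict(np.array([[0.5, 0.5]]))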
Example #2
def main():
    """Run the demo."""
    # generate random data from a gp prior
    rng = np.random.RandomState(0)
    gp = make_gp(0.1, 1.0, 0.1, kernel='matern1')
    X = rng.uniform(-2, 2, size=(20, 1))
    Y = gp.sample(X, latent=False, rng=rng)
    U = np.linspace(X.min(), X.max(), 10)[:, None]

    # create a new (sparse) GP and optimize its hyperparameters
    gp = make_gp(1, 1, 1, inf='fitc', U=U)
    gp.add_data(X, Y)
    gp.optimize()

    # get the posterior moments
    x = np.linspace(X.min(), X.max(), 500)
    mu, s2 = gp.predict(x[:, None])

    # plot the posterior
    ax = figure().gca()
    ax.plot_banded(x, mu, 2*np.sqrt(s2), label='posterior mean')
    ax.scatter(X, Y, label='observed data')
    ax.scatter(U, np.full_like(U, -1), marker='x', label='inducing points')
    ax.legend(loc=0)
    ax.set_xlabel('inputs, X')
    ax.set_ylabel('outputs, Y')
    ax.set_title('Sparse GP (FITC)')

    # show the figure
    ax.figure.canvas.draw()
    show()
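A follow-up sketch (an addition, not in the source) comparing the FITC posterior mean against an exact GP on the same data; it reuses only calls that appear in these demos, and the gap should shrink as the number of inducing points grows:

import numpy as np

# same synthetic data as the demo above
rng = np.random.RandomState(0)
gp0 = make_gp(0.1, 1.0, 0.1, kernel='matern1')
X = rng.uniform(-2, 2, size=(20, 1))
Y = gp0.sample(X, latent=False, rng=rng)
x = np.linspace(X.min(), X.max(), 500)

# exact GP as the reference
exact = make_gp(1, 1, 1)
exact.add_data(X, Y)
exact.optimize()
mu_exact, _ = exact.predict(x[:, None])

# FITC with increasingly many inducing points
for m in (5, 10, 20):
    U = np.linspace(X.min(), X.max(), m)[:, None]
    sparse = make_gp(1, 1, 1, inf='fitc', U=U)
    sparse.add_data(X, Y)
    sparse.optimize()
    mu_sparse, _ = sparse.predict(x[:, None])
    print(m, np.max(np.abs(mu_exact - mu_sparse)))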
Example #3
def main():
    """Run the demo."""
    # generate random data from a gp prior
    rng = np.random.RandomState(0)
    gp = make_gp(0.1, 1.0, 0.1, kernel='matern1')
    X = rng.uniform(-2, 2, size=(20, 1))
    Y = gp.sample(X, latent=False, rng=rng)

    # create a new GP and optimize its hyperparameters
    gp = make_gp(1, 1, 1, kernel='se')
    gp.add_data(X, Y)
    gp.optimize()

    # get the posterior moments
    x = np.linspace(X.min(), X.max(), 500)
    mu, s2 = gp.predict(x[:, None])

    # plot the posterior
    ax = figure().gca()
    ax.plot_banded(x, mu, 2*np.sqrt(s2), label='posterior mean')
    ax.scatter(X.ravel(), Y, label='observed data')
    ax.legend(loc=0)
    ax.set_title('Basic GP')
    ax.set_xlabel('inputs, X')
    ax.set_ylabel('outputs, Y')

    # draw/show it
    ax.figure.canvas.draw()
    show()
Example #4
def init_model(domain, X, Y, rng=None):
    """
    Initialize a model using an initial sample.
    """
    if not hasattr(domain, 'bounds'):
        raise ValueError('cannot construct a default model for the given '
                         'domain type')

    # define initial setting of the hyperparameters
    sn2 = 1e-6
    rho = max(Y) - min(Y) if (len(Y) > 1) else 1.
    rho = 1. if (rho < 1e-1) else rho
    ell = 0.25 * np.array([b - a for (a, b) in domain.bounds])
    bias = np.mean(Y) if (len(Y) > 0) else 0.

    # initialize the base model
    model = rg.make_gp(sn2, rho, ell, bias)

    # define priors
    model.params['like']['sn2'].prior = rg.priors.Horseshoe(0.1)
    model.params['kern']['rho'].prior = rg.priors.LogNormal(np.log(rho), 1.)
    model.params['mean']['bias'].prior = rg.priors.Normal(bias, rho)

    for i, l in enumerate(ell):
        model.params['kern']['ell'][i].prior = rg.priors.Uniform(
            .01 * l, 10 * l)

    # initialize the MCMC inference meta-model and add data
    model = rg.MCMC(model, n=10, burn=100, skip=True, rng=rng)
    model.add_data(X, Y)

    return model
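A hedged usage sketch: init_model only requires that domain expose a bounds attribute, so a namedtuple serves as a stand-in for whatever domain class the source actually uses:

from collections import namedtuple
import numpy as np

Domain = namedtuple('Domain', 'bounds')  # hypothetical stand-in
domain = Domain(bounds=[(-1., 1.), (-1., 1.)])

rng = np.random.RandomState(0)
X = rng.uniform(-1, 1, size=(5, 2))
Y = rng.uniform(size=5)

model = init_model(domain, X, Y, rng=rng)
mu, s2 = model.predict(X)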
Example #5
    def _init_model(self, num_initial_evaluations, previous_model=None):
        logger.info("Initial fitting using %d points" %
                    num_initial_evaluations)

        # get initial data and some test points.
        self.X = list(inits.init_latin(self.bounds, num_initial_evaluations))
        self.Y = [self._eval(x) for x in self.X]

        # initial values for kernel parameters, taken from pybo code
        sn2 = 1e-6
        rho = np.max(self.Y) - np.min(self.Y)
        rho = 1. if (rho < 1e-1) else rho
        ell = 0.25 * (self.bounds[:, 1] - self.bounds[:, 0])

        if previous_model is None:
            # use data mean as GP mean
            bias = np.mean(self.Y)
            self.model = reggie.make_gp(sn2, rho, ell, bias)

            # define priors if gp was created from scratch
            self.model.params['mean.bias'].set_prior('normal', bias, rho)
            self.model.params['like.sn2'].set_prior('horseshoe', 0.1)
            self.model.params['kern.rho'].set_prior('lognormal', np.log(rho),
                                                    1.)
            self.model.params['kern.ell'].set_prior('uniform', ell / 100,
                                                    ell * 10)
        else:
            # if there has been a previous model, use it as mean
            like = previous_model._like
            kern = previous_model._kern
            mean = GPMean(previous_model)
            self.model = reggie.GP(like, kern, mean)

        # initialize the MCMC inference meta-model and add data
        self.model.add_data(self.X, self.Y)
        self.model = reggie.MCMC(self.model, n=10, burn=100)

        # best point so far
        self.xbest = recommenders.best_incumbent(self.model, self.bounds,
                                                 self.X)

        self.initialised = True
Example #6
def main():
    """Run the demo."""
    # generate random data from a gp prior
    rng = np.random.RandomState(1)
    N = 5
    X = rng.uniform(-2, 2, size=(N, 1))
    Y = rng.uniform(-2, 2, size=N)
    x = np.linspace(X.min(), X.max(), 500)

    # create a GP and sample its prior
    gp = make_gp(0.01, 1, 0.3, kernel='se')
    pr_fs = gp.sample(x[:, None], 3, rng=rng)
    pr_mu, pr_s2 = gp.predict(x[:, None])

    # add data and sample the posterior
    gp.add_data(X, Y)
    po_fs = gp.sample(x[:, None], 3, rng=rng)
    po_mu, po_s2 = gp.predict(x[:, None])

    # plot the posterior
    fig = figure(w_pad=3)
    ax1 = fig.add_subplotspec((1, 2), (0, 0), hidexy=True)
    ax2 = fig.add_subplotspec((1, 2), (0, 1), hidexy=True, sharey=ax1)

    ax1.plot_banded(x, pr_mu, 3*np.sqrt(pr_s2))
    ax1.plot(x, pr_fs.T, ls='--')
    ax1.set_title('prior')
    ax1.set_ylim(-3.5, 3.5)

    ax2.plot_banded(x, po_mu, 3*np.sqrt(po_s2))
    ax2.plot(x, po_fs.T, ls='--')
    ax2.scatter(X.ravel(), Y, s=80, marker='+', zorder=3, lw=3)
    ax2.set_title('posterior')

    # draw/show it
    fig.canvas.draw()
    show()
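A small hedged check to append at the end of main() above (gp and X are still in scope):

    # editorial check, not in the source: after conditioning, the
    # predictive variance at the observed inputs collapses; whether the
    # sn2=0.01 noise floor is included depends on reggie's convention
    _, s2_at_X = gp.predict(X)
    print(s2_at_X)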
Example #7
def main():
    """Run the demo."""
    # define the bounds over which we'll optimize, the optimal x for comparison,
    # and a sequence of test points
    bounds = np.array([[-5, 10.], [0, 15]])
    xopt = np.array([np.pi, 2.275])
    x1, x2 = np.meshgrid(np.linspace(*bounds[0], num=100),
                         np.linspace(*bounds[1], num=100))
    xx = np.c_[x1.flatten(), x2.flatten()]

    # get initial data and some test points.
    X = list(inits.init_latin(bounds, 6))
    Y = [f(x_) for x_ in X]
    F = list()

    # initialize the model
    model = make_gp(0.01, 10, [1., 1.], 0)
    model.add_data(X, Y)

    # set a prior on the parameters
    model.params['like.sn2'].set_prior('uniform', 0.005, 0.015)
    model.params['kern.rho'].set_prior('lognormal', 0, 3)
    model.params['kern.ell'].set_prior('lognormal', 0, 3)
    model.params['mean.bias'].set_prior('normal', 0, 20)

    # make a model which samples parameters
    model = MCMC(model, n=10, rng=None)

    # create a new figure
    fig = figure(figsize=(10, 6))

    while True:
        # get the acquisition function (or index)
        index = policies.EI(model, bounds, X, xi=0.1)

        # get the recommendation and the next query
        xbest = recommenders.best_incumbent(model, bounds, X)
        xnext, _ = solvers.solve_lbfgs(index, bounds)

        # observe and update model
        ynext = f(xnext)
        model.add_data(xnext, ynext)

        # evaluate the posterior and the acquisition function
        mu, s2 = model.predict(xx)

        # record our data and update the model
        X.append(xnext)
        Y.append(ynext)
        F.append(f(xbest))

        fig.clear()
        ax1 = fig.add_subplotspec((2, 2), (0, 0), hidex=True)
        ax2 = fig.add_subplotspec((2, 2), (1, 0), hidey=True, sharex=ax1)
        ax3 = fig.add_subplotspec((2, 2), (0, 1), rowspan=2)

        # plot the posterior and data
        ax1.contourf(x1, x2, mu.reshape(x1.shape), alpha=0.4)
        X_ = np.array(X)
        ax1.scatter(X_[:-1, 0], X_[:-1, 1], marker='.')
        ax1.scatter(xbest[0], xbest[1], linewidths=3, marker='o', color='r')
        ax1.scatter(xnext[0], xnext[1], linewidths=3, marker='o', color='g')
        ax1.set_xlim(*bounds[0])
        ax1.set_ylim(*bounds[1])
        ax1.set_title('current model (xbest and xnext)')

        # plot the acquisition function
        ax2.contourf(x1, x2, index(xx).reshape(x1.shape), alpha=0.5)
        ax2.scatter(xbest[0], xbest[1], linewidths=3, marker='o', color='r')
        ax2.scatter(xnext[0], xnext[1], linewidths=3, marker='o', color='g')
        ax2.set_xlim(*bounds[0])
        ax2.set_ylim(*bounds[1])
        ax2.set_title('current policy (xnext)')

        # plot the latent function at recommended points
        ax3.axhline(f(xopt))
        ax3.plot(F)
        ax3.set_ylim(-1., 0.)
        ax3.set_title('value of recommendation')

        # draw
        fig.canvas.draw()
        show(block=False)
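The demo above loops forever and only terminates when interrupted; a bounded variant (an editorial sketch that drops the plotting) uses the same calls:

for _ in range(30):
    index = policies.EI(model, bounds, X, xi=0.1)
    xnext, _ = solvers.solve_lbfgs(index, bounds)
    ynext = f(xnext)
    model.add_data(xnext, ynext)
    X.append(xnext)
    Y.append(ynext)
xbest = recommenders.best_incumbent(model, bounds, X)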
Example #8
    def __init__(self):
        X = np.random.rand(10, 2)
        Y = np.random.rand(10)
        gp = make_gp(1, 1, [1., 1.])
        ModelTest.__init__(self, gp, X, Y)
Example #9
    def __init__(self):
        X = np.random.rand(10, 2)
        Y = np.random.rand(10)
        gp = make_gp(0.7, 1, [1., 1.], inf='laplace')
        ModelTest.__init__(self, gp, X, Y)
Example #10
    def __init__(self):
        X = np.random.rand(10, 2)
        Y = np.random.rand(10)
        U = np.random.rand(50, 2)
        gp = make_gp(0.7, 1, [1., 1.], inf='fitc', U=U)
        ModelTest.__init__(self, gp, X, Y)
Example #11
"""
N = 20
X = np.random.uniform(bounds[:, 0], bounds[:, 1], size=(N, 2))
Y = np.array([[func.f1(x), func.f2(x)] for x in X])
P = pareto2d(Y)

P_init = P.copy()
"""
GP surrogate
"""
sn2 = 0.001  # Noise variance
rho = 5e-07  # Signal variance
ell = [1.] * len(bounds)  # Lengthscales
mean = 1.  # Mean

model = make_gp(sn2, rho, ell, mean)  # Create GP surrogate
model.add_data(X, Y[:, 1])  # Add data
#model.optimize()           # Optimise hyperparameters
"""
Multi-objective optimisation
"""
exp_iter = 20
for n in range(exp_iter):
    print('Additional experiment {:d}/{:d}'.format(n + 1, exp_iter))
    # Acquisition function
    acqfunc = EHVI(P, r, model, detf)
    # Choose next point to evaluate
    xnext, _ = solvers.solve_lbfgs(acqfunc, bounds)

    # Make 'observation'
    ycnext = func.f1(xnext)  # Deterministic
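The snippet is truncated at this point; a hedged continuation of the loop body, assuming (as in the other examples) that the new observation of the modelled objective f2 is fed back into the surrogate and the Pareto set is recomputed:

    # hedged continuation -- the original snippet stops above
    ynext = func.f2(xnext)        # the objective modelled by the GP
    model.add_data(xnext, ynext)  # update the surrogate
    Y = np.vstack([Y, [ycnext, ynext]])
    P = pareto2d(Y)               # refresh the Pareto front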
Example #12
def main():
    """Run the demo."""
    # define the bounds over which we'll optimize, the optimal x for
    # comparison, and a sequence of test points
    bounds = np.array([[0.5, 2.5]])
    xopt = 0.54856343
    fopt = f(xopt)
    x = np.linspace(bounds[0][0], bounds[0][1], 500)

    # get initial data and some test points.
    X = list(inits.init_latin(bounds, 3))
    Y = [f(x_) for x_ in X]
    F = []

    # initialize the model
    model = make_gp(0.01, 1.9, 0.1, 0)
    model.add_data(X, Y)

    # set a prior on the parameters
    model.params['like.sn2'].set_prior('uniform', 0.005, 0.015)
    model.params['kern.rho'].set_prior('lognormal', 0, 100)
    model.params['kern.ell'].set_prior('lognormal', 0, 10)
    model.params['mean.bias'].set_prior('normal', 0, 20)

    # make a model which samples parameters
    model = MCMC(model, n=20, rng=None)

    # create a new figure
    fig = figure(figsize=(10, 6))

    while True:
        # get acquisition function (or index)
        index = policies.EI(model, bounds, X, xi=0.1)

        # get the recommendation and the next query
        xbest = recommenders.best_incumbent(model, bounds, X)
        xnext, _ = solvers.solve_lbfgs(index, bounds)
        ynext = f(xnext)

        # evaluate the posterior before updating the model for plotting
        mu, s2 = model.predict(x[:, None])

        # record our data and update the model
        X.append(xnext)
        Y.append(ynext)
        F.append(f(xbest))
        model.add_data(xnext, ynext)

        # PLOT EVERYTHING
        fig.clear()
        ax1 = fig.add_subplotspec((2, 2), (0, 0), hidex=True)
        ax2 = fig.add_subplotspec((2, 2), (1, 0), hidey=True, sharex=ax1)
        ax3 = fig.add_subplotspec((2, 2), (0, 1), rowspan=2)

        # plot the posterior and data
        ax1.plot_banded(x, mu, 2*np.sqrt(s2))
        ax1.scatter(np.ravel(X), Y)
        ax1.axvline(xbest)
        ax1.axvline(xnext, color='g')
        ax1.set_ylim(-6, 3)
        ax1.set_title('current model (xbest and xnext)')

        # plot the acquisition function
        ax2.plot_banded(x, index(x[:, None]))
        ax2.axvline(xnext, color='g')
        ax2.set_xlim(*bounds)
        ax2.set_title('current policy (xnext)')

        # plot the latent function at recommended points
        ax3.plot(F)
        ax3.axhline(fopt)
        ax3.set_ylim(0.4, 0.9)
        ax3.set_title('value of recommendation')

        # draw
        fig.canvas.draw()
        show(block=False)
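The excerpts stop before any entry point; the usual guard (assumed, not shown in the source) runs the demo:

if __name__ == '__main__':
    main()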