Example 1
import gc

import numpy as np
import tensorflow as tf
import gpflow
from gpflow.models import SVGP
from scipy.spatial.distance import pdist

# `random_Z` (a helper that draws M inducing inputs from the data) is assumed
# to be defined elsewhere in the project.
def init_spectral(x, y, M, Q, kern, n_inits=10, minibatch_size=256, noise_var=10.0, ARD=True, likelihood=None):
    print('Initializing a spectral kernel...')
    best_loglik = -np.inf
    best_m = None
    N, input_dim = x.shape
    for _ in range(n_inits):  # several random restarts; keep the best one
        try:
            #gpflow.reset_default_graph_and_session()
            with gpflow.defer_build():
                Z = random_Z(x, N, M)
                dists = pdist(Z, 'euclidean').ravel()
                # Heuristics: cap the highest frequency by the smallest non-zero
                # inter-inducing-point distance, and the longest lengthscale by
                # the largest one.
                max_freq = min(10.0, 1. / np.min(dists[dists > 0.0]))
                max_len = min(5.0, np.max(dists) * (2 * np.pi))
                k = kern(input_dim=input_dim, max_freq=max_freq, Q=Q, ARD=ARD, max_len=max_len)
                if likelihood is not None:
                    lik = likelihood
                else:
                    lik = gpflow.likelihoods.Gaussian(noise_var)
                    lik.variance.prior = gpflow.priors.LogNormal(mu=0, var=1)
                model = SVGP(X=x, Y=y, Z=Z, kern=k, likelihood=lik,
                             minibatch_size=minibatch_size)
                model.feature.Z.prior = gpflow.priors.Gaussian(0, 1)
            model.compile()
            loglik = model.compute_log_likelihood()
            if loglik > best_loglik:
                best_loglik = loglik
                best_m = model
                #best_dir = tempfile.TemporaryDirectory()
                #gpflow.saver.Saver().save(best_dir.name + 'model.gpflow', best_m)
            del model
            gc.collect()
        except tf.errors.InvalidArgumentError:  # the Cholesky can fail for a particularly bad init
            pass
    print('Best init: %f' % best_loglik)
    print(best_m)
    #gpflow.reset_default_graph_and_session()
    #best_m = gpflow.saver.Saver().load(best_dir.name + 'model.gpflow')
    #best_m.compile()
    #print(best_m)
    return best_m
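
A minimal usage sketch (illustrative only: `SpectralMixture` is a hypothetical stand-in for any kernel class matching the `kern(input_dim=..., max_freq=..., Q=..., ARD=..., max_len=...)` constructor used above, and the data is synthetic):

x = np.random.rand(500, 1)
y = np.sin(12 * x) + 0.1 * np.random.randn(500, 1)
# `SpectralMixture` is hypothetical; substitute the project's spectral kernel class.
model = init_spectral(x, y, M=50, Q=3, kern=SpectralMixture, n_inits=5)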
Example 2
# `NeuralSpectralKernel` is a custom kernel class assumed to be defined elsewhere.
def init_neural(x, y, M, Q, n_inits=1, minibatch_size=256, noise_var=0.1, likelihood=None, hidden_sizes=None):
    print('Initializing neural spectral kernel...')
    best_loglik = -np.inf
    best_m = None
    N, input_dim = x.shape
    for _ in range(n_inits):  # random restarts; keep the best one
        try:
            # gpflow.reset_default_graph_and_session()
            with gpflow.defer_build():
                Z = random_Z(x, N, M)
                k = NeuralSpectralKernel(input_dim=input_dim, Q=Q, hidden_sizes=hidden_sizes)
                if likelihood is not None:
                    lik = likelihood
                else:
                    lik = gpflow.likelihoods.Gaussian(noise_var)
                    lik.variance.prior = gpflow.priors.LogNormal(mu=0, var=1)
                model = SVGP(X=x, Y=y, Z=Z, kern=k, likelihood=lik,
                             minibatch_size=minibatch_size)
                model.feature.Z.prior = gpflow.priors.Gaussian(0, 1)
            model.compile()
            loglik = model.compute_log_likelihood()
            if loglik > best_loglik:
                best_loglik = loglik
                best_m = model
                # best_dir = tempfile.TemporaryDirectory()
                # gpflow.saver.Saver().save(best_dir.name + 'model.gpflow', best_m)
            del model
            gc.collect()
        except tf.errors.InvalidArgumentError:  # the Cholesky can fail for a particularly bad init
            pass
    print('Best init: %f' % best_loglik)
    print(best_m)
    # gpflow.reset_default_graph_and_session()
    # best_m = gpflow.saver.Saver().load(best_dir.name + 'model.gpflow')
    # best_m.compile()
    # print(best_m)
    return best_m
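
Both initializers rely on a `random_Z` helper that is not shown in these examples. A minimal sketch of the usual pattern (sample M inducing inputs from the N training points without replacement) might look like this, though the original implementation may differ:

# Assumed behaviour only; the original random_Z is not part of this snippet.
def random_Z(x, N, M):
    idx = np.random.choice(N, size=M, replace=False)
    return x[idx].copy()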
Example 3
# Fragment from a notebook: `gp` is an alias for gpflow (import gpflow as gp),
# and X, Y, Z and `iterations` are assumed to be defined earlier.
likelihood = gp.likelihoods.Gaussian()
# Define the underlying GP mean and kernel
mean = gp.mean_functions.Zero()
kernel = gp.kernels.RBF(1)
# Create the SVGP model
model = SVGP(X,
             Y,
             kernel,
             likelihood,
             mean_function=mean,
             minibatch_size=100,
             num_latent=1,
             num_data=None,
             whiten=False,
             Z=Z)
model.compile()

run_with_adam(model, 1e-3, iterations, PrintAction(model, "Adam"))
# predict_y returns the predictive mean and variance at the input locations
ystar, varstar = model.predict_y(X)

import matplotlib.pyplot as plt

plt.figure(figsize=(4, 4))
plt.plot(X[:, 0], ystar, alpha=1, c='r', label='vanilla-inferred')
plt.fill_between(X[:, 0],
                 np.squeeze(ystar + np.sqrt(varstar)),
                 np.squeeze(ystar - np.sqrt(varstar)),
                 alpha=0.5)
plt.plot(X[:, 0], Y[:, 0], c='b', alpha=0.5, label='data')
plt.legend()
plt.show()
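
The `run_with_adam` helper and `PrintAction` callback called above are not included in this fragment. Under the GPflow 1.x API used here, a minimal stand-in could look like this (the callback argument is accepted but ignored in this sketch):

# Minimal stand-in; the original run_with_adam/PrintAction are defined elsewhere.
def run_with_adam(model, learning_rate, iterations, callback=None):
    opt = gpflow.train.AdamOptimizer(learning_rate)
    opt.minimize(model, maxiter=iterations)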