Example #1
def bgplvm_simulation(optimize='scg',
                      plot=True,
                      max_f_eval=2e4):
#     from GPy.core.transformations import logexp_clipped
    D1, D2, D3, N, num_inducing, Q = 15, 8, 8, 100, 3, 5
    # `_simulate_sincos` is a module-level helper (not shown here); see the sketch after this example.
    slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot)

    import numpy as np
    from GPy.models import mrd, BayesianGPLVM
    from GPy import kern
    from importlib import reload  # `reload` is a builtin on Python 2; Python 3 needs this import
    reload(mrd); reload(kern)  # development-time reloads, harmless on a fresh import

    Y = Ylist[0]

    # Linear ARD kernel plus bias and white-noise terms (pre-1.0 GPy lowercase kernel constructors)
    k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) # + kern.bias(Q)
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k, _debug=True)
    # m.constrain('variance|noise', logexp_clipped())
    m['noise'] = Y.var() / 100.  # start with a small noise variance relative to the data
    m['linear_variance'] = .01

    if optimize:
        print "Optimizing model:"
        m.optimize(optimize, max_iters=max_f_eval,
                   max_f_eval=max_f_eval,
                   messages=True, gtol=.05)
    if plot:
        m.plot_X_1d("BGPLVM Latent Space 1D")
        m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
    return m
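
The `_simulate_sincos` helper referenced above is defined elsewhere in the module this example was taken from. Below is a minimal sketch of what such a generator could look like; the sin/cos frequencies, the random linear maps, the noise level, and the exact contents of the returned triple are assumptions, not the original implementation:

import numpy as np

def _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot=False):
    """Sketch: Q smooth sin/cos latent signals mapped linearly into three
    observed views of dimension D1, D2 and D3 (assumed behaviour)."""
    t = np.linspace(0, 4 * np.pi, N)[:, None]
    # One sinusoid per latent dimension, alternating sin/cos at increasing frequency.
    S = np.hstack([np.sin((i + 1) * t) if i % 2 == 0 else np.cos((i + 1) * t)
                   for i in range(Q)])
    slist, Slist, Ylist = [S], [], []
    for D in (D1, D2, D3):
        W = np.random.randn(Q, D)      # random linear map into this view
        F = S.dot(W)                   # noise-free signals for this view
        Slist.append(F)
        Ylist.append(F + 0.05 * np.random.randn(N, D))  # add observation noise
    return slist, Slist, Ylist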
Example #2
def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False):
    import numpy as np
    import GPy
    from GPy.models import BayesianGPLVM
    from GPy.util.datasets import swiss_roll_generated
    # from GPy.core.transformations import logexp_clipped  # unused here; leftover from a constrain() workflow

    data = swiss_roll_generated(N=N, sigma=sigma)
    Y = data['Y']
    Y -= Y.mean()
    Y /= Y.std()

    t = data['t']
    c = data['colors']

    try:
        # Initialize the latent space with an Isomap embedding when scikit-learn
        # is available, padding any extra latent dimensions with Gaussian noise;
        # otherwise fall back to a fully random initialization.
        from sklearn.manifold import Isomap
        iso = Isomap().fit(Y)
        X = iso.embedding_
        if Q > 2:
            X = np.hstack((X, np.random.randn(N, Q - 2)))
    except ImportError:
        X = np.random.randn(N, Q)

    if plot:
        from mpl_toolkits import mplot3d
        import pylab
        fig = pylab.figure("Swiss Roll Data")
        ax = fig.add_subplot(121, projection='3d')
        ax.scatter(*Y.T, c=c)
        ax.set_title("Swiss Roll")

        ax = fig.add_subplot(122)
        ax.scatter(*X.T[:2], c=c)
        ax.set_title("Initialization")


    var = .5
    S = (var * np.ones_like(X) + np.clip(np.random.randn(N, Q) * var ** 2,
                                         - (1 - var),
                                         (1 - var))) + .001
    Z = np.random.permutation(X)[:num_inducing]  # random subset of the initialization as inducing inputs

    kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2))  # pre-1.0 GPy constructors

    m = BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel)
    m.data_colors = c
    m.data_t = t

    m['rbf_lengthscale'] = 1. # X.var(0).max() / X.var(0)
    m['noise_variance'] = Y.var() / 100.  # small initial noise relative to the data variance
    m['bias_variance'] = 0.05

    if optimize:
        m.optimize('scg', messages=1)
    return m
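
Both examples return the fitted model, so a typical session just calls them and inspects the result. A minimal driver sketch, assuming the functions above live in an importable module and that the older (pre-1.0) GPy API they use is installed:

import numpy as np

np.random.seed(0)                      # make initialization and inducing-point choice repeatable

m = swiss_roll(optimize=False, N=200)  # small, quick run without optimization
print(m)                               # GPy models print a parameter summary

m.optimize('scg', messages=True)       # optimize afterwards, mirroring the optimize=True branch
print(m['rbf_lengthscale'])            # inspect the learned ARD lengthscales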