Example #1
def ssgplvm_simulation(optimize=True,
                       verbose=1,
                       plot=True,
                       plot_sim=False,
                       max_iters=2e4,
                       useGPU=False):
    import numpy as _np
    from GPy import kern
    from GPy.models import SSGPLVM
    from GPy.examples.dimensionality_reduction import _simulate_matern

    # Simulate three related data views with a Matern covariance; use the first view as Y
    D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 45, 3, 9
    _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim)
    Y = Ylist[0]

    # Linear ARD kernel; alternative kernel choices are left commented for reference
    k = kern.Linear(Q, ARD=True)  # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
    # k = kern.RBF(Q, ARD=True, lengthscale=10.)
    # Spike-and-slab GPLVM with a group spike prior and random latent initialisation
    m = SSGPLVM(Y, Q, init="rand", num_inducing=num_inducing, kernel=k, group_spike=True)
    # Start from small variational variances and a small likelihood noise
    m.X.variance[:] = _np.random.uniform(0, .01, m.X.shape)
    m.likelihood.variance = .01

    if optimize:
        print("Optimizing model:")
        m.optimize('bfgs', messages=verbose, max_iters=max_iters, gtol=.05)
    if plot:
        m.X.plot("SSGPLVM Latent Space 1D")
        m.kern.plot_ARD('SSGPLVM Simulation ARD Parameters')
    return m
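
A minimal way to run the example above, assuming GPy (and a matplotlib backend for the plots) is installed; the reduced max_iters here is purely illustrative:

m = ssgplvm_simulation(optimize=True, verbose=1, plot=False, max_iters=500)
print(m)                 # parameter summary of the fitted SSGPLVM
print(m.kern.variances)  # ARD variances of the Linear kernel, one per latent dimension
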
Example #2
    def test_ssgplvm(self):
        import numpy as np
        from GPy import kern
        from GPy.models import SSGPLVM
        from GPy.examples.dimensionality_reduction import _simulate_matern

        np.random.seed(10)
        D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 45, 3, 9
        _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, False)
        Y = Ylist[0]
        k = kern.Linear(Q, ARD=True)  # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
        # k = kern.RBF(Q, ARD=True, lengthscale=10.)
        m = SSGPLVM(Y, Q, init="rand", num_inducing=num_inducing, kernel=k, group_spike=True)
        m.randomize()  # resample all free parameters before the gradient check
        self.assertTrue(m.checkgrad())  # analytic gradients must agree with numerical ones
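
The same gradient check can also be run outside the test suite. A self-contained sketch under the same settings (checkgrad's verbose flag, which prints a per-parameter comparison in current GPy releases, is the only addition):

import numpy as np
from GPy import kern
from GPy.models import SSGPLVM
from GPy.examples.dimensionality_reduction import _simulate_matern

np.random.seed(10)
_, _, Ylist = _simulate_matern(13, 5, 8, 45, 3, False)  # same simulated data as above
m = SSGPLVM(Ylist[0], 9, init="rand", num_inducing=3,
            kernel=kern.Linear(9, ARD=True), group_spike=True)
m.randomize()                     # resample all free parameters
assert m.checkgrad(verbose=True)  # analytic vs. numerical gradients, per parameter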