Example #1
def run_demo():
    LG.basicConfig(level=LG.INFO)

    #1. simulate data from a linear PCA model
    N = 25
    K = 5
    D = 200

    SP.random.seed(1)
    S = SP.random.randn(N,K)
    W = SP.random.randn(D,K)

    Y = SP.dot(W,S.T).T

    Y+= 0.5*SP.random.randn(N,D)

    #use "standard PCA"
    [Spca,Wpca] = gplvm.PCA(Y,K)

    #reconstruction
    Y_ = SP.dot(Spca,Wpca.T)

    if 1:
        #use a linear kernel (active branch)
        covariance = linear.LinearCFISO(n_dimensions=K)
        hyperparams = {'covar': SP.log([1.2])}
    if 0:
        #use an ARD squared-exponential kernel instead (flip the 1/0 toggles to switch)
        covariance = se.SqexpCFARD(n_dimensions=K)
        hyperparams = {'covar': SP.log([1]*(K+1))}

    #initialize X: a random draw, immediately overridden by the PCA solution
    X0 = SP.random.randn(N,K)
    X0 = Spca
    hyperparams['x'] = X0
    
    #standard Gaussian noise
    likelihood = lik.GaussLikISO()
    hyperparams['lik'] = SP.log([0.1])
    g = gplvm.GPLVM(covar_func=covariance,likelihood=likelihood,x=X0,y=Y,gplvm_dimensions=SP.arange(X0.shape[1]))

    #specify optimization bounds (constructed here but not passed to opt_hyper below)
    bounds = {}
    bounds['lik'] = SP.array([[-5.,5.]]*D)
    hyperparams['x'] = X0

    print "running standard gplvm"
    [opt_hyperparams,opt_lml2] = opt.opt_hyper(g,hyperparams,gradcheck=False)

    print "optimized latent X:"
    print opt_hyperparams['x']
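
gplvm.PCA is only called in this example, not shown. A minimal plain-numpy stand-in with the same return convention (scores Spca and weights Wpca such that the centered data is approximately Spca.dot(Wpca.T)) could look like the sketch below; the centering step and the exact interface of the library function are assumptions.

import numpy as np

def pca_sketch(Y, K):
    #plain-numpy PCA: returns K scores S and weights W with Y_centered ~= S.dot(W.T)
    Ym = Y - Y.mean(axis=0)
    U, s, Vt = np.linalg.svd(Ym, full_matrices=False)
    S = U[:, :K] * s[:K]        #principal-component scores (N x K)
    W = Vt[:K].T                #principal axes / weights (D x K)
    return S, W

#usage mirroring the example above
Y_demo = np.random.randn(25, 200)
S_demo, W_demo = pca_sketch(Y_demo, 5)
Y_rec = S_demo.dot(W_demo.T)    #rank-K reconstruction of the centered data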
Example #2
    K = 3
    D = 10

    S = SP.random.randn(N, K)
    W = SP.random.randn(D, K)

    Y = SP.dot(W, S.T).T
    Y += 0.5 * SP.random.randn(N, D)

    [Spca, Wpca] = PCA(Y, K)

    #reconstruction
    Y_ = SP.dot(Spca, Wpca.T)

    #construct GPLVM model
    linear_cf = linear.LinearCFISO(n_dimensions=K)
    noise_cf = noise.NoiseCFISO()
    mu_cf = fixed.FixedCF(SP.ones([N, N]))
    covariance = combinators.SumCF((mu_cf, linear_cf, noise_cf))
    # covariance = combinators.SumCF((linear_cf, noise_cf))

    #no fixed inputs here (SNPs to be added later)
    X = Spca.copy()
    #X = SP.random.randn(N,K)
    gplvm = GPLVM(covar_func=covariance, x=X, y=Y)

    gpr = GP(covar_func=covariance, x=X, y=Y[:, 0])

    #construct hyperparams: three log-parameters, presumably one per SumCF term (fixed, linear, noise)
    covar = SP.log([0.1, 1.0, 0.1])
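
The three entries of covar line up with the three terms of the SumCF. As a rough illustration of what such a sum covariance evaluates to, here is a plain-numpy sketch assuming a simple exp(2*theta) amplitude per term; pygp's actual parameterization may differ.

import numpy as np

def sum_covariance_sketch(X, log_amps, K_fixed):
    #illustrative sum kernel: fixed + linear + white noise, one log-amplitude each
    a_fixed, a_lin, a_noise = np.exp(2 * np.asarray(log_amps))
    K_lin = X.dot(X.T)                  #linear (dot-product) kernel
    K_noise = np.eye(X.shape[0])        #white-noise kernel
    return a_fixed * K_fixed + a_lin * K_lin + a_noise * K_noise

#toy usage (N and K chosen here only for illustration)
N_demo, K_demo = 20, 3
X_demo = np.random.randn(N_demo, K_demo)
K_total = sum_covariance_sketch(X_demo, np.log([0.1, 1.0, 0.1]), np.ones([N_demo, N_demo]))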
Example #3
    #toggle: ARD uses one linear weight per latent dimension, the alternative a single isotropic amplitude
    ard = True
    if ard:
        covar_r = SP.zeros([Kr]) + 0.2 * SP.random.randn(Kr)
        covar_c = SP.zeros([Kc]) + 0.2 * SP.random.randn(Kc)
        lik = SP.log([0.1])
    else:
        covar_r = SP.log([0.5])
        covar_c = SP.log([0.3])
        lik = SP.log([0.1])

    if ard:
        covariance_c_ = linear.LinearCF(n_dimensions=Kc)
        covariance_r_ = linear.LinearCF(n_dimensions=Kr)
    else:
        covariance_c_ = linear.LinearCFISO(n_dimensions=Kc)
        covariance_r_ = linear.LinearCFISO(n_dimensions=Kr)

    likelihood_ = LIK.GaussLikISO()

    hyperparams = {}
    hyperparams['covar_r'] = covar_r
    hyperparams['covar_c'] = covar_c
    hyperparams['lik'] = lik
    hyperparams['x_r'] = X0r
    hyperparams['x_c'] = X0c

    kgp = kronecker_gplvm.KroneckerGPLVM(covar_func_r=covariance_r_,
                                         covar_func_c=covariance_c_,
                                         likelihood=likelihood_)
    kgp.setData(x_r=X0r, x_c=X0c, y=Y)
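
The Kronecker GPLVM above uses separate row (sample) and column (output) covariances. The algebra it relies on, independent of pygp's implementation details, is that the covariance of the vectorized data is the Kronecker product of the two plus isotropic noise; a plain-numpy toy follows, with the ordering assuming column-major vectorization of Y.

import numpy as np

N, D, Kr, Kc = 10, 4, 2, 3
X_r = np.random.randn(N, Kr)
X_c = np.random.randn(D, Kc)
K_row = X_r.dot(X_r.T)                  #linear row covariance (N x N)
K_col = X_c.dot(X_c.T)                  #linear column covariance (D x D)
sigma2 = np.exp(2 * np.log(0.1))        #noise variance matching the log-parameter above
K_full = np.kron(K_col, K_row) + sigma2 * np.eye(N * D)   #covariance of vec(Y)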
Example #4
    sim_fa_noise = False
    if sim_fa_noise:
        #interpolate noise levels across the output dimensions (heteroscedastic noise)
        noise_levels = SP.linspace(0.1, 1.0, Y.shape[1])
        Ynoise = noise_levels * SP.random.randn(N, D)
        Y += Ynoise
    else:
        Y += 0.1 * SP.random.randn(N, D)

    #use "standard PCA"
    [Spca, Wpca] = gplvm.PCA(Y, K)

    #reconstruction
    Y_ = SP.dot(Spca, Wpca.T)

    covariance = linear.LinearCFISO(n_dimensions=K)
    hyperparams = {'covar': SP.log([1.2])}
    hyperparams_fa = {'covar': SP.log([1.2])}

    #factor analysis noise
    likelihood_fa = lik.GaussLikARD(n_dimensions=D)
    hyperparams_fa['lik'] = SP.log(0.1 * SP.ones(Y.shape[1]))

    #standard Gaussian noise
    likelihood = lik.GaussLikISO()
    hyperparams['lik'] = SP.log([0.1])

    #initialize X: a random draw, immediately overridden by the PCA solution
    X0 = SP.random.randn(N, K)
    X0 = Spca
    hyperparams['x'] = X0
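
The two likelihoods set up above differ only in their noise model over the D output dimensions: GaussLikISO shares a single variance, while the factor-analysis style GaussLikARD keeps one variance per dimension. A plain-numpy sketch of the two implied diagonal noise covariances, assuming the log-parameters are log standard deviations as the SP.log calls suggest:

import numpy as np

D = 10
lik_iso = np.log([0.1])                     #one shared log-sigma
lik_fa = np.log(0.1 * np.ones(D))           #one log-sigma per output dimension
Sigma_iso = np.exp(2 * lik_iso[0]) * np.eye(D)
Sigma_fa = np.diag(np.exp(2 * lik_fa))      #per-dimension variances on the diagonal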
Example #5
import limix
print limix.__file__
import pygp.covar.linear as lin
import pygp.covar.se as se
import pygp.covar.gradcheck as GC
import pygp.covar.combinators as comb
import scipy as SP
import pdb

n_dimensions = 3
X = SP.random.randn(3, n_dimensions)

params = SP.zeros([0])

#comparison of the limix and pygp covariance implementations (disabled; set to 1 to run)
if 0:
    c1 = limix.CCovLinearISO()
    c2 = lin.LinearCFISO(n_dimensions=n_dimensions)

    c1.setX(X)

    K1 = c1.K()
    K2 = c2.K(params, X, X)

    dK1 = c1.Kgrad_param(0)
    dK2 = c2.Kgrad_theta(params, X, 0)

    dKx1 = c1.Kgrad_X(0)
    dKx2 = c2.Kgrad_x(params, X, X, 0)

    dKx1diag = c1.Kdiag_grad_X(0)
    dKx2diag = c2.Kgrad_xdiag(params, X, 0)
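
A numeric counterpart to the analytic gradients compared above is a finite-difference check of dK/dtheta. The sketch below uses a toy linear kernel with an exp(2*theta) amplitude; that parameterization is an assumption for illustration and not necessarily the one used by LinearCFISO or CCovLinearISO.

import numpy as np

def K_lin(theta, X):
    #toy linear kernel with a single log-amplitude parameter
    return np.exp(2 * theta[0]) * X.dot(X.T)

def Kgrad_theta0(theta, X):
    #analytic derivative of K_lin with respect to theta[0]
    return 2 * np.exp(2 * theta[0]) * X.dot(X.T)

theta = np.array([0.3])
X_check = np.random.randn(3, 3)
eps = 1e-6
K_num = (K_lin(theta + eps, X_check) - K_lin(theta - eps, X_check)) / (2 * eps)
assert np.allclose(K_num, Kgrad_theta0(theta, X_check), atol=1e-5)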