Example #1
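A Gaussian-process regression demo from the pygp library: toy data are fitted with a squared-exponential ARD kernel plus Gaussian noise, the hyperparameters are optimized under Gamma priors, and the predictive mean and standard deviation are plotted. The snippet assumes the usual pygp demo preamble and a create_toy_data() helper; below is a minimal sketch of both (the import paths follow pygp's demo scripts and the helper body is a hypothetical stand-in, neither is part of the original):

import logging as LG
import scipy as SP
from numpy import random
import pylab as PL

# assumed pygp imports, following the library's demo scripts
from pygp.gp import GP
from pygp.covar import se, noise, combinators
import pygp.likelihood as lik
import pygp.priors.lnpriors as lnpriors
import pygp.plot.gpr_plot as gpr_plot
import pygp.optimize as opt

def create_toy_data():
    # hypothetical stand-in: noisy samples of a smooth 1D function on [0, 10]
    x = SP.linspace(0.5, 9.5, 30)[:, SP.newaxis]
    y = SP.sin(x[:, 0]) + 0.2 * random.randn(x.shape[0])
    return [x, y]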
def run_demo():
    LG.basicConfig(level=LG.INFO)
    random.seed(1)

    #1. create toy data
    [x, y] = create_toy_data()
    n_dimensions = 1

    #2. locations of uniformly spaced prediction points
    X = SP.linspace(0, 10, 100)[:, SP.newaxis]

    if 0:
        #old interface where the covariance function and likelihood are one thing:
        #hyperparameters
        covar_parms = SP.log([1, 1, 1])
        hyperparams = {'covar': covar_parms}
        #construct covariance function
        SECF = se.SqexpCFARD(n_dimensions=n_dimensions)
        noiseCF = noise.NoiseCFISO()
        covar = combinators.SumCF((SECF, noiseCF))
        covar_priors = []
        #scale
        covar_priors.append([lnpriors.lnGammaExp, [1, 2]])
        covar_priors.extend([[lnpriors.lnGammaExp, [1, 1]]
                             for i in xrange(n_dimensions)])
        #noise
        covar_priors.append([lnpriors.lnGammaExp, [1, 1]])
        priors = {'covar': covar_priors}
        likelihood = None

    if 1:
        #new interface with likelihood parameters decoupled from the covariance function
        likelihood = lik.GaussLikISO()
        covar_parms = SP.log([1, 1])
        hyperparams = {'covar': covar_parms, 'lik': SP.log([1])}
        #construct covariance function
        SECF = se.SqexpCFARD(n_dimensions=n_dimensions)
        covar = SECF
        covar_priors = []
        #scale
        covar_priors.append([lnpriors.lnGammaExp, [1, 2]])
        covar_priors.extend([[lnpriors.lnGammaExp, [1, 1]]
                             for i in xrange(n_dimensions)])
        lik_priors = []
        #noise
        lik_priors.append([lnpriors.lnGammaExp, [1, 1]])
        priors = {'covar': covar_priors, 'lik': lik_priors}

    gp = GP(covar, likelihood=likelihood, x=x, y=y)
    opt_model_params = opt.opt_hyper(gp,
                                     hyperparams,
                                     priors=priors,
                                     gradcheck=False)[0]

    #predict
    [M, S] = gp.predict(opt_model_params, X)

    #create plots
    gpr_plot.plot_sausage(X, M, SP.sqrt(S))
    gpr_plot.plot_training_data(x, y)
    PL.show()
Example #2
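A setData override from pygp's GPLVM model: it delegates to GP.setData and then defaults gplvm_dimensions to an index over all input columns when the caller supplies none, otherwise it stores the given index vector. A minimal usage sketch, assuming GP.setData accepts x and y keywords (as the **kw_args pass-through suggests) and a gplvm object exposing this method:

gplvm.setData(x=X, y=Y)  # gplvm_dimensions defaults to SP.arange(X.shape[1])
gplvm.setData(x=X, y=Y, gplvm_dimensions=SP.array([0, 2]))  # explicit index vector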
def setData(self, gplvm_dimensions=None, **kw_args):
    GP.setData(self, **kw_args)
    #handle non-informative gplvm_dimensions vector
    if self.gplvm_dimensions is None and gplvm_dimensions is None:
        self.gplvm_dimensions = SP.arange(self.x.shape[1])
    elif gplvm_dimensions is not None:
        self.gplvm_dimensions = gplvm_dimensions
Example #3
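A warped-GP demo: the observed targets z are modelled as a nonlinear, monotonic warp of a latent Gaussian process, the warp is learnt jointly with the kernel and likelihood hyperparameters, and the fit is compared against a plain GP at the end. A sketch of the tanh warp that TanhWarpingFunction(n_terms=3) presumably implements; the (n_terms, 3) parameter layout is inferred from the 'warping' hyperparameters below and should be treated as an assumption:

import scipy as SP

def tanh_warp(y, psi):
    # assumed standard tanh warping (Snelson et al.):
    #   f(y) = y + sum_i a_i * tanh(b_i * (y + c_i))
    # psi is an (n_terms, 3) array whose rows hold [a_i, b_i, c_i]
    z = y.copy()
    for a, b, c in psi:
        z = z + a * SP.tanh(b * (y + c))
    return z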
def run_demo():
    LG.basicConfig(level=LG.DEBUG)
    SP.random.seed(10)

    #1. create toy data
    x, y, z, sigma, X, actual_inv, L = create_toy_data()
    n_dimensions = 1
    n_terms = 3
    # build GP
    likelihood = lik.GaussLikISO()
    covar_parms = SP.log([1, 1, 1E-5])
    hyperparams = {
        'covar': covar_parms,
        'lik': SP.log([sigma]),
        'warping': (1E-2 * SP.random.randn(n_terms, 3))
    }

    SECF = se.SqexpCFARD(n_dimensions=n_dimensions)
    muCF = mu.MuCF(N=X.shape[0])
    covar = combinators.SumCF([SECF, muCF])
    warping_function = TanhWarpingFunction(n_terms=n_terms)
    mean_function = LinMeanFunction(X=SP.ones([x.shape[0], 1]))
    hyperparams['mean'] = 1E-2 * SP.random.randn(1)
    bounds = {}
    bounds.update(warping_function.get_bounds())

    gp = WARPEDGP(warping_function=warping_function,
                  mean_function=mean_function,
                  covar_func=covar,
                  likelihood=likelihood,
                  x=x,
                  y=z)
    opt_model_params = opt.opt_hyper(gp,
                                     hyperparams,
                                     bounds=bounds,
                                     gradcheck=True)[0]

    print "WARPED GP (neg) likelihood: ", gp.LML(hyperparams)

    #hyperparams['mean'] = SP.log(1)
    PL.figure()
    PL.plot(z)
    PL.plot(warping_function.f(y, opt_model_params['warping']))
    PL.legend(["real function", "learnt function"])

    PL.figure()
    PL.plot(actual_inv)
    PL.plot(warping_function.f_inv(gp.y, opt_model_params['warping']))
    PL.legend(['real inverse', 'learnt inverse'])

    hyperparams.pop("warping")
    hyperparams.pop("mean")
    gp = GP(covar, likelihood=likelihood, x=x, y=y)
    opt_model_params = opt.opt_hyper(gp, hyperparams, gradcheck=False)[0]
    print "GP (neg) likelihood: ", gp.LML(hyperparams)
Example #4
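A time-shift regression demo: two groups of replicated time series are sampled from a sine plus linear trend, each replicate is deliberately shifted along the time axis, and a ShiftCF-wrapped squared-exponential kernel learns one shift per replicate (lower subplot), compared with a plain fit that ignores the shifts (upper subplot). A worked trace of the replicate_indices construction, given the sizes used in this snippet:

# 2 conditions x 4 replicates, 10 samples per x1 replicate and 20 per x2 replicate:
#   replicate_indices = [0]*10 + [1]*10 + [2]*10 + [3]*10   (condition 1, 40 points)
#                     + [4]*20 + [5]*20 + [6]*20 + [7]*20   (condition 2, 80 points)
# so n_replicates becomes 8 and ShiftCF learns 8 shift parameters.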
def run_demo():
    LG.basicConfig(level=LG.INFO)
    PL.figure()

    random.seed(1)

    #0. generate toy data: samples from a superposition of a sine plus a linear trend
    n_replicates = 4
    xmin = 1
    xmax = 2.5 * SP.pi

    x1_time_steps = 10
    x2_time_steps = 20

    x1 = SP.zeros(x1_time_steps * n_replicates)
    x2 = SP.zeros(x2_time_steps * n_replicates)

    for i in xrange(n_replicates):
        x1[i * x1_time_steps:(i + 1) * x1_time_steps] = SP.linspace(
            xmin, xmax, x1_time_steps)
        x2[i * x2_time_steps:(i + 1) * x2_time_steps] = SP.linspace(
            xmin, xmax, x2_time_steps)

    C = 2  #offset
    #b = 0.5
    sigma1 = 0.15
    sigma2 = 0.15
    n_noises = 1

    b = 0

    y1 = b * x1 + C + 1 * SP.sin(x1)
    #    dy1 = b   +     1*SP.cos(x1)
    y1 += sigma1 * random.randn(y1.shape[0])
    y1 -= y1.mean()

    y2 = b * x2 + C + 1 * SP.sin(x2)
    #    dy2 = b   +     1*SP.cos(x2)
    y2 += sigma2 * random.randn(y2.shape[0])
    y2 -= y2.mean()

    for i in xrange(n_replicates):
        x1[i * x1_time_steps:(i + 1) * x1_time_steps] += .7 + (i / 2.)
        x2[i * x2_time_steps:(i + 1) * x2_time_steps] -= .7 + (i / 2.)

    x1 = x1[:, SP.newaxis]
    x2 = x2[:, SP.newaxis]

    x = SP.concatenate((x1, x2), axis=0)
    y = SP.concatenate((y1, y2), axis=0)

    #predictions:
    X = SP.linspace(xmin - n_replicates, xmax + n_replicates,
                    100 * n_replicates)[:, SP.newaxis]

    #hyperparameters
    dim = 1
    replicate_indices = []
    for i, xi in enumerate((x1, x2)):
        for rep in SP.arange(i * n_replicates, (i + 1) * n_replicates):
            replicate_indices.extend(SP.repeat(rep, len(xi) / n_replicates))
    replicate_indices = SP.array(replicate_indices)
    n_replicates = len(SP.unique(replicate_indices))

    logthetaCOVAR = [1, 1]
    logthetaCOVAR.extend(SP.repeat(SP.exp(1), n_replicates))
    logthetaCOVAR.extend([sigma1])
    logthetaCOVAR = SP.log(logthetaCOVAR)
    hyperparams = {'covar': logthetaCOVAR}

    SECF = se.SqexpCFARD(dim)
    #noiseCF = noise.NoiseReplicateCF(replicate_indices)
    noiseCF = noise.NoiseCFISO()
    shiftCF = combinators.ShiftCF(SECF, replicate_indices)
    CovFun = combinators.SumCF((shiftCF, noiseCF))

    covar_priors = []
    #scale
    covar_priors.append([lnpriors.lnGammaExp, [1, 2]])
    for i in range(dim):
        covar_priors.append([lnpriors.lnGammaExp, [1, 1]])
    #shift
    for i in range(n_replicates):
        covar_priors.append([lnpriors.lnGauss, [0, .5]])
    #noise
    for i in range(n_noises):
        covar_priors.append([lnpriors.lnGammaExp, [1, 1]])

    covar_priors = SP.array(covar_priors)
    priors = {'covar': covar_priors}
    Ifilter = {'covar': SP.ones(n_replicates + 3)}

    gpr = GP(CovFun, x=x, y=y)
    opt_model_params = opt_hyper(gpr,
                                 hyperparams,
                                 priors=priors,
                                 gradcheck=False,
                                 Ifilter=Ifilter)[0]

    #predict
    [M, S] = gpr.predict(opt_model_params, X)

    T = opt_model_params['covar'][2:2 + n_replicates]

    PL.subplot(212)
    gpr_plot.plot_sausage(X,
                          M,
                          SP.sqrt(S),
                          format_line=dict(alpha=1, color='g', lw=2, ls='-'))
    gpr_plot.plot_training_data(x,
                                y,
                                shift=T,
                                replicate_indices=replicate_indices,
                                draw_arrows=2)

    PL.suptitle("Example for GPTimeShift with simulated data", fontsize=23)

    PL.title("Regression including time shift")
    PL.xlabel("x")
    PL.ylabel("y")
    ylim = PL.ylim()

    gpr = GP(combinators.SumCF((SECF, noiseCF)), x=x, y=y)
    priors = {'covar': covar_priors[[0, 1, -1]]}
    hyperparams = {'covar': logthetaCOVAR[[0, 1, -1]]}
    opt_model_params = opt_hyper(gpr,
                                 hyperparams,
                                 priors=priors,
                                 gradcheck=False)[0]

    PL.subplot(211)
    #predict
    [M, S] = gpr.predict(opt_model_params, X)

    gpr_plot.plot_sausage(X,
                          M,
                          SP.sqrt(S),
                          format_line=dict(alpha=1, color='g', lw=2, ls='-'))
    gpr_plot.plot_training_data(x, y, replicate_indices=replicate_indices)

    PL.title("Regression without time shift")
    PL.xlabel("x")
    PL.ylabel("y")
    PL.ylim(ylim)

    PL.subplots_adjust(left=.1,
                       bottom=.1,
                       right=.96,
                       top=.8,
                       wspace=.4,
                       hspace=.4)
    PL.show()
Example #5
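A GPLVM fragment: the latent inputs X are initialized from PCA scores and entered into the hyperparameter dictionary, so the log marginal likelihood becomes a function of both the kernel parameters and X itself. The snippet assumes Spca (N x K scores) and Wpca (loadings) from a PCA of the data matrix Y; a hypothetical helper producing them, consistent with the reconstruction Y_ = SP.dot(Spca, Wpca.T):

import scipy as SP
import scipy.linalg as linalg

def pca_init(Y, K):
    # hypothetical helper: rank-K PCA of the (N x D) matrix Y, returning
    # scores Spca (N x K) and loadings Wpca (D x K) with Y0 ~ SP.dot(Spca, Wpca.T)
    Y0 = Y - Y.mean(axis=0)
    U, s, Vt = linalg.svd(Y0, full_matrices=False)
    Spca = U[:, :K] * s[:K]
    Wpca = Vt[:K].T
    return Spca, Wpca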
    #reconstruction
    Y_ = SP.dot(Spca, Wpca.T)

    #construct GPLVM model
    linear_cf = linear.LinearCFISO(n_dimensions=K)
    noise_cf = noise.NoiseCFISO()
    mu_cf = fixed.FixedCF(SP.ones([N, N]))
    covariance = combinators.SumCF((mu_cf, linear_cf, noise_cf))
    # covariance = combinators.SumCF((linear_cf, noise_cf))

    #no inputs here (later SNPs)
    X = Spca.copy()
    #X = SP.random.randn(N,K)
    gplvm = GPLVM(covar_func=covariance, x=X, y=Y)

    gpr = GP(covar_func=covariance, x=X, y=Y[:, 0])

    #construct hyperparams
    covar = SP.log([0.1, 1.0, 0.1])

    #X are hyperparameters, i.e. we optimize over them also

    #1. this is jointly with the latent X
    X_ = X.copy()
    hyperparams = {'covar': covar, 'x': X_}

    #for testing just covar params alone:
    #hyperparams = {'covar': covar}

    #evaluate log marginal likelihood
    lml = gplvm.LML(hyperparams=hyperparams)
Example #6
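A fragment from a breakpoint-detection setup: gpr_opt_hyper is a GroupGP that ties two GPs, one on the control series C[1] and one on the treatment series T[1], to a single shared set of hyperparameters, while gpr_BP is a single GP over the joined data whose covariance CovFun presumably carries an extra breakpoint parameter; Ifilter_BP keeps that fourth parameter fixed during optimization. x1, x2, C, T, SECF, noiseCF, CovFun, and the earlier entries of covar_priors are defined before this fragment in the original script.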
    #breakpoint, no knowledge
    for i in range(1):
        covar_priors.append([lnpriors.lnuniformpdf, [0, 0]])    
        covar_priors.append([lnpriors.lnGammaExp, [1, .5]])

    logthetaCOVAR = SP.log([.4, 3.2, 0.3])
    hyperparams = {'covar':logthetaCOVAR}

    covar_priors = SP.array(covar_priors)
    priors = {'covar' : covar_priors[[0, 1, 2]]}
    priors_BP = {'covar' : covar_priors}
    Ifilter = {'covar' : SP.array([1, 1, 1], dtype='int')}
    Ifilter_BP = {'covar' : SP.array([1, 1, 1, 0], dtype='int')}

    #gpr_BP = GPR.GP(CovFun,x=x,y=y)
    gpr_BP = GP(CovFun, x=x.reshape(-1,1), y=y.reshape(-1,1))
#    gpr_opt_hyper = GP(combinators.SumCF((SECF,noiseCF)),x=x,y=y)
    gpr_opt_hyper = GroupGP((GP(combinators.SumCF((SECF, noiseCF)), x=x1.reshape(-1,1), y=C[1].reshape(-1,1)),
                             GP(combinators.SumCF((SECF, noiseCF)), x=x2.reshape(-1,1), y=T[1].reshape(-1,1))))

    [opt_model_params, opt_lml] = opt_hyper(gpr_opt_hyper, hyperparams, priors=priors, gradcheck=False, Ifilter=Ifilter)
#    opt_model_params = hyperparams
    print SP.exp(opt_model_params['covar'])
    
#    import copy
#    _hyperparams = copy.deepcopy(opt_model_params)
    # _logtheta = SP.array([0,0,0,0],dtype='double')
    # _logtheta[:2] = _hyperparams['covar'][:2]
    # _logtheta[3] = _hyperparams['covar'][2]
    # _hyperparams['covar'] = _logtheta