예제 #1
0
        # NOTE(review): this excerpt starts mid-loop — `ii`, `N`, `hyp`, `x`,
        # `y` and the *func handles come from enclosing code outside this view.
        for jj in range(N):
            # Linear index into groups of 4 covariance hyperparameters;
            # ind-1 selects the last slot of the (ii, jj) group.
            ind = ii*N*4 + jj*4 + 4
            hyp.cov[ind-1] =  ii

    hyp.mean = np.array([])  # no mean-function hyperparameters (zero-mean GP)

    sn = 0.1                          # noise standard deviation
    hyp.lik = np.array([np.log(sn)])  # likelihood hyperparameter stored as log(sn)

    ##----------------------------------------------------------##
    ## STANDARD GP (prediction)                                 ##
    ##----------------------------------------------------------## 
    # Monthly test points between 2004 and 2024, offset half a step from the
    # month boundaries; reshaped to a column vector for gp().
    xs = np.arange(2004+1./24.,2024-1./24.,1./12.)     # TEST POINTS
    xs = xs.reshape(len(xs),1)

    # Prediction: first four outputs are predictive mean/variance of the
    # observations (ym, ys2) and of the latent function (m, s2).
    vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xs)
    ym = vargout[0]; ys2 = vargout[1]
    m  = vargout[2]; s2  = vargout[3]
    plotter(xs,ym,ys2,x,y)#,[1955, 2030, 310, 420])
    ##----------------------------------------------------------##
    ## STANDARD GP (training)                                   ##
    ## OPTIMIZE HYPERPARAMETERS                                 ##
    ##----------------------------------------------------------##
    ## -> parameter training using (off the shelf) conjugent gradient (CG) optimization (NOTE: SCG is faster)
    # NOTE(review): time.clock() was removed in Python 3.8; acceptable only for
    # the Python 2 style this file already uses (print statements).
    from time import clock
    t0 = clock()
    # Hyperparameter optimization via scaled conjugate gradients ('SCG').
    vargout = min_wrapper(hyp,gp,'SCG',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)
    t1 = clock()

    hyp = vargout[0]  # optimized hyperparameters
    # Re-run prediction with the trained hyperparameters.
    vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xs)
예제 #2
0
    ## SPECIFY inference method
    # Laplace approximation — used when the likelihood is non-Gaussian
    # (this fragment is a binary classification demo, judging by the plots).
    inffunc = ['inferences.infLaplace']

    ## SET (hyper)parameters
    hyp = hyperParameters()
    hyp.mean = np.array([0.])       # single mean-function hyperparameter
    hyp.cov  = np.array([0.,0.,0.]) # three covariance hyperparameters (log scale = 1.0 each)
    # Train: 'Minimize' optimizer; returns optimized hypers plus optimizer stats.
    [hyp_opt, fopt, gopt, funcCalls] = min_wrapper(hyp,gp,'Minimize',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)    

    hyp = hyp_opt  # continue with the trained hyperparameters
    ##----------------------------------------------------------##
    ## STANDARD GP (example 1)                                  ##
    ##----------------------------------------------------------##
    print '...example 1: prediction...'
    ## GET negative log marginal likelihood
    [nlml,dnlZ,post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, None, None, True)
    print "nlml = ", nlml    
    
    ## PREDICTION
    # np.ones((n,1)) — presumably test targets of +1 so lp holds log p(y=+1|x*);
    # verify against the gp() signature.
    [ymu,ys2,fmu,fs2,lp,post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, xstar, np.ones((n,1)) )
    
    ## PLOT log predictive probabilities
    if PLOT:
        fig = plt.figure()
        plt.plot(x1[:,0], x1[:,1], 'b+', markersize = 12)  # training points, class 1
        plt.plot(x2[:,0], x2[:,1], 'r+', markersize = 12)  # training points, class 2
        # Contours of the predictive probability exp(lp) over the (t1, t2) grid.
        pc = plt.contour(t1, t2, np.reshape(np.exp(lp), (t1.shape[0],t1.shape[1]) ))
        fig.colorbar(pc)
        plt.grid()
        plt.axis([-4, 4, -4, 4])
        plt.show()
예제 #3
0
    ## SPECIFY inference method
    # Exact inference — valid with a Gaussian likelihood (regression).
    inffunc  = ['inferences.infExact']
    
    ## SET (hyper)parameters
    hyp = hyperParameters()
    # First two covariance hypers on log scale, third raw — TODO confirm
    # against the chosen covfunc's parameterization.
    hyp.cov = np.array([np.log(0.25),np.log(1.0),3.0])
    hyp.mean = np.array([0.5,1.0])     # mean-function hyperparameters
    hyp.lik = np.array([np.log(0.1)])  # log of the noise standard deviation
    
    ##----------------------------------------------------------##
    ## STANDARD GP (example 1)                                  ##
    ##----------------------------------------------------------##
    print '...example 1: prediction...'
    ## PREDICTION
    # NOTE(review): clock must be imported elsewhere in the full script
    # (time.clock was removed in Python 3.8).
    t0 = clock()
    vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xstar)
    t1 = clock()
    # Predictive mean/variance of observations (ym, ys2) and latent f (m, s2).
    ym = vargout[0]; ys2 = vargout[1]; m  = vargout[2]; s2 = vargout[3]
    
    print 'Time for prediction =',t1-t0
    
    ## PLOT results
    if PLOT:
        # Last argument is the axis box [xmin, xmax, ymin, ymax].
        plotter(xstar,ym,s2,x,y,[-2, 2, -0.9, 3.9])

    ## GET negative log marginal likelihood
    # No test inputs and der=False -> training-only call returning (nlml, post).
    [nlml, post] = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,None,None,False)
    print "nlml =", nlml


    ##----------------------------------------------------------##
예제 #4
0
    likfunc = ['likelihoods.likGauss']  # Gaussian likelihood (regression)

    ## SET (hyper)parameters
    hyp = hyperParameters()

    ## SET (hyper)parameters for covariance and mean
    # Random initialization of the 3 covariance hyperparameters.
    hyp.cov = np.random.normal(0., 1., (3, ))
    hyp.mean = np.array([])  # no mean-function hyperparameters (zero mean)

    hyp.lik = np.array([np.log(0.1)])  # log noise standard deviation

    print 'Initial mean = ', hyp.mean
    print 'Initial covariance = ', hyp.cov
    # NOTE(review): 'liklihood' typo in the output string below (left as-is;
    # runtime strings are not altered here).
    print 'Initial liklihood = ', hyp.lik

    # Training-only call (no test inputs, der=False): negative log marginal
    # likelihood at the random initial hyperparameters.
    [nlml, post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, None,
                      None, False)
    print 'Initial negative log marginal likelihood = ', nlml

    ##----------------------------------------------------------##
    ## STANDARD GP (prediction)                                 ##
    ##----------------------------------------------------------##
    vargout = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, xs)
    ym = vargout[0]   # predictive mean of observations
    ys2 = vargout[1]  # predictive variance of observations
    m = vargout[2]    # predictive mean of latent function
    s2 = vargout[3]   # predictive variance of latent function
    # Plot training targets y followed by predictions over the test range.
    # NOTE(review): `ys` (true test targets) is defined outside this excerpt.
    HousingPlotter(range(len(y)), y, range(len(y),
                                           len(y) + len(ys)), ym, ys2,
                   range(len(y),
                         len(y) + len(ys)), ys)
예제 #5
0
    likfunc = ['likelihoods.likGauss']  # Gaussian likelihood (regression)

    ## SET (hyper)parameters
    hyp = hyperParameters()

    ## SET (hyper)parameters for covariance and mean
    # Random initialization of the 3 covariance hyperparameters.
    hyp.cov = np.random.normal(0.,1.,(3,))
    hyp.mean = np.array([])  # no mean-function hyperparameters (zero mean)

    hyp.lik = np.array([np.log(0.1)])  # log noise standard deviation

    print 'Initial mean = ',hyp.mean
    print 'Initial covariance = ',hyp.cov
    # NOTE(review): 'liklihood' typo in the output string below (left as-is;
    # runtime strings are not altered here).
    print 'Initial liklihood = ',hyp.lik

    # Training-only call (no test inputs, der=False): negative log marginal
    # likelihood at the random initial hyperparameters.
    [nlml, post] = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,None,None,False)
    print 'Initial negative log marginal likelihood = ',nlml
    
    ##----------------------------------------------------------##
    ## STANDARD GP (prediction)                                 ##
    ##----------------------------------------------------------## 
    vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xs)
    ym = vargout[0]; ys2 = vargout[1]  # predictive mean/variance of observations
    m  = vargout[2]; s2  = vargout[3]  # predictive mean/variance of latent function

    # Plot training targets y followed by predictions over the test range.
    # NOTE(review): `ys` (true test targets) is defined outside this excerpt.
    HousingPlotter(range(len(y)),y,range(len(y),len(y)+len(ys)),ym,ys2,range(len(y),len(y)+len(ys)),ys)
    ##----------------------------------------------------------##
    ## STANDARD GP (training)                                   ##
    ## OPTIMIZE HYPERPARAMETERS                                 ##
    ##----------------------------------------------------------##
    ## -> parameter training using (off the shelf) conjugent gradient (CG) optimization (NOTE: SCG is faster)
예제 #6
0
        # NOTE(review): this excerpt starts mid-loop — `ii`, `N`, `hyp`, `x`,
        # `y` and the *func handles come from enclosing code outside this view.
        for jj in range(N):
            # Linear index into groups of 4 covariance hyperparameters;
            # ind - 1 selects the last slot of the (ii, jj) group.
            ind = ii * N * 4 + jj * 4 + 4
            hyp.cov[ind - 1] = ii

    hyp.mean = np.array([])  # no mean-function hyperparameters (zero-mean GP)

    sn = 0.1                          # noise standard deviation
    hyp.lik = np.array([np.log(sn)])  # likelihood hyperparameter stored as log(sn)

    ##----------------------------------------------------------##
    ## STANDARD GP (prediction)                                 ##
    ##----------------------------------------------------------##
    # Monthly test points between 2004 and 2024, offset half a step from the
    # month boundaries; reshaped to a column vector for gp().
    xs = np.arange(2004 + 1. / 24., 2024 - 1. / 24., 1. / 12.)  # TEST POINTS
    xs = xs.reshape(len(xs), 1)

    vargout = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, xs)
    ym = vargout[0]   # predictive mean of observations
    ys2 = vargout[1]  # predictive variance of observations
    m = vargout[2]    # predictive mean of latent function
    s2 = vargout[3]   # predictive variance of latent function
    plotter(xs, ym, ys2, x, y)  #,[1955, 2030, 310, 420])
    ##----------------------------------------------------------##
    ## STANDARD GP (training)                                   ##
    ## OPTIMIZE HYPERPARAMETERS                                 ##
    ##----------------------------------------------------------##
    ## -> parameter training using (off the shelf) conjugent gradient (CG) optimization (NOTE: SCG is faster)
    # NOTE(review): time.clock() was removed in Python 3.8; acceptable only for
    # the Python 2 style this file already uses (print statements).
    from time import clock
    t0 = clock()
    # Hyperparameter optimization via scaled conjugate gradients ('SCG').
    vargout = min_wrapper(hyp, gp, 'SCG', inffunc, meanfunc, covfunc, likfunc,
                          x, y, None, None, True)
    t1 = clock()
예제 #7
0
    ## SET (hyper)parameters
    hyp = hyperParameters()
    hyp.mean = np.array([0.])        # single mean-function hyperparameter
    hyp.cov = np.array([0., 0., 0.]) # three covariance hyperparameters (log scale = 1.0 each)
    # Train: 'Minimize' optimizer; returns optimized hypers plus optimizer stats.
    [hyp_opt, fopt, gopt,
     funcCalls] = min_wrapper(hyp, gp, 'Minimize', inffunc, meanfunc, covfunc,
                              likfunc, x, y, None, None, True)

    hyp = hyp_opt  # continue with the trained hyperparameters
    ##----------------------------------------------------------##
    ## STANDARD GP (example 1)                                  ##
    ##----------------------------------------------------------##
    print '...example 1: prediction...'
    ## GET negative log marginal likelihood
    [nlml, dnlZ, post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y,
                            None, None, True)
    print "nlml = ", nlml

    ## PREDICTION
    # np.ones((n,1)) — presumably test targets of +1 so lp holds log p(y=+1|x*);
    # verify against the gp() signature.
    [ymu, ys2, fmu, fs2, lp, post] = gp(hyp, inffunc, meanfunc, covfunc,
                                        likfunc, x, y, xstar, np.ones((n, 1)))

    ## PLOT log predictive probabilities
    if PLOT:
        fig = plt.figure()
        plt.plot(x1[:, 0], x1[:, 1], 'b+', markersize=12)  # training points, class 1
        plt.plot(x2[:, 0], x2[:, 1], 'r+', markersize=12)  # training points, class 2
        # Contours of the predictive probability exp(lp) over the (t1, t2) grid.
        pc = plt.contour(t1, t2,
                         np.reshape(np.exp(lp), (t1.shape[0], t1.shape[1])))
        fig.colorbar(pc)
        plt.grid()
예제 #8
0
    ## SPECIFY inference method
    # Exact inference — valid with a Gaussian likelihood (regression).
    inffunc = ['inferences.infExact']

    ## SET (hyper)parameters
    hyp = hyperParameters()
    # First two covariance hypers on log scale, third raw — TODO confirm
    # against the chosen covfunc's parameterization.
    hyp.cov = np.array([np.log(0.25), np.log(1.0), 3.0])
    hyp.mean = np.array([0.5, 1.0])    # mean-function hyperparameters
    hyp.lik = np.array([np.log(0.1)])  # log of the noise standard deviation

    ##----------------------------------------------------------##
    ## STANDARD GP (example 1)                                  ##
    ##----------------------------------------------------------##
    print '...example 1: prediction...'
    ## PREDICTION
    # NOTE(review): clock must be imported elsewhere in the full script
    # (time.clock was removed in Python 3.8).
    t0 = clock()
    vargout = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, xstar)
    t1 = clock()
    ym = vargout[0]   # predictive mean of observations
    ys2 = vargout[1]  # predictive variance of observations
    m = vargout[2]    # predictive mean of latent function
    s2 = vargout[3]   # predictive variance of latent function

    print 'Time for prediction =', t1 - t0

    ## PLOT results
    if PLOT:
        # Last argument is the axis box [xmin, xmax, ymin, ymax].
        plotter(xstar, ym, s2, x, y, [-2, 2, -0.9, 3.9])

    ## GET negative log marginal likelihood
    # No test inputs and der=False -> training-only call returning (nlml, post).
    [nlml, post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, None,
                      None, False)