Example #1
    ##----------------------------------------------------------## 
    xs = np.arange(2004+1./24.,2024-1./24.,1./12.)     # TEST POINTS
    xs = xs.reshape(len(xs),1)

    vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xs)
    ym = vargout[0]; ys2 = vargout[1]
    m  = vargout[2]; s2  = vargout[3]
    plotter(xs,ym,ys2,x,y)#,[1955, 2030, 310, 420])
    ##----------------------------------------------------------##
    ## STANDARD GP (training)                                   ##
    ## OPTIMIZE HYPERPARAMETERS                                 ##
    ##----------------------------------------------------------##
    ## -> parameter training using (off-the-shelf) conjugate gradient (CG) optimization (NOTE: SCG is faster)
    from time import clock
    t0 = clock()
    vargout = min_wrapper(hyp,gp,'SCG',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)
    t1 = clock()

    hyp = vargout[0]
    vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xs)
    ym = vargout[0]; ys2 = vargout[1]
    m  = vargout[2]; s2  = vargout[3]

    print 'Time to optimize = ',t1-t0
    print 'Optimized mean = ',hyp.mean
    print 'Optimized covariance = ',hyp.cov
    print 'Optimized likelihood = ',hyp.lik
    
    plotter(xs,ym,ys2,x,y,[1955, 2030, 310, 420])
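
The plotter helper used throughout these examples ships with the demo scripts rather than the core library. As a rough, assumed reconstruction (not the actual pyGPs code), a minimal matplotlib version would draw the predictive mean, a +/- 2*sqrt(ys2) band, and the training points:

    import numpy as np
    import matplotlib.pyplot as plt

    def plot_gp_prediction(xs, ym, ys2, x, y, axis=None):
        """Plot GP predictive mean and an approximate 95% band (mean +/- 2 std)."""
        xs = xs.ravel(); ym = ym.ravel(); sd = np.sqrt(ys2).ravel()
        plt.fill_between(xs, ym - 2.0*sd, ym + 2.0*sd,
                         color='0.85', label='+/- 2 std')    # predictive uncertainty
        plt.plot(xs, ym, 'b-', label='predictive mean')
        plt.plot(x.ravel(), y.ravel(), 'r+', label='training data')
        if axis is not None:
            plt.axis(axis)                                   # e.g. [1955, 2030, 310, 420]
        plt.legend(loc='best')
        plt.show()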

Example #2
    if PLOT:
        plotter(xstar,ym,ys2,x,y,[-2, 2, -0.9, 3.9])
    
    ## GET negative log marginal likelihood
    [nlml, post] = gp(hyp2,inffunc,meanfunc,covfunc,likfunc,x,y,None,None,False)
    print "nlml =", nlml


    ##----------------------------------------------------------##
    ## STANDARD GP (example 3)                                  ##
    ##----------------------------------------------------------##
    print '...example 3: training and prediction...'
    ## TRAINING: OPTIMIZE HYPERPARAMETERS      
    ## -> parameter training via off-the-shelf optimization   
    t0 = clock()
    [hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'Minimize',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True) # minimize
    #[hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'CG',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)      # conjugate gradient
    #[hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'SCG',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)     # scaled conjugate gradient (faster than CG)
    #[hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'BFGS',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)    # quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
    t1 = clock()
    print 'Time for optimization =',t1-t0
    print "Optimal nlml =", fopt

    ## PREDICTION
    vargout = gp(hyp2_opt,inffunc,meanfunc,covfunc,likfunc,x,y,xstar)
    ym = vargout[0]; ys2 = vargout[1]; m  = vargout[2]; s2  = vargout[3]
    
    ## Plot results
    if PLOT:
        plotter(xstar,ym,ys2,x,y,[-1.9, 1.9, -0.9, 3.9])
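
Every optimizer string accepted by min_wrapper ('Minimize', 'CG', 'SCG', 'BFGS') minimizes the same objective, the negative log marginal likelihood that gp returns in training mode. For exact regression with Gaussian noise this objective has a closed form; the sketch below spells it out in plain numpy for an isotropic squared-exponential kernel. It is the textbook formula, not the library's internal implementation, and it assumes hyperparameters are stored as logs (the GPML convention):

    import numpy as np

    def nlml_se_gauss(log_ell, log_sf, log_sn, x, y):
        """Negative log marginal likelihood of an exact GP with an SE kernel and
        Gaussian noise: 0.5*y'K^-1 y + 0.5*log|K| + (n/2)*log(2*pi)."""
        x = x.reshape(-1, 1); y = y.reshape(-1, 1); n = len(y)
        ell, sf, sn = np.exp(log_ell), np.exp(log_sf), np.exp(log_sn)
        d2 = (x - x.T)**2                                    # pairwise squared distances
        K = sf**2 * np.exp(-0.5 * d2 / ell**2) + sn**2 * np.eye(n)
        L = np.linalg.cholesky(K)                            # K = L L'
        alpha = np.linalg.solve(L.T, np.linalg.solve(L, y))  # alpha = K^-1 y
        return (0.5 * np.dot(y.ravel(), alpha.ravel())
                + np.log(np.diag(L)).sum()                   # 0.5*log|K|
                + 0.5 * n * np.log(2 * np.pi))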
    
Example #3
        plt.grid()
        plt.axis([-4, 4, -4, 4])
        plt.show()
    ## DEFINE parameterized mean and covariance functions
    meanfunc = ['means.meanConst']  
    covfunc  = ['kernels.covSEard'] 
    ## DEFINE likelihood function used 
    likfunc = ['likelihoods.likErf']
    ## SPECIFY inference method
    inffunc = ['inferences.infLaplace']

    ## SET (hyper)parameters
    hyp = hyperParameters()
    hyp.mean = np.array([0.])
    hyp.cov  = np.array([0.,0.,0.])
    [hyp_opt, fopt, gopt, funcCalls] = min_wrapper(hyp,gp,'Minimize',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)    

    hyp = hyp_opt
    ##----------------------------------------------------------##
    ## STANDARD GP (example 1)                                  ##
    ##----------------------------------------------------------##
    print '...example 1: prediction...'
    ## GET negative log marginal likelihood
    [nlml,dnlZ,post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, None, None, True)
    print "nlml = ", nlml    
    
    ## PREDICTION
    [ymu,ys2,fmu,fs2,lp,post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y, xstar, np.ones((n,1)) )
    
    ## PLOT log predictive probabilities
    if PLOT:
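
covSEard above is the squared-exponential kernel with automatic relevance determination: D log length scales (one per input dimension) plus one log signal standard deviation, which is why hyp.cov has three entries for this 2-D classification data. An illustrative numpy version of that covariance (an assumed sketch, not the kernels.covSEard source):

    import numpy as np

    def cov_se_ard(loghyp, X, Z):
        """ARD squared-exponential kernel: loghyp = [log ell_1 .. log ell_D, log sf]."""
        D = X.shape[1]
        ell = np.exp(loghyp[:D])                   # per-dimension length scales
        sf2 = np.exp(2.0 * loghyp[D])              # signal variance
        Xs, Zs = X / ell, Z / ell                  # rescale each input dimension
        d2 = (np.sum(Xs**2, 1)[:, None] + np.sum(Zs**2, 1)[None, :]
              - 2.0 * Xs.dot(Zs.T))                # pairwise squared distances
        return sf2 * np.exp(-0.5 * d2)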
Example #4
    ys2 = vargout[1]
    m = vargout[2]
    s2 = vargout[3]

    HousingPlotter(range(len(y)), y,
                   range(len(y), len(y) + len(ys)), ym, ys2,
                   range(len(y), len(y) + len(ys)), ys)
    ##----------------------------------------------------------##
    ## STANDARD GP (training)                                   ##
    ## OPTIMIZE HYPERPARAMETERS                                 ##
    ##----------------------------------------------------------##
    ## -> parameter training using (off-the-shelf) conjugate gradient (CG) optimization (NOTE: SCG is faster)
    from time import clock
    t0 = clock()
    vargout = min_wrapper(hyp, gp, 'SCG', inffunc, meanfunc, covfunc, likfunc,
                          x, y, None, None, True)
    t1 = clock()
    hyp = vargout[0]

    #vargout = gp(hyp,inffunc,meanfunc,covfunc,likfunc,x,y,xs)
    vargout = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y,
                 np.concatenate((x, xs), axis=0))
    ym = vargout[0]
    ys2 = vargout[1]
    m = vargout[2]
    s2 = vargout[3]

    print 'Time to optimize = ', t1 - t0
    print 'Optimized mean = ', hyp.mean
    print 'Optimized covariance = ', hyp.cov
    print 'Optimized likelihood = ', hyp.lik
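
Because the prediction above is made at np.concatenate((x, xs), axis=0), the first len(x) rows of ym and ys2 are in-sample fits and the rest are the actual forecasts. Continuing from those arrays, a small sketch of splitting them apart before passing index ranges to a plotting helper (the variable names below are illustrative, not part of the library):

    n_train = len(x)
    ym_fit,  ym_forecast  = ym[:n_train],  ym[n_train:]    # in-sample fit vs. forecast
    ys2_fit, ys2_forecast = ys2[:n_train], ys2[n_train:]
    train_idx = range(n_train)                              # x-axis positions of the training data
    test_idx  = range(n_train, n_train + len(ym_forecast))  # positions of the forecast points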
Example #5
        plt.axis([-4, 4, -4, 4])
        plt.show()
    ## DEFINE parameterized mean and covariance functions
    meanfunc = ['means.meanConst']
    covfunc = ['kernels.covSEard']
    ## DEFINE likelihood function used
    likfunc = ['likelihoods.likErf']
    ## SPECIFY inference method
    inffunc = ['inferences.infLaplace']

    ## SET (hyper)parameters
    hyp = hyperParameters()
    hyp.mean = np.array([0.])
    hyp.cov = np.array([0., 0., 0.])
    [hyp_opt, fopt, gopt,
     funcCalls] = min_wrapper(hyp, gp, 'Minimize', inffunc, meanfunc, covfunc,
                              likfunc, x, y, None, None, True)

    hyp = hyp_opt
    ##----------------------------------------------------------##
    ## STANDARD GP (example 1)                                  ##
    ##----------------------------------------------------------##
    print '...example 1: prediction...'
    ## GET negative log marginal likelihood
    [nlml, dnlZ, post] = gp(hyp, inffunc, meanfunc, covfunc, likfunc, x, y,
                            None, None, True)
    print "nlml = ", nlml

    ## PREDICTION
    [ymu, ys2, fmu, fs2, lp, post] = gp(hyp, inffunc, meanfunc, covfunc,
                                        likfunc, x, y, xstar, np.ones((n, 1)))
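
Under the GPML convention this port follows, supplying test targets (here np.ones((n, 1))) makes lp the log predictive probability of those targets, so exp(lp) should be the predicted probability of class +1 at each test point. A short continuation sketch (the 0.5 threshold is just one reasonable read-out, not something the library prescribes):

    import numpy as np

    p_plus = np.exp(lp)                      # predictive P(y* = +1) at each test input
    yhat = np.where(p_plus > 0.5, 1, -1)     # hard class labels from the probabilities
    # ymu is the predictive mean of y* in [-1, 1], so sign(ymu) should give the same labels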
Example #6
        plotter(xstar, ym, ys2, x, y, [-2, 2, -0.9, 3.9])

    ## GET negative log marginal likelihood
    [nlml, post] = gp(hyp2, inffunc, meanfunc, covfunc, likfunc, x, y, None,
                      None, False)
    print "nlml =", nlml

    ##----------------------------------------------------------##
    ## STANDARD GP (example 3)                                  ##
    ##----------------------------------------------------------##
    print '...example 3: training and prediction...'
    ## TRAINING: OPTIMIZE HYPERPARAMETERS
    ## -> parameter training via off-the-shelf optimization
    t0 = clock()
    [hyp2_opt, fopt, gopt,
     funcCalls] = min_wrapper(hyp2, gp, 'Minimize', inffunc, meanfunc, covfunc,
                              likfunc, x, y, None, None, True)  # minimize
    #[hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'CG',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)      # conjugate gradient
    #[hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'SCG',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)     # scaled conjugate gradient (faster than CG)
    #[hyp2_opt, fopt, gopt, funcCalls] = min_wrapper(hyp2,gp,'BFGS',inffunc,meanfunc,covfunc,likfunc,x,y,None,None,True)    # quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
    t1 = clock()
    print 'Time for optimization =', t1 - t0
    print "Optimal nlml =", fopt

    ## PREDICTION
    vargout = gp(hyp2_opt, inffunc, meanfunc, covfunc, likfunc, x, y, xstar)
    ym = vargout[0]
    ys2 = vargout[1]
    m = vargout[2]
    s2 = vargout[3]

    ## Plot results
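
Of the four prediction outputs, ym and ys2 describe the noisy observation y*, while m and s2 describe the latent function f*. For the Gaussian likelihood used in these regression examples the two variances should differ only by the noise variance; assuming the GPML convention of storing the log noise standard deviation in hyp.lik, this can be sanity-checked as a continuation of the code above:

    import numpy as np

    sn2 = np.exp(2.0 * hyp2_opt.lik[0])      # noise variance from the log-noise-std hyperparameter
    assert np.allclose(ys2, s2 + sn2)        # observation variance = latent variance + noise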