Example #1
import numpy as np

import GeePea


def MySqExponential(X, Y, theta, white_noise=False):
    """
    Copied from GeePea.SqExponential, but using a log height scale parameter.
    """

    #Calculate distance matrix with scaling - multiply each coord by sqrt(eta)
    D2 = GeePea.EuclideanDist2(X, Y, v=1. / (np.array(theta[1:-1])))

    #Calculate covariance matrix
    K = np.exp(2 * theta[0]) * np.exp(-0.5 * D2)

    #Add white noise
    if white_noise is True: K += np.identity(X[:, 0].size) * (theta[-1]**2)

    return np.matrix(K)
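
#the kernel also needs n_par and kernel_type attributes before it can be
#passed to GeePea.GP via the kf argument (see Example #8 for full usage)
MySqExponential.n_par = lambda D: D + 2
MySqExponential.kernel_type = "Full"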
Example #2
import numpy as np
import pylab
import os
import matplotlib as mpl

import GeePea
import MyFuncs as MF
import Infer

#create example with some noise/systematics
tpar = [0,3.0,10,0.1,0.2,0.2,0.2,1.0,0.0]  #transit mean function parameters
time = np.linspace(-0.1,0.1,300)
flux = MF.Transit_aRs(tpar,time) + 0.001*np.sin(2*np.pi*40*time) + np.random.normal(0.,0.0005,time.size)

#construct the GP
gp = GeePea.GP(time,flux,p=tpar+[0.1,0.01,0.001],mf=MF.Transit_aRs)
gp.opt() #optimise

#run quick MCMC to test predictions:
ch_len = 10000
lims = (0,5000,10)
epar = [0,0,0,0.001,0,0,0,0,0,] + [0.001,0.01,0.001]
Infer.MCMC_N(gp.logPosterior,gp.p,(),ch_len,epar,adapt_limits=lims,glob_limits=lims,chain_filenames=['test_chain'])
p,perr = Infer.AnalyseChains(lims[1],chain_filenames=['test_chain'])
X = Infer.GetSamples(5000,100,chain_filenames=['test_chain']) #get samples from the chains
os.remove('test_chain.npy')

#standard plot
pylab.figure()
gp.plot()
#pylab.savefig('test.pdf')
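
#the MCMC samples can also be used to visualise the spread of transit models
#allowed by the posterior - a minimal sketch, assuming each row of X is
#ordered like gp.p (9 transit parameters followed by 3 hyperparameters)
pylab.figure()
for s in X[:20]:
    pylab.plot(time, MF.Transit_aRs(s[:9], time), 'r-', alpha=0.1)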
Example #3
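#this snippet continues from a setup like Example #2 - time, flux, the mean
#function parameters mfp, hyperparameters hp, and parameter errors ep are
#assumed to be defined already (SS is assumed to be scipy.signal)
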
#create X array
x0 = time
x0 = (x0 - x0.mean()) / x0.std()
x1 = SS.sawtooth(2 * np.pi * time * 15.)
x1 = (x1 - x1.mean()) / x1.std()
X = np.array([x0, x1]).T

flux += 0.0003 * np.sin(2 * np.pi * 10. * time)
flux += x1 * 0.0004

#define the GP
gp = GeePea.GP(X,
               flux,
               xmf=time,
               p=mfp + hp,
               mf=MF.Transit_aRs,
               ep=ep,
               kf=GeePea.SqExponentialSum)


def logPrior(p, n):
    if p[-n] < 0: return -np.inf
    if p[-n + 1] < 0: return -np.inf
    if p[-n + 2] < 0: return -np.inf
    if p[-n + 3] < 0: return -np.inf
    if p[-n + 4] < 0: return -np.inf
    return 0


gp.logPrior = logPrior


#a fuller prior function, taking the number of hyperparameters as its second argument
def logPrior(p, nhp):
    #hyperparameters
    if (np.array(p[-nhp:]) < 0).any(): return -np.inf
    #limb darkening parameters
    if (p[5] + p[6]) > 1.: return -np.inf  #ensure positive surface brightness

    #else calculate the log prior
    log_prior = 0.
    #eg of a gamma prior
    #log_prior += np.log(gamma.pdf(p[-nhp+1],1.,0.,1.e2)).sum()
    #eg of a normal prior
    #log_prior += np.log(norm_dist.pdf(p[4],b,b_err)).sum()
    return log_prior


#now define the GP
gp = GeePea.GP(time, flux, p=mfp + hp, kf=kf, mf=MF.Transit_aRs, ep=ep + hp)
gp.logPrior = logPrior

#optimise the free parameters - this uses a nelder-mead simplex by default
gp.optimise()

#can also use a global optimiser which is more reliable, but needs a little more effort to set up
#needs a set of tuples defining the lower and upper limit for each parameter
bounds_hp = [(1.e-5, 1.e-2), (0.1, 250), (0, 0.01)]
bounds_mf = [(_p - 3 * _e, _p + 3 * _e) for _p, _e in zip(mfp, ep)]
for i, _e in enumerate(ep):  #better to set fixed pars to 'None'
    if _e == 0.: bounds_mf[i] = None
gp.opt_global(bounds=bounds_mf + bounds_hp)  #and optimise

#finally make a plot
plt.figure(1)
gp.plot()
Example #5
#!/usr/bin/env python
"""
Example on how to save GPs using dill (same as pickle but works on instance methods)

"""

import numpy as np
import dill
import GeePea
import os

#individual GP:
x = np.linspace(0,1,200)
y = np.sin(2*np.pi*2*x) + np.random.normal(0,0.2,200)
gp = GeePea.GP(x,y,p=[0.1,0.5,0.1])

#save using dill stream
stream = dill.dumps(gp)
#and recover
gp_copy = dill.loads(stream)

#save using dill file
file = open('test_save.dat','wb')  #binary mode so the pickle also works on python 3
dill.dump(gp,file)
file.close()
#and recover
gp_copy = dill.load(open('test_save.dat','rb'))

#define a list of GPs:
gps = [GeePea.GP(x,y,p=[0.1,0.5,0.1]) for i in range(10)]
#do something to all gps
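
#a minimal sketch of one thing to do with the list - optimise each GP, then
#save and recover the whole list with dill (the file name is just an example)
for g in gps:
    g.optimise()
dill.dump(gps, open('test_save_list.dat','wb'))
gps_copy = dill.load(open('test_save_list.dat','rb'))
os.remove('test_save_list.dat')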
Example #6
#!/usr/bin/env python

import GeePea
import numpy as np
import pylab

x = np.linspace(0, 1, 50)
y = np.sin(2 * np.pi * x) + np.random.normal(0, 0.1, x.size)

p = [1, 1, 0.1]  # kernel hyperparameters: height scale, length scale, white noise

gp = GeePea.GP(x, y, p)

gp.optimise()

gp.plot()

pylab.xlabel('x')
pylab.ylabel('y')

raw_input()
Example #7
#!/usr/bin/env python

import GeePea
import numpy as np
import pylab

#create test data
x = [-1.50,-1.0,-0.75,-0.4,-0.25,0.00]
y = [-1.6,-1.1,-0.5,0.25,0.5,0.8]

#define kernel hyperparameters and fixed parameter flags
hp = [0.,1.,0.3] # kernel hyperparameters (sq exponential takes 3 parameters for 1D input)
fp = [0,0,1] # fixed parameter flags

#also define a predictive distribution
x_p = np.linspace(-2,1,200)

#define the GP
gp = GeePea.GP(x,y,p=hp,fp=fp,x_pred=x_p)

#optimise and plot
gp.optimise()
pylab.figure(1)
gp.plot()

pylab.figure(2)
gp.set_pars(x_pred=x_p)
gp.plot()

raw_input()
Example #8
#!/usr/bin/env python

import GeePea
import numpy as np

#mean function in the correct format (same as Example #9)
my_mean_func = lambda p, x: p[0] + p[1] * x


#custom kernel - the MySqExponential function from Example #1
def MySqExponential(X, Y, theta, white_noise=False):
    """
    Copied from GeePea.SqExponential, but using a log height scale parameter.
    """

    #Calculate distance matrix with scaling - multiply each coord by sqrt(eta)
    D2 = GeePea.EuclideanDist2(X, Y, v=1. / (np.array(theta[1:-1])))

    #Calculate covariance matrix
    K = np.exp(2 * theta[0]) * np.exp(-0.5 * D2)

    #Add white noise
    if white_noise is True: K += np.identity(X[:, 0].size) * (theta[-1]**2)

    return np.matrix(K)


#add some attributes to the kernel
MySqExponential.n_par = lambda D: D + 2
MySqExponential.kernel_type = "Full"

#create test data
x = np.linspace(0, 1, 50)
y = my_mean_func([1., 3.], x) + np.sin(2 * np.pi * x) + np.random.normal(0, 0.1, x.size)

#define mean function parameters and hyperparameters
mfp = [0.8, 2.]
hp = [0., 1., 0.1]  # kernel hyperparameters (sq exponential takes 3 parameters for 1D input)

#define the GP
gp = GeePea.GP(x, y, p=mfp + hp, kf=MySqExponential, mf=my_mean_func)

#print out the GP attributes
gp.describe()

#optimise and plot
gp.optimise()
gp.plot()

raw_input()
Example #9
#!/usr/bin/env python

import GeePea
import numpy as np

#first define mean function in correct format
my_mean_func = lambda p, x: p[0] + p[1] * x

#create test data
x = np.linspace(0, 1, 50)
y = my_mean_func([1., 3.], x) + np.sin(2 * np.pi * x) + np.random.normal(0, 0.1, x.size)

#define mean function parameters and hyperparameters
mfp = [0.8, 2.]
hp = [1., 1., 0.1]  # kernel hyperparameters (sq exponential takes 3 parameters for 1D input)

#define the GP
gp = GeePea.GP(x, y, p=mfp + hp, mf=my_mean_func)

#print out the GP attributes
gp.describe()

#optimise and plot
gp.optimise()
gp.plot()

raw_input()