Code Example #1
File: example4_section4.py  Project: rdenham/pymcmc
print
intro = '''This example shows four different ways of
programming the likelihood function. Three of these
ways are efficient, using Numpy, weave, or f2py. The
fourth uses a loop in Python and is thus much slower.'''

ptitle(intro.split('\n'))

print
print

ptitle("Numpy")
logl = loglnumpy
random.seed(12345)
ms = MCMC(20000, 4000, data, [samplebeta],
          loglike = (logl, xmat.shape[1], 'yvec'))
ms.sampler()
ms.output()

print

ptitle("loop (the slow one)")
logl = loglloop
random.seed(12345)
samplebeta = RWMH(posterior, scale, init_beta, 'beta')
ms = MCMC(20000, 4000, data, [samplebeta],
          loglike = (logl, xmat.shape[1], 'yvec'))
ms.sampler()
ms.output()

print
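
The likelihood implementations themselves are outside this excerpt. As an illustration only, the vectorised and looped versions could look roughly like the following, assuming the Poisson log-linear likelihood used in pymcmc's count-data examples; the bodies below are a sketch, not the project's code, and 'beta' is assumed to be the current draw placed in the dictionary by the sampler.

from numpy import dot, exp

def loglnumpy(store):
    """Vectorised Poisson log-linear log-likelihood, up to a constant (sketch)."""
    xbeta = dot(store['xmat'], store['beta'])
    return (store['yvec'] * xbeta - exp(xbeta)).sum()

def loglloop(store):
    """The same quantity via an explicit Python loop (sketch);
    equivalent, but much slower than the vectorised version."""
    total = 0.0
    for i in range(store['yvec'].shape[0]):
        xbeta = dot(store['xmat'][i, :], store['beta'])
        total += store['yvec'][i] * xbeta - exp(xbeta)
    return total
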
Code Example #2
File: example2_section3.2.py  Project: rdenham/pymcmc

# main program
random.seed(12345)       # seed for the random number generator

# loads data from file
data = loadtxt(os.path.join(datadir,'count.txt'), skiprows = 1)    
yvec = data[:, 0]
xmat = data[:, 1:data.shape[1]]
xmat = hstack([ones((data.shape[0], 1)), xmat])

data ={'yvec':yvec, 'xmat':xmat}

# use Bayesian regression to initialise the nonlinear least squares algorithm
bayesreg = BayesRegression(yvec, xmat)
sig, beta0 = bayesreg.posterior_mean()

init_beta, info = leastsq(minfunc, beta0, args = (yvec, xmat))
data['betaprec'] = -llhessian(data, init_beta)
scale = linalg.inv(data['betaprec'])

# Initialise the random walk MH algorithm
samplebeta = RWMH(posterior, scale, init_beta, 'beta')

ms = MCMC(20000, 4000, data, [samplebeta],
          loglike = (logl, xmat.shape[1], 'yvec'))
ms.sampler()

ms.output()
ms.plot('beta')
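
The functions handed to leastsq, RWMH and MCMC above (minfunc, posterior and logl) are defined earlier in example2_section3.2.py and are not part of the excerpt. A minimal sketch of the shape they need, assuming the same Poisson log-linear model as the other count-data examples; the prior used in posterior is an assumption, not taken from the file.

from numpy import diag, dot, exp, ones

def minfunc(beta, yvec, xmat):
    """Residuals for the nonlinear least-squares initialisation (sketch)."""
    return yvec - exp(dot(xmat, beta))

def logl(store):
    """Poisson log-linear log-likelihood, up to a constant (sketch)."""
    xbeta = dot(store['xmat'], store['beta'])
    return (store['yvec'] * xbeta - exp(xbeta)).sum()

def posterior(store):
    """Log posterior = log-likelihood + log prior (sketch; the vague
    normal prior on beta is an assumption)."""
    beta = store['beta']
    prec = diag(0.005 * ones(store['xmat'].shape[1]))   # assumed prior precision
    return logl(store) - 0.5 * dot(beta, dot(prec, beta))
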
Code Example #3
File: example3_section3.3.py  Project: rdenham/pymcmc
priorreg = ("g_prior", zeros(kreg), 1000.0)
regs = BayesRegression(yvec, xmat, prior=priorreg)

"""A dictionary is set up. The contents of the dictionary will be
available for use for by the functions that make up the MCMC sampler.
Note that we pass in storage space as well as the class intance used
to sample the regression from."""
data = {"yvec": yvec, "xmat": xmat, "regsampler": regs}
U = spmatrix.ll_mat(nobs, nobs, 2 * nobs - 1)
U.put(1.0, range(0, nobs), range(0, nobs))
data["yvectil"] = zeros(nobs)
data["xmattil"] = zeros((nobs, kreg))
data["Upper"] = U

# Use Bayesian regression to initialise MCMC sampler
bayesreg = BayesRegression(yvec, xmat)
sig, beta = bayesreg.posterior_mean()

simsigbeta = CFsampler(WLS, [sig, beta], ["sigma", "beta"])

rho = 0.9
simrho = SliceSampler([post_rho], 0.1, 5, rho, "rho")
blocks = [simrho, simsigbeta]

loglikeinfo = (loglike, kreg + 2, "yvec")
ms = MCMC(10000, 2000, data, blocks, loglike=loglikeinfo)
ms.sampler()

ms.output()
ms.plot("rho")
Code Example #4
File: ex_loglinear.py  Project: rdenham/pymcmc
    sum = zeros((kreg, kreg))
    for i in xrange(nobs):
        sum = sum + lamb[i] * outer(store['xmat'][i], store['xmat'][i])
    return -sum

# main program
random.seed(12345)       # seed for the random number generator

data = loadtxt(os.path.join(datadir,'count.txt'), skiprows = 1)    # loads data from file
yvec = data[:, 0]
xmat = data[:, 1:data.shape[1]]
xmat = hstack([ones((data.shape[0], 1)), xmat])

data = {'yvec': yvec, 'xmat': xmat}

# use Bayesian regression to initialise the nonlinear least squares algorithm
bayesreg = BayesRegression(yvec, xmat)
sig, beta0 = bayesreg.posterior_mean()
init_beta, info = leastsq(minfunc, beta0, args = (yvec, xmat))
data['betaprec'] = -llhessian(data, init_beta)
scale = linalg.inv(data['betaprec'])

samplebeta = RWMH(posterior, scale, init_beta, 'beta')
ms = MCMC(20000, 4000, data, [samplebeta], loglike = (logl, xmat.shape[1], 'yvec'))
ms.sampler()
ms.output(filename='example1c.out') 
ms.plot('beta', filename='ex_loglinear.pdf')
# ms.CODAoutput('beta')
# ms.plot('beta', elements = [0], plottypes ="trace", filename ="xx.pdf")
# ms.plot('beta', elements = [0], plottypes ="density", filename ="xx.png")
## ms.plot('beta', elements = [0], plottypes ="acf", filename ="yy.ps")
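
The excerpt opens inside a function body; the loop over outer products weighted by lamb is the tail of llhessian, whose negative is used above as the precision for the RWMH scale. For readability, a sketch of how the complete function plausibly reads (the header and the lamb line are reconstructed, not copied from the file):

from numpy import dot, exp, outer, zeros

def llhessian(store, beta):
    """Hessian of the Poisson log-linear log-likelihood at beta (sketch)."""
    nobs = store['yvec'].shape[0]
    kreg = store['xmat'].shape[1]
    lamb = exp(dot(store['xmat'], beta))
    hess = zeros((kreg, kreg))
    for i in range(nobs):
        hess = hess + lamb[i] * outer(store['xmat'][i], store['xmat'][i])
    return -hess
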
Code Example #5
File: ex_AR1.py  Project: rdenham/pymcmc
to sample the regression."""
data ={'yvec':yvec, 'xmat':xmat, 'regsampler':regs}
U = spmatrix.ll_mat(nobs, nobs, 2 * nobs - 1)
U.put(1.0, range(0, nobs), range(0, nobs))
data['yvectil'] = zeros(nobs)
data['xmattil'] = zeros((nobs, kreg))
data['Upper'] = U

# Use Bayesian regression to initialise MCMC sampler
bayesreg = BayesRegression(yvec, xmat)
sig, beta = bayesreg.posterior_mean()

simsigbeta = CFsampler(WLS, [sig, beta], ['sigma', 'beta'])
scale = 0.002                       # tuning parameter for RWMH
rho = 0.9
##rho = [1] ## to test exception handling
# simrho = RWMH(post_rho, scale, rho, 'rho')
simrho = SliceSampler([post_rho], 0.1, 5, rho, 'rho')
#simrho = OBMC(post_rho, 3, scale, rho, 'rho')
# simrho = MH(gencand, post_rho, probcandgprev, probprevgcand, rho, 'rho')
blocks = [simrho, simsigbeta]
loglikeinfo = (loglike, kreg + 2, 'yvec')
ms = MCMC(10000, 2000, data, blocks, loglike = loglikeinfo)
ms.sampler()
ms.output()
#ms.plot('sigbeta')
ms.plot('rho', filename ='rho')
ms.CODAoutput(parameters = ['rho'])


Code Example #6
File: example1_section3.1.py  Project: rdenham/pymcmc
yvec = data[:, 0]
xmat = data[:, 1:20]
xmat = hstack([ones((xmat.shape[0], 1)), xmat])

"""data is a dictionary whose elements are accessible from the functions
in the MCMC sampler"""
data ={'yvec':yvec, 'xmat':xmat}
prior = ['g_prior',zeros(xmat.shape[1]), 100.]
SSVS = StochasticSearch(yvec, xmat, prior)
data['SS'] = SSVS

"""initialise gamma"""
initgamma = zeros(xmat.shape[1], dtype ='i')
initgamma[0] = 1
simgam = CFsampler(samplegamma, initgamma, 'gamma', store ='all')


# initialise class for MCMC sampler
random.seed(12346)
ms = MCMC(20000, 5000, data, [simgam])
ms.sampler()
ms.output()
ms.output(custom = SSVS.output)

txmat = SSVS.extract_regressors(0)
g_prior = ['g_prior', 0.0, 100.]
breg = BayesRegression(yvec,txmat,prior = g_prior)
breg.output()

breg.plot()
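
samplegamma, the function passed to CFsampler, is not shown. A plausible sketch is that it simply delegates to the StochasticSearch instance stored under 'SS'; the sample_gamma method name is an assumption, not confirmed by this excerpt.

def samplegamma(store):
    """Draw the vector of inclusion indicators (sketch).
    Assumes the StochasticSearch instance exposes a sample_gamma method."""
    return store['SS'].sample_gamma(store)
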
Code Example #7
data = loadtxt(os.path.join(datadir,'yld2.txt'))
yvec = data[:, 0]
xmat = data[:, 1:20]
xmat = hstack([ones((xmat.shape[0], 1)), xmat])

"""data is a dictionary whose elements are accessible from the functions
in the MCMC sampler"""
data ={'yvec':yvec, 'xmat':xmat}
prior = ['g_prior',zeros(xmat.shape[1]), 100.]
SSVS = StochasticSearch(yvec, xmat, prior)
data['SS'] = SSVS

"""initialise gamma"""
initgamma = zeros(xmat.shape[1], dtype ='i')
initgamma[0] = 1
simgam = CFsampler(samplegamma, initgamma, 'gamma', store ='none')

# initialise class for MCMC sampler
ms = MCMC(20000, 5000, data, [simgam])
ms.sampler()
ms.output(filename ='vs.txt')
ms.output(custom = SSVS.output, filename = 'SSVS.out')
ms.output(custom = SSVS.output)

txmat = SSVS.extract_regressors(0)
g_prior = ['g_prior', 0.0, 100.]
breg = BayesRegression(yvec,txmat,prior = g_prior)
breg.output(filename = 'SSVS1.out')
breg.plot()