# Main program: random-walk Metropolis-Hastings sampling for a log-linear model.
random.seed(12345)  # seed for the random number generator

# Loads data from file (first row is a header and is skipped).
data = loadtxt(os.path.join(datadir, 'count.txt'), skiprows=1)
yvec = data[:, 0]
xmat = data[:, 1:data.shape[1]]
xmat = hstack([ones((data.shape[0], 1)), xmat])  # prepend an intercept column

# 'data' is a dictionary whose elements are accessible from the
# functions in the MCMC sampler.
data = {'yvec': yvec, 'xmat': xmat}

# Use Bayesian regression to initialise the nonlinear least squares algorithm.
bayesreg = BayesRegression(yvec, xmat)
sig, beta0 = bayesreg.posterior_mean()
init_beta, info = leastsq(minfunc, beta0, args=(yvec, xmat))

# Proposal scale for the random walk: inverse of minus the log-likelihood
# Hessian evaluated at the initial estimate.
data['betaprec'] = -llhessian(data, init_beta)
scale = linalg.inv(data['betaprec'])

# Initialise the random walk MH algorithm and run the sampler.
samplebeta = RWMH(posterior, scale, init_beta, 'beta')
ms = MCMC(20000, 4000, data, [samplebeta],
          loglike=(logl, xmat.shape[1], 'yvec'))
ms.sampler()
ms.output()
ms.plot('beta')
# Stochastic search variable selection (SSVS) example.
# NOTE(review): 'data' is used here before any visible load — the loadtxt(...)
# call presumably appears before this chunk; confirm against the full source.
yvec = data[:, 0]
xmat = data[:, 1:20]
xmat = hstack([ones((xmat.shape[0], 1)), xmat])  # prepend an intercept column

# 'data' is a dictionary whose elements are accessible from the
# functions in the MCMC sampler.
data = {'yvec': yvec, 'xmat': xmat}

# g-prior for the regression coefficients.
prior = ['g_prior', zeros(xmat.shape[1]), 100.]
SSVS = StochasticSearch(yvec, xmat, prior)
data['SS'] = SSVS

# Initialise gamma (the inclusion-indicator vector); only the first
# element (the intercept) starts switched on.
initgamma = zeros(xmat.shape[1], dtype='i')
initgamma[0] = 1
simgam = CFsampler(samplegamma, initgamma, 'gamma', store='all')

# Initialise class for MCMC sampler.
random.seed(12346)
ms = MCMC(20000, 5000, data, [simgam])
ms.sampler()
ms.output()
ms.output(custom=SSVS.output)

# Re-fit a Bayesian regression on the regressors extracted for model index 0
# (presumably the most probable model — confirm against the SSVS API).
txmat = SSVS.extract_regressors(0)
g_prior = ['g_prior', 0.0, 100.]
breg = BayesRegression(yvec, txmat, prior=g_prior)
breg.output()
breg.plot()
# NOTE(review): only the tail of this function (the weighted outer-product
# accumulation and the return) is visible in this chunk. The header and the
# three leading statements below are reconstructed from the
# llhessian(data, init_beta) call site further down — confirm them against
# the full source.
def llhessian(store, beta):
    """Return minus the log-likelihood Hessian for the log-linear model."""
    nobs = store['yvec'].shape[0]             # reconstructed — TODO confirm
    kreg = store['xmat'].shape[1]             # reconstructed — TODO confirm
    lamb = exp(dot(store['xmat'], beta))      # reconstructed — TODO confirm
    sum = zeros((kreg, kreg))
    for i in xrange(nobs):
        sum = sum + lamb[i] * outer(store['xmat'][i], store['xmat'][i])
    return -sum

# Main program.
random.seed(12345)  # seed for the random number generator

# Loads data from file (first row is a header and is skipped).
data = loadtxt(os.path.join(datadir, 'count.txt'), skiprows=1)
yvec = data[:, 0]
xmat = data[:, 1:data.shape[1]]
xmat = hstack([ones((data.shape[0], 1)), xmat])  # prepend an intercept column
data = {'yvec': yvec, 'xmat': xmat}

# Use Bayesian regression to initialise the
# nonlinear least squares algorithm.
bayesreg = BayesRegression(yvec, xmat)
sig, beta0 = bayesreg.posterior_mean()
init_beta, info = leastsq(minfunc, beta0, args=(yvec, xmat))

# Proposal scale: inverse of minus the log-likelihood Hessian.
data['betaprec'] = -llhessian(data, init_beta)
scale = linalg.inv(data['betaprec'])

samplebeta = RWMH(posterior, scale, init_beta, 'beta')
ms = MCMC(20000, 4000, data, [samplebeta],
          loglike=(logl, xmat.shape[1], 'yvec'))
ms.sampler()
ms.output(filename='example1c.out')
ms.plot('beta', filename='ex_loglinear.pdf')
# ms.CODAoutput('beta')
# ms.plot('beta', elements = [0], plottypes ="trace", filename ="xx.pdf")
# ms.plot('beta', elements = [0], plottypes ="density", filename ="xx.png")
## ms.plot('beta', elements = [0], plottypes ="acf", filename ="yy.ps")
# Stochastic search variable selection (SSVS) example, writing results to files.
data = loadtxt(os.path.join(datadir, 'yld2.txt'))
yvec = data[:, 0]
xmat = data[:, 1:20]
xmat = hstack([ones((xmat.shape[0], 1)), xmat])  # prepend an intercept column

# 'data' is a dictionary whose elements are accessible from the
# functions in the MCMC sampler.
data = {'yvec': yvec, 'xmat': xmat}

# g-prior for the regression coefficients.
prior = ['g_prior', zeros(xmat.shape[1]), 100.]
SSVS = StochasticSearch(yvec, xmat, prior)
data['SS'] = SSVS

# Initialise gamma (the inclusion-indicator vector); only the first
# element (the intercept) starts switched on.
initgamma = zeros(xmat.shape[1], dtype='i')
initgamma[0] = 1
simgam = CFsampler(samplegamma, initgamma, 'gamma', store='none')

# Initialise class for MCMC sampler.
ms = MCMC(20000, 5000, data, [simgam])
ms.sampler()
ms.output(filename='vs.txt')
ms.output(custom=SSVS.output, filename='SSVS.out')
ms.output(custom=SSVS.output)

# Re-fit a Bayesian regression on the regressors extracted for model index 0
# (presumably the most probable model — confirm against the SSVS API).
txmat = SSVS.extract_regressors(0)
g_prior = ['g_prior', 0.0, 100.]
breg = BayesRegression(yvec, txmat, prior=g_prior)
breg.output(filename='SSVS1.out')
breg.plot()