Example #1
0
def runToy2D():
	"""Run nested sampling on a 2D toy Gaussian likelihood and plot marginals."""
	np.random.seed(0)
	random.seed(0)

	# decorative splash of stars
	for _ in range(20):
		print(' '*random.randint(0,70) + '*')

	# priors: each of the two parameters is uniform on [-5,5]
	aP = prior.uniform(-5,5)
	bP = prior.uniform(-5,5)
	g.priors = [aP, bP]

	# parameters ('name', x_init, isFrozen):
	a = particle.param('a', aP.sample(), False)
	b = particle.param('b', bP.sample(), False)
	g.initParams = [a, b]

	# link each parameter to its prior by list index (important!)
	for idx, p in enumerate(g.initParams):
		p.setPrior(idx)

	# record which parameter indexes are free to vary
	g.thawedIdxs.extend(idx for idx, p in enumerate(g.initParams) if not p.isFrozen)

	# likelihood: 2D gaussian toy
	g.likelihood = likelihood.toyGauss()

	# unit mass for every parameter
	g.masses = np.ones(len(g.initParams))

	# initial particle set, each seeded with a draw from the prior
	particles = []
	for _ in range(g.nParticles):
		pt = particle.particle(g.initParams)
		pt.assignPriorSample()
		particles.append(pt)

	# run the inference, writing samples to disk:
	fname = '/Users/jmrv/Documents/school/mit/research/software/nest/samples/toy2d.txt'
	nested.sample(particles, fname)

	# build the posterior from the sample file and plot its marginals
	pos = posterior.posterior(fname)
	plotname = '/Users/jmrv/Documents/school/mit/research/software/nest/plots/toy2d.pdf'
	pos.plotMarginals(plotname)
Example #2
0
File: chmcRun.py Project: jmrv/chmc
def runOneChain(iChain,convConn):
	"""Set up a two-parameter poisson-drift model and evolve one chain
	until the convergence monitor signals through convConn.

	iChain:   integer chain index, used to name the output file
	convConn: pipe connection polled for the convergence signal
	"""
	# priors:					(min, max, logarithmic?)
	lambda0p = prior.uniform(1e-3, 1e2, True)
	dldtp = prior.uniform(-2, 2, False)
	g.priors = [lambda0p, dldtp]

	# initial parameters:		('name', starting value, frozen?)
	lambda0 = cob.param('lambda0', 5, False)	# poisson rate at start of dataset
	dldt = cob.param('dldt', 0.2, False)		# rate of change of poisson rate
	lambda0.setPrior(0)
	dldt.setPrior(1)
	g.initParams = [lambda0, dldt]

	# record which parameter indexes are free to vary
	g.thawedIdxs.extend(i for i, p in enumerate(g.initParams) if not p.isFrozen)

	# mass vector for parameters:
	g.masses = ones(len(g.initParams))		# (ensures double precision)

	# synthetic data: constant-rate poisson counts against a time axis
	counts = random.poisson(10, size=100)
	time = range(len(counts))
	data = transpose([time, counts])
	g.likelihood = likelihood.poissDrift(data)
	#g.likelihood = likelihood.multiGauss()

	# start the chain:
	fname = 'chains/chain-'+str(iChain)+'.txt'
	ch = cob.chain(fname, convConn)

	# evolve the chain:
	# (the convergence monitor only sends anything down this end of the
	#  pipe once all chains have converged. evolve until then.)
	while not convConn.poll():
		ch.evolve()
Example #3
0
def runToy1D():
	"""Run nested sampling on a single-parameter toy Gaussian likelihood."""
	np.random.seed(1)
	random.seed(1)

	# decorative splash of stars
	for _ in range(20):
		print(' '*random.randint(0,70) + '*')

	# prior: one parameter, uniform on [-5,5]
	aP = prior.uniform(-5,5)
	g.priors = [aP]

	# parameters ('name', x_init, isFrozen):
	a = particle.param('a', aP.sample(), False)
	g.initParams = [a]

	# link each parameter to its prior by list index (important!)
	for idx, p in enumerate(g.initParams):
		p.setPrior(idx)

	# record which parameter indexes are free to vary
	g.thawedIdxs.extend(idx for idx, p in enumerate(g.initParams) if not p.isFrozen)

	# likelihood: 1D toy
	g.likelihood = likelihood.toy1D()

	# light masses (0.05 each)
	g.masses = np.ones(len(g.initParams))*0.05

	# initial particle set, each seeded with a draw from the prior
	particles = []
	for _ in range(g.nParticles):
		pt = particle.particle(g.initParams, g.initStep)
		pt.assignPriorSample()
		particles.append(pt)

	# run the inference (no output file argument here):
	nested.sample(particles)
Example #4
0
def runOneChain(iChain,convConn):
	"""Configure the 10-parameter Sc-line spectral model over five epochs
	and evolve one chain until the convergence monitor signals.

	iChain:   integer chain index, used to name the output file
	convConn: pipe connection polled for the convergence signal
	"""
	# priors: uniform(min, max); isLog=True presumably samples uniformly in
	# log-space -- TODO confirm against prior.uniform
	normp = prior.uniform(10**-4,10**-2.5,isLog=True)			# powerlaw norm			
	alphap = prior.uniform(2.0,3.5)							# powerlaw power			
	nHp = prior.uniform(0.5,3.0)						# nH (absorption)	
	scareap = prior.uniform(1e-10,1e-6,isLog=True)		# Sc line area
	area1p = prior.uniform(10**-8,10**-5,isLog=True)		# nuisance line 1
	center1p = prior.uniform(3.5,3.75)					# 					
	sigma1p = prior.uniform(0.001,0.1)					# natural width 
	area2p = prior.uniform(10**-7,10**-5,isLog=True)		# nuisance line 2
	center2p = prior.uniform(3.75,4.0)					# 				
	sigma2p = prior.uniform(0.001,0.1)					# natural width 
	
	g.priors = [normp,alphap,nHp,scareap,area1p,center1p,sigma1p,area2p,center2p,sigma2p]

	# parameters ('name', x_init, isFrozen); names parallel g.priors by index
	names = ['norm', 'alpha', 'nH', 'Sc area', 'area1', 'center1', 'sigma1', 
				'area2', 'center2', 'sigma2']
	g.initParams = []
	for i in range(len(g.priors)):
		if i == 2:	# nH frozen at 2.0, set by hwang '12
			g.initParams.append(cob.param(names[i], 2.0, True))
		else:
			g.initParams.append(cob.param(names[i], g.priors[i].sample(), False))

	# link each parameter to its prior by list index
	for i,p in enumerate(g.initParams):
		p.setPrior(i)

	# which indices are thawed:
	for i,p in enumerate(g.initParams):
		if(not p.isFrozen):
			g.thawedIdxs.append(i)
	
	# mass vector for parameters (index 2 = nH keeps the default 1.0):
	g.masses = ones(len(g.initParams))		
	g.masses[0] = 1.0		# powerlaw norm
	g.masses[1] = 1.0		# powerlaw alpha
	#g.masses[2] = 1.0		# nH
	g.masses[3] = 1.0		# Sc area
	g.masses[4] = 1.0		# area 1
	g.masses[5] = 1.0		# center 1
	g.masses[6] = 0.5		# sigma 1
	g.masses[7] = 1.0		# area 2
	g.masses[8] = 1.0		# center 2
	g.masses[9] = 0.5		# sigma 2
	
	# data and likelihood: one dataset per epoch, each with the shared
	# background scaled by the area ratio Aeastegg/Abkgd
	epochs = ['4634','4635','4636','4637','4638']		
	#epochs = ['4637']
	datasets = []
	fnames = []
	
	for e in epochs:
		#froot = g.datadir+'cpoor'
		froot = g.datadir+'eastegg'+'-'+e
		fnames.append(froot)
		datasets.append(cob.dataset(froot, [3.4,5],
						g.datadir+'../bkgd', g.Aeastegg/g.Abkgd))

	g.likelihood = likelihood.ScLike(datasets, g.codedir+'models/phabs1e22.txt')
	
	# record the run configuration alongside the chains
	g.dumpInfo(g.initParams, g.priors, g.masses, g.likelihood,fnames=fnames)

	# start the chain:
	fname = g.chaindir+'chain-'+str(iChain)+'.txt'
	ch = cob.chain(fname,convConn)
	
	# evolve the chain:
	# (the convergence monitor will only send anything to this end of 
	#  the pipe once all chains have converged. evolve until then.)
	while not convConn.poll():
		ch.evolve()
Example #5
0
def runSc():
	"""Run nested sampling of the Sc-line spectral model on the 'column'
	dataset and plot the posterior marginals."""
	np.random.seed(0)
	random.seed(0)

	# decorative splash of stars
	for _ in range(20):
		print(' '*random.randint(0,70) + '*')

	# priors (isLog=True for quantities spanning decades):
	normP = prior.uniform(1e-2, 1, isLog=True)			# powerlaw norm
	alphaP = prior.uniform(2, 4)						# powerlaw power
	nHP = prior.uniform(1.0, 3.0)						# nH (absorption)
	scAreaP = prior.uniform(1e-6, 1e-3, isLog=True)		# Sc line area
	area1P = prior.uniform(1e-6, 1e-3, isLog=True)		# nuisance line 1
	center1P = prior.uniform(3.5, 3.7)
	sigma1P = prior.uniform(0.001, 0.010)				# natural width
	area2P = prior.uniform(1e-5, 1e-4, isLog=True)		# nuisance line 2
	center2P = prior.uniform(3.75, 4.0)
	sigma2P = prior.uniform(0.001, 0.010)

	# nuisance line 1 is currently excluded from the fit
	g.priors = [
		normP, alphaP,
		nHP,
		scAreaP,
		#area1P, center1P, sigma1P,
		area2P, center2P, sigma2P,
	]

	# parameters ('name', x_init, isFrozen):
	norm = particle.param('norm', 5e-2, False)
	alpha = particle.param('alpha', 2.9, False)
	nH = particle.param('nH', 2.0, True)			# frozen
	scarea = particle.param('Sc area', 1e-5, False)
	area1 = particle.param('area1', 1e-5, False)	# built but excluded below
	center1 = particle.param('center1', 3.6, False)
	sigma1 = particle.param('sigma1', 0.005, False)
	area2 = particle.param('area2', 1.8e-5, False)
	center2 = particle.param('center2', 3.87, False)
	sigma2 = particle.param('sigma2', 0.005, False)
	g.initParams = [
		norm, alpha,
		nH,
		scarea,
		#area1, center1, sigma1,
		area2, center2, sigma2,
	]

	# link each parameter to its prior by list index
	for idx, p in enumerate(g.initParams):
		p.setPrior(idx)

	# record which parameter indexes are free to vary
	g.thawedIdxs.extend(idx for idx, p in enumerate(g.initParams) if not p.isFrozen)

	# mass vector for parameters:
	g.masses = np.ones(len(g.initParams))*0.1

	# data and likelihood:
	g.likelihood = likelihood.ScLike(
		g.datadir+'column.warf',
		g.datadir+'column.wrmf',
		g.datadir+'column.pi',
		g.modeldir+'phabs1e22.txt',
		[3.4,5])

	# initial particle set, each seeded with a draw from the prior
	particles = []
	for _ in range(g.nParticles):
		pt = particle.particle(g.initParams)
		pt.assignPriorSample()
		particles.append(pt)

	# run the inference, writing samples to disk:
	fname = g.sampleDir+'samples.txt'
	nested.sample(particles, fname)

	# build the posterior from the sample file and plot its marginals
	pos = posterior.posterior(fname)
	plotname = g.plotDir+'sctest.pdf'
	pos.plotMarginals(plotname)
Example #6
0
# ---------------

# ~~~ todo and wishlist ~~~
# hey, likelihoods - separate class
# hey, plotting evolution to verify chmc
# hey, parallel processing and convergence
# hey, debugging gui
# ~~~~~~~~~~~~~~~~~~~~~~~~~

# chmc-specific modules:
import chainObject as cob
import prior
from posterior import *

# initial parameters: build priors and seed param objects for the chain
kTp = prior.uniform(0.5,8.0,False)			# (min, max, logarithmic?)
kT = cob.param('kT',kTp,1,False)			# (prior, starting value, frozen?)
# NOTE(review): this cob.param signature ('name', prior, value, frozen) differs
# from other call sites in this project that pass ('name', value, frozen) and
# attach priors via setPrior() -- confirm which API version this script targets.
eltp = prior.uniform(1e-6,1000,True)
elt = cob.param('O',eltp,2,True)			# 'elemental abundance' (frozen)
taup = prior.uniform(1e9,1e12,True)
tau = cob.param('tau',taup,1.2e10,False)

initParams = [kT,elt,tau]
masses = ones(len(initParams))				# unit mass per parameter

# start the chain:
fp = open('chains/chain.txt','w')			# NOTE(review): never closed here -- consider a 'with' block
ch = cob.chain(initParams,fp,masses)		# (seed params, file pointer, masses)

# evolve the chain:
for i in range(1000):
Example #7
0
from numpy import *
from matplotlib.pyplot import *

# chmc-specific modules:
from posterior import *
import chainObject as cob
import prior
import chmcGlobals as g
import likelihood

# priors for the 14-parameter Cas A / 44Ti spectral model.
# prior.uniform(min, max, isLog=True) draws uniformly in log-space;
# prior.normal(mean, sigma) is a gaussian prior.
normp = prior.uniform(1e-4,1,isLog=True)			# powerlaw norm			
alphap = prior.uniform(1,5)							# powerlaw power			
nhp = prior.uniform(1e-1,1e1,isLog=True)			# nH (absorption)			~ tighten? 
dp = prior.normal(3.4,0.2)							# distance to Cas A, kpc	
													# reed 1995, apj 440, 706
agep = prior.uniform(2011-1620,2011-1700)			# Cas A's age				~ weak beta
													# NOTE(review): bounds are (391, 311),
													# i.e. min > max -- verify prior.uniform
													# tolerates swapped bounds
timassp = prior.uniform(1e-8,1e0,isLog=True)		# mass of 44Ti, in M_sol
sccenterp = prior.normal(4.1,0.2)					# center of sc line
scsigmap = prior.uniform(0,1)						# width of sc line		~ tighter, normal?
area1p = prior.uniform(1e-8,1,isLog=True)			# nuisance line 1
center1p = prior.normal(3.55,0.2)					# BUG FIX: was uniform(3.55,0.2), an
													# inverted (min > max) range; (mean, sigma)
													# matches the normal priors used for the
													# other line centers (sccenterp, center2p)
sigma1p = prior.uniform(0,1)						#						~ tighter, normal?	
area2p = prior.uniform(1e-8,1,isLog=True)			# nuisance line 2
center2p = prior.normal(3.75,0.2)					#					
sigma2p = prior.uniform(0,1)						#						~ tighter, normal?	
g.priors = [	normp,alphap,nhp,
				dp,agep,timassp,
				sccenterp,scsigmap,
				area1p,center1p,sigma1p,
				area2p,center2p,sigma2p]
Example #8
0
File: chmcRun.py Project: jmrv/chmc
def runOneChain(iChain,convConn):
	"""Configure the 10-parameter Sc-line spectral model (single epoch 4637)
	and evolve one chain until the convergence monitor signals.

	iChain:   integer chain index, used to name the output file
	convConn: pipe connection polled for the convergence signal
	"""
	# priors: uniform(min, max); isLog=True presumably samples uniformly in
	# log-space -- TODO confirm against prior.uniform
	normp = prior.uniform(1e-6,1e1,isLog=True)			# powerlaw norm			
	alphap = prior.uniform(1,5)							# powerlaw power			

	nHp = prior.uniform(0.5,3.0)						# nH (absorption)	
														# based on hwang+ 2012

	scareap = prior.uniform(1e-9,1e-3,isLog=True)		# Sc line area

	area1p = prior.uniform(1e-9,1e-3,isLog=True)		# nuisance line 1
	center1p = prior.uniform(3.5,3.75)					# 					
	sigma1p = prior.uniform(0.001,0.1)					# natural width 
	
	area2p = prior.uniform(1e-9,1e-2,isLog=True)		# nuisance line 2
	center2p = prior.uniform(3.75,4.0)					# 				
	sigma2p = prior.uniform(0.001,0.1)					# natural width 
	
	g.priors = [	normp,alphap, \
					nHp, \
					scareap, \
					area1p,center1p,sigma1p, \
					area2p,center2p,sigma2p]

	# parameters ('name', x_init, isFrozen); order parallels g.priors
	norm = cob.param('norm',10**-1.4,False)
	alpha = cob.param('alpha',3.3,False)

	nH = cob.param('nH',2.0,True)		# frozen at 2.0 (hwang+ 2012, per prior note)

	scarea = cob.param('Sc area',1e-7,False)

	area1 = cob.param('area1',10**-5.2,False)
	center1 = cob.param('center1',3.69,False)
	sigma1 = cob.param('sigma1',0.005,False)

	area2 = cob.param('area2',10**-4.5,False)
	center2 = cob.param('center2',3.86,False)
	sigma2 = cob.param('sigma2',0.01,False)
	g.initParams = [	norm,alpha, \
						nH, \
						scarea, \
						area1,center1,sigma1, \
						area2,center2,sigma2]

	# link each parameter to its prior by list index
	for i,p in enumerate(g.initParams):
		p.setPrior(i)

	# which indexes are thawed:
	for i,p in enumerate(g.initParams):
		if(not p.isFrozen):
			g.thawedIdxs.append(i)
	
	# mass vector for parameters (index 2 = nH keeps the default 1.0):
	g.masses = ones(len(g.initParams))		
	g.masses[0] = 0.2		# powerlaw norm
	g.masses[1] = 0.2		# powerlaw alpha
	#g.masses[2] = 0.1		# nH
	g.masses[3] = 0.01		# Sc area
	g.masses[4] = 0.01		# area 1
	g.masses[5] = 0.01		# center 1
	g.masses[6] = 0.01		# sigma 1
	g.masses[7] = 0.01		# area 2
	g.masses[8] = 0.01		# center 2
	g.masses[9] = 0.01		# sigma 2
	
	# data and likelihood: single-epoch dataset with the shared background
	# scaled by the area ratio Aeastegg/Abkgd
	#epochs = ['4634','4635','4636','4637','4638']		
	epochs = ['4637']		
	datasets = []
	fnames = []
	
	for e in epochs:
		#froot = g.datadir+'fe'
		froot = g.datadir+'eastegg'+'-'+e
		fnames.append(froot)
		datasets.append(cob.dataset(froot, [3.4,5],
						g.datadir+'../bkgd', g.Aeastegg/g.Abkgd))
	g.likelihood = likelihood.ScLike(datasets, g.codedir+'models/phabs1e22.txt')
	
	# record the run configuration alongside the chains
	g.dumpInfo(g.initParams, g.priors, g.masses, g.likelihood,fnames=fnames)

	# start the chain:
	fname = g.chaindir+'chain-'+str(iChain)+'.txt'
	ch = cob.chain(fname,convConn)
	
	# evolve the chain:
	# (the convergence monitor will only send anything to this end of 
	#  the pipe once all chains have converged. evolve until then.)
	while not convConn.poll():
		ch.evolve()