def runToy2D():
	"""Run nested sampling on the 2-D Gaussian toy likelihood.

	Seeds both RNGs for reproducibility, configures the global state
	module ``g`` (priors, parameters, likelihood, masses), builds the
	initial particle set, runs the sampler, and plots the marginal
	posteriors to a PDF.
	"""
	np.random.seed(0)
	random.seed(0)

	# decorative banner: 20 asterisks at random columns
	for _ in range(20):
		print(' '*random.randint(0,70) + '*')

	# priors: both dimensions uniform on [-5, 5] for the 2-D Gaussian toy
	priorA = prior.uniform(-5,5)
	priorB = prior.uniform(-5,5)
	g.priors = [priorA, priorB]

	# parameters ('name', x_init, isFrozen): start each at a prior draw
	paramA = particle.param('a', priorA.sample(), False)
	paramB = particle.param('b', priorB.sample(), False)
	g.initParams = [paramA, paramB]

	# important! link each parameter to its prior by index
	for idx, par in enumerate(g.initParams):
		par.setPrior(idx)

	# record which parameter indexes are thawed (free to vary)
	g.thawedIdxs.extend(
		idx for idx, par in enumerate(g.initParams) if not par.isFrozen)

	# choose likelihood:
	g.likelihood = likelihood.toyGauss()

	# unit mass for every parameter
	g.masses = np.ones(len(g.initParams))

	# create the initial particle set, each seeded from the prior
	particles = []
	for _ in range(g.nParticles):
		walker = particle.particle(g.initParams)
		walker.assignPriorSample()
		particles.append(walker)

	# set the inference running, then plot the marginals:
	fname = '/Users/jmrv/Documents/school/mit/research/software/nest/samples/toy2d.txt'
	nested.sample(particles,fname)

	pos = posterior.posterior(fname)
	plotname = '/Users/jmrv/Documents/school/mit/research/software/nest/plots/toy2d.pdf'
	pos.plotMarginals(plotname)
def runToy1D():
	"""Run nested sampling on the 1-D Gaussian toy likelihood.

	Mirrors runToy2D but with a single parameter, smaller parameter
	masses (0.05), an explicit initial step (``g.initStep``) per
	particle, and no posterior plotting.
	"""
	np.random.seed(1)
	random.seed(1)

	# decorative banner: 20 asterisks at random columns
	for _ in range(20):
		print(' '*random.randint(0,70) + '*')

	# prior: single parameter, uniform on [-5, 5]
	priorA = prior.uniform(-5,5)
	g.priors = [priorA]

	# parameter ('name', x_init, isFrozen): start at a prior draw
	paramA = particle.param('a', priorA.sample(), False)
	g.initParams = [paramA]

	# important! link each parameter to its prior by index
	for idx, par in enumerate(g.initParams):
		par.setPrior(idx)

	# record which parameter indexes are thawed (free to vary)
	g.thawedIdxs.extend(
		idx for idx, par in enumerate(g.initParams) if not par.isFrozen)

	# choose likelihood:
	g.likelihood = likelihood.toy1D()

	# small mass for the single parameter
	g.masses = np.ones(len(g.initParams))*0.05

	# create the initial particle set, each seeded from the prior
	particles = []
	for _ in range(g.nParticles):
		walker = particle.particle(g.initParams, g.initStep)
		walker.assignPriorSample()
		particles.append(walker)

	# set the inference running:
	nested.sample(particles)
def runSc():
	"""Fit the Sc-line spectral model with nested sampling.

	Model: absorbed powerlaw + Sc line + one nuisance Gaussian line
	(a second nuisance line is currently disabled — see the commented
	entries below). Configures the global state module ``g``, runs the
	sampler on the data in ``g.datadir``, and plots the marginal
	posteriors to ``g.plotDir``.
	"""
	np.random.seed(0)
	random.seed(0)

	# decorative banner: 20 asterisks at random columns
	for i in range(20):
		print(' '*random.randint(0,70) + '*')

	# priors:
	normP = prior.uniform(1e-2,1,isLog=True)		# powerlaw norm
	alphaP = prior.uniform(2,4)						# powerlaw power
	nHP = prior.uniform(1.0,3.0)					# nH (absorption)
	scAreaP = prior.uniform(1e-6, 1e-3, isLog=True)	# Sc line area
	# nuisance line 1 is disabled; kept for reference (was excluded
	# from g.priors/g.initParams but its constructions were left live)
	#area1P = prior.uniform(1e-6, 1e-3, isLog=True)	# nuisance line 1
	#center1P = prior.uniform(3.5, 3.7)
	#sigma1P = prior.uniform(0.001, 0.010)			# natural width
	area2P = prior.uniform(1e-5, 1e-4, isLog=True)	# nuisance line 2
	center2P = prior.uniform(3.75, 4.0)
	sigma2P = prior.uniform(0.001, 0.010)
	g.priors = [ normP, alphaP, \
		nHP, \
		scAreaP, \
		#area1P,center1P,sigma1P, \
		area2P,center2P,sigma2P ]

	# parameters ('name', x_init, isFrozen): nH is frozen at 2.0
	norm = particle.param('norm',5e-2,False)
	alpha = particle.param('alpha',2.9,False)
	nH = particle.param('nH',2.0,True)
	scarea = particle.param('Sc area',1e-5,False)
	#area1 = particle.param('area1',1e-5,False)
	#center1 = particle.param('center1',3.6,False)
	#sigma1 = particle.param('sigma1',0.005,False)
	area2 = particle.param('area2',1.8e-5,False)
	center2 = particle.param('center2',3.87,False)
	sigma2 = particle.param('sigma2',0.005,False)
	g.initParams = [ norm,alpha, \
		nH, \
		scarea, \
		#area1,center1,sigma1, \
		area2,center2,sigma2]

	# important! link each parameter to its prior by index
	for i,p in enumerate(g.initParams):
		p.setPrior(i)

	# record which parameter indexes are thawed (free to vary)
	for i,p in enumerate(g.initParams):
		if(not p.isFrozen):
			g.thawedIdxs.append(i)

	# mass vector for parameters:
	g.masses = np.ones(len(g.initParams))*0.1

	# data and likelihood: ARF, RMF, spectrum, absorption model, and
	# the fit energy band [3.4, 5] (presumably keV — TODO confirm)
	g.likelihood = likelihood.ScLike( g.datadir+'column.warf',
		g.datadir+'column.wrmf',
		g.datadir+'column.pi',
		g.modeldir+'phabs1e22.txt',
		[3.4,5])

	# create the initial particle set, each seeded from the prior
	particles = []
	for i in range(g.nParticles):
		particles.append(particle.particle(g.initParams))
		particles[i].assignPriorSample()

	# set the inference running, then plot the marginals:
	fname = g.sampleDir+'samples.txt'
	nested.sample(particles,fname)

	pos = posterior.posterior(fname)
	plotname = g.plotDir+'sctest.pdf'
	pos.plotMarginals(plotname)