def get_overall_best_fit(self, pars, **specargs):
    """Returns the overall best fit, the chi-square and the number of
    degrees of freedom."""
    if isinstance(pars, str):
        pars = [pars]

    for s, v in enumerate(self.p.get("data_vectors")):
        d = DataManager(self.p, v, self.cosmo, all_data=False)
        self.d = d
        lik = Likelihood(self.p.get('params'),
                         d.data_vector, d.covar, self._th,
                         template=d.templates)
        sam = Sampler(lik.lnprob, lik.p0, lik.p_free_names,
                      self.p.get_sampler_prefix(v['name']),
                      self.p.get('mcmc'))

        sam.get_chain()
        # Best fit: the maximum-probability sample in the chain.
        sam.update_p0(sam.chain[np.argmax(sam.probs)])
        kwargs = lik.build_kwargs(sam.p0)

        # Mean redshift of the sample, weighted by its N(z).
        w = kwargs["width"]
        zz, NN = self._get_dndz(d.tracers[0][0].dndz, w)
        zmean = np.average(zz, weights=NN)
        kwargs["z"] = zmean

        # Goodness of fit: chi^2, d.o.f. and probability-to-exceed.
        kwargs["chi2"] = lik.chi2(sam.p0)
        all_pars = self.p.p.get("params")
        nvary = np.sum([par["vary"] for par in all_pars if "vary" in par])
        kwargs["dof"] = len(lik.dv) - nvary
        kwargs["PTE"] = 1 - chi2.cdf(kwargs["chi2"], kwargs["dof"])

        # Stack the tomographic bins row by row.
        if s == 0:
            keys = ["z", "chi2", "dof", "PTE"] + pars
            OV_BF = {k: kwargs[k] for k in keys}
        else:
            for k in keys:
                OV_BF[k] = np.vstack((OV_BF[k], kwargs[k]))

    return OV_BF
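# Self-contained sketch of the goodness-of-fit numbers computed above:
# chi^2, effective degrees of freedom, and the probability-to-exceed (PTE).
# All numbers are illustrative placeholders.
import numpy as np
from scipy.stats import chi2 as chi2_dist

n_data = 100      # length of the data vector (illustrative)
n_free = 4        # number of varied parameters (illustrative)
chi2_val = 107.3  # best-fit chi-square (illustrative)

dof = n_data - n_free
pte = 1 - chi2_dist.cdf(chi2_val, dof)  # survival probability of chi^2_dof
print("chi2/dof = %.1f/%d, PTE = %.3f" % (chi2_val, dof, pte))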
fname_params = "params_wnarrow.yml" p = ParamRun(fname_params) kwargs = p.get_cosmo_pars() cosmo = p.get_cosmo() hmc = get_hmcalc(cosmo, **kwargs) cosmo_vary = COSMO_VARY(p) hm_correction = HalomodCorrection(cosmo) v=None for s,vv in enumerate(p.get("data_vectors")): if vv['name']==sample: v=vv dat = DataManager(p, v, cosmo, all_data=False) gat = DataManager(p, v, cosmo, all_data=True) def th(pars,d): if not cosmo_vary: cosmo_fid = cosmo hmc_fid = hmc else: cosmo_fid = COSMO_ARGS(kwargs) hmc_fid = get_hmcalc(cosmo_fid, **kwargs) return get_theory(p, d, cosmo_fid, hmc_fid, hm_correction=hm_correction, selection=None,**pars) likd = Likelihood(p.get('params'), dat.data_vector, dat.covar, th, template=dat.templates) likg = Likelihood(p.get('params'), gat.data_vector, gat.covar, th, template=gat.templates)
def get_chains(self, pars, **specargs):
    """Returns a dictionary containing the chains of `pars`."""

    def bias_one(p0, num):
        """Calculates the halo model bias for a set of parameters."""
        bb = hm_bias(self.cosmo, 1 / (1 + zarr),
                     d.tracers[num][1].profile,
                     **lik.build_kwargs(p0))
        return bb

    def bias_avg(num, skip):
        """Calculates the halo model bias of a profile, from a chain."""
        # Parallel alternative:
        # from pathos.multiprocessing import ProcessingPool as Pool
        # with Pool() as pool:
        #     bb = pool.map(lambda p0: bias_one(p0, num),
        #                   sam.chain[::skip])
        bb = list(map(lambda p0: bias_one(p0, num), sam.chain[::skip]))
        bb = np.mean(np.array(bb), axis=1)
        return bb

    # Path to a saved chain.
    fname = lambda s: self.p.get("global")["output_dir"] + "/sampler_" + \
                      self.p.get("mcmc")["run_name"] + "_" + s + "_chain"

    if isinstance(pars, str):
        pars = [pars]

    # Load any chains that have already been saved to disk.
    preCHAINS = {}
    fid_pars = pars.copy()
    for par in pars:
        try:
            preCHAINS[par] = np.load(fname(par) + ".npy")
            fid_pars.remove(par)
            print("Found saved chains for %s." % par)
        except FileNotFoundError:
            continue

    if ("bg" in fid_pars) or ("by" in fid_pars):
        # Thin the chain for the computationally expensive hm_bias.
        b_skip = specargs.get("reduce_by_factor")
        if b_skip is None:
            print("'reduce_by_factor' not given. Defaulting to 100.")
            b_skip = 100

    for s, v in enumerate(self.p.get("data_vectors")):
        d = DataManager(self.p, v, self.cosmo, all_data=False)
        self.d = d
        lik = Likelihood(self.p.get('params'),
                         d.data_vector, d.covar, self._th,
                         template=d.templates)
        sam = Sampler(lik.lnprob, lik.p0, lik.p_free_names,
                      self.p.get_sampler_prefix(v['name']),
                      self.p.get('mcmc'))

        sam.get_chain()
        chains = lik.build_kwargs(sam.chain.T)
        sam.update_p0(sam.chain[np.argmax(sam.probs)])
        kwargs = lik.build_kwargs(sam.p0)

        w = kwargs["width"]
        zz, NN = self._get_dndz(d.tracers[0][0].dndz, w)
        zmean = np.average(zz, weights=NN)
        chains["z"] = zmean
        if "probs" in pars:
            chains["probs"] = sam.probs

        # Redshift grid over which hm_bias is averaged.
        if ("bg" in fid_pars) or ("by" in fid_pars):
            sigz = np.sqrt(np.sum(NN * (zz - zmean)**2) / np.sum(NN))
            zarr = np.linspace(zmean - sigz, zmean + sigz, 10)
        if "bg" in fid_pars:
            chains["bg"] = bias_avg(num=0, skip=b_skip)
        if "by" in fid_pars:
            chains["by"] = bias_avg(num=1, skip=b_skip)

        # Construct tomographic dictionary.
        if s == 0:
            keys = ["z"] + fid_pars
            CHAINS = {k: chains[k] for k in keys}
        else:
            for k in keys:
                CHAINS[k] = np.vstack((CHAINS[k], chains[k]))

    # Save the bias chains, so they don't have to be recomputed.
    if "bg" in fid_pars:
        np.save(fname("bg"), CHAINS["bg"])
    if "by" in fid_pars:
        np.save(fname("by"), CHAINS["by"])

    return {**preCHAINS, **CHAINS}
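# Self-contained sketch of the N(z) summary statistics used above: the
# N(z)-weighted mean redshift, its weighted spread, and the redshift grid
# on which hm_bias is averaged. The dN/dz below is a toy placeholder.
import numpy as np

zz = np.linspace(0., 1., 256)               # redshift grid
NN = np.exp(-0.5 * ((zz - 0.3) / 0.05)**2)  # toy dN/dz

zmean = np.average(zz, weights=NN)
sigz = np.sqrt(np.sum(NN * (zz - zmean)**2) / np.sum(NN))
zarr = np.linspace(zmean - sigz, zmean + sigz, 10)
print(zmean, sigz)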
print("Loading chains...") chains = q.get_chains() chains.pop("z") print("Computing chain autocorrelation times...") with warnings.catch_warnings(record=False) as w: warnings.filterwarnings("ignore") taus = q.get_tau(chains) zmeans = [] bmeans = [] sbmeans = [[],[]] # min and max error bar for s, v in enumerate(p.get('data_vectors')): print(v['name']) # Construct data vector and covariance d = DataManager(p, v) z, nz = np.loadtxt(d.tracers[0][0].dndz, unpack=True) zmean = np.average(z, weights=nz) sigz = np.sqrt(np.sum(nz * (z - zmean)**2) / np.sum(nz)) zmeans.append(zmean) # Theory predictor wrapper def th(pars): if not cosmo_vary: pars_fid = pars cosmo_fid = cosmo hmc_fid = hmc else: pars_fid = {**p.get_cosmo_pars(), **pars} cosmo_fid = COSMO_ARGS(pars_fid) hmc_fid = get_hmcalc(cosmo_fid, **pars_fid)
help="specify for no MPI", action="store_false") parser.add_argument("--jk-id", type=int, help="JK region") args = parser.parse_args() fname_params = args.fname_params p = ParamRun(fname_params) jk_region = args.jk_id # JK id hmc = p.get_hmc() # halo model calculator hm_correction = p.get_hm_correction() # halo model correction v = p.get_data_vector(args.data_vector) # data vector print(v['name']) # Construct data vector and covariance d = DataManager(p, v, jk_region=jk_region) # Theory predictor wrapper def th(kwargs): """Theory for free cosmology.""" cosmo_use = p.get_cosmo(pars=kwargs) # optimized internally return get_theory(p, d, cosmo_use, hmc, hm_correction=hm_correction, **kwargs) # Set up likelihood
if sel == 'erf':
    sel = selection_planck_erf
elif sel == 'tophat':
    sel = selection_planck_tophat
elif sel == 'none':
    sel = None

par = []
for v in p.get('data_vectors'):
    # if (v['name'] != o.data_name) and (o.data_name != 'all'):
    #     continue
    print(v['name'])

    # Construct data vector and covariance
    d = DataManager(p, v, cosmo, jk_region=jk_region)

    # Theory predictor wrapper
    def th(pars):
        return get_theory(p, d, cosmo,
                          hm_correction=hm_correction,
                          selection=sel, **pars)

    # Set up likelihood
    lik = Likelihood(p.get('params'), d.data_vector, d.covar, th,
def get_chains(self, pars=None, **specargs):
    """Returns a dictionary containing the chains of `pars`."""
    # If `pars` is not set, collect chains for all free parameters.
    if pars is None:
        pars = [par["name"] for par in self.p.get("params")
                if par.get("vary")]

    def bias_one(p0, num):
        """Calculates the halo model bias for a set of parameters."""
        kw = lik.build_kwargs(p0)
        if self.cosmo_vary:
            cosmo = COSMO_ARGS(kw)
            hmc = get_hmcalc(cosmo,
                             **{"mass_function": self.p.get_massfunc(),
                                "halo_bias": self.p.get_halobias()})
        else:
            cosmo = self.cosmo
            hmc = self.hmc
        return hm_bias(cosmo, hmc, 1 / (1 + zarr),
                       d.tracers[num][1], **kw)

    def bias_avg(num, skip):
        """Calculates the halo model bias of a profile, from a chain."""
        from pathos.multiprocessing import ProcessingPool as Pool
        with Pool() as pool:
            bb = pool.map(lambda p0: bias_one(p0, num),
                          sam.chain[::skip])
        # Serial fallback:
        # bb = list(map(lambda p0: bias_one(p0, num), sam.chain[::skip]))
        bb = np.mean(np.array(bb), axis=1)
        return bb

    # Path to a saved chain.
    fname = lambda s: self.p.get("global")["output_dir"] + "/sampler_" + \
                      self.p.get("mcmc")["run_name"] + "_" + s + "_chain"

    if isinstance(pars, str):
        pars = [pars]

    # Load any chains that have already been saved to disk.
    preCHAINS = {}
    fid_pars = pars.copy()
    for par in pars:
        try:
            preCHAINS[par] = np.load(fname(par) + ".npy")
            fid_pars.remove(par)
            print("Found saved chains for %s." % par)
        except FileNotFoundError:
            continue

    if ("bg" in fid_pars) or ("by" in fid_pars) or ("bk" in fid_pars):
        # Thin the sample (for the computationally expensive hm_bias).
        b_skip = specargs.get("thin")
        if b_skip is None:
            print("Chain 'thin' factor not given. Defaulting to 100.")
            b_skip = 100

    for s, v in enumerate(self.p.get("data_vectors")):
        print(v["name"])
        d = DataManager(self.p, v, all_data=False)
        self.d = d
        lik = Likelihood(self.p.get('params'),
                         d.data_vector, d.covar, self._th,
                         template=d.templates)
        sam = Sampler(lik.lnprob, lik.p0, lik.p_free_names,
                      self.p.get_sampler_prefix(v['name']),
                      self.p.get('mcmc'))

        sam.get_chain()
        chains = lik.build_kwargs(sam.chain.T)
        sam.update_p0(sam.chain[np.argmax(sam.probs)])
        kwargs = lik.build_kwargs(sam.p0)

        w = kwargs["width"]
        zz, NN = self._get_dndz(d.tracers[0][0].dndz, w)
        zmean = np.average(zz, weights=NN)
        chains["z"] = zmean
        if "probs" in pars:
            chains["probs"] = sam.probs

        # Redshift grid over which hm_bias is averaged.
        if ("bg" in fid_pars) or ("by" in fid_pars) or ("bk" in fid_pars):
            sigz = np.sqrt(np.sum(NN * (zz - zmean)**2) / np.sum(NN))
            zarr = np.linspace(zmean - sigz, zmean + sigz, 10)
        if "bg" in fid_pars:
            chains["bg"] = bias_avg(num=0, skip=b_skip)
        if "by" in fid_pars:
            chains["by"] = bias_avg(num=1, skip=b_skip)
        if "bk" in fid_pars:
            chains["bk"] = bias_avg(num=2, skip=b_skip)

        # Construct tomographic dictionary.
        if s == 0:
            keys = ["z"] + fid_pars
            CHAINS = {k: [chains[k]] for k in keys}
        else:
            for k in keys:
                CHAINS[k].append(chains[k])

    # Save the bias chains, so they don't have to be recomputed.
    if "bg" in fid_pars:
        np.save(fname("bg"), CHAINS["bg"])
    if "by" in fid_pars:
        np.save(fname("by"), CHAINS["by"])
    if "bk" in fid_pars:
        np.save(fname("bk"), CHAINS["bk"])

    return {**preCHAINS, **CHAINS}
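# Sketch of the thin-and-parallel-map pattern used in bias_avg above
# (mirroring the pathos usage in the source): evaluate an expensive
# per-sample function on every `skip`-th chain sample, then average over
# the redshift grid. `expensive` is a stand-in for bias_one.
import numpy as np
from pathos.multiprocessing import ProcessingPool as Pool

def expensive(p0):
    # Stand-in: returns a length-10 array, like hm_bias on a 10-point z grid.
    return np.full(10, np.sum(p0))

chain = np.random.rand(10000, 5)  # illustrative (n_samples, n_params) chain
skip = 100                        # keep one in every 100 samples

with Pool() as pool:
    bb = pool.map(expensive, chain[::skip])
bb = np.mean(np.array(bb), axis=1)  # one averaged bias per retained sample
print(bb.shape)                     # -> (100,)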
if sel is not None:
    if sel == 'erf':
        sel = selection_planck_erf
    elif sel == 'tophat':
        sel = selection_planck_tophat
    elif sel == 'none':
        sel = None

zmeans = []
bmeans = []
sbmeans = [[], []]  # min and max error bar
for v in p.get('data_vectors'):
    print(v['name'])

    # Construct data vector and covariance
    d = DataManager(p, v, cosmo)
    z, nz = np.loadtxt(d.tracers[0][0].dndz, unpack=True)
    zmean = np.average(z, weights=nz)
    sigz = np.sqrt(np.sum(nz * (z - zmean)**2) / np.sum(nz))
    zmeans.append(zmean)

    # Theory predictor wrapper
    def th(pars):
        return get_theory(p, d, cosmo,
                          return_separated=False,
                          hm_correction=hm_correction,
                          selection=sel, **pars)
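# Illustrative only: selection_planck_erf is project code whose exact form
# is not shown in these snippets, but an erf-type completeness/selection
# function generically looks like a smoothed step, e.g. in log-mass.
# All numbers below are placeholders.
import numpy as np
from scipy.special import erf

def selection_erf_sketch(lm, lm_cut=14.0, sigma_lm=0.25):
    """~0 below lm_cut, ~1 above, with transition width sigma_lm."""
    return 0.5 * (1 + erf((lm - lm_cut) / (np.sqrt(2) * sigma_lm)))

print(selection_erf_sketch(np.array([13.5, 14.0, 14.5])))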