def test_massfn():
    # Compare the hmvec Tinker mass function against szar, hmf and cluster_toolkit.
    # Relies on this file's module-level imports for: numpy as np, hmvec as hm,
    # and an io module providing Plotter.
    from szar import counts
    import hmf
    from cluster_toolkit import massfunction

    zs = np.linspace(0., 3., 20)
    ms = np.geomspace(1e14, 1e17, 200)
    ks = np.geomspace(1e-3, 10, 101)

    from enlib import bench
    with bench.show("init"):
        hcos = hm.HaloModel(zs, ks, ms=ms, mass_function="tinker")

    # dn/dM at each redshift from hmf and cluster_toolkit
    dndM_ct2 = np.zeros((zs.size, ms.size))
    for i, z in enumerate(zs):
        h = hmf.MassFunction(z=z, Mmin=np.log10(ms.min()*hcos.h), Mmax=np.log10(ms.max()*hcos.h))
        if i == 0:
            dndM_ct = np.zeros((zs.size, h.dndm.size))
        dndM_ct[i, :] = h.dndm.copy()
        dndM_ct2[i, :] = massfunction.dndM_at_M(ms*hcos.h, hcos.ks_sigma2/hcos.h,
                                                hcos.sPzk[i]*hcos.h**3, hcos.om0)

    fsky = 0.4

    # szar prediction; named szar_hmf to avoid shadowing the hmf module imported above
    szar_hmf = counts.Halo_MF(counts.ClusterCosmology(hcos.params, skipCls=True), np.log10(ms), zs)
    nz_szar = szar_hmf.N_of_z() * fsky
    print(nz_szar, nz_szar.shape)
    # sys.exit()

    print(hcos.nzm.shape, hcos.bh.shape)
    bh = hcos.bh
    nzm = hcos.nzm

    # ims,ins = np.loadtxt("data/tinker2008Fig5.txt",unpack=True,delimiter=',')
    # pl = io.Plotter(xyscale='linlin')
    # pl.add(ims,ins,ls="--")
    # pl.add(np.log10(ms*hcos.h),np.log10(nzm[0,:]*ms**2./hcos.rho_matter_z(0.)))
    # pl.done()

    # Convert dN/dM/dz to dN/dz over fsky using the comoving volume factor
    chis = hcos.results.angular_diameter_distance(hcos.zs) * (1 + hcos.zs)
    nz = np.trapz(nzm, ms, axis=-1) * 4.*np.pi * chis**2. / hcos.results.h_of_z(hcos.zs) * fsky
    nz_ct = np.trapz(dndM_ct, h.m, axis=-1) * 4.*np.pi * chis**2. / hcos.results.h_of_z(hcos.zs) * fsky * hcos.h**3.
    nz_ct2 = np.trapz(dndM_ct2, ms, axis=-1) * 4.*np.pi * chis**2. / hcos.results.h_of_z(hcos.zs) * fsky * hcos.h**3.

    pl = io.Plotter()
    pl.add(zs, nz, label='hmvec')
    pl.add(szar_hmf.zarr, nz_szar, ls='--', label='szar')
    pl.add(zs, nz_ct, ls='-.', label='hmf')
    pl.add(zs, nz_ct2, ls='-.', label='ct')
    pl.done()

    # Total expected counts from each code
    n = np.trapz(nz, zs)
    print(n)
    n = np.trapz(nz_szar, szar_hmf.zarr)
    print(n)
    n = np.trapz(nz_ct, zs)
    print(n)
    n = np.trapz(nz_ct2, zs)
    print(n)
def __init__(self, Mexp_edges, z_edges, cosmo_params=None, const_params=None, low_acc=True):
    # Expected cluster counts N(M,z) on a grid of log10(mass) and redshift bin edges.
    # Relies on module-level imports for numpy as np, szar.counts and a cosmo module
    # providing defaultCosmology / defaultConstants.
    cc = counts.ClusterCosmology(
        cosmo.defaultCosmology if cosmo_params is None else cosmo_params,
        constDict=cosmo.defaultConstants if const_params is None else const_params,
        lmax=None, skipCls=True, skipPower=False, low_acc=low_acc)
    hmf = counts.Halo_MF(cc, Mexp_edges, z_edges, kh=None, powerZK=None,
                         kmin=1e-4, kmax=5., knum=200)
    delta = 200
    nmzdensity = hmf.N_of_Mz(hmf.M200, delta)
    # Multiply dN/dM/dz by the bin widths and the full-sky solid angle 4*pi
    Ndz = np.multiply(nmzdensity, np.diff(z_edges).reshape((1, z_edges.size - 1)))
    self.Nmz = np.multiply(Ndz, np.diff(10**Mexp_edges).reshape((Mexp_edges.size - 1, 1))) * 4. * np.pi
    self.Mexp_edges = Mexp_edges
    self.z_edges = z_edges
    self.Medges = 10.**self.Mexp_edges
    self.Mcents = (self.Medges[1:] + self.Medges[:-1]) / 2.
    self.Mexpcents = np.log10(self.Mcents)
    self.zcents = (self.z_edges[1:] + self.z_edges[:-1]) / 2.
    self.ntot = self.Nmz.sum()
    self.cc = cc
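# Hypothetical usage sketch (not from the source): the enclosing class is not shown in
# this section, so "ClusterCountGrid" below is a stand-in name for whatever class this
# __init__ belongs to; the szar default cosmology is used since no parameters are passed.
#
#   Mexp_edges = np.linspace(13.5, 15.7, 23)   # log10(M) bin edges
#   z_edges = np.arange(0.05, 2.05, 0.1)       # redshift bin edges
#   grid = ClusterCountGrid(Mexp_edges, z_edges)
#   print(grid.ntot)                           # total expected full-sky count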
# MPI
comm = mpi.MPI.COMM_WORLD
rank = comm.Get_rank()
numcores = comm.Get_size()

# Paths
PathConfig = io.load_path_config()
pout_dir = PathConfig.get("paths", "plots") + "qest_hdv_" + str(args.noise) + "_"
io.mkdir(pout_dir, comm)

# Theory
theory_file_root = "../alhazen/data/Aug6_highAcc_CDM"
cc = counts.ClusterCosmology(skipCls=True)
theory = cosmology.loadTheorySpectraFromCAMB(theory_file_root, unlensedEqualsLensed=False,
                                             useTotal=False, TCMB=2.7255e6, lpad=9000,
                                             get_dimensionless=False)

# Geometry
shape, wcs = maps.rect_geometry(width_arcmin=args.arc, px_res_arcmin=args.pix, pol=False)
modlmap = enmap.modlmap(shape, wcs)
modrmap = enmap.modrmap(shape, wcs)

# Binning
bin_edges = np.arange(0., 20.0, args.pix*2)
binner = stats.bin2D(modrmap*60.*180./np.pi, bin_edges)

# Noise model
noise_uK_rad = args.noise*np.pi/180./60.
normfact = np.sqrt(np.prod(enmap.pixsize(shape, wcs)))
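# Note (assumption, not shown in this script): with normfact defined as above, a common
# way to realize white noise at the requested map depth is
#   noise_map = np.random.standard_normal(shape) * noise_uK_rad / normfact
# i.e. the per-pixel standard deviation is the noise level in uK-radian divided by the
# square root of the pixel area in steradians.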
# Cosmology: ombh2, h and om are assumed to be defined earlier in this script
ns = 0.958
omb = ombh2 / h**2
omc = om - omb
omch2 = omc * h**2.
As = cosmology.As_from_s8(sigma8=0.76, bounds=[1.9e-9, 2.5e-9], rtol=1e-4,
                          omegab=omb, omegac=omc, ns=ns, h=h)
print(As)

params = {}
params['As'] = As
params['H0'] = h * 100.
params['omch2'] = omch2
params['ombh2'] = ombh2
params['ns'] = ns
params['mnu'] = 0.

conc = 3.2
cc = counts.ClusterCosmology(params, skipCls=True, skipPower=True, skip_growth=True)

# NFW convergence profile for a mass-2e14 cluster at z = 0.7
z = 0.7
mass = 2e14
thetas = np.geomspace(0.1, 10, 1000)
kappa = lensing.nfw_kappa(mass, thetas*utils.arcmin, cc, zL=z, concentration=conc,
                          overdensity=180, critical=False, atClusterZ=False)

# Compare against the reference profile loaded from disk
hthetas, hkappa = np.loadtxt("data/hdv_unfiltered.csv", unpack=True, delimiter=',')

pl = io.Plotter(xyscale='loglog', xlabel='$\\theta$ [arcmin]', ylabel='$\\kappa$')
pl.add(thetas, kappa)
pl.add(hthetas, hkappa, ls='--')
pl.done('test_uhdv.png')

# Ratio of the reference profile to the interpolated hmvec/lensing prediction
pl = io.Plotter(xyscale='linlin', xlabel='$\\theta$ [arcmin]', ylabel='$\\kappa$')
pl.add(hthetas, hkappa/maps.interp(thetas, kappa)(hthetas), ls='--')
pl.hline(y=1)