Example #1
    def fit(self,keyData,keyTheory,amplitudeRange=np.arange(0.1,2.0,0.01),debug=False,numbins=-1):
        # evaluate likelihood on a 1d grid and fit to a gaussian
        # store fit as new theory curve

        width = amplitudeRange[1]-amplitudeRange[0]
        Likelihood = lambda x: np.exp(-0.5*self.chisq(keyData,keyTheory,amp=x,numbins=numbins))
        Likes = np.array([Likelihood(x) for x in amplitudeRange])
        Likes = Likes / (Likes.sum()*width) #normalize

        ampBest,ampErr = cfit(norm.pdf,amplitudeRange,Likes,p0=[1.0,0.5])[0]

        
        
        if debug:
            fitVals = np.array([norm.pdf(x,ampBest,ampErr) for x in amplitudeRange])
            pl = Plotter()
            pl.add(amplitudeRange,Likes,label="likes")
            pl.add(amplitudeRange,fitVals,label="fit")
            pl.legendOn()
            pl.done("output/debug_coreFit.png")

        fitKey = keyData+"_fitTo_"+keyTheory
        self.datas[fitKey] = {}
        self.datas[fitKey]['covmat'] = None
        self.datas[fitKey]['binned'] = self.datas[keyTheory]['binned']*ampBest
        #self.datas[fitKey]['unbinned'] = self.datas[keyTheory]['unbinned']*ampBest
        self.datas[fitKey]['label'] = keyData+" fit to "+keyTheory+" with amp "+'{0:.2f}'.format(ampBest)+"+-"+'{0:.2f}'.format(ampErr)
        self.datas[fitKey]['amp']=(ampBest,ampErr)
        self.datas[fitKey]['isFit'] = True

        return fitKey
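A minimal standalone sketch of the same fitting step, assuming `cfit` above is `scipy.optimize.curve_fit` and using a toy chi-square in place of the class's own `chisq`: evaluate the likelihood on an amplitude grid, normalize it to unit area, and fit a Gaussian PDF to read off the best-fit amplitude and its 1-sigma error.

import numpy as np
from scipy.optimize import curve_fit
from scipy.stats import norm

# Toy chi-square with a minimum at amp=1.2 and curvature corresponding to sigma=0.1.
chisq = lambda amp: ((amp - 1.2) / 0.1)**2

amplitudeRange = np.arange(0.1, 2.0, 0.01)
width = amplitudeRange[1] - amplitudeRange[0]
Likes = np.exp(-0.5 * np.array([chisq(a) for a in amplitudeRange]))
Likes = Likes / (Likes.sum() * width)  # normalize to unit area

# norm.pdf(x, loc, scale): curve_fit returns the best-fit mean and width.
(ampBest, ampErr), _ = curve_fit(norm.pdf, amplitudeRange, Likes, p0=[1.0, 0.5])
print(ampBest, ampErr)  # ~1.2 and ~0.1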
Example #2
def makeTemplate(l, Fl, mod, Nx, Ny, debug=False):
    """
    Given 1d function Fl of l, creates the 2d version
    of Fl on 2d k-space defined by mod
    """

    FlSpline = splrep(l, Fl, k=3)
    ll = np.ravel(mod)
    kk = (splev(ll, FlSpline))

    template = np.reshape(kk, [Ny, Nx])

    if debug:
        print(kk)
        myFig = Plotter("$l$", "$F_l$", scaleX="log", scaleY="log")
        #myFig.add(l,Fl)
        myFig.add(ll, kk)
        myFig.done(fileName="output/interp.png")
        plotme([mod], saveFile="output/mod.png", axoff=True, clbar=False)
        plotme([template], saveFile="output/temp.png", axoff=True, clbar=False)
        plotme([np.log(template)],
               saveFile="output/logtemp.png",
               axoff=True,
               clbar=False)
        sys.exit()

    return template
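For reference, a self-contained sketch of the same 1d-to-2d interpolation using only numpy and scipy; the construction of `mod` here (the grid of Fourier-mode magnitudes) is a guess for illustration, since the real code passes it in precomputed.

import numpy as np
from scipy.interpolate import splrep, splev

Ny, Nx = 128, 128
pix = np.deg2rad(2. / 60.)                      # hypothetical 2-arcmin pixels, in radians
ly = np.fft.fftfreq(Ny, d=pix) * 2. * np.pi
lx = np.fft.fftfreq(Nx, d=pix) * 2. * np.pi
mod = np.sqrt(ly[:, None]**2 + lx[None, :]**2)  # |l| at each 2d Fourier pixel

l = np.arange(2, 10000)
Fl = 1. / l**2                                  # some 1d function of l
FlSpline = splrep(l, Fl, k=3)
template = splev(np.ravel(mod), FlSpline).reshape(Ny, Nx)
# Note: modes with |l| below l[0] (e.g. the DC mode) are spline-extrapolated.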
Example #3
    def plotCls(self,saveFile,keys=None,xlimits=None,ylimits=None,transform=True,showBinnedTheory=False,scaleX='linear',scaleY='linear'):

        nsigma = 2.
        
        binCenters = self.binner.getBinCenters()

        if transform:
            ylab = "$\ell C_{\ell}$"
            mult = binCenters
            multTh = 1.#binCenters*0.+1.
        else:
            ylab = "$C_{\ell}$"
            mult = binCenters*0.+1.
            multTh = 0.#binCenters*0.
            
        pl = Plotter(labelX="$\ell$",labelY=ylab,scaleX=scaleX,scaleY=scaleY)


        
        if keys is None: keys = list(self.datas.keys())
        for key in keys:

            dat = self.datas[key]

            if dat['covmat'] is None:
                #This is a theory curve
                ells = np.array(list(range(len(dat['unbinned']))))
                if dat['isFit']:
                    ls="--"
                    lw=1
                else:
                    ls="-"
                    lw=2
                    
                base_line, = pl.add(ells,(multTh*(ells-1)+1.)*dat['unbinned'],label=dat['label'],lw=lw,ls=ls)
                if dat['isFit']:
                    pl._ax.fill_between(ells,(multTh*(ells-1)+1.)*dat['unbinned']*(1.-nsigma*dat['amp'][1]/dat['amp'][0]),(multTh*(ells-1)+1.)*dat['unbinned']*(1.+nsigma*dat['amp'][1]/dat['amp'][0]),alpha=0.3, facecolor=base_line.get_color())
                    
                if showBinnedTheory:
                    pl.add(binCenters[:len(dat['binned'])],mult[:len(dat['binned'])]*dat['binned'],
                           ls='none',marker='x',mew=2,markersize=10,label=dat['label']+' binned')
                  
            else:
                errs = np.sqrt(np.diagonal(dat['covmat']))
                print((dat['label']))
                pl.addErr(binCenters[:len(dat['binned'])],mult[:len(dat['binned'])]*dat['binned'],mult[:len(dat['binned'])]*errs,label=dat['label'],marker='o',elinewidth=2,markersize=10,mew=2,)


        for spine in pl._ax.spines.values():
            spine.set_linewidth(2.0)
        pl._ax.tick_params(which='major',width=2)
        pl._ax.tick_params(which='minor',width=2)
        pl._ax.axhline(y=0.,ls='--')

        if xlimits is not None:
            pl._ax.set_xlim(*xlimits)
        else:
            pl._ax.set_xlim(self.binner.bin_edges[0],self.binner.bin_edges[-1])
        if ylimits is not None: pl._ax.set_ylim(*ylimits)
        pl.legendOn(loc='lower left',labsize=10)
        pl.done(saveFile)
Example #4
def stat_analysis(cutouts, binsize, arcmax, cents, modRMaps):
    profiles = []
    for i in range(0, len(cutouts)):
        thetaRange = np.arange(0., arcmax, binsize)
        breali = bin2D(modRMaps[i] * 180. * 60. / np.pi, thetaRange)
        a = breali.bin(cutouts[i])[1]
        profiles.append(a)
    statistics = stats.getStats(profiles)
    mean = statistics['mean']
    error = statistics['errmean']
    covmat = statistics['cov']
    corrcoef = stats.cov2corr(covmat)
    io.quickPlot2d(corrcoef, 'corrcoef.png')
    pl = Plotter(labelX='Distance from Center (arcminutes)',
                 labelY='Temperature Fluctuation ($\mu K$)',
                 ftsize=10)
    pl.add(cents, mean)
    pl.addErr(cents, mean, yerr=error)
    pl._ax.axhline(y=0., ls="--", alpha=0.5)
    pl.done(out_dir + "error.png")
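`bin2D` and `stats.getStats` come from the surrounding library; below is a rough numpy-only sketch of the radial binning they appear to perform here (an assumption about their behavior, not their actual implementation).

import numpy as np

def radial_bin(map2d, modrmap_arcmin, bin_edges_arcmin):
    """Mean of map2d in annuli of the radius map (both in arcmin); returns (centers, means)."""
    centers = 0.5 * (bin_edges_arcmin[1:] + bin_edges_arcmin[:-1])
    means = np.zeros(len(centers))
    for k in range(len(centers)):
        sel = (modrmap_arcmin >= bin_edges_arcmin[k]) & (modrmap_arcmin < bin_edges_arcmin[k + 1])
        means[k] = map2d[sel].mean() if sel.any() else np.nan
    return centers, means

# Per-cutout profiles, then the mean profile and the error on the mean:
# profiles = [radial_bin(c, r * 180. * 60. / np.pi, thetaRange)[1] for c, r in zip(cutouts, modRMaps)]
# mean = np.mean(profiles, axis=0); error = np.std(profiles, axis=0) / np.sqrt(len(profiles))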
Example #5
    def kappaToAlpha(self, kappaMap, test=False):

        fKappa = fft_gen(kappaMap, axes=[-2, -1])
        fAlpha = self.ftkernels * fKappa
        pixScaleY, pixScaleX = kappaMap.pixshape()
        Ny, Nx = kappaMap.shape

        #retAlpha = (np.fft.ifftshift(enmap.ifft_gen(fAlpha,normalize=False).real)+kappaMap*0.)*pixScaleY*pixScaleX/Nx/Ny
        retAlpha = -(np.fft.ifftshift(
            ifft_gen(fAlpha, axes=[-2, -1], normalize=False).real[::-1]) +
                     kappaMap * 0.) * pixScaleY * pixScaleX / Nx / Ny

        if test:
            newKap = -np.nan_to_num(0.5 * enmap.div(retAlpha))
            thetaMap = kappaMap.posmap()
            thetaModMap = 60. * 180. * (np.sum(thetaMap**2, 0)**0.5) / np.pi
            print(("newkappaint ", np.nanmean(newKap[thetaModMap < 10.])))

            pl = Plotter()
            pl.plot2d(kappaMap)
            pl.done("output/oldKap.png")
            pl = Plotter()
            pl.plot2d(newKap)
            pl.done("output/newKap.png")
            ratio = np.nan_to_num(newKap / kappaMap)
            print((thetaMap.shape))

            print((ratio[thetaModMap < 5].mean()))
            pl = Plotter()
            pl.plot2d(ratio[200:-200, 200:-200])
            pl.done("output/testratio.png")

        return retAlpha
Example #6
    print(("Reconstructing" , i , " ..."))
    qest.updateTEB_X(fot,foe,fob,alreadyFTed=True)
    qest.updateTEB_Y(alreadyFTed=True)

    for j, polComb in enumerate(polCombList):

        kappa = qest.getKappa(polComb)


        reconLm = lensedTLm.copy()
        reconLm.data[:,:] = kappa[:,:].real

        pl = Plotter()
        pl.plot2d(reconLm.data)
        pl.done("/gpfs01/astro/www/msyriac/plots/recon"+str(i)+".png")
        
        print("crossing with input")


        p2d = ft.powerFromLiteMap(kappaLm,reconLm,applySlepianTaper=False)
        # p2d.powerMap = p2d.powerMap/w2
        centers, means = stats.binInAnnuli(p2d.powerMap, p2d.modLMap, bin_edges)
        listCrossPower[polComb].append( means )



        p2d = ft.powerFromLiteMap(reconLm,applySlepianTaper=False)
        # p2d.powerMap = p2d.powerMap/w4
        centers, means = stats.binInAnnuli(p2d.powerMap, p2d.modLMap, bin_edges)
        listReconPower[polComb].append( means )
Example #7
if not saveMf:
    try:
        mf = np.loadtxt("data/meanfield.dat")
        kappaStack -= mf
        print("subtracted meanfield")
    except IOError:
        # no saved meanfield available; proceed without subtracting
        pass

stepmin = kellmin

kappaStack = fmaps.stepFunctionFilterLiteMap(kappaStack,
                                             modLMap,
                                             kellmax,
                                             ellMin=stepmin)

pl = Plotter()
pl.plot2d(kappaStack)
pl.done(outDir + "recon.png")

dt = pixScale
arcmax = 20.
thetaRange = np.arange(0., arcmax, dt)
breal = bin2D(modRMap * 180. * 60. / np.pi, thetaRange)
cents, recons = breal.bin(kappaStack)

pl = Plotter()
pl.add(cents, recons)
pl._ax.axhline(y=0., ls="--", alpha=0.5)
pl.done(outDir + "profiles.png")
Example #8
import matplotlib
matplotlib.use('Agg')
import numpy as np
from orphics.tools.io import Plotter
from past.utils import old_div

zrange = np.arange(0., 3.05, 0.05)

pl = Plotter(scaleY='log', scaleX='log')

for stepSize, ls in zip([0.2, 0.1, 0.05], ['-', '--', '-.']):
    cambOutUpFile = lambda z: "/home/msyriac/software/CAMB_wa/forDerivsStep" + str(
        stepSize) + "Up_matterpower_" + str(z) + ".dat"
    cambOutDnFile = lambda z: "/home/msyriac/software/CAMB_wa/forDerivsStep" + str(
        stepSize) + "Dn_matterpower_" + str(z) + ".dat"

    for i, z in enumerate(zrange[::-1]):

        kh_camb_up, P_camb_up = np.loadtxt(cambOutUpFile(z), unpack=True)
        kh_camb_dn, P_camb_dn = np.loadtxt(cambOutDnFile(z), unpack=True)

        assert np.all(kh_camb_dn == kh_camb_up)

        Pderiv = old_div(np.abs(P_camb_up - P_camb_dn), stepSize)

        if i % 5 == 0:
            pl.add(kh_camb_up, Pderiv, ls=ls)
            #pl.add(kh_camb,P_camb)
            #pl.add(kh,pk[Nzs-i-1,:],ls="--")

pl.done("output/testwa.png")
Example #9
def stack_on_map(lite_map,
                 width_stamp_arcminute,
                 pix_scale,
                 ra_range,
                 dec_range,
                 catalog=None,
                 n_random_points=None):
    width_stamp_degrees = width_stamp_arcminute / 60.
    Np = np.int(width_stamp_arcminute / pix_scale + 0.5)
    pad = np.int(Np / 2 + 0.5)
    print("Expected width in pixels = ", Np)
    lmap = lite_map
    stack = 0
    N = 0
    profiles = []

    if catalog is not None:
        looprange = range(0, len(catalog))
        assert n_random_points is None
        random = False
    else:
        assert n_random_points is not None
        assert len(ra_range) == 2
        assert len(dec_range) == 2
        looprange = range(0, n_random_points)
        random = True
    print(looprange)
    for i in looprange:
        banana = True
        if random:
            ra = np.random.uniform(*ra_range)
            dec = np.random.uniform(*dec_range)
            mass = np.inf  # random points have no catalog mass; skip the mass cut (assumption)
        else:
            ra = catalog[i][1]  #1 for ACT catalog 2 for SDSS
            dec = catalog[i][2]  #2 for ACT catalog 3 for SDSS
            mass = catalog[i][10]
        for j in range(0, 2130):
            distance = np.sqrt((ra - RAps[j])**2 + (dec - DECps[j])**2)
            crit = 0.25
            if distance < crit:
                banana = False
                print('too close')
        ix, iy = lmap.skyToPix(ra, dec)
        if ix >= pad and ix < lmap.Nx - pad and iy >= pad and iy < lmap.Ny - pad and banana == True and mass > 8:
            print(i)
            #print(ra,dec)
            smap = lmap.selectSubMap(ra - width_stamp_degrees / 2.,
                                     ra + width_stamp_degrees / 2.,
                                     dec - width_stamp_degrees / 2.,
                                     dec + width_stamp_degrees / 2.)
            #print (smap.data.shape)
            #cutout = zoom(smap.data.copy(),zoom=(float(Np)/smap.data.shape[0],float(Np)/smap.data.shape[1])
            cutout = resize(smap.data.copy(),
                            output_shape=(Np, Np)) - randomstack
            xMap, yMap, modRMap, xx, yy = fmaps.getRealAttributes(smap)
            dt = pix_scale
            arcmax = 20.
            thetaRange = np.arange(0., arcmax, dt)
            breali = bin2D(modRMap * 180. * 60. / np.pi, thetaRange)
            a = breali.bin(cutout)[1]
            profiles.append(a)
            io.quickPlot2d(cutout, str(i) + "cutout.png")
            #print (cutout.shape)
            stack = stack + cutout
            N = N + 1
        else:
            print("skip")
    stack = stack / N  #-randomstack
    #print(stack.shape())
    #print(smap.data.shape)
    # print(stack)
    print(N)
    statistics = stats.getStats(profiles)
    mean = statistics['mean']
    error = statistics['errmean']
    corrcoef = statistics['corr']
    covmat = statistics['covmat']
    print(mean / error)
    np.save('statistics', statistics)
    #np.save('newrandomstamp',stack)
    # io.quickPlot2d(stack,out_dir+"newACTstack.png")
    dt = pix_scale
    arcmax = 20.
    thetaRange = np.arange(0., arcmax, dt)
    breal = bin2D(modRMap * 180. * 60. / np.pi, thetaRange)
    cents, recons = breal.bin(stack)
    pl = Plotter(labelX='Distance from Center (arcminutes)',
                 labelY='Temperature Fluctuation ($\mu K$)',
                 ftsize=10)
    pl.add(cents, mean)
    pl.addErr(cents, mean, yerr=error)
    pl._ax.axhline(y=0., ls="--", alpha=0.5)
    pl.done(out_dir + "error.png")
    print(covmat)
    io.quickPlot2d(covmat, 'covmat.png')
    return (stack, cents, recons)
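Stripped of the catalog bookkeeping, the core of the stacking loop is just: convert each (ra, dec) to pixel coordinates, skip objects too close to the map edge, accumulate fixed-size cutouts, and divide by the count. A plain-numpy sketch of that kernel (indexing a bare 2d array rather than a liteMap, and omitting the mass and point-source cuts above):

import numpy as np

def stack_cutouts(map2d, pix_coords, half_width):
    """Average (2*half_width)^2 cutouts of map2d centered on integer (iy, ix) pixel coords."""
    ny, nx = map2d.shape
    stack, n = 0., 0
    for iy, ix in pix_coords:
        if half_width <= iy < ny - half_width and half_width <= ix < nx - half_width:
            stack = stack + map2d[iy - half_width:iy + half_width,
                                  ix - half_width:ix + half_width]
            n += 1
    return stack / n if n > 0 else None

# e.g. stamp = stack_cutouts(lmap.data, [(120, 340), (512, 77)], 30)  # hypothetical pixel coords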
Example #10
                  pellmin,
                  pellmax,
                  beamY=beamY,
                  noiseTY=noiseTY,
                  noisePY=noisePY)
ls, Nls = myNls.getNl(polComb=polComb, halo=halo)

# ls,Nls = np.loadtxt("../SZ_filter/data/LA_pol_Nl.txt",unpack=True,delimiter=',')

ellkk = np.arange(2, 9000, 1)
Clkk = theory.gCl("kk", ellkk)
pl = Plotter(scaleY='log', scaleX='log')
pl.add(ellkk, 4. * Clkk / 2. / np.pi)
pl.add(ls, 4. * Nls / 2. / np.pi)
pl.legendOn(loc='lower left', labsize=10)
pl.done("output/nl.png")

overdensity = 200.
critical = False
atClusterZ = False

# overdensity=180.
# critical=False
# atClusterZ=False
kellmax = 8000

sn, k, std = NFWMatchedFilterSN(cc,
                                Mexp,
                                c,
                                z,
                                ells=ls,
Example #11
                                                overdensity=overdensity,
                                                critical=critical,
                                                atClusterZ=atClusterZ,
                                                rayleighSigmaArcmin=ray)

                print((sn * np.sqrt(1000.)))
                sns.append(old_div(1., (sn * np.sqrt(1000.))))

            fgpart = ""
            mispart = ""
            if miscenter:
                mispart = ", miscentered"
                col = "C0"
            else:
                col = "C1"
            if lensName == "CMB_all":
                lenspart = "T+P"
            else:
                lenspart = "P only"
            if not (doFg):
                fgpart = ", no foregrounds"
                col = "black"
                al = 0.5
            else:
                al = 1.0

            lab = lenspart + fgpart + mispart
            pl.add(beamList, sns, label=lab, ls=linestyle, alpha=al, color=col)
pl.legendOn(loc="upper left", labsize=12)
pl.done(out_dir + "FigBeam.pdf")
Example #12
# Read config
iniFile = "input/params.ini"
Config = SafeConfigParser()
Config.optionxform = str
Config.read(iniFile)

px = 180 * 60 / 60000.
gradCut = 60000.

expName = "DM-18arcsec"
lensName = "lensing"
ls, Nls, ellbb, dlbb, efficiency = lensNoise(Config,
                                             expName,
                                             lensName,
                                             px=px,
                                             gradCut=gradCut,
                                             bigell=gradCut,
                                             plot=True)

print(efficiency)
print(ls)
print(Nls)

from orphics.tools.io import Plotter

pl = Plotter(scaleY='log')
pl.add(ls, Nls)
pl._ax.set_ylim(1e-12, 1e-6)
#pl._ax.set_xlim(2,4000)
pl.done("nls.png")
Example #13
HMF = Halo_MF(cc,Mexp_edges,z_edges)
HMF.sigN = siggrid.copy()
#MM = 10**np.linspace(13.,14.,5)
#print SZProfExample.quickVar(MM,zz,tmaxN=tmaxN,numts=numts)

#sys.exit()

#print z_edges
#print HMF.N_of_z()

Nzs =  HMF.N_of_z_SZ(fsky,SZProfExample)*np.diff(z_edges)
zcents = old_div((z_edges[1:]+z_edges[:-1]),2.)
pl = Plotter()
pl.add(zcents,Nzs)
pl.done("nz.png")


print((HMF.Mass_err(fsky,lndM*24.0,SZProfExample)))

#print "quickvar " , np.sqrt(SZProfExample.quickVar(MM,zz,tmaxN=tmaxN,numts=numts))
#print "filtvar " , np.sqrt(SZProfExample.filter_variance(MM,zz))






#print "y_m",SZProfExample.Y_M(MM,zz)

Example #14
        Nz = dNdp.copy().sum(axis=-1).sum(axis=0)
        Nm = dNdp.copy().sum(axis=-1).sum(axis=-1)
        Nq = dNdp.copy().sum(axis=0).sum(axis=0)
        
        yNzs[key].append((val,Nz,Nm,Nq))
        

        
    pl = Plotter(labelX="$z$",labelY="$dN$")
    xstep = 0.01
    for i,val in enumerate(vals):
        assert yNzs[key][i][0]==val
        pl.add(getCents(z_edges)+((i-old_div(len(vals),2))*xstep),yNzs[key][i][1],label=key+" "+str(val))
    pl.legendOn(labsize=10,loc='upper right')
    pl.done(outDir+key+"_Nz_step.png")
    pl = Plotter(labelX="$M$",labelY="$dN$")
    xstep = 0.01
    for i,val in enumerate(vals):
        assert yNzs[key][i][0]==val
        pl.add(getCents(mexp_edges)+((i-old_div(len(vals),2))*xstep),yNzs[key][i][2],label=key+" "+str(val))
    pl.legendOn(labsize=10,loc='upper right')
    pl.done(outDir+key+"_Nm_step.png")
    pl = Plotter(labelX="$q$",labelY="$dN$",scaleX='log')
    xstep = 0.1
    for i,val in enumerate(vals):
        assert yNzs[key][i][0]==val
        pl.add(getCents(qbin_edges)+((i-old_div(len(vals),2))*xstep),yNzs[key][i][3],label=key+" "+str(val))
    pl.legendOn(labsize=10,loc='upper right')
    pl.done(outDir+key+"_Nq_step.png")
Example #15
Nlmvinv = 0.
pl = Plotter(scaleY='log')
for polComb in ['TT','TE','EE','EB']:
    kmax = getMax(polComb,tellmaxY,pellmaxY)
    bin_edges = np.arange(kmin,kmax,dell)+dell
    lmap = lm.makeEmptyCEATemplate(raSizeDeg=deg, decSizeDeg=deg,pixScaleXarcmin=px,pixScaleYarcmin=px)
    myNls = NlGenerator(lmap,theory,bin_edges,gradCut=gradCut)
    myNls.updateBins(bin_edges)

    nTX,nPX,nTY,nPY = myNls.updateNoise(beamX,noiseTX,noisePX,tellminX,tellmaxX, \
                      pellminX,pellmaxX,beamY=beamY,noiseTY=noiseTY, \
                      noisePY=noisePY,tellminY=tellminY,tellmaxY=tellmaxY, \
                      pellminY=pellminY,pellmaxY=pellmaxY,lkneesX=(lkneeTX,lkneePX),alphasX=(alphaTX,alphaPX), \
                                        lkneesY=(lkneeTY,lkneePY),alphasY=(alphaTY,alphaPY),lxcutTX=lxcutTX, \
                                        lxcutTY=lxcutTY,lycutTX=lycutTX,lycutTY=lycutTY, \
                                        lxcutPX=lxcutPX,lxcutPY=lxcutPY,lycutPX=lycutPX,lycutPY=lycutPY, \
                                        fgFileX=fgFileX,beamFileX=beamFileX,fgFileY=fgFileY,beamFileY=beamFileY )


    cbinner = bin2D(myNls.N.modLMap,cmb_bin_edges)
    ells, Nells = cbinner.bin(nTX)

    pl = Plotter(scaleY='log')
    pl.add(ells,Nells*ells**2.*TCMB**2.)
    pl.add(ells,Nells*ells**2.*TCMB**2.)
    tells,tnlstt = np.loadtxt('data/louisCls.dat',delimiter=',',unpack=True)
    pl.add(tells,tnlstt)
    pl.done("output/compnl.png")
    sys.exit()
           
Example #16
fsky = 0.4

N1 = hmf.N_of_z() * fsky

#hmf.sigN = np.loadtxt("temp.txt")

try:
    hmf.sigN = np.loadtxt("tempSigN.txt")
    N2 = hmf.N_of_z_SZ(SZProf) * fsky
except IOError:
    # no cached sigN on disk; compute it and cache for next time
    N2 = hmf.N_of_z_SZ(SZProf) * fsky
    np.savetxt("tempSigN.txt", hmf.sigN)

pl = Plotter()
pl.plot2d(hmf.sigN)
pl.done(outDir + "signRefactor.png")

pl = Plotter(scaleY='log')
pl.add(zs, N1)
pl.add(zs, N2)

Ntot1 = np.trapz(N2, zs)
print(Ntot1)

sn, ntot = hmf.Mass_err(fsky, outmerr, SZProf)

print(ntot)

#q_arr = np.logspace(np.log10(6.),np.log10(500.),64)
qs = [6., 500., 64]
qbin_edges = np.logspace(np.log10(qs[0]), np.log10(qs[1]), int(qs[2]) + 1)
Example #17
# R500_alt = cc.rdel_c_alt(M500,c500,delta)
# print R500, R500_alt
# sys.exit()
# R500 = R500_alt

Rrange = npspace(0.1, 10. * R500, 100, "log")

rhofunc = halos.rho_nfw(M500, c500, R500)
rhos = rhofunc(Rrange)

pl = Plotter(scaleY='log',
             scaleX='log',
             labelX="$R$ (Mpc/h)",
             labelY="$\\rho (h^2 M_{\\odot}/{\\mathrm{Mpc}^3})$")
pl.add(Rrange, rhos)
pl.done("output/rhos.png")

comL = cc.results.comoving_radial_distance(z) * cc.h
thetaS = R500 / c500 / comL
arcmin = 0.1
arcmax = 50.
thetas = npspace(arcmin * np.pi / 180. / 60., arcmax * np.pi / 180. / 60., 100,
                 'log')
gs = halos.projected_rho(thetas, comL, rhofunc)

gsalt = halos.proj_rho_nfw(thetas, comL, M500, c500, R500)

pl = Plotter(scaleY='log',
             scaleX='log',
             labelX="$\\theta$ (arcmin)",
             labelY="$g(\\theta/\\theta_S)$")
Example #18
experimentName = "SO-v2-6m"
beams = listFromConfig(Config,experimentName,'beams')
noises = listFromConfig(Config,experimentName,'noises')
freqs = listFromConfig(Config,experimentName,'freqs')
lmax = int(Config.getfloat(experimentName,'lmax'))
lknee = listFromConfig(Config,experimentName,'lknee')[0]
alpha = listFromConfig(Config,experimentName,'alpha')[0]
fsky = Config.getfloat(experimentName,'fsky')

SZProfExample = SZ_Cluster_Model(clusterCosmology=cc,clusterDict=clusterDict,rms_noises = noises,fwhms=beams,freqs=freqs,lmax=lmax,lknee=lknee,alpha=alpha,tsz_cib=True)

#print SZProfExample.nl / SZProfExample.nl_new

pl = Plotter()
pl.add(SZProfExample.evalells,old_div(SZProfExample.nl_old, SZProfExample.nl))
pl.done("tests/new_nl_test.png")

#ILC = ILC_simple(clusterCosmology=cc, rms_noises = noises,fwhms=beams,freqs=freqs,lmax=lmax,lknee=lknee,alpha=alpha)
#ILC2 = ILC_simple(clusterCosmology=cc, rms_noises = noises[3:],fwhms=beams[3:],freqs=freqs[3:],lmax=lmax,lknee=lknee,alpha=alpha)
#ILC3 = ILC_simple(clusterCosmology=cc, rms_noises = noises[3:6],fwhms=beams[3:6],freqs=freqs[3:6],lmax=lmax,lknee=lknee,alpha=alpha)

ILC = ILC_simple(clusterCosmology=cc, rms_noises = noises,fwhms=beams,freqs=freqs,lmax=lmax,lknee=lknee,alpha=alpha)

lsedges = np.arange(300,8001,100)
el_ilc, cls_ilc, err_ilc, s2n = ILC.Forecast_Cellcmb(lsedges,fsky)
el_ilc_c, cls_ilc_c, err_ilc_c, s2n_c = ILC.Forecast_Cellcmb(lsedges,fsky,constraint='tsz')
print((s2n,s2n_c))

ILC = ILC_simple(clusterCosmology=cc, rms_noises = noises,fwhms=beams,freqs=freqs,lmax=7000,lknee=lknee,alpha=alpha)

lsedges = np.arange(300,7001,100)
Example #19
    eln, nl = ILC.Noise_ellyy(constraint='cib')
    eln2, nl2 = ILC2.Noise_ellyy(constraint='cib')
    #    eln3,nl3 = ILC3.Noise_ellyy(constraint='cib')
    #    eln4,nl4 = ILC4.Noise_ellyy(constraint='cib')

    elnc, nlc = ILC.Noise_ellcmb(constraint='tsz')
    elnc2, nlc2 = ILC2.Noise_ellcmb(constraint='tsz')
#    elnc3,nlc3 = ILC3.Noise_ellcmb(constraint='tsz')
#    elnc4,nlc4 = ILC4.Noise_ellcmb(constraint='tsz')

pl = Plotter(labelX="$\ell$", labelY="Noise Ratio", ftsize=12, figsize=(8, 6))
pl.add(eln2, old_div(nl2, nl), label="SO/CCATP")
#pl.add(eln3,nl3/nl,label="90 - 270 / Full")
#pl.add(eln4,nl4/nl,label="90 - 220 / Full")
#pl.legend(loc='upper right',labsize=10)
pl.done(outDir + experimentName + "_y_noise_ratio" + constraint_tag[cf] +
        ".png")

pl = Plotter(labelX="$\ell$", labelY="Noise Ratio", ftsize=12, figsize=(8, 6))
pl.add(elnc2, old_div(nlc2, nlc), label="SO/CCATP")
#pl.add(elnc3,nlc3/nlc,label="90 - 270 / Full")
#pl.add(elnc4,nlc4/nlc,label="90 - 220 / Full")
#pl.legend(loc='upper right',labsize=10)
pl.done(outDir + experimentName + "_cmb_noise_ratio" + constraint_tag[cf] +
        ".png")

pl = Plotter(labelX="$\ell$", labelY="Error Ratio", ftsize=12, figsize=(8, 6))
pl.add(el_il2, old_div(err_il2, err_il), label="SO/CCATP")
#pl.add(el_il3,err_il3/err_il,label="90 - 270 / Full")
#pl.add(el_il4,err_il4/err_il,label="90 - 220 / Full")
#pl.legend(loc='upper right',labsize=10)
pl.done(outDir + experimentName + "_y_error_ratio" + constraint_tag[cf] +
Example #20
            errs = np.sqrt(np.diagonal(Finv))
            errDict = {}
            for i, param in enumerate(paramList):
                errDict[param] = errs[i]

            if 'mnu' in fishName:
                constraint = errDict['mnu'] * 1000
            elif 'w0' in fishName:
                constraint = errDict['w0'] * 100
            sigs.append(constraint)
            if (np.abs(preVal - constraint) * 100. / constraint) < pertol:
                print(((constraint - preVal) * 100. / constraint))
                if k > mink: break
            preVal = constraint
            print((prior, val, constraint))
            k += 1

        priorLabel = paramLatexList[paramList.index(prior)]
        if "DESI" in fishSection:
            lab = None
            lss = "--"
        else:
            lab = "$" + priorLabel + "$"
            lss = "-"

        pl.add(xs, sigs, label=lab, ls=lss)
    plt.gca().set_prop_cycle(None)  # set_color_cycle was removed in newer matplotlib

pl.legendOn(loc='upper right', labsize=11)
pl.done(os.environ['WWW'] + "paper/FigPrior_" + fishName + "_tau.pdf")
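For context on the first few lines: the marginalized 1-sigma error on each parameter is the square root of the corresponding diagonal element of the inverse Fisher matrix, sigma_i = sqrt((F^-1)_ii). A minimal sketch with a made-up 2x2 Fisher matrix (the numbers are illustrative only):

import numpy as np

paramList = ['mnu', 'w0']            # hypothetical parameter ordering
F = np.array([[4.0e4, 1.0e3],
              [1.0e3, 2.0e2]])       # made-up Fisher matrix
Finv = np.linalg.inv(F)
errDict = {p: np.sqrt(Finv[i, i]) for i, p in enumerate(paramList)}
print(errDict['mnu'] * 1000)         # same x1000 convention as above (presumably eV to meV)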
Example #21
        s8now = np.mean(s81zs[np.logical_and(zrange >= zleft,
                                             zrange < zright)])
        print(lab, zleft, zright, yerr, s8now, yerr * 100. / s8now, "%")
        #s8now = np.mean(s81zs[np.logical_and(zrange>=zleft,zrange<zright)])/s81
        #yerrsq = (1./sum([1/x**2. for x in errselect]))
        #yerr = (s8now/s80mean)*np.sqrt(yerrsq/s8now**2. + yerrsq0/s80mean**2.)
        errcents.append(yerr)
        ms8.append(s8now)
        currentAxis.add_patch(
            Rectangle((zcent - xerr + pad, 1. - old_div(yerr, s8now)),
                      2 * xerr - old_div(pad, 2.),
                      2. * yerr / s8now,
                      facecolor=col,
                      alpha=1.0))
    print("=====================")
    pl._ax.fill_between(zrange, 1., 1., label=lab, alpha=0.75, color=col)

#pl.add(zrange,s82zs/s81zs,label="$w=-0.97$",color='red',alpha=0.5)
pl.add(zrange, old_div(s81zs, s81zs), color='white', alpha=0.5,
       ls="--")  #,label="$w=-1$")

# pl.add(zrange,s82zs/s81zs/s82*s81,label="$w=-0.97$",color='red',alpha=0.5)
# pl.add(zrange,s81zs*0.+1.,label="$w=-1$",color='black',alpha=0.5,ls="--")

pl.legendOn(labsize=12, loc="lower left")
pl._ax.set_ylim(0.88, 1.12)  # res
#pl._ax.set_ylim(0.95,1.05) # fsky
#pl._ax.text(0.8,.82,"Madhavacheril et. al. in prep")
pl.done(outDir + "FigS8.pdf")
#pl.done(outDir+"s8SO.png")
Example #22
            #     lcents,Nlbinned = binner.bin(nPX)
            #     pl.add(lcents,Nlbinned,label=labname,ls="--")

            pl.add(ls,Nls,label=labname,ls=lines,alpha=alpha)
                

            

            LF = LensForecast()
            LF.loadKK(frange,Clkk,ls,Nls)
            sn,errs = LF.sn(snrange,fsky,"kk")
            #print errs
            sns.append(sn)
            print(noiseFile, " S/N ", sn)



            
# pl.add(frangeC,theory.lCl('EE',frangeC))
# pl.legendOn(loc='lower right',labsize = 8)
# #pl._ax.set_xlim(0,3000)
# #pl._ax.set_ylim(1.e-9,1.e-6)
# pl.done("beamVary_"+polComb+".pdf")

            
pl.add(frange,Clkk,color="black")
pl.legendOn(loc='lower right',labsize = 8)
pl._ax.set_xlim(0,3000)
pl._ax.set_ylim(1.e-9,1.e-6)
pl.done("beamVary_"+polComb+".pdf")
Example #23
    data2d = qest.N.Nlkk[polComb]
    centers, Nlbinned = binInAnnuli(data2d, modLMap, bin_edges)

    try:
        huFile = '/astro/u/msyriac/repos/cmb-lensing-projections/data/NoiseCurvesKK/hu_'+polComb.lower()+'.csv'
        huell,hunl = np.loadtxt(huFile,unpack=True,delimiter=',')
    except:
        huFile = '/astro/u/msyriac/repos/cmb-lensing-projections/data/NoiseCurvesKK/hu_'+polComb[::-1].lower()+'.csv'
        huell,hunl = np.loadtxt(huFile,unpack=True,delimiter=',')


    pl.add(centers,4.*Nlbinned/2./np.pi,color=col)
    pl.add(huell,hunl,ls='--',color=col)


pl.done("tests/output/testbin.png")






#cambRoot = "data/ell28k_highacc"
gradCut = None
halo = True
beam = 7.0
noiseT = 27.0
noiseP = 56.6
tellmin = 2
tellmax = 3000
gradCut = 10000
Example #24
    inputKappaStack = 0.
    szStack = 0.

    N = numClusters

    for i in range(N):
        print(i)

        kappa = enmap.read_map(saveName + "_kappa_" + str(i) + "_" +
                               str(snap) + ".hdf")
        inputKappaMap = enmap.read_map(saveName + "_inpkappa_" + str(i) + "_" +
                                       str(snap) + ".hdf")
        szMap = enmap.read_map(saveName + "_sz_" + str(i) + "_" + str(snap) +
                               ".hdf")

        kappaStack += kappa
        inputKappaStack += inputKappaMap
        szStack += szMap

    pl = Plotter()
    pl.plot2d(kappaStack / N)
    pl.done(outDir + "recon_" + str(snap) + ".png")

    pl = Plotter()
    pl.plot2d(inputKappaStack / N)
    pl.done(outDir + "truestack_" + str(snap) + ".png")

    pl = Plotter()
    pl.plot2d(szStack / N)
    pl.done(outDir + "szstack_" + str(snap) + ".png")
Example #25
def stack_on_map(lite_map,
                 width_stamp_arcminute,
                 pix_scale,
                 ra_range,
                 dec_range,
                 catalog=None,
                 n_random_points=None):
    width_stamp_degrees = width_stamp_arcminute / 60.
    Np = np.int(width_stamp_arcminute / pix_scale + 0.5)
    pad = np.int(Np / 2 + 0.5)
    print("Expected width in pixels = ", Np)

    lmap = lite_map
    stack = 0
    N = 0

    if catalog is not None:
        looprange = range(0, len(catalog))
        assert n_random_points is None
        random = False
    else:
        assert n_random_points is not None
        assert len(ra_range) == 2
        assert len(dec_range) == 2
        looprange = range(0, n_random_points)
        random = True

    for i in looprange:
        if random:
            ra = np.random.uniform(*ra_range)
            dec = np.random.uniform(*dec_range)
        else:
            ra = catalog[i][1]
            dec = catalog[i][2]
        ix, iy = lmap.skyToPix(ra, dec)
        if ix >= pad and ix < lmap.Nx - pad and iy >= pad and iy < lmap.Ny - pad:
            print(i)
            #print(ra,dec)
            smap = lmap.selectSubMap(ra - width_stamp_degrees / 2.,
                                     ra + width_stamp_degrees / 2.,
                                     dec - width_stamp_degrees / 2.,
                                     dec + width_stamp_degrees / 2.)
            #print (smap.data.shape)
            #cutout = zoom(smap.data.copy(),zoom=(float(Np)/smap.data.shape[0],float(Np)/smap.data.shape[1]))
            cutout = resize(smap.data.copy(), output_shape=(Np, Np))
            #print (cutout.shape)
            stack = stack + cutout
            xMap, yMap, modRMap, xx, yy = fmaps.getRealAttributes(smap)
            N = N + 1.
        else:
            print("skip")
    stack = stack / N
    #print(stack.shape())
    #print(smap.data.shape)
    print(stack)
    print(N)
    io.quickPlot2d(stack, out_dir + "stackrandom.png")

    dt = pix_scale
    arcmax = 20.
    thetaRange = np.arange(0., arcmax, dt)
    breal = bin2D(modRMap * 180. * 60. / np.pi, thetaRange)
    cents, recons = breal.bin(stack)
    pl = Plotter(labelX='Distance from Center (arcminutes)',
                 labelY='Temperature Fluctuation ($\mu K$)',
                 ftsize=10)
    pl.add(cents, recons)
    pl._ax.axhline(y=0., ls="--", alpha=0.5)
    pl.done(out_dir + "randomprofiles.png")
    return stack, cents, recons
Example #26
kh, z, pk, s8 = HMF.pk(zrange)

#pl = Plotter(scaleY='log',scaleX='log')
pl = Plotter()

Nzs = pk.shape[0]

for i, z in enumerate(zrange[::-1]):

    kh_camb, P_camb = np.loadtxt(cambOutFile(i), unpack=True)

    if i == 0:
        kmin = max(kh_camb[0], kh[0])
        kmax = min(kh_camb[-1], kh[-1])
        keval = np.logspace(np.log10(kmin), np.log10(kmax), 20)

    pcambfunc = interp1d(kh_camb, P_camb)
    pfunc = interp1d(kh, pk[Nzs - i - 1, :])

    pcambeval = pcambfunc(keval)
    peval = pfunc(keval)
    pdiff = (pcambeval - peval) * 100. / peval
    print((z, pdiff))

    if i % 1 == 0:
        pl.add(keval, pdiff)
        #pl.add(kh_camb,P_camb)
        #pl.add(kh,pk[Nzs-i-1,:],ls="--")

pl.done("output/testcamb.png")
Example #27
import numpy as np

saveRoot = "data/ell28"

gradCut = 2000
polComb = 'TT'
beamY = 1.5
noiseY = 3.0
tellminY = 200
pellminY = 50
kmin = 40
deg = 10.
px = 0.2
delensTolerance = 1.0

pl = Plotter()

for lab in ['planckGrad', 'sameGrad']:
    fileName = saveRoot + getFileNameString([
        'gradCut', 'polComb', 'beamY', 'noiseY', 'grad', 'tellminY',
        'pellminY', 'kmin', 'deg', 'px', 'delens'
    ], [
        gradCut, polComb, beamY, noiseY, lab, tellminY, pellminY, kmin, deg,
        px, delensTolerance
    ]) + ".txt"

    ells, Nls = np.loadtxt(fileName, unpack=True)

    pl.add(ells, Nls, label=lab)
pl.done("output/grad.png")
Example #28
    if noiseT > 1.e-3:
        lensedMapX = lensedMapX + gGenT.getMap(stepFilterEll=None)
    if noiseT > 1.e-3:
        lensedMapY = lensedMapY + gGenT.getMap(stepFilterEll=None)

    # lensedMapYRot = np.rot90(lensedMapY.copy(),2)

    lensedMapX = lensedMapX * win
    lensedMapY = lensedMapY * win
    # lensedMapYRot = lensedMapYRot*win

    if i == 0:
        pl = Plotter()
        pl.plot2d(lensedMapX)
        pl.done(outDir + "lensed.png")

    fotX = np.nan_to_num(fft(lensedMapX, axes=[-2, -1]) / beamTemplate[:, :])
    fotY = np.nan_to_num(fft(lensedMapY, axes=[-2, -1]) / beamTemplate[:, :])
    # fotYRot = np.nan_to_num(fft(lensedMapYRot,axes=[-2,-1])/ beamTemplate[:,:])

    if i % 1 == 0: print "Reconstructing", i, " ..."
    qest.updateTEB_X(fotX, alreadyFTed=True)
    qest.updateTEB_Y(fotY, alreadyFTed=True)
    kappa = qest.getKappa(polCombList[0]).real / w2

    # qest.updateTEB_Y(-fotYRot,alreadyFTed=True)
    # kappaRot = qest.getKappa(polCombList[0]).real/w2

    kappaStack += kappa
    # kappaFix += kappaRot #(kappa+kappaRot)/2.
Example #29
        fp = interp1d(centers,
                      statsRecon[polComb]['mean'],
                      fill_value='extrapolate')
        pl.add(ellkk, (fp(ellkk)) - Clkk, color=col, lw=2)

        Nlkk2d = qest.N.Nlkk[polComb]
        ncents, npow = stats.binInAnnuli(Nlkk2d, p2d.modLMap, bin_edges)
        pl.add(ncents, npow, color=col, lw=2, ls="--")

    avgInputPower = totAllInputPower / N
    pl.add(centers, avgInputPower, color='cyan',
           lw=3)  # ,label = "input x input"

    pl.legendOn(labsize=10, loc='lower left')
    pl._ax.set_xlim(kellmin, kellmax)
    pl.done("tests/output/power.png")

    # cross compare to power of input (percent)
    pl = Plotter()

    for polComb, col in zip(polCombList, colorList):
        cross = statsCross[polComb]['mean']

        pl.add(centers, (cross - avgInputPower) * 100. / avgInputPower,
               label=polComb,
               color=col,
               lw=2)

    pl.legendOn(labsize=10, loc='upper right')
    pl._ax.set_xlim(kellmin, kellmax)
    pl._ax.axhline(y=0., ls="--", color='black', alpha=0.5)
Example #30
                print "percentage efficiency ", efficiency, " %"
            else:
                ls, Nls = myNls.getNl(polComb=polComb, halo=halo)

            pl.add(ls, Nls, label=str(beamX))
            #pl.add(myNls.N.cents,myNls.N.wxy,label=str(beamX))

            LF = LensForecast()
            LF.loadKK(frange, Clkk, ls, Nls)
            sn, errs = LF.sn(snrange, fsky, "kk")
            sns.append(sn)

        pl.add(frange, Clkk, color="black")

        #pl.legendOn(loc='lower left',labsize = 8)
        pl._ax.set_xlim(0, 3000)
        pl._ax.set_ylim(1.e-9, 1.e-6)
        pl.done("beamVary_" + polComb + "_delens_" + str(delens) +
                "_noiseVary.pdf")

        # pl = Plotter(labelX = "beamX (arcmin)",labelY="S/N auto",ftsize=14)
        # pl.add(beamRange,sns)
        # pl.done(polComb+str(delens)+"_sn_swap_"+str(swap)+"_noiseVary.pdf")

        pl = Plotter(labelX="noiseX (muK-arcmin)",
                     labelY="S/N auto",
                     ftsize=14)
        pl.add(noiseRange, sns)
        pl.done(polComb + str(delens) + "_sn_swap_" + str(swap) +
                "_noiseVary.pdf")