def plot(cents, nls, ncoadd, pols, tag="default"):
    npols = len(pols)
    pl = io.Plotter(xyscale='linlog', ylabel='$C_L$', xlabel='$L$')
    for i in range(npols):
        for j in range(i, npols):
            nl1d = nls[i, j]
            if i != j:
                pl.add(cents, np.abs(nl1d), ls="--",
                       alpha=0.2)  #,label=pols[i]+'x'+pols[j]
            else:
                pl.add(cents, nl1d, alpha=0.6)  #,label=pols[i])

    ells = np.arange(0, 2500, 1)
    theory = cosmology.default_theory()
    clkk = theory.gCl('kk', ells)
    pl.add(ells, clkk, color='k', lw=3)
    pl.legend(loc='upper right')
    pl._ax.set_ylim(1e-10, 1e-4)
    pl.add(cents, ncoadd, color='red', lw=3)  #,label='MV')
    pl.done("nlkk_%s.png" % tag)
    # pl = io.Plotter(xyscale='linlin')
    # pl.add(cents,nls[0,3],label="TT x EB")
    # pl.hline(y=0)
    # pl.done("nltteb_%s.png"%tag)
    io.save_cols("lensing_noise_%s.txt" % tag, (cents, ncoadd))
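# --- Hedged usage sketch (added; not part of the original example). io.save_cols
# writes plain whitespace-separated columns, as the later examples that reload its
# output with np.loadtxt(..., unpack=True) show, so the coadded lensing noise saved
# above can be read back and checked against the theory C_L^kk. The "default" tag
# below is just the function's default argument.
import numpy as np
from orphics import io, cosmology

cents, ncoadd = np.loadtxt("lensing_noise_default.txt", unpack=True)
theory = cosmology.default_theory()
ells = np.arange(2, 2500)
pl = io.Plotter(xyscale='linlog', xlabel='$L$', ylabel='$C_L$')
pl.add(ells, theory.gCl('kk', ells), color='k', lw=3, label='theory')
pl.add(cents, ncoadd, color='red', label='coadd noise')
pl.legend(loc='upper right')
pl.done("nlkk_check.png")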
Example #2
def save_cached_inpaint_geometries(cache_name,ras,decs,gtags,pcoords,gdicts):
    rootdir = _get_temp_root_dir() + cache_name
    assert not(os.path.exists(rootdir))
    os.mkdir(rootdir)
    
    io.save_cols(_get_radec_filename(rootdir),(ras,decs))
    np.save(_get_pcoords_filename(rootdir),pcoords)
    np.save(_get_gtags_filename(rootdir),gtags)

    for key in gdicts.keys():
        gd = gdicts[key]
        for item in gd.keys():
            np.save(_get_gdicts_filename(rootdir,key,item),gdicts[key][item])
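# --- Hedged sketch (added; not in the original excerpt): a complementary loader for
# the geometries cached above. It reuses the _get_*_filename helpers from the save
# function and assumes io.save_cols wrote ras/decs as two whitespace-separated text
# columns; the per-geometry gdicts sub-arrays are not reloaded here because their
# keys are not recoverable from this excerpt.
def load_cached_inpaint_geometries(cache_name):
    rootdir = _get_temp_root_dir() + cache_name
    assert os.path.exists(rootdir)
    ras, decs = np.loadtxt(_get_radec_filename(rootdir), unpack=True)
    pcoords = np.load(_get_pcoords_filename(rootdir))
    gtags = np.load(_get_gtags_filename(rootdir))
    return ras, decs, gtags, pcoords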
Example #3
def build_and_save_ilc(arrays,region,version,cov_version,beam_version,
                       solutions,beams,chunk_size,
                       effective_freq,overwrite,maxval,unsanitized_beam=False,do_weights=False,
                       pa1_shift = None,
                       pa2_shift = None,
                       pa3_150_shift = None,
                       pa3_090_shift = None,
                       no_act_color_correction=False, ccor_exp = -1, 
                       isotropize=False, isotropize_width=20):

    print("Chunk size is ", chunk_size*64./8./1024./1024./1024., " GB.")
    def warn(): print("WARNING: no bandpass file found. Assuming array ",dm.c['id']," has no response to CMB, tSZ and CIB.")
    aspecs = tutils.ASpecs().get_specs
    bandpasses = not(effective_freq)
    savedir = tutils.get_save_path(version,region)
    covdir = tutils.get_save_path(cov_version,region)
    assert os.path.exists(covdir)
    if not(overwrite):
        assert not(os.path.exists(savedir)), \
       "This version already exists on disk. Please use a different version identifier."
    try: os.makedirs(savedir)
    except:
        if overwrite: pass
        else: raise


    mask = enmap.read_map(covdir+"tilec_mask.fits")
    shape,wcs = mask.shape,mask.wcs
    Ny,Nx = shape
    modlmap = enmap.modlmap(shape,wcs)



    arrays = arrays.split(',')
    narrays = len(arrays)
    kcoadds = []
    kbeams = []
    bps = []
    names = []
    lmins = []
    lmaxs = []
    shifts = []
    cfreqs = []
    lbeams = []
    ells = np.arange(0,modlmap.max())
    for i,qid in enumerate(arrays):
        dm = sints.models[sints.arrays(qid,'data_model')](region=mask,calibrated=True)
        lmin,lmax,hybrid,radial,friend,cfreq,fgroup,wrfit = aspecs(qid)
        cfreqs.append(cfreq)
        lmins.append(lmin)
        lmaxs.append(lmax)
        names.append(qid)
        if dm.name=='act_mr3':
            season,array1,array2 = sints.arrays(qid,'season'),sints.arrays(qid,'array'),sints.arrays(qid,'freq')
            array = '_'.join([array1,array2])
        elif dm.name=='planck_hybrid':
            season,patch,array = None,None,sints.arrays(qid,'freq')
        else:
            raise ValueError
        kcoadd_name = covdir + "kcoadd_%s.npy" % qid
        kmask = maps.mask_kspace(shape,wcs,lmin=lmin,lmax=lmax)
        kcoadd = enmap.enmap(np.load(kcoadd_name),wcs)
        dtype = kcoadd.dtype
        kcoadds.append(kcoadd.copy()*kmask)
        kbeam = tutils.get_kbeam(qid,modlmap,sanitize=not(unsanitized_beam),version=beam_version,planck_pixwin=True)
        if dm.name=='act_mr3':
            lbeam = tutils.get_kbeam(qid,ells,sanitize=not(unsanitized_beam),version=beam_version,planck_pixwin=False) # note: no pixwin, but that doesn't matter since there is no ccorr for Planck
        elif dm.name=='planck_hybrid':
            lbeam = None
        else:
            raise ValueError
        lbeams.append(lbeam)
        kbeams.append(kbeam.copy())
        if bandpasses:
            try: 
                fname = dm.get_bandpass_file_name(array) 
                bps.append("data/"+fname)
                if (pa1_shift is not None) and 'PA1' in fname:
                    shifts.append(pa1_shift)
                elif (pa2_shift is not None) and 'PA2' in fname:
                    shifts.append(pa2_shift)
                elif (pa3_150_shift is not None) and ('PA3' in fname) and ('150' in fname):
                    shifts.append(pa3_150_shift)
                elif (pa3_090_shift is not None) and ('PA3' in fname) and ('090' in fname):
                    shifts.append(pa3_090_shift)
                else:
                    shifts.append(0)

            except:
                warn()
                bps.append(None)
        else:
            try: bps.append(cfreq)
            except:
                warn()
                bps.append(None)

    kcoadds = enmap.enmap(np.stack(kcoadds),wcs)



    # Read Covmat
    cov = maps.SymMat(narrays,shape[-2:])
    for aindex1 in range(narrays):
        for aindex2 in range(aindex1,narrays):
            icov = enmap.enmap(np.load(covdir+"tilec_hybrid_covariance_%s_%s.npy" % (names[aindex1],names[aindex2])),wcs)
            if isotropize:
                bin_edges = np.append([0.],np.arange(min(lmins),modlmap.max(),isotropize_width))
                binner = stats.bin2D(modlmap,bin_edges)
                ls,c1d = binner.bin(icov)
                icov = maps.interp(ls,c1d)(modlmap)
                
            if aindex1==aindex2: 
                icov[modlmap<lmins[aindex1]] = maxval
                icov[modlmap>lmaxs[aindex1]] = maxval
            cov[aindex1,aindex2] = icov
    cov.data = enmap.enmap(cov.data,wcs,copy=False)
    covfunc = lambda sel: cov.to_array(sel,flatten=True)

    assert cov.data.shape[0]==((narrays*(narrays+1))//2) # FIXME: generalize
    assert np.all(np.isfinite(cov.data))

    # Make responses
    responses = {}
    for comp in ['tSZ','CMB','CIB']:
        if bandpasses:
            if no_act_color_correction:
                responses[comp] = tfg.get_mix_bandpassed(bps, comp, bandpass_shifts=shifts)
            else:
                responses[comp] = tfg.get_mix_bandpassed(bps, comp, bandpass_shifts=shifts,
                                                         ccor_cen_nus=cfreqs, ccor_beams=lbeams, 
                                                         ccor_exps = [ccor_exp] * narrays)
        else:
            responses[comp] = tfg.get_mix(bps, comp)

    ilcgen = ilc.chunked_ilc(modlmap,np.stack(kbeams),covfunc,chunk_size,responses=responses,invert=True)

    # Initialize containers
    solutions = solutions.split(',')
    data = {}
    kcoadds = kcoadds.reshape((narrays,Ny*Nx))
    for solution in solutions:
        data[solution] = {}
        comps = solution.split('-')
        data[solution]['comps'] = comps
        if len(comps)<=2: 
            data[solution]['noise'] = enmap.zeros((Ny*Nx),wcs)
        if len(comps)==2: 
            data[solution]['cnoise'] = enmap.zeros((Ny*Nx),wcs)
        data[solution]['kmap'] = enmap.zeros((Ny*Nx),wcs,dtype=dtype) # FIXME: reduce dtype?
        if do_weights and len(comps)<=2:
            for qid in arrays:
                data[solution]['weight_%s' % qid] = enmap.zeros((Ny*Nx),wcs)
            

    for chunknum,(hilc,selchunk) in enumerate(ilcgen):
        print("ILC on chunk ", chunknum+1, " / ",int(modlmap.size/chunk_size)+1," ...")
        for solution in solutions:
            comps = data[solution]['comps']
            if len(comps)==1: # GENERALIZE
                data[solution]['noise'][selchunk] = hilc.standard_noise(comps[0])
                if do_weights: weight = hilc.standard_weight(comps[0])
                data[solution]['kmap'][selchunk] = hilc.standard_map(kcoadds[...,selchunk],comps[0])
            elif len(comps)==2:
                data[solution]['noise'][selchunk] = hilc.constrained_noise(comps[0],comps[1])
                data[solution]['cnoise'][selchunk] = hilc.cross_noise(comps[0],comps[1])
                ret = hilc.constrained_map(kcoadds[...,selchunk],comps[0],comps[1],return_weight=do_weights)
                if do_weights:
                    data[solution]['kmap'][selchunk],weight = ret
                else:
                    data[solution]['kmap'][selchunk] = ret

            elif len(comps)>2:
                data[solution]['kmap'][selchunk] = np.nan_to_num(hilc.multi_constrained_map(kcoadds[...,selchunk],comps[0],*comps[1:]))

            if len(comps)<=2 and do_weights:
                for qind,qid in enumerate(arrays):
                    data[solution]['weight_%s' % qid][selchunk] = weight[qind]


    del ilcgen,cov

    # Reshape into maps
    name_map = {'CMB':'cmb','tSZ':'comptony','CIB':'cib'}
    beams = beams.split(',')
    for solution,beam in zip(solutions,beams):
        comps = "tilec_single_tile_"+region+"_"
        comps = comps + name_map[data[solution]['comps'][0]]+"_"
        if len(data[solution]['comps'])>1: comps = comps + "deprojects_"+ '_'.join([name_map[x] for x in data[solution]['comps'][1:]]) + "_"
        comps = comps + version

        if do_weights and len(data[solution]['comps'])<=2:
            for qind,qid in enumerate(arrays):
                enmap.write_map("%s/%s_%s_weight.fits" % (savedir,comps,qid), enmap.enmap(data[solution]['weight_%s' % qid].reshape((Ny,Nx)),wcs))
            


        try:
            noise = enmap.enmap(data[solution]['noise'].reshape((Ny,Nx)),wcs)
            enmap.write_map("%s/%s_noise.fits" % (savedir,comps),noise)
        except: pass
        try:
            cnoise = enmap.enmap(data[solution]['cnoise'].reshape((Ny,Nx)),wcs)
            enmap.write_map("%s/%s_cross_noise.fits" % (savedir,comps),cnoise)
        except: pass

        ells = np.arange(0,modlmap.max(),1)
        try:
            fbeam = float(beam)
            kbeam = maps.gauss_beam(modlmap,fbeam)
            lbeam = maps.gauss_beam(ells,fbeam)
        except:
            qid = beam
            bfunc = lambda x: tutils.get_kbeam(qid,x,version=beam_version,sanitize=not(unsanitized_beam),planck_pixwin=False)
            kbeam = bfunc(modlmap)
            lbeam = bfunc(ells)

        kmap = enmap.enmap(data[solution]['kmap'].reshape((Ny,Nx)),wcs)
        smap = enmap.ifft(kbeam*kmap,normalize='phys').real
        enmap.write_map("%s/%s.fits" % (savedir,comps),smap)
        io.save_cols("%s/%s_beam.txt" % (savedir,comps),(ells,lbeam),header="ell beam")


    enmap.write_map(savedir+"/tilec_mask.fits",mask)
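# --- Hedged call sketch (added; the qids, version strings and numbers below are
# placeholders, not values from the original pipeline). As the parsing above shows,
# `arrays`, `solutions` and `beams` are comma-separated strings that get split
# internally; within each solution the target component comes first and any
# deprojected components follow, separated by hyphens; each beam entry is either a
# float FWHM or an array qid whose beam should be reused.
build_and_save_ilc(arrays="qid1,qid2,qid3",
                   region="deep56",
                   version="map_v0_test",
                   cov_version="cov_v0_test",
                   beam_version=None,
                   solutions="CMB,CMB-tSZ,tSZ-CMB-CIB",
                   beams="1.5,1.5,2.0",
                   chunk_size=2000000,
                   effective_freq=False,  # False => look up bandpass files
                   overwrite=True,
                   maxval=1e10)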
Example #4
    pl1.done(pout_dir+"lensed_lnlikes_all.png")
    
    # Bayesian
    c,b,a = bp
    mean = -b/2./c
    sigma = np.sqrt(-1./2./c)
    print(mean,sigma)
    sn = (kamp_true/sigma)
    pbias = (mean-kamp_true)*100./kamp_true
    print ("BE Bias : ",pbias, " %")
    print ("BE Bias : ",(mean-kamp_true)/sigma, " sigma")
    print ("S/N for 1000 : ",sn*np.sqrt(1000./args.Nclusters))

    like = np.exp(blnlikes)
    like /= like.max()
    nkamps = np.linspace(bkamps.min(),bkamps.max(),1000)
    
    pl2 = io.Plotter(xlabel="$A$",ylabel="$\\mathcal{L}$")
    
    pl2.add(nkamps,np.exp(-(nkamps-mean)**2./2./sigma**2.),label="BE likelihood from chisquare fit")
    # pl2.add(bkamps,like,label="BE likelihood")

    
    io.save_cols("nofg_unmarginalized.txt",(nkamps,np.exp(-(nkamps-mean)**2./2./sigma**2.)))

    pl2.vline(x=kamp_true,ls="--")
    pl2.legend(loc='upper left')
    pl2.done(pout_dir+"lensed_likes.png")

    
Example #5
        w1d = w1ds[i]
        w1d[cents < lmin] = np.nan
        w1d[cents > lmax] = np.nan

        if tutils.is_lfi(qid):
            ls = "-."
            lab = "LFI %d GHz" % cfreq
        elif tutils.is_hfi(qid):
            ls = "--"
            lab = "HFI %d GHz" % cfreq
        else:
            ls = "-"
            aind = qid.split("_")[1]
            lab = actmap[qid]  #"ACT_%s %d GHz" % (aind,cfreq )
        mul = 1e7 if comp == 'comptony' else 1
        io.save_cols("weights_%s_%s_%s.txt" % (comp, version, lab),
                     (cents, w1d))
        pl.add(cents, w1d * mul, label=lab, ls=ls)
    pl._ax.set_xlim(20 + bw / 2., 10000)

    pl._ax.yaxis.set_minor_locator(AutoMinorLocator())
    #pl._ax.xaxis.set_minor_locator(AutoMinorLocator())
    pl._ax.tick_params(axis='x', which='both', width=1)
    pl._ax.tick_params(axis='y', which='both', width=1)
    pl._ax.xaxis.grid(True, which='both', alpha=0.3)
    pl._ax.yaxis.grid(True, which='both', alpha=0.3)

    pl.legend(loc='upper right', bbox_to_anchor=(1.4, 1), labsize=12)
    pl.done(("%s/fig_weight1d_%s_%s" %
             (os.environ['WORK'], comp, version)).replace('.', '_') + ".pdf")
            bfile = os.environ["WORK"] + "/data/depot/tilec/v1.2.0_20200324/map_v1.2.0_%s_%s/tilec_single_tile_%s_cmb%s_map_v1.2.0_%s_beam.txt" % (cversion,region,region,method,cversion)
            yfile = os.environ["WORK"] + "/data/depot/tilec/v1.2.0_20200324/map_v1.2.0_%s_%s/tilec_single_tile_%s_cmb%s_map_v1.2.0_%s.fits" % (cversion,region,region,method,cversion)
            w2 = np.mean(mask**2.)

            als,bells = np.loadtxt(bfile,unpack=True)
            imap = enmap.read_map(yfile)
            modlmap = mask.modlmap()
            bin_edges = np.arange(20,6000,80)

            binner = stats.bin2D(modlmap,bin_edges)
            kmap = enmap.fft(imap,normalize='phys')/maps.interp(als,bells)(modlmap)*maps.gauss_beam(modlmap,dfwhm)
            kmap[~np.isfinite(kmap)] = 0
            p2d = (kmap*kmap.conj()).real / w2

            cents,p1d = binner.bin(p2d)
            io.save_cols(os.environ['WORK']+"/cpower_tilec_%s_%s.txt" % (region,method), (cents,p1d))
        else:
            cents,p1d = np.loadtxt("cpower_tilec_%s_%s.txt" % (region,method), unpack=True)

        pl.add(cents,p1d,color=col,label='This work (%s)' % region_map[region])

        if methodp=='sz':
            szlab = "SMICA (PR2)"
        elif methodp=='nosz':
            szlab = "SMICA-nosz (PR3)"


        if redo:

            bin_edges = np.arange(20,3000,20)
            binner = stats.bin2D(modlmap,bin_edges)
f0 = 100

acls = []

monos = {}
ms = []
freqs = []
for i,f in enumerate(fs):
    freq = int(f.split('_')[1].split('.')[0])
    # r = tfg.get_mix(f0, 'CIB_Jysr')/tfg.get_mix(freq, 'CIB_Jysr')

    imap = hp.read_map(f)
    _,mono = hp.remove_monopole(imap,fitval=True)
    monos[freq] = mono
    ms.append(mono)
    freqs.append(freq)

io.save_cols("cib_monopoles.txt",(freqs,ms))

pl = io.Plotter(xyscale='loglin',xlabel='f',ylabel='m/m0')
for freq in monos.keys():
    r = tfg.get_mix(f0, 'CIB_Jysr')/tfg.get_mix(freq, 'CIB_Jysr')/monos[f0]
    pl._ax.scatter(freq,monos[freq]*r,color="C0")
pl.hline(y=1)
pl.done("mono_webskycib.png")
Example #8
rlim = 1.0
jras = np.append(eras , bras)
jdecs = np.append(edecs , bdecs)
ocat = merge_duplicates(jras*utils.degree,jdecs*utils.degree, rlim=rlim*utils.arcmin)  / utils.degree

extras = [(-174.9567, 2.0733),
          (-178.078, -1.108),
          (166.408, 2.043)]

print(ocat.shape)
for extra in extras:
    ocat = np.vstack((ocat,extra))

print(ocat.shape)

io.save_cols("union_catalog_%s.csv" % version,(ocat[:,0],ocat[:,1]),delimiter=',',header='ra(deg),dec(deg) | Made using actsims/bin/union_srcs.py.',fmt='%.5f')
#ras,decs = sints.get_act_mr3f_union_sources()
#assert np.all(np.isclose(ras,ocat[:,0]))
#assert np.all(np.isclose(decs,ocat[:,1]))


if debug:
    xmin = -190 ;  xmax = 190
    ymin = -70 ; ymax = 35
    s = 0.1
    dpi = 100

    plt.scatter(jras,jdecs,s=s,marker=',')
    plt.xlim(xmin,xmax)
    plt.ylim(ymin,ymax)
    plt.savefig("scatter.png",dpi=dpi)
Example #9
num_each,each_tasks = mpi.mpi_distribute(Njobs,numcores,allow_empty=True)
if rank==0: print ("At most ", max(num_each) , " tasks...")
my_tasks = each_tasks[rank]

for task in my_tasks:
    actseason0,array0,actseason1,array1 = combs[task]
    actbeam0 = lambda x: dma.get_beam(x, actseason0, patch, array0)
    actbeam1 = lambda x: dma.get_beam(x, actseason1, patch, array1)

    actmap0 = dma.get_coadd(actseason0, patch, array0, 
                            ncomp=1, srcfree=True)[0,:,:] # just want T
    actmap1 = dma.get_coadd(actseason1, patch, array1, 
                            ncomp=1, srcfree=True)[0,:,:] # just want T
    lb, Cb = compute_ps(actmap0, actmap1, actbeam0, actbeam1)

    io.save_cols("/scratch/r/rbond/msyriac/data/depot/actsims/spectra/spec%s_%s_%s_%s_%s_%s.txt" % (fftstr,patch, actseason0, array0, actseason1, array1),(lb,Cb))
    if rank==0: print ("Rank 0 done with task ", task+1, " / " , len(my_tasks))


comm.Barrier()

# combs = []
# nsplits = 4

# # we loop over all pairs of ACT x ACT
# for actseason0 in ['s14','s15']: # s13 doesn't have these patches
#     for array0 in ['pa1_f150', 'pa2_f150', 'pa3_f090', 'pa3_f150']:
#         try:
#             actbeam = lambda x: dma.get_beam(x, actseason0, patch, array0)
#             actbeam(100)
#             combs.append((actseason0,array0,actseason0,array0))
l = [70,143,217,353,545,857]
freqs = [100] + l
fs = ["/scratch/r/rbond/msyriac/data/sims/websky/new_cib/cib_ns4096_nu%s.fits" % str(x).zfill(4) for x in  freqs]
f0 = 100.

acls = []

anis = []
monos = []



for i,f in enumerate(fs):

    freq = freqs[i]
    print(freq)
    imap = hp.read_map(f)
    _,mono = hp.remove_monopole(imap,fitval=True)

    ialm = hp.map2alm(imap)
    cls = hp.alm2cl(ialm)
    ells = np.arange(len(cls))
    acls.append(cls)

    anis.append(cls[np.logical_and(ells>4000,ells<6000)].mean())
    monos.append(mono)

    

io.save_cols(os.environ['WORK'] + "/new_cib.txt",(freqs,monos,anis))
Example #11
from __future__ import print_function
from orphics import maps, io, cosmology, stats
from pixell import enmap
import numpy as np
import os, sys
from falafel.qe import symlens_norm as get_norm

thloc = "/scratch/r/rbond/msyriac/data/sims/alex/v0.4/cosmo2017_10K_acc3"
theory = cosmology.loadTheorySpectraFromCAMB(thloc, get_dimensionless=False)

ells = np.arange(3100)
uctt = tctt = theory.lCl('TT', ells)
ucee = tcee = theory.lCl('EE', ells)
ucbb = tcbb = theory.lCl('BB', ells)
ucte = tcte = theory.lCl('TE', ells)
clkk = theory.gCl('kk', ells)

ls, Als, al_mv_pol, al_mv, Al_te_hdv = get_norm(uctt,
                                                tctt,
                                                ucee,
                                                tcee,
                                                ucte,
                                                tcte,
                                                ucbb,
                                                tcbb,
                                                lmin=100,
                                                lmax=3000,
                                                plot=False)
io.save_cols("norm.txt", (ls, Als['TT'], Als['EE'], Als['EB'], Als['TE'],
                          Als['TB'], al_mv_pol, al_mv, Al_te_hdv))
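# --- Hedged read-back sketch (added; not part of the original script): the nine
# columns written above can be reloaded into named per-estimator normalization
# curves with np.loadtxt, in the same order they were saved.
ls, A_TT, A_EE, A_EB, A_TE, A_TB, A_mv_pol, A_mv, A_te_hdv = np.loadtxt(
    "norm.txt", unpack=True)
pl = io.Plotter(xyscale='linlog', xlabel='$L$', ylabel='$A_L$')
pl.add(ls, A_TT, label='TT')
pl.add(ls, A_mv, label='MV')
pl.legend(loc='upper right')
pl.done("norm_check.png")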
Example #12
                cversion, region, region, cversion)
        w2 = np.mean(mask**2.)

        ls, bells = np.loadtxt(bfile, unpack=True)
        imap = enmap.read_map(yfile)
        modlmap = mask.modlmap()
        bin_edges = np.arange(20, 6000, 80)

        binner = stats.bin2D(modlmap, bin_edges)
        kmap = enmap.fft(imap, normalize='phys') / maps.interp(
            ls, bells)(modlmap) * maps.gauss_beam(modlmap, dfwhm)
        kmap[~np.isfinite(kmap)] = 0
        p2d = (kmap * kmap.conj()).real / w2

        cents, p1d = binner.bin(p2d)
        io.save_cols("ypower_tilec_%s.txt" % region, (cents, p1d))
    else:
        cents, p1d = np.loadtxt("ypower_tilec_%s.txt" % region, unpack=True)

    pl.add(cents, p1d, color=col, label='This work (%s)' % region_map[region])

    for ls, method in zip(['--', ':'], ['nilc', 'milca']):
        if method == 'milca': continue

        if redo:

            bin_edges = np.arange(20, 3000, 80)
            binner = stats.bin2D(modlmap, bin_edges)

            alm = hp.read_alm(
                ppath +
Example #13
dbinner = stats.bin2D(dmask.modlmap(), bin_edges)
cents, de_noise1d = dbinner.bin(de_p2d)
cents, db_noise1d = dbinner.bin(db_p2d)
cents, oe_noise1d = binner.bin(oe_p2d)
cents, ob_noise1d = binner.bin(ob_p2d)

#cents,smoothed_oe_noise1d = binner.bin(smoothed_oe_p2d)

pl = io.Plotter(xlabel='l', ylabel='C', xyscale='linlog')
pl.add(cents, de_noise1d, label='d6 E')
#pl.add(cents,db_noise1d,label='d6 B')
pl.add(cents, oe_noise1d, label='kspace E')
#pl.add(cents,smoothed_oe_noise1d,label='smoothed kspace E')
#pl.add(cents,ob_noise1d,label='kspace B')
pl.done("powcomp_pol.png")
"""
We make lensing noise curves from the 1d noise curves of:
(a) the ILC A+P tSZ-deprojected symmetrized combination
(b) k-space Q/U
"""

txnoise = ti_noise1d
tynoise = ti_nosz_noise1d
tcrossnoise = ti_cross_noise1d

enoise = oe_noise1d
bnoise = ob_noise1d

io.save_cols("lnoises.txt",
             (cents, txnoise, tynoise, tcrossnoise, enoise, bnoise))
Example #14
if rank==0: print("Rank 0 starting ...")
for k,my_task in enumerate(my_tasks):
    kamp = kamps[my_task]


    kappa_template = lensing.nfw_kappa(kamp*1e15,bmodrmap,cc,overdensity=200.,critical=True,atClusterZ=True)
    phi,_ = lensing.kappa_to_phi(kappa_template,bmodlmap,return_fphi=True)
    grad_phi = enmap.grad(phi)
    pos = posmap + grad_phi
    alpha_pix = enmap.sky2pix(bshape,bwcs,pos, safe=False)


    def do_the_thing():
        return lensing.lens_cov(Ucov,alpha_pix,lens_order=lens_order,kbeam=kbeam,bshape=shape)

    if rank==0:
        with bench.show("rank 0 lensing cov"):
            Scov = do_the_thing()
    else:
        Scov = do_the_thing()
        
    np.save(cov_name(my_task),Scov)


if rank==0:
    io.save_cols(GridName+"/amps.txt",(kamps,))
    import json
    save_dict = {"arc":args.arc,"pix":args.pix,"beam":args.beam}
    with open(GridName+"/attribs.json",'w') as f:
        f.write(json.dumps(save_dict))
Example #15
"""
Given the mask saved from mapcat.py, generate and save
a random catalog.
"""


paths = cutils.paths
#cat_type = "wise_panstarrs"
#cat_type = "madcows_photz"
cat_type = args.sys[1]
Nx = int(args.sys[2])

ras,decs,_,_,_ = cutils.catalog_interface(cat_type,is_meanfield=False)
N = Nx * len(ras)

mask = enmap.read_map(f'{paths.scratch}{cat_type}_mask.fits')
shape,wcs = mask.shape,mask.wcs
Npix = mask[mask>0].size
inds = np.random.choice(Npix,size=N,replace=False)

pixs = enmap.pixmap(shape,wcs)

print(pixs.shape)

coords = mask.pix2sky(pixs[:,mask>0][:,inds]) / utils.degree
io.save_cols(paths.data+f"{cat_type}_randoms.txt",(coords[1],coords[0]))
print(coords.shape)
cmapper = catalogs.CatMapper(coords[1],coords[0],shape=shape,wcs=wcs)
io.hplot(enmap.downgrade(cmapper.counts,16),'randcounts',mask=0)

Example #16
        pl.done(io.dout_dir+"clkk.png")


        pl = io.Plotter(xlabel='$L$',ylabel='$\Delta C_L/C_L$')
        save_tuples = []
        save_tuples.append(cents)
        header = ""
        header += "L \t"
        for pcomb in polcombs+['TE_ET','mv','tteb']:
            pmean,perr = st.stats["r_"+pcomb]['mean'],st.stats["r_"+pcomb]['errmean']
            pl.add_err(cents,pmean,yerr=perr,marker="o",mew=2,elinewidth=2,lw=2,label=pcomb,ls="--" if pcomb=='mv' else "-")
            save_tuples.append(pmean)
            save_tuples.append(perr)
            header += pcomb+" \t"
            header += pcomb+"_err \t"
        io.save_cols(filename("rclkk","txt"),save_tuples,header=header,delimiter='\t')
        pl.legend(loc='upper right')
        pl.hline()
        pl._ax.set_ylim(-0.1,0.05)
        pl.done(io.dout_dir+"rclkk.png")



        pl = io.Plotter(yscale='log',xlabel='$L$',ylabel='$C_{\\ell}$')
        ells = np.arange(2,args.pellmax,1)
        for i,cmb in enumerate(['TT','EE','BB']):
            pl.add(ells,theory.lCl(cmb,ells)*ells**2.,lw=3,color="k")
            pmean,perr = st.stats["c"+cmb]['mean'],st.stats["c"+cmb]['errmean']
            pl.add_err(cents,pmean*cents**2.,yerr=perr*cents**2.,marker="o",mew=2,elinewidth=2,ls="-",lw=2,label=cmb,color="C"+str(i))
            if args.debug_noise:
                pmean,perr = st.stats["n"+cmb]['mean'],st.stats["n"+cmb]['errmean']
            ri = totcrosses[solution]
            pl.add(cents,
                   ri,
                   label=solution,
                   ls="none",
                   marker="o",
                   color=color,
                   markersize=2,
                   alpha=0.8)

            rr = totautos[solution]
            pl.add(cents, rr, alpha=0.4, color=color)
        pl.done(os.environ['WORK'] + "/val_%s_%s_%s.png" %
                (input_name, args.region, args.version))
        io.save_cols(
            "%s/verification_%s_%s_%s.txt" %
            (os.environ['WORK'], input_name, args.region, args.version),
            (cents, ri, rr, ii))

    for input_name in input_names:
        pl = io.Plotter(xyscale='linlin',
                        xlabel='$\\ell$',
                        ylabel='$\Delta C_{\\ell} / C_{\\ell}$',
                        ftsize=16,
                        labsize=14)
        plt.gca().set_prop_cycle(None)
        # ii = totcmb if input_name=='CMB' else tottsz
        #ii = totinputs[input_name]
        i = 0
        for solution in args.solutions.split(','):
            rat = s.stats['rat_%s' % solution]['mean']
            erat = s.stats['rat_%s' % solution]['err']
Example #18
    else:
        flmin = 1000
        
    if not(tutils.is_planck(qid1)) and not(tutils.is_planck(qid2)): 
        flmax = 5000
    elif tutils.is_hfi(qid1) and tutils.is_hfi(qid2):
        flmax = 2000
    elif (not(tutils.is_planck(qid1)) and tutils.is_hfi(qid2)) or (not(tutils.is_planck(qid2)) and tutils.is_hfi(qid1)):
        flmax = 3000
    elif (qid1=='p03' or qid2=='p03') and (qid1 not in ['p01','p02']) and (qid2 not in ['p01','p02']):
        flmax = 1000
    else:
        flmax = 300
    
    if flmax<=flmin:  #!!!!!!
        io.save_cols("%sfgcov_%s_%s.txt" % (opath,qid1,qid2),(fells,fells*0))
    else:


        print("Rank %d doing task %d for array %s x %s with lmin %d and lmax %d ..." % (rank,task,qids[0],qids[1],flmin,flmax))

        # ERROR CALC
        c11 = stheory.get_theory_cls(f1,f1,a_cmb=1,a_gal=0.8) 
        n11 = maps.interp(ncents,n1d1)(ccents)/fbeam1(ccents)/fbeam1(ccents)
        c22 = stheory.get_theory_cls(f2,f2,a_cmb=1,a_gal=0.8) 
        n22 = maps.interp(ncents,n1d2)(ccents)/fbeam2(ccents)/fbeam2(ccents)
        c12 = stheory.get_theory_cls(f1,f2,a_cmb=1,a_gal=0.8) 
        n12 = maps.interp(ncents,n1d)(ccents)/fbeam1(ccents)/fbeam2(ccents)
        c11[~np.isfinite(c11)] = 0
        c12[~np.isfinite(c12)] = 0
        c22[~np.isfinite(c22)] = 0
Example #19
nmax = len(ils)

rdn0 = bias.rdn0(icov=0,
                 alpha=polcomb,
                 beta=polcomb,
                 qfunc=qfunc,
                 get_kmap=get_kmap,
                 comm=comm,
                 power=power,
                 nsims=nsims)
#rdn0 = bias.mcn1(0,'TT','TT',qfunc,get_kmap,comm,power,nsims,verbose=True)

rdn0[:nmax] = rdn0[:nmax] * Als[polcomb]**2.
if not (args.no_mask):
    rdn0[:nmax] = rdn0[:nmax] / w4
rdn0[nmax:] = 0
io.save_cols(f'{solenspipe.opath}/rdn0_{polcomb}_{isostr}_{car}_new.txt',
             (ils, rdn0[:nmax]))

if rank == 0:
    theory = cosmology.default_theory()

    ls = np.arange(rdn0.size)
    pl = io.Plotter('CL')
    pl.add(ils, rdn0[:nmax])
    pl.add(ils, Nl)
    pl.add(ils, theory.gCl('kk', ils))
    #pl._ax.set_ylim(1e-9,1e-6)
    pl.done(f'{solenspipe.opath}/recon_rdn0.png')
f0 = 100.

acls = []

anis = []
freqs = []

for i, f in enumerate(fs):
    freq = int(f.split('_')[1])
    #r = tfg.get_mix(f0, 'CIB')/tfg.get_mix(freq, 'CIB')
    r = 1  #tfg.get_mix(f0, 'CIB_Jysr')/tfg.get_mix(freq, 'CIB_Jysr')
    print(freq)
    #ialm = hp.read_alm(f) * 1e6 * tfg.ItoDeltaT(freq) / 1e26 * r
    ialm = hp.read_alm(f) * r

    cls = hp.alm2cl(ialm)
    ells = np.arange(len(cls))
    acls.append(cls)

    anis.append(cls[np.logical_and(ells > 4000, ells < 6000)].mean() / r / r)
    freqs.append(freq)

    # pl = io.Plotter(xyscale='linlin',xlabel='l',ylabel='C/C0')
    # for j,cl in enumerate(acls):
    #     pl.add(ells,cl/acls[0],label=fs[j].split('_')[1])
    # pl.hline(y=1)
    # pl._ax.set_ylim(0,3)
    # pl.done("webskycib.png")

io.save_cols(os.environ['WORK'] + "/cib_anis.txt", (freqs, anis))
                                              np.deg2rad(pfwhm / 60.)),
                                          div=divstamp,
                                          ps=ps,
                                          beam=pfwhm,
                                          n2d=None)
        # model = pointsrcs.sim_srcs(stamp.shape, stamp.wcs,
        #                            np.array((dec,ra,famp.reshape(-1)[0]))[None],
        #                            maps.sigma_from_fwhm(np.deg2rad(pfwhm/60.)))
        # io.plot_img(np.log10(stamp),"stamp_%d.png" % k)
        # io.plot_img(divstamp,"divstamp_%d.png"  % k)
        # io.plot_img(model,"model_%d.png"  % k)
        # io.plot_img(stamp-model,"residual_%d.png"  % k)

        # if k==1: sys.exit()
        sdecs.append(dec)
        sras.append(ra)
        amps.append(famp.reshape(-1)[0])
        print(famp, sns[k])
        print("Done with source ", k + 1, " / ", len(ras))

    srcs = np.stack((sdecs, sras, amps)).T
    shape, wcs = imap.shape, imap.wcs
    model = pointsrcs.sim_srcs(shape[-2:], wcs, srcs,
                               maps.sigma_from_fwhm(np.deg2rad(pfwhm / 60.)))
    omap = imap - model
    mname = fname.replace('.fits', '_pccs_sub_model.fits')
    cname = fname.replace('.fits', '_pccs_sub_catalog.txt')
    io.save_cols(cname, (sras, sdecs, amps))
    enmap.write_fits(mname, model[None])
    enmap.write_fits(oname, omap[None])
Example #22
    print(eigs)
    # assert np.all(eigs>0)

    print("==== hybrid")
    eigs = np.linalg.eigh(hmat[:, :, sell])[0]
    print(sell)
    print(hcorr[:, :, sell])
    print(eigs)
    # assert np.all(eigs>0)

pl = io.Plotter(xyscale='linlog', xlabel='l', ylabel='C')
for i in range(narrays):
    for j in range(i, narrays):
        qid1 = qids[i]
        qid2 = qids[j]

        # if "p02" not in [qid1,qid2]: continue # !!!!

        f1d = cfgres[i, j]
        t1d = tfgres[i, j]
        h1d = hmat[i, j]
        pl.add(ells[:lmax], f1d, color=f"C{c}", lw=1, label=f'{qid1} x {qid2}')
        pl.add(ells[:lmax], t1d, color=f"C{c}", lw=1, ls='--')
        pl.add(ells[:lmax], h1d, color=f"C{c}", lw=2, ls=':')
        io.save_cols(f"{fpath}/tfgcov_{qid1}_{qid2}.txt", (ells, t1d))
        io.save_cols(f"{fpath}/hfgcov_{qid1}_{qid2}.txt", (ells, h1d))

        c += 1
pl._ax.set_xlim(2, 5800)
pl.done("debugfits2.png")
               label='standard - noise')
    pl.add_err(cents + 10,
               cmb_cilc_auto,
               yerr=ecmb_cilc_auto,
               marker="x",
               ls="none",
               label='constrained  - noise')
    # pl._ax.set_ylim(2e3,9e4)
    pl._ax.set_ylim(2e-1, 9e4)
    pl._ax.set_xlim(0, lmax)
    # pl._ax.set_xlim(0,2500)
    pl.legend(loc='lower left')
    pl.done(io.dout_dir + "cmb_cross.png")

    io.save_cols("cmb_results.txt",
                 (cents, cmb_silc_cross, ecmb_silc_cross, cmb_cilc_cross,
                  ecmb_cilc_cross, cmb_silc_auto, ecmb_silc_auto,
                  cmb_cilc_auto, ecmb_cilc_auto))

    iclyy = binner.bin(maps.interp(ells, clyy)(modlmap))[1]
    pl = io.Plotter(scalefn=lambda x: x**2,
                    xlabel='l',
                    ylabel='D',
                    yscale='log')
    pl.add(ells, clyy)
    pl.add(cents, iclyy, marker="x", ls="none", color='k')
    pl.add_err(cents - 5,
               y_silc_cross,
               yerr=ey_silc_cross,
               marker="o",
               ls="none",
               label='standard cross')