Beispiel #1
0
    def write_vector_to_sacc(self,
                             fname_out,
                             sacc_t,
                             sacc_b,
                             cls,
                             covar=None,
                             verbose=False):
        """
        Save a set of power spectrum measurements as a SACC file.

        :param fname_out: path to the output file.
        :param sacc_t: list of SACC tracers.
        :param sacc_b: SACC Binning object.
        :param cls: array of power spectrum measurements.
        :param covar: covariance matrix, or None to omit the precision block.
        :param verbose: if True, print a summary of the SACC object.
        """
        # Flatten all spectra into a single mean vector.
        sacc_mean = sacc.MeanVec(cls.flatten())

        # Only build a precision block when a covariance was supplied.
        sacc_precision = None
        if covar is not None:
            sacc_precision = sacc.Precision(covar,
                                            "dense",
                                            is_covariance=True,
                                            binning=sacc_b)

        sacc_meta = {'Area_rad': self.area_patch}
        s = sacc.SACC(sacc_t,
                      sacc_b,
                      sacc_mean,
                      precision=sacc_precision,
                      meta=sacc_meta)
        if verbose:
            s.printInfo()
        s.saveToHDF(fname_out)
Beispiel #2
0
    def save_cell_to_file(self, cell, tracers, binning, fname):
        """Assemble all spectra into a single vector and write a SACC file."""
        # Ordering per pair: EE, EB, (BE only when the two maps differ), BB.
        vector = []
        for b1, b2, s1, s2, l1, l2 in self.get_cell_iterator():
            same_map = (b1 == b2) and (s1 == s2)
            spectra = cell[l1][l2]
            vector.append(spectra[0])  # EE
            vector.append(spectra[1])  # EB
            if not same_map:
                # B1E2 is only an independent measurement for distinct maps.
                vector.append(spectra[2])  # BE
            vector.append(spectra[3])  # BB

        sacc_mean = sacc.MeanVec(np.array(vector).flatten())
        s = sacc.SACC(tracers, binning, sacc_mean)
        print("Saving to " + fname)
        s.saveToHDF(fname)
Beispiel #3
0
               axis=0) / area)
    for i_w, ww in enumerate(sc['GAMA09H'].binning.windows)
]

# Bins
# Reuse the binning of the GAMA09H field and attach the coadded windows.
s_bn = sc['GAMA09H'].binning
s_bn.windows = wins

# Tracers
# NOTE(review): assumes exactly 4 redshift bins — confirm against upstream.
s_tr = []
for i_t in range(4):
    T = sacc.Tracer('bin_%d' % i_t,
                    'point',
                    zs[i_t],
                    Nz[i_t],
                    exp_sample="HSC_DESC")
    # Attach one extra N(z) column per photo-z code.
    T.addColumns({pn: ec[pn][i_t] for pn in pz_codes})
    s_tr.append(T)

# Signal spectra
s_mean = sacc.MeanVec(mean)
s_prec = sacc.Precision(cov, "dense", is_covariance=True, binning=s_bn)
s_meta = {'Area_rad': area}
s = sacc.SACC(s_tr, s_bn, s_mean, precision=s_prec, meta=s_meta)
s.saveToHDF("COADD/power_spectra_wdpj.sacc")
# Noise spectra
s_mean = sacc.MeanVec(mean_n)
# The noise file carries no bandpower windows.
s_bn.windows = None
s = sacc.SACC(s_tr, s_bn, s_mean, meta=s_meta)
s.saveToHDF("COADD/noi_bias.sacc")
Beispiel #4
0
                          ['BK15_95_B', 'P353_B'], ['BK15_95_E', 'P353_B']])


def get_tracer_from_name(name, exp_sample=None):
    """Read the BK15 bandpass file for `name` and wrap it as a spin-2 tracer."""
    fname = "BK15_cosmomc/data/BK15/bandpass_" + name + ".txt"
    # Row 0: frequency; row 1: bandpass transmission (after unpack).
    d = np.loadtxt(fname, unpack=True)
    return sacc.Tracer(name, "spin2", d[0], d[1], exp_sample)


#Tracers
tracers = [get_tracer_from_name(t, e) for t, e in zip(tracer_names, exp_names)]

#Mean vector
# [1:] drops the first column (the ell values) read from the file.
dv = np.loadtxt("BK15_cosmomc/data/BK15/BK15_cl_hat.dat", unpack=True)[1:]
ncls, nells = dv.shape
meanvec = sacc.MeanVec(dv.flatten())

#Precision matrix
# Reorder the 4D covariance from [ell, cl, ell, cl] to [cl, ell, cl, ell].
precis = sacc.Precision(
    np.transpose(np.loadtxt("BK15_cosmomc/data/BK15/BK15_covmat_dust.dat",
                            unpack=True).reshape([nells, ncls, nells, ncls]),
                 axes=[1, 0, 3, 2]))

#Binning
# Effective ells taken from the first bandpower-window file.
ls = np.loadtxt("BK15_cosmomc/data/BK15/windows/BK15_bpwf_bin1.txt",
                unpack=True)[0]
# One window file per bandpower; [1:] again drops the ell column.
windows = np.array([
    np.loadtxt("BK15_cosmomc/data/BK15/windows/BK15_bpwf_bin%d.txt" % (i + 1),
               unpack=True)[1:] for i in range(nells)
])
typ_arr = []
Beispiel #5
0
                sacc.Window(
                    np.arange(l - 50, l + 50),
                    np.exp(-(1.0 * np.arange(-50, 50))**2 / (2 * 20.**2))))
            ## We refer to tracers by their index
            t1.append(t1i)
            t2.append(t2i)
            ## Here we have density cross-correlations so "P" as point
            ## except for CMB where
            q1.append('S')
            q2.append('S')
            ## values and errors
            val.append(np.random.uniform(0, 10))
            err.append(np.random.uniform(1, 2))

# Bundle the per-measurement metadata into a SACC binning and mean vector.
binning = sacc.Binning(type, ell, t1, q1, t2, q2, windows=wins)
mean = sacc.MeanVec(val)

## We need to add covariance matrix. We will use ell_block_diagonal
## where everything is coupled across tracers/redshifts at the same ell but not
## across ell with fixed 10% off-diagonal elements
Np = binning.size()
cov = np.zeros((Np, Np))
for i in range(Np):
    for j in range(i, Np):
        if ell[i] == ell[j]:
            # Diagonal: err_i^2; same-ell off-diagonal: 10% of err_i*err_j.
            cov[i, j] = err[i] * err[j]
            if (i != j):
                cov[i, j] /= 10
            # Mirror to keep the matrix symmetric.
            cov[j, i] = cov[i, j]
precision = sacc.Precision(cov,
                           "ell_block_diagonal",
Beispiel #6
0
}

# Theory prediction
cl_theory = get_theory(hod_params,
                       cosmo_params,
                       s_mean,
                       halo_mod_corrector=HMCorr)

# Perturb true spectra
# Draw a Gaussian realization around the theory using the data covariance.
cl_new = np.random.multivariate_normal(cl_theory,
                                       s_d.precision.getCovarianceMatrix())

# SACC object with random realization of the N(z)s and the power spectra
s_new = sacc.SACC(tr_rand,
                  s_d.binning,
                  sacc.MeanVec(cl_new),
                  precision=s_d.precision,
                  meta=s_d.meta)

dirname = "data/" + sim_name
# NOTE(review): shelling out for mkdir/cp; os.makedirs/shutil.copy would
# avoid invoking a shell on a string-built command.
os.system("mkdir -p " + dirname)
os.system("cp data/COADD/noi_bias.sacc " + dirname)
s_new.saveToHDF(dirname + "/power_spectra_wdpj.sacc")

if save_mean:
    # SACC object with mean N(z) and mean power spectra
    s_truth = sacc.SACC(tr_mean,
                        s_d.binning,
                        sacc.MeanVec(cl_theory),
                        precision=s_d.precision,
                        meta=s_d.meta)
Beispiel #7
0
def process_catalog(o):

    #Read z-binning
    print "Bins"
    z0_bins, zf_bins, lmax_bins = np.loadtxt(o.fname_bins_z, unpack=True)
    nbins = len(z0_bins)

    cat = fc.Catalog(read_from=o.fname_in)

    #Get weights, compute binary mask based on weights, and apodize it if needed
    print "Window"
    mask = Mask(cat, o.nside, o.theta_apo)
    nside = mask.nside

    #Get contaminant templates
    #TODO: check resolution
    if o.templates_fname != "none":
        templates = [[t] for t in hp.read_map(o.templates_fname, field=None)]
        ntemp = len(templates)
    else:
        templates = None
        ntemp = 0

    #Generate bandpowers binning scheme (we're assuming all maps will use the same bandpowers!)
    print "Bandpowers"
    bpw = nmt.NmtBin(nside, nlb=o.delta_ell)
    ell_eff = bpw.get_effective_ells()
    tracers = []
    #Generate tracers
    #TODO: pass extra sampling parameters
    zs, nzs, mps = bin_catalog(cat, z0_bins, zf_bins, mask)
    if mrank != 0:
        return

    for zar, nzar, mp, lmax in zip(zs, nzs, mps, lmax_bins):
        zav = np.average(zar, weights=nzar)
        print "-- z-bin: %3.2f " % zav
        tracers.append(Tracer(mp, zar, nzar, lmax, mask, templates=templates))
        if o.save_map:
            hp.write_map("map_%3.2f.fits" % zav, mp)
        cat.rewind()

    print "Compute power spectra"
    #Compute coupling matrix
    #TODO: (only done once, assuming all maps have the same mask!)
    print "  Computing coupling matrix"
    w = nmt.NmtWorkspace()
    if not (os.path.isfile(o.nmt_workspace)):
        w.compute_coupling_matrix(tracers[0].field, tracers[0].field, bpw)
        if o.nmt_workspace != "none":
            w.write_to(o.nmt_workspace)
    else:
        w.read_from(o.nmt_workspace)

    #Compute all cross-correlations
    def compute_master(fa, fb, wsp, clb):
        cl_coupled = nmt.compute_coupled_cell(fa, fb)
        cl_decoupled = wsp.decouple_cell(cl_coupled, cl_bias=clb)
        return cl_decoupled

    #If attempting to deproject contaminant templates, we need an estimate of the true power spectra.
    #This can be done interatively from a first guess using cl_bias=0, but I haven't coded that up yet.
    #For the moment we will use cl_guess=0.
    cl_guess = np.zeros(3 * nside)
    t1 = time()
    print "  Computing power spectrum"
    cls_all = {}
    for b1 in np.arange(nbins):
        f1 = tracers[b1].field
        for b2 in np.arange(b1, nbins):
            f2 = tracers[b2].field
            if ntemp > 0:
                cl_bias = nmt.deprojection_bias(f1, f2, w, cl_theory)
            else:
                cl_bias = None
            cls_all[(b1, b2)] = compute_master(f1, f2, w, clb=cl_bias)[0]
        print 'Computed bin: ', b1, b2, ' in ', time() - t1, ' s'
        if debug:
            plt.figure()
            plt.plot(ell_eff, cls_all[(b1, b1)])
            plt.xscale('log')
            plt.yscale('log')
            plt.xlabel(r'$l$')
            plt.ylabel(r'$C_{l}$')
            plt.show()
    print "Translating into SACC"
    #Transform everything into SACC format
    #1- Generate SACC tracers
    stracers = [
        sacc.Tracer("tr_b%d" % i, "point", t.zarr, t.nzarr, exp_sample="gals")
        for i, t in enumerate(tracers)
    ]

    #2- Define SACC binning
    typ, ell, t1, q1, t2, q2 = [], [], [], [], [], []
    for i1 in np.arange(nbins):
        for i2 in np.arange(i1, nbins):
            lmax = min(tracers[i1].lmax, tracers[i2].lmax)
            for l in ell_eff[ell_eff < lmax]:
                typ.append('F')
                ell.append(l)
                t1.append(i1)
                t2.append(i2)
                q1.append('P')
                q2.append('P')
    sbin = sacc.Binning(typ, ell, t1, q1, t2, q2)
    ssbin = sacc.SACC(stracers, sbin)

    #3- Arrange power spectra into SACC mean vector
    vec = np.zeros((ssbin.size(), ))
    for t1i, t2i, ells, ndx in ssbin.sortTracers():
        lmax = min(tracers[t1i].lmax, tracers[t2i].lmax)
        vec[ndx] = cls_all[(t1i, t2i)][np.where(ell_eff < lmax)[0]]
    svec = sacc.MeanVec(vec)

    #4- Create SACC file and write to file
    csacc = sacc.SACC(stracers, sbin, svec)
    csacc.saveToHDF(o.fname_out)
# Flatten the noise bandpowers and collapse the covariance to a 2D matrix.
bpw_noi = bpw_noi.flatten()
cov_bpw = cov_bpw.reshape([ncross * nbands, ncross * nbands])

#Write in SACC format
import sacc


#Tracers
def get_tracer_from_Bpass(b):
    # Wrap a bandpass object (frequency vs. response) as a spin-2 tracer.
    return sacc.Tracer(b.name, "spin2", b.nu, b.bnu, 'SO_SAT')


tracers = [get_tracer_from_Bpass(b) for b in bpss]

#Vectors
# Three mean vectors: signal+noise, signal-only and noise-only bandpowers.
v_signoi = sacc.MeanVec(bpw_tot)
v_signal = sacc.MeanVec(bpw_sig)
v_noise = sacc.MeanVec(bpw_noi)

#Covariance
precis = sacc.Precision(cov_bpw, is_covariance=True)

#Ordering
# Per-measurement metadata (type, ell, tracer indices, quantities, windows).
typ_arr = []
ls_arr = []
t1_arr = []
t2_arr = []
q1_arr = []
q2_arr = []
w_arr = []
for ic, c in enumerate(corr_ordering):
Beispiel #9
0
                          ['BK15_95_B', 'P353_B'], ['BK15_95_E', 'P353_B']])


def get_tracer_from_name(name, exp_sample=None):
    """Build a spin-2 SACC tracer from the BK15 bandpass file for `name`."""
    # After unpack, row 0 holds the frequencies and row 1 the transmission.
    bandpass = np.loadtxt("BK15_cosmomc/data/BK15/bandpass_" + name + ".txt",
                          unpack=True)
    return sacc.Tracer(name, "spin2", bandpass[0], bandpass[1], exp_sample)


#Tracers
tracers = [get_tracer_from_name(t, e) for t, e in zip(tracer_names, exp_names)]

#Mean vector
# [1:] drops the first column (the ell values) read from the file.
dv = np.loadtxt("BK15_cosmomc/data/BK15/BK15_cl_hat.dat", unpack=True)[1:]
ncls, nells = dv.shape
meanvec = sacc.MeanVec(dv.flatten())

#Mean fiducial
dv_f = np.loadtxt("BK15_cosmomc/data/BK15/BK15_fiducial_dust.dat",
                  unpack=True)[1:]
meanvec_f = sacc.MeanVec(dv_f.flatten())

#Mean noise
dv_n = np.loadtxt("BK15_cosmomc/data/BK15/BK15_noise.dat", unpack=True)[1:]
meanvec_n = sacc.MeanVec(dv_n.flatten())

#Precision matrix
# Reorder the 4D covariance from [ell, cl, ell, cl] to [cl, ell, cl, ell],
# then collapse it to a 2D (nells*ncls) x (nells*ncls) matrix.
precis = sacc.Precision(
    np.transpose(np.loadtxt("BK15_cosmomc/data/BK15/BK15_covmat_dust.dat",
                            unpack=True).reshape([nells, ncls, nells, ncls]),
                 axes=[1, 0, 3, 2]).reshape([nells * ncls, nells * ncls]))
Beispiel #10
0
    T.addColumns({'ndens': t.ndens_perad * np.ones_like(nz)})
    sacc_tracers.append(T)
#Binning and mean
# One entry per (bin pair, effective ell); all quantities are 'P' (point).
# NOTE(review): `type` shadows the builtin of the same name.
type, ell, dell, t1, q1, t2, q2 = [], [], [], [], [], [], []
for t1i in range(nbins):
    for t2i in range(t1i, nbins):
        for i_l, l in enumerate(ell_eff):
            type.append('F')  #Fourier-space
            ell.append(l)
            # Bandpower width from the bin edges.
            dell.append(lend[i_l] - lini[i_l])
            t1.append(t1i)
            q1.append('P')
            t2.append(t2i)
            q2.append('P')
sacc_binning = sacc.Binning(type, ell, t1, q1, t2, q2, deltaLS=dell)
sacc_mean = sacc.MeanVec(cls_all.flatten())
# Only attach a precision block when a covariance is available.
if covar is None:
    sacc_precision = None
else:
    sacc_precision = sacc.Precision(covar,
                                    "dense",
                                    is_covariance=True,
                                    binning=sacc_binning)
sacc_meta = {'Field': o.hsc_field, 'Area_rad': area_patch}
s = sacc.SACC(sacc_tracers,
              sacc_binning,
              sacc_mean,
              precision=sacc_precision,
              meta=sacc_meta)
s.printInfo()
s.saveToHDF(o.fname_out)
Beispiel #11
0
  T.addColumns({'ndens':t.ndens_perad*np.ones_like(nz)})
  sacc_tracers.append(T)
#Binning and mean
# One entry per (bin pair, effective ell); all quantities are 'P' (point).
# NOTE(review): `type` shadows the builtin of the same name.
type,ell,dell,t1,q1,t2,q2=[],[],[],[],[],[],[]
for t1i in range(nbins) :
  for t2i in range(t1i,nbins) :
    for i_l,l in enumerate(ell_eff) :
      type.append('F') #Fourier-space
      ell.append(l)
      dell.append(lend[i_l]-lini[i_l])
      t1.append(t1i)
      q1.append('P')
      t2.append(t2i)
      q2.append('P')
sacc_binning=sacc.Binning(type,ell,t1,q1,t2,q2,deltaLS=dell,windows=windows_sacc)
sacc_mean=sacc.MeanVec(cls_all.flatten())
# Only attach a precision block when a covariance is available.
if covar is None :
  sacc_precision=None
else :
  sacc_precision=sacc.Precision(covar,"dense",is_covariance=True, binning=sacc_binning)
sacc_meta={'Field':o.hsc_field,'Area_rad':area_patch}
s=sacc.SACC(sacc_tracers,sacc_binning,sacc_mean,precision=sacc_precision,meta=sacc_meta)
s.printInfo()
s.saveToHDF(o.prefix_out+'.sacc')
#Save noise
# Same binning but without windows; no precision block for the noise file.
sacc_binning_noise=sacc.Binning(type,ell,t1,q1,t2,q2,deltaLS=dell)
sacc_mean_noise=sacc.MeanVec(nls_all.flatten())
s=sacc.SACC(sacc_tracers,sacc_binning_noise,sacc_mean_noise,precision=None,meta=sacc_meta)
s.printInfo()
s.saveToHDF(o.prefix_out+'_noise.sacc')
Beispiel #12
0
def getTheoryVec(s, cls_theory):
    """Arrange per-tracer-pair theory spectra into a SACC mean vector."""
    theory_vec = np.zeros(s.size())
    # sortTracers yields (tracer1, tracer2, ells, vector indices) per pair.
    for tr1, tr2, _ells, indices in s.sortTracers():
        theory_vec[indices] = cls_theory[(tr1, tr2)]
    return sacc.MeanVec(theory_vec)
# Pairs of tracer/field names fixing the ordering of the data vector
# (all T/E combinations of the two channels, upper triangle).
corr_ordering = np.array([['A90_T', 'A90_T'], ['A90_T', 'A150_T'], ['A150_T', 'A150_T'], ['A90_T', 'A90_E'], ['A90_T', 'A150_E'], ['A90_E', 'A150_T'], ['A150_T', 'A150_E'], ['A90_E', 'A90_E'], ['A90_E', 'A150_E'], ['A150_E', 'A150_E']])

def get_tracer_from_name(name, exp_sample=None):
    """
    Build a spin-2 SACC tracer for one of the known frequency channels.

    :param name: channel name, 'A90' or 'A150'.
    :param exp_sample: experiment/sample label passed through to the tracer.
    :raises ValueError: if `name` is not a recognized channel.
    """
    if name == 'A90':
        nu = [90.]
    elif name == 'A150':
        nu = [150.]
    else:
        # Previously an unknown name fell through and crashed later with
        # NameError on `nu`; fail fast with a clear message instead.
        raise ValueError("Unknown tracer name: " + name)
    # Flat single-element bandpass placeholder.
    bandpass = [1.]
    return sacc.Tracer(name, "spin2", np.asarray(nu), np.asarray(bandpass), exp_sample)

#Tracers
tracers=[get_tracer_from_name(t,e) for t,e in zip(tracer_names,exp_names)]

#Mean vector
# Column 1 of the file holds the measured C_ell values.
dv = np.loadtxt(datadir + 'mr3c_20181012_190130_TT_TE_EE_C_ell_iter0.dat')[:, 1]
meanvec = sacc.MeanVec(dv)

#Precision matrix
cov = np.loadtxt(datadir + 'mr3c_20181012_190130_TT_TE_EE_cov_diag_C_ell.dat')
precis = sacc.Precision(cov)

#Binning
# Bandpower window functions per spectrum type; ET reuses the TE file.
windows = {}
windows['TT'] = np.loadtxt(datadir + 'TT_C_ell_bpwf_v2_lmin2.dat')
windows['TE'] = np.loadtxt(datadir + 'TE_C_ell_bpwf_v2_lmin2.dat')
windows['ET'] = np.loadtxt(datadir + 'TE_C_ell_bpwf_v2_lmin2.dat')
windows['EE'] = np.loadtxt(datadir + 'EE_C_ell_bpwf_v2_lmin2.dat')

nells = windows['TT'].shape[0]
ellmax = windows['TT'].shape[1]
# Multipoles start at ell = 2 (windows were produced with lmin = 2).
ls = np.arange(ellmax) + 2
Beispiel #14
0
    p1 = i1 % 2
    for i2 in range(i1, 2 * nfreqs):
        b2 = i2 // 2
        p2 = i2 % 2
        ty = pol_names[p1] + pol_names[p2]
        for il, ll in enumerate(ells_bpw):
            ell.append(ll)
            typ.append(ty)
            t1.append(b1)
            t2.append(b2)
            q1.append('C')
            q2.append('C')
bins = sacc.Binning(typ, ell, t1, q1, t2, q2)
# Keep only the upper triangle of the (2*nfreqs) x (2*nfreqs) spectra matrix.
bpw_model = bpw_model.reshape([2 * nfreqs, 2 * nfreqs,
                               n_bpw])[np.triu_indices(2 * nfreqs)].flatten()
mean_model = sacc.MeanVec(bpw_model)
sacc_model = sacc.SACC(tracers, bins, mean=mean_model)
os.system('mkdir -p ' + prefix_out)
sacc_model.saveToHDF(prefix_out + "/cells_model.sacc")

# Binary hit mask and 1/sqrt(nhits) weights, ignoring near-zero hit counts.
nhits_binary = np.zeros_like(nhits)
inv_sqrtnhits = np.zeros_like(nhits)
inv_sqrtnhits[nhits > 1E-3] = 1. / np.sqrt(nhits[nhits > 1E-3])
nhits_binary[nhits > 1E-3] = 1
#Add noise
ylf = 1
nell = np.zeros([nfreqs, lmax + 1])
_, nell[:, 2:], _ = Simons_Observatory_V3_SA_noise(sens,
                                                   knee,
                                                   ylf,
                                                   fsky,
Beispiel #15
0
    def parse_splits_sacc_file(self, s):
        """
        Transform a SACC file containing splits into 4 SACC vectors:
        1 that contains the coadded power spectra.
        1 that contains coadded power spectra for cross-split only.
        1 that contains an estimate of the noise power spectrum.
        1 that contains all null tests

        :param s: input SACC object holding all split cross-spectra.
        :return: tuple (sv_coadd_total, sv_coadd_xcorr, sv_coadd_noise,
            sv_nulls) of sacc.MeanVec objects.
        """

        # Check we have the right number of bands, splits, cross-correlations and power spectra
        self.check_sacc_consistency(s)

        # Now read power spectra into an array of form [nsplits,nsplits,nbands,nbands,2,2,n_ell]
        # This duplicates the number of elements, but simplifies bookkeeping significantly.
        spectra = np.zeros([
            self.nsplits, self.nsplits, self.nbands, 2, self.nbands, 2,
            self.n_bpws
        ])
        for t1, t2, typ, ells, ndx in self.sorting:
            # Band, split and polarization channel indices
            b1, s1 = self.tracer_number_to_band_split(t1)
            b2, s2 = self.tracer_number_to_band_split(t2)
            # Spectrum type comes in as bytes (e.g. b'EB') — decode to str.
            typ = typ.decode()
            p1 = self.index_pol[typ[0]]
            p2 = self.index_pol[typ[1]]
            # Cross-measurements are stored in both symmetric slots.
            is_x = not ((b1 == b2) and (s1 == s2) and (p1 == p2))
            spectra[s1, s2, b1, p1, b2, p2, :] = s.mean.vector[ndx]
            if is_x:
                spectra[s2, s1, b2, p2, b1, p1, :] = s.mean.vector[ndx]

        # Coadding (assuming flat coadding)
        # Total coadding (including diagonal)
        weights_total = np.ones(self.nsplits, dtype=float) / self.nsplits
        # Contract both split axes of `spectra` with the flat weights.
        spectra_coadd_total = np.einsum('i,ijklmno,j', weights_total, spectra,
                                        weights_total)
        # Off-diagonal coadding
        # Average only over split pairs above the diagonal (s1 < s2).
        spectra_coadd_xcorr = np.mean(spectra[np.triu_indices(self.nsplits,
                                                              1)],
                                      axis=0)

        # Noise power spectra
        # Auto-split contributions minus cross-split = noise estimate.
        spectra_coadd_noise = spectra_coadd_total - spectra_coadd_xcorr

        # Nulls
        # Each null test is the (i - j) x (k - l) split combination.
        spectra_nulls = np.zeros(
            [self.n_nulls, self.nbands, 2, self.nbands, 2, self.n_bpws])
        for i_null, (i, j, k, l) in enumerate(self.pairings):
            spectra_nulls[i_null] = spectra[i, k] - spectra[i, l] - spectra[
                j, k] + spectra[j, l]

        # Turn into SACC means
        # Collapse (band, pol) axes and keep the upper triangle only, since
        # the full arrays duplicate each cross-measurement symmetrically.
        spectra_coadd_total = spectra_coadd_total.reshape(
            [2 * self.nbands, 2 * self.nbands,
             self.n_bpws])[np.triu_indices(2 * self.nbands)]
        spectra_coadd_xcorr = spectra_coadd_xcorr.reshape(
            [2 * self.nbands, 2 * self.nbands,
             self.n_bpws])[np.triu_indices(2 * self.nbands)]
        spectra_coadd_noise = spectra_coadd_noise.reshape(
            [2 * self.nbands, 2 * self.nbands,
             self.n_bpws])[np.triu_indices(2 * self.nbands)]
        spectra_nulls = spectra_nulls.reshape([-1, self.n_bpws])
        sv_coadd_total = sacc.MeanVec(spectra_coadd_total.flatten())
        sv_coadd_xcorr = sacc.MeanVec(spectra_coadd_xcorr.flatten())
        sv_coadd_noise = sacc.MeanVec(spectra_coadd_noise.flatten())
        sv_nulls = sacc.MeanVec(spectra_nulls.flatten())

        return sv_coadd_total, sv_coadd_xcorr, sv_coadd_noise, sv_nulls