def healpix2car(
    input_file,
    fields=None,
    mask_file=None,
    output_file=None,
    resolution=0.5,
    bounding_box=(-180, 180, -75, 30),
    lmax=6000,
):
    """Convert a HEALPIX map to a CAR map

    Parameters
    ----------
    input_file: fits file
        name of the input HEALPIX fits file
    fields: tuple
        HEALPIX fields to convert, i.e. (0,) will keep only the temperature field
    mask_file: fits file
        name of the HEALPIX mask file
    output_file: fits file
        name of the output CAR fits file
    resolution: float
        final CAR resolution in arcminutes
    bounding_box: tuple
        (ra0, ra1, dec0, dec1) in degrees
    lmax: integer
        maximum multipole used for the harmonic projection
    """
    healpix_map = so_map.read_map(input_file, fields_healpix=fields)

    # CAR template
    ra0, ra1, dec0, dec1 = bounding_box
    res = resolution
    car_template = so_map.car_template(healpix_map.ncomp, ra0, ra1, dec0, dec1, res)
    projected_map = so_map.healpix2car(healpix_map, car_template, lmax=lmax)

    if mask_file is not None:
        mask = so_map.read_map(mask_file)
        projected_mask = so_map.healpix2car(mask, car_template, lmax=lmax)
        if mask.ncomp == healpix_map.ncomp == 1:
            projected_map.data *= np.where(projected_mask.data < 0.5, 0, 1)
        elif mask.ncomp == 1:
            for i in range(healpix_map.ncomp):
                projected_map.data[i] *= np.where(projected_mask.data < 0.5, 0, 1)
        else:
            if healpix_map.ncomp != mask.ncomp:
                raise ValueError("Map and mask have different number of components")
            for i in range(mask.ncomp):
                projected_map.data[i] *= np.where(projected_mask.data[i] < 0.5, 0, 1)

    print("Writing '{}' file".format(output_file))
    projected_map.write_map(output_file)

    return projected_map
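# A minimal usage sketch for healpix2car defined above; the file names,
# bounding box and resolution are illustrative placeholders, not files
# shipped with the code.
car_map = healpix2car(
    "map_healpix.fits",
    fields=(0,),                        # keep the temperature field only
    mask_file="mask_healpix.fits",
    output_file="map_car.fits",
    resolution=0.5,                     # arcminutes
    bounding_box=(-180, 180, -75, 30),  # (ra0, ra1, dec0, dec1) in degrees
)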
def create_crosslink_mask(xlink_map, cross_link_threshold):
    # remove pixels with very little cross-linking
    xlink = so_map.read_map(xlink_map)
    xlink_lowres = xlink.downgrade(32)
    with np.errstate(invalid="ignore"):
        x_mask = (np.sqrt(xlink_lowres.data[1] ** 2 + xlink_lowres.data[2] ** 2)
                  / xlink_lowres.data[0])
    x_mask[np.isnan(x_mask)] = 1
    x_mask[x_mask >= cross_link_threshold] = 1
    x_mask[x_mask < cross_link_threshold] = 0
    x_mask = 1 - x_mask
    xlink_lowres.data[0] = x_mask
    # project the low resolution mask back onto the original pixellisation
    xlink = so_map.car2car(xlink_lowres, xlink)
    x_mask = xlink.data[0].copy()
    id = np.where(x_mask > 0.9)
    x_mask[:] = 0
    x_mask[id] = 1
    return x_mask
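# Hypothetical usage of create_crosslink_mask; "xlink.fits" stands in for a
# 3-component cross-linking map and the 0.97 threshold is illustrative. The
# ratio sqrt(d1^2 + d2^2)/d0 computed above is roughly 1 for poorly
# cross-linked pixels, so the returned mask is 1 where cross-linking is good.
x_mask = create_crosslink_mask("xlink.fits", cross_link_threshold=0.97)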
print("number of covariance matrices to compute : %s" % ncovs) so_mpi.init(True) subtasks = so_mpi.taskrange(imin=0, imax=ncovs - 1) print(subtasks) for task in subtasks: task = int(task) na, nb, nc, nd = na_list[task], nb_list[task], nc_list[task], nd_list[task] na_r, nb_r, nc_r, nd_r = na.replace("&", "_"), nb.replace("&", "_"), nc.replace( "&", "_"), nd.replace("&", "_") print("cov element (%s x %s, %s x %s)" % (na_r, nb_r, nc_r, nd_r)) win = {} win["Ta"] = so_map.read_map(d["window_T_%s" % na_r]) win["Tb"] = so_map.read_map(d["window_T_%s" % nb_r]) win["Tc"] = so_map.read_map(d["window_T_%s" % nc_r]) win["Td"] = so_map.read_map(d["window_T_%s" % nd_r]) win["Pa"] = so_map.read_map(d["window_pol_%s" % na_r]) win["Pb"] = so_map.read_map(d["window_pol_%s" % nb_r]) win["Pc"] = so_map.read_map(d["window_pol_%s" % nc_r]) win["Pd"] = so_map.read_map(d["window_pol_%s" % nd_r]) coupling = so_cov.cov_coupling_spin0and2_simple(win, lmax, niter=niter, l_exact=l_exact, l_band=l_band, l_toep=l_toep)
                ar2_list += [ar2]
                n_mcms += 1

print("number of mcm matrices to compute : %s" % n_mcms)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=n_mcms - 1)
print(subtasks)
for task in subtasks:
    task = int(task)
    sv1, ar1, sv2, ar2 = sv1_list[task], ar1_list[task], sv2_list[task], ar2_list[task]

    print("%s_%s x %s_%s" % (sv1, ar1, sv2, ar2))

    l, bl1 = pspy_utils.read_beam_file(d["beam_%s_%s" % (sv1, ar1)])
    win1_T = so_map.read_map(d["window_T_%s_%s" % (sv1, ar1)])
    win1_pol = so_map.read_map(d["window_pol_%s_%s" % (sv1, ar1)])

    l, bl2 = pspy_utils.read_beam_file(d["beam_%s_%s" % (sv2, ar2)])
    win2_T = so_map.read_map(d["window_T_%s_%s" % (sv2, ar2)])
    win2_pol = so_map.read_map(d["window_pol_%s_%s" % (sv2, ar2)])

    mbb_inv, Bbl = so_mcm.mcm_and_bbl_spin0and2(win1=(win1_T, win1_pol),
                                                win2=(win2_T, win2_pol),
                                                bl1=(bl1, bl1),
                                                bl2=(bl2, bl2),
                                                binning_file=d["binning_file"],
                                                niter=d["niter"],
                                                lmax=d["lmax"],
                                                type=d["type"],
                                                l_exact=l_exact,
window = so_map.healpix_template(ncomp=1, nside=nside)

with fits.open("%s/HFI_Mask_PointSrc_2048_R2.00.fits" % EB_mask_dir) as hdul:
    data = hdul["SRC-POL"].data
    ps_mask = {f: hp.reorder(data[f], n2r=True)
               for f in ["F100", "F143", "F217", "F353"]}

for freq in freqs:
    CO_mask = np.ones(12 * nside ** 2)
    if freq != "143":
        log10_CO_noise_ratio = so_map.read_map(
            "%s/HFI_BiasMap_%s-CO-noiseRatio_2048_R3.00_full.fits" % (EB_mask_dir, freq),
            fields_healpix=0)
        id = np.where(log10_CO_noise_ratio.data > -2)
        CO_mask[id] = 0

    missing_pixel = np.ones(12 * nside ** 2)
    half_mission = [1, 2]
    for hm in half_mission:
        for c, field in enumerate(["I", "Q", "U"]):
            map = so_map.read_map(
                "%s/HFI_SkyMap_%s_2048_R3.01_halfmission-%s.fits" % (maps_dir, freq, hm),
                fields_healpix=c)
            id = np.where(map.data < -10 ** 30)
            missing_pixel[id] = 0

    cov = so_map.read_map(
plot_dir = "plots/maps/" pspy_utils.create_directory(plot_dir) pspy_utils.create_directory(specDir) spectra = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"] spin_pairs = ["spin0xspin0", "spin0xspin2", "spin2xspin0", "spin2xspin2"] ncomp = 3 master_alms = {} nsplit = {} for sv in surveys: arrays = d["arrays_%s" % sv] for ar in arrays: win_T = so_map.read_map(d["window_T_%s_%s" % (sv, ar)]) win_pol = so_map.read_map(d["window_pol_%s_%s" % (sv, ar)]) window_tuple = (win_T, win_pol) maps = d["maps_%s_%s" % (sv, ar)] nsplit[sv] = len(maps) cal = d["cal_%s_%s" % (sv, ar)] print("%s split of survey: %s, array %s" % (nsplit[sv], sv, ar)) t = time.time() for k, map in enumerate(maps): if win_T.pixel == "CAR": split = so_map.read_map(map, geometry=win_T.data.geometry) if d["use_kspace_filter"]:
plot_dir = "plots/maps/" pspy_utils.create_directory(plot_dir) pspy_utils.create_directory(specDir) spectra = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"] spin_pairs = ["spin0xspin0", "spin0xspin2", "spin2xspin0", "spin2xspin2"] ncomp = 3 master_alms = {} nsplit = {} for sv in surveys: arrays = d["arrays_%s" % sv] for ar in arrays: win_T = so_map.read_map(d["window_T_%s_%s" % (sv, ar)]) win_pol = so_map.read_map(d["window_pol_%s_%s" % (sv, ar)]) window_tuple = (win_T, win_pol) maps = d["maps_%s_%s" % (sv, ar)] nsplit[sv] = len(maps) cal = d["cal_%s_%s" % (sv, ar)] print("%s split of survey: %s, array %s"%(nsplit[sv], sv, ar)) t = time.time() for k, map in enumerate(maps): if win_T.pixel == "CAR": split = so_map.read_map(map, geometry=win_T.data.geometry)
    nd_list += [nd]
    ncovs += 1

nspecs = len(spec_name)

print("number of covariance matrices to compute : %s" % ncovs)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=ncovs - 1)
for task in subtasks:
    task = int(task)
    na, nb, nc, nd = na_list[task], nb_list[task], nc_list[task], nd_list[task]

    win = {}
    win["Ta"] = so_map.read_map("%s/window_%s.fits" % (window_dir, na))
    win["Tb"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nb))
    win["Tc"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nc))
    win["Td"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nd))
    win["Pa"] = so_map.read_map("%s/window_%s.fits" % (window_dir, na))
    win["Pb"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nb))
    win["Pc"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nc))
    win["Pd"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nd))

    coupling = so_cov.cov_coupling_spin0and2_simple(win, lmax, niter=niter)

    analytic_cov = np.zeros((4 * nbins, 4 * nbins))

    # TaTbTcTd
    M_00 = coupling["TaTcTbTd"] * so_cov.chi(na, nc, nb, nd, ns, ps_all, nl_all, "TTTT")
d.read_from_file(sys.argv[1])

# the apodisation length of the point source mask in degrees
apod_pts_source_degree = d["apod_pts_source_degree"]
# the apodisation length of the survey x gal x cross-linking mask
apod_survey_degree = d["apod_survey_degree"]
# we will skip the edges of the survey where the noise is very difficult to model
skip_from_edges_degree = d["skip_from_edges_degree"]
# the threshold on the amount of cross-linking required to keep the data
cross_link_threshold = d["cross_link_threshold"]

window_dir = "windows"
surveys = d["surveys"]

pspy_utils.create_directory(window_dir)

ps_mask = so_map.read_map(d["ps_mask"])
gal_mask = so_map.read_map(d["gal_mask"])

patch = None
if "patch" in d:
    patch = so_map.read_map(d["patch"])

# here we list the different windows that need to be computed; we will then do an MPI loop over this list
sv_list, ar_list = [], []
n_wins = 0
for sv in surveys:
    arrays = d["arrays_%s" % sv]
    for ar in arrays:
        sv_list += [sv]
        ar_list += [ar]
import glob
import os
import sys

import numpy as np
from pspy import so_dict, so_map, so_mpi

d = so_dict.so_dict()
d.read_from_file(sys.argv[1])

npipe_map_directory = "/global/cfs/cdirs/cmb/data/planck2020/pla/frequency_maps/Multi-detector"

freqs, map_files = [], []
for ar in d.get("arrays_planck", raise_error=True):
    files = glob.glob(os.path.join(npipe_map_directory, "HFI*{}-*full.fits".format(ar[1:])))
    map_files += files
    freqs += len(files) * [ar[1:]]

# Survey mask
survey = so_map.read_map(d.get("survey_planck", raise_error=True))
survey.ncomp = 3
survey.data = np.tile(survey.data, (survey.ncomp, 1, 1))

# Mask dir for removing mono/dipole
masks_dir = os.path.join(d["data_dir"], "planck/likelihood_mask/")
mask_tmpl = os.path.join(masks_dir, "COM_Mask_Likelihood-{}-{}-{}_2048_R3.00.fits")

nmaps = len(map_files)
print(f"number of maps to project : {nmaps}")

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=nmaps - 1)
print(subtasks)
for task in subtasks:
maps = d["maps_%s_%s" % (sv, ar)] nsplit[sv] = len(maps) print("%s split of survey: %s, array %s" % (nsplit[sv], sv, ar)) for k, map in enumerate(maps): master_alms[sv, ar, k] = np.load("%s/alms_%s_%s_%d.npy" % (alms_dir, sv, ar, k)) # compute the transfer functions _, _, lb, _ = pspy_utils.read_binning_file(binning_file, lmax) tf_array = {} for sv in surveys: tf_survey = np.ones(len(lb)) ks_f = d["k_filter_%s" % sv] template = so_map.read_map(d["window_T_%s_%s" % (sv, d["arrays_%s" % sv][0])]) if ks_f["apply"]: if ks_f["tf"] == "analytic": print("compute analytic kspace tf %s" % sv) shape, wcs = template.data.shape, template.data.wcs if ks_f["type"] == "binary_cross": filter = so_map_preprocessing.build_std_filter( shape, wcs, vk_mask=ks_f["vk_mask"], hk_mask=ks_f["hk_mask"], dtype=np.float32) elif ks_f["type"] == "gauss": filter = so_map_preprocessing.build_sigurd_filter( shape, wcs, ks_f["lbounds"], dtype=np.float32)
type = d["type"] binning_file = d["binning_file"] pixwin = d["pixwin"] splits = d["splits"] experiment = "Planck" print("Compute Planck 2018 mode coupling matrices") for f1, freq1 in enumerate(freqs): window_t_1 = d["window_T_%s" % freq1] window_pol_1 = d["window_pol_%s" % freq1] for count1, hm1 in enumerate(splits): win_t1 = so_map.read_map(window_t_1[count1]) win_pol1 = so_map.read_map(window_pol_1[count1]) win_t1.write_map("%s/window_T_%s_%s-%s.fits" % (windows_dir, experiment, freq1, hm1)) win_pol1.write_map("%s/window_P_%s_%s-%s.fits" % (windows_dir, experiment, freq1, hm1)) window_tuple1 = (win_t1, win_pol1) del win_t1, win_pol1 l, bl1_t = np.loadtxt(d["beam_%s_%s_T" % (freq1, hm1)], unpack=True) l, bl1_pol = np.loadtxt(d["beam_%s_%s_pol" % (freq1, hm1)], unpack=True)
name_list = []
id_list = []
for field in ["T", "E"]:
    for s, id in zip(survey_name, survey_id):
        name_list += ["%s%s" % (field, s)]
        id_list += ["%s%s" % (field, id)]

Clth_dict = {}
for name1, id1 in zip(name_list, id_list):
    for name2, id2 in zip(name_list, id_list):
        spec = id1[0] + id2[0]
        Clth_dict[id1 + id2] = ps_theory[spec] + nl_th[spec] * so_cov.delta2(name1, name2)

window = so_map.read_map("%s/window_%s_%s.fits" % (window_dir, scan, run))
mbb_inv, Bbl = so_mcm.read_coupling(prefix="%s/%s_%s" % (mcm_dir, scan, run),
                                    spin_pairs=spin_pairs)

coupling_dict = so_cov.cov_coupling_spin0and2_simple(window, lmax, niter=niter, planck=False)
analytic_cov = so_cov.cov_spin0and2(Clth_dict, coupling_dict, binning_file, lmax, mbb_inv, mbb_inv)

fsky[scan, run], quick_cov = SO_noise_utils.quick_analytic_cov(lth,
                                                               Clth_dict,
                                                               window,
                                                               binning_file,
                                                               lmax)
def car2tiles(
    input_file,
    mask_file=None,
    enplot_args=None,
    output_dir=None,
    delete_fits=True,
    use_webplot=True,
    pre_operation=None,
):
    """Convert a CAR map to PNG tiles

    Parameters
    ----------
    input_file: fits file
        name of the input CAR fits file
    mask_file: fits file
        name of the CAR mask file
    enplot_args: list
        list of enplot/webplot options (see the corresponding programs)
    output_dir: string
        name of the output directory holding the PNG files
    delete_fits: boolean
        delete the FITS files corresponding to the tiles
    use_webplot: boolean
        use webplot in place of the enplot program
    pre_operation: string
        an expression evaluated on the map data (available as `m`, with numpy
        names in scope) before plotting
    """
    enplot_args = enplot_args or []
    comm = mpi.COMM_WORLD
    if comm.rank == 0:
        if output_dir is None:
            output_dir = os.path.join("tiles", os.path.basename(input_file))
        # Check if the fits files glob is already in the argument list
        fits_files = os.path.join(output_dir, "*/*.fits")
        if fits_files not in enplot_args:
            enplot_args.append(fits_files)
        if os.path.exists(output_dir):
            os.system("rm -rf %s" % output_dir)

        if mask_file is not None:
            car = so_map.read_map(input_file)
            mask = so_map.read_map(mask_file)
            # from pixell import wcsutils
            # if not wcsutils.is_compatible(mask.geometry[0], car.geometry[0]):
            #     raise ValueError("Map and mask must have compatible geometries")
            if mask.ncomp == car.ncomp == 1:
                car.data *= np.where(mask.data < 0.5, 0, 1)
            elif mask.ncomp == 1:
                for i in range(car.ncomp):
                    car.data[i] *= np.where(mask.data < 0.5, 0, 1)
            else:
                if mask.ncomp != car.ncomp:
                    raise ValueError("Map and mask have different number of components")
                for i in range(mask.ncomp):
                    car.data[i] *= np.where(mask.data[i] < 0.5, 0, 1)
            input_file += ".tmp"
            car.write_map(input_file)

        if pre_operation is not None:
            car = so_map.read_map(input_file)
            car.data = eval(pre_operation, {"m": car.data}, np.__dict__)
            input_file += ".tmp"
            car.write_map(input_file)

    if not mpi.disabled:
        comm.barrier()

    tile_utils_sigurd.leaftile(
        input_file,
        output_dir,
        verbose="-v" in enplot_args,
        comm=comm if not mpi.disabled else None,
        monolithic=True,
    )

    if use_webplot:
        args = webplot.parse_args(enplot_args)
        webplot.plot(args)
    else:
        args = enplot.parse_args(enplot_args)
        for plot in enplot.plot_iterator(*args.ifiles, comm=comm, **args):
            enplot.write(plot.name, plot)

    if comm.rank == 0:
        if mask_file is not None or pre_operation is not None:
            os.remove(input_file)
        if delete_fits:
            for fits in args.ifiles:
                os.remove(fits)
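# A hypothetical call to car2tiles; "map_car.fits" and "mask_car.fits" are
# placeholder file names. The "-v" flag is the verbose option that the
# function body checks for, and pre_operation is evaluated with the map data
# bound to `m` and numpy names in scope.
car2tiles(
    "map_car.fits",
    mask_file="mask_car.fits",
    enplot_args=["-v"],
    output_dir="tiles/map_car",
    pre_operation="m * 1e6",
)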
binning_file = d["binning_file"] remove_mono_dipo_t = d["remove_mono_dipo_T"] remove_mono_dipo_pol = d["remove_mono_dipo_pol"] splits = d["splits"] experiment = "Planck" alms = {} print("Compute Planck 2018 spectra") for freq in freqs: maps = d["map_%s" % freq] for hm, map in zip(splits, maps): window_t = so_map.read_map("%s/window_T_%s_%s-%s.fits" % (windows_dir, experiment, freq, hm)) window_pol = so_map.read_map("%s/window_P_%s_%s-%s.fits" % (windows_dir, experiment, freq, hm)) window_tuple = (window_t, window_pol) del window_t, window_pol pl_map = so_map.read_map("%s" % map, fields_healpix=(0, 1, 2)) pl_map.data *= 10**6 cov_map = so_map.read_map("%s" % map, fields_healpix=4) badpix = (cov_map.data == hp.pixelfunc.UNSEEN) for i in range(3): pl_map.data[i][badpix] = 0.0 if remove_mono_dipo_t: pl_map.data[0] = planck_utils.subtract_mono_di( pl_map.data[0], window_tuple[0].data, pl_map.nside)
import sys

import healpy as hp
from pspy import so_dict, so_map, so_mcm, pspy_utils

d = so_dict.so_dict()
d.read_from_file(sys.argv[1])

windows_dir = "windows"

pspy_utils.create_directory(windows_dir)

freqs = d["freqs"]
splits = d["splits"]
experiment = "Planck"

print("Compute Planck 2018 mode coupling matrices")

for f1, freq1 in enumerate(freqs):
    window_t_1 = d["window_T_%s" % freq1]
    window_pol_1 = d["window_pol_%s" % freq1]

    for count1, hm1 in enumerate(splits):
        win_t1 = so_map.read_map(window_t_1[count1])
        win_pol1 = so_map.read_map(window_pol_1[count1])
        win_t1.write_map("%s/window_T_%s_%s-%s.fits" % (windows_dir, experiment, freq1, hm1))
        win_pol1.write_map("%s/window_P_%s_%s-%s.fits" % (windows_dir, experiment, freq1, hm1))
# This script projects the Planck galactic masks onto the ACT survey
import os
import re
import sys

import healpy as hp
import numpy as np
from astropy.io import fits
from pspy import so_dict, so_map

d = so_dict.so_dict()
d.read_from_file(sys.argv[1])

# Survey mask
survey = so_map.read_map(d["template"])
if survey.ncomp > 2:
    # Only use temperature
    survey.data = survey.data[0]
    survey.ncomp = 1

# Planck galactic masks
fn = d["planck_galactic_masks"]
hdul = fits.open(fn)

# Try to get nside and apodization from the file name
m = re.search("apo(.)", fn)
apod = int(m.group(1)) if m else d.get("galactic_mask_apodization", 0)
m = re.search("apo.*_(.*)_", fn)
nside = int(m.group(1)) if m else d.get("galactic_mask_nside", 2048)
def get_spectra(window,
                maps_info_list,
                car_box,
                type,
                lmax,
                binning_file,
                ps_method="master",
                mbb_inv=None,
                compute_T_only=False):
    """Compute the power spectra in the patch

    Parameters
    ----------
    window: so_map
        the window function of the patch
    maps_info_list: list of dicts describing the data maps
        each dictionary should contain the name, the data type ("IQU" or "I") and optionally
        a calibration factor to apply to the map
        note that all maps in the list should have the same data type
    car_box: 2x2 array
        an array of the form [[dec0, ra0], [dec1, ra1]]
        it encompasses the patch, and we will only load in memory the part of the map inside the box
    type: string
        the type of binning, either bin Cl or bin Dl
    lmax: integer
        the maximum multipole to consider for the spectra computation
    binning_file: text file
        a binning file with three columns: bin low, bin high, bin mean
        note that either binning_file or bin_size should be provided
    ps_method: string
        the method for the computation of the power spectrum
        can be "master", "pseudo", or "2dflat" for now
    mbb_inv: 2d array
        the inverse mode coupling matrix, not used for 2dflat
    compute_T_only: boolean
        True to compute only T spectra
    """
    ht_list = []
    name_list = []

    if not compute_T_only:
        window = (window, window)

    for map_info in maps_info_list:
        split = so_map.read_map(map_info["name"], car_box=car_box)

        if compute_T_only and map_info["data_type"] == "IQU":
            split.data = split.data[0]
            split.ncomp = 1

        if map_info["cal"] is not None:
            split.data *= map_info["cal"]

        if ps_method in ["master", "pseudo"]:
            print("SPHT of %s in the patch" % map_info["name"])
            alms = sph_tools.get_alms(split, window, niter=0, lmax=lmax + 50)
            ht_list += [alms]
        elif ps_method == "2dflat":
            print("FFT of %s in the patch" % map_info["name"])
            ffts = flat_tools.get_ffts(split, window, lmax)
            ht_list += [ffts]

        name_list += [map_info["id"]]

    split_num = np.arange(len(maps_info_list))

    if compute_T_only:
        if ps_method in ["master", "pseudo"]:
            spectra = None
        elif ps_method == "2dflat":
            spectra = ["II"]
    else:
        if ps_method in ["master", "pseudo"]:
            spectra = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"]
        elif ps_method == "2dflat":
            spectra = ["II", "IQ", "IU", "QI", "QQ", "QU", "UI", "UQ", "UU"]

    ps_dict = {}
    spec_name_list = []

    for name1, ht1, c1 in zip(name_list, ht_list, split_num):
        for name2, ht2, c2 in zip(name_list, ht_list, split_num):
            if c1 > c2:
                continue

            spec_name = "%sx%s" % (name1, name2)

            if ps_method in ["master", "pseudo"]:
                l, ps = so_spectra.get_spectra(ht1, ht2, spectra=spectra)
                ells, ps_dict[spec_name] = so_spectra.bin_spectra(l,
                                                                  ps,
                                                                  binning_file,
                                                                  lmax,
                                                                  type=type,
                                                                  mbb_inv=mbb_inv,
                                                                  spectra=spectra)
            elif ps_method == "2dflat":
                ells, ps_dict[spec_name] = flat_tools.power_from_fft(ht1, ht2, type=type)

            spec_name_list += [spec_name]

    if compute_T_only:
        # to make T only behave the same as the other cases, make it a dictionary
        if ps_method in ["master", "pseudo"]:
            spectra = ["TT"]
            for spec_name in spec_name_list:
                ps_dict[spec_name] = {"TT": ps_dict[spec_name]}

    return spectra, spec_name_list, ells, ps_dict
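# Hypothetical usage of get_spectra; the window and car_box would typically
# come from create_window (defined further below), mbb_inv from a pspy
# mode-coupling computation, and the file names and ids are placeholders.
maps_info_list = [
    {"name": "split0.fits", "data_type": "IQU", "id": "s0", "cal": None},
    {"name": "split1.fits", "data_type": "IQU", "id": "s1", "cal": None},
]
spectra, spec_name_list, ells, ps_dict = get_spectra(window,
                                                     maps_info_list,
                                                     car_box,
                                                     type="Dl",
                                                     lmax=1000,
                                                     binning_file="binning.dat",
                                                     ps_method="master",
                                                     mbb_inv=mbb_inv)
# ps_dict["s0xs1"]["TT"] then holds the binned TT cross-spectrum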
remove_mono_dipo_T = d['remove_mono_dipo_T']
remove_mono_dipo_pol = d['remove_mono_dipo_pol']
splits = d['splits']
experiment = 'Planck'

alms = {}
nsplit = {}

print('Compute Planck 2018 spectra')

for freq in freqs:
    maps = d['map_%s' % freq]
    for hm, map in zip(splits, maps):
        window_T = so_map.read_map('%s/window_T_%s_%s-%s.fits' % (auxMapDir, experiment, freq, hm))
        window_pol = so_map.read_map('%s/window_P_%s_%s-%s.fits' % (auxMapDir, experiment, freq, hm))
        window_tuple = (window_T, window_pol)
        del window_T, window_pol

        pl_map = so_map.read_map('%s' % map, fields_healpix=(0, 1, 2))
        pl_map.data *= 10**6
        cov_map = so_map.read_map('%s' % map, fields_healpix=4)
        badpix = (cov_map.data == hp.pixelfunc.UNSEEN)
        for i in range(3):
            pl_map.data[i][badpix] = 0.0

        if remove_mono_dipo_T:
            pl_map.data[0] = planck_utils.subtract_mono_di(pl_map.data[0],
                                                           window_tuple[0].data,
                                                           pl_map.nside)
def create_window(patch,
                  maps_info_list,
                  apo_radius_survey=1,
                  res_arcmin=0.5,
                  galactic_mask=None,
                  source_mask=None,
                  compute_T_only=False):
    """Create a window function for a patch

    Parameters
    ----------
    patch: dict
        a dict containing the patch type and coordinates
        if patch_type is "Rectangle", the coordinates are expected to be the 4 corners
        if patch_type is "Disk", we expect the coordinates of the center and the radius in degrees
    maps_info_list: list of dicts describing the data maps
        each dictionary should contain the name, the data type ("IQU" or "I") and optionally
        a calibration factor to apply to the map
        note that all maps in the list should have the same data type
    apo_radius_survey: float
        the apodisation radius in degrees (default: 1 degree)
    res_arcmin: float
        the resolution in arcminutes (default: 0.5 arcmin)
    galactic_mask: fits file
        an optional galactic mask to apply
    source_mask: dict
        a dict containing an optional source mask and its properties
        the dictionary should contain the name, the type of apodisation and the radius of apodisation
    compute_T_only: boolean
        True to compute only T spectra
    """
    if patch["patch_type"] == "Rectangle":
        car_box = patch["patch_coordinate"]
        window = so_map.read_map(maps_info_list[0]["name"], car_box=car_box)
        if maps_info_list[0]["data_type"] == "IQU":
            window.data = window.data[0]
            window.ncomp = 1
        window.data[:] = 0
        window.data[1:-1, 1:-1] = 1
        apo_type_survey = "C1"
    elif patch["patch_type"] == "Disk":
        dec_c, ra_c = patch["center"]
        radius = patch["radius"]
        eps = 0.1
        car_box = [[dec_c - radius - eps, ra_c - radius - eps],
                   [dec_c + radius + eps, ra_c + radius + eps]]
        window = so_map.read_map(maps_info_list[0]["name"], car_box=car_box)
        if maps_info_list[0]["data_type"] == "IQU":
            window.data = window.data[0]
            window.ncomp = 1
        window.data[:] = 1
        y_c, x_c = enmap.sky2pix(window.data.shape,
                                 window.data.wcs,
                                 [dec_c * np.pi / 180, ra_c * np.pi / 180])
        window.data[int(y_c), int(x_c)] = 0
        dist = distance_transform_edt(window.data) * res_arcmin * 1 / 60
        window.data[dist < radius] = 0
        window.data = 1 - window.data
        apo_type_survey = "C1"
    else:
        raise ValueError("Patch type '{}' is not supported".format(patch["patch_type"]))

    if galactic_mask is not None:
        gal_mask = so_map.read_map(galactic_mask, car_box=car_box)
        window.data *= gal_mask.data
        del gal_mask

    for map_info in maps_info_list:
        split = so_map.read_map(map_info["name"], car_box=car_box)
        if compute_T_only and map_info["data_type"] == "IQU":
            split.data = split.data[0]
            split.ncomp = 1

        if split.ncomp == 1:
            window.data[split.data == 0] = 0.0
        else:
            for i in range(split.ncomp):
                window.data[split.data[i] == 0] = 0.0

    window = so_window.create_apodization(window,
                                          apo_type=apo_type_survey,
                                          apo_radius_degree=apo_radius_survey)

    if source_mask is not None:
        ps_mask = so_map.read_map(source_mask["name"], car_box=car_box)
        ps_mask = so_window.create_apodization(ps_mask,
                                               apo_type=source_mask["apo_type"],
                                               apo_radius_degree=source_mask["apo_radius"])
        window.data *= ps_mask.data
        del ps_mask

    return car_box, window
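# Hypothetical usage of create_window for a rectangular patch; the coordinates
# and the "ps_mask.fits" file name are placeholders. The resulting car_box and
# window can then be passed to get_spectra above.
patch = {"patch_type": "Rectangle",
         "patch_coordinate": [[-5, 30], [5, 50]]}  # [[dec0, ra0], [dec1, ra1]] in degrees
car_box, window = create_window(patch,
                                maps_info_list,
                                apo_radius_survey=1,
                                source_mask={"name": "ps_mask.fits",
                                             "apo_type": "C1",
                                             "apo_radius": 0.3})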
# we put the best fit power spectrum in a matrix [nfreqs, nfreqs, lmax]
# taking into account the correlation of the fg between different frequencies
ncomp = 3
ps_cmb = powspec.read_spectrum("%s/lcdm.dat" % bestfit_dir)[:ncomp, :ncomp]
l, ps_fg = data_analysis_utils.get_foreground_matrix(bestfit_dir, freq_list, lmax)

# prepare the filters
template = {}
filter = {}
for sv in surveys:
    template_name = d["maps_%s_%s" % (sv, arrays[0])][0]
    template[sv] = so_map.read_map(template_name)
    ks_f = d["k_filter_%s" % sv]

    assert template[sv].pixel == "CAR", "we only compute kspace tf in CAR pixellisation"
    assert ks_f["apply"], "the filter keyword apply has to be set to True"

    shape, wcs = template[sv].data.shape, template[sv].data.wcs
    if ks_f["type"] == "binary_cross":
        filter[sv] = so_map_preprocessing.build_std_filter(shape,
                                                           wcs,
                                                           vk_mask=ks_f["vk_mask"],
                                                           hk_mask=ks_f["hk_mask"],
                                                           dtype=np.float64)
"data_type": "IQU", "id": name, "cal": None }] beam = d["beam"] lmax = d["lmax"] type = d["type"] bin_size = d["bin_size"] compute_T_only = d["compute_T_only"] binning_file_name = "%s/binning.dat" % spectra_dir pspy_utils.create_binning_file(bin_size=bin_size, n_bins=1000, file_name=binning_file_name) window = so_map.read_map("%s/window.fits" % (window_dir)) car_box = np.loadtxt("%s/car_box.dat" % (window_dir)) l_exact_array = d["l_exact_array"] l_band_array = d["l_band_array"] l_toep_array = d["l_toep_array"] for l_exact, l_band, l_toep in zip(l_exact_array, l_band_array, l_toep_array): if (l_exact == None) & (l_toep == None) & (l_band == None): test = "exact" else: test = "%d_%d_%d" % (l_exact, l_band, l_toep) t = time.time()
for id_sv2, sv2 in enumerate(surveys):
    arrays_2 = d["arrays_%s" % sv2]
    for id_ar2, ar2 in enumerate(arrays_2):
        # This ensures that we do not repeat redundant computations
        if (id_sv1 == id_sv2) & (id_ar1 > id_ar2):
            continue
        if id_sv1 > id_sv2:
            continue
        sv1_list += [sv1]
        ar1_list += [ar1]
        sv2_list += [sv2]
        ar2_list += [ar2]
        n_alms += 1

print("number of sq win alms to compute : %s" % n_alms)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=n_alms - 1)
print(subtasks)
for task in subtasks:
    task = int(task)
    sv1, ar1, sv2, ar2 = sv1_list[task], ar1_list[task], sv2_list[task], ar2_list[task]

    win_T1 = so_map.read_map(d["window_T_%s_%s" % (sv1, ar1)])
    win_T2 = so_map.read_map(d["window_T_%s_%s" % (sv2, ar2)])

    sq_win = win_T1.copy()
    sq_win.data[:] *= win_T2.data[:]
    sqwin_alm = sph_tools.map2alm(sq_win, niter=niter, lmax=lmax)

    np.save("%s/alms_%s_%sx%s_%s.npy" % (sq_win_alms_dir, sv1, ar1, sv2, ar2), sqwin_alm)
            freq1_list += [freq1]
            freq2_list += [freq2]
            hm1_list += [hm1]
            hm2_list += [hm2]
            n_mcms += 1

print("number of mcm matrices to compute : %s" % n_mcms)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=n_mcms - 1)
print(subtasks)
for task in subtasks:
    task = int(task)
    freq1, hm1, freq2, hm2 = freq1_list[task], hm1_list[task], freq2_list[task], hm2_list[task]

    win_t1 = so_map.read_map("%s/window_T_%s_%s-%s.fits" % (windows_dir, experiment, freq1, hm1))
    win_pol1 = so_map.read_map("%s/window_P_%s_%s-%s.fits" % (windows_dir, experiment, freq1, hm1))
    window_tuple1 = (win_t1, win_pol1)

    win_t2 = so_map.read_map("%s/window_T_%s_%s-%s.fits" % (windows_dir, experiment, freq2, hm2))
    win_pol2 = so_map.read_map("%s/window_P_%s_%s-%s.fits" % (windows_dir, experiment, freq2, hm2))
    window_tuple2 = (win_t2, win_pol2)

    del win_t1, win_pol1

    l, bl1_t = np.loadtxt(d["beam_%s_%s_T" % (freq1, hm1)], unpack=True)
pspy_utils.create_directory(survey_mask_dir)

# We loop over all the different experiments that we want to consider
for exp in experiment:
    # Each experiment can have its own nside and frequency list
    nside = d['nside_%s' % exp]
    freqs = d['freq_%s' % exp]
    for count, freq in enumerate(freqs):
        # We create a template for each frequency and add all the components present in 'content'
        map_all = so_map.healpix_template(ncomp=3, nside=nside)
        for cont in content:
            maps_list = d['%s_maps' % cont]
            map = maps_list[count]
            map = so_map.read_map(map)
            # some of the components are I only, while others are I, Q, U
            if len(map.data.shape) == 1:
                map_all.data[0] += map.data
            else:
                for i in range(3):
                    map_all.data[i] += map.data[i]

        # we read two noise maps; since the noise maps represent the noise properties
        # of the full SO survey, we multiply them by sqrt(2)
        noise0_list = d['noise_maps0']
        noise1_list = d['noise_maps1']
        noise_map0 = so_map.read_map(noise0_list[count])
import sys

import numpy as np
import pandas as pd
from pspy import so_dict, so_map

d = so_dict.so_dict()
d.read_from_file(sys.argv[1])

binary = so_map.read_map(d["template"])
if binary.data.ndim > 2:
    # Only use temperature
    binary.data = binary.data[0]
binary.data = binary.data.astype(np.int16)
binary.data[:] = 1

# Sigurd point sources
if "point_source_file" in d:
    print("Adding point sources...")
    df = pd.read_table(d["point_source_file"], escapechar="#", sep=r"\s+")
    high_flux_good_SNR = ((df.Tflux > d.get("point_source_Tflux", 15))
                          & (df.SNR > d.get("point_source_SNR", 5)))
    df = df[high_flux_good_SNR]
    coordinates = np.deg2rad([df.dec, df.ra])
    mask = so_map.generate_source_mask(binary, coordinates, d.get("point_source_radius", 5.0))

# Monster sources
if "monster_source_file" in d:
    print("Adding monster point sources...")
    df = pd.read_csv(d["monster_source_file"], comment="#")
d["ra1_%s" % exp], d["dec0_%s" % exp], d["dec1_%s" % exp], d["res_%s" % exp]) else: template = so_map.healpix_template(ncomp, nside=d["nside_%s" % exp]) l, nl_array_t, nl_array_pol = maps_to_params_utils.get_noise_matrix_spin0and2( noise_data_dir, exp, freqs, lmax_simu + 1, nsplits, lcut=lcut) nlms = maps_to_params_utils.generate_noise_alms( nl_array_t, lmax_simu, nsplits, ncomp, nl_array_pol=nl_array_pol) for fid, freq in enumerate(freqs): window = so_map.read_map("%s/window_%s_%s.fits" % (window_dir, exp, freq)) window_tuple = (window, window) l, bl = np.loadtxt("sim_data/beams/beam_%s_%s.dat" % (exp, freq), unpack=True) alms_beamed = alms.copy() if include_fg == True: # include fg for the temperature alms # pol not implemented yet alms_beamed[0] += fglms[2 * fcount] alms_beamed[1] += fglms[2 * fcount + 1] alms_beamed = maps_to_params_utils.multiply_alms( alms_beamed, bl, ncomp)
        nc_list += [nc]
        nd_list += [nd]
        ncovs += 1

nspecs = len(spec_name)

print("number of covariance matrices to compute : %s" % ncovs)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=ncovs - 1)
for task in subtasks:
    task = int(task)
    na, nb, nc, nd = na_list[task], nb_list[task], nc_list[task], nd_list[task]

    win = {}
    win["Ta"] = so_map.read_map("%s/window_T_%s-hm1.fits" % (windows_dir, na))
    win["Tb"] = so_map.read_map("%s/window_T_%s-hm2.fits" % (windows_dir, nb))
    win["Tc"] = so_map.read_map("%s/window_T_%s-hm1.fits" % (windows_dir, nc))
    win["Td"] = so_map.read_map("%s/window_T_%s-hm2.fits" % (windows_dir, nd))
    win["Pa"] = so_map.read_map("%s/window_P_%s-hm1.fits" % (windows_dir, na))
    win["Pb"] = so_map.read_map("%s/window_P_%s-hm2.fits" % (windows_dir, nb))
    win["Pc"] = so_map.read_map("%s/window_P_%s-hm1.fits" % (windows_dir, nc))
    win["Pd"] = so_map.read_map("%s/window_P_%s-hm2.fits" % (windows_dir, nd))

    coupling = so_cov.cov_coupling_spin0and2_simple(win, lmax, niter=niter, planck=True)

    analytic_cov = np.zeros((4 * nbins, 4 * nbins))

    # TaTbTcTd
except:
    pass

spectra = ['TT', 'TE', 'TB', 'ET', 'BT', 'EE', 'EB', 'BE', 'BB']

arrays = d['arrays']
niter = d['niter']
lmax = d['lmax']
type = d['type']
binning_file = d['binning_file']
theoryfile = d['theoryfile']

fsky = {}
fsky['pa1'] = 'fsky0.01081284'
fsky['pa2'] = 'fsky0.01071187'

apo = so_map.read_map(d['apo_path'])
box = so_map.bounding_box_from_map(apo)

for ar in arrays:
    t = time.time()

    window = so_map.read_map(d['window_T_%s' % ar])
    window = so_map.get_submap_car(window, box, mode='round')
    window_tuple = (window, window)

    print("compute mcm and Bbl ...")

    beam = np.loadtxt(d['beam_%s' % ar])
    l, bl = beam[:, 0], beam[:, 1]
    bl_tuple = (bl, bl)
type = d['type']
binning_file = d['binning_file']
pixWin = d['pixWin']
splits = d['splits']
experiment = 'Planck'

print('Compute Planck 2018 mode coupling matrices')

for c1, freq1 in enumerate(freqs):
    window_T_1 = d['window_T_%s' % freq1]
    window_pol_1 = d['window_pol_%s' % freq1]
    for count1, hm1 in enumerate(splits):
        win_T1 = so_map.read_map(window_T_1[count1])
        win_pol1 = so_map.read_map(window_pol_1[count1])
        win_T1.write_map('%s/window_T_%s_%s-%s.fits' % (auxMapDir, experiment, freq1, hm1))
        win_pol1.write_map('%s/window_P_%s_%s-%s.fits' % (auxMapDir, experiment, freq1, hm1))
        window_tuple1 = (win_T1, win_pol1)
        del win_T1, win_pol1

        l, bl1_T = np.loadtxt(d['beam_%s_%s_T' % (freq1, hm1)], unpack=True)
        l, bl1_pol = np.loadtxt(d['beam_%s_%s_pol' % (freq1, hm1)], unpack=True)

        if pixWin:
            bl1_T *= hp.pixwin(window_tuple1[0].nside)[:len(bl1_T)]
            bl1_pol *= hp.pixwin(window_tuple1[0].nside)[:len(bl1_pol)]