for id_ar1, ar1 in enumerate(arrays_1):
    for id_sv2, sv2 in enumerate(surveys):
        arrays_2 = d["arrays_%s" % sv2]
        for id_ar2, ar2 in enumerate(arrays_2):
            # This ensures that we do not repeat redundant computations
            if (id_sv1 == id_sv2) and (id_ar1 > id_ar2): continue
            if id_sv1 > id_sv2: continue
            sv1_list += [sv1]
            ar1_list += [ar1]
            sv2_list += [sv2]
            ar2_list += [ar2]
            n_mcms += 1

print("number of mcm matrices to compute : %s" % n_mcms)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=n_mcms - 1)
print(subtasks)

for task in subtasks:
    task = int(task)
    sv1, ar1, sv2, ar2 = sv1_list[task], ar1_list[task], sv2_list[task], ar2_list[task]
    print("%s_%s x %s_%s" % (sv1, ar1, sv2, ar2))

    l, bl1 = pspy_utils.read_beam_file(d["beam_%s_%s" % (sv1, ar1)])
    win1_T = so_map.read_map(d["window_T_%s_%s" % (sv1, ar1)])
    win1_pol = so_map.read_map(d["window_pol_%s_%s" % (sv1, ar1)])

    l, bl2 = pspy_utils.read_beam_file(d["beam_%s_%s" % (sv2, ar2)])
    win2_T = so_map.read_map(d["window_T_%s_%s" % (sv2, ar2)])
    win2_pol = so_map.read_map(d["window_pol_%s_%s" % (sv2, ar2)])
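# A minimal, self-contained sketch (with hypothetical survey and array names)
# of the pair enumeration above: the two "continue" guards keep exactly one of
# each (sv, ar) x (sv, ar) combination, i.e. the upper triangle of the pair
# matrix, so n_mcms = n * (n + 1) / 2 for n (survey, array) combinations.
surveys_demo = {"s17": ["pa4", "pa5"], "s18": ["pa4"]}
pairs = []
for id_sv1, sv1 in enumerate(surveys_demo):
    for id_ar1, ar1 in enumerate(surveys_demo[sv1]):
        for id_sv2, sv2 in enumerate(surveys_demo):
            for id_ar2, ar2 in enumerate(surveys_demo[sv2]):
                if (id_sv1 == id_sv2) and (id_ar1 > id_ar2): continue
                if id_sv1 > id_sv2: continue
                pairs += [(sv1, ar1, sv2, ar2)]
print(len(pairs))  # 6 unique pairs for 3 (survey, array) combinations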
pspy_utils.create_directory(specDir)

spectra = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"]
spin_pairs = ["spin0xspin0", "spin0xspin2", "spin2xspin0", "spin2xspin2"]

all_freqs = [freq for exp in experiments for freq in d["freqs_%s" % exp]]

ncomp = 3
ps_cmb = powspec.read_spectrum(d["clfile"])[:ncomp, :ncomp]

if include_fg:
    l, ps_fg = maps_to_params_utils.get_foreground_matrix(fg_dir,
                                                          fg_components,
                                                          all_freqs,
                                                          lmax_simu + 1)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=d["iStart"], imax=d["iStop"])

for iii in subtasks:
    # First we generate our simulations and take their harmonic transforms
    t0 = time.time()
    alms = curvedsky.rand_alm(ps_cmb, lmax=lmax_simu)

    if include_fg:
        fglms = curvedsky.rand_alm(ps_fg, lmax=lmax_simu)

    master_alms = {}
    fcount = 0

    for exp in experiments:
        freqs = d["freqs_%s" % exp]
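# Hedged sketch of the simulation step above: curvedsky.rand_alm draws a
# Gaussian alm realisation from an (ncomp, ncomp, nell) spectrum matrix. The
# toy spectrum below is flat and diagonal, not a pspipe product.
import numpy as np
from pixell import curvedsky

lmax_demo = 100
cl_demo = np.zeros((3, 3, lmax_demo + 1))
for comp in range(3):
    cl_demo[comp, comp] = 1.0  # toy TT/EE/BB power, flat in ell
alm_demo = curvedsky.rand_alm(cl_demo, lmax=lmax_demo)
print(alm_demo.shape)  # (3, n_alm): one alm array per T/E/B component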
# here we list the different windows that need to be computed, we will then do an MPI loop over this list
sv_list, ar_list = [], []
n_wins = 0
for sv in surveys:
    arrays = d["arrays_%s" % sv]
    for ar in arrays:
        sv_list += [sv]
        ar_list += [ar]
        n_wins += 1

print("number of windows to compute : %s" % n_wins)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=n_wins - 1)
print(subtasks)

for task in subtasks:
    task = int(task)
    sv, ar = sv_list[task], ar_list[task]

    survey_mask = gal_mask.copy()
    survey_mask.data[:] = 1

    # a pixel is kept only if it is observed in every split map
    maps = d["maps_%s_%s" % (sv, ar)]
    for map_file in maps:
        print(map_file)
        split = so_map.read_map(map_file)
        survey_mask.data[split.data[0] == 0.0] = 0.0
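# Minimal numpy sketch of the masking logic above: a pixel survives in the
# survey mask only if every split map has data there.
import numpy as np

splits_demo = [np.array([0.0, 1.2, 3.4, 0.0]), np.array([0.5, 0.0, 2.2, 0.0])]
mask_demo = np.ones_like(splits_demo[0])
for split_demo in splits_demo:
    mask_demo[split_demo == 0.0] = 0.0
print(mask_demo)  # [0. 0. 1. 0.]: only the pixel observed in both splits survives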
for sid2, spec2 in enumerate(spec_name):
    if sid1 > sid2: continue
    print(spec1, spec2)
    na, nb = spec1.split("x")
    nc, nd = spec2.split("x")
    na_list += [na]
    nb_list += [nb]
    nc_list += [nc]
    nd_list += [nd]
    ncovs += 1

nspecs = len(spec_name)
print("number of covariance matrices to compute : %s" % ncovs)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=ncovs - 1)

for task in subtasks:
    task = int(task)
    na, nb, nc, nd = na_list[task], nb_list[task], nc_list[task], nd_list[task]

    # the same window file is used here for temperature (T) and polarisation (P)
    win = {}
    win["Ta"] = so_map.read_map("%s/window_%s.fits" % (window_dir, na))
    win["Tb"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nb))
    win["Tc"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nc))
    win["Td"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nd))
    win["Pa"] = so_map.read_map("%s/window_%s.fits" % (window_dir, na))
    win["Pb"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nb))
    win["Pc"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nc))
    win["Pd"] = so_map.read_map("%s/window_%s.fits" % (window_dir, nd))
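# Sketch of the block counting above: the "if sid1 > sid2: continue" guard
# keeps the upper triangle (including the diagonal) of the spectra x spectra
# matrix, i.e. nspecs * (nspecs + 1) / 2 covariance blocks. The spectrum names
# below are hypothetical.
spec_name_demo = ["dr6_pa4xdr6_pa4", "dr6_pa4xdr6_pa5", "dr6_pa5xdr6_pa5"]
nspecs_demo = len(spec_name_demo)
ncovs_demo = sum(1 for sid1 in range(nspecs_demo)
                 for sid2 in range(nspecs_demo) if sid1 <= sid2)
print(ncovs_demo, nspecs_demo * (nspecs_demo + 1) // 2)  # 6 6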
freqs += len(files) * [ar[1:]]

# Survey mask
survey = so_map.read_map(d.get("survey_planck", raise_error=True))
survey.ncomp = 3
survey.data = np.tile(survey.data, (survey.ncomp, 1, 1))

# Mask dir for removing monopole/dipole
masks_dir = os.path.join(d["data_dir"], "planck/likelihood_mask/")
mask_tmpl = os.path.join(masks_dir, "COM_Mask_Likelihood-{}-{}-{}_2048_R3.00.fits")

nmaps = len(map_files)
print(f"number of maps to project : {nmaps}")

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=0, imax=nmaps - 1)
print(subtasks)

for task in subtasks:
    task = int(task)
    map_file = map_files[task]

    print(f"Reading {map_file}...")
    npipe_map = so_map.read_map(map_file, fields_healpix=(0, 1, 2), coordinate="gal")
    npipe_map.data *= 10**6  # convert from K to muK

    if d.get("remove_mono_dipo_t", True):
        mask_hm1 = so_map.read_map(mask_tmpl.format("temperature", freqs[task], "hm1"))
        mask_hm2 = so_map.read_map(mask_tmpl.format("temperature", freqs[task], "hm2"))
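# Hedged sketch of a monopole/dipole removal step using healpy (the actual
# pipeline call may differ): masked pixels are set to UNSEEN so the fit
# ignores them, then the best-fit monopole and dipole are subtracted.
import healpy as hp
import numpy as np

nside_demo = 32
t_demo = np.random.randn(hp.nside2npix(nside_demo))
mask_demo = np.ones_like(t_demo)  # 1 = keep, 0 = masked; a real mask would come from the files above
t_masked = np.where(mask_demo == 1, t_demo, hp.UNSEEN)
t_clean = hp.remove_dipole(t_masked, bad=hp.UNSEEN)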
nSplits = len(splits)

l, Nl_T, Nl_P = planck_utils.get_noise_matrix_spin0and2(ps_model_dir,
                                                        experiment,
                                                        freqs,
                                                        lmax,
                                                        nSplits,
                                                        lcut=0,
                                                        use_noise_th=use_noise_th)

pixwin = hp.pixwin(nside)

so_mpi.init(True)
subtasks = so_mpi.taskrange(imin=d['iStart'], imax=d['iStop'])

for iii in subtasks:
    t0 = time.time()
    alms = {}

    # the nlms and sim_alm lmax need to be investigated: there is a mismatch
    # of 1 that is not understood at the moment
    sim_alm = curvedsky.rand_alm(ps_th, lmax=lmax - 1)
    nlms = planck_utils.generate_noise_alms(Nl_T, Nl_P, lmax, nSplits, ncomp)

    for freq_id, freq in enumerate(freqs):
        maps = d['map_%s' % freq]
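# Sketch of how a pixel window is typically applied to simulated alms with
# healpy (assumed usage; the actual convolution happens further down in the
# script): hp.pixwin returns the transfer function of nside-sized pixels,
# which is multiplied onto the alms with hp.almxfl.
import healpy as hp
import numpy as np

nside_demo = 128
lmax_demo = 2 * nside_demo
alm_demo = hp.synalm(np.ones(lmax_demo + 1), lmax=lmax_demo)  # toy flat spectrum
pw_demo = hp.pixwin(nside_demo)
alm_demo = hp.almxfl(alm_demo, pw_demo[:lmax_demo + 1])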