def sim_build(region, version, overwrite):
    save_scratch = True
    savedir = tutils.get_save_path(version, region)
    if save_scratch:
        scratch = tutils.get_scratch_path(version, region)
        covscratch = scratch
    else:
        covscratch = None
    if not overwrite:
        assert not os.path.exists(savedir), \
            "This version already exists on disk. Please use a different version identifier."
    try:
        os.makedirs(savedir)
    except:
        if overwrite:
            pass
        else:
            raise
    if save_scratch:
        try:
            os.makedirs(scratch)
        except:
            pass
    # enmap.write_map(scratch+"/temp1.hdf",enmap.enmap(np.zeros((100,100))))
    # enmap.write_map(savedir+"/temp2.hdf",enmap.enmap(np.zeros((100,100))))
    np.save(scratch + "/temp1.npy", enmap.enmap(np.zeros((100, 100))))
    np.save(savedir + "/temp2.npy", enmap.enmap(np.zeros((100, 100))))
    shutil.rmtree(scratch)
def sim_ilc(region, version, cov_version, overwrite):
    savedir = tutils.get_save_path(version, region)
    covdir = tutils.get_save_path(cov_version, region)
    print(covdir)
    assert os.path.exists(covdir)
    if not overwrite:
        assert not os.path.exists(savedir), \
            "This version already exists on disk. Please use a different version identifier."
    try:
        os.makedirs(savedir)
    except:
        if overwrite:
            pass
        else:
            raise
    # enmap.write_map(savedir+"/temp2.hdf",enmap.enmap(np.zeros((100,100))))
    np.save(savedir + "/temp2.npy", enmap.enmap(np.zeros((100, 100))))
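# Usage sketch (illustrative only; the region and version strings below are
# placeholders, not part of the pipeline): both helpers above stage a per-version
# output directory via tutils.get_save_path, refuse to proceed if it already
# exists unless overwrite=True, and drop small temp .npy files to verify the
# paths are writable. A driver script might call them roughly like this:
def _example_sim_staging(region="deep56", overwrite=False):
    # Stage a scratch/save directory pair for a sim build, then an ILC output
    # directory that points back at an existing covariance version.
    sim_build(region, "sim_test_v0", overwrite)
    sim_ilc(region, "ilc_test_v0", cov_version="cov_test_v0", overwrite=overwrite)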
if do_act_only:
    print("Starting ACT-only ILC")
    ilc_version = "map_act_only_%s_%s" % (args.version, ind_str)
    with bench.show("sim ilc"):
        pipeline.build_and_save_ilc(act_arrays, args.region, ilc_version, sim_version, args.beam_version,
                                    args.solutions, args.beams, args.chunk_size,
                                    args.effective_freq, args.overwrite, args.maxval,
                                    unsanitized_beam=args.unsanitized_beam, do_weights=args.do_weights,
                                    no_act_color_correction=args.no_act_color_correction, ccor_exp=args.ccor_exp,
                                    isotropize=args.isotropize)

if do_planck_only:
    print("Starting Planck-only ILC")
    ilc_version = "map_planck_only_%s_%s" % (args.version, ind_str)
    with bench.show("sim ilc"):
        pipeline.build_and_save_ilc(planck_arrays, args.region, ilc_version, sim_version, args.beam_version,
                                    args.solutions, args.beams_planck, args.chunk_size,
                                    args.effective_freq, args.overwrite, args.maxval,
                                    unsanitized_beam=args.unsanitized_beam, do_weights=args.do_weights,
                                    no_act_color_correction=args.no_act_color_correction, ccor_exp=args.ccor_exp,
                                    isotropize=args.isotropize)

savepath = tutils.get_save_path(sim_version, args.region)
if not args.save_all:
    shutil.rmtree(savepath)
print("Rank %d done with task %d at %s." % (rank, task, str(datetime.now())))
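# Note on naming (a summary of the conventions used in these drivers, not new
# behavior): each simulated run is saved under a version string of the form
# "map_<joint|act_only|planck_only>_<args.version>_<SS>_<IIII>", where
# ind_str = "<SS>_<IIII>" is the zero-padded simulation set id and sim index.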
totinputs = {}
totautos = {}
totcrosses = {}

for i, task in enumerate(my_tasks):
    sim_index = task
    print("Rank %d starting task %d at %s..." % (rank, task, str(datetime.now())))

    # Get directory
    ind_str = str(set_id).zfill(2) + "_" + str(sim_index).zfill(4)
    #ind_str = str(set_id).zfill(2)+"_"+str(sim_index*2).zfill(4) # !!!
    version = "map_joint_%s_%s" % (args.version, ind_str)
    savedir = tutils.get_save_path(version, args.region, rversion)
    print(savedir)
    assert os.path.exists(savedir), savedir

    # Load mask
    if i == 0:
        mask = enmap.read_map(savedir + "/tilec_mask.fits")
        shape, wcs = mask.shape, mask.wcs
        modlmap = mask.modlmap()
        binner = stats.bin2D(modlmap, bin_edges)
        w2 = np.mean(mask**2.)

    inputs = {}
    iis = {}
    for input_name in input_names:
        inputs[input_name] = enmap.fft(
def calculate_yy(bin_edges,arrays,region,version,cov_versions,beam_version,
                 effective_freq,overwrite,maxval,unsanitized_beam=False,do_weights=False,
                 pa1_shift = None,
                 pa2_shift = None,
                 pa3_150_shift = None,
                 pa3_090_shift = None,
                 no_act_color_correction=False, ccor_exp = -1,
                 sim_splits=None,unblind=False,all_analytic=False,beta_samples=None):
    """
    We calculate the yy power spectrum as follows.
    We restrict the Fourier modes in our analysis to those within bin_edges.
    This way we don't carry irrelevant pixels and thus speed up Monte Carlo runs.

    We accept three covariance versions in cov_versions, which correspond to
    [act_covariance_from_split_0, act_covariance_from_split_1, other_covs].
    Thus the ACT auto covariances are pre-calculated.
    """
    arrays = arrays.split(',')
    narrays = len(arrays)
    if sim_splits is not None: assert not(unblind)
    def warn(): print("WARNING: no bandpass file found. Assuming array ",dm.c['id']," has no response to CMB, tSZ and CIB.")
    aspecs = tutils.ASpecs().get_specs
    bandpasses = not(effective_freq)
    savedir = tutils.get_save_path(version,region)
    assert len(cov_versions)==3
    covdirs = [tutils.get_save_path(cov_versions[i],region) for i in range(3)]
    for covdir in covdirs: assert os.path.exists(covdir)
    if not(overwrite):
        assert not(os.path.exists(savedir)), \
            "This version already exists on disk. Please use a different version identifier."
    try:
        os.makedirs(savedir)
    except:
        if overwrite:
            pass
        else:
            raise

    mask = enmap.read_map(covdir+"tilec_mask.fits")
    from scipy.ndimage import gaussian_filter as smooth
    pm = enmap.read_map("/scratch/r/rbond/msyriac/data/planck/data/pr2/COM_Mask_Lensing_2048_R2.00_car_deep56_interp_order0.fits")
    wcs = pm.wcs
    mask = enmap.enmap(smooth(pm,sigma=10),wcs) * mask
    shape,wcs = mask.shape,mask.wcs
    Ny,Nx = shape
    modlmap = enmap.modlmap(shape,wcs)
    omodlmap = modlmap.copy()
    ells = np.arange(0,modlmap.max())
    minell = maps.minimum_ell(shape,wcs)
    # Restrict to the Fourier modes covered by bin_edges (see docstring).
    sel = np.where(np.logical_and(modlmap>=bin_edges[0]-minell,modlmap<=bin_edges[-1]+minell))
    modlmap = modlmap[sel]

    bps = []
    lbeams = []
    kbeams = []
    shifts = []
    cfreqs = []
    lmins = []
    lmaxs = []
    names = []
    for i,qid in enumerate(arrays):
        dm = sints.models[sints.arrays(qid,'data_model')](region=mask,calibrated=True)
        if dm.name=='act_mr3':
            season,array1,array2 = sints.arrays(qid,'season'),sints.arrays(qid,'array'),sints.arrays(qid,'freq')
            array = '_'.join([array1,array2])
        elif dm.name=='planck_hybrid':
            season,patch,array = None,None,sints.arrays(qid,'freq')
        else:
            raise ValueError
        lmin,lmax,hybrid,radial,friend,cfreq,fgroup,wrfit = aspecs(qid)
        lmins.append(lmin)
        lmaxs.append(lmax)
        names.append(qid)
        cfreqs.append(cfreq)
        if bandpasses:
            try:
                fname = dm.get_bandpass_file_name(array)
                bps.append("data/"+fname)
                if (pa1_shift is not None) and 'PA1' in fname:
                    shifts.append(pa1_shift)
                elif (pa2_shift is not None) and 'PA2' in fname:
                    shifts.append(pa2_shift)
                elif (pa3_150_shift is not None) and ('PA3' in fname) and ('150' in fname):
                    shifts.append(pa3_150_shift)
                elif (pa3_090_shift is not None) and ('PA3' in fname) and ('090' in fname):
                    shifts.append(pa3_090_shift)
                else:
                    shifts.append(0)
            except:
                warn()
                bps.append(None)
        else:
            try:
                bps.append(cfreq)
            except:
                warn()
                bps.append(None)

        kbeam = tutils.get_kbeam(qid,modlmap,sanitize=not(unsanitized_beam),version=beam_version,planck_pixwin=True)
        if dm.name=='act_mr3':
            lbeam = tutils.get_kbeam(qid,ells,sanitize=not(unsanitized_beam),version=beam_version,planck_pixwin=False) # note no pixwin but doesnt matter since no ccorr for planck
        elif dm.name=='planck_hybrid':
            lbeam = None
        else:
            raise ValueError
        lbeams.append(lbeam)
        kbeams.append(kbeam.copy())

    # Make responses
    responses = {}
    def _get_response(comp,param_override=None):
        if bandpasses:
            if no_act_color_correction:
                r = tfg.get_mix_bandpassed(bps, comp,
                                           bandpass_shifts=shifts,
                                           param_dict_override=param_override)
            else:
                r = tfg.get_mix_bandpassed(bps, comp,
                                           bandpass_shifts=shifts,
                                           ccor_cen_nus=cfreqs, ccor_beams=lbeams,
                                           ccor_exps=[ccor_exp] * narrays,
                                           param_dict_override=param_override)
        else:
            r = tfg.get_mix(bps, comp, param_dict_override=param_override)
        return r
    for comp in ['tSZ','CMB','CIB']:
        responses[comp] = _get_response(comp,None)

    from tilec.utils import is_planck
    ilcgens = []
    okcoadds = []
    for splitnum in range(2):
        covdir = covdirs[splitnum]
        kcoadds = []
        for i,qid in enumerate(arrays):
            lmin = lmins[i]
            lmax = lmaxs[i]
            if is_planck(qid):
                dm = sints.models[sints.arrays(qid,'data_model')](region=mask,calibrated=True)
                _,kcoadd,_ = kspace.process(dm,region,qid,mask,
                                            skip_splits=True,
                                            splits_fname=sim_splits[i] if sim_splits is not None else None,
                                            inpaint=False,fn_beam=None,
                                            plot_inpaint_path=None,
                                            split_set=splitnum)
            else:
                kcoadd_name = covdir + "kcoadd_%s.npy" % qid
                kcoadd = enmap.enmap(np.load(kcoadd_name),wcs)
            kmask = maps.mask_kspace(shape,wcs,lmin=lmin,lmax=lmax)
            dtype = kcoadd.dtype
            kcoadds.append((kcoadd.copy()*kmask)[sel])
        kcoadds = enmap.enmap(np.stack(kcoadds),wcs)
        okcoadds.append(kcoadds.copy())

        # Read Covmat
        ctheory = ilc.CTheory(modlmap)
        nells = kcoadds[0].size
        cov = np.zeros((narrays,narrays,nells))
        for aindex1 in range(narrays):
            for aindex2 in range(aindex1,narrays):
                qid1 = names[aindex1]
                qid2 = names[aindex2]
                if is_planck(names[aindex1]) or is_planck(names[aindex2]) or all_analytic:
                    lmin,lmax,hybrid,radial,friend,f1,fgroup,wrfit = aspecs(qid1)
                    lmin,lmax,hybrid,radial,friend,f2,fgroup,wrfit = aspecs(qid2)
                    # If both are Planck and same array, get white noise from last bin
                    icov = ctheory.get_theory_cls(f1,f2,a_cmb=1,a_gal=0.8)*kbeams[aindex1]*kbeams[aindex2]
                    if aindex1==aindex2:
                        pcov = enmap.enmap(np.load(covdirs[2]+"tilec_hybrid_covariance_%s_%s.npy" % (names[aindex1],names[aindex2])),wcs)
                        pbin_edges = np.append(np.arange(500,3000,200),[3000,4000,5000,5800])
                        pbinner = stats.bin2D(omodlmap,pbin_edges)
                        w = pbinner.bin(pcov)[1][-1]
                        icov = icov + w
                else:
                    icov = np.load(covdir+"tilec_hybrid_covariance_%s_%s.npy" % (names[aindex1],names[aindex2]))[sel]
                if aindex1==aindex2:
                    icov[modlmap<lmins[aindex1]] = maxval
                    icov[modlmap>lmaxs[aindex1]] = maxval
                cov[aindex1,aindex2] = icov
                cov[aindex2,aindex1] = icov
        assert np.all(np.isfinite(cov))

        ilcgen = ilc.HILC(modlmap,np.stack(kbeams),cov=cov,responses=responses,invert=True)
        ilcgens.append(ilcgen)

    solutions = ['tSZ','tSZ-CMB','tSZ-CIB']
    ypowers = {}
    w2 = np.mean(mask**2.)
    binner = stats.bin2D(modlmap,bin_edges)
    np.random.seed(100)
    blinding = np.random.uniform(0.8,1.2) if not(unblind) else 1

    def _get_ypow(sname,dname,dresponse=None,dcmb=False):
        if dresponse is not None:
            assert dname is not None
            for splitnum in range(2):
                ilcgens[splitnum].add_response(dname,dresponse)
        ykmaps = []
        for splitnum in range(2):
            if dcmb:
                assert dname is not None
                ykmap = ilcgens[splitnum].multi_constrained_map(okcoadds[splitnum],sname,[dname,"CMB"])
            else:
                if dname is None:
                    ykmap = ilcgens[splitnum].standard_map(okcoadds[splitnum],sname)
                else:
                    ykmap = ilcgens[splitnum].constrained_map(okcoadds[splitnum],sname,dname)
            ykmaps.append(ykmap.copy())
        ypower = (ykmaps[0]*ykmaps[1].conj()).real / w2
        return binner.bin(ypower)[1] * blinding

    # The usual solutions
    for solution in solutions:
        sols = solution.split('-')
        if len(sols)==2:
            sname = sols[0]
            dname = sols[1]
        elif len(sols)==1:
            sname = sols[0]
            dname = None
        else:
            raise ValueError
        ypowers[solution] = _get_ypow(sname,dname,dresponse=None)

    # The CIB SED samples
    if beta_samples is not None:
        y_bsamples = []
        y_bsamples_cmb = []
        for beta in beta_samples:
            pdict = tfg.default_dict.copy()
            pdict['beta_CIB'] = beta
            response = _get_response("CIB",param_override=pdict)
            y_bsamples.append( _get_ypow("tSZ","iCIB",dresponse=response,dcmb=False) )
            y_bsamples_cmb.append( _get_ypow("tSZ","iCIB",dresponse=response,dcmb=True) )
    else:
        y_bsamples = None
        y_bsamples_cmb = None

    return binner.centers,ypowers,y_bsamples,y_bsamples_cmb
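# A minimal sketch (illustrative, not called by the pipeline) of the estimator
# used in _get_ypow above: the y maps built from the two ACT-split covariances
# are cross-correlated, normalized by the mask power w2, binned in annuli of
# |ell|, and scaled by the blinding factor.
def _example_split_cross_power(ykmap0, ykmap1, binner, w2, blinding=1.):
    # binner is a stats.bin2D instance defined on the same (restricted) modlmap
    # grid as the two Fourier-space component maps.
    p2d = (ykmap0 * ykmap1.conj()).real / w2
    cents, p1d = binner.bin(p2d)
    return cents, p1d * blinding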
def build_and_save_ilc(arrays,region,version,cov_version,beam_version,
                       solutions,beams,chunk_size,
                       effective_freq,overwrite,maxval,unsanitized_beam=False,do_weights=False,
                       pa1_shift = None,
                       pa2_shift = None,
                       pa3_150_shift = None,
                       pa3_090_shift = None,
                       no_act_color_correction=False, ccor_exp = -1,
                       isotropize=False, isotropize_width=20):

    print("Chunk size is ", chunk_size*64./8./1024./1024./1024., " GB.")
    def warn(): print("WARNING: no bandpass file found. Assuming array ",dm.c['id']," has no response to CMB, tSZ and CIB.")
    aspecs = tutils.ASpecs().get_specs
    bandpasses = not(effective_freq)
    savedir = tutils.get_save_path(version,region)
    covdir = tutils.get_save_path(cov_version,region)
    assert os.path.exists(covdir)
    if not(overwrite):
        assert not(os.path.exists(savedir)), \
            "This version already exists on disk. Please use a different version identifier."
    try:
        os.makedirs(savedir)
    except:
        if overwrite:
            pass
        else:
            raise

    mask = enmap.read_map(covdir+"tilec_mask.fits")
    shape,wcs = mask.shape,mask.wcs
    Ny,Nx = shape
    modlmap = enmap.modlmap(shape,wcs)

    arrays = arrays.split(',')
    narrays = len(arrays)
    kcoadds = []
    kbeams = []
    bps = []
    names = []
    lmins = []
    lmaxs = []
    shifts = []
    cfreqs = []
    lbeams = []
    ells = np.arange(0,modlmap.max())
    for i,qid in enumerate(arrays):
        dm = sints.models[sints.arrays(qid,'data_model')](region=mask,calibrated=True)
        lmin,lmax,hybrid,radial,friend,cfreq,fgroup,wrfit = aspecs(qid)
        cfreqs.append(cfreq)
        lmins.append(lmin)
        lmaxs.append(lmax)
        names.append(qid)
        if dm.name=='act_mr3':
            season,array1,array2 = sints.arrays(qid,'season'),sints.arrays(qid,'array'),sints.arrays(qid,'freq')
            array = '_'.join([array1,array2])
        elif dm.name=='planck_hybrid':
            season,patch,array = None,None,sints.arrays(qid,'freq')
        else:
            raise ValueError
        kcoadd_name = covdir + "kcoadd_%s.npy" % qid
        kmask = maps.mask_kspace(shape,wcs,lmin=lmin,lmax=lmax)
        kcoadd = enmap.enmap(np.load(kcoadd_name),wcs)
        dtype = kcoadd.dtype
        kcoadds.append(kcoadd.copy()*kmask)
        kbeam = tutils.get_kbeam(qid,modlmap,sanitize=not(unsanitized_beam),version=beam_version,planck_pixwin=True)
        if dm.name=='act_mr3':
            lbeam = tutils.get_kbeam(qid,ells,sanitize=not(unsanitized_beam),version=beam_version,planck_pixwin=False) # note no pixwin but doesnt matter since no ccorr for planck
        elif dm.name=='planck_hybrid':
            lbeam = None
        else:
            raise ValueError
        lbeams.append(lbeam)
        kbeams.append(kbeam.copy())
        if bandpasses:
            try:
                fname = dm.get_bandpass_file_name(array)
                bps.append("data/"+fname)
                if (pa1_shift is not None) and 'PA1' in fname:
                    shifts.append(pa1_shift)
                elif (pa2_shift is not None) and 'PA2' in fname:
                    shifts.append(pa2_shift)
                elif (pa3_150_shift is not None) and ('PA3' in fname) and ('150' in fname):
                    shifts.append(pa3_150_shift)
                elif (pa3_090_shift is not None) and ('PA3' in fname) and ('090' in fname):
                    shifts.append(pa3_090_shift)
                else:
                    shifts.append(0)
            except:
                warn()
                bps.append(None)
        else:
            try:
                bps.append(cfreq)
            except:
                warn()
                bps.append(None)

    kcoadds = enmap.enmap(np.stack(kcoadds),wcs)

    # Read Covmat
    cov = maps.SymMat(narrays,shape[-2:])
    for aindex1 in range(narrays):
        for aindex2 in range(aindex1,narrays):
            icov = enmap.enmap(np.load(covdir+"tilec_hybrid_covariance_%s_%s.npy" % (names[aindex1],names[aindex2])),wcs)
            if isotropize:
                bin_edges = np.append([0.],np.arange(min(lmins),modlmap.max(),isotropize_width))
                binner = stats.bin2D(modlmap,bin_edges)
                ls,c1d = binner.bin(icov)
                icov = maps.interp(ls,c1d)(modlmap)
            if aindex1==aindex2:
                icov[modlmap<lmins[aindex1]] = maxval
                icov[modlmap>lmaxs[aindex1]] = maxval
            cov[aindex1,aindex2] = icov
    cov.data = enmap.enmap(cov.data,wcs,copy=False)
    covfunc = lambda sel: cov.to_array(sel,flatten=True)
    assert cov.data.shape[0]==((narrays*(narrays+1))/2) # FIXME: generalize
    assert np.all(np.isfinite(cov.data))

    # Make responses
    responses = {}
    for comp in ['tSZ','CMB','CIB']:
        if bandpasses:
            if no_act_color_correction:
                responses[comp] = tfg.get_mix_bandpassed(bps, comp, bandpass_shifts=shifts)
            else:
                responses[comp] = tfg.get_mix_bandpassed(bps, comp, bandpass_shifts=shifts,
                                                         ccor_cen_nus=cfreqs, ccor_beams=lbeams,
                                                         ccor_exps=[ccor_exp] * narrays)
        else:
            responses[comp] = tfg.get_mix(bps, comp)
    ilcgen = ilc.chunked_ilc(modlmap,np.stack(kbeams),covfunc,chunk_size,responses=responses,invert=True)

    # Initialize containers
    solutions = solutions.split(',')
    data = {}
    kcoadds = kcoadds.reshape((narrays,Ny*Nx))
    for solution in solutions:
        data[solution] = {}
        comps = solution.split('-')
        data[solution]['comps'] = comps
        if len(comps)<=2:
            data[solution]['noise'] = enmap.zeros((Ny*Nx),wcs)
        if len(comps)==2:
            data[solution]['cnoise'] = enmap.zeros((Ny*Nx),wcs)
        data[solution]['kmap'] = enmap.zeros((Ny*Nx),wcs,dtype=dtype) # FIXME: reduce dtype?
        if do_weights and len(comps)<=2:
            for qid in arrays:
                data[solution]['weight_%s' % qid] = enmap.zeros((Ny*Nx),wcs)

    for chunknum,(hilc,selchunk) in enumerate(ilcgen):
        print("ILC on chunk ", chunknum+1, " / ",int(modlmap.size/chunk_size)+1," ...")
        for solution in solutions:
            comps = data[solution]['comps']
            if len(comps)==1: # GENERALIZE
                data[solution]['noise'][selchunk] = hilc.standard_noise(comps[0])
                if do_weights: weight = hilc.standard_weight(comps[0])
                data[solution]['kmap'][selchunk] = hilc.standard_map(kcoadds[...,selchunk],comps[0])
            elif len(comps)==2:
                data[solution]['noise'][selchunk] = hilc.constrained_noise(comps[0],comps[1])
                data[solution]['cnoise'][selchunk] = hilc.cross_noise(comps[0],comps[1])
                ret = hilc.constrained_map(kcoadds[...,selchunk],comps[0],comps[1],return_weight=do_weights)
                if do_weights:
                    data[solution]['kmap'][selchunk],weight = ret
                else:
                    data[solution]['kmap'][selchunk] = ret
            elif len(comps)>2:
                data[solution]['kmap'][selchunk] = np.nan_to_num(hilc.multi_constrained_map(kcoadds[...,selchunk],comps[0],*comps[1:]))
            if len(comps)<=2 and do_weights:
                for qind,qid in enumerate(arrays):
                    data[solution]['weight_%s' % qid][selchunk] = weight[qind]

    del ilcgen,cov

    # Reshape into maps
    name_map = {'CMB':'cmb','tSZ':'comptony','CIB':'cib'}
    beams = beams.split(',')
    for solution,beam in zip(solutions,beams):
        comps = "tilec_single_tile_"+region+"_"
        comps = comps + name_map[data[solution]['comps'][0]]+"_"
        if len(data[solution]['comps'])>1:
            comps = comps + "deprojects_"+ '_'.join([name_map[x] for x in data[solution]['comps'][1:]]) + "_"
        comps = comps + version

        if do_weights and len(data[solution]['comps'])<=2:
            for qind,qid in enumerate(arrays):
                enmap.write_map("%s/%s_%s_weight.fits" % (savedir,comps,qid), enmap.enmap(data[solution]['weight_%s' % qid].reshape((Ny,Nx)),wcs))

        try:
            noise = enmap.enmap(data[solution]['noise'].reshape((Ny,Nx)),wcs)
            enmap.write_map("%s/%s_noise.fits" % (savedir,comps),noise)
        except: pass
        try:
            cnoise = enmap.enmap(data[solution]['cnoise'].reshape((Ny,Nx)),wcs)
            enmap.write_map("%s/%s_cross_noise.fits" % (savedir,comps),cnoise)
        except: pass

        ells = np.arange(0,modlmap.max(),1)
        try:
            fbeam = float(beam)
            kbeam = maps.gauss_beam(modlmap,fbeam)
            lbeam = maps.gauss_beam(ells,fbeam)
        except:
            qid = beam
            bfunc = lambda x: tutils.get_kbeam(qid,x,version=beam_version,sanitize=not(unsanitized_beam),planck_pixwin=False)
            kbeam = bfunc(modlmap)
            lbeam = bfunc(ells)

        kmap = enmap.enmap(data[solution]['kmap'].reshape((Ny,Nx)),wcs)
        smap = enmap.ifft(kbeam*kmap,normalize='phys').real
        enmap.write_map("%s/%s.fits" % (savedir,comps),smap)
        io.save_cols("%s/%s_beam.txt" % (savedir,comps),(ells,lbeam),header="ell beam")

    enmap.write_map(savedir+"/tilec_mask.fits",mask)
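# Illustrative invocation (a sketch; the array ids, version strings, chunk size,
# maxval and beams below are placeholders, not prescribed values) mirroring how
# the sim driver above calls this function: build a Compton-y map and a
# tSZ-deprojected CMB map from a pre-computed covariance version, writing
# tilec_single_tile_<region>_... FITS products into the version directory.
#
# build_and_save_ilc("arr1,arr2,arr3", "deep56", "map_test_v0", "cov_test_v0",
#                    beam_version=None,
#                    solutions="tSZ,CMB-tSZ", beams="1.6,1.6",
#                    chunk_size=2000000, effective_freq=True,
#                    overwrite=True, maxval=1e8)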
def build_and_save_cov(arrays,region,version,mask_version,
                       signal_bin_width,signal_interp_order,delta_ell,
                       rfit_lmin,
                       overwrite,memory_intensive,uncalibrated,
                       sim_splits=None,skip_inpainting=False,theory_signal="none",
                       unsanitized_beam=False,save_all=False,plot_inpaint=False,
                       isotropic_override=False,split_set=None,save_extra=False):

    save_scratch = not(memory_intensive)
    savedir = tutils.get_save_path(version,region)
    if save_scratch:
        scratch = tutils.get_scratch_path(version,region)
        covscratch = scratch
    else:
        covscratch = None
    if not(overwrite):
        assert not(os.path.exists(savedir)), \
            "This version already exists on disk. Please use a different version identifier."
    try:
        os.makedirs(savedir)
    except:
        if overwrite:
            pass
        else:
            raise
    if save_scratch:
        try:
            os.makedirs(scratch)
        except:
            pass

    mask = sints.get_act_mr3_crosslinked_mask(region,
                                              version=mask_version,
                                              kind='binary_apod')
    shape,wcs = mask.shape,mask.wcs
    aspecs = tutils.ASpecs().get_specs

    with bench.show("ffts"):
        kcoadds = []
        kdiffs = []
        wins = []
        lmins = []
        lmaxs = []
        hybrids = []
        do_radial_fit = []
        freqs = []
        rfit_wnoise_widths = []
        save_names = [] # to make sure nothing is overwritten
        friends = {} # what arrays are each correlated with?
        names = arrays.split(',')
        print("Calculating FFTs for " , arrays)
        fbeam = lambda qname,x: tutils.get_kbeam(qname,x,sanitize=not(unsanitized_beam),planck_pixwin=True)
        for i,qid in enumerate(arrays.split(',')):
            dm = sints.models[sints.arrays(qid,'data_model')](region=mask,calibrated=not(uncalibrated))
            lmin,lmax,hybrid,radial,friend,cfreq,fgroup,wrfit = aspecs(qid)
            print(f"{qid} lmin {lmin} lmax {lmax}")
            assert (type(radial)==bool) or (type(radial)==np.bool_)
            do_radial_fit.append(radial)
            friends[qid] = friend
            hybrids.append(hybrid)
            freqs.append(fgroup)
            rfit_wnoise_widths.append(wrfit)
            kdiff,kcoadd,win = kspace.process(dm,region,qid,mask,
                                              skip_splits=False,
                                              splits_fname=sim_splits[i] if sim_splits is not None else None,
                                              inpaint=not(skip_inpainting),fn_beam=lambda x: fbeam(qid,x),
                                              plot_inpaint_path=savedir if plot_inpaint else None,
                                              split_set=split_set)
            print("Processed ",qid)
            if save_scratch:
                kcoadd_name = savedir + "kcoadd_%s.npy" % qid
                kdiff_name = scratch + "kdiff_%s.npy" % qid
                win_name = scratch + "win_%s.npy" % qid
                assert win_name not in save_names
                assert kcoadd_name not in save_names
                assert kdiff_name not in save_names
                np.save(win_name,win)
                np.save(kcoadd_name,kcoadd)
                np.save(kdiff_name,kdiff)
                wins.append(win_name)
                kcoadds.append(kcoadd_name)
                kdiffs.append(kdiff_name)
                save_names.append(win_name)
                save_names.append(kcoadd_name)
                save_names.append(kdiff_name)
                print("Saved ",qid)
            else:
                wins.append(win.copy())
                kcoadds.append(kcoadd.copy())
                kdiffs.append(kdiff.copy())
            lmins.append(lmin)
            lmaxs.append(lmax)
    if sim_splits is not None: del sim_splits
    print("Done with ffts.")

    # print("Exiting because I just want to see inpainted stuff.")
    # sys.exit()

    # Decide what pairs to do hybrid smoothing for
    anisotropic_pairs = get_aniso_pairs(arrays.split(','),hybrids,friends)
    print("Anisotropic pairs: ",anisotropic_pairs)

    enmap.write_map(savedir+"tilec_mask.fits",mask)
    save_fn = lambda x,a1,a2: np.save(savedir+"tilec_hybrid_covariance_%s_%s.npy" % (names[a1],names[a2]),enmap.enmap(x,wcs))

    print("Building covariance...")
    with bench.show("build cov"):
        ilc.build_cov(names,kdiffs,kcoadds,fbeam,mask,lmins,lmaxs,freqs,anisotropic_pairs,
                      delta_ell,
                      do_radial_fit,save_fn,
                      signal_bin_width=signal_bin_width,
                      signal_interp_order=signal_interp_order,
                      rfit_lmaxes=lmaxs,
                      rfit_wnoise_widths=rfit_wnoise_widths,
                      rfit_lmin=rfit_lmin,
                      rfit_bin_width=None,
                      verbose=True,
                      debug_plots_loc=False,
                      separate_masks=False,theory_signal=theory_signal,scratch_dir=covscratch,
                      isotropic_override=isotropic_override,save_extra=save_extra,wins=wins)
    if not(save_all): shutil.rmtree(scratch)
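# A sketch (illustrative only) of the on-disk contract this function establishes
# and that build_and_save_ilc and calculate_yy rely on: the version directory
# returned by tutils.get_save_path contains tilec_mask.fits, per-array
# kcoadd_<qid>.npy files, and per-pair tilec_hybrid_covariance_<qid1>_<qid2>.npy
# blocks written via save_fn above.
def _example_load_cov_block(cov_version, region, qid1, qid2):
    # Load one saved hybrid covariance block and attach the mask geometry.
    covdir = tutils.get_save_path(cov_version, region)
    mask = enmap.read_map(covdir + "tilec_mask.fits")
    block = np.load(covdir + "tilec_hybrid_covariance_%s_%s.npy" % (qid1, qid2))
    return enmap.enmap(block, mask.wcs)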
input_names = []
for solution in solutions:
    components[solution] = solution.split('-')
    input_names.append( components[solution][0] )
input_names = sorted(list(set(input_names)))
print(components)
print(input_names)

for region in regions:
    for comb in combs:
        version = "map_$VERSION_%s" % (comb)
        savedir = tutils.get_save_path(version,region)
        for solution in solutions:
            pl = io.Plotter(xlabel='l',ylabel='r',xyscale='loglin')
            comps = "tilec_single_tile_"+region+"_"
            comps = comps + name_map[components[solution][0]]+"_"
            if len(components[solution])>1:
                comps = comps + "deprojects_"+ '_'.join([name_map[x] for x in components[solution][1:]]) + "_"
            comps = comps + version
            fname = "%s%s.fits" % (savedir,comps)
            cs = []
            for isotype in ['iso_v1.0.0_rc','v1.0.0_rc']: