def get_iso_integral_flux_map(iso_ascii_file, e_min, e_max, nside=512):
    """Returns an isotropic map of the integral IGRB between e_min and e_max,
       computed from the isotropic template ascii file.

       iso_ascii_file: str
           Ascii file found here
           https://fermi.gsfc.nasa.gov/ssc/data/access/lat/BackgroundModels.html
       e_min: float
           minimum energy of the bin [MeV]
       e_max: float
           maximum energy of the bin [MeV]
       nside: int
           healpix nside parameter
    """
    from GRATools.utils.gFTools import iso_parse
    e, difflux, diffluxerr = iso_parse(iso_ascii_file)
    index = np.where((e > e_min)*(e < e_max))
    erange = e[index]
    frange = difflux[index]
    f_e = xInterpolatedUnivariateSplineLinear(erange, frange)
    intf = f_e.integral(e_min, e_max)
    logger.info('Isotropic Bkg %e [cm-2s-1]'%intf)
    npix = hp.nside2npix(nside)
    intiso_map = np.full(npix, intf)
    return intiso_map
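# Usage sketch (hedged): the file name below is an assumption taken from the
# docstring, not a path guaranteed to exist in every installation.
def _demo_iso_map():
    iso_txt = os.path.join(GRATOOLS_CONFIG, 'models',
                           'iso_P8R2_SOURCE_V6_v06.txt')
    # Integral isotropic flux map in a (hypothetical) 1000-1096 MeV bin.
    return get_iso_integral_flux_map(iso_txt, 1000., 1096., nside=512)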
def get_crbkg(txt_file):
    """Get the CR residual bkg (spline) as a function of the energy
       from the txt files.

       txt_file : str
           It must contain 2 columns: the first one with the energy,
           the second one with the cosmic-ray residual background flux.
    """
    logger.info('Getting CR residual bkg from file %s'%txt_file)
    f = open(txt_file, 'r')
    _bkg, _en = [], []
    for line in f:
        try:
            e, bkg = [float(item) for item in line.split()]
            _en.append(e)
            _bkg.append(bkg)
        except ValueError:
            # Skip header or malformed lines.
            pass
    f.close()
    fmt = dict(xname='Energy', xunits='MeV',
               yname='E$^{2}$ x CR Residual flux',
               yunits='MeV cm$^{-2}$ s$^{-1}$ sr$^{-1}$')
    crbkg = xInterpolatedUnivariateSplineLinear(np.array(_en),
                                                np.array(_bkg), **fmt)
    return crbkg
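# Usage sketch (hedged): 'crbkg.txt' is a hypothetical file name; any
# two-column (energy, flux) ascii file with the layout described above works.
def _demo_crbkg():
    crbkg = get_crbkg(os.path.join(GRATOOLS_CONFIG, 'ascii', 'crbkg.txt'))
    # The returned spline can be evaluated at arbitrary energies [MeV].
    energies = np.logspace(3, 5, 20)
    return crbkg(energies)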
def mask_src(cat_file, MASK_S_RAD, NSIDE):
    """Returns the 'bad pixels' defined by the position of a source and a
       certain radius away from that point.

       cat_file: str
           .fits file of the source catalog
       MASK_S_RAD: float
           radius around each source defining bad pixels to mask
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Mask for sources activated')
    src_cat = pf.open(cat_file)
    CAT = src_cat['LAT_Point_Source_Catalog']
    BAD_PIX_SRC = []
    SOURCES = CAT.data
    RADrad = np.radians(MASK_S_RAD)
    for i in range(len(SOURCES)):
        GLON = SOURCES.field('GLON')[i]
        GLAT = SOURCES.field('GLAT')[i]
        x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
    BAD_PIX_inrad = []
    for bn in BAD_PIX_SRC:
        pixVec = hp.pix2vec(NSIDE, bn)
        radintpix = hp.query_disc(NSIDE, pixVec, RADrad)
        BAD_PIX_inrad.extend(radintpix)
    BAD_PIX_SRC.extend(BAD_PIX_inrad)
    src_cat.close()
    return BAD_PIX_SRC
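# Usage sketch (hedged): turn the bad-pixel list into a binary HEALPix mask,
# as mkMask does further below. The catalog path is an illustrative
# assumption, not a file shipped with the package.
def _demo_src_mask(nside=512):
    cat = os.path.join(GRATOOLS_CONFIG, 'catalogs', 'gll_psc_v16.fit')
    bad_pix = mask_src(cat, 2., nside)
    mask = np.ones(hp.nside2npix(nside))
    mask[np.unique(bad_pix)] = 0
    return mask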
def mask_extsrc(cat_file, MASK_S_RAD, NSIDE):
    """Returns the 'bad pixels' defined by the position of an extended
       source and a certain radius away from that point.

       cat_file: str
           .fits file of the source catalog
       MASK_S_RAD: float
           radius around each source defining bad pixels to mask
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Mask for extended sources activated')
    src_cat = pf.open(cat_file)
    CAT_EXTENDED = src_cat['ExtendedSources']
    BAD_PIX_SRC = []
    EXT_SOURCES = CAT_EXTENDED.data
    src_cat.close()
    for i, src in enumerate(EXT_SOURCES):
        NAME = EXT_SOURCES.field('Source_Name')[i]
        GLON = EXT_SOURCES.field('GLON')[i]
        GLAT = EXT_SOURCES.field('GLAT')[i]
        x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
        # Mask a larger region around the LMC and the CenA lobes.
        if 'LMC' in NAME or 'CenA Lobes' in NAME:
            radintpix = hp.query_disc(NSIDE, (x, y, z), np.radians(10))
        else:
            radintpix = hp.query_disc(NSIDE, (x, y, z), np.radians(5))
        BAD_PIX_SRC.extend(radintpix)
    return BAD_PIX_SRC
def csi_parse(csi_file):
    """Parsing of the *_csi.txt files.

       csi_file : str
           This file is created by the bin/mkcsi.py app.
    """
    logger.info('loading Csi values from %s'%csi_file)
    csi = []
    theta = []
    Rs = []
    emin, emax, emean = [], [], []
    f = open(csi_file, 'r')
    for line in f:
        if 'ENERGY\t' in line:
            e1, e2, em = [float(item) for item in line.split()[1:]]
            emin.append(e1)
            emax.append(e2)
            emean.append(em)
        if 'CSI\t' in line:
            c = np.array([float(item) for item in line.split()[1:]])
            csi.append(c)
        if 'THETA\t' in line:
            th = np.array([float(item) for item in line.split()[1:]])
            theta.append(th)
        if 'R\t' in line:
            r = np.array([float(item) for item in line.split()[1:]])
            Rs.append(r)
    f.close()
    return np.array(emin), np.array(emax), np.array(emean), np.array(csi), \
        np.array(theta), np.array(Rs)
def get_cl_param(cl_param_file):
    """Parsing of *_parameters.txt files.

       cl_param_file : str
           This file is created by the bin/mkdatarestyle.py app.
    """
    logger.info('loading parameters from %s'%cl_param_file)
    ff = open(cl_param_file, 'r')
    _emin, _emax, _emean, _f, _ferr, _cn, _fsky = [], [], [], [], [], [], []
    for line in ff:
        try:
            emin, emax, emean, f, ferr, cn, fsky = \
                [float(item) for item in line.split()]
            _emin.append(emin)
            _emax.append(emax)
            _emean.append(emean)
            _f.append(f)
            _ferr.append(ferr)
            _cn.append(cn)
            _fsky.append(fsky)
        except ValueError:
            # Skip header or malformed lines.
            pass
    ff.close()
    return np.array(_emin), np.array(_emax), np.array(_emean), np.array(_f), \
        np.array(_ferr), np.array(_cn), np.array(_fsky)
def clEcross_pol_parse(Ecross_file):
    """Parsing of the *_polspiceEcross.txt files.

       Ecross_file : str
           This file is created by the bin/mkpolspiceEcross.py app.
    """
    logger.info('loading Cl values from %s'%Ecross_file)
    f = open(Ecross_file, 'r')
    _emin1, _emax1, _emin2, _emax2 = [], [], [], []
    _ls, _cls, _clserr = [], [], []
    for line in f:
        if 'ENERGY' in line:
            emin1, emax1, emin2, emax2 = \
                [float(item) for item in line.split()[1:]]
            _emin1.append(emin1)
            _emax1.append(emax1)
            _emin2.append(emin2)
            _emax2.append(emax2)
        if 'multipole\t' in line:
            l = np.array([float(item) for item in line.split()[1:]])
            _ls.append(l)
        if 'Cl\t' in line:
            cl = np.array([float(item) for item in line.split()[1:]])
            _cls.append(cl)
        if 'Cl_ERR\t' in line:
            clerr = np.array([float(item) for item in line.split()[1:]])
            _clserr.append(clerr)
    f.close()
    return np.array(_emin1), np.array(_emax1), np.array(_emin2), \
        np.array(_emax2), np.array(_ls), np.array(_cls), np.array(_clserr)
def clEcross_parse(cl_file):
    """Parsing of the *_cps.txt files.
    """
    logger.info('loading Cp values from %s'%cl_file)
    ff = open(cl_file, 'r')
    _emin1, _emax1, _emean1, _cls, _clserr = [], [], [], [], []
    _emin2, _emax2, _emean2 = [], [], []
    for line in ff:
        if 'ENERGY1\t' in line:
            emin1, emax1, emean1 = [float(item) for item in line.split()[1:]]
            _emin1.append(emin1)
            _emax1.append(emax1)
            _emean1.append(emean1)
        if 'ENERGY2\t' in line:
            emin2, emax2, emean2 = [float(item) for item in line.split()[1:]]
            _emin2.append(emin2)
            _emax2.append(emax2)
            _emean2.append(emean2)
        if 'Cl\t' in line:
            cl = np.array([float(item) for item in line.split()[1:]])
            _cls.append(cl)
        if 'Cl_ERR\t' in line:
            clerr = np.array([float(item) for item in line.split()[1:]])
            _clserr.append(clerr)
    ff.close()
    return np.array(_emin1), np.array(_emax1), np.array(_emean1), \
        np.array(_emin2), np.array(_emax2), np.array(_emean2), \
        np.array(_cls), np.array(_clserr)
def mergeft_P305month(path_to_files, out_file_name, N1month, Nnmonth):
    """Creates a .txt file with the list of the FT files to merge.

       path_to_files: str
           path where data files are stored
       out_file_name: str
           name of the txt output file (created in the same folder of data)
       N1month: int
           number of the starting month
       Nnmonth: int
           number of the ending month
    """
    if N1month < 1:
        abort('Invalid number of months: the minimum must be >= 1')
    if Nnmonth > 108:
        abort('Invalid number of months: the maximum must be <= 108')
    outtxtfile = os.path.join(path_to_files, out_file_name)
    if not os.path.exists(outtxtfile):
        out_file = open(outtxtfile, 'w')
        for i in range(N1month, Nnmonth+1):
            # Zero-pad the month number to three digits.
            if i < 10:
                out_file.write("%s/P305_Source_00%i_zmax105.fits \n" \
                                   %(path_to_files, i))
            elif i <= 99:
                out_file.write("%s/P305_Source_0%i_zmax105.fits \n" \
                                   %(path_to_files, i))
            else:
                out_file.write("%s/P305_Source_%i_zmax105.fits \n" \
                                   %(path_to_files, i))
        out_file.close()
    logger.info('Created %s...'%outtxtfile)
    return '@'+outtxtfile
def mergeft(path_to_files, out_file_name, N1week, Nnweek):
    """Creates a .txt file with the list of the FT files to merge.

       path_to_files: str
           path where data files are stored
       out_file_name: str
           name of the txt output file (created in the same folder of data)
       N1week: int
           number of the starting week
       Nnweek: int
           number of the ending week
    """
    if N1week < 9:
        abort('Invalid number of weeks: the minimum must be >= 9')
    if Nnweek > 486:
        abort('Invalid number of weeks: the maximum must be <= 486')
    outtxtfile = os.path.join(path_to_files, out_file_name)
    if not os.path.exists(outtxtfile):
        out_file = open(outtxtfile, 'w')
        for i in range(N1week, Nnweek+1):
            # Zero-pad the week number to three digits.
            if i == 9:
                out_file.write("%s/lat_photon_weekly_w00%i_p302_v001.fits \n" \
                                   %(path_to_files, i))
            elif i <= 99:
                out_file.write("%s/lat_photon_weekly_w0%i_p302_v001.fits \n" \
                                   %(path_to_files, i))
            else:
                out_file.write("%s/lat_photon_weekly_w%i_p302_v001.fits \n" \
                                   %(path_to_files, i))
        out_file.close()
    logger.info('Created %s...'%outtxtfile)
    return '@'+outtxtfile
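# Usage sketch (hedged): the week range and folder are illustrative. The
# returned '@file' string is the list-file syntax the Science Tools accept
# as an 'infile' argument (e.g. for gtselect).
def _demo_mergeft():
    ph_folder = os.path.join(FT_DATA_FOLDER, 'photon')
    ft1_list = mergeft(ph_folder, 'FT1_w9-100.txt', 9, 100)
    return ft1_list  # e.g. '@/path/to/photon/FT1_w9-100.txt'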
def __init__(self, x, y, xname=None, xunits=None, yname=None, yunits=None,
             optimize=False, tolerance=1e-4):
    """Constructor.
    """
    if optimize:
        oldx, oldy = x, y
        x, y = optimize_grid_linear(x, y, tolerance)
    xInterpolatedUnivariateSpline.__init__(self, x, y, None, [None, None], 1,
                                           xname, xunits, yname, yunits)
    if optimize:
        dist = self.dist(oldx, oldy)
        logger.info('Relative (max/ave) dist. to original array: %e/%e' %\
                    (dist.max(), dist.sum()/len(dist)))
def gtbin(label, evtbin_dict):
    """gtbin from Science Tools.

       label: str
           To automatically set the name of the output file
       evtbin_dict: python dict
           To define all the parameters
    """
    logger.info('Running gtbin...')
    LABEL = label
    if not os.path.exists(os.path.join(FT_DATA_OUT, 'output_gtbin')):
        os.makedirs(os.path.join(FT_DATA_OUT, 'output_gtbin'))
    OUTPATH = os.path.join(FT_DATA_OUT, 'output_gtbin')
    OUTFILE = os.path.join(OUTPATH, LABEL + '_filtered_gti_bin.fits')
    if os.path.exists(OUTFILE):
        logger.info('ATT: Already created %s'%OUTFILE)
        return OUTFILE
    for key in evtbin_dict:
        if key == 'outfile':
            if evtbin_dict[key] == 'DEFAULT':
                my_apps.evtbin['outfile'] = OUTFILE
            else:
                my_apps.evtbin[key] = evtbin_dict[key]
            continue
        my_apps.evtbin[key] = evtbin_dict[key]
    my_apps.evtbin.run()
    logger.info('Created %s'%OUTFILE)
    logger.info('gtbin --> CPU time spent: %.2f'%time.clock())
    return OUTFILE
def pol_create_config(pol_dict, config_file_name):
    """Creates and returns a PolSpice config ascii file.

       pol_dict : python dict
           a dictionary with all the parameters of a typical PolSpice
           config ascii file.
       config_file_name : str
           name of the config ascii file that will be created
    """
    pol_config = os.path.join(GRATOOLS_CONFIG,
                              'ascii/%s.txt'%config_file_name)
    pol_config_file = open(pol_config, 'w')
    for key in pol_dict:
        pol_config_file.write('%s = %s \n'%(key, str(pol_dict[key])))
    pol_config_file.close()
    logger.info('Created config/ascii/%s.txt'%config_file_name)
    return pol_config
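# Usage sketch (hedged): a minimal PolSpice parameter dictionary. The keys
# shown are standard PolSpice options, but the values are illustrative only.
def _demo_pol_config():
    pol_dict = {'mapfile': 'flux_1000-1096.fits',
                'maskfile': 'Mask_gp30.fits',
                'clfile': 'out_cl.txt',
                'nlmax': 1000,
                'apodizesigma': 180.,
                'thetamax': 180.}
    return pol_create_config(pol_dict, 'pol_demo')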
def ebinning_fits_file(ebinning_array):
    """Produces a fits file defining the energy binning to feed gtbin.

       ebinning_array: numpy array
           array in which the energy binning is defined.
    """
    txt_file_name = os.path.join(GRATOOLS_OUT, 'ebinning.txt')
    txt_file = open(txt_file_name, 'w')
    fits_file = os.path.join(GRATOOLS_OUT, 'ebinning.fits')
    for emin, emax in zip(ebinning_array[:-1], ebinning_array[1:]):
        txt_file.write('%.4f %.4f\n'%(emin, emax))
    txt_file.close()
    os.system('gtbindef bintype=E binfile=%s outfile=%s energyunits=MeV' \
                  %(txt_file_name, fits_file))
    logger.info('Created %s...'%fits_file)
    return fits_file
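# Usage sketch (hedged): a log-spaced binning from 1 GeV to 1 TeV in 24 bins;
# the bin edges are illustrative, not the binning used by the package.
def _demo_ebinning():
    ebinning = np.logspace(3, 6, 25)  # 25 edges -> 24 energy bins [MeV]
    return ebinning_fits_file(ebinning)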
def mask_hemi_north(NSIDE):
    """Returns the 'bad pixels' in the northern hemisphere.

       NSIDE: int
           healpix nside parameter
    """
    logger.info('Masking northern hemisphere...')
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    BAD_PIX_HEMI_N = []
    iii = range(NPIX)
    x, y, z = hp.pix2vec(NSIDE, iii)
    lon, lat = hp.rotator.vec2dir(x, y, z, lonlat=True)
    for i, b in enumerate(lat):
        if b >= 0:
            BAD_PIX_HEMI_N.append(iii[i])
    return BAD_PIX_HEMI_N
def udgrade_as_psf(in_map, cont_ang):
    """Degrades a healpix map until the pixel area is at least comparable
       to the PSF containment angle, i.e. pixels not smaller than the PSF.

       in_map: numpy array
           healpix map to degrade
       cont_ang: float
           PSF containment angle [rad]
    """
    npix = len(in_map)
    in_nside = hp.pixelfunc.npix2nside(npix)
    order = int(np.log2(in_nside))
    min_pix_area = cont_ang**2
    pix_area = hp.pixelfunc.nside2pixarea(in_nside)
    while pix_area < min_pix_area:
        order = order - 1
        nside = 2**order
        pix_area = hp.pixelfunc.nside2pixarea(nside)
        # Do not degrade below NSIDE=64.
        if order < 7:
            break
    out_nside = 2**order
    out_map = hp.pixelfunc.ud_grade(in_map, out_nside)
    logger.info('Udgraded map from NSIDE=%i to NSIDE=%i'%(in_nside, out_nside))
    return out_map
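# Usage sketch (hedged): degrade a map so that the pixel size is comparable
# to a 0.5 deg containment angle; the input NSIDE is illustrative.
def _demo_udgrade_as_psf():
    in_map = np.ones(hp.nside2npix(512))
    return udgrade_as_psf(in_map, np.radians(0.5))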
def fit_foreground(fore_map, data_map):
    """ATT: maps are intended to be healpix maps (namely numpy arrays)
    """
    nside_out = 64
    mask_f = os.path.join(GRATOOLS_CONFIG, 'fits/Mask64_src2_gp30.fits')
    mask = hp.read_map(mask_f)
    _unmask = np.where(mask > 1e-30)[0]
    logger.info('down grade...')
    fore_repix = np.array(hp.ud_grade(fore_map, nside_out=nside_out))
    data_repix = np.array(hp.ud_grade(data_map, nside_out=nside_out))
    # Least-squares fit of data = norm*model + const on the unmasked pixels.
    A = np.vstack([fore_repix[_unmask],
                   np.ones(len(fore_repix[_unmask]))]).T
    norm, const = np.linalg.lstsq(A, data_repix[_unmask])[0]
    logger.info('fit param (norm, const): %.3f, %e' % (norm, const))
    return norm, const
def mask_hemi_west(NSIDE):
    """Returns the 'bad pixels' in the western hemisphere.

       NSIDE: int
           healpix nside parameter
    """
    logger.info('Masking western hemisphere...')
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    BAD_PIX_HEMI_W = []
    iii = range(NPIX)
    x, y, z = hp.pix2vec(NSIDE, iii)
    lon, lat = hp.rotator.vec2dir(x, y, z, lonlat=True)
    for i, b in enumerate(lon):
        if 0 <= b <= 180:
            BAD_PIX_HEMI_W.append(iii[i])
    return BAD_PIX_HEMI_W
def mask_src_weighted_custom(cat_file, ENERGY, NSIDE):
    """Returns the 'bad pixels' defined by the position of a source and a
       certain radius away from that point. The radii increase with the
       brightness and are rescaled by a factor between 1 and 0.3 shaped
       as the PSF.

       cat_file: str
           .fits file with the source catalog
       ENERGY: float
           Mean energy of the map to be masked
       NSIDE: int
           healpix nside parameter
    """
    psf_ref_file = os.path.join(GRATOOLS_CONFIG, 'ascii/PSF_UCV_PSF1.txt')
    src_cat = pf.open(cat_file)
    CAT = src_cat[1]
    BAD_PIX_SRC = []
    SOURCES = CAT.data
    src_cat.close()
    # Energy-dependent rescaling factor of the radii, shaped as the PSF.
    psf_ref = get_psf_ref(psf_ref_file)
    psf_en = psf_ref(ENERGY)
    psf_min, psf_max = psf_ref.y[5], psf_ref.y[-1]
    norm_min, norm_max = 1, 0.3
    norm = norm_min + psf_en*((norm_max - norm_min)/(psf_max - psf_min)) -\
        psf_min*((norm_max - norm_min)/(psf_max - psf_min))
    logger.info('Normalization of radii due to energy: %.3f'%norm)
    logger.info('Psf(%.2f)= %.2f'%(ENERGY, psf_en))
    # Flux-dependent masking radius, linear between rad_min and rad_max.
    FLUX = np.log10(SOURCES.field('eflux1000'))
    flux_min, flux_max = min(FLUX), max(FLUX)
    rad_min, rad_max = 1, 5.
    RADdeg = rad_min + FLUX*((rad_max - rad_min)/(flux_max - flux_min)) -\
        flux_min*((rad_max - rad_min)/(flux_max - flux_min))
    RADrad = np.radians(RADdeg)
    logger.info('Flux-weighted mask for sources activated')
    TS = SOURCES.field('ts')
    indTS25 = TS > 25.
    GLON = SOURCES.field('GLON')[indTS25]
    GLAT = SOURCES.field('GLAT')[indTS25]
    # Keep the radii aligned with the TS-selected sources.
    RADrad = RADrad[indTS25]
    logger.info('Num Src: %i'%len(TS))
    logger.info('Num Src TS>25: %i'%len(TS[indTS25]))
    for i, src in enumerate(SOURCES[indTS25]):
        x, y, z = hp.rotator.dir2vec(GLON[i], GLAT[i], lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
        radintpix = hp.query_disc(NSIDE, (x, y, z), RADrad[i]*norm)
        BAD_PIX_SRC.extend(radintpix)
    return BAD_PIX_SRC
def save_current_figure(file_name, folder=GRATOOLS_OUT_FIG, clear=True,
                        show=False):
    """Save the current matplotlib figure in `folder`
       (`GRATOOLS_OUT_FIG` by default).

       Arguments
       ---------
       file_name : string
           The name of the output file.
       show : bool
           If `True`, the figure is displayed after saving.
       clear : bool
           If `True`, the current image is cleared after the fact.
    """
    file_path = os.path.join(folder, file_name)
    logger.info('Saving current figure to %s...' % file_path)
    pyplot.savefig(file_path, transparent=True)
    if show:
        pyplot.show()
    if clear:
        pyplot.clf()
def get_fore_integral_flux_map(fore_files_list, e_min, e_max):
    """Returns the foreground map integrated between e_min and e_max.
       A power law is assumed for the foreground energy spectrum, hence
       the interpolation between 2 given maps at given energies (given
       by the model) is done in logarithmic scale.

       fore_files_list: list of str
           Ordered list of the foreground files (one for each energy)
       e_min: float
           the min of the energy bin
       e_max: float
           the max of the energy bin
    """
    fore_en = []
    for ff in fore_files_list:
        m = re.search(FORE_EN, ff)
        en = int(m.group(0).replace('_', '').replace('.', ''))
        fore_en.append(en)
    fore_en = np.array(fore_en)
    out_name = fore_files_list[0].replace('_%i.fits'%fore_en[0],
                                          '_%d-%d.fits'%(e_min, e_max))
    if os.path.exists(out_name):
        logger.info('ATT: file %s already exists and returned...'%out_name)
        fore_map = hp.read_map(out_name)
        return fore_map
    logger.info('Computing the integral flux of the foreground model...')
    logger.info('...between %.2f - %.2f'%(e_min, e_max))
    fore_emin_sx, fore_emin_dx = find_outer_energies(e_min, fore_en)
    fore_emax_sx, fore_emax_dx = find_outer_energies(e_max, fore_en)
    fore_emin_sx_ind = np.where(fore_en == fore_emin_sx)[0][0]
    fore_emin_dx_ind = np.where(fore_en == fore_emin_dx)[0][0]
    fore_emax_sx_ind = np.where(fore_en == fore_emax_sx)[0][0]
    fore_emax_dx_ind = np.where(fore_en == fore_emax_dx)[0][0]
    fore_fmin_sx = hp.read_map(fore_files_list[fore_emin_sx_ind])
    fore_fmin_dx = hp.read_map(fore_files_list[fore_emin_dx_ind])
    fore_fmax_sx = hp.read_map(fore_files_list[fore_emax_sx_ind])
    fore_fmax_dx = hp.read_map(fore_files_list[fore_emax_dx_ind])
    # Power-law (log-log linear) interpolation of the model maps at the
    # bin edges e_min and e_max.
    m1 = (np.log10(fore_fmin_sx)-np.log10(fore_fmin_dx))/ \
        (np.log10(fore_emin_sx)-np.log10(fore_emin_dx))
    m2 = (np.log10(fore_fmax_sx)-np.log10(fore_fmax_dx))/ \
        (np.log10(fore_emax_sx)-np.log10(fore_emax_dx))
    logfore1 = m1*(np.log10(e_min)-np.log10(fore_emin_sx))+ \
        np.log10(fore_fmin_sx)
    logfore2 = m2*(np.log10(e_max)-np.log10(fore_emax_sx))+ \
        np.log10(fore_fmax_sx)
    fore1 = 10**(logfore1)
    fore2 = 10**(logfore2)
    fore_integ = np.sqrt(fore1*fore2)*(e_max - e_min)
    hp.write_map(out_name, fore_integ)
    logger.info('Created file %s'%out_name)
    return fore_integ
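# A minimal sketch of the log-log (power-law) interpolation used above:
# given two samples (e1, f1) and (e2, f2), the flux at energy e follows
# log10 f(e) = m*(log10 e - log10 e1) + log10 f1, with
# m = (log10 f1 - log10 f2)/(log10 e1 - log10 e2).
def _powerlaw_interp(e, e1, f1, e2, f2):
    m = (np.log10(f1) - np.log10(f2))/(np.log10(e1) - np.log10(e2))
    return 10**(m*(np.log10(e) - np.log10(e1)) + np.log10(f1))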
def optimize_grid_linear(x, y, tolerance=1e-4):
    """Optimize a pair of (x, y) arrays for the corresponding spline
       definition.

       This loops over the input arrays and removes unnecessary data points
       to minimize the length of the arrays necessary to the spline
       definition.

       Args
       ----
       x : array
           The input x-array.
       y : array
           The input y-array.
       tolerance : float
           The maximum relative difference between the generic yi value and
           the extrapolation of the two previous optimized data points for
           the point i to be removed.
    """
    assert len(x) == len(y)
    logger.info('Optimizing grid with %d starting points...' % len(x))
    # Start a new series with the first two points of the input arrays.
    _x = [x[0], x[1]]
    _y = [y[0], y[1]]
    # Loop over the points 3 ... (N - 1).
    for _xi, _yi in list(zip(x, y))[2:-1]:
        # Extrapolate the last two points of the new series to xi and
        # see how far we are from the actual yi.
        delta = interpolate(_x[-2], _y[-2], _x[-1], _y[-1], _xi) - _yi
        if abs(delta/_yi) > tolerance:
            # If the difference is larger than the tolerance, add a point.
            # (This has the drawback that we tend to add pairs of points at
            # each change of slope.)
            _x.append(_xi)
            _y.append(_yi)
            # Interpolate the points last and (last - 2) to (last - 1).
            delta = interpolate(_x[-3], _y[-3], _x[-1], _y[-1], _x[-2]) \
                - _y[-2]
            if abs(delta/_y[-2]) < tolerance:
                # If the penultimate point was not necessary, remove it.
                _x.pop(-2)
                _y.pop(-2)
    # Append the last point of the original array to the list.
    _x.append(x[-1])
    _y.append(y[-1])
    _x, _y = numpy.array(_x), numpy.array(_y)
    logger.info('Done, %d points remaining.' % len(_x))
    return _x, _y
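# Usage sketch (hedged): thin a densely sampled piecewise-linear grid; the
# sine curve is illustrative. Points on nearly straight segments are dropped.
def _demo_optimize_grid():
    x = numpy.linspace(0., 10., 1000)
    y = numpy.sin(x) + 2.
    return optimize_grid_linear(x, y, tolerance=1e-3)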
def fit_foreground_lstsq(fore_map, data_map, mask_map):
    """Performs the least-squares fit, given a data map and a model map,
       of the linear model: data = const + norm*model;
       returns norm and const, the fit parameters.

       ATT: maps are intended to be healpix maps (namely numpy arrays)

       fore_map: numpy array
           healpy model map
       data_map: numpy array
           healpy data map
       mask_map: numpy array
           healpy mask map (0 for masked pixels)
    """
    _unmask = np.where(mask_map > 1e-30)[0]
    A = np.vstack([fore_map[_unmask], np.ones(len(fore_map[_unmask]))]).T
    norm, const = np.linalg.lstsq(A, data_map[_unmask])[0]
    logger.info('fit param (norm, const): %.3f, %e' %(norm, const))
    return norm, const
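# Usage sketch (hedged): recover norm and const from a synthetic data map
# built as data = 0.5 + 2*model, with everything unmasked.
def _demo_fit_foreground():
    npix = hp.nside2npix(64)
    model = np.random.uniform(1., 2., npix)
    data = 0.5 + 2.*model
    mask = np.ones(npix)
    return fit_foreground_lstsq(model, data, mask)  # ~ (2.0, 0.5)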
def mask_high_lat(MASK_LAT, NSIDE):
    """Returns the 'bad pixels' at high latitudes from the galactic plane.

       MASK_LAT: float
           absolute value of galactic latitude defining bad pixels to mask
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Mask for high latitudes activated')
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    BAD_PIX_GP = []
    iii = range(NPIX)
    x, y, z = hp.pix2vec(NSIDE, iii)
    lon, lat = hp.rotator.vec2dir(x, y, z, lonlat=True)
    for i, b in enumerate(lat):
        if abs(b) >= MASK_LAT:
            BAD_PIX_GP.append(iii[i])
    return BAD_PIX_GP
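# A minimal vectorized sketch (assumption: equivalent to the loop above);
# numpy boolean indexing avoids the per-pixel Python loop.
def _mask_high_lat_fast(MASK_LAT, NSIDE):
    iii = np.arange(hp.pixelfunc.nside2npix(NSIDE))
    x, y, z = hp.pix2vec(NSIDE, iii)
    lon, lat = hp.rotator.vec2dir(x, y, z, lonlat=True)
    return iii[np.abs(lat) >= MASK_LAT].tolist()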
def mask_gp(MASK_GP_LAT, NSIDE):
    """Returns the 'bad pixels' around the galactic plane.

       MASK_GP_LAT: float
           absolute value of galactic latitude defining bad pixels to mask
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Mask for the galactic plane activated')
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    BAD_PIX_GP = []
    iii = range(NPIX)
    x, y, z = hp.pix2vec(NSIDE, iii)
    lon, lat = hp.rotator.vec2dir(x, y, z, lonlat=True)
    for i, b in enumerate(lat):
        if abs(b) <= MASK_GP_LAT:
            BAD_PIX_GP.append(iii[i])
    return BAD_PIX_GP
def foreground_map_convert(**kwargs):
    """Converts the galactic foreground model from a cartesian-grid fits
       file to healpix maps (one for each model energy).
    """
    input_file = kwargs['infile']
    nside_out = kwargs['nsideout']
    if not os.path.exists(input_file):
        abort("Map %s not found!"%input_file)
    frmaps = pf.open(input_file)
    maps_slices = frmaps[0].data
    energy = np.array([x[0] for x in frmaps['ENERGIES'].data])
    nside = 2048
    npix = hp.nside2npix(nside)
    iii = np.arange(npix)
    x, y, z = hp.pix2vec(nside, iii)
    lon_hp, lat_hp = hp.rotator.vec2dir(x, y, z, lonlat=True)
    hp_frmap = np.arange(npix, dtype=np.float64)
    # Build the (lon, lat) grid of the input cartesian map.
    lon_fits = np.arange(len(maps_slices[0][0]))
    nresx = 360./len(lon_fits)
    lon_fits_1 = (lon_fits[:1440]*nresx+180)
    lon_fits = np.append(lon_fits_1, lon_fits[1440:]*nresx-180)
    lat_fits = np.arange(len(maps_slices[0]))
    lat_fits = lat_fits*nresx-90
    fr_e = []
    for i, en in enumerate(energy):
        logger.info('Running map conversion for energy %.2f...'%en)
        frmap = maps_slices[i]
        fmt = dict(xname='$l$', xunits='deg', yname='$b$', yunits='deg',
                   zname='Flux [cm$^{-2}$s$^{-1}$sr$^{-1}$]')
        lon, _indexx = np.unique(lon_fits, return_index=True)
        lat, _indexy = np.unique(lat_fits, return_index=True)
        frmap = frmap[:, _indexx]
        frspline = xInterpolatedBivariateSplineLinear(lon, lat, frmap.T,
                                                      **fmt)
        # Sample the spline at the center of each healpix pixel.
        for j, pix in enumerate(hp_frmap):
            hp_frmap[j] = frspline((lon_hp[j]+360)%360, lat_hp[j])
        out_name = os.path.basename(input_file).replace('.fits',
                                                        '_hp%i_%d.fits'
                                                        %(nside_out, en))
        fr_e.append(hp_frmap[12426])
        out_path = os.path.join(GRATOOLS_CONFIG, 'fits', out_name)
        hp_frmap_out = hp.pixelfunc.ud_grade(hp_frmap, nside_out, pess=True)
        hp.write_map(out_path, hp_frmap_out, coord='G')
        logger.info('Wrote map %s'%out_path)
    frmaps.close()
def mkCl(**kwargs):
    """Computes the angular power spectrum of the foreground maps
       via PolSpice.
    """
    get_var_from_file(kwargs['config'])
    logger.info('Starting Cl analysis...')
    e_min = data.E_MIN
    e_max = data.E_MAX
    in_label = 'fore'
    out_label = in_label
    mask_file = data.MASK_FILE
    cl_txt = open(os.path.join(GRATOOLS_OUT, '%s_polspicecls.txt'
                               %out_label), 'w')
    for i, (emin, emax) in enumerate(zip(e_min, e_max)):
        logger.info('Considering bin %.2f - %.2f ...'%(emin, emax))
        mask_f = mask_file
        if type(mask_file) == list:
            mask_f = mask_file[i]
        cl_txt.write('ENERGY\t %.2f %.2f\n'%(emin, emax))
        l_max = 1000
        _l = np.arange(l_max)
        flux_map_name = in_label+'_%i-%i.fits'%(emin, emax)
        flux_map_f = os.path.join(GRATOOLS_OUT, 'output_fore/'+flux_map_name)
        if kwargs['show'] == True:
            flux_map = hp.read_map(flux_map_f)
            hp.mollview(flux_map, title='fore map', min=1e-7, max=1e-4,
                        norm='log')
            plt.show()
        out_name = '%s_%i-%i'%(out_label, emin, emax)
        out_folder = os.path.join(GRATOOLS_OUT, 'output_pol')
        if not os.path.exists(out_folder):
            os.makedirs(out_folder)
        pol_dict = data.POLCEPICE_DICT
        for key in pol_dict:
            if key == 'clfile':
                pol_dict[key] = os.path.join(out_folder, '%s_cl.txt'%out_name)
            if key == 'cl_outmap_file':
                pol_dict[key] = os.path.join(out_folder,
                                             '%s_clraw.txt'%out_name)
            if key == 'covfileout':
                pol_dict[key] = os.path.join(out_folder,
                                             '%s_cov.fits'%out_name)
            if key == 'mapfile':
                pol_dict[key] = flux_map_f
            if key == 'maskfile':
                pol_dict[key] = mask_f
        config_file_name = 'pol_%s'%(out_name)
        _l, _cl, _cl_err = pol_cl_calculation(pol_dict, config_file_name)
        cl_txt.write('Cl\t%s\n'%str(list(_cl)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
        cl_txt.write('Cl_ERR\t%s\n\n'%str(list(_cl_err)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
    cl_txt.close()
    logger.info('Created %s'%(os.path.join(GRATOOLS_OUT,
                                           '%s_polspicecls.txt'%out_label)))
def cp_parse(cp_file):
    """Parsing of the *_cps.txt files.

       cp_file : str
           This file is created by the bin/mkcps.py app.
    """
    logger.info('loading Cp values from %s'%cp_file)
    ff = open(cp_file, 'r')
    _emin, _emax, _emean, _cp, _cperr = [], [], [], [], []
    for line in ff:
        try:
            emin, emax, emean, cp, cperr = \
                [float(item) for item in line.split()]
            _emin.append(emin)
            _emax.append(emax)
            _emean.append(emean)
            _cp.append(cp)
            _cperr.append(cperr)
        except ValueError:
            # Skip header or malformed lines.
            pass
    ff.close()
    return np.array(_emin), np.array(_emax), np.array(_emean), \
        np.array(_cp), np.array(_cperr)
def mask_bat_gp(FLUX_CUT, NSIDE):
    """Returns the 'bad pixels' around the galactic plane.

       FLUX_CUT: float
           flux threshold of the foreground model above which pixels
           are masked
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Batman mask for the galactic plane activated')
    flux_map = hp.read_map(os.path.join(GRATOOLS_CONFIG,
                                        'fits/gll_iem_v06_hp512_1000-1096.fits'))
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    if len(flux_map) != NPIX:
        flux_map = hp.ud_grade(flux_map, nside_out=NSIDE)
    BAD_PIX_GP = []
    for i, b in enumerate(flux_map):
        if b >= FLUX_CUT:
            BAD_PIX_GP.append(i)
    return BAD_PIX_GP
def cpEcross_parse(cp_file):
    """Parsing of the *_cps.txt files.

       cp_file : str
           This file is created by the bin/mkcpsEcross.py app.
    """
    logger.info('loading Cp values from %s'%cp_file)
    ff = open(cp_file, 'r')
    _emin, _emax = [], []
    _emin1, _emax1 = [], []
    _emin2, _emax2 = [], []
    _cp, _cperr = [], []
    # First pass: energy bins.
    for line in ff:
        if 'ENERGY' in line:
            emin, emax = [float(item) for item in line.split()[1:]]
            _emin.append(emin)
            _emax.append(emax)
    # Second pass: cross Cp values.
    ff.seek(0)
    for line in ff:
        try:
            emin1, emax1, emin2, emax2, cp, cperr = \
                [float(item) for item in line.split()]
            _emin1.append(emin1)
            _emax1.append(emax1)
            _emin2.append(emin2)
            _emax2.append(emax2)
            _cp.append(cp)
            _cperr.append(cperr)
        except ValueError:
            # Skip header or malformed lines.
            pass
    ff.close()
    return np.array(_emin), np.array(_emax), np.array(_emin1), \
        np.array(_emax1), np.array(_emin2), \
        np.array(_emax2), np.array(_cp), np.array(_cperr)
def maps_view(**kwargs):
    """Viewer interface for healpix maps.
    """
    input_file = kwargs['infile']
    if not os.path.exists(input_file):
        abort("Map %s not found!"%input_file)
    healpix_maps = hp.read_map(input_file, field=kwargs['field'])
    t = os.path.basename(input_file)
    plt.figure(figsize=(10, 7), dpi=80)
    nside_out = kwargs['udgrade']
    logger.info('Returning a map with NSIDE=%i'%nside_out)
    if kwargs['field'] == 0:
        if kwargs['counts'] == True:
            # Counts maps must be degraded conserving the pixel sum.
            healpix_maps = hp.pixelfunc.ud_grade(healpix_maps, nside_out,
                                                 pess=True, power=-2)
        else:
            healpix_maps = hp.pixelfunc.ud_grade(healpix_maps, nside_out,
                                                 pess=True)
        if kwargs['optimized'] == True:
            logger.info('Optimizing...')
            hp.mollview(healpix_maps, title=t.replace('.fits',''),
                        coord='G', min=1e-7, max=1e-4, norm='log')
        else:
            hp.mollview(healpix_maps, title=t.replace('.fits',''), coord='G')
        hp.graticule()
        overlay_tag(color='silver', x=0.45)
        save_current_figure(t.replace('.fits', '.png'))
    else:
        for i, m in enumerate(healpix_maps):
            m = hp.pixelfunc.ud_grade(m, nside_out, pess=True)
            if kwargs['optimized'] == True:
                logger.info('Optimizing...')
                hp.mollview(m, title=t.replace('.fits', '_%i'%i),
                            coord='G', min=1e-7, max=1e-4, norm='log')
            else:
                hp.mollview(m, title=t.replace('.fits', '_%i'%i), coord='G')
            hp.graticule()
            overlay_tag(color='silver', x=0.45)
            save_current_figure(t.replace('.fits', '_%i.png'%i))
def mkForeCl(**kwargs):
    """Computes the angular power spectrum (anafast) of the foreground maps.
    """
    get_var_from_file(kwargs['config'])
    logger.info('Starting Cl analysis...')
    in_label = data.IN_LABEL
    out_label = data.OUT_LABEL
    binning_label = data.BINNING_LABEL
    from GRATools.utils.gFTools import get_cl_param
    cl_param_file = os.path.join(GRATOOLS_OUT, '%s_%s_parameters.txt'
                                 %(in_label, binning_label))
    _emin, _emax, _emean, _f, _ferr, _cn, _fsky = get_cl_param(cl_param_file)
    cl_txt = open(os.path.join(GRATOOLS_OUT, '%s_%s_forecls.txt' \
                                   %(out_label, binning_label)), 'w')
    for i, (emin, emax) in enumerate(zip(_emin, _emax)):
        mask_file = data.MASK_FILE
        if type(mask_file) == list:
            mask_file = mask_file[i]
        mask = hp.read_map(mask_file)
        logger.info('Considering bin %.2f - %.2f ...'%(emin, emax))
        cl_txt.write('ENERGY\t %.2f %.2f %.2f\n'%(emin, emax, _emean[i]))
        l_max = 1000
        _l = np.arange(l_max)
        flux_map_name = in_label+'_fore_%i-%i.fits'%(emin, emax)
        flux_map = hp.read_map(os.path.join(GRATOOLS_OUT_FORE, flux_map_name))
        flux_map_masked = hp.ma(flux_map)
        flux_map_masked.mask = np.logical_not(mask)
        fsky = 1.-(len(np.where(flux_map_masked.filled() == hp.UNSEEN)[0])/\
                       float(len(flux_map)))
        if kwargs['show'] == True:
            hp.mollview(flux_map_masked.filled(),
                        title='f$_{sky}$ = %.3f'%fsky,
                        min=1e-7, max=1e-4, norm='log')
            plt.show()
        logger.info('fsky = %.3f'%fsky)
        _cl = hp.sphtfunc.anafast(flux_map_masked.filled(), lmax=l_max-1, \
                                      iter=5)
        cl_txt.write('Cl\t%s\n'%str(list(_cl)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
    cl_txt.close()
    logger.info('Created %s'%(os.path.join(GRATOOLS_OUT, '%s_%s_forecls.txt' \
                                               %(out_label, binning_label))))
def gtselect(label, filter_dict):
    """gtselect from Science Tools.

       label: str
           To automatically set the name of the output file
       filter_dict: python dict
           To define all the parameters
    """
    logger.info('Running gtselect...')
    LABEL = label
    if not os.path.exists(os.path.join(FT_DATA_OUT, 'output_gtselect')):
        os.makedirs(os.path.join(FT_DATA_OUT, 'output_gtselect'))
    OUTPATH = os.path.join(FT_DATA_OUT, 'output_gtselect')
    OUTFILE = os.path.join(OUTPATH, LABEL + '_filtered.fits')
    if os.path.exists(OUTFILE):
        logger.info('ATT: Already created %s'%OUTFILE)
        return OUTFILE
    for key in filter_dict:
        my_apps.filter[key] = filter_dict[key]
    my_apps.filter['outfile'] = OUTFILE
    my_apps.filter.run()
    logger.info('Created %s'%OUTFILE)
    logger.info('gtselect --> CPU time spent: %.2f'%time.clock())
    return OUTFILE
def mkMask(**kwargs):
    """Produces and saves a healpix mask map combining the selected masks.
    """
    logger.info('Starting mask production...')
    get_var_from_file(kwargs['config'])
    bad_pix = []
    nside = data.NSIDE
    out_label = data.OUT_LABEL
    energy = data.ENERGY
    npix = hp.nside2npix(nside)
    mask = np.ones(npix)
    if kwargs['srcmask'] == True:
        from GRATools.utils.gMasks import mask_src
        src_mask_rad = data.SRC_MASK_RAD
        cat_file = data.SRC_CATALOG
        bad_pix += mask_src(cat_file, src_mask_rad, nside)
    if kwargs['srcmaskweight'] == True:
        from GRATools.utils.gMasks import mask_src_weighted
        src_mask_rad = data.SRC_MASK_RAD
        cat_file = data.SRC_CATALOG
        bad_pix += mask_src_weighted(cat_file, energy, nside)
    if kwargs['gpmask'] == True:
        from GRATools.utils.gMasks import mask_gp
        gp_mask_lat = data.GP_MASK_LAT
        bad_pix += mask_gp(gp_mask_lat, nside)
    if kwargs['northmask'] == True:
        from GRATools.utils.gMasks import mask_hemi_north
        bad_pix += mask_hemi_north(nside)
    if kwargs['southmask'] == True:
        from GRATools.utils.gMasks import mask_hemi_south
        bad_pix += mask_hemi_south(nside)
    if kwargs['eastmask'] == True:
        from GRATools.utils.gMasks import mask_hemi_east
        bad_pix += mask_hemi_east(nside)
    if kwargs['westmask'] == True:
        from GRATools.utils.gMasks import mask_hemi_west
        bad_pix += mask_hemi_west(nside)
    for bpix in np.unique(bad_pix):
        mask[bpix] = 0
    out_name = os.path.join(GRATOOLS_CONFIG, 'fits/' + out_label + '.fits')
    fsky = 1 - (len(np.unique(bad_pix))/float(npix))
    logger.info('f$_{sky}$ = %.3f'%fsky)
    hp.write_map(out_name, mask, coord='G')
    logger.info('Created %s'%out_name)
def mkCsi(**kwargs):
    """Computes the angular 2-point correlation function (Csi) of the
       flux maps.
    """
    get_var_from_file(kwargs['config'])
    ncores = kwargs['ncores']
    psf_file = data.PSF_REF_FILE
    p = multiprocessing.Pool(processes=ncores)
    logger.info('Starting Csi analysis...')
    in_label = data.IN_LABEL
    out_label = data.OUT_LABEL
    binning_label = data.BINNING_LABEL
    cl_param_file = os.path.join(GRATOOLS_OUT, '%s_%s_parameters.txt' \
                                     %(in_label, binning_label))
    from GRATools.utils.gFTools import get_cl_param
    _emin, _emax, _emean, _f, _ferr, _cn, _fsky = get_cl_param(cl_param_file)
    csi_txt = open(os.path.join(GRATOOLS_OUT, '%s_%s_csi.txt' \
                                    %(out_label, binning_label)), 'w')
    psf_ref = get_psf_ref(psf_file)
    for i, (emin, emax) in enumerate(zip(_emin, _emax)):
        logger.info('Considering bin %.2f - %.2f ...'%(emin, emax))
        cont_ang = np.radians(psf_ref(_emean[i]))
        csi_txt.write('ENERGY\t %.2f %.2f %.2f\n'%(emin, emax, _emean[i]))
        flux_map_name = in_label+'_flux_%i-%i.fits'%(emin, emax)
        flux_map = hp.read_map(os.path.join(GRATOOLS_OUT_FLUX, flux_map_name))
        flux_map = udgrade_as_psf(flux_map, cont_ang)
        R = hp.read_map(os.path.join(GRATOOLS_OUT_FLUX, flux_map_name))
        R = udgrade_as_psf(R, cont_ang)
        fsky = 1.-(len(np.where(flux_map == hp.UNSEEN)[0])/\
                       float(len(flux_map)))
        logger.info('fsky = %f'%fsky)
        npix = len(flux_map)
        nside = hp.npix2nside(npix)
        _unmask = np.where(flux_map != hp.UNSEEN)[0]
        npix_unmask = len(_unmask)
        Imean = _f[i]
        dI = flux_map - Imean
        dR = R - Imean
        R = permute_unmasked_pix(R)
        dR = permute_unmasked_pix(dR)
        th_bins = data.TH_BINNING
        theta = []
        for thmin, thmax in zip(th_bins[:-1], th_bins[1:]):
            th_mean = np.sqrt(thmin*thmax)
            theta.append(th_mean)
        theta = np.array(theta)
        logger.info('Computing Csi...')
        diri = hp.pixelfunc.pix2ang(nside, _unmask)
        veci = hp.rotator.dir2vec(diri)
        xyz = np.array([(veci[0][j], veci[1][j], veci[2][j])
                        for j in range(0, len(veci[0]))])
        args = zip(_unmask, xyz, [dI]*npix_unmask, [dR]*npix_unmask,
                   [nside]*npix_unmask)
        a = np.array(p.map(csi_compute, args))
        SUMij_list = a[:, 0]
        SUMf_list = a[:, 1]
        SUMR_list = a[:, 2]
        SUMij_th = []
        SUMf_th = []
        SUMR_th = []
        for j, s in enumerate(SUMij_list[0]):
            SUMij_th.append(np.sum(SUMij_list[:, j]))
            SUMf_th.append(np.sum(SUMf_list[:, j]))
            SUMR_th.append(np.sum(SUMR_list[:, j]))
        csi = (np.array(SUMij_th))/np.array(SUMf_th)
        r = (np.array(SUMR_th))/np.array(SUMf_th)
        csi_txt.write('THETA\t%s\n'%str(list(theta)).replace('[',''). \
                          replace(']','').replace(', ', ' '))
        csi_txt.write('CSI\t%s\n'%str(list(csi)).replace('[',''). \
                          replace(']','').replace(', ', ' '))
        csi_txt.write('R\t%s\n'%str(list(r)).replace('[',''). \
                          replace(']','').replace(', ', ' '))
    csi_txt.close()
    p.close()
    p.join()
    logger.info('Created %s'%(os.path.join(GRATOOLS_OUT, '%s_%s_csi.txt' \
                                               %(out_label, binning_label))))
          1000]
cps_tocompare, cperrs_tocompare = [], []
emins, emaxs, emeans = [], [], []
for f in Cl_FILES:
    emin, emax, emean, cls, clerrs = cl_parse(f)
    emins.append(emin)
    emaxs.append(emax)
    emeans.append(emean)
    cps, cperrs = [], []
    for i, aps in enumerate(cls):
        psf_en = psf_ref(emean[i])
        l_max = _l_max[i]  # min(600, 1.9*(np.pi/np.radians(psf_en)))
        l_min = _l_min[i]  # min(60, max(60-i*5, 10))
        logger.info('fitting Cl(%i:%i)'%(l_min, l_max))
        # Rebin the spectrum in the multipole bins given by 'rebinning'.
        _l_rebin, _cls_rebin, _clerrs_rebin = [], [], []
        for bmin, bmax in zip(rebinning[:-1], rebinning[1:]):
            _l_rebin.append(np.sqrt(bmin*bmax))
            clmean = np.average(aps[bmin:bmax])
            clmeanerr = np.sqrt(np.sum(clerrs[i][bmin:bmax]**2))/\
                np.sqrt(len(aps[bmin:bmax]))
            _cls_rebin.append(clmean)
            _clerrs_rebin.append(clmeanerr)
        _l_rebin = np.array(_l_rebin)
        _cls_rebin = np.array(_cls_rebin)
        _clerrs_rebin = np.array(_clerrs_rebin)
        # Fit a constant (Cp) in the multipole range [l_min, l_max).
        l_range_fit = np.where(np.logical_and(_l_rebin >= l_min,
                                              _l_rebin < l_max))
        cp, cpV = np.polyfit(_l_rebin[l_range_fit],
                             _cls_rebin[l_range_fit],
def mkSTanalysis(**kwargs):
    """Science Tools analysis chain.
    """
    assert kwargs['config'].endswith('.py')
    get_var_from_file(kwargs['config'])
    logger.info('Starting ST analysis...')
    from GRATools import FT_DATA_FOLDER
    from GRATools.utils.gFTools import mergeft
    PH, SC = 'photon', 'spacecraft'
    PH_FOLDER = os.path.join(FT_DATA_FOLDER, PH)
    SC_FOLDER = os.path.join(FT_DATA_FOLDER, SC)
    start_week, end_week = data.START_WEEK, data.END_WEEK
    logger.info('Taking data from week %i to week %i'%(start_week, end_week))
    FT1_FILE = mergeft(PH_FOLDER, 'FT1_w%i-%i.txt'%(start_week, end_week), \
                           start_week, end_week)
    out_label = data.OUT_LABEL
    txt_out_files = open('output/' + out_label + '_outfiles.txt', 'w')
    from GRATools.utils.ScienceTools_ import gtselect
    gtselect_dict = data.GTSELECT_DICT
    if gtselect_dict['infile'] == 'DEFAULT':
        gtselect_dict['infile'] = FT1_FILE
    out_gtselect = gtselect(out_label, gtselect_dict)
    txt_out_files.write(out_gtselect + '\n')
    from GRATools.utils.ScienceTools_ import gtmktime
    gtmktime_dict = data.GTMKTIME_DICT
    if gtmktime_dict['evfile'] == 'DEFAULT':
        gtmktime_dict['evfile'] = out_gtselect
    out_gtmktime = gtmktime(out_label, gtmktime_dict)
    txt_out_files.write(out_gtmktime + '\n')
    from GRATools.utils.ScienceTools_ import gtbin
    gtbin_dict = data.GTBIN_DICT
    if gtbin_dict['evfile'] == 'DEFAULT':
        gtbin_dict['evfile'] = out_gtmktime
    out_gtbin = gtbin(out_label, gtbin_dict)
    txt_out_files.write(out_gtbin + '\n')
    out_gtltcube = None
    if kwargs['gtltcube'] == True:
        from GRATools.utils.ScienceTools_ import gtltcube
        gtltcube_dict = data.GTLTCUBE_DICT
        if gtltcube_dict['evfile'] == 'DEFAULT':
            gtltcube_dict['evfile'] = out_gtmktime
        out_gtltcube = gtltcube(out_label, gtltcube_dict)
        txt_out_files.write(out_gtltcube + '\n')
    else:
        logger.info('Not running gtltcube.')
    from GRATools.utils.ScienceTools_ import gtexpcube2
    gtexpcube2_dict = data.GTEXPCUBE2_DICT
    if gtexpcube2_dict['infile'] == 'DEFAULT':
        if out_gtltcube is None:
            abort('gtexpcube2 infile is DEFAULT but gtltcube was not run!')
        gtexpcube2_dict['infile'] = out_gtltcube
    if gtexpcube2_dict['cmap'] == 'DEFAULT':
        gtexpcube2_dict['cmap'] = out_gtbin
    out_gtexpcube2 = gtexpcube2(out_label, gtexpcube2_dict)
    txt_out_files.write(out_gtexpcube2 + '\n')
    txt_out_files.close()
    logger.info('Created output/' + out_label + '_outfiles.txt')
    logger.info('Done!')
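# A minimal config sketch (hedged): the variables below mirror the structure
# mkSTanalysis expects from the config module, with illustrative values; the
# full set of Science Tools parameters is defined in the actual config files.
#
# START_WEEK, END_WEEK = 9, 100
# OUT_LABEL = 'Allyrs_UCV_zmax100'
# GTSELECT_DICT = {'infile': 'DEFAULT', 'emin': 1000, 'emax': 1000000,
#                  'zmax': 100, 'evclass': 1024}
# GTMKTIME_DICT = {'evfile': 'DEFAULT',
#                  'filter': '(DATA_QUAL>0)&&(LAT_CONFIG==1)'}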
def mkCl(**kwargs):
    """Computes the angular power spectrum of the flux maps, corrected
       for the beam and pixel window functions and the noise term.
    """
    get_var_from_file(kwargs['config'])
    logger.info('Calculating PSF with gtpsf...')
    dict_gtpsf = data.DICT_GTPSF
    logger.info('Calculating Wbeam Function...')
    out_wb_label = data.OUT_W_LABEL
    mask_label = data.MASK_LABEL
    out_wb_txt = os.path.join(GRATOOLS_OUT, 'Wbeam_%s.txt'%out_wb_label)
    if not os.path.exists(out_wb_txt):
        from GRATools.utils.ScienceTools_ import gtpsf
        gtpsf(dict_gtpsf)
        from GRATools.utils.gWindowFunc import get_psf
        psf_file = data.PSF_FILE
        psf = get_psf(psf_file)
        _l = np.arange(0, 1000)
        from GRATools.utils.gWindowFunc import build_wbeam
        wb = build_wbeam(psf, _l, out_wb_txt)
    else:
        from GRATools.utils.gWindowFunc import get_wbeam
        wb = get_wbeam(out_wb_txt)
    save_current_figure('Wbeam_%s.png'%out_wb_label, clear=True)
    logger.info('Starting Cl analysis...')
    in_label = data.IN_LABEL
    in_label = in_label + '_' + mask_label
    out_label = data.OUT_LABEL
    binning_label = data.BINNING_LABEL
    mask_file = data.MASK_FILE
    mask = hp.read_map(mask_file)
    cl_param_file = os.path.join(GRATOOLS_OUT, '%s_%s_parameters.txt' \
                                     %(in_label, binning_label))
    from GRATools.utils.gFTools import get_cl_param
    _emin, _emax, _emean, _f, _ferr, _cn, _fsky = get_cl_param(cl_param_file)
    cl_txt = open(os.path.join(GRATOOLS_OUT, '%s_%s_cls.txt' \
                                   %(out_label, binning_label)), 'w')
    for i, (emin, emax) in enumerate(zip(_emin, _emax)):
        logger.info('Considering bin %.2f - %.2f ...'%(emin, emax))
        # Spectrally weighted mean energy of the bin, assuming a power-law
        # spectrum with index WEIGHT_SPEC_INDEX.
        gamma = data.WEIGHT_SPEC_INDEX
        Im = (1/(1-gamma))*(emax**(1-gamma)-emin**(1-gamma))/(emax-emin)
        eweightedmean = np.power(1/Im, 1/gamma)
        cl_txt.write('ENERGY\t %.2f %.2f %.2f\n'%(emin, emax, eweightedmean))
        l_max = 1000
        _l = np.arange(l_max)
        wb_en = wb.hslice(eweightedmean)(_l)
        flux_map_name = in_label+'_flux_%i-%i.fits'%(emin, emax)
        flux_map = hp.read_map(os.path.join(GRATOOLS_OUT_FLUX, flux_map_name))
        flux_map_masked = hp.ma(flux_map)
        flux_map_masked.mask = np.logical_not(mask)
        fsky = 1.-(len(np.where(flux_map_masked.filled() == hp.UNSEEN)[0])/\
                       float(len(flux_map)))
        if kwargs['show'] == True:
            hp.mollview(flux_map_masked.filled(),
                        title='f$_{sky}$ = %.3f'%fsky,
                        min=1e-7, max=1e-4, norm='log')
            plt.show()
        logger.info('fsky = %.3f'%fsky)
        nside = hp.npix2nside(len(flux_map))
        wpix = hp.sphtfunc.pixwin(nside)[:l_max]
        _cl = hp.sphtfunc.anafast(flux_map_masked.filled(), lmax=l_max-1, \
                                      iter=5)
        _cl_fit = hp.sphtfunc.anafast(flux_map_masked.filled(), iter=4)
        cn_fit = np.average(_cl_fit[-500:-100]/fsky)/len(_cl_fit[-500:-100])
        logger.info('cn fit = %e'%cn_fit)
        logger.info('cn poisson = %e'%_cn[i])
        cn = _cn[i]
        # Correct for sky fraction, noise and window functions:
        # Cl = (Cl_raw/fsky - Cn) / (Wbeam*Wpix)^2.
        wl = wb_en*wpix
        _cl = (_cl/fsky - cn)/(wl**2)
        cl_txt.write('Cl\t%s\n'%str(list(_cl)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
        _cl_err = np.sqrt(2./((2*_l+1)*fsky))*(_cl+(cn/wl**2))
        cl_txt.write('Cl_ERR\t%s\n\n'%str(list(_cl_err)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
    cl_txt.close()
    logger.info('Created %s'%(os.path.join(GRATOOLS_OUT, '%s_%s_cls.txt' \
                                               %(out_label, binning_label))))
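# A minimal sketch of the spectrally weighted mean energy used above: for a
# power-law spectrum E^-gamma, <E> solves
# <E>^-gamma = (1/(emax-emin)) * integral_emin^emax E^-gamma dE.
def _weighted_mean_energy(emin, emax, gamma):
    Im = (1./(1.-gamma))*(emax**(1.-gamma)-emin**(1.-gamma))/(emax-emin)
    return np.power(1./Im, 1./gamma)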
PACKAGE_NAME = 'GRATools'

"""Basic folder structure of the package.
"""
GRATOOLS_ROOT = os.path.abspath(os.path.dirname(__file__))
GRATOOLS_BIN = os.path.join(GRATOOLS_ROOT, 'bin')
GRATOOLS_CONFIG = os.path.join(GRATOOLS_ROOT, 'config')
GRATOOLS_UTILS = os.path.join(GRATOOLS_ROOT, 'utils')
GRATOOLS_DOC = os.path.join(GRATOOLS_ROOT, 'doc')
GRATOOLS_DATA = os.path.join(GRATOOLS_ROOT, 'data')

""" This is where we put the actual (FT1 and FT2) data sets.
"""
from GRATools.utils.logging_ import logger
try:
    FT_DATA_FOLDER = os.environ['FT_DATA']
    logger.info('Base data folder set to $FT_DATA = %s...' % FT_DATA_FOLDER)
except KeyError:
    FT_DATA_FOLDER = '/data1/data/FT-files'
    logger.info('$FT_DATA not set, base data folder set to %s...' %\
                    FT_DATA_FOLDER)

""" This is the output directory.
"""
try:
    GRATOOLS_OUT = os.environ['GRATOOLS_OUT']
    GRATOOLS_OUT_FIG = os.environ['GRATOOLS_OUT_FIG']
except KeyError:
    GRATOOLS_OUT = os.path.join(GRATOOLS_ROOT, 'output')
    GRATOOLS_OUT_FIG = os.path.join(GRATOOLS_ROOT, 'output/figures')

if __name__ == '__main__':
    print('GRATOOLS_ROOT: %s' % GRATOOLS_ROOT)
"""Basic folder structure of the package. """ GRATOOLS_ROOT = os.path.abspath(os.path.dirname(__file__)) GRATOOLS_BIN = os.path.join(GRATOOLS_ROOT, 'bin') GRATOOLS_CONFIG = os.path.join(GRATOOLS_ROOT, 'config') GRATOOLS_UTILS = os.path.join(GRATOOLS_ROOT, 'utils') GRATOOLS_DOC = os.path.join(GRATOOLS_ROOT, 'doc') GRATOOLS_DATA = os.path.join(GRATOOLS_ROOT, 'data') """ This is where we put the actual (FT1 and FT2) data sets. """ from GRATools.utils.logging_ import logger try: FT_DATA_FOLDER = os.environ['FT_DATA'] logger.info('Base data folder set to $FT_DATA = %s...' % FT_DATA_FOLDER) except KeyError: FT_DATA_FOLDER = '/data1/data/FT-files' logger.info('$FT_DATA not set, base data folder set to %s...' %\ FT_DATA_FOLDER) """ This is the output directory. """ try: GRATOOLS_OUT = os.environ['GRATOOLS_OUT'] GRATOOLS_OUT_FIG = os.environ['GRATOOLS_OUT_FIG'] except: GRATOOLS_OUT = os.path.join(GRATOOLS_ROOT, 'output') GRATOOLS_OUT_FIG = os.path.join(GRATOOLS_ROOT, 'output/figures') if __name__ == '__main__':
def mask_src_weighted(cat_file, ENERGY, NSIDE):
    """Returns the 'bad pixels' defined by the position of a source and a
       certain radius away from that point. The radii increase with the
       brightness.

       cat_file: str
           .fits file with the source catalog
       ENERGY: float
           Mean energy of the map to be masked
       NSIDE: int
           healpix nside parameter
    """
    from GRATools.utils.gWindowFunc import get_psf_ref
    psf_ref_file = os.path.join(GRATOOLS_CONFIG, 'ascii/PSF_UCV_PSF1.txt')
    src_cat = pf.open(cat_file)
    CAT = src_cat['LAT_Point_Source_Catalog']
    CAT_EXTENDED = src_cat['ExtendedSources']
    BAD_PIX_SRC = []
    SOURCES = CAT.data
    EXT_SOURCES = CAT_EXTENDED.data
    src_cat.close()
    # Energy-dependent rescaling factor of the radii, shaped as the PSF.
    psf_ref = get_psf_ref(psf_ref_file)
    psf_en = psf_ref(ENERGY)
    psf_min, psf_max = psf_ref.y[5], psf_ref.y[-1]
    norm_min, norm_max = 1, 0.3
    norm = norm_min + psf_en*((norm_max - norm_min)/(psf_max - psf_min)) -\
        psf_min*((norm_max - norm_min)/(psf_max - psf_min))
    logger.info('Normalization of radii due to energy: %.3f'%norm)
    logger.info('Psf(%.2f)= %.2f'%(ENERGY, psf_en))
    # Flux-dependent masking radius, linear between rad_min and rad_max.
    FLUX = SOURCES.field('Flux1000')
    flux_min, flux_max = min(FLUX), max(FLUX)
    rad_min, rad_max = 2., 5.
    RADdeg = rad_min + FLUX*((rad_max - rad_min)/(flux_max - flux_min)) -\
        flux_min*((rad_max - rad_min)/(flux_max - flux_min))
    RADrad = np.radians(RADdeg)
    logger.info('Masking the extended Sources')
    logger.info('-> 10deg around CenA and LMC')
    logger.info('-> 5deg around the remaining')
    for i, src in enumerate(EXT_SOURCES):
        NAME = EXT_SOURCES[i][0]
        GLON = EXT_SOURCES[i][4]
        GLAT = EXT_SOURCES[i][5]
        x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
        if NAME == 'LMC' or NAME == 'CenA Lobes':
            radintpix = hp.query_disc(NSIDE, (x, y, z), np.radians(10)*norm)
        else:
            radintpix = hp.query_disc(NSIDE, (x, y, z), np.radians(5)*norm)
        BAD_PIX_SRC.extend(radintpix)
    logger.info('Flux-weighted mask for sources activated')
    for i, src in enumerate(SOURCES):
        GLON = SOURCES[i][3]
        GLAT = SOURCES[i][4]
        x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
        radintpix = hp.query_disc(NSIDE, (x, y, z), RADrad[i]*norm)
        BAD_PIX_SRC.extend(radintpix)
    return BAD_PIX_SRC
def mkFlux(**kwargs):
    """Rebin the micro-bin count and exposure maps into macro energy bins,
       compute the corresponding flux maps and write the analysis parameters
       to a text file.
    """
    logger.info('Starting flux analysis...')
    get_var_from_file(kwargs['config'])
    fore_files = data.FORE_FILES_LIST
    crbkg_file = data.CRBKG_FILE
    macro_bins = data.MACRO_BINS
    gamma = data.POWER_LOW_INDEX
    out_label = data.OUT_LABEL
    mask_label = data.MASK_LABEL
    binning_label = data.BINNING_LABEL
    in_labels_list = data.IN_LABELS_LIST
    new_txt_name = os.path.join(GRATOOLS_OUT, '%s_%s_%s_parameters.txt' \
                                    %(out_label, mask_label, binning_label))
    if os.path.exists(new_txt_name):
        new_txt_name = new_txt_name.replace('.txt', '_2.txt')
    new_txt = open(new_txt_name, 'w')
    new_txt.write(
        '# \t E_MIN \t E_MAX \t E_MEAN \t F_MEAN \t FERR_MEAN \t CN \t FSKY \n')
    fore_mean_list = []
    for i, (minb, maxb) in enumerate(macro_bins):
        all_counts, all_exps = [], []
        flux_map = []
        micro_bins = np.arange(minb, maxb + 1)
        logger.info('Micro bins: %s' % str(micro_bins))
        logger.info('Considering bins from %i to %i...' % (minb, maxb - 1))
        mask_file = data.MASK_FILE
        if type(mask_file) == list:
            mask_file = mask_file[i]
        mask = hp.read_map(mask_file)
        _unmask = np.where(mask != 0)[0]
        maxb = maxb + 1
        exists_counts_files, exists_exp_files = [], []
        for mb in micro_bins:
            micro_count_name = os.path.join(
                GRATOOLS_OUT, 'output_counts/%s_counts_%i.fits' % (out_label, mb))
            micro_exp_name = os.path.join(
                GRATOOLS_OUT, 'output_counts/%s_exposure_%i.fits' % (out_label, mb))
            if os.path.exists(micro_count_name):
                exists_counts_files.append(micro_count_name)
                exists_exp_files.append(micro_exp_name)
        if len(exists_counts_files) == len(micro_bins):
            # Micro-bin maps already on disk: just read them back.
            emin, emax, emean = [], [], []
            E_MIN, E_MAX, E_MEAN = 0, 0, 0
            txt_name = os.path.join(GRATOOLS_OUT,
                                    '%s_outfiles.txt' % in_labels_list[0])
            txt = open(txt_name, 'r')
            for line in txt:
                if 'gtbin' in line:
                    emin, emax, emean = get_energy_from_fits(line.strip(),
                                                             minbinnum=minb,
                                                             maxbinnum=maxb)
                    E_MIN, E_MAX = emin[0], emax[-1]
                    E_MEAN = np.sqrt(emax[0] * emin[-1])
            txt.close()
            logger.info('Counts and exposure maps ready! Retrieving them...')
            for j in range(0, len(micro_bins)):
                cc = hp.read_map(exists_counts_files[j])
                ee = hp.read_map(exists_exp_files[j])
                all_counts.append(cc)
                all_exps.append(ee)
        else:
            logger.info('Retrieving count and exposure maps...')
            emin, emax, emean = [], [], []
            E_MIN, E_MAX, E_MEAN = 0, 0, 0
            count_map, exp_mean_map = [], []
            for label in in_labels_list:
                txt_name = os.path.join(GRATOOLS_OUT, '%s_outfiles.txt' % label)
                txt = open(txt_name, 'r')
                logger.info('Ref: %s' % label)
                for line in txt:
                    if 'gtbin' in line:
                        cmap = hp.read_map(line.strip(),
                                           field=range(minb, maxb))
                        cmap_repix = hp.pixelfunc.ud_grade(cmap,
                                                           kwargs['udgrade'],
                                                           pess=True,
                                                           power=-2)
                        count_map.append(np.asarray(cmap_repix))
                        emin, emax, emean = get_energy_from_fits(
                            line.strip(), minbinnum=minb, maxbinnum=maxb)
                        E_MIN, E_MAX = emin[0], emax[-1]
                        E_MEAN = np.sqrt(emax[0] * emin[-1])
                    if 'gtexpcube2' in line:
                        emap = hp.read_map(line.strip(),
                                           field=range(minb, maxb + 1))
                        emap_repix = hp.pixelfunc.ud_grade(emap,
                                                           kwargs['udgrade'],
                                                           pess=True)
                        # Geometric mean of the exposure at the bin edges.
                        emap_mean = []
                        for k in range(0, len(emap_repix) - 1):
                            emap_mean.append(
                                np.sqrt(emap_repix[k] * emap_repix[k + 1]))
                        exp_mean_map.append(np.asarray(emap_mean))
                txt.close()
            logger.info('Summing in time...')
            all_counts, all_exps = count_map[0], exp_mean_map[0]
            for t in range(1, len(in_labels_list)):
                all_counts = all_counts + count_map[t]
                all_exps = all_exps + exp_mean_map[t]
            for k, cmap in enumerate(all_counts):
                micro_count_name = os.path.join(
                    GRATOOLS_OUT,
                    'output_counts/%s_counts_%i.fits' % (out_label,
                                                         micro_bins[k]))
                hp.write_map(micro_count_name, cmap)
                micro_exp_name = os.path.join(
                    GRATOOLS_OUT,
                    'output_counts/%s_exposure_%i.fits' % (out_label,
                                                           micro_bins[k]))
                hp.write_map(micro_exp_name, all_exps[k])
        logger.info('Computing the flux for each micro energy bin...')
        nside = kwargs['udgrade']
        npix = hp.nside2npix(nside)
        sr = 4 * np.pi / npix
        for k, cmap in enumerate(all_counts):
            # Flux per pixel: counts / exposure / pixel solid angle.
            flux_map.append(cmap / all_exps[k] / sr)
        # Now we have finely gridded (in energy), summed-in-time fluxes.
        logger.info('Rebinning...')
        logger.info('Merging fluxes from %.2f to %.2f MeV' % (E_MIN, E_MAX))
        macro_fluxerr = (emean[0] / emean[0])**(-gamma) / (all_exps[0])**2
        macro_counts = all_counts[0]
        macro_flux = flux_map[0]
        # Foreground subtraction.
        if kwargs['foresub'] == True:
            from GRATools.utils.gFTools import get_foreground_integral_flux_map
            from GRATools.utils.gFTools import fit_foreground
            from GRATools.utils.gFTools import flux2counts
            out_fore_folder = os.path.join(GRATOOLS_OUT, 'output_fore')
            out_name_fore = os.path.join(out_fore_folder, 'fore_%i-%i.fits'\
                                             %(E_MIN, E_MAX))
            out_name_forecount = os.path.join(out_fore_folder,
                                              out_label+'_forecount_%i-%i.fits'\
                                                  %(E_MIN, E_MAX))
            if not os.path.exists(out_fore_folder):
                os.makedirs(out_fore_folder)
            all_fore = []
            all_countfore = []
            for e1, e2 in zip(emin, emax):
                fore = get_foreground_integral_flux_map(fore_files, e1, e2)
                counts_fore = flux2counts(fore, all_exps[0])
                all_fore.append(fore)
                all_countfore.append(counts_fore)
            tot_counts = sum(all_counts)
            tot_flux = sum(flux_map)
            tot_fore = sum(all_fore)
            tot_countfore = sum(all_countfore)
            n0, c0 = fit_foreground(tot_fore, tot_flux)
            #macro_flux = (all_counts[0] - n0*(all_countfore[0]))/all_exps[0]/sr
            macro_fore = all_fore[0]
            macro_countfore = all_countfore[0]
            CN = np.mean(all_counts[0][_unmask] / (all_exps[0][_unmask])**2) / sr
            for b in range(1, len(flux_map)):
                #n, c = fit_foreground(all_countfore[b], all_counts[b])
                #logger.info('Norm fact = %.3f'%n)
                fluxerr = (emean[b] / emean[0])**(-gamma) / (all_exps[b])**2
                macro_fluxerr = macro_fluxerr + fluxerr
                macro_counts = macro_counts + all_counts[b]
                macro_fore = macro_fore + all_fore[b]
                macro_countfore = macro_countfore + all_countfore[b]
                CN = CN + np.mean(all_counts[b][_unmask]/ \
                                      (all_exps[b][_unmask])**2)/sr
            # The macro flux is computed from the summed maps, with the
            # foreground scaled by the fitted normalization n0.
            macro_flux = tot_flux - n0 * tot_fore
            logger.info('CN (white noise) term = %e' % CN)
            macro_fluxerr = (np.sqrt(all_counts[0] * macro_fluxerr) / sr)
            macro_fore_masked = hp.ma(macro_fore)
            macro_fore_masked.mask = np.logical_not(mask)
            hp.write_map(out_name_fore, macro_fore, coord='G')
            hp.write_map(out_name_forecount, macro_countfore, coord='G')
            logger.info('Created %s' % out_name_fore)
            logger.info('Created %s' % out_name_forecount)
            FORE_MEAN = np.mean(macro_fore[_unmask])
            logger.info('MEAN FORE FLUX: %e' % FORE_MEAN)
            fore_mean_list.append(FORE_MEAN)
        else:
            CN = np.mean(all_counts[0][_unmask] / (all_exps[0][_unmask])**2) / sr
            macro_flux = flux_map[0]
            for b in range(1, len(flux_map)):
                fluxerr = (emean[b] / emean[0])**(-gamma) / (all_exps[b])**2
                macro_fluxerr = macro_fluxerr + fluxerr
                macro_flux = macro_flux + flux_map[b]
                macro_counts = macro_counts + all_counts[b]
                CN = CN + np.mean(all_counts[b][_unmask]/ \
                                      (all_exps[b][_unmask])**2)/sr
            logger.info('CN (white noise) term = %e' % CN)
            macro_fluxerr = (np.sqrt(all_counts[0] * macro_fluxerr) / sr)
        out_count_folder = os.path.join(GRATOOLS_OUT, 'output_counts')
        if not os.path.exists(out_count_folder):
            os.makedirs(out_count_folder)
        out_counts_name = os.path.join(out_count_folder,
                                       out_label+'_counts_%i-%i.fits'\
                                           %(E_MIN, E_MAX))
        logger.info('Created %s' % out_counts_name)
        hp.write_map(out_counts_name, macro_counts, coord='G')
        # Now mask the rebinned flux and error maps.
        macro_flux_masked = hp.ma(macro_flux)
        macro_fluxerr_masked = hp.ma(macro_fluxerr)
        macro_flux_masked.mask = np.logical_not(mask)
        macro_fluxerr_masked.mask = np.logical_not(mask)
        out_folder = os.path.join(GRATOOLS_OUT, 'output_flux')
        if not os.path.exists(out_folder):
            os.makedirs(out_folder)
        out_name = os.path.join(out_folder, out_label+'_%s_flux_%i-%i.fits'\
                                    %(mask_label, E_MIN, E_MAX))
        out_name_err = os.path.join(out_folder,
                                    out_label+'_%s_fluxerr_%i-%i.fits'\
                                        %(mask_label, E_MIN, E_MAX))
        logger.info('Created %s' % out_name)
        logger.info('Created %s' % out_name_err)
        hp.write_map(out_name, macro_flux_masked, coord='G')
        hp.write_map(out_name_err, macro_fluxerr_masked, coord='G')
        F_MEAN = np.sum(macro_flux[_unmask]) / len(macro_flux[_unmask])
        crbkg = get_crbkg(crbkg_file)
        # CR residual background subtraction currently disabled:
        #logger.info('Subtracting CR residual bkg...')
        #F_MEAN = F_MEAN - (E_MAX-E_MIN)*crbkg(E_MEAN)/E_MEAN**2
        FERR_MEAN = np.sqrt(np.sum(macro_fluxerr[_unmask]**2))/\
            len(macro_flux[_unmask])
        FSKY = float(len(macro_flux[_unmask])) / float(len(macro_flux))
        logger.info('Fsky = %.3f' % FSKY)
        logger.info('F_MEAN, FERR_MEAN = %e, %e' % (F_MEAN, FERR_MEAN))
        new_txt.write('%.2f \t %.2f \t %.2f \t %e \t %e \t %e \t %f \n' \
                          %(E_MIN, E_MAX, E_MEAN, F_MEAN, FERR_MEAN, CN, FSKY))
    if kwargs['foresub'] == True:
        new_txt.write('\n\n*** FOREGROUND PARAMETERS ***\n\n')
        new_txt.write('MEAN FLUX \t %s\n' % str(fore_mean_list))
    new_txt.close()
    logger.info('Created %s' %os.path.join(GRATOOLS_OUT,
                                           '%s_%s_%s_parameters.txt'\
                                               %(out_label, mask_label,
                                                 binning_label)))
    logger.info('done!')
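# Usage sketch (hypothetical config path): mkFlux is meant to be driven by a
# thin command-line wrapper; the keyword arguments below are the ones the
# function actually reads (kwargs['config'], kwargs['udgrade'],
# kwargs['foresub']).
#
#     mkFlux(config='config/my_analysis_config.py', udgrade=512, foresub=False)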
def mkRestyle(**kwargs):
    """Rebin the micro-bin count and exposure maps into macro energy bins,
       optionally fitting and subtracting the Galactic foreground ('gal') or
       the foreground plus a point-source template ('galsrc').
    """
    logger.info('Starting the restyling...')
    get_var_from_file(kwargs['config'])
    fore_files = data.FORE_FILES_LIST
    macro_bins = data.MACRO_BINS
    gamma = data.POWER_LOW_INDEX
    out_label = data.OUT_LABEL
    mask_label = data.MASK_LABEL
    binning_label = data.BINNING_LABEL
    in_labels_list = data.IN_LABELS_LIST
    new_txt_name = os.path.join(GRATOOLS_OUT, '%s_%s_%s_parameters.txt' \
                                    %(out_label, mask_label, binning_label))
    if os.path.exists(new_txt_name):
        new_txt_name = new_txt_name.replace('.txt', '_2.txt')
    new_txt = open(new_txt_name, 'w')
    new_txt.write(
        '# \t E_MIN \t E_MAX \t E_MEAN \t F_MEAN \t FERR_MEAN \t CN \t FSKY \n')
    fore_mean_list = []
    norm_list, norm_sx_list, norm_dx_list = [], [], []
    norm1_list, norm1_sx_list, norm1_dx_list = [], [], []
    norm2_list, norm2_sx_list, norm2_dx_list = [], [], []
    const_list, const_sx_list, const_dx_list = [], [], []
    for i, (minb, maxb) in enumerate(macro_bins):
        all_counts, all_exps = [], []
        flux_map = []
        micro_bins = np.arange(minb, maxb+1)
        logger.info('Micro bins: %s' % str(micro_bins))
        logger.info('Considering bins from %i to %i...' %(minb, maxb-1))
        mask_file = data.MASK_FILE
        if type(mask_file) == list:
            mask_file = mask_file[i]
        mask = hp.read_map(mask_file)
        _unmask = np.where(mask != 0)[0]
        maxb = maxb + 1
        exists_counts_files, exists_exp_files = [], []
        for mb in micro_bins:
            micro_count_name = os.path.join(GRATOOLS_OUT,
                                            'output_counts/%s_counts_%i.fits'
                                            %(out_label, mb))
            micro_exp_name = os.path.join(GRATOOLS_OUT,
                                          'output_counts/%s_exposure_%i.fits'
                                          %(out_label, mb))
            if os.path.exists(micro_count_name):
                exists_counts_files.append(micro_count_name)
                exists_exp_files.append(micro_exp_name)
        if len(exists_counts_files) == len(micro_bins):
            # Micro-bin maps already on disk: just read them back.
            emin, emax, emean = [], [], []
            E_MIN, E_MAX, E_MEAN = 0, 0, 0
            txt_name = os.path.join(GRATOOLS_OUT,
                                    '%s_outfiles.txt' %in_labels_list[0])
            txt = open(txt_name, 'r')
            for line in txt:
                if 'gtbin' in line:
                    emin, emax, emean = get_energy_from_fits(line.strip(),
                                                             minbinnum=minb,
                                                             maxbinnum=maxb)
                    E_MIN, E_MAX = emin[0], emax[-1]
                    E_MEAN = np.sqrt(emax[0]*emin[-1])
            txt.close()
            logger.info('Counts and exposure maps ready! Retrieving them...')
            for j in range(0, len(micro_bins)):
                cc = hp.read_map(exists_counts_files[j])
                ee = hp.read_map(exists_exp_files[j])
                all_counts.append(cc)
                all_exps.append(ee)
        else:
            logger.info('Retrieving count and exposure maps...')
            emin, emax, emean = [], [], []
            E_MIN, E_MAX, E_MEAN = 0, 0, 0
            count_map, exp_mean_map = [], []
            for label in in_labels_list:
                txt_name = os.path.join(GRATOOLS_OUT, '%s_outfiles.txt' %label)
                txt = open(txt_name, 'r')
                logger.info('Ref: %s'%label)
                for line in txt:
                    if 'gtbin' in line:
                        cmap = hp.read_map(line.strip(),
                                           field=range(minb, maxb))
                        cmap_repix = hp.pixelfunc.ud_grade(cmap,
                                                           kwargs['udgrade'],
                                                           pess=True,
                                                           power=-2)
                        count_map.append(np.asarray(cmap_repix))
                        emin, emax, emean = get_energy_from_fits(line.strip(),
                                                                 minbinnum=minb,
                                                                 maxbinnum=maxb)
                        E_MIN, E_MAX = emin[0], emax[-1]
                        E_MEAN = np.sqrt(emax[0]*emin[-1])
                    if 'gtexpcube2' in line:
                        emap = hp.read_map(line.strip(),
                                           field=range(minb, maxb+1))
                        emap_repix = hp.pixelfunc.ud_grade(emap,
                                                           kwargs['udgrade'],
                                                           pess=True)
                        # Geometric mean of the exposure at the bin edges.
                        emap_mean = []
                        for k in range(0, len(emap_repix)-1):
                            emap_mean.append(
                                np.sqrt(emap_repix[k]*emap_repix[k+1]))
                        exp_mean_map.append(np.asarray(emap_mean))
                txt.close()
            logger.info('Summing in time...')
            all_counts, all_exps = count_map[0], exp_mean_map[0]
            for t in range(1, len(in_labels_list)):
                all_counts = all_counts + count_map[t]
                all_exps = all_exps + exp_mean_map[t]
            for k, cmap in enumerate(all_counts):
                micro_count_name = os.path.join(GRATOOLS_OUT,
                                                'output_counts/%s_counts_%i.fits'
                                                %(out_label, micro_bins[k]))
                hp.write_map(micro_count_name, cmap)
                micro_exp_name = os.path.join(GRATOOLS_OUT,
                                              'output_counts/%s_exposure_%i.fits'
                                              %(out_label, micro_bins[k]))
                hp.write_map(micro_exp_name, all_exps[k])
        logger.info('Computing the flux for each micro energy bin...')
        nside = kwargs['udgrade']
        npix = hp.nside2npix(nside)
        sr = 4*np.pi/npix
        for k, cmap in enumerate(all_counts):
            # Flux per pixel: counts / exposure / pixel solid angle.
            flux_map.append(cmap/all_exps[k]/sr)
        # Now we have finely gridded (in energy), summed-in-time fluxes.
        logger.info('Rebinning...')
        logger.info('Merging fluxes from %.2f to %.2f MeV' %(E_MIN, E_MAX))
        macro_fluxerr = (emean[0]/emean[0])**(-gamma)/(all_exps[0])**2
        macro_counts = all_counts[0]
        # Foreground subtraction.
        from GRATools.utils.gForeground import get_fore_integral_flux_map
        from GRATools.utils.gForeground import get_ref_igrb_spline
        if kwargs['foresub'] == 'gal':
            _norm_list, _norm_sx_list, _norm_dx_list = [], [], []
            _const_list, _const_sx_list, _const_dx_list = [], [], []
            from GRATools.utils.gForeground import fit_foreground_poisson
            out_fore_folder = os.path.join(GRATOOLS_OUT, 'output_fore')
            out_name_fore = os.path.join(out_fore_folder, 'fore_%i-%i.fits'\
                                             %(E_MIN, E_MAX))
            if not os.path.exists(out_fore_folder):
                os.makedirs(out_fore_folder)
            all_fore = []
            all_c_guess = []
            for ii, (e1, e2) in enumerate(zip(emin, emax)):
                fore = get_fore_integral_flux_map(fore_files, e1, e2)
                all_fore.append(fore)
                all_c_guess.append(get_ref_igrb_spline()(emean[ii])*100)
            n0, c0, n0_sx, n0_dx, c0_sx, c0_dx = \
                fit_foreground_poisson(all_fore[0],
                                       all_counts[0],
                                       mask_map=mask,
                                       exp=all_exps[0],
                                       n_guess=1.,
                                       c_guess=all_c_guess[0])
            _norm_list.append(n0)
            _norm_sx_list.append(n0_sx)
            _norm_dx_list.append(n0_dx)
            _const_list.append(c0)
            _const_sx_list.append(c0_sx)
            _const_dx_list.append(c0_dx)
            # Subtract the foreground scaled by the best-fit normalization,
            # or by its lower/upper bound if kwargs['nforefit'] asks for it.
            if kwargs['nforefit'] == 'nlow':
                macro_flux = flux_map[0]-n0_sx*all_fore[0]
            elif kwargs['nforefit'] == 'nhigh':
                macro_flux = flux_map[0]-n0_dx*all_fore[0]
            else:
                macro_flux = flux_map[0]-n0*all_fore[0]
            macro_fore = n0*all_fore[0]
            CN = np.mean(all_counts[0][_unmask]/(all_exps[0][_unmask])**2)/sr
            for b in range(1, len(flux_map)):
                n, c, n_sx, n_dx, c_sx, c_dx = \
                    fit_foreground_poisson(all_fore[b],
                                           all_counts[b],
                                           mask_map=mask,
                                           exp=all_exps[b],
                                           n_guess=1.,
                                           c_guess=all_c_guess[b])
                _norm_list.append(n)
                _norm_sx_list.append(n_sx)
                _norm_dx_list.append(n_dx)
                _const_list.append(c)
                _const_sx_list.append(c_sx)
                _const_dx_list.append(c_dx)
                fluxerr = (emean[b]/emean[0])**(-gamma)/(all_exps[b])**2
                macro_fluxerr = macro_fluxerr + fluxerr
                if kwargs['nforefit'] == 'nlow':
                    macro_flux = macro_flux + flux_map[b]-n_sx*all_fore[b]
                elif kwargs['nforefit'] == 'nhigh':
                    macro_flux = macro_flux + flux_map[b]-n_dx*all_fore[b]
                else:
                    macro_flux = macro_flux + flux_map[b]-n*all_fore[b]
                macro_counts = macro_counts + all_counts[b]
                macro_fore = macro_fore + n*all_fore[b]
                CN = CN + np.mean(all_counts[b][_unmask]/ \
                                      (all_exps[b][_unmask])**2)/sr
            logger.info('CN (white noise) term = %e'%CN)
            macro_fluxerr = (np.sqrt(all_counts[0]*macro_fluxerr)/sr)
            macro_fore_masked = hp.ma(macro_fore)
            macro_fore_masked.mask = np.logical_not(mask)
            hp.write_map(out_name_fore, macro_fore, coord='G')
            logger.info('Created %s' %out_name_fore)
            FORE_MEAN = np.mean(macro_fore[_unmask])
            fore_mean_list.append(FORE_MEAN)
            norm_list.append(np.mean(np.array(_norm_list)))
            norm_sx_list.append(np.amin(np.array(_norm_sx_list)))
            norm_dx_list.append(np.amax(np.array(_norm_dx_list)))
            const_list.append(np.sum(np.array(_const_list)))
            const_sx_list.append(np.sum(np.array(_const_sx_list)))
            const_dx_list.append(np.sum(np.array(_const_dx_list)))
        elif kwargs['foresub'] == 'galsrc':
            cat_file = data.CAT_FILE
            psf_file = data.PSF_FILE
            _norm1_list, _norm1_sx_list, _norm1_dx_list = [], [], []
            _norm2_list, _norm2_sx_list, _norm2_dx_list = [], [], []
            _const_list, _const_sx_list, _const_dx_list = [], [], []
            from GRATools.utils.gSourceTemplate import build_src_template
            from GRATools.utils.gForeground import fit_fore_src_poisson
            out_fore_folder = os.path.join(GRATOOLS_OUT, 'output_fore')
            out_name_fore = os.path.join(out_fore_folder, 'fore_%i-%i.fits'\
                                             %(E_MIN, E_MAX))
            if not os.path.exists(out_fore_folder):
                os.makedirs(out_fore_folder)
            all_fore = []
            all_srctempl = []
            all_c_guess = []
            for ii, (e1, e2) in enumerate(zip(emin, emax)):
                fore = get_fore_integral_flux_map(fore_files, e1, e2)
                all_fore.append(fore)
                cat = os.path.basename(cat_file).replace('.fit', '')
                out_name_srctempl = os.path.join(GRATOOLS_OUT,
                                                 'output_src/src%s_%i-%i.fits'\
                                                     %(cat, e1, e2))
                if not os.path.exists(out_name_srctempl):
                    srctempl_map = build_src_template(cat_file, psf_file,
                                                      emin=e1, emax=e2,
                                                      b_cut=10)
                else:
                    logger.info('Retrieving source template...')
                    srctempl_map = hp.read_map(out_name_srctempl)
                all_srctempl.append(srctempl_map)
                all_c_guess.append(get_ref_igrb_spline()(emean[ii])*100)
            n10, n20, c0, (n10_sx, n10_dx), (n20_sx, n20_dx), (c0_sx, c0_dx) = \
                fit_fore_src_poisson(all_fore[0],
                                     all_counts[0],
                                     all_srctempl[0],
                                     mask_map=mask,
                                     exp=all_exps[0],
                                     n1_guess=1.,
                                     n2_guess=1.,
                                     c_guess=all_c_guess[0])
            _norm1_list.append(n10)
            _norm1_sx_list.append(n10_sx)
            _norm1_dx_list.append(n10_dx)
            _norm2_list.append(n20)
            _norm2_sx_list.append(n20_sx)
            _norm2_dx_list.append(n20_dx)
            _const_list.append(c0)
            _const_sx_list.append(c0_sx)
            _const_dx_list.append(c0_dx)
            macro_flux = flux_map[0]-n10*all_fore[0]-n20*all_srctempl[0]
            macro_fore = n10*all_fore[0]
            CN = np.mean(all_counts[0][_unmask]/(all_exps[0][_unmask])**2)/sr
            for b in range(1, len(flux_map)):
                n1, n2, c, (n1_sx, n1_dx), (n2_sx, n2_dx), (c_sx, c_dx) = \
                    fit_fore_src_poisson(all_fore[b],
                                         all_counts[b],
                                         all_srctempl[b],
                                         mask_map=mask,
                                         exp=all_exps[b],
                                         n1_guess=1.,
                                         n2_guess=1.,
                                         c_guess=all_c_guess[b])
                _norm1_list.append(n1)
                _norm1_sx_list.append(n1_sx)
                _norm1_dx_list.append(n1_dx)
                _norm2_list.append(n2)
                _norm2_sx_list.append(n2_sx)
                _norm2_dx_list.append(n2_dx)
                _const_list.append(c)
                _const_sx_list.append(c_sx)
                _const_dx_list.append(c_dx)
                fluxerr = (emean[b]/emean[0])**(-gamma)/(all_exps[b])**2
                macro_fluxerr = macro_fluxerr + fluxerr
                macro_flux = macro_flux + flux_map[b]-n1*all_fore[b]-\
                    n2*all_srctempl[b]
                macro_counts = macro_counts + all_counts[b]
                macro_fore = macro_fore + n1*all_fore[b]
                CN = CN + np.mean(all_counts[b][_unmask]/ \
                                      (all_exps[b][_unmask])**2)/sr
            logger.info('CN (white noise) term = %e'%CN)
            macro_fluxerr = (np.sqrt(all_counts[0]*macro_fluxerr)/sr)
            macro_fore_masked = hp.ma(macro_fore)
            macro_fore_masked.mask = np.logical_not(mask)
            hp.write_map(out_name_fore, macro_fore, coord='G')
            logger.info('Created %s' %out_name_fore)
            FORE_MEAN = np.mean(macro_fore[_unmask])
            fore_mean_list.append(FORE_MEAN)
            norm1_list.append(np.mean(np.array(_norm1_list)))
            norm1_sx_list.append(np.amin(np.array(_norm1_sx_list)))
            norm1_dx_list.append(np.amax(np.array(_norm1_dx_list)))
            norm2_list.append(np.mean(np.array(_norm2_list)))
            norm2_sx_list.append(np.amin(np.array(_norm2_sx_list)))
            norm2_dx_list.append(np.amax(np.array(_norm2_dx_list)))
            const_list.append(np.sum(np.array(_const_list)))
            const_sx_list.append(np.sum(np.array(_const_sx_list)))
            const_dx_list.append(np.sum(np.array(_const_dx_list)))
        else:
            CN = np.mean(all_counts[0][_unmask]/(all_exps[0][_unmask])**2)/sr
            macro_flux = flux_map[0]
            for b in range(1, len(flux_map)):
                fluxerr = (emean[b]/emean[0])**(-gamma)/(all_exps[b])**2
                macro_fluxerr = macro_fluxerr + fluxerr
                macro_flux = macro_flux + flux_map[b]
                macro_counts = macro_counts + all_counts[b]
                CN = CN + np.mean(all_counts[b][_unmask]/ \
                                      (all_exps[b][_unmask])**2)/sr
            logger.info('CN (white noise) term = %e'%CN)
            macro_fluxerr = (np.sqrt(all_counts[0]*macro_fluxerr)/sr)
        out_count_folder = os.path.join(GRATOOLS_OUT, 'output_counts')
        if not os.path.exists(out_count_folder):
            os.makedirs(out_count_folder)
        out_counts_name = os.path.join(out_count_folder,
                                       out_label+'_counts_%i-%i.fits'\
                                           %(E_MIN, E_MAX))
        logger.info('Created %s' %out_counts_name)
        hp.write_map(out_counts_name, macro_counts, coord='G')
        # Now mask the rebinned flux maps.
        macro_flux_masked = hp.ma(macro_flux)
        macro_fluxerr_masked = hp.ma(macro_fluxerr)
        macro_flux_masked.mask = np.logical_not(mask)
        macro_fluxerr_masked.mask = np.logical_not(mask)
        out_folder = os.path.join(GRATOOLS_OUT, 'output_flux')
        if not os.path.exists(out_folder):
            os.makedirs(out_folder)
        out_name = os.path.join(out_folder,
                                out_label+'_%s_fluxmasked_%i-%i.fits'\
                                    %(mask_label, E_MIN, E_MAX))
        out_name_unmask = os.path.join(out_folder,
                                       out_label+'_%s_flux_%i-%i.fits'\
                                           %(mask_label, E_MIN, E_MAX))
        hp.write_map(out_name, macro_flux_masked, coord='G')
        hp.write_map(out_name_unmask, macro_flux, coord='G')
        logger.info('Created %s' %out_name)
        logger.info('Created %s' %out_name_unmask)
        F_MEAN = np.sum(macro_flux[_unmask])/len(macro_flux[_unmask])
        FERR_MEAN = np.sqrt(np.sum(macro_fluxerr[_unmask]**2))/\
            len(macro_flux[_unmask])
        FSKY = float(len(macro_flux[_unmask]))/float(len(macro_flux))
        logger.info('Fsky = %.3f'%FSKY)
        logger.info('F_MEAN, FERR_MEAN = %e, %e' % (F_MEAN, FERR_MEAN))
        new_txt.write('%.2f \t %.2f \t %.2f \t %e \t %e \t %e \t %f \n' \
                          %(E_MIN, E_MAX, E_MEAN, F_MEAN, FERR_MEAN, CN, FSKY))
    if kwargs['foresub'] == 'gal':
        new_txt.write('\n\n*** FOREGROUND PARAMETERS ***\n\n')
        new_txt.write('MEAN_FORE_FLUX \t %s\n' %str(fore_mean_list))
        new_txt.write('NORM_FIT_PARAM \t %s\n' %str(norm_list))
        new_txt.write('NORM_FIT_PARAM_errsx \t %s\n' %str(norm_sx_list))
        new_txt.write('NORM_FIT_PARAM_errdx \t %s\n' %str(norm_dx_list))
        new_txt.write('IGRB_FIT_PARAM \t %s\n' %str(const_list))
        new_txt.write('IGRB_FIT_PARAM_errsx \t %s\n' %str(const_sx_list))
        new_txt.write('IGRB_FIT_PARAM_errdx \t %s\n' %str(const_dx_list))
    if kwargs['foresub'] == 'galsrc':
        new_txt.write('\n\n*** FOREGROUND PARAMETERS ***\n\n')
        new_txt.write('MEAN_FORE_FLUX \t %s\n' %str(fore_mean_list))
        new_txt.write('NORM_FORE_FIT_PARAM \t %s\n' %str(norm1_list))
        new_txt.write('NORM_FORE_FIT_PARAM_errsx \t %s\n' %str(norm1_sx_list))
        new_txt.write('NORM_FORE_FIT_PARAM_errdx \t %s\n' %str(norm1_dx_list))
        new_txt.write('NORM_SRC_FIT_PARAM \t %s\n' %str(norm2_list))
        new_txt.write('NORM_SRC_FIT_PARAM_errsx \t %s\n' %str(norm2_sx_list))
        new_txt.write('NORM_SRC_FIT_PARAM_errdx \t %s\n' %str(norm2_dx_list))
        new_txt.write('IGRB_FIT_PARAM \t %s\n' %str(const_list))
        new_txt.write('IGRB_FIT_PARAM_errsx \t %s\n' %str(const_sx_list))
        new_txt.write('IGRB_FIT_PARAM_errdx \t %s\n' %str(const_dx_list))
    new_txt.close()
    logger.info('Created %s' %os.path.join(GRATOOLS_OUT,
                                           '%s_%s_%s_parameters.txt'\
                                               %(out_label, mask_label,
                                                 binning_label)))
    logger.info('done!')
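# Usage sketch (hypothetical config path): the foreground treatment is selected
# through kwargs['foresub'] -- 'gal' fits and subtracts the Galactic foreground
# model, 'galsrc' additionally fits a point-source template, any other value
# skips the subtraction -- while kwargs['nforefit'] ('nlow' or 'nhigh') picks
# the lower or upper bound of the fitted normalization in the 'gal' case.
#
#     mkRestyle(config='config/my_analysis_config.py', udgrade=512,
#               foresub='gal', nforefit=None)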
def mkCsi(**kwargs):
    """Compute the angular 2-point correlation function (Csi) of the flux
       maps, together with the same estimator evaluated on a randomized
       reference map R.
    """
    get_var_from_file(kwargs['config'])
    ncores = kwargs['ncores']
    psf_file = data.PSF_REF_FILE
    p = multiprocessing.Pool(processes=ncores)
    logger.info('Starting Csi analysis...')
    in_label = data.IN_LABEL
    out_label = data.OUT_LABEL
    binning_label = data.BINNING_LABEL
    cl_param_file = os.path.join(GRATOOLS_OUT, '%s_%s_parameters.txt' \
                                     %(in_label, binning_label))
    from GRATools.utils.gFTools import get_cl_param
    from GRATools.utils.gWindowFunc import get_psf_ref
    _emin, _emax, _emean, _f, _ferr, _cn, _fsky = get_cl_param(cl_param_file)
    csi_txt = open(os.path.join(GRATOOLS_OUT, '%s_%s_csi.txt' \
                                    %(out_label, binning_label)), 'w')
    psf_ref = get_psf_ref(psf_file)
    #psf_ref.plot(show=False)
    #plt.xscale('log')
    #plt.yscale('log')
    #plt.show()
    for i, (emin, emax) in enumerate(zip(_emin, _emax)):
        logger.info('Considering bin %.2f - %.2f ...'%(emin, emax))
        cont_ang = np.radians(psf_ref(_emean[i]))
        csi_txt.write('ENERGY\t %.2f %.2f %.2f\n'%(emin, emax, _emean[i]))
        flux_map_name = in_label+'_flux_%i-%i.fits'%(emin, emax)
        flux_map = hp.read_map(os.path.join(GRATOOLS_OUT_FLUX, flux_map_name))
        flux_map = udgrade_as_psf(flux_map, cont_ang)
        # Read a second copy to build the randomized reference map R.
        R = hp.read_map(os.path.join(GRATOOLS_OUT_FLUX, flux_map_name))
        R = udgrade_as_psf(R, cont_ang)
        fsky = 1.-(len(np.where(flux_map == hp.UNSEEN)[0])/\
                       float(len(flux_map)))
        logger.info('fsky = %f'%fsky)
        npix = len(flux_map)
        nside = hp.npix2nside(npix)
        _unmask = np.where(flux_map != hp.UNSEEN)[0]
        npix_unmask = len(_unmask)
        Imean = _f[i]
        dI = flux_map - Imean
        dR = R - Imean
        R = permute_unmasked_pix(R)
        dR = permute_unmasked_pix(dR)
        th_bins = data.TH_BINNING
        theta = []
        for thmin, thmax in zip(th_bins[:-1], th_bins[1:]):
            # Geometric mean of the angular bin edges.
            th_mean = np.sqrt(thmin*thmax)
            theta.append(th_mean)
        theta = np.array(theta)
        logger.info('Computing Csi...')
        diri = hp.pixelfunc.pix2ang(nside, _unmask)
        veci = hp.rotator.dir2vec(diri)
        # One (x, y, z) row per unmasked pixel.
        xyz = np.array(veci).T
        args = zip(_unmask, xyz, [dI]*npix_unmask, [dR]*npix_unmask,
                   [nside]*npix_unmask)
        #args = zip(_unmask, xyz, [flux_map]*npix_unmask, [R]*npix_unmask,
        #           [nside]*npix_unmask)
        a = np.array(p.map(csi_compute, args))
        SUMij_list = a[:, 0]
        SUMf_list = a[:, 1]
        SUMR_list = a[:, 2]
        SUMij_th = []
        SUMf_th = []
        SUMR_th = []
        for t in range(0, len(SUMij_list[0])):
            SUMij_th.append(np.sum(SUMij_list[:, t]))
            SUMf_th.append(np.sum(SUMf_list[:, t]))
            SUMR_th.append(np.sum(SUMR_list[:, t]))
        csi = (np.array(SUMij_th))/np.array(SUMf_th)#-Imean**2
        r = (np.array(SUMR_th))/np.array(SUMf_th)#-Imean**2
        csi_txt.write('THETA\t%s\n' % ' '.join(str(th) for th in theta))
        csi_txt.write('CSI\t%s\n' % ' '.join(str(c) for c in csi))
        csi_txt.write('R\t%s\n' % ' '.join(str(x) for x in r))
    csi_txt.close()
    p.close()
    p.join()
    logger.info('Created %s'%(os.path.join(GRATOOLS_OUT, '%s_%s_csi.txt' \
                                               %(out_label, binning_label))))
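# Usage sketch (hypothetical config path): the per-pixel correlation sums are
# distributed over kwargs['ncores'] worker processes through
# multiprocessing.Pool.
#
#     mkCsi(config='config/my_analysis_config.py', ncores=4)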