def __init__(self, obs, empty_image, energy_band, offset_band,
             exclusion_mask=None, ncounts_min=0, save_bkg_scale=True):
    """Set up the per-observation image maker.

    Selects the events of ``obs`` in the given energy and offset bands and
    stores the observation's IRFs and metadata needed by the image makers.

    Parameters
    ----------
    obs : observation object
        Observation providing ``events``, ``aeff``, ``edisp``, ``psf``,
        ``bkg``, ``pointing_radec`` and ``observation_live_time_duration``.
    empty_image : sky image
        Reference image defining the output geometry (its header is kept).
    energy_band : energy band
        Energy range used to select events.
    offset_band : offset band
        Field-of-view offset range used to select events.
    exclusion_mask : sky image, optional
        Exclusion mask; stored in ``self.images`` under the name 'exclusion'.
    ncounts_min : int, optional
        Minimum number of counts required (default 0).
    save_bkg_scale : bool, optional
        If True, keep a table of background scale factors per observation.
    """
    # Select the events in the given energy and offset range
    self.energy_band = energy_band
    self.offset_band = offset_band
    events = obs.events
    self.obs_id = events.table.meta["OBS_ID"]
    events = events.select_energy(self.energy_band)
    self.events = events.select_offset(self.offset_band)

    self.images = SkyImageList()
    self.empty_image = empty_image
    self.header = self.empty_image.to_image_hdu().header
    # Explicit `is not None`: relying on the truthiness of an array-backed
    # image object is fragile (could be ambiguous or falsy for a valid mask).
    if exclusion_mask is not None:
        exclusion_mask.name = 'exclusion'
        self.images['exclusion'] = exclusion_mask
    self.ncounts_min = ncounts_min

    # Instrument response functions and observation metadata.
    self.aeff = obs.aeff
    self.edisp = obs.edisp
    self.psf = obs.psf
    self.bkg = obs.bkg
    self.obs_center = obs.pointing_radec
    self.livetime = obs.observation_live_time_duration

    self.save_bkg_scale = save_bkg_scale
    if self.save_bkg_scale:
        self.table_bkg_scale = Table(names=["OBS_ID", "bkg_scale", "N_counts"])
def run(self, observations):
    """
    Run sky image estimation.

    Parameters
    ----------
    observations : `gammapy.data.ObservationList`
        List of observations

    Returns
    -------
    sky_images : `gammapy.image.SkyImageList`
        List of sky images.
    """
    result = SkyImageList()
    for name in ('counts', 'exposure', 'background'):
        result[name] = self._get_empty_skyimage()

    # Stack the per-observation images into the accumulated result.
    for obs in observations:
        counts_image = self._counts_image(obs)
        exposure_image = self._exposure_image(obs)
        bkg_images = self._background_image(counts_image, exposure_image)

        result['counts'].data += counts_image.data
        result['exposure'].data += exposure_image.data
        # NaNs in the background estimate are folded in as zeros.
        result['background'].data += np.nan_to_num(bkg_images['background'].data)

    return result
def make_images_grouped():
    """Read the individual sky maps and write them as one grouped FITS file."""
    # The layer name doubles as the input file stem, so drive both from it.
    names = ['counts', 'background', 'exposure', 'exclusion', 'model']
    image_list = []
    for name in names:
        image = SkyImage.read('{}.fits.gz'.format(name))
        image.name = name
        image_list.append(image)
    images = SkyImageList(image_list)

    filename = 'input_all.fits.gz'
    print('Writing {}'.format(filename))
    images.write(filename, clobber=True)
def make_images_grouped():
    """Collect the individual maps, label them, and write one grouped file."""
    layers = ('counts', 'background', 'exposure', 'exclusion', 'model')
    # Each layer's file is named after the layer itself.
    images = SkyImageList([SkyImage.read(layer + '.fits.gz') for layer in layers])
    for image, layer in zip(images, layers):
        image.name = layer

    filename = 'input_all.fits.gz'
    print('Writing {}'.format(filename))
    images.write(filename, clobber=True)
def _background_image(self, counts, exposure):
    """Estimate the background image from counts and exposure.

    Parameters
    ----------
    counts : sky image
        Counts image for one observation.
    exposure : sky image
        Exposure image; copied and renamed 'exposure_on' as required
        by the background estimator.

    Returns
    -------
    images : `SkyImageList`
        Output of ``self.background_estimator.run``.
    """
    # Removed unused local `p = self.parameters` from the original.
    input_images = SkyImageList()
    input_images['counts'] = counts
    exposure_on = exposure.copy()
    exposure_on.name = 'exposure_on'
    input_images['exposure_on'] = exposure_on
    input_images['exclusion'] = self.exclusion_mask
    return self.background_estimator.run(input_images)
def make_exposure_model(outdir, E1, E2):
    """Load the exposure map as a frozen Sherpa table model named "exposure".

    Parameters
    ----------
    outdir : str
        Directory where the data are stored.
    E1 : float
        Minimum energy.
    E2 : float
        Maximum energy.

    Returns
    -------
    exposure : Sherpa table model
        Exposure table model with its amplitude frozen to 1.
    """
    # The same energy-band suffix appears in every file path.
    suffix = str(E1) + "_" + str(E2) + "_TeV.fits"
    exp = SkyImageList.read(outdir + "/fov_bg_maps" + suffix)["exposure"]
    exp.write(outdir + "/exp_maps" + suffix, clobber=True)
    load_table_model("exposure", outdir + "/exp_maps" + suffix)
    # `exposure` is created in the session namespace by load_table_model.
    exposure.ampl = 1
    freeze(exposure.ampl)
    return exposure
def make_bkg_model(outdir, E1, E2, freeze_bkg, ampl_init=1):
    """Load the background map as a Sherpa table model named "bkg".

    Parameters
    ----------
    outdir : str
        Directory where the data are stored.
    E1 : float
        Minimum energy.
    E2 : float
        Maximum energy.
    freeze_bkg : bool
        If True, freeze the background normalisation in the fit.
    ampl_init : float, optional
        Initial value of the background amplitude (default 1).

    Returns
    -------
    bkg : Sherpa table model
        Background table model.
    """
    # The same energy-band suffix appears in every file path.
    suffix = str(E1) + "_" + str(E2) + "_TeV.fits"
    bkgmap = SkyImageList.read(outdir + "/fov_bg_maps" + suffix)["bkg"]
    bkgmap.write(outdir + "/off_maps" + suffix, clobber=True)
    load_table_model("bkg", outdir + "/off_maps" + suffix)
    # `bkg` is created in the session namespace by load_table_model.
    set_par(bkg.ampl, val=ampl_init, min=0, max=None, frozen=freeze_bkg)
    return bkg
name_method_fond, config_name, image_size, for_integral_flux=False, ereco=energy_reco) outdir_profiles = make_outdir_profile(source_name, name_method_fond, config_name, image_size, for_integral_flux=False, ereco=energy_reco) # Pour pouvoir definir la gaussienne centre sur la source au centre des cartes en general E1 = energy_bins[0].value E2 = energy_bins[1].value on = SkyImageList.read(outdir_data + "/fov_bg_maps" + str(E1) + "_" + str(E2) + "_TeV.fits")["counts"] if "l_gal" in input_param["param_SgrA"]["sourde_name_skycoord2"]: source_center = SkyCoord( input_param["param_SgrA"]["sourde_name_skycoord2"]["l_gal"], input_param["param_SgrA"]["sourde_name_skycoord2"]["b_gal"], unit='deg', frame="galactic").icrs else: source_center = SkyCoord.from_name( input_param["general"]["sourde_name_skycoord"]) param_fit = input_param["param_fit_morpho"] if param_fit["Em_gal"]: name += "_Em_gal" if param_fit["gauss_SgrA"]["fit"]:
from gammapy.datasets import FermiGalacticCenter
from gammapy.image import SkyImageList, SkyImage
from gammapy.detect import KernelBackgroundEstimator

# NOTE(review): `u`, `np`, `SkyCoord`, `Tophat2DKernel` and `plt` are used
# below but not imported here — presumably imported elsewhere in this file;
# verify before running standalone.

# Parameters
CORRELATION_RADIUS = 10  # Pixels
SIGNIFICANCE_THRESHOLD = 5  # Sigma
MASK_DILATION_RADIUS = 0.5 * u.deg

# Load example images.
filename = ('$GAMMAPY_EXTRA/datasets/source_diffuse_separation/'
            'galactic_simulations/fermi_counts.fits')
counts = SkyImage.read(filename)
center = SkyCoord(0, 0, frame='galactic', unit='deg')
images = SkyImageList()
# Keep only a cutout around the Galactic center as the input counts image.
images['counts'] = counts.cutout(center, (10 * u.deg, 80 * u.deg))

# Source kernel: circular tophat; background kernel: flat 10x150 pixel box.
kernel_src = Tophat2DKernel(CORRELATION_RADIUS).array
kernel_bkg = np.ones((10, 150))

kbe = KernelBackgroundEstimator(
    kernel_src=kernel_src,
    kernel_bkg=kernel_bkg,
    significance_threshold=SIGNIFICANCE_THRESHOLD,
    mask_dilation_radius=MASK_DILATION_RADIUS,
)

# Run the iterative background estimation and display the image stack.
result = kbe.run(images)

kbe.images_stack_show()
plt.show()
from astropy import units as u
from astropy.convolution import Gaussian2DKernel
from astropy.coordinates import SkyCoord
from photutils.detection import find_peaks
from gammapy.image import SkyImageList
from gammapy.detect import TSImageEstimator
from gammapy.catalog import source_catalogs

# ## Compute TS image

# In[3]:

# Load data from files and rename the HDUs to the lowercase names
# expected by TSImageEstimator.
images = SkyImageList.read('../datasets/fermi_survey/all.fits.gz')
images['COUNTS'].name = 'counts'
images['BACKGROUND'].name = 'background'
images['EXPOSURE'].name = 'exposure'

# In[4]:

# Compute a source kernel (source template) in oversample mode,
# PSF is not taken into account
kernel = Gaussian2DKernel(2.5, mode='oversample')

# Compute a TS map. 'On' is the raw counts map, 'Background' is the background model,
# 'ExpGammaMap' denotes to the exposure map.
# NOTE(review): `result` is presumably used by later notebook cells not
# shown here.
estimator = TSImageEstimator()
result = estimator.run(images, kernel)
input_param["energy binning"]["Emax"], input_param["energy binning"]["nbin"], 'TeV') energy_centers = energy_bins.log_centers #outdir data and result config_name = input_param["general"]["config_name"] outdir_data = make_outdir_data(source_name, name_method_fond, len(energy_bins), config_name, image_size, for_integral_flux) outdir_result = make_outdir_filesresult(source_name, name_method_fond, len(energy_bins), config_name, image_size, for_integral_flux) #Pour pouvoir definir la gaussienne centre sur la source au centre des cartes en general E1 = energy_bins[0].value E2 = energy_bins[1].value on = SkyImageList.read(outdir_data + "/fov_bg_maps" + str(E1) + "_" + str(E2) + "_TeV.fits")["counts"] """ Source model paramaters initial """ #Dans HGPS, c est une gaussienne de 0.05deg en sigma donc *2.35 pour fwhm #avec HESS meme une source pontuelle ne fera jamais en dessous de 0.03-0.05 degre, imax = -1 #imax=12 counts = np.zeros( (len(energy_bins[0:imax]), on.data.shape[0], on.data.shape[1])) exposure_data = np.zeros( (len(energy_bins[0:imax]), on.data.shape[0], on.data.shape[1])) bkg_data = np.zeros( (len(energy_bins[0:imax]), on.data.shape[0], on.data.shape[1])) psf_data = np.zeros( (len(energy_bins[0:imax]), on.data.shape[0], on.data.shape[1]))
# name_method_fond = "coszenbinning_zen_0_27_39_49_57_65_72_15binE" name_method_fond = "coszenbinning_zen_0_34_49_61_72_sansLMC" config_name = "Mpp_Std" outdir_data = make_outdir_data(name_source, name_method_fond, len(energy_bins), config_name) outdir_plot = make_outdir_plot(name_source, name_method_fond, len(energy_bins), config_name) exclusion_mask = SkyMask.read('tevcat_exclusion_radius_0p5.fits') #exclusion_mask = SkyMask.read('exclusion_large.fits') for i_E, E in enumerate(energy_bins[:-2]): E1 = energy_bins[i_E].value E2 = energy_bins[i_E + 1].value significance_map = SkyImageList.read(outdir_data + "/fov_bg_maps" + str(E1) + "_" + str(E2) + "_TeV.fits")["significance"] #coord=significance_map.coordinates() #center=significance_map.center #offset=center.separation(coord) #i=np.where(offset>Angle(2,"deg")) #significance_map.data[i]=-1000 refheader = significance_map.to_image_hdu().header exclusion_mask = exclusion_mask.reproject(reference=refheader) pt.figure(i_E) n, bins, patches = histo_significance(significance_map, exclusion_mask) bin_center = (bins[1:] + bins[0:-1]) / 2 popt, pcov = curve_fit(norm, bin_center, n) perr = np.sqrt(np.diag(pcov)) pt.plot(bin_center, norm(bin_center, popt[0], popt[1], popt[2]),