def get_map_dataset(sky_model, geom, geom_etrue, edisp=True, **kwargs):
    """Create a MapDataset with a flat background model, exposure, PSF,
    an optional diagonal energy dispersion and a circular fit mask."""
    # define background model
    m = Map.from_geom(geom)
    m.quantity = 0.2 * np.ones(m.data.shape)
    background_model = BackgroundModel(m)

    psf = get_psf(geom_etrue)
    exposure = get_exposure(geom_etrue)

    if edisp:
        # define energy dispersion
        e_true = geom_etrue.get_axis_by_name("energy").edges
        e_reco = geom.get_axis_by_name("energy").edges
        edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)
    else:
        edisp = None

    # define fit mask
    center = sky_model.spatial_model.position
    circle = CircleSkyRegion(center=center, radius=1 * u.deg)
    mask_fit = background_model.map.geom.region_mask([circle])

    return MapDataset(
        model=sky_model,
        exposure=exposure,
        background_model=background_model,
        psf=psf,
        edisp=edisp,
        mask_fit=mask_fit,
        **kwargs,
    )
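# A minimal sketch of how the helper above might be called. The geometry
# construction follows the gammapy Map API; the axis sizes are illustrative,
# and sky_model is assumed to have been built elsewhere (the helper also
# relies on get_psf/get_exposure from the same test module):
energy_axis = MapAxis.from_edges(np.logspace(-1, 1, 4), unit="TeV", name="energy", interp="log")
energy_axis_true = MapAxis.from_edges(np.logspace(-1, 1, 8), unit="TeV", name="energy", interp="log")
geom = WcsGeom.create(skydir=(0, 0), coordsys="GAL", width=(4, 4), binsz=0.1, axes=[energy_axis])
geom_etrue = WcsGeom.create(skydir=(0, 0), coordsys="GAL", width=(4, 4), binsz=0.1, axes=[energy_axis_true])

dataset = get_map_dataset(sky_model, geom, geom_etrue, edisp=True)
print(dataset.npred().data.sum())  # total predicted counts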
def setup(self):
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = etrue
    ereco = np.logspace(-1, 1, 5) * u.TeV
    elo = ereco[:-1]
    ehi = ereco[1:]
    self.aeff = EffectiveAreaTable(etrue[:-1], etrue[1:], np.ones(9) * u.cm ** 2)
    self.edisp = EnergyDispersion.from_diagonal_response(etrue, ereco)

    data = np.ones(elo.shape)
    data[-1] = 0  # to test stats calculation with empty bins
    self.on_counts = CountsSpectrum(elo, ehi, data)
    self.off_counts = CountsSpectrum(elo, ehi, np.ones(elo.shape) * 10)

    self.livetime = 1000 * u.s
    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        aeff=self.aeff,
        edisp=self.edisp,
        livetime=self.livetime,
        acceptance=np.ones(elo.shape),
        acceptance_off=np.ones(elo.shape) * 10,
        obs_id="test",
    )
def setup(self):
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = etrue
    ereco = np.logspace(-1, 1, 5) * u.TeV
    elo = ereco[:-1]
    ehi = ereco[1:]
    self.e_reco = ereco
    self.aeff = EffectiveAreaTable(etrue[:-1], etrue[1:], np.ones(9) * u.cm ** 2)
    self.edisp = EnergyDispersion.from_diagonal_response(etrue, ereco)

    data = np.ones(elo.shape)
    data[-1] = 0  # to test stats calculation with empty bins
    self.on_counts = CountsSpectrum(elo, ehi, data)
    self.off_counts = CountsSpectrum(elo, ehi, np.ones(elo.shape) * 10)

    start = u.Quantity([0], "s")
    stop = u.Quantity([1000], "s")
    time_ref = Time("2010-01-01 00:00:00.0")
    self.gti = GTI.create(start, stop, time_ref)
    self.livetime = self.gti.time_sum

    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        aeff=self.aeff,
        edisp=self.edisp,
        livetime=self.livetime,
        acceptance=np.ones(elo.shape),
        acceptance_off=np.ones(elo.shape) * 10,
        name="test",
        gti=self.gti,
    )
def sens():
    etrue = np.logspace(0, 1, 21) * u.TeV
    elo = etrue[:-1]
    ehi = etrue[1:]
    area = np.zeros(20) + 1e6 * u.m ** 2
    arf = EffectiveAreaTable(energy_lo=elo, energy_hi=ehi, data=area)

    ereco = np.logspace(0, 1, 5) * u.TeV
    rmf = EnergyDispersion.from_diagonal_response(etrue, ereco)

    bkg_array = np.ones(4)
    bkg_array[-1] = 1e-3
    bkg = CountsSpectrum(
        energy_lo=ereco[:-1], energy_hi=ereco[1:], data=bkg_array, unit="s-1"
    )

    sens = SensitivityEstimator(
        arf=arf, rmf=rmf, bkg=bkg, livetime=1 * u.h, index=2, gamma_min=20, alpha=0.2
    )
    sens.run()
    return sens
def test_from_diagonal_response(self):
    e_true = [0.5, 1, 2, 4, 6] * u.TeV
    e_reco = [2, 4, 6] * u.TeV

    edisp = EnergyDispersion.from_diagonal_response(e_true, e_reco)
    assert edisp.pdf_matrix.shape == (4, 2)
    expected = [[0, 0], [0, 0], [1, 0], [0, 1]]
    assert_equal(edisp.pdf_matrix, expected)

    # Test square matrix
    edisp = EnergyDispersion.from_diagonal_response(e_true)
    assert_allclose(edisp.e_reco.edges.value, e_true.value)
    assert edisp.e_reco.unit == "TeV"
    assert_equal(edisp.pdf_matrix[0][0], 1)
    assert_equal(edisp.pdf_matrix[2][0], 0)
    assert edisp.pdf_matrix.sum() == 4
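# Why the expected matrix above looks the way it does: a diagonal response
# maps each true-energy bin onto the reco bin that contains it, and true bins
# falling outside the reco range get all-zero rows (those events are lost).
# A minimal sketch reproducing the expected 4x2 matrix; this is one way to get
# the same numbers, not necessarily the library's implementation:
import numpy as np

e_true = np.array([0.5, 1, 2, 4, 6])
e_reco = np.array([2, 4, 6])
centers = np.sqrt(e_true[:-1] * e_true[1:])  # log-centers of the true bins
pdf_matrix = np.array([np.histogram(c, bins=e_reco)[0] for c in centers])
print(pdf_matrix)  # [[0 0] [0 0] [1 0] [0 1]]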
def test_set_model(self):
    aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges, "HESS")
    edisp = EnergyDispersion.from_diagonal_response(
        self.src.energy.edges, self.src.energy.edges
    )
    dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff, edisp, self.bkg)

    with pytest.raises(AttributeError):
        dataset.parameters

    dataset.model = self.source_model
    assert dataset.parameters[0] == self.source_model.parameters[0]
def test_spectrum_dataset_stack_diagonal_safe_mask(self):
    aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges, "HESS")
    edisp = EnergyDispersion.from_diagonal_response(
        self.src.energy.edges, self.src.energy.edges
    )
    livetime = self.livetime
    dataset1 = SpectrumDataset(
        counts=self.src.copy(),
        livetime=livetime,
        aeff=aeff,
        edisp=edisp,
        background=self.bkg.copy(),
    )

    livetime2 = 0.5 * livetime
    aeff2 = EffectiveAreaTable(
        self.src.energy.edges[:-1], self.src.energy.edges[1:], 2 * aeff.data.data
    )
    bkg2 = CountsSpectrum(
        self.src.energy.edges[:-1],
        self.src.energy.edges[1:],
        data=2 * self.bkg.data,
    )
    safe_mask2 = np.ones_like(self.src.data, bool)
    safe_mask2[0] = False
    dataset2 = SpectrumDataset(
        counts=self.src.copy(),
        livetime=livetime2,
        aeff=aeff2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
    )

    dataset1.stack(dataset2)
    assert_allclose(dataset1.counts.data[1:], self.src.data[1:] * 2)
    assert_allclose(dataset1.counts.data[0], self.src.data[0])
    assert dataset1.livetime == 1.5 * self.livetime
    assert_allclose(dataset1.background.data[1:], 3 * self.bkg.data[1:])
    assert_allclose(dataset1.background.data[0], self.bkg.data[0])
    assert_allclose(
        dataset1.aeff.data.data.to_value("m2"),
        4.0 / 3 * aeff.data.data.to_value("m2"),
    )
    assert_allclose(dataset1.edisp.pdf_matrix[1:], edisp.pdf_matrix[1:])
    assert_allclose(dataset1.edisp.pdf_matrix[0], 0.5 * edisp.pdf_matrix[0])
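# A worked check of the factors asserted above (plain arithmetic, not library
# code; the weighting scheme is inferred from the assertions): counts and
# background simply add where both safe masks are True, the effective area is
# averaged with livetime weights, and each edisp row is averaged with
# exposure (aeff * livetime) weights, so a row that only dataset1 feeds keeps
# half its weight here.
t1, t2 = 1.0, 0.5  # livetimes, in units of self.livetime
a1, a2 = 1.0, 2.0  # effective areas, in units of aeff

aeff_stacked = (t1 * a1 + t2 * a2) / (t1 + t2)
print(aeff_stacked)  # 1.333... = 4/3, matching the aeff assertion

w0 = (t1 * a1) / (t1 * a1 + t2 * a2)
print(w0)  # 0.5, matching the masked-bin edisp assertion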
def create(cls, e_reco, e_true=None, reference_time="2000-01-01"):
    """Create an empty SpectrumDatasetOnOff.

    Empty containers are created with the correct geometry: counts,
    counts_off and aeff are zero and edisp is diagonal. The safe_mask
    is set to False in every bin.

    Parameters
    ----------
    e_reco : `~astropy.units.Quantity`
        Edges of the counts vector.
    e_true : `~astropy.units.Quantity`
        Edges of the effective area table. If not set, the reco energy
        edges are used. Default is None.
    reference_time : `~astropy.time.Time`
        Reference time of the dataset. Default is "2000-01-01".
    """
    if e_true is None:
        e_true = e_reco

    counts = CountsSpectrum(e_reco[:-1], e_reco[1:])
    counts_off = CountsSpectrum(e_reco[:-1], e_reco[1:])
    aeff = EffectiveAreaTable(
        e_true[:-1], e_true[1:], np.zeros(e_true[:-1].shape) * u.m ** 2
    )
    edisp = EnergyDispersion.from_diagonal_response(e_true, e_reco)
    mask_safe = np.zeros_like(counts.data, "bool")
    gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"), reference_time)
    livetime = gti.time_sum
    acceptance = np.ones_like(counts.data, int)
    acceptance_off = np.ones_like(counts.data, int)

    return SpectrumDatasetOnOff(
        counts=counts,
        counts_off=counts_off,
        aeff=aeff,
        edisp=edisp,
        mask_safe=mask_safe,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        livetime=livetime,
        gti=gti,
    )
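# A minimal usage sketch for create() above, assuming it is exposed as a
# classmethod on SpectrumDatasetOnOff (as the cls argument suggests):
e_reco = np.logspace(-1, 1, 5) * u.TeV
e_true = np.logspace(-1, 1, 10) * u.TeV

empty = SpectrumDatasetOnOff.create(e_reco=e_reco, e_true=e_true)
print(empty.livetime)           # expected 0 s, from the empty GTI
print(empty.counts.data.sum())  # 0, all containers start empty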
def edisp(geom, geom_true):
    e_reco = geom.get_axis_by_name("energy").edges
    e_true = geom_true.get_axis_by_name("energy").edges
    return EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)
def run_region(self, kr, lon, lat, radius):
    # TODO: for now we have to read/create the allsky maps in each job,
    # because we can't pickle a <functools._lru_cache_wrapper object>;
    # move this back to __init__ when fixed

    # exposure
    exposure_hpx = Map.read(
        self.datadir + "/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
    )
    exposure_hpx.unit = "cm2 s"

    # background IEM
    infile = self.datadir + "/catalogs/fermi/gll_iem_v06.fits.gz"
    outfile = self.resdir + "/gll_iem_v06_extra.fits"
    model_iem = extrapolate_iem(infile, outfile, self.logEc_extra)

    # ROI
    roi_time = time()
    ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
    width = 2 * (radius + self.psf_margin)

    # counts
    counts = Map.create(
        skydir=ROI_pos,
        width=width,
        proj="CAR",
        coordsys="GAL",
        binsz=self.dlb,
        axes=[self.energy_axis],
        dtype=float,
    )
    counts.fill_by_coord(
        {"skycoord": self.events.radec, "energy": self.events.energy}
    )

    axis = MapAxis.from_nodes(
        counts.geom.axes[0].center, name="energy", unit="GeV", interp="log"
    )
    wcs = counts.geom.wcs
    geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
    coords = counts.geom.get_coord()

    # exposure
    data = exposure_hpx.interp_by_coord(coords)
    exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

    # read PSF
    psf_kernel = PSFKernel.from_table_psf(
        self.psf, counts.geom, max_radius=self.psf_margin * u.deg
    )

    # energy dispersion
    e_true = exposure.geom.axes[0].edges
    e_reco = counts.geom.axes[0].edges
    edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)

    # fit mask
    if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
        coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
    mask = (
        (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
        & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
        & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
        & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
    )
    mask_fermi = WcsNDMap(counts.geom, mask)

    # IEM
    eval_iem = MapEvaluator(
        model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
    )
    bkg_iem = eval_iem.compute_npred()

    # ISO
    eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
    bkg_iso = eval_iso.compute_npred()

    # merge IEM and ISO, only one local normalization is fitted
    background_total = bkg_iem + bkg_iso
    background_model = BackgroundModel(background_total)
    background_model.parameters["norm"].min = 0.0

    # sources model
    in_roi = self.FHL3.positions.galactic.contained_by(wcs)
    FHL3_roi = []
    for ks in range(len(self.FHL3.table)):
        if in_roi[ks]:
            model = self.FHL3[ks].sky_model()
            model.spatial_model.parameters.freeze_all()  # freeze spatial
            model.spectral_model.parameters["amplitude"].min = 0.0
            if isinstance(model.spectral_model, PowerLawSpectralModel):
                model.spectral_model.parameters["index"].min = 0.1
                model.spectral_model.parameters["index"].max = 10.0
            else:
                model.spectral_model.parameters["alpha"].min = 0.1
                model.spectral_model.parameters["alpha"].max = 10.0
            FHL3_roi.append(model)
    model_total = SkyModels(FHL3_roi)

    # dataset
    dataset = MapDataset(
        model=model_total,
        counts=counts,
        exposure=exposure,
        psf=psf_kernel,
        edisp=edisp,
        background_model=background_model,
        mask_fit=mask_fermi,
        name="3FHL_ROI_num" + str(kr),
    )
    cat_stat = dataset.stat_sum()

    datasets = Datasets([dataset])
    fit = Fit(datasets)
    results = fit.run(optimize_opts=self.optimize_opts)
    print("ROI_num", str(kr), "\n", results)
    fit_stat = datasets.stat_sum()

    if results.message != "Optimization failed.":
        datasets.to_yaml(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
        np.save(
            self.resdir + "/3FHL_ROI_num" + str(kr) + "_covariance.npy",
            results.parameters.covariance,
        )
        np.savez(
            self.resdir + "/3FHL_ROI_num" + str(kr) + "_fit_infos.npz",
            message=results.message,
            stat=[cat_stat, fit_stat],
        )

    exec_time = time() - roi_time
    print("ROI", kr, " time (s): ", exec_time)

    for model in FHL3_roi:
        if (
            self.FHL3[model.name].data["ROI_num"] == kr
            and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
        ):
            flux_points = FluxPointsEstimator(
                datasets=datasets,
                e_edges=self.El_flux,
                source=model.name,
                sigma_ul=2.0,
            ).run()
            filename = self.resdir + "/" + model.name + "_flux_points.fits"
            flux_points.write(filename, overwrite=True)

    exec_time = time() - roi_time - exec_time
    print("ROI", kr, " Flux points time (s): ", exec_time)
# Let's compute a PSF kernel matching the pixel size of our map
psf_kernel = PSFKernel.from_table_psf(psf, counts.geom, max_radius="1 deg")


# In[ ]:


psf_kernel.psf_kernel_map.sum_over_axes().plot(stretch="log", add_cbar=True)


# ### Energy Dispersion
# For simplicity we assume a diagonal energy dispersion:

# In[ ]:


e_true = exposure.geom.axes[0].edges
e_reco = counts.geom.axes[0].edges
edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)


# ## Background
#
# Let's compute a background cube, with the predicted number of background events per pixel from the diffuse Galactic and isotropic model components. For this we use [gammapy.cube.MapEvaluator](https://docs.gammapy.org/0.12/api/gammapy.cube.MapEvaluator.html) to multiply with the exposure and apply the PSF. The Fermi-LAT energy dispersion at high energies is small, so we neglect it here.

# In[ ]:


model = SkyDiffuseCube(diffuse_galactic)

background_gal = BackgroundModel.from_skymodel(
    model, exposure=exposure, psf=psf_kernel, edisp=edisp
)
background_gal.map.sum_over_axes().plot()
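# The isotropic component mentioned above can be evaluated the same way. The
# cell below is a sketch, assuming an isotropic model `diffuse_iso` was loaded
# earlier in the notebook; since that emission is spatially flat, the `psf`
# argument is omitted here on that assumption.

# In[ ]:


background_iso = BackgroundModel.from_skymodel(diffuse_iso, exposure=exposure, edisp=edisp)
background_iso.map.sum_over_axes().plot()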