def test_background_model(background):
    """A norm of 2 doubles the map; adding a tilt reshapes the spectrum."""
    scaled = BackgroundModel(background, norm=2.0).evaluate()
    assert_allclose(scaled.data[0][0][0], background.data[0][0][0] * 2.0, rtol=1e-3)
    assert_allclose(scaled.data.sum(), background.data.sum() * 2.0, rtol=1e-3)

    tilted = BackgroundModel(background, norm=2.0, tilt=0.2, reference="1000 GeV").evaluate()
    assert_allclose(tilted.data[0][0][0], 2.254e-07, rtol=1e-3)
    assert_allclose(tilted.data.sum(), 7.352e-06, rtol=1e-3)
def get_map_dataset(sky_model, geom, geom_etrue, edisp=True, **kwargs):
    """This computes the total npred"""
    # Flat background model: 0.2 in every pixel of the reco geometry.
    bkg_map = Map.from_geom(geom)
    bkg_map.quantity = np.full(bkg_map.data.shape, 0.2)
    background_model = BackgroundModel(bkg_map)

    psf = get_psf(geom_etrue)
    exposure = get_exposure(geom_etrue)

    if edisp:
        # Diagonal (identity) dispersion between true and reco energy axes.
        e_true = geom_etrue.get_axis_by_name("energy").edges
        e_reco = geom.get_axis_by_name("energy").edges
        edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)
    else:
        edisp = None

    # Restrict the fit to a 1 deg circle around the source position.
    center = sky_model.spatial_model.position
    circle = CircleSkyRegion(center=center, radius=1 * u.deg)
    mask_fit = background_model.map.geom.region_mask([circle])

    return MapDataset(
        model=sky_model,
        exposure=exposure,
        background_model=background_model,
        psf=psf,
        edisp=edisp,
        mask_fit=mask_fit,
        **kwargs,
    )
def from_hdulist(cls, hdulist):
    """Create map dataset from list of HDUs.

    Parameters
    ----------
    hdulist : `~astropy.io.fits.HDUList`
        List of HDUs.

    Returns
    -------
    dataset : `MapDataset`
        Map dataset.
    """
    # Counts, exposure and background are mandatory HDUs.
    kwargs = {
        "counts": Map.from_hdulist(hdulist, hdu="counts"),
        "exposure": Map.from_hdulist(hdulist, hdu="exposure"),
        "background_model": BackgroundModel(Map.from_hdulist(hdulist, hdu="background")),
    }

    # The remaining HDUs are optional and only read when present.
    if "EDISP_MATRIX" in hdulist:
        kwargs["edisp"] = EnergyDispersion.from_hdulist(
            hdulist, hdu1="EDISP_MATRIX", hdu2="EDISP_MATRIX_EBOUNDS"
        )

    if "PSF_KERNEL" in hdulist:
        kwargs["psf"] = PSFKernel(Map.from_hdulist(hdulist, hdu="psf_kernel"))

    if "MASK_SAFE" in hdulist:
        kwargs["mask_safe"] = Map.from_hdulist(hdulist, hdu="mask_safe").data.astype(bool)

    if "MASK_FIT" in hdulist:
        kwargs["mask_fit"] = Map.from_hdulist(hdulist, hdu="mask_fit").data.astype(bool)

    return cls(**kwargs)
# Build a PSF kernel on the map geometry and visualise it.
psf_kernel = PSFKernel.from_table_psf(psf, geom, max_radius=0.3 * u.deg)
psf_kernel.psf_kernel_map.sum_over_axes().plot(stretch="log")


# In[ ]:


# Square dispersion matrix: identical edges used for true and reco energy.
# NOTE(review): `axis` comes from an earlier cell — presumably the map energy axis.
energy = axis.edges
edisp = irfs["edisp"].to_energy_dispersion(offset, e_reco=energy, e_true=energy)
edisp.plot_matrix()


# Now we have to compute `npred` maps, i.e. "predicted counts per pixel" given the model and the observation infos: exposure, background, PSF and EDISP. For this we use the `MapDataset` object:

# In[ ]:


background_model = BackgroundModel(background)
dataset = MapDataset(
    model=sky_model,
    exposure=exposure,
    background_model=background_model,
    psf=psf_kernel,
    edisp=edisp,
)


# In[ ]:


npred = dataset.npred()


# In[ ]:


npred.sum_over_axes().plot(add_cbar=True)
def test_background_models(background):
    """Summing models with norm 1 and norm 2 triples the background map."""
    first = BackgroundModel(background, norm=1.0)
    second = BackgroundModel(background, norm=2.0)
    combined = BackgroundModels([first, second]).evaluate()
    assert_allclose(3 * first.map.data[0][0][0], combined.data[0][0][0])
spatial_model = SkyPointSource(lon_0="0.01 deg", lat_0="0.01 deg")
spectral_model = PowerLaw2(emin=emin, emax=emax, index=2.0, amplitude="3e-12 cm-2 s-1")
model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

# Fit only the amplitude; the spectral index stays fixed at 2.0.
model.parameters["index"].frozen = True


# ## Modeling the background
#
# Gammapy fitting framework assumes the background to be an integrated model.
# Thus, we will define the background as a model, and freeze its parameters for now.

# In[ ]:


# Background taken as-is from the 2D maps: norm and tilt both frozen.
background_model = BackgroundModel(maps2D["background"])
background_model.parameters["norm"].frozen = True
background_model.parameters["tilt"].frozen = True


# In[ ]:


dataset = MapDataset(
    model=model,
    counts=maps2D["counts"],
    exposure=maps2D["exposure"],
    background_model=background_model,
    mask=mask,
    psf=psf_kernel,
)


# In[ ]:
# # No we are ready for the actual likelihood fit. We first define the model as a combination of a point source with a powerlaw: # In[ ]: spatial_model = SkyPointSource(lon_0="0.01 deg", lat_0="0.01 deg") spectral_model = PowerLaw(index=2.2, amplitude="3e-12 cm-2 s-1 TeV-1", reference="1 TeV") model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model) # Often, it is useful to fit the normalisation (and also the tilt) of the background. To do so, we have to define the background as a model. In this example, we will keep the tilt fixed and the norm free. # In[ ]: background_model = BackgroundModel(maps["background"], norm=1.1, tilt=0.0) background_model.parameters["norm"].frozen = False background_model.parameters["tilt"].frozen = True # Now we set up the `MapDataset` object by passing the prepared maps, IRFs as well as the model: # In[ ]: dataset = MapDataset( model=model, counts=maps["counts"], exposure=maps["exposure"], background_model=background_model, mask=mask, psf=psf_kernel, edisp=edisp,
# plt.imshow(psf_kernel_array) # ## Map fit # # Let's fit this source assuming a Gaussian spatial shape and a power-law spectral shape, and a background with a flexible normalisation # In[ ]: spatial_model = SkyPointSource(lon_0="83.6 deg", lat_0="22.0 deg", frame="icrs") spectral_model = PowerLaw(index=2.6, amplitude="5e-11 cm-2 s-1 TeV-1", reference="1 TeV") model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model) background_model = BackgroundModel(maps["background"], norm=1.0) background_model.parameters["tilt"].frozen = False # In[ ]: get_ipython().run_cell_magic( 'time', '', 'dataset = MapDataset(\n model=model,\n counts=maps["counts"],\n exposure=maps["exposure"],\n background_model=background_model,\n psf=psf_kernel,\n)\nfit = Fit(dataset)\nresult = fit.run()\nprint(result)' ) # Best fit parameters: # In[ ]: result.parameters.to_table()
def simulate_dataset(
    skymodel,
    geom,
    pointing,
    irfs,
    livetime=1 * u.h,
    offset=0 * u.deg,
    max_radius=0.8 * u.deg,
    random_state="random-seed",
):
    """Simulate a 3D dataset.

    Simulate a source defined with a sky model for a given pointing,
    geometry and irfs for a given exposure time.
    This will return a dataset object which includes the counts cube,
    the exposure cube, the psf cube, the background model and the sky model.

    Parameters
    ----------
    skymodel : `~gammapy.cube.models.SkyModel`
        Sky model of the source to simulate
    geom : `~gammapy.maps.WcsGeom`
        Geometry object for the observation
    pointing : `~astropy.coordinates.SkyCoord`
        Pointing position
    irfs : dict
        Irfs used for simulating the observation
    livetime : `~astropy.units.Quantity`
        Livetime exposure of the simulated observation
    offset : `~astropy.units.Quantity`
        Offset from the center of the pointing position.
        This is used for the PSF and Edisp estimation
    max_radius : `~astropy.coordinates.Angle`
        The maximum radius of the PSF kernel.
    random_state: {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.

    Returns
    -------
    dataset : `~gammapy.cube.MapDataset`
        A dataset of the simulated observation.
    """
    # Background predicted from the IRF for this pointing and exposure time.
    background = make_map_background_irf(
        pointing=pointing, ontime=livetime, bkg=irfs["bkg"], geom=geom
    )
    background_model = BackgroundModel(background)

    # PSF kernel evaluated at the requested offset.
    table_psf = irfs["psf"].to_energy_dependent_table_psf(theta=offset)
    psf_kernel = PSFKernel.from_table_psf(table_psf, geom, max_radius=max_radius)

    exposure = make_map_exposure_true_energy(
        pointing=pointing, livetime=livetime, aeff=irfs["aeff"], geom=geom
    )

    # Energy dispersion is optional; square matrix on the map's energy axis.
    if "edisp" in irfs:
        energy_edges = geom.axes[0].edges
        edisp = irfs["edisp"].to_energy_dispersion(
            offset, e_reco=energy_edges, e_true=energy_edges
        )
    else:
        edisp = None

    dataset = MapDataset(
        model=skymodel,
        exposure=exposure,
        background_model=background_model,
        psf=psf_kernel,
        edisp=edisp,
    )

    # Draw Poisson counts from the predicted counts map.
    predicted = dataset.npred()
    rng = get_random_state(random_state)
    dataset.counts = WcsNDMap(geom, rng.poisson(predicted.data))

    return dataset
# In[ ]: e_true = exposure.geom.axes[0].edges e_reco = counts.geom.axes[0].edges edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco) # ## Background # # Let's compute a background cube, with predicted number of background events per pixel from the diffuse Galactic and isotropic model components. For this, we use the use the [gammapy.cube.MapEvaluator](https://docs.gammapy.org/0.12/api/gammapy.cube.MapEvaluator.html) to multiply with the exposure and apply the PSF. The Fermi-LAT energy dispersion at high energies is small, we neglect it here. # In[ ]: model = SkyDiffuseCube(diffuse_galactic) background_gal = BackgroundModel.from_skymodel(model, exposure=exposure, psf=psf_kernel, edisp=edisp) background_gal.map.sum_over_axes().plot() print("Background counts from Galactic diffuse: ", background_gal.map.data.sum()) # In[ ]: model = SkyModel(SkyDiffuseConstant(), diffuse_iso) background_iso = BackgroundModel.from_skymodel(model, exposure=exposure, edisp=edisp) background_iso.map.sum_over_axes().plot(add_cbar=True)
# Restrict the covariance to this component's spectral parameters.
# NOTE(review): the [2:5, 2:5] slice presumably matches the parameter ordering
# of the global covariance matrix — verify against the fit setup.
spec.parameters.covariance = covariance[2:5, 2:5]
energy_range = [0.3, 10] * u.TeV
spec.plot(energy_range=energy_range, energy_power=2)
ax = spec.plot_error(energy_range=energy_range, energy_power=2)


# Apparently our model should be improved by adding a component for diffuse Galactic emission and at least one second point
# source. But before we do that in the next section, we will fit the background as a model.

# ### Fitting a background model
#
# Often, it is useful to fit the normalisation (and also the index) of the background. To do so, we have to define the background as a model and pass it to `MapFit`

# In[ ]:


# Background starts with norm 1.1 and no spectral tilt.
background_model = BackgroundModel(cmaps["background"], norm=1.1, tilt=0.0)


# In[ ]:


fit_bkg = MapFit(
    model=model,
    counts=cmaps["counts"],
    exposure=cmaps["exposure"],
    background_model=background_model,
    mask=mask,
    psf=psf_kernel,
    edisp=edisp,
)


# In[ ]: