def test_psf_kernel_from_gauss_read_write(tmp_path):
    """Gaussian PSF kernel: equal energy planes, odd pixel count, FITS round-trip."""
    sigma = 0.5 * u.deg
    binsz = 0.1 * u.deg
    geom = WcsGeom.create(binsz=binsz, npix=150, axes=[MapAxis((0, 1, 2))])
    kernel = PSFKernel.from_gauss(geom, sigma)

    # The Gaussian width is the same for every energy bin, so the first
    # two planes of the kernel map must be identical.
    assert_allclose(kernel.psf_kernel_map.data[0], kernel.psf_kernel_map.data[1])

    # A centered kernel needs an odd number of pixels along each axis.
    assert_allclose(np.array(kernel.psf_kernel_map.geom.npix) % 2, 1)

    # Write to disk, read back, and check the data survives the round-trip.
    kernel.write(tmp_path / "tmp.fits", overwrite=True)
    restored = PSFKernel.read(tmp_path / "tmp.fits")
    assert_allclose(kernel.psf_kernel_map.data, restored.psf_kernel_map.data)
def test_psf_kernel_from_gauss_read_write(tmp_path):
    """Gaussian PSF kernel: equal energy planes, odd pixel count, FITS round-trip.

    Modernized: pytest's legacy ``tmpdir`` (py.path.local) fixture is replaced
    by the pathlib-based ``tmp_path`` fixture, consistent with the sibling test
    in this module; behavior is unchanged.
    """
    sigma = 0.5 * u.deg
    binsz = 0.1 * u.deg
    geom = WcsGeom.create(binsz=binsz, npix=150, axes=[MapAxis((0, 1, 2))])
    kernel = PSFKernel.from_gauss(geom, sigma)

    # The Gaussian width is energy-independent, so both planes must match.
    assert_allclose(kernel.psf_kernel_map.data[0], kernel.psf_kernel_map.data[1])

    # A centered kernel needs an odd number of pixels along each axis.
    assert_allclose(np.array(kernel.psf_kernel_map.geom.npix) % 2, 1)

    filename = str(tmp_path / "test_kernel.fits")

    # Test read and write round-trip preserves the kernel data.
    kernel.write(filename, overwrite=True)
    newkernel = PSFKernel.read(filename)
    assert_allclose(kernel.psf_kernel_map.data, newkernel.psf_kernel_map.data)
# In[ ]:

# Load the Fermi 3FHL Galactic-center counts, background and exposure maps
# from $GAMMAPY_DATA and collect them in the dict the estimator expects.
counts = Map.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts.fits.gz")
background = Map.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background.fits.gz")
exposure = Map.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure.fits.gz")

maps = {"counts": counts, "background": background, "exposure": exposure}

# Matching PSF kernel for the same dataset.
kernel = PSFKernel.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-psf.fits.gz")


# In[ ]:

# Run the TS map estimation inside an IPython %%time cell magic.
get_ipython().run_cell_magic('time', '', 'estimator = TSMapEstimator()\nimages = estimator.run(maps, kernel.data)')


# ## Plot images

# In[ ]:

plt.figure(figsize=(15, 5))
model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model) # Now we read the maps and IRFs and create the dataset for each observation: # In[ ]: datasets = [] for obs_id in obs_ids: path = Path("analysis_3d_joint") / "obs_{}".format(obs_id) # read counts map and IRFs counts = Map.read(path / "counts.fits.gz") exposure = Map.read(path / "exposure.fits.gz") psf = PSFKernel.read(path / "psf.fits.gz") edisp = EnergyDispersion.read(path / "edisp.fits.gz") # create background model per observation / dataset background = Map.read(path / "background.fits.gz") background_model = BackgroundModel(background) background_model.tilt.frozen = False background_model.norm.value = 1.3 # optionally define a safe energy threshold emin = None mask_data = counts.geom.energy_mask(emin=emin) mask = Map.from_geom(geom=counts.geom, data=mask_data) dataset = MapDataset( model=model,
# ## Likelihood fit
#
# ### Reading maps and IRFs
# As first step we read in the maps and IRFs that we have saved to disk again:

# In[ ]:

# Read the counts/background/exposure maps back from disk.
maps = {
    "counts": Map.read(str(path / "counts.fits")),
    "background": Map.read(str(path / "background.fits")),
    "exposure": Map.read(str(path / "exposure.fits")),
}

# Read the instrument response functions (PSF kernel and energy dispersion).
psf_kernel = PSFKernel.read(str(path / "psf.fits"))
edisp = EnergyDispersion.read(str(path / "edisp.fits"))


# ### Fit mask
#
# To select a certain energy range for the fit we can create a fit mask:

# In[ ]:

# Boolean mask on the counts geometry: keep only bins above 0.3 (energy units
# of the map axis — presumably TeV; confirm against the geometry definition).
mask = Map.from_geom(maps["counts"].geom)
coords = mask.geom.get_coord()
mask.data = coords["energy"] > 0.3


# ### Model fit
#