"disk-pwlsimple", "point-pwltest", "test" ] DPI = 120 # observation config IRF_FILE = "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits" #IRF_FILE = "$GAMMAPY_DATA/cta-prod3b/caldb/data/cta/prod3b-v2/bcf/South_z20_50h/irf_file.fits" POINTING = SkyCoord(0.0, 0.5, frame="galactic", unit="deg") LIVETIME = 1 * u.hr GTI_TABLE = GTI.create(start=0 * u.s, stop=LIVETIME.to(u.s)) # dataset config ENERGY_AXIS = MapAxis.from_energy_bounds("0.1 TeV", "100 TeV", nbin=10, per_decade=True) ENERGY_AXIS_TRUE = MapAxis.from_energy_bounds("0.03 TeV", "300 TeV", nbin=20, per_decade=True, name="energy_true") MIGRA_AXIS = MapAxis.from_bounds(0.5, 2, nbin=150, node_type="edges", name="migra") WCS_GEOM = WcsGeom.create(skydir=POINTING, width=(4, 4), binsz=0.02,
def test_map_axis_from_energy_units():
    """Energy-axis constructors must reject inputs with non-energy units."""
    with pytest.raises(ValueError):
        MapAxis.from_energy_bounds(0.1, 10, 2, unit="deg")
    with pytest.raises(ValueError):
        MapAxis.from_energy_edges([0.1, 1, 10] * u.deg)
@pytest.mark.parametrize( "pars", [ { "energy": None, "rad": None, "energy_shape": 32, "psf_energy": 0.8659643, "rad_shape": 144, "psf_rad": 0.0015362848, "psf_exposure": 3.14711e12 * u.Unit("cm2 s"), "psf_value_shape": (32, 144), "psf_value": 4369.96391 * u.Unit("sr-1"), }, { "energy": MapAxis.from_energy_bounds(1, 10, 100, "TeV", name="energy_true"), "rad": None, "energy_shape": 100, "psf_energy": 1.428893959, "rad_shape": 144, "psf_rad": 0.0015362848, "psf_exposure": 4.723409e12 * u.Unit("cm2 s"), "psf_value_shape": (100, 144), "psf_value": 3714.303683 * u.Unit("sr-1"), }, { "energy": None, "rad": MapAxis.from_nodes(np.arange(0, 2, 0.002), unit="deg", name="rad"), "energy_shape": 32, "psf_energy": 0.8659643, "rad_shape": 1000,
def energy_axis():
    """Return a three-bin reconstructed-energy axis spanning 1-10 TeV."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)
    return axis
"""Plot Fermi PSF.""" from gammapy.irf import PSFMap from gammapy.maps import WcsGeom, MapAxis filename = "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz" psf = PSFMap.read(filename, format="gtpsf") axis = MapAxis.from_energy_bounds("10 GeV", "2 TeV", nbin=20, name="energy_true") geom = WcsGeom.create(npix=50, binsz=0.01, axes=[axis]) # .to_image() computes the exposure weighted mean PSF kernel = psf.get_psf_kernel(geom=geom).to_image() kernel.psf_kernel_map.plot()
def image_to_cube(input_map, e_min, e_max):
    """Promote a 2D image map to a cube with one energy bin spanning e_min-e_max.

    The input map's data is inserted as the single plane of the new cube;
    ``e_min``/``e_max`` may be strings or Quantities convertible to energy.
    """
    lo = u.Quantity(e_min)
    hi = u.Quantity(e_max)
    energy_bin = MapAxis.from_energy_bounds(lo, hi, nbin=1)
    cube_geom = input_map.geom.to_cube([energy_bin])
    cube_data = input_map.data[np.newaxis, :, :]
    return Map.from_geom(cube_geom, data=cube_data)
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    """Stack two spectrum datasets whose safe masks differ in the first bin.

    Dataset 1 keeps every reco-energy bin safe; dataset 2 excludes the first
    bin, has doubled effective area, doubled background and half the livetime.
    The assertions pin how counts, background, exposure and the energy
    dispersion kernel combine under those conditions.
    """
    geom = spectrum_dataset.counts.geom
    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=30, name="energy_true"
    )
    aeff = EffectiveAreaTable.from_parametrization(energy.edges, "HESS").to_region_map(
        geom.region
    )
    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)
    exposure = aeff * livetime
    # Diagonal (identity-like) energy dispersion on the same region geometry.
    edisp = EDispKernelMap.from_diagonal_response(
        energy, energy_true, geom=geom.to_image()
    )
    # Insert an axis so the exposure broadcasts onto the edisp exposure-map shape.
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]
    background = spectrum_dataset.npred_background().copy()
    # All-True safe mask for dataset 1 (adding True to a bool array sets every bin).
    mask_safe = RegionNDMap.from_geom(geom=geom, dtype=bool)
    mask_safe.data += True
    spectrum_dataset1 = SpectrumDataset(
        name="ds1",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure.copy(),
        edisp=edisp.copy(),
        background=background,
        gti=gti.copy(),
        mask_safe=mask_safe
    )
    # Dataset 2: half livetime, doubled effective area, doubled background.
    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)
    aeff2 = aeff * 2
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)
    geom = spectrum_dataset.counts.geom
    # Safe mask for dataset 2 excludes the first reco-energy bin.
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)
    exposure2 = aeff2 * livetime2
    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]
    spectrum_dataset2 = SpectrumDataset(
        name="ds2",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
        gti=gti2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)
    reference = spectrum_dataset.counts.data
    # Bins safe in both datasets accumulate counts from both.
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(spectrum_dataset1.exposure.data[0], 4.755644e09)
    # Background: 1x from ds1 plus 2x from ds2 where both are safe.
    assert_allclose(
        spectrum_dataset1.npred_background().data[1:], 3 * background.data[1:]
    )
    # First bin keeps only ds1's background (ds2's mask excludes it).
    assert_allclose(spectrum_dataset1.npred_background().data[0], background.data[0])
    # Total exposure: aeff*livetime + 2*aeff*0.5*livetime = 2*aeff*livetime.
    assert_allclose(
        spectrum_dataset1.exposure.quantity.to_value("m2s"),
        2 * (aeff * livetime).quantity.to_value("m2s"),
    )
    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()
    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    # First true-energy row is halved: only one dataset contributed exposure there.
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
def _default_plot_energy_axis(self):
    """Build a 50-bin energy axis covering this object's full energy range, for plotting."""
    e_lo = self.energy.min()
    e_hi = self.energy.max()
    return MapAxis.from_energy_bounds(energy_min=e_lo, energy_max=e_hi, nbin=50)
def test_stack(sky_model):
    """Stack two MapDatasets with differing backgrounds and safe masks.

    Both datasets share the same geometry, counts level, exposure and energy
    dispersion; they differ in background level and in which pixels their
    safe masks exclude.  After stacking, npred, background, counts, mask,
    exposure and the merged meta table are checked against reference values.
    """
    axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=3)
    geom = WcsGeom.create(
        skydir=(266.40498829, -28.93617776),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis],
    )
    axis_etrue = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=5, name="energy_true"
    )
    geom_etrue = WcsGeom.create(
        skydir=(266.40498829, -28.93617776),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis_etrue],
    )
    # Diagonal energy dispersion shared by both datasets, weighted uniformly.
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=axis, energy_axis_true=axis_etrue, geom=geom
    )
    edisp.exposure_map.quantity = (
        1e0 * u.m ** 2 * u.s * np.ones(edisp.exposure_map.data.shape)
    )
    # Dataset 1: background 0.2 per pixel, counts 1 per pixel.
    bkg1 = Map.from_geom(geom)
    bkg1.data += 0.2
    cnt1 = Map.from_geom(geom)
    cnt1.data = 1.0 * np.ones(cnt1.data.shape)
    exp1 = Map.from_geom(geom_etrue)
    exp1.quantity = 1e7 * u.m ** 2 * u.s * np.ones(exp1.data.shape)
    mask1 = Map.from_geom(geom)
    mask1.data = np.ones(mask1.data.shape, dtype=bool)
    # Chained basic slicing returns views, so this masks rows 5:10 of the
    # first energy plane in place (the intermediate [:] is a no-op view).
    mask1.data[0][:][5:10] = False
    dataset1 = MapDataset(
        counts=cnt1,
        background=bkg1,
        exposure=exp1,
        mask_safe=mask1,
        name="dataset-1",
        edisp=edisp,
        meta_table=Table({"OBS_ID": [0]}),
    )
    # Dataset 2: background 0.1 per pixel, extra masked rows in plane 1.
    bkg2 = Map.from_geom(geom)
    bkg2.data = 0.1 * np.ones(bkg2.data.shape)
    cnt2 = Map.from_geom(geom)
    cnt2.data = 1.0 * np.ones(cnt2.data.shape)
    exp2 = Map.from_geom(geom_etrue)
    exp2.quantity = 1e7 * u.m ** 2 * u.s * np.ones(exp2.data.shape)
    mask2 = Map.from_geom(geom)
    mask2.data = np.ones(mask2.data.shape, dtype=bool)
    mask2.data[0][:][5:10] = False
    mask2.data[1][:][10:15] = False
    dataset2 = MapDataset(
        counts=cnt2,
        background=bkg2,
        exposure=exp2,
        mask_safe=mask2,
        name="dataset-2",
        edisp=edisp,
        meta_table=Table({"OBS_ID": [1]}),
    )
    background_model2 = FoVBackgroundModel(dataset_name="dataset-2")
    background_model1 = FoVBackgroundModel(dataset_name="dataset-1")
    dataset1.models = [background_model1, sky_model]
    dataset2.models = [background_model2, sky_model]
    # Stack both datasets onto an empty dataset with the same geometries.
    stacked = MapDataset.from_geoms(**dataset1.geoms)
    stacked.stack(dataset1)
    stacked.stack(dataset2)
    stacked.models = [sky_model]
    npred_b = stacked.npred()
    # Third positional argument of assert_allclose is the relative tolerance.
    assert_allclose(npred_b.data.sum(), 1459.985035, 1e-5)
    assert_allclose(stacked.npred_background().data.sum(), 1360.00, 1e-5)
    assert_allclose(stacked.counts.data.sum(), 9000, 1e-5)
    assert_allclose(stacked.mask_safe.data.sum(), 4600)
    assert_allclose(stacked.exposure.data.sum(), 1.6e11)
    # Meta tables are concatenated: both observation IDs are kept.
    assert_allclose(stacked.meta_table["OBS_ID"][0], [0, 1])
"""Example plot showing stacking of two datasets.""" from astropy import units as u from astropy.coordinates import SkyCoord import matplotlib.pyplot as plt from gammapy.data import Observation from gammapy.datasets import SpectrumDataset from gammapy.datasets.map import MIGRA_AXIS_DEFAULT from gammapy.irf import EffectiveAreaTable2D, EnergyDispersion2D from gammapy.makers import SpectrumDatasetMaker from gammapy.maps import MapAxis, RegionGeom from gammapy.modeling.models import PowerLawSpectralModel, SkyModel energy_true = MapAxis.from_energy_bounds("0.1 TeV", "20 TeV", nbin=20, per_decade=True, name="energy_true") energy_reco = MapAxis.from_energy_bounds("0.2 TeV", "10 TeV", nbin=10, per_decade=True) aeff = EffectiveAreaTable2D.from_parametrization(energy_axis_true=energy_true, instrument="HESS") offset_axis = MapAxis.from_bounds(0 * u.deg, 5 * u.deg, nbin=2, name="offset") edisp = EnergyDispersion2D.from_gauss( energy_axis_true=energy_true, offset_axis=offset_axis, migra_axis=MIGRA_AXIS_DEFAULT,
def test_interpolate_map_dataset():
    """Run MapDatasetMaker on map-based IRFs and verify the interpolation.

    Builds dummy all-sky IRF maps (flat background, effective area with a
    spatial and energy gradient, Gaussian PSF and energy dispersion), a
    10-event list and a GTI, wraps them in an Observation, then checks that
    the maker reproduces counts, background, exposure, edisp and PSF on the
    finer analysis geometry.
    """
    energy = MapAxis.from_energy_bounds("1 TeV", "300 TeV", nbin=5, name="energy")
    energy_true = MapAxis.from_nodes(
        np.logspace(-1, 3, 20), name="energy_true", interp="log", unit="TeV"
    )

    # make dummy map IRFs
    geom_allsky = WcsGeom.create(
        npix=(5, 3), proj="CAR", binsz=60, axes=[energy], skydir=(0, 0)
    )
    geom_allsky_true = geom_allsky.drop("energy").to_cube([energy_true])

    # background: flat map with 30 counts per pixel
    geom_background = WcsGeom.create(
        skydir=(0, 0), width=(5, 5), binsz=0.2 * u.deg, axes=[energy]
    )
    value = 30
    bkg_map = Map.from_geom(geom_background, unit="")
    bkg_map.data = value * np.ones(bkg_map.data.shape)

    # effective area - with a gradient that also depends on energy
    aeff_map = Map.from_geom(geom_allsky_true, unit="cm2 s")
    ra_arr = np.arange(aeff_map.data.shape[1])
    dec_arr = np.arange(aeff_map.data.shape[2])
    for i in np.arange(aeff_map.data.shape[0]):
        # Slope along one pixel axis scales with the energy index i.
        aeff_map.data[i, :, :] = (
            (i + 1) * 10 * np.meshgrid(dec_arr, ra_arr)[0]
            + 10 * np.meshgrid(dec_arr, ra_arr)[1]
            + 10
        )
    aeff_map.meta["TELESCOP"] = "HAWC"

    # psf map: Gaussian of fixed 0.2 deg width at all true energies
    width = 0.2 * u.deg
    rad_axis = MapAxis.from_nodes(np.linspace(0, 2, 50), name="rad", unit="deg")
    psfMap = PSFMap.from_gauss(energy_true, rad_axis, width)

    # edispmap: 10% Gaussian migration, no bias
    edispmap = EDispKernelMap.from_gauss(
        energy, energy_true, sigma=0.1, bias=0.0, geom=geom_allsky
    )

    # events and gti: 10 events along a line through the field centre
    nr_ev = 10
    ev_t = Table()
    gti_t = Table()
    ev_t["EVENT_ID"] = np.arange(nr_ev)
    ev_t["TIME"] = nr_ev * [Time("2011-01-01 00:00:00", scale="utc", format="iso")]
    ev_t["RA"] = np.linspace(-1, 1, nr_ev) * u.deg
    ev_t["DEC"] = np.linspace(-1, 1, nr_ev) * u.deg
    ev_t["ENERGY"] = np.logspace(0, 2, nr_ev) * u.TeV
    gti_t["START"] = [Time("2010-12-31 00:00:00", scale="utc", format="iso")]
    gti_t["STOP"] = [Time("2011-01-02 00:00:00", scale="utc", format="iso")]
    events = EventList(ev_t)
    gti = GTI(gti_t)

    # define observation pointing at the origin
    obs = Observation(
        obs_id=0,
        obs_info={"RA_PNT": 0.0, "DEC_PNT": 0.0},
        gti=gti,
        aeff=aeff_map,
        edisp=edispmap,
        psf=psfMap,
        bkg=bkg_map,
        events=events,
        obs_filter=None,
    )

    # define analysis geometry (finer binning than the IRF maps)
    geom_target = WcsGeom.create(
        skydir=(0, 0), width=(5, 5), binsz=0.1 * u.deg, axes=[energy]
    )

    maker = MapDatasetMaker(
        selection=["exposure", "counts", "background", "edisp", "psf"]
    )
    dataset = MapDataset.create(
        geom=geom_target, energy_axis_true=energy_true, rad_axis=rad_axis, name="test"
    )
    dataset = maker.run(dataset, obs)

    # test counts: every event must be binned
    assert dataset.counts.data.sum() == nr_ev

    # test background: total is preserved (up to rounding)
    assert np.floor(np.sum(dataset.npred_background().data)) == np.sum(bkg_map.data)
    coords_bg = {"skycoord": SkyCoord("0 deg", "0 deg"), "energy": energy.center[0]}
    # 7.5 = 30 input counts spread over the 4x finer (0.2->0.1 deg) pixels.
    assert_allclose(
        dataset.npred_background().get_by_coord(coords_bg)[0], 7.5, atol=1e-4
    )

    # test effective area: value at the centre survives interpolation
    coords_aeff = {
        "skycoord": SkyCoord("0 deg", "0 deg"),
        "energy_true": energy_true.center[0],
    }
    assert_allclose(
        aeff_map.get_by_coord(coords_aeff)[0],
        dataset.exposure.interp_by_coord(coords_aeff)[0],
        atol=1e-3,
    )

    # test edispmap: kernel at the centre is unchanged by the maker
    pdfmatrix_preinterp = edispmap.get_edisp_kernel(
        SkyCoord("0 deg", "0 deg")
    ).pdf_matrix
    pdfmatrix_postinterp = dataset.edisp.get_edisp_kernel(
        SkyCoord("0 deg", "0 deg")
    ).pdf_matrix
    assert_allclose(pdfmatrix_preinterp, pdfmatrix_postinterp, atol=1e-7)

    # test psfmap: kernel at the centre is unchanged by the maker
    geom_psf = geom_target.drop("energy").to_cube([energy_true])
    psfkernel_preinterp = psfMap.get_psf_kernel(
        position=SkyCoord("0 deg", "0 deg"), geom=geom_psf, max_radius=2 * u.deg
    ).data
    psfkernel_postinterp = dataset.psf.get_psf_kernel(
        position=SkyCoord("0 deg", "0 deg"), geom=geom_psf, max_radius=2 * u.deg
    ).data
    assert_allclose(psfkernel_preinterp, psfkernel_postinterp, atol=1e-4)
def test_interp_to_geom():
    """Interpolate constant-valued WCS and HEALPix maps onto finer geometries.

    A uniform map interpolated onto a finer geometry must keep its value,
    its concrete map class and the target geometry; with preserve_counts=True
    the total of the map must survive the resampling.
    """
    fill_value = 30
    src_axis = MapAxis.from_energy_bounds("1 TeV", "300 TeV", nbin=5, name="energy")
    dst_axis = MapAxis.from_energy_bounds("1 TeV", "300 TeV", nbin=7, name="energy")
    probe = {"skycoord": SkyCoord("0 deg", "0 deg"), "energy": dst_axis.center[3]}

    # WcsNDMap: linear interpolation onto a finer WCS geometry
    coarse_wcs = WcsGeom.create(
        npix=(5, 3), proj="CAR", binsz=60, axes=[src_axis], skydir=(0, 0)
    )
    src_wcs_map = Map.from_geom(coarse_wcs, unit="")
    src_wcs_map.data = fill_value * np.ones(src_wcs_map.data.shape)
    fine_wcs = WcsGeom.create(
        skydir=(0, 0), width=(10, 10), binsz=0.1 * u.deg, axes=[dst_axis]
    )
    out_wcs = src_wcs_map.interp_to_geom(fine_wcs, method="linear")
    assert_allclose(out_wcs.get_by_coord(probe)[0], fill_value, atol=1e-7)
    assert isinstance(out_wcs, WcsNDMap)
    assert out_wcs.geom == fine_wcs

    # HpxNDMap: default interpolation onto a finer HEALPix geometry
    coarse_hpx = HpxGeom.create(binsz=60, axes=[src_axis], skydir=(0, 0))
    src_hpx_map = Map.from_geom(coarse_hpx, unit="")
    src_hpx_map.data = fill_value * np.ones(src_hpx_map.data.shape)
    fine_hpx = HpxGeom.create(
        skydir=(0, 0), width=10, binsz=0.1 * u.deg, axes=[dst_axis]
    )
    out_hpx = src_hpx_map.interp_to_geom(fine_hpx)
    assert_allclose(out_hpx.get_by_coord(probe)[0], fill_value, atol=1e-7)
    assert isinstance(out_hpx, HpxNDMap)
    assert out_hpx.geom == fine_hpx

    # preserve_counts=True: total content survives resampling to finer pixels
    geom_before = WcsGeom.create(
        skydir=(20, 20),
        width=(5, 5),
        binsz=0.2 * u.deg,
    )
    counts_map = Map.from_geom(geom_before, unit="")
    counts_map.data = fill_value * np.ones(counts_map.data.shape)
    geom_after = WcsGeom.create(
        skydir=(20, 20),
        width=(5, 5),
        binsz=0.1 * u.deg,
    )
    resampled = counts_map.interp_to_geom(geom_after, preserve_counts=True)
    assert np.floor(np.sum(resampled.data)) == np.sum(counts_map.data)
"""Plot an energy dispersion using a gaussian parametrisation""" import matplotlib.pyplot as plt from gammapy.irf import EDispKernel from gammapy.maps import MapAxis energy_axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=10) energy_axis_true = MapAxis.from_energy_bounds( "0.5 TeV", "30 TeV", nbin=10, per_decade=True, name="energy_true" ) edisp = EDispKernel.from_gauss( energy_axis=energy_axis, energy_axis_true=energy_axis_true, sigma=0.1, bias=0 ) edisp.peek() plt.show()
def _default_plot_energy_edges(self):
    """Return 50 energy-bin edges spanning this object's full energy range."""
    e = self.energy
    axis = MapAxis.from_energy_bounds(e.min(), e.max(), 50)
    return axis.edges
print(observations[3].gti) # ## Building 1D datasets from the new observations # # Here we will perform the data reduction in 1D with reflected regions. # # Beware, with small time intervals the background normalization with OFF regions might become problematic. # ### Defining the geometry # # We need to define the ON extraction region. We will keep the same reco and true energy axes as in 3D. # In[ ]: # Target definition e_reco = MapAxis.from_energy_bounds(0.1, 40, 100, "TeV").edges e_true = MapAxis.from_energy_bounds(0.05, 100, 100, "TeV").edges target_position = SkyCoord(83.63308 * u.deg, 22.01450 * u.deg, frame="icrs") on_region_radius = Angle("0.11 deg") on_region = CircleSkyRegion(center=target_position, radius=on_region_radius) # ### Creation of the data reduction makers # # We now create the dataset and background makers for the selected geometry. # In[ ]: dataset_maker = SpectrumDatasetMaker(containment_correction=True, selection=["counts", "aeff", "edisp"]) bkg_maker = ReflectedRegionsBackgroundMaker()