def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=30, name="energy_true"
    )

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy_true, instrument="HESS"
    )

    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)

    geom_true = geom.as_energy_true
    exposure = make_map_exposure_true_energy(
        geom=geom_true,
        livetime=livetime,
        pointing=geom_true.center_skydir,
        aeff=aeff,
    )

    edisp = EDispKernelMap.from_diagonal_response(
        energy, energy_true, geom=geom.to_image()
    )
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    background = spectrum_dataset.background

    mask_safe = RegionNDMap.from_geom(geom=geom, dtype=bool)
    mask_safe.data += True

    spectrum_dataset1 = SpectrumDataset(
        name="ds1",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure.copy(),
        edisp=edisp.copy(),
        background=background.copy(),
        gti=gti.copy(),
        mask_safe=mask_safe,
    )

    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)

    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    exposure2 = exposure.copy()
    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]

    spectrum_dataset2 = SpectrumDataset(
        name="ds2",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(
        spectrum_dataset1.exposure.quantity[0], 4.755644e09 * u.Unit("cm2 s")
    )
    assert_allclose(spectrum_dataset1.background.data[1:], 3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])

    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
def test_spectrum_dataset_stack_nondiagonal_no_bkg(spectrum_dataset):
    energy = spectrum_dataset.counts.geom.axes["energy"]
    geom = spectrum_dataset.counts.geom

    edisp1 = EDispKernelMap.from_gauss(
        energy_axis=energy,
        energy_axis_true=energy.copy(name="energy_true"),
        sigma=0.1,
        bias=0,
        geom=geom.to_image(),
    )
    edisp1.exposure_map.data += 1

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy.copy(name="energy_true"), instrument="HESS"
    )

    livetime = 100 * u.s

    geom_true = geom.as_energy_true
    exposure = make_map_exposure_true_energy(
        geom=geom_true,
        livetime=livetime,
        pointing=geom_true.center_skydir,
        aeff=aeff,
    )

    geom = spectrum_dataset.counts.geom
    counts = RegionNDMap.from_geom(geom=geom)

    gti = GTI.create(start=0 * u.s, stop=livetime)
    spectrum_dataset1 = SpectrumDataset(
        counts=counts,
        exposure=exposure,
        edisp=edisp1,
        meta_table=Table({"OBS_ID": [0]}),
        gti=gti.copy(),
    )

    edisp2 = EDispKernelMap.from_gauss(
        energy_axis=energy,
        energy_axis_true=energy.copy(name="energy_true"),
        sigma=0.2,
        bias=0.0,
        geom=geom,
    )
    edisp2.exposure_map.data += 1

    gti2 = GTI.create(start=100 * u.s, stop=200 * u.s)
    spectrum_dataset2 = SpectrumDataset(
        counts=counts,
        exposure=exposure.copy(),
        edisp=edisp2,
        meta_table=Table({"OBS_ID": [1]}),
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    assert_allclose(spectrum_dataset1.meta_table["OBS_ID"][0], [0, 1])

    assert spectrum_dataset1.background_model is None
    assert_allclose(spectrum_dataset1.gti.time_sum.to_value("s"), 200)
    assert_allclose(
        spectrum_dataset1.exposure.quantity[2].to_value("m2 s"), 1573851.079861
    )

    kernel = edisp1.get_edisp_kernel()
    assert_allclose(kernel.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
    assert_allclose(kernel.get_resolution(1 * u.TeV), 0.1581, atol=1e-2)
def from_ogip_files(cls, filename):
    """Read `~gammapy.datasets.SpectrumDatasetOnOff` from OGIP files.

    BKG file, ARF, and RMF must be set in the PHA header and be present in
    the same folder. The naming scheme is fixed to the following:

    * PHA file is named ``pha_obs{name}.fits``
    * BKG file is named ``bkg_obs{name}.fits``
    * ARF file is named ``arf_obs{name}.fits``
    * RMF file is named ``rmf_obs{name}.fits``

    with ``{name}`` the dataset name.

    Parameters
    ----------
    filename : str
        OGIP PHA file to read
    """
    filename = make_path(filename)
    dirname = filename.parent

    with fits.open(str(filename), memmap=False) as hdulist:
        counts = RegionNDMap.from_hdulist(hdulist, format="ogip")
        acceptance = RegionNDMap.from_hdulist(
            hdulist, format="ogip", ogip_column="BACKSCAL"
        )
        livetime = counts.meta["EXPOSURE"] * u.s

        if "GTI" in hdulist:
            gti = GTI(Table.read(hdulist["GTI"]))
        else:
            gti = None

        mask_safe = RegionNDMap.from_hdulist(
            hdulist, format="ogip", ogip_column="QUALITY"
        )
        mask_safe.data = np.logical_not(mask_safe.data)

    phafile = filename.name

    try:
        rmffile = phafile.replace("pha", "rmf")
        kernel = EDispKernel.read(dirname / rmffile)
        edisp = EDispKernelMap.from_edisp_kernel(kernel, geom=counts.geom)
    except OSError:
        # TODO : Add logger and echo warning
        edisp = None

    try:
        bkgfile = phafile.replace("pha", "bkg")
        with fits.open(str(dirname / bkgfile), memmap=False) as hdulist:
            counts_off = RegionNDMap.from_hdulist(hdulist, format="ogip")
            acceptance_off = RegionNDMap.from_hdulist(
                hdulist, ogip_column="BACKSCAL"
            )
    except OSError:
        # TODO : Add logger and echo warning
        counts_off, acceptance_off = None, None

    arffile = phafile.replace("pha", "arf")
    aeff = RegionNDMap.read(dirname / arffile, format="ogip-arf")
    exposure = aeff * livetime
    exposure.meta["livetime"] = livetime

    if edisp is not None:
        edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    return cls(
        counts=counts,
        exposure=exposure,
        counts_off=counts_off,
        edisp=edisp,
        mask_safe=mask_safe,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name=str(counts.meta["OBS_ID"]),
        gti=gti,
    )
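# Usage sketch (not from the original source): assumes OGIP files following the
# naming scheme documented above, e.g. "pha_obs23523.fits" with the matching
# bkg/arf/rmf files in the same folder; the observation id 23523 is illustrative.
#
#     dataset = SpectrumDatasetOnOff.from_ogip_files("pha_obs23523.fits")
#     print(dataset)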
def make_gti(times, time_ref="2010-01-01"):
    meta = time_ref_to_dict(time_ref)
    table = Table(times, meta=meta)
    return GTI(table)
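# Minimal usage sketch for make_gti (values are illustrative): `times` is a dict of
# columns accepted by `astropy.table.Table`, typically "START" and "STOP" offsets
# relative to `time_ref`.
#
#     gti = make_gti({"START": [1, 5] * u.s, "STOP": [4, 6] * u.s})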
def test_stack_npred():
    pwl = PowerLawSpectralModel()
    gauss = GaussianSpatialModel(sigma="0.2 deg")
    model = SkyModel(pwl, gauss)

    axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=5)
    axis_etrue = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=11, name="energy_true"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis],
    )

    dataset_1 = MapDataset.create(
        geom,
        energy_axis_true=axis_etrue,
        name="dataset-1",
        gti=GTI.create("0 min", "30 min"),
    )
    dataset_1.psf = None
    dataset_1.exposure.data += 1
    dataset_1.mask_safe.data = geom.energy_mask(energy_min=1 * u.TeV)
    dataset_1.background.data += 1

    bkg_model_1 = FoVBackgroundModel(dataset_name=dataset_1.name)
    dataset_1.models = [model, bkg_model_1]

    dataset_2 = MapDataset.create(
        geom,
        energy_axis_true=axis_etrue,
        name="dataset-2",
        gti=GTI.create("30 min", "60 min"),
    )
    dataset_2.psf = None
    dataset_2.exposure.data += 1
    dataset_2.mask_safe.data = geom.energy_mask(energy_min=0.2 * u.TeV)
    dataset_2.background.data += 1

    bkg_model_2 = FoVBackgroundModel(dataset_name=dataset_2.name)
    dataset_2.models = [model, bkg_model_2]

    npred_1 = dataset_1.npred()
    npred_1.data[~dataset_1.mask_safe.data] = 0
    npred_2 = dataset_2.npred()
    npred_2.data[~dataset_2.mask_safe.data] = 0

    stacked_npred = Map.from_geom(geom)
    stacked_npred.stack(npred_1)
    stacked_npred.stack(npred_2)

    stacked = MapDataset.create(geom, energy_axis_true=axis_etrue, name="stacked")
    stacked.stack(dataset_1)
    stacked.stack(dataset_2)

    npred_stacked = stacked.npred()

    assert_allclose(npred_stacked.data, stacked_npred.data)
"""Example of how to create an ObservationCTA from CTA's 1DC""" from gammapy.data import ObservationCTA, EventList, GTI from gammapy.irf import ( EnergyDependentMultiGaussPSF, EffectiveAreaTable2D, EnergyDispersion2D, Background3D, ) filename = "$GAMMAPY_EXTRA/datasets/cta-1dc/data/baseline/gps/gps_baseline_110380.fits" event_list = EventList.read(filename) gti = GTI.read(filename) filename = ( "$GAMMAPY_EXTRA/datasets/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits" ) aeff = EffectiveAreaTable2D.read(filename) bkg = Background3D.read(filename) edisp = EnergyDispersion2D.read(filename, hdu="Energy Dispersion") psf = EnergyDependentMultiGaussPSF.read(filename, hdu="Point Spread Function") obs = ObservationCTA( obs_id=event_list.table.meta["OBS_ID"], events=event_list, gti=gti, psf=psf, aeff=aeff, edisp=edisp, bkg=bkg, pointing_radec=event_list.pointing_radec, observation_live_time_duration=event_list.observation_live_time_duration,
def setup(self):
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = MapAxis.from_energy_edges(etrue, name="energy_true")

    ereco = np.logspace(-1, 1, 5) * u.TeV
    elo = ereco[:-1]
    self.e_reco = MapAxis.from_energy_edges(ereco, name="energy")

    start = u.Quantity([0], "s")
    stop = u.Quantity([1000], "s")
    time_ref = Time("2010-01-01 00:00:00.0")
    self.gti = GTI.create(start, stop, time_ref)
    self.livetime = self.gti.time_sum

    self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

    self.aeff = RegionNDMap.create(
        region="icrs;circle(0.,1.,0.1)",
        wcs=self.wcs,
        axes=[self.e_true],
        unit="cm2",
    )
    self.aeff.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0  # to test stats calculation with empty bins

    axis = MapAxis.from_edges(ereco, name="energy", interp="log")
    self.on_counts = RegionNDMap.create(
        region="icrs;circle(0.,1.,0.1)",
        wcs=self.wcs,
        axes=[axis],
        meta={"EXPOSURE": self.livetime.to_value("s")},
    )
    self.on_counts.data += 1
    self.on_counts.data[-1] = 0

    self.off_counts = RegionNDMap.create(
        region="icrs;box(0.,1.,0.1, 0.2,30);box(-1.,-1.,0.1, 0.2,150)",
        wcs=self.wcs,
        axes=[axis],
    )
    self.off_counts.data += 10

    acceptance = RegionNDMap.from_geom(self.on_counts.geom)
    acceptance.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0

    acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
    acceptance_off.data += 10

    self.edisp = EDispKernelMap.from_diagonal_response(
        self.e_reco, self.e_true, self.on_counts.geom.to_image()
    )

    exposure = self.aeff * self.livetime
    exposure.meta["livetime"] = self.livetime

    mask_safe = RegionNDMap.from_geom(self.on_counts.geom, dtype=bool)
    mask_safe.data += True

    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        exposure=exposure,
        edisp=self.edisp,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name="test",
        gti=self.gti,
        mask_safe=mask_safe,
    )
def test_map_dataset_fits_io(tmp_path, sky_model, geom, geom_etrue):
    dataset = get_map_dataset(geom, geom_etrue)

    bkg_model = FoVBackgroundModel(dataset_name=dataset.name)
    dataset.models = [sky_model, bkg_model]

    dataset.counts = dataset.npred()
    dataset.mask_safe = dataset.mask_fit

    gti = GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00")
    dataset.gti = gti

    hdulist = dataset.to_hdulist()
    actual = [hdu.name for hdu in hdulist]

    desired = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "BACKGROUND",
        "BACKGROUND_BANDS",
        "EDISP",
        "EDISP_BANDS",
        "EDISP_EXPOSURE",
        "EDISP_EXPOSURE_BANDS",
        "PSF",
        "PSF_BANDS",
        "PSF_EXPOSURE",
        "PSF_EXPOSURE_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "MASK_FIT",
        "MASK_FIT_BANDS",
        "GTI",
    ]

    assert actual == desired

    dataset.write(tmp_path / "test.fits")

    dataset_new = MapDataset.read(tmp_path / "test.fits")
    assert dataset_new.mask.data.dtype == bool

    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(
        dataset.npred_background().data, dataset_new.npred_background().data
    )
    assert_allclose(dataset.edisp.edisp_map.data, dataset_new.edisp.edisp_map.data)
    assert_allclose(dataset.psf.psf_map.data, dataset_new.psf.psf_map.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_fit.data, dataset_new.mask_fit.data)
    assert_allclose(dataset.mask_safe.data, dataset_new.mask_safe.data)

    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert dataset.npred_background().geom == dataset_new.npred_background().geom
    assert dataset.edisp.edisp_map.geom == dataset_new.edisp.edisp_map.geom

    assert_allclose(
        dataset.gti.time_sum.to_value("s"), dataset_new.gti.time_sum.to_value("s")
    )

    # To test io of psf and edisp map
    stacked = MapDataset.create(geom)
    stacked.write(tmp_path / "test-2.fits", overwrite=True)

    stacked1 = MapDataset.read(tmp_path / "test-2.fits")

    assert stacked1.psf.psf_map is not None
    assert stacked1.psf.exposure_map is not None
    assert stacked1.edisp.edisp_map is not None
    assert stacked1.edisp.exposure_map is not None
    assert stacked.mask.data.dtype == bool

    assert_allclose(stacked1.psf.psf_map, stacked.psf.psf_map)
    assert_allclose(stacked1.edisp.edisp_map, stacked.edisp.edisp_map)
def create(
    cls,
    geom,
    geom_irf=None,
    migra_axis=None,
    rad_axis=None,
    reference_time="2000-01-01",
    name="",
    **kwargs,
):
    """Create a MapDataset object with zero-filled maps.

    Parameters
    ----------
    geom : `~gammapy.maps.WcsGeom`
        Reference target geometry in reco energy, used for counts and background maps.
    geom_irf : `~gammapy.maps.WcsGeom`
        Reference image geometry in true energy, used for IRF maps.
    migra_axis : `~gammapy.maps.MapAxis`
        Migration axis for the energy dispersion map.
    rad_axis : `~gammapy.maps.MapAxis`
        Rad axis for the psf map.
    name : str
        Name of the dataset.
    """
    geom_irf = geom_irf or geom.to_binsz(BINSZ_IRF)
    migra_axis = migra_axis or MIGRA_AXIS_DEFAULT
    rad_axis = rad_axis or RAD_AXIS_DEFAULT

    counts = Map.from_geom(geom, unit="")

    background = Map.from_geom(geom, unit="")
    background_model = BackgroundModel(background)

    energy_axis = geom_irf.get_axis_by_name("ENERGY")

    exposure_geom = geom.to_image().to_cube([energy_axis])
    exposure = Map.from_geom(exposure_geom, unit="m2 s")
    exposure_irf = Map.from_geom(geom_irf, unit="m2 s")

    mask_safe = np.zeros(geom.data_shape, dtype=bool)

    gti = GTI.create([] * u.s, [] * u.s, reference_time=reference_time)

    geom_migra = geom_irf.to_image().to_cube([migra_axis, energy_axis])
    edisp_map = Map.from_geom(geom_migra, unit="")
    loc = migra_axis.edges.searchsorted(1.0)
    edisp_map.data[:, loc, :, :] = 1.0
    edisp = EDispMap(edisp_map, exposure_irf)

    geom_rad = geom_irf.to_image().to_cube([rad_axis, energy_axis])
    psf_map = Map.from_geom(geom_rad, unit="sr-1")
    psf = PSFMap(psf_map, exposure_irf)

    return cls(
        counts=counts,
        exposure=exposure,
        psf=psf,
        edisp=edisp,
        background_model=background_model,
        gti=gti,
        mask_safe=mask_safe,
        name=name,
        **kwargs,
    )
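# Usage sketch for `create` (geometry and binning values are illustrative; assumes
# `geom` carries an energy axis so that the default `geom_irf` can be derived from it):
#
#     energy_axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)
#     geom = WcsGeom.create(skydir=(0, 0), width=(2, 2), binsz=0.05, axes=[energy_axis])
#     empty = MapDataset.create(geom, name="empty-dataset")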
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=30, name="energy_true"
    )

    aeff = EffectiveAreaTable.from_parametrization(energy.edges, "HESS").to_region_map(
        geom.region
    )

    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)

    exposure = aeff * livetime

    edisp = EDispKernelMap.from_diagonal_response(
        energy, energy_true, geom=geom.to_image()
    )
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    background = spectrum_dataset.npred_background().copy()

    mask_safe = RegionNDMap.from_geom(geom=geom, dtype=bool)
    mask_safe.data += True

    spectrum_dataset1 = SpectrumDataset(
        name="ds1",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure.copy(),
        edisp=edisp.copy(),
        background=background,
        gti=gti.copy(),
        mask_safe=mask_safe,
    )

    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)
    aeff2 = aeff * 2

    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    exposure2 = aeff2 * livetime2
    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]

    spectrum_dataset2 = SpectrumDataset(
        name="ds2",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data

    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(spectrum_dataset1.exposure.data[0], 4.755644e09)
    assert_allclose(
        spectrum_dataset1.npred_background().data[1:], 3 * background.data[1:]
    )
    assert_allclose(spectrum_dataset1.npred_background().data[0], background.data[0])

    assert_allclose(
        spectrum_dataset1.exposure.quantity.to_value("m2s"),
        2 * (aeff * livetime).quantity.to_value("m2s"),
    )

    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
def setup(self):
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = MapAxis.from_edges(etrue, name="energy_true")

    ereco = np.logspace(-1, 1, 5) * u.TeV
    elo = ereco[:-1]
    ehi = ereco[1:]
    self.e_reco = MapAxis.from_edges(ereco, name="energy")

    start = u.Quantity([0], "s")
    stop = u.Quantity([1000], "s")
    time_ref = Time("2010-01-01 00:00:00.0")
    self.gti = GTI.create(start, stop, time_ref)
    self.livetime = self.gti.time_sum

    self.on_region = make_region("icrs;circle(0.,1.,0.1)")
    off_region = make_region("icrs;box(0.,1.,0.1, 0.2,30)")
    self.off_region = off_region.union(make_region("icrs;box(-1.,-1.,0.1, 0.2,150)"))
    self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

    self.aeff = RegionNDMap.create(
        region=self.on_region, wcs=self.wcs, axes=[self.e_true], unit="cm2"
    )
    self.aeff.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0  # to test stats calculation with empty bins

    axis = MapAxis.from_edges(ereco, name="energy", interp="log")
    self.on_counts = RegionNDMap.create(
        region=self.on_region, wcs=self.wcs, axes=[axis]
    )
    self.on_counts.data += 1
    self.on_counts.data[-1] = 0

    self.off_counts = RegionNDMap.create(
        region=self.off_region, wcs=self.wcs, axes=[axis]
    )
    self.off_counts.data += 10

    acceptance = RegionNDMap.from_geom(self.on_counts.geom)
    acceptance.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0

    acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
    acceptance_off.data += 10

    self.edisp = EDispKernelMap.from_diagonal_response(
        self.e_reco, self.e_true, self.on_counts.geom
    )

    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        aeff=self.aeff,
        edisp=self.edisp,
        livetime=self.livetime,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name="test",
        gti=self.gti,
    )
AVAILABLE_MODELS = [
    "point-pwl",
    "point-ecpl",
    "point-log-parabola",
    "point-pwl2",
    "point-ecpl-3fgl",
    "point-ecpl-4fgl",
    "point-template",
    "diffuse-cube",
    "disk-pwl",
    "gauss-pwl",
    "gauss-pwlsimple",
    "point-pwlsimple",
    "disk-pwlsimple",
    "point-pwltest",
    "test",
]

DPI = 120

# observation config
IRF_FILE = "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
# IRF_FILE = "$GAMMAPY_DATA/cta-prod3b/caldb/data/cta/prod3b-v2/bcf/South_z20_50h/irf_file.fits"

POINTING = SkyCoord(0.0, 0.5, frame="galactic", unit="deg")
LIVETIME = 1 * u.hr
GTI_TABLE = GTI.create(start=0 * u.s, stop=LIVETIME.to(u.s))

# dataset config
ENERGY_AXIS = MapAxis.from_energy_bounds("0.1 TeV", "100 TeV", nbin=10, per_decade=True)
ENERGY_AXIS_TRUE = MapAxis.from_energy_bounds(
    "0.03 TeV", "300 TeV", nbin=20, per_decade=True, name="energy_true"
)
MIGRA_AXIS = MapAxis.from_bounds(0.5, 2, nbin=150, node_type="edges",
def _make_gti(self):
    # Tentative extraction of the GTI
    tstart = self.events.table.meta['TSTART'] * u.s
    tstop = self.events.table.meta['TSTOP'] * u.s
    time_ref = time_ref_from_dict(self.events.table.meta)
    return GTI.create(tstart, tstop, time_ref)
def test_interpolate_map_dataset():
    energy = MapAxis.from_energy_bounds("1 TeV", "300 TeV", nbin=5, name="energy")
    energy_true = MapAxis.from_nodes(
        np.logspace(-1, 3, 20), name="energy_true", interp="log", unit="TeV"
    )

    # make dummy map IRFs
    geom_allsky = WcsGeom.create(
        npix=(5, 3), proj="CAR", binsz=60, axes=[energy], skydir=(0, 0)
    )
    geom_allsky_true = geom_allsky.drop("energy").to_cube([energy_true])

    # background
    geom_background = WcsGeom.create(
        skydir=(0, 0), width=(5, 5), binsz=0.2 * u.deg, axes=[energy]
    )
    value = 30
    bkg_map = Map.from_geom(geom_background, unit="")
    bkg_map.data = value * np.ones(bkg_map.data.shape)

    # effective area - with a gradient that also depends on energy
    aeff_map = Map.from_geom(geom_allsky_true, unit="cm2 s")
    ra_arr = np.arange(aeff_map.data.shape[1])
    dec_arr = np.arange(aeff_map.data.shape[2])
    for i in np.arange(aeff_map.data.shape[0]):
        aeff_map.data[i, :, :] = (
            (i + 1) * 10 * np.meshgrid(dec_arr, ra_arr)[0]
            + 10 * np.meshgrid(dec_arr, ra_arr)[1]
            + 10
        )
    aeff_map.meta["TELESCOP"] = "HAWC"

    # psf map
    width = 0.2 * u.deg
    rad_axis = MapAxis.from_nodes(np.linspace(0, 2, 50), name="rad", unit="deg")
    psfMap = PSFMap.from_gauss(energy_true, rad_axis, width)

    # edispmap
    edispmap = EDispKernelMap.from_gauss(
        energy, energy_true, sigma=0.1, bias=0.0, geom=geom_allsky
    )

    # events and gti
    nr_ev = 10
    ev_t = Table()
    gti_t = Table()

    ev_t["EVENT_ID"] = np.arange(nr_ev)
    ev_t["TIME"] = nr_ev * [Time("2011-01-01 00:00:00", scale="utc", format="iso")]
    ev_t["RA"] = np.linspace(-1, 1, nr_ev) * u.deg
    ev_t["DEC"] = np.linspace(-1, 1, nr_ev) * u.deg
    ev_t["ENERGY"] = np.logspace(0, 2, nr_ev) * u.TeV

    gti_t["START"] = [Time("2010-12-31 00:00:00", scale="utc", format="iso")]
    gti_t["STOP"] = [Time("2011-01-02 00:00:00", scale="utc", format="iso")]

    events = EventList(ev_t)
    gti = GTI(gti_t)

    # define observation
    obs = Observation(
        obs_id=0,
        obs_info={"RA_PNT": 0.0, "DEC_PNT": 0.0},
        gti=gti,
        aeff=aeff_map,
        edisp=edispmap,
        psf=psfMap,
        bkg=bkg_map,
        events=events,
        obs_filter=None,
    )

    # define analysis geometry
    geom_target = WcsGeom.create(
        skydir=(0, 0), width=(5, 5), binsz=0.1 * u.deg, axes=[energy]
    )

    maker = MapDatasetMaker(
        selection=["exposure", "counts", "background", "edisp", "psf"]
    )
    dataset = MapDataset.create(
        geom=geom_target, energy_axis_true=energy_true, rad_axis=rad_axis, name="test"
    )
    dataset = maker.run(dataset, obs)

    # test counts
    assert dataset.counts.data.sum() == nr_ev

    # test background
    assert np.floor(np.sum(dataset.npred_background().data)) == np.sum(bkg_map.data)
    coords_bg = {"skycoord": SkyCoord("0 deg", "0 deg"), "energy": energy.center[0]}
    assert_allclose(
        dataset.npred_background().get_by_coord(coords_bg)[0], 7.5, atol=1e-4
    )

    # test effective area
    coords_aeff = {
        "skycoord": SkyCoord("0 deg", "0 deg"),
        "energy_true": energy_true.center[0],
    }
    assert_allclose(
        aeff_map.get_by_coord(coords_aeff)[0],
        dataset.exposure.interp_by_coord(coords_aeff)[0],
        atol=1e-3,
    )

    # test edispmap
    pdfmatrix_preinterp = edispmap.get_edisp_kernel(
        SkyCoord("0 deg", "0 deg")
    ).pdf_matrix
    pdfmatrix_postinterp = dataset.edisp.get_edisp_kernel(
        SkyCoord("0 deg", "0 deg")
    ).pdf_matrix
    assert_allclose(pdfmatrix_preinterp, pdfmatrix_postinterp, atol=1e-7)

    # test psfmap
    geom_psf = geom_target.drop("energy").to_cube([energy_true])
    psfkernel_preinterp = psfMap.get_psf_kernel(
        position=SkyCoord("0 deg", "0 deg"), geom=geom_psf, max_radius=2 * u.deg
    ).data
    psfkernel_postinterp = dataset.psf.get_psf_kernel(
        position=SkyCoord("0 deg", "0 deg"), geom=geom_psf, max_radius=2 * u.deg
    ).data
    assert_allclose(psfkernel_preinterp, psfkernel_postinterp, atol=1e-4)
def run(self, datasets):
    """Run light curve extraction.

    Normalize integral and energy flux between emin and emax.

    Parameters
    ----------
    datasets : list of `~gammapy.datasets.SpectrumDataset` or `~gammapy.datasets.MapDataset`
        Spectrum or Map datasets.

    Returns
    -------
    lightcurve : `~gammapy.estimators.LightCurve`
        the Light Curve object
    """
    datasets = Datasets(datasets)

    if self.time_intervals is None:
        gti = datasets.gti
    else:
        gti = GTI.from_time_intervals(self.time_intervals)

    gti = gti.union(overlap_ok=False, merge_equal=False)

    rows = []

    for t_min, t_max in progress_bar(gti.time_intervals, desc="Time intervals"):
        datasets_to_fit = datasets.select_time(t_min=t_min, t_max=t_max, atol=self.atol)

        if len(datasets_to_fit) == 0:
            log.debug(f"No Dataset for the time interval {t_min} to {t_max}")
            continue

        row = {"time_min": t_min.mjd, "time_max": t_max.mjd}

        fp = self.estimate_time_bin_flux(datasets_to_fit)
        fp_table = fp.to_table()

        for column in fp_table.colnames:
            if column == "counts":
                data = fp_table[column].quantity.sum(axis=1)
            else:
                data = fp_table[column].quantity
            row[column] = data

        fp_table_flux = fp.to_table(sed_type="flux")
        for column in fp_table_flux.colnames:
            if "flux" in column:
                row[column] = fp_table_flux[column].quantity

        rows.append(row)

    if len(rows) == 0:
        raise ValueError("LightCurveEstimator: No datasets in time intervals")

    table = table_from_row_data(rows=rows, meta={"SED_TYPE": "likelihood"})
    # TODO: use FluxPoints here
    return LightCurve(table=table)
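# Usage sketch for `run` (configuration is illustrative; assumes `datasets` is a list
# of SpectrumDataset or MapDataset objects with models attached and valid GTIs, and
# that this method belongs to LightCurveEstimator, as the error message above suggests):
#
#     estimator = LightCurveEstimator(energy_edges=[1, 10] * u.TeV, source="crab")
#     lightcurve = estimator.run(datasets)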