def test_no_likelihood_contribution():
    """Flux-point results are all NaN when the safe mask excludes every bin."""
    dataset = simulate_spectrum_dataset(
        SkyModel(spectral_model=PowerLawSpectralModel(), name="source")
    )
    # from_geom with dtype=bool yields an all-False mask: no bin contributes
    # to the likelihood.
    dataset.mask_safe = RegionNDMap.from_geom(dataset.counts.geom, dtype=bool)

    fpe = FluxPointsEstimator([dataset], e_edges=[1, 3, 10] * u.TeV, source="source")
    fp = fpe.run()

    # With nothing to fit, every fitted quantity must be NaN and counts zero.
    assert np.isnan(fp.table["norm"]).all()
    assert np.isnan(fp.table["norm_err"]).all()
    assert np.isnan(fp.table["norm_ul"]).all()
    assert np.isnan(fp.table["norm_scan"]).all()
    assert_allclose(fp.table["counts"], 0)
def spectrum_dataset():
    """Build a simple `SpectrumDataset` fixture.

    Constant effective area, diagonal energy dispersion and a flat
    background with the last reconstructed-energy bin strongly suppressed.
    """
    e_true = np.logspace(0, 1, 21) * u.TeV
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=4)

    aeff = EffectiveAreaTable.from_constant(value=1e6 * u.m**2, energy=e_true)
    edisp = EDispKernel.from_diagonal_response(e_true, e_reco.edges)

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[e_reco])
    background.data += 3600
    # Suppress the last bin to emulate a nearly background-free bin.
    background.data[-1] *= 1e-3

    return SpectrumDataset(aeff=aeff, livetime="1h", edisp=edisp, background=background)
def test_fake(self):
    """Test the fake dataset.

    Fakes on/off counts with a fixed random state and checks shapes and
    exact Poisson draws against reference values.
    """
    source_model = SkyModel(spectral_model=PowerLawSpectralModel())
    dataset = SpectrumDatasetOnOff(
        name="test",
        counts=self.on_counts,
        counts_off=self.off_counts,
        models=source_model,
        exposure=self.aeff * self.livetime,
        edisp=self.edisp,
        acceptance=RegionNDMap.from_geom(geom=self.on_counts.geom, data=1),
        acceptance_off=RegionNDMap.from_geom(geom=self.off_counts.geom, data=10),
    )
    # Keep a copy so shapes of the faked dataset can be compared.
    real_dataset = dataset.copy()

    background = RegionNDMap.from_geom(dataset.counts.geom)
    background.data += 1
    dataset.fake(npred_background=background, random_state=314)

    assert real_dataset.counts.data.shape == dataset.counts.data.shape
    assert real_dataset.counts_off.data.shape == dataset.counts_off.data.shape
    # Exact values fixed by random_state=314.
    assert dataset.counts_off.data.sum() == 39
    assert dataset.counts.data.sum() == 5
def fpe_map_pwl():
    """Return (datasets, estimator) fixture for a map power-law source.

    The second dataset shares the first dataset's models but carries an
    all-False safe mask, so it should not contribute to the likelihood.
    """
    dataset_1 = simulate_map_dataset(name="test-map-pwl")
    dataset_2 = dataset_1.copy(name="test-map-pwl-2")
    dataset_2.models = dataset_1.models
    # All-False mask excludes dataset_2 entirely.
    dataset_2.mask_safe = RegionNDMap.from_geom(dataset_2.counts.geom, dtype=bool)

    energy_edges = [0.1, 1, 10, 100] * u.TeV
    datasets = [dataset_1, dataset_2]
    fpe = FluxPointsEstimator(energy_edges=energy_edges, norm_n_values=3, source="source")
    return datasets, fpe
def setup(self): self.datasets = _read_hess_obs() # Change threshold to make stuff more interesting geom = self.datasets[0]._geom data = geom.energy_mask(energy_min=1.2 * u.TeV, energy_max=50 * u.TeV) self.datasets[0].mask_safe = RegionNDMap.from_geom(geom=geom, data=data) data = geom.energy_mask(energy_max=20 * u.TeV) self.datasets[1].mask_safe.data &= data self.stacked_dataset = self.datasets[0].copy() self.stacked_dataset.stack(self.datasets[1])
def test_no_likelihood_contribution():
    """Flux points are NaN when the datasets do not constrain the source."""
    dataset = simulate_spectrum_dataset(
        SkyModel(spectral_model=PowerLawSpectralModel(), name="source")
    )

    # Slice taken before the mask is set, so dataset_2 keeps its own mask.
    dataset_2 = dataset.slice_by_idx(slices={"energy": slice(0, 5)})

    # All-False safe mask: the first dataset is fully excluded.
    dataset.mask_safe = RegionNDMap.from_geom(dataset.counts.geom, dtype=bool)

    fpe = FluxPointsEstimator(energy_edges=[1, 3, 10] * u.TeV, source="source")
    table = fpe.run([dataset, dataset_2]).to_table()

    assert np.isnan(table["norm"]).all()
    assert np.isnan(table["norm_err"]).all()
    assert_allclose(table["counts"], 0)
def spectrum_dataset():
    """Build a faked `SpectrumDataset` fixture.

    Power-law source with a constant temporal model, a flat background
    entered through a frozen `BackgroundModel`, unit exposure and three GTIs.
    """
    name = "test"
    energy = np.logspace(-1, 1, 31) * u.TeV
    livetime = 100 * u.s

    pwl = PowerLawSpectralModel(
        index=2.1,
        amplitude="1e5 cm-2 s-1 TeV-1",
        reference="0.1 TeV",
    )
    temp_mod = ConstantTemporalModel()
    model = SkyModel(spectral_model=pwl, temporal_model=temp_mod, name="test-source")

    axis = MapAxis.from_edges(energy, interp="log", name="energy")
    axis_true = MapAxis.from_edges(energy, interp="log", name="energy_true")

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis])

    # Background enters via a model tied to this dataset; its norm is frozen.
    bkg_model = BackgroundModel(background, name=name + "-bkg", datasets_names=[name])
    bkg_model.spectral_model.norm.frozen = True

    models = Models([bkg_model, model])

    exposure = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis_true])
    exposure.quantity = u.Quantity("1 cm2") * livetime

    bkg_rate = np.ones(30) / u.s
    background.quantity = bkg_rate * livetime

    start = [1, 3, 5] * u.day
    stop = [2, 3.5, 6] * u.day
    t_ref = Time(55555, format="mjd")
    gti = GTI.create(start, stop, reference_time=t_ref)

    dataset = SpectrumDataset(
        models=models,
        exposure=exposure,
        name=name,
        gti=gti,
    )
    # Deterministic counts via fixed random state.
    dataset.fake(random_state=23)
    return dataset
def test_fit_range(self):
    """Test fit range without complication of thresholds."""
    geom = self.src.geom
    # All-True safe mask: every bin participates in the fit.
    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    dataset = SpectrumDatasetOnOff(
        counts=self.src, mask_safe=mask_safe
    )
    assert np.sum(dataset.mask_safe) == self.nbins

    e_min, e_max = dataset.energy_range
    assert_allclose(e_max.value, 10)
    assert_allclose(e_min.value, 0.1)
def test_to_from_ogip_files_no_edisp(self, tmp_path):
    """Round-trip a dataset without edisp through OGIP files."""
    mask_safe = RegionNDMap.from_geom(self.on_counts.geom, dtype=bool)
    mask_safe.data += True

    acceptance = RegionNDMap.from_geom(self.on_counts.geom, data=1.0)

    exposure = self.aeff * self.livetime
    exposure.meta["livetime"] = self.livetime

    dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        exposure=exposure,
        mask_safe=mask_safe,
        acceptance=acceptance,
        name="test",
    )
    dataset.write(tmp_path / "pha_obstest.fits")
    newdataset = SpectrumDatasetOnOff.read(tmp_path / "pha_obstest.fits")

    assert_allclose(self.on_counts.data, newdataset.counts.data)
    # Components that were never set must come back as None.
    assert newdataset.counts_off is None
    assert newdataset.edisp is None
    assert newdataset.gti is None
def test_no_edisp(self):
    """Fitting still recovers the index when the dataset has no edisp."""
    dataset = self.datasets[0].copy()

    # Re-express the effective area on a true-energy axis matching counts,
    # so the dataset is consistent without an energy dispersion.
    energy = dataset.counts.geom.axes[0].copy(name="energy_true")
    geom = RegionGeom(region=None, axes=[energy])
    data = dataset.aeff.interp_by_coord(geom.get_coord())

    dataset.aeff = RegionNDMap.from_geom(geom=geom, data=data, unit="cm2")
    dataset.edisp = None
    dataset.models = self.pwl

    fit = Fit([dataset])
    result = fit.run()
    assert_allclose(result.parameters["index"].value, 2.7961, atol=0.02)
def test_region_nd_map_sum_over_axes(region_map):
    """Sum over non-spatial axes, both unweighted and with weights."""
    region_map_summed = region_map.sum_over_axes()

    weights = RegionNDMap.from_geom(region_map.geom, data=1.0)
    # Zero out one energy bin so the weighted sum differs from the plain sum.
    weights.data[5, :, :] = 0
    region_map_summed_weights = region_map.sum_over_axes(weights=weights)

    assert_allclose(region_map_summed.data, 15)
    # Summed map collapses all axes to length 1.
    assert_allclose(
        region_map_summed.data.shape,
        (
            1,
            1,
            1,
        ),
    )
    assert_allclose(region_map_summed_weights.data, 10)
def test_spectrum_dataset_stack_nondiagonal_no_bkg(spectrum_dataset):
    """Stack two datasets with non-diagonal edisp and no background.

    Checks meta-table concatenation, GTI summation, exposure addition and
    the bias/resolution of the stacked energy dispersion kernel.
    """
    energy = spectrum_dataset.counts.geom.axes[0]
    geom = spectrum_dataset.counts.geom.to_image()

    edisp1 = EDispKernelMap.from_gauss(energy, energy, 0.1, 0, geom=geom)
    edisp1.exposure_map.data += 1

    aeff = EffectiveAreaTable.from_parametrization(energy.edges, "HESS").to_region_map(
        geom.region
    )

    geom = spectrum_dataset.counts.geom
    counts = RegionNDMap.from_geom(geom=geom)

    gti = GTI.create(start=0 * u.s, stop=100 * u.s)
    spectrum_dataset1 = SpectrumDataset(
        counts=counts,
        exposure=aeff * gti.time_sum,
        edisp=edisp1,
        meta_table=Table({"OBS_ID": [0]}),
        gti=gti.copy(),
    )

    # Second dataset: broader dispersion, later GTI.
    edisp2 = EDispKernelMap.from_gauss(energy, energy, 0.2, 0.0, geom=geom)
    edisp2.exposure_map.data += 1

    gti2 = GTI.create(start=100 * u.s, stop=200 * u.s)
    spectrum_dataset2 = SpectrumDataset(
        counts=counts,
        exposure=aeff * gti2.time_sum,
        edisp=edisp2,
        meta_table=Table({"OBS_ID": [1]}),
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    assert_allclose(spectrum_dataset1.meta_table["OBS_ID"][0], [0, 1])
    assert spectrum_dataset1.background_model is None
    assert_allclose(spectrum_dataset1.gti.time_sum.to_value("s"), 200)
    assert_allclose(
        spectrum_dataset1.exposure.quantity[2].to_value("m2 s"), 1573851.079861
    )

    kernel = edisp1.get_edisp_kernel()
    assert_allclose(kernel.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
    assert_allclose(kernel.get_resolution(1 * u.TeV), 0.1581, atol=1e-2)
def test_stat_profile(self):
    """Minimum of the stat profile must coincide with the fitted index."""
    geom = self.src.geom
    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    dataset = SpectrumDataset(
        models=self.source_model,
        exposure=self.aeff * self.livetime,
        counts=self.src,
        mask_safe=mask_safe,
    )
    fit = Fit([dataset])
    result = fit.run()

    # Scan +/- 5% around the best-fit index.
    true_idx = result.parameters["index"].value
    values = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
    profile = fit.stat_profile("index", values=values)

    actual = values[np.argmin(profile["stat_scan"])]
    assert_allclose(actual, true_idx, rtol=0.01)
def make_counts(geom, observation):
    """Fill a counts map with the events of an observation.

    Parameters
    ----------
    geom : `~gammapy.maps.RegionGeom`
        Reference map geom.
    observation : `~gammapy.data.Observation`
        Observation container.

    Returns
    -------
    counts : `~gammapy.maps.RegionNDMap`
        Counts map.
    """
    counts_map = RegionNDMap.from_geom(geom)
    counts_map.fill_events(observation.events)
    return counts_map
def read_arf(filename, livetime):
    """Read an ARF file and convert it to an exposure map.

    Parameters
    ----------
    filename : str or `Path`
        PHA file name
    livetime : `Quantity`
        Livetime

    Returns
    -------
    data : `RegionNDMap`
        Exposure map
    """
    effective_area = RegionNDMap.read(filename, format="ogip-arf")
    # Exposure = effective area x livetime; livetime is kept in the metadata.
    exposure_map = effective_area * livetime
    exposure_map.meta["livetime"] = livetime
    return exposure_map
def test_to_from_ogip_files_no_edisp(self, tmp_path):
    """Round-trip a dataset without edisp through the OGIP file helpers."""
    mask_safe = RegionNDMap.from_geom(self.on_counts.geom, dtype=bool)
    mask_safe.data += True

    dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        aeff=self.aeff,
        livetime=self.livetime,
        mask_safe=mask_safe,
        acceptance=1,
        name="test",
    )
    dataset.to_ogip_files(outdir=tmp_path)
    newdataset = SpectrumDatasetOnOff.from_ogip_files(tmp_path / "pha_obstest.fits")

    assert_allclose(self.on_counts.data, newdataset.counts.data)
    # Components that were never set must come back as None.
    assert newdataset.counts_off is None
    assert newdataset.edisp is None
    assert newdataset.gti is None
def spectrum_dataset():
    """Build a `SpectrumDataset` fixture with an `EDispKernelMap`.

    Constant effective area, diagonal dispersion on the background geometry,
    flat background with a suppressed last bin.
    """
    e_true = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=20, name="energy_true")
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=4)

    aeff = EffectiveAreaTable.from_constant(value=1e6 * u.m**2, energy=e_true.edges)

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[e_reco])
    background.data += 3600
    # Suppress the last bin to emulate a nearly background-free bin.
    background.data[-1] *= 1e-3

    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis_true=e_true, energy_axis=e_reco, geom=background.geom
    )

    return SpectrumDataset(aeff=aeff, livetime="1h", edisp=edisp, background=background)
def simulate_spectrum_dataset(model, random_state=0):
    """Simulate a faked `SpectrumDatasetOnOff` for the given source model.

    Parameters
    ----------
    model : `SkyModel`
        Source model used to fake the counts.
    random_state : int
        Seed for the reproducible Poisson draws.

    Returns
    -------
    dataset : `SpectrumDatasetOnOff`
        Faked on/off dataset.
    """
    edges = np.logspace(-0.5, 1.5, 21) * u.TeV
    energy_axis = MapAxis.from_edges(edges, interp="log", name="energy")

    aeff = EffectiveAreaTable.from_parametrization(energy=edges).to_region_map()

    # Frozen power-law background used only to predict the off counts.
    bkg_model = SkyModel(
        spectral_model=PowerLawSpectralModel(
            index=2.5, amplitude="1e-12 cm-2 s-1 TeV-1"
        ),
        name="background",
    )
    bkg_model.spectral_model.amplitude.frozen = True
    bkg_model.spectral_model.index.frozen = True

    geom = RegionGeom(region=None, axes=[energy_axis])
    acceptance = RegionNDMap.from_geom(geom=geom, data=1)
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=energy_axis,
        energy_axis_true=energy_axis.copy(name="energy_true"),
        geom=geom,
    )

    livetime = 100 * u.h
    exposure = aeff * livetime

    dataset = SpectrumDatasetOnOff(
        name="test_onoff",
        exposure=exposure,
        acceptance=acceptance,
        acceptance_off=5,
        edisp=edisp,
    )

    # First predict the background with the frozen model, then fake counts
    # for the actual source model on top of that prediction.
    dataset.models = bkg_model
    bkg_npred = dataset.npred_sig()

    dataset.models = model
    dataset.fake(
        random_state=random_state,
        background_model=BackgroundModel(bkg_npred, datasets_names="test_onoff"),
    )
    return dataset
def test_stat_profile(self):
    """Minimum of the stat profile must coincide with the fitted index."""
    geom = self.src.geom
    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    dataset = SpectrumDataset(
        models=self.source_model,
        exposure=self.exposure,
        counts=self.src,
        mask_safe=mask_safe,
    )
    fit = Fit()
    fit.run(datasets=[dataset])

    # Scan +/- 5% around the fitted index via the parameter's scan_values.
    true_idx = self.source_model.parameters["index"].value
    values = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
    self.source_model.spectral_model.index.scan_values = values

    profile = fit.stat_profile(datasets=[dataset], parameter="index")
    actual = values[np.argmin(profile["stat_scan"])]
    assert_allclose(actual, true_idx, rtol=0.01)
def test_region_nd_map_plot_two_axes():
    """Plotting a map with two non-spatial axes requires an explicit axis_name."""
    energy_axis = MapAxis.from_energy_edges([1, 3, 10] * u.TeV)

    time_ref = Time('1999-01-01T00:00:00.123456789')
    time_axis = TimeMapAxis(
        edges_min=[0, 1, 3] * u.d,
        edges_max=[0.8, 1.9, 5.4] * u.d,
        reference_time=time_ref,
    )

    m = RegionNDMap.create("icrs;circle(0, 0, 1)", axes=[energy_axis, time_axis])
    # Only smoke-testing the plot calls, so unseeded random data is fine here.
    m.data = 10 + np.random.random(m.data.size)

    with mpl_plot_check():
        m.plot(axis_name="energy")

    with mpl_plot_check():
        m.plot(axis_name="time")

    # Ambiguous without axis_name: must raise.
    with pytest.raises(ValueError):
        m.plot()
def test_npred_no_edisp(self):
    """npred of a constant spectrum without edisp matches the analytic value."""
    const = 1 * u.Unit("cm-2 s-1 TeV-1")
    model = SkyModel(spectral_model=ConstantSpectralModel(const=const))
    livetime = 1 * u.s

    # Flat unit effective area on the true-energy axis.
    aeff = RegionNDMap.create(
        region=self.on_region,
        unit="cm2",
        axes=[self.e_reco.copy(name="energy_true")],
    )
    aeff.data += 1

    dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        aeff=aeff,
        models=model,
        livetime=livetime,
    )

    energy = aeff.geom.axes[0].edges
    # Flat spectrum: npred = aeff * energy band * flux * livetime.
    expected = aeff.data[0] * (energy[-1] - energy[0]) * const * livetime

    assert_allclose(dataset.npred_sig().data.sum(), expected.value)
def spectrum_dataset():
    """Build a faked `SpectrumDataset` fixture (aeff/livetime API variant).

    Power-law source with constant temporal model, unit effective area,
    flat background, and three GTIs.
    """
    energy = np.logspace(-1, 1, 31) * u.TeV
    livetime = 100 * u.s

    pwl = PowerLawSpectralModel(
        index=2.1,
        amplitude="1e5 cm-2 s-1 TeV-1",
        reference="0.1 TeV",
    )
    temp_mod = ConstantTemporalModel()
    model = SkyModel(spectral_model=pwl, temporal_model=temp_mod, name="test-source")

    aeff = EffectiveAreaTable.from_constant(energy, "1 cm2")

    axis = MapAxis.from_edges(energy, interp="log", name="energy")
    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis])
    bkg_rate = np.ones(30) / u.s
    background.quantity = bkg_rate * livetime

    start = [1, 3, 5] * u.day
    stop = [2, 3.5, 6] * u.day
    t_ref = Time(55555, format="mjd")
    gti = GTI.create(start, stop, reference_time=t_ref)

    dataset = SpectrumDataset(
        models=model,
        aeff=aeff,
        livetime=livetime,
        background=background,
        name="test",
        gti=gti,
    )
    # Deterministic counts via fixed random state.
    dataset.fake(random_state=23)
    return dataset
def make_counts_off(self, dataset, observation):
    """Make off counts.

    Runs the reflected-regions finder; if any OFF regions are found, the
    events falling into their union are filled into the off-counts map and
    the number of regions is used as the off acceptance. Otherwise off is
    None and the acceptance is zero.

    Parameters
    ----------
    dataset : `SpectrumDataset`
        Spectrum dataset.
    observation : `DatastoreObservation`
        Data store observation.

    Returns
    -------
    counts_off : `RegionNDMap`
        Off counts.
    """
    finder = self._get_finder(dataset, observation)
    finder.run()

    energy_axis = dataset.counts.geom.axes["energy"]

    if len(finder.reflected_regions) > 0:
        # Combine all reflected regions into one compound OFF region.
        region_union = list_to_compound_region(finder.reflected_regions)
        wcs = finder.reference_map.geom.wcs
        geom = RegionGeom.create(region=region_union, axes=[energy_axis], wcs=wcs)
        counts_off = RegionNDMap.from_geom(geom=geom)
        counts_off.fill_events(observation.events)
        acceptance_off = len(finder.reflected_regions)
    else:
        # if no OFF regions are found, off is set to None and acceptance_off to zero
        log.warning(
            f"ReflectedRegionsBackgroundMaker failed. No OFF region found outside exclusion mask for {dataset.name}."
        )
        counts_off = None
        acceptance_off = 0
    return counts_off, acceptance_off
def make_exposure(self, geom, observation):
    """Make exposure.

    Evaluates the effective area at the pointing offset, optionally applies
    PSF containment correction (circular regions only), and multiplies by
    the observation live time.

    Parameters
    ----------
    geom : `~gammapy.maps.RegionGeom`
        Reference map geom.
    observation: `~gammapy.data.Observation`
        Observation to compute effective area for.

    Returns
    -------
    exposure : `~gammapy.irf.EffectiveAreaTable`
        Exposure map.
    """
    offset = observation.pointing_radec.separation(geom.center_skydir)
    energy = geom.axes["energy_true"]

    data = observation.aeff.data.evaluate(offset=offset, energy_true=energy.center)

    if self.containment_correction:
        if not isinstance(geom.region, CircleSkyRegion):
            raise TypeError(
                "Containment correction only supported for circular regions."
            )
        psf = observation.psf.to_energy_dependent_table_psf(theta=offset)
        # Scale the effective area by the PSF containment fraction inside
        # the region radius.
        containment = psf.containment(energy.center, geom.region.radius)
        data *= containment.squeeze()

    data = data * observation.observation_live_time_duration
    meta = {"livetime": observation.observation_live_time_duration}
    return RegionNDMap.from_geom(geom, data=data.value, unit=data.unit, meta=meta)
def test_lightcurve_estimator_spectrum_datasets_withmaskfit():
    """Light curve on hourly bins with a restricted fit-energy mask."""
    # Doing a LC on one hour bin
    datasets = get_spectrum_datasets()
    time_intervals = [
        Time(["2010-01-01T00:00:00", "2010-01-01T01:00:00"]),
        Time(["2010-01-01T01:00:00", "2010-01-01T02:00:00"]),
    ]

    # Restrict the fit to 1-3 TeV via mask_fit on each dataset.
    e_min_fit = 1 * u.TeV
    e_max_fit = 3 * u.TeV
    for dataset in datasets:
        geom = dataset.counts.geom
        data = geom.energy_mask(emin=e_min_fit, emax=e_max_fit)
        dataset.mask_fit = RegionNDMap.from_geom(geom, data=data, dtype=bool)

    steps = ["err", "counts", "ts", "norm-scan"]
    estimator = LightCurveEstimator(
        energy_range=[1, 100] * u.TeV, norm_n_values=3, time_intervals=time_intervals
    )
    lightcurve = estimator.run(datasets, steps=steps)

    assert_allclose(lightcurve.table["time_min"], [55197.0, 55197.041667])
    assert_allclose(lightcurve.table["time_max"], [55197.041667, 55197.083333])
    assert_allclose(lightcurve.table["stat"], [6.60304, 0.421047], rtol=1e-3)
    assert_allclose(lightcurve.table["norm"], [0.885082, 0.967022], rtol=1e-3)
def make_observation_list():
    """obs with dummy IRF.

    Builds two `SpectrumDatasetOnOff` datasets that share the same on
    counts, exposure and edisp but differ in off counts, off acceptance
    and GTIs.
    """
    nbin = 3
    energy = np.logspace(-1, 1, nbin + 1) * u.TeV
    livetime = 2 * u.h

    data_on = np.arange(nbin)
    dataoff_1 = np.ones(3)
    dataoff_2 = np.ones(3) * 3
    # Empty middle bin in both off spectra.
    dataoff_1[1] = 0
    dataoff_2[1] = 0

    axis = MapAxis.from_edges(energy, name="energy", interp="log")
    axis_true = axis.copy(name="energy_true")

    geom = RegionGeom(region=None, axes=[axis])
    geom_true = RegionGeom(region=None, axes=[axis_true])

    on_vector = RegionNDMap.from_geom(geom=geom, data=data_on)
    off_vector1 = RegionNDMap.from_geom(geom=geom, data=dataoff_1)
    off_vector2 = RegionNDMap.from_geom(geom=geom, data=dataoff_2)

    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    acceptance = RegionNDMap.from_geom(geom=geom, data=1)
    acceptance_off_1 = RegionNDMap.from_geom(geom=geom, data=2)
    acceptance_off_2 = RegionNDMap.from_geom(geom=geom, data=4)

    aeff = RegionNDMap.from_geom(geom_true, data=1, unit="m2")
    edisp = EDispKernelMap.from_gauss(
        energy_axis=axis, energy_axis_true=axis_true, sigma=0.2, bias=0, geom=geom
    )

    time_ref = Time("2010-01-01")
    gti1 = make_gti({
        "START": [5, 6, 1, 2],
        "STOP": [8, 7, 3, 4]
    }, time_ref=time_ref)
    gti2 = make_gti({"START": [14], "STOP": [15]}, time_ref=time_ref)

    exposure = aeff * livetime
    exposure.meta["livetime"] = livetime

    obs1 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector1,
        exposure=exposure,
        edisp=edisp,
        mask_safe=mask_safe,
        acceptance=acceptance.copy(),
        acceptance_off=acceptance_off_1,
        name="1",
        gti=gti1,
    )
    obs2 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector2,
        exposure=exposure.copy(),
        edisp=edisp,
        mask_safe=mask_safe,
        acceptance=acceptance.copy(),
        acceptance_off=acceptance_off_2,
        name="2",
        gti=gti2,
    )

    obs_list = [obs1, obs2]
    return obs_list
def setup(self):
    """Build the common on/off spectrum fixture.

    On/off regions, flat effective area, diagonal energy dispersion and
    one `SpectrumDatasetOnOff` with deliberately empty last bins to
    exercise statistics with zero counts.
    """
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = MapAxis.from_energy_edges(etrue, name="energy_true")
    ereco = np.logspace(-1, 1, 5) * u.TeV
    elo = ereco[:-1]
    ehi = ereco[1:]
    self.e_reco = MapAxis.from_energy_edges(ereco, name="energy")

    start = u.Quantity([0], "s")
    stop = u.Quantity([1000], "s")
    time_ref = Time("2010-01-01 00:00:00.0")
    self.gti = GTI.create(start, stop, time_ref)
    self.livetime = self.gti.time_sum

    self.on_region = make_region("icrs;circle(0.,1.,0.1)")
    off_region = make_region("icrs;box(0.,1.,0.1, 0.2,30)")
    self.off_region = off_region.union(
        make_region("icrs;box(-1.,-1.,0.1, 0.2,150)"))
    self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

    self.aeff = RegionNDMap.create(
        region=self.on_region, wcs=self.wcs, axes=[self.e_true], unit="cm2"
    )
    self.aeff.data += 1

    data = np.ones(elo.shape)
    data[-1] = 0  # to test stats calculation with empty bins

    axis = MapAxis.from_edges(ereco, name="energy", interp="log")
    self.on_counts = RegionNDMap.create(
        region=self.on_region,
        wcs=self.wcs,
        axes=[axis],
        meta={"EXPOSURE": self.livetime.to_value("s")},
    )
    self.on_counts.data += 1
    self.on_counts.data[-1] = 0

    self.off_counts = RegionNDMap.create(
        region=self.off_region, wcs=self.wcs, axes=[axis]
    )
    self.off_counts.data += 10

    acceptance = RegionNDMap.from_geom(self.on_counts.geom)
    acceptance.data += 1

    # NOTE(review): this `data` array is rebuilt here but not passed to
    # acceptance_off below — presumably intentional, but worth confirming.
    data = np.ones(elo.shape)
    data[-1] = 0

    acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
    acceptance_off.data += 10

    self.edisp = EDispKernelMap.from_diagonal_response(
        self.e_reco, self.e_true, self.on_counts.geom.to_image())

    exposure = self.aeff * self.livetime
    exposure.meta["livetime"] = self.livetime

    mask_safe = RegionNDMap.from_geom(self.on_counts.geom, dtype=bool)
    mask_safe.data += True

    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        exposure=exposure,
        edisp=self.edisp,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name="test",
        gti=self.gti,
        mask_safe=mask_safe,
    )
def test_spectrum_dataset_stack_nondiagonal_no_bkg(spectrum_dataset):
    """Stack two datasets with non-diagonal edisp and no background (new API).

    Checks meta-table concatenation, GTI summation, exposure addition and
    the bias/resolution of the stacked energy dispersion kernel.
    """
    energy = spectrum_dataset.counts.geom.axes["energy"]
    geom = spectrum_dataset.counts.geom

    edisp1 = EDispKernelMap.from_gauss(
        energy_axis=energy,
        energy_axis_true=energy.copy(name="energy_true"),
        sigma=0.1,
        bias=0,
        geom=geom.to_image(),
    )
    edisp1.exposure_map.data += 1

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy.copy(name="energy_true"), instrument="HESS")

    livetime = 100 * u.s
    geom_true = geom.as_energy_true
    exposure = make_map_exposure_true_energy(
        geom=geom_true, livetime=livetime, pointing=geom_true.center_skydir, aeff=aeff
    )

    geom = spectrum_dataset.counts.geom
    counts = RegionNDMap.from_geom(geom=geom)

    gti = GTI.create(start=0 * u.s, stop=livetime)
    spectrum_dataset1 = SpectrumDataset(
        counts=counts,
        exposure=exposure,
        edisp=edisp1,
        meta_table=Table({"OBS_ID": [0]}),
        gti=gti.copy(),
    )

    # Second dataset: broader dispersion, later GTI.
    edisp2 = EDispKernelMap.from_gauss(
        energy_axis=energy,
        energy_axis_true=energy.copy(name="energy_true"),
        sigma=0.2,
        bias=0.0,
        geom=geom,
    )
    edisp2.exposure_map.data += 1

    gti2 = GTI.create(start=100 * u.s, stop=200 * u.s)
    spectrum_dataset2 = SpectrumDataset(
        counts=counts,
        exposure=exposure.copy(),
        edisp=edisp2,
        meta_table=Table({"OBS_ID": [1]}),
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    assert_allclose(spectrum_dataset1.meta_table["OBS_ID"][0], [0, 1])
    assert spectrum_dataset1.background_model is None
    assert_allclose(spectrum_dataset1.gti.time_sum.to_value("s"), 200)
    assert_allclose(
        spectrum_dataset1.exposure.quantity[2].to_value("m2 s"), 1573851.079861
    )

    kernel = edisp1.get_edisp_kernel()
    assert_allclose(kernel.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
    assert_allclose(kernel.get_resolution(1 * u.TeV), 0.1581, atol=1e-2)
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    """Stack datasets with diagonal edisp and differing safe masks (new API).

    The second dataset masks out the first bin, so the first stacked bin
    keeps only the first dataset's contribution; all other bins double.
    """
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=30, name="energy_true"
    )

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy_true, instrument="HESS")

    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)

    geom_true = geom.as_energy_true
    exposure = make_map_exposure_true_energy(
        geom=geom_true, livetime=livetime, pointing=geom_true.center_skydir, aeff=aeff
    )

    edisp = EDispKernelMap.from_diagonal_response(
        energy, energy_true, geom=geom.to_image()
    )
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    background = spectrum_dataset.background

    mask_safe = RegionNDMap.from_geom(geom=geom, dtype=bool)
    mask_safe.data += True

    spectrum_dataset1 = SpectrumDataset(
        name="ds1",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure.copy(),
        edisp=edisp.copy(),
        background=background.copy(),
        gti=gti.copy(),
        mask_safe=mask_safe,
    )

    # Second dataset: half the livetime, doubled background, first bin unsafe.
    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)

    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    exposure2 = exposure.copy()

    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]
    spectrum_dataset2 = SpectrumDataset(
        name="ds2",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    # Bins above the mask doubled; first bin only from dataset 1.
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(
        spectrum_dataset1.exposure.quantity[0], 4.755644e09 * u.Unit("cm2 s")
    )
    assert_allclose(spectrum_dataset1.background.data[1:], 3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])

    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    # First row halved by the partial exposure weighting of the stacked edisp.
    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    """Stack datasets with diagonal edisp and differing safe masks (old API).

    The second dataset masks out the first bin, so the first stacked bin
    keeps only the first dataset's contribution; all other bins double.
    """
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=30, name="energy_true"
    )

    aeff = EffectiveAreaTable.from_parametrization(
        energy.edges, "HESS").to_region_map(geom.region)

    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)

    exposure = aeff * livetime

    edisp = EDispKernelMap.from_diagonal_response(
        energy, energy_true, geom=geom.to_image()
    )
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    background = spectrum_dataset.background_model.map.copy()

    spectrum_dataset1 = SpectrumDataset(
        name="ds1",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure.copy(),
        edisp=edisp.copy(),
        models=BackgroundModel(background, name="ds1-bkg", datasets_names=["ds1"]),
        gti=gti.copy(),
    )

    # Second dataset: half the livetime, doubled aeff and background,
    # first bin excluded by its safe mask.
    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)
    aeff2 = aeff * 2

    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    exposure2 = aeff2 * livetime2

    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]
    spectrum_dataset2 = SpectrumDataset(
        name="ds2",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure2,
        edisp=edisp,
        models=BackgroundModel(bkg2, name="ds2-bkg", datasets_names=["ds2"]),
        mask_safe=safe_mask2,
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    # Bins above the mask doubled; first bin only from dataset 1.
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(spectrum_dataset1.exposure.data[0], 4.755644e+09)
    assert_allclose(
        spectrum_dataset1.background_model.map.data[1:], 3 * background.data[1:]
    )
    assert_allclose(
        spectrum_dataset1.background_model.map.data[0], background.data[0]
    )
    assert_allclose(
        spectrum_dataset1.exposure.quantity.to_value("m2s"),
        2 * (aeff * livetime).quantity.to_value("m2s"),
    )

    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    # First row halved by the partial exposure weighting of the stacked edisp.
    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])