def setup(self):
    """Build a 30-bin mock dataset: power-law source + flat background, constant aeff."""
    self.nbins = 30
    edges = np.logspace(-1, 1, self.nbins + 1) * u.TeV
    self.source_model = PowerLawSpectralModel(
        index=2.1, amplitude=1e5 * u.Unit("cm-2 s-1 TeV-1"), reference=0.1 * u.TeV
    )
    self.livetime = 100 * u.s
    aeff = EffectiveAreaTable.from_constant(edges, "1 cm2")

    # Flat background: 1 count/s per bin over the livetime.
    rate = np.ones(self.nbins) / u.s
    bkg_expected = (rate * self.livetime).to_value("")
    self.bkg = CountsSpectrum(energy_lo=edges[:-1], energy_hi=edges[1:], data=bkg_expected)

    # Expected counts = integral flux * area * livetime, plus background.
    rng = get_random_state(23)
    flux = self.source_model.integral(edges[:-1], edges[1:])
    self.npred = (flux * aeff.data.data[0] * self.livetime).to_value("")
    self.npred += bkg_expected

    self.src = CountsSpectrum(
        energy_lo=edges[:-1], energy_hi=edges[1:], data=rng.poisson(self.npred)
    )
    self.dataset = SpectrumDataset(
        model=self.source_model,
        counts=self.src,
        aeff=aeff,
        livetime=self.livetime,
        background=self.bkg,
    )
def test_spectrum_dataset_stack_nondiagonal_no_bkg(self):
    """Stack two datasets with Gaussian energy dispersion and no background."""
    edges = self.src.energy.edges
    aeff = EffectiveAreaTable.from_parametrization(edges, "HESS")
    edisp1 = EDispKernel.from_gauss(edges, edges, 0.1, 0.0)
    dataset1 = SpectrumDataset(
        counts=None, livetime=self.livetime, aeff=aeff, edisp=edisp1, background=None
    )

    aeff2 = EffectiveAreaTable(edges[:-1], edges[1:], aeff.data.data)
    edisp2 = EDispKernel.from_gauss(edges, edges, 0.2, 0.0)
    dataset2 = SpectrumDataset(
        counts=self.src.copy(),
        livetime=self.livetime,
        aeff=aeff2,
        edisp=edisp2,
        background=None,
    )
    dataset1.stack(dataset2)

    # Stacked dataset keeps no counts/background; livetime and aeff combine.
    assert dataset1.counts is None
    assert dataset1.background is None
    assert dataset1.livetime == 2 * self.livetime
    assert_allclose(dataset1.aeff.data.data.to_value("m2"), aeff.data.data.to_value("m2"))
    assert_allclose(dataset1.edisp.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
    assert_allclose(dataset1.edisp.get_resolution(1 * u.TeV), 0.1581, atol=1e-2)
def setup(self):
    """Build a 30-bin mock dataset: power-law source + flat background, no IRFs."""
    self.nbins = 30
    edges = np.logspace(-1, 1, self.nbins + 1) * u.TeV
    self.source_model = PowerLawSpectralModel(
        index=2.1, amplitude=1e5 / u.TeV / u.s, reference=0.1 * u.TeV
    )
    self.livetime = 100 * u.s

    # Flat background of 1 count/s per bin.
    bkg_expected = (np.ones(self.nbins) / u.s) * self.livetime
    self.bkg = CountsSpectrum(energy_lo=edges[:-1], energy_hi=edges[1:], data=bkg_expected)

    # Expected counts: integrated model flux over the livetime, plus background.
    rng = get_random_state(23)
    self.npred = self.source_model.integral(edges[:-1], edges[1:]) * self.livetime
    self.npred += bkg_expected
    self.src = CountsSpectrum(
        energy_lo=edges[:-1], energy_hi=edges[1:], data=rng.poisson(self.npred)
    )
    self.dataset = SpectrumDataset(
        model=self.source_model,
        counts=self.src,
        livetime=self.livetime,
        background=self.bkg,
    )
def test_set_model(self):
    """Setting a model after construction exposes its parameters."""
    edges = self.src.energy.edges
    aeff = EffectiveAreaTable.from_parametrization(edges, "HESS")
    edisp = EnergyDispersion.from_diagonal_response(edges, edges)
    dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff, edisp, self.bkg)

    # No model assigned yet: parameter access must raise.
    with pytest.raises(AttributeError):
        dataset.parameters

    dataset.model = self.source_model
    assert dataset.parameters[0] == self.source_model.parameters[0]
def spectrum_dataset_crab_fine():
    """Empty fine-binned spectrum dataset on the Crab nebula position."""
    e_true = np.logspace(-2, 2.5, 109) * u.TeV
    e_reco = np.logspace(-2, 2, 73) * u.TeV
    center = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")
    region = CircleSkyRegion(center, Angle(0.11, "deg"))
    return SpectrumDataset.create(e_reco, e_true, region=region)
def test_set_model(self):
    """Assigning a SkyModel directly or via a Models container registers it by name."""
    edges = self.src.energy.edges
    aeff = EffectiveAreaTable.from_parametrization(edges, "HESS")
    edisp = EDispKernel.from_diagonal_response(edges, edges)
    dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff, edisp, self.bkg)

    model = SkyModel(spectral_model=PowerLawSpectralModel(), name="test")
    dataset.models = model
    assert dataset.models["test"] is model

    dataset.models = Models([model])
    assert dataset.models["test"] is model
def spectrum_dataset_gc():
    """Empty spectrum dataset on the Galactic centre position."""
    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    center = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    region = CircleSkyRegion(center, Angle(0.11, "deg"))
    return SpectrumDataset.create(e_reco, e_true, region=region)
def test_spectrum_dataset_stack_diagonal_safe_mask(self):
    """Stacking honours the safe mask: the masked bin of dataset2 is excluded."""
    edges = self.src.energy.edges
    aeff = EffectiveAreaTable.from_parametrization(edges, "HESS")
    edisp = EDispKernel.from_diagonal_response(edges, edges)
    dataset1 = SpectrumDataset(
        counts=self.src.copy(),
        livetime=self.livetime,
        aeff=aeff,
        edisp=edisp,
        background=self.bkg.copy(),
    )

    # Second dataset: half livetime, doubled area and background,
    # with the first energy bin flagged unsafe.
    aeff2 = EffectiveAreaTable(edges[:-1], edges[1:], 2 * aeff.data.data)
    bkg2 = CountsSpectrum(edges[:-1], edges[1:], data=2 * self.bkg.data)
    safe_mask2 = np.ones_like(self.src.data, bool)
    safe_mask2[0] = False
    dataset2 = SpectrumDataset(
        counts=self.src.copy(),
        livetime=0.5 * self.livetime,
        aeff=aeff2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
    )
    dataset1.stack(dataset2)

    # Masked bin 0 only keeps dataset1's contribution.
    assert_allclose(dataset1.counts.data[1:], self.src.data[1:] * 2)
    assert_allclose(dataset1.counts.data[0], self.src.data[0])
    assert dataset1.livetime == 1.5 * self.livetime
    assert_allclose(dataset1.background.data[1:], 3 * self.bkg.data[1:])
    assert_allclose(dataset1.background.data[0], self.bkg.data[0])
    assert_allclose(
        dataset1.aeff.data.data.to_value("m2"),
        4.0 / 3 * aeff.data.data.to_value("m2"),
    )
    assert_allclose(dataset1.edisp.pdf_matrix[1:], edisp.pdf_matrix[1:])
    assert_allclose(dataset1.edisp.pdf_matrix[0], 0.5 * edisp.pdf_matrix[0])
def test_stack_no_livetime():
    """Stacking a dataset whose livetime is unset must raise ValueError."""
    e_reco = np.logspace(0, 1, 3) * u.TeV
    dataset_1 = SpectrumDataset.create(e_reco=e_reco)
    dataset_1.livetime = None
    with pytest.raises(ValueError):
        dataset_1.stack(dataset_1.copy())
def _spectrum_extraction(self):
    """Run all steps for the spectrum extraction."""
    log.info("Reducing spectrum datasets.")
    datasets_settings = self.config.datasets

    # Build the circular ON region from the configured sky position and radius.
    on_lon = datasets_settings.on_region.lon
    on_lat = datasets_settings.on_region.lat
    on_center = SkyCoord(on_lon, on_lat, frame=datasets_settings.on_region.frame)
    on_region = CircleSkyRegion(on_center, datasets_settings.on_region.radius)

    # Dataset maker: counts + IRFs, optional containment correction.
    maker_config = {}
    if datasets_settings.containment_correction:
        maker_config[
            "containment_correction"] = datasets_settings.containment_correction
    e_reco = self._make_energy_axis(
        datasets_settings.geom.axes.energy).edges
    maker_config["selection"] = ["counts", "aeff", "edisp"]
    dataset_maker = SpectrumDatasetMaker(**maker_config)

    # Reflected-regions background maker, optionally with an exclusion mask.
    bkg_maker_config = {}
    if datasets_settings.background.exclusion:
        exclusion_region = Map.read(datasets_settings.background.exclusion)
        bkg_maker_config["exclusion_mask"] = exclusion_region
    bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)

    # Safe-range maker configured from the safe_mask section.
    safe_mask_selection = self.config.datasets.safe_mask.methods
    safe_mask_settings = self.config.datasets.safe_mask.settings
    safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                    **safe_mask_settings)

    # Reference geometry copied for every observation below.
    e_true = self._make_energy_axis(
        datasets_settings.geom.axes.energy_true).edges
    reference = SpectrumDataset.create(e_reco=e_reco,
                                       e_true=e_true,
                                       region=on_region)

    datasets = []
    for obs in self.observations:
        log.info(f"Processing observation {obs.obs_id}")
        dataset = dataset_maker.run(reference.copy(), obs)
        dataset = bkg_maker.run(dataset, obs)
        # Observations for which no OFF region was found are dropped.
        if dataset.counts_off is None:
            log.info(
                f"No OFF region found for observation {obs.obs_id}. Discarding."
            )
            continue
        dataset = safe_mask_maker.run(dataset, obs)
        log.debug(dataset)
        datasets.append(dataset)

    self.datasets = Datasets(datasets)

    # Optionally reduce everything to a single stacked dataset.
    if self.config.datasets.stack:
        stacked = self.datasets.stack_reduce(name="stacked")
        self.datasets = Datasets([stacked])
def test_cash(self):
    """Simple CASH fit to the on vector"""
    dataset = SpectrumDataset(model=self.source_model, counts=self.src)

    npred = dataset.npred().data
    assert_allclose(npred[5], 660.5171, rtol=1e-5)

    stat_val = dataset.likelihood()
    assert_allclose(stat_val, -107346.5291, rtol=1e-5)

    # Start the fit away from the true index.
    self.source_model.parameters["index"].value = 1.12
    result = Fit([dataset]).run()

    # These values are check with sherpa fits, do not change
    pars = result.parameters
    assert_allclose(pars["index"].value, 1.995525, rtol=1e-3)
    assert_allclose(pars["amplitude"].value, 100245.9, rtol=1e-3)
def test_incorrect_mask(self):
    """A float-typed fit mask must be rejected with ValueError."""
    bad_mask = np.ones(self.nbins, dtype=np.dtype("float"))
    with pytest.raises(ValueError):
        SpectrumDataset(
            models=self.source_model,
            counts=self.src,
            livetime=self.livetime,
            mask_fit=bad_mask,
            background=self.bkg,
        )
def data(self) -> SpectrumDataset:
    """Actual event data in form of a SpectrumDataset. """
    empty = SpectrumDataset.create(
        e_reco=self.energy_axis, e_true=self.energy_axis, region=self.on_region
    )
    maker = SpectrumDatasetMaker(
        containment_correction=False, selection=["background", "aeff", "edisp"]
    )
    dataset = maker.run(empty, self.obs)

    # Attach the true model and draw simulated counts from it.
    dataset.models = self.true_model
    dataset.fake()
    return dataset
def test_likelihood_profile(self):
    """The likelihood-profile minimum coincides with the fitted index."""
    dataset = SpectrumDataset(
        model=self.source_model,
        counts=self.src,
        mask_safe=np.ones(self.src.energy.nbin, dtype=bool),
    )
    fit = Fit([dataset])
    result = fit.run()

    # Scan +-5% around the best-fit index and locate the minimum.
    true_idx = result.parameters["index"].value
    scan = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
    profile = fit.likelihood_profile("index", values=scan)
    best = scan[np.argmin(profile["likelihood"])]
    assert_allclose(best, true_idx, rtol=0.01)
def spectrum_dataset():
    """Dataset with constant effective area, diagonal edisp and a flat background."""
    e_true = np.logspace(0, 1, 21) * u.TeV
    e_reco = np.logspace(0, 1, 5) * u.TeV
    aeff = EffectiveAreaTable.from_constant(value=1e6 * u.m**2, energy=e_true)
    edisp = EDispKernel.from_diagonal_response(e_true, e_reco)

    # Flat background, last bin reduced by a factor 1000.
    data = 3600 * np.ones(4)
    data[-1] *= 1e-3
    background = CountsSpectrum(energy_lo=e_reco[:-1], energy_hi=e_reco[1:], data=data)
    return SpectrumDataset(aeff=aeff, livetime="1h", edisp=edisp, background=background)
def test_spectrumdataset_create(self):
    """`SpectrumDataset.create` yields an empty, fully initialised dataset."""
    e_reco = u.Quantity([0.1, 1, 10.0], "TeV")
    e_true = u.Quantity([0.05, 0.5, 5, 20.0], "TeV")
    empty = SpectrumDataset.create(e_reco, e_true)

    assert empty.counts.total_counts == 0
    assert empty.data_shape[0] == 2
    assert empty.background.total_counts == 0
    assert empty.background.energy.nbin == 2
    assert empty.aeff.data.axis("energy").nbin == 3
    assert empty.edisp.data.axis("e_reco").nbin == 2
    assert empty.livetime.value == 0
    assert len(empty.gti.table) == 0
    assert empty.energy_range[0] is None
    assert_allclose(empty.mask_safe, 0)
def test_npred_models(self):
    """npred sums the predicted counts of all assigned models."""
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3).edges
    dataset = SpectrumDataset.create(e_reco=e_reco)
    dataset.livetime = 1 * u.h
    dataset.aeff.data.data += 1e10 * u.Unit("cm2")

    # Two identical power laws: predicted counts double.
    dataset.models = Models([
        SkyModel(spectral_model=PowerLawSpectralModel(index=2)),
        SkyModel(spectral_model=PowerLawSpectralModel(index=2)),
    ])
    assert_allclose(dataset.npred().data.sum(), 64.8)
def test_run(observations, phase_bkg_maker):
    """Phase background maker produces the expected ON/OFF acceptances and counts."""
    maker = SpectrumDatasetMaker()
    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    center = SkyCoord("08h35m20.65525s", "-45d10m35.1545s", frame="icrs")
    region = SphericalCircleSkyRegion(center, Angle(0.2, "deg"))
    dataset_empty = SpectrumDataset.create(e_reco, e_true, region=region)

    obs = observations["111630"]
    dataset = maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)

    assert_allclose(dataset_on_off.acceptance, 0.1)
    assert_allclose(dataset_on_off.acceptance_off, 0.3)
    assert_allclose(dataset_on_off.counts.data.sum(), 28)
    assert_allclose(dataset_on_off.counts_off.data.sum(), 57)
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    """When no reflected OFF regions are found, counts_off stays None."""
    center = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    region = CircleSkyRegion(center, Angle(0.11, "deg"))
    maker = SpectrumDatasetMaker(selection=["counts"])

    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    dataset_empty = SpectrumDataset.create(e_reco=e_reco, e_true=e_true, region=region)

    datasets = []
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        datasets.append(reflected_bkg_maker.run(dataset, obs))

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
def test_reflected_bkg_maker(on_region, reflected_bkg_maker, observations):
    """Reflected-regions background yields the expected OFF counts and region count."""
    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    dataset_empty = SpectrumDataset.create(e_reco=e_reco, e_true=e_true, region=on_region)
    maker = SpectrumDatasetMaker(selection=["counts"])

    datasets = []
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        datasets.append(reflected_bkg_maker.run(dataset, obs))

    assert_allclose(datasets[0].counts_off.data.sum(), 76)
    assert_allclose(datasets[1].counts_off.data.sum(), 60)

    regions_0 = compound_region_to_list(datasets[0].counts_off.region)
    regions_1 = compound_region_to_list(datasets[1].counts_off.region)
    assert_allclose(len(regions_0), 11)
    assert_allclose(len(regions_1), 11)
def to_spectrum_dataset(self, on_region, containment_correction=False):
    """Return a ~gammapy.spectrum.SpectrumDataset from on_region.

    Counts and background are summed in the on_region. Effective area is
    taken from the average exposure divided by the livetime. Here we assume
    it is the sum of the GTIs. EnergyDispersion is obtained at the
    on_region center. Only regions with centers are supported.

    Parameters
    ----------
    on_region : `~regions.SkyRegion`
        the input ON region on which to extract the spectrum
    containment_correction : bool
        Apply containment correction for point sources and circular on regions

    Returns
    -------
    dataset : `~gammapy.spectrum.SpectrumDataset`
        the resulting reduced dataset
    """
    # Livetime is defined by the GTIs; without them it cannot be computed.
    if self.gti is not None:
        livetime = self.gti.time_sum
    else:
        raise ValueError("No GTI in `MapDataset`, cannot compute livetime")

    if self.counts is not None:
        counts = self.counts.get_spectrum(on_region, np.sum)
    else:
        counts = None

    if self.background_model is not None:
        background = self.background_model.evaluate().get_spectrum(
            on_region, np.sum
        )
    else:
        background = None

    if self.exposure is not None:
        # Mean exposure over the region divided by livetime -> effective area.
        exposure = self.exposure.get_spectrum(on_region, np.mean)
        aeff = EffectiveAreaTable(
            energy_lo=exposure.energy.edges[:-1],
            energy_hi=exposure.energy.edges[1:],
            data=exposure.data / livetime,
        )
    else:
        aeff = None

    if containment_correction:
        if not isinstance(on_region, CircleSkyRegion):
            raise TypeError(
                "Containement correction is only supported for"
                " `CircleSkyRegion`."
            )
        elif self.psf is None or isinstance(self.psf, PSFKernel):
            raise ValueError("No PSFMap set. Containement correction impossible")
        else:
            psf_table = self.psf.get_energy_dependent_table_psf(on_region.center)
            aeff = apply_containment_fraction(aeff, psf_table, on_region.radius)

    if self.edisp is not None:
        if isinstance(self.edisp, EnergyDispersion):
            edisp = self.edisp
        else:
            # BUG FIX: the extracted kernel was previously discarded, leaving
            # `edisp` unbound (UnboundLocalError at the return below) whenever
            # self.edisp was not already an EnergyDispersion.
            edisp = self.edisp.get_energy_dispersion(
                on_region.center, self._energy_axis
            )
    else:
        edisp = None

    return SpectrumDataset(
        counts=counts,
        background=background,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        gti=self.gti,
        name=self.name,
    )
class TestSpectrumDataset:
    """Test fit on counts spectra without any IRFs"""

    def setup(self):
        # 30 log-spaced bins between 0.1 and 10 TeV.
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLaw(index=2.1,
                                     amplitude=1e5 / u.TeV / u.s,
                                     reference=0.1 * u.TeV)
        self.livetime = 100 * u.s
        # Flat background: 1 count/s per bin over the livetime.
        bkg_rate = np.ones(self.nbins) / u.s
        bkg_expected = bkg_rate * self.livetime
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_expected)
        random_state = get_random_state(23)
        # Expected counts: integrated model flux over the livetime, plus background.
        self.npred = (self.source_model.integral(binning[:-1], binning[1:]) *
                      self.livetime)
        self.npred += bkg_expected
        source_counts = random_state.poisson(self.npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        self.dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            livetime=self.livetime,
            background=self.bkg,
        )

    def test_data_shape(self):
        assert self.dataset.data_shape[0] == self.nbins

    def test_energy_range(self):
        energy_range = self.dataset.energy_range
        assert energy_range.unit == u.TeV
        assert_allclose(energy_range.to_value("TeV"), [0.1, 10.0])

    def test_cash(self):
        """Simple CASH fit to the on vector"""
        fit = Fit(self.dataset)
        result = fit.run()
        assert result.success
        assert "minuit" in repr(result)
        npred = self.dataset.npred().data.sum()
        assert_allclose(npred, self.npred.sum(), rtol=1e-3)
        assert_allclose(result.total_stat, -18087404.624, rtol=1e-3)
        pars = result.parameters
        assert_allclose(pars["index"].value, 2.1, rtol=1e-2)
        assert_allclose(pars.error("index"), 0.00127, rtol=1e-2)
        assert_allclose(pars["amplitude"].value, 1e5, rtol=1e-3)
        assert_allclose(pars.error("amplitude"), 153.450, rtol=1e-2)

    def test_fake(self):
        """Test the fake dataset"""
        real_dataset = self.dataset.copy()
        self.dataset.fake(314)
        # Faking preserves shape and background but resamples the counts.
        assert real_dataset.counts.data.shape == self.dataset.counts.data.shape
        assert real_dataset.background.data.sum(
        ) == self.dataset.background.data.sum()
        assert int(real_dataset.counts.data.sum()) == 907010
        assert self.dataset.counts.data.sum() == 907331

    def test_incorrect_mask(self):
        # A float-typed mask must be rejected.
        mask_fit = np.ones(self.nbins, dtype=np.dtype("float"))
        with pytest.raises(ValueError):
            SpectrumDataset(
                model=self.source_model,
                counts=self.src,
                livetime=self.livetime,
                mask_fit=mask_fit,
                background=self.bkg,
            )

    def test_set_model(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EnergyDispersion.from_diagonal_response(
            self.src.energy.edges, self.src.energy.edges)
        dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff,
                                  edisp, self.bkg)
        # No model yet: parameter access raises.
        with pytest.raises(AttributeError):
            dataset.parameters
        dataset.model = self.source_model
        assert dataset.parameters[0] == self.source_model.parameters[0]

    def test_str(self):
        assert "SpectrumDataset" in str(self.dataset)
# ## Spatial analysis # # See other notebooks for how to run a 3D cube or 2D image based analysis. # ## Spectrum # # We'll run a spectral analysis using the classical reflected regions background estimation method, # and using the on-off (often called WSTAT) likelihood function. # In[ ]: e_reco = np.logspace(-1, np.log10(40), 40) * u.TeV e_true = np.logspace(np.log10(0.05), 2, 200) * u.TeV dataset_empty = SpectrumDataset.create(e_reco=e_reco, e_true=e_true, region=on_region) # In[ ]: dataset_maker = SpectrumDatasetMaker(containment_correction=False, selection=["counts", "aeff", "edisp"]) bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask) safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10) # In[ ]: get_ipython().run_cell_magic( 'time', '', 'datasets = []\n\nfor observation in observations:\n dataset = dataset_maker.run(dataset_empty, observation)\n dataset_on_off = bkg_maker.run(dataset, observation)\n dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)\n datasets.append(dataset_on_off)' )
class TestSpectrumDataset:
    """Test fit on counts spectra without any IRFs"""

    def setup(self):
        # 30 log-spaced bins between 0.1 and 10 TeV.
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2.1,
                                                  amplitude=1e5 / u.TeV / u.s,
                                                  reference=0.1 * u.TeV)
        self.livetime = 100 * u.s
        # Flat background: 1 count/s per bin over the livetime.
        bkg_rate = np.ones(self.nbins) / u.s
        bkg_expected = bkg_rate * self.livetime
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_expected)
        random_state = get_random_state(23)
        # Expected counts: integrated model flux over the livetime, plus background.
        self.npred = (self.source_model.integral(binning[:-1], binning[1:]) *
                      self.livetime)
        self.npred += bkg_expected
        source_counts = random_state.poisson(self.npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        self.dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            livetime=self.livetime,
            background=self.bkg,
        )

    def test_data_shape(self):
        assert self.dataset.data_shape[0] == self.nbins

    def test_energy_range(self):
        energy_range = self.dataset.energy_range
        assert energy_range.unit == u.TeV
        assert_allclose(energy_range.to_value("TeV"), [0.1, 10.0])

    def test_cash(self):
        """Simple CASH fit to the on vector"""
        fit = Fit(self.dataset)
        result = fit.run()
        assert result.success
        assert "minuit" in repr(result)
        npred = self.dataset.npred().data.sum()
        assert_allclose(npred, self.npred.sum(), rtol=1e-3)
        assert_allclose(result.total_stat, -18087404.624, rtol=1e-3)
        pars = result.parameters
        assert_allclose(pars["index"].value, 2.1, rtol=1e-2)
        assert_allclose(pars.error("index"), 0.00127, rtol=1e-2)
        assert_allclose(pars["amplitude"].value, 1e5, rtol=1e-3)
        assert_allclose(pars.error("amplitude"), 153.450, rtol=1e-2)

    def test_fake(self):
        """Test the fake dataset"""
        real_dataset = self.dataset.copy()
        self.dataset.fake(314)
        # Faking preserves shape and background but resamples the counts.
        assert real_dataset.counts.data.shape == self.dataset.counts.data.shape
        assert real_dataset.background.data.sum(
        ) == self.dataset.background.data.sum()
        assert int(real_dataset.counts.data.sum()) == 907010
        assert self.dataset.counts.data.sum() == 907331

    def test_incorrect_mask(self):
        # A float-typed mask must be rejected.
        mask_fit = np.ones(self.nbins, dtype=np.dtype("float"))
        with pytest.raises(ValueError):
            SpectrumDataset(
                model=self.source_model,
                counts=self.src,
                livetime=self.livetime,
                mask_fit=mask_fit,
                background=self.bkg,
            )

    def test_set_model(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EnergyDispersion.from_diagonal_response(
            self.src.energy.edges, self.src.energy.edges)
        dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff,
                                  edisp, self.bkg)
        # No model yet: parameter access raises.
        with pytest.raises(AttributeError):
            dataset.parameters
        dataset.model = self.source_model
        assert dataset.parameters[0] == self.source_model.parameters[0]

    def test_str(self):
        assert "SpectrumDataset" in str(self.dataset)

    def test_spectrumdataset_create(self):
        # `create` builds an empty dataset with all attributes initialised.
        e_reco = u.Quantity([0.1, 1, 10.0], "TeV")
        e_true = u.Quantity([0.05, 0.5, 5, 20.0], "TeV")
        empty_dataset = SpectrumDataset.create(e_reco, e_true)
        assert empty_dataset.counts.total_counts == 0
        assert empty_dataset.data_shape[0] == 2
        assert empty_dataset.background.total_counts == 0
        assert empty_dataset.background.energy.nbin == 2
        assert empty_dataset.aeff.data.axis("energy").nbin == 3
        assert empty_dataset.edisp.data.axis("e_reco").nbin == 2
        assert empty_dataset.livetime.value == 0
        assert len(empty_dataset.gti.table) == 0
        assert empty_dataset.energy_range[0] is None
        assert_allclose(empty_dataset.mask_safe, 0)

    def test_spectrum_dataset_stack_diagonal_safe_mask(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EnergyDispersion.from_diagonal_response(
            self.src.energy.edges, self.src.energy.edges)
        livetime = self.livetime
        dataset1 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime,
            aeff=aeff,
            edisp=edisp,
            background=self.bkg.copy(),
        )
        # Second dataset: half livetime, doubled area and background,
        # with the first energy bin flagged unsafe.
        livetime2 = 0.5 * livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:],
                                   2 * aeff.data.data)
        bkg2 = CountsSpectrum(
            self.src.energy.edges[:-1],
            self.src.energy.edges[1:],
            data=2 * self.bkg.data,
        )
        safe_mask2 = np.ones_like(self.src.data, bool)
        safe_mask2[0] = False
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp,
            background=bkg2,
            mask_safe=safe_mask2,
        )
        dataset1.stack(dataset2)
        # Masked bin 0 only keeps dataset1's contribution.
        assert_allclose(dataset1.counts.data[1:], self.src.data[1:] * 2)
        assert_allclose(dataset1.counts.data[0], self.src.data[0])
        assert dataset1.livetime == 1.5 * self.livetime
        assert_allclose(dataset1.background.data[1:], 3 * self.bkg.data[1:])
        assert_allclose(dataset1.background.data[0], self.bkg.data[0])
        assert_allclose(
            dataset1.aeff.data.data.to_value("m2"),
            4.0 / 3 * aeff.data.data.to_value("m2"),
        )
        assert_allclose(dataset1.edisp.pdf_matrix[1:], edisp.pdf_matrix[1:])
        assert_allclose(dataset1.edisp.pdf_matrix[0], 0.5 * edisp.pdf_matrix[0])

    def test_spectrum_dataset_stack_nondiagonal_no_bkg(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp1 = EnergyDispersion.from_gauss(self.src.energy.edges,
                                             self.src.energy.edges, 0.1, 0.0)
        livetime = self.livetime
        dataset1 = SpectrumDataset(counts=None,
                                   livetime=livetime,
                                   aeff=aeff,
                                   edisp=edisp1,
                                   background=None)
        livetime2 = livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:], aeff.data.data)
        edisp2 = EnergyDispersion.from_gauss(self.src.energy.edges,
                                             self.src.energy.edges, 0.2, 0.0)
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp2,
            background=None,
        )
        dataset1.stack(dataset2)
        # Stacked dataset keeps no counts/background; livetime and aeff combine.
        assert dataset1.counts is None
        assert dataset1.background is None
        assert dataset1.livetime == 2 * self.livetime
        assert_allclose(dataset1.aeff.data.data.to_value("m2"),
                        aeff.data.data.to_value("m2"))
        assert_allclose(dataset1.edisp.get_bias(1 * u.TeV), 0.0, atol=1e-3)
        assert_allclose(dataset1.edisp.get_resolution(1 * u.TeV),
                        0.1581,
                        atol=1e-2)
# Select all Crab observations from the data store.
mask = data_store.obs_table["TARGET_NAME"] == "Crab"
obs_ids = data_store.obs_table["OBS_ID"][mask].data
observations = data_store.get_observations(obs_ids)

crab_position = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")

# The ON region center is defined in the icrs frame. The angle is defined w.r.t. to its axis.
rectangle = RectangleSkyRegion(center=crab_position,
                               width=0.5 * u.deg,
                               height=0.4 * u.deg,
                               angle=0 * u.deg)

# Makers: reflected-regions background and counts-only dataset reduction.
bkg_maker = ReflectedRegionsBackgroundMaker(min_distance=0.1 * u.rad)
dataset_maker = SpectrumDatasetMaker(selection=["counts"])
dataset_empty = SpectrumDataset.create(e_reco=np.logspace(-1, 2, 30) * u.TeV,
                                       region=rectangle)

datasets = []
for obs in observations:
    dataset = dataset_maker.run(dataset_empty, obs)
    dataset_on_off = bkg_maker.run(observation=obs, dataset=dataset)
    datasets.append(dataset_on_off)

# Plot the ON rectangle and the reflected OFF regions on a sky map.
m = Map.create(skydir=crab_position, width=(8, 8), proj="TAN")
_, ax, _ = m.plot(vmin=-1, vmax=0)
rectangle.to_pixel(ax.wcs).plot(ax=ax, color="black")
plot_spectrum_datasets_off_regions(datasets=datasets, ax=ax)
plt.loglog()
print(cta_irf["aeff"].data)

# In[ ]:

# Energy dispersion at the chosen offset, on the same true/reco energy grid.
edisp = cta_irf["edisp"].to_energy_dispersion(offset=offset,
                                              e_true=energy,
                                              e_reco=energy)
edisp.plot_matrix()
print(edisp.data)

# In[ ]:

# Build a dataset with the IRFs and reference model, then simulate counts.
dataset = SpectrumDataset(aeff=aeff,
                          edisp=edisp,
                          model=model_ref,
                          livetime=livetime,
                          obs_id=0)
dataset.fake(random_state=42)

# In[ ]:

# Take a quick look at the simulated counts
dataset.counts.plot()

# ## Include Background
#
# In this section we will include a background component. Furthermore, we will also simulate more than one observation and fit each one individually in order to get average fit results.

# In[ ]:
class TestSpectrumDataset:
    """Test fit on counts spectra without any IRFs"""

    def setup(self):
        # 30 log-spaced bins between 0.1 and 10 TeV.
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = SkyModel(spectral_model=PowerLawSpectralModel(
            index=2.1,
            amplitude=1e5 * u.Unit("cm-2 s-1 TeV-1"),
            reference=0.1 * u.TeV,
        ))
        self.livetime = 100 * u.s
        aeff = EffectiveAreaTable.from_constant(binning, "1 cm2")
        # Flat background: 1 count/s per bin over the livetime.
        bkg_rate = np.ones(self.nbins) / u.s
        bkg_expected = (bkg_rate * self.livetime).to_value("")
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_expected)
        random_state = get_random_state(23)
        # Expected counts: integral flux * area * livetime, plus background.
        flux = self.source_model.spectral_model.integral(
            binning[:-1], binning[1:])
        self.npred = (flux * aeff.data.data[0] * self.livetime).to_value("")
        self.npred += bkg_expected
        source_counts = random_state.poisson(self.npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        self.dataset = SpectrumDataset(
            models=self.source_model,
            counts=self.src,
            aeff=aeff,
            livetime=self.livetime,
            background=self.bkg,
            name="test",
        )

    def test_data_shape(self):
        assert self.dataset.data_shape[0] == self.nbins

    def test_energy_range(self):
        energy_range = self.dataset.energy_range
        assert energy_range.unit == u.TeV
        assert_allclose(energy_range.to_value("TeV"), [0.1, 10.0])

    def test_cash(self):
        """Simple CASH fit to the on vector"""
        fit = Fit([self.dataset])
        result = fit.run()
        # assert result.success
        assert "minuit" in repr(result)
        npred = self.dataset.npred().data.sum()
        assert_allclose(npred, self.npred.sum(), rtol=1e-3)
        assert_allclose(result.total_stat, -18087404.624, rtol=1e-3)
        pars = result.parameters
        assert_allclose(pars["index"].value, 2.1, rtol=1e-2)
        assert_allclose(pars.error("index"), 0.00127, rtol=1e-2)
        assert_allclose(pars["amplitude"].value, 1e5, rtol=1e-3)
        assert_allclose(pars.error("amplitude"), 153.450, rtol=1e-2)

    def test_fake(self):
        """Test the fake dataset"""
        real_dataset = self.dataset.copy()
        self.dataset.fake(314)
        # Faking preserves shape and background but resamples the counts.
        assert real_dataset.counts.data.shape == self.dataset.counts.data.shape
        assert real_dataset.background.data.sum(
        ) == self.dataset.background.data.sum()
        assert int(real_dataset.counts.data.sum()) == 907010
        assert self.dataset.counts.data.sum() == 907331

    def test_incorrect_mask(self):
        # A float-typed mask must be rejected.
        mask_fit = np.ones(self.nbins, dtype=np.dtype("float"))
        with pytest.raises(ValueError):
            SpectrumDataset(
                models=self.source_model,
                counts=self.src,
                livetime=self.livetime,
                mask_fit=mask_fit,
                background=self.bkg,
            )

    def test_set_model(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EDispKernel.from_diagonal_response(self.src.energy.edges,
                                                   self.src.energy.edges)
        dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff,
                                  edisp, self.bkg)
        # A single model or a Models container both register by name.
        spectral_model = PowerLawSpectralModel()
        model = SkyModel(spectral_model=spectral_model, name="test")
        dataset.models = model
        assert dataset.models["test"] is model
        models = Models([model])
        dataset.models = models
        assert dataset.models["test"] is model

    def test_npred_models(self):
        e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3).edges
        dataset = SpectrumDataset.create(e_reco=e_reco)
        dataset.livetime = 1 * u.h
        dataset.aeff.data.data += 1e10 * u.Unit("cm2")
        # Two identical power laws: predicted counts double.
        pwl_1 = PowerLawSpectralModel(index=2)
        pwl_2 = PowerLawSpectralModel(index=2)
        model_1 = SkyModel(spectral_model=pwl_1)
        model_2 = SkyModel(spectral_model=pwl_2)
        dataset.models = Models([model_1, model_2])
        npred = dataset.npred()
        assert_allclose(npred.data.sum(), 64.8)

    def test_str(self):
        assert "SpectrumDataset" in str(self.dataset)

    def test_spectrumdataset_create(self):
        # `create` builds an empty dataset with all attributes initialised.
        e_reco = u.Quantity([0.1, 1, 10.0], "TeV")
        e_true = u.Quantity([0.05, 0.5, 5, 20.0], "TeV")
        empty_dataset = SpectrumDataset.create(e_reco, e_true, name="test")
        assert empty_dataset.name == "test"
        assert empty_dataset.counts.total_counts == 0
        assert empty_dataset.data_shape[0] == 2
        assert empty_dataset.background.total_counts == 0
        assert empty_dataset.background.energy.nbin == 2
        assert empty_dataset.aeff.data.axis("energy").nbin == 3
        assert empty_dataset.edisp.data.axis("e_reco").nbin == 2
        assert empty_dataset.livetime.value == 0
        assert len(empty_dataset.gti.table) == 0
        assert empty_dataset.energy_range[0] is None
        assert_allclose(empty_dataset.mask_safe, 0)

    def test_spectrum_dataset_stack_diagonal_safe_mask(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EDispKernel.from_diagonal_response(self.src.energy.edges,
                                                   self.src.energy.edges)
        livetime = self.livetime
        dataset1 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime,
            aeff=aeff,
            edisp=edisp,
            background=self.bkg.copy(),
        )
        # Second dataset: half livetime, doubled area and background,
        # with the first energy bin flagged unsafe.
        livetime2 = 0.5 * livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:],
                                   2 * aeff.data.data)
        bkg2 = CountsSpectrum(
            self.src.energy.edges[:-1],
            self.src.energy.edges[1:],
            data=2 * self.bkg.data,
        )
        safe_mask2 = np.ones_like(self.src.data, bool)
        safe_mask2[0] = False
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp,
            background=bkg2,
            mask_safe=safe_mask2,
        )
        dataset1.stack(dataset2)
        # Masked bin 0 only keeps dataset1's contribution.
        assert_allclose(dataset1.counts.data[1:], self.src.data[1:] * 2)
        assert_allclose(dataset1.counts.data[0], self.src.data[0])
        assert dataset1.livetime == 1.5 * self.livetime
        assert_allclose(dataset1.background.data[1:], 3 * self.bkg.data[1:])
        assert_allclose(dataset1.background.data[0], self.bkg.data[0])
        assert_allclose(
            dataset1.aeff.data.data.to_value("m2"),
            4.0 / 3 * aeff.data.data.to_value("m2"),
        )
        assert_allclose(dataset1.edisp.pdf_matrix[1:], edisp.pdf_matrix[1:])
        assert_allclose(dataset1.edisp.pdf_matrix[0], 0.5 * edisp.pdf_matrix[0])

    def test_spectrum_dataset_stack_nondiagonal_no_bkg(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp1 = EDispKernel.from_gauss(self.src.energy.edges,
                                        self.src.energy.edges, 0.1, 0.0)
        livetime = self.livetime
        dataset1 = SpectrumDataset(counts=None,
                                   livetime=livetime,
                                   aeff=aeff,
                                   edisp=edisp1,
                                   background=None)
        livetime2 = livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:], aeff.data.data)
        edisp2 = EDispKernel.from_gauss(self.src.energy.edges,
                                        self.src.energy.edges, 0.2, 0.0)
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp2,
            background=None,
        )
        dataset1.stack(dataset2)
        # Stacked dataset keeps no counts/background; livetime and aeff combine.
        assert dataset1.counts is None
        assert dataset1.background is None
        assert dataset1.livetime == 2 * self.livetime
        assert_allclose(dataset1.aeff.data.data.to_value("m2"),
                        aeff.data.data.to_value("m2"))
        assert_allclose(dataset1.edisp.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
        assert_allclose(dataset1.edisp.get_resolution(1 * u.TeV),
                        0.1581,
                        atol=1e-2)

    def test_info_dict(self):
        info_dict = self.dataset.info_dict()
        assert_allclose(info_dict["n_on"], 907010)
        assert_allclose(info_dict["background"], 3000.0)
        assert_allclose(info_dict["significance"], 2924.522174)
        assert_allclose(info_dict["excess"], 904010)
        assert_allclose(info_dict["livetime"].value, 1e2)
        assert info_dict["name"] == "test"

    @requires_dependency("matplotlib")
    def test_peek(self):
        # peek() must work both with and without an energy dispersion set.
        with mpl_plot_check():
            self.dataset.peek()
        self.dataset.edisp = None
        with mpl_plot_check():
            self.dataset.peek()

    @requires_dependency("matplotlib")
    def test_plot_fit(self):
        with mpl_plot_check():
            self.dataset.plot_fit()
# Load the IRFs # In this simulation, we use the CTA-1DC irfs shipped with gammapy. irfs = load_cta_irfs( "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits") # In[ ]: obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs) print(obs) # In[ ]: # Make the SpectrumDataset dataset_empty = SpectrumDataset.create(e_reco=energy_axis.edges, e_true=energy_axis_true.edges, region=on_region) maker = SpectrumDatasetMaker(selection=["aeff", "edisp", "background"]) dataset = maker.run(dataset_empty, obs) # In[ ]: # Set the model on the dataset, and fake dataset.model = model dataset.fake(random_state=42) print(dataset) # You can see that backgound counts are now simulated # ### OnOff analysis #