def read():
    datasets = []
    spatial_model = PointSpatialModel(
        lon_0="-0.05 deg", lat_0="-0.05 deg", frame="galactic"
    )
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="gc-source"
    )
    for ind in range(N_OBS):
        dataset = MapDataset.read(f"dataset-{ind}.fits")
        dataset.models = model
        datasets.append(dataset)
    return datasets

def test_contributes():
    center_sky = SkyCoord(3, 4, unit="deg", frame="galactic")
    circle_sky_12 = CircleSkyRegion(center=center_sky, radius=1 * u.deg)
    axis = MapAxis.from_edges(np.logspace(-1, 1, 3), unit=u.TeV, name="energy")

    geom = WcsGeom.create(skydir=(3, 4), npix=(5, 4), frame="galactic", axes=[axis])
    mask = geom.region_mask([circle_sky_12])

    spatial_model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.9 deg", frame="galactic"
    )
    assert spatial_model.evaluation_region.height == 2 * spatial_model.evaluation_radius

    model4 = SkyModel(
        spatial_model=spatial_model,
        spectral_model=PowerLawSpectralModel(),
        name="source-4",
    )
    assert model4.contributes(mask, margin=0 * u.deg)

def spectrum_dataset():
    # TODO: change the fixture scope to "session". This currently crashes fitting tests
    name = "test"
    energy = np.logspace(-1, 1, 31) * u.TeV
    livetime = 100 * u.s

    pwl = PowerLawSpectralModel(
        index=2.1, amplitude="1e5 cm-2 s-1 TeV-1", reference="0.1 TeV"
    )
    temp_mod = ConstantTemporalModel()
    model = SkyModel(spectral_model=pwl, temporal_model=temp_mod, name="test-source")
    models = Models([model])

    axis = MapAxis.from_edges(energy, interp="log", name="energy")
    axis_true = MapAxis.from_edges(energy, interp="log", name="energy_true")

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis])
    exposure = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis_true])
    exposure.quantity = u.Quantity("1 cm2") * livetime
    bkg_rate = np.ones(30) / u.s
    background.quantity = bkg_rate * livetime

    start = [1, 3, 5] * u.day
    stop = [2, 3.5, 6] * u.day
    t_ref = Time(55555, format="mjd")
    gti = GTI.create(start, stop, reference_time=t_ref)

    dataset = SpectrumDataset(
        models=models,
        exposure=exposure,
        background=background,
        name=name,
        gti=gti,
    )
    dataset.fake(random_state=23)
    return dataset

def simulate_spectrum_dataset(model, random_state=0):
    energy_edges = np.logspace(-0.5, 1.5, 21) * u.TeV
    energy_axis = MapAxis.from_edges(energy_edges, interp="log", name="energy")
    aeff = EffectiveAreaTable.from_parametrization(energy=energy_edges).to_region_map()

    bkg_model = SkyModel(
        spectral_model=PowerLawSpectralModel(
            index=2.5, amplitude="1e-12 cm-2 s-1 TeV-1"
        ),
        name="background",
    )
    bkg_model.spectral_model.amplitude.frozen = True
    bkg_model.spectral_model.index.frozen = True

    geom = RegionGeom(region=None, axes=[energy_axis])
    acceptance = RegionNDMap.from_geom(geom=geom, data=1)
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=energy_axis,
        energy_axis_true=energy_axis.copy(name="energy_true"),
        geom=geom,
    )

    livetime = 100 * u.h
    exposure = aeff * livetime

    dataset = SpectrumDatasetOnOff(
        name="test_onoff",
        exposure=exposure,
        acceptance=acceptance,
        acceptance_off=5,
        edisp=edisp,
    )
    dataset.models = bkg_model
    bkg_npred = dataset.npred_signal()

    dataset.models = model
    dataset.fake(
        random_state=random_state,
        npred_background=bkg_npred,
    )
    return dataset

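# Hedged usage sketch of the helper above. The power-law parameters are
# illustrative assumptions, not values taken from any test in this file:
trial_model = SkyModel(
    spectral_model=PowerLawSpectralModel(
        index=2.0, amplitude="1e-12 cm-2 s-1 TeV-1", reference="1 TeV"
    ),
    name="trial-source",
)
trial_dataset = simulate_spectrum_dataset(trial_model, random_state=0)
print(trial_dataset)
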
def test_integrate_geom():
    model = GaussianSpatialModel(
        lon_0="0d", lat_0="0d", sigma=0.1 * u.deg, frame="icrs"
    )
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    sky_model = SkyModel(spectral_model=spectral_model, spatial_model=model)

    center = SkyCoord("0d", "0d", frame="icrs")
    radius = 0.3 * u.deg
    region = CircleSkyRegion(center, radius)

    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3, name="energy_true")
    geom = RegionGeom(region=region, axes=[axis])

    integral = sky_model.integrate_geom(geom).data
    assert_allclose(integral / 1e-12, [[[5.299]], [[2.460]], [[1.142]]], rtol=1e-3)

def dataset():
    position = SkyCoord(0.0, 0.0, frame="galactic", unit="deg")
    energy_axis = MapAxis.from_bounds(
        1, 10, nbin=3, unit="TeV", name="energy", interp="log"
    )

    spatial_model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.2 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    geom = WcsGeom.create(
        skydir=position, binsz=1, width="5 deg", frame="galactic", axes=[energy_axis]
    )

    t_min = 0 * u.s
    t_max = 30000 * u.s
    gti = GTI.create(start=t_min, stop=t_max)

    geom_true = geom.copy()
    geom_true.axes[0].name = "energy_true"

    dataset = get_map_dataset(
        sky_model=skymodel, geom=geom, geom_etrue=geom_true, edisp=True
    )
    dataset.gti = gti
    return dataset

def models(backgrounds):
    spatial_model = GaussianSpatialModel(
        lon_0="3 deg", lat_0="4 deg", sigma="3 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(
        index=2, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model1 = SkyModel(
        spatial_model=spatial_model,
        spectral_model=spectral_model,
        name="source-1",
    )

    model2 = model1.copy(name="source-2")
    model2.datasets_names = ["dataset-1"]

    model3 = model1.copy(name="source-3")
    model3.datasets_names = "dataset-2"
    model3.spatial_model = PointSpatialModel()
    model3.parameters.freeze_all()

    return Models([model1, model2, model3] + backgrounds)

def __init__(
    self,
    model=None,
    kernel_width="0.2 deg",
    downsampling_factor=None,
    method="root brentq",
    error_method="covar",
    error_sigma=1,
    ul_method="covar",
    ul_sigma=2,
    threshold=None,
    rtol=0.001,
):
    if method not in ["root brentq", "root newton", "leastsq iter"]:
        raise ValueError(f"Not a valid method: '{method}'")

    if error_method not in ["covar", "conf"]:
        raise ValueError(f"Not a valid error method '{error_method}'")

    self.kernel_width = Angle(kernel_width)

    if model is None:
        model = SkyModel(
            spectral_model=PowerLawSpectralModel(),
            spatial_model=PointSpatialModel(),
        )

    self.model = model
    self.downsampling_factor = downsampling_factor

    self.parameters = {
        "method": method,
        "error_method": error_method,
        "error_sigma": error_sigma,
        "ul_method": ul_method,
        "ul_sigma": ul_sigma,
        "threshold": threshold,
        "rtol": rtol,
    }

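# Minimal sketch of the validation behavior of the constructor above, assuming
# it belongs to a TS-map-estimator-like class named TSMapEstimator (only the
# __init__ is shown in this file, so the class name is an assumption):
estimator = TSMapEstimator(kernel_width="0.3 deg", method="leastsq iter")
print(estimator.parameters["method"])  # "leastsq iter"

try:
    TSMapEstimator(method="bisect")  # not in the allowed list
except ValueError as err:
    print(err)  # Not a valid method: 'bisect'
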
def test_flux_point_dataset_serialization(tmp_path):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")

    # TODO: remove this duplicate definition once the model is redefined as a SkyModel
    spatial_model = ConstantSpatialModel()
    spectral_model = PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="test_model"
    )
    dataset = FluxPointsDataset(SkyModels([model]), data, name="test_dataset")

    Datasets([dataset]).to_yaml(tmp_path, prefix="tmp")
    datasets = Datasets.from_yaml(
        tmp_path / "tmp_datasets.yaml", tmp_path / "tmp_models.yaml"
    )
    new_dataset = datasets[0]

    assert_allclose(new_dataset.data.table["dnde"], dataset.data.table["dnde"], 1e-4)
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"

def test_large_oversampling():
    nbin = 2
    energy_axis_true = MapAxis.from_energy_bounds(
        ".1 TeV", "10 TeV", nbin=nbin, name="energy_true"
    )
    geom = WcsGeom.create(width=1, binsz=0.02, axes=[energy_axis_true])

    spectral_model = ConstantSpectralModel()
    spatial_model = GaussianSpatialModel(
        lon_0=0 * u.deg, lat_0=0 * u.deg, sigma=1e-4 * u.deg, frame="icrs"
    )
    model = SkyModel(spectral_model=spectral_model, spatial_model=spatial_model)
    models = Models([model])

    exposure = Map.from_geom(geom, unit="m2 s")
    exposure.data += 1.0

    psf = PSFKernel.from_gauss(geom, sigma="0.1 deg")
    evaluator = MapEvaluator(model=models[0], exposure=exposure, psf=psf)
    flux_1 = evaluator.compute_flux_spatial()

    spatial_model.sigma.value = 0.001
    flux_2 = evaluator.compute_flux_spatial()

    spatial_model.sigma.value = 0.01
    flux_3 = evaluator.compute_flux_spatial()

    spatial_model.sigma.value = 0.03
    flux_4 = evaluator.compute_flux_spatial()

    assert_allclose(flux_1.data.sum(), nbin, rtol=1e-4)
    assert_allclose(flux_2.data.sum(), nbin, rtol=1e-4)
    assert_allclose(flux_3.data.sum(), nbin, rtol=1e-4)
    assert_allclose(flux_4.data.sum(), nbin, rtol=1e-4)

def test_npred_no_edisp(self):
    const = 1 * u.Unit("cm-2 s-1 TeV-1")
    model = SkyModel(spectral_model=ConstantSpectralModel(const=const))
    livetime = 1 * u.s

    aeff = RegionNDMap.create(
        region=self.on_region,
        unit="cm2",
        axes=[self.e_reco.copy(name="energy_true")],
    )
    aeff.data += 1

    dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        exposure=aeff * livetime,
        models=model,
    )

    energy = aeff.geom.axes[0].edges
    expected = aeff.data[0] * (energy[-1] - energy[0]) * const * livetime
    assert_allclose(dataset.npred_signal().data.sum(), expected.value)

def spectrum_dataset():
    energy = np.logspace(-1, 1, 31) * u.TeV
    livetime = 100 * u.s

    pwl = PowerLawSpectralModel(
        index=2.1, amplitude="1e5 cm-2 s-1 TeV-1", reference="0.1 TeV"
    )
    temp_mod = ConstantTemporalModel()
    model = SkyModel(spectral_model=pwl, temporal_model=temp_mod, name="test-source")

    aeff = EffectiveAreaTable.from_constant(energy, "1 cm2")

    axis = MapAxis.from_edges(energy, interp="log", name="energy")
    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis])
    bkg_rate = np.ones(30) / u.s
    background.quantity = bkg_rate * livetime

    start = [1, 3, 5] * u.day
    stop = [2, 3.5, 6] * u.day
    t_ref = Time(55555, format="mjd")
    gti = GTI.create(start, stop, reference_time=t_ref)

    dataset = SpectrumDataset(
        models=model,
        aeff=aeff,
        livetime=livetime,
        background=background,
        name="test",
        gti=gti,
    )
    dataset.fake(random_state=23)
    return dataset

def test_significance_map_estimator_map_dataset_exposure(simple_dataset):
    simple_dataset.exposure += 1e10 * u.cm**2 * u.s
    axis = simple_dataset.exposure.geom.axes[0]
    simple_dataset.psf = PSFMap.from_gauss(axis, sigma="0.05 deg")

    model = SkyModel(
        PowerLawSpectralModel(amplitude="1e-9 cm-2 s-1 TeV-1"),
        GaussianSpatialModel(
            lat_0=0.0 * u.deg, lon_0=0.0 * u.deg, sigma=0.1 * u.deg, frame="icrs"
        ),
        name="sky_model",
    )
    simple_dataset.models = [model]
    simple_dataset.npred()

    estimator = ExcessMapEstimator(0.1 * u.deg, selection_optional="all")
    result = estimator.run(simple_dataset)

    assert_allclose(result["npred_excess"].data.sum(), 19733.602, rtol=1e-3)
    assert_allclose(result["sqrt_ts"].data[0, 10, 10], 4.217129, rtol=1e-3)

def setup(self):
    self.nbins = 30
    binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV

    self.source_model = SkyModel(
        spectral_model=PowerLawSpectralModel(
            index=2.1,
            amplitude=1e5 * u.Unit("cm-2 s-1 TeV-1"),
            reference=0.1 * u.TeV,
        )
    )
    self.livetime = 100 * u.s
    aeff = EffectiveAreaTable.from_constant(binning, "1 cm2")

    bkg_rate = np.ones(self.nbins) / u.s
    bkg_expected = (bkg_rate * self.livetime).to_value("")
    self.bkg = CountsSpectrum(
        energy_lo=binning[:-1], energy_hi=binning[1:], data=bkg_expected
    )

    random_state = get_random_state(23)
    flux = self.source_model.spectral_model.integral(binning[:-1], binning[1:])
    self.npred = (flux * aeff.data.data[0] * self.livetime).to_value("")
    self.npred += bkg_expected
    source_counts = random_state.poisson(self.npred)
    self.src = CountsSpectrum(
        energy_lo=binning[:-1], energy_hi=binning[1:], data=source_counts
    )

    self.dataset = SpectrumDataset(
        models=self.source_model,
        counts=self.src,
        aeff=aeff,
        livetime=self.livetime,
        background=self.bkg,
        name="test",
    )

def test_sky_point_source():
    # Test special case of point source. Regression test for GH 2367.
    energy_axis = MapAxis.from_edges([1, 10], unit="TeV", name="energy", interp="log")
    exposure = Map.create(
        skydir=(100, 70),
        npix=(4, 4),
        binsz=0.1,
        proj="AIT",
        unit="cm2 s",
        axes=[energy_axis],
    )
    exposure.data = np.ones_like(exposure.data)

    spatial_model = PointSpatialModel(100.06 * u.deg, 70.03 * u.deg, frame="icrs")

    # Create a spectral model with integral flux of 1 cm-2 s-1 in this energy band
    spectral_model = ConstantSpectralModel("1 cm-2 s-1 TeV-1")
    spectral_model.const.value /= spectral_model.integral(1 * u.TeV, 10 * u.TeV).value

    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)
    evaluator = MapEvaluator(model=model, exposure=exposure)
    flux = evaluator.compute_flux().to_value("cm-2 s-1")[0]

    expected = [
        [0, 0, 0, 0],
        [0, 0.140, 0.058, 0.0],
        [0, 0.564, 0.236, 0],
        [0, 0, 0, 0],
    ]
    assert_allclose(flux, expected, atol=0.01)
    assert_allclose(flux.sum(), 1)

def test_compute_ts_map(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="0.1 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(
        model=model, threshold=1, kernel_width="1 deg", selection_optional=[]
    )
    result = ts_estimator.run(input_dataset)

    assert_allclose(result["ts"].data[0, 99, 99], 1704.23, rtol=1e-2)
    assert_allclose(result["niter"].data[0, 99, 99], 8)
    assert_allclose(result["flux"].data[0, 99, 99], 1.02e-09, rtol=1e-2)
    assert_allclose(result["flux_err"].data[0, 99, 99], 3.84e-11, rtol=1e-2)

    assert result["flux"].unit == u.Unit("cm-2s-1")
    assert result["flux_err"].unit == u.Unit("cm-2s-1")

    # Check mask is correctly taken into account
    assert np.isnan(result["ts"].data[0, 30, 40])

    energy_axis = result["ts"].geom.axes["energy"]
    assert_allclose(energy_axis.edges.value, [0.1, 1])

def test_fake(self):
    """Test the fake dataset"""
    source_model = SkyModel(spectral_model=PowerLawSpectralModel())
    dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        models=source_model,
        aeff=self.aeff,
        livetime=self.livetime,
        edisp=self.edisp,
        acceptance=1,
        acceptance_off=10,
    )
    real_dataset = dataset.copy()

    background = RegionNDMap.from_geom(dataset.counts.geom)
    background.data += 1
    dataset.fake(background_model=background, random_state=314)

    assert real_dataset.counts.data.shape == dataset.counts.data.shape
    assert real_dataset.counts_off.data.shape == dataset.counts_off.data.shape
    assert dataset.counts_off.data.sum() == 39
    assert dataset.counts.data.sum() == 5

def to_template_sky_model(self, geom, spectral_model=None, name=None):
    """Merge a list of models into a single `~gammapy.modeling.models.SkyModel`

    Parameters
    ----------
    geom : `~gammapy.maps.Geom`
        Map geometry on which the models are evaluated.
    spectral_model : `~gammapy.modeling.models.SpectralModel`, optional
        One of the norm spectral models, e.g. `PowerLawNormSpectralModel`.
    name : str
        Name of the new model.

    Returns
    -------
    model : `~gammapy.modeling.models.SkyModel`
        Template sky model with the summed map as spatial model.
    """
    from . import PowerLawNormSpectralModel, SkyModel, TemplateSpatialModel

    unit = u.Unit("1 / (cm2 s sr TeV)")
    map_ = Map.from_geom(geom, unit=unit)

    for m in self:
        map_ += m.evaluate_geom(geom).to(unit)

    spatial_model = TemplateSpatialModel(map_, normalize=False)

    if spectral_model is None:
        spectral_model = PowerLawNormSpectralModel()

    return SkyModel(
        spectral_model=spectral_model, spatial_model=spatial_model, name=name
    )

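# Hedged usage sketch of to_template_sky_model. The geometry (with a
# true-energy axis, as evaluate_geom expects) and the single input model are
# illustrative assumptions:
from gammapy.maps import MapAxis, WcsGeom
from gammapy.modeling.models import (
    GaussianSpatialModel,
    Models,
    PowerLawSpectralModel,
    SkyModel,
)

axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3, name="energy_true")
geom = WcsGeom.create(npix=50, binsz=0.02, axes=[axis])

models = Models(
    [
        SkyModel(
            spatial_model=GaussianSpatialModel(sigma="0.2 deg"),
            spectral_model=PowerLawSpectralModel(),
            name="source-a",
        )
    ]
)
template = models.to_template_sky_model(geom, name="merged-template")
print(template.spatial_model.map.unit)  # 1 / (cm2 s sr TeV)
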
def get_lc(datasets):
    spatial_model1 = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.3 deg", frame="galactic"
    )
    spatial_model1.parameters["lon_0"].frozen = True
    spatial_model1.parameters["lat_0"].frozen = True
    spatial_model1.parameters["sigma"].frozen = True

    spectral_model1 = PowerLawSpectralModel(
        index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model_fit = SkyModel(
        spatial_model=spatial_model1,
        spectral_model=spectral_model1,
        name="model_fit",
    )
    for dataset in datasets:
        dataset.models[1] = model_fit

    lc_maker = LightCurveEstimator(
        e_edges=[1.0, 10.0] * u.TeV, source="model_fit", reoptimize=False
    )
    lc = lc_maker.run(datasets)
    print(lc.table["flux"])

def test_compute_ts_map(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="0.1 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(
        model=model, method="leastsq iter", threshold=1, kernel_width="1 deg"
    )
    result = ts_estimator.run(input_dataset)

    assert "leastsq iter" in repr(ts_estimator)
    assert_allclose(result["ts"].data[99, 99], 1704.23, rtol=1e-2)
    assert_allclose(result["niter"].data[99, 99], 3)
    assert_allclose(result["flux"].data[99, 99], 1.02e-09, rtol=1e-2)
    assert_allclose(result["flux_err"].data[99, 99], 3.84e-11, rtol=1e-2)
    assert_allclose(result["flux_ul"].data[99, 99], 1.10e-09, rtol=1e-2)

    assert result["flux"].unit == u.Unit("cm-2s-1")
    assert result["flux_err"].unit == u.Unit("cm-2s-1")
    assert result["flux_ul"].unit == u.Unit("cm-2s-1")

    # Check mask is correctly taken into account
    assert np.isnan(result["ts"].data[30, 40])

def test_compute_ts_map_downsampled(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="0.11 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(
        model=model,
        downsampling_factor=2,
        kernel_width="1 deg",
        selection_optional=["ul"],
    )
    result = ts_estimator.run(input_dataset)

    assert_allclose(result["ts"].data[0, 99, 99], 1661.49, rtol=1e-2)
    assert_allclose(result["niter"].data[0, 99, 99], 7)
    assert_allclose(result["flux"].data[0, 99, 99], 1.065988e-09, rtol=1e-2)
    assert_allclose(result["flux_err"].data[0, 99, 99], 4.005628e-11, rtol=1e-2)
    assert_allclose(result["flux_ul"].data[0, 99, 99], 8.220152e-11, rtol=1e-2)

    assert result["flux"].unit == u.Unit("cm-2s-1")
    assert result["flux_err"].unit == u.Unit("cm-2s-1")
    assert result["flux_ul"].unit == u.Unit("cm-2s-1")

    # Check mask is correctly taken into account
    assert np.isnan(result["ts"].data[0, 30, 40])

def test_map_properties(map_flux_estimate):
    model = SkyModel(PowerLawSpectralModel(amplitude="1e-10 cm-2s-1TeV-1", index=2))
    fe = FluxMaps(data=map_flux_estimate, reference_model=model)

    assert fe.dnde.unit == u.Unit("cm-2s-1TeV-1")
    assert_allclose(fe.dnde.quantity.value[:, 2, 2], [1e-9, 1e-11])
    assert_allclose(fe.dnde_err.quantity.value[:, 2, 2], [1e-10, 1e-12])
    assert_allclose(fe.dnde_errn.quantity.value[:, 2, 2], [2e-10, 2e-12])
    assert_allclose(fe.dnde_errp.quantity.value[:, 2, 2], [1.5e-10, 1.5e-12])
    assert_allclose(fe.dnde_ul.quantity.value[:, 2, 2], [2e-9, 2e-11])

    assert fe.e2dnde.unit == u.Unit("TeV cm-2s-1")
    assert_allclose(fe.e2dnde.quantity.value[:, 2, 2], [1e-10, 1e-10])
    assert_allclose(fe.e2dnde_err.quantity.value[:, 2, 2], [1e-11, 1e-11])
    assert_allclose(fe.e2dnde_errn.quantity.value[:, 2, 2], [2e-11, 2e-11])
    assert_allclose(fe.e2dnde_errp.quantity.value[:, 2, 2], [1.5e-11, 1.5e-11])
    assert_allclose(fe.e2dnde_ul.quantity.value[:, 2, 2], [2e-10, 2e-10])

    assert fe.flux.unit == u.Unit("cm-2s-1")
    assert_allclose(fe.flux.quantity.value[:, 2, 2], [9e-10, 9e-11])
    assert_allclose(fe.flux_err.quantity.value[:, 2, 2], [9e-11, 9e-12])
    assert_allclose(fe.flux_errn.quantity.value[:, 2, 2], [1.8e-10, 1.8e-11])
    assert_allclose(fe.flux_errp.quantity.value[:, 2, 2], [1.35e-10, 1.35e-11])
    assert_allclose(fe.flux_ul.quantity.value[:, 2, 2], [1.8e-9, 1.8e-10])

    assert fe.eflux.unit == u.Unit("TeV cm-2s-1")
    assert_allclose(fe.eflux.quantity.value[:, 2, 2], [2.302585e-10, 2.302585e-10])
    assert_allclose(fe.eflux_err.quantity.value[:, 2, 2], [2.302585e-11, 2.302585e-11])
    assert_allclose(fe.eflux_errn.quantity.value[:, 2, 2], [4.60517e-11, 4.60517e-11])
    assert_allclose(
        fe.eflux_errp.quantity.value[:, 2, 2], [3.4538775e-11, 3.4538775e-11]
    )
    assert_allclose(fe.eflux_ul.quantity.value[:, 2, 2], [4.60517e-10, 4.60517e-10])

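# Where the expected values above come from: for the index-2 reference power
# law, each conversion from dnde is fixed by the bin edges. A minimal check,
# assuming two decade-wide bins (0.1-1 TeV and 1-10 TeV, an assumption read
# off the numbers) with e_ref at the log-center, reproduces the first bin:
import numpy as np

e1, e2 = 0.1, 1.0             # first bin edges in TeV (assumed)
e_ref = np.sqrt(e1 * e2)      # log-center, ~0.316 TeV
dnde = 1e-9                   # cm-2 s-1 TeV-1 at e_ref

print(dnde * e_ref**2)                       # e2dnde -> 1e-10 TeV cm-2 s-1
print(dnde * e_ref**2 * (1 / e1 - 1 / e2))   # flux   -> 9e-10 cm-2 s-1
print(dnde * e_ref**2 * np.log(e2 / e1))     # eflux  -> 2.302585e-10 TeV cm-2 s-1
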
def estimate_exposure(self, dataset):
    """Estimate exposure map in reco energy

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Input dataset.

    Returns
    -------
    exposure : `Map`
        Exposure map.
    """
    # TODO: clean this up a bit...
    models = dataset.models

    model = SkyModel(
        spectral_model=self.model.spectral_model,
        spatial_model=ConstantFluxSpatialModel(),
    )
    model.apply_irf["psf"] = False

    energy_axis = dataset.exposure.geom.get_axis_by_name("energy_true")
    energy = energy_axis.edges
    flux = model.spectral_model.integral(emin=energy.min(), emax=energy.max())
    self._flux_estimator.flux_ref = flux.to_value("cm-2 s-1")

    dataset.models = [model]
    npred = dataset.npred()
    dataset.models = models

    data = (npred.data / flux).to("cm2 s")
    return npred.copy(data=data.value, unit=data.unit)

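# The idea in isolation: with a spatially constant model and the PSF response
# switched off, the predicted counts factorize as npred = integral_flux *
# exposure_reco, so dividing npred by the integral flux recovers an effective
# exposure in reconstructed energy. Hedged sketch, assuming `estimator` is an
# instance of the class this method belongs to and `dataset` is a MapDataset:
exposure_reco = estimator.estimate_exposure(dataset)
print(exposure_reco.unit)  # expected: cm2 s
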
def fake_dataset():
    axis = MapAxis.from_energy_bounds(0.1, 10, 5, unit="TeV", name="energy")
    axis_true = MapAxis.from_energy_bounds(0.05, 20, 10, unit="TeV", name="energy_true")

    geom = WcsGeom.create(npix=50, binsz=0.02, axes=[axis])
    dataset = MapDataset.create(geom)
    dataset.psf = PSFMap.from_gauss(axis_true, sigma="0.05 deg")
    dataset.mask_safe += np.ones(dataset.data_shape, dtype=bool)
    dataset.background += 1
    dataset.exposure += 1e12 * u.cm**2 * u.s

    spatial_model = PointSpatialModel()
    spectral_model = PowerLawSpectralModel(amplitude="1e-10 cm-2s-1TeV-1", index=2)
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="source"
    )
    dataset.models = [model]
    dataset.fake(random_state=42)
    return dataset

def test_fov_bkg_maker_fit_with_source_model(obs_dataset, exclusion_mask):
    fov_bkg_maker = FoVBackgroundMaker(method="fit", exclusion_mask=exclusion_mask)
    test_dataset = obs_dataset.copy()

    spatial_model = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.2 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(
        index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)
    test_dataset.models = model

    dataset = fov_bkg_maker.run(test_dataset)

    # Here we check that source parameters are correctly thawed after fit.
    assert dataset.models.parameters["index"].frozen is False
    assert dataset.models.parameters["lon_0"].frozen is False
    assert dataset.background_model.norm.frozen is False

    assert_allclose(dataset.background_model.norm.value, 0.8307, rtol=1e-4)
    assert_allclose(dataset.background_model.tilt.value, 0.0, rtol=1e-4)

def test_plot_fit(self):
    dataset = self.dataset.copy()
    dataset.models = SkyModel(spectral_model=PowerLawSpectralModel())

    with mpl_plot_check():
        dataset.plot_fit()

from astropy import units as u
import matplotlib.pyplot as plt
from gammapy.modeling.models import (
    Models,
    SkyModel,
    SuperExpCutoffPowerLaw3FGLSpectralModel,
)

energy_range = [0.1, 100] * u.TeV
model = SuperExpCutoffPowerLaw3FGLSpectralModel(
    index_1=1,
    index_2=2,
    amplitude="1e-12 TeV-1 s-1 cm-2",
    reference="1 TeV",
    ecut="10 TeV",
)
model.plot(energy_range)
plt.grid(which="both")
plt.ylim(1e-24, 1e-10)

# %%
# YAML representation
# -------------------
# Here is an example YAML file using the model:

model = SkyModel(spectral_model=model, name="super-exp-cutoff-power-law-3fgl-model")
models = Models([model])

print(models.to_yaml())

""" # %% # Example plot # ------------ # Here is an example plot of the model: from astropy import units as u import matplotlib.pyplot as plt from gammapy.modeling.models import BrokenPowerLawSpectralModel, Models, SkyModel energy_range = [0.1, 100] * u.TeV model = BrokenPowerLawSpectralModel( index1=1.5, index2=2.5, amplitude="1e-12 TeV-1 cm-2 s-1", ebreak="1 TeV", ) model.plot(energy_range) plt.grid(which="both") # %% # YAML representation # ------------------- # Here is an example YAML file using the model: model = SkyModel(spectral_model=model, name="broken-power-law-model") models = Models([model]) print(models.to_yaml())
def stacked_model(ds, ds_stack=None, first=False, debug=False):
    """
    This function is not finalised and should be checked.

    Recompute an effective spectral model from the existing, already stacked
    model (from the previous call) and the current model in a dataset list.
    It also extracts the masked dataset of the first dataset in the list
    (when first=True).

    Parameters
    ----------
    ds : Dataset
        The current dataset.
    ds_stack : Dataset, optional
        The current stacked dataset if first is False. The default is None.
    first : Boolean, optional
        If True, extract the masked dataset - valid for the first dataset of
        the list. The default is False.
    debug : Boolean, optional
        If True, let's talk a bit. The default is False.

    Returns
    -------
    model : SkyModel
        The current stacked model.
    """
    # Get the unmasked reconstructed energy binning from the current dataset
    # (but they are all identical)
    e_axis = ds.background.geom.axes[0]  # E reco sampling from the IRF

    if first:
        # The reconstructed binning is required to apply the masking.
        # The theoretical spectrum is therefore evaluated on reconstructed
        # energies, which assumes that the reconstructed energy is not too
        # different from the true one.
        # e_axis = dsets[0].aeff.data.axes[0]  # E true sampling from the IRF
        flux_org = ds.models[0].spectral_model(e_axis.center)
        mask_org = ds.mask_safe.data.flatten()

        spec = TemplateSpectralModel(
            energy=e_axis.center,
            values=flux_org * mask_org,
            interp_kwargs={"values_scale": "log"},
        )
        model = SkyModel(spectral_model=spec, name="Stack" + "-" + ds.name)
    else:
        flux_stacked = ds_stack.models[0].spectral_model(e_axis.center)
        dt_stack = ds_stack.gti.time_sum  # Duration of the present stack

        flux_org = ds.models[0].spectral_model(e_axis.center)
        mask_org = ds.mask_safe.data.flatten()
        dt = ds.gti.time_sum  # Duration of the current dataset

        # Create ad-hoc new flux and model: livetime-weighted average
        dt_new = dt + dt_stack
        flux_new = (dt_stack.value * flux_stacked + dt.value * flux_org * mask_org) / (
            dt_stack.value + dt.value
        )

        # Create a new SkyModel from the flux template model
        spec = TemplateSpectralModel(
            energy=e_axis.center,
            values=flux_new,
            interp_kwargs={"values_scale": "log"},
        )
        model = SkyModel(spectral_model=spec, name="Stack" + "-" + ds.name)

        if debug:
            livetime = ds.gti.time_sum
            print(72 * "-")
            print(
                " Current dataset dt={:10.2f} - {} with model {}".format(
                    livetime, ds.name, ds.models[0].name
                )
            )
            print(" On stack : dt=", dt_stack, " F0 = ", flux_stacked[0])
            print(" To add   : dt=", dt, " F0 = ", flux_org[0])
            print(" To stack : dt=", dt_new, " F0 = ", flux_new[0])
            print("")

    return model

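# Hedged sketch of how stacked_model might be driven over a list of datasets
# `dsets` (the name is assumed, echoing the commented-out line above): seed the
# running stack with the first dataset, then alternate model recomputation and
# stacking.
ds_stack = dsets[0].copy(name="stacked")
ds_stack.models = stacked_model(dsets[0], first=True, debug=True)
for ds in dsets[1:]:
    model = stacked_model(ds, ds_stack=ds_stack, debug=True)
    ds_stack.stack(ds)
    ds_stack.models = model
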
def generate_dataset(Eflux, flux, Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points, either as a
    SpectrumDataset or a SpectrumDatasetOnOff.

    Note:
    - in SpectrumDataset, the background counts are assumed precisely known
      and are not fluctuated;
    - in SpectrumDatasetOnOff, the background counts (off counts) are
      fluctuated from the IRF known values.

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined, if not over
        all energies. The default is None.
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00', scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the On-Off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataset.
        The default is True.
    seed : String, optional
        The seed for the random generator; if an integer, the same random
        series is generated at each run. The default is 'random-seed'.
    debug : Boolean, optional
        If True, let's talk a bit. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.
    """
    random_state = get_random_state(seed)

    # Define the on region
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg, frame="icrs")  # Observing region
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define the energy axes (see the spectrum analysis notebook): the edges
    # for SpectrumDataset - all datasets should have the same axes. Note that
    # linear spacing is clearly problematic for power-law fluxes. Axes can
    # also be defined from explicit edges with MapAxis.from_edges.
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value

    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")
    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if debug:
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF
    irf = load_cta_irfs(irf_file)

    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})
    model = SkyModel(spectral_model=spec, name="Spec" + str(name))

    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(e_reco=ereco_axis,
                                      e_true=etrue_axis,
                                      region=on_region,
                                      name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model

    # Restrict the safe mask to the requested energy range, if any
    if Erange is not None:
        mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                             energy_max=Erange[1])
        mask = mask & ds.mask_safe.data
        ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Fake is mandatory?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed
    if onoff:
        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 / alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if onoff:
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)

    return ds

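# Hedged usage sketch of generate_dataset. The power-law flux points and the
# IRF file path below are illustrative assumptions, not inputs used elsewhere
# in this file:
import numpy as np

Eflux = np.logspace(-1, 1, 10) * u.TeV
flux = 1e-12 * Eflux.value ** -2 * u.Unit("cm-2 s-1 TeV-1")

ds = generate_dataset(Eflux, flux,
                      Erange=[200 * u.GeV, 5 * u.TeV],
                      tobs=0.5 * u.h,
                      irf_file="irf_file.fits",  # hypothetical IRF file path
                      name="demo",
                      onoff=True,
                      seed=42,
                      debug=True)
print(ds)
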