def test_datasets_to_io(tmp_path):
    """Round-trip a Datasets container through YAML serialization.

    Reads the example GC datasets/models, checks their content, writes
    them back out with a prefix, then re-reads and re-checks the data.
    """
    path_data = "$GAMMAPY_DATA/tests/models/gc_example_datasets.yaml"
    path_models = "$GAMMAPY_DATA/tests/models/gc_example_models.yaml"

    datasets = Datasets.read(path_data, path_models)
    assert len(datasets) == 2
    assert len(datasets.parameters) == 22

    first = datasets[0]
    assert first.counts.data.sum() == 6824
    assert_allclose(first.exposure.data.sum(), 2072125400000.0, atol=0.1)
    assert first.psf is not None
    assert first.edisp is not None
    assert_allclose(first.background_model.evaluate().data.sum(), 4094.2, atol=0.1)
    assert first.background_model.name == "background_irf_gc"

    second = datasets[1]
    assert second.background_model.name == "background_irf_g09"

    # The diffuse cutout model is shared between the two datasets.
    assert first.models["gll_iem_v06_cutout"] == second.models["gll_iem_v06_cutout"]

    assert isinstance(first.models, Models)
    assert len(first.models) == 2
    assert first.models[0].name == "gc"
    assert first.models[1].name == "gll_iem_v06_cutout"
    # "reference" must be the very same Parameter object (identity, not equality).
    assert (
        first.models[0].parameters["reference"]
        is second.models[1].parameters["reference"]
    )
    assert_allclose(second.models[1].parameters["lon_0"].value, 0.9, atol=0.1)

    # Write with a prefix, read back, and verify the round trip kept the data.
    datasets.write(tmp_path, prefix="written")
    reread = Datasets.read(
        tmp_path / "written_datasets.yaml", tmp_path / "written_models.yaml"
    )
    assert len(reread) == 2

    first = reread[0]
    assert first.counts.data.sum() == 6824
    assert_allclose(first.exposure.data.sum(), 2072125400000.0, atol=0.1)
    assert first.psf is not None
    assert first.edisp is not None
    assert_allclose(first.background_model.evaluate().data.sum(), 4094.2, atol=0.1)
def input_dataset():
    """Return the first Fermi-LAT 3FHL map dataset, with its PSF removed."""
    loaded = Datasets.read(
        filedata="$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml",
        filemodel="$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml",
    )
    result = loaded[0]
    # Drop the PSF — presumably to speed up / simplify downstream tests.
    result.psf = None
    return result
def test_spectrum_dataset_on_off_to_yaml(tmpdir):
    """Serialize ON/OFF spectrum datasets to YAML and read them back."""
    observations = make_observation_list()
    original = Datasets(observations)
    original.write(path=tmpdir)

    # Default (empty) prefix yields "_datasets.yaml" / "_models.yaml".
    restored = Datasets.read(tmpdir / "_datasets.yaml", tmpdir / "_models.yaml")
    assert len(restored) == len(original)
    assert restored[0].name == original[0].name
    assert restored[1].name == original[1].name
    assert restored[1].counts.data.sum() == original[1].counts.data.sum()
def read_regions(self):
    """Read fitted datasets/models for each selected 3FHL ROI and collect diagnostics.

    For every ROI number in ``self.ROIs_sel`` this loads the serialized
    dataset and model YAML files from ``self.resdir``, restores the fit
    covariance and fit-info arrays, and — for catalog sources that belong
    to the ROI and pass the significance cut — attaches per-model
    sub-covariances and compares fitted spectra/flux points against the
    3FHL catalog values. ROIs with missing result files are skipped.
    """
    for kr in self.ROIs_sel:
        filedata = self.resdir / f"3FHL_ROI_num{kr}_datasets.yaml"
        filemodel = self.resdir / f"3FHL_ROI_num{kr}_models.yaml"
        try:
            dataset = list(Datasets.read(filedata, filemodel))[0]
        except (FileNotFoundError, IOError):
            # ROI was not fitted (or results were removed): skip it.
            continue
        pars = dataset.parameters
        # Restore the full covariance matrix saved alongside the fit results.
        pars.covariance = np.load(self.resdir / f"{dataset.name}_covariance.npy")
        infos = np.load(self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz")
        self.diags["message"].append(infos["message"])
        self.diags["stat"].append(infos["stat"])
        if self.savefig:
            self.plot_maps(dataset)
        for model in dataset.models:
            # Only consider catalog sources assigned to this ROI that pass
            # the average-significance cut.
            if (
                self.FHL3[model.name].data["ROI_num"] == kr
                and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
            ):
                # Slice the full covariance down to each parameter subset.
                model.spatial_model.parameters.covariance = pars.get_subcovariance(
                    model.spatial_model.parameters
                )
                model.spectral_model.parameters.covariance = pars.get_subcovariance(
                    model.spectral_model.parameters
                )
                dataset.background_model.parameters.covariance = pars.get_subcovariance(
                    dataset.background_model.parameters
                )
                res_spec = model.spectral_model
                cat_spec = self.FHL3[model.name].spectral_model()
                res_fp = FluxPoints.read(
                    self.resdir / f"{model.name}_flux_points.fits"
                )
                # Flag low-TS points as upper limits (ts < 1.0).
                res_fp.table["is_ul"] = res_fp.table["ts"] < 1.0
                cat_fp = self.FHL3[model.name].flux_points.to_sed_type("dnde")
                self.update_spec_diags(
                    dataset, model, cat_spec, res_spec, cat_fp, res_fp
                )
                if self.savefig:
                    self.plot_spec(kr, model, cat_spec, res_spec, cat_fp, res_fp)
def test_flux_point_dataset_serialization(tmp_path):
    """Round-trip a FluxPointsDataset through Datasets YAML I/O."""
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    flux_points = FluxPoints.read(path)
    flux_points.table["e_ref"] = flux_points.e_ref.to("TeV")

    pwl = PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    sky_model = SkyModel(spectral_model=pwl, name="test_model")
    dataset = FluxPointsDataset(sky_model, flux_points, name="test_dataset")

    Datasets([dataset]).write(tmp_path, prefix="tmp")
    restored = Datasets.read(
        tmp_path / "tmp_datasets.yaml", tmp_path / "tmp_models.yaml"
    )
    new_dataset = restored[0]

    assert_allclose(new_dataset.data.table["dnde"], dataset.data.table["dnde"], 1e-4)
    # When no fit mask was set, serialization falls back to the safe mask.
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
def read(filename):
    """Read a Datasets container from a pair of YAML files.

    Parameters
    ----------
    filename : str
        Common path prefix; the files read are ``<filename>_datasets.yaml``
        and ``<filename>_models.yaml``.

    Returns
    -------
    Datasets
        The deserialized datasets with their models attached.
    """
    # Bug fix: the previous version ignored ``filename`` entirely and used
    # f-strings with no placeholders (hard-coded "(unknown)" paths).
    return Datasets.read(f"{filename}_datasets.yaml", f"{filename}_models.yaml")
# frozen: true # # ``` # ## Reading different datasets # # # ### Fermi-LAT 3FHL: map dataset for 3D analysis # For now we let's use the datasets serialization only to read the 3D `MapDataset` associated to Fermi-LAT 3FHL data and models. # In[ ]: path = "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL" filedata = Path(path + "_datasets.yaml") filemodel = Path(path + "_models.yaml") datasets = Datasets.read(filedata=filedata, filemodel=filemodel) dataset_fermi = datasets[0] # We get the Crab model in order to share it with the other datasets # In[ ]: crab_model = dataset_fermi.models["Crab Nebula"] crab_spec = crab_model.spectral_model print(crab_spec) # ### HESS-DL3: 1D ON/OFF dataset for spectral fitting # # The ON/OFF datasets can be read from PHA files following the [OGIP standards](https://heasarc.gsfc.nasa.gov/docs/heasarc/ofwg/docs/spectra/ogip_92_007/node5.html). # We read the PHA files from each observation, and compute a stacked dataset for simplicity. # Then the Crab spectral model previously defined is added to the dataset.