Example #1
def make_plots(reference, result):
    fpoints_ref = FluxPoints.read(reference)
    fpoints_res = FluxPoints.read(result)

    fig = plt.figure(figsize=(7, 5))
    opts = {"energy_power": 2}
    fpoints_ref.plot(**opts, label="reference")
    fpoints_res.plot(**opts, label="result")
    plt.legend()

    return fig
Example #2
def dataset():
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")
    model = SkyModel(spectral_model=PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"))
    dataset = FluxPointsDataset(model, data)
    return dataset
Example #3
    def read_regions(self):
        for kr in self.ROIs_sel:
            filedata = Path(self.resdir + "/3FHL_ROI_num" + str(kr) +
                            "_datasets.yaml")
            filemodel = Path(self.resdir + "/3FHL_ROI_num" + str(kr) +
                             "_models.yaml")
            try:
                dataset = list(Datasets.from_yaml(filedata, filemodel))[0]
            except (FileNotFoundError, IOError):
                continue

            pars = dataset.parameters
            pars.covariance = np.load(self.resdir + "/" + dataset.name +
                                      "_covariance.npy")

            infos = np.load(self.resdir + "/3FHL_ROI_num" + str(kr) +
                            "_fit_infos.npz")
            self.diags["message"].append(infos["message"])
            self.diags["stat"].append(infos["stat"])

            if self.savefig:
                self.plot_maps(dataset)

            for model in list(dataset.model):
                if (self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >=
                        self.sig_cut):

                    model.spatial_model.parameters.covariance = pars.get_subcovariance(
                        model.spatial_model.parameters)
                    model.spectral_model.parameters.covariance = pars.get_subcovariance(
                        model.spectral_model.parameters)
                    dataset.background_model.parameters.covariance = pars.get_subcovariance(
                        dataset.background_model.parameters)
                    res_spec = model.spectral_model
                    cat_spec = self.FHL3[model.name].spectral_model()

                    res_fp = FluxPoints.read(self.resdir + "/" + model.name +
                                             "_flux_points.fits")
                    res_fp.table["is_ul"] = res_fp.table["ts"] < 1.0
                    cat_fp = self.FHL3[model.name].flux_points.to_sed_type(
                        "dnde")

                    self.update_spec_diags(dataset, model, cat_spec, res_spec,
                                           cat_fp, res_fp)
                    if self.savefig:
                        self.plot_spec(kr, model, cat_spec, res_spec, cat_fp,
                                       res_fp)
Example #4
def test_flux_point_dataset_serialization(tmp_path):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")
    spectral_model = PowerLawSpectralModel(index=2.3,
                                           amplitude="2e-13 cm-2 s-1 TeV-1",
                                           reference="1 TeV")
    model = SkyModel(spectral_model=spectral_model, name="test_model")
    dataset = FluxPointsDataset(model, data, name="test_dataset")

    Datasets([dataset]).write(tmp_path, prefix="tmp")
    datasets = Datasets.read(tmp_path / "tmp_datasets.yaml",
                             tmp_path / "tmp_models.yaml")
    new_dataset = datasets[0]
    assert_allclose(new_dataset.data.table["dnde"], dataset.data.table["dnde"],
                    1e-4)
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
Example #5
def test_flux_point_dataset_serialization(tmp_path):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")
    # TODO: remove this duplicate definition once the model is redefined as SkyModel
    spatial_model = ConstantSpatialModel()
    spectral_model = PowerLawSpectralModel(index=2.3,
                                           amplitude="2e-13 cm-2 s-1 TeV-1",
                                           reference="1 TeV")
    model = SkyModel(spatial_model, spectral_model, name="test_model")
    dataset = FluxPointsDataset(SkyModels([model]), data, name="test_dataset")

    Datasets([dataset]).to_yaml(tmp_path, prefix="tmp")
    datasets = Datasets.from_yaml(tmp_path / "tmp_datasets.yaml",
                                  tmp_path / "tmp_models.yaml")
    new_dataset = datasets[0]
    assert_allclose(new_dataset.data.table["dnde"], dataset.data.table["dnde"],
                    1e-4)
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
Example #6
    def test_write_ecsv(self, tmp_path, flux_points):
        flux_points.write(tmp_path / "flux_points.ecsv")
        actual = FluxPoints.read(tmp_path / "flux_points.ecsv")
        assert str(flux_points) == str(actual)
Example #7
    def test_write_fits(self, tmp_path, flux_points):
        flux_points.write(tmp_path / "tmp.fits")
        actual = FluxPoints.read(tmp_path / "tmp.fits")
        assert str(flux_points) == str(actual)
Example #8
def flux_points_likelihood():
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/binlike.fits"
    return FluxPoints.read(path).to_sed_type("dnde")
Example #9
def flux_points(request):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/" + request.param
    return FluxPoints.read(path)
Example #10
    def test_write_ecsv(self, tmpdir, flux_points):
        filename = tmpdir / "flux_points.ecsv"
        flux_points.write(filename)
        actual = FluxPoints.read(filename)
        assert str(flux_points) == str(actual)
Example #11
    datasets.append(dataset)

dataset_hess = Datasets(datasets).stack_reduce()
dataset_hess.name = "HESS"
dataset_hess.models = crab_model

# ### HAWC: 1D dataset for flux point fitting
#
# The HAWC flux points are taken from https://arxiv.org/pdf/1905.12518.pdf. These flux points are read from a pre-made FITS file and passed to a `FluxPointsDataset` together with the source spectral model.
#

# In[ ]:

# read flux points from https://arxiv.org/pdf/1905.12518.pdf
filename = "$GAMMAPY_DATA/hawc_crab/HAWC19_flux_points.fits"
flux_points_hawc = FluxPoints.read(filename)
dataset_hawc = FluxPointsDataset(crab_model, flux_points_hawc, name="HAWC")

# ## Datasets serialization
#
# The `datasets` object contains each dataset previously defined.
# It can be saved on disk as datasets.yaml, models.yaml, and several data files specific to each dataset. The `datasets` can then be rebuilt later from these files.

# In[ ]:

datasets = Datasets([dataset_fermi, dataset_hess, dataset_hawc])
path = Path("crab-3datasets")
path.mkdir(exist_ok=True)

datasets.write(path=path, prefix="crab_10GeV_100TeV", overwrite=True)
filedata = path / "crab_10GeV_100TeV_datasets.yaml"
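
# The datasets and models can then be rebuilt from these YAML files. A minimal
# sketch, assuming the models file follows the same `prefix` naming pattern as
# the datasets file, and using the `Datasets.read(datasets_file, models_file)`
# call shown in Example #4 (older Gammapy versions use `Datasets.from_yaml`
# instead, as in Example #5):

filemodel = path / "crab_10GeV_100TeV_models.yaml"  # assumed from the prefix above
datasets_read = Datasets.read(filedata, filemodel)
print(datasets_read)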