Example #1
def main():
    # TODO: add the rxj1713 plotting
    targets = ["crab", "msh1552", "pks2155"]
    for target in targets:
        log.info(f"Processing source: {target}")
        path_ref = Path(target) / "reference"
        path_res = Path(target) / "results"
        path_plot = Path(target) / "plots"

        for ndim in [1, 3]:
            # Load the reference and best-fit spectral models
            with open(path_ref / f"reference-{ndim}d.yaml") as file:
                reference_spectrum_file = yaml.safe_load(file)
                reference_spectrum = Model.create(
                    "PowerLawSpectralModel",
                    model_type="spectral",
                    index=reference_spectrum_file["index"],
                    amplitude=f"{reference_spectrum_file['amplitude']} TeV-1 cm-2 s-1",
                    reference=f"{reference_spectrum_file['reference']} TeV",
                )
            reference_spectrum_errors = {
                "index_err": reference_spectrum_file["index_err"],
                "amplitude_err": reference_spectrum_file["amplitude_err"],
            }
            with open(path_res / f"result-{ndim}d.yaml") as file:
                result_spectrum_file = yaml.safe_load(file)
                result_spectrum = Model.create(
                    "PowerLawSpectralModel",
                    model_type="spectral",
                    index=result_spectrum_file["index"],
                    amplitude=f"{result_spectrum_file['amplitude']} TeV-1 cm-2 s-1",
                    reference=f"{result_spectrum_file['reference']} TeV",
                )
            result_spectrum_errors = {
                "index_err": result_spectrum_file["index_err"],
                "amplitude_err": result_spectrum_file["amplitude_err"],
            }

            # Load the reference and best-fit flux points
            reference_fpoints = FluxPoints.read(
                path_ref / f"gammapy_{target}_{ndim}d_spectral_points.ecsv"
            )
            result_fpoints = FluxPoints.read(
                path_res / f"flux-points-{ndim}d.ecsv"
            )

            # Plot
            fig = make_plots(
                reference_spectrum,
                reference_spectrum_errors,
                result_spectrum,
                result_spectrum_errors,
                reference_fpoints,
                result_fpoints,
            )
            log.info(f"Writing {path_plot}")
            fig.savefig(str(path_plot) + f"/flux-points-{ndim}d.png")
Example #2
    def read_regions(self):
        for kr in self.ROIs_sel:
            filedata = self.resdir / f"3FHL_ROI_num{kr}_datasets.yaml"
            filemodel = self.resdir / f"3FHL_ROI_num{kr}_models.yaml"
            try:
                dataset = Datasets.read(filedata, filemodel, lazy=False)[0]
            except OSError:
                # Result files are missing for this ROI, skip it
                continue

            infos = np.load(self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz")
            self.diags["message"].append(infos["message"])
            self.diags["stat"].append(infos["stat"])

            if self.savefig:
                self.plot_maps(dataset)

            for model in dataset.models:
                # Select catalog sources in this ROI above the significance
                # cut, skipping the FoV background model
                if (
                    not isinstance(model, FoVBackgroundModel)
                    and self.FHL3[model.name].data["ROI_num"] == kr
                    and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
                ):
                    res_spec = model.spectral_model
                    cat_spec = self.FHL3[model.name].spectral_model()

                    res_fp = FluxPoints.read(
                        self.resdir / f"{model.name}_flux_points.fits",
                        reference_model=cat_spec,
                    )
                    cat_fp = self.FHL3[model.name].flux_points
                    self.update_spec_diags(dataset, model, cat_spec, res_spec,
                                           cat_fp, res_fp)
                    if self.savefig:
                        self.plot_spec(kr, model, cat_spec, res_spec, cat_fp,
                                       res_fp)
Example #3
    def test_write_ecsv(self, tmp_path, flux_points):
        flux_points.write(
            tmp_path / "flux_points.ecsv", sed_type=flux_points.sed_type_init
        )
        actual = FluxPoints.read(tmp_path / "flux_points.ecsv")
        # Ignore the upper-limit flag when comparing the round-tripped points
        actual._data.pop("is_ul", None)
        flux_points._data.pop("is_ul", None)
        assert str(flux_points) == str(actual)
Example #4
def make_summary(types):
    log.info("Making summary plots.")

    ax = plt.subplot()

    for type in types:
        filename = make_path("results")
        path = filename / f"lightcurve_{type}.fits"
        # TODO: fix reference model I/O
        model = define_model_1d()

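        # Read the light curve flux points and overlay them on the shared axes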
        lc = FluxPoints.read(path, format="lightcurve", reference_model=model)
        lc.plot(ax=ax, label=type, sed_type="flux", markersize=0)

    lc_ref = read_ref_lightcurve()
    lc_ref.plot(ax=ax, label='ref', alpha=0.2, sed_type="flux", markersize=0)

    ax.set_yscale("linear")
    plt.legend()

    filename = make_path("results")
    if len(types) > 1:
        path = filename / "lightcurve_comparison.png"
    else:
        path = filename / f"lightcurve_{types[0]}.png"
    plt.savefig(path)

    plt.close()
Example #5
def test_flux_point_dataset_serialization(tmp_path):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.energy_ref.to("TeV")
    spectral_model = PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model = SkyModel(spectral_model=spectral_model, name="test_model")
    dataset = FluxPointsDataset(model, data, name="test_dataset")

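    # Round-trip the dataset and its models through YAML files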
    Datasets([dataset]).write(
        filename=tmp_path / "tmp_datasets.yaml",
        filename_models=tmp_path / "tmp_models.yaml",
    )

    datasets = Datasets.read(
        filename=tmp_path / "tmp_datasets.yaml",
        filename_models=tmp_path / "tmp_models.yaml",
    )

    new_dataset = datasets[0]
    assert_allclose(new_dataset.data.dnde, dataset.data.dnde, 1e-4)
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
Example #6
def test_serialisation(tmpdir):
    dataset = get_simple_dataset_on_off()
    geom = dataset.counts.geom
    regions = make_concentric_annulus_sky_regions(
        center=geom.center_skydir,
        radius_max=0.2 * u.deg,
    )

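    # Build a flux profile estimator with a single energy bin (0.1 to 10 TeV)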
    est = FluxProfileEstimator(regions, energy_edges=[0.1, 10] * u.TeV)
    result = est.run(dataset)

    result.write(tmpdir / "profile.fits", format="profile")

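    # Norm values are interpreted relative to the supplied reference model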
    profile = FluxPoints.read(
        tmpdir / "profile.fits",
        format="profile",
        reference_model=PowerLawSpectralModel(),
    )

    assert_allclose(result.norm, profile.norm, rtol=1e-4)
    assert_allclose(result.norm_err, profile.norm_err, rtol=1e-4)
    assert_allclose(result.npred, profile.npred)
    assert_allclose(result.ts, profile.ts)

    assert np.all(result.is_ul == profile.is_ul)
Example #7
def dataset():
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")
    model = SkyModel(spectral_model=PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"))
    dataset = FluxPointsDataset(model, data)
    return dataset
Example #8
def test_lightcurve_read_write(tmp_path, lc, format):
    table = lc.to_table(format="lightcurve", sed_type="flux")
    table.write(tmp_path / "tmp", format=format)
    lc = FluxPoints.read(tmp_path / "tmp", format="lightcurve")

    # Check if time-related info round-trips
    axis = lc.geom.axes["time"]
    assert axis.reference_time.scale == "utc"
    assert axis.reference_time.format == "mjd"
    assert_allclose(axis.time_mid.mjd, [55198, 55202.5])
Example #9
def dataset():
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")
    model = SkyModel(spectral_model=PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"))

    obs_table = Table()
    obs_table["TELESCOP"] = ["CTA"]
    obs_table["OBS_ID"] = ["0001"]
    obs_table["INSTRUME"] = ["South_Z20_50h"]

    dataset = FluxPointsDataset(model, data, meta_table=obs_table)
    return dataset
Example #10
def test_flux_points_estimator_no_norm_scan(fpe_pwl, tmpdir):
    datasets, fpe = fpe_pwl
    fpe.selection_optional = None

    fp = fpe.run(datasets)

    assert_allclose(fpe.fit.optimize_opts["tol"], 0.2)
    assert_allclose(fpe.fit.minuit.tol, 0.2)

    assert fp.sed_type_init == "likelihood"
    assert "stat_scan" not in fp._data

    # test GADF I/O
    fp.write(tmpdir / "test.fits", format="gadf-sed")
    fp_new = FluxPoints.read(tmpdir / "test.fits")
    assert fp_new.meta["sed_type_init"] == "likelihood"
Example #11
def test_run_ecpl(fpe_ecpl, tmpdir):
    datasets, fpe = fpe_ecpl

    fp = fpe.run(datasets)

    table = fp.to_table()

    actual = table["ref_flux"].quantity
    desired = [9.024362e-13, 1.781341e-13, 1.260298e-18
               ] * u.Unit("1 / (cm2 s)")
    assert_allclose(actual, desired, rtol=1e-3)

    actual = table["ref_dnde"].quantity
    desired = [1.351382e-12, 7.527318e-15, 2.523659e-22
               ] * u.Unit("1 / (cm2 s TeV)")
    assert_allclose(actual, desired, rtol=1e-3)

    actual = table["ref_eflux"].quantity
    desired = [4.770557e-13, 2.787695e-13, 1.371963e-17
               ] * u.Unit("TeV / (cm2 s)")
    assert_allclose(actual, desired, rtol=1e-3)

    actual = table["norm"].data
    assert_allclose(actual, [1.001683, 1.061821, 1.237512e03], rtol=1e-3)

    actual = table["norm_err"].data
    assert_allclose(actual, [1.386091e-01, 2.394241e-01, 3.259756e03],
                    rtol=1e-2)

    actual = table["norm_errn"].data
    assert_allclose(actual, [1.374962e-01, 2.361246e-01, 2.888978e03],
                    rtol=1e-2)

    actual = table["norm_errp"].data
    assert_allclose(actual, [1.397358e-01, 2.428481e-01, 3.716550e03],
                    rtol=1e-2)

    actual = table["norm_ul"].data
    assert_allclose(actual, [1.283433e00, 1.555117e00, 9.698645e03], rtol=1e-2)

    actual = table["sqrt_ts"].data
    assert_allclose(actual, [7.678454, 4.735691, 0.399243], rtol=1e-2)

    # test GADF I/O
    fp.write(tmpdir / "test.fits", format="gadf-sed")
    fp_new = FluxPoints.read(tmpdir / "test.fits")
    assert fp_new.meta["sed_type_init"] == "likelihood"
Example #12
def test_run_map_pwl(fpe_map_pwl, tmpdir):
    datasets, fpe = fpe_map_pwl
    fp = fpe.run(datasets)

    table = fp.to_table()

    actual = table["e_min"].data
    assert_allclose(actual, [0.1, 1.178769, 8.48342], rtol=1e-5)

    actual = table["e_max"].data
    assert_allclose(actual, [1.178769, 8.483429, 100.0], rtol=1e-5)

    actual = table["e_ref"].data
    assert_allclose(actual, [0.343332, 3.162278, 29.126327], rtol=1e-5)

    actual = table["norm"].data
    assert_allclose(actual, [0.974726, 0.96342, 0.994251], rtol=1e-2)

    actual = table["norm_err"].data
    assert_allclose(actual, [0.067637, 0.052022, 0.087059], rtol=3e-2)

    actual = table["counts"].data
    assert_allclose(actual, [[44611, 0], [1923, 0], [282, 0]])

    actual = table["norm_ul"].data
    assert_allclose(actual, [1.111852, 1.07004, 1.17829], rtol=1e-2)

    actual = table["sqrt_ts"].data
    assert_allclose(actual, [16.681221, 28.408676, 21.91912], rtol=1e-2)

    actual = table["norm_scan"][0]
    assert_allclose(actual, [0.2, 1.0, 5])

    actual = table["stat_scan"][0] - table["stat"][0]
    assert_allclose(actual, [1.628398e02, 1.452456e-01, 2.008018e03],
                    rtol=1e-2)

    # test GADF I/O
    fp.write(tmpdir / "test.fits", format="gadf-sed")
    fp_new = FluxPoints.read(tmpdir / "test.fits")
    assert fp_new.meta["sed_type_init"] == "likelihood"
Example #13
    def test_write_ecsv(self, tmp_path, flux_points):
        flux_points.write(tmp_path / "flux_points.ecsv")
        actual = FluxPoints.read(tmp_path / "flux_points.ecsv")
        assert str(flux_points) == str(actual)
Example #14
    def test_write_fits(self, tmp_path, flux_points):
        flux_points.write(tmp_path / "tmp.fits")
        actual = FluxPoints.read(tmp_path / "tmp.fits")
        assert str(flux_points) == str(actual)
Example #15
def flux_points_likelihood():
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/binlike.fits"
    return FluxPoints.read(path).to_sed_type("dnde")
Example #16
def flux_points(request):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/" + request.param
    return FluxPoints.read(path)
Example #17
def test_run_pwl(fpe_pwl, tmpdir):
    datasets, fpe = fpe_pwl

    fp = fpe.run(datasets)
    table = fp.to_table()

    actual = table["e_min"].data
    assert_allclose(actual, [0.316228, 1.0, 10.0], rtol=1e-5)

    actual = table["e_max"].data
    assert_allclose(actual, [1.0, 10.0, 31.622777], rtol=1e-5)

    actual = table["e_ref"].data
    assert_allclose(actual, [0.562341, 3.162278, 17.782794], rtol=1e-3)

    actual = table["ref_flux"].quantity
    desired = [2.162278e-12, 9.000000e-13, 6.837722e-14
               ] * u.Unit("1 / (cm2 s)")
    assert_allclose(actual, desired, rtol=1e-3)

    actual = table["ref_dnde"].quantity
    desired = [3.162278e-12, 1.000000e-13, 3.162278e-15
               ] * u.Unit("1 / (cm2 s TeV)")
    assert_allclose(actual, desired, rtol=1e-3)

    actual = table["ref_eflux"].quantity
    desired = [1.151293e-12, 2.302585e-12, 1.151293e-12
               ] * u.Unit("TeV / (cm2 s)")
    assert_allclose(actual, desired, rtol=1e-3)

    actual = table["norm"].data
    assert_allclose(actual, [1.081434, 0.91077, 0.922176], rtol=1e-3)

    actual = table["norm_err"].data
    assert_allclose(actual, [0.066374, 0.061025, 0.179729], rtol=1e-2)

    actual = table["norm_errn"].data
    assert_allclose(actual, [0.065803, 0.060403, 0.171376], rtol=1e-2)

    actual = table["norm_errp"].data
    assert_allclose(actual, [0.06695, 0.061652, 0.18839], rtol=1e-2)

    actual = table["counts"].data.squeeze()
    assert_allclose(actual, [1490, 748, 43])

    actual = table["norm_ul"].data
    assert_allclose(actual, [1.216227, 1.035472, 1.316878], rtol=1e-2)

    actual = table["sqrt_ts"].data
    assert_allclose(actual, [18.568429, 18.054651, 7.057121], rtol=1e-2)

    actual = table["norm_scan"][0][[0, 5, -1]]
    assert_allclose(actual, [0.2, 1.0, 5.0])

    actual = table["stat_scan"][0][[0, 5, -1]]
    assert_allclose(actual, [220.369, 4.301, 1881.626], rtol=1e-2)

    actual = table["npred"].data
    assert_allclose(actual, [[1492.966], [749.459], [43.105]], rtol=1e-3)

    actual = table["npred_excess"].data
    assert_allclose(actual, [[660.5625], [421.5402], [34.3258]], rtol=1e-3)

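    # UL_CONF ~ 0.9545 corresponds to 2-sigma upper limits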
    actual = table.meta["UL_CONF"]
    assert_allclose(actual, 0.9544997)

    npred_excess_err = fp.npred_excess_err.data.squeeze()
    assert_allclose(npred_excess_err, [40.541334, 28.244024, 6.690005],
                    rtol=1e-3)

    npred_excess_errp = fp.npred_excess_errp.data.squeeze()
    assert_allclose(npred_excess_errp, [40.838806, 28.549508, 7.013377],
                    rtol=1e-3)

    npred_excess_errn = fp.npred_excess_errn.data.squeeze()
    assert_allclose(npred_excess_errn, [40.247313, 27.932033, 6.378465],
                    rtol=1e-3)

    npred_excess_ul = fp.npred_excess_ul.data.squeeze()
    assert_allclose(npred_excess_ul, [742.87486, 479.169719, 49.019125],
                    rtol=1e-3)

    # test GADF I/O
    fp.write(tmpdir / "test.fits", format="gadf-sed")
    fp_new = FluxPoints.read(tmpdir / "test.fits")
    assert fp_new.meta["sed_type_init"] == "likelihood"