Example #1
    def test_ecpl_fit(self):
        self.set_model(self.ecpl)
        fit = Fit(self.obs_list[0])
        fit.run()

        actual = fit.datasets.parameters["lambda_"].quantity
        assert actual.unit == "TeV-1"
        assert_allclose(actual.value, 0.145215, rtol=1e-2)
Example #2
    def test_joint_fit(self):
        self.set_model(self.pwl)
        fit = Fit(self.obs_list)
        fit.run()
        actual = fit.datasets.parameters["index"].value
        assert_allclose(actual, 2.7806, rtol=1e-3)

        actual = fit.datasets.parameters["amplitude"].quantity
        assert actual.unit == "cm-2 s-1 TeV-1"
        assert_allclose(actual.value, 5.200e-11, rtol=1e-3)
Example #3
def test_likelihood_profile_reoptimize():
    dataset = MyDataset()
    fit = Fit(dataset)
    fit.run()

    dataset.parameters["y"].value = 0
    result = fit.likelihood_profile("x", nvalues=3, reoptimize=True)

    assert_allclose(result["values"], [0, 2, 4], atol=1e-7)
    assert_allclose(result["likelihood"], [4, 0, 4], atol=1e-7)
Example #4
    def test_compound(self):
        model = self.pwl * 2
        self.set_model(model)
        fit = Fit(self.obs_list[0])
        fit.run()
        pars = fit.datasets.parameters

        assert_allclose(pars["index"].value, 2.8166, rtol=1e-3)
        p = pars["amplitude"]
        assert p.unit == "cm-2 s-1 TeV-1"
        assert_allclose(p.value, 5.0714e-12, rtol=1e-3)
Example #5
def test_likelihood_profile():
    dataset = MyDataset()
    fit = Fit(dataset)
    fit.run()
    result = fit.likelihood_profile("x", nvalues=3)

    assert_allclose(result["values"], [0, 2, 4], atol=1e-7)
    assert_allclose(result["likelihood"], [4, 0, 4], atol=1e-7)

    # Check that original value state wasn't changed
    assert_allclose(dataset.parameters["x"].value, 2)
Example #6
def test_map_fit_one_energy_bin(sky_model, geom_image):
    dataset = get_map_dataset(sky_model, geom_image, geom_image)
    sky_model.spectral_model.index.value = 3.0
    sky_model.spectral_model.index.frozen = True
    dataset.background_model.norm.value = 0.5

    dataset.counts = dataset.npred()

    # Move a bit away from the best-fit point, to make sure the optimiser runs
    sky_model.parameters["sigma"].value = 0.21
    dataset.background_model.parameters["norm"].frozen = True

    fit = Fit(dataset)
    result = fit.run()

    assert result.success

    npred = dataset.npred().data.sum()
    assert_allclose(npred, 1087.073518, rtol=1e-3)
    assert_allclose(result.total_stat, 5177.19198, rtol=1e-3)

    pars = result.parameters

    assert_allclose(pars["lon_0"].value, 0.2, rtol=1e-2)
    assert_allclose(pars.error("lon_0"), 0.04623, rtol=1e-2)

    assert_allclose(pars["sigma"].value, 0.2, rtol=1e-2)
    assert_allclose(pars.error("sigma"), 0.031759, rtol=1e-2)

    assert_allclose(pars["amplitude"].value, 1e-11, rtol=1e-2)
    assert_allclose(pars.error("amplitude"), 2.163318e-12, rtol=1e-2)
Example #7
    def test_stats(self):
        dataset = self.obs_list[0]
        dataset.model = self.pwl

        fit = Fit([dataset])
        result = fit.run()

        stats = dataset.likelihood_per_bin()
        actual = np.sum(stats[dataset.mask_safe])

        desired = result.total_stat
        assert_allclose(actual, desired)
Example #8
    def test_likelihood_profile(self):
        dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            mask_safe=np.ones(self.src.energy.nbin, dtype=bool),
        )
        fit = Fit([dataset])
        result = fit.run()
        true_idx = result.parameters["index"].value
        values = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
        profile = fit.likelihood_profile("index", values=values)
        actual = values[np.argmin(profile["likelihood"])]
        assert_allclose(actual, true_idx, rtol=0.01)
Example #9
    def test_stacked_fit(self):
        obs_stacker = SpectrumDatasetOnOffStacker(self.obs_list)
        obs_stacker.run()

        dataset = obs_stacker.stacked_obs
        dataset.model = self.pwl

        fit = Fit([dataset])
        result = fit.run()
        pars = result.parameters

        assert_allclose(pars["index"].value, 2.7767, rtol=1e-3)
        assert u.Unit(pars["amplitude"].unit) == "cm-2 s-1 TeV-1"
        assert_allclose(pars["amplitude"].value, 5.191e-11, rtol=1e-3)
Example #10
    def test_no_edisp(self):
        dataset = self.obs_list[0]

        # Bring aeff into RECO space
        energy = dataset.counts.energy.center
        data = dataset.aeff.data.evaluate(energy=energy)
        e_edges = dataset.counts.energy.edges

        dataset.aeff = EffectiveAreaTable(data=data,
                                          energy_lo=e_edges[:-1],
                                          energy_hi=e_edges[1:])
        dataset.edisp = None
        dataset.model = self.pwl

        fit = Fit([dataset])
        result = fit.run()
        assert_allclose(result.parameters["index"].value, 2.7961, atol=0.02)
Example #11
    def test_cash(self):
        """Simple CASH fit to the on vector"""
        fit = Fit(self.dataset)
        result = fit.run()

        assert result.success
        assert "minuit" in repr(result)

        npred = self.dataset.npred().data.sum()
        assert_allclose(npred, self.npred.sum(), rtol=1e-3)
        assert_allclose(result.total_stat, -18087404.624, rtol=1e-3)

        pars = result.parameters
        assert_allclose(pars["index"].value, 2.1, rtol=1e-2)
        assert_allclose(pars.error("index"), 0.00127, rtol=1e-2)

        assert_allclose(pars["amplitude"].value, 1e5, rtol=1e-3)
        assert_allclose(pars.error("amplitude"), 153.450, rtol=1e-2)
Example #12
    def test_cash(self):
        """Simple CASH fit to the on vector"""
        dataset = SpectrumDataset(model=self.source_model, counts=self.src)

        npred = dataset.npred().data
        assert_allclose(npred[5], 660.5171, rtol=1e-5)

        stat_val = dataset.likelihood()
        assert_allclose(stat_val, -107346.5291, rtol=1e-5)

        self.source_model.parameters["index"].value = 1.12

        fit = Fit([dataset])
        result = fit.run()

        # These values are checked against sherpa fits, do not change
        pars = result.parameters
        assert_allclose(pars["index"].value, 1.995525, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.9, rtol=1e-3)
Example #13
def test_run(backend):
    dataset = MyDataset()
    fit = Fit(dataset)
    result = fit.run(optimize_opts={"backend": backend},
                     covariance_opts={"backend": backend})
    pars = result.parameters

    assert result.success is True

    assert_allclose(pars["x"].value, 2, rtol=1e-3)
    assert_allclose(pars["y"].value, 3e2, rtol=1e-3)
    assert_allclose(pars["z"].value, 4e-2, rtol=1e-3)

    assert_allclose(pars.error("x"), 1, rtol=1e-7)
    assert_allclose(pars.error("y"), 1, rtol=1e-7)
    assert_allclose(pars.error("z"), 1, rtol=1e-7)

    assert_allclose(pars.correlation[0, 1], 0, atol=1e-7)
    assert_allclose(pars.correlation[0, 2], 0, atol=1e-7)
    assert_allclose(pars.correlation[1, 2], 0, atol=1e-7)
Example #14
    def test_wstat(self):
        """WStat with on source and background spectrum"""
        on_vector = self.src.copy()
        on_vector.data += self.bkg.data
        obs = SpectrumDatasetOnOff(
            counts=on_vector,
            counts_off=self.off,
            acceptance=1,
            acceptance_off=1 / self.alpha,
        )
        obs.model = self.source_model

        self.source_model.parameters.index = 1.12

        fit = Fit(obs)
        result = fit.run()
        pars = self.source_model.parameters

        assert_allclose(pars["index"].value, 1.997342, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.187067, rtol=1e-3)
        assert_allclose(result.total_stat, 30.022316, rtol=1e-3)
Example #15
#
# Now we'll fit a global model to the spectrum. First we do a joint likelihood fit to all observations; if you want to stack the observations instead, see below. We will also produce a debug plot to show how the global fit matches one of the individual observations.

# In[ ]:

model = PowerLaw(index=2,
                 amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"),
                 reference=1 * u.TeV)

datasets_joint = extraction.spectrum_observations

for dataset in datasets_joint:
    dataset.model = model

fit_joint = Fit(datasets_joint)
result_joint = fit_joint.run()

# we make a copy here to compare it later
model_best_joint = model.copy()
model_best_joint.parameters.covariance = result_joint.parameters.covariance

# In[ ]:

print(result_joint)

# In[ ]:

plt.figure(figsize=(8, 6))
ax_spectrum, ax_residual = datasets_joint[0].plot_fit()
ax_spectrum.set_ylim(0, 25)
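# The stacked alternative mentioned above is not included in this excerpt. A minimal
# sketch, assuming the SpectrumDatasetOnOffStacker API used in Examples #9 and #19
# and the `datasets_joint` and `model` objects defined above:

# In[ ]:

from gammapy.spectrum import SpectrumDatasetOnOffStacker

# Stack the individual observations into a single dataset and fit it
stacker = SpectrumDatasetOnOffStacker(datasets_joint)
stacker.run()

dataset_stacked = stacker.stacked_obs
dataset_stacked.model = model.copy()

fit_stacked = Fit([dataset_stacked])
result_stacked = fit_stacked.run()
print(result_stacked)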
Example #16
# In[ ]:

model = SkyModel(
    SkyPointSource("0 deg", "0 deg"),
    PowerLaw(index=2.5, amplitude="1e-11 cm-2 s-1 TeV-1", reference="100 GeV"),
)

dataset = MapDataset(
    model=model,
    counts=counts,
    exposure=exposure,
    background_model=background_total,
    psf=psf_kernel,
)
fit = Fit(dataset)
result = fit.run()

# In[ ]:

print(result)

# In[ ]:

dataset.parameters.to_table()

# In[ ]:

residual = counts - dataset.npred()
residual.sum_over_axes().smooth("0.1 deg").plot(cmap="coolwarm",
                                                vmin=-3,
                                                vmax=3)
Example #17
class TestSpectralFit:
    """Test fit in astrophysical scenario"""
    def setup(self):
        path = "$GAMMAPY_DATA/joint-crab/spectra/hess/"
        obs1 = SpectrumDatasetOnOff.from_ogip_files(path + "pha_obs23523.fits")
        obs2 = SpectrumDatasetOnOff.from_ogip_files(path + "pha_obs23592.fits")
        self.obs_list = [obs1, obs2]

        self.pwl = PowerLaw(index=2,
                            amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"),
                            reference=1 * u.TeV)

        self.ecpl = ExponentialCutoffPowerLaw(
            index=2,
            amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1 * u.TeV,
            lambda_=0.1 / u.TeV,
        )

        # Example fit for one observation
        self.obs_list[0].model = self.pwl
        self.fit = Fit(self.obs_list[0])

    def set_model(self, model):
        for obs in self.obs_list:
            obs.model = model

    @requires_dependency("iminuit")
    def test_basic_results(self):
        self.set_model(self.pwl)
        result = self.fit.run()
        pars = self.fit.datasets.parameters

        assert self.pwl is self.obs_list[0].model

        assert_allclose(result.total_stat, 38.343, rtol=1e-3)
        assert_allclose(pars["index"].value, 2.817, rtol=1e-3)
        assert pars["amplitude"].unit == "cm-2 s-1 TeV-1"
        assert_allclose(pars["amplitude"].value, 5.142e-11, rtol=1e-3)
        assert_allclose(self.obs_list[0].npred().data[60], 0.6102, rtol=1e-3)
        pars.to_table()

    def test_basic_errors(self):
        self.set_model(self.pwl)
        self.fit.run()
        pars = self.fit.datasets.parameters

        assert_allclose(pars.error("index"), 0.1496, rtol=1e-3)
        assert_allclose(pars.error("amplitude"), 6.423e-12, rtol=1e-3)
        pars.to_table()

    def test_compound(self):
        model = self.pwl * 2
        self.set_model(model)
        fit = Fit(self.obs_list[0])
        fit.run()
        pars = fit.datasets.parameters

        assert_allclose(pars["index"].value, 2.8166, rtol=1e-3)
        p = pars["amplitude"]
        assert p.unit == "cm-2 s-1 TeV-1"
        assert_allclose(p.value, 5.0714e-12, rtol=1e-3)

    def test_ecpl_fit(self):
        self.set_model(self.ecpl)
        fit = Fit(self.obs_list[0])
        fit.run()

        actual = fit.datasets.parameters["lambda_"].quantity
        assert actual.unit == "TeV-1"
        assert_allclose(actual.value, 0.145215, rtol=1e-2)

    def test_joint_fit(self):
        self.set_model(self.pwl)
        fit = Fit(self.obs_list)
        fit.run()
        actual = fit.datasets.parameters["index"].value
        assert_allclose(actual, 2.7806, rtol=1e-3)

        actual = fit.datasets.parameters["amplitude"].quantity
        assert actual.unit == "cm-2 s-1 TeV-1"
        assert_allclose(actual.value, 5.200e-11, rtol=1e-3)
Example #18
# Now we'll fit a model to the spectrum with the `Fit` class. First we load a power law model with an initial value for the index and the amplitude, and then we do a likelihood fit. The fit results are printed below.

# In[ ]:

model = PowerLaw(index=4,
                 amplitude="1.3e-9 cm-2 s-1 TeV-1",
                 reference="0.02 TeV")

emin_fit, emax_fit = (0.04 * u.TeV, 0.4 * u.TeV)

for obs in extraction.spectrum_observations:
    obs.model = model
    obs.mask_fit = obs.counts.energy_mask(emin=emin_fit, emax=emax_fit)

joint_fit = Fit(extraction.spectrum_observations)
joint_result = joint_fit.run()

model.parameters.covariance = joint_result.parameters.covariance
print(joint_result)

# Now you might want to do the stacking here, even though in our case there is only one observation, which makes it superfluous.
# We can compute flux points by fitting the norm of the global model in energy bands.

# In[ ]:

e_edges = np.logspace(np.log10(0.04), np.log10(0.4), 7) * u.TeV

from gammapy.spectrum import SpectrumDatasetOnOffStacker

stacker = SpectrumDatasetOnOffStacker(extraction.spectrum_observations)
dataset = stacker.run()
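# The flux-point computation itself is not part of this excerpt. A minimal sketch,
# assuming the FluxPointsEstimator API shown in Example #19 and that `dataset` above
# holds the stacked dataset:

# In[ ]:

from gammapy.spectrum import FluxPointsEstimator

# Fit the norm of the global model in the energy bands defined by e_edges
dataset.model = model
fpe = FluxPointsEstimator(e_edges=e_edges, datasets=dataset)
flux_points = fpe.run()

# Flag low-significance points as upper limits (same criterion as in Example #19)
flux_points.table["is_ul"] = flux_points.table["ts"] < 4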
Example #19
class SpectrumAnalysisIACT:
    """High-level analysis class to perform a full 1D IACT spectral analysis.

    Observation selection must have happened before.

    Config options:

    * outdir : `pathlib.Path`, str
        Output folder, None means no output
    * background : dict
        Forwarded to `~gammapy.background.ReflectedRegionsBackgroundEstimator`
    * extraction : dict
        Forwarded to `~gammapy.spectrum.SpectrumExtraction`
    * fp_binning : `~astropy.units.Quantity`
        Flux points binning

    Parameters
    ----------
    observations : `~gammapy.data.Observations`
        Observations to analyse
    config : dict
        Config dict
    """
    def __init__(self, observations, config):
        self.observations = observations
        self.config = config

    def __str__(self):
        ss = self.__class__.__name__
        ss += "\n{}".format(self.observations)
        ss += "\n{}".format(self.config)
        return ss

    def run(self, optimize_opts=None):
        """Run all steps."""
        log.info("Running {}".format(self.__class__.__name__))
        self.run_extraction()
        self.run_fit(optimize_opts)

    def run_extraction(self):
        """Run all steps for the spectrum extraction."""
        self.background_estimator = ReflectedRegionsBackgroundEstimator(
            observations=self.observations, **self.config["background"])
        self.background_estimator.run()

        self.extraction = SpectrumExtraction(
            observations=self.observations,
            bkg_estimate=self.background_estimator.result,
            **self.config["extraction"])

        self.extraction.run()

    @property
    def _result_dict(self):
        """Convert to dict."""
        val = dict()
        model = self.config["fit"]["model"]
        val["model"] = model.to_dict()

        fit_range = self.config["fit"].get("fit_range")

        if fit_range is not None:
            val["fit_range"] = dict(
                min=fit_range[0].value,
                max=fit_range[1].value,
                unit=fit_range.unit.to_string("fits"),
            )

        val["statval"] = float(self.fit_result.total_stat)
        val["statname"] = "wstat"

        return val

    def write(self, filename, mode="w"):
        """Write to YAML file.

        Parameters
        ----------
        filename : str
            File to write
        mode : str
            Write mode
        """
        d = self._result_dict
        val = yaml.safe_dump(d, default_flow_style=False)

        with open(str(filename), mode) as outfile:
            outfile.write(val)

    def run_fit(self, optimize_opts=None):
        """Run all step for the spectrum fit."""
        fit_range = self.config["fit"].get("fit_range")
        model = self.config["fit"]["model"]

        for obs in self.extraction.spectrum_observations:
            if fit_range is not None:
                obs.mask_fit = obs.counts.energy_mask(fit_range[0],
                                                      fit_range[1])
            obs.model = model

        self.fit = Fit(self.extraction.spectrum_observations)
        self.fit_result = self.fit.run(optimize_opts=optimize_opts)

        model = self.config["fit"]["model"]
        modelname = model.__class__.__name__

        model.parameters.covariance = self.fit_result.parameters.covariance

        filename = make_path(
            self.config["outdir"]) / "fit_result_{}.yaml".format(modelname)

        self.write(filename=filename)

        obs_stacker = SpectrumDatasetOnOffStacker(
            self.extraction.spectrum_observations)
        obs_stacker.run()

        datasets_fp = obs_stacker.stacked_obs
        datasets_fp.model = model
        self.flux_point_estimator = FluxPointsEstimator(
            e_edges=self.config["fp_binning"], datasets=datasets_fp)
        fp = self.flux_point_estimator.run()
        fp.table["is_ul"] = fp.table["ts"] < 4
        self.flux_points = fp

    @property
    def spectrum_result(self):
        """`~gammapy.spectrum.FluxPointsDataset`"""
        return FluxPointsDataset(data=self.flux_points,
                                 model=self.fit.datasets.datasets[0].model)
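For illustration, here is a minimal usage sketch of `SpectrumAnalysisIACT`. The `observations`, `on_region`, and parameter values below are hypothetical placeholders; the config keys mirror those documented in the class docstring and read in `run_fit` (`outdir`, `background`, `extraction`, `fit`, `fp_binning`).

import numpy as np
import astropy.units as u
from gammapy.spectrum.models import PowerLaw

# Hypothetical inputs: `observations` is a gammapy Observations container and
# `on_region` a sky region accepted by ReflectedRegionsBackgroundEstimator.
config = {
    "outdir": ".",  # output folder; None disables writing results
    "background": {"on_region": on_region},  # forwarded to ReflectedRegionsBackgroundEstimator
    "extraction": {"containment_correction": False},  # forwarded to SpectrumExtraction
    "fit": {
        "model": PowerLaw(index=2, amplitude="1e-12 cm-2 s-1 TeV-1", reference="1 TeV"),
        "fit_range": [0.1, 30] * u.TeV,  # optional; restricts the fitted energy range
    },
    "fp_binning": np.logspace(-1, 1.5, 10) * u.TeV,  # flux points binning
}

analysis = SpectrumAnalysisIACT(observations=observations, config=config)
analysis.run()
print(analysis.fit_result)
print(analysis.flux_points.table)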
Example #20
# ## Power Law Fit
#
# We start by fitting a simple [power law](https://docs.gammapy.org/0.11/api/gammapy.spectrum.models.PowerLaw.html#gammapy.spectrum.models.PowerLaw).

# In[ ]:

pwl = PowerLaw(index=2, amplitude="1e-12 cm-2 s-1 TeV-1", reference="1 TeV")

# After creating the model we run the fit by passing the `flux_points` and `pwl` objects:

# In[ ]:

dataset_pwl = FluxPointsDataset(pwl, flux_points, likelihood="chi2assym")
fitter = Fit(dataset_pwl)
result_pwl = fitter.run()

# And print the result:

# In[ ]:

print(result_pwl)

# In[ ]:

print(pwl)

# Finally we plot the data points and the best fit model:

# In[ ]:
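# (The plotting cell is not included in this excerpt. A minimal sketch, assuming the
# `flux_points` and `pwl` objects defined above, astropy.units imported as `u`, and
# gammapy's flux-point and spectral-model plotting helpers; the energy range is
# illustrative only:)

# Propagate the fit covariance to the model so the error band can be drawn
pwl.parameters.covariance = result_pwl.parameters.covariance

ax = flux_points.plot(energy_power=2)
pwl.plot(energy_range=[1e-1, 1e2] * u.TeV, ax=ax, energy_power=2)
pwl.plot_error(energy_range=[1e-1, 1e2] * u.TeV, ax=ax, energy_power=2)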
Example #21
def test_map_fit(sky_model, geom, geom_etrue):
    dataset_1 = get_map_dataset(sky_model,
                                geom,
                                geom_etrue,
                                evaluation_mode="local")
    dataset_1.background_model.norm.value = 0.5
    dataset_1.counts = dataset_1.npred()

    dataset_2 = get_map_dataset(sky_model,
                                geom,
                                geom_etrue,
                                evaluation_mode="global",
                                likelihood="cstat")
    dataset_2.counts = dataset_2.npred()

    sky_model.parameters["sigma"].frozen = True

    dataset_1.background_model.norm.value = 0.49
    dataset_2.background_model.norm.value = 0.99

    fit = Fit([dataset_1, dataset_2])
    result = fit.run()

    assert result.success
    assert "minuit" in repr(result)

    npred = dataset_1.npred().data.sum()
    assert_allclose(npred, 4454.932873, rtol=1e-3)
    assert_allclose(result.total_stat, 12728.351643, rtol=1e-3)

    pars = result.parameters
    assert_allclose(pars["lon_0"].value, 0.2, rtol=1e-2)
    assert_allclose(pars.error("lon_0"), 0.003627, rtol=1e-2)

    assert_allclose(pars["index"].value, 3, rtol=1e-2)
    assert_allclose(pars.error("index"), 0.031294, rtol=1e-2)

    assert_allclose(pars["amplitude"].value, 1e-11, rtol=1e-2)
    assert_allclose(pars.error("amplitude"), 3.885326e-13, rtol=1e-2)

    # background norm 1
    assert_allclose(pars[6].value, 0.5, rtol=1e-2)
    assert_allclose(pars.error(pars[6]), 0.015399, rtol=1e-2)

    # background norm 2
    assert_allclose(pars[9].value, 1, rtol=1e-2)
    assert_allclose(pars.error(pars[9]), 0.02104, rtol=1e-2)

    # test mask_safe evaluation
    mask_safe = geom.energy_mask(emin=1 * u.TeV)
    dataset_1.mask_safe = mask_safe
    dataset_2.mask_safe = mask_safe

    stat = fit.datasets.likelihood()
    assert_allclose(stat, 5895.205587)

    # test model evaluation outside image

    with pytest.raises(ValueError):
        dataset_1.model.skymodels[0].spatial_model.lon_0.value = 150
        dataset_1.npred()

    with mpl_plot_check():
        dataset_1.plot_residuals()