Example #1
0
    def run(self, datasets):
        """Estimate the source size in each energy bin.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.Dataset`
            Input datasets.

        Returns
        -------
        rows : list of dict
            One size-estimation result dict per energy bin.
        """
        datasets = Datasets(datasets)

        # Locate the extension parameter on the source's spatial model.
        # TODO: write something better
        spatial_model = datasets.models[self.source].spatial_model

        for attr in ("sigma", "r_0", "radius"):
            if hasattr(spatial_model, attr):
                self.size_parameter = getattr(spatial_model, attr)
                break
        else:
            raise ValueError(
                f"Cannot find size parameter on model {self.source}")

        rows = []

        edges = self.energy_edges
        bins = progress_bar(zip(edges[:-1], edges[1:]), desc="Energy bins")

        for e_min, e_max in bins:
            sliced = datasets.slice_by_energy(energy_min=e_min,
                                              energy_max=e_max)
            # Reduce each sliced dataset to a single image dataset.
            sliced = Datasets([d.to_image(name=d.name) for d in sliced])
            sliced.models = datasets.models  # .copy()
            rows.append(self.estimate_size(sliced))

        return rows
Example #2
0
    def run(self, datasets):
        """Estimate flux for a given energy range.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset`
            Spectrum datasets.

        Returns
        -------
        result : dict
            Dict with results for the flux point.
        """
        datasets = Datasets(datasets)
        models = datasets.models.copy()

        scale_model = self.get_scale_model(models)

        # Reference fluxes over the combined energy range of all datasets:
        # smallest lower edge to largest upper edge.
        e_min, e_max = datasets.energy_ranges
        axis = MapAxis.from_energy_edges([e_min.min(), e_max.max()])

        with np.errstate(invalid="ignore", divide="ignore"):
            reference = scale_model.reference_fluxes(energy_axis=axis)

        # Convert array-valued entries to plain Python scalars.
        result = {name: quantity.item() for name, quantity in reference.items()}

        # Fit the norm of the scaled model in place of the source's
        # spectral model.
        models[self.source].spectral_model = scale_model
        datasets.models = models
        result.update(super().run(datasets, scale_model.norm))
        return result
Example #3
0
    def estimate_flux_point(self, datasets, energy_min, energy_max):
        """Estimate flux point for a single energy group.

        Parameters
        ----------
        datasets : `Datasets`
            Datasets
        energy_min, energy_max : `~astropy.units.Quantity`
            Energy bounds to compute the flux point for.

        Returns
        -------
        result : dict
            Dict with results for the flux point.
        """
        sliced = datasets.slice_by_energy(energy_min=energy_min,
                                          energy_max=energy_max)

        if self.sum_over_energy_groups:
            # Collapse each sliced dataset to a single image dataset.
            sliced = Datasets([d.to_image(name=d.name) for d in sliced])

        # No dataset overlaps this energy range: return a NaN-filled result.
        if len(sliced) == 0:
            log.warning(
                f"No dataset contribute in range {energy_min}-{energy_max}")
            spectral_model = datasets.models[self.source].spectral_model
            return self._nan_result(datasets, spectral_model,
                                    energy_min, energy_max)

        sliced.models = datasets.models.copy()
        return super().run(datasets=sliced)
Example #4
0
    def run(self, datasets):
        """Estimate flux for a given energy range.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset`
            Spectrum datasets.

        Returns
        -------
        result : dict
            Dict with results for the flux point.
        """
        datasets = Datasets(datasets)
        models = datasets.models

        datasets = datasets.slice_by_energy(energy_min=self.energy_min,
                                            energy_max=self.energy_max)

        if len(datasets) == 0:
            # Nothing survived the slice; fall back to the requested bounds.
            energy_min, energy_max = self.energy_min, self.energy_max
        else:
            # TODO: this relies on the energy binning of the first dataset
            edges = datasets[0].counts.geom.axes["energy"].edges
            energy_min, energy_max = edges.min(), edges.max()

        # True when at least one dataset has a selected mask pixel.
        any_contribution = np.any(
            [dataset.mask.data.any() for dataset in datasets])

        model = self.get_scale_model(models)

        with np.errstate(invalid="ignore", divide="ignore"):
            result = self.get_reference_flux_values(model.model, energy_min,
                                                    energy_max)

        if len(datasets) == 0 or not any_contribution:
            result.update(self.nan_result)
        else:
            # Fit the scale-model norm in place of the source's spectral
            # model, then derive the significance from the fitted norm.
            models[self.source].spectral_model = model
            datasets.models = models

            result.update(self._parameter_estimator.run(datasets, model.norm))
            result["sqrt_ts"] = self.get_sqrt_ts(result["ts"], result["norm"])

        return result
Example #5
0
    def run(self, datasets):
        """Estimate flux for a given energy range.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset`
            Spectrum datasets.

        Returns
        -------
        result : dict
            Dict with results for the flux point.
        """
        datasets = Datasets(datasets)
        models = datasets.models.copy()

        # A dataset contributes when any pixel of its mask is selected;
        # datasets without a mask are assumed to contribute fully.
        contributions = [
            dataset.mask.data.any() if dataset.mask is not None else True
            for dataset in datasets
        ]

        model = self.get_scale_model(models)

        energy_min, energy_max = datasets.energy_ranges

        with np.errstate(invalid="ignore", divide="ignore"):
            # Reference fluxes over the combined range of all datasets:
            # smallest lower edge to LARGEST upper edge. Bug fix: this
            # previously used energy_max.min(), which truncated the range
            # to the smallest upper edge for inhomogeneous datasets.
            result = self.get_reference_flux_values(
                model.model, energy_min.min(), energy_max.max()
            )

        if len(datasets) == 0 or not np.any(contributions):
            result.update(self.nan_result)
        else:
            # Fit the scale-model norm in place of the source's spectral
            # model, then derive the significance from the fitted norm.
            models[self.source].spectral_model = model

            datasets.models = models
            result.update(super().run(datasets, model.norm))
            result["sqrt_ts"] = self.get_sqrt_ts(result["ts"], result["norm"])

        return result
Example #6
0
def test_inhomogeneous_datasets(fermi_datasets, hess_datasets):
    """Check FluxEstimator on a mix of Fermi and HESS datasets."""
    datasets = Datasets()
    datasets.extend(fermi_datasets)
    datasets.extend(hess_datasets)

    datasets = datasets.slice_by_energy(energy_min=1 * u.TeV,
                                        energy_max=10 * u.TeV)
    datasets.models = fermi_datasets.models

    estimator = FluxEstimator(
        source="Crab Nebula", selection_optional=[], reoptimize=True
    )
    result = estimator.run(datasets)

    expected = {
        "norm": 1.190622,
        "ts": 612.50171,
        "norm_err": 0.090744,
    }
    for key, value in expected.items():
        assert_allclose(result[key], value, atol=1e-3)

    # Energy bounds span the combined range of the sliced datasets.
    assert_allclose(result["e_min"], 0.693145 * u.TeV, atol=1e-3)
    assert_allclose(result["e_max"], 10 * u.TeV, atol=1e-3)
Example #7
0
def test_map_fit(sky_model, geom, geom_etrue):
    """Fit a source model plus background norms jointly on two map datasets."""
    dataset_1 = get_map_dataset(geom, geom_etrue, name="test-1")
    dataset_2 = get_map_dataset(geom, geom_etrue, name="test-2")
    datasets = Datasets([dataset_1, dataset_2])

    models = Models(datasets.models)
    models.insert(0, sky_model)

    # Inject a "true" background norm for dataset 1 and freeze the source
    # extension before generating the fake counts.
    models["test-1-bkg"].spectral_model.norm.value = 0.5
    models["test-model"].spatial_model.sigma.frozen = True

    datasets.models = models
    # Use the model prediction itself as noise-free counts.
    dataset_2.counts = dataset_2.npred()
    dataset_1.counts = dataset_1.npred()

    # Offset the background norms slightly so the fit has work to do.
    models["test-1-bkg"].spectral_model.norm.value = 0.49
    models["test-2-bkg"].spectral_model.norm.value = 0.99

    fit = Fit(datasets)
    result = fit.run()

    assert result.success
    assert "minuit" in repr(result)

    npred = dataset_1.npred().data.sum()
    assert_allclose(npred, 7525.790688, rtol=1e-3)
    assert_allclose(result.total_stat, 21659.2139, rtol=1e-3)

    # Fitted source parameters should recover the injected values.
    pars = result.parameters
    assert_allclose(pars["lon_0"].value, 0.2, rtol=1e-2)
    assert_allclose(pars["lon_0"].error, 0.002244, rtol=1e-2)

    assert_allclose(pars["index"].value, 3, rtol=1e-2)
    assert_allclose(pars["index"].error, 0.024277, rtol=1e-2)

    assert_allclose(pars["amplitude"].value, 1e-11, rtol=1e-2)
    assert_allclose(pars["amplitude"].error, 4.216154e-13, rtol=1e-2)

    # background norm 1 (accessed by position in the flat parameter list)
    assert_allclose(pars[8].value, 0.5, rtol=1e-2)
    assert_allclose(pars[8].error, 0.015811, rtol=1e-2)

    # background norm 2
    assert_allclose(pars[11].value, 1, rtol=1e-2)
    assert_allclose(pars[11].error, 0.02147, rtol=1e-2)

    # test mask_safe evaluation: restricting the mask lowers the fit statistic
    mask_safe = geom.energy_mask(energy_min=1 * u.TeV)
    dataset_1.mask_safe = Map.from_geom(geom, data=mask_safe)
    dataset_2.mask_safe = Map.from_geom(geom, data=mask_safe)

    stat = fit.datasets.stat_sum()
    assert_allclose(stat, 14823.579908, rtol=1e-5)

    region = sky_model.spatial_model.to_region()

    initial_counts = dataset_1.counts.copy()
    with mpl_plot_check():
        dataset_1.plot_residuals(kwargs_spectral=dict(region=region))

    # check dataset has not changed
    # NOTE(review): relies on Map `==` yielding a truthy scalar — confirm
    assert initial_counts == dataset_1.counts

    # test model evaluation outside image
    dataset_1.models[0].spatial_model.lon_0.value = 150
    dataset_1.npred()
    assert not dataset_1._evaluators["test-model"].contributes
Example #8
0
# First dataset: mask out the two lowest energy bins.
dataset1 = m_dataset1.to_spectrum_dataset(region)
dataset1.mask_safe.data[0:2] = False

# Second dataset: overlapping-but-different safe mask and half the
# exposure/counts/background, to exercise stacking with unequal weights.
dataset2 = m_dataset1.to_spectrum_dataset(region)
dataset2.mask_safe.data[1:3] = False
dataset2.exposure = dataset2.exposure / 2.0
dataset2.edisp.exposure_map = dataset2.edisp.exposure_map / 2.0
dataset2.counts = dataset2.counts / 2.0
dataset2.background = dataset2.background / 2.0

# Stack the two datasets into a single one for comparison with the joint case.
datasets = Datasets([dataset1, dataset2])
dataset_stacked = datasets.stack_reduce()

# Assign the same test model to both the joint and the stacked datasets.
pwl = PowerLawSpectralModel(index=4)
model = SkyModel(spectral_model=pwl, name="test")
datasets.models = model
dataset_stacked.models = model


# Plot the energy dispersion kernels side by side (ax3/ax4 presumably
# used further down, beyond this excerpt).
plt.figure(figsize=(20, 5))
ax1 = plt.subplot(141)
ax2 = plt.subplot(142)
ax3 = plt.subplot(143)
ax4 = plt.subplot(144)

dataset1.edisp.get_edisp_kernel().plot_matrix(ax=ax1)
ax1.set_title("Energy dispersion dataset1")

dataset2.edisp.get_edisp_kernel().plot_matrix(ax=ax2)
ax2.set_title("Energy dispersion dataset2")