Example #1
def test_cash_errors(n_on, mu_bkg, result):
    stat = CashCountsStatistic(n_on, mu_bkg)
    errn = stat.compute_errn()
    errp = stat.compute_errp()

    assert_allclose(errn, result[0], atol=1e-5)
    assert_allclose(errp, result[1], atol=1e-5)
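The test fragments on this page are scraped without their `pytest.mark.parametrize` decorators and imports, and the original parameter cases are not recoverable from the fragments. A minimal self-contained sketch of how such a test is typically driven; the `(n_on, mu_bkg)` cases below are illustrative assumptions, not the gammapy test suite's actual values:

import numpy as np
import pytest
from gammapy.stats import CashCountsStatistic


@pytest.mark.parametrize("n_on, mu_bkg", [(10, 5.0), (100, 90.0)])
def test_cash_errors_are_finite(n_on, mu_bkg):
    stat = CashCountsStatistic(n_on, mu_bkg)
    # Without the original reference values we can only check basic sanity:
    # both error bounds exist and are finite for these inputs.
    assert np.isfinite(stat.compute_errn())
    assert np.isfinite(stat.compute_errp())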
Example #2
def convolved_map_dataset_counts_statistics(dataset, kernel):
    """Return CountsDataset objects containing smoothed maps from the MapDataset"""
    # Kernel is modified later make a copy here
    kernel = copy.deepcopy(kernel)
    kernel.normalize("peak")

    # FFT convolution adds numerical noise; call np.rint to restore integer
    # counts
    n_on_conv = np.rint(dataset.counts.convolve(kernel.array).data)

    if isinstance(dataset, MapDatasetOnOff):
        background = dataset.background
        background.data[dataset.acceptance_off.data == 0] = 0.0
        background_conv = background.convolve(kernel.array).data

        n_off_conv = dataset.counts_off.convolve(kernel.array).data

        with np.errstate(invalid="ignore", divide="ignore"):
            alpha_conv = background_conv / n_off_conv

        # n_on_conv, n_off_conv and alpha_conv are already plain numpy arrays
        # (ndarray.data is a memoryview, not the values), so pass them directly
        return WStatCountsStatistic(n_on_conv, n_off_conv, alpha_conv)
    else:
        background_conv = dataset.npred().convolve(kernel.array).data
        return CashCountsStatistic(n_on_conv, background_conv)
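The `np.rint` step deserves a standalone illustration: FFT-based convolution of integer counts returns floats carrying tiny numerical noise, which would otherwise break statistics that expect integer counts. A small sketch, assuming scipy is available; the arrays are synthetic:

import numpy as np
from scipy.signal import fftconvolve

# Integer Poisson counts, as in a counts map
counts = np.random.default_rng(0).poisson(3.0, size=(11, 11)).astype(float)
kernel = np.ones((3, 3))

smoothed = fftconvolve(counts, kernel, mode="same")
# The result should be a sum of integers, but FFT round-off leaves residue
print(np.abs(smoothed - np.rint(smoothed)).max())  # tiny but nonzero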
Example #3
    def info_dict(self, in_safe_energy_range=True):
        """Info dict with summary statistics, summed over energy

        Parameters
        ----------
        in_safe_energy_range : bool
            Whether to sum only in the safe energy range

        Returns
        -------
        info_dict : dict
            Dictionary with summary info.
        """
        info = dict()
        mask = self.mask_safe.data if in_safe_energy_range else slice(None)

        info["name"] = self.name
        info["livetime"] = self.livetime.copy()

        info["n_on"] = self.counts.data[mask].sum()

        info["background"] = self.background.data[mask].sum()
        info["excess"] = self.excess.data[mask].sum()
        info["significance"] = CashCountsStatistic(
            self.counts.data[mask].sum(), self.background.data[mask].sum()
        ).significance

        info["background_rate"] = info["background"] / info["livetime"]
        info["gamma_rate"] = info["excess"] / info["livetime"]
        return info
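The `mask = ... else slice(None)` line uses a small trick worth isolating: `slice(None)` is a no-op index, so the same `data[mask].sum()` expressions work with or without the boolean safe-range mask. A standalone sketch with synthetic data:

import numpy as np

data = np.arange(6)
mask_safe = np.array([True, True, True, False, False, False])

for in_safe_energy_range in (True, False):
    mask = mask_safe if in_safe_energy_range else slice(None)
    print(data[mask].sum())  # 3 inside the safe range, 15 over everything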
Example #4
def convolved_map_dataset_counts_statistics(dataset, kernel, mask,
                                            correlate_off):
    """Return CountsDataset objects containing smoothed maps from the MapDataset"""
    # Kernel is modified later make a copy here
    kernel = copy.deepcopy(kernel)
    kernel.normalize("peak")

    # FFT convolution adds numerical noise; call np.rint to restore integer
    # counts
    n_on = dataset.counts * mask
    n_on_conv = np.rint(n_on.convolve(kernel.array).data)

    if isinstance(dataset, MapDatasetOnOff):
        n_off = dataset.counts_off * mask
        npred_sig = dataset.npred_signal() * mask
        acceptance_on = dataset.acceptance * mask
        acceptance_off = dataset.acceptance_off * mask

        npred_sig_convolve = npred_sig.convolve(kernel.array)
        acceptance_on_convolve = acceptance_on.convolve(kernel.array)
        if correlate_off:
            n_off = n_off.convolve(kernel.array)
            acceptance_off = acceptance_off.convolve(kernel.array)

        with np.errstate(invalid="ignore", divide="ignore"):
            alpha = acceptance_on_convolve / acceptance_off

        # n_on_conv is already a plain numpy array, so it is passed directly
        return WStatCountsStatistic(n_on_conv, n_off.data, alpha.data,
                                    npred_sig_convolve.data)
    else:
        npred = dataset.npred() * mask
        background_conv = npred.convolve(kernel.array)
        return CashCountsStatistic(n_on_conv, background_conv.data)
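The `np.errstate` guard around the `alpha` division matters because the off acceptance can be zero; the context manager silences the divide/invalid warnings and lets inf/nan propagate. A standalone illustration:

import numpy as np

acceptance_on = np.array([1.0, 1.0, 0.0])
acceptance_off = np.array([5.0, 0.0, 0.0])

with np.errstate(invalid="ignore", divide="ignore"):
    alpha = acceptance_on / acceptance_off

print(alpha)  # [0.2 inf nan]: no warnings raised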
Example #5
def test_cash_basic(n_on, mu_bkg, result):
    stat = CashCountsStatistic(n_on, mu_bkg)
    excess = stat.excess
    significance = stat.significance
    p_value = stat.p_value

    assert_allclose(excess, result[0])
    assert_allclose(significance, result[1], atol=1e-5)
    assert_allclose(p_value, result[2], atol=1e-5)
Example #6
def test_cash_basic(n_on, mu_bkg, result):
    stat = CashCountsStatistic(n_on, mu_bkg)
    excess = stat.n_sig
    sqrt_ts = stat.sqrt_ts
    p_value = stat.p_value

    assert_allclose(excess, result[0])
    assert_allclose(sqrt_ts, result[1], atol=1e-5)
    assert_allclose(p_value, result[2], atol=1e-5)
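Examples #5 and #6 are the same test written against two different gammapy APIs: as the two fragments show, `excess`/`significance` were renamed to `n_sig`/`sqrt_ts` in a later gammapy release. A version-tolerant access sketch, if you need to support both; the attribute names are the only assumption here:

from gammapy.stats import CashCountsStatistic

stat = CashCountsStatistic(13, 5.5)
excess = stat.n_sig if hasattr(stat, "n_sig") else stat.excess
signif = stat.sqrt_ts if hasattr(stat, "sqrt_ts") else stat.significance
print(excess, signif)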
Example #7
def cli(n_observed, mu_background, value):
    """Compute significance for a Poisson count observation.

    The significance is the tail probability to observe N_OBSERVED counts
    or more, given a known background level MU_BACKGROUND."""
    stat = CashCountsStatistic(n_observed, mu_background)
    if value == "sqrt_ts":
        s = stat.sqrt_ts
    else:
        s = stat.p_value

    print(s)
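A quick interactive sketch of what this CLI prints for both `value` choices; the input counts and background level are illustrative assumptions:

from gammapy.stats import CashCountsStatistic

stat = CashCountsStatistic(13, 5.5)   # n_observed=13, mu_background=5.5
print(stat.sqrt_ts)   # signed significance of the observed excess
print(stat.p_value)   # tail probability to observe 13 counts or more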
Example #8
def _significance_cube(cubes, method):
    if method == "lima":
        scube = CashCountsStatistic(cubes["counts"],
                                    cubes["background"]).significance
    elif method == "asmooth":
        scube = _significance_asmooth(cubes["counts"], cubes["background"])
    elif method == "ts":
        raise NotImplementedError()
    else:
        raise ValueError(
            "Not a valid significance estimation method."
            " Choose one of the following: 'lima' or 'asmooth'")
    return scube
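Because `CashCountsStatistic` broadcasts over arrays, the `"lima"` branch above evaluates one significance per cube voxel. A standalone sketch with synthetic cubes; the array shapes and values are illustrative:

import numpy as np
from gammapy.stats import CashCountsStatistic

cubes = {
    "counts": np.random.default_rng(0).poisson(5.0, size=(4, 8, 8)),
    "background": np.full((4, 8, 8), 5.0),
}
scube = CashCountsStatistic(cubes["counts"], cubes["background"]).significance
print(scube.shape)  # (4, 8, 8): one significance value per voxel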
Example #9
def convolved_map_dataset_counts_statistics(dataset,
                                            kernel,
                                            apply_mask_fit=False):
    """Return CountsDataset objects containing smoothed maps from the MapDataset"""
    # Kernel is modified later make a copy here
    kernel = copy.deepcopy(kernel)
    kernel.normalize("peak")

    # Build the combined mask from the underlying arrays
    mask = np.ones(dataset.data_shape, dtype=bool)
    if dataset.mask_safe:
        mask *= dataset.mask_safe.data
    if apply_mask_fit:
        mask *= dataset.mask_fit.data

    # FFT convolution adds numerical noise; call np.rint to restore integer
    # counts
    n_on = dataset.counts * mask
    n_on = n_on.sum_over_axes(keepdims=True)
    n_on_conv = np.rint(n_on.convolve(kernel.array).data)

    if isinstance(dataset, MapDatasetOnOff):
        background = dataset.counts_off_normalised * mask
        background.data[dataset.acceptance_off.data == 0] = 0.0
        n_off = dataset.counts_off * mask

        background = background.sum_over_axes(keepdims=True)
        n_off = n_off.sum_over_axes(keepdims=True)

        background_conv = background.convolve(kernel.array)
        n_off_conv = n_off.convolve(kernel.array)

        npred_sig = dataset.npred_sig() * mask
        npred_sig = npred_sig.sum_over_axes(keepdims=True)
        mu_sig = npred_sig.convolve(kernel.array)

        with np.errstate(invalid="ignore", divide="ignore"):
            alpha_conv = background_conv / n_off_conv

        # n_on_conv is already a plain numpy array, so it is passed directly
        return WStatCountsStatistic(n_on_conv, n_off_conv.data,
                                    alpha_conv.data, mu_sig.data)
    else:
        npred = dataset.npred() * mask
        npred = npred.sum_over_axes(keepdims=True)
        background_conv = npred.convolve(kernel.array)
        return CashCountsStatistic(n_on_conv, background_conv.data)
Example #10
"""Example plot showing the profile of the Cash statistic and its connection to excess errors."""
import numpy as np
import matplotlib.pyplot as plt
from gammapy.stats import CashCountsStatistic

count_statistic = CashCountsStatistic(n_on=13, mu_bkg=5.5)
excess = count_statistic.excess

errn = count_statistic.compute_errn(1.0)
errp = count_statistic.compute_errp(1.0)

errn_2sigma = count_statistic.compute_errn(2.0)
errp_2sigma = count_statistic.compute_errp(2.0)

# We compute the Cash statistic profile
mu_signal = np.linspace(-1.5, 25, 100)
stat_values = count_statistic._stat_fcn(mu_signal)

xmin, xmax = -1.5, 25
ymin, ymax = -42, -28.0
plt.figure(figsize=(5, 5))
plt.plot(mu_signal, stat_values, color="k")
plt.xlim(xmin, xmax)
plt.ylim(ymin, ymax)
plt.xlabel(r"Number of expected signal events, $\mu_{sig}$")
plt.ylabel(r"Cash statistic value, TS")

plt.hlines(
    count_statistic.TS_max + 1,
    xmin=excess + errn,
    xmax=excess + errp,
)
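The construction in this plot can be checked numerically: by definition, the n-sigma error bounds sit where the Cash profile rises n² above its minimum (`TS_max` here). A standalone check sketch using the same inputs and the same attributes the script itself uses:

from gammapy.stats import CashCountsStatistic

stat = CashCountsStatistic(n_on=13, mu_bkg=5.5)

# In this gammapy version errn is negative and errp positive, as the
# xmin/xmax arguments of plt.hlines above rely on.
for n_sigma in (1.0, 2.0):
    lo = stat.excess + stat.compute_errn(n_sigma)
    hi = stat.excess + stat.compute_errp(n_sigma)
    # Both differences should print approximately n_sigma ** 2
    print(stat._stat_fcn(lo) - stat.TS_max, stat._stat_fcn(hi) - stat.TS_max)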
Example #11
"""Example plot showing the profile of the Cash statistic and its connection to significance."""
import numpy as np
import matplotlib.pyplot as plt
from gammapy.stats import CashCountsStatistic

count_statistic = CashCountsStatistic(n_on=13, mu_bkg=5.5)
excess = count_statistic.n_sig

# We compute the Cash statistic profile
mu_signal = np.linspace(-1.5, 25, 100)
stat_values = count_statistic._stat_fcn(mu_signal)

xmin, xmax = -1.5, 25
ymin, ymax = -42, -28.0
plt.figure(figsize=(5, 5))
plt.plot(mu_signal, stat_values, color="k")
plt.xlim(xmin, xmax)
plt.ylim(ymin, ymax)

plt.xlabel(r"Number of expected signal events, $\mu_{sig}$")
plt.ylabel(r"Cash statistic value, TS")
plt.vlines(
    excess,
    ymin=ymin,
    ymax=count_statistic.stat_max,
    linestyle="dashed",
    color="k",
    label="Best fit",
)
plt.hlines(
    count_statistic.stat_max, xmin=xmin, xmax=excess, linestyle="dashed", color="k"
)
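What the profile shows can also be stated numerically: the significance is the square root of the statistic difference between the null hypothesis (mu_sig = 0) and the best fit, which is what `sqrt_ts` reports for a positive excess. A short check sketch:

import numpy as np
from gammapy.stats import CashCountsStatistic

stat = CashCountsStatistic(n_on=13, mu_bkg=5.5)
# ts = stat_null - stat_max, and sqrt_ts = sign(n_sig) * sqrt(ts)
print(np.sqrt(stat.ts), stat.sqrt_ts)  # equal here, since the excess is positive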
Example #12
    def make_prof(self, sp_datasets):
        """ Utility to make the profile in each region

        Parameters
        ----------
        sp_datasets : `~gammapy.datasets.MapDatasets` of `~gammapy.datasets.SpectrumDataset` or \
        `~gammapy.datasets.SpectrumDatasetOnOff`
            the dataset to use for profile extraction

        Returns
        --------
        results : list of dictionary
            the list of results (list of keys: x_min, x_ref, x_max, alpha, counts, background, excess, ts, sqrt_ts, \
            err, errn, errp, ul, exposure, solid_angle)
        """
        results = []

        distance = self._get_projected_distance()

        for index, spds in enumerate(sp_datasets):
            old_model = None
            if spds.models is not None:
                old_model = spds.models
            spds.models = SkyModel(spectral_model=self.spectrum)
            e_reco = spds.counts.geom.axes["energy"].edges

            # TODO: once to_spectrum_dataset handles masks, use the following line
            # mask = spds.mask if spds.mask is not None else slice(None)
            mask = slice(None)
            if isinstance(spds, SpectrumDatasetOnOff):
                stats = WStatCountsStatistic(
                    spds.counts.data[mask][:, 0, 0],
                    spds.counts_off.data[mask][:, 0, 0],
                    spds.alpha.data[mask][:, 0, 0],
                )

            else:
                stats = CashCountsStatistic(
                    spds.counts.data[mask][:, 0, 0],
                    spds.npred_background().data[mask][:, 0, 0],
                )

            result = {
                "x_min": distance.edges[index],
                "x_max": distance.edges[index + 1],
                "x_ref": distance.center[index],
                "energy_edge": e_reco,
            }
            if isinstance(spds, SpectrumDatasetOnOff):
                result["alpha"] = stats.alpha
            result.update(
                {
                    "counts": stats.n_on,
                    "background": stats.mu_bkg,
                    "excess": stats.n_sig,
                }
            )

            result["ts"] = stats.ts
            result["sqrt_ts"] = stats.sqrt_ts

            result["err"] = stats.error * self.n_sigma

            if "errn-errp" in self.selection_optional:
                result["errn"] = stats.compute_errn(self.n_sigma)
                result["errp"] = stats.compute_errp(self.n_sigma)

            if "ul" in self.selection_optional:
                result["ul"] = stats.compute_upper_limit(self.n_sigma_ul)

            npred = spds.npred().data[mask][:, 0, 0]
            e_reco_lo = e_reco[:-1]
            e_reco_hi = e_reco[1:]
            flux = (
                stats.n_sig
                / npred
                * spds.models[0].spectral_model.integral(e_reco_lo, e_reco_hi).value
            )
            result["flux"] = flux

            result["flux_err"] = stats.error / stats.n_sig * flux

            if "errn-errp" in self.selection_optional:
                result["flux_errn"] = np.abs(result["errn"]) / stats.n_sig * flux
                result["flux_errp"] = result["errp"] / stats.n_sig * flux

            if "ul" in self.selection_optional:
                result["flux_ul"] = result["ul"] / stats.n_sig * flux

            solid_angle = spds.counts.geom.solid_angle()
            result["solid_angle"] = (
                np.full(result["counts"].shape, solid_angle.to_value("sr")) * u.sr
            )

            results.append(result)
            if old_model is not None:
                spds.models = old_model

        return results
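The flux block at the end of `make_prof` is a simple rescaling: the measured excess is converted to a flux by scaling the model-integrated flux by `n_sig / npred`, and every uncertainty then scales by the same `flux / n_sig` factor. A standalone arithmetic sketch; all numbers are illustrative:

n_sig, err = 42.0, 7.0        # measured excess and its symmetric error
npred = 50.0                  # model-predicted counts in the bin
model_flux = 1e-11            # integral of the assumed spectral model

flux = n_sig / npred * model_flux
flux_err = err / n_sig * flux   # same relative error as the counts
print(flux, flux_err)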
Example #13
def test_cash_excess_matching_significance(mu_bkg, significance, result):
    stat = CashCountsStatistic(1, mu_bkg)
    excess = stat.excess_matching_significance(significance)

    assert_allclose(excess, result, atol=1e-3)
Example #14
def test_cash_ul(n_on, mu_bkg, result):
    stat = CashCountsStatistic(n_on, mu_bkg)
    ul = stat.compute_upper_limit()

    assert_allclose(ul, result[0], atol=1e-5)
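A minimal usage sketch of the upper-limit computation being tested; the input values are illustrative, and the printed number is whatever `compute_upper_limit` returns for them:

from gammapy.stats import CashCountsStatistic

stat = CashCountsStatistic(5, 4.5)
# Upper limit on the excess at the default confidence level
print(stat.compute_upper_limit())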