Example #1
    def _get_sigma_flux(cls, excess, bkg, alpha, min_sigma):
        """Compute flux to get `min_sigma` sigma detection. Returns fraction
        of minimal flux and the resulting signifiance"""

        # Coarse binning
        flux_level = np.arange(0.0, 10, 0.01)[1:]
        sigma = significance_on_off(
            n_on=excess * flux_level + bkg,
            n_off=bkg / alpha,
            alpha=alpha,
            method="lima",
        )

        the_idx = (np.abs(sigma - min_sigma)).argmin()
        min_flux = flux_level[the_idx]

        # Fine binning
        flux_level = np.arange(min_flux - 0.05, min_flux + 0.05, 0.001)
        sigma = significance_on_off(
            n_on=excess * flux_level + bkg,
            n_off=bkg / alpha,
            alpha=alpha,
            method="lima",
        )
        the_idx = (np.abs(sigma - min_sigma)).argmin()

        return flux_level[the_idx], sigma[the_idx]
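The same threshold can also be found with a root finder instead of the two-pass grid scan; a minimal sketch under the same inputs (the name `_get_sigma_flux_brentq` is ours, and it assumes the significance minus `min_sigma` changes sign across the bracket):

from scipy.optimize import brentq
from gammapy.stats import significance_on_off

def _get_sigma_flux_brentq(excess, bkg, alpha, min_sigma):
    # Signed distance from the target significance
    def delta_sigma(flux_level):
        sigma = significance_on_off(
            n_on=excess * flux_level + bkg,
            n_off=bkg / alpha,
            alpha=alpha,
            method="lima",
        )
        return sigma - min_sigma

    # Bracket mirrors the [0.01, 10) range of the grid scan above
    flux = brentq(delta_sigma, 0.01, 10)
    return flux, delta_sigma(flux) + min_sigma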
Example #2
def test_excess_matching_significance_on_off_roundtrip(p):
    if p["method"] == "direct":
        pytest.skip()

    s = significance_on_off(p["n_on"], p["n_off"], p["alpha"], p["method"])
    excess = excess_matching_significance_on_off(p["n_off"], p["alpha"], s,
                                                 p["method"])
    n_on = excess + background(p["n_off"], p["alpha"])
    s2 = significance_on_off(n_on, p["n_off"], p["alpha"], p["method"])
    assert_allclose(s, s2, atol=0.0001)
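`excess_matching_significance_on_off` is the inverse of `significance_on_off`: it returns the excess needed to reach a given significance. A standalone sketch with illustrative numbers:

from gammapy.stats import excess_matching_significance_on_off

# Excess counts needed for a 5 sigma Li & Ma detection,
# given n_off=200 and alpha=0.1 (numbers are illustrative)
excess = excess_matching_significance_on_off(200, 0.1, 5, method="lima")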
Example #3
def significance_image(infile,
                       outfile,
                       theta,
                       overwrite):
    """Make correlated significance image.

    TODO: describe
    """
    import logging

    from astropy.io import fits
    from gammapy.image import disk_correlate
    from gammapy.stats import significance_on_off

    log = logging.getLogger(__name__)

    log.info('Reading {0}'.format(infile))
    hdus = fits.open(infile)
    n_on = hdus['On'].data
    n_off = hdus['Off'].data
    a_on = hdus['OnExposure'].data
    a_off = hdus['OffExposure'].data

    log.info('Correlating n_on and a_on map')
    theta = theta / hdus['On'].header['CDELT2']
    n_on = disk_correlate(n_on, theta)
    a_on = disk_correlate(a_on, theta)

    log.info('Computing significance map')
    alpha = a_on / a_off
    significance = significance_on_off(n_on, n_off, alpha)

    log.info('Writing {0}'.format(outfile))
    fits.writeto(outfile, data=significance, header=hdus['On'].header, overwrite=overwrite)
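A hypothetical invocation, assuming an input file with the 'On', 'Off', 'OnExposure' and 'OffExposure' HDUs read above (filenames are illustrative; `theta` is in the angular unit of CDELT2, i.e. degrees):

significance_image('maps.fits', 'significance.fits', theta=0.1, overwrite=True)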
Example #4
    def info_dict(self, in_safe_energy_range=True):
        """Info dict with summary statistics, summed over energy

        Parameters
        ----------
        in_safe_energy_range : bool
            Whether to sum only in the safe energy range

        Returns
        -------
        info_dict : dict
            Dictionary with summary info.
        """
        info = super().info_dict(in_safe_energy_range)
        mask = self.mask_safe if in_safe_energy_range else slice(None)

        # TODO: handle energy dependent a_on / a_off
        info["a_on"] = self.acceptance[0].copy()

        if self.counts_off is not None:
            info["n_off"] = self.counts_off.data[mask].sum()
            info["a_off"] = self.acceptance_off[0].copy()
        else:
            info["n_off"] = 0
            info["a_off"] = 1

        info["alpha"] = self.alpha[0].copy()
        info["significance"] = significance_on_off(
            self.counts.data[mask].sum(),
            self.counts_off.data[mask].sum(),
            self.alpha[0],
        )

        return info
Example #5
def significance_image(infile,
                       outfile,
                       theta,
                       overwrite):
    """Make correlated significance image.

    TODO: describe
    """
    import logging
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s - %(message)s')
    from astropy.io import fits
    from gammapy.image import disk_correlate
    from gammapy.stats import significance_on_off

    logging.info('Reading {0}'.format(infile))
    hdus = fits.open(infile)
    n_on = hdus['On'].data
    n_off = hdus['Off'].data
    a_on = hdus['OnExposure'].data
    a_off = hdus['OffExposure'].data

    logging.info('Correlating n_on and a_on map')
    theta = theta / hdus['On'].header['CDELT2']
    n_on = disk_correlate(n_on, theta)
    a_on = disk_correlate(a_on, theta)

    logging.info('Computing significance map')
    alpha = a_on / a_off
    significance = significance_on_off(n_on, n_off, alpha)

    logging.info('Writing {0}'.format(outfile))
    fits.writeto(outfile, data=significance, header=hdus['On'].header, overwrite=overwrite)
Example #6
    def compute_lima_on_off_image(n_on, n_off, a_on, a_off, kernel):
        """Compute Li & Ma significance and flux images for on-off observations.

        Parameters
        ----------
        n_on : `~gammapy.maps.WcsNDMap`
            Counts image
        n_off : `~gammapy.maps.WcsNDMap`
            Off counts image
        a_on : `~gammapy.maps.WcsNDMap`
            Relative background efficiency in the on region
        a_off : `~gammapy.maps.WcsNDMap`
            Relative background efficiency in the off region
        kernel : `astropy.convolution.Kernel2D`
            Convolution kernel

        Returns
        -------
        images : dict
            Dictionary containing result maps
            Keys are: significance, n_on, background, excess, alpha

        See Also
        --------
        gammapy.stats.significance_on_off
        """
        # The kernel is modified later; make a copy here
        kernel = copy.deepcopy(kernel)
        kernel.normalize("peak")

        # FFT convolution adds numerical noise; call np.rint to ensure
        # integer results
        n_on_conv = np.rint(n_on.convolve(kernel.array).data)

        with np.errstate(invalid="ignore", divide="ignore"):
            background = a_on / a_off
        background *= n_off
        background.data[a_off.data == 0] = 0.0
        background_conv = background.convolve(kernel.array).data

        n_off_conv = n_off.convolve(kernel.array).data

        with np.errstate(invalid="ignore", divide="ignore"):
            alpha_conv = background_conv / n_off_conv

        significance_conv = significance_on_off(n_on_conv,
                                                n_off_conv,
                                                alpha_conv,
                                                method="lima")
        excess_conv = n_on_conv - background_conv

        return {
            "significance": n_on.copy(data=significance_conv),
            "n_on": n_on.copy(data=n_on_conv),
            "background": n_on.copy(data=background_conv),
            "excess": n_on.copy(data=excess_conv),
            "alpha": n_on.copy(data=alpha_conv),
        }
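A minimal usage sketch with toy all-ones maps (map size, kernel radius, and the factor-of-ten off exposure are illustrative; assumes the method above is reachable as a standalone or static method):

from astropy.convolution import Tophat2DKernel
from gammapy.maps import Map

n_on = Map.create(npix=50)
n_on.data += 1.0          # toy counts
n_off = n_on.copy()
a_on = n_on.copy()
a_off = a_on.copy()
a_off.data *= 10.0        # off region ten times more exposed

kernel = Tophat2DKernel(5)
images = compute_lima_on_off_image(n_on, n_off, a_on, a_off, kernel)
print(images["significance"].data.max())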
Example #7
    def get_binned_data(self, g, p, e, nbins=100, score_range=[-1, 1]):
        """Returns binned data as a dictionnary"""
        # Classification score column (hard-coded here instead of reading
        # self.config["column_definition"]["classification_output"]["name"])
        colname_clf_output = "EVENT_TYPE"

        res = dict()
        # Histogram of events
        res["hist_sig"], edges = np.histogram(
            # a=g[colname_clf_output].values,
            a=g[colname_clf_output],
            bins=nbins,
            range=score_range,
            # weights=g["weight_corrected"].values,
            weights=g["weight_corrected"],
        )
        res["hist_p"], edges = np.histogram(
            # a=p[colname_clf_output].values,
            a=p[colname_clf_output],
            bins=nbins,
            range=score_range,
            # weights=p["weight_corrected"].values,
            weights=p["weight_corrected"],
        )
        res["hist_e"], edges = np.histogram(
            # a=e[colname_clf_output].values,
            a=e[colname_clf_output],
            bins=nbins,
            range=score_range,
            # weights=e["weight_corrected"].values,
            weights=e["weight_corrected"],
        )
        res["hist_bkg"] = res["hist_p"] + res["hist_e"]
        res["score"] = (edges[:-1] + edges[1:]) / 2.0
        res["score_edges"] = edges

        # Efficiencies
        res["hist_eff_sig"] = 1.0 - np.cumsum(res["hist_sig"]) / np.sum(
            res["hist_sig"])
        res["hist_eff_bkg"] = 1.0 - np.cumsum(res["hist_bkg"]) / np.sum(
            res["hist_bkg"])

        # Cumulative statistics
        alpha = self.config["analysis"]["alpha"]
        res["cumul_noff"] = res["hist_eff_bkg"] * sum(res["hist_bkg"]) / alpha
        res["cumul_excess"] = sum(res["hist_sig"]) - np.cumsum(res["hist_sig"])
        res["cumul_non"] = res["cumul_excess"] + res["cumul_noff"] * alpha
        res["cumul_sigma"] = significance_on_off(n_on=res["cumul_non"],
                                                 n_off=res["cumul_noff"],
                                                 alpha=alpha,
                                                 method="lima")

        return res
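The cumulative significance curve is typically used to choose the score cut; a short follow-up, assuming `res` is the dictionary returned above:

import numpy as np

best = np.nanargmax(res["cumul_sigma"])
best_cut = res["score"][best]  # score cut maximizing the Li & Ma significance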
Example #8
def test_significance_on_off_against_known_background():
    # Check that the Li & Ma limit formula is correct
    # With small alpha and high counts, the significance
    # and significance_on_off should be very close
    actual = significance(n_on=1300, mu_bkg=1100, method="lima")
    assert_allclose(actual, 5.8600870406703329)
    actual = significance_on_off(n_on=1300,
                                 n_off=1100 / 1.0e-8,
                                 alpha=1e-8,
                                 method="lima")
    assert_allclose(actual, 5.8600864348078519)
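For reference, the "lima" method tested here is Li & Ma (1983) equation 17; a minimal NumPy sketch (the function name `lima_significance` is ours, not gammapy's):

import numpy as np

def lima_significance(n_on, n_off, alpha):
    # Li & Ma (1983), eq. 17, with the sign of the excess
    t1 = n_on * np.log((1 + alpha) / alpha * n_on / (n_on + n_off))
    t2 = n_off * np.log((1 + alpha) * n_off / (n_on + n_off))
    return np.sign(n_on - alpha * n_off) * np.sqrt(2 * (t1 + t2))

# Reproduces the on-off value asserted above (~5.86)
lima_significance(n_on=1300, n_off=1100 / 1e-8, alpha=1e-8)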
Example #9
def test_cta_correct_sigma():
    """Run sensitivity estimation for one CTA IRF example."""

    sens = SensitivityEstimator(irf=None,
                                livetime=5.0 * u.h,
                                gamma_min=10,
                                sigma=10.0)
    excess = sens.get_excess([1200])
    off = 1200 * 5
    on = excess + 1200
    sigma = significance_on_off(on, off, alpha=0.2)
    assert_almost_equal(sigma, 10, decimal=1)
Example #10
def test_cta_correct_sigma():
    """Run sensitivity estimation for one CTA IRF example."""

    sens = SensitivityEstimator(
        irf=None,
        livetime=5.0 * u.h,
        gamma_min=10,
        sigma=10.0
    )
    excess = sens.get_excess([1200])
    off = 1200 * 5
    on = excess + 1200
    sigma = significance_on_off(on, off, alpha=0.2)
    assert_almost_equal(sigma, 10, decimal=1)
Example #11
    def get_binned_data(self, g, p, e, nbins=100, score_range=[-1, 1]):
        """Returns binned data as a dictionnary"""
        colname_clf_output = self.config['column_definition'][
            'classification_output']['name']

        res = dict()
        # Histogram of events
        res['hist_sig'], edges = np.histogram(
            a=g[colname_clf_output].values,
            bins=nbins,
            range=score_range,
            weights=g['weight_corrected'].values)
        res['hist_p'], edges = np.histogram(
            a=p[colname_clf_output].values,
            bins=nbins,
            range=score_range,
            weights=p['weight_corrected'].values)
        res['hist_e'], edges = np.histogram(
            a=e[colname_clf_output].values,
            bins=nbins,
            range=score_range,
            weights=e['weight_corrected'].values)
        res['hist_bkg'] = res['hist_p'] + res['hist_e']
        res['score'] = (edges[:-1] + edges[1:]) / 2.
        res['score_edges'] = edges

        # Efficiencies
        res['hist_eff_sig'] = 1. - np.cumsum(res['hist_sig']) / np.sum(
            res['hist_sig'])
        res['hist_eff_bkg'] = 1. - np.cumsum(res['hist_bkg']) / np.sum(
            res['hist_bkg'])

        # Cumulative statistics
        alpha = self.config['analysis']['alpha']
        res['cumul_noff'] = res['hist_eff_bkg'] * sum(res['hist_bkg']) / alpha
        res['cumul_excess'] = sum(res['hist_sig']) - np.cumsum(res['hist_sig'])
        res['cumul_non'] = res['cumul_excess'] + res['cumul_noff'] * alpha
        res['cumul_sigma'] = significance_on_off(n_on=res['cumul_non'],
                                                 n_off=res['cumul_noff'],
                                                 alpha=alpha,
                                                 method='lima')

        return res
Example #12
    def info_dict(self, in_safe_energy_range=True):
        """Info dict with summary statistics, summed over energy

        Parameters
        ----------
        in_safe_energy_range : bool
            Whether to sum only in the safe energy range

        Returns
        -------
        info_dict : dict
            Dictionary with summary info.
        """
        info = dict()
        mask = self.mask_safe if in_safe_energy_range else slice(None)

        info["name"] = self.name
        info["livetime"] = self.livetime.copy()

        # TODO: handle energy dependent a_on / a_off
        info["a_on"] = self.acceptance[0].copy()
        info["n_on"] = self.counts.data[mask].sum()

        if self.counts_off is not None:
            info["n_off"] = self.counts_off.data[mask].sum()
            info["a_off"] = self.acceptance_off[0].copy()
        else:
            info["n_off"] = 0
            info["a_off"] = 1

        info["alpha"] = self.alpha[0].copy()
        info["background"] = self.background.data[mask].sum()
        info["excess"] = self.excess.data[mask].sum()
        info["significance"] = significance_on_off(
            self.counts.data[mask].sum(),
            self.counts_off.data[mask].sum(),
            self.alpha[0],
        )

        info["background_rate"] = info["background"] / info["livetime"]
        info["gamma_rate"] = info["excess"] / info["livetime"]
        return info
Example #13
    def sigma(self):
        """Li-Ma significance for observation statistics (float)."""
        return significance_on_off(self.n_on,
                                   self.n_off,
                                   self.alpha,
                                   method="lima")
Example #14
def test_significance_on_off(p):
    s = significance_on_off(p["n_on"], p["n_off"], p["alpha"], p["method"])
    assert_allclose(s, p["s"], atol=1e-5)
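The fixture `p` bundles the inputs and the expected significance; a hypothetical parametrization, using the known-background case from Example #8:

import pytest

@pytest.fixture(params=[
    dict(n_on=1300, n_off=1100 / 1e-8, alpha=1e-8,
         method="lima", s=5.8600864348078519),
])
def p(request):
    return request.param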
Example #15
    def target_function(on, off, alpha):
        return significance_on_off(on, off, alpha,
                                   method='lima') - self.sigma
Example #16
    def target_function(on, off, alpha):
        return significance_on_off(on, off, alpha, method='lima') - self.sigma
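`target_function` is then handed to a root finder to get the on-counts that reach `self.sigma`; a standalone sketch using `scipy.optimize.brentq` (the off counts, alpha, target, and bracket are illustrative):

from scipy.optimize import brentq
from gammapy.stats import significance_on_off

off, alpha, target_sigma = 1000.0, 0.2, 10.0

def target_function(on):
    # Zero when the Li & Ma significance hits the target
    return significance_on_off(on, off, alpha, method='lima') - target_sigma

# Bracket runs from zero excess (negative) to a large excess (positive)
on_counts = brentq(target_function, alpha * off, 100 * alpha * off)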