Example #1
def test_excess_matching_significance_on_off_roundtrip(p):
    """Roundtrip: excess_matching_significance_on_off inverts significance_on_off.

    The ``p`` fixture provides ``n_on``, ``n_off``, ``alpha`` and ``method``
    for each parametrized case.
    """
    if p["method"] == "direct":
        pytest.skip("roundtrip not checked for the 'direct' method")

    s = significance_on_off(p["n_on"], p["n_off"], p["alpha"], p["method"])
    excess = excess_matching_significance_on_off(p["n_off"], p["alpha"], s,
                                                 p["method"])
    n_on = excess + background(p["n_off"], p["alpha"])
    s2 = significance_on_off(n_on, p["n_off"], p["alpha"], p["method"])
    assert_allclose(s, s2, atol=0.0001)
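
For reference, the same roundtrip can be sketched standalone with concrete numbers. This is only an illustration: it assumes a gammapy version that still ships `significance_on_off` and `excess_matching_significance_on_off` in `gammapy.stats` (they were removed in later releases), uses the default method, and the values of `n_on`, `n_off` and `alpha` are arbitrary.

from numpy.testing import assert_allclose
from gammapy.stats import (
    excess_matching_significance_on_off,
    significance_on_off,
)

n_on, n_off, alpha = 10, 20, 0.1

# Significance of the measurement ...
s = significance_on_off(n_on, n_off, alpha)

# ... and the excess that would reproduce exactly that significance.
excess = excess_matching_significance_on_off(n_off, alpha, s)

# Rebuilding n_on (the expected background is alpha * n_off) and
# recomputing the significance closes the loop.
s2 = significance_on_off(excess + alpha * n_off, n_off, alpha)
assert_allclose(s, s2, atol=1e-4)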
Example #2
def test_excess_matching_significance_on_off():
    # Negative significance should work
    excess = excess_matching_significance_on_off(n_off=10,
                                                 alpha=0.1,
                                                 significance=-1)
    assert_allclose(excess, -0.83198, atol=1e-3)

    # Cases that can't be achieved with n_on >= 0 should return NaN
    excess = excess_matching_significance_on_off(n_off=10,
                                                 alpha=0.1,
                                                 significance=-2)
    assert np.isnan(excess)

    # Arrays should work
    excess = excess_matching_significance_on_off(n_off=[10, 20],
                                                 alpha=0.1,
                                                 significance=5)
    assert_allclose(excess, [9.82966, 12.038423], atol=1e-3)
    excess = excess_matching_significance_on_off(n_off=[10, 20],
                                                 alpha=0.1,
                                                 significance=5,
                                                 method="simple")
    assert_allclose(excess, [26.05544, 27.03444], atol=1e-3)
    excess = excess_matching_significance_on_off(n_off=10,
                                                 alpha=[0.1, 0.3],
                                                 significance=5)
    assert_allclose(excess, [9.82966, 16.664516], atol=1e-3)
    excess = excess_matching_significance_on_off(n_off=10,
                                                 alpha=0.1,
                                                 significance=[3, 5])
    assert_allclose(excess, [4.818497, 9.82966], atol=1e-3)
    excess = excess_matching_significance_on_off(n_off=[10, 20],
                                                 alpha=[0.1, 0.3],
                                                 significance=[3, 5])
    assert_allclose(excess, [4.818497, 20.68810], atol=1e-3)
    excess = excess_matching_significance_on_off(n_off=[[10, 20], [10, 20]],
                                                 alpha=0.1,
                                                 significance=5)
    assert_allclose(excess, [[9.82966, 12.038423], [9.82966, 12.038423]],
                    atol=1e-3)
Example #3
    def estimate_min_excess(self, dataset):
        """Estimate minimum excess to reach the given significance.

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Spectrum dataset

        Returns
        -------
        excess : `RegionNDMap`
            Minimal excess
        """
        n_off = dataset.counts_off.data
        excess_counts = excess_matching_significance_on_off(
            n_off=n_off, alpha=dataset.alpha.data, significance=self.sigma
        )
        # Require at least `gamma_min` excess counts in every energy bin
        is_gamma_limited = excess_counts < self.gamma_min
        excess_counts[is_gamma_limited] = self.gamma_min
        excess = dataset.background.copy()
        excess.data = excess_counts
        return excess
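
The clipping at `gamma_min` can be illustrated in isolation with plain NumPy, independent of the dataset objects. In the sketch below, `sigma = 5`, `gamma_min = 10`, `alpha = 0.1` and the `n_off` values are arbitrary illustrative numbers, not values taken from the original code.

import numpy as np
from gammapy.stats import excess_matching_significance_on_off

sigma, gamma_min, alpha = 5, 10, 0.1
n_off = np.array([5.0, 50.0, 500.0])  # off counts per energy bin

# Excess needed in each bin to reach `sigma` significance ...
excess_counts = excess_matching_significance_on_off(
    n_off=n_off, alpha=alpha, significance=sigma
)

# ... but never fewer than `gamma_min` counts: bins where the statistical
# requirement is smaller than that are "gamma limited".
is_gamma_limited = excess_counts < gamma_min
excess_counts[is_gamma_limited] = gamma_min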
Example #4
    def run(self):
        """Run the computation."""
        # TODO: let the user decide on energy binning
        # then integrate bkg model and gamma over those energy bins.
        energy = self.rmf.e_reco.center

        bkg_counts = (self.bkg.quantity.to("1/s") * self.livetime).value

        excess_counts = excess_matching_significance_on_off(
            n_off=bkg_counts / self.alpha, alpha=self.alpha, significance=self.sigma
        )
        is_gamma_limited = excess_counts < self.gamma_min
        excess_counts[is_gamma_limited] = self.gamma_min

        model = PowerLawSpectralModel(
            index=self.index, amplitude="1 cm-2 s-1 TeV-1", reference="1 TeV"
        )

        # TODO: simplify the following computation
        predictor = SpectrumEvaluator(
            model, aeff=self.arf, edisp=self.rmf, livetime=self.livetime
        )
        counts = predictor.compute_npred().data
        phi_0 = excess_counts / counts

        dnde_model = model(energy=energy)
        diff_flux = (phi_0 * dnde_model * energy ** 2).to("erg / (cm2 s)")

        # TODO: take self.bkg_sys into account
        # and add a criterion 'bkg sys'
        criterion = []
        for idx in range(len(energy)):
            if is_gamma_limited[idx]:
                c = "gamma"
            else:
                c = "significance"
            criterion.append(c)

        table = Table(
            [
                Column(
                    data=energy,
                    name="energy",
                    format="5g",
                    description="Reconstructed Energy",
                ),
                Column(
                    data=diff_flux,
                    name="e2dnde",
                    format="5g",
                    description="Energy squared times differential flux",
                ),
                Column(
                    data=excess_counts,
                    name="excess",
                    format="5g",
                    description="Number of excess counts in the bin",
                ),
                Column(
                    data=bkg_counts,
                    name="background",
                    format="5g",
                    description="Number of background counts in the bin",
                ),
                Column(
                    data=criterion,
                    name="criterion",
                    description="Sensitivity-limiting criterion",
                ),
            ]
        )
        self._results_table = table
        return table
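
The conversion from required excess counts to a flux in the last part of `run` relies on the model amplitude being a reference value of 1 cm-2 s-1 TeV-1: predicted counts scale linearly with the amplitude, so `phi_0 = excess_counts / counts` is the factor by which that reference amplitude must be rescaled. Below is a minimal sketch of this scaling for a single bin, with made-up numbers and using only astropy units rather than the gammapy classes above.

import astropy.units as u

counts_predicted = 100.0  # counts predicted for the reference amplitude
excess_needed = 25.0      # excess counts required for the target significance

# Dimensionless rescaling of the reference amplitude 1 cm-2 s-1 TeV-1
phi_0 = excess_needed / counts_predicted

# Model value dN/dE at the bin energy, for the reference amplitude
energy = 1.0 * u.TeV
dnde_ref = 1e-11 * u.Unit("cm-2 s-1 TeV-1")

# Sensitivity expressed as E^2 dN/dE, as in the "e2dnde" column
e2dnde = (phi_0 * dnde_ref * energy ** 2).to("erg / (cm2 s)")
print(e2dnde)  # about 4e-12 erg / (cm2 s) for these numbers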
Example #5
    def run(self):
        """Run the computation."""

        # TODO: let the user decide on energy binning
        # then integrate bkg model and gamma over those energy bins.
        energy = self.irf.bkg.energy.log_center()

        bkg_counts = (self.irf.bkg.data.data * self.livetime).value

        excess_counts = excess_matching_significance_on_off(
            n_off=bkg_counts / self.alpha,
            alpha=self.alpha,
            significance=self.sigma)
        is_gamma_limited = excess_counts < self.gamma_min
        excess_counts[is_gamma_limited] = self.gamma_min

        model = PowerLaw(
            index=self.slope,
            amplitude=1 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1 * u.TeV,
        )

        # TODO: simplify the following computation
        predictor = CountsPredictor(model,
                                    aeff=self.irf.aeff,
                                    edisp=self.irf.rmf,
                                    livetime=self.livetime)
        predictor.run()
        counts = predictor.npred.data.data.value
        phi_0 = excess_counts / counts * u.Unit("cm-2 s-1 TeV-1")
        # TODO: should use model.__call__ here
        dnde_model = model.evaluate(energy=energy,
                                    index=self.slope,
                                    amplitude=1,
                                    reference=1 * u.TeV)
        diff_flux = (phi_0 * dnde_model * energy**2).to("erg / (cm2 s)")

        # TODO: take self.bkg_sys into account
        # and add a criterion 'bkg sys'
        criterion = []
        for idx in range(len(energy)):
            if is_gamma_limited[idx]:
                c = "gamma"
            else:
                c = "significance"
            criterion.append(c)

        table = Table([
            Column(
                data=energy,
                name="energy",
                format="5g",
                description="Reconstructed Energy",
            ),
            Column(
                data=diff_flux,
                name="e2dnde",
                format="5g",
                description="Energy squared times differential flux",
            ),
            Column(
                data=excess_counts,
                name="excess",
                format="5g",
                description="Number of excess counts in the bin",
            ),
            Column(
                data=bkg_counts,
                name="background",
                format="5g",
                description="Number of background counts in the bin",
            ),
            Column(
                data=criterion,
                name="criterion",
                description="Sensitivity-limiting criterion",
            ),
        ])
        self._results_table = table