Example #1
    def test(aeff):
        assert aeff.data.axes["energy_true"].nbin == 96
        assert aeff.data.axes["offset"].nbin == 6
        assert aeff.data.data.shape == (96, 6)

        assert aeff.data.axes["energy_true"].unit == "TeV"
        assert aeff.data.axes["offset"].unit == "deg"
        assert aeff.data.data.unit == "m2"

        assert_quantity_allclose(aeff.high_threshold, 100 * u.TeV, rtol=1e-3)
        assert_quantity_allclose(aeff.low_threshold, 0.870964 * u.TeV, rtol=1e-3)

        test_val = aeff.data.evaluate(energy_true="14 TeV", offset="0.2 deg")
        assert_allclose(test_val.value, 683177.5, rtol=1e-3)

        # Test ARF export
        offset = 0.236 * u.deg
        e_axis = np.logspace(0, 1, 20) * u.TeV
        effareafrom2d = aeff.to_effective_area_table(offset, e_axis)

        energy = np.sqrt(e_axis[:-1] * e_axis[1:])
        area = aeff.data.evaluate(energy_true=energy, offset=offset)

        energy_axis_true = MapAxis.from_energy_edges(e_axis, name="energy_true")
        effarea1d = EffectiveAreaTable(energy_axis_true=energy_axis_true, data=area)

        actual = effareafrom2d.data.evaluate(energy_true="2.34 TeV")
        desired = effarea1d.data.evaluate(energy_true="2.34 TeV")
        assert_equal(actual, desired)

        # Test ARF export #2
        offset = 1.2 * u.deg
        actual = aeff.to_effective_area_table(offset=offset).data.data
        desired = aeff.data.evaluate(offset=offset)
        assert_allclose(actual.value, desired.value.squeeze(), rtol=1e-9)
Example #2
    def setup(self):
        etrue = np.logspace(-1, 1, 10) * u.TeV
        self.e_true = etrue
        ereco = np.logspace(-1, 1, 5) * u.TeV
        elo = ereco[:-1]
        ehi = ereco[1:]
        self.e_reco = ereco
        self.aeff = EffectiveAreaTable(etrue[:-1], etrue[1:],
                                       np.ones(9) * u.cm**2)
        self.edisp = EDispKernel.from_diagonal_response(etrue, ereco)

        data = np.ones(elo.shape)
        data[-1] = 0  # to test stats calculation with empty bins
        self.on_counts = CountsSpectrum(elo, ehi, data)
        self.off_counts = CountsSpectrum(elo, ehi, np.ones(elo.shape) * 10)

        start = u.Quantity([0], "s")
        stop = u.Quantity([1000], "s")
        time_ref = Time("2010-01-01 00:00:00.0")
        self.gti = GTI.create(start, stop, time_ref)
        self.livetime = self.gti.time_sum

        self.dataset = SpectrumDatasetOnOff(
            counts=self.on_counts,
            counts_off=self.off_counts,
            aeff=self.aeff,
            edisp=self.edisp,
            livetime=self.livetime,
            acceptance=np.ones(elo.shape),
            acceptance_off=np.ones(elo.shape) * 10,
            name="test",
            gti=self.gti,
        )
Example #3
    def test_spectrum_dataset_stack_nondiagonal_no_bkg(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp1 = EDispKernel.from_gauss(self.src.energy.edges,
                                        self.src.energy.edges, 0.1, 0.0)
        livetime = self.livetime
        dataset1 = SpectrumDataset(counts=None,
                                   livetime=livetime,
                                   aeff=aeff,
                                   edisp=edisp1,
                                   background=None)

        livetime2 = livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:], aeff.data.data)
        edisp2 = EDispKernel.from_gauss(self.src.energy.edges,
                                        self.src.energy.edges, 0.2, 0.0)
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp2,
            background=None,
        )
        dataset1.stack(dataset2)

        assert dataset1.counts is None
        assert dataset1.background is None
        assert dataset1.livetime == 2 * self.livetime
        assert_allclose(dataset1.aeff.data.data.to_value("m2"),
                        aeff.data.data.to_value("m2"))
        assert_allclose(dataset1.edisp.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
        assert_allclose(dataset1.edisp.get_resolution(1 * u.TeV),
                        0.1581,
                        atol=1e-2)
Example #4
    def setup(self):
        etrue = np.logspace(-1, 1, 10) * u.TeV
        self.e_true = etrue
        ereco = np.logspace(-1, 1, 5) * u.TeV
        elo = ereco[:-1]
        ehi = ereco[1:]

        self.aeff = EffectiveAreaTable(etrue[:-1], etrue[1:],
                                       np.ones(9) * u.cm**2)
        self.edisp = EnergyDispersion.from_diagonal_response(etrue, ereco)

        data = np.ones(elo.shape)
        data[-1] = 0  # to test stats calculation with empty bins
        self.on_counts = CountsSpectrum(elo, ehi, data)
        self.off_counts = CountsSpectrum(elo, ehi, np.ones(elo.shape) * 10)

        self.livetime = 1000 * u.s

        self.dataset = SpectrumDatasetOnOff(
            counts=self.on_counts,
            counts_off=self.off_counts,
            aeff=self.aeff,
            edisp=self.edisp,
            livetime=self.livetime,
            acceptance=np.ones(elo.shape),
            acceptance_off=np.ones(elo.shape) * 10,
            obs_id="test",
        )
Example #5
def test_spectrum_dataset_stack_nondiagonal_no_bkg(spectrum_dataset):
    energy = spectrum_dataset.counts.geom.axes[0].edges

    aeff = EffectiveAreaTable.from_parametrization(energy, "HESS")
    edisp1 = EDispKernel.from_gauss(energy, energy, 0.1, 0)
    livetime = 100 * u.s
    spectrum_dataset1 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime, aeff=aeff, edisp=edisp1,
    )

    livetime2 = livetime
    aeff2 = EffectiveAreaTable(
        energy[:-1], energy[1:], aeff.data.data
    )
    edisp2 = EDispKernel.from_gauss(energy, energy, 0.2, 0.0)
    spectrum_dataset2 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime2,
        aeff=aeff2,
        edisp=edisp2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)

    assert spectrum_dataset1.background is None
    assert spectrum_dataset1.livetime == 2 * livetime
    assert_allclose(
        spectrum_dataset1.aeff.data.data.to_value("m2"), aeff.data.data.to_value("m2")
    )
    assert_allclose(spectrum_dataset1.edisp.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
    assert_allclose(spectrum_dataset1.edisp.get_resolution(1 * u.TeV), 0.1581, atol=1e-2)
Example #6
def sens():
    etrue = np.logspace(0, 1, 21) * u.TeV
    elo = etrue[:-1]
    ehi = etrue[1:]
    area = np.zeros(20) + 1e6 * u.m**2

    arf = EffectiveAreaTable(energy_lo=elo, energy_hi=ehi, data=area)

    ereco = np.logspace(0, 1, 5) * u.TeV
    rmf = EnergyDispersion.from_diagonal_response(etrue, ereco)

    bkg_array = np.ones(4)
    bkg_array[-1] = 1e-3
    bkg = CountsSpectrum(energy_lo=ereco[:-1],
                         energy_hi=ereco[1:],
                         data=bkg_array,
                         unit="s-1")

    sens = SensitivityEstimator(arf=arf,
                                rmf=rmf,
                                bkg=bkg,
                                livetime=1 * u.h,
                                index=2,
                                gamma_min=20,
                                alpha=0.2)
    sens.run()
    return sens
Example #7
    def make_mask_energy_aeff_max(self, dataset):
        """Make safe energy mask from effective area maximum value.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.SpectrumDataset`
            Dataset to compute mask for.

        Returns
        -------
        mask_safe : `~numpy.ndarray`
            Safe data range mask.
        """
        geom = dataset._geom

        if self.position is None:
            position = PointSkyRegion(dataset.counts.geom.center_skydir)
        else:
            position = PointSkyRegion(self.position)

        exposure = dataset.exposure.get_spectrum(position)

        energy = exposure.geom.axes["energy_true"]
        aeff = EffectiveAreaTable(
            energy_axis_true=energy,
            data=(exposure.quantity / dataset.gti.time_sum).squeeze(),
        )
        aeff_thres = (self.aeff_percent / 100) * aeff.max_area
        e_min = aeff.find_energy(aeff_thres)
        return geom.energy_mask(emin=e_min)
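
The method above encodes a simple rule: take the effective area as the exposure divided by the livetime, find the lowest energy at which it exceeds `aeff_percent` percent of its maximum, and mask every bin below that energy. A minimal NumPy sketch of the same rule, with made-up numbers and no Gammapy classes, might look like this:

import numpy as np

# Hypothetical effective-area curve on true-energy bin centers (units: m2, TeV).
energy_center = np.geomspace(0.1, 100, 20)
aeff = 1e5 * np.exp(-0.5 * np.log(energy_center / 10.0) ** 2)

aeff_percent = 10                              # threshold as a percentage of the maximum
aeff_thres = aeff_percent / 100 * aeff.max()

# The first bin whose effective area exceeds the threshold sets the safe energy minimum.
e_min = energy_center[np.argmax(aeff >= aeff_thres)]
mask_safe = energy_center >= e_min             # analogous to geom.energy_mask(emin=e_min)
print(e_min, mask_safe.sum(), "bins kept")
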
Example #8
    def make_mask_energy_aeff_max(self, dataset):
        """Make safe energy mask from effective area maximum value.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.SpectrumDataset`
            Dataset to compute mask for.

        Returns
        -------
        mask_safe : `~numpy.ndarray`
            Safe data range mask.
        """
        geom = dataset._geom

        if isinstance(dataset, MapDataset):
            position = self.position
            if position is None:
                position = dataset.counts.geom.center_skydir
            exposure = dataset.exposure
            energy = exposure.geom.get_axis_by_name("energy_true")
            coord = MapCoord.create({"skycoord": position, "energy_true": energy.center})
            exposure_1d = exposure.interp_by_coord(coord)
            aeff = EffectiveAreaTable(
                energy_lo=energy.edges[:-1],
                energy_hi=energy.edges[1:],
                data=exposure_1d,
            )
        else:
            aeff = dataset.aeff

        aeff_thres = (self.aeff_percent / 100) * aeff.max_area
        e_min = aeff.find_energy(aeff_thres)
        return geom.energy_mask(emin=e_min)
Example #9
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV",
                                             "10 TeV",
                                             nbin=30,
                                             name="energy_true")

    aeff = EffectiveAreaTable.from_parametrization(energy.edges, "HESS")
    edisp = EDispKernelMap.from_diagonal_response(energy,
                                                  energy_true,
                                                  geom=geom.to_image())
    livetime = 100 * u.s
    background = spectrum_dataset.background
    spectrum_dataset1 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime,
        aeff=aeff,
        edisp=edisp.copy(),
        background=background.copy(),
    )

    livetime2 = 0.5 * livetime
    aeff2 = EffectiveAreaTable(energy.edges[:-1], energy.edges[1:],
                               2 * aeff.data.data)
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    spectrum_dataset2 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime2,
        aeff=aeff2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert spectrum_dataset1.livetime == 1.5 * livetime
    assert_allclose(spectrum_dataset1.background.data[1:],
                    3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])
    assert_allclose(
        spectrum_dataset1.aeff.data.data.to_value("m2"),
        4.0 / 3 * aeff.data.data.to_value("m2"),
    )
    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
Example #10
    def create(cls,
               e_reco,
               e_true=None,
               region=None,
               reference_time="2000-01-01",
               name=None,
               meta_table=None):
        """Creates empty spectrum dataset.

        Empty containers are created with the correct geometry.
        counts, background and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~gammapy.maps.MapAxis`
            Counts energy axis. Its name must be "energy".
        e_true : `~gammapy.maps.MapAxis`
            Effective area table energy axis. Its name must be "energy_true".
            If not set, the reco energy axis is used. Default is None.
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        meta_table : `~astropy.table.Table`
            Table listing information on the observations used to create the dataset.
            One line per observation for stacked datasets.
        """
        if e_true is None:
            e_true = e_reco.copy(name="energy_true")

        if region is None:
            region = "icrs;circle(0, 0, 1)"

        counts = RegionNDMap.create(region=region, axes=[e_reco])
        background = RegionNDMap.create(region=region, axes=[e_reco])

        aeff = EffectiveAreaTable(
            e_true.edges[:-1],
            e_true.edges[1:],
            np.zeros(e_true.edges[:-1].shape) * u.m**2,
        )
        edisp = EDispKernel.from_diagonal_response(e_true.edges, e_reco.edges)
        mask_safe = RegionNDMap.from_geom(counts.geom, dtype="bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)
        livetime = gti.time_sum

        return SpectrumDataset(
            counts=counts,
            aeff=aeff,
            edisp=edisp,
            mask_safe=mask_safe,
            background=background,
            livetime=livetime,
            gti=gti,
            name=name,
        )
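
As the docstring above states, `create` returns a dataset whose counts and background are empty, whose effective area is zero, and whose energy dispersion is diagonal. A hedged usage sketch, assuming the import paths of the Gammapy version this snippet comes from (the module locations have moved between releases):

from gammapy.maps import MapAxis
from gammapy.datasets import SpectrumDataset  # assumed path; older releases exposed the class elsewhere

energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=10, name="energy")
energy_axis_true = MapAxis.from_energy_bounds("0.05 TeV", "20 TeV", nbin=20, name="energy_true")

empty = SpectrumDataset.create(
    e_reco=energy_axis,
    e_true=energy_axis_true,
    region="icrs;circle(83.63, 22.01, 0.5)",  # any region string or SkyRegion
    name="empty-spectrum",
)
print(empty.counts.data.sum())  # 0 everywhere until the dataset is filled
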
Example #11
    def setup(self):
        etrue = np.logspace(-1, 1, 10) * u.TeV
        self.e_true = etrue
        ereco = np.logspace(-1, 1, 5) * u.TeV
        elo = ereco[:-1]
        ehi = ereco[1:]
        self.e_reco = ereco
        self.aeff = EffectiveAreaTable(etrue[:-1], etrue[1:], np.ones(9) * u.cm ** 2)
        self.edisp = EDispKernel.from_diagonal_response(etrue, ereco)

        start = u.Quantity([0], "s")
        stop = u.Quantity([1000], "s")
        time_ref = Time("2010-01-01 00:00:00.0")
        self.gti = GTI.create(start, stop, time_ref)
        self.livetime = self.gti.time_sum

        self.on_region = make_region("icrs;circle(0.,1.,0.1)")
        off_region = make_region("icrs;box(0.,1.,0.1, 0.2,30)")
        self.off_region = off_region.union(
            make_region("icrs;box(-1.,-1.,0.1, 0.2,150)")
        )
        self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

        data = np.ones(elo.shape)
        data[-1] = 0  # to test stats calculation with empty bins

        axis = MapAxis.from_edges(ereco, name="energy", interp="log")
        self.on_counts = RegionNDMap.create(
            region=self.on_region, wcs=self.wcs, axes=[axis]
        )
        self.on_counts.data += 1
        self.on_counts.data[-1] = 0

        self.off_counts = RegionNDMap.create(
            region=self.off_region, wcs=self.wcs, axes=[axis]
        )
        self.off_counts.data += 10

        acceptance = RegionNDMap.from_geom(self.on_counts.geom)
        acceptance.data += 1

        data = np.ones(elo.shape)
        data[-1] = 0

        acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
        acceptance_off.data += 10

        self.dataset = SpectrumDatasetOnOff(
            counts=self.on_counts,
            counts_off=self.off_counts,
            aeff=self.aeff,
            edisp=self.edisp,
            livetime=self.livetime,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            name="test",
            gti=self.gti,
        )
Example #12
    def create(cls,
               e_reco,
               e_true=None,
               region=None,
               reference_time="2000-01-01",
               name=None):
        """Creates empty spectrum dataset.

        Empty containers are created with the correct geometry.
        counts, background and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~astropy.units.Quantity`
            Edges of the counts vector.
        e_true : `~astropy.units.Quantity`
            Edges of the effective area table. If not set, the reco energy edges are used. Default is None.
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        """
        if e_true is None:
            e_true = e_reco

        if region is None:
            region = "icrs;circle(0, 0, 1)"

        # TODO: change .create() API
        energy = MapAxis.from_edges(e_reco, interp="log", name="energy")
        counts = RegionNDMap.create(region=region, axes=[energy])
        background = RegionNDMap.create(region=region, axes=[energy])

        aeff = EffectiveAreaTable(e_true[:-1], e_true[1:],
                                  np.zeros(e_true[:-1].shape) * u.m**2)
        edisp = EDispKernel.from_diagonal_response(e_true, e_reco)
        mask_safe = RegionNDMap.from_geom(counts.geom, dtype="bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)
        livetime = gti.time_sum

        return SpectrumDataset(
            counts=counts,
            aeff=aeff,
            edisp=edisp,
            mask_safe=mask_safe,
            background=background,
            livetime=livetime,
            gti=gti,
            name=name,
        )
Example #13
    def create(cls,
               e_reco,
               e_true=None,
               region=None,
               reference_time="2000-01-01",
               name=None):
        """Create empty SpectrumDatasetOnOff.

        Empty containers are created with the correct geometry.
        counts, counts_off and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~astropy.units.Quantity`
            Edges of the counts vector.
        e_true : `~astropy.units.Quantity`
            Edges of the effective area table. If not set, the reco energy edges are used. Default is None.
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        """
        if e_true is None:
            e_true = e_reco

        counts = CountsSpectrum(e_reco[:-1], e_reco[1:], region=region)
        counts_off = CountsSpectrum(e_reco[:-1], e_reco[1:], region=region)
        aeff = EffectiveAreaTable(e_true[:-1], e_true[1:],
                                  np.zeros(e_true[:-1].shape) * u.m**2)
        edisp = EDispKernel.from_diagonal_response(e_true, e_reco)
        mask_safe = np.zeros_like(counts.data, "bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)
        livetime = gti.time_sum
        acceptance = np.ones_like(counts.data, int)
        acceptance_off = np.ones_like(counts.data, int)

        return SpectrumDatasetOnOff(
            counts=counts,
            counts_off=counts_off,
            aeff=aeff,
            edisp=edisp,
            mask_safe=mask_safe,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            livetime=livetime,
            gti=gti,
            name=name,
        )
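
The ON/OFF variant accepts plain `Quantity` energy edges rather than `MapAxis` objects in this version. A short sketch, under the same caveat that the import path is an assumption and varies across Gammapy releases:

import numpy as np
import astropy.units as u
from gammapy.datasets import SpectrumDatasetOnOff  # assumed path

e_reco = np.logspace(-1, 1, 5) * u.TeV   # reco energy edges
e_true = np.logspace(-1, 1, 10) * u.TeV  # true energy edges for the effective area

empty_onoff = SpectrumDatasetOnOff.create(e_reco=e_reco, e_true=e_true, name="empty-onoff")
print(empty_onoff)
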
Example #14
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = np.logspace(-1, 1, 31) * u.TeV
    aeff = EffectiveAreaTable.from_parametrization(energy, "HESS")
    edisp = EDispKernel.from_diagonal_response(energy, energy)
    livetime = 100 * u.s
    background = spectrum_dataset.background
    spectrum_dataset1 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime,
        aeff=aeff,
        edisp=edisp,
        background=background.copy(),
    )

    livetime2 = 0.5 * livetime
    aeff2 = EffectiveAreaTable(energy[:-1], energy[1:], 2 * aeff.data.data)
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    spectrum_dataset2 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime2,
        aeff=aeff2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert spectrum_dataset1.livetime == 1.5 * livetime
    assert_allclose(spectrum_dataset1.background.data[1:],
                    3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])
    assert_allclose(
        spectrum_dataset1.aeff.data.data.to_value("m2"),
        4.0 / 3 * aeff.data.data.to_value("m2"),
    )
    assert_allclose(spectrum_dataset1.edisp.pdf_matrix[1:],
                    edisp.pdf_matrix[1:])
    assert_allclose(spectrum_dataset1.edisp.pdf_matrix[0],
                    0.5 * edisp.pdf_matrix[0])
Example #15
    def test_spectrum_dataset_stack_diagonal_safe_mask(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EDispKernel.from_diagonal_response(self.src.energy.edges,
                                                   self.src.energy.edges)
        livetime = self.livetime
        dataset1 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime,
            aeff=aeff,
            edisp=edisp,
            background=self.bkg.copy(),
        )

        livetime2 = 0.5 * livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:],
                                   2 * aeff.data.data)
        bkg2 = CountsSpectrum(
            self.src.energy.edges[:-1],
            self.src.energy.edges[1:],
            data=2 * self.bkg.data,
        )
        safe_mask2 = np.ones_like(self.src.data, bool)
        safe_mask2[0] = False
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp,
            background=bkg2,
            mask_safe=safe_mask2,
        )
        dataset1.stack(dataset2)

        assert_allclose(dataset1.counts.data[1:], self.src.data[1:] * 2)
        assert_allclose(dataset1.counts.data[0], self.src.data[0])
        assert dataset1.livetime == 1.5 * self.livetime
        assert_allclose(dataset1.background.data[1:], 3 * self.bkg.data[1:])
        assert_allclose(dataset1.background.data[0], self.bkg.data[0])
        assert_allclose(
            dataset1.aeff.data.data.to_value("m2"),
            4.0 / 3 * aeff.data.data.to_value("m2"),
        )
        assert_allclose(dataset1.edisp.pdf_matrix[1:], edisp.pdf_matrix[1:])
        assert_allclose(dataset1.edisp.pdf_matrix[0],
                        0.5 * edisp.pdf_matrix[0])
Example #16
    def test_no_edisp(self):
        dataset = self.datasets[0].copy()

        # Bring aeff in RECO space
        energy = dataset.counts.geom.axes[0].center
        data = dataset.aeff.data.evaluate(energy_true=energy)
        e_edges = dataset.counts.geom.axes[0].edges

        dataset.aeff = EffectiveAreaTable(
            data=data, energy_lo=e_edges[:-1], energy_hi=e_edges[1:]
        )
        dataset.edisp = None
        dataset.models = self.pwl

        fit = Fit([dataset])
        result = fit.run()
        assert_allclose(result.parameters["index"].value, 2.7961, atol=0.02)
Example #17
    def test_npred_no_edisp(self):
        const = 1 * u.Unit("cm-2 s-1 TeV-1")
        model = SkyModel(spectral_model=ConstantSpectralModel(const=const))
        livetime = 1 * u.s

        e_reco = self.on_counts.geom.axes[0].edges
        aeff = EffectiveAreaTable(e_reco[:-1], e_reco[1:], np.ones(4) * u.cm ** 2)
        dataset = SpectrumDatasetOnOff(
            counts=self.on_counts,
            counts_off=self.off_counts,
            aeff=aeff,
            models=model,
            livetime=livetime,
        )

        energy = aeff.energy.edges
        expected = aeff.data.data[0] * (energy[-1] - energy[0]) * const * livetime

        assert_allclose(dataset.npred_sig().data.sum(), expected.value)
Example #18
    def test_EffectiveAreaTable(tmpdir, aeff):
        arf = aeff.to_effective_area_table(offset=0.3 * u.deg)

        assert_quantity_allclose(arf.data.evaluate(), arf.data.data)

        with mpl_plot_check():
            arf.plot()

        filename = str(tmpdir / "effarea_test.fits")
        arf.write(filename)
        arf2 = EffectiveAreaTable.read(filename)

        assert_quantity_allclose(arf.data.evaluate(), arf2.data.evaluate())

        test_aeff = 0.6 * arf.max_area
        node_above = np.where(arf.data.data > test_aeff)[0][0]
        energy = arf.data.axis("energy")
        ener_above = energy.center[node_above]
        ener_below = energy.center[node_above - 1]
        test_ener = arf.find_energy(test_aeff)

        assert ener_below < test_ener < ener_above

        elo_threshold = arf.find_energy(0.1 * arf.max_area)
        assert elo_threshold.unit == "TeV"
        assert_allclose(elo_threshold.value, 0.554086, rtol=1e-3)

        ehi_threshold = arf.find_energy(0.9 * arf.max_area,
                                        emin=30 * u.TeV,
                                        emax=100 * u.TeV)
        assert ehi_threshold.unit == "TeV"
        assert_allclose(ehi_threshold.value, 53.347217, rtol=1e-3)

        # Test evaluation outside safe range
        data = [np.nan, np.nan, 0, 0, 1, 2, 3, np.nan, np.nan]
        energy = np.logspace(0, 10, 10) * u.TeV
        aeff = EffectiveAreaTable(data=data,
                                  energy_lo=energy[:-1],
                                  energy_hi=energy[1:])
        vals = aeff.evaluate_fill_nan()
        assert vals[1] == 0
        assert vals[-1] == 3
Example #19
    def test_EffectiveAreaTable(tmp_path, aeff):
        arf = aeff.to_effective_area_table(offset=0.3 * u.deg)

        assert_quantity_allclose(arf.data.evaluate(), arf.data.data)

        with mpl_plot_check():
            arf.plot()

        arf.write(tmp_path / "tmp.fits")
        arf2 = EffectiveAreaTable.read(tmp_path / "tmp.fits")

        assert_quantity_allclose(arf.data.evaluate(), arf2.data.evaluate())

        test_aeff = 0.6 * arf.max_area
        node_above = np.where(arf.data.data > test_aeff)[0][0]
        energy = arf.data.axes["energy_true"]
        ener_above = energy.center[node_above]
        ener_below = energy.center[node_above - 1]
        test_ener = arf.find_energy(test_aeff)

        assert ener_below < test_ener < ener_above

        elo_threshold = arf.find_energy(0.1 * arf.max_area)
        assert elo_threshold.unit == "TeV"
        assert_allclose(elo_threshold.value, 0.554086, rtol=1e-3)

        ehi_threshold = arf.find_energy(0.9 * arf.max_area,
                                        emin=30 * u.TeV,
                                        emax=100 * u.TeV)
        assert ehi_threshold.unit == "TeV"
        assert_allclose(ehi_threshold.value, 53.347217, rtol=1e-3)

        # Test evaluation outside safe range
        data = [np.nan, np.nan, 0, 0, 1, 2, 3, np.nan, np.nan]
        energy_axis_true = MapAxis.from_energy_bounds("1 TeV",
                                                      "10 TeV",
                                                      nbin=9,
                                                      name="energy_true")
        aeff = EffectiveAreaTable(data=data, energy_axis_true=energy_axis_true)
        vals = aeff.evaluate_fill_nan()
        assert vals[1] == 0
        assert vals[-1] == 3
Example #20
def test_apply_containment_fraction():
    n_edges_energy = 5
    energy = energy_logspace(0.1, 10.0, nbins=n_edges_energy + 1, unit="TeV")
    area = np.ones(n_edges_energy) * 4 * u.m**2
    aeff = EffectiveAreaTable(energy[:-1], energy[1:], data=area)

    nrad = 100
    rad = Angle(np.linspace(0, 0.5, nrad), "deg")
    psf_table = TablePSF.from_shape(shape="disk", width="0.2 deg", rad=rad)
    psf_values = (np.resize(psf_table.psf_value.value,
                            (n_edges_energy, nrad)) * psf_table.psf_value.unit)
    edep_psf_table = EnergyDependentTablePSF(aeff.energy.center,
                                             rad,
                                             psf_value=psf_values)

    new_aeff = apply_containment_fraction(aeff, edep_psf_table,
                                          Angle("0.1 deg"))

    assert_allclose(new_aeff.data.data.value, 1.0, rtol=5e-4)
    assert new_aeff.data.data.unit == "m2"
Example #21
    def create(cls, e_reco, e_true=None, reference_time="2000-01-01"):
        """Creates empty SpectrumDataset

        Empty containers are created with the correct geometry.
        counts, background and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~astropy.units.Quantity`
            Edges of the counts vector.
        e_true : `~astropy.units.Quantity`
            Edges of the effective area table. If not set, the reco energy edges are used. Default is None.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        """
        if e_true is None:
            e_true = e_reco

        counts = CountsSpectrum(e_reco[:-1], e_reco[1:])
        background = CountsSpectrum(e_reco[:-1], e_reco[1:])
        aeff = EffectiveAreaTable(e_true[:-1], e_true[1:],
                                  np.zeros(e_true[:-1].shape) * u.m**2)
        edisp = EnergyDispersion.from_diagonal_response(e_true, e_reco)
        mask_safe = np.zeros_like(counts.data, "bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)
        livetime = gti.time_sum

        return SpectrumDataset(
            counts=counts,
            aeff=aeff,
            edisp=edisp,
            mask_safe=mask_safe,
            background=background,
            livetime=livetime,
            gti=gti,
        )
Example #22
def test_model(model):
    print(model)
    print(model(energy=Q(10, 'TeV')))
    print(model.integral(emin=Q(1, 'TeV'), emax=Q(2, 'TeV')))

    # plot
    # butterfly
    # npred
    reco_bins = 5
    true_bins = 10
    e_reco = Q(np.logspace(-1, 1, reco_bins + 1), 'TeV')
    e_true = Q(np.logspace(-1.5, 1.5, true_bins + 1), 'TeV')
    livetime = Q(26, 'min')
    aeff_data = Q(np.ones(true_bins) * 1e5, 'cm2')
    aeff = EffectiveAreaTable(energy=e_true, data=aeff_data)
    edisp_data = make_perfect_resolution(e_true, e_reco)
    edisp = EnergyDispersion(edisp_data, EnergyBounds(e_true),
                             EnergyBounds(e_reco))
    npred = calculate_predicted_counts(model=model,
                                       livetime=livetime,
                                       aeff=aeff,
                                       edisp=edisp)
    print(npred.data)
Example #23
def make_observation_list():
    """obs with dummy IRF"""
    nbin = 3
    energy = np.logspace(-1, 1, nbin + 1) * u.TeV
    livetime = 2 * u.h
    data_on = np.arange(nbin)
    dataoff_1 = np.ones(3)
    dataoff_2 = np.ones(3) * 3
    dataoff_1[1] = 0
    dataoff_2[1] = 0
    on_vector = CountsSpectrum(energy_lo=energy[:-1],
                               energy_hi=energy[1:],
                               data=data_on)
    off_vector1 = CountsSpectrum(energy_lo=energy[:-1],
                                 energy_hi=energy[1:],
                                 data=dataoff_1)
    off_vector2 = CountsSpectrum(energy_lo=energy[:-1],
                                 energy_hi=energy[1:],
                                 data=dataoff_2)
    aeff = EffectiveAreaTable(energy_lo=energy[:-1],
                              energy_hi=energy[1:],
                              data=np.ones(nbin) * 1e5 * u.m**2)
    edisp = EnergyDispersion.from_gauss(e_true=energy,
                                        e_reco=energy,
                                        sigma=0.2,
                                        bias=0)

    time_ref = Time("2010-01-01")
    gti1 = make_gti({"START": [5, 6, 1, 2], "STOP": [8, 7, 3, 4]}, time_ref=time_ref)
    gti2 = make_gti({"START": [14], "STOP": [15]}, time_ref=time_ref)

    obs1 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector1,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        mask_safe=np.ones(on_vector.energy.nbin, dtype=bool),
        acceptance=1,
        acceptance_off=2,
        name="2",
        gti=gti1,
    )
    obs2 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector2,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        mask_safe=np.ones(on_vector.energy.nbin, dtype=bool),
        acceptance=1,
        acceptance_off=4,
        name="2",
        gti=gti2,
    )

    obs_list = [obs1, obs2]
    return obs_list
Example #24
    def to_spectrum_dataset(self, on_region, containment_correction=False):
        """Return a ~gammapy.spectrum.SpectrumDataset from on_region.

        Counts and background are summed in the on_region.

        Effective area is taken from the average exposure divided by the livetime,
        which is assumed to be the sum of the GTIs.

        EnergyDispersion is obtained at the on_region center.
        Only regions with centers are supported.

        Parameters
        ----------
        on_region : `~regions.SkyRegion`
            The input ON region on which to extract the spectrum.
        containment_correction : bool
            Apply containment correction for point sources and circular ON regions.

        Returns
        -------
        dataset : `~gammapy.spectrum.SpectrumDataset`
            The resulting reduced dataset.
        """
        if self.gti is not None:
            livetime = self.gti.time_sum
        else:
            raise ValueError("No GTI in `MapDataset`, cannot compute livetime")

        if self.counts is not None:
            counts = self.counts.get_spectrum(on_region, np.sum)
        else:
            counts = None

        if self.background_model is not None:
            background = self.background_model.evaluate().get_spectrum(
                on_region, np.sum
            )
        else:
            background = None

        if self.exposure is not None:
            exposure = self.exposure.get_spectrum(on_region, np.mean)
            aeff = EffectiveAreaTable(
                energy_lo=exposure.energy.edges[:-1],
                energy_hi=exposure.energy.edges[1:],
                data=exposure.data / livetime,
            )
        else:
            aeff = None

        if containment_correction:
            if not isinstance(on_region, CircleSkyRegion):
                raise TypeError(
                    "Containement correction is only supported for"
                    " `CircleSkyRegion`."
                )
            elif self.psf is None or isinstance(self.psf, PSFKernel):
                raise ValueError("No PSFMap set. Containement correction impossible")
            else:
                psf_table = self.psf.get_energy_dependent_table_psf(on_region.center)
                aeff = apply_containment_fraction(aeff, psf_table, on_region.radius)

        if self.edisp is not None:
            if isinstance(self.edisp, EnergyDispersion):
                edisp = self.edisp
            else:
                edisp = self.edisp.get_energy_dispersion(
                    on_region.center, self._energy_axis
                )
        else:
            edisp = None

        return SpectrumDataset(
            counts=counts,
            background=background,
            aeff=aeff,
            edisp=edisp,
            livetime=livetime,
            gti=self.gti,
            name=self.name,
        )
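
A hedged usage sketch of the method above, assuming `map_dataset` is an already-reduced `MapDataset` with counts, exposure, an energy-dispersion map and a GTI (the variable name is made up for illustration):

import astropy.units as u
from astropy.coordinates import SkyCoord
from regions import CircleSkyRegion

# Hypothetical ON region centred on the Crab nebula.
on_region = CircleSkyRegion(
    center=SkyCoord(83.633, 22.014, unit="deg", frame="icrs"),
    radius=0.11 * u.deg,
)

spectrum_dataset = map_dataset.to_spectrum_dataset(
    on_region=on_region,
    containment_correction=True,  # needs a circular region and a PSFMap
)
print(spectrum_dataset)
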
Example #25
                              PHACountsSpectrum, SpectrumObservation)
from gammapy.utils.random import get_random_state
import numpy as np
import astropy.units as u
import matplotlib.pyplot as plt

e_true = SpectrumExtraction.DEFAULT_TRUE_ENERGY
e_reco = SpectrumExtraction.DEFAULT_RECO_ENERGY

# EDISP
edisp = EnergyDispersion.from_gauss(e_true=e_true, e_reco=e_true, sigma=0.2)

# AEFF
nodes = np.sqrt(e_true[:-1] * e_true[1:])
data = abramowski_effective_area(energy=nodes)
aeff = EffectiveAreaTable(data=data, energy=e_true)
lo_threshold = aeff.find_energy(0.1 * aeff.max_area)

# MODEL
model = PowerLaw(index=2.3 * u.Unit(''),
                 amplitude=2.5 * 1e-12 * u.Unit('cm-2 s-1 TeV-1'),
                 reference=1 * u.TeV)

# COUNTS
livetime = 2 * u.h
npred = calculate_predicted_counts(model=model,
                                   aeff=aeff,
                                   edisp=edisp,
                                   livetime=livetime)

bkg = 0.2 * npred.data
Example #26
edisp = EnergyDispersion.from_gauss(
    e_true=e_true,
    e_reco=e_reco,
    sigma=0.2,
    bias=0,
)

#Aeff
ee = EnergyBounds(np.logspace(-2, 2.5, 109) * u.TeV)
p1 = 6.85e9
p2 = 0.0891
p3 = 5.0e5

f = lambda x: p1 * (x / u.MeV)**(-p2) * np.exp(((-p3) * u.MeV) / x)
value = f(ee.log_centers.to('MeV'))
data = value * u.cm**2
aeff1 = EffectiveAreaTable(ee.lower_bounds, ee.upper_bounds, data)

f2 = lambda x: p1 * (x / u.MeV)**(-p2) * np.exp(((-p3 * factor) * u.MeV) / x)
value2 = f2(ee.log_centers.to('MeV'))
data2 = value2 * u.cm**2
aeff2 = EffectiveAreaTable(ee.lower_bounds, ee.upper_bounds, data2)

#======================================

#DEFINE MODEL

#Model for the source
index = 2.1 * u.Unit('')
amplitude = 3.5e-12 * u.Unit('cm-2 s-1 TeV-1')
reference = 1 * u.TeV
pwl = PowerLaw(index=index, amplitude=amplitude, reference=reference)
Example #27
    def stack(self, other):
        r"""Stack this dataset with another one.

        The safe mask is applied to compute the stacked counts vector.
        Counts outside each dataset's safe mask are lost.

        Stacking is performed in-place.

        The stacking of 2 datasets is implemented as follows.
        Here, :math:`k` denotes a bin in reconstructed energy and :math:`j = {1,2}` is the dataset number

        The ``mask_safe`` of each dataset is defined as:

        .. math::
            \epsilon_{jk} =\left\{\begin{array}{cl} 1, &
            \mbox{if bin k is inside the energy thresholds}\\ 0, &
            \mbox{otherwise} \end{array}\right.

        Then the total ``counts`` and model background ``bkg`` are computed according to:

        .. math::
            \overline{\mathrm{n_{on}}}_k =  \mathrm{n_{on}}_{1k} \cdot \epsilon_{1k} +
             \mathrm{n_{on}}_{2k} \cdot \epsilon_{2k}

            \overline{bkg}_k = bkg_{1k} \cdot \epsilon_{1k} +
             bkg_{2k} \cdot \epsilon_{2k}

        The stacked ``mask_safe`` is then:

        .. math::
            \overline{\epsilon_k} = \epsilon_{1k} OR \epsilon_{2k}

        Parameters
        ----------
        other : `~gammapy.spectrum.SpectrumDataset`
            The dataset to stack onto the current one.
        """
        if not isinstance(other, SpectrumDataset):
            raise TypeError("Incompatible types for SpectrumDataset stacking")

        if self.counts is not None:
            self.counts *= self.mask_safe
            self.counts.stack(other.counts, weights=other.mask_safe)

        if self.background is not None and self.stat_type == "cash":
            self.background *= self.mask_safe
            self.background.stack(other.background, weights=other.mask_safe)

        if self.livetime is None or other.livetime is None:
            raise ValueError(
                "IRF stacking requires livetime for both datasets.")
        else:
            stacked_livetime = self.livetime + other.livetime

            if self.exposure and other.exposure:
                stacked_exposure = self.exposure
                stacked_exposure.stack(other.exposure)

                stacked_aeff = EffectiveAreaTable(
                    stacked_exposure.geom.axes[0].edges[:-1],
                    stacked_exposure.geom.axes[0].edges[1:],
                    np.squeeze(stacked_exposure.quantity / stacked_livetime))

            if self.edisp is not None:
                self.edisp.edisp_map *= self.mask_safe.data
                self.edisp.stack(other.edisp, weights=other.mask_safe)

            self.aeff = stacked_aeff

        if self.mask_safe is not None and other.mask_safe is not None:
            self.mask_safe.stack(other.mask_safe)

        if self.gti is not None:
            self.gti = self.gti.stack(other.gti).union()

        # TODO: for the moment, since dead time is not accounted for, livetime cannot be the sum of GTIs
        if self.livetime is not None:
            self.livetime += other.livetime
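
The formulas in the docstring can be checked with plain NumPy: each dataset's counts and background are zeroed outside its own safe mask before being summed, and the stacked safe mask is the element-wise OR. A small worked example with made-up numbers:

import numpy as np

# Two datasets with 4 reco-energy bins (made-up numbers).
counts_1 = np.array([5, 8, 3, 1])
counts_2 = np.array([2, 4, 6, 0])
bkg_1 = np.array([1.0, 2.0, 1.5, 0.5])
bkg_2 = np.array([0.5, 1.0, 2.0, 0.2])

# Safe masks epsilon_jk: dataset 1 excludes the first bin, dataset 2 the last.
eps_1 = np.array([0, 1, 1, 1], dtype=bool)
eps_2 = np.array([1, 1, 1, 0], dtype=bool)

# n_on_k = n_on_1k * eps_1k + n_on_2k * eps_2k   (and likewise for the background)
counts_stacked = counts_1 * eps_1 + counts_2 * eps_2
bkg_stacked = bkg_1 * eps_1 + bkg_2 * eps_2

# eps_k = eps_1k OR eps_2k
mask_safe_stacked = eps_1 | eps_2

print(counts_stacked)      # [ 2 12  9  1]
print(bkg_stacked)         # [0.5 3.  3.5 0.5]
print(mask_safe_stacked)   # [ True  True  True  True]
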