Code Example #1
    def run(self, dataset, observation):
        """Run all steps.

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Input dataset.
        observation : `Observation`
            Data store observation.

        Returns
        -------
        dataset_on_off : `SpectrumDatasetOnOff`
            On off dataset.
        """
        counts_off = self.make_counts_off(dataset, observation)
        counts = self.make_counts(dataset, observation)

        acceptance = RegionNDMap.from_geom(geom=dataset.counts.geom)
        acceptance.data = np.sum([_[1] - _[0] for _ in self.on_phase])

        acceptance_off = RegionNDMap.from_geom(geom=dataset.counts.geom)
        acceptance_off.data = np.sum([_[1] - _[0] for _ in self.off_phase])

        dataset_on_off = SpectrumDatasetOnOff.from_spectrum_dataset(
            dataset=dataset,
            counts_off=counts_off,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
        )
        dataset_on_off.counts = counts
        return dataset_on_off
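
The snippets in this listing all revolve around `RegionNDMap.from_geom`: build a `RegionGeom` with an energy axis, create a map on it, then fill or assign `data`. The sketch below isolates that pattern on its own; it is a minimal illustration assuming the `gammapy.maps` API used in these examples, with an arbitrary region string and binning.

from gammapy.maps import MapAxis, RegionGeom, RegionNDMap

# Region geometry with a 5-bin energy axis (values chosen for illustration).
energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=5, name="energy")
geom = RegionGeom.create(region="icrs;circle(0, 0, 0.1)", axes=[energy_axis])

# Empty map on that geometry; assign data afterwards, as the acceptance maps
# in Code Example #1 do with the summed phase fractions.
acceptance = RegionNDMap.from_geom(geom=geom)
acceptance.data += 0.2
print(acceptance.data.shape)  # (5, 1, 1): one bin per energy, one spatial pixel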
Code Example #2
def make_observation_list():
    """obs with dummy IRF"""
    nbin = 3
    energy = np.logspace(-1, 1, nbin + 1) * u.TeV
    livetime = 2 * u.h
    data_on = np.arange(nbin)
    dataoff_1 = np.ones(3)
    dataoff_2 = np.ones(3) * 3
    dataoff_1[1] = 0
    dataoff_2[1] = 0

    axis = MapAxis.from_edges(energy, name="energy", interp="log")
    axis_true = axis.copy(name="energy_true")

    geom = RegionGeom(region=None, axes=[axis])
    geom_true = RegionGeom(region=None, axes=[axis_true])

    on_vector = RegionNDMap.from_geom(geom=geom, data=data_on)
    off_vector1 = RegionNDMap.from_geom(geom=geom, data=dataoff_1)
    off_vector2 = RegionNDMap.from_geom(geom=geom, data=dataoff_2)
    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    aeff = RegionNDMap.from_geom(geom_true, data=1, unit="m2")
    edisp = EDispKernelMap.from_gauss(
        energy_axis=axis, energy_axis_true=axis, sigma=0.2, bias=0, geom=geom
    )

    time_ref = Time("2010-01-01")
    gti1 = make_gti({"START": [5, 6, 1, 2], "STOP": [8, 7, 3, 4]}, time_ref=time_ref)
    gti2 = make_gti({"START": [14], "STOP": [15]}, time_ref=time_ref)

    exposure = aeff * livetime
    exposure.meta["livetime"] = livetime

    obs1 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector1,
        exposure=exposure,
        edisp=edisp,
        mask_safe=mask_safe,
        acceptance=1,
        acceptance_off=2,
        name="1",
        gti=gti1,
    )
    obs2 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector2,
        exposure=exposure.copy(),
        edisp=edisp,
        mask_safe=mask_safe,
        acceptance=1,
        acceptance_off=4,
        name="2",
        gti=gti2,
    )

    obs_list = [obs1, obs2]
    return obs_list
Code Example #3
File: test_spectrum.py  Project: AtreyeeS/gammapy
    def setup(self):
        self.nbins = 30
        energy = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = SkyModel(
            spectral_model=PowerLawSpectralModel(index=2,
                                                 amplitude=1e5 *
                                                 u.Unit("cm-2 s-1 TeV-1"),
                                                 reference=0.1 * u.TeV))
        bkg_model = PowerLawSpectralModel(index=3,
                                          amplitude=1e4 *
                                          u.Unit("cm-2 s-1 TeV-1"),
                                          reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.spectral_model.integral(
            energy[:-1], energy[1:]).value
        source_counts = random_state.poisson(npred)

        axis = MapAxis.from_edges(energy, name="energy", interp="log")
        geom = RegionGeom(region=None, axes=[axis])

        self.src = RegionNDMap.from_geom(geom=geom, data=source_counts)
        self.exposure = RegionNDMap.from_geom(geom.as_energy_true,
                                              data=1,
                                              unit="cm2 s")

        npred_bkg = bkg_model.integral(energy[:-1], energy[1:]).value

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = RegionNDMap.from_geom(geom=geom, data=bkg_counts)
        self.off = RegionNDMap.from_geom(geom=geom, data=off_counts)
Code Example #4
File: test_spectrum.py  Project: AtreyeeS/gammapy
    def test_wstat(self):
        """WStat with on source and background spectrum"""
        on_vector = self.src.copy()
        on_vector.data += self.bkg.data
        acceptance = RegionNDMap.from_geom(self.src.geom, data=1)
        acceptance_off = RegionNDMap.from_geom(self.bkg.geom,
                                               data=1 / self.alpha)

        dataset = SpectrumDatasetOnOff(
            counts=on_vector,
            counts_off=self.off,
            exposure=self.exposure,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
        )
        dataset.models = self.source_model

        self.source_model.parameters.index = 1.12

        fit = Fit()
        result = fit.run(datasets=[dataset])
        pars = self.source_model.parameters

        assert_allclose(pars["index"].value, 1.997342, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.187067, rtol=1e-3)
        assert_allclose(result.total_stat, 30.022316, rtol=1e-3)
Code Example #5
File: test_spectrum.py  Project: dkkyjy/gammapy
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV",
                                             "10 TeV",
                                             nbin=30,
                                             name="energy_true")

    aeff = EffectiveAreaTable.from_parametrization(energy.edges, "HESS")
    edisp = EDispKernelMap.from_diagonal_response(energy,
                                                  energy_true,
                                                  geom=geom.to_image())
    livetime = 100 * u.s
    background = spectrum_dataset.background
    spectrum_dataset1 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime,
        aeff=aeff,
        edisp=edisp.copy(),
        background=background.copy(),
    )

    livetime2 = 0.5 * livetime
    aeff2 = EffectiveAreaTable(energy.edges[:-1], energy.edges[1:],
                               2 * aeff.data.data)
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    spectrum_dataset2 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime2,
        aeff=aeff2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert spectrum_dataset1.livetime == 1.5 * livetime
    assert_allclose(spectrum_dataset1.background.data[1:],
                    3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])
    assert_allclose(
        spectrum_dataset1.aeff.data.data.to_value("m2"),
        4.0 / 3 * aeff.data.data.to_value("m2"),
    )
    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
Code Example #6
File: test_spectrum.py  Project: gaia-verna/gammapy
    def setup(self):
        etrue = np.logspace(-1, 1, 10) * u.TeV
        self.e_true = etrue
        ereco = np.logspace(-1, 1, 5) * u.TeV
        elo = ereco[:-1]
        ehi = ereco[1:]
        self.e_reco = ereco
        self.aeff = EffectiveAreaTable(etrue[:-1], etrue[1:], np.ones(9) * u.cm ** 2)
        self.edisp = EDispKernel.from_diagonal_response(etrue, ereco)

        start = u.Quantity([0], "s")
        stop = u.Quantity([1000], "s")
        time_ref = Time("2010-01-01 00:00:00.0")
        self.gti = GTI.create(start, stop, time_ref)
        self.livetime = self.gti.time_sum

        self.on_region = make_region("icrs;circle(0.,1.,0.1)")
        off_region = make_region("icrs;box(0.,1.,0.1, 0.2,30)")
        self.off_region = off_region.union(
            make_region("icrs;box(-1.,-1.,0.1, 0.2,150)")
        )
        self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

        data = np.ones(elo.shape)
        data[-1] = 0  # to test stats calculation with empty bins

        axis = MapAxis.from_edges(ereco, name="energy", interp="log")
        self.on_counts = RegionNDMap.create(
            region=self.on_region, wcs=self.wcs, axes=[axis]
        )
        self.on_counts.data += 1
        self.on_counts.data[-1] = 0

        self.off_counts = RegionNDMap.create(
            region=self.off_region, wcs=self.wcs, axes=[axis]
        )
        self.off_counts.data += 10

        acceptance = RegionNDMap.from_geom(self.on_counts.geom)
        acceptance.data += 1

        data = np.ones(elo.shape)
        data[-1] = 0

        acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
        acceptance_off.data += 10

        self.dataset = SpectrumDatasetOnOff(
            counts=self.on_counts,
            counts_off=self.off_counts,
            aeff=self.aeff,
            edisp=self.edisp,
            livetime=self.livetime,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            name="test",
            gti=self.gti,
        )
Code Example #7
File: test_spectrum.py  Project: gaia-verna/gammapy
def make_observation_list():
    """obs with dummy IRF"""
    nbin = 3
    energy = np.logspace(-1, 1, nbin + 1) * u.TeV
    livetime = 2 * u.h
    data_on = np.arange(nbin)
    dataoff_1 = np.ones(3)
    dataoff_2 = np.ones(3) * 3
    dataoff_1[1] = 0
    dataoff_2[1] = 0

    axis = MapAxis.from_edges(energy, name="energy", interp="log")
    geom = RegionGeom(region=None, axes=[axis])

    on_vector = RegionNDMap.from_geom(geom=geom, data=data_on)
    off_vector1 = RegionNDMap.from_geom(geom=geom, data=dataoff_1)
    off_vector2 = RegionNDMap.from_geom(geom=geom, data=dataoff_2)
    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    aeff = EffectiveAreaTable.from_constant(energy, "1 cm2")
    edisp = EDispKernel.from_gauss(e_true=energy, e_reco=energy, sigma=0.2, bias=0)

    time_ref = Time("2010-01-01")
    gti1 = make_gti({"START": [5, 6, 1, 2], "STOP": [8, 7, 3, 4]}, time_ref=time_ref)
    gti2 = make_gti({"START": [14], "STOP": [15]}, time_ref=time_ref)

    obs1 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector1,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        mask_safe=mask_safe,
        acceptance=1,
        acceptance_off=2,
        name="1",
        gti=gti1,
    )
    obs2 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector2,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        mask_safe=mask_safe,
        acceptance=1,
        acceptance_off=4,
        name="2",
        gti=gti2,
    )

    obs_list = [obs1, obs2]
    return obs_list
Code Example #8
def simulate_spectrum_dataset(model, random_state=0):
    energy_edges = np.logspace(-0.5, 1.5, 21) * u.TeV
    energy_axis = MapAxis.from_edges(energy_edges, interp="log", name="energy")
    energy_axis_true = energy_axis.copy(name="energy_true")

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy_axis_true)

    bkg_model = SkyModel(
        spectral_model=PowerLawSpectralModel(index=2.5,
                                             amplitude="1e-12 cm-2 s-1 TeV-1"),
        name="background",
    )
    bkg_model.spectral_model.amplitude.frozen = True
    bkg_model.spectral_model.index.frozen = True

    geom = RegionGeom.create(region="icrs;circle(0, 0, 0.1)",
                             axes=[energy_axis])
    acceptance = RegionNDMap.from_geom(geom=geom, data=1)
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=energy_axis,
        energy_axis_true=energy_axis_true,
        geom=geom,
    )

    geom_true = RegionGeom.create(region="icrs;circle(0, 0, 0.1)",
                                  axes=[energy_axis_true])
    exposure = make_map_exposure_true_energy(pointing=SkyCoord("0d", "0d"),
                                             aeff=aeff,
                                             livetime=100 * u.h,
                                             geom=geom_true)

    mask_safe = RegionNDMap.from_geom(geom=geom, dtype=bool)
    mask_safe.data += True

    acceptance_off = RegionNDMap.from_geom(geom=geom, data=5)
    dataset = SpectrumDatasetOnOff(
        name="test_onoff",
        exposure=exposure,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        edisp=edisp,
        mask_safe=mask_safe,
    )
    dataset.models = bkg_model
    bkg_npred = dataset.npred_signal()

    dataset.models = model
    dataset.fake(
        random_state=random_state,
        npred_background=bkg_npred,
    )
    return dataset
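
For context, a helper like `simulate_spectrum_dataset` above is typically handed a source model. The call below is a hypothetical usage sketch: it assumes the helper as defined above (with its own imports in scope) and the `SkyModel` / `PowerLawSpectralModel` classes used elsewhere in these examples; the spectral parameters are made up.

import astropy.units as u
from gammapy.modeling.models import PowerLawSpectralModel, SkyModel

source_model = SkyModel(
    spectral_model=PowerLawSpectralModel(
        index=2.3, amplitude="1e-12 cm-2 s-1 TeV-1", reference=1 * u.TeV
    ),
    name="source",
)
dataset = simulate_spectrum_dataset(source_model, random_state=42)
print(dataset.counts.data.sum())  # total simulated ON counts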
Code Example #9
    def create(
        cls,
        e_reco,
        e_true=None,
        region=None,
        reference_time="2000-01-01",
        name=None,
        meta_table=None,
    ):
        """Create empty SpectrumDatasetOnOff.

        Empty containers are created with the correct geometry.
        counts, counts_off and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~gammapy.maps.MapAxis`
            Counts energy axis. Its name must be "energy".
        e_true : `~gammapy.maps.MapAxis`
            Effective area table energy axis. Its name must be "energy_true".
            If not set, the reco energy axis is used. Default: None
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        meta_table : `~astropy.table.Table`
            Table listing information on the observations used to create the dataset.
            One line per observation for stacked datasets.
        """
        dataset = super().create(
            e_reco=e_reco,
            e_true=e_true,
            region=region,
            reference_time=reference_time,
            name=name,
        )

        counts_off = dataset.counts.copy()
        acceptance = RegionNDMap.from_geom(counts_off.geom, dtype=int)
        acceptance.data += 1

        acceptance_off = RegionNDMap.from_geom(counts_off.geom, dtype=int)
        acceptance_off.data += 1

        return cls.from_spectrum_dataset(
            dataset=dataset,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            counts_off=counts_off,
        )
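
A hypothetical call of the `create` classmethod documented above, assuming the `MapAxis`-based signature shown in this example; the import path, region string, and binning are assumptions for illustration.

from gammapy.datasets import SpectrumDatasetOnOff
from gammapy.maps import MapAxis

# Reco and true energy axes with the names the docstring requires.
energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30, name="energy")
energy_axis_true = energy_axis.copy(name="energy_true")

empty = SpectrumDatasetOnOff.create(
    e_reco=energy_axis,
    e_true=energy_axis_true,
    region="icrs;circle(0, 0, 0.11)",
    name="empty-onoff",
)
print(empty)  # counts and counts_off are zero, acceptances are one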
Code Example #10
File: test_spectrum.py  Project: mireianievas/gammapy
    def test_str(self):
        model = SkyModel(spectral_model=PowerLawSpectralModel())
        dataset = SpectrumDatasetOnOff(
            counts=self.on_counts,
            counts_off=self.off_counts,
            models=model,
            exposure=self.aeff * self.livetime,
            edisp=self.edisp,
            acceptance=RegionNDMap.from_geom(geom=self.on_counts.geom, data=1),
            acceptance_off=RegionNDMap.from_geom(geom=self.off_counts.geom, data=10),
        )
        assert "SpectrumDatasetOnOff" in str(dataset)
        assert "wstat" in str(dataset)
Code Example #11
    def __init__(
        self,
        models=None,
        counts=None,
        counts_off=None,
        livetime=None,
        aeff=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        acceptance=None,
        acceptance_off=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        self.counts = counts
        self.counts_off = counts_off

        if livetime is not None:
            livetime = u.Quantity(livetime)

        self.livetime = livetime
        self.mask_fit = mask_fit
        self.aeff = aeff
        self.edisp = edisp
        self.mask_safe = mask_safe
        self.meta_table = meta_table

        if np.isscalar(acceptance):
            data = np.ones(self._geom.data_shape) * acceptance
            acceptance = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance = acceptance

        if np.isscalar(acceptance_off):
            data = np.ones(self._geom.data_shape) * acceptance_off
            acceptance_off = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance_off = acceptance_off

        self._evaluators = {}
        self._name = make_name(name)
        self.gti = gti
        self.models = models

        # TODO: this enforces the exposure on the edisp map, maybe better move
        #  to where the EDispKernelMap is created?
        if edisp is not None:
            self.edisp.exposure_map.data = self.exposure.data
Code Example #12
File: test_spectrum.py  Project: vikasj78/gammapy
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = np.logspace(-1, 1, 31) * u.TeV
    aeff = EffectiveAreaTable.from_parametrization(energy, "HESS")
    edisp = EDispKernel.from_diagonal_response(energy, energy)
    livetime = 100 * u.s
    background = spectrum_dataset.background
    spectrum_dataset1 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime,
        aeff=aeff,
        edisp=edisp,
        background=background.copy(),
    )

    livetime2 = 0.5 * livetime
    aeff2 = EffectiveAreaTable(energy[:-1], energy[1:], 2 * aeff.data.data)
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)

    spectrum_dataset2 = SpectrumDataset(
        counts=spectrum_dataset.counts.copy(),
        livetime=livetime2,
        aeff=aeff2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert spectrum_dataset1.livetime == 1.5 * livetime
    assert_allclose(spectrum_dataset1.background.data[1:],
                    3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])
    assert_allclose(
        spectrum_dataset1.aeff.data.data.to_value("m2"),
        4.0 / 3 * aeff.data.data.to_value("m2"),
    )
    assert_allclose(spectrum_dataset1.edisp.pdf_matrix[1:],
                    edisp.pdf_matrix[1:])
    assert_allclose(spectrum_dataset1.edisp.pdf_matrix[0],
                    0.5 * edisp.pdf_matrix[0])
Code Example #13
File: utils.py  Project: maxnoe/gammapy
def make_counts_off_rad_max(geom_off, rad_max, events):
    """Extract the OFF counts from a list of point regions and given rad max.

    This method does **not** check for overlap of the regions defined by rad_max.

    Parameters
    ----------
    geom_off : `~gammapy.maps.RegionGeom`
        Reference map geom for the OFF regions.
    rad_max: `~gammapy.irf.RadMax2D`
        the RAD_MAX_2D table IRF
    events: `~gammapy.data.EventList`
        event list to be used to compute the OFF counts

    Returns
    -------
    counts_off : `~gammapy.maps.RegionNDMap`
        OFF counts vs estimated energy, extracted from the OFF regions.
    """
    if not geom_off.is_all_point_sky_regions:
        raise ValueError(
            f"Only supports PointSkyRegions, got {geom_off.region} instead")

    counts_off = RegionNDMap.from_geom(geom=geom_off)

    for off_region in compound_region_to_regions(geom_off.region):
        selected_events = events.select_rad_max(rad_max=rad_max,
                                                position=off_region.center)
        counts_off.fill_events(selected_events)

    return counts_off
Code Example #14
    def to_region_map(self, region=None):
        """Convert to a `RegionNDMap` with the given region and this object's true-energy axis."""
        axis = self.data.axes["energy_true"]
        geom = RegionGeom(region=region, axes=[axis])
        return RegionNDMap.from_geom(
            geom=geom, data=self.data.data.value, unit=self.data.data.unit
        )
Code Example #15
def simulate_spectrum_dataset(model, random_state=0):
    edges = np.logspace(-0.5, 1.5, 21) * u.TeV
    energy_axis = MapAxis.from_edges(edges, interp="log", name="energy")

    aeff = EffectiveAreaTable.from_parametrization(energy=edges).to_region_map()
    bkg_model = SkyModel(
        spectral_model=PowerLawSpectralModel(
            index=2.5, amplitude="1e-12 cm-2 s-1 TeV-1"
        ),
        name="background",
    )
    bkg_model.spectral_model.amplitude.frozen = True
    bkg_model.spectral_model.index.frozen = True

    geom = RegionGeom(region=None, axes=[energy_axis])
    acceptance = RegionNDMap.from_geom(geom=geom, data=1)
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=energy_axis,
        energy_axis_true=energy_axis.copy(name="energy_true"),
        geom=geom
    )

    dataset = SpectrumDatasetOnOff(
        aeff=aeff, livetime=100 * u.h, acceptance=acceptance, acceptance_off=5, edisp=edisp
    )
    dataset.models = bkg_model
    bkg_npred = dataset.npred_sig()

    dataset.models = model
    dataset.fake(random_state=random_state, background_model=bkg_npred)
    return dataset
Code Example #16
    def make_aeff(self, geom, observation):
        """Make effective area.

        Parameters
        ----------
        geom : `~gammapy.maps.RegionGeom`
            Reference map geom.
        observation : `~gammapy.data.Observation`
            Observation to compute the effective area for.

        Returns
        -------
        aeff : `~gammapy.maps.RegionNDMap`
            Effective area.
        """
        offset = observation.pointing_radec.separation(geom.center_skydir)
        energy = geom.get_axis_by_name("energy_true")

        data = observation.aeff.data.evaluate(offset=offset,
                                              energy_true=energy.center)

        if self.containment_correction:
            if not isinstance(geom.region, CircleSkyRegion):
                raise TypeError(
                    "Containment correction only supported for circular regions."
                )
            psf = observation.psf.to_energy_dependent_table_psf(theta=offset)
            containment = psf.containment(energy.center, geom.region.radius)
            data *= containment.squeeze()

        return RegionNDMap.from_geom(geom, data=data.value, unit=data.unit)
Code Example #17
    def make_background(geom, observation):
        """Make background.

        Parameters
        ----------
        geom : `~gammapy.maps.RegionGeom`
            Reference map geom.
        observation : `~gammapy.data.Observation`
            Observation to compute the background for.

        Returns
        -------
        background : `~gammapy.maps.RegionNDMap`
            Background spectrum.
        """
        offset = observation.pointing_radec.separation(geom.center_skydir)
        e_reco = geom.get_axis_by_name("energy").edges

        bkg = observation.bkg

        data = bkg.evaluate_integrate(fov_lon=0 * u.deg,
                                      fov_lat=offset,
                                      energy_reco=e_reco)

        data *= geom.solid_angle()
        data *= observation.observation_time_duration
        return RegionNDMap.from_geom(geom=geom, data=data.to_value(""))
Code Example #18
File: reflected.py  Project: mireianievas/gammapy
    def run(self, dataset, observation):
        """Run reflected regions background maker

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Spectrum dataset.
        observation : `DatastoreObservation`
            Data store observation.

        Returns
        -------
        dataset_on_off : `SpectrumDatasetOnOff`
            On off dataset.
        """
        counts_off, acceptance_off = self.make_counts_off(dataset, observation)
        acceptance = RegionNDMap.from_geom(geom=dataset.counts.geom, data=1)

        dataset_onoff = SpectrumDatasetOnOff.from_spectrum_dataset(
            dataset=dataset,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            counts_off=counts_off,
            name=dataset.name,
        )

        if dataset_onoff.counts_off is None:
            dataset_onoff.mask_safe.data[...] = False
            log.warning(
                f"ReflectedRegionsBackgroundMaker failed. Setting {dataset_onoff.name} mask to False."
            )
        return dataset_onoff
Code Example #19
File: test_lightcurve.py  Project: sanjaymsh/gammapy
def test_lightcurve_estimator_spectrum_datasets_withmaskfit():
    # Light curve estimation in one-hour time bins
    datasets = get_spectrum_datasets()
    time_intervals = [
        Time(["2010-01-01T00:00:00", "2010-01-01T01:00:00"]),
        Time(["2010-01-01T01:00:00", "2010-01-01T02:00:00"]),
    ]

    e_min_fit = 1 * u.TeV
    e_max_fit = 3 * u.TeV
    for dataset in datasets:
        geom = dataset.counts.geom
        data = geom.energy_mask(emin=e_min_fit, emax=e_max_fit)
        dataset.mask_fit = RegionNDMap.from_geom(geom, data=data, dtype=bool)

    selection = ["scan"]
    estimator = LightCurveEstimator(
        e_edges=[1, 30] * u.TeV,
        norm_n_values=3,
        time_intervals=time_intervals,
        selection_optional=selection,
    )
    lightcurve = estimator.run(datasets)
    assert_allclose(lightcurve.table["time_min"], [55197.0, 55197.041667])
    assert_allclose(lightcurve.table["time_max"], [55197.041667, 55197.083333])
    assert_allclose(lightcurve.table["stat"], [6.603043, 0.421051], rtol=1e-3)
    assert_allclose(lightcurve.table["norm"], [0.885124, 0.967054], rtol=1e-3)
Code Example #20
    def npred_sig(self):
        """Predicted counts from source model (`RegionNDMap`)."""
        npred_total = RegionNDMap.from_geom(self._geom)

        if self.models:
            for model in self.models:
                if model.datasets_names is not None:
                    if self.name not in model.datasets_names:
                        continue

                evaluator = self._evaluators.get(model.name)

                if evaluator is None:
                    evaluator = MapEvaluator(
                        model=model,
                        exposure=self.exposure,
                        edisp=self.edisp,
                        gti=self.gti,
                    )
                    self._evaluators[model.name] = evaluator

                npred = evaluator.compute_npred()
                npred_total.stack(npred)

        return npred_total
Code Example #21
File: spectrum.py  Project: dkkyjy/gammapy
    def exposure(self):
        """Exposure (aeff * livetime)."""
        data = self.livetime * self.aeff.data.data
        geom = RegionGeom(region=None, axes=[self.aeff.energy])
        return RegionNDMap.from_geom(geom=geom, data=data.value, unit=data.unit)
Code Example #22
def make_counts_rad_max(geom, rad_max, events):
    """Extract the counts using for the ON region size the values in the
    `RAD_MAX_2D` table.

    Parameters
    ----------
    geom : `~gammapy.maps.RegionGeom`
        reference map geom
    rad_max : `~gammapy.irf.RadMax2D`
        the RAD_MAX_2D table IRF
    events : `~gammapy.data.EventList`
        event list to be used to compute the ON counts

    Returns
    -------
    counts : `~gammapy.maps.RegionNDMap`
        Counts vs estimated energy extracted from the ON region.
    """
    rad_max = get_rad_max_vs_energy(rad_max, events.pointing_radec, geom)

    counts_list = []

    # create and fill a map per each energy bin, fetch the counts
    for i, rad in enumerate(rad_max):
        on_region = CircleSkyRegion(center=geom.center_skydir, radius=rad)
        energy_range = geom.axes["energy"].slice(i)
        on_region_geom = RegionGeom(on_region, axes=[energy_range])
        counts = Map.from_geom(on_region_geom)
        counts.fill_events(events)
        counts_list.append(counts.data[0])

    counts = RegionNDMap.from_geom(geom, data=np.asarray(counts_list))

    return counts
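
The final step above wraps the per-bin counts collected in the loop into a single map. In isolation the pattern looks like the sketch below (the counts values are made up; only `from_geom` with an explicit data array is being illustrated, and, as in Code Example #2, a flat array with one value per energy bin is assumed to be accepted):

import numpy as np
from gammapy.maps import MapAxis, RegionGeom, RegionNDMap

energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=4)
geom = RegionGeom.create(region="icrs;circle(0, 0, 0.1)", axes=[energy_axis])

counts_list = [3, 5, 2, 0]  # one value per energy bin, as filled in the loop above
counts = RegionNDMap.from_geom(geom, data=np.asarray(counts_list))
print(counts.data.ravel())  # one entry per energy bin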
Code Example #23
    def create(
        cls,
        e_reco,
        e_true=None,
        region=None,
        reference_time="2000-01-01",
        name=None,
        meta_table=None,
    ):
        """Creates empty spectrum dataset.

        Empty containers are created with the correct geometry.
        counts, background and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~gammapy.maps.MapAxis`
            Counts energy axis. Its name must be "energy".
        e_true : `~gammapy.maps.MapAxis`
            Effective area table energy axis. Its name must be "energy_true".
            If not set, the reco energy axis is used. Default: None
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        meta_table : `~astropy.table.Table`
            Table listing information on the observations used to create the dataset.
            One line per observation for stacked datasets.
        """
        if e_true is None:
            e_true = e_reco.copy(name="energy_true")

        if region is None:
            region = "icrs;circle(0, 0, 1)"

        name = make_name(name)
        counts = RegionNDMap.create(region=region, axes=[e_reco])
        background = RegionNDMap.create(region=region, axes=[e_reco])
        exposure = RegionNDMap.create(region=region,
                                      axes=[e_true],
                                      unit="cm2 s",
                                      meta={"livetime": 0 * u.s})
        edisp = EDispKernelMap.from_diagonal_response(e_reco,
                                                      e_true,
                                                      geom=counts.geom)
        mask_safe = RegionNDMap.from_geom(counts.geom, dtype="bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)

        return SpectrumDataset(
            counts=counts,
            exposure=exposure,
            background=background,
            edisp=edisp,
            mask_safe=mask_safe,
            gti=gti,
            name=name,
        )
Code Example #24
File: test_spectrum.py  Project: ddeka2910/gammapy
    def test_fake(self):
        """Test the fake dataset"""
        source_model = SkyModel(spectral_model=PowerLawSpectralModel())
        dataset = SpectrumDatasetOnOff(
            name="test",
            counts=self.on_counts,
            counts_off=self.off_counts,
            models=source_model,
            exposure=self.aeff * self.livetime,
            edisp=self.edisp,
            acceptance=1,
            acceptance_off=10,
        )
        real_dataset = dataset.copy()

        background = RegionNDMap.from_geom(dataset.counts.geom)
        background.data += 1
        background_model = BackgroundModel(background,
                                           name="test-bkg",
                                           datasets_names="test")
        dataset.fake(background_model=background_model, random_state=314)

        assert real_dataset.counts.data.shape == dataset.counts.data.shape
        assert real_dataset.counts_off.data.shape == dataset.counts_off.data.shape
        assert dataset.counts_off.data.sum() == 39
        assert dataset.counts.data.sum() == 5
Code Example #25
    def make_counts_off(self, dataset, observation):
        """Make off counts.

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Spectrum dataset.
        observation : `DatastoreObservation`
            Data store observation.

        Returns
        -------
        counts_off : `RegionNDMap`
            Off counts.
        """
        finder = self._get_finder(dataset, observation)
        finder.run()

        energy_axis = dataset.counts.geom.axes["energy"]

        if len(finder.reflected_regions) > 0:
            region_union = list_to_compound_region(finder.reflected_regions)
            wcs = finder.reference_map.geom.wcs
            geom = RegionGeom.create(region=region_union,
                                     axes=[energy_axis],
                                     wcs=wcs)
            counts_off = RegionNDMap.from_geom(geom=geom)
            counts_off.fill_events(observation.events)
            acceptance_off = len(finder.reflected_regions)
        else:
            # if no OFF regions are found, off is set to None and acceptance_off to zero
            counts_off = None
            acceptance_off = 0
        return counts_off, acceptance_off
Code Example #26
def test_lightcurve_estimator_spectrum_datasets_withmaskfit():
    # Light curve estimation in one-hour time bins
    datasets = get_spectrum_datasets()
    time_intervals = [
        Time(["2010-01-01T00:00:00", "2010-01-01T01:00:00"]),
        Time(["2010-01-01T01:00:00", "2010-01-01T02:00:00"]),
    ]

    e_min_fit = 1 * u.TeV
    e_max_fit = 3 * u.TeV
    for dataset in datasets:
        geom = dataset.counts.geom
        data = geom.energy_mask(emin=e_min_fit, emax=e_max_fit)
        dataset.mask_fit = RegionNDMap.from_geom(geom, data=data, dtype=bool)

    steps = ["err", "counts", "ts", "norm-scan"]
    estimator = LightCurveEstimator(datasets,
                                    norm_n_values=3,
                                    time_intervals=time_intervals)
    lightcurve = estimator.run(e_ref=10 * u.TeV,
                               e_min=1 * u.TeV,
                               e_max=100 * u.TeV,
                               steps=steps)
    assert_allclose(lightcurve.table["time_min"], [55197.0, 55197.041667])
    assert_allclose(lightcurve.table["time_max"], [55197.041667, 55197.083333])
    assert_allclose(lightcurve.table["stat"], [6.60304, 0.421047], rtol=1e-3)
    assert_allclose(lightcurve.table["norm"], [0.885082, 0.967022], rtol=1e-3)
Code Example #27
File: spectrum.py  Project: vikasj78/gammapy
    def __init__(
        self,
        models=None,
        counts=None,
        counts_off=None,
        livetime=None,
        aeff=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        acceptance=None,
        acceptance_off=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        self.counts = counts
        self.counts_off = counts_off

        if livetime is not None:
            livetime = u.Quantity(livetime)

        self.livetime = livetime
        self.mask_fit = mask_fit
        self.aeff = aeff
        self.edisp = edisp
        self.mask_safe = mask_safe
        self.meta_table = meta_table

        if np.isscalar(acceptance):
            data = np.ones(self._geom.data_shape) * acceptance
            acceptance = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance = acceptance

        if np.isscalar(acceptance_off):
            data = np.ones(self._geom.data_shape) * acceptance_off
            acceptance_off = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance_off = acceptance_off

        self._evaluators = {}
        self._name = make_name(name)
        self.gti = gti
        self.models = models
Code Example #28
    def create(cls,
               e_reco,
               e_true=None,
               region=None,
               reference_time="2000-01-01",
               name=None):
        """Create empty SpectrumDatasetOnOff.

        Empty containers are created with the correct geometry.
        counts, counts_off and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~astropy.units.Quantity`
            Edges of the counts vector.
        e_true : `~astropy.units.Quantity`
            Edges of the effective area table. If not set, the reco energy edges are used. Default: None
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            Reference time of the dataset. Default is "2000-01-01".
        """
        dataset = super().create(
            e_reco=e_reco,
            e_true=e_true,
            region=region,
            reference_time=reference_time,
            name=name,
        )

        counts_off = dataset.counts.copy()
        acceptance = RegionNDMap.from_geom(counts_off.geom, dtype=int)
        acceptance.data += 1

        acceptance_off = RegionNDMap.from_geom(counts_off.geom, dtype=int)
        acceptance_off.data += 1

        return cls.from_spectrum_dataset(
            dataset=dataset,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            counts_off=counts_off,
        )
Code Example #29
    def npred_sig(self):
        """Predicted counts from source model (`RegionNDMap`)."""
        npred_total = RegionNDMap.from_geom(self._geom)

        for evaluator in self.evaluators.values():
            npred = evaluator.compute_npred()
            npred_total.stack(npred)

        return npred_total
Code Example #30
File: test_regionnd.py  Project: paranoya/gammapy
def test_region_nd_map_sum_over_axes(region_map):
    region_map_summed = region_map.sum_over_axes()
    weights = RegionNDMap.from_geom(region_map.geom, data=1.0)
    weights.data[5, :, :] = 0
    region_map_summed_weights = region_map.sum_over_axes(weights=weights)

    assert_allclose(region_map_summed.data, 15)
    assert_allclose(region_map_summed.data.shape, (1, 1, 1,))
    assert_allclose(region_map_summed_weights.data, 10)
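
A standalone sketch of the weighted `sum_over_axes` pattern tested above, with a hypothetical map in place of the `region_map` fixture (every call used here appears in the examples; the binning is arbitrary):

from gammapy.maps import MapAxis, RegionGeom, RegionNDMap

energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=6)
geom = RegionGeom.create(region="icrs;circle(0, 0, 0.1)", axes=[energy_axis])

region_map = RegionNDMap.from_geom(geom, data=1.0)  # one count in each energy bin
weights = RegionNDMap.from_geom(geom, data=1.0)
weights.data[5, :, :] = 0  # zero-weight the last energy bin

print(region_map.sum_over_axes().data.sum())                  # sums to 6
print(region_map.sum_over_axes(weights=weights).data.sum())   # sums to 5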