Example #1
def spectrum_dataset():
    e_true = MapAxis.from_energy_bounds("1 TeV",
                                        "10 TeV",
                                        nbin=20,
                                        name="energy_true")
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=4)
    aeff = EffectiveAreaTable.from_constant(value=1e6 * u.m**2,
                                            energy=e_true.edges)

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)",
                                    axes=[e_reco])
    background.data += 3600
    background.data[-1] *= 1e-3
    edisp = EDispKernelMap.from_diagonal_response(energy_axis_true=e_true,
                                                  energy_axis=e_reco,
                                                  geom=background.geom)
    return SpectrumDataset(aeff=aeff,
                           livetime="1h",
                           edisp=edisp,
                           background=background)
Example #2
def test_npred_models():
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)
    spectrum_dataset = SpectrumDataset.create(e_reco=e_reco)
    spectrum_dataset.exposure.quantity = 1e10 * u.Unit("cm2 h")

    pwl_1 = PowerLawSpectralModel(index=2)
    pwl_2 = PowerLawSpectralModel(index=2)
    model_1 = SkyModel(spectral_model=pwl_1)
    model_2 = SkyModel(spectral_model=pwl_2)

    spectrum_dataset.models = Models([model_1, model_2])
    npred = spectrum_dataset.npred()

    assert_allclose(npred.data.sum(), 64.8)

    npred_sig = spectrum_dataset.npred_sig()
    assert_allclose(npred_sig.data.sum(), 64.8)

    npred_sig_model1 = spectrum_dataset.npred_sig(model=model_1)
    assert_allclose(npred_sig_model1.data.sum(), 32.4)
Example #3
def test_spectrum_dataset_create():
    e_reco = MapAxis.from_edges(u.Quantity([0.1, 1, 10.0], "TeV"),
                                name="energy")
    e_true = MapAxis.from_edges(u.Quantity([0.05, 0.5, 5, 20.0], "TeV"),
                                name="energy_true")
    empty_spectrum_dataset = SpectrumDataset.create(e_reco,
                                                    e_true,
                                                    name="test")

    assert empty_spectrum_dataset.name == "test"
    assert empty_spectrum_dataset.counts.data.sum() == 0
    assert empty_spectrum_dataset.data_shape[0] == 2
    assert empty_spectrum_dataset.background.data.sum() == 0
    assert empty_spectrum_dataset.background.geom.axes[0].nbin == 2
    assert empty_spectrum_dataset.aeff.data.axis("energy_true").nbin == 3
    assert empty_spectrum_dataset.edisp.data.axis("energy").nbin == 2
    assert empty_spectrum_dataset.livetime.value == 0
    assert len(empty_spectrum_dataset.gti.table) == 0
    assert empty_spectrum_dataset.energy_range[0] is None
    assert_allclose(empty_spectrum_dataset.mask_safe, 0)
Example #4
def spectrum_dataset():
    e_true = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=20, name="energy_true")
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=4)

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[e_reco])
    background.data += 3600
    background.data[0] *= 1e3
    background.data[-1] *= 1e-3
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis_true=e_true, energy_axis=e_reco, geom=background.geom
    )
    aeff = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[e_true], unit="m2")
    aeff.data += 1e6

    livetime = 1 * u.h
    exposure = aeff * livetime

    return SpectrumDataset(
        name="test", exposure=exposure, edisp=edisp, background=background
    )
Example #5
    def run(self, dataset, observation):
        """Make spectrum dataset.

        Parameters
        ----------
        dataset : `~gammapy.spectrum.SpectrumDataset`
            Spectrum dataset.
        observation : `~gammapy.data.Observation`
            Observation to reduce.

        Returns
        -------
        dataset : `~gammapy.spectrum.SpectrumDataset`
            Spectrum dataset.
        """
        kwargs = {
            "gti": observation.gti,
            "livetime": observation.observation_live_time_duration,
        }
        energy_axis = dataset.counts.geom.get_axis_by_name("energy")
        energy_axis_true = dataset.aeff.data.axis("energy_true")
        region = dataset.counts.geom.region

        if "counts" in self.selection:
            kwargs["counts"] = self.make_counts(dataset.counts.geom, observation)

        if "background" in self.selection:
            kwargs["background"] = self.make_background(
                dataset.counts.geom, observation
            )

        if "aeff" in self.selection:
            kwargs["aeff"] = self.make_aeff(region, energy_axis_true, observation)

        if "edisp" in self.selection:

            kwargs["edisp"] = self.make_edisp(
                region.center, energy_axis, energy_axis_true, observation
            )

        return SpectrumDataset(name=dataset.name, **kwargs)
Example #6
def test_spectrum_dataset_create():
    e_reco = MapAxis.from_edges(u.Quantity([0.1, 1, 10.0], "TeV"),
                                name="energy")
    e_true = MapAxis.from_edges(u.Quantity([0.05, 0.5, 5, 20.0], "TeV"),
                                name="energy_true")
    geom = RegionGeom(region=None, axes=[e_reco])
    empty_spectrum_dataset = SpectrumDataset.create(geom,
                                                    energy_axis_true=e_true,
                                                    name="test")

    assert empty_spectrum_dataset.name == "test"
    assert empty_spectrum_dataset.counts.data.sum() == 0
    assert empty_spectrum_dataset.data_shape[0] == 2
    assert empty_spectrum_dataset.background.data.sum() == 0
    assert empty_spectrum_dataset.background.geom.axes[0].nbin == 2
    assert empty_spectrum_dataset.exposure.geom.axes[0].nbin == 3
    assert empty_spectrum_dataset.edisp.edisp_map.geom.axes["energy"].nbin == 2
    assert empty_spectrum_dataset.gti.time_sum.value == 0
    assert len(empty_spectrum_dataset.gti.table) == 0
    assert empty_spectrum_dataset.energy_range[0] is None
    assert_allclose(empty_spectrum_dataset.mask_safe, 0)
Example #7
    def test_stat_profile(self):
        geom = self.src.geom
        mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
        mask_safe.data += True

        dataset = SpectrumDataset(
            models=self.source_model,
            exposure=self.exposure,
            counts=self.src,
            mask_safe=mask_safe,
        )
        fit = Fit()
        result = fit.run(datasets=[dataset])
        true_idx = self.source_model.parameters["index"].value

        values = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
        self.source_model.spectral_model.index.scan_values = values

        profile = fit.stat_profile(datasets=[dataset], parameter="index")
        actual = values[np.argmin(profile["stat_scan"])]
        assert_allclose(actual, true_idx, rtol=0.01)
Example #8
def test_run(observations, phase_bkg_maker):

    maker = SpectrumDatasetMaker()

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true")

    pos = SkyCoord("08h35m20.65525s", "-45d10m35.1545s", frame="icrs")
    radius = Angle(0.2, "deg")
    region = SphericalCircleSkyRegion(pos, radius)

    dataset_empty = SpectrumDataset.create(e_reco, e_true, region=region)

    obs = observations["111630"]
    dataset = maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)

    assert_allclose(dataset_on_off.acceptance, 0.1)
    assert_allclose(dataset_on_off.acceptance_off, 0.3)

    assert_allclose(dataset_on_off.counts.data.sum(), 28)
    assert_allclose(dataset_on_off.counts_off.data.sum(), 57)
Example #9
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)

    maker = SpectrumDatasetMaker(selection=["counts"])

    datasets = []

    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    dataset_empty = SpectrumDataset.create(e_reco=e_reco,
                                           e_true=e_true,
                                           region=region)

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
Example #10
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)

    maker = SpectrumDatasetMaker(selection=["counts"])

    datasets = []

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV,
                                name="energy_true")
    geom = RegionGeom.create(region=region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
Example #11
def get_spectrumdataset(name):
    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    energy_axis = MapAxis.from_energy_bounds(0.1,
                                             40,
                                             nbin=15,
                                             per_decade=True,
                                             unit="TeV",
                                             name="energy")
    energy_axis_true = MapAxis.from_energy_bounds(0.05,
                                                  100,
                                                  nbin=20,
                                                  per_decade=True,
                                                  unit="TeV",
                                                  name="energy_true")

    geom = RegionGeom.create(region=on_region, axes=[energy_axis])
    return SpectrumDataset.create(geom=geom,
                                  energy_axis_true=energy_axis_true,
                                  name=name)
Example #12
def get_spectrumdataset_rad_max(name):
    """get the spectrum dataset maker for the energy-dependent spectrum extraction"""
    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_center = PointSkyRegion(target_position)

    energy_axis = MapAxis.from_energy_bounds(0.005,
                                             50,
                                             nbin=28,
                                             per_decade=False,
                                             unit="TeV",
                                             name="energy")
    energy_axis_true = MapAxis.from_energy_bounds(0.005,
                                                  50,
                                                  nbin=20,
                                                  per_decade=False,
                                                  unit="TeV",
                                                  name="energy_true")

    geom = RegionGeom.create(region=on_center, axes=[energy_axis])

    return SpectrumDataset.create(geom=geom,
                                  energy_axis_true=energy_axis_true,
                                  name=name)
Example #13
def test_npred_models():
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)

    geom = RegionGeom(region=None, axes=[e_reco])

    spectrum_dataset = SpectrumDataset.create(geom=geom)
    spectrum_dataset.exposure.quantity = 1e10 * u.Unit("cm2 h")

    pwl_1 = PowerLawSpectralModel(index=2)
    pwl_2 = PowerLawSpectralModel(index=2)
    model_1 = SkyModel(spectral_model=pwl_1)
    model_2 = SkyModel(spectral_model=pwl_2)

    spectrum_dataset.models = Models([model_1, model_2])
    npred = spectrum_dataset.npred()

    assert_allclose(npred.data.sum(), 64.8)

    npred_sig = spectrum_dataset.npred_signal()
    assert_allclose(npred_sig.data.sum(), 64.8)

    npred_sig_model1 = spectrum_dataset.npred_signal(model_name=model_1.name)
    assert_allclose(npred_sig_model1.data.sum(), 32.4)
Example #14
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations, caplog):
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)

    maker = SpectrumDatasetMaker(selection=["counts", "exposure"])

    datasets = []

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV,
                                name="energy_true")
    geom = RegionGeom.create(region=region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, obs)
        datasets.append(dataset_on_off)

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
    assert_allclose(datasets[0].mask_safe.data, False)

    assert "WARNING" in [record.levelname for record in caplog.records]

    message1 = (
        f"ReflectedRegionsBackgroundMaker failed. "
        f"No OFF region found outside exclusion mask for dataset '{datasets[0].name}'."
    )
    message2 = (f"ReflectedRegionsBackgroundMaker failed. "
                f"Setting {datasets[0].name} mask to False.")

    assert message1 in [record.message for record in caplog.records]
    assert message2 in [record.message for record in caplog.records]
Example #15
    def run(self, dataset, observation):
        """Make spectrum dataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.SpectrumDataset`
            Spectrum dataset.
        observation : `~gammapy.data.Observation`
            Observation to reduce.

        Returns
        -------
        dataset : `~gammapy.datasets.SpectrumDataset`
            Spectrum dataset.
        """
        kwargs = {
            "gti": observation.gti,
            "meta_table": self.make_meta_table(observation),
        }

        if "counts" in self.selection:
            kwargs["counts"] = self.make_counts(dataset.counts.geom,
                                                observation)

        if "background" in self.selection:
            kwargs["background"] = self.make_background(
                dataset.counts.geom, observation)

        if "exposure" in self.selection:
            kwargs["exposure"] = self.make_exposure(dataset.exposure.geom,
                                                    observation)

        if "edisp" in self.selection:
            kwargs["edisp"] = self.make_edisp_kernel(
                dataset.edisp.edisp_map.geom, observation)

        return SpectrumDataset(name=dataset.name, **kwargs)
Example #16
def test_spectrum_dataset_maker_hess_dl3(spectrum_dataset_crab, observations_hess_dl3):
    maker = SpectrumDatasetMaker(use_region_center=False)

    datasets = []
    for obs in observations_hess_dl3:
        dataset = maker.run(spectrum_dataset_crab, obs)
        datasets.append(dataset)

    # Exposure
    assert_allclose(datasets[0].exposure.data.sum(), 7374718644.757894)
    assert_allclose(datasets[1].exposure.data.sum(), 6691006466.659032)

    # Background
    assert_allclose(datasets[0].npred_background().data.sum(), 7.7429157, rtol=1e-5)
    assert_allclose(datasets[1].npred_background().data.sum(), 5.7314076, rtol=1e-5)

    # Compare background with using bigger region
    e_reco = datasets[0].background.geom.axes['energy']
    e_true = datasets[0].exposure.geom.axes['energy_true']
    geom_bigger = RegionGeom.create("icrs;circle(83.63, 22.01, 0.22)", axes=[e_reco])

    datasets_big_region = []
    bigger_region_dataset = SpectrumDataset.create(geom=geom_bigger, energy_axis_true=e_true)
    for obs in observations_hess_dl3:
        dataset = maker.run(bigger_region_dataset, obs)
        datasets_big_region.append(dataset)

    ratio_regions = datasets[0].counts.geom.solid_angle() / datasets_big_region[1].counts.geom.solid_angle()
    ratio_bg_1 = datasets[0].npred_background().data.sum() / datasets_big_region[0].npred_background().data.sum()
    ratio_bg_2 = datasets[1].npred_background().data.sum() / datasets_big_region[1].npred_background().data.sum()
    assert_allclose(ratio_bg_1, ratio_regions, rtol=1e-2)
    assert_allclose(ratio_bg_2, ratio_regions, rtol=1e-2)

    #Edisp -> it isn't exactly 8, is that right? it also isn't without averaging
    assert_allclose(datasets[0].edisp.edisp_map.data[:,:,0,0].sum(), e_reco.nbin*2, rtol=1e-1)
    assert_allclose(datasets[1].edisp.edisp_map.data[:,:,0,0].sum(), e_reco.nbin*2, rtol=1e-1)
Example #17
def test_reflected_bkg_maker(on_region, reflected_bkg_maker, observations):
    datasets = []

    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV

    dataset_empty = SpectrumDataset.create(e_reco=e_reco,
                                           e_true=e_true,
                                           region=on_region)

    maker = SpectrumDatasetMaker(selection=["counts"])

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert_allclose(datasets[0].counts_off.data.sum(), 76)
    assert_allclose(datasets[1].counts_off.data.sum(), 60)

    regions_0 = compound_region_to_list(datasets[0].counts_off.geom.region)
    regions_1 = compound_region_to_list(datasets[1].counts_off.geom.region)
    assert_allclose(len(regions_0), 11)
    assert_allclose(len(regions_1), 11)
Example #18
def test_reflected_bkg_maker(on_region, reflected_bkg_maker, observations):
    datasets = []

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV,
                                name="energy_true")

    geom = RegionGeom(region=on_region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

    maker = SpectrumDatasetMaker(selection=["counts"])

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert_allclose(datasets[0].counts_off.data.sum(), 76)
    assert_allclose(datasets[1].counts_off.data.sum(), 60)

    regions_0 = compound_region_to_list(datasets[0].counts_off.geom.region)
    regions_1 = compound_region_to_list(datasets[1].counts_off.geom.region)
    assert_allclose(len(regions_0), 11)
    assert_allclose(len(regions_1), 11)
Example #19
def test_spectrum_dataset_stack_nondiagonal_no_bkg(spectrum_dataset):
    energy = spectrum_dataset.counts.geom.axes["energy"]
    geom = spectrum_dataset.counts.geom

    edisp1 = EDispKernelMap.from_gauss(
        energy_axis=energy,
        energy_axis_true=energy.copy(name="energy_true"),
        sigma=0.1,
        bias=0,
        geom=geom.to_image(),
    )
    edisp1.exposure_map.data += 1

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy.copy(name="energy_true"), instrument="HESS")

    livetime = 100 * u.s

    geom_true = geom.as_energy_true
    exposure = make_map_exposure_true_energy(geom=geom_true,
                                             livetime=livetime,
                                             pointing=geom_true.center_skydir,
                                             aeff=aeff)

    geom = spectrum_dataset.counts.geom
    counts = RegionNDMap.from_geom(geom=geom)

    gti = GTI.create(start=0 * u.s, stop=livetime)
    spectrum_dataset1 = SpectrumDataset(
        counts=counts,
        exposure=exposure,
        edisp=edisp1,
        meta_table=Table({"OBS_ID": [0]}),
        gti=gti.copy(),
    )

    edisp2 = EDispKernelMap.from_gauss(
        energy_axis=energy,
        energy_axis_true=energy.copy(name="energy_true"),
        sigma=0.2,
        bias=0.0,
        geom=geom,
    )
    edisp2.exposure_map.data += 1

    gti2 = GTI.create(start=100 * u.s, stop=200 * u.s)

    spectrum_dataset2 = SpectrumDataset(
        counts=counts,
        exposure=exposure.copy(),
        edisp=edisp2,
        meta_table=Table({"OBS_ID": [1]}),
        gti=gti2,
    )
    spectrum_dataset1.stack(spectrum_dataset2)

    assert_allclose(spectrum_dataset1.meta_table["OBS_ID"][0], [0, 1])

    assert spectrum_dataset1.background_model is None
    assert_allclose(spectrum_dataset1.gti.time_sum.to_value("s"), 200)
    assert_allclose(spectrum_dataset1.exposure.quantity[2].to_value("m2 s"),
                    1573851.079861)
    kernel = edisp1.get_edisp_kernel()
    assert_allclose(kernel.get_bias(1 * u.TeV), 0.0, atol=1.2e-3)
    assert_allclose(kernel.get_resolution(1 * u.TeV), 0.1581, atol=1e-2)
Example #20
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV",
                                             "10 TeV",
                                             nbin=30,
                                             name="energy_true")

    aeff = EffectiveAreaTable2D.from_parametrization(
        energy_axis_true=energy_true, instrument="HESS")

    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)

    geom_true = geom.as_energy_true
    exposure = make_map_exposure_true_energy(geom=geom_true,
                                             livetime=livetime,
                                             pointing=geom_true.center_skydir,
                                             aeff=aeff)

    edisp = EDispKernelMap.from_diagonal_response(energy,
                                                  energy_true,
                                                  geom=geom.to_image())
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    background = spectrum_dataset.background

    mask_safe = RegionNDMap.from_geom(geom=geom, dtype=bool)
    mask_safe.data += True

    spectrum_dataset1 = SpectrumDataset(
        name="ds1",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure.copy(),
        edisp=edisp.copy(),
        background=background.copy(),
        gti=gti.copy(),
        mask_safe=mask_safe,
    )

    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)
    exposure2 = exposure.copy()

    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]
    spectrum_dataset2 = SpectrumDataset(
        name="ds2",
        counts=spectrum_dataset.counts.copy(),
        exposure=exposure2,
        edisp=edisp,
        background=bkg2,
        mask_safe=safe_mask2,
        gti=gti2,
    )

    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(spectrum_dataset1.exposure.quantity[0],
                    4.755644e09 * u.Unit("cm2 s"))
    assert_allclose(spectrum_dataset1.background.data[1:],
                    3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background.data[0], background.data[0])

    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
Example #21
    def create_dataset_list(self):
        """
        Create the dataset list as a list of list to handle more than one
        irf per slice (GRB seen on both sites).

        The pointing, center of the field of view, is the same for all time
        slices as it is considered that the GRB is constantly followed within
        the pre-computed visibililty window. The pointing is displaced compared
        to the GRB position by a distance mc.offset, a value
        choosen to give enough space to have 1/mcf.alpha off regions of radius
        mcf.on_size, the detection area.
        The detection areas, on and off regions, have a size independent of
        the energy, but the effective area and background are mofified to take
        into account the PSF evolution with energy (see the irf module).

        The datasets are defined with the same true and reconstructed enegy
        axes so that counts can be added bin per bin. Differences in threshold
        due to different IRF are taken into account by masking some bins
        (the mask at a certain energy value cancel the whole bin it belongs to,
        see the irf module for more explanations)

        A word on masking
        =================
        Direct masking is not made available to users in Gammapy 0.17:
        it is possible to mask on effective area criteria (minimum value)
        or on data counts (counts.geom.energy_mask).
        The direct masking on energy is done as shown below, as a result of
        discussions with Gammapy developers on the Gammapy Slack channel
        (Nov. 2nd, 2020).
        Note that the mask_safe is not considered in the printout or peek
        functions and has to be obtained by hand (this could be corrected in
        later versions): it is implemented in the check_dataset SoHAPPy
        function.

        Discussion on masking binning with A. Donath, Dec. 3rd, 2020
        ------------------------------------------------------------
        Applying a mask (Emin, Emax) simply cancels the data of the
        energy axis bins outside the mask. Even partially covered bins are
        lost (e.g. bins (E1, E2) with E1 < Emin < E2).
        There could have been a partial recovery of the data (i.e. keeping
        the counts or flux between Emin and E2), but this is not
        what has been chosen.
        The influence of this becomes small if the energy bin sizes are small
        enough. However, small bin sizes can give low statistics.
        A. Donath says it is not a problem as long as the statistical
        computation is done properly: this is in fact approaching the
        unbinned likelihood case. Only for flux point computation, e.g.
        to get a significant point at high energies, do bins need enough
        statistics. This can be done since v0.18.2 using
        SpectrumDatasetOnOff.resample_energy_axis() to re-group the data
        before model fitting.
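
        As an illustrative sketch only (the energy bounds are placeholders
        and dataset_onoff stands for any SpectrumDatasetOnOff), the
        regrouping mentioned above could look like:
        <code>
        # coarser reconstructed energy axis for the flux points
        axis_new = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=5,
                                              name="energy")
        dataset_regrouped = dataset_onoff.resample_energy_axis(
            energy_axis=axis_new)
        </code>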

        A word on stacking
        ==================
        In order to be stacked, datasets should be based on the same
        energy axis, which is indeed implemented in SoHAPPy.
        Two observations usually have different IRFs, and in particular
        different energy thresholds. The thresholds are taken into account
        by masking, as mentioned above.
        Stacking in Gammapy is intended to merge observations that were too
        large in data volume to be handled together, and/or to sum up
        consecutive observations of the same object. Stacking 2 observations
        results in a longer observation with the same model, in particular
        the same spectral model, with an observation livetime being the sum
        of the two initial observation livetimes. It is not intended to sum
        up two observations done at the same time, with the same livetime.
        As a consequence the Gammapy stacking cannot be used to "merge"
        the simultaneous observations of two sites. This would result in a
        livetime larger than the simultaneous observations and would
        therefore shift the observation times in the consecutive time slices.
        There was an attempt to work around this by reducing the livetime,
        ds_stacked.livetime /= 2, and multiplying the attached spectrum by 2
        to keep the predicted count number coherent. But this causes problems
        later in the analysis.
        As a consequence, it is chosen to keep track of each observation
        identity, having a dataset for each time slice on a given site, and
        two datasets for slices in common on two sites (or more).
        The aperture photometry analysis then simply adds up the count
        numbers from the two sites, whereas the spectrum extraction handles
        the situation outside the simulation code, in a separate notebook.
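
        For reference, a minimal sketch of the standard stacking of two
        consecutive datasets (ds1 and ds2 are placeholders assumed to share
        the same energy axes):
        <code>
        ds1.stack(ds2)           # in-place: ds1 now contains both
        print(ds1.gti.time_sum)  # the sum of the two livetimes
        </code>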

        Saving datasets
        ===============
        The choice made here is to save the simulation to disk as a binary
        file, which has the advantage of saving all information.
        An alternative is to save the dataset list only.

        Discussion on the  slack channel with A. Donath (Nov. 27th, 2020)
        -----------------------------------------------------------------
        For writing datasets in the present use-case there is a bit
        more bookkeeping to do, so that you can read back the model and
        datasets correctly. Here is a minimal example:
        <code>
        from gammapy.datasets import SpectrumDatasetOnOff, Datasets
        from gammapy.modeling.models import PowerLawSpectralModel, SkyModel

        path = "$GAMMAPY_DATA/joint-crab/spectra/hess/"

        obs_1 = SpectrumDatasetOnOff.from_ogip_files(path + "pha_obs23523.fits")
        obs_2 = SpectrumDatasetOnOff.from_ogip_files(path + "pha_obs23592.fits")

        model_1 = SkyModel(PowerLawSpectralModel(), name="model-1", datasets_names=[obs_1.name])
        model_2 = SkyModel(PowerLawSpectralModel(), name="model-2", datasets_names=[obs_2.name])
        obs_1.models = [model_1]
        obs_2.models = [model_2]
        datasets = Datasets([obs_1, obs_2])

        datasets.write("test", prefix="test", overwrite=True)
        </code>

        This was something that we started to refactor in v0.17 and are
        still improving. So you have the possibility to “assign” models to
        datasets in a declarative way using the datasets_names keyword.
        The resulting YAML file looks like:

        <code>
        components:
        -   name: model-1
            type: SkyModel
            spectral:
                type: PowerLawSpectralModel
                parameters:
                - {name: index, value: 2.0, unit: '', min: .nan, max: .nan, frozen: false,
                    error: 0}
                - {name: amplitude, value: 1.0e-12, unit: cm-2 s-1 TeV-1, min: .nan, max: .nan,
                    frozen: false, error: 0}
                - {name: reference, value: 1.0, unit: TeV, min: .nan, max: .nan, frozen: true,
                    error: 0}
            datasets_names: ['23523']
        -   name: model-2
            type: SkyModel
            spectral:
                type: PowerLawSpectralModel
                parameters:
                - {name: index, value: 2.0, unit: '', min: .nan, max: .nan, frozen: false,
                    error: 0}
                - {name: amplitude, value: 1.0e-12, unit: cm-2 s-1 TeV-1, min: .nan, max: .nan,
                    frozen: false, error: 0}
                - {name: reference, value: 1.0, unit: TeV, min: .nan, max: .nan, frozen: true,
                    error: 0}
            datasets_names: ['23592']
        covariance: test/test_models_covariance.dat
        </code>

        So the file explicitly mentions, for each model component, the
        dataset it is assigned to. I think without defining datasets_names
        the information will not be present in the output YAML file, and when
        reading back the data and model the assignment is not correct anymore.
        We will add more documentation on this “global” model handling soon.

        Returns
        -------
        dset_list : Numpy array of Dataset objects
            A list of datasets (not a collection like a Datasets object).

        """

        dset_list = []
        
        for aslice in self.slot.slices:

            # Note: the spectrum is related to the slice, not the site
            spec  = self.slot.grb.spectra[aslice.fid()]
            name  = "Spec"+"-"+str(aslice.fid())
            model = SkyModel(spectral_model = spec, name = name)

            # The reference time and duration of the observation.
            # Note that this is the start of the slice,
            # not necessarily the point at which the flux and altitude
            # are evaluated.
            tref = self.slot.grb.t_trig + aslice.ts1() # start of slice
            dt   = aslice.ts2() - aslice.ts1()

            # Each slice can have more than one IRF since it can be observed
            # from both sites in some cases.
            # Two independent datasets are created
            dset_site = []
            for ip, perf in enumerate(aslice.irf()):

                array = perf.subarray
                kzen = perf.kzen
                on_size = mcf.on_size[array]
                offset  = mcf.offset[array]
                # The on-region is on the GRB
                on_region = CircleSkyRegion(center = self.slot.grb.radec,
                                            radius = on_size)
                # Create the observation - The pointing is not on the GRB
                on_ptg = SkyCoord(self.slot.grb.radec.ra + offset,
                                  self.slot.grb.radec.dec, frame="icrs")

                with warnings.catch_warnings(): # because of t_trig
                    warnings.filterwarnings("ignore")
                    obs = Observation.create(obs_id   = aslice.idt(),
                                             pointing = on_ptg,
                                             livetime = dt,
                                             irfs     = perf.irf,
                                             deadtime_fraction = 0,
                                             reference_time = tref)

                # Create dataset - correct for containment - add model
                ds_name  = aslice.site()+"-"+str(aslice.idt())+"-"+str(ip)
                
                # Reconstructed energy axis
                # Stacking requires the same original binning -> use the
                # largest interval. Ensure that all possible edges are in,
                # then apply masking later. Use the optimised binning.
                # There is a bug in 0.17 (unit not taken into account
                # correctly) preventing us from simply writing
                # erec_axis = MapAxis.from_edges(erec_edges, name="energy")
                e_reco = MapAxis.from_edges(mcf.erec_edges[array].to("TeV").value,
                                            unit="TeV",
                                            name="energy",
                                            interp="log")
                e_true = perf.etrue
                
                ds_empty = SpectrumDataset.create(e_reco = e_reco,
                                                  e_true = e_true,
                                                  region = on_region,
                                                  name   = ds_name)

                maker = SpectrumDatasetMaker(
                    selection=["exposure", "background", "edisp"])
                    
                ds = maker.run(ds_empty, obs)

                # Compute containment factor.
                # The PSF being given versus true energy, using the reconstructed energy
                # axis to compute the factor assumes that the reconstructed energy is
                # strictly equal to the true energy, which is certainly not the case at
                # the lowest energies.
                # Maybe this could deserve a specific study.
                radii = perf.irf['psf'].containment_radius(energy   = e_reco.center,
                                                           theta    = mcf.offset[array],
                                                           fraction = mcf.containment)[0]
                factor = (1 - np.cos(radii)) / (1 - np.cos(mcf.on_size[array]))
                # If the factor is too large above threshold, report an error
                idx = np.where((e_reco.center)[np.where(factor > 1)] >= mcf.erec_min[array][kzen])
                if np.size(idx):
                    # Print the guilty energies
                    print(" E = ", e_reco.center[idx])
                    print(" R = ", radii[idx].value, " max = ",
                          mcf.on_size[array].value)
                    print(" F = ", factor[idx])
                    #sys.exit("IRF : Initial region too small")

                # In Gammapy 0.17, it is mandatory to change the effective
                # area before the model is set, since once the model is set
                # it cannot be changed anymore.
                # This feature (bug) was discussed in Gammapy issue #3016 in
                # Sept 2020.

                # ds.exposure.data   *= mcf.containment
                ds.exposure   *= mcf.containment
                ds.background.data *= factor.value.reshape((-1, 1, 1))
                ds.models           = model
                mask = ds.mask_safe.geom.energy_mask(energy_min = mcf.erec_min[array][kzen],
                                                     energy_max = mcf.erec_max[kzen])
                    
                mask = mask & ds.mask_safe.data
                ds.mask_safe = RegionNDMap(ds.mask_safe.geom,data=mask)

                dset_site.append(ds)

            dset_list.append(dset_site)

        return np.asarray(dset_list)
Example #22
def test_spectrum_dataset_stack_diagonal_safe_mask(spectrum_dataset):
    geom = spectrum_dataset.counts.geom

    energy = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV",
                                             "10 TeV",
                                             nbin=30,
                                             name="energy_true")

    aeff = EffectiveAreaTable.from_parametrization(
        energy.edges, "HESS").to_region_map(geom.region)

    livetime = 100 * u.s
    gti = GTI.create(start=0 * u.s, stop=livetime)

    exposure = aeff * livetime

    edisp = EDispKernelMap.from_diagonal_response(energy,
                                                  energy_true,
                                                  geom=geom.to_image())
    edisp.exposure_map.data = exposure.data[:, :, np.newaxis, :]

    background = spectrum_dataset.background_model.map.copy()

    spectrum_dataset1 = SpectrumDataset(name="ds1",
                                        counts=spectrum_dataset.counts.copy(),
                                        exposure=exposure.copy(),
                                        edisp=edisp.copy(),
                                        models=BackgroundModel(
                                            background,
                                            name="ds1-bkg",
                                            datasets_names=["ds1"]),
                                        gti=gti.copy())

    livetime2 = 0.5 * livetime
    gti2 = GTI.create(start=200 * u.s, stop=200 * u.s + livetime2)
    aeff2 = aeff * 2
    bkg2 = RegionNDMap.from_geom(geom=geom, data=2 * background.data)

    geom = spectrum_dataset.counts.geom
    data = np.ones(spectrum_dataset.data_shape, dtype="bool")
    data[0] = False
    safe_mask2 = RegionNDMap.from_geom(geom=geom, data=data)
    exposure2 = aeff2 * livetime2

    edisp = edisp.copy()
    edisp.exposure_map.data = exposure2.data[:, :, np.newaxis, :]
    spectrum_dataset2 = SpectrumDataset(name="ds2",
                                        counts=spectrum_dataset.counts.copy(),
                                        exposure=exposure2,
                                        edisp=edisp,
                                        models=BackgroundModel(
                                            bkg2,
                                            name="ds2-bkg",
                                            datasets_names=["ds2"]),
                                        mask_safe=safe_mask2,
                                        gti=gti2)

    spectrum_dataset1.stack(spectrum_dataset2)

    reference = spectrum_dataset.counts.data
    assert_allclose(spectrum_dataset1.counts.data[1:], reference[1:] * 2)
    assert_allclose(spectrum_dataset1.counts.data[0], 141363)
    assert_allclose(spectrum_dataset1.exposure.data[0], 4.755644e+09)
    assert_allclose(spectrum_dataset1.background_model.map.data[1:],
                    3 * background.data[1:])
    assert_allclose(spectrum_dataset1.background_model.map.data[0],
                    background.data[0])

    assert_allclose(
        spectrum_dataset1.exposure.quantity.to_value("m2s"),
        2 * (aeff * livetime).quantity.to_value("m2s"),
    )
    kernel = edisp.get_edisp_kernel()
    kernel_stacked = spectrum_dataset1.edisp.get_edisp_kernel()

    assert_allclose(kernel_stacked.pdf_matrix[1:], kernel.pdf_matrix[1:])
    assert_allclose(kernel_stacked.pdf_matrix[0], 0.5 * kernel.pdf_matrix[0])
Example #23
observations = data_store.get_observations(obs_ids)

crab_position = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")

# The ON region center is defined in the ICRS frame. The angle is defined w.r.t. its axis.
rectangle = RectangleSkyRegion(center=crab_position,
                               width=0.5 * u.deg,
                               height=0.4 * u.deg,
                               angle=0 * u.deg)

bkg_maker = ReflectedRegionsBackgroundMaker(min_distance=0.1 * u.rad)
dataset_maker = SpectrumDatasetMaker(selection=["counts"])

energy_axis = MapAxis.from_energy_bounds(0.1, 100, 30, unit="TeV")
geom = RegionGeom.create(region=rectangle, axes=[energy_axis])
dataset_empty = SpectrumDataset.create(geom=geom)

datasets = []

for obs in observations:

    dataset = dataset_maker.run(dataset_empty.copy(name=f"obs-{obs.obs_id}"),
                                obs)
    dataset_on_off = bkg_maker.run(observation=obs, dataset=dataset)
    datasets.append(dataset_on_off)

m = Map.create(skydir=crab_position, width=(8, 8), proj="TAN")

ax = m.plot(vmin=-1, vmax=0)

rectangle.to_pixel(ax.wcs).plot(ax=ax, color="black")
Example #24
obs_ids = data_store.obs_table["OBS_ID"][mask].data
observations = data_store.get_observations(obs_ids)

crab_position = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")

# The ON region center is defined in the ICRS frame. The angle is defined w.r.t. its axis.
rectangle = RectangleSkyRegion(center=crab_position,
                               width=0.5 * u.deg,
                               height=0.4 * u.deg,
                               angle=0 * u.deg)

bkg_maker = ReflectedRegionsBackgroundMaker(min_distance=0.1 * u.rad)
dataset_maker = SpectrumDatasetMaker(selection=["counts"])

e_reco = MapAxis.from_energy_bounds(0.1, 100, 30, unit="TeV")
dataset_empty = SpectrumDataset.create(e_reco=e_reco, region=rectangle)

datasets = []

for obs in observations:

    dataset = dataset_maker.run(dataset_empty.copy(name=f"obs-{obs.obs_id}"),
                                obs)
    dataset_on_off = bkg_maker.run(observation=obs, dataset=dataset)
    datasets.append(dataset_on_off)

m = Map.create(skydir=crab_position, width=(8, 8), proj="TAN")

_, ax, _ = m.plot(vmin=-1, vmax=0)

rectangle.to_pixel(ax.wcs).plot(ax=ax, color="black")
Example #25
def generate_dataset(Eflux,
                     flux,
                     Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points either as
    a SpectrumDataset or a SpectrumDatasetOnOff

    Note :
    - in SpectrumDataset, the backgound counts are assumed precisely know and
    are not fluctuated.
    - in SpectrumDatasetOnOff, the background counts (off counts) are
    fluctuated from the IRF known values.
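
    A minimal sketch of the two fluctuation modes (illustrative; it mirrors
    the code in the body below)::

        ds.fake(random_state=random_state)               # SpectrumDataset
        ds.fake(npred_background=ds.npred_background())  # SpectrumDatasetOnOff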

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined, if not over all
        energies. The default is None.
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00',scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the on-off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataSet.
        The default is True.
    seed : String, optional
        The seed for the random number generator; if an integer, the same
        random series is generated at each run. The default is 'random-seed'.
    debug : Boolean, optional
        If True, verbose mode. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.

    """
    random_state = get_random_state(seed)

    ### Define on region
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg,
                           frame="icrs")  # Observing region
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define energy axis (see spectrum analysis notebook)
    # edges for SpectrumDataset - all dataset should have the same axes
    # Note that linear spacing is clearly problematic for powerlaw fluxes
    # Axes can also be defined using MapAxis
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value
    #     ereco = np.logspace(np.log10(1.1*E1v), np.log10(0.9*E2v), 20) * unit
    #     ereco_axis = MapAxis.from_edges(ereco.to("TeV").value,
    #                                    unit="TeV",
    #                                    name="energy",
    #                                    interp="log")

    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")

    #     etrue = np.logspace(np.log10(    E1v), np.log10(    E2v), 50) * unit
    #     etrue_axis = MapAxis.from_edges(etrue.to("TeV").value,
    #                                    unit="TeV",
    #                                    name="energy_true",
    #                                    interp="log")
    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if (debug):
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF
    irf = load_cta_irfs(irf_file)

    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})

    model = SkyModel(spectral_model=spec, name="Spec" + str(name))
    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(
        e_reco=ereco_axis,  # Ereco.edges,
        e_true=etrue_axis,  #Etrue.edges,
        region=on_region,
        name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model
    mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                         energy_max=Erange[1])

    mask = mask & ds.mask_safe.data
    ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Is fake mandatory?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed
    if (onoff):

        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 /
                                                        alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if (onoff):
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)

    return ds
Example #26
def test_incorrect_mask(spectrum_dataset):
    mask_fit = np.ones(30, dtype=np.dtype("float"))
    with pytest.raises(ValueError):
        SpectrumDataset(
            counts=spectrum_dataset.counts.copy(), mask_fit=mask_fit,
        )
Example #27
    bias=0,
    sigma=0.2,
)

observation = Observation.create(
    obs_id=0,
    pointing=SkyCoord("0d", "0d", frame="icrs"),
    irfs={"aeff": aeff, "edisp": edisp},
    tstart=0 * u.h,
    tstop=0.5 * u.h,
    location=observatory_locations["hess"],
)

geom = RegionGeom.create("icrs;circle(0, 0, 0.1)", axes=[energy_reco])

stacked = SpectrumDataset.create(geom=geom, energy_axis_true=energy_true)

maker = SpectrumDatasetMaker(selection=["edisp", "exposure"])

dataset_1 = maker.run(stacked.copy(), observation=observation)
dataset_2 = maker.run(stacked.copy(), observation=observation)

pwl = PowerLawSpectralModel()
model = SkyModel(spectral_model=pwl, name="test-source")

dataset_1.mask_safe = geom.energy_mask(energy_min=2 * u.TeV)
dataset_2.mask_safe = geom.energy_mask(energy_min=0.6 * u.TeV)

dataset_1.models = model
dataset_2.models = model
dataset_1.counts = dataset_1.npred()
Example #28
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        log.info("Reducing spectrum datasets.")
        datasets_settings = self.config.datasets
        on_lon = datasets_settings.on_region.lon
        on_lat = datasets_settings.on_region.lat
        on_center = SkyCoord(on_lon,
                             on_lat,
                             frame=datasets_settings.on_region.frame)
        on_region = CircleSkyRegion(on_center,
                                    datasets_settings.on_region.radius)

        maker_config = {}
        if datasets_settings.containment_correction:
            maker_config[
                "containment_correction"] = datasets_settings.containment_correction
        e_reco = self._make_energy_axis(datasets_settings.geom.axes.energy)

        maker_config["selection"] = ["counts", "exposure", "edisp"]
        dataset_maker = SpectrumDatasetMaker(**maker_config)

        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            exclusion_region = Map.read(datasets_settings.background.exclusion)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker_config.update(datasets_settings.background.parameters)
        bkg_method = datasets_settings.background.method
        if bkg_method == "reflected":
            bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)
            log.debug(
                f"Creating ReflectedRegionsBackgroundMaker with arguments {bkg_maker_config}"
            )
        else:
            bkg_maker = None
            log.warning(
                f"No background maker set for 1d analysis. Check configuration."
            )

        safe_mask_selection = datasets_settings.safe_mask.methods
        safe_mask_settings = datasets_settings.safe_mask.parameters
        safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)

        e_true = self._make_energy_axis(
            datasets_settings.geom.axes.energy_true, name="energy_true")

        geom = RegionGeom.create(region=on_region, axes=[e_reco])
        reference = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = dataset_maker.run(reference.copy(), obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset, obs)
                if dataset.counts_off is None:
                    log.info(
                        f"No OFF region found for observation {obs.obs_id}. Discarding."
                    )
                    continue
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)

        self.datasets = Datasets(datasets)

        if datasets_settings.stack:
            stacked = self.datasets.stack_reduce(name="stacked")
            self.datasets = Datasets([stacked])
Example #29
def spectrum_dataset_crab_fine():
    e_true = MapAxis.from_edges(np.logspace(-2, 2.5, 109) * u.TeV,
                                name="energy_true")
    e_reco = MapAxis.from_energy_edges(np.logspace(-2, 2, 73) * u.TeV)
    geom = RegionGeom.create("icrs;circle(83.63, 22.01, 0.11)", axes=[e_reco])
    return SpectrumDataset.create(geom=geom, energy_axis_true=e_true)
Example #30
def spectrum_dataset_gc():
    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-1, 2, 13) * u.TeV,
                                name="energy_true")
    geom = RegionGeom.create("galactic;circle(0, 0, 0.11)", axes=[e_reco])
    return SpectrumDataset.create(geom=geom, energy_axis_true=e_true)