Example 1
def test_spectrum_dataset_maker_hess_cta(spectrum_dataset_gc,
                                         observations_cta_dc1):
    maker = SpectrumDatasetMaker()

    datasets = []

    for obs in observations_cta_dc1:
        dataset = maker.run(spectrum_dataset_gc, obs)
        datasets.append(dataset)

    assert_allclose(datasets[0].counts.data.sum(), 53)
    assert_allclose(datasets[1].counts.data.sum(), 47)

    assert_allclose(datasets[0].livetime.value, 1764.000034)
    assert_allclose(datasets[1].livetime.value, 1764.000034)

    assert_allclose(datasets[0].background.data.sum(), 2.238345, rtol=1e-5)
    assert_allclose(datasets[1].background.data.sum(), 2.164593, rtol=1e-5)
Example 2
def test_safe_mask_maker_dl3(spectrum_dataset_crab, observations_hess_dl3):

    safe_mask_maker = SafeMaskMaker()
    maker = SpectrumDatasetMaker()

    obs = observations_hess_dl3[0]
    dataset = maker.run(spectrum_dataset_crab, obs)
    dataset = safe_mask_maker.run(dataset, obs)
    assert_allclose(dataset.energy_range[0].value, 1)
    assert dataset.energy_range[0].unit == "TeV"

    mask_safe = safe_mask_maker.make_mask_energy_aeff_max(dataset)
    assert mask_safe.data.sum() == 4

    mask_safe = safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert mask_safe.data.sum() == 3

    mask_safe = safe_mask_maker.make_mask_energy_bkg_peak(dataset)
    assert mask_safe.data.sum() == 3
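
The three make_mask_energy_* calls above are the building blocks that SafeMaskMaker.run applies when the corresponding methods are selected. A minimal sketch (assuming a dataset and obs prepared as in the test) that requests the same cuts through the constructor, as later examples do:

# Sketch only: request specific safe-mask methods via the constructor.
safe_mask_maker = SafeMaskMaker(methods=["aeff-max", "edisp-bias", "bkg-peak"],
                                aeff_percent=10)
dataset = safe_mask_maker.run(dataset, obs)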
Example 3
def test_dataset_maker_spectrum_global_rad_max():
    """test the energy-dependent spectrum extraction"""

    observation = Observation.read('$GAMMAPY_DATA/joint-crab/dl3/magic/run_05029748_DL3.fits')

    maker = SpectrumDatasetMaker(
        containment_correction=False, selection=["counts", "exposure", "edisp"]
    )
    dataset = maker.run(get_spectrumdataset_rad_max("spec"), observation)

    finder = WobbleRegionsFinder(n_off_regions=3)
    bkg_maker = ReflectedRegionsBackgroundMaker(region_finder=finder)
    dataset_on_off = bkg_maker.run(dataset, observation)

    counts = dataset_on_off.counts
    counts_off = dataset_on_off.counts_off
    assert counts.unit == ""
    assert counts_off.unit == ""
    assert_allclose(counts.data.sum(), 437, rtol=1e-5)
    assert_allclose(counts_off.data.sum(), 273, rtol=1e-5)
Example 4
    def test_extract(
        pars,
        results,
        observations_hess_dl3,
        spectrum_dataset_crab_fine,
        reflected_regions_bkg_maker,
    ):
        """Test quantitative output for various configs"""
        safe_mask_maker = SafeMaskMaker()
        maker = SpectrumDatasetMaker(
            containment_correction=pars["containment_correction"])

        obs = observations_hess_dl3[0]
        dataset = maker.run(spectrum_dataset_crab_fine, obs)
        dataset = reflected_regions_bkg_maker.run(dataset, obs)
        dataset = safe_mask_maker.run(dataset, obs)

        exposure_actual = (
            dataset.exposure.interp_by_coord(
                {
                    "energy_true": 5 * u.TeV,
                    "skycoord": dataset.counts.geom.center_skydir,
                }
            )
            * dataset.exposure.unit
        )

        edisp_actual = dataset._edisp_kernel.data.evaluate(
            energy_true=5 * u.TeV, energy=5.2 * u.TeV
        )
        aeff_actual = exposure_actual / dataset.exposure.meta["livetime"]

        assert_quantity_allclose(aeff_actual, results["aeff"], rtol=1e-3)
        assert_quantity_allclose(edisp_actual, results["edisp"], rtol=1e-3)

        # TODO: Introduce assert_stats_allclose
        info = dataset.info_dict()

        assert info["n_on"] == results["n_on"]
        assert_allclose(info["significance"], results["sigma"], rtol=1e-2)

        gti_obs = obs.gti.table
        gti_dataset = dataset.gti.table
        assert_allclose(gti_dataset["START"], gti_obs["START"])
        assert_allclose(gti_dataset["STOP"], gti_obs["STOP"])
Example 5
def test_spectrum_dataset_maker_hess_dl3(spectrum_dataset_crab,
                                         observations_hess_dl3):
    datasets = []
    maker = SpectrumDatasetMaker()

    for obs in observations_hess_dl3:
        dataset = maker.run(spectrum_dataset_crab, obs)
        datasets.append(dataset)

    assert_allclose(datasets[0].counts.data.sum(), 100)
    assert_allclose(datasets[1].counts.data.sum(), 92)

    assert_allclose(datasets[0].exposure.meta["livetime"].value, 1581.736758)
    assert_allclose(datasets[1].exposure.meta["livetime"].value, 1572.686724)

    assert_allclose(datasets[0].npred_background().data.sum(),
                    7.74732,
                    rtol=1e-5)
    assert_allclose(datasets[1].npred_background().data.sum(),
                    6.118879,
                    rtol=1e-5)
Example 6
def test_region_center_spectrum_dataset_maker_hess_dl3(
    spectrum_dataset_crab, observations_hess_dl3
):
    datasets = []
    maker = SpectrumDatasetMaker(use_region_center=True)

    for obs in observations_hess_dl3:
        dataset = maker.run(spectrum_dataset_crab, obs)
        datasets.append(dataset)

    assert isinstance(datasets[0], SpectrumDataset)
    assert not datasets[0].exposure.meta["is_pointlike"]

    assert_allclose(datasets[0].counts.data.sum(), 100)
    assert_allclose(datasets[1].counts.data.sum(), 92)

    assert_allclose(datasets[0].exposure.meta["livetime"].value, 1581.736758)
    assert_allclose(datasets[1].exposure.meta["livetime"].value, 1572.686724)

    assert_allclose(datasets[0].npred_background().data.sum(), 7.747881, rtol=1e-5)
    assert_allclose(datasets[1].npred_background().data.sum(), 5.731624, rtol=1e-5)
Example 7
def test_dataset_maker_spectrum_rad_max_all_excluded(observations_magic_rad_max, caplog):
    """test the energy-dependent spectrum extraction"""

    observation = observations_magic_rad_max[0]

    maker = SpectrumDatasetMaker(
        containment_correction=False, selection=["counts", "exposure", "edisp"]
    )
    dataset = maker.run(get_spectrumdataset_rad_max("spec"), observation)

    # excludes all possible off regions
    exclusion_region = CircleSkyRegion(
        center=observation.pointing_radec,
        radius=1 * u.deg,
    )
    geom = WcsGeom.create(
        npix=(150, 150), binsz=0.05, skydir=observation.pointing_radec, proj="TAN", frame="icrs"
    )

    exclusion_mask = ~geom.region_mask([exclusion_region])

    finder = WobbleRegionsFinder(n_off_regions=1)
    bkg_maker = ReflectedRegionsBackgroundMaker(
        region_finder=finder,
        exclusion_mask=exclusion_mask,
    )

    with caplog.at_level(logging.WARNING):
        dataset_on_off = bkg_maker.run(dataset, observation)

    # all possible off regions are excluded, so no off counts are filled
    assert dataset_on_off.counts_off is None
    assert (dataset_on_off.acceptance_off.data == 0).all()

    assert len(caplog.record_tuples) == 2
    assert caplog.record_tuples[0] == (
        'gammapy.makers.utils',
        logging.WARNING,
        "RegionsFinder returned no regions"
    )
Example 8
def test_run(observations, phase_bkg_maker):

    maker = SpectrumDatasetMaker()

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true")

    pos = SkyCoord("08h35m20.65525s", "-45d10m35.1545s", frame="icrs")
    radius = Angle(0.2, "deg")
    region = SphericalCircleSkyRegion(pos, radius)

    dataset_empty = SpectrumDataset.create(e_reco, e_true, region=region)

    obs = observations["111630"]
    dataset = maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)

    assert_allclose(dataset_on_off.acceptance, 0.1)
    assert_allclose(dataset_on_off.acceptance_off, 0.3)

    assert_allclose(dataset_on_off.counts.data.sum(), 28)
    assert_allclose(dataset_on_off.counts_off.data.sum(), 57)
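
For reference, these numbers imply alpha = acceptance / acceptance_off = 0.1 / 0.3 ≈ 0.33, so the estimated background in the on region is alpha * counts_off = 57 / 3 = 19 counts, i.e. an excess of 28 - 19 = 9 counts.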
Example 9
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)

    maker = SpectrumDatasetMaker(selection=["counts"])

    datasets = []

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV,
                                name="energy_true")
    geom = RegionGeom.create(region=region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
Example 10
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)

    maker = SpectrumDatasetMaker(selection=["counts"])

    datasets = []

    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    dataset_empty = SpectrumDataset.create(e_reco=e_reco,
                                           e_true=e_true,
                                           region=region)

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
Example 11
def test_spectrum_dataset_maker_hess_cta(spectrum_dataset_gc,
                                         observations_cta_dc1):
    maker = SpectrumDatasetMaker(use_region_center=True)

    datasets = []

    for obs in observations_cta_dc1:
        dataset = maker.run(spectrum_dataset_gc, obs)
        datasets.append(dataset)

    assert_allclose(datasets[0].counts.data.sum(), 53)
    assert_allclose(datasets[1].counts.data.sum(), 47)

    assert_allclose(datasets[0].exposure.meta["livetime"].value, 1764.000034)
    assert_allclose(datasets[1].exposure.meta["livetime"].value, 1764.000034)

    assert_allclose(datasets[0].npred_background().data.sum(),
                    2.238805,
                    rtol=1e-5)
    assert_allclose(datasets[1].npred_background().data.sum(),
                    2.165188,
                    rtol=1e-5)
Example 12
def test_dataset_maker_spectrum_rad_max(observations_magic):
    """test the energy-dependent spectrum extraction"""

    observation = observations_magic[0]

    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["counts", "exposure", "edisp"])
    dataset = maker.run(get_spectrumdataset_rad_max("spec"), observation)

    bkg_maker = ReflectedRegionsBackgroundMaker()
    dataset_on_off = bkg_maker.run(dataset, observation)

    counts = dataset_on_off.counts
    counts_off = dataset_on_off.counts_off
    assert counts.unit == ""
    assert counts_off.unit == ""
    assert_allclose(counts.data.sum(), 1138, rtol=1e-5)
    assert_allclose(counts_off.data.sum(), 2128, rtol=1e-5)

    exposure = dataset_on_off.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), 68714990.52908568, rtol=1e-5)
Example 13
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations, caplog):
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)

    maker = SpectrumDatasetMaker(selection=["counts", "exposure"])

    datasets = []

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV,
                                name="energy_true")
    geom = RegionGeom.create(region=region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, obs)
        datasets.append(dataset_on_off)

    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
    assert_allclose(datasets[0].mask_safe.data, False)

    assert "WARNING" in [record.levelname for record in caplog.records]

    message1 = (
        f"ReflectedRegionsBackgroundMaker failed. "
        f"No OFF region found outside exclusion mask for dataset '{datasets[0].name}'."
    )
    message2 = (f"ReflectedRegionsBackgroundMaker failed. "
                f"Setting {datasets[0].name} mask to False.")

    assert message1 in [record.message for record in caplog.records]
    assert message2 in [record.message for record in caplog.records]
Example 14
def test_spectrum_dataset_maker_hess_dl3(spectrum_dataset_crab, observations_hess_dl3):
    maker = SpectrumDatasetMaker(use_region_center=False)

    datasets = []
    for obs in observations_hess_dl3:
        dataset = maker.run(spectrum_dataset_crab, obs)
        datasets.append(dataset)

    # Exposure
    assert_allclose(datasets[0].exposure.data.sum(), 7374718644.757894)
    assert_allclose(datasets[1].exposure.data.sum(), 6691006466.659032)

    # Background
    assert_allclose(datasets[0].npred_background().data.sum(), 7.7429157, rtol=1e-5)
    assert_allclose(datasets[1].npred_background().data.sum(), 5.7314076, rtol=1e-5)

    # Compare background with using bigger region
    e_reco = datasets[0].background.geom.axes['energy']
    e_true = datasets[0].exposure.geom.axes['energy_true']
    geom_bigger = RegionGeom.create("icrs;circle(83.63, 22.01, 0.22)", axes=[e_reco])

    datasets_big_region = []
    bigger_region_dataset = SpectrumDataset.create(geom=geom_bigger, energy_axis_true=e_true)
    for obs in observations_hess_dl3:
        dataset = maker.run(bigger_region_dataset, obs)
        datasets_big_region.append(dataset)

    ratio_regions = (
        datasets[0].counts.geom.solid_angle()
        / datasets_big_region[1].counts.geom.solid_angle()
    )
    ratio_bg_1 = (
        datasets[0].npred_background().data.sum()
        / datasets_big_region[0].npred_background().data.sum()
    )
    ratio_bg_2 = (
        datasets[1].npred_background().data.sum()
        / datasets_big_region[1].npred_background().data.sum()
    )
    assert_allclose(ratio_bg_1, ratio_regions, rtol=1e-2)
    assert_allclose(ratio_bg_2, ratio_regions, rtol=1e-2)

    # Edisp -> it isn't exactly 8, is that right? It also isn't without averaging.
    assert_allclose(datasets[0].edisp.edisp_map.data[:, :, 0, 0].sum(), e_reco.nbin * 2, rtol=1e-1)
    assert_allclose(datasets[1].edisp.edisp_map.data[:, :, 0, 0].sum(), e_reco.nbin * 2, rtol=1e-1)
Example 15
    def test_extract(
        pars,
        results,
        observations_hess_dl3,
        spectrum_dataset_crab_fine,
        reflected_regions_bkg_maker,
    ):
        """Test quantitative output for various configs"""
        safe_mask_maker = SafeMaskMaker()
        maker = SpectrumDatasetMaker(
            containment_correction=pars["containment_correction"]
        )

        obs = observations_hess_dl3[0]
        dataset = maker.run(spectrum_dataset_crab_fine, obs)
        dataset = reflected_regions_bkg_maker.run(dataset, obs)
        dataset = safe_mask_maker.run(dataset, obs)

        aeff_actual = dataset.aeff.data.evaluate(energy_true=5 * u.TeV)
        edisp_actual = dataset.edisp.data.evaluate(
            energy_true=5 * u.TeV, energy=5.2 * u.TeV
        )

        assert_quantity_allclose(aeff_actual, results["aeff"], rtol=1e-3)
        assert_quantity_allclose(edisp_actual, results["edisp"], rtol=1e-3)

        # TODO: Introduce assert_stats_allclose
        info = dataset.info_dict()

        assert info["n_on"] == results["n_on"]
        assert_allclose(info["significance"], results["sigma"], atol=1e-2)

        gti_obs = obs.gti.table
        gti_dataset = dataset.gti.table
        assert_allclose(gti_dataset["START"], gti_obs["START"])
        assert_allclose(gti_dataset["STOP"], gti_obs["STOP"])
Example 16
def test_reflected_bkg_maker(on_region, reflected_bkg_maker, observations):
    datasets = []

    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV

    dataset_empty = SpectrumDataset.create(e_reco=e_reco,
                                           e_true=e_true,
                                           region=on_region)

    maker = SpectrumDatasetMaker(selection=["counts"])

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert_allclose(datasets[0].counts_off.data.sum(), 76)
    assert_allclose(datasets[1].counts_off.data.sum(), 60)

    regions_0 = compound_region_to_list(datasets[0].counts_off.geom.region)
    regions_1 = compound_region_to_list(datasets[1].counts_off.geom.region)
    assert_allclose(len(regions_0), 11)
    assert_allclose(len(regions_1), 11)
Example 17
def test_dataset_maker_spectrum_rad_max_overlapping(observations_magic_rad_max, caplog):
    """test the energy-dependent spectrum extraction"""

    observation = observations_magic_rad_max[0]

    maker = SpectrumDatasetMaker(
        containment_correction=False, selection=["counts", "exposure", "edisp"]
    )

    finder = WobbleRegionsFinder(n_off_regions=5)
    bkg_maker = ReflectedRegionsBackgroundMaker(region_finder=finder)

    with caplog.at_level(logging.WARNING):
        dataset = maker.run(get_spectrumdataset_rad_max("spec"), observation)
        dataset_on_off = bkg_maker.run(dataset, observation)

    assert len(caplog.record_tuples) == 2
    assert caplog.record_tuples[0] == (
        'gammapy.makers.utils',
        logging.WARNING,
        'Found overlapping on/off regions, choose less off regions'
    )

    # overlapping on/off regions mean no counts will be filled
    assert dataset_on_off.counts_off is None
    assert (dataset_on_off.acceptance_off.data == 0).all()

    # test that it works if we only look at higher energies with lower
    # rad max, allowing more off regions
    caplog.clear()
    with caplog.at_level(logging.WARNING):
        dataset = maker.run(get_spectrumdataset_rad_max("spec", e_min=250 * u.GeV), observation)
        dataset_on_off = bkg_maker.run(dataset, observation)
        assert dataset_on_off.counts_off is not None

    assert len(caplog.records) == 0
Example 18
def test_reflected_bkg_maker(on_region, reflected_bkg_maker, observations):
    datasets = []

    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV,
                                name="energy_true")

    geom = RegionGeom(region=on_region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

    maker = SpectrumDatasetMaker(selection=["counts"])

    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)

    assert_allclose(datasets[0].counts_off.data.sum(), 76)
    assert_allclose(datasets[1].counts_off.data.sum(), 60)

    regions_0 = compound_region_to_list(datasets[0].counts_off.geom.region)
    regions_1 = compound_region_to_list(datasets[1].counts_off.geom.region)
    assert_allclose(len(regions_0), 11)
    assert_allclose(len(regions_1), 11)
Example 19
    def _create_dataset_maker(self):
        """Create the Dataset Maker."""
        log.debug("Creating the target Dataset Maker.")

        datasets_settings = self.config.datasets
        if datasets_settings.type == "3d":
            maker = MapDatasetMaker(selection=datasets_settings.map_selection)
        elif datasets_settings.type == "1d":
            maker_config = {}
            if datasets_settings.containment_correction:
                maker_config["containment_correction"] = (
                    datasets_settings.containment_correction
                )

            maker_config["selection"] = ["counts", "exposure", "edisp"]

            maker = SpectrumDatasetMaker(**maker_config)

        return maker
Example 20
data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
mask = data_store.obs_table["TARGET_NAME"] == "Crab"
obs_ids = data_store.obs_table["OBS_ID"][mask].data
observations = data_store.get_observations(obs_ids)

crab_position = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")

# The ON region center is defined in the icrs frame. The angle is defined w.r.t. its axis.
rectangle = RectangleSkyRegion(center=crab_position,
                               width=0.5 * u.deg,
                               height=0.4 * u.deg,
                               angle=0 * u.deg)

bkg_maker = ReflectedRegionsBackgroundMaker(min_distance=0.1 * u.rad)
dataset_maker = SpectrumDatasetMaker(selection=["counts"])

e_reco = MapAxis.from_energy_bounds(0.1, 100, 30, unit="TeV")
dataset_empty = SpectrumDataset.create(e_reco=e_reco, region=rectangle)

datasets = []

for obs in observations:

    dataset = dataset_maker.run(dataset_empty.copy(name=f"obs-{obs.obs_id}"),
                                obs)
    dataset_on_off = bkg_maker.run(observation=obs, dataset=dataset)
    datasets.append(dataset_on_off)

m = Map.create(skydir=crab_position, width=(8, 8), proj="TAN")
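
The empty map m is presumably intended as a canvas for inspecting the off regions. A possible continuation, sketched under the assumption that plot_spectrum_datasets_off_regions from gammapy.visualization is available (it is in recent Gammapy versions):

# Sketch: overlay the reflected off regions of each dataset on the map.
from gammapy.visualization import plot_spectrum_datasets_off_regions

ax = m.plot(add_cbar=False)
plot_spectrum_datasets_off_regions(datasets=datasets, ax=ax)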
Example 21
def generate_dataset(Eflux,
                     flux,
                     Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points, either as
    a SpectrumDataset or a SpectrumDatasetOnOff.

    Note:
    - in SpectrumDataset, the background counts are assumed precisely known
      and are not fluctuated.
    - in SpectrumDatasetOnOff, the background counts (off counts) are
      fluctuated from the known IRF values.

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined, if not over all
        energies. The default is None.
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00',scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the On-Off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataset.
        The default is True.
    seed : String, optional
        The seed for the random generator; an integer will generate the
        same random series at each run. The default is 'random-seed'.
    debug : Boolean, optional
        If True, print verbose output. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.

    """
    random_state = get_random_state(seed)

    ### Define on region
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg,
                           frame="icrs")  # Observing region
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define energy axis (see spectrum analysis notebook)
    # edges for SpectrumDataset - all dataset should have the same axes
    # Note that linear spacing is clearly problematic for powerlaw fluxes
    # Axes can also be defined using MapAxis
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value
    #     ereco = np.logspace(np.log10(1.1*E1v), np.log10(0.9*E2v), 20) * unit
    #     ereco_axis = MapAxis.from_edges(ereco.to("TeV").value,
    #                                    unit="TeV",
    #                                    name="energy",
    #                                    interp="log")

    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")

    #     etrue = np.logspace(np.log10(    E1v), np.log10(    E2v), 50) * unit
    #     etrue_axis = MapAxis.from_edges(etrue.to("TeV").value,
    #                                    unit="TeV",
    #                                    name="energy_true",
    #                                    interp="log")
    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if debug:
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF
    irf = load_cta_irfs(irf_file)

    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})

    model = SkyModel(spectral_model=spec, name="Spec" + str(name))
    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(
        e_reco=ereco_axis,  # Ereco.edges,
        e_true=etrue_axis,  #Etrue.edges,
        region=on_region,
        name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model
    if Erange is not None:
        mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                             energy_max=Erange[1])
        mask = mask & ds.mask_safe.data
        ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Fake is mandatory ?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed
    if onoff:
        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 / alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if onoff:
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)

    return ds
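
A hypothetical call of generate_dataset; the flux values, energy range, and IRF file path below are illustrative placeholders, not from the source:

# Hypothetical usage sketch; all values below are illustrative placeholders.
energies = [0.03, 0.1, 1.0, 10.0] * u.TeV
fluxes = [2e-9, 1e-10, 5e-13, 1e-15] * u.Unit("cm-2 s-1 TeV-1")

ds = generate_dataset(energies, fluxes,
                      Erange=[0.05, 5] * u.TeV,
                      tobs=1000 * u.s,
                      irf_file="irf_file.fits",  # placeholder path
                      name="demo",
                      onoff=True,
                      seed=42,
                      debug=True)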
Example 22

observation = Observation.create(
    obs_id=0,
    pointing=SkyCoord("0d", "0d", frame="icrs"),
    irfs={"aeff": aeff, "edisp": edisp},
    tstart=0 * u.h,
    tstop=0.5 * u.h,
    location=observatory_locations["hess"],
)

geom = RegionGeom.create("icrs;circle(0, 0, 0.1)", axes=[energy_reco])

stacked = SpectrumDataset.create(geom=geom, energy_axis_true=energy_true)

maker = SpectrumDatasetMaker(selection=["edisp", "exposure"])

dataset_1 = maker.run(stacked.copy(), observation=observation)
dataset_2 = maker.run(stacked.copy(), observation=observation)

pwl = PowerLawSpectralModel()
model = SkyModel(spectral_model=pwl, name="test-source")

dataset_1.mask_safe = geom.energy_mask(energy_min=2 * u.TeV)
dataset_2.mask_safe = geom.energy_mask(energy_min=0.6 * u.TeV)

dataset_1.models = model
dataset_2.models = model
dataset_1.counts = dataset_1.npred()
dataset_2.counts = dataset_2.npred()
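
The name stacked suggests this excerpt comes from a stacking demonstration; a plausible continuation (an assumption, using the same stack API shown in Example 25):

# Sketch: stack the second dataset onto the first; bins outside each
# dataset's mask_safe do not contribute to the stacked counts.
dataset_1.stack(dataset_2)
print(dataset_1)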
Example 23
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        log.info("Reducing spectrum datasets.")
        datasets_settings = self.config.datasets
        on_lon = datasets_settings.on_region.lon
        on_lat = datasets_settings.on_region.lat
        on_center = SkyCoord(on_lon,
                             on_lat,
                             frame=datasets_settings.on_region.frame)
        on_region = CircleSkyRegion(on_center,
                                    datasets_settings.on_region.radius)

        maker_config = {}
        if datasets_settings.containment_correction:
            maker_config["containment_correction"] = (
                datasets_settings.containment_correction
            )
        e_reco = self._make_energy_axis(datasets_settings.geom.axes.energy)

        maker_config["selection"] = ["counts", "exposure", "edisp"]
        dataset_maker = SpectrumDatasetMaker(**maker_config)

        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            exclusion_region = Map.read(datasets_settings.background.exclusion)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker_config.update(datasets_settings.background.parameters)
        bkg_method = datasets_settings.background.method
        if bkg_method == "reflected":
            bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)
            log.debug(
                f"Creating ReflectedRegionsBackgroundMaker with arguments {bkg_maker_config}"
            )
        else:
            bkg_maker = None
            log.warning(
                f"No background maker set for 1d analysis. Check configuration."
            )

        safe_mask_selection = datasets_settings.safe_mask.methods
        safe_mask_settings = datasets_settings.safe_mask.parameters
        safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)

        e_true = self._make_energy_axis(
            datasets_settings.geom.axes.energy_true, name="energy_true")

        geom = RegionGeom.create(region=on_region, axes=[e_reco])
        reference = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = dataset_maker.run(reference.copy(), obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset, obs)
                if dataset.counts_off is None:
                    log.info(
                        f"No OFF region found for observation {obs.obs_id}. Discarding."
                    )
                    continue
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)

        self.datasets = Datasets(datasets)

        if datasets_settings.stack:
            stacked = self.datasets.stack_reduce(name="stacked")
            self.datasets = Datasets([stacked])
Example 24
def simulate():

    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )

    # Reconstructed and true energy axis
    center = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    energy_axis = MapAxis.from_edges(
        np.logspace(-0.5, 1.0, 10),
        unit="TeV",
        name="energy",
        interp="log",
    )
    energy_axis_true = MapAxis.from_edges(
        np.logspace(-1.2, 2.0, 31),
        unit="TeV",
        name="energy_true",
        interp="log",
    )

    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=center, radius=on_region_radius)

    geom = RegionGeom(on_region, axes=[energy_axis])

    pointing = SkyCoord(0.5, 0.5, unit="deg", frame="galactic")

    spectral_model = PowerLawSpectralModel(index=3,
                                           amplitude="1e-11 cm-2 s-1 TeV-1",
                                           reference="1 TeV")
    temporal_model = ExpDecayTemporalModel(t0="6 h", t_ref=gti_t0.mjd * u.d)
    model_simu = SkyModel(
        spectral_model=spectral_model,
        temporal_model=temporal_model,
        name="model-simu",
    )

    lvtm = np.ones(N_OBS) * 1.0 * u.hr
    tstart = 1.0 * u.hr

    datasets = []
    for i in range(N_OBS):
        obs = Observation.create(
            pointing=pointing,
            livetime=lvtm[i],
            tstart=tstart,
            irfs=irfs,
            reference_time=gti_t0,
        )
        empty = SpectrumDataset.create(
            geom=geom,
            energy_axis_true=energy_axis_true,
            name=f"dataset_{i}",
        )
        maker = SpectrumDatasetMaker(
            selection=["exposure", "background", "edisp"])
        dataset = maker.run(empty, obs)
        dataset.models = [
            model_simu,
            FoVBackgroundModel(dataset_name=dataset.name)
        ]
        dataset.fake()
        datasets.append(dataset)
        tstart = tstart + 2.0 * u.hr

    return datasets
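
The excerpt relies on module-level N_OBS and gti_t0 defined elsewhere in the original script; plausible placeholder definitions (assumptions, not from the source):

# Assumed module-level constants used by simulate(); values are placeholders.
from astropy.time import Time

N_OBS = 10
gti_t0 = Time("2020-01-01T00:00:00")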
Example 25
def data_prep():
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    exclusion_region = CircleSkyRegion(
        center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"),
        radius=0.5 * u.deg,
    )

    skydir = target_position.galactic
    exclusion_mask = Map.create(npix=(150, 150),
                                binsz=0.05,
                                skydir=skydir,
                                proj="TAN",
                                frame="galactic")

    mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False)
    exclusion_mask.data = mask

    e_reco = MapAxis.from_bounds(0.1,
                                 40,
                                 nbin=40,
                                 interp="log",
                                 unit="TeV",
                                 name="energy")
    e_true = MapAxis.from_bounds(0.05,
                                 100,
                                 nbin=200,
                                 interp="log",
                                 unit="TeV",
                                 name="energy_true")

    stacked = SpectrumDatasetOnOff.create(region=on_region,
                                          e_reco=e_reco,
                                          e_true=e_true,
                                          name="stacked")

    dataset_maker = SpectrumDatasetMaker(containment_correction=False,
                                         selection=["counts", "aeff", "edisp"])

    bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    spectral_model = PowerLawSpectralModel(index=2,
                                           amplitude=2e-11 *
                                           u.Unit("cm-2 s-1 TeV-1"),
                                           reference=1 * u.TeV)
    spatial_model = PointSpatialModel(lon_0=target_position.ra,
                                      lat_0=target_position.dec,
                                      frame="icrs")
    spatial_model.lon_0.frozen = True
    spatial_model.lat_0.frozen = True

    sky_model = SkyModel(spatial_model=spatial_model,
                         spectral_model=spectral_model,
                         name="")

    for observation in observations:
        dataset = stacked.copy(name=f"dataset-{observation.obs_id}")
        dataset = dataset_maker.run(dataset=dataset, observation=observation)
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        stacked.stack(dataset_on_off)

    stacked.models = sky_model
    return Datasets([stacked])
Example 26
irfs = load_cta_irfs("$GAMMAPY_DATA/prod3b-v2/bcf/North_z20_50h/irf_file.fits")

# Create the observation
obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)
print("Characteristics of the simulated observation")
print(obs)

# # Create the On/Off simulations

# Make the SpectrumDataset
# NOTE: Even though we don't set different energy ranges for reconstructed and
# true energies, the FluxPointsEstimator breaks if edisp is not included.
dataset_empty = SpectrumDataset.create(e_reco=energy_axis,
                                       region=on_region,
                                       name="obs-0")
maker = SpectrumDatasetMaker(selection=["exposure", "edisp", "background"])

# Set the number of observations we want to create
n_obs = 50
print("Creating the", n_obs, "On/Off simulations")

datasets = Datasets()
for idx in range(n_obs):
    dataset = maker.run(dataset_empty, obs)
    # Set the model on the dataset, and fake the counts
    dataset.models = model
    dataset.fake(random_state=idx)
    # Set off regions
    dataset_on_off = SpectrumDatasetOnOff.from_spectrum_dataset(
        dataset=dataset, acceptance=1, acceptance_off=3)
    dataset_on_off.fake(random_state=idx,
                        npred_background=dataset.npred_background())
    datasets.append(dataset_on_off)
Example 27
    def create_dataset_list(self):
        """
        Create the dataset list as a list of lists to handle more than one
        IRF per slice (GRB seen on both sites).

        The pointing, center of the field of view, is the same for all time
        slices as it is considered that the GRB is constantly followed within
        the pre-computed visibility window. The pointing is displaced from
        the GRB position by a distance mcf.offset, a value chosen to give
        enough space to have 1/mcf.alpha off regions of radius mcf.on_size,
        the detection area.
        The detection areas, on and off regions, have a size independent of
        the energy, but the effective area and background are modified to take
        into account the PSF evolution with energy (see the irf module).

        The datasets are defined with the same true and reconstructed energy
        axes so that counts can be added bin by bin. Differences in threshold
        due to different IRFs are taken into account by masking some bins
        (the mask at a certain energy value cancels the whole bin it belongs
        to; see the irf module for more explanations).

        A word on masking
        =================
        Direct masking is not made available to users in Gammapy 0.17:
        it is possible to mask on effective area criteria (minimum value)
        or on data counts (counts.geom.energy_mask).
        The direct masking on energy is done as shown below, as a result of
        discussions with Gammapy developers on the Gammapy Slack channel
        (Nov. 2nd, 2020).
        Note that the mask_safe is not considered in the printout or peek
        functions and has to be obtained by hand (could be corrected in
        future versions): this is implemented in the check_dataset SoHAPPy
        function.

        Discussion on masking binning with A. Donath, Dec. 3rd, 2020
        ------------------------------------------------------------
        Applying a mask (Emin, Emax) simply cancels the data of the
        energy axis bins outside the mask. Even partially covered bins are
        lost (e.g. bins (E1, E2) with E1 < Emin < E2).
        There could have been a partial recovery of the data (i.e. keeping
        the counts or flux between Emin and E2), but this is not
        what was chosen.
        The influence of this becomes small if energy bin sizes are small
        enough. However, small bin sizes can give low statistics.
        A. Donath says it is not a problem as long as the statistical
        computation is done properly: this is in fact approaching the
        unbinned likelihood case. Only for flux point computation, e.g.
        to get a significant point at high energies, should bins have enough
        statistics. This can be done since v0.18.2 using
        SpectrumDatasetOnOff.resample_energy_axis() to regroup the data
        before model fitting.

        A word on stacking
        ==================
        In order to be stacked, datasets should be based on the same
        energy axis, which is indeed implemented in SoHAPPy.
        Two observations usually have different IRFs, and in particular energy
        thresholds. The thresholds are taken into account by masking, as
        mentioned above.
        Stacking in Gammapy is intended to merge observations that were too
        large in data volume to be handled together, and/or to sum up
        consecutive observations of the same object. Stacking two observations
        results in a longer observation with the same model, in particular the
        same spectral model, with an observation livetime being the sum of the
        two initial observation livetimes. It is not intended to sum up two
        observations done at the same time, with the same livetime.
        As a consequence the Gammapy stacking cannot be used to "merge"
        the simultaneous observations of two sites. This would result in a
        livetime larger than that of the simultaneous observations and would
        therefore shift the observation times in the consecutive time slices.
        There was an attempt to modify this by reducing the livetime,
        ds_stacked.livetime /= 2, and multiplying the attached spectrum by 2
        to keep the predicted count number coherent, but this causes problems
        later in the analysis.
        As a consequence, it was chosen to keep track of each observation's
        identity, having a dataset for each time slice on a given site, and
        two datasets for slices in common on two sites (or more).
        The aperture photometry analysis then simply adds up the count numbers
        from the two sites, whereas the spectrum extraction handles the
        situation outside the simulation code, in a separate notebook.

        Saving datasets
        ===============
        The choice made here is to save the simulation to disk as a binary
        file, which has the advantage that all information is saved.
        An alternative is to save the dataset list only.

        Discussion on the Slack channel with A. Donath (Nov. 27th, 2020)
        -----------------------------------------------------------------
        For writing datasets in the present use case there is a bit
        more bookkeeping to do, so that you can read back the model and
        datasets correctly. Here is a minimal example:
        <code>
        from gammapy.datasets import SpectrumDatasetOnOff, Datasets
        from gammapy.modeling.models import PowerLawSpectralModel, SkyModel

        path = "$GAMMAPY_DATA/joint-crab/spectra/hess/"

        obs_1 = SpectrumDatasetOnOff.from_ogip_files(path + "pha_obs23523.fits")
        obs_2 = SpectrumDatasetOnOff.from_ogip_files(path + "pha_obs23592.fits")

        model_1 = SkyModel(PowerLawSpectralModel(), name="model-1", datasets_names=[obs_1.name])
        model_2 = SkyModel(PowerLawSpectralModel(), name="model-2", datasets_names=[obs_2.name])
        obs_1.models = [model_1]
        obs_2.models = [model_2]
        datasets = Datasets([obs_1, obs_2])

        datasets.write("test", prefix="test", overwrite=True)
        </code>

        This was something that we started to refactor in v0.17 and are
        still improving. So you have the possibility to “assign” models to
        datasets in a declarative way using the datasets_names keyword.
        The resulting YAML file looks like:

        <code>
        components:
        -   name: model-1
            type: SkyModel
            spectral:
                type: PowerLawSpectralModel
                parameters:
                - {name: index, value: 2.0, unit: '', min: .nan, max: .nan, frozen: false,
                    error: 0}
                - {name: amplitude, value: 1.0e-12, unit: cm-2 s-1 TeV-1, min: .nan, max: .nan,
                    frozen: false, error: 0}
                - {name: reference, value: 1.0, unit: TeV, min: .nan, max: .nan, frozen: true,
                    error: 0}
            datasets_names: ['23523']
        -   name: model-2
            type: SkyModel
            spectral:
                type: PowerLawSpectralModel
                parameters:
                - {name: index, value: 2.0, unit: '', min: .nan, max: .nan, frozen: false,
                    error: 0}
                - {name: amplitude, value: 1.0e-12, unit: cm-2 s-1 TeV-1, min: .nan, max: .nan,
                    frozen: false, error: 0}
                - {name: reference, value: 1.0, unit: TeV, min: .nan, max: .nan, frozen: true,
                    error: 0}
            datasets_names: ['23592']
        covariance: test/test_models_covariance.dat
        </code>

        So it explicitly mentions for each model component the dataset it is
        assigned to. I think without defining datasets_names the information
        will not be present in the output YAML file, and when reading back the
        data and model the assignment is not correct anymore.
        We will add more documentation on this “global” model handling soon.

        Parameters
        ----------
        debug : Boolean, optional
            If True, verbose mode. The default is False.

        Returns
        -------
        dset_list : Numpy array of Dataset objects
            A list of datasets (not a collection like a Datasets).

        """

        dset_list = []
        
        for aslice in self.slot.slices:

            # Note: the spectrum is related to the slice, not the site
            spec  = self.slot.grb.spectra[aslice.fid()]
            name  = "Spec"+"-"+str(aslice.fid())
            model = SkyModel(spectral_model = spec, name = name)

            # The reference time and duration of the observation
            # Note that this is the start of the slice
            # Not necessarily the point at which the flux and altitude
            # are evaluated
            tref = self.slot.grb.t_trig + aslice.ts1() # start of slice
            dt   = aslice.ts2() - aslice.ts1()

            # Each slice can have more than one IRF since it can be observed
            # from both sites in some cases.
            # Two independent datasets are created
            dset_site = []
            for ip, perf in enumerate(aslice.irf()):

                array = perf.subarray
                kzen = perf.kzen
                on_size = mcf.on_size[array]
                offset  = mcf.offset[array]
                # The on-region is on the GRB
                on_region = CircleSkyRegion(center = self.slot.grb.radec,
                                            radius = on_size)
                # Create the observation - The pointing is not on the GRB
                on_ptg = SkyCoord(self.slot.grb.radec.ra + offset,
                                  self.slot.grb.radec.dec, frame="icrs")

                with warnings.catch_warnings(): # because of t_trig
                    warnings.filterwarnings("ignore")
                    obs = Observation.create(obs_id   = aslice.idt(),
                                             pointing = on_ptg,
                                             livetime = dt,
                                             irfs     = perf.irf,
                                             deadtime_fraction = 0,
                                             reference_time = tref)

                # Create dataset - correct for containment - add model
                ds_name  = aslice.site()+"-"+str(aslice.idt())+"-"+str(ip)
                
                # Reconstructed energy axis
                # Stacking requires same original binning -> uses largest interval
                # Ensure that all possible edges are in, later apply masking
                # Use the Optimised binning
                # There is a bug in 0.17 (unit not taken into account
                # correctly) preventing one from simply writing
                # erec_axis = MapAxis.from_edges(erec_edges,name="energy")
                e_reco = MapAxis.from_edges(mcf.erec_edges[array].to("TeV").value,
                                            unit="TeV",
                                            name="energy",
                                            interp="log")
                e_true = perf.etrue
                
                ds_empty = SpectrumDataset.create(e_reco = e_reco,
                                                  e_true = e_true,
                                                  region = on_region,
                                                  name   = ds_name)

                maker = SpectrumDatasetMaker(
                    selection=["exposure", "background", "edisp"])

                ds = maker.run(ds_empty, obs)

                # Compute the containment factor.
                # The PSF being given versus true energy, using the reconstructed energy
                # axis to compute the factor assumes that the reconstructed energy is
                # strictly equal to the true energy, which is certainly not the case at
                # the lowest energies.
                # Maybe this could deserve a specific study.
                radii = perf.irf['psf'].containment_radius(energy=e_reco.center,
                                                           theta=mcf.offset[array],
                                                           fraction=mcf.containment)[0]
                factor = (1 - np.cos(radii)) / (1 - np.cos(mcf.on_size[array]))
                # If the factor is too large above threshold, report an error
                idx = np.where(e_reco.center[np.where(factor > 1)] >= mcf.erec_min[array][kzen])
                if np.size(idx):
                    # Get guilty energies

                    print(" E = ",e_reco.center[idx])
                    print(" R = ",radii[idx].value," max = ",
                                  mcf.on_size[array].value)
                    print(" F = ",factor[idx])
                    #sys.exit("IRF : Initial region too small")

                # In Gammapy 0.17, it is mandatory to change the effective
                # area before the model is set, since once the model is set it
                # cannot be changed anymore.
                # This feature (bug) was discussed in Gammapy issue #3016 on
                # Sept 2020.

                # ds.exposure.data   *= mcf.containment
                ds.exposure   *= mcf.containment
                ds.background.data *= factor.value.reshape((-1, 1, 1))
                ds.models           = model
                mask = ds.mask_safe.geom.energy_mask(energy_min = mcf.erec_min[array][kzen],
                                                     energy_max = mcf.erec_max[kzen])
                    
                mask = mask & ds.mask_safe.data
                ds.mask_safe = RegionNDMap(ds.mask_safe.geom,data=mask)

                dset_site.append(ds)

            dset_list.append(dset_site)

        return np.asarray(dset_list)
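
As a sketch of the aperture-photometry summing described in the docstring (an assumption about downstream use, not code from the source), the per-site datasets of a slice can be combined bin by bin because they share the same reconstructed energy axis:

# Sketch: add predicted counts bin by bin across the sites observing a slice.
# `dset_list` is the array returned by create_dataset_list above.
for dset_site in dset_list:
    npred_total = sum(ds.npred().data for ds in dset_site)
    print("Predicted counts in slice:", npred_total.sum())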