Example #1
from numpy.testing import assert_allclose

from gammapy.makers import DatasetsMaker


def test_datasetsmaker_map(pars, observations_cta, makers_map):
    makers = DatasetsMaker(
        makers_map,
        stack_datasets=pars["stack_datasets"],
        cutout_mode="partial",
        cutout_width=pars["cutout_width"],
        n_jobs=pars["n_jobs"],
    )

    datasets = makers.run(pars["dataset"], observations_cta)
    if len(datasets) == 1:
        counts = datasets[0].counts
        assert counts.unit == ""
        assert_allclose(counts.data.sum(), 46716, rtol=1e-5)

        exposure = datasets[0].exposure
        assert exposure.unit == "m2 s"
        assert_allclose(exposure.data.mean(), 1.350841e09, rtol=3e-3)
    else:
        assert len(datasets) == 3
        # with n_jobs > 1 the datasets can be returned in any order, so in
        # general they should be looked up by name rather than by index
        counts = datasets[0].counts
        assert counts.unit == ""
        assert_allclose(counts.data.sum(), 26318, rtol=1e-5)

        exposure = datasets[0].exposure
        assert exposure.unit == "m2 s"
        assert_allclose(exposure.data.mean(), 2.436063e09, rtol=3e-3)
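The fixtures above (pars, observations_cta, makers_map) come from Gammapy's test suite and are not shown on this page. The following is a minimal sketch of how comparable inputs could be assembled, assuming the CTA-1DC sample data under $GAMMAPY_DATA; the observation IDs, geometry and maker options are illustrative, not the values used by the fixtures:

from gammapy.data import DataStore
from gammapy.datasets import MapDataset
from gammapy.makers import DatasetsMaker, MapDatasetMaker, SafeMaskMaker
from gammapy.maps import MapAxis, WcsGeom

# Load a few observations from the CTA-1DC index files
data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
observations = data_store.get_observations([110380, 111140, 111159])

# Reference geometry onto which every observation is reduced
energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=10)
geom = WcsGeom.create(
    skydir=(0, 0), frame="galactic", width=(10, 8), binsz=0.02, axes=[energy_axis]
)
dataset_empty = MapDataset.create(geom, name="stacked")

# Per-observation makers, looped over the observations by DatasetsMaker
makers = [MapDatasetMaker(), SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")]
datasets_maker = DatasetsMaker(
    makers, stack_datasets=True, cutout_mode="partial", cutout_width="5 deg", n_jobs=2
)
datasets = datasets_maker.run(dataset_empty, observations)

With stack_datasets=True the run returns a single stacked dataset; with stack_datasets=False it returns one dataset per observation, which is what the if/else branches of the test above distinguish.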
Example #2
from numpy.testing import assert_allclose

from gammapy.makers import DatasetsMaker


def test_datasetsmaker_spectrum(observations_hess, makers_spectrum):
    makers = DatasetsMaker(makers_spectrum, stack_datasets=False, n_jobs=2)
    datasets = makers.run(get_spectrumdataset(name="spec"), observations_hess)

    counts = datasets[0].counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), 192, rtol=1e-5)
    assert_allclose(datasets[0].background.data.sum(), 18.66666664, rtol=1e-5)

    exposure = datasets[0].exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), 3.94257338e08, rtol=3e-3)
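get_spectrumdataset and makers_spectrum are, respectively, a helper and a fixture of the test module. Here is a sketch of what a comparable reference SpectrumDataset and 1D maker chain could look like, assuming an ON region centred on the Crab; the region size, energy binning and maker selection are illustrative:

import astropy.units as u
from astropy.coordinates import SkyCoord
from regions import CircleSkyRegion
from gammapy.datasets import SpectrumDataset
from gammapy.makers import (
    DatasetsMaker,
    ReflectedRegionsBackgroundMaker,
    SpectrumDatasetMaker,
)
from gammapy.maps import MapAxis, RegionGeom

# ON region and energy axes for the reference 1D dataset
on_region = CircleSkyRegion(
    center=SkyCoord(83.633, 22.014, unit="deg", frame="icrs"), radius=0.11 * u.deg
)
energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=30)
energy_axis_true = MapAxis.from_energy_bounds(
    "0.05 TeV", "100 TeV", nbin=50, name="energy_true"
)
geom = RegionGeom.create(region=on_region, axes=[energy_axis])
reference = SpectrumDataset.create(geom=geom, energy_axis_true=energy_axis_true, name="spec")

# Per-observation 1D makers, driven by DatasetsMaker
makers = [
    SpectrumDatasetMaker(selection=["counts", "exposure", "edisp"]),
    ReflectedRegionsBackgroundMaker(),
]
datasets_maker = DatasetsMaker(makers, stack_datasets=False, n_jobs=2)
# datasets = datasets_maker.run(reference, observations_hess)

Because stack_datasets=False here, run returns one spectrum dataset per observation and the assertions above inspect the first one.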
Example #3
from numpy.testing import assert_allclose

from gammapy.makers import DatasetsMaker


def test_datasetsmaker_map_cutout_width(observations_cta, makers_map, tmp_path):
    makers = DatasetsMaker(
        makers_map,
        stack_datasets=True,
        cutout_mode="partial",
        cutout_width="5 deg",
        n_jobs=1,
    )
    datasets = makers.run(get_mapdataset(name="linear_staking_1deg"), observations_cta)

    counts = datasets[0].counts

    assert counts.unit == ""
    assert_allclose(counts.data.sum(), 46716, rtol=1e-5)

    exposure = datasets[0].exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), 1.350841e09, rtol=3e-3)
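The helper get_mapdataset is not shown on this page; it presumably builds an empty, named reference MapDataset. A hypothetical reconstruction, with an illustrative geometry rather than the one actually used by the test, could be:

from gammapy.datasets import MapDataset
from gammapy.maps import MapAxis, WcsGeom

def get_mapdataset(name):
    """Create an empty reference MapDataset (illustrative geometry)."""
    energy_axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=10)
    geom = WcsGeom.create(
        skydir=(0, 0), frame="galactic", width=(10, 8), binsz=0.02, axes=[energy_axis]
    )
    return MapDataset.create(geom, name=name)

Passing cutout_width="5 deg" fixes the size of the per-observation cutout taken from this reference geometry before the makers run.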
Example #4
from numpy.testing import assert_allclose

from gammapy.makers import (
    DatasetsMaker,
    FoVBackgroundMaker,
    MapDatasetMaker,
    SafeMaskMaker,
)


def test_datasetsmaker_map_2steps(observations_cta, makers_map, tmp_path):
    makers = DatasetsMaker(
        [MapDatasetMaker()],
        stack_datasets=False,
        cutout_mode="partial",
        cutout_width="5 deg",
        n_jobs=None,
    )

    dataset = get_mapdataset(name="2steps")
    datasets = makers.run(dataset, observations_cta)

    makers_list = [
        SafeMaskMaker(methods=["offset-max"], offset_max="2 deg"),
        FoVBackgroundMaker(method="scale"),
    ]
    makers = DatasetsMaker(
        makers_list,
        stack_datasets=True,
        cutout_mode="partial",
        cutout_width="5 deg",
        n_jobs=None,
    )
    datasets = makers.run(dataset, observations_cta, datasets)

    counts = datasets[0].counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), 46716, rtol=1e-5)

    exposure = datasets[0].exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), 1.350841e09, rtol=3e-3)
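This example runs the reduction in two passes: the first DatasetsMaker produces one unstacked dataset per observation with MapDatasetMaker alone, and the second applies the safe mask and FoV background scaling to those existing datasets while stacking them, by passing the first pass's output as the third argument to run. A comparable single-pass reduction (a sketch; not guaranteed to give bit-identical numbers) would chain all three makers in one DatasetsMaker:

from gammapy.makers import (
    DatasetsMaker,
    FoVBackgroundMaker,
    MapDatasetMaker,
    SafeMaskMaker,
)

# All makers run per observation, and the results are stacked as they arrive
makers = DatasetsMaker(
    [
        MapDatasetMaker(),
        SafeMaskMaker(methods=["offset-max"], offset_max="2 deg"),
        FoVBackgroundMaker(method="scale"),
    ],
    stack_datasets=True,
    cutout_mode="partial",
    cutout_width="5 deg",
    n_jobs=None,
)
# datasets = makers.run(get_mapdataset(name="one-step"), observations_cta)  # illustrative name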
Example #5
    def _map_making(self):
        """Make maps and datasets for 3d analysis"""
        datasets_settings = self.config.datasets
        offset_max = datasets_settings.geom.selection.offset_max

        log.info("Creating reference dataset and makers.")
        stacked = self._create_reference_dataset(name="stacked")

        maker = self._create_dataset_maker()
        maker_safe_mask = self._create_safe_mask_maker()
        bkg_maker = self._create_background_maker()

        makers = [maker, maker_safe_mask, bkg_maker]
        makers = [maker for maker in makers if maker is not None]

        log.info("Start the data reduction loop.")

        datasets_maker = DatasetsMaker(
            makers,
            stack_datasets=datasets_settings.stack,
            n_jobs=self.config.general.n_jobs,
            cutout_mode="trim",
            cutout_width=2 * offset_max,
        )
        self.datasets = datasets_maker.run(stacked, self.observations)
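_map_making is a private step of Gammapy's high-level Analysis class, so here DatasetsMaker is configured through the analysis settings rather than called directly. A sketch of the corresponding user-facing calls, assuming a hypothetical YAML configuration file config.yaml that selects a 3D dataset type:

from gammapy.analysis import Analysis, AnalysisConfig

config = AnalysisConfig.read("config.yaml")  # hypothetical config file
analysis = Analysis(config)
analysis.get_observations()  # select observations from the configured data store
analysis.get_datasets()      # for a 3D analysis this drives _map_making / DatasetsMaker
print(analysis.datasets)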