コード例 #1
0
def test_map_maker_ring(observations):
    """Stack ring-background on/off datasets over all observations and
    check the summed counts and off-acceptance against reference values."""
    geomd = geom(ebounds=[0.1, 10])
    map_dataset_maker = MapDatasetMaker(offset_max="2 deg")
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")

    stacked = MapDatasetOnOff.create(geomd)

    # Exclusion mask: True everywhere except a 0.5 deg circle at the
    # Galactic centre, so the source region is excluded from the ring.
    circle = CircleSkyRegion(
        SkyCoord(0, 0, unit="deg", frame="galactic"), radius=0.5 * u.deg
    )
    exclusion = Map.from_geom(geomd)
    exclusion.data = exclusion.geom.region_mask([circle], inside=False)

    ring_bkg = RingBackgroundMaker(
        r_in="0.5 deg", width="0.4 deg", exclusion_mask=exclusion
    )

    for obs in observations:
        reduced = map_dataset_maker.run(stacked, obs)
        reduced = safe_mask_maker.run(reduced, obs)
        # The ring estimator works on 2D maps: collapse the energy axis.
        reduced = reduced.to_image()
        stacked.stack(ring_bkg.run(reduced))

    assert_allclose(np.nansum(stacked.counts.data), 34366, rtol=1e-2)
    assert_allclose(np.nansum(stacked.acceptance_off.data), 434.36, rtol=1e-2)
コード例 #2
0
        def empty_dataset(source_pos_radec, map_geom, e_reco_binning, livetime,
                          irf_file, offset):
            """Build a counts-free MapDataset (exposure, background, PSF,
            energy dispersion) for a pointing offset from the source.

            Parameters
            ----------
            source_pos_radec : dict
                ``{"ra": ..., "dec": ...}`` source position, interpreted
                in degrees (ICRS).
            map_geom : dict
                ``{"binsize": ..., "width": ...}`` angular quantities
                defining the square WCS geometry.
            e_reco_binning : dict
                ``{"e_reco_min", "e_reco_max", "n_e_reco"}`` reconstructed
                energy range (quantities convertible to TeV) and bin count.
            livetime : `~astropy.units.Quantity`
                Observation duration.
            irf_file : str
                CTA IRF FITS file passed to ``load_cta_irfs``.
            offset : `~astropy.units.Quantity`
                Pointing offset, added to the source declination.
            """
            source_pos_ra = source_pos_radec["ra"]
            source_pos_dec = source_pos_radec["dec"]

            source = SkyCoord(source_pos_ra,
                              source_pos_dec,
                              unit="deg",
                              frame="icrs")

            # Normalize the energy bounds to plain float values in TeV.
            e_reco_min = u.Quantity(e_reco_binning["e_reco_min"]).to("TeV")
            e_reco_min = e_reco_min.value
            e_reco_max = u.Quantity(e_reco_binning["e_reco_max"]).to("TeV")
            e_reco_max = e_reco_max.value
            n_e_reco = e_reco_binning["n_e_reco"]

            # Log-spaced reconstructed-energy axis.
            energy_axis = MapAxis.from_edges(np.logspace(
                np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
                                             unit="TeV",
                                             name="energy",
                                             interp="log")

            geom = WcsGeom.create(
                skydir=source,
                binsz=u.Quantity(map_geom["binsize"]).to("deg").value,
                width=(u.Quantity(map_geom["width"]).to("deg").value,
                       u.Quantity(map_geom["width"]).to("deg").value),
                frame="icrs",
                axes=[energy_axis])

            # NOTE(review): the true-energy axis reuses the reco binning and
            # the name "energy" — confirm this matches the gammapy version
            # in use (newer versions expect name="energy_true").
            energy_axis_true = MapAxis.from_edges(np.logspace(
                np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
                                                  unit="TeV",
                                                  name="energy",
                                                  interp="log")

            # Pointing is offset from the source purely in declination.
            pointing = SkyCoord(u.Quantity(source_pos_ra).to("deg"),
                                u.Quantity(source_pos_dec).to("deg") + offset,
                                frame="icrs",
                                unit="deg")

            irfs = load_cta_irfs(irf_file)

            obs = Observation.create(pointing=pointing,
                                     livetime=livetime,
                                     irfs=irfs)

            empty = MapDataset.create(geom, energy_axis_true=energy_axis_true)
            # No "counts" in the selection: the dataset stays empty of events.
            maker = MapDatasetMaker(
                selection=["exposure", "background", "psf", "edisp"])
            # Safe mask extends 1 deg beyond the map width so the whole
            # field of view survives the offset cut.
            maker_safe_mask = SafeMaskMaker(
                methods=["offset-max"],
                offset_max=u.quantity.Quantity(map_geom["width"]) +
                1.0 * u.deg)

            dataset = maker.run(empty, obs)
            dataset = maker_safe_mask.run(dataset, obs)

            return dataset
コード例 #3
0
ファイル: test_make.py プロジェクト: mstrzys/gammapy
    def test_extract(
        pars,
        results,
        observations_hess_dl3,
        spectrum_dataset_maker_crab_fine_bins,
        reflected_regions_bkg_maker,
    ):
        """Test quantitative output for various configs"""
        # Default SafeMaskMaker: no explicit methods or thresholds.
        safe_mask_maker = SafeMaskMaker()

        obs = observations_hess_dl3[0]
        # Containment correction is the per-config knob of this test.
        spectrum_dataset_maker_crab_fine_bins.containment_correction = pars[
            "containment_correction"]
        dataset = spectrum_dataset_maker_crab_fine_bins.run(
            obs, selection=["counts", "aeff", "edisp"])
        dataset = reflected_regions_bkg_maker.run(dataset, obs)
        dataset = safe_mask_maker.run(dataset, obs)

        # Spot-check the reduced IRFs at/near 5 TeV against references.
        aeff_actual = dataset.aeff.data.evaluate(energy=5 * u.TeV)
        edisp_actual = dataset.edisp.data.evaluate(e_true=5 * u.TeV,
                                                   e_reco=5.2 * u.TeV)

        assert_quantity_allclose(aeff_actual, results["aeff"], rtol=1e-3)
        assert_quantity_allclose(edisp_actual, results["edisp"], rtol=1e-3)

        # TODO: Introduce assert_stats_allclose
        info = dataset.info_dict()

        assert info["n_on"] == results["n_on"]
        assert_allclose(info["significance"], results["sigma"], atol=1e-2)

        # GTIs must be propagated unchanged from the observation.
        gti_obs = obs.gti.table
        gti_dataset = dataset.gti.table
        assert_allclose(gti_dataset["START"], gti_obs["START"])
        assert_allclose(gti_dataset["STOP"], gti_obs["STOP"])
コード例 #4
0
ファイル: test_make.py プロジェクト: mstrzys/gammapy
def test_safe_mask_maker_dc1(spectrum_dataset_maker_gc, observations_cta_dc1):
    """Safe energy range from edisp-bias + aeff-max on a CTA DC1 run."""
    mask_maker = SafeMaskMaker(methods=["edisp-bias", "aeff-max"])

    first_obs = observations_cta_dc1[0]
    reduced = spectrum_dataset_maker_gc.run(first_obs)
    reduced = mask_maker.run(reduced, first_obs)

    low_edge = reduced.energy_range[0]
    assert_allclose(low_edge.value, 3.162278, rtol=1e-3)
    assert low_edge.unit == "TeV"
コード例 #5
0
ファイル: core.py プロジェクト: gfiusa/gammapy
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        log.info("Reducing spectrum datasets.")
        datasets_settings = self.config.datasets
        # On region: circle built from the configured lon/lat/radius/frame.
        on_lon = datasets_settings.on_region.lon
        on_lat = datasets_settings.on_region.lat
        on_center = SkyCoord(on_lon,
                             on_lat,
                             frame=datasets_settings.on_region.frame)
        on_region = CircleSkyRegion(on_center,
                                    datasets_settings.on_region.radius)

        # Only forward containment_correction when it is set in the config.
        maker_config = {}
        if datasets_settings.containment_correction:
            maker_config[
                "containment_correction"] = datasets_settings.containment_correction
        e_reco = self._make_energy_axis(
            datasets_settings.geom.axes.energy).edges

        maker_config["selection"] = ["counts", "aeff", "edisp"]
        dataset_maker = SpectrumDatasetMaker(**maker_config)
        # Optional exclusion mask for the reflected-regions estimator.
        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            exclusion_region = Map.read(datasets_settings.background.exclusion)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)

        # Safe mask: methods and their settings come from the config.
        safe_mask_selection = self.config.datasets.safe_mask.methods
        safe_mask_settings = self.config.datasets.safe_mask.settings
        safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)

        e_true = self._make_energy_axis(
            datasets_settings.geom.axes.energy_true).edges

        # Empty reference dataset; each observation reduces a copy of it.
        reference = SpectrumDataset.create(e_reco=e_reco,
                                           e_true=e_true,
                                           region=on_region)

        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = dataset_maker.run(reference.copy(), obs)
            dataset = bkg_maker.run(dataset, obs)
            # No reflected OFF regions could be placed: skip this run.
            if dataset.counts_off is None:
                log.info(
                    f"No OFF region found for observation {obs.obs_id}. Discarding."
                )
                continue
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)

        self.datasets = Datasets(datasets)

        # Optionally collapse all runs into a single stacked dataset.
        if self.config.datasets.stack:
            stacked = self.datasets.stack_reduce(name="stacked")
            self.datasets = Datasets([stacked])
コード例 #6
0
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction.

        Reads the on-region, energy binning and background settings from
        ``self.settings``, reduces every observation to a spectrum dataset
        with reflected-regions background and a safe mask, and stores the
        result in ``self.datasets`` (optionally stacked).

        Raises
        ------
        ValueError
            If the configured background estimator is not "reflected".
        """
        region = self.settings["datasets"]["geom"]["region"]
        log.info("Reducing spectrum datasets.")
        on_lon = Angle(region["center"][0])
        on_lat = Angle(region["center"][1])
        on_center = SkyCoord(on_lon, on_lat, frame=region["frame"])
        on_region = CircleSkyRegion(on_center, Angle(region["radius"]))

        maker_config = {}
        if "containment_correction" in self.settings["datasets"]:
            maker_config["containment_correction"] = self.settings["datasets"][
                "containment_correction"
            ]
        params = self.settings["datasets"]["geom"]["axes"][0]
        e_reco = MapAxis.from_bounds(**params).edges
        maker_config["e_reco"] = e_reco

        # TODO: remove hard-coded e_true and make it configurable
        maker_config["e_true"] = np.logspace(-2, 2.5, 109) * u.TeV
        maker_config["region"] = on_region

        dataset_maker = SpectrumDatasetMaker(**maker_config)
        bkg_maker_config = {}
        background = self.settings["datasets"]["background"]

        # Optional exclusion mask for the reflected-regions estimator.
        if "exclusion_mask" in background:
            map_hdu = {}
            filename = background["exclusion_mask"]["filename"]
            if "hdu" in background["exclusion_mask"]:
                map_hdu = {"hdu": background["exclusion_mask"]["hdu"]}
            exclusion_region = Map.read(filename, **map_hdu)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        if background["background_estimator"] == "reflected":
            reflected_bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)
        else:
            # The original only logged here and then crashed below with an
            # unbound ``reflected_bkg_maker`` (NameError); fail fast instead.
            raise ValueError(
                "Background estimation only for reflected regions method."
            )

        safe_mask_maker = SafeMaskMaker(methods=["aeff-default", "aeff-max"])

        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            selection = ["counts", "aeff", "edisp"]
            dataset = dataset_maker.run(obs, selection=selection)
            dataset = reflected_bkg_maker.run(dataset, obs)
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)

        self.datasets = Datasets(datasets)

        # Optionally collapse all runs into a single stacked dataset.
        if self.settings["datasets"]["stack-datasets"]:
            stacked = self.datasets.stack_reduce()
            stacked.name = "stacked"
            self.datasets = Datasets([stacked])
コード例 #7
0
def data_prep():
    """Prepare N_OBS 1D Crab spectrum datasets (HESS DR1) with a
    reflected-regions background, safe mask and a point-source model
    attached; returns them as a `Datasets` container."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    # The same run repeated N_OBS times (benchmark-style workload).
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)
    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position, radius=on_region_radius)

    exclusion_region = CircleSkyRegion(
        center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"),
        radius=0.5 * u.deg,
    )

    # Exclusion mask: True outside the excluded circle.
    skydir = target_position.galactic
    exclusion_mask = Map.create(
        npix=(150, 150), binsz=0.05, skydir=skydir, proj="TAN", coordsys="GAL"
    )

    mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False)
    exclusion_mask.data = mask

    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log", unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log", unit="TeV").edges

    dataset_maker = SpectrumDatasetMaker(
        region=on_region, e_reco=e_reco, e_true=e_true, containment_correction=True
    )
    bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    spectral_model = PowerLawSpectralModel(
        index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV
    )
    # Point source fixed at the target position (lon/lat frozen).
    spatial_model = PointSpatialModel(
        lon_0=target_position.ra, lat_0=target_position.dec, frame="icrs"
    )
    spatial_model.lon_0.frozen = True
    spatial_model.lat_0.frozen = True

    sky_model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name=""
    )

    # Data preparation
    datasets = []

    for ind, observation in enumerate(observations):
        dataset = dataset_maker.run(observation, selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        # NOTE(review): all datasets share the SAME sky_model instance —
        # confirm this is intended (a joint fit) rather than per-run copies.
        dataset_on_off.name = f"dataset{ind}"
        dataset_on_off.models = sky_model
        datasets.append(dataset_on_off)

    return Datasets(datasets)
コード例 #8
0
    def test_compute_energy_threshold(self, spectrum_dataset_crab_fine,
                                      observations_hess_dl3):
        """Safe-energy threshold from the 10% effective-area criterion."""
        spec_maker = SpectrumDatasetMaker(containment_correction=True)
        mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

        first_obs = observations_hess_dl3[0]
        reduced = spec_maker.run(spectrum_dataset_crab_fine, first_obs)
        reduced = mask_maker.run(reduced, first_obs)

        threshold = reduced.energy_range[0]
        assert_quantity_allclose(threshold, 0.8799225 * u.TeV, rtol=1e-3)
コード例 #9
0
def data_prep():
    """Prepare a list of 1D on/off spectrum datasets from HESS DR1 runs.

    Repeats observation 23523 N_OBS times, reduces each run with a
    reflected-regions background and an aeff-max safe mask, then attaches
    a fresh copy of a power-law model to every dataset.

    Returns
    -------
    list
        List of `SpectrumDatasetOnOff` with models attached.
    """
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    # The same run repeated N_OBS times (benchmark-style workload).
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg")

    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log",
                                 unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log",
                                 unit="TeV").edges

    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    dataset_maker = SpectrumDatasetMaker(containment_correction=True,
                                         selection=["counts", "aeff", "edisp"])

    # Empty reference dataset; each observation reduces a copy of it.
    empty = SpectrumDatasetOnOff.create(region=on_region,
                                        e_reco=e_reco,
                                        e_true=e_true)

    bkg_maker = ReflectedRegionsBackgroundMaker()
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    spectral_model = PowerLawSpectralModel(index=2.6,
                                           amplitude=2.0e-11 *
                                           u.Unit("1 / (cm2 s TeV)"),
                                           reference=1 * u.TeV)
    spectral_model.index.frozen = False

    datasets_1d = []

    for observation in observations:

        dataset = dataset_maker.run(dataset=empty.copy(),
                                    observation=observation)

        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        datasets_1d.append(dataset_on_off)

    # Each dataset gets its own model copy. (Removed the dead pre-loop
    # ``model = spectral_model.copy()`` the original built and never used —
    # it was unconditionally rebuilt here for every dataset.)
    for dataset in datasets_1d:
        model = spectral_model.copy()
        model.name = "crab"
        dataset.model = model

    return datasets_1d
コード例 #10
0
ファイル: test_make.py プロジェクト: mstrzys/gammapy
    def test_compute_energy_threshold(self,
                                      spectrum_dataset_maker_crab_fine_bins,
                                      observations_hess_dl3):
        """Safe threshold from the 10% effective-area criterion (fine bins)."""
        mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

        first_obs = observations_hess_dl3[0]
        spectrum_dataset_maker_crab_fine_bins.containment_correction = True
        reduced = spectrum_dataset_maker_crab_fine_bins.run(
            first_obs, selection=["counts", "aeff", "edisp"])
        reduced = mask_maker.run(reduced, first_obs)

        threshold = reduced.energy_range[0]
        assert_quantity_allclose(threshold, 0.8799225 * u.TeV, rtol=1e-3)
コード例 #11
0
def data_prep():
    """Prepare N_OBS 3D map datasets (CTA 1DC GC field): per-observation
    cutouts with an offset-max safe mask and a shared point-source model."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    # The same run repeated N_OBS times (benchmark-style workload).
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(0.1,
                                      10,
                                      nbin=10,
                                      unit="TeV",
                                      name="energy",
                                      interp="log")
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )

    # offset_max drives both the safe mask and the cutout width below.
    offset_max = 4 * u.deg
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"],
                                    offset_max=offset_max)
    stacked = MapDataset.create(geom=geom)

    spatial_model = PointSpatialModel(lon_0="-0.05 deg",
                                      lat_0="-0.05 deg",
                                      frame="galactic")
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(spatial_model=spatial_model,
                     spectral_model=spectral_model,
                     name="gc-source")

    datasets = Datasets([])
    for idx, obs in enumerate(observations):
        # Cutout around the pointing so each dataset covers only the
        # usable field of view (2 * offset_max across).
        cutout = stacked.cutout(obs.pointing_radec,
                                width=2 * offset_max,
                                name=f"dataset{idx}")
        dataset = maker.run(cutout, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        # NOTE(review): all datasets share the SAME model instance —
        # confirm this is intended (joint fit) rather than per-run copies.
        dataset.models = model
        datasets.append(dataset)
    return datasets
コード例 #12
0
def test_safe_mask_maker(observations):
    """Check SafeMaskMaker mask helpers on a 3D map dataset.

    Verifies the offset-max and aeff-default masks quantitatively, and
    that the energy-based estimators raise ``NotImplementedError`` for
    map datasets.
    """
    obs = observations[0]

    axis = MapAxis.from_edges([0.1, 1, 10],
                              name="energy",
                              interp="log",
                              unit="TeV")
    geom = WcsGeom.create(npix=(11, 11),
                          axes=[axis],
                          skydir=obs.pointing_radec)

    empty_dataset = MapDataset.create(geom=geom)
    dataset_maker = MapDatasetMaker(offset_max="3 deg")
    safe_mask_maker = SafeMaskMaker(offset_max="3 deg")
    dataset = dataset_maker.run(empty_dataset, obs)

    mask_offset = safe_mask_maker.make_mask_offset_max(dataset=dataset,
                                                       observation=obs)
    assert_allclose(mask_offset.sum(), 109)

    mask_energy_aeff_default = safe_mask_maker.make_mask_energy_aeff_default(
        dataset=dataset, observation=obs)
    assert_allclose(mask_energy_aeff_default.sum(), 242)

    with pytest.raises(NotImplementedError) as excinfo:
        safe_mask_maker.make_mask_energy_edisp_bias(dataset)

    assert "only supported" in str(excinfo.value)

    # The original repeated the edisp-bias check verbatim; exercise the
    # aeff-max estimator instead so both unsupported paths are covered.
    with pytest.raises(NotImplementedError) as excinfo:
        safe_mask_maker.make_mask_energy_aeff_max(dataset)

    assert "only supported" in str(excinfo.value)
コード例 #13
0
ファイル: test_make.py プロジェクト: mstrzys/gammapy
def test_safe_mask_maker_dl3(spectrum_dataset_maker_crab,
                             observations_hess_dl3):
    """Default SafeMaskMaker on a HESS DL3 spectrum dataset, plus the
    per-method mask helpers."""
    mask_maker = SafeMaskMaker()

    first_obs = observations_hess_dl3[0]
    reduced = spectrum_dataset_maker_crab.run(first_obs)
    reduced = mask_maker.run(reduced, first_obs)

    low_edge = reduced.energy_range[0]
    assert_allclose(low_edge.value, 1)
    assert low_edge.unit == "TeV"

    aeff_mask = mask_maker.make_mask_energy_aeff_max(reduced)
    assert aeff_mask.sum() == 4

    bias_mask = mask_maker.make_mask_energy_edisp_bias(reduced)
    assert bias_mask.sum() == 3
コード例 #14
0
def data_prep():
    """Prepare a single stacked 1D on/off Crab spectrum dataset (HESS DR1)
    by reducing and stacking N_OBS copies of the same observation."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    # The same run repeated N_OBS times (benchmark-style workload).
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    exclusion_region = CircleSkyRegion(
        center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"),
        radius=0.5 * u.deg,
    )

    # Exclusion mask: True outside the excluded circle.
    skydir = target_position.galactic
    exclusion_mask = Map.create(npix=(150, 150),
                                binsz=0.05,
                                skydir=skydir,
                                proj="TAN",
                                coordsys="GAL")

    mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False)
    exclusion_mask.data = mask

    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log",
                                 unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log",
                                 unit="TeV").edges

    # Accumulator dataset: every reduced run is stacked into it below.
    stacked = SpectrumDatasetOnOff.create(e_reco=e_reco, e_true=e_true)
    stacked.name = "stacked"

    dataset_maker = SpectrumDatasetMaker(region=on_region,
                                         e_reco=e_reco,
                                         e_true=e_true,
                                         containment_correction=False)
    bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    for observation in observations:
        dataset = dataset_maker.run(observation,
                                    selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        stacked.stack(dataset_on_off)
    return stacked
コード例 #15
0
ファイル: test_make.py プロジェクト: mstrzys/gammapy
def test_map_maker(pars, observations):
    """Parametrized map-making test: stack all observations and compare
    counts/exposure/background totals against the reference values in
    ``pars``, both in 3D and after reduction to a 2D image."""
    stacked = MapDataset.create(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )

    # Maker configured with the same geometry/IRF parameters as `stacked`.
    maker = MapDatasetMaker(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")

    for obs in observations:
        dataset = maker.run(obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)

    # 3D checks on the stacked dataset.
    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)

    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)

    # 2D checks: totals must be preserved when collapsing the energy axis
    # (exposure is compared via sum rather than mean here).
    image_dataset = stacked.to_image()

    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-4)

    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=1e-3)

    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
コード例 #16
0
def data_prep():
    """Prepare a single stacked 3D map dataset (CTA 1DC GC field) with a
    point-source model attached; returns it wrapped in a `Datasets`."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    # The same run repeated N_OBS times (benchmark-style workload).
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(0.1,
                                      10,
                                      nbin=10,
                                      unit="TeV",
                                      name="energy",
                                      interp="log")

    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )

    # `stacked` is both the reduction reference and the accumulator.
    stacked = MapDataset.create(geom)
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")
    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)

    spatial_model = PointSpatialModel(lon_0="0.01 deg",
                                      lat_0="0.01 deg",
                                      frame="galactic")
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(spatial_model=spatial_model,
                     spectral_model=spectral_model,
                     name="gc-source")

    stacked.models = model
    stacked.name = "stacked_ds"

    return Datasets([stacked])
コード例 #17
0
    def _map_making(self):
        """Make maps and datasets for 3d analysis.

        Builds the map geometry and optional IRF geometry from
        ``self.settings``, reduces every observation with a MapDatasetMaker
        plus an offset-max safe mask, and stores either the stacked dataset
        or the per-observation datasets in ``self.datasets``.
        """
        log.info("Creating geometry.")

        geom = self._create_geometry(self.settings["datasets"]["geom"])

        # Optional IRF geometry overrides from the settings.
        geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
        if "energy-axis-true" in self.settings["datasets"]:
            axis_params = self.settings["datasets"]["energy-axis-true"]
            geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
        geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
        geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)

        offset_max = Angle(self.settings["datasets"]["offset-max"])
        log.info("Creating datasets.")

        maker = MapDatasetMaker(offset_max=offset_max)
        maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)

        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)

        def _reduce(obs):
            """Per-observation reduction chain (shared by both branches)."""
            log.info(f"Processing observation {obs.obs_id}")
            dataset = maker.run(stacked, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            dataset.background_model.name = f"bkg_{dataset.name}"
            # TODO remove this once dataset and model have unique identifiers
            log.debug(dataset)
            return dataset

        if self.settings["datasets"]["stack-datasets"]:
            for obs in self.observations:
                stacked.stack(_reduce(obs))
            self._extract_irf_kernels(stacked)
            datasets = [stacked]
        else:
            datasets = []
            for obs in self.observations:
                dataset = _reduce(obs)
                self._extract_irf_kernels(dataset)
                datasets.append(dataset)

        self.datasets = Datasets(datasets)
コード例 #18
0
ファイル: core.py プロジェクト: gfiusa/gammapy
    def _map_making(self):
        """Make maps and datasets for 3d analysis.

        Builds the map/IRF geometry from ``self.config``, reduces every
        observation on a cutout around its pointing, and stores either the
        stacked dataset or the per-observation datasets in
        ``self.datasets``.
        """
        log.info("Creating geometry.")

        geom = self._create_geometry()
        geom_settings = self.config.datasets.geom
        # Optional IRF geometry overrides from the config.
        geom_irf = dict(energy_axis_true=None, binsz_irf=None)
        if geom_settings.axes.energy_true.min is not None:
            geom_irf["energy_axis_true"] = self._make_energy_axis(
                geom_settings.axes.energy_true)
        geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value
        offset_max = geom_settings.selection.offset_max
        log.info("Creating datasets.")

        maker = MapDatasetMaker(selection=self.config.datasets.map_selection)

        safe_mask_selection = self.config.datasets.safe_mask.methods
        safe_mask_settings = self.config.datasets.safe_mask.settings
        maker_safe_mask = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)
        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)

        def _reduce(obs):
            """Per-observation reduction chain (shared by both branches)."""
            log.info(f"Processing observation {obs.obs_id}")
            # Cutout covers the usable field of view (2 * offset_max across).
            cutout = stacked.cutout(obs.pointing_radec,
                                    width=2 * offset_max)
            dataset = maker.run(cutout, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            log.debug(dataset)
            return dataset

        if self.config.datasets.stack:
            for obs in self.observations:
                stacked.stack(_reduce(obs))
            datasets = [stacked]
        else:
            datasets = [_reduce(obs) for obs in self.observations]
        self.datasets = Datasets(datasets)
コード例 #19
0
def data_prep():
    """Prepare N_OBS 3D map datasets (CTA 1DC GC field) with the energy
    dispersion and PSF reduced to kernels at the source position."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    # The same run repeated N_OBS times (benchmark-style workload).
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(0.1,
                                      10,
                                      nbin=10,
                                      unit="TeV",
                                      name="energy",
                                      interp="log")
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )

    src_pos = SkyCoord(0, 0, unit="deg", frame="galactic")
    offset_max = 4 * u.deg
    maker = MapDatasetMaker(offset_max=offset_max)
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")
    # Empty reference dataset passed to the maker for each observation.
    stacked = MapDataset.create(geom=geom)

    datasets = []
    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        # Collapse the IRF maps to kernels evaluated at the source position.
        dataset.edisp = dataset.edisp.get_energy_dispersion(
            position=src_pos, e_reco=energy_axis.edges)
        dataset.psf = dataset.psf.get_psf_kernel(position=src_pos,
                                                 geom=geom,
                                                 max_radius="0.3 deg")

        datasets.append(dataset)
    return datasets
コード例 #20
0
ファイル: core.py プロジェクト: mstrzys/gammapy
    def _map_making(self):
        """Make maps and datasets for 3d analysis."""
        log.info("Creating geometry.")

        geom = self._create_geometry(self.settings["datasets"]["geom"])

        # Optional IRF geometry overrides from the settings.
        geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
        if "energy-axis-true" in self.settings["datasets"]:
            axis_params = self.settings["datasets"]["energy-axis-true"]
            geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
        geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
        geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)

        offset_max = Angle(self.settings["datasets"]["offset-max"])
        log.info("Creating datasets.")

        # NOTE(review): this maker takes geom/geom_irf directly and is run
        # per observation without a reference dataset — older maker API.
        maker = MapDatasetMaker(geom=geom, offset_max=offset_max, **geom_irf)
        maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)

        if self.settings["datasets"]["stack-datasets"]:
            # Stack every reduced observation into one dataset.
            stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
            for obs in self.observations:
                dataset = maker.run(obs)
                dataset = maker_safe_mask.run(dataset, obs)
                stacked.stack(dataset)
            self._extract_irf_kernels(stacked)
            datasets = [stacked]
        else:
            # Keep one dataset per observation (joint analysis).
            datasets = []
            for obs in self.observations:
                dataset = maker.run(obs)
                dataset = maker_safe_mask.run(dataset, obs)
                self._extract_irf_kernels(dataset)
                datasets.append(dataset)

        self.datasets = Datasets(datasets)
コード例 #21
0
ファイル: run.py プロジェクト: QRemy/gammapy-benchmarks
def run_analysis_1d(target_dict):
    """Run spectral analysis for the selected target"""
    tag = target_dict["tag"]
    name = target_dict["name"]

    log.info(f"running 1d analysis, {tag}")
    path_res = Path(tag + "/results/")

    # Target definition taken from the input dictionary.
    source_position = SkyCoord(
        target_dict["ra"], target_dict["dec"], unit="deg", frame="icrs"
    )
    region_radius = Angle(target_dict["on_size"] * u.deg)
    e_decorr = target_dict["e_decorr"]

    # Observations selection
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    is_target = data_store.obs_table["TARGET_NAME"] == name
    observations = data_store.get_observations(
        data_store.obs_table[is_target]["OBS_ID"]
    )

    if DEBUG is True:
        # Keep a single run to speed up debugging.
        observations = [observations[0]]

    # Reflected regions background estimation
    on_region = CircleSkyRegion(center=source_position, radius=region_radius)
    dataset_maker = SpectrumDatasetMaker(
        region=on_region,
        e_reco=E_RECO,
        e_true=E_RECO,
        containment_correction=True,
    )
    bkg_maker = ReflectedRegionsBackgroundMaker()
    safe_mask_masker = SafeMaskMaker(methods=["edisp-bias"], bias_percent=10)

    datasets = []
    for observation in observations:
        spectrum = dataset_maker.run(
            observation, selection=["counts", "aeff", "edisp"]
        )
        on_off = bkg_maker.run(spectrum, observation)
        datasets.append(safe_mask_masker.run(on_off, observation))

    # Fit spectrum
    model = PowerLawSpectralModel(
        index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=e_decorr * u.TeV
    )
    for dataset in datasets:
        dataset.model = model

    result_joint = Fit(datasets).run()

    parameters = model.parameters
    parameters.covariance = result_joint.parameters.covariance
    write_fit_summary(parameters, str(path_res / "results-summary-fit-1d.yaml"))

    # Flux points
    flux_points = FluxPointsEstimator(datasets=datasets, e_edges=FLUXP_EDGES).run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    columns = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"]
    flux_points.table_formatted[columns].write(
        path_res / "flux-points-1d.ecsv", format="ascii.ecsv"
    )
コード例 #22
0
def run_analysis_3d(target_dict, fluxp_edges, debug):
    """Run stacked 3D analysis for the selected target.

    Notice that, for the sake of time saving, we run a stacked analysis, as opposed
     to the joint analysis that is performed in the reference paper.
    """
    tag = target_dict["tag"]
    log.info(f"running 3d analysis, {tag}")

    path_res = Path(tag + "/results/")

    # Fill the config template with the target-specific values.
    config_text = Path("config_template.yaml").read_text().format_map(target_dict)
    config = AnalysisConfig.from_yaml(config_text)

    log.info(f"Running observations selection")
    analysis = Analysis(config)
    analysis.get_observations()

    log.info(f"Running data reduction")
    analysis.get_datasets()

    # TODO: Improve safe mask handling in Analysis. the mask should be applied run-by-run
    safe_mask_maker = SafeMaskMaker(methods=["edisp-bias", "bkg-peak"])
    stacked = safe_mask_maker.run(analysis.datasets[0])

    log.info(f"Running fit ...")
    spectral_model = Model.create(
        "PowerLawSpectralModel", reference=target_dict["e_decorr"]
    )
    spatial_model = Model.create(
        target_dict["spatial_model"],
        lon_0=target_dict["ra"],
        lat_0=target_dict["dec"],
    )
    if target_dict["spatial_model"] == "DiskSpatialModel":
        # Let the disk eccentricity vary for extended sources.
        spatial_model.e.frozen = False
    sky_model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name=tag
    )

    stacked.models = sky_model
    stacked.background_model.norm.frozen = False
    result = Fit([stacked]).run()

    parameters = stacked.models.parameters
    model_npars = len(sky_model.parameters.names)
    # Keep only the covariance sub-block of the source-model parameters
    # (the background norm is excluded).
    parameters.covariance = result.parameters.covariance[:model_npars, :model_npars]
    log.info(f"Writing {path_res}")
    write_fit_summary(parameters, str(path_res / "results-summary-fit-3d.yaml"))

    log.info("Running flux points estimation")
    # TODO: This is a workaround to re-optimize the bkg. Remove it once it's added to the Analysis class
    for par in stacked.parameters:
        if par is not stacked.background_model.norm:
            par.frozen = True

    # Same truth table as the original `True if debug is False else False`.
    reoptimize = debug is False
    fpe = FluxPointsEstimator(
        datasets=[stacked],
        e_edges=fluxp_edges,
        source=tag,
        reoptimize=reoptimize,
    )

    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    columns = [
        "e_ref",
        "e_min",
        "e_max",
        "dnde",
        "dnde_errp",
        "dnde_errn",
        "is_ul",
        "dnde_ul",
    ]
    log.info(f"Writing {path_res}")
    flux_points.table_formatted[columns].write(
        path_res / "flux-points-3d.ecsv", format="ascii.ecsv"
    )
コード例 #23
0
    spectral_model=spectral_model,
    name="model_simu",
)
print(model_simu)

# Now, comes the main part of dataset simulation. We create an in-memory observation and an empty dataset. We then predict the number of counts for the given model, and Poission fluctuate it using `fake()` to make a simulated counts maps. Keep in mind that it is important to specify the `selection` of the maps that you want to produce

# In[ ]:

# Create an in-memory observation
# NOTE(review): `pointing`, `livetime` and `irfs` are defined earlier in the
# notebook — assumed to be a SkyCoord, a time Quantity and an IRF dict; confirm.
obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)
print(obs)
# Make the MapDataset
empty = MapDataset.create(geom)
# Only the IRF-derived maps are reduced here; the counts map is produced
# below by simulation with fake().
maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=4.0 * u.deg)
dataset = maker.run(empty, obs)
dataset = maker_safe_mask.run(dataset, obs)
print(dataset)

# In[ ]:

# Add the model on the dataset and Poisson fluctuate the predicted counts
dataset.models = model_simu
dataset.fake()
# Do a print on the dataset - there is now a counts map
print(dataset)

# Now use this dataset as you would in all standard analysis. You can plot the maps, or proceed with your custom analysis.
# In the next section, we show the standard 3D fitting as in [analysis_3d](analysis_3d.ipynb).
コード例 #24
0
def data_prep():
    """Reduce the Crab observations into one MapDataset per time interval.

    The single DL3 run (OBS_ID 23523) is repeated ``N_OBS`` times; each
    observation's (tstart, tstop) window is reduced into a stacked map
    dataset with edisp/psf evaluated at the target position, and a
    point-source power-law model (copy named "crab") is attached.

    Returns
    -------
    datasets : list
        One stacked `MapDataset` per non-empty time interval.
    """
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    time_intervals = [(obs.tstart, obs.tstop) for obs in observations]
    target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg")

    # Reconstructed-energy axis and analysis geometry around the target.
    emin, emax = [0.7, 10] * u.TeV
    energy_axis = MapAxis.from_bounds(emin.value,
                                      emax.value,
                                      10,
                                      unit="TeV",
                                      name="energy",
                                      interp="log")
    geom = WcsGeom.create(
        skydir=target_position,
        binsz=0.02,
        width=(2, 2),
        coordsys="CEL",
        proj="CAR",
        axes=[energy_axis],
    )

    # True-energy axis, wider than the reco axis to contain the dispersion.
    energy_axis_true = MapAxis.from_bounds(0.1,
                                           20,
                                           20,
                                           unit="TeV",
                                           name="energy",
                                           interp="log")
    offset_max = 2 * u.deg

    datasets = []

    maker = MapDatasetMaker(offset_max=offset_max)
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"],
                                    offset_max=offset_max)

    for time_interval in time_intervals:
        # BUG FIX: select from the *full* observation list on every
        # iteration. The original code reassigned ``observations`` here, so
        # each interval after the first was selected from the already
        # narrowed subset (typically empty).
        obs_in_interval = observations.select_time(time_interval)

        # Proceed with further analysis only if there are observations
        # in the selected time window
        if len(obs_in_interval) == 0:
            log.warning(f"No observations in time interval: {time_interval}")
            continue

        stacked = MapDataset.create(geom=geom,
                                    energy_axis_true=energy_axis_true)

        for obs in obs_in_interval:
            dataset = maker.run(stacked, obs)
            dataset = safe_mask_maker.run(dataset, obs)
            stacked.stack(dataset)

        # Reduce the stacked IRFs at the target position for fitting.
        stacked.edisp = stacked.edisp.get_energy_dispersion(
            position=target_position, e_reco=energy_axis.edges)

        stacked.psf = stacked.psf.get_psf_kernel(position=target_position,
                                                 geom=stacked.exposure.geom,
                                                 max_radius="0.3 deg")

        datasets.append(stacked)

    # Point source at the Crab position with frozen coordinates.
    spatial_model = PointSpatialModel(lon_0=target_position.ra,
                                      lat_0=target_position.dec,
                                      frame="icrs")
    spatial_model.lon_0.frozen = True
    spatial_model.lat_0.frozen = True

    spectral_model = PowerLawSpectralModel(index=2.6,
                                           amplitude=2.0e-11 *
                                           u.Unit("1 / (cm2 s TeV)"),
                                           reference=1 * u.TeV)
    spectral_model.index.frozen = False

    sky_model = SkyModel(spatial_model=spatial_model,
                         spectral_model=spectral_model,
                         name="")

    # Each dataset gets its own model copy so fits stay independent.
    for dataset in datasets:
        model = sky_model.copy(name="crab")
        dataset.model = model

    return datasets
コード例 #25
0
# In[ ]:

# Reconstructed- and true-energy binning for the spectrum extraction.
e_reco = np.logspace(-1, np.log10(40), 40) * u.TeV
e_true = np.logspace(np.log10(0.05), 2, 200) * u.TeV

# Empty dataset template reused for every observation in the loop below.
# NOTE(review): `on_region` is defined earlier in the notebook.
dataset_empty = SpectrumDataset.create(e_reco=e_reco,
                                       e_true=e_true,
                                       region=on_region)

# In[ ]:

# Reduction chain: spectrum extraction, reflected-regions background,
# and a safe-energy cut at 10% of the maximum effective area.
dataset_maker = SpectrumDatasetMaker(containment_correction=False,
                                     selection=["counts", "aeff", "edisp"])
bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

# In[ ]:

# Run the reduction loop for all observations (timed notebook cell).
get_ipython().run_cell_magic(
    'time', '',
    'datasets = []\n\nfor observation in observations:\n    dataset = dataset_maker.run(dataset_empty, observation)\n    dataset_on_off = bkg_maker.run(dataset, observation)\n    dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)\n    datasets.append(dataset_on_off)'
)

# In[ ]:

# Overlay the ON region and the reflected OFF regions on the counts image.
# NOTE(review): `images` is built earlier in the notebook.
plt.figure(figsize=(8, 8))
_, ax, _ = images["counts"].smooth("0.03 deg").plot(vmax=8)

on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="white")
plot_spectrum_datasets_off_regions(datasets, ax=ax)
コード例 #26
0
# In[ ]:


# True- and reconstructed-energy binning for the pulsar spectrum.
e_true = np.logspace(-2.5, 1, 100) * u.TeV
e_reco = np.logspace(-2, 1, 30) * u.TeV

dataset_empty = SpectrumDataset.create(
    e_reco=e_reco, e_true=e_true, region=on_region
)
dataset_maker = SpectrumDatasetMaker()
# Background estimated from the off-pulse phase interval(s).
# NOTE(review): `on_phase_range`/`off_phase_range` are defined earlier.
phase_bkg_maker = PhaseBackgroundMaker(
    on_phase=on_phase_range, off_phase=off_phase_range
)
# Safe range: default effective-area cut plus a 20% energy-dispersion bias cut.
safe_mask_maker = SafeMaskMaker(
    methods=["aeff-default", "edisp-bias"], bias_percent=20
)

datasets = []

# One ON/OFF spectrum dataset per Vela observation.
for obs in obs_list_vela:
    dataset = dataset_maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)
    dataset_on_off = safe_mask_maker.run(dataset_on_off, obs)
    datasets.append(dataset_on_off)


# Now let's a look at the datasets we just created:

# In[ ]:
コード例 #27
0
# Empty stacked dataset; each observation below is reduced into a cutout
# and accumulated here. NOTE(review): `geom` and `energy_axis_true` are
# defined earlier in the notebook.
stacked = MapDataset.create(geom=geom,
                            energy_axis_true=energy_axis_true,
                            name="crab-stacked")

# ## Data reduction
#
# ### Create the maker classes to be used
#
# The `~gammapy.cube.MapDatasetMaker` object is initialized as well as the `~gammapy.cube.SafeMaskMaker` that carries here a maximum offset selection.

# In[ ]:

offset_max = 2.5 * u.deg
maker = MapDatasetMaker()
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)

# ### Perform the data reduction loop

# In[ ]:

# Cutout -> fill -> safe mask -> stack, per observation (timed notebook cell).
get_ipython().run_cell_magic(
    'time', '',
    '\nfor obs in observations:\n    # First a cutout of the target map is produced\n    cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)\n    # A MapDataset is filled in this cutout geometry\n    dataset = maker.run(cutout, obs)\n    # The data quality cut is applied\n    dataset = maker_safe_mask.run(dataset, obs)\n    # The resulting dataset cutout is stacked onto the final one\n    stacked.stack(dataset)'
)

# ### Inspect the reduced dataset

# In[ ]:

stacked.counts.sum_over_axes().smooth(0.05 * u.deg).plot(stretch="sqrt",
コード例 #28
0
def run_analysis(method, target_dict, debug):
    """If the method is "1d", runs joint spectral analysis for the selected target. If
    instead it is "3d", runs stacked 3D analysis."""
    tag = target_dict["tag"]
    log.info(f"Running {method} analysis, {tag}")
    path_res = Path(tag + "/results/")

    log.info("Reading config")
    # Fill the per-method config template with the target values.
    config_text = Path(f"config_{method}.yaml").read_text().format_map(target_dict)
    config = AnalysisConfig.from_yaml(config_text)
    if debug:
        # Shrink the analysis to a single run / single flux-point bin.
        config.observations.obs_ids = [target_dict["debug_run"]]
        config.flux_points.energy.nbins = 1
        if method == "3d":
            config.datasets.geom.axes.energy_true.nbins = 10
    analysis = Analysis(config)

    log.info("Running observations selection")
    analysis.get_observations()

    log.info(f"Running data reduction")
    analysis.get_datasets()

    # TODO: This is a workaround. We should somehow apply the safe mask (run by run) from the HLI
    from gammapy.cube import SafeMaskMaker
    safe_mask_maker = SafeMaskMaker(methods=["edisp-bias", "bkg-peak"],
                                    bias_percent=10)
    analysis.datasets = [
        safe_mask_maker.run(dataset) for dataset in analysis.datasets
    ]

    log.info(f"Setting the model")
    model_text = Path("model_config.yaml").read_text().format_map(target_dict)
    log.info(model_text)
    analysis.set_models(model_text)
    if method == "3d" and target_dict["spatial_model"] == "DiskSpatialModel":
        # Free the disk shape parameters for extended 3D fits.
        disk = analysis.models[0].spatial_model
        disk.e.frozen = False
        disk.phi.frozen = False
        disk.r_0.value = 0.3

    log.info(f"Running fit ...")
    analysis.run_fit()

    # TODO: This is a workaround. Set covariance automatically
    results = analysis.fit_result
    for attr in ["spectral_model", "spatial_model"]:
        if attr == "spatial_model" and method == "1d":
            continue
        component = getattr(analysis.models[0], attr)
        component.parameters.covariance = results.parameters.get_subcovariance(
            component.parameters.names)

    log.info(f"Writing {path_res}")
    write_fit_summary(analysis.models[0].parameters,
                      str(path_res / f"results-summary-fit-{method}.yaml"))

    log.info(f"Running flux points estimation")
    # TODO:  For the 3D analysis, re-optimize the background norm in each energy
    #  bin. For now, this is not possible from the HLI.
    analysis.get_flux_points(source=tag)
    flux_points = analysis.flux_points.data
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    columns = [
        "e_ref",
        "e_min",
        "e_max",
        "dnde",
        "dnde_errp",
        "dnde_errn",
        "is_ul",
        "dnde_ul",
    ]
    log.info(f"Writing {path_res}")
    flux_points.table_formatted[columns].write(path_res /
                                               f"flux-points-{method}.ecsv",
                                               format="ascii.ecsv")
コード例 #29
0
# True-energy bin edges for the spectrum extraction.
e_true = MapAxis.from_energy_bounds(0.05, 100, 100, "TeV").edges

# ON region centred on the Crab nebula.
target_position = SkyCoord(83.63308 * u.deg, 22.01450 * u.deg, frame="icrs")
on_region_radius = Angle("0.11 deg")
on_region = CircleSkyRegion(center=target_position, radius=on_region_radius)

# ### Creation of the data reduction makers
#
# We now create the dataset and background makers for the selected geometry.

# In[ ]:

# Reduction chain: spectrum extraction with PSF containment correction,
# reflected-regions background, and a 10% effective-area safe cut.
dataset_maker = SpectrumDatasetMaker(containment_correction=True,
                                     selection=["counts", "aeff", "edisp"])
bkg_maker = ReflectedRegionsBackgroundMaker()
safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

# ### Creation of the datasets
#
# Now we perform the actual data reduction in the time_intervals.

# In[ ]:

datasets = []

# Empty template dataset reused for every observation.
# NOTE(review): `e_reco` is defined earlier in the notebook.
dataset_empty = SpectrumDataset.create(e_reco=e_reco,
                                       e_true=e_true,
                                       region=on_region)

for obs in observations:
    dataset = dataset_maker.run(dataset_empty, obs)