Code example #1
    def test_extract(
        pars,
        results,
        observations_hess_dl3,
        spectrum_dataset_crab_fine,
        reflected_regions_bkg_maker,
    ):
        """Test quantitative output for various configs"""
        safe_mask_maker = SafeMaskMaker()
        maker = SpectrumDatasetMaker(
            containment_correction=pars["containment_correction"])

        obs = observations_hess_dl3[0]
        dataset = maker.run(spectrum_dataset_crab_fine, obs)
        dataset = reflected_regions_bkg_maker.run(dataset, obs)
        dataset = safe_mask_maker.run(dataset, obs)

        aeff_actual = dataset.aeff.data.evaluate(energy=5 * u.TeV)
        edisp_actual = dataset.edisp.data.evaluate(e_true=5 * u.TeV,
                                                   e_reco=5.2 * u.TeV)

        assert_quantity_allclose(aeff_actual, results["aeff"], rtol=1e-3)
        assert_quantity_allclose(edisp_actual, results["edisp"], rtol=1e-3)

        # TODO: Introduce assert_stats_allclose
        info = dataset.info_dict()

        assert info["n_on"] == results["n_on"]
        assert_allclose(info["significance"], results["sigma"], atol=1e-2)

        gti_obs = obs.gti.table
        gti_dataset = dataset.gti.table
        assert_allclose(gti_dataset["START"], gti_obs["START"])
        assert_allclose(gti_dataset["STOP"], gti_obs["STOP"])
Code example #2
File: core.py Project: gfiusa/gammapy
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        log.info("Reducing spectrum datasets.")
        datasets_settings = self.config.datasets
        on_lon = datasets_settings.on_region.lon
        on_lat = datasets_settings.on_region.lat
        on_center = SkyCoord(on_lon,
                             on_lat,
                             frame=datasets_settings.on_region.frame)
        on_region = CircleSkyRegion(on_center,
                                    datasets_settings.on_region.radius)

        maker_config = {}
        if datasets_settings.containment_correction:
            maker_config["containment_correction"] = (
                datasets_settings.containment_correction
            )
        e_reco = self._make_energy_axis(
            datasets_settings.geom.axes.energy).edges

        maker_config["selection"] = ["counts", "aeff", "edisp"]
        dataset_maker = SpectrumDatasetMaker(**maker_config)
        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            exclusion_region = Map.read(datasets_settings.background.exclusion)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)

        safe_mask_selection = self.config.datasets.safe_mask.methods
        safe_mask_settings = self.config.datasets.safe_mask.settings
        safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)

        e_true = self._make_energy_axis(
            datasets_settings.geom.axes.energy_true).edges

        reference = SpectrumDataset.create(e_reco=e_reco,
                                           e_true=e_true,
                                           region=on_region)

        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = dataset_maker.run(reference.copy(), obs)
            dataset = bkg_maker.run(dataset, obs)
            if dataset.counts_off is None:
                log.info(
                    f"No OFF region found for observation {obs.obs_id}. Discarding."
                )
                continue
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)

        self.datasets = Datasets(datasets)

        if self.config.datasets.stack:
            stacked = self.datasets.stack_reduce(name="stacked")
            self.datasets = Datasets([stacked])
Code example #3
def test_safe_mask_maker_dc1(spectrum_dataset_gc, observations_cta_dc1):
    safe_mask_maker = SafeMaskMaker(methods=["edisp-bias", "aeff-max"])

    obs = observations_cta_dc1[0]
    maker = SpectrumDatasetMaker()
    dataset = maker.run(spectrum_dataset_gc, obs)
    dataset = safe_mask_maker.run(dataset, obs)
    assert_allclose(dataset.energy_range[0].value, 3.162278, rtol=1e-3)
    assert dataset.energy_range[0].unit == "TeV"
Code example #4
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        region = self.settings["datasets"]["geom"]["region"]
        log.info("Reducing spectrum datasets.")
        on_lon = Angle(region["center"][0])
        on_lat = Angle(region["center"][1])
        on_center = SkyCoord(on_lon, on_lat, frame=region["frame"])
        on_region = CircleSkyRegion(on_center, Angle(region["radius"]))

        maker_config = {}
        if "containment_correction" in self.settings["datasets"]:
            maker_config["containment_correction"] = self.settings["datasets"][
                "containment_correction"
            ]
        params = self.settings["datasets"]["geom"]["axes"][0]
        e_reco = MapAxis.from_bounds(**params).edges
        maker_config["e_reco"] = e_reco

        # TODO: remove hard-coded e_true and make it configurable
        maker_config["e_true"] = np.logspace(-2, 2.5, 109) * u.TeV
        maker_config["region"] = on_region

        dataset_maker = SpectrumDatasetMaker(**maker_config)
        bkg_maker_config = {}
        background = self.settings["datasets"]["background"]

        if "exclusion_mask" in background:
            map_hdu = {}
            filename = background["exclusion_mask"]["filename"]
            if "hdu" in background["exclusion_mask"]:
                map_hdu = {"hdu": background["exclusion_mask"]["hdu"]}
            exclusion_region = Map.read(filename, **map_hdu)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        if background["background_estimator"] == "reflected":
            reflected_bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)
        else:
            # TODO: raise error?
            log.info("Background estimation only for reflected regions method.")

        safe_mask_maker = SafeMaskMaker(methods=["aeff-default", "aeff-max"])

        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            selection = ["counts", "aeff", "edisp"]
            dataset = dataset_maker.run(obs, selection=selection)
            dataset = reflected_bkg_maker.run(dataset, obs)
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)

        self.datasets = Datasets(datasets)

        if self.settings["datasets"]["stack-datasets"]:
            stacked = self.datasets.stack_reduce()
            stacked.name = "stacked"
            self.datasets = Datasets([stacked])
Code example #5
def data_prep():
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)
    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position, radius=on_region_radius)

    exclusion_region = CircleSkyRegion(
        center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"),
        radius=0.5 * u.deg,
    )

    skydir = target_position.galactic
    exclusion_mask = Map.create(
        npix=(150, 150), binsz=0.05, skydir=skydir, proj="TAN", coordsys="GAL"
    )

    mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False)
    exclusion_mask.data = mask

    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log", unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log", unit="TeV").edges

    dataset_maker = SpectrumDatasetMaker(
        region=on_region, e_reco=e_reco, e_true=e_true, containment_correction=True
    )
    bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    spectral_model = PowerLawSpectralModel(
        index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV
    )
    spatial_model = PointSpatialModel(
        lon_0=target_position.ra, lat_0=target_position.dec, frame="icrs"
    )
    spatial_model.lon_0.frozen = True
    spatial_model.lat_0.frozen = True

    sky_model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name=""
    )

    # Data preparation
    datasets = []

    for ind, observation in enumerate(observations):
        dataset = dataset_maker.run(observation, selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        dataset_on_off.name = f"dataset{ind}"
        dataset_on_off.models = sky_model
        datasets.append(dataset_on_off)

    return Datasets(datasets)
Code example #6
def data_prep():
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg")

    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log",
                                 unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log",
                                 unit="TeV").edges

    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    dataset_maker = SpectrumDatasetMaker(containment_correction=True,
                                         selection=["counts", "aeff", "edisp"])

    empty = SpectrumDatasetOnOff.create(region=on_region,
                                        e_reco=e_reco,
                                        e_true=e_true)

    bkg_maker = ReflectedRegionsBackgroundMaker()
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    spectral_model = PowerLawSpectralModel(index=2.6,
                                           amplitude=2.0e-11 *
                                           u.Unit("1 / (cm2 s TeV)"),
                                           reference=1 * u.TeV)
    spectral_model.index.frozen = False

    model = spectral_model.copy()
    model.name = "crab"

    datasets_1d = []

    for observation in observations:

        dataset = dataset_maker.run(dataset=empty.copy(),
                                    observation=observation)

        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        datasets_1d.append(dataset_on_off)

    for dataset in datasets_1d:
        model = spectral_model.copy()
        model.name = "crab"
        dataset.model = model

    return datasets_1d
Code example #7
    def test_compute_energy_threshold(self, spectrum_dataset_crab_fine,
                                      observations_hess_dl3):

        maker = SpectrumDatasetMaker(containment_correction=True)
        safe_mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

        obs = observations_hess_dl3[0]
        dataset = maker.run(spectrum_dataset_crab_fine, obs)
        dataset = safe_mask_maker.run(dataset, obs)

        actual = dataset.energy_range[0]
        assert_quantity_allclose(actual, 0.8799225 * u.TeV, rtol=1e-3)
Code example #8
    def data(self) -> SpectrumDataset:
        """Actual event data in form of a SpectrumDataset.
        """

        dataset_empty = SpectrumDataset.create(e_reco=self.energy_axis,
                                               e_true=self.energy_axis,
                                               region=self.on_region)
        maker = SpectrumDatasetMaker(containment_correction=False,
                                     selection=["background", "aeff", "edisp"])
        dataset = maker.run(dataset_empty, self.obs)
        dataset.models = self.true_model
        dataset.fake()
        return dataset
Code example #9
def data_prep():
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    exclusion_region = CircleSkyRegion(
        center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"),
        radius=0.5 * u.deg,
    )

    skydir = target_position.galactic
    exclusion_mask = Map.create(npix=(150, 150),
                                binsz=0.05,
                                skydir=skydir,
                                proj="TAN",
                                coordsys="GAL")

    mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False)
    exclusion_mask.data = mask

    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log",
                                 unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log",
                                 unit="TeV").edges

    stacked = SpectrumDatasetOnOff.create(e_reco=e_reco, e_true=e_true)
    stacked.name = "stacked"

    dataset_maker = SpectrumDatasetMaker(region=on_region,
                                         e_reco=e_reco,
                                         e_true=e_true,
                                         containment_correction=False)
    bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask)
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    for observation in observations:
        dataset = dataset_maker.run(observation,
                                    selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        stacked.stack(dataset_on_off)
    return stacked
Code example #10
def test_spectrum_dataset_maker_hess_dl3(spectrum_dataset_crab,
                                         observations_hess_dl3):
    datasets = []
    maker = SpectrumDatasetMaker()

    for obs in observations_hess_dl3:
        dataset = maker.run(spectrum_dataset_crab, obs)
        datasets.append(dataset)

    assert_allclose(datasets[0].counts.data.sum(), 100)
    assert_allclose(datasets[1].counts.data.sum(), 92)

    assert_allclose(datasets[0].livetime.value, 1581.736758)
    assert_allclose(datasets[1].livetime.value, 1572.686724)

    assert_allclose(datasets[0].background.data.sum(), 7.74732, rtol=1e-5)
    assert_allclose(datasets[1].background.data.sum(), 6.118879, rtol=1e-5)
Code example #11
def test_spectrum_dataset_maker_hess_cta(spectrum_dataset_gc,
                                         observations_cta_dc1):
    maker = SpectrumDatasetMaker()

    datasets = []

    for obs in observations_cta_dc1:
        dataset = maker.run(spectrum_dataset_gc, obs)
        datasets.append(dataset)

    assert_allclose(datasets[0].counts.data.sum(), 53)
    assert_allclose(datasets[1].counts.data.sum(), 47)

    assert_allclose(datasets[0].livetime.value, 1764.000034)
    assert_allclose(datasets[1].livetime.value, 1764.000034)

    assert_allclose(datasets[0].background.data.sum(), 2.238345, rtol=1e-5)
    assert_allclose(datasets[1].background.data.sum(), 2.164593, rtol=1e-5)
Code example #12
def test_safe_mask_maker_dl3(spectrum_dataset_crab, observations_hess_dl3):

    safe_mask_maker = SafeMaskMaker()
    maker = SpectrumDatasetMaker()

    obs = observations_hess_dl3[0]
    dataset = maker.run(spectrum_dataset_crab, obs)
    dataset = safe_mask_maker.run(dataset, obs)
    assert_allclose(dataset.energy_range[0].value, 1)
    assert dataset.energy_range[0].unit == "TeV"

    mask_safe = safe_mask_maker.make_mask_energy_aeff_max(dataset)
    assert mask_safe.sum() == 4

    mask_safe = safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert mask_safe.sum() == 3

    mask_safe = safe_mask_maker.make_mask_energy_bkg_peak(dataset)
    assert mask_safe.sum() == 3
Code example #13
File: test_phase.py Project: gfiusa/gammapy
def test_run(observations, phase_bkg_maker):

    maker = SpectrumDatasetMaker()

    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV

    pos = SkyCoord("08h35m20.65525s", "-45d10m35.1545s", frame="icrs")
    radius = Angle(0.2, "deg")
    region = SphericalCircleSkyRegion(pos, radius)

    dataset_empty = SpectrumDataset.create(e_reco, e_true, region=region)

    obs = observations["111630"]
    dataset = maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)

    assert_allclose(dataset_on_off.acceptance, 0.1)
    assert_allclose(dataset_on_off.acceptance_off, 0.3)

    assert_allclose(dataset_on_off.counts.data.sum(), 28)
    assert_allclose(dataset_on_off.counts_off.data.sum(), 57)
Code example #14
File: run.py Project: QRemy/gammapy-benchmarks
def run_analysis_1d(target_dict):
    """Run spectral analysis for the selected target"""
    tag = target_dict["tag"]
    name = target_dict["name"]

    log.info(f"running 1d analysis, {tag}")
    path_res = Path(tag + "/results/")

    ra = target_dict["ra"]
    dec = target_dict["dec"]
    on_size = target_dict["on_size"]
    e_decorr = target_dict["e_decorr"]

    target_pos = SkyCoord(ra, dec, unit="deg", frame="icrs")
    on_radius = Angle(on_size * u.deg)
    containment_corr = True

    # Observations selection
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    mask = data_store.obs_table["TARGET_NAME"] == name
    obs_table = data_store.obs_table[mask]
    observations = data_store.get_observations(obs_table["OBS_ID"])

    if DEBUG is True:
        observations = [observations[0]]

    # Reflected regions background estimation
    on_region = CircleSkyRegion(center=target_pos, radius=on_radius)
    dataset_maker = SpectrumDatasetMaker(
        region=on_region,
        e_reco=E_RECO,
        e_true=E_RECO,
        containment_correction=containment_corr,
    )
    bkg_maker = ReflectedRegionsBackgroundMaker()
    safe_mask_masker = SafeMaskMaker(methods=["edisp-bias"], bias_percent=10)

    datasets = []

    for observation in observations:
        dataset = dataset_maker.run(observation, selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        datasets.append(dataset_on_off)

    # Fit spectrum
    model = PowerLawSpectralModel(
        index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=e_decorr * u.TeV
    )

    for dataset in datasets:
        dataset.model = model

    fit_joint = Fit(datasets)
    result_joint = fit_joint.run()

    parameters = model.parameters
    parameters.covariance = result_joint.parameters.covariance
    write_fit_summary(parameters, str(path_res / "results-summary-fit-1d.yaml"))

    # Flux points
    fpe = FluxPointsEstimator(datasets=datasets, e_edges=FLUXP_EDGES)
    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"]
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-1d.ecsv", format="ascii.ecsv"
    )
Code example #15
dataset_empty = SpectrumDataset.create(
    e_reco=e_reco, e_true=e_true, region=on_region
)
dataset_maker = SpectrumDatasetMaker()
phase_bkg_maker = PhaseBackgroundMaker(
    on_phase=on_phase_range, off_phase=off_phase_range
)
safe_mask_maker = SafeMaskMaker(
    methods=["aeff-default", "edisp-bias"], bias_percent=20
)

datasets = []

for obs in obs_list_vela:
    dataset = dataset_maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)
    dataset_on_off = safe_mask_maker.run(dataset_on_off, obs)
    datasets.append(dataset_on_off)


# Now let's take a look at the datasets we just created:

datasets[0].peek()


# Now we'll fit a model to the spectrum with the `Fit` class. First we load a power-law model with initial values for the index and the amplitude, and then we do a likelihood fit. The fit results are printed below.
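
# The fit step itself is truncated in this excerpt. Below is a minimal sketch of
# what it would look like, assuming the gammapy ~0.15/0.16 API used throughout
# these examples and the `datasets` list built above; the starting values for
# the power law are illustrative, not taken from the original notebook.

import astropy.units as u
from gammapy.modeling import Fit
from gammapy.modeling.models import PowerLawSpectralModel

spectral_model = PowerLawSpectralModel(
    index=2, amplitude=1e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV
)

# Assign the model to every ON/OFF dataset, then fit them jointly.
for dataset in datasets:
    dataset.models = spectral_model  # older releases use `dataset.model` instead

fit = Fit(datasets)
result = fit.run()
print(result)
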
Code example #16
crab_position = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")

# The ON region center is defined in the icrs frame. The angle is defined w.r.t. its axis.
rectangle = RectangleSkyRegion(
    center=crab_position, width=0.5 * u.deg, height=0.4 * u.deg, angle=0 * u.deg
)


bkg_maker = ReflectedRegionsBackgroundMaker(min_distance=0.1 * u.rad)

dataset_maker = SpectrumDatasetMaker(
    region=rectangle, e_reco=np.logspace(-1, 2, 30) * u.TeV
)

datasets = []

for obs in observations:
    dataset = dataset_maker.run(obs, selection=["counts"])
    dataset_on_off = bkg_maker.run(observation=obs, dataset=dataset)
    datasets.append(dataset_on_off)

m = Map.create(skydir=crab_position, width=(8, 8), proj="TAN")

_, ax, _ = m.plot(vmin=-1, vmax=0)

rectangle.to_pixel(ax.wcs).plot(ax=ax, color="black")

plot_spectrum_datasets_off_regions(datasets=datasets, ax=ax)
plt.show()