Example #1
File: core.py Project: cdeil/gammapy
    def to_wcs_nd_map(self, energy_axis_mode='center'):
        """Convert to a `gammapy.maps.WcsNDMap`.

        No copy of the ``data`` or ``wcs`` object is made; this conversion is cheap.

        This is meant to help migrate code using `SkyCube`
        over to the new maps classes.
        """
        from gammapy.maps import WcsNDMap, WcsGeom, MapAxis

        if energy_axis_mode == 'center':
            energy = self.energies(mode='center')
            energy_axis = MapAxis.from_nodes(energy.value, unit=energy.unit)
        elif energy_axis_mode == 'edges':
            energy = self.energies(mode='edges')
            energy_axis = MapAxis.from_edges(energy.value, unit=energy.unit)
        else:
            raise ValueError('Invalid energy_axis_mode: {}'.format(energy_axis_mode))

        # Axis order in SkyCube: energy, lat, lon
        npix = (self.data.shape[2], self.data.shape[1])

        geom = WcsGeom(wcs=self.wcs, npix=npix, axes=[energy_axis])

        # TODO: change maps and SkyCube to have a unit attribute
        # For now, SkyCube is a mix of numpy array and quantity in `data`
        # and we just strip the unit here
        data = np.asarray(self.data)
        # unit = getattr(self.data, 'unit', None)

        return WcsNDMap(geom=geom, data=data)
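A minimal sketch of the two axis constructions the method switches on (the energy values below are illustrative, not from the source):

import astropy.units as u
from gammapy.maps import MapAxis

energy = [1, 2, 4] * u.TeV
# energy_axis_mode='center': values are interpreted as bin centers
axis_center = MapAxis.from_nodes(energy.value, unit=energy.unit)
# energy_axis_mode='edges': values are interpreted as bin boundaries (N edges -> N-1 bins)
axis_edges = MapAxis.from_edges(energy.value, unit=energy.unit)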
Example #2
def test_map_axis_single_bin():
    with pytest.raises(ValueError):
        _ = MapAxis.from_nodes([1])
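A single node defines no bin width, so the constructor rejects it. A sketch of the smallest valid axis (two nodes, as in Example #6 below):

from gammapy.maps import MapAxis

axis = MapAxis.from_nodes([1, 2], unit="", name="test")
assert axis.nbin == 2  # with node_type='center', one bin per node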
Example #3
def test_wcsgeom_squash():
    axis = MapAxis.from_nodes([1, 2, 3], name="test-axis")
    geom = WcsGeom.create(npix=(3, 3), axes=[axis])
    geom_squashed = geom.squash(axis_name="test-axis")
    assert geom_squashed.data_shape == (1, 3, 3)
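squash collapses the named axis to a single bin while leaving the spatial shape intact; a hypothetical follow-up check (our addition, using the name-based axis lookup seen in Example #8):

assert geom_squashed.axes["test-axis"].nbin == 1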
Example #4
def make_all_models():
    """Make an instance of each model, for testing."""
    yield Model.create("ConstantSpatialModel", "spatial")
    map_constantmodel = Map.create(npix=(10, 20), unit="sr-1")
    yield Model.create("TemplateSpatialModel", "spatial", map=map_constantmodel)
    yield Model.create(
        "DiskSpatialModel", "spatial", lon_0="1 deg", lat_0="2 deg", r_0="3 deg"
    )
    yield Model.create("gauss", "spatial", lon_0="1 deg", lat_0="2 deg", sigma="3 deg")
    yield Model.create("PointSpatialModel", "spatial", lon_0="1 deg", lat_0="2 deg")
    yield Model.create(
        "ShellSpatialModel",
        "spatial",
        lon_0="1 deg",
        lat_0="2 deg",
        radius="3 deg",
        width="4 deg",
    )
    yield Model.create("ConstantSpectralModel", "spectral", const="99 cm-2 s-1 TeV-1")
    yield Model.create(
        "CompoundSpectralModel",
        "spectral",
        model1=Model.create("PowerLawSpectralModel", "spectral"),
        model2=Model.create("PowerLawSpectralModel", "spectral"),
        operator=np.add,
    )
    yield Model.create("PowerLawSpectralModel", "spectral")
    yield Model.create("PowerLawNormSpectralModel", "spectral")
    yield Model.create("PowerLaw2SpectralModel", "spectral")
    yield Model.create("ExpCutoffPowerLawSpectralModel", "spectral")
    yield Model.create("ExpCutoffPowerLawNormSpectralModel", "spectral")
    yield Model.create("ExpCutoffPowerLaw3FGLSpectralModel", "spectral")
    yield Model.create("SuperExpCutoffPowerLaw3FGLSpectralModel", "spectral")
    yield Model.create("SuperExpCutoffPowerLaw4FGLDR3SpectralModel", "spectral")
    yield Model.create("SuperExpCutoffPowerLaw4FGLSpectralModel", "spectral")
    yield Model.create("LogParabolaSpectralModel", "spectral")
    yield Model.create("LogParabolaNormSpectralModel", "spectral")
    yield Model.create(
        "TemplateSpectralModel", "spectral", energy=[1, 2] * u.cm, values=[3, 4] * u.cm
    )  # TODO: add unit validation?
    yield Model.create(
        "PiecewiseNormSpectralModel",
        "spectral",
        energy=[1, 2] * u.cm,
        norms=[3, 4] * u.cm,
    )
    yield Model.create("GaussianSpectralModel", "spectral")
    # TODO: yield Model.create("EBLAbsorptionNormSpectralModel")
    # TODO: yield Model.create("NaimaSpectralModel")
    # TODO: yield Model.create("ScaleSpectralModel")
    yield Model.create("ConstantTemporalModel", "temporal")
    yield Model.create("LinearTemporalModel", "temporal")
    yield Model.create("PowerLawTemporalModel", "temporal")
    yield Model.create("SineTemporalModel", "temporal")
    yield Model.create("LightCurveTemplateTemporalModel", "temporal", Table())
    yield Model.create(
        "SkyModel",
        spatial_model=Model.create("ConstantSpatialModel", "spatial"),
        spectral_model=Model.create("PowerLawSpectralModel", "spectral"),
    )
    m1 = Map.create(
        npix=(10, 20, 30), axes=[MapAxis.from_nodes([1, 2] * u.TeV, name="energy")]
    )
    yield Model.create("TemplateSpatialModel", "spatial", map=m1)
    m2 = Map.create(
        npix=(10, 20, 30), axes=[MapAxis.from_edges([1, 2] * u.TeV, name="energy")]
    )
    yield Model.create("TemplateNPredModel", map=m2)
Example #5
    def run_region(self, kr, lon, lat, radius):
        #    TODO: for now we have to read/create the allsky maps in each job
        #    because we can't pickle <functools._lru_cache_wrapper object>;
        #    move this back to __init__ when fixed

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
        )
        exposure_hpx.unit = "cm2 s"

        # iem
        iem_fermi_extra = Map.read("data/gll_iem_v06_extrapolated.fits")
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyDiffuseCube(
            iem_fermi_extra, norm=1.1, tilt=0.03, name="iem_extrapolated"
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            coordsys="GAL",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord(
            {"skycoord": self.events.radec, "energy": self.events.energy}
        )

        axis = MapAxis.from_nodes(
            counts.geom.axes[0].center, name="energy", unit="GeV", interp="log"
        )
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = counts.geom.get_coord()

        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(
            self.psf, counts.geom, max_radius=self.psf_margin * u.deg
        )

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
        )
        mask_fermi = WcsNDMap(counts.geom, mask)

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(background_total)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = SkyModels(FHL3_roi)

        # Dataset
        dataset = MapDataset(
            models=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            background_model=background_model,
            mask_fit=mask_fermi,
            name="3FHL_ROI_num" + str(kr),
        )
        cat_stat = dataset.stat_sum()

        datasets = Datasets([dataset])
        fit = Fit(datasets)
        results = fit.run(optimize_opts=self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.write(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
            np.save(
                self.resdir / f"3FHL_ROI_num{kr}_covariance.npy",
                results.parameters.covariance,
            )
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            for model in FHL3_roi:
                if (
                    self.FHL3[model.name].data["ROI_num"] == kr
                    and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
                ):
                    flux_points = FluxPointsEstimator(
                        datasets=datasets,
                        e_edges=self.El_flux,
                        source=model.name,
                        sigma_ul=2.0,
                    ).run()
                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #6
def test_axis():
    return MapAxis.from_nodes([1, 2], unit="", name="test")
Example #7
    {
        "energy": MapAxis.from_energy_bounds(1, 10, 100, "TeV"),
        "rad": None,
        "energy_shape": (100,),
        "psf_energy": 1428.893959,
        "rad_shape": (144,),
        "psf_rad": 0.0015362848,
        "psf_exposure": 4.723409e+12,
        "psf_value_shape": (100, 144),
        "psf_value": 3719.21488,
    },
    {
        "energy": None,
        "rad": MapAxis.from_nodes(np.arange(0, 2, 0.002), unit="deg", name="theta"),
        "energy_shape": (32,),
        "psf_energy": 865.9643,
        "rad_shape": (1000,),
        "psf_rad": 0.000524,
        "psf_exposure": 3.14711e12,
        "psf_value_shape": (32, 1000),
        "psf_value": 25888.5047,
    },
    {
        "energy": MapAxis.from_energy_bounds(1, 10, 100, "TeV"),
Example #8
    def run_region(self, kr, lon, lat, radius):
        #    TODO: for now we have to read/create the allsky maps in each job
        #    because we can't pickle <functools._lru_cache_wrapper object>;
        #    move this back to __init__ when fixed

        log.info(f"ROI {kr}: loading data")

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # psf
        psf_map = PSFMap.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz",
            format="gtpsf")
        # reduce size of the PSF
        axis = psf_map.psf_map.geom.axes["rad"].center.to_value(u.deg)
        indmax = np.argmin(np.abs(self.psf_margin - axis))
        psf_map = psf_map.slice_by_idx(slices={"rad": slice(0, indmax)})

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyModel(
            PowerLawNormSpectralModel(norm=1.1, tilt=0.03),
            TemplateSpatialModel(iem_fermi_extra, normalize=False),
            name="iem_extrapolated",
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy_true",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # Energy Dispersion
        edisp = EDispKernelMap.from_diagonal_response(
            energy_axis_true=axis, energy_axis=self.energy_axis)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max(
        ) > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)
        mask_safe_fermi = WcsNDMap(counts.geom, np.ones(mask.shape, dtype=bool))

        log.info(f"ROI {kr}: pre-computing diffuse")

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem,
            exposure=exposure,
            psf=psf_map.get_psf_kernel(geom),
            edisp=edisp.get_edisp_kernel(),
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp.get_edisp_kernel())
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso

        # Dataset
        dataset = MapDataset(
            counts=counts,
            exposure=exposure,
            background=background_total,
            psf=psf_map,
            edisp=edisp,
            mask_fit=mask_fermi,
            mask_safe=mask_safe_fermi,
            name=dataset_name,
        )

        background_model = FoVBackgroundModel(dataset_name=dataset_name)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models(FHL3_roi + [background_model])
        dataset.models = model_total

        cat_stat = dataset.stat_sum()
        datasets = Datasets([dataset])

        log.info(f"ROI {kr}: running fit")
        fit = Fit(**self.fit_opts)
        results = fit.run(datasets=datasets)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            filedata = Path(self.resdir) / f"3FHL_ROI_num{kr}_datasets.yaml"
            filemodel = Path(self.resdir) / f"3FHL_ROI_num{kr}_models.yaml"
            datasets.write(filedata, filemodel, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            log.info(f"ROI {kr}: running flux points")
            for model in FHL3_roi:
                if (
                    self.FHL3[model.name].data["ROI_num"] == kr
                    and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
                ):
                    print(model.name)
                    flux_points = FluxPointsEstimator(
                        energy_edges=self.El_flux,
                        source=model.name,
                        n_sigma_ul=2,
                        selection_optional=["ul"],
                    ).run(datasets=datasets)
                    flux_points.meta["sqrt_ts_threshold_ul"] = 1

                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #9
    "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
# Unit is not stored in the file, set it manually
exposure_hpx.unit = "cm2 s"
print(exposure_hpx.geom)
print(exposure_hpx.geom.axes[0])

# In[ ]:

exposure_hpx.plot()

# In[ ]:

# For exposure, we choose a geometry with node_type='center',
# whereas for counts it was node_type='edge'
axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                          name="energy",
                          unit="GeV",
                          interp="log")
geom = WcsGeom(wcs=counts.geom.wcs, npix=counts.geom.npix, axes=[axis])

coord = counts.geom.get_coord()
data = exposure_hpx.interp_by_coord(coord)

# In[ ]:

exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)
print(exposure.geom)
print(exposure.geom.axes[0])

# In[ ]:

# Exposure is almost constant across the field of view
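The node_type comment above is the key difference between the two constructors; a minimal sketch (values illustrative):

import numpy as np
from gammapy.maps import MapAxis

edges = np.array([10.0, 30.0, 100.0])      # 3 edges -> 2 bins (node_type='edge')
centers = np.sqrt(edges[:-1] * edges[1:])  # geometric centers, matching interp="log"

axis_edges = MapAxis.from_edges(edges, unit="GeV", name="energy", interp="log")
axis_centers = MapAxis.from_nodes(centers, unit="GeV", name="energy", interp="log")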
Example #10
     "psf_value": 4369.96391 * u.Unit("sr-1"),
 },
 {
     "energy": MapAxis.from_energy_bounds(1, 10, 100, "TeV", name="energy_true"),
     "rad": None,
     "energy_shape": 100,
     "psf_energy": 1.428893959,
     "rad_shape": 144,
     "psf_rad": 0.0015362848,
     "psf_exposure": 4.723409e12 * u.Unit("cm2 s"),
     "psf_value_shape": (100, 144),
     "psf_value": 3714.303683 * u.Unit("sr-1"),
 },
 {
     "energy": None,
     "rad": MapAxis.from_nodes(np.arange(0, 2, 0.002), unit="deg", name="rad"),
     "energy_shape": 32,
     "psf_energy": 0.8659643,
     "rad_shape": 1000,
     "psf_rad": 0.000524,
     "psf_exposure": 3.14711e12 * u.Unit("cm2 s"),
     "psf_value_shape": (32, 1000),
     "psf_value": 7.902016 * u.Unit("deg-2"),
 },
 {
     "energy": MapAxis.from_energy_bounds(1, 10, 100, "TeV", name="energy_true"),
     "rad": MapAxis.from_nodes(np.arange(0, 2, 0.002), unit="deg", name="rad"),
     "energy_shape": 100,
     "psf_energy": 1.428893959,
     "rad_shape": 1000,
     "psf_rad": 0.000524,
Example #11
def test_interpolate_map_dataset():
    energy = MapAxis.from_energy_bounds("1 TeV",
                                        "300 TeV",
                                        nbin=5,
                                        name="energy")
    energy_true = MapAxis.from_nodes(np.logspace(-1, 3, 20),
                                     name="energy_true",
                                     interp="log",
                                     unit="TeV")

    # make dummy map IRFs
    geom_allsky = WcsGeom.create(npix=(5, 3),
                                 proj="CAR",
                                 binsz=60,
                                 axes=[energy],
                                 skydir=(0, 0))
    geom_allsky_true = geom_allsky.drop("energy").to_cube([energy_true])

    # background
    geom_background = WcsGeom.create(skydir=(0, 0),
                                     width=(5, 5),
                                     binsz=0.2 * u.deg,
                                     axes=[energy])
    value = 30
    bkg_map = Map.from_geom(geom_background, unit="")
    bkg_map.data = value * np.ones(bkg_map.data.shape)

    # effective area - with a gradient that also depends on energy
    aeff_map = Map.from_geom(geom_allsky_true, unit="cm2 s")
    ra_arr = np.arange(aeff_map.data.shape[1])
    dec_arr = np.arange(aeff_map.data.shape[2])
    for i in np.arange(aeff_map.data.shape[0]):
        aeff_map.data[i, :, :] = (
            (i + 1) * 10 * np.meshgrid(dec_arr, ra_arr)[0] +
            10 * np.meshgrid(dec_arr, ra_arr)[1] + 10)
    aeff_map.meta["TELESCOP"] = "HAWC"

    # psf map
    width = 0.2 * u.deg
    rad_axis = MapAxis.from_nodes(np.linspace(0, 2, 50),
                                  name="rad",
                                  unit="deg")
    psfMap = PSFMap.from_gauss(energy_true, rad_axis, width)

    # edispmap
    edispmap = EDispKernelMap.from_gauss(energy,
                                         energy_true,
                                         sigma=0.1,
                                         bias=0.0,
                                         geom=geom_allsky)

    # events and gti
    nr_ev = 10
    ev_t = Table()
    gti_t = Table()

    ev_t["EVENT_ID"] = np.arange(nr_ev)
    ev_t["TIME"] = nr_ev * [
        Time("2011-01-01 00:00:00", scale="utc", format="iso")
    ]
    ev_t["RA"] = np.linspace(-1, 1, nr_ev) * u.deg
    ev_t["DEC"] = np.linspace(-1, 1, nr_ev) * u.deg
    ev_t["ENERGY"] = np.logspace(0, 2, nr_ev) * u.TeV

    gti_t["START"] = [Time("2010-12-31 00:00:00", scale="utc", format="iso")]
    gti_t["STOP"] = [Time("2011-01-02 00:00:00", scale="utc", format="iso")]

    events = EventList(ev_t)
    gti = GTI(gti_t)

    # define observation
    obs = Observation(
        obs_id=0,
        obs_info={
            "RA_PNT": 0.0,
            "DEC_PNT": 0.0
        },
        gti=gti,
        aeff=aeff_map,
        edisp=edispmap,
        psf=psfMap,
        bkg=bkg_map,
        events=events,
        obs_filter=None,
    )

    # define analysis geometry
    geom_target = WcsGeom.create(skydir=(0, 0),
                                 width=(5, 5),
                                 binsz=0.1 * u.deg,
                                 axes=[energy])

    maker = MapDatasetMaker(
        selection=["exposure", "counts", "background", "edisp", "psf"])
    dataset = MapDataset.create(geom=geom_target,
                                energy_axis_true=energy_true,
                                rad_axis=rad_axis,
                                name="test")
    dataset = maker.run(dataset, obs)

    # test counts
    assert dataset.counts.data.sum() == nr_ev

    # test background
    assert np.floor(np.sum(dataset.npred_background().data)) == np.sum(
        bkg_map.data)
    coords_bg = {
        "skycoord": SkyCoord("0 deg", "0 deg"),
        "energy": energy.center[0]
    }
    assert_allclose(dataset.npred_background().get_by_coord(coords_bg)[0],
                    7.5,
                    atol=1e-4)

    # test effective area
    coords_aeff = {
        "skycoord": SkyCoord("0 deg", "0 deg"),
        "energy_true": energy_true.center[0],
    }
    assert_allclose(
        aeff_map.get_by_coord(coords_aeff)[0],
        dataset.exposure.interp_by_coord(coords_aeff)[0],
        atol=1e-3,
    )

    # test edispmap
    pdfmatrix_preinterp = edispmap.get_edisp_kernel(
        SkyCoord("0 deg", "0 deg")).pdf_matrix
    pdfmatrix_postinterp = dataset.edisp.get_edisp_kernel(
        SkyCoord("0 deg", "0 deg")).pdf_matrix
    assert_allclose(pdfmatrix_preinterp, pdfmatrix_postinterp, atol=1e-7)

    # test psfmap
    geom_psf = geom_target.drop("energy").to_cube([energy_true])
    psfkernel_preinterp = psfMap.get_psf_kernel(
        position=SkyCoord("0 deg", "0 deg"), geom=geom_psf,
        max_radius=2 * u.deg).data
    psfkernel_postinterp = dataset.psf.get_psf_kernel(
        position=SkyCoord("0 deg", "0 deg"), geom=geom_psf,
        max_radius=2 * u.deg).data
    assert_allclose(psfkernel_preinterp, psfkernel_postinterp, atol=1e-4)
Example #12
    def test_init_error(self):
        with pytest.raises(ValueError):
            NDDataArray(
                axes=[MapAxis.from_nodes([1, 3, 6], name="x")],
                data=np.arange(8).reshape(4, 2),
            )
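The ValueError comes from the shape mismatch: a 3-node axis paired with data of shape (4, 2). A sketch of a consistent construction (assuming the data shape only has to match the axis shapes, with NDDataArray imported as in the test module):

import numpy as np
from gammapy.maps import MapAxis

# data of shape (3,) matches the single 3-node axis
nd = NDDataArray(axes=[MapAxis.from_nodes([1, 3, 6], name="x")], data=np.arange(3))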
Example #13
def axis_offset():
    return MapAxis.from_nodes([0.2, 0.3, 0.4, 0.5] * u.deg, name="offset")
Example #14
def axis_x():
    return MapAxis.from_nodes([1, 3, 6], name="x")