Example 1
def test_maps_from_geom():
    geom = WcsGeom.create(npix=5)
    names = ["map1", "map2", "map3"]
    kwargs_list = [
        {
            "unit": "cm2s",
            "dtype": "float64"
        },
        {
            "dtype": "bool"
        },
        {
            "data": np.arange(25).reshape(5, 5)
        },
    ]

    maps = Maps.from_geom(geom, names)
    maps_kwargs = Maps.from_geom(geom, names, kwargs_list=kwargs_list)

    assert len(maps) == 3
    assert maps["map1"].geom == geom
    assert maps["map2"].unit == ""
    assert maps["map3"].data.dtype == np.float32
    assert len(maps_kwargs) == 3
    assert maps_kwargs["map1"].unit == "cm2s"
    assert maps_kwargs["map1"].data.dtype == np.float64
    assert maps_kwargs["map2"].data.dtype == np.bool
    assert maps_kwargs["map3"].data[2, 2] == 12
Example 2
def test_maps_read_write(map_dictionary):
    maps = Maps(**map_dictionary)
    maps.write("test.fits", overwrite=True)
    new_maps = Maps.read("test.fits")

    assert new_maps.geom == maps.geom
    assert len(new_maps) == 2
    assert_allclose(new_maps["map1"].data, 1)
    assert_allclose(new_maps["map2"].data, 2)
Example 3
def test_maps_wrong_addition(map_dictionary):
    maps = Maps(**map_dictionary)

    # Test pop method
    some_map = maps.pop("map2")
    assert len(maps) == 1
    assert_allclose(some_map.data, 2)

    # Test incorrect map addition
    with pytest.raises(ValueError):
        maps["map3"] = maps["map1"].sum_over_axes()
Example 4
def test_maps(map_dictionary):
    maps = Maps(**map_dictionary)

    maps["map3"] = maps["map1"].copy()

    assert maps.geom.npix[0] == 10
    assert len(maps) == 3
    assert_allclose(maps["map1"].data, 1)
    assert_allclose(maps["map2"].data, 2)
    assert_allclose(maps["map3"].data, 1)
    assert "map3" in maps.__str__()
Example 5
    def lightcurve(self):
        """Lightcurve (`~gammapy.estimators.FluxPoints`)."""
        time_axis = self.data["time_axis"]
        tag = "Flux_History"

        energy_axis = MapAxis.from_energy_edges(self.energy_range)
        geom = RegionGeom.create(region=self.position, axes=[energy_axis, time_axis])

        names = ["flux", "flux_errp", "flux_errn", "flux_ul"]
        maps = Maps.from_geom(geom=geom, names=names)

        maps["flux"].quantity = self.data[tag]
        maps["flux_errp"].quantity = self.data[f"Unc_{tag}"][:, 1]
        maps["flux_errn"].quantity = -self.data[f"Unc_{tag}"][:, 0]
        maps["flux_ul"].quantity = compute_flux_points_ul(
            maps["flux"].quantity, maps["flux_errp"].quantity
        )
        is_ul = np.isnan(maps["flux_errn"])
        maps["flux_ul"].data[~is_ul] = np.nan

        return FluxPoints.from_maps(
            maps=maps,
            sed_type="flux",
            reference_model=self.sky_model(),
            meta=self.flux_points_meta.copy(),
        )
Example 6
def test_maps_region():
    axis = MapAxis.from_edges([1, 2, 3, 4], name="axis", unit="cm")
    map1 = RegionNDMap.create(region=None, axes=[axis])
    map1.data = 1
    map2 = RegionNDMap.create(region=None, axes=[axis])

    maps = Maps(map1=map1, map2=map2)

    assert len(maps) == 2
    assert_allclose(maps["map1"], 1)
Example 7
def region_map_flux_estimate():
    axis = MapAxis.from_energy_edges((0.1, 1.0, 10.0), unit="TeV")
    geom = RegionGeom.create("galactic;circle(0, 0, 0.1)", axes=[axis])

    maps = Maps.from_geom(
        geom=geom,
        names=["norm", "norm_err", "norm_errn", "norm_errp", "norm_ul"])

    maps["norm"].data = np.array([1.0, 1.0])
    maps["norm_err"].data = np.array([0.1, 0.1])
    maps["norm_errn"].data = np.array([0.2, 0.2])
    maps["norm_errp"].data = np.array([0.15, 0.15])
    maps["norm_ul"].data = np.array([2.0, 2.0])
    return maps
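
Downstream code typically wraps the norm maps from this fixture into a FluxMaps container together with a reference model. A short, hypothetical usage sketch (the power-law reference model is only an illustration):

from gammapy.estimators import FluxMaps
from gammapy.modeling.models import PowerLawSpectralModel, SkyModel

# Wrap the norm maps into a FluxMaps object with an illustrative reference model.
maps = region_map_flux_estimate()
reference = SkyModel(spectral_model=PowerLawSpectralModel(index=2))
flux_maps = FluxMaps(data=maps, reference_model=reference)
print(flux_maps.dnde)  # norm maps converted to differential flux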
Example 8
    def lightcurve(self, interval="1-year"):
        """Lightcurve (`~gammapy.estimators.FluxPoints`).

        Parameters
        ----------
        interval : {'1-year', '2-month'}
            Time interval of the lightcurve. Default is '1-year'.
            Note that '2-month' is not available for all catalogue versions.
        """

        if interval == "1-year":
            tag = "Flux_History"
            time_axis = self.data["time_axis"]
            tag_sqrt_ts = "Sqrt_TS_History"
        elif interval == "2-month":
            tag = "Flux2_History"
            if tag not in self.data:
                raise ValueError(
                    "Only '1-year' interval is available for this catalogue version"
                )

            time_axis = self.data["time_axis_2"]
            tag_sqrt_ts = "Sqrt_TS2_History"
        else:
            raise ValueError("Time intervals available are '1-year' or '2-month'")

        energy_axis = MapAxis.from_energy_edges([50, 300000] * u.MeV)
        geom = RegionGeom.create(region=self.position, axes=[energy_axis, time_axis])

        names = ["flux", "flux_errp", "flux_errn", "flux_ul", "ts"]
        maps = Maps.from_geom(geom=geom, names=names)

        maps["flux"].quantity = self.data[tag]
        maps["flux_errp"].quantity = self.data[f"Unc_{tag}"][:, 1]
        maps["flux_errn"].quantity = -self.data[f"Unc_{tag}"][:, 0]
        maps["flux_ul"].quantity = compute_flux_points_ul(
            maps["flux"].quantity, maps["flux_errp"].quantity
        )
        maps["ts"].quantity = self.data[tag_sqrt_ts] ** 2

        return FluxPoints.from_maps(
            maps=maps,
            sed_type="flux",
            reference_model=self.sky_model(),
            meta=self.flux_points.meta.copy(),
        )
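
A hypothetical call site for the method above, assuming the 4FGL catalogue file is available (for example through $GAMMAPY_DATA):

from gammapy.catalog import SourceCatalog4FGL

catalog = SourceCatalog4FGL()
source = catalog["PKS 2155-304"]
lightcurve = source.lightcurve(interval="1-year")
print(lightcurve.to_table(sed_type="flux", format="lightcurve"))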
Example 9
    def to_maps(self, sed_type="likelihood"):
        """Return maps in a given SED type.

        Parameters
        ----------
        sed_type : {"likelihood", "dnde", "e2dnde", "flux", "eflux"}
            SED type to convert to. Default is "likelihood".

        Returns
        -------
        maps : `Maps`
            Maps object containing the requested maps.
        """
        maps = Maps()

        for quantity in self.all_quantities(sed_type=sed_type):
            m = getattr(self, quantity, None)
            if m is not None:
                maps[quantity] = m

        return maps
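
An illustrative round trip through to_maps, reading flux points from a FITS file (the filename is a placeholder, not a file referenced by these excerpts):

from gammapy.estimators import FluxPoints

flux_points = FluxPoints.read("flux_points.fits")  # placeholder filename
maps = flux_points.to_maps(sed_type="dnde")
flux_points_new = FluxPoints.from_maps(
    maps=maps, sed_type="dnde", reference_model=flux_points.reference_model
)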
Example 10
    def run(self, dataset):
        """Run adaptive smoothing on input MapDataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
            The input dataset (with at most one energy bin).

        Returns
        -------
        images : dict of `~gammapy.maps.WcsNDMap`
            Smoothed images; keys are:
                * 'counts'
                * 'background'
                * 'flux' (optional)
                * 'scales'
                * 'sqrt_ts'.
        """
        energy_axis = self._get_energy_axis(dataset)

        results = []

        for energy_min, energy_max in progress_bar(
            energy_axis.iter_by_edges, desc="Energy bins"
        ):
            dataset_sliced = dataset.slice_by_energy(
                energy_min=energy_min, energy_max=energy_max, name=dataset.name
            )
            dataset_sliced.models = dataset.models
            result = self.estimate_maps(dataset_sliced)
            results.append(result)

        maps = Maps()

        for name in results[0].keys():
            maps[name] = Map.from_stack(
                maps=[_[name] for _ in results], axis_name="energy"
            )

        return maps
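
An illustrative way to configure and run the estimator above on a MapDataset; the scales and threshold arguments are assumptions about typical usage, not taken from these excerpts:

import astropy.units as u
from gammapy.estimators import ASmoothMapEstimator

# Assumed configuration: smoothing scales as angular quantities and a
# significance threshold; 'dataset' is a MapDataset prepared elsewhere.
scales = u.Quantity([0.1, 0.2, 0.4], unit="deg")
estimator = ASmoothMapEstimator(scales=scales, threshold=3)
images = estimator.run(dataset)
print(images["sqrt_ts"])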
Example 11
    def from_hdulist(cls, hdulist, hdu_bands=None, sed_type=None):
        """Create flux map dataset from list of HDUs.

        Parameters
        ----------
        hdulist : `~astropy.io.fits.HDUList`
            List of HDUs.
        hdu_bands : str
            Name of the HDU with the BANDS table. Default is 'BANDS'.
            If set to None, each map should have its own BANDS HDU.
        sed_type : {"dnde", "flux", "e2dnde", "eflux", "likelihood"}
            SED type.

        Returns
        -------
        flux_maps : `~gammapy.estimators.FluxMaps`
            Flux maps object.
        """
        maps = Maps.from_hdulist(hdulist=hdulist, hdu_bands=hdu_bands)

        if sed_type is None:
            sed_type = hdulist[0].header.get("SED_TYPE", None)

        filename = hdulist[0].header.get("MODEL", None)

        if filename:
            reference_model = Models.read(filename)[0]
        else:
            reference_model = None

        if "GTI" in hdulist:
            gti = GTI(Table.read(hdulist["GTI"]))
        else:
            gti = None

        return cls.from_maps(maps=maps,
                             sed_type=sed_type,
                             reference_model=reference_model,
                             gti=gti)
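
An illustrative use of from_hdulist with a FITS file previously written by a FluxMaps object ("flux_maps.fits" is a placeholder filename):

from astropy.io import fits
from gammapy.estimators import FluxMaps

hdulist = fits.open("flux_maps.fits")
flux_maps = FluxMaps.from_hdulist(hdulist, sed_type="dnde")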
Example 12
    def from_table(cls,
                   table,
                   sed_type=None,
                   format="gadf-sed",
                   reference_model=None,
                   gti=None):
        """Create flux points from a table. The table column names must be consistent with the
        sed_type.

        Parameters
        ----------
        table : `~astropy.table.Table`
            Table.
        sed_type : {"dnde", "flux", "eflux", "e2dnde", "likelihood"}
            SED type.
        format : {"gadf-sed", "lightcurve", "profile"}
            Table format.
        reference_model : `SpectralModel`
            Reference spectral model.
        gti : `GTI`
            Good time intervals.

        Returns
        -------
        flux_points : `FluxPoints`
            Flux points
        """
        table = table_standardise_units_copy(table)

        if sed_type is None:
            sed_type = table.meta.get("SED_TYPE", None)

        if sed_type is None:
            sed_type = cls._guess_sed_type(table.colnames)

        if sed_type is None:
            raise ValueError("Specifying the sed type is required")

        if sed_type == "likelihood":
            table = cls._convert_loglike_columns(table)
            if reference_model is None:
                reference_model = TemplateSpectralModel(
                    energy=flat_if_equal(table["e_ref"].quantity),
                    values=flat_if_equal(table["ref_dnde"].quantity),
                )

        maps = Maps()
        table.meta.setdefault("SED_TYPE", sed_type)

        for name in cls.all_quantities(sed_type=sed_type):
            if name in table.colnames:
                maps[name] = RegionNDMap.from_table(table=table,
                                                    colname=name,
                                                    format=format)

        meta = cls._get_meta_gadf(table)
        return cls.from_maps(
            maps=maps,
            reference_model=reference_model,
            meta=meta,
            sed_type=sed_type,
            gti=gti,
        )
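
A hypothetical construction of flux points from a hand-made table in "dnde" SED type; the column names follow the gadf-sed convention expected above, and the values are purely illustrative:

import astropy.units as u
from astropy.table import Table
from gammapy.estimators import FluxPoints
from gammapy.modeling.models import PowerLawSpectralModel

table = Table()
table["e_ref"] = [1.0, 3.0, 10.0] * u.TeV
table["dnde"] = [1e-11, 1e-12, 5e-14] * u.Unit("cm-2 s-1 TeV-1")
table["dnde_err"] = [1e-12, 1e-13, 1e-14] * u.Unit("cm-2 s-1 TeV-1")

flux_points = FluxPoints.from_table(
    table, sed_type="dnde", reference_model=PowerLawSpectralModel()
)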
Example 13
    def run(self, dataset):
        """
        Run TS map estimation.

        Requires a MapDataset with counts, exposure and background_model
        properly set to run.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input MapDataset.

        Returns
        -------
        flux_maps : `~gammapy.estimators.FluxMaps`
            Flux maps object. Available quantities are:

                * ts : delta TS map
                * sqrt_ts : sqrt(delta TS), or significance map
                * flux : flux map
                * flux_err : symmetric error map
                * flux_ul : upper limit map

        """
        dataset_models = dataset.models

        pad_width = self.estimate_pad_width(dataset=dataset)
        dataset = dataset.pad(pad_width, name=dataset.name)
        dataset = dataset.downsample(self.downsampling_factor,
                                     name=dataset.name)

        energy_axis = self._get_energy_axis(dataset=dataset)

        results = []

        for energy_min, energy_max in progress_bar(energy_axis.iter_by_edges,
                                                   desc="Energy bins"):
            sliced_dataset = dataset.slice_by_energy(energy_min=energy_min,
                                                     energy_max=energy_max,
                                                     name=dataset.name)

            if self.sum_over_energy_groups:
                sliced_dataset = sliced_dataset.to_image(name=dataset.name)

            sliced_dataset.models = dataset_models
            result = self.estimate_flux_map(sliced_dataset)
            results.append(result)

        maps = Maps()

        for name in self.selection_all:
            m = Map.from_images(images=[_[name] for _ in results])

            order = 0 if name == "niter" else 1
            m = m.upsample(factor=self.downsampling_factor,
                           preserve_counts=False,
                           order=order)

            maps[name] = m.crop(crop_width=pad_width)

        meta = {"n_sigma": self.n_sigma, "n_sigma_ul": self.n_sigma_ul}
        return FluxMaps(data=maps,
                        reference_model=self.model,
                        gti=dataset.gti,
                        meta=meta)
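
An illustrative call of the estimator above with its default settings; the dataset filename is a placeholder:

from gammapy.datasets import MapDataset
from gammapy.estimators import TSMapEstimator

dataset = MapDataset.read("dataset.fits")  # placeholder filename
estimator = TSMapEstimator()
flux_maps = estimator.run(dataset)
print(flux_maps.sqrt_ts)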