Example #1
    def from_stack(cls, maps, axis, meta=None):
        """Create flux points by stacking list of flux points.

        The first `FluxPoints` object in the list is taken as a reference to infer
        column names and units for the stacked object.

        Parameters
        ----------
        maps : list of `FluxMaps`
            List of maps to stack.
        axis : `MapAxis`
            New axis to create

        Returns
        -------
        flux_maps : `FluxMaps`
            Stacked flux maps along axis.
        """
        reference = maps[0]

        data = {}
        for quantity in reference.available_quantities:
            data[quantity] = Map.from_stack([_[quantity] for _ in maps],
                                            axis=axis)

        if meta is None:
            meta = reference.meta.copy()

        gti = GTI.from_stack([_.gti for _ in maps])

        return cls(data=data,
                   reference_model=reference.reference_model,
                   meta=meta,
                   gti=gti)
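
The stacking above is delegated to `Map.from_stack` with a newly introduced axis: each input map fills one bin of the new axis. A minimal, runnable sketch of that underlying call, assuming a toy spatial geometry and a hypothetical "dataset" axis (none of these values come from the original example):

import numpy as np
from gammapy.maps import Map, MapAxis, WcsGeom

# two aligned images; each one fills one bin of the new axis
geom = WcsGeom.create(npix=5)
m1 = Map.from_geom(geom, data=np.full(geom.data_shape, 1.0))
m2 = Map.from_geom(geom, data=np.full(geom.data_shape, 2.0))

# hypothetical non-spatial axis with one bin per input map
axis = MapAxis.from_nodes([0, 1], name="dataset")

stacked = Map.from_stack(maps=[m1, m2], axis=axis)
print(stacked.geom.axes["dataset"].nbin)  # 2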
Example #2
    def run(self, dataset):
        """Run adaptive smoothing on input MapDataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
            The input dataset (with at most one bin in energy).

        Returns
        -------
        images : `~gammapy.maps.Maps`
            Dict-like collection of smoothed `~gammapy.maps.WcsNDMap`; keys are:

                * 'counts'
                * 'background'
                * 'flux' (optional)
                * 'scales'
                * 'sqrt_ts'
        """
        energy_axis = self._get_energy_axis(dataset)

        results = []

        for energy_min, energy_max in progress_bar(
            energy_axis.iter_by_edges, desc="Energy bins"
        ):
            dataset_sliced = dataset.slice_by_energy(
                energy_min=energy_min, energy_max=energy_max, name=dataset.name
            )
            dataset_sliced.models = dataset.models
            result = self.estimate_maps(dataset_sliced)
            results.append(result)

        maps = Maps()

        for name in results[0].keys():
            maps[name] = Map.from_stack(
                maps=[_[name] for _ in results], axis_name="energy"
            )

        return maps
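
The final loop rebuilds energy cubes from the per-bin results by concatenating them along the existing energy axis with `Map.from_stack(..., axis_name="energy")`. A minimal, runnable sketch of that pattern, assuming toy geometries (the slices only need aligned spatial geometry and contiguous energy edges):

from gammapy.maps import Map, MapAxis, WcsGeom

# two single-bin energy slices with aligned spatial geometry and contiguous edges
axis_lo = MapAxis.from_energy_bounds("1 TeV", "3 TeV", nbin=1)
axis_hi = MapAxis.from_energy_bounds("3 TeV", "10 TeV", nbin=1)

m_lo = Map.from_geom(WcsGeom.create(npix=5, axes=[axis_lo]))
m_hi = Map.from_geom(WcsGeom.create(npix=5, axes=[axis_hi]))

# concatenate the slices along the existing "energy" axis
stacked = Map.from_stack(maps=[m_lo, m_hi], axis_name="energy")
print(stacked.geom.axes["energy"].nbin)  # 2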
Example #3
    def run(self, dataset):
        """
        Run TS map estimation.

        Requires a MapDataset with counts, exposure and background_model
        properly set to run.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input MapDataset.

        Returns
        -------
        maps : `~gammapy.estimators.FluxMaps`
            Flux maps container with the result maps. Keys are:

                * ts : delta TS map
                * sqrt_ts : sqrt(delta TS), or significance map
                * flux : flux map
                * flux_err : symmetric flux error map
                * flux_ul : flux upper limit map

        """
        dataset_models = dataset.models

        pad_width = self.estimate_pad_width(dataset=dataset)
        dataset = dataset.pad(pad_width, name=dataset.name)
        dataset = dataset.downsample(self.downsampling_factor, name=dataset.name)

        energy_axis = self._get_energy_axis(dataset=dataset)

        results = []

        for energy_min, energy_max in progress_bar(
                energy_axis.iter_by_edges, desc="Energy bins"
        ):
            sliced_dataset = dataset.slice_by_energy(
                energy_min=energy_min, energy_max=energy_max, name=dataset.name
            )

            if self.sum_over_energy_groups:
                sliced_dataset = sliced_dataset.to_image(name=dataset.name)

            sliced_dataset.models = dataset_models
            result = self.estimate_flux_map(sliced_dataset)
            results.append(result)

        maps = Maps()

        for name in self.selection_all:
            m = Map.from_stack(maps=[_[name] for _ in results], axis_name="energy")

            order = 0 if name == "niter" else 1
            m = m.upsample(
                factor=self.downsampling_factor,
                preserve_counts=False,
                order=order
            )

            maps[name] = m.crop(crop_width=pad_width)

        meta = {"n_sigma": self.n_sigma, "n_sigma_ul": self.n_sigma_ul}
        return FluxMaps(
            data=maps,
            reference_model=self.model,
            gti=dataset.gti,
            meta=meta
        )
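
The padding and downsampling applied before the energy loop are undone per output map by the upsample and crop calls at the end, so every result comes back on the input geometry. A minimal, runnable sketch of that round trip on a single map, with an illustrative pad width and downsampling factor (not values from the estimator):

import numpy as np
from gammapy.maps import Map, WcsGeom

geom = WcsGeom.create(npix=20)
m = Map.from_geom(geom, data=np.random.default_rng(0).random(geom.data_shape))

pad_width = 2
m_padded = m.pad(pad_width=pad_width)  # enlarge spatially to limit edge effects
m_coarse = m_padded.downsample(factor=2, preserve_counts=False)

# ... the per-bin estimation would run on the coarse map here ...

m_fine = m_coarse.upsample(factor=2, preserve_counts=False, order=1)
m_out = m_fine.crop(crop_width=pad_width)  # back on the input geometry
print(m_out.data.shape == m.data.shape)  # True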