Example #1
File: ring.py  Project: renlliang3/gammapy
    def make_cubes(self, dataset):
        """Make acceptance, off acceptance, off counts cubes

        Parameters
        ----------
        dataset : `~gammapy.cube.MapDataset`
            Input map dataset.

        Returns
        -------
        cubes : dict of `~numpy.ndarray`
            Dictionary containing ``counts_off``, ``acceptance`` and ``acceptance_off`` cubes.
        """
        counts = dataset.counts
        background = dataset.background_model.map
        kernels = self.kernels(counts)

        if self.exclusion_mask is not None:
            # reproject the exclusion mask onto the counts geometry
            coords = counts.geom.get_coord()
            data = self.exclusion_mask.get_by_coord(coords)
            exclusion = Map.from_geom(geom=counts.geom, data=data)
        else:
            # no exclusion mask given: keep every pixel
            data = np.ones(counts.geom.data_shape, dtype=bool)
            exclusion = Map.from_geom(geom=counts.geom, data=data)

        cubes = {}
        # off counts and off acceptance: mask out the exclusion regions,
        # then convolve with each ring kernel (one plane per kernel, axis 2)
        cubes["counts_off"] = scale_cube(
            (counts.data * exclusion.data)[0, Ellipsis], kernels
        )
        cubes["acceptance_off"] = scale_cube(
            (background.data * exclusion.data)[0, Ellipsis], kernels
        )

        # on acceptance: background convolved with a tophat of radius theta,
        # replicated along the scale axis to match the ring kernels
        scale = background.geom.pixel_scales[0].to("deg")
        theta = self.theta * scale
        tophat = Tophat2DKernel(theta.value)
        tophat.normalize("peak")
        acceptance = background.convolve(tophat.array)
        acceptance_data = acceptance.data[0, Ellipsis]
        cubes["acceptance"] = np.repeat(
            acceptance_data[Ellipsis, np.newaxis], len(kernels), axis=2
        )

        return cubes
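
The method above belongs to gammapy's adaptive ring background maker (ring.py, pre-1.0 API). A minimal driving sketch, assuming `AdaptiveRingBackgroundMaker` is importable from `gammapy.cube` and that `dataset` and `exclusion_mask` have been prepared elsewhere; the parameter values are placeholders, not recommendations:

    # Hedged sketch: `dataset` (a MapDataset) and `exclusion_mask` (a boolean
    # exclusion Map on a compatible geometry) are assumed to exist already.
    from gammapy.cube import AdaptiveRingBackgroundMaker

    maker = AdaptiveRingBackgroundMaker(
        r_in="0.3 deg",
        r_out_max="2 deg",
        width="0.2 deg",
        exclusion_mask=exclusion_mask,
    )
    cubes = maker.make_cubes(dataset)
    # each entry is a (ny, nx, n_kernels) array, one plane per ring width
    print(sorted(cubes))  # ['acceptance', 'acceptance_off', 'counts_off']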
Example #2
    def make_cubes(self, dataset):
        """Make acceptance, off acceptance, off counts cubes

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input map dataset.

        Returns
        -------
        cubes : dict of `~numpy.ndarray`
            Dictionary containing ``counts_off``, ``acceptance`` and ``acceptance_off`` cubes.
        """
        counts = dataset.counts
        background = dataset.npred_background()
        kernels = self.kernels(counts)

        if self.exclusion_mask:
            # interpolate the exclusion mask onto the counts geometry
            exclusion = self.exclusion_mask.interp_to_geom(geom=counts.geom)
        else:
            # no exclusion mask given: keep every pixel
            exclusion = Map.from_geom(geom=counts.geom, data=True, dtype=bool)

        cubes = {}
        cubes["counts_off"] = scale_cube(
            (counts.data * exclusion.data)[0, Ellipsis], kernels
        )
        cubes["acceptance_off"] = scale_cube(
            (background.data * exclusion.data)[0, Ellipsis], kernels
        )

        scale = background.geom.pixel_scales[0].to("deg")
        theta = self.theta * scale
        tophat = Tophat2DKernel(theta.value)
        tophat.normalize("peak")
        acceptance = background.convolve(tophat.array)
        acceptance_data = acceptance.data[0, Ellipsis]
        cubes["acceptance"] = np.repeat(
            acceptance_data[Ellipsis, np.newaxis], len(kernels), axis=2
        )

        return cubes
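
Both versions delegate the per-kernel convolutions to `scale_cube`, which presumably convolves the 2-D image with every kernel and stacks the results along a trailing scale axis; that is why the cubes above are indexed with `axis=2`. A minimal sketch of that idea (a hypothetical reimplementation, not gammapy's exact code):

    import numpy as np
    from scipy.ndimage import convolve


    def scale_cube_sketch(data, kernels):
        """Convolve a 2-D image with each kernel; stack along a new axis 2.

        `kernels` are assumed to expose their weights via `.array`, as
        astropy convolution kernels do. Returns shape (ny, nx, len(kernels)).
        """
        return np.dstack([convolve(data, kernel.array) for kernel in kernels])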
Example #3
    def estimate_maps(self, counts, background, exposure=None):
        """
        Run adaptive smoothing on input Maps.

        Parameters
        ----------
        counts : `~gammapy.maps.Map`
            Counts map.
        background : `~gammapy.maps.Map`
            Estimated background counts map.
        exposure : `~gammapy.maps.Map`, optional
            Exposure map. If given, a smoothed flux map is also produced.

        Returns
        -------
        images : dict of `~gammapy.maps.WcsNDMap`
            Smoothed images; keys are:
                * 'counts'
                * 'background'
                * 'flux' (optional)
                * 'scale'
                * 'significance'.
        """

        pixel_scale = counts.geom.pixel_scales.mean()
        kernels = self.kernels(pixel_scale)

        cubes = {}
        cubes["counts"] = scale_cube(counts.data, kernels)

        if background is not None:
            cubes["background"] = scale_cube(background.data, kernels)
        else:
            # TODO: Estimate background with asmooth method
            raise ValueError("Background estimation required.")

        if exposure is not None:
            flux = (counts - background) / exposure
            cubes["flux"] = scale_cube(flux.data, kernels)

        cubes["significance"] = self._significance_cube(cubes,
                                                        method=self.method)

        smoothed = self._reduce_cubes(cubes, kernels)

        result = {}

        inputs = {"counts": counts, "background": background}

        for key in ["counts", "background", "scale", "significance"]:
            data = smoothed[key]

            # fill pixels that stayed below the significance threshold at all
            # scales (NaN) with the mean of the input map at those positions
            if key in ["counts", "background"]:
                mask = np.isnan(data)
                data[mask] = np.mean(inputs[key].data[mask])
                result[key] = WcsNDMap(counts.geom, data, unit=counts.unit)
            else:
                unit = "deg" if key == "scale" else ""
                result[key] = WcsNDMap(counts.geom, data, unit=unit)

        if exposure is not None:
            data = smoothed["flux"]
            mask = np.isnan(data)
            data[mask] = np.mean(flux.data[mask])
            result["flux"] = WcsNDMap(counts.geom, data, unit=flux.unit)

        return result
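
A usage sketch for this older signature, assuming `ASmoothMapEstimator` is importable from `gammapy.detect` and that `counts`, `background` and `exposure` are prepared 2-D maps sharing one geometry; keyword names follow the pre-1.0 signature and may differ between versions:

    import astropy.units as u
    from gammapy.detect import ASmoothMapEstimator

    # Hedged sketch: `counts`, `background` and `exposure` are assumed to be
    # prepared WcsNDMap objects on the same geometry.
    scales = u.Quantity([0.05, 0.1, 0.2], "deg")
    estimator = ASmoothMapEstimator(scales=scales, threshold=5)
    images = estimator.estimate_maps(counts, background, exposure=exposure)
    print(images["scale"].data.max())  # largest smoothing scale used, in deg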
Example #4
    def estimate_maps(self, dataset):
        """Run adaptive smoothing on input Maps.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input map dataset.

        Returns
        -------
        images : dict of `~gammapy.maps.WcsNDMap`
            Smoothed images; keys are:
                * 'counts'
                * 'background'
                * 'flux' (optional)
                * 'scale'
                * 'sqrt_ts'.
        """
        dataset_image = dataset.to_image(name=dataset.name)
        dataset_image.models = dataset.models

        # extract 2d arrays
        counts = dataset_image.counts.data[0].astype(float)
        background = dataset_image.npred_background().data[0]

        if isinstance(dataset_image, MapDatasetOnOff):
            # on-off datasets carry a measured off background; prefer it
            background = dataset_image.background.data[0]

        if dataset_image.exposure is not None:
            exposure = estimate_exposure_reco_energy(dataset_image,
                                                     self.spectrum)
        else:
            exposure = None

        pixel_scale = dataset_image.counts.geom.pixel_scales.mean()
        kernels = self.get_kernels(pixel_scale)

        cubes = {}
        cubes["counts"] = scale_cube(counts, kernels)
        cubes["background"] = scale_cube(background, kernels)

        if exposure is not None:
            flux = (dataset_image.counts - background) / exposure
            cubes["flux"] = scale_cube(flux.data[0], kernels)

        cubes["sqrt_ts"] = self._sqrt_ts_cube(cubes, method=self.method)

        smoothed = self._reduce_cubes(cubes, kernels)

        result = {}

        geom = dataset_image.counts.geom

        inputs = {"counts": counts, "background": background}

        for name, data in smoothed.items():
            # fill pixels that stayed below the sqrt_ts threshold at all
            # scales (NaN) with the mean of the input array at those positions
            if name in ["counts", "background"]:
                mask = np.isnan(data)
                data[mask] = np.mean(inputs[name][mask])
                result[name] = WcsNDMap(geom, data, unit="")
            else:
                unit = "deg" if name == "scale" else ""
                result[name] = WcsNDMap(geom, data, unit=unit)

        if exposure is not None:
            data = smoothed["flux"]
            mask = np.isnan(data)
            data[mask] = np.mean(flux.data[0][mask])
            result["flux"] = WcsNDMap(geom, data, unit=flux.unit)

        return result
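
In this newer API the estimator consumes a whole dataset. A sketch under the assumption that the class lives in `gammapy.estimators` and that `dataset` is a prepared `MapDataset`:

    import astropy.units as u
    from gammapy.estimators import ASmoothMapEstimator

    # Hedged sketch: `dataset` is assumed to be a prepared MapDataset with
    # counts, background model and (optionally) exposure filled in.
    scales = u.Quantity([0.05, 0.1, 0.2], "deg")
    estimator = ASmoothMapEstimator(scales=scales, threshold=5)
    images = estimator.estimate_maps(dataset)
    sqrt_ts = images["sqrt_ts"]  # adaptively smoothed significance map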