Example #1
    def to_wcs_map_nd(self, energy_axis_mode='center'):
        """Convert to a `gammapy.maps.WcsMapND`.

        No copy of the ``data`` or ``wcs`` object is made, so this conversion is cheap.

        This is meant to help migrate code using `SkyCube`
        over to the new maps classes.
        """
        from gammapy.maps import WcsMapND, WcsGeom, MapAxis

        if energy_axis_mode == 'center':
            energy = self.energies(mode='center')
            energy_axis = MapAxis.from_nodes(energy.value, unit=energy.unit)
        elif energy_axis_mode == 'edges':
            energy = self.energies(mode='edges')
            energy_axis = MapAxis.from_edges(energy.value, unit=energy.unit)
        else:
            raise ValueError(
                'Invalid energy_axis_mode: {}'.format(energy_axis_mode))

        # Axis order in SkyCube: energy, lat, lon
        npix = (self.data.shape[2], self.data.shape[1])

        geom = WcsGeom(wcs=self.wcs, npix=npix, axes=[energy_axis])

        # TODO: change maps and SkyCube to have a unit attribute
        # For now, SkyCube is a mix of numpy array and quantity in `data`
        # and we just strip the unit here
        data = np.asarray(self.data)
        # unit = getattr(self.data, 'unit', None)

        return WcsMapND(geom=geom, data=data)
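
The two `energy_axis_mode` branches map onto the two `MapAxis` constructors; a minimal standalone sketch of what each one builds (hypothetical energy values):

    import astropy.units as u
    from gammapy.maps import MapAxis

    energy = [1, 3, 10] * u.TeV
    axis_center = MapAxis.from_nodes(energy.value, unit=energy.unit)  # 3 bins, one per node
    axis_edges = MapAxis.from_edges(energy.value, unit=energy.unit)   # 2 bins between edges
    print(axis_center.nbin, axis_edges.nbin)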
Example #2
    def to_wcs_nd_map(self):
        """Convert to a `gammapy.maps.WcsNDMap`.

        No copy of the ``data`` or ``wcs`` object is made, so this conversion is cheap.

        This is meant to help migrate code using `SkyImage`
        over to the new maps classes.
        """
        from gammapy.maps import WcsNDMap, WcsGeom

        # Axis order in SkyImage: lat, lon
        npix = (self.data.shape[1], self.data.shape[0])

        geom = WcsGeom(wcs=self.wcs, npix=npix)

        return WcsNDMap(geom=geom, data=self.data)
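
The `(nx, ny)` order passed to `WcsGeom` is the reverse of the numpy shape, which is easy to get wrong; a minimal sketch with a hypothetical 3 x 4 image:

    import numpy as np

    data = np.zeros((3, 4))                 # numpy shape is (ny, nx) = (lat, lon)
    npix = (data.shape[1], data.shape[0])   # WcsGeom expects (nx, ny)
    print(npix)                             # (4, 3)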
Example #3
# Imports needed to run this snippet standalone (assuming gammapy's current
# modeling layout; in older versions these classes lived elsewhere)
from astropy.wcs import WCS
from gammapy.maps import WcsGeom
from gammapy.modeling.models import GaussianSpatialModel


def test_evaluate_on_fk5_map():
    # Check that the spatial model can be evaluated on a map with an FK5 frame
    # Regression test for GH-2402
    header = {}
    header["CDELT1"] = 1.0
    header["CDELT2"] = 1.0
    header["CTYPE1"] = "RA---TAN"
    header["CTYPE2"] = "DEC--TAN"
    header["RADESYS"] = "FK5"
    header["CRVAL1"] = 0
    header["CRVAL2"] = 0
    header["CRPIX1"] = 5
    header["CRPIX2"] = 5

    wcs = WCS(header)
    geom = WcsGeom(wcs, npix=(10, 10))
    model = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="1 deg")
    data = model.evaluate_geom(geom)
    assert data.sum() > 0
Example #4
    def __init__(self, counts, background, n_scale):
        self._counts = np.array(counts.data, dtype=float)
        self._background = np.array(background.data, dtype=float)
        self._geom2d = counts.geom.copy()
        scale_axis = MapAxis(np.arange(n_scale + 1))
        self._geom3d = WcsGeom(
            wcs=counts.geom.wcs, npix=counts.geom.npix, axes=[scale_axis]
        )

        shape_2d = self._counts.shape
        self._model = np.zeros(shape_2d)
        self._approx = np.zeros(shape_2d)
        self._approx_bkg = np.zeros(shape_2d)
        self._transform_2d = np.zeros(shape_2d)

        shape_3d = n_scale, shape_2d[0], shape_2d[1]
        self._transform_3d = np.zeros(shape_3d)
        self._error = np.zeros(shape_3d)
        self._support = np.zeros(shape_3d, dtype=bool)
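
A minimal sketch (hypothetical inputs) of the buffers this constructor sets up, assuming `MapAxis` treats its nodes as bin edges by default:

    import numpy as np
    from gammapy.maps import MapAxis

    n_scale, shape_2d = 3, (50, 50)
    scale_axis = MapAxis(np.arange(n_scale + 1))    # 4 edges -> n_scale bins
    transform_3d = np.zeros((n_scale,) + shape_2d)  # one image plane per scale
    print(scale_axis.nbin, transform_3d.shape)      # 3 (3, 50, 50)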
Example #5
    def _make_tsmap_fast(self, prefix, **kwargs):
        """
        Make a TS map from a GTAnalysis instance.  This is a
        simplified implementation optimized for speed that only fits
        for the source normalization (all background components are
        kept fixed). The spectral/spatial characteristics of the test
        source can be defined with the src_dict argument.  By default
        this method will generate a TS map for a point source with an
        index=2.0 power-law spectrum.

        Parameters
        ----------
        model : dict or `~fermipy.roi_model.Source`
           Dictionary or Source object defining the properties of the
           test source that will be used in the scan.

        """
        loglevel = kwargs.get('loglevel', self.loglevel)

        src_dict = copy.deepcopy(kwargs.setdefault('model', {}))
        src_dict = {} if src_dict is None else src_dict

        multithread = kwargs.setdefault('multithread', False)
        threshold = kwargs.setdefault('threshold', 1E-2)
        max_kernel_radius = kwargs.get('max_kernel_radius')
        loge_bounds = kwargs.setdefault('loge_bounds', None)
        use_pylike = kwargs.setdefault('use_pylike', True)

        if loge_bounds:
            if len(loge_bounds) != 2:
                raise ValueError('Wrong size of loge_bounds array.')
            loge_bounds[0] = (loge_bounds[0] if loge_bounds[0] is not None else
                              self.log_energies[0])
            loge_bounds[1] = (loge_bounds[1] if loge_bounds[1] is not None else
                              self.log_energies[-1])
        else:
            loge_bounds = [self.log_energies[0], self.log_energies[-1]]

        # Put the test source at the pixel closest to the ROI center
        xpix = ypix = np.round((self.npix - 1.0) / 2.)
        cpix = np.array([xpix, ypix])

        map_geom = self._geom.to_image()
        frame = coordsys_to_frame(map_geom.coordsys)
        skydir = SkyCoord(*map_geom.pix_to_coord((cpix[0], cpix[1])),
                          frame=frame,
                          unit='deg')
        skydir = skydir.transform_to('icrs')

        src_dict['ra'] = skydir.ra.deg
        src_dict['dec'] = skydir.dec.deg
        src_dict.setdefault('SpatialModel', 'PointSource')
        src_dict.setdefault('SpatialWidth', 0.3)
        src_dict.setdefault('Index', 2.0)
        src_dict.setdefault('Prefactor', 1E-13)

        counts = []
        bkg = []
        model = []
        c0_map = []
        eslices = []
        enumbins = []
        model_npred = 0
        for c in self.components:

            imin = utils.val_to_edge(c.log_energies, loge_bounds[0])[0]
            imax = utils.val_to_edge(c.log_energies, loge_bounds[1])[0]

            eslice = slice(imin, imax)
            bm = c.model_counts_map(
                exclude=kwargs['exclude']).data.astype('float')[eslice, ...]
            cm = c.counts_map().data.astype('float')[eslice, ...]

            bkg += [bm]
            counts += [cm]
            c0_map += [cash(cm, bm)]
            eslices += [eslice]
            enumbins += [cm.shape[0]]

        self.add_source('tsmap_testsource',
                        src_dict,
                        free=True,
                        init_source=False,
                        use_single_psf=True,
                        use_pylike=use_pylike,
                        loglevel=logging.DEBUG)
        src = self.roi['tsmap_testsource']
        # self.logger.info(str(src_dict))
        modelname = utils.create_model_name(src)
        for c, eslice in zip(self.components, eslices):
            mm = c.model_counts_map('tsmap_testsource').data.astype('float')[
                eslice, ...]
            model_npred += np.sum(mm)
            model += [mm]

        self.delete_source('tsmap_testsource', loglevel=logging.DEBUG)

        for i, mm in enumerate(model):

            dpix = 3
            for j in range(mm.shape[0]):

                ix, iy = np.unravel_index(
                    np.argmax(mm[j, ...]), mm[j, ...].shape)

                mx = mm[j, ix, :] > mm[j, ix, iy] * threshold
                my = mm[j, :, iy] > mm[j, ix, iy] * threshold
                dpix = max(dpix, np.round(np.sum(mx) / 2.))
                dpix = max(dpix, np.round(np.sum(my) / 2.))

            if max_kernel_radius is not None and \
                    dpix > int(max_kernel_radius / self.components[i].binsz):
                dpix = int(max_kernel_radius / self.components[i].binsz)

            xslice = slice(max(int(xpix - dpix), 0),
                           min(int(xpix + dpix + 1), self.npix))
            model[i] = model[i][:, xslice, xslice]

        ts_values = np.zeros((self.npix, self.npix))
        amp_values = np.zeros((self.npix, self.npix))

        wrap = functools.partial(_ts_value_newton,
                                 counts=counts,
                                 bkg=bkg,
                                 model=model,
                                 C_0_map=c0_map)

        if kwargs['map_skydir'] is not None:

            map_offset = wcs_utils.skydir_to_pix(kwargs['map_skydir'],
                                                 map_geom.wcs)

            map_delta = 0.5 * kwargs['map_size'] / self.components[0].binsz
            xmin = max(int(np.ceil(map_offset[1] - map_delta)), 0)
            xmax = min(int(np.floor(map_offset[1] + map_delta)) + 1, self.npix)
            ymin = max(int(np.ceil(map_offset[0] - map_delta)), 0)
            ymax = min(int(np.floor(map_offset[0] + map_delta)) + 1, self.npix)

            xslice = slice(xmin, xmax)
            yslice = slice(ymin, ymax)
            xyrange = [range(xmin, xmax), range(ymin, ymax)]

            wcs = map_geom.wcs.deepcopy()
            npix = (ymax - ymin, xmax - xmin)
            crpix = (map_geom._crpix[0] - ymin, map_geom._crpix[1] - xmin)
            wcs.wcs.crpix[0] -= ymin
            wcs.wcs.crpix[1] -= xmin

            # FIXME: We should implement this with a proper cutout method
            map_geom = WcsGeom(wcs, npix, crpix=crpix)
        else:
            xyrange = [range(self.npix), range(self.npix)]
            xslice = slice(0, self.npix)
            yslice = slice(0, self.npix)

        positions = []
        for i, j in itertools.product(xyrange[0], xyrange[1]):
            p = [[k // 2, i, j] for k in enumbins]
            positions += [p]

        self.logger.log(loglevel, 'Fitting test source.')
        if multithread:
            pool = Pool()
            results = pool.map(wrap, positions)
            pool.close()
            pool.join()
        else:
            results = map(wrap, positions)

        for i, r in enumerate(results):
            ix = positions[i][0][1]
            iy = positions[i][0][2]
            ts_values[ix, iy] = r[0]
            amp_values[ix, iy] = r[1]

        ts_values = ts_values[xslice, yslice]
        amp_values = amp_values[xslice, yslice]

        ts_map = WcsNDMap(map_geom, ts_values)
        sqrt_ts_map = WcsNDMap(map_geom, ts_values**0.5)
        npred_map = WcsNDMap(map_geom, amp_values * model_npred)
        amp_map = WcsNDMap(map_geom, amp_values * src.get_norm())

        o = {
            'name': utils.join_strings([prefix, modelname]),
            'src_dict': copy.deepcopy(src_dict),
            'file': None,
            'ts': ts_map,
            'sqrt_ts': sqrt_ts_map,
            'npred': npred_map,
            'amplitude': amp_map,
            'loglike': -self.like(),
            'config': kwargs
        }

        return o
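
The statistic behind this scan can be sketched independently of fermipy: the Cash statistic C = 2 * sum(model - counts * ln(model)) is minimized over a single test-source amplitude, and TS is the improvement over the background-only fit. A minimal sketch with toy data (`_ts_value_newton` itself uses a Newton solver and is not reproduced here):

    import numpy as np
    from scipy.optimize import minimize_scalar

    def cash(counts, model):
        # Poisson fit statistic (Cash 1979), up to a counts-only constant
        return 2.0 * np.sum(model - counts * np.log(model))

    def ts_value(counts, bkg, src):
        # Fit one amplitude for the test-source template, background fixed
        c0 = cash(counts, bkg)
        res = minimize_scalar(lambda a: cash(counts, bkg + a * src),
                              bounds=(0.0, 1e3), method="bounded")
        return c0 - res.fun, res.x  # (TS, best-fit amplitude)

    rng = np.random.default_rng(0)
    bkg = np.full((5, 5), 10.0)
    src = np.zeros((5, 5))
    src[2, 2] = 1.0  # point-source template at the central pixel
    counts = rng.poisson(bkg + 50 * src).astype(float)
    print(ts_value(counts, bkg, src))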
Example #6
# In[ ]:


exposure_hpx.plot();


# In[ ]:


# For exposure, we choose a geometry with node_type='center',
# whereas for counts it was node_type='edge'
axis = MapAxis.from_nodes(
    counts.geom.axes[0].center, name="energy", unit="GeV", interp="log"
)
geom = WcsGeom(wcs=counts.geom.wcs, npix=counts.geom.npix, axes=[axis])

coord = counts.geom.get_coord()
data = exposure_hpx.interp_by_coord(coord)
exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit)
print(exposure.geom)
print(exposure.geom.axes[0])


# In[ ]:


# Exposure is almost constant across the field of view
exposure.slice_by_idx({"energy": 0}).plot(add_cbar=True);
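
# In[ ]:


# A hedged aside (not from the original tutorial): geom.get_coord() returns
# one value per pixel for every axis of the geometry, which is exactly what
# interp_by_coord consumed above. A tiny standalone sketch:
from gammapy.maps import MapAxis, WcsGeom

axis = MapAxis.from_nodes([10, 100], name="energy", unit="GeV", interp="log")
geom = WcsGeom.create(npix=(3, 2), axes=[axis])
coord = geom.get_coord()
print(coord["lon"].shape)  # (2, 2, 3): (energy, lat, lon)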

Example #7
    def run_region(self, kr, lon, lat, radius):
        #    TODO: for now we have to read/create the all-sky maps in each job
        #    because we can't pickle `functools._lru_cache_wrapper` objects;
        #    move this back to __init__ when fixed

        # exposure
        exposure_hpx = Map.read(
            self.datadir + "/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # background iem
        infile = self.datadir + "/catalogs/fermi/gll_iem_v06.fits.gz"
        outfile = self.resdir + "/gll_iem_v06_extra.fits"
        model_iem = extrapolate_iem(infile, outfile, self.logEc_extra)

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            coordsys="GAL",
            binsz=self.dlb,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = counts.geom.get_coord()

        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(
            self.psf, counts.geom, max_radius=self.psf_margin * u.deg
        )

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EnergyDispersion.from_diagonal_response(e_true=e_true,
                                                        e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)

        # IEM
        eval_iem = MapEvaluator(model=model_iem,
                                exposure=exposure,
                                psf=psf_kernel,
                                edisp=edisp)
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(background_total)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = SkyModels(FHL3_roi)

        # Dataset
        dataset = MapDataset(
            model=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            background_model=background_model,
            mask_fit=mask_fermi,
            name="3FHL_ROI_num" + str(kr),
        )
        cat_stat = dataset.stat_sum()

        datasets = Datasets([dataset])
        fit = Fit(datasets)
        results = fit.run(optimize_opts=self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.to_yaml(path=Path(self.resdir),
                             prefix=dataset.name,
                             overwrite=True)
            np.save(
                self.resdir + "/3FHL_ROI_num" + str(kr) + "_covariance.npy",
                results.parameters.covariance,
            )
            np.savez(
                self.resdir + "/3FHL_ROI_num" + str(kr) + "_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            for model in FHL3_roi:
                if (self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >=
                        self.sig_cut):
                    flux_points = FluxPointsEstimator(
                        datasets=datasets,
                        e_edges=self.El_flux,
                        source=model.name,
                        sigma_ul=2.0,
                    ).run()
                    filename = self.resdir + "/" + model.name + "_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #8
    def run_region(self, kr, lon, lat, radius):
        #    TODO: for now we have to read/create the all-sky maps in each job
        #    because we can't pickle `functools._lru_cache_wrapper` objects;
        #    move this back to __init__ when fixed

        log.info(f"ROI {kr}: loading data")

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyModel(
            PowerLawNormSpectralModel(norm=1.1, tilt=0.03),
            TemplateSpatialModel(iem_fermi_extra, normalize=False),
            name="iem_extrapolated",
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy_true",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(
            self.psf, geom, max_radius=self.psf_margin * u.deg
        )

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EDispKernel.from_diagonal_response(e_true=e_true,
                                                   e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)

        log.info(f"ROI {kr}: pre-computing diffuse")

        # IEM
        eval_iem = MapEvaluator(model=model_iem,
                                exposure=exposure,
                                psf=psf_kernel,
                                edisp=edisp)
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(background_total,
                                           name="bkg_iem+iso",
                                           datasets_names=[dataset_name])
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models([background_model] + FHL3_roi)

        # Dataset
        dataset = MapDataset(
            models=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            mask_fit=mask_fermi,
            name=dataset_name,
        )
        cat_stat = dataset.stat_sum()
        datasets = Datasets([dataset])

        log.info(f"ROI {kr}: running fit")
        fit = Fit(datasets)
        results = fit.run(**self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.write(path=Path(self.resdir),
                           prefix=dataset.name,
                           overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            log.info(f"ROI {kr}: running flux points")
            for model in FHL3_roi:
                if (self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >=
                        self.sig_cut):
                    flux_points = FluxPointsEstimator(
                        e_edges=self.El_flux,
                        source=model.name,
                        n_sigma_ul=2,
                    ).run(datasets=datasets)
                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)