Example #1
def get_npred_map():
    position = SkyCoord(0.0, 0.0, frame="galactic", unit="deg")
    energy_axis = MapAxis.from_bounds(1,
                                      100,
                                      nbin=30,
                                      unit="TeV",
                                      name="energy_true",
                                      interp="log")

    exposure = Map.create(
        binsz=0.02,
        map_type="wcs",
        skydir=position,
        width="2 deg",
        axes=[energy_axis],
        frame="galactic",
        unit="cm2 s",
    )

    spatial_model = GaussianSpatialModel(lon_0="0 deg",
                                         lat_0="0 deg",
                                         sigma="0.2 deg",
                                         frame="galactic")
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(spatial_model=spatial_model,
                        spectral_model=spectral_model)

    exposure.data = 1e14 * np.ones(exposure.data.shape)
    evaluator = MapEvaluator(model=skymodel, exposure=exposure)

    npred = evaluator.compute_npred()
    return evaluator, npred
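
A minimal usage sketch of the helper above (hypothetical, not part of the source): `npred` is a predicted-counts map, so it can be summed and imaged directly.

evaluator, npred = get_npred_map()
print(npred.data.sum())                    # total predicted counts
npred.sum_over_axes().plot(add_cbar=True)  # quick-look sky image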
Example #2
def estimate_exposure_reco_energy(dataset, spectral_model=None):
    """Estimate an exposure map in reconstructed energy.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset.
    spectral_model : `~gammapy.modeling.models.SpectralModel`, optional
        Assumed spectral shape. If None, a power law of index 2 is assumed.

    Returns
    -------
    exposure : `Map`
        Exposure map in reconstructed energy
    """
    if spectral_model is None:
        spectral_model = PowerLawSpectralModel()

    model = SkyModel(spatial_model=ConstantFluxSpatialModel(),
                     spectral_model=spectral_model)

    energy_axis = dataset._geom.axes["energy"]

    edisp = None

    if dataset.edisp is not None:
        edisp = dataset.edisp.get_edisp_kernel(position=None,
                                               energy_axis=energy_axis)

    meval = MapEvaluator(model=model, exposure=dataset.exposure, edisp=edisp)
    npred = meval.compute_npred()
    ref_flux = spectral_model.integral(energy_axis.edges[:-1],
                                       energy_axis.edges[1:])
    reco_exposure = npred / ref_flux[:, np.newaxis, np.newaxis]
    return reco_exposure
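
A hedged usage sketch, assuming `dataset` is an already-reduced `~gammapy.datasets.MapDataset`: because the predicted counts are divided by the integrated model flux, the result carries the units of the input exposure.

reco_exposure = estimate_exposure_reco_energy(dataset)
print(reco_exposure.unit)  # e.g. "cm2 s", matching dataset.exposure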
Example #3
def test_compute_flux_spatial():
    center = SkyCoord("0 deg", "0 deg", frame="galactic")
    region = CircleSkyRegion(center=center, radius=0.1 * u.deg)

    nbin = 2
    energy_axis_true = MapAxis.from_energy_bounds(
        ".1 TeV", "10 TeV", nbin=nbin, name="energy_true"
    )

    spectral_model = ConstantSpectralModel()
    spatial_model = PointSpatialModel(
        lon_0=0 * u.deg, lat_0=0 * u.deg, frame="galactic"
    )

    models = SkyModel(spectral_model=spectral_model, spatial_model=spatial_model)
    model = Models(models)

    exposure_region = RegionNDMap.create(region, axes=[energy_axis_true])
    exposure_region.data += 1.0
    exposure_region.unit = "m2 s"

    geom = RegionGeom(region, axes=[energy_axis_true])
    psf = PSFKernel.from_gauss(geom.to_wcs_geom(), sigma="0.1 deg")

    evaluator = MapEvaluator(model=model[0], exposure=exposure_region, psf=psf)
    flux = evaluator.compute_flux_spatial()

    assert_allclose(flux.value, [0.39677402, 0.39677402], atol=0.001)
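
The asserted value has a simple analytic cross-check: for a point source smeared by a 2D Gaussian PSF of width sigma, the flux contained in a circle of radius r is 1 - exp(-r**2 / (2 * sigma**2)). With r = sigma = 0.1 deg this gives about 0.393, in the same ballpark as the 0.3968 obtained on the pixelised map (the residual difference comes from pixelisation and the finite PSF kernel). A quick sketch:

import numpy as np

r = sigma = 0.1  # deg, as in the test above
containment = 1 - np.exp(-r**2 / (2 * sigma**2))
print(containment)  # ~0.3935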
Example #4
def test_sky_point_source():
    # Test special case of point source. Regression test for GH 2367.

    energy_axis = MapAxis.from_edges(
        [1, 10], unit="TeV", name="energy_true", interp="log"
    )
    exposure = Map.create(
        skydir=(100, 70),
        npix=(4, 4),
        binsz=0.1,
        proj="AIT",
        unit="cm2 s",
        axes=[energy_axis],
    )
    exposure.data = np.ones_like(exposure.data)

    spatial_model = PointSpatialModel(
        lon_0=100.06 * u.deg, lat_0=70.03 * u.deg, frame="icrs"
    )
    # Create a spectral model with integral flux of 1 cm-2 s-1 in this energy band
    spectral_model = ConstantSpectralModel(const="1 cm-2 s-1 TeV-1")
    spectral_model.const.value /= spectral_model.integral(1 * u.TeV, 10 * u.TeV).value
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)
    evaluator = MapEvaluator(model=model, exposure=exposure)
    flux = evaluator.compute_flux().quantity.to_value("cm-2 s-1")[0]

    expected = [
        [0, 0, 0, 0],
        [0, 0.140, 0.058, 0.0],
        [0, 0.564, 0.236, 0],
        [0, 0, 0, 0],
    ]
    assert_allclose(flux, expected, atol=0.01)

    assert_allclose(flux.sum(), 1)
Example #5
    def estimate_kernel(self, dataset):
        """Get the convolution kernel for the input dataset.

        Convolves the model with the PSFKernel at the center of the dataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input dataset.

        Returns
        -------
        kernel : `Map`
            Kernel map

        """
        # TODO: further simplify the code below
        geom = dataset.counts.geom

        model = self.model.copy()
        model.spatial_model.position = geom.center_skydir

        binsz = np.mean(geom.pixel_scales)
        width_pix = self.kernel_width / binsz

        npix = round_up_to_odd(width_pix.to_value(""))

        axis = dataset.exposure.geom.axes["energy_true"]

        geom_kernel = WcsGeom.create(skydir=model.position,
                                     proj="TAN",
                                     npix=npix,
                                     axes=[axis],
                                     binsz=binsz)

        exposure = Map.from_geom(geom_kernel, unit="cm2 s")
        exposure.data += 1.0

        # We use global evaluation mode to not modify the geometry
        evaluator = MapEvaluator(model, evaluation_mode="global")
        evaluator.update(exposure, dataset.psf, dataset.edisp,
                         dataset.counts.geom)

        kernel = evaluator.compute_npred()
        kernel.data /= kernel.data.sum()

        if (self.kernel_width + binsz >= geom.width).any():
            raise ValueError(
                "Kernel shape larger than map shape, please adjust"
                " size of the kernel")
        return kernel
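
A hypothetical call, assuming `estimator` is an instance of the class this method belongs to (with `model` and `kernel_width` set) and `dataset` is a `~gammapy.datasets.MapDataset`; the kernel is normalised to unit sum above, so callers can rely on that.

import numpy as np

kernel = estimator.estimate_kernel(dataset)
assert np.isclose(kernel.data.sum(), 1.0)  # normalised in estimate_kernel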
Example #6
    def estimate_kernel(self, dataset):
        """Get the convolution kernel for the input dataset.

        Convolves the model with the PSFKernel at the center of the dataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input dataset.

        Returns
        -------
        kernel : `Map`
            Kernel map

        """
        # TODO: further simplify the code below
        geom = dataset.exposure.geom

        model = self.model.copy()
        model.spatial_model.position = geom.center_skydir

        geom_kernel = geom.to_odd_npix(max_radius=self.kernel_width / 2)

        # Creating exposure map with exposure at map center
        exposure = Map.from_geom(geom_kernel, unit="cm2 s")
        coord = MapCoord.create(
            dict(skycoord=geom.center_skydir,
                 energy_true=geom.axes["energy_true"].center))
        exposure.data[...] = dataset.exposure.get_by_coord(coord)[:,
                                                                  np.newaxis,
                                                                  np.newaxis]

        # We use global evaluation mode to not modify the geometry
        evaluator = MapEvaluator(model, evaluation_mode="global")
        evaluator.update(
            exposure,
            dataset.psf,
            dataset.edisp,
            dataset.counts.geom,
            dataset.mask_fit,
        )

        kernel = evaluator.compute_npred()
        kernel.data /= kernel.data.sum()

        if (self.kernel_width >= geom.width).any():
            raise ValueError(
                "Kernel shape larger than map shape, please adjust"
                " size of the kernel")
        return kernel
Example #7
def estimate_exposure_reco_energy(dataset,
                                  spectral_model=None,
                                  normalize=True):
    """Estimate an exposure map in reconstructed energy.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset.
    spectral_model : `~gammapy.modeling.models.SpectralModel`, optional
        Assumed spectral shape. If None, a power law of index 2 is assumed.
    normalize : bool
        Normalize the exposure to the total integrated flux of the spectral
        model. When not normalized, the result directly gives the predicted
        counts from the spectral model.

    Returns
    -------
    exposure : `Map`
        Exposure map in reconstructed energy
    """
    if spectral_model is None:
        spectral_model = PowerLawSpectralModel()

    model = SkyModel(spatial_model=ConstantFluxSpatialModel(),
                     spectral_model=spectral_model)

    energy_axis = dataset._geom.axes["energy"]

    if dataset.edisp is not None:
        edisp = dataset.edisp.get_edisp_kernel(position=None,
                                               energy_axis=energy_axis)
    else:
        edisp = None

    # Named to avoid shadowing the built-in `eval`
    meval = MapEvaluator(model=model, exposure=dataset.exposure, edisp=edisp)
    reco_exposure = meval.compute_npred()

    if normalize:
        ref_flux = spectral_model.integral(energy_axis.edges[:-1],
                                           energy_axis.edges[1:])
        reco_exposure = reco_exposure / ref_flux[:, np.newaxis, np.newaxis]

    return reco_exposure
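
A sketch contrasting the two modes of this variant (assuming an existing `MapDataset` named `dataset`; the names below are illustrative):

# normalize=True (default): an exposure map in reconstructed energy.
exposure_reco = estimate_exposure_reco_energy(dataset)

# normalize=False: the predicted counts from the assumed spectral model.
npred_pwl = estimate_exposure_reco_energy(dataset, normalize=False)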
Example #8
    def get_kernel(self, dataset):
        """Set the convolution kernel for the input dataset.

        Convolves the model with the PSFKernel at the center of the dataset.
        If no PSFMap or PSFKernel is found in the dataset, the model is used without convolution.
        """
        # TODO: further simplify the code below
        geom = dataset.counts.geom

        if self.downsampling_factor:
            geom = geom.downsample(self.downsampling_factor)

        model = self.model.copy()
        model.spatial_model.position = geom.center_skydir

        binsz = np.mean(geom.pixel_scales)
        width_pix = self.kernel_width / binsz

        npix = round_up_to_odd(width_pix.to_value(""))

        axis = dataset.exposure.geom.get_axis_by_name("energy_true")

        # Use a separate name so `geom` keeps referring to the counts
        # geometry, which the kernel-size check below compares against.
        geom_kernel = WcsGeom.create(skydir=model.position,
                                     proj="TAN",
                                     npix=npix,
                                     axes=[axis],
                                     binsz=binsz)

        exposure = Map.from_geom(geom_kernel, unit="cm2 s")
        exposure.data += 1.0

        # We use global evaluation mode to not modify the geometry
        evaluator = MapEvaluator(model, evaluation_mode="global")
        evaluator.update(exposure, dataset.psf, dataset.edisp,
                         dataset.counts.geom)

        kernel = evaluator.compute_npred().sum_over_axes()
        kernel.data /= kernel.data.sum()

        if (self.kernel_width > geom.width).any():
            raise ValueError(
                "Kernel shape larger than map shape, please adjust"
                " size of the kernel")
        return kernel
Example #9
    def estimate_kernel(self, dataset):
        """Get the convolution kernel for the input dataset.

        Convolves the model with the PSFKernel at the center of the dataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input dataset.

        Returns
        -------
        kernel : `Map`
            Kernel map

        """
        geom = dataset.exposure.geom

        if self.kernel_width is not None:
            geom = geom.to_odd_npix(max_radius=self.kernel_width / 2)

        model = self.model.copy()
        model.spatial_model.position = geom.center_skydir

        # Creating exposure map with exposure at map center
        exposure = Map.from_geom(geom, unit="cm2 s")
        exposure_center = dataset.exposure.to_region_nd_map(geom.center_skydir)
        exposure.data[...] = exposure_center.data

        # We use global evaluation mode to not modify the geometry
        evaluator = MapEvaluator(model=model)

        evaluator.update(
            exposure=exposure,
            psf=dataset.psf,
            edisp=dataset.edisp,
            geom=dataset.counts.geom,
            mask=dataset.mask_image,
        )
        kernel = evaluator.compute_npred()
        kernel.data /= kernel.data.sum()
        return kernel
Example #10
    def estimate_kernel(self, dataset):
        """Get the convolution kernel for the input dataset.

        Convolves the model with the PSFKernel at the center of the dataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input dataset.

        Returns
        -------
        kernel : `Map`
            Kernel map

        """
        # TODO: further simplify the code below
        geom = dataset.exposure.geom

        model = self.model.copy()
        model.spatial_model.position = geom.center_skydir

        geom_kernel = geom.to_odd_npix(max_radius=self.kernel_width / 2)

        exposure = Map.from_geom(geom_kernel, unit="cm2 s")
        exposure.data += 1.0

        # We use global evaluation mode to not modify the geometry
        evaluator = MapEvaluator(model, evaluation_mode="global")
        evaluator.update(exposure, dataset.psf, dataset.edisp,
                         dataset.counts.geom)

        kernel = evaluator.compute_npred()
        kernel.data /= kernel.data.sum()

        if (self.kernel_width >= geom.width).any():
            raise ValueError(
                "Kernel shape larger than map shape, please adjust"
                " size of the kernel")
        return kernel
Example #11
def diffuse_evaluator(diffuse_model, exposure, psf, edisp):
    return MapEvaluator(diffuse_model, exposure, psf=psf, edisp=edisp)
Example #12
def evaluator(sky_model, exposure, psf, edisp, gti):
    return MapEvaluator(sky_model, exposure, psf=psf, edisp=edisp, gti=gti)
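
Examples #11 and #12 are pytest fixtures: their arguments (`sky_model`, `exposure`, `psf`, `edisp`, `gti`) are themselves fixtures resolved by name. A hypothetical test consuming the `evaluator` fixture above:

def test_npred_non_negative(evaluator):
    # pytest injects the `evaluator` fixture defined above by name
    npred = evaluator.compute_npred()
    assert (npred.data >= 0).all()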
Example #13
    def run_region(self, kr, lon, lat, radius):
        # TODO: for now we have to read/create the all-sky maps in each job
        # because we can't pickle <functools._lru_cache_wrapper> objects;
        # move this back to __init__ once that is fixed

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
        )
        exposure_hpx.unit = "cm2 s"

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyDiffuseCube(
            iem_fermi_extra, norm=1.1, tilt=0.03, name="iem_extrapolated"
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord(
            {"skycoord": self.events.radec, "energy": self.events.energy}
        )

        axis = MapAxis.from_nodes(
            counts.geom.axes[0].center, name="energy_true", unit="GeV", interp="log"
        )
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(
            self.psf, geom, max_radius=self.psf_margin * u.deg
        )

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EDispKernel.from_diagonal_response(e_true=e_true, e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
        )
        mask_fermi = WcsNDMap(counts.geom, mask)

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(
            background_total, name="bkg_iem+iso", datasets_names=[dataset_name]
        )
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models([background_model] + FHL3_roi)

        # Dataset
        dataset = MapDataset(
            models=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            mask_fit=mask_fermi,
            name=dataset_name,
        )
        cat_stat = dataset.stat_sum()

        datasets = Datasets([dataset])
        fit = Fit(datasets)
        results = fit.run(**self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.write(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            for model in FHL3_roi:
                if (
                    self.FHL3[model.name].data["ROI_num"] == kr
                    and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
                ):
                    flux_points = FluxPointsEstimator(
                        e_edges=self.El_flux, source=model.name, n_sigma_ul=2,
                    ).run(datasets=datasets)
                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #14
    def run_region(self, kr, lon, lat, radius):
        # TODO: for now we have to read/create the all-sky maps in each job
        # because we can't pickle <functools._lru_cache_wrapper> objects;
        # move this back to __init__ once that is fixed

        log.info(f"ROI {kr}: loading data")

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # psf
        psf_map = PSFMap.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz",
            format="gtpsf")
        # reduce size of the PSF
        axis = psf_map.psf_map.geom.axes["rad"].center.to_value(u.deg)
        indmax = np.argmin(np.abs(self.psf_margin - axis))
        psf_map = psf_map.slice_by_idx(slices={"rad": slice(0, indmax)})

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyModel(
            PowerLawNormSpectralModel(norm=1.1, tilt=0.03),
            TemplateSpatialModel(iem_fermi_extra, normalize=False),
            name="iem_extrapolated",
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy_true",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # Energy Dispersion
        edisp = EDispKernelMap.from_diagonal_response(
            energy_axis_true=axis, energy_axis=self.energy_axis)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max(
        ) > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)
        mask_safe_fermi = WcsNDMap(counts.geom, np.ones(mask.shape,
                                                        dtype=bool))

        log.info(f"ROI {kr}: pre-computing diffuse")

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem,
            exposure=exposure,
            psf=psf_map.get_psf_kernel(geom),
            edisp=edisp.get_edisp_kernel(),
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp.get_edisp_kernel())
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso

        # Dataset
        dataset = MapDataset(
            counts=counts,
            exposure=exposure,
            background=background_total,
            psf=psf_map,
            edisp=edisp,
            mask_fit=mask_fermi,
            mask_safe=mask_safe_fermi,
            name=dataset_name,
        )

        background_model = FoVBackgroundModel(dataset_name=dataset_name)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models(FHL3_roi + [background_model])
        dataset.models = model_total

        cat_stat = dataset.stat_sum()
        datasets = Datasets([dataset])

        log.info(f"ROI {kr}: running fit")
        fit = Fit(**self.fit_opts)
        results = fit.run(datasets=datasets)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            filedata = Path(self.resdir) / f"3FHL_ROI_num{kr}_datasets.yaml"
            filemodel = Path(self.resdir) / f"3FHL_ROI_num{kr}_models.yaml"
            datasets.write(filedata, filemodel, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            log.info(f"ROI {kr}: running flux points")
            for model in FHL3_roi:
                if (self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut):
                    print(model.name)
                    flux_points = FluxPointsEstimator(
                        energy_edges=self.El_flux,
                        source=model.name,
                        n_sigma_ul=2,
                        selection_optional=["ul"],
                    ).run(datasets=datasets)
                    flux_points.meta["sqrt_ts_threshold_ul"] = 1

                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)