Example #1
def __init__(self, name="test"):
    self.name = name
    self._models = Models([MyModel()])
    self.data_shape = (1,)
    self.meta_table = Table()
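Examples #1, #2, #8 and #18 are test fixtures that assume a toy MyModel class. A plausible minimal definition, following gammapy's custom-model pattern (the parameter names and defaults are guessed from Example #8):

# Hypothetical MyModel used by the fixtures above; not part of gammapy itself.
from gammapy.modeling import Parameter
from gammapy.modeling.models import SpectralModel

class MyModel(SpectralModel):
    """Toy spectral model with three dimensionless parameters."""
    x = Parameter("x", 2)
    y = Parameter("y", 3e2)
    z = Parameter("z", 4e-2)

    @staticmethod
    def evaluate(energy, x, y, z):
        # Placeholder evaluation: constant, energy-independent value.
        return x + y + z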
Example #2
def __init__(self, name="test"):
    self.name = name
    self.models = Models([MyModel()])
    self.data_shape = (1,)
Example #3
    def create(
        cls,
        e_reco,
        e_true=None,
        region=None,
        reference_time="2000-01-01",
        name=None,
        meta_table=None,
    ):
        """Creates empty spectrum dataset.

        Empty containers are created with the correct geometry.
        counts, background and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~gammapy.maps.MapAxis`
            counts energy axis. Its name must be "energy".
        e_true : `~gammapy.maps.MapAxis`
            effective area table energy axis. Its name must be "energy-true".
            If not set use reco energy values. Default : None
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            reference time of the dataset, Default is "2000-01-01"
        meta_table : `~astropy.table.Table`
            Table listing informations on observations used to create the dataset.
            One line per observation for stacked datasets.
        """
        if e_true is None:
            e_true = e_reco.copy(name="energy_true")

        if region is None:
            region = "icrs;circle(0, 0, 1)"

        name = make_name(name)
        counts = RegionNDMap.create(region=region, axes=[e_reco])
        background = RegionNDMap.create(region=region, axes=[e_reco])
        models = Models([
            BackgroundModel(background,
                            name=name + "-bkg",
                            datasets_names=[name])
        ])
        exposure = RegionNDMap.create(region=region,
                                      axes=[e_true],
                                      unit="cm2 s")
        edisp = EDispKernelMap.from_diagonal_response(e_reco,
                                                      e_true,
                                                      geom=counts.geom)
        mask_safe = RegionNDMap.from_geom(counts.geom, dtype="bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)

        return SpectrumDataset(
            counts=counts,
            exposure=exposure,
            edisp=edisp,
            mask_safe=mask_safe,
            gti=gti,
            models=models,
            meta_table=meta_table,
            name=name,
        )
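A minimal usage sketch for this factory method (assuming standard gammapy imports; the energy binning below is arbitrary):

# Sketch: create an empty SpectrumDataset with a 5-bin reco energy axis.
from gammapy.maps import MapAxis

e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", 5, name="energy")
dataset = SpectrumDataset.create(e_reco=e_reco, name="empty-spectrum")
print(dataset)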
Example #4
def test_map_fit(sky_model, geom, geom_etrue):
    dataset_1 = get_map_dataset(geom, geom_etrue, name="test-1")
    dataset_2 = get_map_dataset(geom, geom_etrue, name="test-2")
    datasets = Datasets([dataset_1, dataset_2])

    models = Models(datasets.models)
    models.insert(0, sky_model)

    models["test-1-bkg"].spectral_model.norm.value = 0.5
    models["test-model"].spatial_model.sigma.frozen = True

    datasets.models = models
    dataset_2.counts = dataset_2.npred()
    dataset_1.counts = dataset_1.npred()

    models["test-1-bkg"].spectral_model.norm.value = 0.49
    models["test-2-bkg"].spectral_model.norm.value = 0.99

    fit = Fit(datasets)
    result = fit.run()

    assert result.success
    assert "minuit" in repr(result)

    npred = dataset_1.npred().data.sum()
    assert_allclose(npred, 7525.790688, rtol=1e-3)
    assert_allclose(result.total_stat, 21659.2139, rtol=1e-3)

    pars = result.parameters
    assert_allclose(pars["lon_0"].value, 0.2, rtol=1e-2)
    assert_allclose(pars["lon_0"].error, 0.002244, rtol=1e-2)

    assert_allclose(pars["index"].value, 3, rtol=1e-2)
    assert_allclose(pars["index"].error, 0.024277, rtol=1e-2)

    assert_allclose(pars["amplitude"].value, 1e-11, rtol=1e-2)
    assert_allclose(pars["amplitude"].error, 4.216154e-13, rtol=1e-2)

    # background norm 1
    assert_allclose(pars[8].value, 0.5, rtol=1e-2)
    assert_allclose(pars[8].error, 0.015811, rtol=1e-2)

    # background norm 2
    assert_allclose(pars[11].value, 1, rtol=1e-2)
    assert_allclose(pars[11].error, 0.02147, rtol=1e-2)

    # test mask_safe evaluation
    mask_safe = geom.energy_mask(energy_min=1 * u.TeV)
    dataset_1.mask_safe = Map.from_geom(geom, data=mask_safe)
    dataset_2.mask_safe = Map.from_geom(geom, data=mask_safe)

    stat = fit.datasets.stat_sum()
    assert_allclose(stat, 14823.579908, rtol=1e-5)

    region = sky_model.spatial_model.to_region()

    initial_counts = dataset_1.counts.copy()
    with mpl_plot_check():
        dataset_1.plot_residuals(kwargs_spectral=dict(region=region))

    # check dataset has not changed
    assert_allclose(initial_counts.data, dataset_1.counts.data)

    # test model evaluation outside image
    dataset_1.models[0].spatial_model.lon_0.value = 150
    dataset_1.npred()
    assert not dataset_1._evaluators["test-model"].contributes
Example #5
        lat_0=lat_0 * u.deg,
        eta=eta,
        r_0=r_0 * u.deg,
        e=e,
        phi=phi,
        frame="galactic",
    )
    meval = model.evaluate_geom(geom)
    Map.from_geom(geom=geom, data=meval.value, unit=meval.unit).plot(ax=ax)
    pixreg = model.to_region().to_pixel(geom.wcs)
    pixreg.plot(ax=ax, edgecolor="g", facecolor="none", lw=2)
    ax.set_title(tag)
    ax.set_xticks([])
    ax.set_yticks([])
plt.tight_layout()

# %%
# YAML representation
# -------------------
# Here is an example YAML file using the model:

pwl = PowerLawSpectralModel()
gengauss = GeneralizedGaussianSpatialModel()

model = SkyModel(spectral_model=pwl,
                 spatial_model=gengauss,
                 name="pwl-gengauss-model")
models = Models([model])

print(models.to_yaml())
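The YAML string can also be turned back into a Models object; a short round-trip sketch:

# Sketch: round-trip the models through their YAML representation.
yaml_str = models.to_yaml()
models_copy = Models.from_yaml(yaml_str)
print(models_copy["pwl-gengauss-model"])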
Example #6
# Define the 3D sky model
DarkMatterAnnihilationSpectralModel.THERMAL_RELIC_CROSS_SECTION = xsection
flux_model = DarkMatterAnnihilationSpectralModel(mass=mDM,
                                                 channel=channel,
                                                 jfactor=JFAC)

spatial_model = TemplateSpatialModel.read(jfactor_filename)

sky_model = SkyModel(spatial_model=spatial_model,
                     spectral_model=flux_model,
                     name="model-simu")

bkg_model = FoVBackgroundModel(dataset_name="dataset-simu")

models = Models([sky_model, bkg_model])

# Declare observation values

pointing = src_pos
livetime = 100 * u.hour
offset = 2.0 * u.deg
#offset = 0.5 * u.deg

# Create an in-memory observation
obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)

# Start the simulations and get the limits

#masses = [70, 200, 500, 800, 1000, 5000, 8000, 10000, 30000, 50000, 60000, 100000]*u.GeV
masses = [200, 1000, 50000] * u.GeV
Example #7
def make_datasets_example():
    # Define which data to use and print some information

    energy_axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 4),
                                     unit="TeV",
                                     name="energy",
                                     interp="log")
    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = Models()

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(lon_0=lon * u.deg,
                                          lat_0=lat * u.deg,
                                          frame="galactic")
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(spatial_model=spatial_model,
                              spectral_model=spectral_model,
                              name=names[idx])
        models.append(model_ecpl)

    models["gc"].spectral_model.reference = models[
        "g09"].spectral_model.reference

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_model = SkyDiffuseCube.read(
        "$GAMMAPY_DATA/fermi_3fhl/gll_iem_v06_cutout.fits")

    maker = MapDatasetMaker()
    datasets = Datasets()

    observations = data_store.get_observations(obs_ids)

    for idx, geom in enumerate(geoms):
        stacked = MapDataset.create(geom=geom, name=names[idx])

        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        bkg = stacked.models.pop(0)
        stacked.models = [models[idx], diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write("$GAMMAPY_DATA/tests/models",
                   prefix="gc_example",
                   overwrite=True,
                   write_covariance=False)
Example #8
def __init__(self, name="test"):
    self._name = name
    self._models = Models([MyModel(x=1.99, y=2.99e3, z=3.99e-2)])
    self.data_shape = (1,)
    self.meta_table = Table()
Example #9
    def run(self, datasets):
        """Estimate flux for a given energy range.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset`
            Spectrum datasets.

        Returns
        -------
        result : dict
            Dict with results for the flux point.
        """
        datasets = Datasets(datasets)

        datasets_sliced = datasets.slice_by_energy(energy_min=self.energy_min,
                                                   energy_max=self.energy_max)

        # TODO: simplify model book-keeping!!
        models = Models()

        for model in datasets.models:
            if "sky-model" in model.tag:
                models.append(model)
            elif "fov-bkg" in model.tag:
                bkg_model = model.copy(
                    dataset_name=model.datasets_names[0] + "-sliced")
                bkg_model.reset_to_default()
                models.append(bkg_model)

        if len(datasets_sliced) > 0:
            # TODO: this relies on the energy binning of the first dataset
            energy_axis = datasets_sliced[0].counts.geom.axes["energy"]
            energy_min = energy_axis.edges.min()
            energy_max = energy_axis.edges.max()
        else:
            energy_min, energy_max = self.energy_min, self.energy_max

        contributions = []

        for dataset in datasets_sliced:
            if dataset.mask is not None:
                value = dataset.mask.data.any()
            else:
                value = True
            contributions.append(value)

        model = self.get_scale_model(models)

        with np.errstate(invalid="ignore", divide="ignore"):
            result = self.get_reference_flux_values(model.model, energy_min,
                                                    energy_max)

        if len(datasets) == 0 or not np.any(contributions):
            result.update(self.nan_result)
        else:
            models[self.source].spectral_model = model

            datasets_sliced.models = models
            result.update(
                self._parameter_estimator.run(datasets_sliced, model.norm))
            result["sqrt_ts"] = self.get_sqrt_ts(result["ts"], result["norm"])

        return result
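A usage sketch for this method; the estimator class name and its constructor arguments are hypothetical, inferred from the self.source, self.energy_min and self.energy_max attributes used above:

# Sketch: estimate the flux of one source in a single energy range.
# FluxEstimator is assumed here; check the API of your gammapy version.
estimator = FluxEstimator(source="source-1",
                          energy_min=1 * u.TeV,
                          energy_max=10 * u.TeV)
result = estimator.run(datasets)
print(result["norm"], result["sqrt_ts"])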
Example #10
def run_analyses(targets):
    log.info("Run small source extension check.")

    info = {}

    if targets == "all-targets":
        targets = list(AVAILABLE_TARGETS)
    else:
        targets = [targets]

    for target in targets:
        t = time.time()

        config = AnalysisConfig.read(f"configs/config_{target}.yaml")
        analysis = Analysis(config)
        analysis.get_observations()
        info["data_preparation"] = time.time() - t

        t = time.time()

        analysis.get_datasets()
        info["data_reduction"] = time.time() - t

        models = Models.read(f"models/model_{target}.yaml")

        point_models = Models(define_model_pointlike(models[0]))
        analysis.set_models(point_models)

        t = time.time()
        analysis.run_fit()

        info["point_model_fitting"] = time.time() - t
        log.info(f"\n{point_models.to_parameters_table()}")

        log.info("Fitting extended gaussian source.")

        analysis.datasets.models = []
        analysis.set_models(models)
        t = time.time()

        analysis.run_fit()

        info["gauss_model_fitting"] = time.time() - t

        log.info(analysis.fit_result)

        log.info(f"\n{models.to_parameters_table()}")

        log.info("Extract size error, UL and stat profile.")

        t = time.time()
        analysis.models[0].spatial_model.lon_0.frozen = True
        analysis.models[0].spatial_model.lat_0.frozen = True
        analysis.models[0].spectral_model.index.frozen = True

        size_est = ExtensionEstimator(
            source=models[0].name,
            energy_edges=[0.2, 10.0] * u.TeV,
            selection_optional=["errn-errp", "ul", "scan"],
            size_min="0.08 deg",
            size_max="0.12 deg",
            size_n_values=20,
            reoptimize=True)
        res = size_est.run(analysis.datasets)

        info["estimator"] = time.time() - t
        t = time.time()

        log.info(res)
        plot_profile(res[0], target)

        Path(f"bench_{target}.yaml").write_text(
            yaml.dump(info, sort_keys=False, indent=4))
        analysis.models.to_parameters_table().write(
            f"results/{target}_results.ecsv", overwrite=True)
Example #11
    def run_region(self, kr, lon, lat, radius):
        #    TODO: for now we have to read/create the all-sky maps in each job
        #    because we can't pickle <functools._lru_cache_wrapper object>;
        #    move this back to __init__ when fixed

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
        )
        exposure_hpx.unit = "cm2 s"

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyDiffuseCube(
            iem_fermi_extra, norm=1.1, tilt=0.03, name="iem_extrapolated"
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord(
            {"skycoord": self.events.radec, "energy": self.events.energy}
        )

        axis = MapAxis.from_nodes(
            counts.geom.axes[0].center, name="energy_true", unit="GeV", interp="log"
        )
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(
            self.psf, geom, max_radius=self.psf_margin * u.deg
        )

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EDispKernel.from_diagonal_response(e_true=e_true, e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
        )
        mask_fermi = WcsNDMap(counts.geom, mask)

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(
            background_total, name="bkg_iem+iso", datasets_names=[dataset_name]
        )
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models([background_model] + FHL3_roi)

        # Dataset
        dataset = MapDataset(
            models=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            mask_fit=mask_fermi,
            name=dataset_name,
        )
        cat_stat = dataset.stat_sum()

        datasets = Datasets([dataset])
        fit = Fit(datasets)
        results = fit.run(**self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.write(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            for model in FHL3_roi:
                if (
                    self.FHL3[model.name].data["ROI_num"] == kr
                    and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
                ):
                    flux_points = FluxPointsEstimator(
                        e_edges=self.El_flux, source=model.name, n_sigma_ul=2,
                    ).run(datasets=datasets)
                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #12
# NOTE: the snippet starts mid-call; the lon_0/lat_0 values below are assumed.
spatial_model = GaussianSpatialModel(lon_0="0 deg",
                                     lat_0="0 deg",
                                     sigma="0.2 deg",
                                     frame="galactic")

spectral_model = ExpCutoffPowerLawSpectralModel(
    index=2,
    amplitude="3e-12 cm-2 s-1 TeV-1",
    reference="1 TeV",
    lambda_="0.05 TeV-1",
)

sky_model_simu = SkyModel(spatial_model=spatial_model,
                          spectral_model=spectral_model,
                          name="source")

bkg_model = FoVBackgroundModel(dataset_name="dataset-mcmc")
models = Models([sky_model_simu, bkg_model])
print(models)

dataset.models = models
dataset.fake()

dataset.counts.sum_over_axes().plot(add_cbar=True)

# If you want to fit the data for comparison with MCMC later
# fit = Fit(dataset)
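A short sketch of the optional comparison fit mentioned above, following the Fit(datasets) call style used in Example #4:

# Sketch: fit the faked counts with the same models (optional).
fit = Fit([dataset])
result = fit.run()
print(result)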
Example #13
    def run_region(self, kr, lon, lat, radius):
        #    TODO: for now we have to read/create the all-sky maps in each job
        #    because we can't pickle <functools._lru_cache_wrapper object>;
        #    move this back to __init__ when fixed

        log.info(f"ROI {kr}: loading data")

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # psf
        psf_map = PSFMap.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz",
            format="gtpsf")
        # reduce size of the PSF
        axis = psf_map.psf_map.geom.axes["rad"].center.to_value(u.deg)
        indmax = np.argmin(np.abs(self.psf_margin - axis))
        psf_map = psf_map.slice_by_idx(slices={"rad": slice(0, indmax)})

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyModel(
            PowerLawNormSpectralModel(norm=1.1, tilt=0.03),
            TemplateSpatialModel(iem_fermi_extra, normalize=False),
            name="iem_extrapolated",
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy_true",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # Energy Dispersion
        edisp = EDispKernelMap.from_diagonal_response(
            energy_axis_true=axis, energy_axis=self.energy_axis)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max(
        ) > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)
        mask_safe_fermi = WcsNDMap(counts.geom,
                                   np.ones(mask.shape, dtype=bool))

        log.info(f"ROI {kr}: pre-computing diffuse")

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem,
            exposure=exposure,
            psf=psf_map.get_psf_kernel(geom),
            edisp=edisp.get_edisp_kernel(),
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp.get_edisp_kernel())
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso

        # Dataset
        dataset = MapDataset(
            counts=counts,
            exposure=exposure,
            background=background_total,
            psf=psf_map,
            edisp=edisp,
            mask_fit=mask_fermi,
            mask_safe=mask_safe_fermi,
            name=dataset_name,
        )

        background_model = FoVBackgroundModel(dataset_name=dataset_name)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models(FHL3_roi + [background_model])
        dataset.models = model_total

        cat_stat = dataset.stat_sum()
        datasets = Datasets([dataset])

        log.info(f"ROI {kr}: running fit")
        fit = Fit(**self.fit_opts)
        results = fit.run(datasets=datasets)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            filedata = Path(self.resdir) / f"3FHL_ROI_num{kr}_datasets.yaml"
            filemodel = Path(self.resdir) / f"3FHL_ROI_num{kr}_models.yaml"
            datasets.write(filedata, filemodel, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            log.info(f"ROI {kr}: running flux points")
            for model in FHL3_roi:
            if (self.FHL3[model.name].data["ROI_num"] == kr and
                    self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut):
                    print(model.name)
                    flux_points = FluxPointsEstimator(
                        energy_edges=self.El_flux,
                        source=model.name,
                        n_sigma_ul=2,
                        selection_optional=["ul"],
                    ).run(datasets=datasets)
                    flux_points.meta["sqrt_ts_threshold_ul"] = 1

                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #14
    def _map_making(self):
        """Make maps and datasets for 3d analysis."""
        datasets_settings = self.config.datasets
        log.info("Creating geometry.")
        geom = self._create_geometry()
        geom_settings = datasets_settings.geom
        geom_irf = dict(energy_axis_true=None, binsz_irf=None)
        if geom_settings.axes.energy_true.min is not None:
            geom_irf["energy_axis_true"] = self._make_energy_axis(
                geom_settings.axes.energy_true, name="energy_true"
            )
        geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value
        offset_max = geom_settings.selection.offset_max
        log.info("Creating datasets.")

        maker = MapDatasetMaker(selection=datasets_settings.map_selection)

        safe_mask_selection = datasets_settings.safe_mask.methods
        safe_mask_settings = datasets_settings.safe_mask.parameters
        maker_safe_mask = SafeMaskMaker(
            methods=safe_mask_selection, **safe_mask_settings
        )

        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            exclusion_region = Map.read(datasets_settings.background.exclusion)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker_config.update(datasets_settings.background.parameters)

        bkg_method = datasets_settings.background.method
        if bkg_method == "fov_background":
            log.debug(f"Creating FoVBackgroundMaker with arguments {bkg_maker_config}")
            bkg_maker = FoVBackgroundMaker(**bkg_maker_config)
        elif bkg_method == "ring":
            bkg_maker = RingBackgroundMaker(**bkg_maker_config)
            log.debug(f"Creating RingBackgroundMaker with arguments {bkg_maker_config}")
            if datasets_settings.geom.axes.energy.nbins > 1:
                raise ValueError(
                    "You need to define a single-bin energy geometry for your dataset."
                )
        else:
            bkg_maker = None
            log.warning(
                "No background maker set for 3d analysis. Check configuration."
            )

        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)

        if datasets_settings.stack:
            for obs in self.observations:
                log.info(f"Processing observation {obs.obs_id}")
                cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
                dataset = maker.run(cutout, obs)
                dataset = maker_safe_mask.run(dataset, obs)
                if bkg_maker is not None:
                    dataset = bkg_maker.run(dataset)
                if bkg_method == "ring":
                    dataset.models = Models([BackgroundModel(dataset.background)])
                log.debug(dataset)
                stacked.stack(dataset)
            datasets = [stacked]
        else:
            datasets = []
            for obs in self.observations:
                log.info(f"Processing observation {obs.obs_id}")
                cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
                dataset = maker.run(cutout, obs)
                dataset = maker_safe_mask.run(dataset, obs)
                if bkg_maker is not None:
                    dataset = bkg_maker.run(dataset)
                log.debug(dataset)
                datasets.append(dataset)
        self.datasets = Datasets(datasets)
Example #15
def models(self, models):
    if models is None:
        self._models = None
    else:
        self._models = Models(models)
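The setter accepts None or anything the Models constructor can wrap. A tiny sketch (the dataset and sky_model names are placeholders):

# Sketch: both assignments go through the setter above.
dataset.models = [sky_model]  # the list is wrapped into a Models container
dataset.models = None         # clears the models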
Example #16
def sky_models_2(sky_model):
    sky_model_4 = sky_model.copy(name="source-4")
    sky_model_5 = sky_model.copy(name="source-5")
    return Models([sky_model_4, sky_model_5])
Example #17
def make_example_2():
    spatial = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="1 deg")
    model = SkyModel(PowerLawSpectralModel(), spatial)
    models = Models([model])
    models.write(DATA_PATH / "example2.yaml")
Example #18
def __init__(self, name="test"):
    self.name = name
    self.models = Models([MyModel()])
Example #19
def to_models(self, **kwargs):
    """Create a `Models` object from the catalogue."""
    return Models([_.sky_model(**kwargs) for _ in self])
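A usage sketch, assuming a gammapy source catalog such as SourceCatalog3FHL (reads from $GAMMAPY_DATA):

# Sketch: build a Models container from every source in the 3FHL catalog.
from gammapy.catalog import SourceCatalog3FHL

catalog = SourceCatalog3FHL()
models = catalog.to_models()
print(len(models))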
Example #20
def sky_models(sky_model):
    sky_model_2 = sky_model.copy(name="source-2")
    sky_model_3 = sky_model.copy(name="source-3")
    return Models([sky_model_2, sky_model_3])
Example #21
def make_example_2():
    spatial = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="1 deg")
    model = SkyModel(PowerLawSpectralModel(), spatial, name="example_2")
    models = Models([model])
    models.write(DATA_PATH / "example2.yaml", overwrite=True, write_covariance=False)
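Reading the file back, a sketch using Models.read, the file-based counterpart of to_yaml/from_yaml:

# Sketch: read the serialized models back from disk.
models = Models.read(DATA_PATH / "example2.yaml")
print(models["example_2"])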