Example #1
    def set_model(self, model=None, filename=""):
        """Read the model from dict or filename and attach it to datasets.

        Parameters
        ----------
        model : dict or string
            Dictionary or string in YAML format with the serialized model.
        filename : string
            Name of the model YAML file describing the model.
        """
        if not self._validate_set_model():
            return False
        log.info(f"Reading model.")
        if isinstance(model, str):
            model = yaml.safe_load(model)
        if model:
            self.model = SkyModels(dict_to_models(model))
        elif filename:
            filepath = make_path(filename)
            self.model = SkyModels.from_yaml(filepath)
        else:
            return False
        # TODO: Deal with multiple components
        for dataset in self.datasets.datasets:
            if isinstance(dataset, MapDataset):
                dataset.model = self.model
            else:
                if len(self.model.skymodels) > 1:
                    raise ValueError(
                        "Can only fit a single spectral model at one time."
                    )
                dataset.model = self.model.skymodels[0].spectral_model
        log.info(self.model)
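
A minimal usage sketch for the method above (the `analysis` object, file name, YAML string, and dict are placeholders, not part of the snippet):

analysis.set_model(filename="model.yaml")  # read the model from a YAML file
analysis.set_model(model=yaml_string)      # or pass a YAML string ...
analysis.set_model(model=model_dict)       # ... or an already-parsed dict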
Example #2
    def from_dict(cls, data, components, models):
        """Create flux point dataset from dict.

        Parameters
        ----------
        data : dict
            Dict containing data to create dataset from.
        components : list of dict
            Not used.
        models : list of `SkyModel`
            List of model components.

        Returns
        -------
        dataset : `SpectrumDatasetOnOff`
            Spectrum dataset on off.

        """
        model = SkyModels(
            [model for model in models if model.name in data["models"]])

        # TODO: this assumes the model is a SkyModel,
        # so it will only work once that change is in effect
        filename = data["filename"]

        dataset = cls.from_ogip_files(filename=filename)
        dataset.mask_fit = None
        dataset.model = model
        return dataset
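
A hedged round-trip sketch: the `data` dict mirrors the keys read above ("filename" and "models"); the OGIP file name and the `all_models` list are assumptions for illustration.

data = {"filename": "pha_obs1.fits", "models": ["source-1"]}
dataset = SpectrumDatasetOnOff.from_dict(data, components=None, models=all_models)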
Example #3
    def from_dict(cls, data, components, models):
        """Create flux point dataset from dict.

        Parameters
        ----------
        data : dict
            Dict containing data to create dataset from.
        components : list of dict
            Not used.
        models : list of `SkyModel`
            List of model components.

        Returns
        -------
        dataset : `FluxPointsDataset`
            Flux point dataset.

        """
        models_list = [
            model for model in models if model.name in data["models"]
        ]
        # TODO: this assumes the model is a SkyModel,
        # so it will only work once that change is in effect
        table = Table.read(data["filename"])
        mask_fit = table["mask_fit"].data.astype("bool")
        mask_safe = table["mask_safe"].data.astype("bool")
        table.remove_columns(["mask_fit", "mask_safe"])
        return cls(
            model=SkyModels(models_list),
            name=data["name"],
            data=FluxPoints(table),
            mask_fit=mask_fit,
            mask_safe=mask_safe,
            likelihood=data["likelihood"],
        )
Example #4
    def sky_model(self, which="best"):
        """Source sky model.

        Parameters
        ----------
        which : {'best', 'pl', 'ecpl'}
            Which spectral model

        Returns
        -------
        sky_model : `~gammapy.modeling.models.SkyModel`
            Sky model of the catalog object.
        """
        if self.spatial_model_type in {"2-gaussian", "3-gaussian"}:
            models = []

            spectral_model = self.spectral_model(which=which)
            for component in self.components:
                weight = component.data["Flux_Map"] / self.data["Flux_Map"]
                spectral_model_comp = spectral_model.copy()
                # weight amplitude of the component
                spectral_model_comp.parameters["amplitude"].value *= weight
                models.append(
                    SkyModel(
                        component.spatial_model(),
                        spectral_model_comp,
                        name=component.name,
                    ))

            return SkyModels(models)
        else:
            return SkyModel(self.spatial_model(),
                            self.spectral_model(which=which),
                            name=self.name)
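
This method appears to come from the HGPS catalog objects (the "2-gaussian"/"3-gaussian" spatial types suggest so); a usage sketch, where the source name is an example and loading the catalog requires $GAMMAPY_DATA:

from gammapy.catalog import SourceCatalogHGPS

catalog = SourceCatalogHGPS()
source = catalog["HESS J1825-137"]
model = source.sky_model(which="pl")  # force the power-law spectral variant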
Example #5
    def __init__(
        self,
        model=None,
        counts=None,
        exposure=None,
        mask_fit=None,
        psf=None,
        edisp=None,
        background_model=None,
        name="",
        evaluation_mode="local",
        mask_safe=None,
        gti=None,
    ):
        if model is None:
            model = SkyModels([])
        if mask_fit is not None and mask_fit.data.dtype != np.dtype("bool"):
            raise ValueError("mask_fit data must have dtype bool")
        if mask_safe is not None and mask_safe.data.dtype != np.dtype("bool"):
            raise ValueError("mask_safe data must have dtype bool")

        self.evaluation_mode = evaluation_mode
        self.counts = counts
        self.exposure = exposure
        self.mask_fit = mask_fit
        self.psf = psf
        self.edisp = edisp
        self.background_model = background_model
        self.model = model
        self.name = name
        self.mask_safe = mask_safe
        self.gti = gti
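
The boolean-mask validation above can be exercised directly; a sketch using gammapy's Map factory (the geometry parameters are arbitrary):

from gammapy.maps import Map

mask = Map.create(width=2, binsz=0.1, dtype="bool")
dataset = MapDataset(mask_fit=mask)        # accepted: boolean dtype
bad_mask = Map.create(width=2, binsz=0.1)  # default dtype is float
# MapDataset(mask_fit=bad_mask)            # would raise ValueError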
Example #6
File: fit.py Project: mstrzys/gammapy
    @model.setter
    def model(self, model):
        if isinstance(model, SkyModel):
            model = SkyModels([model])

        self._model = model

        if model is not None:
            evaluators = []

            for component in model:
                evaluator = MapEvaluator(component,
                                         evaluation_mode=self.evaluation_mode)
                evaluator.update(self.exposure, self.psf, self.edisp)
                evaluators.append(evaluator)

            self._evaluators = evaluators
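
Given this setter, assigning a bare SkyModel and assigning a SkyModels container are equivalent; a sketch (the dataset and model objects are assumed to exist):

dataset.model = sky_model                      # wrapped as SkyModels([sky_model])
dataset.model = SkyModels([model_a, model_b])  # one MapEvaluator per component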
Example #7
    def to_sky_models(self):
        """Convert to a `~gammapy.modeling.models.SkyModels`.

        TODO: add an option whether to skip or raise on missing models or data.
        """
        source_list = []

        for source_idx in range(len(self.table)):
            source = self[source_idx]
            try:
                source_list.append(source.sky_model)
            except NoDataAvailableError:
                log.warning(
                    f"Skipping source {source.name} (missing data in gamma-cat)"
                )
                continue

        return SkyModels(source_list)
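
A usage sketch, assuming this method sits on gammapy's SourceCatalogGammaCat (the log message suggests so; loading the catalog requires $GAMMAPY_DATA):

from gammapy.catalog import SourceCatalogGammaCat

catalog = SourceCatalogGammaCat()
models = catalog.to_sky_models()  # sources with missing data are skipped with a warning
print(len(models.skymodels))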
Example #8
    def from_dict(cls, data, components, models):
        """Create from dicts and models list generated from YAML serialization."""
        dataset = cls.read(data["filename"], name=data["name"])
        bkg_name = data["background"]
        model_names = data["models"]
        for component in components["components"]:
            if component["type"] == "BackgroundModel":
                if component["name"] == bkg_name:
                    if "filename" not in component:
                        component["map"] = dataset.background_model.map
                    background_model = BackgroundModel.from_dict(component)
                    dataset.background_model = background_model

        models_list = [model for model in models if model.name in model_names]
        dataset.model = SkyModels(models_list)
        if"likelihood" in data:
            dataset.likelihood_type = data["likelihood"]
        return dataset
Example #9
def test_flux_point_dataset_serialization(tmp_path):
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    data.table["e_ref"] = data.e_ref.to("TeV")
    # TODO: remove this duplicate definition once the model is redefined as a SkyModel
    spatial_model = ConstantSpatialModel()
    spectral_model = PowerLawSpectralModel(index=2.3,
                                           amplitude="2e-13 cm-2 s-1 TeV-1",
                                           reference="1 TeV")
    model = SkyModel(spatial_model, spectral_model, name="test_model")
    dataset = FluxPointsDataset(SkyModels([model]), data, name="test_dataset")

    Datasets([dataset]).to_yaml(tmp_path, prefix="tmp")
    datasets = Datasets.from_yaml(tmp_path / "tmp_datasets.yaml",
                                  tmp_path / "tmp_models.yaml")
    new_dataset = datasets[0]
    assert_allclose(new_dataset.data.table["dnde"], dataset.data.table["dnde"],
                    1e-4)
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
Example #10
def sky_models_2(sky_model):
    sky_model_4 = sky_model.copy(name="source-4")
    sky_model_5 = sky_model.copy(name="source-5")
    return SkyModels([sky_model_4, sky_model_5])
Example #11
def sky_models(sky_model):
    sky_model_2 = sky_model.copy(name="source-2")
    sky_model_3 = sky_model.copy(name="source-3")
    return SkyModels([sky_model_2, sky_model_3])
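
Examples #10 and #11 appear to be pytest fixtures (the @pytest.fixture decorators were likely stripped by the snippet extraction); a test would request them by argument name, for instance:

def test_sky_models(sky_models):
    assert len(sky_models.skymodels) == 2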
Example #12
File: make.py Project: fjhzwl/gammapy
def make_example_2():
    spatial = GaussianSpatialModel("0 deg", "0 deg", "1 deg")
    model = SkyModel(spatial, PowerLawSpectralModel())
    models = SkyModels([model])
    models.to_yaml(DATA_PATH / "example2.yaml")
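
The file written above can be read straight back with the same class; a round-trip sketch:

models = SkyModels.from_yaml(DATA_PATH / "example2.yaml")
print(models)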
Example #13
    def run_region(self, kr, lon, lat, radius):
        # TODO: for now we have to read/create the all-sky maps in each job,
        # because a <functools._lru_cache_wrapper> object cannot be pickled;
        # move this back to __init__ when that is fixed

        # exposure
        exposure_hpx = Map.read(
            self.datadir + "/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # background iem
        infile = self.datadir + "/catalogs/fermi/gll_iem_v06.fits.gz"
        outfile = self.resdir + "/gll_iem_v06_extra.fits"
        model_iem = extrapolate_iem(infile, outfile, self.logEc_extra)

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            coordsys="GAL",
            binsz=self.dlb,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = counts.geom.get_coord()

        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(self.psf, counts.geom,
                                              max_radius=self.psf_margin * u.deg)

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EnergyDispersion.from_diagonal_response(e_true=e_true,
                                                        e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max(
        ) > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)

        # IEM
        eval_iem = MapEvaluator(model=model_iem,
                                exposure=exposure,
                                psf=psf_kernel,
                                edisp=edisp)
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(background_total)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = SkyModels(FHL3_roi)

        # Dataset
        dataset = MapDataset(
            model=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            background_model=background_model,
            mask_fit=mask_fermi,
            name="3FHL_ROI_num" + str(kr),
        )
        cat_stat = dataset.stat_sum()

        datasets = Datasets([dataset])
        fit = Fit(datasets)
        results = fit.run(optimize_opts=self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.to_yaml(path=Path(self.resdir),
                             prefix=dataset.name,
                             overwrite=True)
            np.save(
                self.resdir + "/3FHL_ROI_num" + str(kr) + "_covariance.npy",
                results.parameters.covariance,
            )
            np.savez(
                self.resdir + "/3FHL_ROI_num" + str(kr) + "_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            for model in FHL3_roi:
                if (self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >=
                        self.sig_cut):
                    flux_points = FluxPointsEstimator(
                        datasets=datasets,
                        e_edges=self.El_flux,
                        source=model.name,
                        sigma_ul=2.0,
                    ).run()
                    filename = self.resdir + "/" + model.name + "_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example #14
    reference=1 * u.TeV,
)

sky_model = SkyModel(spatial_model=spatial_model,
                     spectral_model=spectral_model,
                     name="crab")
# Now we freeze the parameters that we don't want the light curve estimator to change
sky_model.parameters["index"].frozen = True
sky_model.parameters["lon_0"].frozen = True
sky_model.parameters["lat_0"].frozen = True

# We now assign the model to be fitted to each dataset

# In[ ]:

models = SkyModels([sky_model])
analysis_3d.set_models(models)

# ## Light Curve estimation: by observation
#
# We can now create the light curve estimator.
#
# We pass it the list of datasets and the name of the model component for which we want to build the light curve.
# We can optionally ask for re-optimization of the other parameters during the fit; most of the time this is used to fit the background normalization in each time bin.
#
# If we don't set any time interval, the `~gammapy.time.LightCurveEstimator` determines the flux of each dataset and places it at the corresponding time in the light curve.
# Here one dataset corresponds to one observing run.

# In[ ]:

lc_maker_3d = LightCurveEstimator(analysis_3d.datasets,
Example #15
# ## Fit
#
# Finally, the big finale: let's do a 3D map fit for the source at the Galactic center, to measure its position and spectrum. We keep the background normalization free.

# In[ ]:

spatial_model = PointSpatialModel(lon_0="0 deg",
                                  lat_0="0 deg",
                                  frame="galactic")
spectral_model = PowerLawSpectralModel(index=2.5,
                                       amplitude="1e-11 cm-2 s-1 TeV-1",
                                       reference="100 GeV")

source = SkyModel(spectral_model=spectral_model, spatial_model=spatial_model)

models = SkyModels([source, diffuse_gal, diffuse_iso])

dataset = MapDataset(
    models=models,
    counts=counts,
    exposure=exposure,
    psf=psf_kernel,
    edisp=edisp,
)

# In[ ]:

# %%time
fit = Fit([dataset])
result = fit.run()

# In[ ]:
Example #16
flux.sum_over_axes().smooth("0.1 deg").plot(stretch="sqrt", add_cbar=True)

# ## Fit
#
# Finally, the big finale: let's do a 3D map fit for the source at the Galactic center, to measure its position and spectrum. We keep the background normalization free.

# In[ ]:

model = SkyModel(
    PointSpatialModel("0 deg", "0 deg", frame="galactic"),
    PowerLawSpectralModel(index=2.5,
                          amplitude="1e-11 cm-2 s-1 TeV-1",
                          reference="100 GeV"),
)

model_total = SkyModels([model, model_diffuse, model_iso])

dataset = MapDataset(model=model_total,
                     counts=counts,
                     exposure=exposure,
                     psf=psf_kernel)
fit = Fit(dataset)
result = fit.run()

# In[ ]:

print(result)

# In[ ]:

dataset.parameters.to_table()
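
The fitted model set can be persisted with the same YAML serialization shown in Example #12 (the output file name is a placeholder):

model_total.to_yaml("fitted_models.yaml")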