Example #1
    def to_spectrum_dataset(self, name=None):
        """ Convert a SpectrumDatasetOnOff to a SpectrumDataset
        The background model template is taken as alpha*counts_off

        Parameters:
        -----------
            name: str
                Name of the new dataset

        Returns:
        -------
            dataset: `SpectrumDataset`
                SpectrumDatset with cash statistics
        """

        name = make_name(name)

        background_model = BackgroundModel(self.counts_off * self.alpha)
        background_model.datasets_names = [name]
        return SpectrumDataset(
            counts=self.counts,
            exposure=self.exposure,
            edisp=self.edisp,
            name=name,
            gti=self.gti,
            mask_fit=self.mask_fit,
            mask_safe=self.mask_safe,
            models=background_model,
            meta_table=self.meta_table,
        )
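
A minimal usage sketch of the conversion above; the `dataset_onoff` instance and the dataset name are illustrative assumptions, not part of the original code:

    # dataset_onoff is assumed to be an existing SpectrumDatasetOnOff
    # the result carries alpha * counts_off as its background template (Cash statistics)
    dataset = dataset_onoff.to_spectrum_dataset(name="cash-dataset")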
Example #2
    def create(
        cls,
        e_reco,
        e_true=None,
        region=None,
        reference_time="2000-01-01",
        name=None,
        meta_table=None,
    ):
        """Creates empty spectrum dataset.

        Empty containers are created with the correct geometry.
        counts, background and aeff are zero and edisp is diagonal.

        The safe_mask is set to False in every bin.

        Parameters
        ----------
        e_reco : `~gammapy.maps.MapAxis`
            counts energy axis. Its name must be "energy".
        e_true : `~gammapy.maps.MapAxis`
            effective area table energy axis. Its name must be "energy-true".
            If not set use reco energy values. Default : None
        region : `~regions.SkyRegion`
            Region to define the dataset for.
        reference_time : `~astropy.time.Time`
            reference time of the dataset, Default is "2000-01-01"
        meta_table : `~astropy.table.Table`
            Table listing informations on observations used to create the dataset.
            One line per observation for stacked datasets.
        """
        if e_true is None:
            e_true = e_reco.copy(name="energy_true")

        if region is None:
            region = "icrs;circle(0, 0, 1)"

        name = make_name(name)
        counts = RegionNDMap.create(region=region, axes=[e_reco])
        background = RegionNDMap.create(region=region, axes=[e_reco])
        exposure = RegionNDMap.create(region=region,
                                      axes=[e_true],
                                      unit="cm2 s",
                                      meta={"livetime": 0 * u.s})
        edisp = EDispKernelMap.from_diagonal_response(e_reco,
                                                      e_true,
                                                      geom=counts.geom)
        mask_safe = RegionNDMap.from_geom(counts.geom, dtype="bool")
        gti = GTI.create(u.Quantity([], "s"), u.Quantity([], "s"),
                         reference_time)

        return SpectrumDataset(
            counts=counts,
            exposure=exposure,
            background=background,
            edisp=edisp,
            mask_safe=mask_safe,
            gti=gti,
            name=name,
        )
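
An illustrative call of the factory above; it assumes `gammapy.maps.MapAxis.from_energy_bounds` to build the axis, and the energy range, binning and dataset name are placeholder choices:

    from gammapy.maps import MapAxis

    # reconstructed-energy axis; the docstring requires the name "energy"
    e_reco = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=10, name="energy")

    # empty dataset: zero counts/background/exposure, diagonal energy dispersion
    empty = SpectrumDataset.create(e_reco=e_reco, name="empty-dataset")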
Example #3
    def __init__(
        self,
        models=None,
        counts=None,
        exposure=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        if mask_fit is not None and mask_fit.dtype != np.dtype("bool"):
            raise ValueError("mask data must have dtype bool")

        self.counts = counts

        self.mask_fit = mask_fit
        self.exposure = exposure
        self.edisp = edisp
        self._background_model = None
        self.mask_safe = mask_safe
        self.gti = gti
        self.meta_table = meta_table

        self._name = make_name(name)
        self.models = models
Example #4
    def __init__(
        self,
        models=None,
        counts=None,
        livetime=None,
        aeff=None,
        edisp=None,
        background=None,
        mask_safe=None,
        mask_fit=None,
        name=None,
        gti=None,
    ):

        if mask_fit is not None and mask_fit.dtype != np.dtype("bool"):
            raise ValueError("mask data must have dtype bool")

        self.counts = counts

        if livetime is not None:
            livetime = u.Quantity(livetime)

        self.livetime = livetime
        self.mask_fit = mask_fit
        self.aeff = aeff
        self.edisp = edisp
        self.background = background
        self.models = models
        self.mask_safe = mask_safe
        self.gti = gti

        self._name = make_name(name)
Example #5
    def fake(self, background_model, random_state="random-seed", name=None):
        """Simulate fake counts for the current model and reduced irfs.

        This method overwrites the counts and off counts defined on the dataset object.

        Parameters
        ----------
        background_model : `~gammapy.spectrum.CountsSpectrum`
            BackgroundModel. In the future will be part of the SpectrumDataset Class.
            For the moment, a CountSpectrum.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        self._name = make_name(name)
        random_state = get_random_state(random_state)

        npred_sig = self.npred_sig()
        npred_sig.data = random_state.poisson(npred_sig.data)

        npred_bkg = background_model.copy()
        npred_bkg.data = random_state.poisson(npred_bkg.data)

        self.counts = npred_sig + npred_bkg

        npred_off = background_model / self.alpha
        npred_off.data = random_state.poisson(npred_off.data)
        self.counts_off = npred_off
Example #6
    def read(cls, filename, name=None, format="gadf-sed"):
        """Read pre-computed flux points and create a dataset

        Parameters
        ----------
        filename : str
            Filename to read from.
        name : str
            Name of the new dataset.
        format : {"gadf-sed"}
            Format of the dataset file.

        Returns
        -------
        dataset : `FluxPointsDataset`
            FluxPointsDataset
        """
        from gammapy.estimators import FluxPoints

        filename = make_path(filename)
        table = Table.read(filename)
        mask_fit = None
        mask_safe = None
        if "mask_safe" in table.colnames:
            mask_safe = table["mask_safe"].data.astype("bool")
        if "mask_fit" in table.colnames:
            mask_fit = table["mask_fit"].data.astype("bool")
        return cls(
            name=make_name(name),
            data=FluxPoints.from_table(table, format=format),
            mask_fit=mask_fit,
            mask_safe=mask_safe
        )
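
A short usage sketch following the signature above; the file name and dataset name are placeholders:

    # read serialized flux points into a dataset ready for fitting
    dataset = FluxPointsDataset.read("flux_points.fits", name="crab-sed", format="gadf-sed")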
Example #7
    def __init__(
        self,
        map,
        spectral_model=None,
        name=None,
        filename=None,
        datasets_names=None,
    ):
        if isinstance(map, Map):
            axis = map.geom.axes["energy"]
            if axis.node_type != "edges":
                raise ValueError(
                    'Need an integrated map, energy axis node_type="edges"')

        self.map = map
        self._name = make_name(name)
        self.filename = filename

        if spectral_model is None:
            spectral_model = PowerLawNormSpectralModel()
            spectral_model.tilt.frozen = True

        self.spectral_model = spectral_model

        if isinstance(datasets_names, str):
            datasets_names = [datasets_names]

        if isinstance(datasets_names, list):
            if len(datasets_names) != 1:
                raise ValueError(
                    "Currently background models can only be assigned to one dataset."
                )
        self.datasets_names = datasets_names
        super().__init__()
Example #8
    def to_dict(self, full_output=False):
        """Convert to dict."""
        # update linked parameters labels
        params_list = []
        params_shared = []
        for param in self.parameters:
            if param not in params_list:
                params_list.append(param)
            elif param not in params_shared:
                params_shared.append(param)
        for param in params_shared:
            param._link_label_io = param.name + "@" + make_name()

        models_data = []
        for model in self._models:
            model_data = model.to_dict(full_output)
            models_data.append(model_data)
        if self._covar_file is not None:
            return {
                "components": models_data,
                "covariance": str(self._covar_file),
            }
        else:
            return {"components": models_data}
Example #9
    def to_dict(self, full_output=False, overwrite_templates=False):
        """Convert to dict."""
        # update linked parameters labels
        params_list = []
        params_shared = []
        for param in self.parameters:
            if param not in params_list:
                params_list.append(param)
            elif param not in params_shared:
                params_shared.append(param)
        for param in params_shared:
            param._link_label_io = param.name + "@" + make_name()

        models_data = []
        for model in self._models:
            model_data = model.to_dict(full_output)
            models_data.append(model_data)
            if (hasattr(model, "spatial_model")
                    and model.spatial_model is not None
                    and "template" in model.spatial_model.tag):
                model.spatial_model.write(overwrite=overwrite_templates)

        if self._covar_file is not None:
            return {
                "components": models_data,
                "covariance": str(self._covar_file),
            }
        else:
            return {"components": models_data}
Example #10
    def __init__(
        self,
        map,
        norm=norm.quantity,
        tilt=tilt.quantity,
        reference=reference.quantity,
        meta=None,
        interp_kwargs=None,
        name=None,
        filename=None,
    ):

        self._name = make_name(name)

        axis = map.geom.get_axis_by_name("energy")

        if axis.node_type != "center":
            raise ValueError('Need a map with energy axis node_type="center"')

        self.map = map
        self.meta = {} if meta is None else meta
        self.filename = filename

        interp_kwargs = {} if interp_kwargs is None else interp_kwargs
        interp_kwargs.setdefault("interp", "linear")
        interp_kwargs.setdefault("fill_value", 0)
        self._interp_kwargs = interp_kwargs

        # TODO: once we have implemented a more general and better model caching,
        #  remove this again
        self._cached_value = None
        self._cached_coordinates = (None, None, None)

        super().__init__(norm=norm, tilt=tilt, reference=reference)
Example #11
    def __init__(
        self,
        map,
        norm=norm.quantity,
        tilt=tilt.quantity,
        reference=reference.quantity,
        name=None,
        filename=None,
        datasets_names=None,
    ):
        axis = map.geom.get_axis_by_name("energy")
        if axis.node_type != "edges":
            raise ValueError(
                'Need an integrated map, energy axis node_type="edges"')

        self.map = map

        self._name = make_name(name)
        self.filename = filename

        if isinstance(datasets_names, list):
            if len(datasets_names) != 1:
                raise ValueError(
                    "Currently background models can only be assigned to one dataset."
                )

        self.datasets_names = datasets_names
        super().__init__(norm=norm, tilt=tilt, reference=reference)
Example #12
def _update_link_reference(models):
    params_list = []
    params_shared = []
    for model in models:
        for param in model.parameters:
            if param not in params_list:
                params_list.append(param)
            elif param not in params_shared:
                params_shared.append(param)
    for param in params_shared:
        param._link_label_io = param.name + "@" + make_name()
Example #13
 def update_link_label(self):
     """update linked parameters labels used for serialization and print"""
     params_list = []
     params_shared = []
     for param in self.parameters:
         if param not in params_list:
             params_list.append(param)
         elif param not in params_shared:
             params_shared.append(param)
     for param in params_shared:
         param._link_label_io = param.name + "@" + make_name()
Example #14
    def __init__(self, models, data, mask_fit=None, mask_safe=None, name=None):
        self.data = data
        self.mask_fit = mask_fit
        self._name = make_name(name)
        self.models = models

        if data.sed_type != "dnde":
            raise ValueError("Currently only flux points of type 'dnde' are supported.")

        if mask_safe is None:
            mask_safe = np.isfinite(data.table["dnde"])

        self.mask_safe = mask_safe
Example #15
    def __init__(self,
                 spectral_model,
                 spatial_model=None,
                 temporal_model=None,
                 name=None):
        self.spatial_model = spatial_model
        self.spectral_model = spectral_model
        self.temporal_model = temporal_model
        super().__init__()
        # TODO: this hack is needed for compound models to work
        self.__dict__.pop("_parameters")

        self._name = make_name(name)
Example #16
    def __init__(
        self,
        models=None,
        counts=None,
        counts_off=None,
        livetime=None,
        aeff=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        acceptance=None,
        acceptance_off=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        self.counts = counts
        self.counts_off = counts_off

        if livetime is not None:
            livetime = u.Quantity(livetime)

        self.livetime = livetime
        self.mask_fit = mask_fit
        self.aeff = aeff
        self.edisp = edisp
        self.mask_safe = mask_safe
        self.meta_table = meta_table

        if np.isscalar(acceptance):
            data = np.ones(self._geom.data_shape) * acceptance
            acceptance = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance = acceptance

        if np.isscalar(acceptance_off):
            data = np.ones(self._geom.data_shape) * acceptance_off
            acceptance_off = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance_off = acceptance_off

        self._evaluators = {}
        self._name = make_name(name)
        self.gti = gti
        self.models = models

        # TODO: this enforces the exposure on the edisp map, maybe better move
        #  to where the EDispKernelMap is created?
        if edisp is not None:
            self.edisp.exposure_map.data = self.exposure.data
Example #17
    def copy(self, name=None):
        """A deep copy."""
        new = copy.deepcopy(self)
        name = make_name(name)
        new._name = name

        # propagate new dataset name
        if new._models is not None:
            for m in new._models:
                if m.datasets_names is not None:
                    for k, d in enumerate(m.datasets_names):
                        if d == self.name:
                            m.datasets_names[k] = name
        return new
Example #18
 def copy(self, name=None):
     """A deep copy."""
     new = copy.deepcopy(self)
     name = make_name(name)
     new._name = name
     # propagate new dataset name
     if new._models is not None:
         for m in new._models:
             if m.datasets_names is not None:
                 for k, d in enumerate(m.datasets_names):
                     if d == self.name:
                         m.datasets_names[k] = name
                 if hasattr(new, "background_model") and m == new.background_model:
                     m._name = name + "-bkg"
     return new
Example #19
    def fake(self, random_state="random-seed", name=None):
        """Simulate fake counts for the current model and reduced irfs.

        This method overwrites the counts defined on the dataset object.

        Parameters
        ----------
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        name : str
            Name of the fake dataset.
        """
        self._name = make_name(name)
        random_state = get_random_state(random_state)
        npred = self.npred()
        npred.data = random_state.poisson(npred.data)
        self.counts = npred
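
A hedged example of the simulation step above; the pre-existing `dataset` and the fixed seed are assumptions made for illustration:

    # draw Poisson counts from the current npred and overwrite dataset.counts
    dataset.fake(random_state=42, name="simulated-dataset")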
Example #20
    def __init__(
        self,
        models=None,
        counts=None,
        counts_off=None,
        livetime=None,
        aeff=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        acceptance=None,
        acceptance_off=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        self.counts = counts
        self.counts_off = counts_off

        if livetime is not None:
            livetime = u.Quantity(livetime)

        self.livetime = livetime
        self.mask_fit = mask_fit
        self.aeff = aeff
        self.edisp = edisp
        self.mask_safe = mask_safe
        self.meta_table = meta_table

        if np.isscalar(acceptance):
            data = np.ones(self._geom.data_shape) * acceptance
            acceptance = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance = acceptance

        if np.isscalar(acceptance_off):
            data = np.ones(self._geom.data_shape) * acceptance_off
            acceptance_off = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance_off = acceptance_off

        self._evaluators = {}
        self._name = make_name(name)
        self.gti = gti
        self.models = models
Example #21
    def copy(self, name=None, **kwargs):
        """Copy SkyModel"""
        if self.spatial_model is not None:
            spatial_model = self.spatial_model.copy()
        else:
            spatial_model = None

        if self.temporal_model is not None:
            temporal_model = self.temporal_model.copy()
        else:
            temporal_model = None

        kwargs.setdefault("name", make_name(name))
        kwargs.setdefault("spectral_model", self.spectral_model.copy())
        kwargs.setdefault("spatial_model", spatial_model)
        kwargs.setdefault("temporal_model", temporal_model)

        return self.__class__(**kwargs)
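
Illustrative use of the copy above; `sky_model` and the new name are assumptions:

    # deep-copy the spectral, spatial and temporal components under a new name
    model_copy = sky_model.copy(name="source-copy")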
Example #22
    def to_dict(self):
        """Convert to dict."""
        # update linked parameters labels
        params_list = []
        params_shared = []
        for param in self.parameters:
            if param not in params_list:
                params_list.append(param)
            elif param not in params_shared:
                params_shared.append(param)
        for param in params_shared:
            param._link_label_io = param.name + "@" + make_name()

        models_data = []
        for model in self._models:
            model_data = model.to_dict()
            models_data.append(model_data)
        return {"components": models_data}
Example #23
    def slice_by_idx(self, slices, name=None):
        """Slice sub dataset.

        The slicing only applies to the maps that define the corresponding axes.

        Parameters
        ----------
        slices : dict
            Dict of axes names and integers or `slice` object pairs. Contains one
            element for each non-spatial dimension. For integer indexing the
            corresponding axis is dropped from the map. Axes not specified in the
            dict are kept unchanged.
        name : str
            Name of the sliced dataset.

        Returns
        -------
        dataset : `Dataset`
            Sliced dataset.
        """
        name = make_name(name)
        kwargs = {"gti": self.gti, "name": name}

        if self.counts is not None:
            kwargs["counts"] = self.counts.slice_by_idx(slices=slices)

        if self.exposure is not None:
            kwargs["aeff"] = self.aeff.slice_by_idx(slices=slices)

        if self.edisp is not None:
            kwargs["edisp"] = self.edisp.slice_by_idx(slices=slices)

        if self.mask_safe is not None:
            kwargs["mask_safe"] = self.mask_safe.slice_by_idx(slices=slices)

        if self.mask_fit is not None:
            kwargs["mask_fit"] = self.mask_fit.slice_by_idx(slices=slices)

        kwargs["acceptance"] = self.acceptance.slice_by_idx(slices=slices)
        kwargs["acceptance_off"] = self.acceptance_off.slice_by_idx(slices=slices)
        kwargs["counts_off"] = self.counts_off.slice_by_idx(slices=slices)
        kwargs["livetime"] = self.livetime

        return self.__class__(**kwargs)
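
A usage sketch of the slicing above; the axis name, slice bounds and dataset name are illustrative:

    # keep only the first five reconstructed-energy bins
    sliced = dataset.slice_by_idx({"energy": slice(0, 5)}, name="sliced-dataset")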
Example #24
    def copy(self, name=None):
        """A deep copy.

        Parameters
        ----------
        name : str
            Name of the copied dataset

        Returns
        -------
        dataset : `Dataset`
            Copied datasets.
        """
        new = copy.deepcopy(self)
        name = make_name(name)
        new._name = name
        # TODO: check the model behaviour?
        new.models = None
        return new
Example #25
    def __init__(
        self,
        models=None,
        data=None,
        mask_fit=None,
        mask_safe=None,
        name=None,
        meta_table=None,
    ):
        self.data = data
        self.mask_fit = mask_fit
        self._name = make_name(name)
        self.models = models
        self.meta_table = meta_table

        if mask_safe is None:
            mask_safe = (~data.is_ul).data[:, 0, 0]

        self.mask_safe = mask_safe
Example #26
    def __init__(
        self,
        models=None,
        data=None,
        mask_fit=None,
        mask_safe=None,
        name=None,
        meta_table=None,
    ):
        self.data = data
        self.mask_fit = mask_fit
        self._name = make_name(name)
        self.models = models
        self.meta_table = meta_table

        if mask_safe is None:
            mask_safe = np.isfinite(data.dnde)

        self.mask_safe = mask_safe
Example #27
    def __init__(
        self,
        models=None,
        counts=None,
        counts_off=None,
        exposure=None,
        edisp=None,
        mask_safe=None,
        mask_fit=None,
        acceptance=None,
        acceptance_off=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        self.counts = counts
        self.counts_off = counts_off

        self.mask_fit = mask_fit
        self.exposure = exposure
        self.edisp = edisp
        self.mask_safe = mask_safe
        self.meta_table = meta_table

        if np.isscalar(acceptance):
            data = np.ones(self._geom.data_shape) * acceptance
            acceptance = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance = acceptance

        if np.isscalar(acceptance_off):
            data = np.ones(self._geom.data_shape) * acceptance_off
            acceptance_off = RegionNDMap.from_geom(self._geom, data=data)

        self.acceptance_off = acceptance_off

        self._evaluators = {}
        self._name = make_name(name)
        self.gti = gti
        self.models = models
        self._background_model = None
Example #28
    def __init__(
        self,
        map,
        norm=norm.quantity,
        tilt=tilt.quantity,
        reference=reference.quantity,
        name=None,
        filename=None,
    ):
        axis = map.geom.get_axis_by_name("energy")
        if axis.node_type != "edges":
            raise ValueError(
                'Need an integrated map, energy axis node_type="edges"')

        self.map = map

        self._name = make_name(name)
        self.filename = filename

        super().__init__(norm=norm, tilt=tilt, reference=reference)
Example #29
    def __init__(
        self,
        spectral_model,
        spatial_model=None,
        temporal_model=None,
        name=None,
        apply_irf=None,
        datasets_names=None,
    ):
        self.spatial_model = spatial_model
        self.spectral_model = spectral_model
        self.temporal_model = temporal_model
        self._name = make_name(name)

        if apply_irf is None:
            apply_irf = self._apply_irf_default.copy()

        self.apply_irf = apply_irf
        self.datasets_names = datasets_names
        super().__init__()
Example #30
    def __init__(
        self,
        models=None,
        counts=None,
        livetime=None,
        aeff=None,
        edisp=None,
        background=None,
        mask_safe=None,
        mask_fit=None,
        name=None,
        gti=None,
        meta_table=None,
    ):

        if mask_fit is not None and mask_fit.dtype != np.dtype("bool"):
            raise ValueError("mask data must have dtype bool")

        self.counts = counts

        if livetime is not None:
            livetime = u.Quantity(livetime)

        self.livetime = livetime
        self.mask_fit = mask_fit
        self.aeff = aeff
        self.edisp = edisp
        self.background = background
        self.mask_safe = mask_safe
        self.gti = gti
        self.meta_table = meta_table

        self._name = make_name(name)
        self.models = models

        # TODO: this enforces the exposure on the edisp map, maybe better move
        #  to where the EDispKernelMap is created?
        if edisp is not None:
            self.edisp.exposure_map.data = self.exposure.data