Example 1
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        log.info("Reducing spectrum datasets.")
        datasets_settings = self.config.datasets
        dataset_maker = self._create_dataset_maker()
        safe_mask_maker = self._create_safe_mask_maker()
        bkg_maker = self._create_background_maker()

        reference = self._create_reference_dataset()

        datasets = []
        for obs in progress_bar(self.observations, desc="Observations"):
            log.debug(f"Processing observation {obs.obs_id}")
            dataset = dataset_maker.run(reference.copy(), obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset, obs)
                if dataset.counts_off is None:
                    log.debug(
                        f"No OFF region found for observation {obs.obs_id}. Discarding."
                    )
                    continue
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)
        self.datasets = Datasets(datasets)

        if datasets_settings.stack:
            stacked = self.datasets.stack_reduce(name="stacked")
            self.datasets = Datasets([stacked])
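
`_spectrum_extraction` is a private helper of the high-level `Analysis` class; in normal use it is reached through the config-driven interface rather than called directly. A minimal, hedged sketch of that entry point (the configuration file name and its contents are assumptions):

    from gammapy.analysis import Analysis, AnalysisConfig

    # Hypothetical YAML configuration describing a 1D (spectrum) analysis.
    config = AnalysisConfig.read("config.yaml")
    analysis = Analysis(config)
    analysis.get_observations()   # select observations from the configured data store
    analysis.get_datasets()       # dispatches to the spectrum extraction shown above
    print(analysis.datasets)
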
Example 2
    def stat_surface(self, datasets, x, y, reoptimize=False):
        """Compute fit statistic surface.

        The method used is to vary two parameters, keeping all others fixed.
        So this is taking a "slice" or "scan" of the fit statistic.

        Caveat: This method can be very computationally intensive and slow.

        See also: `Fit.stat_contour`

        Parameters
        ----------
        datasets : `Datasets` or list of `Dataset`
            Datasets to optimize.
        x, y : `~gammapy.modeling.Parameter`
            Parameters of interest.
        reoptimize : bool
            Re-optimize the other free parameters at each point of the scan grid.

        Returns
        -------
        results : dict
            Dictionary with keys "{x.name}_scan", "{y.name}_scan", "stat_scan" and "fit_results".
            The latter contains an empty list if `reoptimize` is set to False.
        """
        datasets, parameters = self._parse_datasets(datasets=datasets)

        x, y = parameters[x], parameters[y]

        stats = []
        fit_results = []

        with parameters.restore_status():
            for x_value, y_value in progress_bar(itertools.product(
                    x.scan_values, y.scan_values),
                                                 desc="Trial values"):
                x.value, y.value = x_value, y_value

                if reoptimize:
                    x.frozen, y.frozen = True, True
                    result = self.optimize(datasets=datasets)
                    stat = result.total_stat
                    fit_results.append(result)
                else:
                    stat = datasets.stat_sum()

                stats.append(stat)

        shape = (len(x.scan_values), len(y.scan_values))
        stats = np.array(stats).reshape(shape)

        if reoptimize:
            fit_results = np.array(fit_results).reshape(shape)

        return {
            f"{x.name}_scan": x.scan_values,
            f"{y.name}_scan": y.scan_values,
            "stat_scan": stats,
            "fit_results": fit_results,
        }
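
A hedged usage sketch for `stat_surface`; the `dataset` variable and the parameter names "index" and "amplitude" are illustrative assumptions. The scan grid is taken from the parameters themselves:

    from gammapy.modeling import Fit

    fit = Fit()
    fit.run(datasets=[dataset])   # 'dataset' is assumed to hold a fitted sky model

    index = dataset.models.parameters["index"]
    amplitude = dataset.models.parameters["amplitude"]
    index.scan_n_values = 10      # scan specification lives on the parameter objects
    amplitude.scan_n_values = 10

    surface = fit.stat_surface(datasets=[dataset], x=index, y=amplitude)
    print(surface["stat_scan"].shape)   # (10, 10)
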
Example 3
    def run(self, datasets):
        """Run the flux point estimator for all energy groups.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.Dataset`
            Datasets to compute flux points for.

        Returns
        -------
        flux_points : `FluxPoints`
            Estimated flux points.
        """
        datasets = Datasets(datasets).copy()

        rows = []

        for energy_min, energy_max in progress_bar(zip(self.energy_edges[:-1],
                                                       self.energy_edges[1:]),
                                                   desc="Energy bins"):
            row = self.estimate_flux_point(
                datasets,
                energy_min=energy_min,
                energy_max=energy_max,
            )
            rows.append(row)

        table = table_from_row_data(rows=rows, meta={"SED_TYPE": "likelihood"})

        model = datasets.models[self.source]
        return FluxPoints(table,
                          reference_spectral_model=model.spectral_model.copy())
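
A hedged usage sketch for the flux point estimator; the energy edges, source name and `datasets` variable are assumptions:

    import astropy.units as u
    from gammapy.estimators import FluxPointsEstimator

    estimator = FluxPointsEstimator(
        energy_edges=[1, 3, 10] * u.TeV,
        source="my-source",   # must match a model name in datasets.models
    )
    flux_points = estimator.run(datasets)
    print(flux_points)
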
Example 4
    def run(self, datasets):
        """Run."""
        datasets = Datasets(datasets)
        # find extension parameter
        # TODO: write something better
        model = datasets.models[self.source].spatial_model

        if hasattr(model, "sigma"):
            self.size_parameter = model.sigma
        elif hasattr(model, "r_0"):
            self.size_parameter = model.r_0
        elif hasattr(model, "radius"):
            self.size_parameter = model.radius
        else:
            raise ValueError(
                f"Cannot find size parameter on model {self.source}")

        rows = []

        for energy_min, energy_max in progress_bar(zip(self.energy_edges[:-1],
                                                       self.energy_edges[1:]),
                                                   desc="Energy bins"):
            datasets_sliced = datasets.slice_by_energy(energy_min=energy_min,
                                                       energy_max=energy_max)
            datasets_sliced = Datasets(
                [_.to_image(name=_.name) for _ in datasets_sliced])
            datasets_sliced.models = datasets.models  #.copy()
            row = self.estimate_size(datasets_sliced)
            rows.append(row)
        return rows
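
For illustration only, the `hasattr` chain above maps onto the standard Gammapy spatial models roughly as sketched below; the models and values are arbitrary examples, not taken from the code:

    from gammapy.modeling.models import (
        DiskSpatialModel,
        GaussianSpatialModel,
        ShellSpatialModel,
    )

    models = [
        GaussianSpatialModel(sigma="0.2 deg"),                  # -> model.sigma
        DiskSpatialModel(r_0="0.3 deg"),                        # -> model.r_0
        ShellSpatialModel(radius="0.4 deg", width="0.1 deg"),   # -> model.radius
    ]
    for model in models:
        for name in ("sigma", "r_0", "radius"):
            if hasattr(model, name):
                print(type(model).__name__, "->", getattr(model, name))
                break
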
Example 5
    def _map_making(self):
        """Make maps and datasets for 3d analysis"""
        datasets_settings = self.config.datasets
        offset_max = datasets_settings.geom.selection.offset_max

        log.info("Creating reference dataset and makers.")
        stacked = self._create_reference_dataset(name="stacked")

        maker = self._create_dataset_maker()
        maker_safe_mask = self._create_safe_mask_maker()
        bkg_maker = self._create_background_maker()

        log.info("Start the data reduction loop.")

        if datasets_settings.stack:
            for obs in progress_bar(self.observations, desc="Observations"):
                log.debug(f"Processing observation {obs.obs_id}")
                cutout = stacked.cutout(obs.pointing_radec,
                                        width=2 * offset_max)
                dataset = maker.run(cutout, obs)
                dataset = maker_safe_mask.run(dataset, obs)
                if bkg_maker is not None:
                    dataset = bkg_maker.run(dataset)

                    if bkg_maker.tag == "RingBackgroundMaker":
                        dataset = dataset.to_map_dataset()

                log.debug(dataset)
                stacked.stack(dataset)
            datasets = [stacked]
        else:
            datasets = []

            for obs in progress_bar(self.observations, desc="Observations"):
                log.debug(f"Processing observation {obs.obs_id}")
                cutout = stacked.cutout(obs.pointing_radec,
                                        width=2 * offset_max)
                dataset = maker.run(cutout, obs)
                dataset = maker_safe_mask.run(dataset, obs)
                if bkg_maker is not None:
                    dataset = bkg_maker.run(dataset)
                log.debug(dataset)
                datasets.append(dataset)
        self.datasets = Datasets(datasets)
Example 6
    def run(self, datasets):
        """Run light curve extraction.

        Normalize integral and energy flux between emin and emax.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset` or `~gammapy.datasets.MapDataset`
            Spectrum or Map datasets.

        Returns
        -------
        lightcurve : `~gammapy.estimators.FluxPoints`
            Light curve flux points
        """
        datasets = Datasets(datasets)

        if self.time_intervals is None:
            gti = datasets.gti
        else:
            gti = GTI.from_time_intervals(self.time_intervals)

        gti = gti.union(overlap_ok=False, merge_equal=False)

        rows = []
        valid_intervals = []
        for t_min, t_max in progress_bar(gti.time_intervals,
                                         desc="Time intervals"):
            datasets_to_fit = datasets.select_time(time_min=t_min,
                                                   time_max=t_max,
                                                   atol=self.atol)

            if len(datasets_to_fit) == 0:
                log.info(
                    f"No Dataset for the time interval {t_min} to {t_max}. Skipping interval."
                )
                continue

            valid_intervals.append([t_min, t_max])
            fp = self.estimate_time_bin_flux(datasets=datasets_to_fit)

            for name in ["counts", "npred", "npred_null"]:
                fp._data[name] = self.expand_map(fp._data[name],
                                                 dataset_names=datasets.names)
            rows.append(fp)

        if len(rows) == 0:
            raise ValueError(
                "LightCurveEstimator: No datasets in time intervals")

        gti = GTI.from_time_intervals(valid_intervals)
        axis = TimeMapAxis.from_gti(gti=gti)
        return FluxPoints.from_stack(
            maps=rows,
            axis=axis,
        )
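
A hedged usage sketch for the light curve estimator; the time intervals, energy range, source name and `datasets` variable are assumptions:

    import astropy.units as u
    from astropy.time import Time
    from gammapy.estimators import LightCurveEstimator

    time_intervals = [
        Time(["2004-12-04T22:00", "2004-12-04T22:30"]),
        Time(["2004-12-04T22:30", "2004-12-04T23:00"]),
    ]
    estimator = LightCurveEstimator(
        energy_edges=[1, 10] * u.TeV,
        source="my-source",
        time_intervals=time_intervals,
    )
    lightcurve = estimator.run(datasets)
    print(lightcurve)
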
Example 7
    def run(self, datasets):
        """Run light curve extraction.

        Normalize integral and energy flux between emin and emax.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset` or `~gammapy.datasets.MapDataset`
            Spectrum or Map datasets.

        Returns
        -------
        lightcurve : `~gammapy.estimators.LightCurve`
            Light curve object.
        """
        datasets = Datasets(datasets)

        if self.time_intervals is None:
            gti = datasets.gti
        else:
            gti = GTI.from_time_intervals(self.time_intervals)

        gti = gti.union(overlap_ok=False, merge_equal=False)

        rows = []
        for t_min, t_max in progress_bar(gti.time_intervals,
                                         desc="Time intervals"):
            datasets_to_fit = datasets.select_time(t_min=t_min,
                                                   t_max=t_max,
                                                   atol=self.atol)

            if len(datasets_to_fit) == 0:
                log.debug(
                    f"No Dataset for the time interval {t_min} to {t_max}")
                continue

            row = {"time_min": t_min.mjd, "time_max": t_max.mjd}
            row.update(self.estimate_time_bin_flux(datasets_to_fit))
            rows.append(row)

        if len(rows) == 0:
            raise ValueError(
                "LightCurveEstimator: No datasets in time intervals")

        table = table_from_row_data(rows=rows, meta={"SED_TYPE": "likelihood"})
        model = datasets.models[self.source]

        # TODO: cleanup here...
        fp = FluxPoints(table,
                        reference_spectral_model=model.spectral_model.copy())
        table_flux = fp.to_table(sed_type="flux")
        table_flux.remove_columns(["stat", "ts", "sqrt_ts", "e_min", "e_max"])
        return LightCurve(hstack([table, table_flux]))
Example 8
    def stat_profile(self, datasets, parameter, reoptimize=False):
        """Compute fit statistic profile.

        The method used is to vary one parameter, keeping all others fixed.
        So this is taking a "slice" or "scan" of the fit statistic.

        Parameters
        ----------
        datasets : `Datasets` or list of `Dataset`
            Datasets to optimize.
        parameter : `~gammapy.modeling.Parameter`
            Parameter of interest. The specification for the scan, such as bounds
            and number of values, is taken from the parameter object.
        reoptimize : bool
            Re-optimize the other free parameters at each point of the scan.

        Returns
        -------
        results : dict
            Dictionary with keys "{parameter.name}_scan", "stat_scan" and "fit_results".
            The latter contains an empty list if `reoptimize` is set to False.
        """
        datasets, parameters = self._parse_datasets(datasets=datasets)
        parameter = parameters[parameter]
        values = parameter.scan_values

        stats = []
        fit_results = []
        with parameters.restore_status():
            for value in progress_bar(values, desc="Scan values"):
                parameter.value = value
                if reoptimize:
                    parameter.frozen = True
                    result = self.optimize(datasets=datasets)
                    stat = result.total_stat
                    fit_results.append(result)
                else:
                    stat = datasets.stat_sum()
                stats.append(stat)

        return {
            f"{parameter.name}_scan": values,
            "stat_scan": np.array(stats),
            "fit_results": fit_results,
        }
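
A hedged usage sketch for `stat_profile`; the `dataset` variable and the parameter name "amplitude" are assumptions. The scan specification (number of values and bounds) lives on the parameter:

    from gammapy.modeling import Fit

    fit = Fit()
    fit.run(datasets=[dataset])   # best-fit value and error define the default scan range

    amplitude = dataset.models.parameters["amplitude"]
    amplitude.scan_n_values = 21

    profile = fit.stat_profile(datasets=[dataset], parameter=amplitude)
    print(profile["amplitude_scan"], profile["stat_scan"])
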
Example 9
    def run(self, dataset):
        """Run adaptive smoothing on input MapDataset.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
            The input dataset (with at most one energy bin).

        Returns
        -------
        maps : `~gammapy.maps.Maps`
            Smoothed images; keys are:
                * 'counts'
                * 'background'
                * 'flux' (optional)
                * 'scales'
                * 'sqrt_ts'
        """
        energy_axis = self._get_energy_axis(dataset)

        results = []

        for energy_min, energy_max in progress_bar(
            energy_axis.iter_by_edges, desc="Energy bins"
        ):
            dataset_sliced = dataset.slice_by_energy(
                energy_min=energy_min, energy_max=energy_max, name=dataset.name
            )
            dataset_sliced.models = dataset.models
            result = self.estimate_maps(dataset_sliced)
            results.append(result)

        maps = Maps()

        for name in results[0].keys():
            maps[name] = Map.from_stack(
                maps=[_[name] for _ in results], axis_name="energy"
            )

        return maps
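
A hedged usage sketch for the adaptive smoothing estimator; the smoothing scales, threshold and `dataset` variable are assumptions:

    import astropy.units as u
    from gammapy.estimators import ASmoothMapEstimator

    scales = u.Quantity([0.05, 0.1, 0.2], "deg")
    estimator = ASmoothMapEstimator(scales=scales, threshold=3)
    maps = estimator.run(dataset)   # 'dataset' is a MapDataset
    print(maps["sqrt_ts"])
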
Example 10
    def run(self, datasets):
        """Run the flux point estimator for all energy groups.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.Dataset`
            Datasets to compute flux points for.

        Returns
        -------
        flux_points : `FluxPoints`
            Estimated flux points.
        """
        # TODO: remove copy here...
        datasets = Datasets(datasets).copy()

        rows = []

        for energy_min, energy_max in progress_bar(zip(self.energy_edges[:-1],
                                                       self.energy_edges[1:]),
                                                   desc="Energy bins"):
            row = self.estimate_flux_point(
                datasets,
                energy_min=energy_min,
                energy_max=energy_max,
            )
            rows.append(row)

        meta = {
            "n_sigma": self.n_sigma,
            "n_sigma_ul": self.n_sigma_ul,
            "sed_type_init": "likelihood"
        }

        table = table_from_row_data(rows=rows, meta=meta)
        model = datasets.models[self.source]
        return FluxPoints.from_table(table=table,
                                     reference_model=model.copy(),
                                     gti=datasets.gti,
                                     format="gadf-sed")
Example 11
    def run(self, dataset):
        """
        Run TS map estimation.

        Requires a MapDataset with counts, exposure and background_model
        properly set to run.

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset`
            Input MapDataset.

        Returns
        -------
        maps : `~gammapy.estimators.FluxMaps`
            Result maps with the following quantities:

                * ts : delta TS map
                * sqrt_ts : sqrt(delta TS), or significance map
                * flux : flux map
                * flux_err : symmetric error map
                * flux_ul : upper limit map

        """
        dataset_models = dataset.models

        pad_width = self.estimate_pad_width(dataset=dataset)
        dataset = dataset.pad(pad_width, name=dataset.name)
        dataset = dataset.downsample(self.downsampling_factor,
                                     name=dataset.name)

        energy_axis = self._get_energy_axis(dataset=dataset)

        results = []

        for energy_min, energy_max in progress_bar(energy_axis.iter_by_edges,
                                                   desc="Energy bins"):
            sliced_dataset = dataset.slice_by_energy(energy_min=energy_min,
                                                     energy_max=energy_max,
                                                     name=dataset.name)

            if self.sum_over_energy_groups:
                sliced_dataset = sliced_dataset.to_image(name=dataset.name)

            sliced_dataset.models = dataset_models
            result = self.estimate_flux_map(sliced_dataset)
            results.append(result)

        maps = Maps()

        for name in self.selection_all:
            m = Map.from_images(images=[_[name] for _ in results])

            order = 0 if name == "niter" else 1
            m = m.upsample(factor=self.downsampling_factor,
                           preserve_counts=False,
                           order=order)

            maps[name] = m.crop(crop_width=pad_width)

        meta = {"n_sigma": self.n_sigma, "n_sigma_ul": self.n_sigma_ul}
        return FluxMaps(data=maps,
                        reference_model=self.model,
                        gti=dataset.gti,
                        meta=meta)
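
A hedged usage sketch for the TS map estimator; the test-source model, kernel width, energy edges and `dataset` variable are assumptions:

    import astropy.units as u
    from gammapy.estimators import TSMapEstimator
    from gammapy.modeling.models import (
        PointSpatialModel,
        PowerLawSpectralModel,
        SkyModel,
    )

    model = SkyModel(
        spatial_model=PointSpatialModel(),
        spectral_model=PowerLawSpectralModel(index=2),
    )
    estimator = TSMapEstimator(
        model=model,
        kernel_width="0.5 deg",
        energy_edges=[1, 10] * u.TeV,
    )
    maps = estimator.run(dataset)   # 'dataset' is a MapDataset
    print(maps.sqrt_ts)
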
Example 12
    def stat_surface(self,
                     datasets,
                     x,
                     y,
                     x_values,
                     y_values,
                     reoptimize=False):
        """Compute fit statistic surface.

        The method used is to vary two parameters, keeping all others fixed.
        So this is taking a "slice" or "scan" of the fit statistic.

        Caveat: This method can be very computationally intensive and slow.

        See also: `Fit.minos_contour`

        Parameters
        ----------
        datasets : `Datasets` or list of `Dataset`
            Datasets to optimize.
        x, y : `~gammapy.modeling.Parameter`
            Parameters of interest.
        x_values, y_values : list or `numpy.ndarray`
            Parameter values to evaluate the fit statistic for.
        reoptimize : bool
            Re-optimize the other free parameters at each point of the scan grid.

        Returns
        -------
        results : dict
            Dictionary with keys "{x.name}_scan", "{y.name}_scan", "stat_scan" and "fit_results".
            The latter contains an empty list if `reoptimize` is set to False.
        """
        datasets, parameters = self._parse_datasets(datasets=datasets)

        x = parameters[x]
        y = parameters[y]

        stats = []
        fit_results = []
        with parameters.restore_status():
            for x_value, y_value in progress_bar(itertools.product(
                    x_values, y_values),
                                                 desc="Trial values"):
                x.value = x_value
                y.value = y_value
                if reoptimize:
                    x.frozen = True
                    y.frozen = True
                    result = self.optimize(datasets=datasets)
                    stat = result.total_stat
                    fit_results.append(result)
                else:
                    stat = datasets.stat_sum()

                stats.append(stat)

        shape = (np.asarray(x_values).shape[0], np.asarray(y_values).shape[0])
        stats = np.array(stats)
        stats = stats.reshape(shape)

        if reoptimize:
            fit_results = np.array(fit_results)
            fit_results = fit_results.reshape(shape)

        return {
            f"{x.name}_scan": x_values,
            f"{y.name}_scan": y_values,
            "stat_scan": stats,
            "fit_results": fit_results,
        }
Example 13
    def stat_profile(self,
                     datasets,
                     parameter,
                     values=None,
                     bounds=2,
                     nvalues=11,
                     reoptimize=False):
        """Compute fit statistic profile.

        The method used is to vary one parameter, keeping all others fixed.
        So this is taking a "slice" or "scan" of the fit statistic.

        See also: `Fit.minos_profile`.

        Parameters
        ----------
        datasets : `Datasets` or list of `Dataset`
            Datasets to optimize.
        parameter : `~gammapy.modeling.Parameter`
            Parameter of interest.
        values : `~astropy.units.Quantity` (optional)
            Parameter values to evaluate the fit statistic for.
        bounds : int or tuple of float
            When an `int` is passed the bounds are computed from `bounds * sigma`
            from the best fit value of the parameter, where `sigma` corresponds to
            the one sigma error on the parameter. If a tuple of floats is given
            those are taken as the min and max values and ``nvalues`` are linearly
            spaced between those.
        nvalues : int
            Number of parameter grid points to use.
        reoptimize : bool
            Re-optimize the other free parameters at each point of the scan.

        Returns
        -------
        results : dict
            Dictionary with keys "{parameter.name}_scan", "stat_scan" and "fit_results".
            The latter contains an empty list if `reoptimize` is set to False.
        """
        datasets, parameters = self._parse_datasets(datasets=datasets)
        parameter = parameters[parameter]

        if values is None:
            if isinstance(bounds, tuple):
                parmin, parmax = bounds
            else:
                if np.isnan(parameter.error):
                    raise ValueError("Parameter error is not properly set.")
                parerr = parameter.error
                parval = parameter.value
                parmin, parmax = parval - bounds * parerr, parval + bounds * parerr

            values = np.linspace(parmin, parmax, nvalues)

        stats = []
        fit_results = []
        with parameters.restore_status():
            for value in progress_bar(values, desc="Trial values"):
                parameter.value = value
                if reoptimize:
                    parameter.frozen = True
                    result = self.optimize(datasets=datasets)
                    stat = result.total_stat
                    fit_results.append(result)
                else:
                    stat = datasets.stat_sum()
                stats.append(stat)

        return {
            f"{parameter.name}_scan": values,
            "stat_scan": np.array(stats),
            "fit_results": fit_results,
        }
Example 14
    def _spectrum_extraction(self):
        """Run all steps for the spectrum extraction."""
        log.info("Reducing spectrum datasets.")
        datasets_settings = self.config.datasets
        on_lon = datasets_settings.on_region.lon
        on_lat = datasets_settings.on_region.lat
        on_center = SkyCoord(on_lon,
                             on_lat,
                             frame=datasets_settings.on_region.frame)
        on_region = CircleSkyRegion(on_center,
                                    datasets_settings.on_region.radius)

        maker_config = {}
        if datasets_settings.containment_correction:
            maker_config[
                "containment_correction"] = datasets_settings.containment_correction
        e_reco = self._make_energy_axis(datasets_settings.geom.axes.energy)

        maker_config["selection"] = ["counts", "exposure", "edisp"]
        dataset_maker = SpectrumDatasetMaker(**maker_config)

        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            path = make_path(datasets_settings.background.exclusion)
            exclusion_region = Map.read(path)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker_config.update(datasets_settings.background.parameters)
        bkg_method = datasets_settings.background.method
        if bkg_method == "reflected":
            bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)
            log.debug(
                f"Creating ReflectedRegionsBackgroundMaker with arguments {bkg_maker_config}"
            )
        else:
            bkg_maker = None
            log.warning(
                "No background maker set for 1d analysis. Check configuration."
            )

        safe_mask_selection = datasets_settings.safe_mask.methods
        safe_mask_settings = datasets_settings.safe_mask.parameters
        safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)

        e_true = self._make_energy_axis(
            datasets_settings.geom.axes.energy_true, name="energy_true")

        geom = RegionGeom.create(region=on_region, axes=[e_reco])
        reference = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)

        datasets = []
        for obs in progress_bar(self.observations, desc="Observations"):
            log.debug(f"Processing observation {obs.obs_id}")
            dataset = dataset_maker.run(reference.copy(), obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset, obs)
                if dataset.counts_off is None:
                    log.debug(
                        f"No OFF region found for observation {obs.obs_id}. Discarding."
                    )
                    continue
            dataset = safe_mask_maker.run(dataset, obs)
            log.debug(dataset)
            datasets.append(dataset)
        self.datasets = Datasets(datasets)

        if datasets_settings.stack:
            stacked = self.datasets.stack_reduce(name="stacked")
            self.datasets = Datasets([stacked])
Example 15
    def _map_making(self):
        """Make maps and datasets for 3d analysis"""
        datasets_settings = self.config.datasets
        log.info("Creating geometry.")
        geom = self._create_geometry()
        geom_settings = datasets_settings.geom
        geom_irf = dict(energy_axis_true=None, binsz_irf=None)
        if geom_settings.axes.energy_true.min is not None:
            geom_irf["energy_axis_true"] = self._make_energy_axis(
                geom_settings.axes.energy_true, name="energy_true")
        geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value
        offset_max = geom_settings.selection.offset_max
        log.info("Creating datasets.")

        maker = MapDatasetMaker(selection=datasets_settings.map_selection)

        safe_mask_selection = datasets_settings.safe_mask.methods
        safe_mask_settings = datasets_settings.safe_mask.parameters
        maker_safe_mask = SafeMaskMaker(methods=safe_mask_selection,
                                        **safe_mask_settings)

        bkg_maker_config = {}
        if datasets_settings.background.exclusion:
            path = make_path(datasets_settings.background.exclusion)
            exclusion_region = Map.read(path)
            bkg_maker_config["exclusion_mask"] = exclusion_region
        bkg_maker_config.update(datasets_settings.background.parameters)

        bkg_method = datasets_settings.background.method
        if bkg_method == "fov_background":
            log.debug(
                f"Creating FoVBackgroundMaker with arguments {bkg_maker_config}"
            )
            bkg_maker = FoVBackgroundMaker(**bkg_maker_config)
        elif bkg_method == "ring":
            bkg_maker = RingBackgroundMaker(**bkg_maker_config)
            log.debug(
                f"Creating RingBackgroundMaker with arguments {bkg_maker_config}"
            )
            if datasets_settings.geom.axes.energy.nbins > 1:
                raise ValueError(
                    "You need to define a single-bin energy geometry for your dataset."
                )
        else:
            bkg_maker = None
            log.warning(
                "No background maker set for 3d analysis. Check configuration."
            )

        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)

        if datasets_settings.stack:
            for obs in progress_bar(self.observations, desc="Observations"):
                log.debug(f"Processing observation {obs.obs_id}")
                cutout = stacked.cutout(obs.pointing_radec,
                                        width=2 * offset_max)
                dataset = maker.run(cutout, obs)
                dataset = maker_safe_mask.run(dataset, obs)
                if bkg_maker is not None:
                    dataset = bkg_maker.run(dataset)

                if bkg_method == "ring":
                    dataset = dataset.to_map_dataset()

                log.debug(dataset)
                stacked.stack(dataset)
            datasets = [stacked]
        else:
            datasets = []

            for obs in progress_bar(self.observations, desc="Observations"):
                log.debug(f"Processing observation {obs.obs_id}")
                cutout = stacked.cutout(obs.pointing_radec,
                                        width=2 * offset_max)
                dataset = maker.run(cutout, obs)
                dataset = maker_safe_mask.run(dataset, obs)
                if bkg_maker is not None:
                    dataset = bkg_maker.run(dataset)
                log.debug(dataset)
                datasets.append(dataset)
        self.datasets = Datasets(datasets)
Example 16
    def run(self, datasets):
        """Run light curve extraction.

        Normalize integral and energy flux between emin and emax.

        Parameters
        ----------
        datasets : list of `~gammapy.datasets.SpectrumDataset` or `~gammapy.datasets.MapDataset`
            Spectrum or Map datasets.

        Returns
        -------
        lightcurve : `~gammapy.estimators.LightCurve`
            Light curve object.
        """
        datasets = Datasets(datasets)

        if self.time_intervals is None:
            gti = datasets.gti
        else:
            gti = GTI.from_time_intervals(self.time_intervals)

        gti = gti.union(overlap_ok=False, merge_equal=False)

        rows = []
        for t_min, t_max in progress_bar(gti.time_intervals,
                                         desc="Time intervals"):
            datasets_to_fit = datasets.select_time(t_min=t_min,
                                                   t_max=t_max,
                                                   atol=self.atol)

            if len(datasets_to_fit) == 0:
                log.debug(
                    f"No Dataset for the time interval {t_min} to {t_max}")
                continue

            row = {"time_min": t_min.mjd, "time_max": t_max.mjd}
            fp = self.estimate_time_bin_flux(datasets_to_fit)
            fp_table = fp.to_table()

            for column in fp_table.colnames:
                if column == "counts":
                    data = fp_table[column].quantity.sum(axis=1)
                else:
                    data = fp_table[column].quantity
                row[column] = data

            fp_table_flux = fp.to_table(sed_type="flux")
            for column in fp_table_flux.colnames:
                if "flux" in column:
                    row[column] = fp_table_flux[column].quantity

            rows.append(row)

        if len(rows) == 0:
            raise ValueError(
                "LightCurveEstimator: No datasets in time intervals")

        table = table_from_row_data(rows=rows, meta={"SED_TYPE": "likelihood"})
        # TODO: use FluxPoints here
        return LightCurve(table=table)