def run(self, dataset):
    """Compute correlated excess, Li & Ma significance and flux maps.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        Input dataset.

    Returns
    -------
    images : dict
        Dictionary containing result correlated maps. Keys are:

            * counts : correlated counts map
            * background : correlated background map
            * excess : correlated excess map
            * ts : TS map
            * sqrt_ts : sqrt(delta TS), or Li-Ma significance map
            * err : symmetric error map (from covariance)
            * flux : flux map. An exposure map must be present in the
              dataset to compute flux map
            * errn : negative error map
            * errp : positive error map
            * ul : upper limit map
    """
    if not isinstance(dataset, MapDataset):
        raise ValueError("Unsupported dataset type")

    # TODO: add support for joint excess estimate to ExcessMapEstimator?
    datasets = Datasets(dataset)

    # Default to a single energy bin spanning the full counts axis.
    if self.e_edges is None:
        axis = dataset.counts.geom.axes["energy"]
        edges = u.Quantity([axis.edges[0], axis.edges[-1]])
    else:
        edges = self.e_edges

    # One result dict of 2D maps per energy bin.
    per_bin = [
        self.estimate_excess_map(datasets.slice_energy(lo, hi)[0])
        for lo, hi in zip(edges[:-1], edges[1:])
    ]

    # Stack the per-bin images back into cube maps, keyed like the bin results.
    return {
        key: Map.from_images(images=[maps[key] for maps in per_bin])
        for key in per_bin[0]
    }
def run(self, dataset):
    """Run adaptive smoothing on input MapDataset.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset (with one bin in energy at most).

    Returns
    -------
    images : dict of `~gammapy.maps.WcsNDMap`
        Smoothed images; keys are:

            * 'counts'
            * 'background'
            * 'flux' (optional)
            * 'scales'
            * 'sqrt_ts'.
    """
    # Default to a single energy bin covering the full counts axis.
    if self.energy_edges is None:
        axis = dataset.counts.geom.axes["energy"]
        edges = u.Quantity([axis.edges[0], axis.edges[-1]])
    else:
        edges = self.energy_edges

    per_bin = []
    for lo, hi in zip(edges[:-1], edges[1:]):
        # Keep the original dataset name so model assignments stay valid.
        sub = dataset.slice_by_energy(lo, hi, name=dataset.name)
        sub.models = dataset.models
        per_bin.append(self.estimate_maps(sub))

    # Stack per-bin images into cube maps, one entry per result key.
    return {
        key: Map.from_images(images=[maps[key] for maps in per_bin])
        for key in per_bin[0]
    }
def run(self, dataset):
    """Run adaptive smoothing on input MapDataset.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset (with one bin in energy at most).

    Returns
    -------
    images : dict of `~gammapy.maps.WcsNDMap`
        Smoothed images; keys are:

            * 'counts'
            * 'background'
            * 'flux' (optional)
            * 'scales'
            * 'sqrt_ts'.
    """
    energy_axis = self._get_energy_axis(dataset)

    per_bin = []
    edge_pairs = progress_bar(energy_axis.iter_by_edges, desc="Energy bins")
    for lo, hi in edge_pairs:
        # Keep the original dataset name so model assignments stay valid.
        sub = dataset.slice_by_energy(energy_min=lo, energy_max=hi, name=dataset.name)
        sub.models = dataset.models
        per_bin.append(self.estimate_maps(sub))

    # Stack per-bin images into cube maps, one entry per result key.
    return {
        key: Map.from_images(images=[maps[key] for maps in per_bin])
        for key in per_bin[0]
    }
def run(self, dataset):
    """Run TS map estimation.

    Requires a MapDataset with counts, exposure and background_model
    properly set to run.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Input MapDataset.

    Returns
    -------
    maps : dict
        Dictionary containing result maps. Keys are:

            * ts : delta TS map
            * sqrt_ts : sqrt(delta TS), or significance map
            * flux : flux map
            * flux_err : symmetric error map
            * flux_ul : upper limit map
    """
    # Remember the models before padding/downsampling replaces the dataset.
    models = dataset.models

    # Pad so the flux-estimation kernel fits at the map edges, then
    # downsample; the inverse (upsample + crop) is applied to the results.
    pad_width = self.estimate_pad_width(dataset=dataset)
    dataset = dataset.pad(pad_width, name=dataset.name)
    dataset = dataset.downsample(self.downsampling_factor, name=dataset.name)

    energy_axis = self._get_energy_axis(dataset=dataset)

    per_bin = []
    for lo, hi in progress_bar(energy_axis.iter_by_edges, desc="Energy bins"):
        sub = dataset.slice_by_energy(energy_min=lo, energy_max=hi, name=dataset.name)
        if self.sum_over_energy_groups:
            sub = sub.to_image(name=dataset.name)
        sub.models = models
        per_bin.append(self.estimate_flux_map(sub))

    maps = Maps()
    for key in self.selection_all:
        stacked = Map.from_images(images=[result[key] for result in per_bin])
        # "niter" presumably holds integer iteration counts, hence
        # nearest-neighbour (order 0) rather than linear interpolation.
        interp_order = 0 if key == "niter" else 1
        stacked = stacked.upsample(
            factor=self.downsampling_factor, preserve_counts=False, order=interp_order
        )
        maps[key] = stacked.crop(crop_width=pad_width)

    meta = {"n_sigma": self.n_sigma, "n_sigma_ul": self.n_sigma_ul}
    return FluxMaps(data=maps, reference_model=self.model, gti=dataset.gti, meta=meta)
def run(self, dataset):
    """Run TS map estimation.

    Requires a MapDataset with counts, exposure and background_model
    properly set to run.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Input MapDataset.

    Returns
    -------
    maps : dict
        Dictionary containing result maps. Keys are:

            * ts : delta TS map
            * sqrt_ts : sqrt(delta TS), or significance map
            * flux : flux map
            * flux_err : symmetric error map
            * flux_ul : upper limit map
    """
    # Remember the models before padding/downsampling replaces the dataset.
    models = dataset.models

    # Optionally pad to a power-of-two-friendly shape and downsample;
    # the inverse (upsample + crop) is applied to the result maps below.
    if self.downsampling_factor:
        shape = dataset.counts.geom.to_image().data_shape
        pad_width = symmetric_crop_pad_width(shape, shape_2N(shape))[0]
        dataset = dataset.pad(pad_width).downsample(self.downsampling_factor)

    # TODO: add support for joint likelihood fitting to TSMapEstimator
    datasets = Datasets(dataset)

    # Default to a single energy bin spanning the full counts axis.
    if self.energy_edges is None:
        axis = dataset.counts.geom.axes["energy"]
        edges = u.Quantity([axis.edges[0], axis.edges[-1]])
    else:
        edges = self.energy_edges

    per_bin = []
    for lo, hi in zip(edges[:-1], edges[1:]):
        sub = datasets.slice_by_energy(lo, hi)[0]
        if self.sum_over_energy_groups:
            sub = sub.to_image()
        sub.models = models
        per_bin.append(self.estimate_flux_map(sub))

    result_all = {}
    for key in self.selection_all:
        stacked = Map.from_images(images=[result[key] for result in per_bin])
        if self.downsampling_factor:
            # "niter" presumably holds integer iteration counts, hence
            # nearest-neighbour (order 0) rather than linear interpolation.
            interp_order = 0 if key == "niter" else 1
            stacked = stacked.upsample(
                factor=self.downsampling_factor,
                preserve_counts=False,
                order=interp_order,
            )
            stacked = stacked.crop(crop_width=pad_width)
        result_all[key] = stacked

    # Significance is derived from the stacked ts/norm maps, not per bin.
    result_all["sqrt_ts"] = self.estimate_sqrt_ts(result_all["ts"], result_all["norm"])

    return FluxMaps(data=result_all, reference_model=self.model, gti=dataset.gti)