def run(self, dataset):
    """Compute correlated excess, Li & Ma significance and flux maps.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        Input dataset.

    Returns
    -------
    images : dict
        Dictionary containing result correlated maps. Keys are:

            * counts : correlated counts map
            * background : correlated background map
            * excess : correlated excess map
            * ts : TS map
            * sqrt_ts : sqrt(delta TS), or Li-Ma significance map
            * err : symmetric error map (from covariance)
            * flux : flux map. An exposure map must be present in the
              dataset to compute flux map
            * errn : negative error map
            * errp : positive error map
            * ul : upper limit map
    """
    if not isinstance(dataset, MapDataset):
        raise ValueError("Unsupported dataset type")

    # TODO: add support for joint excess estimate to ExcessMapEstimator?
    datasets = Datasets(dataset)

    if self.e_edges is None:
        # Default: a single interval spanning the full energy axis.
        axis = dataset.counts.geom.axes["energy"]
        edges = u.Quantity([axis.edges[0], axis.edges[-1]])
    else:
        edges = self.e_edges

    # Estimate the excess maps independently on each energy slice.
    per_interval = [
        self.estimate_excess_map(datasets.slice_energy(emin, emax)[0])
        for emin, emax in zip(edges[:-1], edges[1:])
    ]

    # Re-assemble the per-interval images into maps with an energy axis.
    return {
        name: Map.from_images(images=[maps[name] for maps in per_interval])
        for name in per_interval[0]
    }
def run(self, dataset):
    """Run adaptive smoothing on input MapDataset.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset (with one bin in energy at most).

    Returns
    -------
    images : dict of `~gammapy.maps.WcsNDMap`
        Smoothed images; keys are:

            * 'counts'
            * 'background'
            * 'flux' (optional)
            * 'scales'
            * 'sqrt_ts'.
    """
    datasets = Datasets([dataset])

    if self.e_edges is None:
        # Default: a single interval spanning the full energy axis.
        energy_axis = dataset.counts.geom.axes["energy"]
        e_edges = u.Quantity([energy_axis.edges[0], energy_axis.edges[-1]])
    else:
        e_edges = self.e_edges

    results = []

    for e_min, e_max in zip(e_edges[:-1], e_edges[1:]):
        # Keep the slice in its own local instead of rebinding the
        # ``dataset`` parameter, so the input is not shadowed.
        sliced_dataset = datasets.slice_energy(e_min, e_max)[0]
        result = self.estimate_maps(sliced_dataset)
        results.append(result)

    result_all = {}

    # Iterate over ``results[0]`` rather than the variable leaked from the
    # loop above; relying on a loop variable after the loop is fragile and
    # inconsistent with the sibling estimators.
    for name in results[0].keys():
        map_all = Map.from_images(images=[_[name] for _ in results])
        result_all[name] = map_all

    return result_all
def run(self, dataset):
    """
    Run TS map estimation.

    Requires a MapDataset with counts, exposure and background_model
    properly set to run.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Input MapDataset.

    Returns
    -------
    maps : dict
        Dictionary containing result maps. Keys are:

            * ts : delta TS map
            * sqrt_ts : sqrt(delta TS), or significance map
            * flux : flux map
            * flux_err : symmetric error map
            * flux_ul : upper limit map
    """
    factor = self.downsampling_factor

    if factor:
        # Pad so the downsampled maps can be cropped back to the original
        # shape exactly after upsampling.
        shape = dataset.counts.geom.to_image().data_shape
        pad_width = symmetric_crop_pad_width(shape, shape_2N(shape))[0]
        dataset = dataset.pad(pad_width).downsample(factor)

    # TODO: add support for joint likelihood fitting to TSMapEstimator
    datasets = Datasets(dataset)

    if self.e_edges is None:
        # Default: a single interval spanning the full energy axis.
        energy_axis = dataset.counts.geom.axes["energy"]
        e_edges = u.Quantity([energy_axis.edges[0], energy_axis.edges[-1]])
    else:
        e_edges = self.e_edges

    results = []

    for e_min, e_max in zip(e_edges[:-1], e_edges[1:]):
        sliced = datasets.slice_energy(e_min, e_max)[0]
        if self.sum_over_energy_groups:
            sliced = sliced.to_image()
        results.append(self.estimate_flux_map(sliced))

    result_all = {}

    for name in self.selection_all:
        map_all = Map.from_images(images=[maps[name] for maps in results])

        if factor:
            # Nearest-neighbour (order=0) for "niter", linear
            # interpolation (order=1) for everything else.
            order = 0 if name == "niter" else 1
            map_all = map_all.upsample(
                factor=factor, preserve_counts=False, order=order
            )
            map_all = map_all.crop(crop_width=pad_width)

        result_all[name] = map_all

    result_all["sqrt_ts"] = self.estimate_sqrt_ts(result_all["ts"])
    return result_all