def run(self, datasets):
    """Estimate flux for a given energy range.

    Parameters
    ----------
    datasets : list of `~gammapy.datasets.SpectrumDataset`
        Spectrum datasets.

    Returns
    -------
    result : dict
        Dict with results for the flux point.
    """
    datasets = Datasets(datasets)
    sliced = datasets.slice_by_energy(
        energy_min=self.energy_min, energy_max=self.energy_max
    )

    # TODO: simplify model book-keeping!!
    fit_models = Models()
    for entry in datasets.models:
        if "sky-model" in entry.tag:
            fit_models.append(entry)
        elif "fov-bkg" in entry.tag:
            # Background models are tied to a dataset name; re-point a copy
            # at the sliced dataset and reset its parameters.
            bkg = entry.copy(dataset_name=entry.datasets_names[0] + "-sliced")
            bkg.reset_to_default()
            fit_models.append(bkg)

    if len(datasets) > 0:
        # TODO: this relies on the energy binning of the first dataset
        edges = sliced[0].counts.geom.axes["energy"].edges
        energy_min, energy_max = edges.min(), edges.max()
    else:
        energy_min, energy_max = self.energy_min, self.energy_max

    # True if at least one sliced dataset has any unmasked bins.
    has_contribution = any(d.mask.data.any() for d in sliced)

    scale_model = self.get_scale_model(fit_models)

    with np.errstate(invalid="ignore", divide="ignore"):
        result = self.get_reference_flux_values(scale_model.model, energy_min, energy_max)

    if len(datasets) > 0 and has_contribution:
        fit_models[self.source].spectral_model = scale_model
        sliced.models = fit_models
        result.update(self._parameter_estimator.run(sliced, scale_model.norm))
        result["sqrt_ts"] = self.get_sqrt_ts(result["ts"], result["norm"])
    else:
        # No data contributes in this energy range: fill with NaN results.
        result.update(self.nan_result)

    return result
def compute_npreds(datasets, n_iter, n_src):
    """Repeatedly evaluate ``npred`` with randomly placed Gaussian sources.

    Builds ``n_src`` sources at random positions within +/- 4 deg, assigns
    them to ``datasets`` and evaluates ``datasets[0].npred()`` ``n_iter``
    times. The predicted counts are intentionally discarded — this is a
    timing/benchmark helper.

    Parameters
    ----------
    datasets : `~gammapy.datasets.Datasets`
        Datasets to assign the models to.
    n_iter : int
        Number of npred evaluations.
    n_src : int
        Number of random sources to generate.
    """
    models = Models()
    lonlat = np.random.uniform(-4.0, 4.0, (n_src, 2))

    for lon, lat in lonlat:
        source = SkyModel(
            spectral_model=PowerLawSpectralModel(),
            spatial_model=GaussianSpatialModel(
                lon_0=lon * u.deg, lat_0=lat * u.deg, sigma="0.5 deg"
            ),
        )
        models.append(source)

    for _ in range(n_iter):
        datasets.models = models
        # Force evaluation; the result itself is thrown away.
        datasets[0].npred()
def make_datasets_example():
    """Build and write two example map datasets.

    Creates two `MapDataset`s ("gc" and "g09") from CTA-1DC observations,
    each with a point source (exp-cutoff power law), a shared diffuse
    template model and the stacked background model, then writes them to
    ``$GAMMAPY_DATA/tests/models`` with prefix ``gc_example``.
    """
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )

    # Two small maps centred on the GC and 1 deg off in longitude.
    geoms = [
        WcsGeom.create(
            skydir=skydir,
            binsz=0.1,
            width=(2, 2),
            frame="galactic",
            proj="CAR",
            axes=[energy_axis],
        )
        for skydir in [(0, 0), (1, 0)]
    ]

    names = ["gc", "g09"]
    source_positions = [(0, 0), (0.9, 0.1)]

    models = Models()
    for name, (lon, lat) in zip(names, source_positions):
        spatial = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        models.append(
            SkyModel(spatial_model=spatial, spectral_model=spectral, name=name)
        )

    # Share a single reference-energy parameter between the two sources.
    models["gc"].spectral_model.reference = models["g09"].spectral_model.reference

    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    observations = data_store.get_observations([110380, 111140, 111159])

    diffuse_model = SkyModel(
        PowerLawSpectralModel(),
        TemplateSpatialModel.read(
            "$GAMMAPY_DATA/fermi-3fhl-gc/gll_iem_v06_gc.fits.gz"
        ),
    )

    maker = MapDatasetMaker()
    datasets = Datasets()

    for name, source_model, geom in zip(names, models, geoms):
        stacked = MapDataset.create(geom=geom, name=name)
        for obs in observations:
            stacked.stack(maker.run(stacked, obs))
        # Keep the background model attached at creation, but list the source
        # and diffuse components in front of it.
        bkg = stacked.models.pop(0)
        stacked.models = [source_model, diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write(
        "$GAMMAPY_DATA/tests/models",
        prefix="gc_example",
        overwrite=True,
        write_covariance=False,
    )