def run(self, datasets):
    """Estimate flux for a given energy range.

    Parameters
    ----------
    datasets : list of `~gammapy.datasets.SpectrumDataset`
        Spectrum datasets.

    Returns
    -------
    result : dict
        Dict with results for the flux point.
    """
    datasets = Datasets(datasets)
    datasets_sliced = datasets.slice_by_energy(
        energy_min=self.energy_min, energy_max=self.energy_max
    )

    # TODO: simplify model book-keeping!!
    models = Models()

    for model in datasets.models:
        if "sky-model" in model.tag:
            models.append(model)
        elif "fov-bkg" in model.tag:
            bkg_model = model.copy(dataset_name=model.datasets_names[0] + "-sliced")
            bkg_model.reset_to_default()
            models.append(bkg_model)

    if len(datasets_sliced) > 0:
        # TODO: this relies on the energy binning of the first dataset
        energy_axis = datasets_sliced[0].counts.geom.axes["energy"]
        energy_min, energy_max = energy_axis.edges.min(), energy_axis.edges.max()
    else:
        energy_min, energy_max = self.energy_min, self.energy_max

    contributions = []

    for dataset in datasets_sliced:
        if dataset.mask is not None:
            value = dataset.mask.data.any()
        else:
            value = True
        contributions.append(value)

    model = self.get_scale_model(models)

    with np.errstate(invalid="ignore", divide="ignore"):
        result = self.get_reference_flux_values(model.model, energy_min, energy_max)

    if len(datasets) == 0 or not np.any(contributions):
        result.update(self.nan_result)
    else:
        models[self.source].spectral_model = model
        datasets_sliced.models = models
        result.update(self._parameter_estimator.run(datasets_sliced, model.norm))
        result["sqrt_ts"] = self.get_sqrt_ts(result["ts"], result["norm"])

    return result
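# --- Usage sketch (assumption-laden, not part of the snippet above) ----------
# The docstring only describes the call signature, so here is a minimal sketch
# of how a caller might invoke this method, assuming it belongs to gammapy's
# `FluxEstimator` and that `source`, `energy_min` and `energy_max` are set in
# its __init__ (as the body above suggests). Dataset and source names are
# illustrative only.
import astropy.units as u
from gammapy.estimators import FluxEstimator

estimator = FluxEstimator(
    source="crab",             # name of the SkyModel whose flux norm is fitted
    energy_min=1 * u.TeV,      # energy range of the single flux point
    energy_max=10 * u.TeV,
)
result = estimator.run(datasets)  # `datasets`: list of SpectrumDataset, defined elsewhere
print(result["norm"], result["sqrt_ts"])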
def run_analyses(targets):
    log.info("Run small source extension check.")

    info = {}

    targets = list(AVAILABLE_TARGETS) if targets == "all-targets" else [targets]

    for target in targets:
        t = time.time()

        config = AnalysisConfig.read(f"configs/config_{target}.yaml")
        analysis = Analysis(config)
        analysis.get_observations()
        info["data_preparation"] = time.time() - t

        t = time.time()
        analysis.get_datasets()
        info["data_reduction"] = time.time() - t

        models = Models.read(f"models/model_{target}.yaml")

        point_models = Models(define_model_pointlike(models[0]))
        analysis.set_models(point_models)

        t = time.time()
        analysis.run_fit()
        info["point_model_fitting"] = time.time() - t

        log.info(f"\n{point_models.to_parameters_table()}")

        log.info("Fitting extended gaussian source.")
        analysis.datasets.models = []
        analysis.set_models(models)

        t = time.time()
        analysis.run_fit()
        info["gauss_model_fitting"] = time.time() - t

        log.info(analysis.fit_result)
        log.info(f"\n{models.to_parameters_table()}")

        log.info("Extract size error, UL and stat profile.")
        t = time.time()
        analysis.models[0].spatial_model.lon_0.frozen = True
        analysis.models[0].spatial_model.lat_0.frozen = True
        analysis.models[0].spectral_model.index.frozen = True

        size_est = ExtensionEstimator(
            source=models[0].name,
            energy_edges=[0.2, 10.0] * u.TeV,
            selection_optional=["errn-errp", "ul", "scan"],
            size_min="0.08 deg",
            size_max="0.12 deg",
            size_n_values=20,
            reoptimize=True,
        )
        res = size_est.run(analysis.datasets)
        info["estimator"] = time.time() - t
        t = time.time()

        log.info(res)
        plot_profile(res[0], target)

        Path(f"bench_{target}.yaml").write_text(
            yaml.dump(info, sort_keys=False, indent=4)
        )
        analysis.models.to_parameters_table().write(
            f"results/{target}_results.ecsv", overwrite=True
        )
def make_datasets_example():
    # Define which data to use and print some information
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )

    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = Models()

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(
            spatial_model=spatial_model, spectral_model=spectral_model, name=names[idx]
        )
        models.append(model_ecpl)

    models["gc"].spectral_model.reference = models["g09"].spectral_model.reference

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_spatial = TemplateSpatialModel.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/gll_iem_v06_gc.fits.gz"
    )
    diffuse_model = SkyModel(PowerLawSpectralModel(), diffuse_spatial)

    maker = MapDatasetMaker()
    datasets = Datasets()

    observations = data_store.get_observations(obs_ids)

    for idx, geom in enumerate(geoms):
        stacked = MapDataset.create(geom=geom, name=names[idx])

        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        bkg = stacked.models.pop(0)
        stacked.models = [models[idx], diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write(
        "$GAMMAPY_DATA/tests/models",
        prefix="gc_example",
        overwrite=True,
        write_covariance=False,
    )
def run_region(self, kr, lon, lat, radius):
    # TODO: for now we have to read/create the allsky maps each in each job
    # because we can't pickle <functools._lru_cache_wrapper object
    # send this back to init when fixed

    # exposure
    exposure_hpx = Map.read(
        "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
    )
    exposure_hpx.unit = "cm2 s"

    # iem
    iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
    iem_fermi_extra = Map.read(iem_filepath)
    # norm=1.1, tilt=0.03 see paper appendix A
    model_iem = SkyDiffuseCube(
        iem_fermi_extra, norm=1.1, tilt=0.03, name="iem_extrapolated"
    )

    # ROI
    roi_time = time()
    ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
    width = 2 * (radius + self.psf_margin)

    # Counts
    counts = Map.create(
        skydir=ROI_pos,
        width=width,
        proj="CAR",
        frame="galactic",
        binsz=1 / 8.0,
        axes=[self.energy_axis],
        dtype=float,
    )
    counts.fill_by_coord(
        {"skycoord": self.events.radec, "energy": self.events.energy}
    )

    axis = MapAxis.from_nodes(
        counts.geom.axes[0].center, name="energy_true", unit="GeV", interp="log"
    )
    wcs = counts.geom.wcs
    geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
    coords = geom.get_coord()

    # expo
    data = exposure_hpx.interp_by_coord(coords)
    exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

    # read PSF
    psf_kernel = PSFKernel.from_table_psf(
        self.psf, geom, max_radius=self.psf_margin * u.deg
    )

    # Energy Dispersion
    e_true = exposure.geom.axes[0].edges
    e_reco = counts.geom.axes[0].edges
    edisp = EDispKernel.from_diagonal_response(e_true=e_true, e_reco=e_reco)

    # fit mask
    if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
        coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
    mask = (
        (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
        & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
        & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
        & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
    )
    mask_fermi = WcsNDMap(counts.geom, mask)

    # IEM
    eval_iem = MapEvaluator(
        model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
    )
    bkg_iem = eval_iem.compute_npred()

    # ISO
    eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
    bkg_iso = eval_iso.compute_npred()

    # merge iem and iso, only one local normalization is fitted
    dataset_name = "3FHL_ROI_num" + str(kr)
    background_total = bkg_iem + bkg_iso
    background_model = BackgroundModel(
        background_total, name="bkg_iem+iso", datasets_names=[dataset_name]
    )
    background_model.parameters["norm"].min = 0.0

    # Sources model
    in_roi = self.FHL3.positions.galactic.contained_by(wcs)
    FHL3_roi = []
    for ks in range(len(self.FHL3.table)):
        if in_roi[ks] == True:
            model = self.FHL3[ks].sky_model()
            model.spatial_model.parameters.freeze_all()  # freeze spatial
            model.spectral_model.parameters["amplitude"].min = 0.0
            if isinstance(model.spectral_model, PowerLawSpectralModel):
                model.spectral_model.parameters["index"].min = 0.1
                model.spectral_model.parameters["index"].max = 10.0
            else:
                model.spectral_model.parameters["alpha"].min = 0.1
                model.spectral_model.parameters["alpha"].max = 10.0
            FHL3_roi.append(model)
    model_total = Models([background_model] + FHL3_roi)

    # Dataset
    dataset = MapDataset(
        models=model_total,
        counts=counts,
        exposure=exposure,
        psf=psf_kernel,
        edisp=edisp,
        mask_fit=mask_fermi,
        name=dataset_name,
    )
    cat_stat = dataset.stat_sum()

    datasets = Datasets([dataset])
    fit = Fit(datasets)
    results = fit.run(**self.optimize_opts)
    print("ROI_num", str(kr), "\n", results)
    fit_stat = datasets.stat_sum()

    if results.message != "Optimization failed.":
        datasets.write(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
        np.savez(
            self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
            message=results.message,
            stat=[cat_stat, fit_stat],
        )

    exec_time = time() - roi_time
    print("ROI", kr, " time (s): ", exec_time)

    for model in FHL3_roi:
        if (
            self.FHL3[model.name].data["ROI_num"] == kr
            and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
        ):
            flux_points = FluxPointsEstimator(
                e_edges=self.El_flux,
                source=model.name,
                n_sigma_ul=2,
            ).run(datasets=datasets)
            filename = self.resdir / f"{model.name}_flux_points.fits"
            flux_points.write(filename, overwrite=True)

    exec_time = time() - roi_time - exec_time
    print("ROI", kr, " Flux points time (s): ", exec_time)
def make_example_2():
    spatial = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="1 deg")
    model = SkyModel(PowerLawSpectralModel(), spatial)
    models = Models([model])
    models.write(DATA_PATH / "example2.yaml")
def __init__(self, name="test"): self.name = name self._models = Models([MyModel(x=1.99, y=2.99e3, z=3.99e-2)]) self.data_shape = (1, ) self.meta_table = Table()
def models():
    filename = get_pkg_data_filename("data/examples.yaml")
    models_data = read_yaml(filename)
    models = Models.from_dict(models_data)
    return models
def read_models(self, path):
    """Read models from YAML file."""
    path = make_path(path)
    models = Models.read(path)
    self.set_models(models)
def __init__(self, name=""): self.name = name self.models = Models([MyModel()]) self.data_shape = (1,)
def __init__(self, name="test"): self.name = name self.models = Models(MyModel())
def run_region(self, kr, lon, lat, radius):
    # TODO: for now we have to read/create the allsky maps each in each job
    # because we can't pickle <functools._lru_cache_wrapper object
    # send this back to init when fixed
    log.info(f"ROI {kr}: loading data")

    # exposure
    exposure_hpx = Map.read(
        "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
    )
    exposure_hpx.unit = "cm2 s"

    # psf
    psf_map = PSFMap.read(
        "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz", format="gtpsf"
    )
    # reduce size of the PSF
    axis = psf_map.psf_map.geom.axes["rad"].center.to_value(u.deg)
    indmax = np.argmin(np.abs(self.psf_margin - axis))
    psf_map = psf_map.slice_by_idx(slices={"rad": slice(0, indmax)})

    # iem
    iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
    iem_fermi_extra = Map.read(iem_filepath)
    # norm=1.1, tilt=0.03 see paper appendix A
    model_iem = SkyModel(
        PowerLawNormSpectralModel(norm=1.1, tilt=0.03),
        TemplateSpatialModel(iem_fermi_extra, normalize=False),
        name="iem_extrapolated",
    )

    # ROI
    roi_time = time()
    ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
    width = 2 * (radius + self.psf_margin)

    # Counts
    counts = Map.create(
        skydir=ROI_pos,
        width=width,
        proj="CAR",
        frame="galactic",
        binsz=1 / 8.0,
        axes=[self.energy_axis],
        dtype=float,
    )
    counts.fill_by_coord(
        {"skycoord": self.events.radec, "energy": self.events.energy}
    )

    axis = MapAxis.from_nodes(
        counts.geom.axes[0].center, name="energy_true", unit="GeV", interp="log"
    )
    wcs = counts.geom.wcs
    geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
    coords = geom.get_coord()

    # expo
    data = exposure_hpx.interp_by_coord(coords)
    exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

    # Energy Dispersion
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis_true=axis, energy_axis=self.energy_axis
    )

    # fit mask
    if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
        coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
    mask = (
        (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
        & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
        & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
        & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
    )
    mask_fermi = WcsNDMap(counts.geom, mask)
    mask_safe_fermi = WcsNDMap(counts.geom, np.ones(mask.shape, dtype=bool))

    log.info(f"ROI {kr}: pre-computing diffuse")

    # IEM
    eval_iem = MapEvaluator(
        model=model_iem,
        exposure=exposure,
        psf=psf_map.get_psf_kernel(geom),
        edisp=edisp.get_edisp_kernel(),
    )
    bkg_iem = eval_iem.compute_npred()

    # ISO
    eval_iso = MapEvaluator(
        model=self.model_iso, exposure=exposure, edisp=edisp.get_edisp_kernel()
    )
    bkg_iso = eval_iso.compute_npred()

    # merge iem and iso, only one local normalization is fitted
    dataset_name = "3FHL_ROI_num" + str(kr)
    background_total = bkg_iem + bkg_iso

    # Dataset
    dataset = MapDataset(
        counts=counts,
        exposure=exposure,
        background=background_total,
        psf=psf_map,
        edisp=edisp,
        mask_fit=mask_fermi,
        mask_safe=mask_safe_fermi,
        name=dataset_name,
    )

    background_model = FoVBackgroundModel(dataset_name=dataset_name)
    background_model.parameters["norm"].min = 0.0

    # Sources model
    in_roi = self.FHL3.positions.galactic.contained_by(wcs)
    FHL3_roi = []
    for ks in range(len(self.FHL3.table)):
        if in_roi[ks] == True:
            model = self.FHL3[ks].sky_model()
            model.spatial_model.parameters.freeze_all()  # freeze spatial
            model.spectral_model.parameters["amplitude"].min = 0.0
            if isinstance(model.spectral_model, PowerLawSpectralModel):
                model.spectral_model.parameters["index"].min = 0.1
                model.spectral_model.parameters["index"].max = 10.0
            else:
                model.spectral_model.parameters["alpha"].min = 0.1
                model.spectral_model.parameters["alpha"].max = 10.0
            FHL3_roi.append(model)
    model_total = Models(FHL3_roi + [background_model])
    dataset.models = model_total
    cat_stat = dataset.stat_sum()

    datasets = Datasets([dataset])

    log.info(f"ROI {kr}: running fit")
    fit = Fit(**self.fit_opts)
    results = fit.run(datasets=datasets)
    print("ROI_num", str(kr), "\n", results)
    fit_stat = datasets.stat_sum()

    if results.message != "Optimization failed.":
        filedata = Path(self.resdir) / f"3FHL_ROI_num{kr}_datasets.yaml"
        filemodel = Path(self.resdir) / f"3FHL_ROI_num{kr}_models.yaml"
        datasets.write(filedata, filemodel, overwrite=True)
        np.savez(
            self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
            message=results.message,
            stat=[cat_stat, fit_stat],
        )

    exec_time = time() - roi_time
    print("ROI", kr, " time (s): ", exec_time)

    log.info(f"ROI {kr}: running flux points")
    for model in FHL3_roi:
        if (
            self.FHL3[model.name].data["ROI_num"] == kr
            and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
        ):
            print(model.name)
            flux_points = FluxPointsEstimator(
                energy_edges=self.El_flux,
                source=model.name,
                n_sigma_ul=2,
                selection_optional=["ul"],
            ).run(datasets=datasets)
            flux_points.meta["sqrt_ts_threshold_ul"] = 1
            filename = self.resdir / f"{model.name}_flux_points.fits"
            flux_points.write(filename, overwrite=True)

    exec_time = time() - roi_time - exec_time
    print("ROI", kr, " Flux points time (s): ", exec_time)
# **Define 3D Sky Model**
DarkMatterAnnihilationSpectralModel.THERMAL_RELIC_CROSS_SECTION = xsection

flux_model = DarkMatterAnnihilationSpectralModel(
    mass=mDM, channel=channel, jfactor=JFAC
)
spatial_model = TemplateSpatialModel.read(jfactor_filename)

sky_model = SkyModel(
    spatial_model=spatial_model, spectral_model=flux_model, name="model-simu"
)
bkg_model = FoVBackgroundModel(dataset_name="dataset-simu")
models = Models([sky_model, bkg_model])

# ## Declare observation values
pointing = src_pos
livetime = 100 * u.hour
offset = 2.0 * u.deg
# offset = 0.5 * u.deg

# Create an in-memory observation
obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)

# ## Start the simulations and get the limits
# masses = [70, 200, 500, 800, 1000, 5000, 8000, 10000, 30000, 50000, 60000, 100000] * u.GeV
masses = [200, 1000, 50000] * u.GeV
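# --- Simulation sketch (assumptions flagged) ---------------------------------
# The script's actual simulation loop is not shown here. The following is a
# minimal sketch of how the in-memory observation above could be reduced to a
# MapDataset and filled with fake counts; the geometry, energy axis and maker
# selection are illustrative assumptions, not the script's real settings.
from gammapy.datasets import MapDataset
from gammapy.makers import MapDatasetMaker
from gammapy.maps import MapAxis, WcsGeom

energy_axis = MapAxis.from_energy_bounds("0.05 TeV", "100 TeV", nbin=20, name="energy")
geom = WcsGeom.create(
    skydir=src_pos, width=(4, 4), binsz=0.05, frame="galactic", axes=[energy_axis]
)
empty = MapDataset.create(geom, name="dataset-simu")
maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
dataset = maker.run(empty, obs)
dataset.models = models  # sky_model + FoV background defined above
dataset.fake()           # draw Poisson counts from the predicted counts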
def to_models(self, **kwargs):
    """Create Models object from catalogue."""
    return Models([_.sky_model(**kwargs) for _ in self])
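# --- Usage sketch -------------------------------------------------------------
# Assuming this `to_models` lives on a gammapy source-catalogue class (it mirrors
# `SourceCatalog.to_models` in recent gammapy versions), usage would look like:
from gammapy.catalog import SourceCatalog3FHL

catalog = SourceCatalog3FHL()  # requires $GAMMAPY_DATA to be set
models = catalog.to_models()   # one SkyModel per catalogue source
print(len(models))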
def test_map_fit(sky_model, geom, geom_etrue):
    dataset_1 = get_map_dataset(geom, geom_etrue, name="test-1")
    dataset_2 = get_map_dataset(geom, geom_etrue, name="test-2")
    datasets = Datasets([dataset_1, dataset_2])

    models = Models(datasets.models)
    models.insert(0, sky_model)

    models["test-1-bkg"].spectral_model.norm.value = 0.5
    models["test-model"].spatial_model.sigma.frozen = True

    datasets.models = models
    dataset_2.counts = dataset_2.npred()
    dataset_1.counts = dataset_1.npred()

    models["test-1-bkg"].spectral_model.norm.value = 0.49
    models["test-2-bkg"].spectral_model.norm.value = 0.99

    fit = Fit(datasets)
    result = fit.run()

    assert result.success
    assert "minuit" in repr(result)

    npred = dataset_1.npred().data.sum()
    assert_allclose(npred, 7525.790688, rtol=1e-3)
    assert_allclose(result.total_stat, 21659.2139, rtol=1e-3)

    pars = result.parameters
    assert_allclose(pars["lon_0"].value, 0.2, rtol=1e-2)
    assert_allclose(pars["lon_0"].error, 0.002244, rtol=1e-2)

    assert_allclose(pars["index"].value, 3, rtol=1e-2)
    assert_allclose(pars["index"].error, 0.024277, rtol=1e-2)

    assert_allclose(pars["amplitude"].value, 1e-11, rtol=1e-2)
    assert_allclose(pars["amplitude"].error, 4.216154e-13, rtol=1e-2)

    # background norm 1
    assert_allclose(pars[8].value, 0.5, rtol=1e-2)
    assert_allclose(pars[8].error, 0.015811, rtol=1e-2)

    # background norm 2
    assert_allclose(pars[11].value, 1, rtol=1e-2)
    assert_allclose(pars[11].error, 0.02147, rtol=1e-2)

    # test mask_safe evaluation
    mask_safe = geom.energy_mask(energy_min=1 * u.TeV)
    dataset_1.mask_safe = Map.from_geom(geom, data=mask_safe)
    dataset_2.mask_safe = Map.from_geom(geom, data=mask_safe)

    stat = fit.datasets.stat_sum()
    assert_allclose(stat, 14823.579908, rtol=1e-5)

    region = sky_model.spatial_model.to_region()

    with mpl_plot_check():
        dataset_1.plot_residuals(kwargs_spectral=dict(region=region))

    # test model evaluation outside image
    dataset_1.models[0].spatial_model.lon_0.value = 150
    dataset_1.npred()
    assert not dataset_1._evaluators["test-model"].contributes
def sky_models(sky_model):
    sky_model_2 = sky_model.copy(name="source-2")
    sky_model_3 = sky_model.copy(name="source-3")
    return Models([sky_model_2, sky_model_3])
def models(self, models):
    if models is None:
        self._models = None
    else:
        self._models = Models(models)
def sky_models_2(sky_model):
    sky_model_4 = sky_model.copy(name="source-4")
    sky_model_5 = sky_model.copy(name="source-5")
    return Models([sky_model_4, sky_model_5])
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    datasets_settings = self.config.datasets
    log.info("Creating geometry.")
    geom = self._create_geometry()

    geom_settings = datasets_settings.geom
    geom_irf = dict(energy_axis_true=None, binsz_irf=None)
    if geom_settings.axes.energy_true.min is not None:
        geom_irf["energy_axis_true"] = self._make_energy_axis(
            geom_settings.axes.energy_true, name="energy_true"
        )
    geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value

    offset_max = geom_settings.selection.offset_max

    log.info("Creating datasets.")

    maker = MapDatasetMaker(selection=datasets_settings.map_selection)

    safe_mask_selection = datasets_settings.safe_mask.methods
    safe_mask_settings = datasets_settings.safe_mask.parameters
    maker_safe_mask = SafeMaskMaker(methods=safe_mask_selection, **safe_mask_settings)

    bkg_maker_config = {}
    if datasets_settings.background.exclusion:
        exclusion_region = Map.read(datasets_settings.background.exclusion)
        bkg_maker_config["exclusion_mask"] = exclusion_region
    bkg_maker_config.update(datasets_settings.background.parameters)

    bkg_method = datasets_settings.background.method
    if bkg_method == "fov_background":
        log.debug(f"Creating FoVBackgroundMaker with arguments {bkg_maker_config}")
        bkg_maker = FoVBackgroundMaker(**bkg_maker_config)
    elif bkg_method == "ring":
        bkg_maker = RingBackgroundMaker(**bkg_maker_config)
        log.debug(f"Creating RingBackgroundMaker with arguments {bkg_maker_config}")
        if datasets_settings.geom.axes.energy.nbins > 1:
            raise ValueError(
                "You need to define a single-bin energy geometry for your dataset."
            )
    else:
        bkg_maker = None
        log.warning("No background maker set for 3d analysis. Check configuration.")

    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)

    if datasets_settings.stack:
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
            dataset = maker.run(cutout, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset)
                if bkg_method == "ring":
                    dataset.models = Models([BackgroundModel(dataset.background)])
            log.debug(dataset)
            stacked.stack(dataset)
        datasets = [stacked]
    else:
        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
            dataset = maker.run(cutout, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset)
            log.debug(dataset)
            datasets.append(dataset)

    self.datasets = Datasets(datasets)
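# --- Usage sketch (hypothetical config file) ----------------------------------
# _map_making() is internal; it is normally reached through the high-level API
# sketched below. The config file name and its contents are assumptions.
from gammapy.analysis import Analysis, AnalysisConfig

config = AnalysisConfig.read("config.yaml")  # hypothetical 3d-analysis config
analysis = Analysis(config)
analysis.get_observations()
analysis.get_datasets()                      # runs the map making shown above
print(analysis.datasets)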
sigma="0.2 deg", frame="galactic") spectral_model = ExpCutoffPowerLawSpectralModel( index=2, amplitude="3e-12 cm-2 s-1 TeV-1", reference="1 TeV", lambda_="0.05 TeV-1", ) sky_model_simu = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model, name="source") bkg_model = FoVBackgroundModel(dataset_name="dataset-mcmc") models = Models([sky_model_simu, bkg_model]) print(models) # In[7]: dataset.models = models dataset.fake() # In[8]: dataset.counts.sum_over_axes().plot(add_cbar=True) # In[9]: # If you want to fit the data for comparison with MCMC later # fit = Fit(dataset)
def test_missing_parameters():
    filename = get_pkg_data_filename("data/examples.yaml")
    models = Models.read(filename)
    assert models["source1"].spatial_model.e in models.parameters
    assert len(models["source1"].spatial_model.parameters) == 6
def test_dict_to_skymodels():
    filename = get_pkg_data_filename("data/examples.yaml")
    models_data = read_yaml(filename)
    models = Models.from_dict(models_data)

    assert len(models) == 5

    model0 = models[0]
    assert isinstance(model0, BackgroundModel)
    assert model0.name == "background_irf"

    model0 = models[1]
    assert model0.spectral_model.tag == "ExpCutoffPowerLawSpectralModel"
    assert model0.spatial_model.tag == "PointSpatialModel"

    pars0 = model0.parameters
    assert pars0["index"].value == 2.1
    assert pars0["index"].unit == ""
    assert np.isnan(pars0["index"].max)
    assert np.isnan(pars0["index"].min)
    assert not pars0["index"].frozen

    assert pars0["lon_0"].value == -0.5
    assert pars0["lon_0"].unit == "deg"
    assert pars0["lon_0"].max == 180.0
    assert pars0["lon_0"].min == -180.0
    assert pars0["lon_0"].frozen

    assert pars0["lat_0"].value == -0.0005
    assert pars0["lat_0"].unit == "deg"
    assert pars0["lat_0"].max == 90.0
    assert pars0["lat_0"].min == -90.0
    assert pars0["lat_0"].frozen

    assert pars0["lambda_"].value == 0.006
    assert pars0["lambda_"].unit == "TeV-1"
    assert np.isnan(pars0["lambda_"].min)
    assert np.isnan(pars0["lambda_"].max)

    model1 = models[2]
    assert model1.spectral_model.tag == "PowerLawSpectralModel"
    assert model1.spatial_model.tag == "DiskSpatialModel"
    assert model1.temporal_model.tag == "LightCurveTemplateTemporalModel"

    pars1 = model1.parameters
    assert pars1["index"].value == 2.2
    assert pars1["index"].unit == ""
    assert pars1["lat_0"].scale == 1.0
    assert pars1["lat_0"].factor == pars1["lat_0"].value

    assert np.isnan(pars1["index"].max)
    assert np.isnan(pars1["index"].min)

    assert pars1["r_0"].unit == "deg"

    model2 = models[3]
    assert_allclose(model2.spectral_model.energy.data, [34.171, 44.333, 57.517])
    assert model2.spectral_model.energy.unit == "MeV"
    assert_allclose(
        model2.spectral_model.values.data, [2.52894e-06, 1.2486e-06, 6.14648e-06]
    )
    assert model2.spectral_model.values.unit == "1 / (cm2 MeV s sr)"

    assert model2.spectral_model.tag == "TemplateSpectralModel"
    assert model2.spatial_model.tag == "TemplateSpatialModel"
    assert model2.spatial_model.parameters["norm"].value == 1.0
    assert not model2.spatial_model.normalize
    assert model2.spectral_model.parameters["norm"].value == 2.1
`naima.models.TemplateSpectralModel`
"""

# %%
# Example plot
# ------------
# Here is an example plot of the model:

import numpy as np
from astropy import units as u
import matplotlib.pyplot as plt
from gammapy.modeling.models import Models, SkyModel, TemplateSpectralModel

energy_range = [0.1, 1] * u.TeV
energy = np.array([1e6, 3e6, 1e7, 3e7]) * u.MeV
values = np.array([4.4e-38, 2.0e-38, 8.8e-39, 3.9e-39]) * u.Unit("MeV-1 s-1 cm-2")
model = TemplateSpectralModel(energy=energy, values=values)
model.plot(energy_range)
plt.grid(which="both")

# %%
# YAML representation
# -------------------
# Here is an example YAML file using the model:

model = SkyModel(spectral_model=model, name="template-model")
models = Models([model])

print(models.to_yaml())
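# %%
# Round-trip sketch
# -----------------
# As a small, hedged addition (assuming a gammapy version where
# `Models.from_yaml` is available): the YAML string printed above can be
# loaded back into a Models object.

yaml_str = models.to_yaml()
models_roundtrip = Models.from_yaml(yaml_str)
assert models_roundtrip[0].name == "template-model"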
def make_example_2():
    spatial = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="1 deg")
    model = SkyModel(PowerLawSpectralModel(), spatial, name="example_2")
    models = Models([model])
    models.write(DATA_PATH / "example2.yaml", overwrite=True, write_covariance=False)