def bkg_estimate(observations, on_region, exclusion_mask):
    """An example background estimate"""
    est = ReflectedRegionsBackgroundEstimator(
        observations=observations,
        on_region=on_region,
        exclusion_mask=exclusion_mask,
        min_distance_input="0.2 deg",
    )
    est.run()
    return est.result
def bkg_estimator(observations, exclusion_mask, on_region):
    """Example background estimator for testing."""
    maker = ReflectedRegionsBackgroundEstimator(
        observations=observations,
        on_region=on_region,
        exclusion_mask=exclusion_mask,
        min_distance_input="0.2 deg",
    )
    maker.run()
    return maker
def setup_class(cls):
    datastore = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    obs_ids = [23523, 23526]

    on_region = CircleSkyRegion(
        SkyCoord(83.63 * u.deg, 22.01 * u.deg, frame="icrs"), 0.3 * u.deg
    )

    obs_stats_list = []
    for obs_id in obs_ids:
        obs = datastore.obs(obs_id)

        bkg = ReflectedRegionsBackgroundEstimator(
            on_region=on_region, observations=[obs]
        )
        bkg.run()
        bg_estimate = bkg.result[0]

        obs_stats = ObservationStats.from_observation(obs, bg_estimate)
        obs_stats_list.append(obs_stats)

    cls.obs_summary = ObservationSummary(obs_stats_list)
def test_extract_cta_1dc_data(caplog):
    datastore = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    obs_ids = [110380, 111140]
    observations = datastore.get_observations(obs_ids)

    pos = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    radius = Angle(0.11, "deg")
    on_region = CircleSkyRegion(pos, radius)

    est = ReflectedRegionsBackgroundEstimator(
        observations=observations, on_region=on_region, min_distance_input="0.2 deg"
    )
    est.run()

    # This will test non PSF3D input as well as absence of default thresholds
    extract = SpectrumExtraction(
        bkg_estimate=est.result, observations=observations, containment_correction=True
    )
    extract.run()

    extract.compute_energy_threshold(method_lo="area_max", area_percent_lo=10)
    actual = extract.spectrum_observations[0].energy_range[0]
    assert_quantity_allclose(actual, 0.774263 * u.TeV, rtol=1e-3)
class Analysis:
    """Config-driven high-level analysis interface.

    By default the analysis is initialized with the configuration parameters and
    values declared in an internal configuration schema YAML file. The user may
    also provide configuration parameters as a nested dictionary at instantiation
    time; in that case these parameters overwrite the default values from the
    configuration file. For more info see :ref:`HLI` and the usage sketch after
    this class definition.

    Parameters
    ----------
    config : dict or `AnalysisConfig`
        Configuration options following `AnalysisConfig` schema
    """

    def __init__(self, config=None):
        if isinstance(config, dict):
            self._config = AnalysisConfig(config)
        elif isinstance(config, AnalysisConfig):
            self._config = config
        else:
            raise ValueError("Dict or `AnalysisConfig` object required.")

        self._set_logging()
        self.observations = None
        self.background_estimator = None
        self.datasets = None
        self.extraction = None
        self.model = None
        self.fit = None
        self.fit_result = None
        self.flux_points = None

    @property
    def config(self):
        """Analysis configuration (`AnalysisConfig`)"""
        return self._config

    @property
    def settings(self):
        """Configuration settings for the analysis session."""
        return self.config.settings

    def get_observations(self):
        """Fetch observations from the data store according to criteria defined in the configuration."""
        self.config.validate()
        log.info("Fetching observations.")
        datastore_path = make_path(self.settings["observations"]["datastore"])
        if datastore_path.is_file():
            datastore = DataStore.from_file(datastore_path)
        elif datastore_path.is_dir():
            datastore = DataStore.from_dir(datastore_path)
        else:
            raise FileNotFoundError(f"Datastore {datastore_path} not found.")
        ids = set()
        selection = dict()
        for criteria in self.settings["observations"]["filters"]:
            selected_obs = ObservationTable()

            # TODO: Reduce significantly the code.
            # This block would be handled by datastore.obs_table.select_observations
            selection["type"] = criteria["filter_type"]
            for key, val in criteria.items():
                if key in ["lon", "lat", "radius", "border"]:
                    val = Angle(val)
                selection[key] = val
            if selection["type"] == "angle_box":
                selection["type"] = "par_box"
                selection["value_range"] = Angle(criteria["value_range"])
            if selection["type"] == "sky_circle" or selection["type"].endswith("_box"):
                selected_obs = datastore.obs_table.select_observations(selection)
            if selection["type"] == "par_value":
                mask = (
                    datastore.obs_table[criteria["variable"]]
                    == criteria["value_param"]
                )
                selected_obs = datastore.obs_table[mask]
            if selection["type"] == "ids":
                obs_list = datastore.get_observations(criteria["obs_ids"])
                selected_obs["OBS_ID"] = [obs.obs_id for obs in obs_list.list]
            if selection["type"] == "all":
                obs_list = datastore.get_observations()
                selected_obs["OBS_ID"] = [obs.obs_id for obs in obs_list.list]

            if len(selected_obs):
                if "exclude" in criteria and criteria["exclude"]:
                    ids.difference_update(selected_obs["OBS_ID"].tolist())
                else:
                    ids.update(selected_obs["OBS_ID"].tolist())
        self.observations = datastore.get_observations(ids, skip_missing=True)
        for obs in self.observations.list:
            log.info(obs)

    def get_datasets(self):
        """Produce reduced datasets."""
        if not self._validate_reduction_settings():
            return False
        if self.settings["datasets"]["dataset-type"] == "SpectrumDatasetOnOff":
            self._spectrum_extraction()
        elif self.settings["datasets"]["dataset-type"] == "MapDataset":
            self._map_making()
        else:
            # TODO: raise error?
log.info("Data reduction method not available.") return False def set_model(self, model=None, filename=""): """Read the model from dict or filename and attach it to datasets. Parameters ---------- model: dict or string Dictionary or string in YAML format with the serialized model. filename : string Name of the model YAML file describing the model. """ if not self._validate_set_model(): return False log.info(f"Reading model.") if isinstance(model, str): model = yaml.safe_load(model) if model: self.model = SkyModels(dict_to_models(model)) elif filename: filepath = make_path(filename) self.model = SkyModels.from_yaml(filepath) else: return False # TODO: Deal with multiple components for dataset in self.datasets.datasets: if isinstance(dataset, MapDataset): dataset.model = self.model else: if len(self.model.skymodels) > 1: raise ValueError( "Can only fit a single spectral model at one time." ) dataset.model = self.model.skymodels[0].spectral_model log.info(self.model) def run_fit(self, optimize_opts=None): """Fitting reduced datasets to model.""" if not self._validate_fitting_settings(): return False for ds in self.datasets.datasets: # TODO: fit_range handled in jsonschema validation class if "fit_range" in self.settings["fit"]: e_min = u.Quantity(self.settings["fit"]["fit_range"]["min"]) e_max = u.Quantity(self.settings["fit"]["fit_range"]["max"]) if isinstance(ds, MapDataset): ds.mask_fit = ds.counts.geom.energy_mask(e_min, e_max) else: ds.mask_fit = ds.counts.energy_mask(e_min, e_max) log.info("Fitting reduced datasets.") self.fit = Fit(self.datasets) self.fit_result = self.fit.run(optimize_opts=optimize_opts) log.info(self.fit_result) def get_flux_points(self, source="source"): """Calculate flux points for a specific model component. Parameters ---------- source : string Name of the model component where to calculate the flux points. 
""" if not self._validate_fp_settings(): return False # TODO: add "source" to config log.info("Calculating flux points.") axis_params = self.settings["flux-points"]["fp_binning"] e_edges = MapAxis.from_bounds(**axis_params).edges flux_point_estimator = FluxPointsEstimator( e_edges=e_edges, datasets=self.datasets, source=source ) fp = flux_point_estimator.run() fp.table["is_ul"] = fp.table["ts"] < 4 model = self.model[source].spectral_model.copy() self.flux_points = FluxPointsDataset(data=fp, model=model) cols = ["e_ref", "ref_flux", "dnde", "dnde_ul", "dnde_err", "is_ul"] log.info("\n{}".format(self.flux_points.data.table[cols])) @staticmethod def _create_geometry(params): """Create the geometry.""" # TODO: handled in jsonschema validation class geom_params = copy.deepcopy(params) axes = [] for axis_params in geom_params.get("axes", []): ax = MapAxis.from_bounds(**axis_params) axes.append(ax) geom_params["axes"] = axes geom_params["skydir"] = tuple(geom_params["skydir"]) return WcsGeom.create(**geom_params) def _map_making(self): """Make maps and datasets for 3d analysis.""" log.info("Creating geometry.") geom = self._create_geometry(self.settings["datasets"]["geom"]) if "geom-irf" in self.settings["datasets"]: geom_irf = self._create_geometry(self.settings["datasets"]["geom-irf"]) else: geom_irf = geom.to_binsz(binsz=BINSZ_IRF) offset_max = Angle(self.settings["datasets"]["offset-max"]) stack_datasets = self.settings["datasets"]["stack-datasets"] log.info("Creating datasets.") maker = MapDatasetMaker( geom=geom, geom_true=geom_irf, offset_max=offset_max, ) if stack_datasets: stacked = MapDataset.create(geom=geom, geom_irf=geom_irf, name="stacked") for obs in self.observations: dataset = maker.run(obs) stacked.stack(dataset) self._extract_irf_kernels(stacked) datasets = [stacked] else: datasets = [] for obs in self.observations: dataset = maker.run(obs) self._extract_irf_kernels(dataset) datasets.append(dataset) self.datasets = Datasets(datasets) def _extract_irf_kernels(self, dataset): # TODO: remove hard-coded default value max_radius = self.settings["datasets"].get("psf-kernel-radius", "0.6 deg") # TODO: handle IRF maps in fit geom = dataset.counts.geom geom_irf = dataset.exposure.geom position = geom.center_skydir geom_psf = geom.to_image().to_cube(geom_irf.axes) dataset.psf = dataset.psf.get_psf_kernel( position=position, geom=geom_psf, max_radius=max_radius ) e_reco = geom.get_axis_by_name("energy").edges dataset.edisp = dataset.edisp.get_energy_dispersion( position=position, e_reco=e_reco ) def _set_logging(self): """Set logging parameters for API.""" logging.basicConfig(**self.settings["general"]["logging"]) log.info( "Setting logging config: {!r}".format(self.settings["general"]["logging"]) ) def _spectrum_extraction(self): """Run all steps for the spectrum extraction.""" region = self.settings["datasets"]["geom"]["region"] log.info("Reducing spectrum datasets.") on_lon = Angle(region["center"][0]) on_lat = Angle(region["center"][1]) on_center = SkyCoord(on_lon, on_lat, frame=region["frame"]) on_region = CircleSkyRegion(on_center, Angle(region["radius"])) background_params = {"on_region": on_region} background = self.settings["datasets"]["background"] if "exclusion_mask" in background: map_hdu = {} filename = background["exclusion_mask"]["filename"] if "hdu" in background["exclusion_mask"]: map_hdu = {"hdu": background["exclusion_mask"]["hdu"]} exclusion_region = Map.read(filename, **map_hdu) background_params["exclusion_mask"] = exclusion_region if 
background["background_estimator"] == "reflected": self.background_estimator = ReflectedRegionsBackgroundEstimator( observations=self.observations, **background_params ) self.background_estimator.run() else: # TODO: raise error? log.info("Background estimation only for reflected regions method.") extraction_params = {} if "containment_correction" in self.settings["datasets"]: extraction_params["containment_correction"] = self.settings["datasets"][ "containment_correction" ] params = self.settings["datasets"]["geom"]["axes"][0] e_reco = MapAxis.from_bounds(**params).edges extraction_params["e_reco"] = e_reco extraction_params["e_true"] = None self.extraction = SpectrumExtraction( observations=self.observations, bkg_estimate=self.background_estimator.result, **extraction_params, ) self.extraction.run() self.datasets = Datasets(self.extraction.spectrum_observations) if self.settings["datasets"]["stack-datasets"]: stacked = self.datasets.stack_reduce() stacked.name = "stacked" self.datasets = Datasets([stacked]) def _validate_reduction_settings(self): """Validate settings before proceeding to data reduction.""" if self.observations and len(self.observations): self.config.validate() return True else: log.info("No observations selected.") log.info("Data reduction cannot be done.") return False def _validate_set_model(self): if self.datasets and self.datasets.datasets: self.config.validate() return True else: log.info("No datasets reduced.") return False def _validate_fitting_settings(self): """Validate settings before proceeding to fit 1D.""" if not self.model: log.info("No model fetched for datasets.") log.info("Fit cannot be done.") return False else: return True def _validate_fp_settings(self): """Validate settings before proceeding to flux points estimation.""" valid = True if self.fit: self.config.validate() else: log.info("No results available from fit.") valid = False if "flux-points" not in self.settings: log.info("No values declared for the energy bins.") valid = False elif "fp_binning" not in self.settings["flux-points"]: log.info("No values declared for the energy bins.") valid = False if not valid: log.info("Flux points calculation cannot be done.") return valid
data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
mask = data_store.obs_table["TARGET_NAME"] == "Crab"
obs_ids = data_store.obs_table["OBS_ID"][mask].data
observations = data_store.get_observations(obs_ids)

crab_position = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")

# The ON region center is defined in the icrs frame. The rotation angle is
# defined w.r.t. its axis.
on_region = RectangleSkyRegion(
    center=crab_position, width=0.5 * u.deg, height=0.4 * u.deg, angle=0 * u.deg
)

background_estimator = ReflectedRegionsBackgroundEstimator(
    observations=observations, on_region=on_region, min_distance=0.1 * u.rad
)

background_estimator.run()

# Let's inspect the data extracted in the first observation
print(background_estimator.result[0])

background_estimator.plot()

# Now we change the ON region, and use a center defined in the galactic frame
on_region_galactic = RectangleSkyRegion(
    center=crab_position.galactic,
    width=0.5 * u.deg,
    height=0.4 * u.deg,
    angle=0 * u.deg,
)
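# Hypothetical follow-up (not in the original snippet): run the same estimator
# with the galactic-frame ON region defined above. Since the reflected regions
# are constructed from sky positions, the frame used to define the ON region
# should not change the result. The variable name below is an assumption.
background_estimator_galactic = ReflectedRegionsBackgroundEstimator(
    observations=observations, on_region=on_region_galactic, min_distance=0.1 * u.rad
)
background_estimator_galactic.run()
background_estimator_galactic.plot()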
proj="TAN", coordsys="GAL") mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False) exclusion_mask.data = mask exclusion_mask.plot() # ## Estimate background # # Next we will manually perform a background estimate by placing [reflected regions](https://docs.gammapy.org/dev/spectrum/reflected.html) around the pointing position and looking at the source statistics. This will result in a [gammapy.spectrum.BackgroundEstimate](https://docs.gammapy.org/dev/api/gammapy.spectrum.BackgroundEstimate.html) that serves as input for other classes in gammapy. # In[ ]: background_estimator = ReflectedRegionsBackgroundEstimator( observations=observations, on_region=on_region, exclusion_mask=exclusion_mask, ) background_estimator.run() # In[ ]: plt.figure(figsize=(8, 8)) background_estimator.plot(add_legend=True) # ## Source statistic # # Next we're going to look at the overall source statistics in our signal region. For more info about what debug plots you can create check out the [ObservationSummary](https://docs.gammapy.org/dev/api/gammapy.data.ObservationSummary.html) class. # In[ ]:
# We need to define the ON extraction region. We will keep the same reco and true energy axes as in 3D.

# In[ ]:


# Target definition
on_region_radius = Angle("0.11 deg")
on_region = CircleSkyRegion(center=target_position, radius=on_region_radius)


# ### Extracting the background
#
# Here we perform an ON-OFF measurement with reflected regions, starting with the background extraction.

# In[ ]:


bkg_estimator = ReflectedRegionsBackgroundEstimator(
    on_region=on_region, observations=crab_obs
)
bkg_estimator.run()


# ### Creation of the datasets
#
# We now apply the spectral extraction to create the datasets.
#
# NB: here we use time intervals defined by the observation start and stop times, so the standard observation-based spectral extraction is already done in the right time bins.
#
# A proper time-resolved spectral extraction will be included in an upcoming Gammapy release.

# In[ ]:


# Note that we are not performing the extraction in time bins
extraction = SpectrumExtraction(
    observations=crab_obs,
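# The call above is cut off in this excerpt. Below is a hedged sketch of how
# such an extraction is typically completed, based only on the
# `SpectrumExtraction` usage shown elsewhere in this document; the energy axes
# `e_reco` and `e_true` are assumed to have been defined in earlier cells, and
# the variable name is an assumption.
extraction_sketch = SpectrumExtraction(
    observations=crab_obs,
    bkg_estimate=bkg_estimator.result,
    e_reco=e_reco,
    e_true=e_true,
)
extraction_sketch.run()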