def run_analysis_3d(target_dict):
    """Run the 3D (cube) analysis for the selected target.

    Builds a stacked MapDataset from the HESS DL3 DR1 data store, applies a
    run-wise safe-energy threshold, fits a sky model and writes the fit
    summary and flux points under ``<tag>/results/``.

    Parameters
    ----------
    target_dict : dict
        Target definition; keys used here: "tag", "name", "ra", "dec",
        "e_decorr" (decorrelation energy in TeV) and "spatial_model".
    """
    tag = target_dict["tag"]
    name = target_dict["name"]
    log.info(f"running 3d analysis, {tag}")
    path_res = Path(tag + "/results/")

    ra = target_dict["ra"]
    dec = target_dict["dec"]
    e_decorr = target_dict["e_decorr"]

    config_str = f"""
    general:
        logging:
            level: INFO
        outdir: .

    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1/
        filters:
            - filter_type: par_value
              value_param: {name}
              variable: TARGET_NAME

    datasets:
        dataset-type: MapDataset
        stack-datasets: true
        offset-max: 2.5 deg
        geom:
            skydir: [{ra}, {dec}]
            width: [5, 5]
            binsz: 0.02
            coordsys: CEL
            proj: TAN
            axes:
              - name: energy
                hi_bnd: 100
                lo_bnd: 0.1
                nbin: 24
                interp: log
                node_type: edges
                unit: TeV
        energy-axis-true:
            name: energy
            hi_bnd: 100
            lo_bnd: 0.1
            nbin: 72
            interp: log
            node_type: edges
            unit: TeV
    """
    print(config_str)
    config = AnalysisConfig(config_str)

    # Observation selection
    analysis = Analysis(config)
    analysis.get_observations()

    if DEBUG:
        # Keep a single run to speed up debugging.
        analysis.observations.list = [analysis.observations.list[0]]

    # Data reduction
    analysis.get_datasets()

    # Set runwise energy threshold. See reference paper, section 5.1.1.
    for dataset in analysis.datasets:
        # Energy threshold given by the 10% edisp criterium.
        e_thr_bias = dataset.edisp.get_bias_energy(0.1)

        # Energy at which the background rate peaks.
        background_model = dataset.background_model
        bkg_spectrum = background_model.map.get_spectrum()
        peak = bkg_spectrum.data.max()
        idx = list(bkg_spectrum.data).index(peak)
        e_thr_bkg = bkg_spectrum.energy.center[idx]

        # The safe threshold is the more conservative of the two.
        esafe = max(e_thr_bias, e_thr_bkg)
        dataset.mask_fit = dataset.counts.geom.energy_mask(emin=esafe)

    # Model fitting
    spatial_model = target_dict["spatial_model"]
    model_config = f"""
    components:
        - name: {tag}
          type: SkyModel
          spatial:
            type: {spatial_model}
            frame: icrs
            parameters:
            - name: lon_0
              value: {ra}
              unit: deg
            - name: lat_0
              value: {dec}
              unit: deg
          spectral:
            type: PowerLawSpectralModel
            parameters:
            - name: amplitude
              value: 1.0e-12
              unit: cm-2 s-1 TeV-1
            - name: index
              value: 2.0
              unit: ''
            - name: reference
              value: {e_decorr}
              unit: TeV
              frozen: true
    """
    model_npars = 5
    if spatial_model == "DiskSpatialModel":
        # FIX: use safe_load — yaml.load without an explicit Loader is
        # deprecated and unsafe; safe_load is sufficient for this plain YAML.
        model_config = yaml.safe_load(model_config)
        parameters = model_config["components"][0]["spatial"]["parameters"]
        # Extra free morphology parameters of the disk model.
        parameters.append({"name": "r_0", "value": 0.2, "unit": "deg", "frozen": False})
        parameters.append({"name": "e", "value": 0.8, "unit": "", "frozen": False})
        parameters.append({"name": "phi", "value": 150, "unit": "deg", "frozen": False})
        parameters.append({"name": "edge", "value": 0.01, "unit": "deg", "frozen": True})
        model_npars += 4

    analysis.set_model(model=model_config)

    for dataset in analysis.datasets:
        dataset.background_model.norm.frozen = False

    analysis.run_fit()

    parameters = analysis.model.parameters
    # Keep only the covariance of the source-model parameters (background
    # norms come after them in the parameter list).
    parameters.covariance = analysis.fit_result.parameters.covariance[
        0:model_npars, 0:model_npars
    ]
    write_fit_summary(parameters, str(path_res / "results-summary-fit-3d.yaml"))

    # Flux points
    # TODO: This is a workaround to re-optimize the bkg in each energy bin.
    # It has to be added to the Analysis class.
    datasets = analysis.datasets.copy()
    for dataset in datasets:
        for par in dataset.parameters:
            if par is not dataset.background_model.norm:
                par.frozen = True

    # Skip the costly per-bin re-optimization when debugging.
    reoptimize = not DEBUG
    fpe = FluxPointsEstimator(
        datasets=datasets, e_edges=FLUXP_EDGES, source=tag, reoptimize=reoptimize
    )
    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    # FIX: also write "is_ul", which was computed above but not written
    # (the 1D analysis writes it, so the outputs are now consistent).
    keys = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"]
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-3d.ecsv", format="ascii.ecsv"
    )
def run_analysis_1d(target_dict):
    """Run the 1D (spectral) analysis for the selected target.

    Extracts ON/OFF spectra with reflected-regions background, fits a joint
    power law and writes the fit summary and flux points under
    ``<tag>/results/``.

    Parameters
    ----------
    target_dict : dict
        Target definition; keys used here: "tag", "name", "ra", "dec",
        "on_size" (ON-region radius in deg) and "e_decorr"
        (decorrelation energy in TeV).
    """
    tag = target_dict["tag"]
    name = target_dict["name"]
    log.info(f"running 1d analysis, {tag}")
    path_res = Path(tag + "/results/")

    ra = target_dict["ra"]
    dec = target_dict["dec"]
    on_size = target_dict["on_size"]
    e_decorr = target_dict["e_decorr"]
    target_pos = SkyCoord(ra, dec, unit="deg", frame="icrs")
    on_radius = Angle(on_size * u.deg)
    containment_corr = True

    # Observations selection
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    mask = data_store.obs_table["TARGET_NAME"] == name
    obs_table = data_store.obs_table[mask]
    observations = data_store.get_observations(obs_table["OBS_ID"])

    if DEBUG:
        # Keep a single run to speed up debugging.
        observations = [observations[0]]

    # Reflected regions background estimation
    on_region = CircleSkyRegion(center=target_pos, radius=on_radius)
    dataset_maker = SpectrumDatasetMaker(
        region=on_region,
        e_reco=E_RECO,
        e_true=E_RECO,
        containment_correction=containment_corr,
    )
    bkg_maker = ReflectedRegionsBackgroundMaker()
    # Safe energy threshold from the 10% edisp bias criterium.
    safe_mask_masker = SafeMaskMaker(methods=["edisp-bias"], bias_percent=10)

    datasets = []
    for observation in observations:
        dataset = dataset_maker.run(observation, selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        datasets.append(dataset_on_off)

    # Fit spectrum: one shared model, joint fit over all run-wise datasets.
    model = PowerLawSpectralModel(
        index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=e_decorr * u.TeV
    )
    for dataset in datasets:
        dataset.model = model

    fit_joint = Fit(datasets)
    result_joint = fit_joint.run()
    parameters = model.parameters
    parameters.covariance = result_joint.parameters.covariance
    write_fit_summary(parameters, str(path_res / "results-summary-fit-1d.yaml"))

    # Flux points
    fpe = FluxPointsEstimator(datasets=datasets, e_edges=FLUXP_EDGES)
    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"]
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-1d.ecsv", format="ascii.ecsv"
    )
# Flux points are computed on stacked observation
stacked_obs = Datasets(extract.spectrum_observations).stack_reduce()

print(stacked_obs)


# In[ ]:


e_edges = np.logspace(0, 1.5, 5) * u.TeV

stacked_obs.model = model

# FIX: estimate flux points on the stacked dataset built above — the
# original passed `dataset` (a leftover run-wise dataset), contradicting
# the section header "Flux points are computed on stacked observation".
fpe = FluxPointsEstimator(datasets=[stacked_obs], e_edges=e_edges)
flux_points = fpe.run()
flux_points.table_formatted


# ### Plot
#
# Let's plot the spectral model and points. You could do it directly, but there is a helper class.
# Note that a spectral uncertainty band, a "butterfly" is drawn, but it is very thin, i.e. barely visible.

# In[ ]:


# Attach the fit covariance so the butterfly band can be drawn.
model.parameters.covariance = result.parameters.covariance
flux_points_dataset = FluxPointsDataset(data=flux_points, model=model)
def flux_point(stacked):
    """Estimate flux points for the stacked dataset in fixed energy bins.

    Parameters
    ----------
    stacked : dataset
        Stacked dataset (with model set) to estimate flux points on.

    Returns
    -------
    Flux points estimated by `FluxPointsEstimator` for source "gc-source".
    The original version discarded this result; returning it is
    backward-compatible and makes the function usable outside timing runs.
    """
    e_edges = [0.3, 1, 3, 10] * u.TeV
    fpe = FluxPointsEstimator(datasets=[stacked], e_edges=e_edges, source="gc-source")
    return fpe.run()
class SpectrumAnalysisIACT:
    """High-level analysis class to perform a full 1D IACT spectral analysis.

    Observation selection must have happened before.

    Config options:

    * outdir : `pathlib.Path`, str
        Output folder, None means no output
    * background : dict
        Forwarded to `~gammapy.background.ReflectedRegionsBackgroundEstimator`
    * extraction : dict
        Forwarded to `~gammapy.spectrum.SpectrumExtraction`
    * fp_binning : `~astropy.units.Quantity`
        Flux points binning

    Parameters
    ----------
    observations : `~gammapy.data.Observations`
        Observations to analyse
    config : dict
        Config dict
    """

    def __init__(self, observations, config):
        self.observations = observations
        self.config = config

    def __str__(self):
        ss = self.__class__.__name__
        ss += f"\n{self.observations}"
        ss += f"\n{self.config}"
        return ss

    def run(self, optimize_opts=None):
        """Run all steps: extraction then fit."""
        log.info(f"Running {self.__class__.__name__}")
        self.run_extraction()
        self.run_fit(optimize_opts)

    def run_extraction(self):
        """Run all steps for the spectrum extraction."""
        self.background_estimator = ReflectedRegionsBackgroundEstimator(
            observations=self.observations, **self.config["background"]
        )
        self.background_estimator.run()

        self.extraction = SpectrumExtraction(
            observations=self.observations,
            bkg_estimate=self.background_estimator.result,
            **self.config["extraction"],
        )
        self.extraction.run()

    @property
    def _result_dict(self):
        """Fit results as a plain dict, ready for YAML serialisation."""
        val = {}
        model = self.config["fit"]["model"]
        val["model"] = model.to_dict()

        fit_range = self.config["fit"].get("fit_range")
        if fit_range is not None:
            val["fit_range"] = dict(
                min=fit_range[0].value,
                max=fit_range[1].value,
                unit=fit_range.unit.to_string("fits"),
            )

        val["statval"] = float(self.fit_result.total_stat)
        val["statname"] = "wstat"
        return val

    def write(self, filename, mode="w"):
        """Write fit results to YAML file.

        Parameters
        ----------
        filename : str
            File to write
        mode : str
            Write mode
        """
        d = self._result_dict
        val = yaml.safe_dump(d, default_flow_style=False)
        with open(str(filename), mode) as outfile:
            outfile.write(val)

    def run_fit(self, optimize_opts=None):
        """Run all steps for the spectrum fit and flux-point estimation."""
        fit_range = self.config["fit"].get("fit_range")
        model = self.config["fit"]["model"]

        for obs in self.extraction.spectrum_observations:
            if fit_range is not None:
                obs.mask_fit = obs.counts.energy_mask(fit_range[0], fit_range[1])
            obs.model = model

        self.fit = Fit(self.extraction.spectrum_observations)
        self.fit_result = self.fit.run(optimize_opts=optimize_opts)

        # NOTE: the original re-fetched the model from config here; it is the
        # same object already bound above, so the second lookup was redundant.
        modelname = model.__class__.__name__
        model.parameters.covariance = self.fit_result.parameters.covariance
        filename = make_path(self.config["outdir"]) / f"fit_result_{modelname}.yaml"
        self.write(filename=filename)

        # Flux points are computed on the stacked observation.
        obs_stacker = SpectrumDatasetOnOffStacker(self.extraction.spectrum_observations)
        obs_stacker.run()

        datasets_fp = obs_stacker.stacked_obs
        datasets_fp.model = model
        self.flux_point_estimator = FluxPointsEstimator(
            e_edges=self.config["fp_binning"], datasets=datasets_fp
        )
        fp = self.flux_point_estimator.run()
        fp.table["is_ul"] = fp.table["ts"] < 4
        self.flux_points = fp

    @property
    def spectrum_result(self):
        """`~gammapy.spectrum.FluxPointsDataset` of flux points and fitted model."""
        return FluxPointsDataset(
            data=self.flux_points, model=self.fit.datasets.datasets[0].model
        )
def flux_point(stacked):
    """Estimate flux points on 11 log-spaced bins between 0.7 and 30 TeV.

    Parameters
    ----------
    stacked : dataset
        Stacked dataset (with model set) to estimate flux points on.

    Returns
    -------
    Flux points computed by `FluxPointsEstimator`. The original version
    discarded this result; returning it is backward-compatible.
    """
    e_edges = MapAxis.from_bounds(0.7, 30, nbin=11, interp="log", unit="TeV").edges
    fpe = FluxPointsEstimator(datasets=[stacked], e_edges=e_edges)
    return fpe.run()
def run_analysis_3d(target_dict, fluxp_edges, debug):
    """Run stacked 3D analysis for the selected target.

    Notice that, for the sake of time saving, we run a stacked analysis,
    as opposed to the joint analysis that is performed in the reference
    paper.

    Parameters
    ----------
    target_dict : dict
        Target definition; keys used here: "tag", "ra", "dec",
        "e_decorr" and "spatial_model" (also consumed by the config
        template via `str.format_map`).
    fluxp_edges : `~astropy.units.Quantity`
        Energy edges used for the flux-points estimation.
    debug : bool
        If True, skip the costly background re-optimization in the
        flux-points estimation.
    """
    tag = target_dict["tag"]
    log.info(f"running 3d analysis, {tag}")

    path_res = Path(tag + "/results/")

    # The analysis config is a template filled with the target parameters.
    txt = Path("config_template.yaml").read_text()
    txt = txt.format_map(target_dict)
    config = AnalysisConfig.from_yaml(txt)

    log.info("Running observations selection")
    analysis = Analysis(config)
    analysis.get_observations()

    log.info("Running data reduction")
    analysis.get_datasets()

    # TODO: Improve safe mask handling in Analysis.
    # The mask should be applied run-by-run.
    maker_safe_mask = SafeMaskMaker(methods=["edisp-bias", "bkg-peak"])
    stacked = maker_safe_mask.run(analysis.datasets[0])

    log.info("Running fit ...")
    ra = target_dict["ra"]
    dec = target_dict["dec"]
    e_decorr = target_dict["e_decorr"]
    spectral_model = Model.create("PowerLawSpectralModel", reference=e_decorr)
    spatial_model = Model.create(target_dict["spatial_model"], lon_0=ra, lat_0=dec)
    if target_dict["spatial_model"] == "DiskSpatialModel":
        # Free the eccentricity for the disk morphology.
        spatial_model.e.frozen = False
    sky_model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name=tag
    )

    stacked.models = sky_model
    stacked.background_model.norm.frozen = False
    fit = Fit([stacked])
    result = fit.run()

    parameters = stacked.models.parameters
    # Keep only the covariance of the sky-model parameters (the background
    # norm comes after them in the parameter list).
    model_npars = len(sky_model.parameters.names)
    parameters.covariance = result.parameters.covariance[
        0:model_npars, 0:model_npars
    ]
    log.info(f"Writing {path_res}")
    write_fit_summary(parameters, str(path_res / "results-summary-fit-3d.yaml"))

    log.info("Running flux points estimation")
    # TODO: This is a workaround to re-optimize the bkg.
    # Remove it once it's added to the Analysis class.
    for par in stacked.parameters:
        if par is not stacked.background_model.norm:
            par.frozen = True

    # Skip the costly re-optimization when debugging.
    reoptimize = not debug
    fpe = FluxPointsEstimator(
        datasets=[stacked], e_edges=fluxp_edges, source=tag, reoptimize=reoptimize
    )

    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = [
        "e_ref",
        "e_min",
        "e_max",
        "dnde",
        "dnde_errp",
        "dnde_errn",
        "is_ul",
        "dnde_ul",
    ]
    log.info(f"Writing {path_res}")
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-3d.ecsv", format="ascii.ecsv"
    )