def test_compute_energy_threshold(self, spectrum_dataset_crab_fine, observations_hess_dl3):
    maker = SpectrumDatasetMaker(containment_correction=True)
    safe_mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    obs = observations_hess_dl3[0]
    dataset = maker.run(spectrum_dataset_crab_fine, obs)
    dataset = safe_mask_maker.run(dataset, obs)

    actual = dataset.energy_range[0]
    assert_quantity_allclose(actual, 0.8799225 * u.TeV, rtol=1e-3)
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/") OBS_ID = 23523 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg") e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log", unit="TeV").edges e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log", unit="TeV").edges on_region_radius = Angle("0.11 deg") on_region = CircleSkyRegion(center=target_position, radius=on_region_radius) dataset_maker = SpectrumDatasetMaker(containment_correction=True, selection=["counts", "aeff", "edisp"]) empty = SpectrumDatasetOnOff.create(region=on_region, e_reco=e_reco, e_true=e_true) bkg_maker = ReflectedRegionsBackgroundMaker() safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10) spectral_model = PowerLawSpectralModel(index=2.6, amplitude=2.0e-11 * u.Unit("1 / (cm2 s TeV)"), reference=1 * u.TeV) spectral_model.index.frozen = False model = spectral_model.copy() model.name = "crab" datasets_1d = [] for observation in observations: dataset = dataset_maker.run(dataset=empty.copy(), observation=observation) dataset_on_off = bkg_maker.run(dataset, observation) dataset_on_off = safe_mask_masker.run(dataset_on_off, observation) datasets_1d.append(dataset_on_off) for dataset in datasets_1d: model = spectral_model.copy() model.name = "crab" dataset.model = model return datasets_1d
def test_compute_energy_threshold(self, spectrum_dataset_maker_crab_fine_bins, observations_hess_dl3):
    safe_mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    obs = observations_hess_dl3[0]
    spectrum_dataset_maker_crab_fine_bins.containment_correction = True
    dataset = spectrum_dataset_maker_crab_fine_bins.run(
        obs, selection=["counts", "aeff", "edisp"]
    )
    dataset = safe_mask_maker.run(dataset, obs)

    actual = dataset.energy_range[0]
    assert_quantity_allclose(actual, 0.8799225 * u.TeV, rtol=1e-3)
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/") OBS_ID = 110380 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) energy_axis = MapAxis.from_bounds(0.1, 10, nbin=10, unit="TeV", name="energy", interp="log") geom = WcsGeom.create( skydir=(0, 0), binsz=0.02, width=(10, 8), frame="galactic", proj="CAR", axes=[energy_axis], ) offset_max = 4 * u.deg maker = MapDatasetMaker() safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max) stacked = MapDataset.create(geom=geom) spatial_model = PointSpatialModel(lon_0="-0.05 deg", lat_0="-0.05 deg", frame="galactic") spectral_model = ExpCutoffPowerLawSpectralModel( index=2, amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"), reference=1.0 * u.TeV, lambda_=0.1 / u.TeV, ) model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model, name="gc-source") datasets = Datasets([]) for idx, obs in enumerate(observations): cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max, name=f"dataset{idx}") dataset = maker.run(cutout, obs) dataset = safe_mask_maker.run(dataset, obs) dataset.models = model datasets.append(dataset) return datasets
def test_safe_mask_maker_dl3(spectrum_dataset_maker_crab, observations_hess_dl3):
    safe_mask_maker = SafeMaskMaker()

    obs = observations_hess_dl3[0]
    dataset = spectrum_dataset_maker_crab.run(obs)
    dataset = safe_mask_maker.run(dataset, obs)
    assert_allclose(dataset.energy_range[0].value, 1)
    assert dataset.energy_range[0].unit == "TeV"

    mask_safe = safe_mask_maker.make_mask_energy_aeff_max(dataset)
    assert mask_safe.sum() == 4

    mask_safe = safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert mask_safe.sum() == 3
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")

    geom = self._create_geometry(self.settings["datasets"]["geom"])

    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in self.settings["datasets"]:
        axis_params = self.settings["datasets"]["energy-axis-true"]
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
    geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
    geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)

    offset_max = Angle(self.settings["datasets"]["offset-max"])
    log.info("Creating datasets.")

    maker = MapDatasetMaker(geom=geom, offset_max=offset_max, **geom_irf)
    maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)

    if self.settings["datasets"]["stack-datasets"]:
        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
        for obs in self.observations:
            dataset = maker.run(obs)
            dataset = maker_safe_mask.run(dataset, obs)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            dataset = maker_safe_mask.run(dataset, obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)

    self.datasets = Datasets(datasets)
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/") OBS_ID = 23523 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs") on_region_radius = Angle("0.11 deg") on_region = CircleSkyRegion(center=target_position, radius=on_region_radius) exclusion_region = CircleSkyRegion( center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"), radius=0.5 * u.deg, ) skydir = target_position.galactic exclusion_mask = Map.create(npix=(150, 150), binsz=0.05, skydir=skydir, proj="TAN", coordsys="GAL") mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False) exclusion_mask.data = mask e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log", unit="TeV").edges e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log", unit="TeV").edges dataset_maker = SpectrumDatasetMaker(region=on_region, e_reco=e_reco, e_true=e_true, containment_correction=True) bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask) safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10) # Data preparation datasets = [] for ind, observation in enumerate(observations): dataset = dataset_maker.run(observation, selection=["counts", "aeff", "edisp"]) dataset_on_off = bkg_maker.run(dataset, observation) dataset_on_off = safe_mask_masker.run(dataset_on_off, observation) dataset_on_off.name = str(ind) datasets.append(dataset_on_off) return datasets
def test_map_maker(pars, observations):
    stacked = MapDataset.create(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )

    maker = MapDatasetMaker(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")

    for obs in observations:
        dataset = maker.run(obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)

    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)

    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)

    image_dataset = stacked.to_image()

    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-4)

    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=1e-3)

    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/") OBS_ID = 110380 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) energy_axis = MapAxis.from_bounds(0.1, 10, nbin=10, unit="TeV", name="energy", interp="log") geom = WcsGeom.create( skydir=(0, 0), binsz=0.05, width=(10, 8), coordsys="GAL", proj="CAR", axes=[energy_axis], ) stacked = MapDataset.create(geom) maker = MapDatasetMaker() safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg") for obs in observations: dataset = maker.run(stacked, obs) dataset = safe_mask_maker.run(dataset, obs) stacked.stack(dataset) spatial_model = PointSpatialModel(lon_0="0.01 deg", lat_0="0.01 deg", frame="galactic") spectral_model = ExpCutoffPowerLawSpectralModel( index=2, amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"), reference=1.0 * u.TeV, lambda_=0.1 / u.TeV, ) model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model, name="gc-source") stacked.models = model stacked.name = "stacked_ds" return Datasets([stacked])
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/") OBS_ID = 110380 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) energy_axis = MapAxis.from_bounds(0.1, 10, nbin=10, unit="TeV", name="energy", interp="log") geom = WcsGeom.create( skydir=(0, 0), binsz=0.02, width=(10, 8), coordsys="GAL", proj="CAR", axes=[energy_axis], ) src_pos = SkyCoord(0, 0, unit="deg", frame="galactic") offset_max = 4 * u.deg maker = MapDatasetMaker(offset_max=offset_max) safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg") stacked = MapDataset.create(geom=geom) datasets = [] for obs in observations: dataset = maker.run(stacked, obs) dataset = safe_mask_maker.run(dataset, obs) dataset.edisp = dataset.edisp.get_energy_dispersion( position=src_pos, e_reco=energy_axis.edges) dataset.psf = dataset.psf.get_psf_kernel(position=src_pos, geom=geom, max_radius="0.3 deg") datasets.append(dataset) return datasets
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/") OBS_ID = 23523 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) time_intervals = [(obs.tstart, obs.tstop) for obs in observations] target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg") emin, emax = [0.7, 10] * u.TeV energy_axis = MapAxis.from_bounds(emin.value, emax.value, 10, unit="TeV", name="energy", interp="log") geom = WcsGeom.create( skydir=target_position, binsz=0.02, width=(2, 2), coordsys="CEL", proj="CAR", axes=[energy_axis], ) energy_axis_true = MapAxis.from_bounds(0.1, 20, 20, unit="TeV", name="energy", interp="log") offset_max = 2 * u.deg datasets = [] maker = MapDatasetMaker(offset_max=offset_max) safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max) for time_interval in time_intervals: observations = observations.select_time(time_interval) # Proceed with further analysis only if there are observations # in the selected time window if len(observations) == 0: log.warning(f"No observations in time interval: {time_interval}") continue stacked = MapDataset.create(geom=geom, energy_axis_true=energy_axis_true) for obs in observations: dataset = maker.run(stacked, obs) dataset = safe_mask_maker.run(dataset, obs) stacked.stack(dataset) stacked.edisp = stacked.edisp.get_energy_dispersion( position=target_position, e_reco=energy_axis.edges) stacked.psf = stacked.psf.get_psf_kernel(position=target_position, geom=stacked.exposure.geom, max_radius="0.3 deg") datasets.append(stacked) spatial_model = PointSpatialModel(lon_0=target_position.ra, lat_0=target_position.dec, frame="icrs") spatial_model.lon_0.frozen = True spatial_model.lat_0.frozen = True spectral_model = PowerLawSpectralModel(index=2.6, amplitude=2.0e-11 * u.Unit("1 / (cm2 s TeV)"), reference=1 * u.TeV) spectral_model.index.frozen = False sky_model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model, name="") for dataset in datasets: model = sky_model.copy(name="crab") dataset.model = model return datasets
def run_analysis_1d(target_dict):
    """Run spectral analysis for the selected target"""
    tag = target_dict["tag"]
    name = target_dict["name"]
    log.info(f"running 1d analysis, {tag}")
    path_res = Path(tag + "/results/")

    ra = target_dict["ra"]
    dec = target_dict["dec"]
    on_size = target_dict["on_size"]
    e_decorr = target_dict["e_decorr"]

    target_pos = SkyCoord(ra, dec, unit="deg", frame="icrs")
    on_radius = Angle(on_size * u.deg)
    containment_corr = True

    # Observations selection
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    mask = data_store.obs_table["TARGET_NAME"] == name
    obs_table = data_store.obs_table[mask]
    observations = data_store.get_observations(obs_table["OBS_ID"])

    if DEBUG is True:
        observations = [observations[0]]

    # Reflected regions background estimation
    on_region = CircleSkyRegion(center=target_pos, radius=on_radius)
    dataset_maker = SpectrumDatasetMaker(
        region=on_region,
        e_reco=E_RECO,
        e_true=E_RECO,
        containment_correction=containment_corr,
    )
    bkg_maker = ReflectedRegionsBackgroundMaker()
    safe_mask_masker = SafeMaskMaker(methods=["edisp-bias"], bias_percent=10)

    datasets = []
    for observation in observations:
        dataset = dataset_maker.run(observation, selection=["counts", "aeff", "edisp"])
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
        datasets.append(dataset_on_off)

    # Fit spectrum
    model = PowerLawSpectralModel(
        index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=e_decorr * u.TeV
    )

    for dataset in datasets:
        dataset.model = model

    fit_joint = Fit(datasets)
    result_joint = fit_joint.run()

    parameters = model.parameters
    parameters.covariance = result_joint.parameters.covariance
    write_fit_summary(parameters, str(path_res / "results-summary-fit-1d.yaml"))

    # Flux points
    fpe = FluxPointsEstimator(datasets=datasets, e_edges=FLUXP_EDGES)
    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"]
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-1d.ecsv", format="ascii.ecsv"
    )
def data_prep(): data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/") OBS_ID = 23523 obs_ids = OBS_ID * np.ones(N_OBS) observations = data_store.get_observations(obs_ids) target_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs") on_region_radius = Angle("0.11 deg") on_region = CircleSkyRegion(center=target_position, radius=on_region_radius) exclusion_region = CircleSkyRegion( center=SkyCoord(183.604, -8.708, unit="deg", frame="galactic"), radius=0.5 * u.deg, ) skydir = target_position.galactic exclusion_mask = Map.create(npix=(150, 150), binsz=0.05, skydir=skydir, proj="TAN", frame="galactic") mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False) exclusion_mask.data = mask e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log", unit="TeV").edges e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log", unit="TeV").edges stacked = SpectrumDatasetOnOff.create(region=on_region, e_reco=e_reco, e_true=e_true, name="stacked") dataset_maker = SpectrumDatasetMaker(containment_correction=False, selection=["counts", "aeff", "edisp"]) bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask) safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10) spectral_model = PowerLawSpectralModel(index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV) spatial_model = PointSpatialModel(lon_0=target_position.ra, lat_0=target_position.dec, frame="icrs") spatial_model.lon_0.frozen = True spatial_model.lat_0.frozen = True sky_model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model, name="") for observation in observations: dataset = stacked.copy(name=f"dataset-{observation.obs_id}") dataset = dataset_maker.run(dataset=dataset, observation=observation) dataset_on_off = bkg_maker.run(dataset, observation) dataset_on_off = safe_mask_masker.run(dataset_on_off, observation) stacked.stack(dataset_on_off) stacked.models = sky_model return Datasets([stacked])
    e_reco=e_reco, e_true=e_true, region=on_region
)

dataset_maker = SpectrumDatasetMaker()
phase_bkg_maker = PhaseBackgroundMaker(
    on_phase=on_phase_range, off_phase=off_phase_range
)
safe_mask_maker = SafeMaskMaker(
    methods=["aeff-default", "edisp-bias"], bias_percent=20
)

datasets = []

for obs in obs_list_vela:
    dataset = dataset_maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)
    dataset_on_off = safe_mask_maker.run(dataset_on_off, obs)
    datasets.append(dataset_on_off)


# Now let's take a look at the datasets we just created:

# In[ ]:


datasets[0].peek()


# Now we'll fit a model to the spectrum with the `Fit` class. First we load a
# power law model with an initial value for the index and the amplitude and then
# we do a likelihood fit. The fit results are printed below.

# In[ ]:
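# A minimal sketch of the fit step described above (assumed starting values, not
# the original notebook cell): attach a power-law model to each dataset and run a
# joint likelihood fit with the `Fit` class.
spectral_model = PowerLawSpectralModel(
    index=4, amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV
)
for dataset in datasets:
    dataset.model = spectral_model

fit = Fit(datasets)
fit_result = fit.run()
print(fit_result)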
def run_analysis(method, target_dict, debug):
    """If the method is "1d", run a joint spectral analysis for the selected target.
    If it is "3d", run a stacked 3D analysis."""
    tag = target_dict["tag"]
    log.info(f"Running {method} analysis, {tag}")
    path_res = Path(tag + "/results/")

    log.info("Reading config")
    txt = Path(f"config_{method}.yaml").read_text()
    txt = txt.format_map(target_dict)
    config = AnalysisConfig.from_yaml(txt)
    if debug:
        config.observations.obs_ids = [target_dict["debug_run"]]
        config.flux_points.energy.nbins = 1
        if method == "3d":
            config.datasets.geom.axes.energy_true.nbins = 10
    analysis = Analysis(config)

    log.info("Running observations selection")
    analysis.get_observations()

    log.info(f"Running data reduction")
    analysis.get_datasets()

    # TODO: This is a workaround. We should somehow apply the safe mask (run by run) from the HLI
    from gammapy.cube import SafeMaskMaker

    datasets = []
    maker_safe_mask = SafeMaskMaker(methods=["edisp-bias", "bkg-peak"], bias_percent=10)
    for dataset in analysis.datasets:
        dataset = maker_safe_mask.run(dataset)
        datasets.append(dataset)
    analysis.datasets = datasets

    log.info(f"Setting the model")
    txt = Path("model_config.yaml").read_text()
    txt = txt.format_map(target_dict)
    log.info(txt)
    analysis.set_models(txt)
    if method == "3d" and target_dict["spatial_model"] == "DiskSpatialModel":
        analysis.models[0].spatial_model.e.frozen = False
        analysis.models[0].spatial_model.phi.frozen = False
        analysis.models[0].spatial_model.r_0.value = 0.3

    log.info(f"Running fit ...")
    analysis.run_fit()

    # TODO: This is a workaround. Set covariance automatically
    results = analysis.fit_result
    names = ["spectral_model", "spatial_model"]
    for name in names:
        if name == "spatial_model" and method == "1d":
            continue
        model = getattr(analysis.models[0], name)
        model.parameters.covariance = results.parameters.get_subcovariance(
            model.parameters.names
        )

    log.info(f"Writing {path_res}")
    write_fit_summary(
        analysis.models[0].parameters,
        str(path_res / f"results-summary-fit-{method}.yaml"),
    )

    log.info(f"Running flux points estimation")
    # TODO: For the 3D analysis, re-optimize the background norm in each energy
    # bin. For now, this is not possible from the HLI.
    analysis.get_flux_points(source=tag)
    flux_points = analysis.flux_points.data
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = [
        "e_ref",
        "e_min",
        "e_max",
        "dnde",
        "dnde_errp",
        "dnde_errn",
        "is_ul",
        "dnde_ul",
    ]
    log.info(f"Writing {path_res}")
    flux_points.table_formatted[keys].write(
        path_res / f"flux-points-{method}.ecsv", format="ascii.ecsv"
    )
)
print(model_simu)


# Now comes the main part of the dataset simulation. We create an in-memory
# observation and an empty dataset. We then predict the number of counts for the
# given model, and Poisson fluctuate it using `fake()` to make a simulated counts
# map. Keep in mind that it is important to specify the `selection` of the maps
# that you want to produce.

# In[ ]:


# Create an in-memory observation
obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)
print(obs)


# Make the MapDataset
empty = MapDataset.create(geom)

maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=4.0 * u.deg)

dataset = maker.run(empty, obs)
dataset = maker_safe_mask.run(dataset, obs)
print(dataset)


# In[ ]:


# Add the model on the dataset and Poisson fluctuate
dataset.models = model_simu
dataset.fake()
# Do a print on the dataset - there is now a counts map
print(dataset)


# Now use this dataset as you would in any standard analysis. You can plot the
# maps, or proceed with your own custom analysis.
# In the next section, we show the standard 3D fitting as in
# [analysis_3d](analysis_3d.ipynb).

# In[ ]:
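# An illustrative sketch only, not the original notebook cell: fit the simulated
# counts with a copy of the model, as one would in a standard 3D analysis.
model_fit = model_simu.copy(name="model-fit")
dataset.models = model_fit
fit = Fit([dataset])
result = fit.run()
print(result)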
def run_analysis_3d(target_dict, fluxp_edges, debug):
    """Run stacked 3D analysis for the selected target.

    Notice that, for the sake of time saving, we run a stacked analysis, as opposed
    to the joint analysis that is performed in the reference paper.
    """
    tag = target_dict["tag"]
    log.info(f"running 3d analysis, {tag}")

    path_res = Path(tag + "/results/")

    txt = Path("config_template.yaml").read_text()
    txt = txt.format_map(target_dict)
    config = AnalysisConfig.from_yaml(txt)

    log.info(f"Running observations selection")
    analysis = Analysis(config)
    analysis.get_observations()

    log.info(f"Running data reduction")
    analysis.get_datasets()

    # TODO: Improve safe mask handling in Analysis. The mask should be applied run-by-run
    maker_safe_mask = SafeMaskMaker(methods=["edisp-bias", "bkg-peak"])
    stacked = maker_safe_mask.run(analysis.datasets[0])

    log.info(f"Running fit ...")
    ra = target_dict["ra"]
    dec = target_dict["dec"]
    e_decorr = target_dict["e_decorr"]
    spectral_model = Model.create("PowerLawSpectralModel", reference=e_decorr)
    spatial_model = Model.create(target_dict["spatial_model"], lon_0=ra, lat_0=dec)
    if target_dict["spatial_model"] == "DiskSpatialModel":
        spatial_model.e.frozen = False
    sky_model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name=tag
    )

    stacked.models = sky_model
    stacked.background_model.norm.frozen = False
    fit = Fit([stacked])
    result = fit.run()

    parameters = stacked.models.parameters
    model_npars = len(sky_model.parameters.names)
    parameters.covariance = result.parameters.covariance[0:model_npars, 0:model_npars]

    log.info(f"Writing {path_res}")
    write_fit_summary(parameters, str(path_res / "results-summary-fit-3d.yaml"))

    log.info("Running flux points estimation")
    # TODO: This is a workaround to re-optimize the bkg. Remove it once it's added to the Analysis class
    for par in stacked.parameters:
        if par is not stacked.background_model.norm:
            par.frozen = True

    reoptimize = True if debug is False else False
    fpe = FluxPointsEstimator(
        datasets=[stacked], e_edges=fluxp_edges, source=tag, reoptimize=reoptimize
    )

    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = [
        "e_ref",
        "e_min",
        "e_max",
        "dnde",
        "dnde_errp",
        "dnde_errn",
        "is_ul",
        "dnde_ul",
    ]
    log.info(f"Writing {path_res}")
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-3d.ecsv", format="ascii.ecsv"
    )