def data_reduction(instrument):
    """Run the data reduction for one instrument and write the datasets to disk.

    Parameters
    ----------
    instrument : str
        Key into the module-level ``instrument_opts`` dict (e.g. ``"fact"``);
        also selects the ``$JOINT_CRAB/data/{instrument}`` datastore folder.
    """
    log.info(f"data_reduction: {instrument}")

    config = AnalysisConfig.read("config.yaml")
    config.observations.datastore = f"$JOINT_CRAB/data/{instrument}"
    opts = instrument_opts[instrument]
    config.datasets.stack = opts["stack"]
    config.datasets.containment_correction = opts["containment"]
    config.datasets.on_region.radius = opts["on_radius"]

    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    # TODO remove when safe mask can be set on config
    # BUG FIX: the original compared with `instrument is 'fact'`; identity
    # comparison against a string literal is implementation-dependent and can
    # silently be False. Use equality instead.
    if instrument == "fact":
        from gammapy.datasets import SpectrumDatasetOnOff

        stacked = SpectrumDatasetOnOff.create(
            e_reco=analysis.datasets[0]._energy_axis.edges,
            e_true=analysis.datasets[0]._energy_axis.edges,
            region=None,
        )
        for ds in analysis.datasets:
            # FACT runs have no safe-energy information: accept all bins.
            ds.mask_safe[:] = True
            stacked.stack(ds)
        analysis.datasets = Datasets([stacked])

    analysis.datasets.write(f"reduced_{instrument}", overwrite=True)
def test_geom_analysis_1d():
    """The true-energy axis given through a config override is honored in 1D."""
    override = """
observations:
    datastore: $GAMMAPY_DATA/hess-dl3-dr1
    obs_ids: [23523]
datasets:
    type: 1d
    background:
        method: reflected
    on_region: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 0.11 deg}
    geom:
        axes:
            energy: {min: 0.1 TeV, max: 30 TeV, nbins: 20}
            energy_true: {min: 0.03 TeV, max: 100 TeV, nbins: 50}
    containment_correction: false
flux_points:
    energy: {min: 1 TeV, max: 50 TeV, nbins: 4}
"""
    analysis = Analysis(get_example_config("1d"))
    analysis.update_config(override)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 1

    true_axis = analysis.datasets[0].exposure.geom.axes["energy_true"]
    assert true_axis.nbin == 50
    assert_allclose(true_axis.edges[0].to_value("TeV"), 0.03)
    assert_allclose(true_axis.edges[-1].to_value("TeV"), 100)
def test_exclusion_region(tmp_path):
    """Data reduction with an exclusion mask read back from disk (1D and 3D)."""
    # --- 1D: reflected-region background with an exclusion-mask file.
    conf_1d = get_example_config("1d")
    analysis = Analysis(conf_1d)

    excluded = CircleSkyRegion(center=SkyCoord("85d 23d"), radius=1 * u.deg)
    mask_map = Map.create(npix=(150, 150), binsz=0.05, skydir=SkyCoord("83d 22d"))
    region_mask = mask_map.geom.region_mask([excluded], inside=False)
    mask_map.data = region_mask.astype(int)
    path_1d = tmp_path / "exclusion.fits"
    mask_map.write(path_1d)

    conf_1d.datasets.background.method = "reflected"
    conf_1d.datasets.background.exclusion = path_1d
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2

    # --- 3D: the exclusion mask is built on the dataset geometry itself.
    conf_3d = get_example_config("3d")
    analysis = Analysis(conf_3d)
    analysis.get_observations()
    analysis.get_datasets()

    geom = analysis.datasets[0]._geom
    mask_3d = WcsNDMap.from_geom(geom)
    mask_3d.data = geom.region_mask([excluded], inside=False).astype(int)
    path_3d = tmp_path / "exclusion3d.fits"
    mask_3d.write(path_3d)

    conf_3d.datasets.background.exclusion = path_3d
    analysis.get_datasets()
    assert len(analysis.datasets) == 1
def test_analysis_1d_stacked():
    """Stacked 1D fit reproduces reference counts and spectral parameters."""
    override = """
datasets:
    geom:
        axes:
            energy_true: {min: 0.03 TeV, max: 100 TeV, nbins: 50}
    background:
        method: reflected
"""
    analysis = Analysis(get_example_config("1d"))
    analysis.update_config(override)
    analysis.config.datasets.stack = True
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(MODEL_FILE_1D)
    analysis.run_fit()

    assert len(analysis.datasets) == 1
    assert_allclose(analysis.datasets["stacked"].counts.data.sum(), 184)

    fitted = analysis.fit_result.parameters
    assert_allclose(fitted["index"].value, 2.76913, rtol=1e-2)
    assert_allclose(fitted["amplitude"].value, 5.496388e-11, rtol=1e-2)
def test_analysis_3d_joint_datasets():
    """Without stacking, the 3D reduction yields one dataset per observation."""
    conf = get_example_config("3d")
    conf.datasets.stack = False
    joint = Analysis(conf)
    joint.get_observations()
    joint.get_datasets()
    assert len(joint.datasets) == 2
def test_get_observations_obs_ids():
    """Selecting a single observation id returns exactly one observation."""
    analysis = Analysis(AnalysisConfig())
    obs_settings = analysis.config.observations
    obs_settings.datastore = "$GAMMAPY_DATA/cta-1dc/index/gps/"
    obs_settings.obs_ids = ["110380"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
def test_analysis_3d_no_geom_irf():
    """The 3D template config runs end-to-end and produces one dataset."""
    analysis = Analysis(AnalysisConfig.from_template("3d"))
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 1
def test_analysis_3d_joint_datasets():
    """Disabling stacking via the settings dict yields one dataset per run."""
    conf = AnalysisConfig.from_template("3d")
    conf.settings["datasets"]["stack-datasets"] = False
    joint = Analysis(conf)
    joint.get_observations()
    joint.get_datasets()
    assert len(joint.datasets) == 4
def cli_run_analysis(filename, out, overwrite):
    """Performs automated data reduction process.

    Parameters
    ----------
    filename : str
        Path to the analysis configuration YAML file.
    out : str
        Output folder where the reduced datasets are stored.
    overwrite : bool
        Whether existing output files may be overwritten.
    """
    analysis = Analysis(AnalysisConfig.read(filename))
    analysis.get_observations()
    analysis.get_datasets()
    analysis.datasets.write(out, overwrite=overwrite)
    log.info(f"Datasets stored in {out} folder.")
def test_analysis_ring_3d():
    """Ring background with a 3d dataset type is rejected with ValueError."""
    conf = get_example_config("3d")
    conf.datasets.background.method = "ring"
    conf.datasets.background.parameters = {"r_in": "0.7 deg", "width": "0.7 deg"}
    analysis = Analysis(conf)
    analysis.get_observations()
    with pytest.raises(ValueError):
        analysis.get_datasets()
def test_get_observations_missing_irf():
    """A run lacking some IRFs is selected when requirements are relaxed."""
    analysis = Analysis(AnalysisConfig())
    obs_conf = analysis.config.observations
    obs_conf.datastore = "$GAMMAPY_DATA/joint-crab/dl3/magic/"
    obs_conf.obs_ids = ["05029748"]
    # Only effective area and energy dispersion are required here.
    obs_conf.required_irf = ["aeff", "edisp"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
def main(config_path, models_path, output, reference):
    """Run a full analysis: fit, flux points, plots and a light curve.

    Parameters
    ----------
    config_path : str or Path
        Path to the `AnalysisConfig` YAML file.
    models_path : str or Path
        Path to the models YAML file.
    output : str or Path
        Base output path; product suffixes (.pdf, .yaml, .fits, ...) derive from it.
    reference : str or None
        If given, name of a Crab reference model overlaid on the SED plot.
    """
    config = AnalysisConfig.read(config_path)
    analysis = Analysis(config)
    log.info(config)
    analysis.get_observations()
    log.info(analysis)
    log.info(dir(analysis))
    analysis.get_datasets()
    # BUG FIX: `analysis.datasets` is only populated by `get_datasets()`.
    # The original logged `analysis.datasets[0].counts` *before* that call,
    # which fails because `datasets` is still unset at that point.
    log.info(analysis.datasets)
    log.info(analysis.datasets[0].counts)
    analysis.read_models(models_path)

    # stacked fit and flux estimation
    analysis.run_fit()
    analysis.get_flux_points()

    # Plot flux points
    ax_sed, ax_residuals = analysis.flux_points.plot_fit()
    if reference:
        plot_kwargs = {
            "energy_range": [
                analysis.config.flux_points.energy.min,
                analysis.config.flux_points.energy.max,
            ],
            "energy_power": 2,
            "flux_unit": "erg-1 cm-2 s-1",
        }
        create_crab_spectral_model(reference).plot(
            **plot_kwargs, ax=ax_sed, label="Crab reference"
        )
        ax_sed.legend()

    ax_sed.set_ylim(1e-12, 1e-9)
    base_out = Path(output)
    ax_sed.get_figure().savefig(base_out.with_suffix(".pdf").as_posix())
    plt.clf()
    analysis.models.write(base_out.with_suffix(".yaml").as_posix(), overwrite=True)
    analysis.flux_points.write(
        base_out.with_suffix(".fits").as_posix(), overwrite=True
    )
    ax_excess = analysis.datasets["stacked"].plot_excess()
    ax_excess.get_figure().savefig(base_out.with_suffix(".excess.pdf").as_posix())
    plt.clf()

    # Re-reduce without stacking to estimate a light curve per observation.
    config.datasets.stack = False
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(models_path)
    lc_maker_low = LightCurveEstimator(
        energy_edges=[0.2, 5] * u.TeV,
        source=config.flux_points.source,
        reoptimize=False,
    )
    lc_low = lc_maker_low.run(analysis.datasets)
    ax_lc = lc_low.plot(marker="o", label="1D")
    ax_lc.get_figure().savefig(base_out.with_suffix(".lc.pdf").as_posix())
    plt.clf()
def test_analysis_no_bkg_1d(caplog):
    """Without a background maker, 1D reduction warns and does not build OnOff datasets."""
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    # Idiom fix: use `not isinstance(...)` instead of `isinstance(...) is False`.
    assert not isinstance(analysis.datasets[0], SpectrumDatasetOnOff)
    assert caplog.records[-1].levelname == "WARNING"
    assert caplog.records[-1].message == "No background maker set. Check configuration."
def test_get_observations_obs_file(tmp_path):
    """Observation ids can be supplied through a plain-text id file."""
    analysis = Analysis(AnalysisConfig())
    analysis.get_observations()

    id_file = tmp_path / "obs_ids.txt"
    id_file.write_text("20136\n47829\n")
    analysis.config.observations.obs_file = id_file
    analysis.get_observations()
    assert len(analysis.observations) == 2
def test_analysis_1d():
    """Full 1D workflow: reduction, fit, flux points and a light curve."""
    override = """
observations:
    datastore: $GAMMAPY_DATA/hess-dl3-dr1
    obs_ids: [23523, 23526]
    obs_time: {
        start: [J2004.92654346, J2004.92658453, J2004.92663655],
        stop: [J2004.92658453, J2004.92663655, J2004.92670773]
    }
datasets:
    type: 1d
    background:
        method: reflected
    geom:
        axes:
            energy_true: {min: 0.01 TeV, max: 300 TeV, nbins: 109}
    on_region: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 0.11 deg}
    safe_mask:
        methods: [aeff-default, edisp-bias]
        parameters: {bias_percent: 10.0}
    containment_correction: false
flux_points:
    energy: {min: 1 TeV, max: 50 TeV, nbins: 4}
light_curve:
    energy_edges: {min: 1 TeV, max: 50 TeV, nbins: 1}
    time_intervals: {
        start: [J2004.92654346, J2004.92658453, J2004.92663655],
        stop: [J2004.92658453, J2004.92663655, J2004.92670773]
    }
"""
    analysis = Analysis(get_example_config("1d"))
    analysis.update_config(override)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(MODEL_FILE_1D)
    analysis.run_fit()
    analysis.get_flux_points()
    analysis.get_light_curve()

    # One dataset per requested time interval.
    assert len(analysis.datasets) == 3

    table = analysis.flux_points.data.to_table(sed_type="dnde")
    assert len(table) == 4
    dnde = table["dnde"].quantity
    assert dnde.unit == "cm-2 s-1 TeV-1"
    assert_allclose(dnde[0].value, 8.116854e-12, rtol=1e-2)
    assert_allclose(dnde[2].value, 3.444475e-14, rtol=1e-2)

    time_axis = analysis.light_curve.geom.axes["time"]
    assert time_axis.nbin == 3
    assert_allclose(time_axis.time_min.mjd, [53343.92, 53343.935, 53343.954])

    flux = analysis.light_curve.flux.data[:, :, 0, 0]
    assert_allclose(
        flux, [[1.688954e-11], [2.347870e-11], [1.604152e-11]], rtol=1e-4
    )
def test_analysis_ring_background():
    """Ring background on a single-energy-bin 3D map matches the reference level."""
    conf = get_example_config("3d")
    conf.datasets.background.method = "ring"
    conf.datasets.background.parameters = {"r_in": "0.7 deg", "width": "0.7 deg"}
    conf.datasets.geom.axes.energy.nbins = 1
    analysis = Analysis(conf)
    analysis.get_observations()
    analysis.get_datasets()

    dataset = analysis.datasets[0]
    assert isinstance(dataset, MapDataset)
    assert_allclose(
        dataset.background_model.map.data[0, 10, 10], 0.091552, rtol=1e-5
    )
def test_set_models():
    """Models can be set from a YAML string; wrong input types raise TypeError."""
    analysis = Analysis(get_example_config("1d"))
    analysis.get_observations()
    analysis.get_datasets()

    yaml_models = Path(MODEL_FILE).read_text()
    analysis.set_models(models=yaml_models)
    assert isinstance(analysis.models, Models)

    with pytest.raises(TypeError):
        analysis.set_models(0)
def test_analysis_no_bkg_1d(caplog):
    """With no background maker configured, 1D reduction warns and falls back."""
    analysis = Analysis(get_example_config("1d"))
    analysis.get_observations()
    analysis.get_datasets()

    assert not isinstance(analysis.datasets[0], SpectrumDatasetOnOff)
    levels = [record.levelname for record in caplog.records]
    assert "WARNING" in levels
    messages = [record.message for record in caplog.records]
    assert "No background maker set. Check configuration." in messages
def test_analysis_no_bkg_3d(caplog):
    """Without a background maker, 3D reduction warns but still builds a MapDataset."""
    config = get_example_config("3d")
    config.datasets.background.method = None
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    # Idiom fix: assert the isinstance result directly instead of `... is True`.
    assert isinstance(analysis.datasets[0], MapDataset)
    assert caplog.records[-1].levelname == "WARNING"
    assert caplog.records[-1].message == "No background maker set. Check configuration."
def test_analysis_no_bkg_3d(caplog):
    """With the background method disabled, 3D reduction warns but succeeds."""
    conf = get_example_config("3d")
    conf.datasets.background.method = None
    analysis = Analysis(conf)
    with caplog.at_level(logging.WARNING):
        analysis.get_observations()
        analysis.get_datasets()

    assert isinstance(analysis.datasets[0], MapDataset)
    messages = [record.message for record in caplog.records]
    assert "No background maker set. Check configuration." in messages
def run_analysis(estimate):
    """Run analysis from observation selection to model fitting.

    Parameters
    ----------
    estimate : str
        Folder holding ``config.yaml`` and ``models.yaml`` for this estimate.

    Returns
    -------
    Analysis
        The analysis object after the fit has been run.
    """
    analysis = Analysis(AnalysisConfig.read(f"{estimate}/config.yaml"))
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_models(Models.read(f"{estimate}/models.yaml"))
    analysis.run_fit()
    return analysis
def test_get_observations_obs_cone():
    """A cone selection around the Crab picks up four HESS DL3-DR1 runs."""
    analysis = Analysis(AnalysisConfig())
    obs_conf = analysis.config.observations
    obs_conf.datastore = "$GAMMAPY_DATA/hess-dl3-dr1"
    obs_conf.obs_cone = {
        "frame": "icrs",
        "lon": "83d",
        "lat": "22d",
        "radius": "5d",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 4
def test_get_observations_obs_time(tmp_path):
    """Time-interval selection filters runs; an invalid obs_id raises ValueError."""
    analysis = Analysis(AnalysisConfig())
    analysis.config.observations.obs_time = {
        "start": "2004-03-26",
        "stop": "2004-05-26",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 40

    analysis.config.observations.obs_ids = [0]
    with pytest.raises(ValueError):
        analysis.get_observations()
def run_3d(name):
    """Run 3D analysis for one source.

    Reduces the data, fits the 3D model, and writes best-fit parameters,
    flux points and diagnostic plots under ``results/{name}``.

    Parameters
    ----------
    name : str
        Source name; selects the per-source model file and output folder.
    """
    logging.info(f"run3d: {name}")
    mode = "3d"
    config_file = f"config{mode}.yaml"
    target_config_file = "targets.yaml"
    model_file = f"model{mode}_{name}.yaml"
    outdir = f"results/{name}"

    config = target_config3d(config_file, target_config_file, name)
    analysis = Analysis(config)
    analysis.get_observations()
    conf = config.settings["observations"]["filters"][0]
    nb, lon, lat, rad = (
        len(analysis.observations.ids),
        conf["lon"],
        conf["lat"],
        conf["radius"],
    )
    logging.info(f"{nb} observations found in {rad} around {lon}, {lat} ")
    analysis.get_datasets()

    # NOTE: the original plotted and saved the counts map twice to the same
    # file (a redundant "# test" plot right after get_datasets); the duplicate
    # was removed and only the final counts plot below is kept.

    analysis.set_model(filename=model_file)
    logging.info(analysis.model)
    analysis.run_fit()
    logging.info(analysis.fit_result.parameters.to_table())
    analysis.fit_result.parameters.to_table().write(
        f"{outdir}/{name}_{mode}_bestfit.dat", format="ascii", overwrite=True
    )
    analysis.get_flux_points(source=name)
    analysis.flux_points.write(f"{outdir}/{name}_{mode}_fluxpoints.fits")

    plt.figure(figsize=(5, 5))
    analysis.datasets["stacked"].counts.sum_over_axes().plot(add_cbar=True)
    plt.savefig(f"{outdir}/{name}_{mode}_counts.png", bbox_inches="tight")

    plt.figure(figsize=(5, 5))
    analysis.datasets["stacked"].plot_residuals(
        method="diff/sqrt(model)", vmin=-0.5, vmax=0.5
    )
    plt.savefig(f"{outdir}/{name}_{mode}_residuals.png", bbox_inches="tight")

    plt.figure(figsize=(8, 5))
    ax_sed, ax_residuals = analysis.flux_points.peek()
    plt.savefig(f"{outdir}/{name}_{mode}_fluxpoints.png", bbox_inches="tight")
def test_analysis_3d_joint_datasets():
    """Joint (unstacked) 3D reduction keeps independent per-run backgrounds."""
    conf = get_example_config("3d")
    conf.datasets.stack = False
    analysis = Analysis(conf)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2

    bkg_first = analysis.datasets[0].background_model
    assert_allclose(bkg_first.norm.value, 1.031743694988066)
    assert_allclose(bkg_first.tilt.value, 0.0)
    bkg_second = analysis.datasets[1].background_model
    assert_allclose(bkg_second.norm.value, 0.9776349021876344)
def test_analysis_no_bkg():
    """Data reduction proceeds without a background maker in both 1D and 3D."""
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    # Idiom fix: `not isinstance(...)` instead of `isinstance(...) is False`.
    assert not isinstance(analysis.datasets[0], SpectrumDatasetOnOff)

    config = get_example_config("3d")
    config.datasets.background.method = None
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    # Idiom fix: assert the isinstance result directly instead of `... is True`.
    assert isinstance(analysis.datasets[0], MapDataset)
def test_analysis_1d_stacked():
    """Stacked 1D fit (template config, settings API) matches the references."""
    analysis = Analysis(AnalysisConfig.from_template("1d"))
    analysis.settings["datasets"]["stack-datasets"] = True
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.run_fit()

    assert len(analysis.datasets) == 1
    assert_allclose(analysis.datasets["stacked"].counts.data.sum(), 404)

    best_fit = analysis.fit_result.parameters
    assert_allclose(best_fit["index"].value, 2.689559, rtol=1e-3)
    assert_allclose(best_fit["amplitude"].value, 2.81629e-11, rtol=1e-3)
def data_prep():
    """Reduce HESS DL3-DR1 data around MSH 15-52 into unstacked 3D datasets."""
    # source_pos = SkyCoord.from_name("MSH 15-52")
    target = SkyCoord(228.32, -59.08, unit="deg")

    config = AnalysisConfig()
    # Select observations - 2.5 degrees from the source position
    config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1/"
    config.observations.obs_cone = {
        "frame": "icrs",
        "lon": target.ra,
        "lat": target.dec,
        "radius": 2.5 * u.deg,
    }

    config.datasets.type = "3d"
    # The WCS geometry - centered on MSH 15-52
    config.datasets.geom.wcs.skydir = {
        "lon": target.ra,
        "lat": target.dec,
        "frame": "icrs",
    }
    config.datasets.geom.wcs.width = {"width": "3 deg", "height": "3 deg"}
    config.datasets.geom.wcs.binsize = "0.02 deg"
    # The FoV radius to use for cutouts
    config.datasets.geom.selection.offset_max = 3.5 * u.deg

    # Fix the energy axis for the counts map (the reconstructed energy binning)
    config.datasets.geom.axes.energy.min = "0.5 TeV"
    config.datasets.geom.axes.energy.max = "5 TeV"
    config.datasets.geom.axes.energy.nbins = 10

    # The ring must be extracted per observation, hence no stacking at this stage
    config.datasets.stack = False

    analysis = Analysis(config)
    # For this specific case we do not need fine bins in true energy
    analysis.config.datasets.geom.axes.energy_true = (
        analysis.config.datasets.geom.axes.energy
    )

    # First get the required observations
    analysis.get_observations()
    # Then run the data reduction
    analysis.get_datasets()
    return analysis
def test_analysis_1d(config_analysis_data):
    """End-to-end 1D analysis: reduction, fit and flux-point estimation."""
    analysis = Analysis(AnalysisConfig.from_template("1d"))
    analysis.config.update_settings(config_analysis_data)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.run_fit()
    analysis.get_flux_points()

    assert len(analysis.datasets) == 2
    table = analysis.flux_points.data.table
    assert len(table) == 4

    dnde = table["dnde"].quantity
    assert dnde.unit == "cm-2 s-1 TeV-1"
    assert_allclose(dnde[0].value, 8.03604e-12, rtol=1e-2)
    assert_allclose(dnde[-1].value, 4.780021e-21, rtol=1e-2)
def test_set_models():
    """set_models accepts YAML strings and model lists; extend=False replaces."""
    analysis = Analysis(get_example_config("3d"))
    analysis.get_observations()
    analysis.get_datasets()

    analysis.set_models(models=Path(MODEL_FILE).read_text())
    assert isinstance(analysis.models, DatasetModels)
    assert len(analysis.models) == 2
    assert analysis.models.names == ["source", "stacked-bkg"]

    with pytest.raises(TypeError):
        analysis.set_models(0)

    renamed = analysis.models["source"].copy(name="source2")
    analysis.set_models(models=[renamed], extend=False)
    assert len(analysis.models) == 2
    assert analysis.models.names == ["source2", "stacked-bkg"]