def test_get_observations_obs_ids():
    """Observation selection by an explicit list of observation ids."""
    analysis = Analysis(AnalysisConfig())
    analysis.config.observations.datastore = "$GAMMAPY_DATA/cta-1dc/index/gps/"
    analysis.config.observations.obs_ids = ["110380"]
    analysis.get_observations()
    # Exactly the one requested run must be loaded.
    assert len(analysis.observations) == 1
def test_get_observations_missing_irf():
    """Loading succeeds when only the IRFs actually present are required."""
    analysis = Analysis(AnalysisConfig())
    obs_cfg = analysis.config.observations
    obs_cfg.datastore = "$GAMMAPY_DATA/joint-crab/dl3/magic/"
    obs_cfg.obs_ids = ["05029748"]
    # MAGIC DL3 files here lack some IRF components; restrict the requirement.
    obs_cfg.required_irf = ["aeff", "edisp"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
def test_set_models():
    """Models can be set from a YAML string; invalid input raises TypeError."""
    analysis = Analysis(get_example_config("1d"))
    analysis.get_observations()
    analysis.get_datasets()
    yaml_str = Path(MODEL_FILE).read_text()
    analysis.set_models(models=yaml_str)
    assert isinstance(analysis.models, Models)
    with pytest.raises(TypeError):
        analysis.set_models(0)
def test_usage_errors():
    """Every analysis step called out of order raises RuntimeError."""
    analysis = Analysis(get_example_config("1d"))
    premature_calls = (
        analysis.get_datasets,
        lambda: analysis.read_models(MODEL_FILE),
        analysis.run_fit,
        analysis.get_flux_points,
    )
    for call in premature_calls:
        with pytest.raises(RuntimeError):
            call()
def run_analysis(estimate):
    """Run analysis from observation selection to model fitting.

    Parameters
    ----------
    estimate : str
        Folder containing ``config.yaml`` and ``models.yaml``.

    Returns
    -------
    Analysis
        The fitted analysis object.
    """
    analysis = Analysis(AnalysisConfig.read(f"{estimate}/config.yaml"))
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_models(Models.read(f"{estimate}/models.yaml"))
    analysis.run_fit()
    return analysis
def test_get_observations_obs_cone():
    """Cone selection around the Crab position returns four runs."""
    analysis = Analysis(AnalysisConfig())
    analysis.config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1"
    analysis.config.observations.obs_cone = {
        "frame": "icrs",
        "lon": "83d",
        "lat": "22d",
        "radius": "5d",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 4
def test_geom_analysis_1d():
    """The true-energy axis from the YAML config propagates to the exposure geom."""
    # NOTE(review): the YAML below is reconstructed with conventional
    # indentation — the source paste had collapsed the newlines.
    cfg = """
    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1
        obs_ids: [23523]
    datasets:
        type: 1d
        background:
            method: reflected
        on_region: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 0.11 deg}
        geom:
            axes:
                energy: {min: 0.1 TeV, max: 30 TeV, nbins: 20}
                energy_true: {min: 0.03 TeV, max: 100 TeV, nbins: 50}
        containment_correction: false
    flux_points:
        energy: {min: 1 TeV, max: 50 TeV, nbins: 4}
    """
    analysis = Analysis(get_example_config("1d"))
    analysis.update_config(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 1
    axis = analysis.datasets[0].exposure.geom.axes["energy_true"]
    assert axis.nbin == 50
    assert_allclose(axis.edges[0].to_value("TeV"), 0.03)
    assert_allclose(axis.edges[-1].to_value("TeV"), 100)
def data_reduction(instrument):
    """Reduce the data for one instrument and write the resulting datasets.

    Parameters
    ----------
    instrument : str
        Key into the module-level ``instrument_opts`` mapping (e.g. "fact").
    """
    log.info(f"data_reduction: {instrument}")
    # Plain string: the original used an f-string with no placeholders.
    config = AnalysisConfig.read("config.yaml")
    config.observations.datastore = f"$JOINT_CRAB/data/{instrument}"
    opts = instrument_opts[instrument]
    config.datasets.stack = opts['stack']
    config.datasets.containment_correction = opts['containment']
    config.datasets.on_region.radius = opts['on_radius']

    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    # TODO remove when safe mask can be set on config
    # BUG FIX: the original compared with `instrument is 'fact'` — identity
    # comparison against a string literal is implementation-dependent and
    # only worked by accident of string interning. Use equality instead.
    if instrument == 'fact':
        from gammapy.datasets import SpectrumDatasetOnOff
        stacked = SpectrumDatasetOnOff.create(
            e_reco=analysis.datasets[0]._energy_axis.edges,
            e_true=analysis.datasets[0]._energy_axis.edges,
            region=None,
        )
        for ds in analysis.datasets:
            # Force the full energy range to be considered safe before stacking.
            ds.mask_safe[:] = True
            stacked.stack(ds)
        analysis.datasets = Datasets([stacked])

    analysis.datasets.write(f"reduced_{instrument}", overwrite=True)
def test_config():
    """Default settings, partial updates, and constructor validation."""
    config = AnalysisConfig()
    assert config.settings["general"]["logging"]["level"] == "INFO"
    config.update_settings({"general": {"outdir": "test"}})
    # Updating one key must not clobber the other defaults.
    assert config.settings["general"]["logging"]["level"] == "INFO"
    assert config.settings["general"]["outdir"] == "test"
    # Analysis requires a config object.
    with pytest.raises(ValueError):
        Analysis()
    assert "AnalysisConfig" in str(config)
def test_analysis_3d_joint_datasets():
    """Without stacking, one dataset per observation is produced."""
    cfg = get_example_config("3d")
    cfg.datasets.stack = False
    analysis = Analysis(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2
def test_analysis_3d_joint_datasets():
    """Disabling dataset stacking yields one dataset per run (template config)."""
    cfg = AnalysisConfig.from_template("3d")
    cfg.settings["datasets"]["stack-datasets"] = False
    analysis = Analysis(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 4
def test_analysis_3d_no_geom_irf():
    """3d template reduction produces a single (stacked) dataset."""
    analysis = Analysis(AnalysisConfig.from_template("3d"))
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 1
def setup_analysis(target):
    """Build an `Analysis` for *target* from the config template.

    Parameters
    ----------
    target : str
        Key into ``targets.yaml``; its options fill the config template.

    Returns
    -------
    Analysis
        Configured but not yet executed analysis.
    """
    log.info(f"analysis_3d_data_reduction: {target}")
    # FIX: close the file handle explicitly — the original passed a bare
    # open() to yaml.safe_load and leaked the descriptor.
    with open("targets.yaml") as fh:
        opts = yaml.safe_load(fh)[target]
    txt = Path("config_template.yaml").read_text()
    print(opts)
    txt = txt.format_map(opts)
    config = AnalysisConfig.from_yaml(txt)
    config.flux_points.source = target
    config.datasets.safe_mask.parameters = {"offset_max": 5 * u.deg}
    return Analysis(config)
def test_analysis_ring_3d():
    """Ring background with a 3d dataset type is rejected with ValueError."""
    cfg = get_example_config("3d")
    cfg.datasets.background.method = "ring"
    cfg.datasets.background.parameters = {"r_in": "0.7 deg", "width": "0.7 deg"}
    analysis = Analysis(cfg)
    analysis.get_observations()
    with pytest.raises(ValueError):
        analysis.get_datasets()
def cli_run_analysis(filename, out, overwrite):
    """Performs automated data reduction process."""
    analysis = Analysis(AnalysisConfig.read(filename))
    analysis.get_observations()
    analysis.get_datasets()
    analysis.datasets.write(out, overwrite=overwrite)
    log.info(f"Datasets stored in {out} folder.")
def test_get_observations_obs_file(tmp_path):
    """Observation selection from an id file on disk."""
    analysis = Analysis(AnalysisConfig())
    # Presumably exercises the default selection before an obs_file is set
    # — TODO confirm this first call is intentional.
    analysis.get_observations()
    id_file = tmp_path / "obs_ids.txt"
    id_file.write_text("20136\n47829\n")
    analysis.config.observations.obs_file = id_file
    analysis.get_observations()
    assert len(analysis.observations) == 2
def test_analysis_no_bkg_1d(caplog):
    """1d reduction without a background maker warns and does not produce
    an on/off dataset."""
    analysis = Analysis(get_example_config("1d"))
    analysis.get_observations()
    analysis.get_datasets()
    assert not isinstance(analysis.datasets[0], SpectrumDatasetOnOff)
    last_record = caplog.records[-1]
    assert last_record.levelname == "WARNING"
    assert last_record.message == "No background maker set. Check configuration."
def test_analysis_ring_background():
    """Ring background estimation on a single-energy-bin 3d dataset."""
    cfg = get_example_config("3d")
    cfg.datasets.background.method = "ring"
    cfg.datasets.background.parameters = {"r_in": "0.7 deg", "width": "0.7 deg"}
    cfg.datasets.geom.axes.energy.nbins = 1
    analysis = Analysis(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    dataset = analysis.datasets[0]
    assert isinstance(dataset, MapDataset)
    # Spot-check one background-map pixel against the reference value.
    assert_allclose(dataset.background_model.map.data[0, 10, 10], 0.091552, rtol=1e-5)
def test_analysis_no_bkg_1d(caplog):
    """Missing background maker in 1d: plain dataset plus a warning record."""
    analysis = Analysis(get_example_config("1d"))
    analysis.get_observations()
    analysis.get_datasets()
    assert not isinstance(analysis.datasets[0], SpectrumDatasetOnOff)
    levels = [record.levelname for record in caplog.records]
    messages = [record.message for record in caplog.records]
    assert "WARNING" in levels
    assert "No background maker set. Check configuration." in messages
def test_analysis_no_bkg_3d(caplog):
    """Missing background maker in 3d still yields a MapDataset, with a warning."""
    cfg = get_example_config("3d")
    cfg.datasets.background.method = None
    analysis = Analysis(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    assert isinstance(analysis.datasets[0], MapDataset)
    last_record = caplog.records[-1]
    assert last_record.levelname == "WARNING"
    assert last_record.message == "No background maker set. Check configuration."
def test_analysis_ring_background():
    """Ring background plus excess-map computation on a 3d dataset."""
    cfg = get_example_config("3d")
    cfg.datasets.background.method = "ring"
    cfg.datasets.background.parameters = {
        "r_in": "0.7 deg",
        "width": "0.7 deg",
    }
    cfg.datasets.geom.axes.energy.nbins = 1
    analysis = Analysis(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.get_excess_map()
    dataset = analysis.datasets[0]
    assert isinstance(dataset, MapDataset)
    assert_allclose(dataset.npred_background().data[0, 10, 10], 0.091799, rtol=1e-2)
    assert isinstance(analysis.excess_map["sqrt_ts"], WcsNDMap)
    assert_allclose(analysis.excess_map["excess"].data[0, 62, 62], 134.12389)
def test_analysis_no_bkg_3d(caplog):
    """Missing 3d background maker: MapDataset produced, warning logged."""
    cfg = get_example_config("3d")
    cfg.datasets.background.method = None
    analysis = Analysis(cfg)
    with caplog.at_level(logging.WARNING):
        analysis.get_observations()
        analysis.get_datasets()
    assert isinstance(analysis.datasets[0], MapDataset)
    messages = [record.message for record in caplog.records]
    assert "No background maker set. Check configuration." in messages
def test_get_observations_obs_time(tmp_path):
    """Time-interval selection; combining with invalid obs ids raises."""
    analysis = Analysis(AnalysisConfig())
    analysis.config.observations.obs_time = {
        "start": "2004-03-26",
        "stop": "2004-05-26",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 40
    # An id not present in the selection must raise.
    analysis.config.observations.obs_ids = [0]
    with pytest.raises(ValueError):
        analysis.get_observations()
def test_analysis_3d_joint_datasets():
    """Unstacked 3d reduction: per-run datasets with reference background norms."""
    cfg = get_example_config("3d")
    cfg.datasets.stack = False
    analysis = Analysis(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2
    first, second = analysis.datasets[0], analysis.datasets[1]
    assert_allclose(first.background_model.norm.value, 1.031743694988066)
    assert_allclose(first.background_model.tilt.value, 0.0)
    assert_allclose(second.background_model.norm.value, 0.9776349021876344)
def data_prep():
    """Select MSH 15-52 observations and reduce them to unstacked 3d datasets."""
    # source_pos = SkyCoord.from_name("MSH 15-52")
    source_pos = SkyCoord(228.32, -59.08, unit="deg")
    config = AnalysisConfig()
    # Select observations within 2.5 degrees of the source position.
    config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1/"
    config.observations.obs_cone = {
        "frame": "icrs",
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "radius": 2.5 * u.deg,
    }
    config.datasets.type = "3d"
    # The WCS geometry - centered on MSH 15-52.
    config.datasets.geom.wcs.skydir = {
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "frame": "icrs",
    }
    config.datasets.geom.wcs.width = {"width": "3 deg", "height": "3 deg"}
    config.datasets.geom.wcs.binsize = "0.02 deg"
    # The FoV radius to use for cutouts.
    config.datasets.geom.selection.offset_max = 3.5 * u.deg
    # Fix the energy axis for the counts map (reconstructed energy binning).
    config.datasets.geom.axes.energy.min = "0.5 TeV"
    config.datasets.geom.axes.energy.max = "5 TeV"
    config.datasets.geom.axes.energy.nbins = 10
    # The ring must be extracted per observation, hence no stacking here.
    config.datasets.stack = False
    analysis = Analysis(config)
    # For this specific case, we do not need fine bins in true energy.
    analysis.config.datasets.geom.axes.energy_true = (
        analysis.config.datasets.geom.axes.energy
    )
    # First get the required observations.
    analysis.get_observations()
    # Run the data reduction.
    analysis.get_datasets()
    return analysis
def test_exclusion_region(tmp_path):
    """Reflected-region background with an exclusion mask read from disk."""
    config = get_example_config("1d")
    analysis = Analysis(config)
    region = CircleSkyRegion(center=SkyCoord("85d 23d"), radius=1 * u.deg)
    exclusion = Map.create(npix=(150, 150), binsz=0.05, skydir=SkyCoord("83d 22d"))
    # Mark everything outside the circle as usable (1), the circle as excluded (0).
    exclusion.data = exclusion.geom.region_mask([region], inside=False).astype(int)
    mask_path = tmp_path / "exclusion.fits"
    exclusion.write(mask_path)
    config.datasets.background.exclusion = mask_path
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2
def data_reduction(instrument):
    """Reduce joint-crab data for one instrument and write the datasets.

    Parameters
    ----------
    instrument : str
        Key into the module-level ``instrument_opts`` mapping (e.g. "fact").
    """
    log.info(f"data_reduction: {instrument}")
    # Plain string: the original used an f-string with no placeholders.
    config = AnalysisConfig.read("config.yaml")
    config.observations.datastore = str(
        Path().resolve().parent / "data" / "joint-crab" / instrument
    )
    opts = instrument_opts[instrument]
    config.datasets.stack = opts["stack"]
    config.datasets.containment_correction = opts["containment"]
    config.datasets.on_region.radius = opts["on_radius"]
    if instrument == "fact":
        # NOTE(review): FACT only gets the aeff-based safe mask — presumably
        # because other safe-mask methods need IRFs it lacks; confirm.
        config.datasets.safe_mask.methods = ["aeff-default"]

    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    if instrument == "fact":
        # Restrict the safe range by hand to energies above 0.4 TeV.
        counts = analysis.datasets[0].counts
        data = counts.geom.energy_mask(emin=0.4 * u.TeV)
        analysis.datasets[0].mask_safe = counts.copy(data=data)

    analysis.datasets.write(f"reduced_{instrument}", overwrite=True)
# In[ ]:
config.write("config.yaml", overwrite=True)

# In[ ]:
config = AnalysisConfig.read("config.yaml")
print(config)

# ## Running the analysis
#
# We first create an `~gammapy.analysis.Analysis` object from our configuration.

# In[ ]:
analysis = Analysis(config)

# ### Observation selection
#
# We can directly select and load the observations from disk using
# `~gammapy.analysis.Analysis.get_observations()`:

# In[ ]:
analysis.get_observations()

# The observations are now available on the `Analysis` object. The selection
# corresponds to the following ids:

# In[ ]:
analysis.observations.ids
def test_get_observations_no_datastore():
    """A datastore path that does not exist raises FileNotFoundError."""
    analysis = Analysis(AnalysisConfig())
    analysis.config.observations.datastore = "other"
    with pytest.raises(FileNotFoundError):
        analysis.get_observations()
def test_analysis_3d():
    """Full 3d chain: reduction, model reading, fit, and flux points."""
    analysis = Analysis(get_example_config("3d"))
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(MODEL_FILE)
    # Free the background tilt so the fit has 8 parameters.
    analysis.datasets["stacked"].background_model.spectral_model.tilt.frozen = False
    analysis.run_fit()
    analysis.get_flux_points()

    assert len(analysis.datasets) == 1
    pars = analysis.fit_result.parameters
    assert len(pars) == 8
    assert pars["amplitude"].unit == "cm-2 s-1 TeV-1"
    table = analysis.flux_points.data.table
    assert len(table) == 2
    dnde = table["dnde"].quantity
    assert_allclose(dnde[0].value, 1.340073e-11, rtol=1e-2)
    assert_allclose(dnde[-1].value, 2.776611e-13, rtol=1e-2)
    assert_allclose(pars["index"].value, 3.097613, rtol=1e-2)
    assert_allclose(pars["tilt"].value, -0.207792, rtol=1e-2)