Code Example #1
File: test_analysis.py  Project: gfiusa/gammapy
def test_analysis_3d_joint_datasets():
    config = get_example_config("3d")
    config.datasets.stack = False
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2
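The test snippets in this listing assume a small shared preamble (imports, get_example_config, MODEL_FILE, MODEL_FILE_1D) defined at module level in the original test files. A minimal sketch of what that preamble could look like; the config directory layout and file names are assumptions, not the actual gammapy source:

# Hypothetical preamble for the test snippets below; paths and file names are assumptions.
from pathlib import Path

import pytest
from numpy.testing import assert_allclose

from gammapy.analysis import Analysis, AnalysisConfig

CONFIG_PATH = Path(__file__).resolve().parent / "config"  # assumed layout
MODEL_FILE = CONFIG_PATH / "model.yaml"                   # assumed file name
MODEL_FILE_1D = CONFIG_PATH / "model-1d.yaml"             # assumed file name


def get_example_config(which):
    """Read a bundled example config, e.g. "1d" or "3d" (assumed naming)."""
    return AnalysisConfig.read(CONFIG_PATH / f"example-{which}.yaml")
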
Code Example #2
def data_reduction(instrument):
    """Run the data reduction for one instrument of the joint Crab dataset."""
    log.info(f"data_reduction: {instrument}")
    config = AnalysisConfig.read("config.yaml")
    config.observations.datastore = f"$JOINT_CRAB/data/{instrument}"
    config.datasets.stack = instrument_opts[instrument]['stack']
    config.datasets.containment_correction = instrument_opts[instrument][
        'containment']
    config.datasets.on_region.radius = instrument_opts[instrument]['on_radius']

    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    # TODO remove when safe mask can be set on config
    if instrument == 'fact':
        from gammapy.datasets import SpectrumDatasetOnOff
        stacked = SpectrumDatasetOnOff.create(
            e_reco=analysis.datasets[0]._energy_axis.edges,
            e_true=analysis.datasets[0]._energy_axis.edges,
            region=None)
        for ds in analysis.datasets:
            ds.mask_safe[:] = True
            stacked.stack(ds)
        analysis.datasets = Datasets([stacked])

    analysis.datasets.write(f"reduced_{instrument}", overwrite=True)
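data_reduction above relies on a module-level instrument_opts dictionary with per-instrument settings. A hypothetical sketch of its shape; the keys ('stack', 'containment', 'on_radius') come from the snippet itself, while the instrument names and values are illustrative placeholders only:

# Hypothetical per-instrument options consumed by data_reduction above.
instrument_opts = {
    "hess":    {"stack": False, "containment": True,  "on_radius": "0.11 deg"},
    "magic":   {"stack": False, "containment": False, "on_radius": "0.14 deg"},
    "veritas": {"stack": False, "containment": False, "on_radius": "0.10 deg"},
    "fact":    {"stack": False, "containment": False, "on_radius": "0.17 deg"},
}
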
Code Example #3
def test_geom_analysis_1d():
    cfg = """
    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1
        obs_ids: [23523]
    datasets:
        type: 1d
        background:
            method: reflected
        on_region: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 0.11 deg}
        geom:
            axes:
                energy: {min: 0.1 TeV, max: 30 TeV, nbins: 20}
                energy_true: {min: 0.03 TeV, max: 100 TeV, nbins: 50}
        containment_correction: false
    flux_points:
        energy: {min: 1 TeV, max: 50 TeV, nbins: 4}
    """
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.update_config(cfg)
    analysis.get_observations()
    analysis.get_datasets()

    assert len(analysis.datasets) == 1

    axis = analysis.datasets[0].exposure.geom.axes["energy_true"]
    assert axis.nbin == 50
    assert_allclose(axis.edges[0].to_value("TeV"), 0.03)
    assert_allclose(axis.edges[-1].to_value("TeV"), 100)
Code Example #4
def test_validation_checks():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.settings["observations"]["datastore"] = "other"
    with pytest.raises(FileNotFoundError):
        analysis.get_observations()

    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    assert analysis.get_flux_points() is False
    assert analysis.run_fit() is False
    assert analysis.set_model() is False
    assert analysis.get_datasets() is False

    analysis.get_observations()
    analysis.settings["datasets"]["dataset-type"] = "not assigned"
    assert analysis.get_datasets() is False

    analysis.settings["datasets"]["dataset-type"] = "SpectrumDatasetOnOff"
    analysis.get_observations()
    analysis.get_datasets()
    model_str = Path(MODEL_FILE).read_text()
    analysis.set_model(model=model_str)
    assert isinstance(analysis.model, SkyModels) is True
    assert analysis.set_model() is False

    analysis.run_fit()
    del analysis.settings["flux-points"]
    assert analysis.get_flux_points() is False
Code Example #5
def test_analysis_1d_stacked():
    cfg = """
    datasets:
        geom:
            axes:
                energy_true: {min: 0.03 TeV, max: 100 TeV, nbins: 50}
        background:
            method: reflected
    """

    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.update_config(cfg)
    analysis.config.datasets.stack = True
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(MODEL_FILE_1D)
    analysis.run_fit()

    assert len(analysis.datasets) == 1
    assert_allclose(analysis.datasets["stacked"].counts.data.sum(), 184)
    pars = analysis.fit_result.parameters

    assert_allclose(pars["index"].value, 2.76913, rtol=1e-2)
    assert_allclose(pars["amplitude"].value, 5.496388e-11, rtol=1e-2)
Code Example #6
def test_analysis_3d_no_geom_irf():
    config = AnalysisConfig.from_template("3d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    assert len(analysis.datasets) == 1
Code Example #7
def test_analysis_3d_joint_datasets():
    config = AnalysisConfig.from_template("3d")
    config.settings["datasets"]["stack-datasets"] = False
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 4
Code Example #8
def cli_run_analysis(filename, out, overwrite):
    """Performs automated data reduction process."""
    config = AnalysisConfig.read(filename)
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.datasets.write(out, overwrite=overwrite)
    log.info(f"Datasets stored in {out} folder.")
Code Example #9
def test_analysis_ring_3d():
    config = get_example_config("3d")
    config.datasets.background.method = "ring"
    config.datasets.background.parameters = {"r_in": "0.7 deg", "width": "0.7 deg"}
    analysis = Analysis(config)
    analysis.get_observations()
    with pytest.raises(ValueError):
        analysis.get_datasets()
Code Example #10
def test_analysis_1d():
    cfg = """
    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1
        obs_ids: [23523, 23526]
        obs_time: {
            start: [J2004.92654346, J2004.92658453, J2004.92663655],
            stop: [J2004.92658453, J2004.92663655, J2004.92670773]
        }
    datasets:
        type: 1d
        background:
            method: reflected
        geom:
            axes:
                energy_true: {min: 0.01 TeV, max: 300 TeV, nbins: 109}
        on_region: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 0.11 deg}
        safe_mask:
            methods: [aeff-default, edisp-bias]
            parameters: {bias_percent: 10.0}
        containment_correction: false
    flux_points:
        energy: {min: 1 TeV, max: 50 TeV, nbins: 4}
    light_curve:
        energy_edges: {min: 1 TeV, max: 50 TeV, nbins: 1}
        time_intervals: {
            start: [J2004.92654346, J2004.92658453, J2004.92663655],
            stop: [J2004.92658453, J2004.92663655, J2004.92670773]
        }
    """
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.update_config(cfg)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(MODEL_FILE_1D)
    analysis.run_fit()
    analysis.get_flux_points()
    analysis.get_light_curve()

    assert len(analysis.datasets) == 3
    table = analysis.flux_points.data.to_table(sed_type="dnde")

    assert len(table) == 4
    dnde = table["dnde"].quantity
    assert dnde.unit == "cm-2 s-1 TeV-1"

    assert_allclose(dnde[0].value, 8.116854e-12, rtol=1e-2)
    assert_allclose(dnde[2].value, 3.444475e-14, rtol=1e-2)

    axis = analysis.light_curve.geom.axes["time"]
    assert axis.nbin == 3
    assert_allclose(axis.time_min.mjd, [53343.92, 53343.935, 53343.954])

    flux = analysis.light_curve.flux.data[:, :, 0, 0]
    assert_allclose(flux, [[1.688954e-11], [2.347870e-11], [1.604152e-11]],
                    rtol=1e-4)
Code Example #11
def test_analysis_no_bkg_1d(caplog):
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert isinstance(analysis.datasets[0], SpectrumDatasetOnOff) is False
    assert caplog.records[-1].levelname == "WARNING"
    assert caplog.records[
        -1].message == "No background maker set. Check configuration."
Code Example #12
def main(config_path, models_path, output, reference):
    """Run the configured analysis; write the SED, best-fit models, flux points, and light curve."""
    config = AnalysisConfig.read(config_path)
    analysis = Analysis(config)
    log.info(config)

    analysis.get_observations()
    log.info(analysis)
    log.info(dir(analysis))
    # Datasets only exist after data reduction, so reduce before logging them
    analysis.get_datasets()
    log.info(analysis.datasets)
    log.info(analysis.datasets[0].counts)
    analysis.read_models(models_path)

    # stacked fit and flux estimation
    analysis.run_fit()
    analysis.get_flux_points()

    # Plot flux points
    ax_sed, ax_residuals = analysis.flux_points.plot_fit()
    if reference:
        plot_kwargs = {
            "energy_range": [
                analysis.config.flux_points.energy.min,
                analysis.config.flux_points.energy.max,
            ],
            "energy_power": 2,
            "flux_unit": "erg-1 cm-2 s-1",
        }
        create_crab_spectral_model(reference).plot(
            **plot_kwargs, ax=ax_sed, label="Crab reference"
        )
        ax_sed.legend()
        ax_sed.set_ylim(1e-12, 1e-9)
    

    base_out = Path(output)
    ax_sed.get_figure().savefig(base_out.with_suffix(".pdf").as_posix())
    plt.clf()
    analysis.models.write(base_out.with_suffix(".yaml").as_posix(), overwrite=True)
    analysis.flux_points.write(
        base_out.with_suffix(".fits").as_posix(), overwrite=True
    )
    ax_excess = analysis.datasets["stacked"].plot_excess()
    ax_excess.get_figure().savefig(base_out.with_suffix(".excess.pdf").as_posix())
    plt.clf()
        
    config.datasets.stack = False
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(models_path)
    lc_maker_low = LightCurveEstimator(
        energy_edges=[.2, 5] * u.TeV, source=config.flux_points.source, reoptimize=False
    )
    lc_low = lc_maker_low.run(analysis.datasets)
    ax_lc = lc_low.plot(marker="o", label="1D")
    ax_lc.get_figure().savefig(base_out.with_suffix(".lc.pdf").as_posix())
    plt.clf()
Code Example #13
def test_set_models():
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    models_str = Path(MODEL_FILE).read_text()
    analysis.set_models(models=models_str)
    assert isinstance(analysis.models, Models) is True
    with pytest.raises(TypeError):
        analysis.set_models(0)
Code Example #14
def test_analysis_no_bkg_3d(caplog):
    config = get_example_config("3d")
    config.datasets.background.method = None
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert isinstance(analysis.datasets[0], MapDataset) is True
    assert caplog.records[-1].levelname == "WARNING"
    assert caplog.records[
        -1].message == "No background maker set. Check configuration."
Code Example #15
File: test_analysis.py  Project: karangurtu/gammapy
def test_analysis_ring_background():
    config = get_example_config("3d")
    config.datasets.background.method = "ring"
    config.datasets.background.parameters = {"r_in": "0.7 deg", "width": "0.7 deg"}
    config.datasets.geom.axes.energy.nbins = 1
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert isinstance(analysis.datasets[0], MapDataset)
    assert_allclose(analysis.datasets[0].background_model.map.data[0, 10, 10], 0.091552, rtol=1e-5)
Code Example #16
def test_analysis_no_bkg_1d(caplog):
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert not isinstance(analysis.datasets[0], SpectrumDatasetOnOff)
    assert "WARNING" in [_.levelname for _ in caplog.records]
    assert "No background maker set. Check configuration." in [
        _.message for _ in caplog.records
    ]
Code Example #17
File: test_analysis.py  Project: gfiusa/gammapy
def test_usage_errors():
    config = get_example_config("1d")
    analysis = Analysis(config)
    with pytest.raises(RuntimeError):
        analysis.get_datasets()
    with pytest.raises(RuntimeError):
        analysis.read_models(MODEL_FILE)
    with pytest.raises(RuntimeError):
        analysis.run_fit()
    with pytest.raises(RuntimeError):
        analysis.get_flux_points()
Code Example #18
File: test_analysis.py  Project: mireianievas/gammapy
def test_analysis_no_bkg_3d(caplog):
    config = get_example_config("3d")
    config.datasets.background.method = None
    analysis = Analysis(config)
    with caplog.at_level(logging.WARNING):
        analysis.get_observations()
        analysis.get_datasets()
        assert isinstance(analysis.datasets[0], MapDataset)
        assert "No background maker set. Check configuration." in [
            _.message for _ in caplog.records
        ]
Code Example #19
File: make.py  Project: gammapy/gammapy-data
def run_analysis(estimate):
    """Run analysis from observation selection to model fitting."""
    config = AnalysisConfig.read(f"{estimate}/config.yaml")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    models = Models.read(f"{estimate}/models.yaml")
    analysis.set_models(models)
    analysis.run_fit()
    return analysis
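run_analysis above returns the fitted Analysis object. A possible driver, assuming each per-source folder contains the config.yaml and models.yaml files the function expects; the folder names and output file name are placeholders:

# Hypothetical driver for run_analysis above.
if __name__ == "__main__":
    for estimate in ["crab", "pks2155"]:
        analysis = run_analysis(estimate)
        analysis.models.write(f"{estimate}/fitted-models.yaml", overwrite=True)
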
Code Example #20
def run_3d(name):
    """Run 3D analysis for one source."""
    logging.info(f"run3d: {name}")
    mode = "3d"
    config_file = f"config{mode}.yaml"
    target_config_file = "targets.yaml"
    model_file = f"model{mode}_{name}.yaml"

    outdir = f"results/{name}"

    config = target_config3d(config_file, target_config_file, name)
    analysis = Analysis(config)
    analysis.get_observations()

    conf = config.settings["observations"]["filters"][0]
    nb, lon, lat, rad = (
        len(analysis.observations.ids),
        conf["lon"],
        conf["lat"],
        conf["radius"],
    )
    logging.info(f"{nb} observations found in {rad} around {lon}, {lat} ")

    analysis.get_datasets()

    # Quick-look counts map before fitting
    plt.figure(figsize=(5, 5))
    analysis.datasets["stacked"].counts.sum_over_axes().plot(add_cbar=True)
    plt.savefig(f"{outdir}/{name}_{mode}_counts.png", bbox_inches="tight")

    analysis.set_model(filename=model_file)
    logging.info(analysis.model)
    analysis.run_fit()
    logging.info(analysis.fit_result.parameters.to_table())
    analysis.fit_result.parameters.to_table().write(
        f"{outdir}/{name}_{mode}_bestfit.dat", format="ascii", overwrite=True)

    analysis.get_flux_points(source=name)
    analysis.flux_points.write(f"{outdir}/{name}_{mode}_fluxpoints.fits")

    plt.figure(figsize=(5, 5))
    analysis.datasets["stacked"].counts.sum_over_axes().plot(add_cbar=True)
    plt.savefig(f"{outdir}/{name}_{mode}_counts.png", bbox_inches="tight")

    plt.figure(figsize=(5, 5))
    analysis.datasets["stacked"].plot_residuals(method="diff/sqrt(model)",
                                                vmin=-0.5,
                                                vmax=0.5)
    plt.savefig(f"{outdir}/{name}_{mode}_residuals.png", bbox_inches="tight")

    plt.figure(figsize=(8, 5))
    ax_sed, ax_residuals = analysis.flux_points.peek()
    plt.savefig(f"{outdir}/{name}_{mode}_fluxpoints.png", bbox_inches="tight")
Code Example #21
File: test_analysis.py  Project: vikasj78/gammapy
def test_analysis_3d_joint_datasets():
    config = get_example_config("3d")
    config.datasets.stack = False
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2
    assert_allclose(analysis.datasets[0].background_model.norm.value,
                    1.031743694988066)
    assert_allclose(analysis.datasets[0].background_model.tilt.value, 0.0)
    assert_allclose(analysis.datasets[1].background_model.norm.value,
                    0.9776349021876344)
Code Example #22
def test_analysis_no_bkg():
    config = get_example_config("1d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert isinstance(analysis.datasets[0], SpectrumDatasetOnOff) is False

    config = get_example_config("3d")
    config.datasets.background.method = None
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert isinstance(analysis.datasets[0], MapDataset) is True
Code Example #23
def test_analysis_1d_stacked():
    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    analysis.settings["datasets"]["stack-datasets"] = True
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.run_fit()

    assert len(analysis.datasets) == 1
    assert_allclose(analysis.datasets["stacked"].counts.data.sum(), 404)
    pars = analysis.fit_result.parameters

    assert_allclose(pars["index"].value, 2.689559, rtol=1e-3)
    assert_allclose(pars["amplitude"].value, 2.81629e-11, rtol=1e-3)
Code Example #24
def data_prep():
    """Reduce the MSH 15-52 observations into per-observation 3D datasets (no stacking)."""
    # source_pos = SkyCoord.from_name("MSH 15-52")
    source_pos = SkyCoord(228.32, -59.08, unit="deg")
    config = AnalysisConfig()
    # Select observations - 2.5 degrees from the source position
    config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1/"
    config.observations.obs_cone = {
        "frame": "icrs",
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "radius": 2.5 * u.deg,
    }
    config.datasets.type = "3d"

    # The WCS geometry - centered on MSH 15-52
    config.datasets.geom.wcs.skydir = {
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "frame": "icrs",
    }
    config.datasets.geom.wcs.width = {"width": "3 deg", "height": "3 deg"}
    config.datasets.geom.wcs.binsize = "0.02 deg"

    # The FoV radius to use for cutouts
    config.datasets.geom.selection.offset_max = 3.5 * u.deg

    # We now fix the energy axis for the counts map - (the reconstructed
    # energy binning)
    config.datasets.geom.axes.energy.min = "0.5 TeV"
    config.datasets.geom.axes.energy.max = "5 TeV"
    config.datasets.geom.axes.energy.nbins = 10

    # We need to extract the ring for each observation separately, hence, no
    # stacking at this stage
    config.datasets.stack = False

    # Create the Analysis object from the config
    analysis = Analysis(config)

    # For this specific case, we do not need fine bins in true energy
    analysis.config.datasets.geom.axes.energy_true = (
        analysis.config.datasets.geom.axes.energy)

    # First get the required observations
    analysis.get_observations()

    # Analysis extraction
    analysis.get_datasets()
    return analysis
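data_prep deliberately keeps the datasets unstacked so that a ring background can be estimated per observation. A rough sketch of that follow-up step, loosely based on gammapy's ring-background workflow; the exclusion mask, ring radii, and the stack-onto-the-first-dataset strategy are assumptions, not the original notebook's code:

from gammapy.makers import RingBackgroundMaker

def ring_background(analysis, exclusion_mask, r_in="0.3 deg", width="0.3 deg"):
    """Hypothetical follow-up to data_prep: per-observation ring background, then stack."""
    ring_maker = RingBackgroundMaker(
        r_in=r_in, width=width, exclusion_mask=exclusion_mask
    )
    stacked = None
    for dataset in analysis.datasets:
        # Squash to a single reconstructed-energy bin before the ring estimate
        dataset_on_off = ring_maker.run(dataset.to_image())
        if stacked is None:
            stacked = dataset_on_off
        else:
            stacked.stack(dataset_on_off)
    return stacked
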
Code Example #25
def test_analysis_1d(config_analysis_data):
    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    analysis.config.update_settings(config_analysis_data)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.run_fit()
    analysis.get_flux_points()

    assert len(analysis.datasets) == 2
    assert len(analysis.flux_points.data.table) == 4
    dnde = analysis.flux_points.data.table["dnde"].quantity
    assert dnde.unit == "cm-2 s-1 TeV-1"

    assert_allclose(dnde[0].value, 8.03604e-12, rtol=1e-2)
    assert_allclose(dnde[-1].value, 4.780021e-21, rtol=1e-2)
Code Example #26
def test_set_models():
    config = get_example_config("3d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    models_str = Path(MODEL_FILE).read_text()
    analysis.set_models(models=models_str)
    assert isinstance(analysis.models, DatasetModels)
    assert len(analysis.models) == 2
    assert analysis.models.names == ['source', 'stacked-bkg']
    with pytest.raises(TypeError):
        analysis.set_models(0)

    new_source = analysis.models["source"].copy(name="source2")
    analysis.set_models(models=[new_source], extend=False)
    assert len(analysis.models) == 2
    assert analysis.models.names == ['source2', 'stacked-bkg']
Code Example #27
File: test_analysis.py  Project: gfiusa/gammapy
def test_exclusion_region(tmp_path):
    config = get_example_config("1d")
    analysis = Analysis(config)

    exclusion_region = CircleSkyRegion(center=SkyCoord("85d 23d"),
                                       radius=1 * u.deg)
    exclusion_mask = Map.create(npix=(150, 150),
                                binsz=0.05,
                                skydir=SkyCoord("83d 22d"))
    mask = exclusion_mask.geom.region_mask([exclusion_region], inside=False)
    exclusion_mask.data = mask.astype(int)
    filename = tmp_path / "exclusion.fits"
    exclusion_mask.write(filename)
    config.datasets.background.exclusion = filename

    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 2
Code Example #28
def test_analysis_ring_background():
    config = get_example_config("3d")
    config.datasets.background.method = "ring"
    config.datasets.background.parameters = {
        "r_in": "0.7 deg",
        "width": "0.7 deg"
    }
    config.datasets.geom.axes.energy.nbins = 1
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.get_excess_map()
    assert isinstance(analysis.datasets[0], MapDataset)
    assert_allclose(analysis.datasets[0].npred_background().data[0, 10, 10],
                    0.091799,
                    rtol=1e-2)
    assert isinstance(analysis.excess_map["sqrt_ts"], WcsNDMap)
    assert_allclose(analysis.excess_map["excess"].data[0, 62, 62], 134.12389)
Code Example #29
def test_usage_errors():
    config = get_example_config("1d")
    analysis = Analysis(config)
    with pytest.raises(RuntimeError):
        analysis.get_datasets()
    with pytest.raises(RuntimeError):
        analysis.read_datasets()
    with pytest.raises(RuntimeError):
        analysis.write_datasets()
    with pytest.raises(TypeError):
        analysis.read_models()
    with pytest.raises(RuntimeError):
        analysis.write_models()
    with pytest.raises(RuntimeError):
        analysis.run_fit()
    with pytest.raises(RuntimeError):
        analysis.get_flux_points()
    with pytest.raises(ValidationError):
        analysis.config.datasets.type = "None"
Code Example #30
def analysis_3d_data_reduction(target):
    """Run 3D data reduction, model fitting, and flux-point extraction for one target."""
    log.info(f"analysis_3d_data_reduction: {target}")

    opts = yaml.safe_load(Path("targets.yaml").read_text())[target]

    txt = Path("config_template.yaml").read_text()
    txt = txt.format_map(opts)
    config = AnalysisConfig.from_yaml(txt)

    analysis = Analysis(config)
    analysis.get_observations()
    log.info("Running data reduction")
    analysis.get_datasets()

    # TODO: write datasets and separate fitting to next function
    # Not implemented in Gammapy yet, coming very soon.
    log.info("Running fit ...")
    analysis.read_models(f"{target}/model_3d.yaml")
    log.info(analysis.models)
    analysis.run_fit()
    log.info(analysis.fit_result.parameters.to_table())
    path = f"{target}/{target}_3d_bestfit.rst"
    log.info(f"Writing {path}")
    analysis.fit_result.parameters.to_table().write(path,
                                                    format="ascii.rst",
                                                    overwrite=True)

    #    analysis.get_flux_points(source=f"{target}")
    #    path = f"{target}/{target}_3d_fluxpoints.fits"
    #    log.info(f"Writing {path}")
    #    analysis.flux_points.write(path, overwrite=True)

    analysis.get_flux_points(source=target)
    path = f"{target}/{target}_3d_fluxpoints.ecsv"
    log.info(f"Writing {path}")
    keys = [
        "e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"
    ]
    analysis.flux_points.data.table_formatted[keys].write(path,
                                                          format="ascii.ecsv",
                                                          overwrite=True)

    return analysis  # will write to disk when possible