Example #1
def test_validation_checks():
    config = AnalysisConfig()
    analysis = Analysis(config)
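    # A datastore path that does not exist should raise FileNotFoundError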
    analysis.settings["observations"]["datastore"] = "other"
    with pytest.raises(FileNotFoundError):
        analysis.get_observations()

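    # Workflow methods called before their prerequisites should fail gracefully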
    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    assert analysis.get_flux_points() is False
    assert analysis.run_fit() is False
    assert analysis.set_model() is False
    assert analysis.get_datasets() is False

    analysis.get_observations()
    analysis.settings["datasets"]["dataset-type"] = "not assigned"
    assert analysis.get_datasets() is False

    analysis.settings["datasets"]["dataset-type"] = "SpectrumDatasetOnOff"
    analysis.get_observations()
    analysis.get_datasets()
    model_str = Path(MODEL_FILE).read_text()
    analysis.set_model(model=model_str)
    assert isinstance(analysis.model, SkyModels)
    assert analysis.set_model() is False

    analysis.run_fit()
    del analysis.settings["flux-points"]
    assert analysis.get_flux_points() is False
Example #2
def test_get_observations_obs_ids():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "$GAMMAPY_DATA/cta-1dc/index/gps/"
    analysis.config.observations.obs_ids = ["110380"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
Example #3
def test_get_observations_missing_irf():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "$GAMMAPY_DATA/joint-crab/dl3/magic/"
    analysis.config.observations.obs_ids = ["05029748"]
    analysis.config.observations.required_irf = ["aeff", "edisp"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
Example #4
def test_update_config():
    analysis = Analysis(AnalysisConfig())
    data = {"general": {"outdir": "test"}}
    config = AnalysisConfig(**data)
    analysis.update_config(config)
    assert analysis.config.general.outdir == "test"

    analysis = Analysis(AnalysisConfig())
    data = """
    general:
        outdir: test
    """
    analysis.update_config(data)
    assert analysis.config.general.outdir == "test"

    analysis = Analysis(AnalysisConfig())
    with pytest.raises(TypeError):
        analysis.update_config(0)
Example #5
def test_get_observations_obs_file(tmp_path):
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.get_observations()
    filename = tmp_path / "obs_ids.txt"
    filename.write_text("20136\n47829\n")
    analysis.config.observations.obs_file = filename
    analysis.get_observations()
    assert len(analysis.observations) == 2
Example #6
def test_validate_astropy_quantities():
    config = AnalysisConfig()
    cfg = {
        "observations": {
            "filters": [{
                "filter_type": "all",
                "lon": "1 deg"
            }]
        }
    }
    config.update_settings(cfg)
    assert config.validate() is None
Example #7
def test_get_observations_obs_cone():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1"
    analysis.config.observations.obs_cone = {
        "frame": "icrs",
        "lon": "83d",
        "lat": "22d",
        "radius": "5d",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 4
Example #8
def test_get_observations_obs_time(tmp_path):
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.obs_time = {
        "start": "2004-03-26",
        "stop": "2004-05-26",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 40
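    # Requesting an obs_id that is not in the datastore should raise an error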
    analysis.config.observations.obs_ids = [0]
    with pytest.raises(ValueError):
        analysis.get_observations()
Example #9
def test_config():
    config = AnalysisConfig()
    assert config.settings["general"]["logging"]["level"] == "INFO"
    cfg = {"general": {"outdir": "test"}}
    config.update_settings(cfg)
    assert config.settings["general"]["logging"]["level"] == "INFO"
    assert config.settings["general"]["outdir"] == "test"

    with pytest.raises(ValueError):
        Analysis()

    assert "AnalysisConfig" in str(config)
Example #10
def data_prep():
    # source_pos = SkyCoord.from_name("MSH 15-52")
    source_pos = SkyCoord(228.32, -59.08, unit="deg")
    config = AnalysisConfig()
    # Select observations - 2.5 degrees from the source position
    config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1/"
    config.observations.obs_cone = {
        "frame": "icrs",
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "radius": 2.5 * u.deg,
    }
    config.datasets.type = "3d"
    # The WCS geometry - centered on MSH 15-52
    config.datasets.geom.wcs.skydir = {
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "frame": "icrs",
    }
    config.datasets.geom.wcs.width = {"width": "3 deg", "height": "3 deg"}
    config.datasets.geom.wcs.binsize = "0.02 deg"

    # The FoV radius to use for the cutouts
    config.datasets.geom.selection.offset_max = 3.5 * u.deg

    # We now fix the energy axis for the counts map
    # (the reconstructed energy binning)
    config.datasets.geom.axes.energy.min = "0.5 TeV"
    config.datasets.geom.axes.energy.max = "5 TeV"
    config.datasets.geom.axes.energy.nbins = 10

    # We need to extract the ring for each observation separately, hence, no
    # stacking at this stage
    config.datasets.stack = False

    # Create the analysis instance from the configuration
    analysis = Analysis(config)

    # For this specific case, we do not need fine bins in true energy
    analysis.config.datasets.geom.axes.energy_true = (
        analysis.config.datasets.geom.axes.energy)

    # First, get the required observations
    analysis.get_observations()

    # Data reduction: produce the datasets
    analysis.get_datasets()
    return analysis
Example #11
from gammapy.modeling.models import PointSpatialModel
from gammapy.modeling.models import SkyModel, SkyModels
from gammapy.time import LightCurveEstimator
from gammapy.analysis import Analysis, AnalysisConfig

# ## Analysis configuration
# For the 1D and 3D extraction, we will use the same Crab Nebula configuration
# as in the notebook analysis_1.ipynb, using the high level interface of Gammapy.
#
# From the high level interface, the data reduction for those observations is performed as follows

# ### Building the 3D analysis configuration
#

# In[ ]:

conf_3d = AnalysisConfig()

# #### Definition of the data selection
#
# Here we use the Crab runs from the HESS DL3 data release 1

# In[ ]:

conf_3d.observations.obs_ids = [23523, 23526, 23559, 23592]

# #### Definition of the dataset geometry

# In[ ]:

# We want a 3D analysis
conf_3d.datasets.type = "3d"
Example #12
def test_validate_config():
    config = AnalysisConfig()
    assert config.validate() is None
Example #13
def test_get_observations_no_datastore():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "other"
    with pytest.raises(FileNotFoundError):
        analysis.get_observations()
Example #14
def test_help():
    config = AnalysisConfig()
    assert config.help() is None
Example #15
def run_analysis_3d(target_dict):
    """Run 3D analysis for the selected target"""
    tag = target_dict["tag"]
    name = target_dict["name"]
    log.info(f"running 3d analysis, {tag}")

    path_res = Path(tag + "/results/")

    ra = target_dict["ra"]
    dec = target_dict["dec"]
    e_decorr = target_dict["e_decorr"]

    config_str = f"""
    general:
        logging:
            level: INFO
        outdir: .

    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1/
        filters:
            - filter_type: par_value
              value_param: {name}
              variable: TARGET_NAME

    datasets:
        dataset-type: MapDataset
        stack-datasets: true
        offset-max: 2.5 deg
        geom:
            skydir: [{ra}, {dec}]
            width: [5, 5]
            binsz: 0.02
            coordsys: CEL
            proj: TAN
            axes:
              - name: energy
                hi_bnd: 100
                lo_bnd: 0.1
                nbin: 24
                interp: log
                node_type: edges
                unit: TeV
        energy-axis-true:
            name: energy
            hi_bnd: 100
            lo_bnd: 0.1
            nbin: 72
            interp: log
            node_type: edges
            unit: TeV
    """
    print(config_str)
    config = AnalysisConfig(config_str)

    #  Observation selection
    analysis = Analysis(config)
    analysis.get_observations()

    if DEBUG:
        analysis.observations.list = [analysis.observations.list[0]]

    # Data reduction
    analysis.get_datasets()

    # Set runwise energy threshold. See reference paper, section 5.1.1.
    for dataset in analysis.datasets:
        # energy threshold given by the 10% edisp bias criterion
        e_thr_bias = dataset.edisp.get_bias_energy(0.1)

        # energy at which the background peaks
        background_model = dataset.background_model
        bkg_spectrum = background_model.map.get_spectrum()
        peak = bkg_spectrum.data.max()
        idx = list(bkg_spectrum.data).index(peak)
        e_thr_bkg = bkg_spectrum.energy.center[idx]

        esafe = max(e_thr_bias, e_thr_bkg)
        dataset.mask_fit = dataset.counts.geom.energy_mask(emin=esafe)

    # Model fitting
    spatial_model = target_dict["spatial_model"]
    model_config = f"""
    components:
        - name: {tag}
          type: SkyModel
          spatial:
            type: {spatial_model}
            frame: icrs
            parameters:
            - name: lon_0
              value: {ra}
              unit: deg
            - name: lat_0 
              value: {dec}    
              unit: deg
          spectral:
            type: PowerLawSpectralModel
            parameters:
            - name: amplitude      
              value: 1.0e-12
              unit: cm-2 s-1 TeV-1
            - name: index
              value: 2.0
              unit: ''
            - name: reference
              value: {e_decorr}
              unit: TeV
              frozen: true
    """
    model_npars = 5
    if spatial_model == "DiskSpatialModel":
        model_config = yaml.safe_load(model_config)
        parameters = model_config["components"][0]["spatial"]["parameters"]
        parameters.append(
            {
                "name": "r_0",
                "value": 0.2,
                "unit": "deg",
                "frozen": False
            }
        )
        parameters.append(
            {
                "name": "e",
                "value": 0.8,
                "unit": "",
                "frozen": False
            }
        )
        parameters.append(
            {
                "name": "phi",
                "value": 150,
                "unit": "deg",
                "frozen": False
            }
        )
        parameters.append(
            {
                "name": "edge",
                "value": 0.01,
                "unit": "deg",
                "frozen": True
            }
        )
        model_npars += 4
    analysis.set_model(model=model_config)

    for dataset in analysis.datasets:
        dataset.background_model.norm.frozen = False

    analysis.run_fit()

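    # Restrict the covariance to the source model parameters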
    parameters = analysis.model.parameters
    parameters.covariance = analysis.fit_result.parameters.covariance[
        :model_npars, :model_npars
    ]
    write_fit_summary(parameters, str(path_res / "results-summary-fit-3d.yaml"))

    # Flux points
    # TODO: This is a workaround to re-optimize the bkg in each energy bin. It has to be added to the Analysis class
    datasets = analysis.datasets.copy()
    for dataset in datasets:
        for par in dataset.parameters:
            if par is not dataset.background_model.norm:
                par.frozen = True

    reoptimize = not DEBUG
    fpe = FluxPointsEstimator(
        datasets=datasets, e_edges=FLUXP_EDGES, source=tag, reoptimize=reoptimize
    )

    flux_points = fpe.run()
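    # Mark flux points with TS < 4 as upper limits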
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn"]
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-3d.ecsv", format="ascii.ecsv"
    )
Example #16
def cli_make_config(filename, overwrite):
    """Writes default configuration file."""
    config = AnalysisConfig()
    config.write(filename, overwrite=overwrite)
    log.info(f"Configuration file produced: {filename}")
Example #17
def test_config_to_yaml(tmp_path):
    config = AnalysisConfig()
    config.settings["general"]["outdir"] = tmp_path
    config.to_yaml(overwrite=True)
    text = (tmp_path / config.filename).read_text()
    assert "stack-datasets" in text
Example #18
# In[ ]:

from pathlib import Path
from astropy import units as u
from gammapy.analysis import Analysis, AnalysisConfig
from gammapy.modeling.models import create_crab_spectral_model

# ## Analysis configuration
#
# For configuration of the analysis we use the [YAML](https://en.wikipedia.org/wiki/YAML) data format. YAML is a machine-readable serialisation format that is also friendly for humans to read. In this tutorial we will write the configuration just using Python strings, but of course the file can be created and modified with any text editor of your choice.
#
# Here is what the configuration for our analysis looks like:

# In[ ]:

config = AnalysisConfig()
# The AnalysisConfig gives access to all analysis parameters, from logging settings to the reduced dataset geometry
print(config)

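# Since the configuration is plain YAML, it can also be built from a Python
# string. A minimal sketch, assuming the `AnalysisConfig.from_yaml` constructor
# is available in your Gammapy version:

# In[ ]:

config_yaml = """
observations:
    datastore: $GAMMAPY_DATA/hess-dl3-dr1
"""
config_from_yaml = AnalysisConfig.from_yaml(config_yaml)
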
# ### Setting the data to use

# We want to use Crab runs from the H.E.S.S. DL3-DR1. Here we define the datastore and a cone search for observations pointing within 5 degrees of the Crab nebula.
# Parameters can be set directly or as a Python dict; a dict example is shown at the end of this section.

# In[ ]:

# We define the datastore containing the data
config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1"

# We define the cone search parameters
config.observations.obs_cone.frame = "icrs"
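
# The full cone search can equivalently be given as a Python dict in one go,
# as in the sketch below (the coordinates are the Crab nebula position; the
# 5 deg radius matches the selection described above):

# In[ ]:

config.observations.obs_cone = {
    "frame": "icrs",
    "lon": "83.633 deg",
    "lat": "22.014 deg",
    "radius": "5 deg",
}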
Example #19
def test_get_observations(config_obs):
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.update_settings(config_obs)
    analysis.get_observations()
    assert len(analysis.observations) == config_obs["result"]