Example No. 1
def test_validation_checks():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.settings["observations"]["datastore"] = "other"
    with pytest.raises(FileNotFoundError):
        analysis.get_observations()

    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    assert analysis.get_flux_points() is False
    assert analysis.run_fit() is False
    assert analysis.set_model() is False
    assert analysis.get_datasets() is False

    analysis.get_observations()
    analysis.settings["datasets"]["dataset-type"] = "not assigned"
    assert analysis.get_datasets() is False

    analysis.settings["datasets"]["dataset-type"] = "SpectrumDatasetOnOff"
    analysis.get_observations()
    analysis.get_datasets()
    model_str = Path(MODEL_FILE).read_text()
    analysis.set_model(model=model_str)
    assert isinstance(analysis.model, SkyModels)
    assert analysis.set_model() is False

    analysis.run_fit()
    del analysis.settings["flux-points"]
    assert analysis.get_flux_points() is False
Example No. 2
def test_validate_astropy_quantities():
    config = AnalysisConfig()
    cfg = {
        "observations": {
            "filters": [{
                "filter_type": "all",
                "lon": "1 deg"
            }]
        }
    }
    config.update_settings(cfg)
    assert config.validate() is None
Example No. 3
def test_config():
    config = AnalysisConfig()
    assert config.settings["general"]["logging"]["level"] == "INFO"
    cfg = {"general": {"outdir": "test"}}
    config.update_settings(cfg)
    assert config.settings["general"]["logging"]["level"] == "INFO"
    assert config.settings["general"]["outdir"] == "test"

    with pytest.raises(ValueError):
        Analysis()

    assert "AnalysisConfig" in str(config)
Example No. 4
def data_reduction(instrument):
    log.info(f"data_reduction: {instrument}")
    config = AnalysisConfig.read(f"config.yaml")
    config.observations.datastore = f"$JOINT_CRAB/data/{instrument}"
    config.datasets.stack = instrument_opts[instrument]['stack']
    config.datasets.containment_correction = instrument_opts[instrument][
        'containment']
    config.datasets.on_region.radius = instrument_opts[instrument]['on_radius']

    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    # TODO remove when safe mask can be set on config
    if instrument == "fact":
        from gammapy.datasets import SpectrumDatasetOnOff
        stacked = SpectrumDatasetOnOff.create(
            e_reco=analysis.datasets[0]._energy_axis.edges,
            e_true=analysis.datasets[0]._energy_axis.edges,
            region=None)
        for ds in analysis.datasets:
            ds.mask_safe[:] = True
            stacked.stack(ds)
        analysis.datasets = Datasets([stacked])

    analysis.datasets.write(f"reduced_{instrument}", overwrite=True)
Example No. 5
def test_get_observations_obs_ids():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "$GAMMAPY_DATA/cta-1dc/index/gps/"
    analysis.config.observations.obs_ids = ["110380"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
Example No. 6
def test_analysis_3d_no_geom_irf():
    config = AnalysisConfig.from_template("3d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    assert len(analysis.datasets) == 1
Example No. 7
def test_analysis_3d_joint_datasets():
    config = AnalysisConfig.from_template("3d")
    config.settings["datasets"]["stack-datasets"] = False
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    assert len(analysis.datasets) == 4
Example No. 8
def cli_run_analysis(filename, out, overwrite):
    """Performs automated data reduction process."""
    config = AnalysisConfig.read(filename)
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.datasets.write(out, overwrite=overwrite)
    log.info(f"Datasets stored in {out} folder.")
Example No. 9
def test_get_observations_missing_irf():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "$GAMMAPY_DATA/joint-crab/dl3/magic/"
    analysis.config.observations.obs_ids = ["05029748"]
    analysis.config.observations.required_irf = ["aeff", "edisp"]
    analysis.get_observations()
    assert len(analysis.observations) == 1
Example No. 10
def main(config_path, models_path, output, reference):
    config = AnalysisConfig.read(config_path)
    analysis = Analysis(config)
    log.info(config)

    analysis.get_observations()
    # datasets exist only after data reduction, so reduce before logging them
    analysis.get_datasets()
    log.info(analysis)
    log.info(dir(analysis))
    log.info(analysis.datasets)
    log.info(analysis.datasets[0].counts)
    analysis.read_models(models_path)

    # stacked fit and flux estimation
    analysis.run_fit()
    analysis.get_flux_points()

    # Plot flux points
    ax_sed, ax_residuals = analysis.flux_points.plot_fit()
    if reference:
        plot_kwargs = {
            "energy_range": [
                analysis.config.flux_points.energy.min,
                analysis.config.flux_points.energy.max,
            ],
            "energy_power": 2,
            "flux_unit": "erg-1 cm-2 s-1",
        }
        create_crab_spectral_model(reference).plot(
            **plot_kwargs, ax=ax_sed, label="Crab reference"
        )
        ax_sed.legend()
        ax_sed.set_ylim(1e-12, 1e-9)
    

    base_out = Path(output)
    ax_sed.get_figure().savefig(base_out.with_suffix(".pdf").as_posix())
    plt.clf()
    analysis.models.write(base_out.with_suffix(".yaml").as_posix(), overwrite=True)
    analysis.flux_points.write(
        base_out.with_suffix(".fits").as_posix(), overwrite=True
    )
    ax_excess = analysis.datasets["stacked"].plot_excess()
    ax_excess.get_figure().savefig(base_out.with_suffix(".excess.pdf").as_posix())
    plt.clf()
        
    config.datasets.stack = False
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(models_path)
    lc_maker_low = LightCurveEstimator(
        energy_edges=[.2, 5] * u.TeV, source=config.flux_points.source, reoptimize=False
    )
    lc_low = lc_maker_low.run(analysis.datasets)
    ax_lc = lc_low.plot(marker="o", label="1D")
    ax_lc.get_figure().savefig(base_out.with_suffix(".lc.pdf").as_posix())
    plt.clf()
Example No. 11
def test_update_config():
    analysis = Analysis(AnalysisConfig())
    data = {"general": {"outdir": "test"}}
    config = AnalysisConfig(**data)
    analysis.update_config(config)
    assert analysis.config.general.outdir == "test"

    analysis = Analysis(AnalysisConfig())
    data = """
    general:
        outdir: test
    """
    analysis.update_config(data)
    assert analysis.config.general.outdir == "test"

    analysis = Analysis(AnalysisConfig())
    with pytest.raises(TypeError):
        analysis.update_config(0)
Example No. 12
def test_get_observations_obs_file(tmp_path):
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.get_observations()
    filename = tmp_path / "obs_ids.txt"
    filename.write_text("20136\n47829\n")
    analysis.config.observations.obs_file = filename
    analysis.get_observations()
    assert len(analysis.observations) == 2
Example No. 13
def target_config3d(config_file, target_config_file, tag):
    """Create analyis configuration for out source."""
    targets_config_ = yaml.safe_load(open(target_config_file))
    targets_config = {}
    for conf in targets_config_:  # define tag as key
        targets_config[conf["tag"]] = conf

    config = AnalysisConfig.from_yaml(config_file)
    config_dict = config.settings
    target = targets_config[tag]

    obs_filter = config_dict["observations"]["filters"][0]
    obs_filter["frame"] = target["frame"]
    obs_filter["lon"] = target["lon"]
    obs_filter["lat"] = target["lat"]
    obs_filter["radius"] = target["radius"]
    obs_filter["border"] = target["radius"]

    config_dict["datasets"]["geom"]["skydir"] = [
        float(target["lon"].strip(" deg")),
        float(target["lat"].strip(" deg")),
    ]
    energy_axis = config_dict["datasets"]["geom"]["axes"][0]
    energy_axis["lo_bnd"] = target["emin"]
    energy_axis["hi_bnd"] = target["emax"]
    energy_axis["nbin"] = target["nbin"]

    fp_binning = config_dict["flux-points"]["fp_binning"]
    fp_binning["lo_bnd"] = target["emin"]
    fp_binning["hi_bnd"] = target["emax"]
    fp_binning["nbin"] = target["nbin"]

    config_dict["fit"]["fit_range"]["min"] = str(target["emin"]) + " TeV"
    config_dict["fit"]["fit_range"]["max"] = str(target["emax"]) + " TeV"

    config.update_settings(config=config_dict)

    return config
Example No. 14
def run_analysis(estimate):
    """Run analysis from observation selection to model fitting."""
    config = AnalysisConfig.read(f"{estimate}/config.yaml")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()

    models = Models.read(f"{estimate}/models.yaml")
    analysis.set_models(models)
    analysis.run_fit()
    return analysis
Example No. 15
def test_get_observations_obs_time(tmp_path):
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.obs_time = {
        "start": "2004-03-26",
        "stop": "2004-05-26",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 40
    analysis.config.observations.obs_ids = [0]
    with pytest.raises(ValueError):
        analysis.get_observations()
Example No. 16
def test_get_observations_obs_cone():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1"
    analysis.config.observations.obs_cone = {
        "frame": "icrs",
        "lon": "83d",
        "lat": "22d",
        "radius": "5d",
    }
    analysis.get_observations()
    assert len(analysis.observations) == 4
Example No. 17
def setup_analysis(target):
    log.info(f"analysis_3d_data_reduction: {target}")

    opts = yaml.safe_load(open("targets.yaml"))[target]
    txt = Path("config_template.yaml").read_text()
    print(opts)
    txt = txt.format_map(opts)

    config = AnalysisConfig.from_yaml(txt)
    config.flux_points.source = target
    config.datasets.safe_mask.parameters = {"offset_max": 5 * u.deg}

    return Analysis(config)
Example No. 18
def test_analysis_1d_stacked():
    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    analysis.settings["datasets"]["stack-datasets"] = True
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.run_fit()

    assert len(analysis.datasets) == 1
    assert_allclose(analysis.datasets["stacked"].counts.data.sum(), 404)
    pars = analysis.fit_result.parameters

    assert_allclose(pars["index"].value, 2.689559, rtol=1e-3)
    assert_allclose(pars["amplitude"].value, 2.81629e-11, rtol=1e-3)
Example No. 19
def data_prep():
    # source_pos = SkyCoord.from_name("MSH 15-52")
    source_pos = SkyCoord(228.32, -59.08, unit="deg")
    config = AnalysisConfig()
    # Select observations - 2.5 degrees from the source position
    config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1/"
    config.observations.obs_cone = {
        "frame": "icrs",
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "radius": 2.5 * u.deg,
    }
    config.datasets.type = "3d"
    config.datasets.geom.wcs.skydir = {
        "lon": source_pos.ra,
        "lat": source_pos.dec,
        "frame": "icrs",
    }
    # The WCS geometry - centered on MSH 15-52
    config.datasets.geom.wcs.width = {"width": "3 deg", "height": "3 deg"}

    # The FoV radius to use for cutouts
    config.datasets.geom.wcs.binsize = "0.02 deg"
    config.datasets.geom.selection.offset_max = 3.5 * u.deg

    # We now fix the energy axis for the counts map - (the reconstructed
    # energy binning)
    config.datasets.geom.axes.energy.min = "0.5 TeV"
    config.datasets.geom.axes.energy.max = "5 TeV"
    config.datasets.geom.axes.energy.nbins = 10

    # We need to extract the ring for each observation separately, hence, no
    # stacking at this stage
    config.datasets.stack = False

    # create the config
    analysis = Analysis(config)

    # for this specific case, we do not need fine bins in true energy
    analysis.config.datasets.geom.axes.energy_true = (
        analysis.config.datasets.geom.axes.energy)

    # First get the required observations
    analysis.get_observations()

    # Analysis extraction
    analysis.get_datasets()
    return analysis
Example No. 20
def test_analysis_1d(config_analysis_data):
    config = AnalysisConfig.from_template("1d")
    analysis = Analysis(config)
    analysis.config.update_settings(config_analysis_data)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.run_fit()
    analysis.get_flux_points()

    assert len(analysis.datasets) == 2
    assert len(analysis.flux_points.data.table) == 4
    dnde = analysis.flux_points.data.table["dnde"].quantity
    assert dnde.unit == "cm-2 s-1 TeV-1"

    assert_allclose(dnde[0].value, 8.03604e-12, rtol=1e-2)
    assert_allclose(dnde[-1].value, 4.780021e-21, rtol=1e-2)
Example No. 21
def analysis_3d_data_reduction(target):
    log.info(f"analysis_3d_data_reduction: {target}")

    opts = yaml.safe_load(open("targets.yaml"))[target]

    txt = Path("config_template.yaml").read_text()
    txt = txt.format_map(opts)
    config = AnalysisConfig.from_yaml(txt)

    analysis = Analysis(config)
    analysis.get_observations()
    log.info("Running data reduction")
    analysis.get_datasets()

    # TODO: write datasets and separate fitting to next function
    # Not implemented in Gammapy yet, coming very soon.
    log.info("Running fit ...")
    analysis.read_models(f"{target}/model_3d.yaml")
    logging.info(analysis.models)
    analysis.run_fit()
    logging.info(analysis.fit_result.parameters.to_table())
    path = f"{target}/{target}_3d_bestfit.rst"
    log.info(f"Writing {path}")
    analysis.fit_result.parameters.to_table().write(path,
                                                    format="ascii.rst",
                                                    overwrite=True)

    #    analysis.get_flux_points(source=f"{target}")
    #    path = f"{target}/{target}_3d_fluxpoints.fits"
    #    log.info(f"Writing {path}")
    #    analysis.flux_points.write(path, overwrite=True)

    analysis.get_flux_points(source=f"{target}")
    path = f"{target}/{target}_3d_fluxpoints.ecsv"
    log.info(f"Writing {path}")
    keys = [
        "e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn", "is_ul"
    ]
    analysis.flux_points.data.table_formatted[keys].write(path,
                                                          format="ascii.ecsv",
                                                          overwrite=True)

    return analysis  # will write to disk when possible
Example No. 22
def run(self):
    raw = ""
    section = self.arguments[0]
    doc = AnalysisConfig._get_doc_sections()
    for keyword in doc.keys():
        if section == "" or section == keyword:
            raw += doc[keyword]
    include_lines = raw.splitlines()
    codeblock = CodeBlock(
        self.name,
        [],
        self.options,
        include_lines,  # content
        self.lineno,
        self.content_offset,
        self.block_text,
        self.state,
        self.state_machine,
    )
    return codeblock.run()
Example No. 23
def test_analysis_3d():
    config = AnalysisConfig.from_template("3d")
    analysis = Analysis(config)
    analysis.get_observations()
    analysis.get_datasets()
    analysis.set_model(filename=MODEL_FILE)
    analysis.datasets["stacked"].background_model.tilt.frozen = False
    analysis.run_fit()
    analysis.get_flux_points()

    assert len(analysis.datasets) == 1
    assert len(analysis.fit_result.parameters) == 8
    res = analysis.fit_result.parameters
    assert res[3].unit == "cm-2 s-1 TeV-1"
    assert len(analysis.flux_points.data.table) == 2
    dnde = analysis.flux_points.data.table["dnde"].quantity

    assert_allclose(dnde[0].value, 1.182768e-11, rtol=1e-1)
    assert_allclose(dnde[-1].value, 4.051367e-13, rtol=1e-1)
    assert_allclose(res["index"].value, 2.76607, rtol=1e-1)
    assert_allclose(res["tilt"].value, -0.143204, rtol=1e-1)
Example No. 24
def data_reduction(instrument):
    log.info(f"data_reduction: {instrument}")
    config = AnalysisConfig.read(f"config.yaml")
    config.observations.datastore = str(Path().resolve().parent / "data" /
                                        "joint-crab" / instrument)
    config.datasets.stack = instrument_opts[instrument]["stack"]
    config.datasets.containment_correction = instrument_opts[instrument][
        "containment"]
    config.datasets.on_region.radius = instrument_opts[instrument]["on_radius"]

    if instrument == "fact":
        config.datasets.safe_mask.methods = ["aeff-default"]

    analysis = Analysis(config)
    analysis.get_observations()

    analysis.get_datasets()
    if instrument == "fact":
        counts = analysis.datasets[0].counts
        data = counts.geom.energy_mask(emin=0.4 * u.TeV)
        analysis.datasets[0].mask_safe = counts.copy(data=data)

    analysis.datasets.write(f"reduced_{instrument}", overwrite=True)
Example No. 25
def test_analysis_1d_stacked_no_fit_range():
    cfg = """
    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1
        obs_cone: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 5 deg}
        obs_ids: [23592, 23559]

    datasets:
        type: 1d
        stack: false
        geom:
            axes:
                energy: {min: 0.01 TeV, max: 100 TeV, nbins: 73}
                energy_true: {min: 0.03 TeV, max: 100 TeV, nbins: 50}
        on_region: {frame: icrs, lon: 83.633 deg, lat: 22.014 deg, radius: 0.1 deg}
        containment_correction: true
        background:
            method: reflected
    """
    config = AnalysisConfig.from_yaml(cfg)
    analysis = Analysis(config)
    analysis.update_config(cfg)
    analysis.config.datasets.stack = True
    analysis.get_observations()
    analysis.get_datasets()
    analysis.read_models(MODEL_FILE_1D)
    analysis.run_fit()
    with pytest.raises(ValueError):
        analysis.get_excess_map()

    assert len(analysis.datasets) == 1
    assert_allclose(analysis.datasets["stacked"].counts.data.sum(), 184)
    pars = analysis.models.parameters
    assert_allclose(analysis.datasets[0].mask_fit.data, True)

    assert_allclose(pars["index"].value, 2.76913, rtol=1e-2)
    assert_allclose(pars["amplitude"].value, 5.479729e-11, rtol=1e-2)
Example No. 26
# In[ ]:

from pathlib import Path
from astropy import units as u
from gammapy.analysis import Analysis, AnalysisConfig
from gammapy.modeling.models import create_crab_spectral_model

# ## Analysis configuration
#
# For configuration of the analysis we use the [YAML](https://en.wikipedia.org/wiki/YAML) data format. YAML is a machine-readable serialisation format that is also easy for humans to read. In this tutorial we will write the configuration just using Python strings, but of course the file can be created and modified with any text editor of your choice.
#
# Here is what the configuration for our analysis looks like:

# In[ ]:

config = AnalysisConfig()
# the AnalysisConfig gives access to all analysis parameters, from logging to reduced dataset geometries
print(config)
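
# As an aside, a configuration can also be created from a YAML string with `AnalysisConfig.from_yaml`, as Example No. 25 does; a minimal sketch (the variable name and the settings below are illustrative assumptions, reusing values from these examples):

# In[ ]:

yaml_config = AnalysisConfig.from_yaml("""
observations:
    datastore: $GAMMAPY_DATA/hess-dl3-dr1
datasets:
    type: 1d
""")
print(yaml_config)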

# ### Setting the data to use

# We want to use Crab runs from the H.E.S.S. DL3-DR1. Here we define the datastore and a cone search for observations pointing within 5 degrees of the Crab nebula.
# Parameters can be set directly or as a python dict; both styles are shown in the cells below.

# In[ ]:

# We define the datastore containing the data
config.observations.datastore = "$GAMMAPY_DATA/hess-dl3-dr1"

# We define the cone search parameters
config.observations.obs_cone.frame = "icrs"
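# The remaining cone-search values below are a sketch, reusing the Crab numbers from Example No. 25
config.observations.obs_cone.lon = "83.633 deg"
config.observations.obs_cone.lat = "22.014 deg"
config.observations.obs_cone.radius = "5 deg"

# In[ ]:

# Equivalently, the whole cone search can be assigned at once as a python dict
config.observations.obs_cone = {
    "frame": "icrs",
    "lon": "83.633 deg",
    "lat": "22.014 deg",
    "radius": "5 deg",
}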
Example No. 27
def test_get_observations_no_datastore():
    config = AnalysisConfig()
    analysis = Analysis(config)
    analysis.config.observations.datastore = "other"
    with pytest.raises(FileNotFoundError):
        analysis.get_observations()
Example No. 28
def get_example_config(which):
    """Example config: which can be 1d or 3d."""
    return AnalysisConfig.read(CONFIG_PATH / f"example-{which}.yaml")
Example No. 29
def run_analysis_3d(target_dict):
    """Run 3D analysis for the selected target"""
    tag = target_dict["tag"]
    name = target_dict["name"]
    log.info(f"running 3d analysis, {tag}")

    path_res = Path(tag + "/results/")

    ra = target_dict["ra"]
    dec = target_dict["dec"]
    e_decorr = target_dict["e_decorr"]

    config_str = f"""
    general:
        logging:
            level: INFO
        outdir: .

    observations:
        datastore: $GAMMAPY_DATA/hess-dl3-dr1/
        filters:
            - filter_type: par_value
              value_param: {name}
              variable: TARGET_NAME

    datasets:
        dataset-type: MapDataset
        stack-datasets: true
        offset-max: 2.5 deg
        geom:
            skydir: [{ra}, {dec}]
            width: [5, 5]
            binsz: 0.02
            coordsys: CEL
            proj: TAN
            axes:
              - name: energy
                hi_bnd: 100
                lo_bnd: 0.1
                nbin: 24
                interp: log
                node_type: edges
                unit: TeV
        energy-axis-true:
            name: energy
            hi_bnd: 100
            lo_bnd: 0.1
            nbin: 72
            interp: log
            node_type: edges
            unit: TeV
    """
    print(config_str)
    config = AnalysisConfig.from_yaml(config_str)

    #  Observation selection
    analysis = Analysis(config)
    analysis.get_observations()

    if DEBUG:
        analysis.observations.list = [analysis.observations.list[0]]

    # Data reduction
    analysis.get_datasets()

    # Set runwise energy threshold. See reference paper, section 5.1.1.
    for dataset in analysis.datasets:
        # energy threshold given by the 10% edisp criterion
        e_thr_bias = dataset.edisp.get_bias_energy(0.1)

        # energy at which the background peaks
        background_model = dataset.background_model
        bkg_spectrum = background_model.map.get_spectrum()
        peak = bkg_spectrum.data.max()
        idx = list(bkg_spectrum.data).index(peak)
        e_thr_bkg = bkg_spectrum.energy.center[idx]

        esafe = max(e_thr_bias, e_thr_bkg)
        dataset.mask_fit = dataset.counts.geom.energy_mask(emin=esafe)

    # Model fitting
    spatial_model = target_dict["spatial_model"]
    model_config = f"""
    components:
        - name: {tag}
          type: SkyModel
          spatial:
            type: {spatial_model}
            frame: icrs
            parameters:
            - name: lon_0
              value: {ra}
              unit: deg
            - name: lat_0
              value: {dec}
              unit: deg
          spectral:
            type: PowerLawSpectralModel
            parameters:
            - name: amplitude      
              value: 1.0e-12
              unit: cm-2 s-1 TeV-1
            - name: index
              value: 2.0
              unit: ''
            - name: reference
              value: {e_decorr}
              unit: TeV
              frozen: true
    """
    model_npars = 5
    if spatial_model == "DiskSpatialModel":
        model_config = yaml.safe_load(model_config)
        parameters = model_config["components"][0]["spatial"]["parameters"]
        parameters.append(
            {
                "name": "r_0",
                "value": 0.2,
                "unit": "deg",
                "frozen": False
            }
        )
        parameters.append(
            {
                "name": "e",
                "value": 0.8,
                "unit": "",
                "frozen": False
            }
        )
        parameters.append(
            {
                "name": "phi",
                "value": 150,
                "unit": "deg",
                "frozen": False
            }
        )
        parameters.append(
            {
                "name": "edge",
                "value": 0.01,
                "unit": "deg",
                "frozen": True
            }
        )
        model_npars += 4
    analysis.set_model(model=model_config)

    for dataset in analysis.datasets:
        dataset.background_model.norm.frozen = False

    analysis.run_fit()

    parameters = analysis.model.parameters
    parameters.covariance = analysis.fit_result.parameters.covariance[0:model_npars, 0:model_npars]
    write_fit_summary(parameters, str(path_res / "results-summary-fit-3d.yaml"))

    # Flux points
    # TODO: This is a workaround to re-optimize the bkg in each energy bin. This has to be added to the Analysis class
    datasets = analysis.datasets.copy()
    for dataset in datasets:
        for par in dataset.parameters:
            if par is not dataset.background_model.norm:
                par.frozen = True

    reoptimize = not DEBUG
    fpe = FluxPointsEstimator(
        datasets=datasets, e_edges=FLUXP_EDGES, source=tag, reoptimize=reoptimize
    )

    flux_points = fpe.run()
    flux_points.table["is_ul"] = flux_points.table["ts"] < 4
    keys = ["e_ref", "e_min", "e_max", "dnde", "dnde_errp", "dnde_errn"]
    flux_points.table_formatted[keys].write(
        path_res / "flux-points-3d.ecsv", format="ascii.ecsv"
    )
Example No. 30
def cli_make_config(filename, overwrite):
    """Writes default configuration file."""
    config = AnalysisConfig()
    config.write(filename, overwrite=overwrite)
    log.info(f"Configuration file produced: {filename}")