Example No. 1
    def read(cls, filedata, filemodel):
        """De-serialize datasets from YAML and FITS files.

        Parameters
        ----------
        filedata : str
            filepath to yaml datasets file
        filemodel : str
            filepath to yaml models file

        Returns
        -------
        dataset : `gammapy.modeling.Datasets`
            Datasets
        """
        from . import DATASETS

        models = Models.read(make_path(filemodel))
        data_list = read_yaml(make_path(filedata))

        datasets = []
        for data in data_list["datasets"]:
            dataset = DATASETS.get_cls(data["type"]).from_dict(data, models)
            datasets.append(dataset)
        return cls(datasets)
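
A minimal usage sketch for this classmethod; the file names are hypothetical and the import path follows the docstring above:

from gammapy.modeling import Datasets

# Hypothetical paths: both YAML files must already exist on disk.
datasets = Datasets.read(filedata="datasets.yaml", filemodel="models.yaml")
print(datasets)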
Example No. 2
    def from_yaml(cls, filename):
        """Write to YAML file."""
        from gammapy.modeling.serialize import dict_to_models

        data = read_yaml(filename)
        skymodels = dict_to_models(data)
        return cls(skymodels)
Example No. 3
def run_test_fitsexport(directory):
    """Run example analysis to test a fits data production

    hap-data-fits-export crab has to be run in order to produce the example data
    """
    log.info('Running test analysis of fits data')
    from gammapy.data import DataStore
    from gammapy.datasets import gammapy_extra
    from gammapy.utils.scripts import read_yaml
    from gammapy.spectrum.spectrum_pipe import run_spectrum_analysis_using_config
    from gammapy.spectrum.results import SpectrumResult

    s = DataStore.from_dir(directory)
    print(s.info())
    configfile = gammapy_extra.filename(
        'test_datasets/spectrum/spectrum_analysis_example.yaml')
    config = read_yaml(configfile)
    config['extraction']['data']['datastore'] = directory
    config['extraction']['data']['runlist'] = [23523, 23526, 23559, 23592]

    fit, analysis = run_spectrum_analysis_using_config(config)
    res = SpectrumResult(
        fit=fit.result,
        stats=analysis.observations.total_spectrum.spectrum_stats)
    print(res.to_table())
Example No. 4
 def validate(self):
     """Validate and/or fill initial config parameters against schema."""
     validator = _gp_units_validator
     try:
         jsonschema.validate(self.settings, read_yaml(SCHEMA_FILE), validator)
     except jsonschema.exceptions.ValidationError as ex:
         log.error("Error when validating configuration parameters against schema.")
         log.error(ex.message)
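
For reference, a self-contained sketch of the underlying jsonschema call, with toy stand-ins for read_yaml(SCHEMA_FILE) and self.settings:

import jsonschema

# Toy schema and settings in place of the real SCHEMA_FILE contents.
schema = {"type": "object", "properties": {"outdir": {"type": "string"}}}
settings = {"outdir": "results"}

try:
    jsonschema.validate(settings, schema)  # raises ValidationError on mismatch
except jsonschema.exceptions.ValidationError as ex:
    print(ex.message)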
Example No. 5
def test_absorption_io(tmp_path):
    dominguez = Absorption.read_builtin("dominguez")
    model = AbsorbedSpectralModel(
        spectral_model=Model.create("PowerLawSpectralModel"),
        absorption=dominguez,
        parameter=0.5,
        parameter_name="redshift",
    )
    assert len(model.parameters) == 5

    model_dict = model.to_dict()
    parnames = [_["name"] for _ in model_dict["parameters"]]
    assert parnames == ["redshift", "alpha_norm"]

    new_model = AbsorbedSpectralModel.from_dict(model_dict)

    assert new_model.parameter == 0.5
    assert new_model.parameter_name == "redshift"
    assert new_model.alpha_norm.name == "alpha_norm"
    assert new_model.alpha_norm.value == 1
    assert new_model.spectral_model.tag == "PowerLawSpectralModel"
    assert_allclose(new_model.absorption.energy, dominguez.energy)
    assert_allclose(new_model.absorption.param, dominguez.param)
    assert len(new_model.parameters) == 5

    test_absorption = Absorption(
        u.Quantity(range(3), "keV"),
        u.Quantity(range(2), ""),
        u.Quantity(np.ones((2, 3)), ""),
    )
    model = AbsorbedSpectralModel(
        spectral_model=Model.create("PowerLawSpectralModel"),
        absorption=test_absorption,
        parameter=0.5,
        parameter_name="redshift",
    )
    model_dict = model.to_dict()
    new_model = AbsorbedSpectralModel.from_dict(model_dict)

    assert_allclose(new_model.absorption.energy, test_absorption.energy)
    assert_allclose(new_model.absorption.param, test_absorption.param)

    write_yaml(model_dict, tmp_path / "tmp.yaml")
    read_yaml(tmp_path / "tmp.yaml")
Example No. 6
    def read(cls,
             path,
             filedata="_datasets.yaml",
             filemodel="_models.yaml",
             lazy=True,
             cache=True):
        """De-serialize datasets from YAML and FITS files.

        Parameters
        ----------
        path : str, Path
            Base directory of the datasets files.
        filedata : str
            file path or name of yaml datasets file
        filemodel : str
            file path or name of yaml models file
        lazy : bool
            Whether to lazy-load the data into memory.
        cache : bool
            Whether to cache the data after loading.

        Returns
        -------
        dataset : `gammapy.datasets.Datasets`
            Datasets
        """
        from . import DATASET_REGISTRY

        path = make_path(path)

        if (path / filedata).exists():
            filedata = path / filedata
        else:
            filedata = make_path(filedata)
        if (path / filemodel).exists():
            filemodel = path / filemodel
        else:
            filemodel = make_path(filemodel)

        models = Models.read(filemodel)
        data_list = read_yaml(filedata)

        datasets = []
        for data in data_list["datasets"]:
            if (path / data["filename"]).exists():
                data["filename"] = str(make_path(path / data["filename"]))

            dataset_cls = DATASET_REGISTRY.get_cls(data["type"])
            dataset = dataset_cls.from_dict(data,
                                            models,
                                            lazy=lazy,
                                            cache=cache)
            datasets.append(dataset)
        return cls(datasets)
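
A usage sketch for this signature, assuming a directory (hypothetically "analysis/") that contains the two YAML files written by a previous serialization:

from gammapy.datasets import Datasets

# "analysis/" is a hypothetical directory holding _datasets.yaml and _models.yaml.
datasets = Datasets.read("analysis/", lazy=False, cache=True)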
Example No. 7
 def update_settings(self, config=None, filename=""):
     """Update settings with config dictionary or values in configfile"""
     if filename:
         filepath = make_path(filename)
         config = read_yaml(filepath)
     if config is None:
         config = {}
     if isinstance(config, str):
         config = yaml.safe_load(config)
     if len(config):
         self._update_settings(config, self.settings)
     self.validate()
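
A hypothetical call sequence, assuming analysis is an instance of the class that defines this method:

# Merge an in-memory dict, then apply overrides from a (hypothetical) file.
analysis.update_settings(config={"general": {"outdir": "results"}})
analysis.update_settings(filename="my_config.yaml")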
Example No. 8
    def read(cls, filedata, filemodel):
        """De-serialize datasets from YAML and FITS files.

        Parameters
        ----------
        filedata : str
            filepath to yaml datasets file
        filemodel : str
            filepath to yaml models file

        Returns
        -------
        dataset : `gammapy.modeling.Datasets`
            Datasets
        """
        from .io import dict_to_datasets

        components = read_yaml(make_path(filemodel))
        data_list = read_yaml(make_path(filedata))
        datasets = dict_to_datasets(data_list, components)
        return cls(datasets)
Example No. 9
def test_absorption_io(tmp_path):
    dominguez = EBLAbsorptionNormSpectralModel.read_builtin("dominguez",
                                                            redshift=0.5)
    assert len(dominguez.parameters) == 2

    model_dict = dominguez.to_dict()
    parnames = [_["name"] for _ in model_dict["parameters"]]
    assert parnames == [
        "alpha_norm",
        "redshift",
    ]

    new_model = EBLAbsorptionNormSpectralModel.from_dict(model_dict)

    assert new_model.redshift.value == 0.5
    assert new_model.alpha_norm.name == "alpha_norm"
    assert new_model.alpha_norm.value == 1
    assert_allclose(new_model.energy, dominguez.energy)
    assert_allclose(new_model.param, dominguez.param)
    assert len(new_model.parameters) == 2

    model = EBLAbsorptionNormSpectralModel(
        u.Quantity(range(3), "keV"),
        u.Quantity(range(2), ""),
        u.Quantity(np.ones((2, 3)), ""),
        redshift=0.5,
        alpha_norm=1,
    )
    model_dict = model.to_dict()
    new_model = EBLAbsorptionNormSpectralModel.from_dict(model_dict)

    assert_allclose(new_model.energy, model.energy)
    assert_allclose(new_model.param, model.param)
    assert_allclose(new_model.data, model.data)

    write_yaml(model_dict, tmp_path / "tmp.yaml")
    read_yaml(tmp_path / "tmp.yaml")
Example No. 10
def notebook_role(name, rawtext, notebook, lineno, inliner, options={}, content=[]):
    """Link to a notebook on gammapy-extra"""
    if HAS_GP_EXTRA:
        available_notebooks = read_yaml('$GAMMAPY_EXTRA/notebooks/notebooks.yaml')
        exists = notebook in [_['name'] for _ in available_notebooks]
    else:
        exists = True

    if not exists:
        msg = inliner.reporter.error('Unknown notebook {}'.format(notebook),
                                     line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]
    else:
        app = inliner.document.settings.env.app
        node = make_link_node(rawtext, app, notebook, options)
        return [node], []
Example No. 11
    def read(cls, filename, filename_models=None, lazy=True, cache=True):
        """De-serialize datasets from YAML and FITS files.

        Parameters
        ----------
        filename : str or `Path`
            File path or name of datasets yaml file
        filename_models : str or `Path`
            File path or name of models yaml file
        lazy : bool
            Whether to lazy load data into memory
        cache : bool
            Whether to cache the data after loading.

        Returns
        -------
        dataset : `gammapy.datasets.Datasets`
            Datasets
        """
        from . import DATASET_REGISTRY

        filename = make_path(filename)
        data_list = read_yaml(filename)

        datasets = []
        for data in data_list["datasets"]:
            path = filename.parent

            if (path / data["filename"]).exists():
                data["filename"] = str(make_path(path / data["filename"]))

            dataset_cls = DATASET_REGISTRY.get_cls(data["type"])
            dataset = dataset_cls.from_dict(data, lazy=lazy, cache=cache)
            datasets.append(dataset)

        datasets = cls(datasets)

        if filename_models:
            datasets.models = Models.read(filename_models)

        return datasets
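
Usage sketch for this later signature, where the models file is optional and read separately; both paths are hypothetical:

from gammapy.datasets import Datasets

datasets = Datasets.read("datasets.yaml", filename_models="models.yaml", lazy=True)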
Example No. 12
    def read(cls, path, filedata="_datasets.yaml", filemodel="_models.yaml"):
        """De-serialize datasets from YAML and FITS files.

        Parameters
        ----------
        path : str, Path
            Base directory of the datasets files.
        filedata : str
            file path or name of yaml datasets file
        filemodel : str
            file path or name of yaml models file

        Returns
        -------
        dataset : `gammapy.modeling.Datasets`
            Datasets
        """
        from . import DATASET_REGISTRY

        path = make_path(path)

        if (path / filedata).exists():
            filedata = path / filedata
        else:
            filedata = make_path(filedata)
        if (path / filemodel).exists():
            filemodel = path / filemodel
        else:
            filemodel = make_path(filemodel)

        models = Models.read(filemodel)
        data_list = read_yaml(filedata)

        datasets = []
        for data in data_list["datasets"]:
            if (path / data["filename"]).exists():
                data["filename"] = str(make_path(path / data["filename"]))
            dataset = DATASET_REGISTRY.get_cls(data["type"]).from_dict(data, models)
            datasets.append(dataset)
        return cls(datasets)
Example No. 13
def run_test_fitsexport(directory):
    """Run example analysis to test a fits data production

    hap-data-fits-export crab has to be run in order to produce the example data
    """
    log.info('Running test analysis of fits data')
    from gammapy.data import DataStore
    from gammapy.datasets import gammapy_extra
    from gammapy.utils.scripts import read_yaml
    from gammapy.spectrum.spectrum_pipe import run_spectrum_analysis_using_config
    from gammapy.spectrum.results import SpectrumResult

    s = DataStore.from_dir(directory)
    print(s.info())
    configfile = gammapy_extra.filename(
        'test_datasets/spectrum/spectrum_analysis_example.yaml')
    config = read_yaml(configfile)
    config['extraction']['data']['datastore'] = directory
    config['extraction']['data']['runlist'] = [23523, 23526, 23559, 23592]

    fit, analysis = run_spectrum_analysis_using_config(config)
    res = SpectrumResult(fit=fit.result, stats=analysis.observations.total_spectrum.spectrum_stats)
    print(res.to_table())
Example No. 14
def models():
    filename = get_pkg_data_filename("data/examples.yaml")
    models_data = read_yaml(filename)
    models = Models.from_dict(models_data)
    return models
Example No. 15
 def from_yaml(cls, filename):
     """Read config from filename"""
     filename = make_path(filename)
     config = read_yaml(filename)
     return cls(config, filename=filename)
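
Assuming the enclosing class is a config class such as AnalysisConfig (compare example No. 21 below), usage would look like:

config = AnalysisConfig.from_yaml("config.yaml")  # hypothetical file path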
Example No. 16
def test_dict_to_skymodels():
    filename = get_pkg_data_filename("data/examples.yaml")
    models_data = read_yaml(filename)

    models = dict_to_models(models_data)

    assert len(models) == 3

    model0 = models[0]
    assert model0.spectral_model.tag == "ExpCutoffPowerLawSpectralModel"
    assert model0.spatial_model.tag == "PointSpatialModel"

    pars0 = model0.parameters
    assert pars0["index"].value == 2.1
    assert pars0["index"].unit == ""
    assert np.isnan(pars0["index"].max)
    assert np.isnan(pars0["index"].min)
    assert pars0["index"].frozen is False

    assert pars0["lon_0"].value == -50.0
    assert pars0["lon_0"].unit == "deg"
    assert pars0["lon_0"].max == 180.0
    assert pars0["lon_0"].min == -180.0
    assert pars0["lon_0"].frozen is True

    assert pars0["lat_0"].value == -0.05
    assert pars0["lat_0"].unit == "deg"
    assert pars0["lat_0"].max == 90.0
    assert pars0["lat_0"].min == -90.0
    assert pars0["lat_0"].frozen is True

    assert pars0["lambda_"].value == 0.06
    assert pars0["lambda_"].unit == "TeV-1"
    assert np.isnan(pars0["lambda_"].min)
    assert np.isnan(pars0["lambda_"].max)

    model1 = models[1]
    assert model1.spectral_model.tag == "PowerLawSpectralModel"
    assert model1.spatial_model.tag == "DiskSpatialModel"

    pars1 = model1.parameters
    assert pars1["index"].value == 2.2
    assert pars1["index"].unit == ""
    assert pars1["lat_0"].scale == 1.0
    assert pars1["lat_0"].factor == pars1["lat_0"].value

    assert np.isnan(pars1["index"].max)
    assert np.isnan(pars1["index"].min)

    assert pars1["r_0"].unit == "deg"

    model2 = models[2]
    assert_allclose(model2.spectral_model.energy.data,
                    [34.171, 44.333, 57.517])
    assert model2.spectral_model.energy.unit == "MeV"
    assert_allclose(model2.spectral_model.values.data,
                    [2.52894e-06, 1.2486e-06, 6.14648e-06])
    assert model2.spectral_model.values.unit == "1 / (cm2 MeV s sr)"

    assert model2.spectral_model.tag == "TemplateSpectralModel"
    assert model2.spatial_model.tag == "TemplateSpatialModel"

    assert model2.spatial_model.parameters["norm"].value == 1.0
    assert model2.spatial_model.normalize is False
    assert model2.spectral_model.parameters["norm"].value == 2.1
Example No. 17
def test_spectrum_pipe(tmpdir):
    configfile = gammapy_extra.filename('test_datasets/spectrum/spectrum_pipe_example.yaml')
    config = read_yaml(configfile)
    config['base_config']['general']['outdir'] = str(tmpdir)
    pipe = SpectrumPipe.from_config(config, auto_outdir=False)
    pipe.run()
Example No. 18
 def from_yaml(self):
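     """Read and return the config dictionary from CONFIG_FILE."""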
     config = read_yaml(CONFIG_FILE)
     return config
Example No. 19
 def validate(self):
     """Validate config parameters against schema."""
     schema = read_yaml(SCHEMA_FILE)
     jsonschema.validate(self.settings, schema, _gp_validator)
Example No. 20
def test_dict_to_skymodels():
    filename = get_pkg_data_filename("data/examples.yaml")
    models_data = read_yaml(filename)
    models = Models.from_dict(models_data)

    assert len(models) == 5

    model0 = models[0]
    assert isinstance(model0, BackgroundModel)
    assert model0.name == "background_irf"

    model0 = models[1]
    assert "ExpCutoffPowerLawSpectralModel" in model0.spectral_model.tag
    assert "PointSpatialModel" in model0.spatial_model.tag

    pars0 = model0.parameters
    assert pars0["index"].value == 2.1
    assert pars0["index"].unit == ""
    assert np.isnan(pars0["index"].max)
    assert np.isnan(pars0["index"].min)
    assert not pars0["index"].frozen

    assert pars0["lon_0"].value == -0.5
    assert pars0["lon_0"].unit == "deg"
    assert pars0["lon_0"].max == 180.0
    assert pars0["lon_0"].min == -180.0
    assert pars0["lon_0"].frozen

    assert pars0["lat_0"].value == -0.0005
    assert pars0["lat_0"].unit == "deg"
    assert pars0["lat_0"].max == 90.0
    assert pars0["lat_0"].min == -90.0
    assert pars0["lat_0"].frozen

    assert pars0["lambda_"].value == 0.006
    assert pars0["lambda_"].unit == "TeV-1"
    assert np.isnan(pars0["lambda_"].min)
    assert np.isnan(pars0["lambda_"].max)

    model1 = models[2]
    assert "pl" in model1.spectral_model.tag
    assert "PowerLawSpectralModel" in model1.spectral_model.tag
    assert "DiskSpatialModel" in model1.spatial_model.tag
    assert "disk" in model1.spatial_model.tag
    assert "LightCurveTemplateTemporalModel" in model1.temporal_model.tag

    pars1 = model1.parameters
    assert pars1["index"].value == 2.2
    assert pars1["index"].unit == ""
    assert pars1["lat_0"].scale == 1.0
    assert pars1["lat_0"].factor == pars1["lat_0"].value

    assert np.isnan(pars1["index"].max)
    assert np.isnan(pars1["index"].min)

    assert pars1["r_0"].unit == "deg"

    model2 = models[3]
    assert_allclose(model2.spectral_model.energy.data,
                    [34.171, 44.333, 57.517])
    assert model2.spectral_model.energy.unit == "MeV"
    assert_allclose(model2.spectral_model.values.data,
                    [2.52894e-06, 1.2486e-06, 6.14648e-06])
    assert model2.spectral_model.values.unit == "1 / (cm2 MeV s)"

    assert "TemplateSpectralModel" in model2.spectral_model.tag
    assert "TemplateSpatialModel" in model2.spatial_model.tag

    assert not model2.spatial_model.normalize
Example No. 21
 def read(cls, path):
     """Reads from YAML file."""
     config = read_yaml(path)
     return AnalysisConfig(**config)