Example #1
0
def test_datasets_to_io(tmp_path):
    """Round-trip the example GC datasets through YAML serialization.

    Reads the example datasets/models pair, checks dataset content,
    model sharing and parameter linking, then writes everything to
    ``tmp_path`` and verifies the re-read copy matches.
    """
    path = "$GAMMAPY_DATA/tests/models"
    filedata = "gc_example_datasets.yaml"
    filemodel = "gc_example_models.yaml"

    datasets = Datasets.read(path, filedata, filemodel)

    assert len(datasets) == 2
    # NOTE: removed a leftover debug ``print(list(datasets.models))``.
    assert len(datasets.models) == 5
    dataset0 = datasets[0]
    assert dataset0.name == "gc"
    assert dataset0.counts.data.sum() == 22258
    assert_allclose(dataset0.exposure.data.sum(), 8.057342e+12, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None

    assert_allclose(dataset0.background_model.evaluate().data.sum(),
                    15726.8,
                    atol=0.1)

    assert dataset0.background_model.name == "gc-bkg"

    dataset1 = datasets[1]
    assert dataset1.name == "g09"
    assert dataset1.background_model.name == "g09-bkg"

    # The diffuse cutout model is shared between both datasets.
    assert (dataset0.models["gll_iem_v06_cutout"] ==
            dataset1.models["gll_iem_v06_cutout"])

    assert isinstance(dataset0.models, Models)
    assert len(dataset0.models) == 4
    assert dataset0.models[0].name == "gc"
    assert dataset0.models[1].name == "gll_iem_v06_cutout"
    assert dataset0.models[2].name == "gc-bkg"

    # Linked parameters must stay identical objects after deserialization.
    assert (dataset0.models["gc"].parameters["reference"] is
            dataset1.models["g09"].parameters["reference"])
    assert_allclose(dataset1.models["g09"].parameters["lon_0"].value,
                    0.9,
                    atol=0.1)

    # Write to a temporary location and read back.
    datasets.write(tmp_path, prefix="written")
    datasets_read = Datasets.read(tmp_path, "written_datasets.yaml",
                                  "written_models.yaml")

    assert len(datasets.parameters) == 22

    assert len(datasets_read) == 2
    dataset0 = datasets_read[0]
    assert dataset0.counts.data.sum() == 22258
    assert_allclose(dataset0.exposure.data.sum(), 8.057342e+12, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None
    assert_allclose(dataset0.background_model.evaluate().data.sum(),
                    15726.8,
                    atol=0.1)

    # Copying a dataset re-targets its background model to the new name.
    dataset_copy = dataset0.copy(name="dataset0-copy")
    assert dataset_copy.background_model.datasets_names == ["dataset0-copy"]
Example #2
0
def test_datasets_to_io(tmp_path):
    """Round-trip the example GC datasets through YAML serialization.

    Expected counts/exposure/background values are pinned to the files
    shipped in ``$GAMMAPY_DATA/tests/models``.
    """
    filedata = "$GAMMAPY_DATA/tests/models/gc_example_datasets.yaml"
    filemodel = "$GAMMAPY_DATA/tests/models/gc_example_models.yaml"

    datasets = Datasets.read(filedata, filemodel)

    assert len(datasets) == 2

    dataset0 = datasets[0]
    assert dataset0.name == "gc"
    assert dataset0.counts.data.sum() == 6824
    assert_allclose(dataset0.exposure.data.sum(), 2072125400000.0, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None

    assert_allclose(dataset0.background_model.evaluate().data.sum(),
                    4094.2,
                    atol=0.1)

    assert dataset0.background_model.name == "background_irf_gc"

    dataset1 = datasets[1]
    assert dataset1.name == "g09"
    assert dataset1.background_model.name == "background_irf_g09"

    # The diffuse cutout model is shared between both datasets.
    assert (dataset0.models["gll_iem_v06_cutout"] ==
            dataset1.models["gll_iem_v06_cutout"])

    assert isinstance(dataset0.models, Models)
    assert len(dataset0.models) == 5
    assert dataset0.models[0].name == "gc"
    assert dataset0.models[1].name == "gll_iem_v06_cutout"

    # Linked parameters must stay identical objects after deserialization.
    assert (dataset0.models["gc"].parameters["reference"] is
            dataset1.models["g09"].parameters["reference"])

    assert_allclose(dataset1.models["g09"].parameters["lon_0"].value,
                    0.9,
                    atol=0.1)

    # Write to a temporary location and read back.
    datasets.write(tmp_path, prefix="written")
    datasets_read = Datasets.read(tmp_path / "written_datasets.yaml",
                                  tmp_path / "written_models.yaml")

    assert len(datasets.parameters) == 22

    assert len(datasets_read) == 2
    dataset0 = datasets_read[0]
    assert dataset0.counts.data.sum() == 6824
    assert_allclose(dataset0.exposure.data.sum(), 2072125400000.0, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None
    assert_allclose(dataset0.background_model.evaluate().data.sum(),
                    4094.2,
                    atol=0.1)
Example #3
0
def fermi_datasets():
    """Load the Fermi-LAT 3FHL Crab example datasets and their models."""
    return Datasets.read(
        "$GAMMAPY_DATA/fermi-3fhl-crab",
        "Fermi-LAT-3FHL_datasets.yaml",
        "Fermi-LAT-3FHL_models.yaml",
    )
Example #4
0
    def read_regions(self):
        """Read per-ROI fit results and compare them to the 3FHL catalog.

        For every selected ROI this loads the serialized datasets/models,
        collects fit diagnostics, and for each sufficiently significant
        catalog source compares the fitted spectrum and flux points against
        the catalog values (optionally producing figures).
        """
        for kr in self.ROIs_sel:
            filedata = self.resdir / f"3FHL_ROI_num{kr}_datasets.yaml"
            filemodel = self.resdir / f"3FHL_ROI_num{kr}_models.yaml"
            try:
                dataset = list(Datasets.read(filedata, filemodel,
                                             lazy=False))[0]
            except (FileNotFoundError, IOError):
                # Results for this ROI may be missing; skip it silently.
                continue

            infos = np.load(self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz")
            self.diags["message"].append(infos["message"])
            self.diags["stat"].append(infos["stat"])

            if self.savefig:
                self.plot_maps(dataset)

            for model in dataset.models:
                # Skip background models; only compare catalog sources that
                # belong to this ROI and pass the significance cut.
                if (not isinstance(model, FoVBackgroundModel)
                        and self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >=
                        self.sig_cut):
                    res_spec = model.spectral_model
                    cat_spec = self.FHL3[model.name].spectral_model()

                    res_fp = FluxPoints.read(self.resdir /
                                             f"{model.name}_flux_points.fits",
                                             reference_model=cat_spec)
                    cat_fp = self.FHL3[model.name].flux_points
                    self.update_spec_diags(dataset, model, cat_spec, res_spec,
                                           cat_fp, res_fp)
                    if self.savefig:
                        self.plot_spec(kr, model, cat_spec, res_spec, cat_fp,
                                       res_fp)
Example #5
0
def test_flux_point_dataset_serialization(tmp_path):
    """Serialize a FluxPointsDataset to YAML and verify the round trip."""
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    data = FluxPoints.read(path)
    # Normalize the reference-energy column to TeV before building the dataset.
    data.table["e_ref"] = data.energy_ref.to("TeV")
    spectral_model = PowerLawSpectralModel(
        index=2.3, amplitude="2e-13 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model = SkyModel(spectral_model=spectral_model, name="test_model")
    dataset = FluxPointsDataset(model, data, name="test_dataset")

    Datasets([dataset]).write(
        filename=tmp_path / "tmp_datasets.yaml",
        filename_models=tmp_path / "tmp_models.yaml",
    )

    datasets = Datasets.read(
        filename=tmp_path / "tmp_datasets.yaml",
        filename_models=tmp_path / "tmp_models.yaml",
    )

    new_dataset = datasets[0]
    assert_allclose(new_dataset.data.dnde, dataset.data.dnde, 1e-4)
    # NOTE(review): when no fit mask was set, the round trip apparently fills
    # mask_fit from mask_safe — confirm this is the intended contract.
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
Example #6
0
def test_flux_point_dataset_serialization(tmp_path):
    """Serialize a FluxPointsDataset via FITS and YAML and verify round trips."""
    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
    table = Table.read(make_path(path))
    table["e_ref"] = table["e_ref"].quantity.to("TeV")
    data = FluxPoints.from_table(table, format="gadf-sed")

    spectral_model = PowerLawSpectralModel(index=2.3,
                                           amplitude="2e-13 cm-2 s-1 TeV-1",
                                           reference="1 TeV")
    model = SkyModel(spectral_model=spectral_model, name="test_model")
    dataset = FluxPointsDataset(model, data, name="test_dataset")

    dataset2 = FluxPointsDataset.read(path, name="test_dataset2")
    assert_allclose(dataset.data.dnde.data, dataset2.data.dnde.data)
    # Compare mask arrays element-wise: a bare ``==`` between arrays yields an
    # array, whose truth value is ambiguous in ``assert`` (matches the
    # ``np.all`` usage for the mask checks further below).
    assert np.all(dataset.mask_safe.data == dataset2.mask_safe.data)
    assert dataset2.name == "test_dataset2"

    Datasets([dataset]).write(
        filename=tmp_path / "tmp_datasets.yaml",
        filename_models=tmp_path / "tmp_models.yaml",
    )

    datasets = Datasets.read(
        filename=tmp_path / "tmp_datasets.yaml",
        filename_models=tmp_path / "tmp_models.yaml",
    )

    new_dataset = datasets[0]
    assert_allclose(new_dataset.data.dnde, dataset.data.dnde, 1e-4)
    # NOTE(review): when no fit mask was set, the round trip apparently fills
    # mask_fit from mask_safe — confirm this is the intended contract.
    if dataset.mask_fit is None:
        assert np.all(new_dataset.mask_fit == dataset.mask_safe)
    assert np.all(new_dataset.mask_safe == dataset.mask_safe)
    assert new_dataset.name == "test_dataset"
Example #7
0
def input_dataset():
    """Return the first Fermi-LAT 3FHL dataset with its PSF removed."""
    loaded = Datasets.read(
        filedata="$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml",
        filemodel="$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml",
    )
    first = loaded[0]
    # Drop the PSF so the consumer works on unconvolved predictions.
    first.psf = None
    return first
Example #8
0
def test_spectrum_dataset_on_off_to_yaml(tmpdir):
    """Write ON/OFF spectrum datasets to YAML and read them back unchanged."""
    original = Datasets(make_observation_list())
    original.write(path=tmpdir)

    restored = Datasets.read(tmpdir / "_datasets.yaml", tmpdir / "_models.yaml")

    assert len(restored) == len(original)
    assert restored[0].name == original[0].name
    assert restored[1].name == original[1].name
    assert restored[1].counts.data.sum() == original[1].counts.data.sum()
Example #9
0
def input_dataset():
    """Return the first Fermi-LAT 3FHL dataset with its PSF removed."""
    loaded = Datasets.read(
        filename="$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml",
        filename_models="$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml",
    )
    first = loaded[0]
    # Drop the PSF so the consumer works on unconvolved predictions.
    first.psf = None
    return first
Example #10
0
def read_datasets_and_set_model(instrument, model):
    """Read reduced datasets for *instrument*, attach *model* and a fit range.

    The energy range for the fit mask comes from the module-level
    ``instrument_opts`` table.
    """
    # Read from disk; the second/third names have no placeholders, so they
    # are plain strings (the original carried spurious ``f`` prefixes).
    datasets = Datasets.read(f"reduced_{instrument}", "_datasets.yaml",
                             "_models.yaml")

    e_min = u.Quantity(instrument_opts[instrument]["emin"])
    e_max = u.Quantity(instrument_opts[instrument]["emax"])

    # Set model and fit range on every dataset.
    for ds in datasets:
        ds.models = model
        ds.mask_fit = ds.counts.geom.energy_mask(e_min, e_max)

    return datasets
Example #11
0
    def read_datasets(self):
        """Read datasets from YAML file.

        File names are taken from the configuration file.

        Raises
        ------
        RuntimeError
            If ``datasets_file`` is missing from the configuration.
        """
        filename = self.config.general.datasets_file
        filename_models = self.config.general.models_file
        if filename is None:
            raise RuntimeError("Missing datasets_file in config.general")
        self.datasets = Datasets.read(filename)
        # Interpolate the actual file name into the log message (the original
        # f-string contained no placeholder).
        log.info(f"Datasets loaded from {filename}.")
        if filename_models is not None:
            self.read_models(filename_models, extend=False)
Example #12
0
def dataset():
    """Load the Fermi-LAT 3FHL Crab datasets and configure free parameters."""
    result = Datasets.read(
        "$GAMMAPY_DATA/fermi-3fhl-crab/",
        "Fermi-LAT-3FHL_datasets.yaml",
        "Fermi-LAT-3FHL_models.yaml",
    )

    pars = result.models.parameters

    # Position is fitted; the spectral shape parameters stay fixed.
    for par_name, fixed in (("lon_0", False), ("lat_0", False),
                            ("norm", True), ("alpha", True), ("beta", True)):
        pars[par_name].frozen = fixed

    # Physical bounds for the position and a generous amplitude range.
    pars["lat_0"].min = -90
    pars["lat_0"].max = 90
    pars["lon_0"].min = 0
    pars["lon_0"].max = 360
    pars["amplitude"].min = 0.01 * pars["amplitude"].value
    pars["amplitude"].max = 100 * pars["amplitude"].value

    return result
Example #13
0
def dataset():
    """Load the 3FHL Crab datasets, wire up the models, set free parameters."""
    filename_models = "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml"
    models = Models.read(filename_models)

    pars = models.parameters

    # Position is fitted; the spectral shape parameters stay fixed.
    for par_name, fixed in (("lon_0", False), ("lat_0", False),
                            ("norm", True), ("alpha", True), ("beta", True)):
        pars[par_name].frozen = fixed

    # Physical bounds for the position and a generous amplitude range.
    pars["lat_0"].min = -90
    pars["lat_0"].max = 90
    pars["lon_0"].min = 0
    pars["lon_0"].max = 360
    pars["amplitude"].min = 0.01 * pars["amplitude"].value
    pars["amplitude"].max = 100 * pars["amplitude"].value

    filename = "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml"
    datasets = Datasets.read(filename=filename)
    datasets.models = models
    return datasets
Example #14
0
def fermi_datasets():
    """Read the Fermi-LAT 3FHL Crab datasets together with their models."""
    datasets_file = "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml"
    models_file = "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml"
    return Datasets.read(filename=datasets_file, filename_models=models_file)
Example #15
0
def read(filename):
    """Read the ``<filename>_datasets.yaml`` / ``<filename>_models.yaml``
    pair from the current working directory.
    """
    # NOTE(review): the original ignored *filename* and used f-strings with a
    # literal "(unknown)" placeholder — presumably a redaction. Interpolating
    # the parameter restores the evident intent; confirm against callers.
    return Datasets.read(os.getcwd(), f"{filename}_datasets.yaml",
                         f"{filename}_models.yaml")
Example #16
0
def test_models_management(tmp_path):
    """Exercise adding/removing/replacing models on Datasets that share a
    global Models object.

    Checks that models with ``datasets_names=None`` propagate to every
    dataset, that models scoped to one dataset stay local, and that the
    npred cache reacts to model changes.
    """
    path = "$GAMMAPY_DATA/tests/models"
    filedata = "gc_example_datasets.yaml"
    filemodel = "gc_example_models.yaml"

    datasets = Datasets.read(path, filedata, filemodel)

    model1 = datasets.models[0].copy(name="model1", datasets_names=None)
    model2 = datasets.models[0].copy(name="model2", datasets_names=[datasets[1].name])
    model3 = datasets.models[0].copy(name="model3", datasets_names=[datasets[0].name])

    model1b = datasets.models[0].copy(name="model1", datasets_names=None)
    model1b.spectral_model.amplitude.value *= 2

    names0 = datasets[0].models.names
    names1 = datasets[1].models.names

    datasets[0].models.append(model1)
    _ = datasets[0].models + model2
    assert datasets[0].models.names == names0 + ["model1", "model2"]
    # ``is None`` replaces the original ``== None`` identity-vs-equality slip.
    assert datasets[0].models["model1"].datasets_names is None
    assert datasets[0].models["model2"].datasets_names == [
        datasets[1].name,
        datasets[0].name,
    ]
    assert datasets[1].models.names == names1 + ["model1", "model2"]

    # TODO consistency check at datasets level ?
    # or force same Models for each dataset._models on datasets init ?
    # here we have the right behavior: model1 and model2 are also added to dataset1
    # because serialization create a global model object shared by all datasets
    # if that was not the case we could have inconsistancies
    # such as model1.datasets_names == None added only to dataset1
    # user can still create such inconsistancies if they define datasets
    # with diferent Models objects for each dataset.

    del datasets[0].models["model1"]
    assert datasets[0].models.names == names0 + ["model2"]

    datasets[0].models.remove(model2)
    assert datasets[0].models.names == names0

    datasets.models.append(model2)
    assert model2 in datasets.models
    assert model2 in datasets[1].models
    assert datasets[0].models.names == names0 + ["model2"]

    datasets[0].models.extend([model1, model3])
    assert datasets[0].models.names == names0 + ["model2", "model1", "model3"]

    for m in [model1, model2, model3]:
        datasets.models.remove(m)
    assert datasets[0].models.names == names0
    assert datasets[1].models.names == names1
    datasets.models.extend([model1, model2, model3])
    assert datasets[0].models.names == names0 + ["model1", "model2", "model3"]
    assert datasets[1].models.names == names1 + ["model1", "model2"]

    for m in [model1, model2, model3]:
        datasets.models.remove(m)
    _ = datasets.models + [model1, model2]
    assert datasets[0].models.names == names0 + ["model1", "model2"]
    assert datasets[1].models.names == names1 + ["model1", "model2"]

    datasets[0].models["model2"] = model3
    assert datasets[0].models.names == names0 + ["model1", "model3"]
    assert datasets[1].models.names == names1 + ["model1"]

    datasets.models.remove(model1)
    datasets[0].models = model1
    _ = datasets.models  # auto-update models
    assert datasets[0].models.names == ["model1", "gll_iem_v06_cutout"]
    # the consistency check added diffuse model contained in the global model

    npred1 = datasets[0].npred().data.sum()
    datasets.models.remove(model1)
    npred0 = datasets[0].npred().data.sum()
    datasets.models.append(model1b)
    npred1b = datasets[0].npred().data.sum()
    assert npred1b != npred1
    assert npred1b != npred0
    assert_allclose(npred1b, 2147.407023024028)

    datasets.models.remove(model1b)
    _ = datasets.models  # auto-update models
    newmodels = [datasets.models[0].copy() for k in range(48)]
    datasets.models.extend(newmodels)

    # npred must agree with and without the evaluator cache.
    datasets[0].use_cache = False
    nocache = datasets[0].npred().data.sum()
    datasets[0].use_cache = True
    assert_allclose(datasets[0].npred().data.sum(), nocache)
def read(filename):
    """Read the ``<filename>_datasets.yaml`` / ``<filename>_models.yaml``
    pair from the current working directory.
    """
    path = Path.cwd()
    # NOTE(review): the original ignored *filename* and used f-strings with a
    # literal "(unknown)" placeholder — presumably a redaction. Interpolating
    # the parameter restores the evident intent; confirm against callers.
    return Datasets.read(path / f"{filename}_datasets.yaml",
                         filename_models=path / f"{filename}_models.yaml")
Example #18
0
        os.makedirs(outdir)

    # loop through the sources:
    for src in config.keys():
        if src == 'global':
            continue

        if args.select_source is not None:
            if not src == args.select_source:
                continue

        logging.info(f" ====== {src} ======= ")

        logging.info("Loading IACT datasets...")
        # read 3d data set
        dataset_3d = Datasets.read(config['global']['iact_dataset_3d'].replace(
            "*", src))
        geom = dataset_3d[0].geoms['geom']
        on_region = CircleSkyRegion(center=geom.center_skydir,
                                    radius=on_radius)

        # Load the 1D data set
        dataset_1d_file = config['global']['iact_dataset_1d'].replace("*", src)
        if os.path.exists(dataset_1d_file.replace(".yaml", "_stacked.yaml")):
            logging.info("Loading stacked dataset...")
            dataset_stack = Datasets.read(
                dataset_1d_file.replace(".yaml", "_stacked.yaml"))
        else:
            dataset_1d = Datasets.read(
                config['global']['iact_dataset_1d'].replace("*", src))
            # stack reduce the data set
            logging.info("stacking datasets...")
Example #19
0
def crab_datasets_fermi():
    """Load the Fermi-LAT 3FHL Crab datasets with the matching models file."""
    datasets = Datasets.read(
        "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml",
        "$GAMMAPY_DATA/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml",
    )
    return datasets
Example #20
0
def test_datasets_to_io(tmp_path):
    """Round-trip the example GC datasets through YAML serialization.

    Reads the example datasets/models pair, checks dataset content, model
    sharing and parameter linking, writes everything to ``tmp_path``,
    verifies the re-read copy, and runs a fit to check linked covariances.
    """
    path = "$GAMMAPY_DATA/tests/models"
    filedata = "gc_example_datasets.yaml"
    filemodel = "gc_example_models.yaml"

    datasets = Datasets.read(path, filedata, filemodel)

    assert len(datasets) == 2
    # NOTE: removed a leftover debug ``print(list(datasets.models))``.
    assert len(datasets.models) == 5
    dataset0 = datasets[0]
    assert dataset0.name == "gc"
    assert dataset0.counts.data.sum() == 6824
    assert_allclose(dataset0.exposure.data.sum(), 2072125400000.0, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None

    assert_allclose(dataset0.background_model.evaluate().data.sum(),
                    4094.2,
                    atol=0.1)

    assert dataset0.background_model.name == "background_irf_gc"

    dataset1 = datasets[1]
    assert dataset1.name == "g09"
    assert dataset1.background_model.name == "background_irf_g09"

    # The diffuse cutout model is shared between both datasets.
    assert (dataset0.models["gll_iem_v06_cutout"] ==
            dataset1.models["gll_iem_v06_cutout"])

    assert isinstance(dataset0.models, Models)
    assert len(dataset0.models) == 3
    assert dataset0.models[0].name == "gc"
    assert dataset0.models[1].name == "gll_iem_v06_cutout"
    assert dataset0.models[2].name == "background_irf_gc"

    # Linked parameters must stay identical objects after deserialization.
    assert (dataset0.models["background_irf_gc"].parameters["norm"] is
            dataset1.models["background_irf_g09"].parameters["norm"])

    assert (dataset0.models["gc"].parameters["reference"] is
            dataset1.models["g09"].parameters["reference"])
    assert_allclose(dataset1.models["g09"].parameters["lon_0"].value,
                    0.9,
                    atol=0.1)

    # Write to a temporary location and read back.
    datasets.write(tmp_path, prefix="written")
    datasets_read = Datasets.read(tmp_path, "written_datasets.yaml",
                                  "written_models.yaml")

    assert len(datasets.parameters) == 21

    assert len(datasets_read) == 2
    dataset0 = datasets_read[0]
    assert dataset0.counts.data.sum() == 6824
    assert_allclose(dataset0.exposure.data.sum(), 2072125400000.0, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None
    assert_allclose(dataset0.background_model.evaluate().data.sum(),
                    4094.2,
                    atol=0.1)

    # Linked background norms must share a covariance after fitting.
    Fit(datasets).run()
    assert_allclose(
        datasets.models["background_irf_g09"].covariance,
        datasets.models["background_irf_gc"].covariance,
    )

    # Copying a dataset re-targets its background model to the new name.
    dataset_copy = dataset0.copy(name="dataset0-copy")
    assert dataset_copy.background_model.datasets_names == ["dataset0-copy"]
Example #21
0
def test_datasets_to_io(tmp_path):
    """Round-trip the example GC datasets through YAML serialization.

    Uses the keyword ``filename``/``filename_models`` API and checks the
    content before and after writing to ``tmp_path``.
    """
    filedata = "$GAMMAPY_DATA/tests/models/gc_example_datasets.yaml"
    filemodel = "$GAMMAPY_DATA/tests/models/gc_example_models.yaml"

    datasets = Datasets.read(
        filename=filedata,
        filename_models=filemodel,
    )

    assert len(datasets) == 2
    assert len(datasets.models) == 5
    dataset0 = datasets[0]
    assert dataset0.name == "gc"
    assert dataset0.counts.data.sum() == 22258
    assert_allclose(dataset0.exposure.data.sum(), 8.057342e12, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None

    assert_allclose(dataset0.npred_background().data.sum(), 15726.8, atol=0.1)

    assert dataset0.background_model.name == "gc-bkg"

    dataset1 = datasets[1]
    assert dataset1.name == "g09"
    assert dataset1.background_model.name == "g09-bkg"

    # The diffuse cutout model is shared between both datasets.
    assert (dataset0.models["gll_iem_v06_cutout"] ==
            dataset1.models["gll_iem_v06_cutout"])

    assert isinstance(dataset0.models, DatasetModels)
    assert len(dataset0.models) == 4
    assert dataset0.models[0].name == "gc"
    assert dataset0.models[1].name == "gll_iem_v06_cutout"
    assert dataset0.models[2].name == "gc-bkg"

    # Linked parameters must stay identical objects after deserialization.
    assert (dataset0.models["gc"].parameters["reference"] is
            dataset1.models["g09"].parameters["reference"])
    assert_allclose(dataset1.models["g09"].parameters["lon_0"].value,
                    0.9,
                    atol=0.1)

    # Write to a temporary location and read back.
    datasets.write(
        filename=tmp_path / "written_datasets.yaml",
        filename_models=tmp_path / "written_models.yaml",
    )

    datasets_read = Datasets.read(
        filename=tmp_path / "written_datasets.yaml",
        filename_models=tmp_path / "written_models.yaml",
    )

    assert len(datasets.parameters) == 22

    assert len(datasets_read) == 2
    dataset0 = datasets_read[0]
    assert dataset0.counts.data.sum() == 22258
    assert_allclose(dataset0.exposure.data.sum(), 8.057342e12, atol=0.1)
    assert dataset0.psf is not None
    assert dataset0.edisp is not None
    assert_allclose(dataset0.npred_background().data.sum(), 15726.8, atol=0.1)

    # A copied dataset starts without any models attached in this API version.
    dataset_copy = dataset0.copy(name="dataset0-copy")
    assert dataset_copy.models is None