Example #1
def test_datasets_stack_reduce_no_off():
    datasets = Datasets()
    obs_ids = [23523, 23526, 23559, 23592]

    for obs_id in obs_ids:
        filename = f"$GAMMAPY_DATA/joint-crab/spectra/hess/pha_obs{obs_id}.fits"
        ds = SpectrumDatasetOnOff.read(filename)
        datasets.append(ds)

    datasets[-1].counts_off = None

    with pytest.raises(ValueError):
        stacked = datasets.stack_reduce(name="stacked")

    datasets[-1].mask_safe.data[...] = False
    stacked = datasets.stack_reduce(name="stacked")
    assert_allclose(stacked.exposure.meta["livetime"].to_value("s"),
                    4732.5469999)
    assert stacked.counts.data.sum() == 369

    datasets[0].mask_safe.data[...] = False

    stacked = datasets.stack_reduce(name="stacked")
    assert_allclose(stacked.exposure.meta["livetime"].to_value("s"),
                    3150.81024152)
    assert stacked.counts.data.sum() == 245
Example #2
    def run(self, n_obs=10, sigma=0.3 * u.deg, A0=5e-13):
        pos1 = SkyCoord(self.skydir.galactic.l + self.wobble_offset,
                        self.skydir.galactic.b,
                        frame="galactic")
        pos2 = SkyCoord(self.skydir.galactic.l - self.wobble_offset,
                        self.skydir.galactic.b,
                        frame="galactic")
        pos3 = SkyCoord(self.skydir.galactic.l,
                        self.skydir.galactic.b + self.wobble_offset,
                        frame="galactic")
        pos4 = SkyCoord(self.skydir.galactic.l,
                        self.skydir.galactic.b - self.wobble_offset,
                        frame="galactic")

        datasets = Datasets()

        for j, apos in enumerate([pos1, pos2, pos3, pos4]):
            print("Pointing position: \n", apos)
            for i in range(n_obs):
                empty = self.create_empty(name=f"dataset-{j}-{i}")
                models = self.sky_model(sigma.value, A0)
                dataset = self.simulate_single(pointing=apos,
                                               models=models,
                                               empty=empty)
                datasets.append(dataset)

        return datasets
Example #3
def test_Datasets_mutation():
    dat = MyDataset(name="test-1")
    dats = Datasets([MyDataset(name="test-2"), MyDataset(name="test-3")])
    dats2 = Datasets([MyDataset(name="test-4"), MyDataset(name="test-5")])

    dats.insert(0, dat)
    assert dats.names == ["test-1", "test-2", "test-3"]

    dats.extend(dats2)
    assert dats.names == ["test-1", "test-2", "test-3", "test-4", "test-5"]

    dat3 = dats[3]
    dats.remove(dats[3])
    assert dats.names == ["test-1", "test-2", "test-3", "test-5"]
    dats.append(dat3)
    assert dats.names == ["test-1", "test-2", "test-3", "test-5", "test-4"]
    dats.pop(3)
    assert dats.names == ["test-1", "test-2", "test-3", "test-4"]

    with pytest.raises(ValueError, match="Dataset names must be unique"):
        dats.append(dat)
    with pytest.raises(ValueError, match="Dataset names must be unique"):
        dats.insert(0, dat)
    with pytest.raises(ValueError, match="Dataset names must be unique"):
        dats.extend(dats2)
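Because dataset names must stay unique within a collection, re-adding an existing dataset requires a renamed copy first. A minimal sketch, assuming the `MyDataset` test stub inherits `Dataset.copy` from Gammapy:

dat_renamed = dat.copy(name="test-1-copy")  # fresh, unique name
dats.append(dat_renamed)                    # no ValueError this time
assert "test-1-copy" in dats.names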
Example #4
def data_prep():
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    obs_ids = OBS_ID * np.ones(N_OBS)  # N_OBS: number of repeated observations, defined outside this snippet
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(0.1,
                                      10,
                                      nbin=10,
                                      unit="TeV",
                                      name="energy",
                                      interp="log")
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )

    energy_axis_true = MapAxis.from_bounds(0.05,
                                           20,
                                           nbin=30,
                                           unit="TeV",
                                           name="energy_true",
                                           interp="log")

    offset_max = 4 * u.deg
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"],
                                    offset_max=offset_max)
    stacked = MapDataset.create(geom=geom, energy_axis_true=energy_axis_true)

    spatial_model = PointSpatialModel(lon_0="-0.05 deg",
                                      lat_0="-0.05 deg",
                                      frame="galactic")
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(spatial_model=spatial_model,
                     spectral_model=spectral_model,
                     name="gc-source")

    datasets = Datasets([])
    for idx, obs in enumerate(observations):
        cutout = stacked.cutout(obs.pointing_radec,
                                width=2 * offset_max,
                                name=f"dataset{idx}")
        dataset = maker.run(cutout, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        dataset.models = [model, FoVBackgroundModel(dataset_name=dataset.name)]
        datasets.append(dataset)
    return datasets
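With the per-observation datasets prepared, a joint fit can run over the whole collection. A minimal sketch, not part of the original snippet; the exact `Fit` call signature varies between Gammapy versions (this follows the v1.0-style API):

from gammapy.modeling import Fit

datasets = data_prep()
fit = Fit()
result = fit.run(datasets=datasets)  # joint likelihood over all datasets
print(result)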
Example #5
    def slice_datasets(datasets, e_min, e_max):
        """Select and slice datasets in energy range

        Parameters
        ----------
        datasets : Datasets
            Datasets
        e_min, e_max : `~astropy.units.Quantity`
            Energy bounds to compute the flux point for.

        Returns
        -------
        datasets : Datasets
            Datasets

        """
        datasets_to_fit = Datasets()

        for dataset in datasets:
            # TODO: implement slice_by_coord() and simplify?
            energy_axis = dataset.counts.geom.get_axis_by_name("energy")
            try:
                group = energy_axis.group_table(edges=[e_min, e_max])
            except ValueError:
                log.info(
                    f"Dataset {dataset.name} does not contribute in the energy range"
                )
                continue

            # Note: "bin_type" entries are blank-padded fixed-width strings
            is_normal = group["bin_type"] == "normal   "
            group = group[is_normal]

            slices = {
                "energy": slice(int(group["idx_min"][0]),
                                int(group["idx_max"][0]) + 1)
            }

            name = f"{dataset.name}-{e_min:.3f}-{e_max:.3f}"
            dataset_sliced = dataset.slice_by_idx(slices, name=name)

            # TODO: Simplify model handling!!!!
            models = []

            for model in dataset.models:
                if isinstance(model, BackgroundModel):
                    models.append(dataset_sliced.background_model)
                else:
                    models.append(model)

            dataset_sliced.models = models
            datasets_to_fit.append(dataset_sliced)

        return datasets_to_fit
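A hypothetical call, assuming the helper is exposed as a static method and `datasets` holds spectrum datasets whose energy axes cover the requested range:

import astropy.units as u

sliced = slice_datasets(datasets, e_min=1 * u.TeV, e_max=10 * u.TeV)
print(sliced.names)  # one sliced dataset per contributing input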
Example #6
def hess_datasets():
    datasets = Datasets([])
    for obsid in [23523, 23526]:
        datasets.append(
            SpectrumDatasetOnOff.from_ogip_files(
                f"$GAMMAPY_DATA/joint-crab/spectra/hess/pha_obs{obsid}.fits"))
    PLmodel = PowerLawSpectralModel(amplitude="3.5e-11 cm-2s-1TeV-1",
                                    index=2.7)
    for dataset in datasets:
        dataset.models = SkyModel(spectral_model=PLmodel, name="Crab")
    return datasets
Example #7
def hess_datasets():
    datasets = Datasets([])
    pwl = PowerLawSpectralModel(amplitude="3.5e-11 cm-2s-1TeV-1", index=2.7)
    model = SkyModel(spectral_model=pwl, name="Crab")

    for obsid in [23523, 23526]:
        dataset = SpectrumDatasetOnOff.read(
            f"$GAMMAPY_DATA/joint-crab/spectra/hess/pha_obs{obsid}.fits")
        dataset.models = model
        datasets.append(dataset)

    return datasets
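Examples #6 and #7 build the same two-run Crab fixture; #7 uses the newer `SpectrumDatasetOnOff.read` in place of the deprecated `from_ogip_files` and attaches a single shared model, so a fit adjusts one set of parameters against both runs. A short usage sketch:

datasets = hess_datasets()
print(datasets.names)  # two HESS Crab runs sharing one spectral model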
Example #8
def compactify(dsets, dtmin=1 * u.h, debug=False):
    """
    Returns a list of stacked Dataset having a minimal total duration from an
    original unstacked Dataset list.
    Note that the model stacking is not applied.

    Parameters
    ----------
    dsets : Dataset List
        The initial list of Datasets.
    dtmin : astropy.time, optional
        The stacked Dataset minimal duration. The default is 1*u.h.
    debug : Boolean, optional
        If True, let's talk a bit. The default is False.

    Returns
    -------
    ds_compacted : Dataset List
        The compacted Dataset list.

    """

    duration = 0 * u.s
    tmp_stack = Datasets()
    ds_compacted = Datasets()
    iset = 0

    for ds in dsets:
        tmp_stack.append(ds)
        duration += ds.gti.time_delta[0]

        if debug:
            print("  ", ds.name, " : ", ds.livetime, " appended")

        # If the minimal duration is reached, stack
        if duration >= dtmin:
            dset_stacked = stacking(tmp_stack, tryflux=False, debug=False)
            name = "Compacted-" + str(iset)
            ds_compacted.append(dset_stacked[-1].copy(name=name))
            if debug:
                print("  Dt exceeded - stack", len(tmp_stack), " datasets")
                print(tmp_stack)
                print("   Duration and stack reset")
                print(dset_stacked)
                print(dset_stacked[-1].name, " should be kept as ", name)

            # Reset stack and duration
            duration = 0 * u.s
            tmp_stack = Datasets()
            iset += 1

    return ds_compacted
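A hypothetical call, assuming `dsets` is a time-ordered list of short datasets and that the `stacking` helper from Example #12 below is importable:

import astropy.units as u

compacted = compactify(dsets, dtmin=0.5 * u.h, debug=True)
print(len(compacted), "stacked datasets")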
Example #9
def test_datasets_stack_reduce():
    datasets = Datasets()
    obs_ids = [23523, 23526, 23559, 23592]

    for obs_id in obs_ids:
        filename = f"$GAMMAPY_DATA/joint-crab/spectra/hess/pha_obs{obs_id}.fits"
        ds = SpectrumDatasetOnOff.from_ogip_files(filename)
        datasets.append(ds)

    stacked = datasets.stack_reduce(name="stacked")

    assert_allclose(stacked.exposure.meta["livetime"].to_value("s"), 6313.8116406202325)

    info_table = datasets.info_table()
    assert_allclose(info_table["n_on"], [124, 126, 119, 90])

    info_table_cum = datasets.info_table(cumulative=True)
    assert_allclose(info_table_cum["n_on"], [124, 250, 369, 459])
    assert stacked.name == "stacked"
Example #10
    def get_spectrum_datasets(self, dataset):
        """ Utility to make the final `~gammapy.datasts.Datasets`

        Parameters
        ----------
        dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
            the dataset to use for profile extraction
        Returns
        --------
        sp_datasets : array of `~gammapy.datasets.SpectrumDataset`
            the list of `~gammapy.datasets.SpectrumDataset` computed in each box
        """
        datasets = Datasets()

        for reg in self.regions:
            spectrum_dataset = dataset.to_spectrum_dataset(reg)
            datasets.append(spectrum_dataset)

        return datasets
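The helper is a thin loop over `MapDataset.to_spectrum_dataset`; a standalone equivalent, with a hypothetical region list and an assumed input `map_dataset`:

from astropy.coordinates import SkyCoord
from regions import CircleSkyRegion
import astropy.units as u

regions = [CircleSkyRegion(SkyCoord(0, 0, unit="deg", frame="galactic"),
                           radius=0.2 * u.deg)]
sp_datasets = Datasets(
    [map_dataset.to_spectrum_dataset(reg) for reg in regions]
)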
Example #11
def createonoff_from_simulation(mc, random_state='random-seed', debug=False):
    """
    Transform the SpectrumDataset list stored in the MonteCarlo class of
    SoHAPPy into a list of Datasets of type SpectrumDatasetOnOff.

    Parameters
    ----------
    mc : MonteCarlo instance
        A SoHAPPy MonteCarlo class instance.
    random_state : random seed, optional
        The default is 'random-seed'.
    debug : bool, optional
        If True, be verbose. The default is False.

    Returns
    -------
    dlist_onoff : Datasets
        A list of on-off datasets built from the originals.

    """

    import sys
    sys.path.append("../SoHAPPy")
    import mcsim_res as mcres
    import mcsim_config as mcf
    from utilities import Log

    if mc.slot is None:
        print(" Simulation was not runnable - no dataset available")
        return None

    if debug:
        # story(mc, ref="VIS",saveplots="False",outfile="")
        # story(mc, ref="notVIS",saveplots="False",outfile="")
        # stat(mc,saveplots="False",outfile="")
        log = Log(name="out.log", talk=True)
        mcres.result(mc, mc.slot.grb, log=log)
        print(mc.slot)

    dset_list = mc.dset_list

    # Create on-off datasets from the original list,
    # which is a list of lists to account for multiple sites
    dlist_onoff = Datasets()

    for ds_site in dset_list:

        for ds in ds_site:
            # Remove NaN background (corrected in Gammapy 0.19)
            ds.background.data = np.nan_to_num(ds.background.data)

            # It seems that above 0.17, datasets need to be simulated
            # before they can be converted to on-off
            ds.fake(random_state=random_state)

            ds_onoff = SpectrumDatasetOnOff.from_spectrum_dataset(
                dataset=ds, acceptance=1, acceptance_off=1 / mcf.alpha)

            ds_onoff.fake(npred_background=ds_onoff.npred_background())

            if debug:
                print(ds_onoff)

            dlist_onoff.append(ds_onoff)

    return dlist_onoff
Example #12
def stacking(dsets, tryflux=False, debug=False):
    """
    Create a new dataset collection (Datasets) with (consecutively) stacked 
    datasets.

    Note that the first dataset has to be masked explicitely as the stacked
    ones are. Note that by default the model carried by the stacked dataset
    in a stacked list is the model of the first dataset.
    An option exists in gammapy to supersede the models in each consecutively 
    stacked dataset but it essentially work only for dataset with the same 
    mask_safe (See documentation for more explanations)

    Parameters
    ----------
    dsets : List of Dataset object
        A collection of original datasets.
    tryflux : boolean
        If true a mean spectrum is recomputed an assigned to the stacked models. 
        See documentation for caveat.
    debug : boolean
        If True, let's talk a bit. Default is False.

    Returns
    -------
    dsets_stacked : Datasets object
        A collection of stacked datasets.

    """

    # ------------------
    def info(ds0, dss0):
        print(" * Current dataset : {:20} dt= {:10} - Model : {}".format(
            ds0.name, t_fmt(ds0.gti.time_sum), ds0.models[0].name))
        print(" *         Stacked : {:20} dt= {:10} - Model : {}".format(
            dss0.name, t_fmt(dss0.gti.time_sum), dss0.models[0].name))
        print("                  -> t1={} t2={} dt={} ".format(
            dss0.gti.time_start, dss0.gti.time_stop, dss0.gti.time_delta))
    # ------------------

    # Put the first MASKED dataset on the stack
    ds = dsets[0]
    stacked = get_masked_dataset(ds.copy(name="1st unmasked"))
    if tryflux:  # Change model
        stacked.models = stacked_model(ds, stacked, first=True, debug=debug)
    dsets_stacked = Datasets(ds.copy(name="1st unmasked"))

    if debug:
        info(ds, ds)

    # Stack the following ones
    for ds in dsets[1:]:
        stacked.stack(ds)  # stack() applies the mask!
        dss = stacked.copy(name="Stacked_" + ds.name)
        if tryflux:
            dss.models = stacked_model(ds, dsets_stacked[-1], debug=debug)

        # Add the dataset to the stack
        dsets_stacked.append(dss)
        if debug:
            info(ds, dss)

    return dsets_stacked
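A hypothetical call: the returned collection holds the cumulative stacks, so the last entry covers the full observation list:

dsets_stacked = stacking(dsets, tryflux=False, debug=True)
deepest = dsets_stacked[-1]   # stack of all input datasets
print(deepest.gti.time_sum)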
Example #13
def make_datasets_example():
    # Define which data to use and print some information

    energy_axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 4),
                                     unit="TeV",
                                     name="energy",
                                     interp="log")
    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = Models()

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(lon_0=lon * u.deg,
                                          lat_0=lat * u.deg,
                                          frame="galactic")
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(spatial_model=spatial_model,
                              spectral_model=spectral_model,
                              name=names[idx])
        models.append(model_ecpl)

    models["gc"].spectral_model.reference = models[
        "g09"].spectral_model.reference

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_spatial = TemplateSpatialModel.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/gll_iem_v06_gc.fits.gz")
    diffuse_model = SkyModel(PowerLawSpectralModel(), diffuse_spatial)

    maker = MapDatasetMaker()
    datasets = Datasets()

    observations = data_store.get_observations(obs_ids)

    for idx, geom in enumerate(geoms):
        stacked = MapDataset.create(geom=geom, name=names[idx])

        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        bkg = stacked.models.pop(0)
        stacked.models = [models[idx], diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write(
        "$GAMMAPY_DATA/tests/models",
        prefix="gc_example",
        overwrite=True,
        write_covariance=False,
    )
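The serialized collection can be read back; a sketch assuming the file names follow the `prefix` passed to `write` above (the exact signature of `Datasets.read` depends on the Gammapy version):

datasets = Datasets.read(
    filename="$GAMMAPY_DATA/tests/models/gc_example_datasets.yaml",
    filename_models="$GAMMAPY_DATA/tests/models/gc_example_models.yaml",
)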
print("Creating the", n_obs, "On/Off simulations")

datasets = Datasets()
for idx in range(n_obs):
    dataset = maker.run(dataset_empty, obs)
    # Set the model on the dataset, and fake the counts
    dataset.models = model
    dataset.fake(random_state=idx)
    # Set off regions
    dataset_on_off = SpectrumDatasetOnOff.from_spectrum_dataset(
        dataset=dataset, acceptance=1, acceptance_off=3)
    dataset_on_off.fake(random_state=idx,
                        npred_background=dataset.npred_background())
    dataset_fake = dataset_on_off.copy(name=f"obs-{idx}")
    dataset_fake.meta_table["OBS_ID"] = [idx]
    datasets.append(dataset_fake)

# Save the data from the simulations
table = datasets.info_table()
table.write('observations.txt', format='ascii')

# Check counts in one selected realization
fig_2 = plt.figure(1)
datasets[0].npred().plot_hist(label='Predicted S+B')
datasets[0].npred_signal().plot_hist(label='Predicted S')
datasets[0].npred_background().plot_hist(label='Predicted B')
plt.legend()
form = plt.FormatStrFormatter('$%g$')
plt.gca().xaxis.set_major_formatter(form)
# Save before closing the figure; the JPEG-only "quality" keyword is dropped for PNG output
fig_2.savefig('obs_counts_one_simu.png', dpi=1000)
plt.close(fig_2)