Exemple #1
0
    def run(self, dataset, observation):
        """Run all steps.

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Input dataset.
        observation : `Observation`
            Data store observation.

        Returns
        -------
        dataset_on_off : `SpectrumDatasetOnOff`
            On off dataset.
        """
        # Extract counts inside the off- and on-phase intervals.
        counts_off = self.make_counts_off(dataset, observation)
        counts = self.make_counts(dataset, observation)

        # The acceptance of each region is the total phase fraction
        # covered by its intervals (each interval is a (start, stop) pair).
        acceptance = np.sum([stop - start for start, stop in self.on_phase])
        acceptance_off = np.sum([stop - start for start, stop in self.off_phase])

        dataset_on_off = SpectrumDatasetOnOff.from_spectrum_dataset(
            dataset=dataset,
            counts_off=counts_off,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
        )
        # Replace the counts with the on-phase counts computed above.
        dataset_on_off.counts = counts
        return dataset_on_off
Exemple #2
0
def test_cta_sensitivity_estimator(spectrum_dataset):
    """Check the sensitivity table computed for a simple on/off dataset."""
    onoff = SpectrumDatasetOnOff.from_spectrum_dataset(
        dataset=spectrum_dataset, acceptance=1, acceptance_off=5
    )
    estimator = SensitivityEstimator(gamma_min=20)
    result = estimator.run(onoff)

    # Table layout and units.
    assert len(result) == 4
    assert result.colnames == ["energy", "e2dnde", "excess", "background", "criterion"]
    assert result["energy"].unit == "TeV"
    assert result["e2dnde"].unit == "erg / (cm2 s)"

    # First bin: sensitivity driven by the significance criterion.
    first = result[0]
    assert_allclose(first["energy"], 1.33352, rtol=1e-3)
    assert_allclose(first["e2dnde"], 3.40101e-11, rtol=1e-3)
    assert_allclose(first["excess"], 334.454, rtol=1e-3)
    assert_allclose(first["background"], 3600, rtol=1e-3)
    assert first["criterion"] == "significance"

    # Last bin: sensitivity driven by the minimum-gamma-count criterion.
    last = result[3]
    assert_allclose(last["energy"], 7.49894, rtol=1e-3)
    assert_allclose(last["e2dnde"], 1.14367e-11, rtol=1e-3)
    assert_allclose(last["excess"], 20, rtol=1e-3)
    assert_allclose(last["background"], 3.6, rtol=1e-3)
    assert last["criterion"] == "gamma"
Exemple #3
0
    def run(self, dataset, observation):
        """Run reflected regions background maker.

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Spectrum dataset.
        observation : `DatastoreObservation`
            Data store observation.

        Returns
        -------
        dataset_on_off : `SpectrumDatasetOnOff`
            On off dataset.
        """
        counts_off, acceptance_off = self.make_counts_off(dataset, observation)
        # The on-region acceptance is unity everywhere by construction.
        acceptance = RegionNDMap.from_geom(geom=dataset.counts.geom, data=1)

        dataset_onoff = SpectrumDatasetOnOff.from_spectrum_dataset(
            dataset=dataset,
            counts_off=counts_off,
            acceptance=acceptance,
            acceptance_off=acceptance_off,
            name=dataset.name,
        )

        if dataset_onoff.counts_off is not None:
            return dataset_onoff

        # No off counts could be extracted: invalidate the whole dataset
        # and warn, so downstream fits ignore it.
        dataset_onoff.mask_safe.data[...] = False
        log.warning(
            f"ReflectedRegionsBackgroundMaker failed. Setting {dataset_onoff.name} mask to False."
        )
        return dataset_onoff
def test_cta_sensitivity_estimator(spectrum_dataset):
    """Check the sensitivity table when a background-systematics floor is set."""
    onoff = SpectrumDatasetOnOff.from_spectrum_dataset(
        dataset=spectrum_dataset, acceptance=1, acceptance_off=5)
    estimator = SensitivityEstimator(gamma_min=25, bkg_syst_fraction=0.075)
    result = estimator.run(onoff)

    # Table layout and units.
    assert len(result) == 4
    assert result.colnames == [
        "energy", "e2dnde", "excess", "background", "criterion"
    ]
    assert result["energy"].unit == "TeV"
    assert result["e2dnde"].unit == "erg / (cm2 s)"

    # First bin: limited by the background-systematics criterion.
    first = result[0]
    assert_allclose(first["energy"], 1.33352, rtol=1e-3)
    assert_allclose(first["e2dnde"], 2.74559e-08, rtol=1e-3)
    assert_allclose(first["excess"], 270000, rtol=1e-3)
    assert_allclose(first["background"], 3.6e+06, rtol=1e-3)
    assert first["criterion"] == "bkg"

    # Second bin: limited by the significance criterion.
    second = result[1]
    assert_allclose(second["energy"], 2.37137, rtol=1e-3)
    assert_allclose(second["e2dnde"], 6.04795e-11, rtol=1e-3)
    assert_allclose(second["excess"], 334.454, rtol=1e-3)
    assert_allclose(second["background"], 3600, rtol=1e-3)
    assert second["criterion"] == "significance"

    # Last bin: limited by the minimum-gamma-count criterion.
    last = result[3]
    assert_allclose(last["energy"], 7.49894, rtol=1e-3)
    assert_allclose(last["e2dnde"], 1.42959e-11, rtol=1e-3)
    assert_allclose(last["excess"], 25, rtol=1e-3)
    assert_allclose(last["background"], 3.6, rtol=1e-3)
    assert last["criterion"] == "gamma"
Exemple #5
0
    def run(self, dataset, observation):
        """Run reflected regions background maker.

        Parameters
        ----------
        dataset : `SpectrumDataset`
            Spectrum dataset.
        observation : `DatastoreObservation`
            Data store observation.

        Returns
        -------
        dataset_on_off : `SpectrumDatasetOnOff`
            On off dataset.
        """
        # Off counts and their acceptance come from the reflected regions.
        counts_off, acceptance_off = self.make_counts_off(dataset, observation)

        dataset_on_off = SpectrumDatasetOnOff.from_spectrum_dataset(
            dataset=dataset,
            counts_off=counts_off,
            acceptance=1,
            acceptance_off=acceptance_off,
        )
        return dataset_on_off
Exemple #6
0
def createonoff_from_simulation(mc, random_state='random-seed', debug=False):
    """
    Transform the stored SpectrumDataset list in the MonteCarlo class of
    SoHAPPy into a list of Datasets of type SpectrumDatasetOnOff.

    Parameters
    ----------
    mc : MonteCarlo instantiation
        A SoHAPPy MonteCarlo class instantiation.
    random_state : str or int, optional
        Seed passed to ``fake``. The default 'random-seed' lets gammapy
        draw a fresh seed each call; an integer makes runs reproducible.
        (The previous default 'rendom-seed' was a typo and is not a value
        ``get_random_state`` accepts.)
    debug : bool, optional
        If True, be verbose. The default is False.

    Returns
    -------
    dlist_onoff : Datasets or None
        A list of on-off datasets built from the originals, or None when
        the simulation produced no dataset.
    """
    # Local imports: SoHAPPy lives outside this package tree.
    import sys
    sys.path.append("../SoHAPPy")
    import mcsim_res as mcres
    import mcsim_config as mcf
    from utilities import Log

    if mc.slot is None:
        print(" Simulation was not runable - no dataset available")
        return None

    if debug:
        log = Log(name="out.log", talk=True)
        mcres.result(mc, mc.slot.grb, log=log)
        print(mc.slot)

    dset_list = mc.dset_list

    # Create on-off datasets from the original list, which is a list of
    # lists to account for multiple sites.
    dlist_onoff = Datasets()

    for ds_site in dset_list:
        for ds in ds_site:
            # Remove NaN background (corrected in gammapy 0.19).
            ds.background.data = np.nan_to_num(ds.background.data)

            # Above gammapy 0.17 the dataset apparently needs to be
            # simulated before it can be converted to on-off.
            ds.fake(random_state=random_state)

            ds_onoff = SpectrumDatasetOnOff.from_spectrum_dataset(
                dataset=ds, acceptance=1, acceptance_off=1 / mcf.alpha)

            # Fluctuate the off counts from the predicted background.
            ds_onoff.fake(npred_background=ds_onoff.npred_background())

            if debug:
                print(ds_onoff)

            dlist_onoff.append(ds_onoff)

    return dlist_onoff
Exemple #7
0
def generate_dataset(Eflux,
                     flux,
                     Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points either as
    a SpectrumDataset or a SpectrumDatasetOnOff.

    Note :
    - in SpectrumDataset, the background counts are assumed precisely known
    and are not fluctuated.
    - in SpectrumDatasetOnOff, the background counts (off counts) are
    fluctuated from the IRF known values.

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined. If None,
        the full safe-mask energy range is kept. The default is None.
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00', scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the On-Off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataset.
        The default is True.
    seed : String, optional
        The seed for the random generator; if an integer, will generate the
        same random series at each run. The default is 'random-seed'.
    debug : Boolean
        If True, let's talk a bit. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.
    """
    random_state = get_random_state(seed)

    # Define the on region: a 0.5 deg circle at the observation pointing.
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg,
                           frame="icrs")  # Observing region
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define energy axes (see spectrum analysis notebook).
    # All datasets should share the same axes; linear spacing is clearly
    # problematic for power-law fluxes, hence log-spaced bounds.
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value

    # Reconstructed-energy axis, slightly inside the flux range.
    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")

    # True-energy axis covering the full flux range.
    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if debug:
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF
    irf = load_cta_irfs(irf_file)

    # Spectral model interpolated (in log) through the given flux points.
    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})

    model = SkyModel(spectral_model=spec, name="Spec" + str(name))
    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(
        e_reco=ereco_axis,
        e_true=etrue_axis,
        region=on_region,
        name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model

    # Restrict the safe mask to the requested energy range, if any.
    # (Previously Erange was dereferenced unconditionally, crashing when
    # left at its documented default of None.)
    if Erange is not None:
        mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                             energy_max=Erange[1])
        mask = mask & ds.mask_safe.data
        ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Fake is mandatory ?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed.
    if onoff:
        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 /
                                                        alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if onoff:
            # On-off: fluctuate off counts from the predicted background.
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)

    return ds
                                       region=on_region,
                                       name="obs-0")
# Maker that fills exposure, energy dispersion and background from the IRFs.
maker = SpectrumDatasetMaker(selection=["exposure", "edisp", "background"])

# Set the number of observations we want to create
n_obs = 50
print("Creating the", n_obs, "On/Off simulations")

datasets = Datasets()
for idx in range(n_obs):
    # NOTE(review): dataset_empty, obs and model come from earlier in the
    # script (outside this excerpt).
    dataset = maker.run(dataset_empty, obs)
    # Set the model on the dataset, and fake the counts
    dataset.models = model
    # The loop index doubles as the random seed, so each realization is
    # different yet reproducible.
    dataset.fake(random_state=idx)
    # Set off regions
    dataset_on_off = SpectrumDatasetOnOff.from_spectrum_dataset(
        dataset=dataset, acceptance=1, acceptance_off=3)
    dataset_on_off.fake(random_state=idx,
                        npred_background=dataset.npred_background())
    # Copy under a per-observation name and tag it with its OBS_ID.
    dataset_fake = dataset_on_off.copy(name=f"obs-{idx}")
    dataset_fake.meta_table["OBS_ID"] = [idx]
    datasets.append(dataset_fake)

# Save the data from the simulations
table = datasets.info_table()
table.write('observations.txt', format='ascii')

# Check counts in one selected realization
fig_2 = plt.figure(1)
datasets[0].npred().plot_hist(label='Predicted S+B')
datasets[0].npred_signal().plot_hist(label='Predicted S')
datasets[0].npred_background().plot_hist(label='Predicted B')