def data_prep():
    """Reduce N_OBS copies of a Crab observation to 1D on/off spectra.

    Repeats a single HESS DL3 DR1 observation (OBS_ID 23523) ``N_OBS``
    times, extracts an on-region spectrum with reflected-region
    background for each, applies a safe-energy mask, and attaches a
    fresh power-law model named ``"crab"`` to every dataset.

    Returns
    -------
    list
        One ``SpectrumDatasetOnOff`` per observation, each with its own
        independent power-law model assigned.

    Notes
    -----
    ``N_OBS`` is assumed to be a module-level constant — TODO confirm
    it is defined before this function runs.
    """
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    # Repeat the same observation ID N_OBS times to emulate a
    # multi-observation input for the light-curve analysis.
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    # Crab nebula position (ICRS, degrees).
    target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg")

    # Reconstructed / true energy binning for the spectrum extraction.
    e_reco = MapAxis.from_bounds(0.1, 40, nbin=40, interp="log",
                                 unit="TeV").edges
    e_true = MapAxis.from_bounds(0.05, 100, nbin=200, interp="log",
                                 unit="TeV").edges

    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position,
                                radius=on_region_radius)

    dataset_maker = SpectrumDatasetMaker(containment_correction=True,
                                         selection=["counts", "aeff", "edisp"])

    # Template dataset copied once per observation below.
    empty = SpectrumDatasetOnOff.create(region=on_region,
                                        e_reco=e_reco,
                                        e_true=e_true)

    bkg_maker = ReflectedRegionsBackgroundMaker()
    # Restrict to energies where the effective area is >= 10% of max.
    safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)

    spectral_model = PowerLawSpectralModel(index=2.6,
                                           amplitude=2.0e-11 *
                                           u.Unit("1 / (cm2 s TeV)"),
                                           reference=1 * u.TeV)
    spectral_model.index.frozen = False

    datasets_1d = []

    # Single pass: data reduction, background estimation, safe mask,
    # and model assignment.  (The original built an unused model before
    # the loop and assigned models in a separate second loop.)
    for observation in observations:
        dataset = dataset_maker.run(dataset=empty.copy(),
                                    observation=observation)
        dataset_on_off = bkg_maker.run(dataset, observation)
        dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)

        # Each dataset gets its OWN model copy so per-dataset fits do
        # not share parameters.
        model = spectral_model.copy()
        model.name = "crab"
        dataset_on_off.model = model

        datasets_1d.append(dataset_on_off)

    return datasets_1d
# Example no. 2 (score: 0)
# In[ ]:

# Power-law hypothesis for the joint spectral fit.
model = PowerLawSpectralModel(index=2,
                              amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"),
                              reference=1 * u.TeV)

# NOTE(review): `extraction` is defined in an earlier cell not shown
# here — presumably a spectrum-extraction object; verify upstream.
datasets_joint = extraction.spectrum_observations

# All datasets share this ONE model instance, which is what makes the
# fit below a joint fit across observations.
for dataset in datasets_joint:
    dataset.model = model

fit_joint = Fit(datasets_joint)
result_joint = fit_joint.run()

# we make a copy here to compare it later; the copy decouples the
# best-fit snapshot from the shared (still mutable) `model` instance
model_best_joint = model.copy()
model_best_joint.parameters.covariance = result_joint.parameters.covariance

# In[ ]:

print(result_joint)

# In[ ]:

# Plot counts + model and residuals for the first dataset only.
plt.figure(figsize=(8, 6))
ax_spectrum, ax_residual = datasets_joint[0].plot_fit()
ax_spectrum.set_ylim(0, 25)

# ## Compute Flux Points
#
# To round up our analysis we can compute flux points by fitting the norm of the global model in energy bands. We'll use a fixed energy binning for now:
# Example no. 3 (score: 0)
# we need to set the times manually for now: each dataset's counts
# metadata carries its time-bin start/stop so the light-curve
# estimator can associate datasets with time intervals.
# NOTE(review): `datasets_1d` and `time_intervals` come from earlier
# cells not shown here; assumed to be equal-length and aligned.
for dataset, time_interval in zip(datasets_1d, time_intervals):
    dataset.counts.meta = dict()
    dataset.counts.meta["t_start"] = time_interval[0]
    dataset.counts.meta["t_stop"] = time_interval[1]

# ## Light Curve estimation for 1D spectra
#
# Now that we've reduced the 1D data we assign again the model to the datasets

# In[ ]:

for dataset in datasets_1d:
    # Copy the source model so each dataset is fit independently
    # (no parameter sharing between time bins).
    model = spectral_model.copy()
    model.name = "crab"
    dataset.model = model

# We can now call the LightCurveEstimator in a perfectly identical manner.

# In[ ]:

# `source="crab"` must match the model name assigned above.
lc_maker_1d = LightCurveEstimator(datasets_1d, source="crab", reoptimize=False)

# In[ ]:

# IPython %%time cell: times the light-curve computation in the
# 1-10 TeV band (only valid inside an IPython/Jupyter session).
get_ipython().run_cell_magic(
    'time', '',
    'lc_1d = lc_maker_1d.run(e_ref=1 * u.TeV, e_min=1.0 * u.TeV, e_max=10.0 * u.TeV)'
)