예제 #1
0
def models(backgrounds):
    """Build a test model list: three sources plus the given backgrounds.

    Parameters
    ----------
    backgrounds : list
        Background models appended after the three source models.

    Returns
    -------
    Models
        ``source-1`` (Gaussian), ``source-2`` (copy restricted to
        ``dataset-1``) and ``source-3`` (frozen point source tied to
        ``dataset-2``), followed by ``backgrounds``.
    """
    gauss = GaussianSpatialModel(
        lon_0="3 deg", lat_0="4 deg", sigma="3 deg", frame="galactic"
    )
    pwl = PowerLawSpectralModel(
        index=2, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )

    source_1 = SkyModel(
        spatial_model=gauss, spectral_model=pwl, name="source-1"
    )

    # Same model, but only evaluated on one dataset.
    source_2 = source_1.copy(name="source-2")
    source_2.datasets_names = ["dataset-1"]

    # Point-like variant with every parameter frozen, tied to another dataset.
    source_3 = source_1.copy(name="source-3")
    source_3.datasets_names = "dataset-2"
    source_3.spatial_model = PointSpatialModel(frame="galactic")
    source_3.parameters.freeze_all()

    return Models([source_1, source_2, source_3] + backgrounds)
예제 #2
0
def test_with_skymodel(phase_curve, light_curve):
    """Temporal models must survive SkyModel copying and serialization."""
    # Without a temporal model the serialized dict omits the "temporal" key.
    model = SkyModel(spectral_model=PowerLawSpectralModel())
    assert "temporal" not in model.to_dict()

    # A phase curve survives copy(), parameters included.
    model = SkyModel(
        spectral_model=PowerLawSpectralModel(), temporal_model=phase_curve
    )
    copied = model.copy()
    assert copied.temporal_model.tag == "PhaseCurveTemplateTemporalModel"
    expected_names = ["time_0", "phase_0", "f0", "f1", "f2"]
    assert copied.temporal_model.parameters.names == expected_names

    # A light curve is tagged correctly and does get serialized.
    model = SkyModel(
        spectral_model=PowerLawSpectralModel(), temporal_model=light_curve
    )
    assert model.temporal_model.tag == "LightCurveTemplateTemporalModel"
    assert "temporal" in model.to_dict()
예제 #3
0
)

# In[ ]:

# You can see there are 3 datasets now
print(analysis_joint.datasets)

# In[ ]:

# You can access each one by its name, eg:
print(analysis_joint.datasets["obs_110380"])

# In[ ]:

# Add the model on each of the datasets
# NOTE: all datasets share the SAME model object, so the fit below is a
# joint fit — one set of source parameters constrained by every dataset.
model_joint = model.copy()
for dataset in analysis_joint.datasets:
    dataset.models = model_joint
    # Start the background normalization slightly above 1 as an initial guess.
    dataset.background_model.norm.value = 1.1

# In[ ]:

# Time the joint fit; this defines ``fit_joint`` and ``result_joint``
# in the notebook namespace.
get_ipython().run_cell_magic(
    'time', '',
    'fit_joint = Fit(analysis_joint.datasets)\nresult_joint = fit_joint.run()')

# In[ ]:

# NOTE(review): ``result`` is not defined in this chunk — presumably it comes
# from an earlier cell, but this likely should be ``print(result_joint)``
# to show the fit that was just run. Confirm against the full notebook.
print(result)

# In[ ]:
def data_prep():
    """Prepare one stacked ``MapDataset`` per observation time interval.

    Loads ``N_OBS`` repetitions of observation ``OBS_ID`` from the HESS DL3
    DR1 store (``N_OBS`` is a module-level constant, not defined in this
    block), splits them into per-observation time intervals, and for each
    non-empty interval builds a stacked map dataset with the energy
    dispersion and PSF reduced at the target position. A point-source
    power-law sky model named "crab" (position frozen) is attached to
    every dataset.

    Returns
    -------
    datasets : list
        One stacked dataset per non-empty time interval.
    """
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    time_intervals = [(obs.tstart, obs.tstop) for obs in observations]
    # Crab nebula position (ICRS, degrees).
    target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg")

    emin, emax = [0.7, 10] * u.TeV
    energy_axis = MapAxis.from_bounds(emin.value,
                                      emax.value,
                                      10,
                                      unit="TeV",
                                      name="energy",
                                      interp="log")
    geom = WcsGeom.create(
        skydir=target_position,
        binsz=0.02,
        width=(2, 2),
        coordsys="CEL",
        proj="CAR",
        axes=[energy_axis],
    )

    # True-energy axis is wider than the reco axis to absorb energy migration.
    energy_axis_true = MapAxis.from_bounds(0.1,
                                           20,
                                           20,
                                           unit="TeV",
                                           name="energy",
                                           interp="log")
    offset_max = 2 * u.deg

    datasets = []

    maker = MapDatasetMaker(offset_max=offset_max)
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"],
                                    offset_max=offset_max)

    for time_interval in time_intervals:
        # BUG FIX: the original rebound ``observations`` here
        # (``observations = observations.select_time(...)``), so every
        # interval after the first was selected from the already-filtered
        # list and came back empty. Select into a separate local instead.
        obs_in_interval = observations.select_time(time_interval)

        # Proceed with further analysis only if there are observations
        # in the selected time window
        if len(obs_in_interval) == 0:
            log.warning(f"No observations in time interval: {time_interval}")
            continue

        stacked = MapDataset.create(geom=geom,
                                    energy_axis_true=energy_axis_true)

        for obs in obs_in_interval:
            dataset = maker.run(stacked, obs)
            dataset = safe_mask_maker.run(dataset, obs)
            stacked.stack(dataset)

        # Reduce the stacked IRFs to the target position so model
        # evaluation during the fit is cheap.
        stacked.edisp = stacked.edisp.get_energy_dispersion(
            position=target_position, e_reco=energy_axis.edges)

        stacked.psf = stacked.psf.get_psf_kernel(position=target_position,
                                                 geom=stacked.exposure.geom,
                                                 max_radius="0.3 deg")

        datasets.append(stacked)

    spatial_model = PointSpatialModel(lon_0=target_position.ra,
                                      lat_0=target_position.dec,
                                      frame="icrs")
    # Only the spectrum is fitted: the source position is known and frozen.
    spatial_model.lon_0.frozen = True
    spatial_model.lat_0.frozen = True

    spectral_model = PowerLawSpectralModel(index=2.6,
                                           amplitude=2.0e-11 *
                                           u.Unit("1 / (cm2 s TeV)"),
                                           reference=1 * u.TeV)
    spectral_model.index.frozen = False

    sky_model = SkyModel(spatial_model=spatial_model,
                         spectral_model=spectral_model,
                         name="")

    for dataset in datasets:
        # Each dataset gets its own copy so parameters stay independent.
        # NOTE(review): sibling code in this file assigns ``dataset.models``;
        # confirm which attribute the targeted Gammapy version expects.
        model = sky_model.copy(name="crab")
        dataset.model = model

    return datasets
예제 #5
0
# In[ ]:

# Time the dataset-building loop; this defines ``datasets`` in the notebook
# namespace — one MapDataset per non-empty time interval, with the interval
# bounds recorded in the counts-map metadata.
get_ipython().run_cell_magic(
    'time', '',
    '\ndatasets = []\n\nfor time_interval in time_intervals:\n    # get filtered observation lists in time interval\n    obs = crab_obs.select_time(time_interval)\n    # Proceed with further analysis only if there are observations\n    # in the selected time window\n    if len(obs) == 0:\n        log.warning(\n            "No observations found in time interval:"\n            "{t_min} - {t_max}".format(\n                t_min=time_interval[0], t_max=time_interval[1]\n            )\n        )\n        continue\n    dataset = make_map_dataset(obs, target_position, geom, geom_true)\n    dataset.counts.meta["t_start"] = time_interval[0]\n    dataset.counts.meta["t_stop"] = time_interval[1]\n    datasets.append(dataset)'
)

# ## Light Curve estimation: the 3D case
#
# Now that we have created the datasets we assign them the model to be fitted:

# In[ ]:

for dataset in datasets:
    # Copy the source model
    # Each dataset gets its own copy named "crab" so the per-interval fits
    # are independent of each other.
    # NOTE(review): other cells in this file assign ``dataset.models``;
    # confirm which attribute the targeted Gammapy version expects.
    model = sky_model.copy(name="crab")
    dataset.model = model

# We can now create the light curve estimator by passing it the list of datasets.
# We can optionally ask for parameters reoptimization during fit, e.g. to fit background normalization in each time bin.

# In[ ]:

lc_maker = LightCurveEstimator(datasets, source="crab", reoptimize=True)

# We now run the estimator once we pass it the energy interval on which to compute the integral flux of the source.

# In[ ]:
get_ipython().run_cell_magic(
    'time', '',
# In[ ]:


spectral_model = PowerLawSpectralModel(
    index=2, amplitude=2e-11 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV
)
model = SkyModel(spectral_model=spectral_model)

# All datasets share the SAME model object, so this is a joint fit:
# one set of spectral parameters constrained by every dataset.
for dataset in datasets:
    dataset.models = model

fit_joint = Fit(datasets)
result_joint = fit_joint.run()

# we make a copy here to compare it later
model_best_joint = model.copy()
# Attach the fitted covariance so the copy carries parameter errors too.
model_best_joint.spectral_model.parameters.covariance = (
    result_joint.parameters.covariance
)


# In[ ]:


print(result_joint)


# In[ ]:


plt.figure(figsize=(8, 6))