Example 1
def test_io(self, tmp_path):
    # Round-trip the energy dispersion through FITS I/O and check that
    # the PDF matrix is unchanged at a few sample indices.
    indices = np.array([[1, 3, 6], [3, 3, 2]])
    desired = self.edisp.pdf_matrix[indices]
    self.edisp.write(tmp_path / "tmp.fits")
    edisp2 = EnergyDispersion.read(tmp_path / "tmp.fits")
    actual = edisp2.pdf_matrix[indices]
    assert_allclose(actual, desired)
Example 2
def test_io(self, tmpdir):
    # Same round-trip check as in Example 1, using pytest's tmpdir fixture
    # and a plain string path instead of a Path object.
    indices = np.array([[1, 3, 6], [3, 3, 2]])
    desired = self.edisp.pdf_matrix[indices]
    writename = str(tmpdir / "rmf_test.fits")
    self.edisp.write(writename)
    edisp2 = EnergyDispersion.read(writename)
    actual = edisp2.pdf_matrix[indices]
    assert_allclose(actual, desired)
Example 3
def plot_rmf():
    import logging
    import matplotlib.pyplot as plt
    from gammapy.irf import EnergyDispersion

    #filename = 'xspec_test_rmf.fits'
    filename = '/Users/deil/code/gammalib/inst/cta/test/caldb/dc1/rmf.fits'
    #filename = '/Users/deil/work/host/howto/ctools_crab/cta-1dc/data/hess/dummy_s0.1.rmf.fits'
    filename = '/Users/deil/work/host/howto/xspec/Crab/run_rmf61261.fits'
    logging.info('Reading {0}'.format(filename))
    edisp = EnergyDispersion.read(filename)

    print(edisp)
    plt.figure(figsize=(5, 5))
    edisp.plot()

    filename = 'xspec_test_rmf.png'
    logging.info('Writing {0}'.format(filename))
    plt.savefig(filename, dpi=200)
Example 5
def test_compute_thresholds_from_crab_data():
    """Obs read from file"""
    arffile = "$GAMMAPY_DATA/joint-crab/spectra/hess/arf_obs23523.fits"
    rmffile = "$GAMMAPY_DATA/joint-crab/spectra/hess/rmf_obs23523.fits"

    aeff = EffectiveAreaTable.read(arffile)
    edisp = EnergyDispersion.read(rmffile)

    thresh_lo, thresh_hi = compute_energy_thresholds(
        aeff=aeff,
        edisp=edisp,
        method_lo="energy_bias",
        method_hi="none",
        bias_percent_lo=10,
        bias_percent_hi=10,
    )

    assert_allclose(thresh_lo.to("TeV").value, 0.9174, rtol=1e-4)
    assert_allclose(thresh_hi.to("TeV").value, 100.0, rtol=1e-4)
Example 6
bkg = TableModel('bkg')
bkg.load(None, bkg_3d.data.value.ravel())
bkg.ampl = 1
bkg.ampl.freeze()

# Set the exposure
exposure_3d = SkyCube.read(cube_dir / 'exposure_cube_etrue.fits')
i_nan = np.where(np.isnan(exposure_3d.data))
exposure_3d.data[i_nan] = 0
exposure_3d.data = exposure_3d.data * 1e4

# Set the mean psf model
psf_3d = SkyCube.read(cube_dir / 'psf_cube_etrue.fits')

# Load the mean rmf calculated for the 4 Crab runs
rmf = EnergyDispersion.read(cube_dir / 'rmf.fits')
# Setup combined spatial and spectral model
spatial_model = NormGauss2DInt('spatial-model')
spectral_model = PowLaw1D('spectral-model')
coord = counts_3d.sky_image_ref.coordinates(mode="edges")
energies = counts_3d.energies(mode='edges').to("TeV")
source_model = CombinedModel3DIntConvolveEdisp(
    coord=coord,
    energies=energies,
    use_psf=True,
    exposure=exposure_3d,
    psf=psf_3d,
    spatial_model=spatial_model,
    spectral_model=spectral_model,
    edisp=rmf.data.data,
)
Example 7
    def from_ogip_files(cls, filename):
        """Read `~gammapy.spectrum.SpectrumDatasetOnOff` from OGIP files.

        BKG file, ARF, and RMF must be set in the PHA header and be present in
        the same folder.

        The naming scheme is fixed to the following:

        * PHA file is named pha_obs{name}.fits
        * BKG file is named bkg_obs{name}.fits
        * ARF file is named arf_obs{name}.fits
        * RMF file is named rmf_obs{name}.fits

        where {name} is the dataset name.

        Parameters
        ----------
        filename : str
            OGIP PHA file to read
        """
        filename = make_path(filename)
        dirname = filename.parent

        with fits.open(filename, memmap=False) as hdulist:
            data = _read_ogip_hdulist(hdulist)

        counts = CountsSpectrum(energy_hi=data["energy_hi"],
                                energy_lo=data["energy_lo"],
                                data=data["data"])

        phafile = filename.name

        try:
            rmffile = phafile.replace("pha", "rmf")
            energy_dispersion = EnergyDispersion.read(dirname / rmffile)
        except OSError:
            # TODO : Add logger and echo warning
            energy_dispersion = None

        try:
            bkgfile = phafile.replace("pha", "bkg")
            with fits.open(dirname / bkgfile, memmap=False) as hdulist:
                data_bkg = _read_ogip_hdulist(hdulist)
                counts_off = CountsSpectrum(
                    energy_hi=data_bkg["energy_hi"],
                    energy_lo=data_bkg["energy_lo"],
                    data=data_bkg["data"],
                )

                acceptance_off = data_bkg["backscal"]
        except OSError:
            # TODO : Add logger and echo warning
            counts_off, acceptance_off = None, None

        arffile = phafile.replace("pha", "arf")
        aeff = EffectiveAreaTable.read(dirname / arffile)

        mask_safe = np.logical_not(data["quality"])

        return cls(
            counts=counts,
            aeff=aeff,
            counts_off=counts_off,
            edisp=energy_dispersion,
            livetime=data["livetime"],
            mask_safe=mask_safe,
            acceptance=data["backscal"],
            acceptance_off=acceptance_off,
            name=str(data["obs_id"]),
            gti=data["gti"],
        )
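For reference, a minimal usage sketch of this reader, assuming a PHA file following the naming scheme above sits next to its BKG/ARF/RMF companions in the same folder (the joint-crab path is only illustrative):

from gammapy.spectrum import SpectrumDatasetOnOff

# read the PHA file; the bkg/arf/rmf files are picked up from the same
# directory purely by the fixed naming scheme
dataset = SpectrumDatasetOnOff.from_ogip_files(
    "$GAMMAPY_DATA/joint-crab/spectra/hess/pha_obs23523.fits"
)
print(dataset)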
Example 8
# Now we read the maps and IRFs and create the dataset for each observation:

# In[ ]:

datasets = []

for obs_id in obs_ids:
    path = Path("analysis_3d_joint") / "obs_{}".format(obs_id)

    # read counts map and IRFs
    counts = Map.read(path / "counts.fits.gz")
    exposure = Map.read(path / "exposure.fits.gz")

    psf = PSFKernel.read(path / "psf.fits.gz")
    edisp = EnergyDispersion.read(path / "edisp.fits.gz")

    # create background model per observation / dataset
    background = Map.read(path / "background.fits.gz")
    background_model = BackgroundModel(background)
    background_model.tilt.frozen = False
    background_model.norm.value = 1.3

    # optionally define a safe energy threshold
    emin = None
    mask_data = counts.geom.energy_mask(emin=emin)
    mask = Map.from_geom(geom=counts.geom, data=mask_data)

    dataset = MapDataset(
        model=model,
        counts=counts,
Example 9
#
# ### Reading maps and IRFs
# As a first step we read back in the maps and IRFs that we saved to disk:

# In[ ]:

# read maps
maps = {
    "counts": Map.read(str(path / "counts.fits")),
    "background": Map.read(str(path / "background.fits")),
    "exposure": Map.read(str(path / "exposure.fits")),
}

# read IRFs
psf_kernel = PSFKernel.read(str(path / "psf.fits"))
edisp = EnergyDispersion.read(str(path / "edisp.fits"))

# ### Fit mask
#
# To select a certain energy range for the fit we can create a fit mask:

# In[ ]:

mask = Map.from_geom(maps["counts"].geom)

coords = mask.geom.get_coord()
mask.data = coords["energy"] > 0.3

# ### Model fit
#
# Now we are ready for the actual likelihood fit. We first define the model as a combination of a point source with a power law:
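The snippet stops before the model definition. As a rough sketch of what it might look like in the gammapy 0.x API used throughout these examples (the source position and spectral parameters below are purely illustrative, not taken from the original notebook):

import astropy.units as u
from gammapy.image.models import SkyPointSource
from gammapy.spectrum.models import PowerLaw
from gammapy.cube.models import SkyModel

# point source at an illustrative position, with a power-law spectrum
spatial_model = SkyPointSource(lon_0=0 * u.deg, lat_0=0 * u.deg)
spectral_model = PowerLaw(
    index=2.5,
    amplitude=1e-11 * u.Unit("cm-2 s-1 TeV-1"),
    reference=1 * u.TeV,
)
model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)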
Example 10
cube = counts_3D.to_sherpa_data3d(dstype='Data3DInt')
#apply the cube_mask
cube.mask = cube_mask.data.value.ravel()

bkg_3D = SkyCube.read(outdir_data + "/bkg_cube.fits").cutout(
    center, extraction_size)
exposure_3D = SkyCube.read(outdir_data + "/exposure_cube.fits").cutout(
    center, extraction_size)
i_nan = np.where(np.isnan(exposure_3D.data))
exposure_3D.data[i_nan] = 0
exposure_3D.data = exposure_3D.data * 1e4
psf_SgrA = SkyCube.read(outdir_data + "/mean_psf_cube_GC.fits",
                        format="fermi-counts").cutout(center, extraction_size)
psf_G0p9 = SkyCube.read(outdir_data + "/mean_psf_cube_G0.9.fits",
                        format="fermi-counts").cutout(center, extraction_size)
rmf = EnergyDispersion.read(outdir_data + "/mean_rmf.fits")

# Setup combined spatial and spectral model
spatial_model_SgrA = NormGauss2DInt('spatial-model_SgrA')
spectral_model_SgrA = PowLaw1D('spectral-model_SgrA')
#spectral_model_SgrA = MyPLExpCutoff('spectral-model_SgrA')
source_model_SgrA = CombinedModel3DIntConvolveEdisp(
    coord=coord,
    energies=energies,
    use_psf=True,
    exposure=exposure_3D,
    psf=psf_SgrA,
    spatial_model=spatial_model_SgrA,
    spectral_model=spectral_model_SgrA,
    edisp=rmf.data.data,
    select_region=True,
Example 11
def test_get_bias_energy():
    """Obs read from file"""
    rmffile = "$GAMMAPY_DATA/joint-crab/spectra/hess/rmf_obs23523.fits"
    edisp = EnergyDispersion.read(rmffile)
    thresh_lo = edisp.get_bias_energy(0.1)
    assert_allclose(thresh_lo.to("TeV").value, 0.9174, rtol=1e-4)