def test_region_nd_io_gadf_no_region(tmpdir):
    """Round-trip a point-like map (``region=None``) through GADF format."""
    axis = MapAxis.from_edges([1, 3, 10] * u.TeV, name="energy")
    written = RegionNDMap.create(region=None, axes=[axis])

    path = tmpdir / "test.fits"
    written.write(path, format="gadf", hdu="TEST")
    loaded = RegionNDMap.read(path, format="gadf", hdu="TEST")

    # A point-like map must come back with no region attached
    assert loaded.geom.region is None
    assert loaded.geom.axes[0].name == "energy"
    assert loaded.data.shape == (2, 1, 1)
    assert_allclose(loaded.geom.axes["energy"].edges, [1, 3, 10] * u.TeV)
def test_region_nd_io_ogip_arf(tmpdir):
    """Round-trip through OGIP ARF format and check format validation."""
    axis = MapAxis.from_energy_bounds(0.1, 10, 12, unit="TeV", name="energy_true")
    written = RegionNDMap.create("icrs;circle(83.63, 22.01, 0.5)", axes=[axis])

    path = tmpdir / "test.fits"
    written.write(path, format="ogip-arf")
    loaded = RegionNDMap.read(path, format="ogip-arf")

    # ARF serialization does not preserve the region definition
    assert loaded.geom.region is None

    # A true-energy axis is invalid for plain OGIP output
    with pytest.raises(ValueError):
        written.write(path, format="ogip")
def test_region_nd_io_gadf(tmpdir):
    """Round-trip a circular-region map through GADF format."""
    axis = MapAxis.from_edges([1, 3, 10] * u.TeV, name="energy")
    written = RegionNDMap.create("icrs;circle(83.63, 22.01, 0.5)", axes=[axis])

    path = tmpdir / "test.fits"
    written.write(path, format="gadf")
    loaded = RegionNDMap.read(path, format="gadf")

    assert isinstance(loaded.geom.region, CircleSkyRegion)
    assert loaded.geom.axes[0].name == "energy"
    assert loaded.data.shape == (2, 1, 1)
    assert_allclose(loaded.geom.axes["energy"].edges, [1, 3, 10] * u.TeV)
def test_region_nd_io_ogip(tmpdir):
    """Round-trip through OGIP format, preserving the WCS bin size."""
    axis = MapAxis.from_energy_bounds(0.1, 10, 12, unit="TeV")
    written = RegionNDMap.create(
        "icrs;circle(83.63, 22.01, 0.5)", axes=[axis], binsz_wcs="0.01deg"
    )

    path = tmpdir / "test.fits"
    written.write(path, format="ogip")
    loaded = RegionNDMap.read(path, format="ogip")

    assert isinstance(loaded.geom.region, CircleSkyRegion)

    # The stored binsz_wcs must reproduce the same WCS pixelization
    wcs_geom = loaded.geom.to_wcs_geom()
    assert wcs_geom.data_shape == (12, 102, 102)

    # A reconstructed-energy axis is invalid for ARF output
    with pytest.raises(ValueError):
        written.write(path, format="ogip-arf")
def test_label_axis_io(tmpdir):
    """A LabelMapAxis must survive a GADF write/read round trip."""
    energy = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=5)
    labels = LabelMapAxis(labels=["dataset-1", "dataset-2"], name="dataset")

    written = RegionNDMap.create(region=None, axes=[energy, labels])
    written.data = np.arange(written.data.size)

    path = tmpdir / "test.fits"
    written.write(path, format="gadf")
    loaded = RegionNDMap.read(path, format="gadf")

    assert written.geom.axes["dataset"] == loaded.geom.axes["dataset"]
    assert written.geom.axes["energy"] == loaded.geom.axes["energy"]
def test_region_nd_io_gadf_rad_axis(tmpdir):
    """GADF round trip with two extra axes must keep data ordering and unit."""
    energy = MapAxis.from_edges([1, 3, 10] * u.TeV, name="energy")
    rad = MapAxis.from_nodes([0, 0.1, 0.2] * u.deg, name="rad")

    written = RegionNDMap.create(
        "icrs;circle(83.63, 22.01, 0.5)", axes=[energy, rad], unit="sr-1"
    )
    # Fill with a running index so any axis reshuffle would be detected
    written.data = np.arange(written.data.size).reshape(written.data.shape)

    path = tmpdir / "test.fits"
    written.write(path, format="gadf")
    loaded = RegionNDMap.read(path, format="gadf")

    assert isinstance(loaded.geom.region, CircleSkyRegion)
    assert loaded.geom.axes.names == ["energy", "rad"]
    assert loaded.unit == "sr-1"

    # check that the data is not re-shuffled
    assert_allclose(loaded.data, written.data)
    assert loaded.data.shape == (3, 2, 1, 1)
def read_arf(filename, livetime):
    """Read an OGIP ARF file and scale effective area by livetime.

    Parameters
    ----------
    filename : str or `Path`
        ARF file name.
    livetime : `Quantity`
        Observation livetime.

    Returns
    -------
    exposure : `RegionNDMap`
        Exposure map (effective area times livetime); the livetime is
        also recorded under the ``"livetime"`` meta key.
    """
    effective_area = RegionNDMap.read(filename, format="ogip-arf")
    exposure = effective_area * livetime
    exposure.meta["livetime"] = livetime
    return exposure
def from_ogip_files(cls, filename):
    """Read `~gammapy.spectrum.SpectrumDatasetOnOff` from OGIP files.

    BKG file, ARF, and RMF must be set in the PHA header and be present in
    the same folder. The naming scheme is fixed to the following scheme:

    * PHA file is named ``pha_obs{name}.fits``
    * BKG file is named ``bkg_obs{name}.fits``
    * ARF file is named ``arf_obs{name}.fits``
    * RMF file is named ``rmf_obs{name}.fits``

    with ``{name}`` the dataset name.

    Parameters
    ----------
    filename : str
        OGIP PHA file to read
    """
    filename = make_path(filename)
    dirname = filename.parent

    with fits.open(str(filename), memmap=False) as hdulist:
        counts = RegionNDMap.from_hdulist(hdulist, format="ogip")
        acceptance = RegionNDMap.from_hdulist(
            hdulist, format="ogip", ogip_column="BACKSCAL"
        )

        if "GTI" in hdulist:
            gti = GTI(Table.read(hdulist["GTI"]))
        else:
            gti = None

        mask_safe = RegionNDMap.from_hdulist(
            hdulist, format="ogip", ogip_column="QUALITY"
        )
        # QUALITY marks bad bins; invert so mask_safe is True for usable bins
        mask_safe.data = np.logical_not(mask_safe.data)

    phafile = filename.name

    try:
        rmffile = phafile.replace("pha", "rmf")
        kernel = EDispKernel.read(dirname / rmffile)
        edisp = EDispKernelMap.from_edisp_kernel(kernel, geom=counts.geom)
    except OSError:
        # TODO : Add logger and echo warning
        edisp = None

    try:
        bkgfile = phafile.replace("pha", "bkg")
        # Use a distinct name to avoid shadowing the PHA hdulist above
        with fits.open(str(dirname / bkgfile), memmap=False) as bkg_hdulist:
            counts_off = RegionNDMap.from_hdulist(bkg_hdulist, format="ogip")
            # Fix: pass format="ogip" explicitly, consistent with every other
            # OGIP table read in this method (previously relied on the default)
            acceptance_off = RegionNDMap.from_hdulist(
                bkg_hdulist, format="ogip", ogip_column="BACKSCAL"
            )
    except OSError:
        # TODO : Add logger and echo warning
        counts_off, acceptance_off = None, None

    arffile = phafile.replace("pha", "arf")
    aeff = RegionNDMap.read(dirname / arffile, format="ogip-arf")

    return cls(
        counts=counts,
        aeff=aeff,
        counts_off=counts_off,
        edisp=edisp,
        livetime=counts.meta["EXPOSURE"] * u.s,
        mask_safe=mask_safe,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name=str(counts.meta["OBS_ID"]),
        gti=gti,
    )