def test_hpxmap_read_write(tmp_path, nside, nested, frame, region, axes):
    """Round-trip a HEALPix map through FITS and check the data survives."""
    path = tmp_path / "tmp.fits"
    hpx_map = create_map(nside, nested, frame, region, axes)

    # Sparse serialization.
    hpx_map.write(path, sparse=True, overwrite=True)
    read_nd = HpxNDMap.read(path)
    read_generic = Map.read(path, map_type="hpx")
    msk = np.ones_like(read_nd.data[...], dtype=bool)
    assert_allclose(hpx_map.data[...][msk], read_nd.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_generic.data[...][msk])

    # Default (non-sparse) serialization, read back via all three entry points.
    hpx_map.write(path, overwrite=True)
    read_nd = HpxNDMap.read(path)
    read_hpx = HpxMap.read(path, map_type="hpx")
    read_generic = Map.read(path, map_type="hpx")
    assert_allclose(hpx_map.data[...][msk], read_nd.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_hpx.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_generic.data[...][msk])

    # Specify alternate HDU name for IMAGE and BANDS table
    hpx_map.write(path, sparse=True, hdu="IMAGE", hdu_bands="TEST", overwrite=True)
    read_nd = HpxNDMap.read(path)
    read_default = Map.read(path)
    read_generic = Map.read(path, map_type="hpx")
def test_hpxmap_read_write(tmp_path, nside, nested, coordsys, region, axes, sparse):
    """Round-trip a Poisson-filled HEALPix map through FITS serialization."""
    path = tmp_path / "tmp.fits"
    hpx_map = create_map(nside, nested, coordsys, region, axes, sparse)
    fill_poisson(hpx_map, mu=0.5, random_state=0)

    hpx_map.write(path, sparse=sparse, overwrite=True)
    read_nd = HpxNDMap.read(path)
    read_sparse = HpxSparseMap.read(path)
    read_generic = Map.read(path, map_type="hpx")

    # Sparse maps only store finite pixels, so compare where data is defined.
    if sparse:
        msk = np.isfinite(read_nd.data[...])
    else:
        msk = np.ones_like(read_nd.data[...], dtype=bool)
    assert_allclose(hpx_map.data[...][msk], read_nd.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_sparse.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_generic.data[...][msk])

    hpx_map.write(path, sparse=True, overwrite=True)
    read_nd = HpxNDMap.read(path)
    read_hpx = HpxMap.read(path, map_type="hpx")
    read_generic = Map.read(path, map_type="hpx")
    assert_allclose(hpx_map.data[...][msk], read_nd.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_hpx.data[...][msk])
    assert_allclose(hpx_map.data[...][msk], read_generic.data[...][msk])

    # Specify alternate HDU name for IMAGE and BANDS table
    hpx_map.write(path, hdu="IMAGE", hdu_bands="TEST", overwrite=True)
    read_nd = HpxNDMap.read(path)
    read_default = Map.read(path)
    read_generic = Map.read(path, map_type="hpx")
def test_hpxmap_ud_grade(nside, nested, coordsys, region, axes):
    """Smoke test: up/down-grading an empty HEALPix map runs without error."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    HpxNDMap(geom).to_ud_graded(4)
def test_hpxmap_crop(nside, nested, frame, region, axes):
    """Smoke test: cropping a HEALPix map by one pixel runs without error."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    HpxNDMap(geom).crop(1)
def test_hpxmap_crop(nside, nested, coordsys, region, axes):
    """Smoke test: cropping a HEALPix map by one pixel runs without error."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    HpxNDMap(geom).crop(1)
def test_hpxmap_ud_grade(nside, nested, frame, region, axes):
    """Smoke test: up/down-grading an empty HEALPix map runs without error."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    HpxNDMap(geom).to_ud_graded(4)
def test_hpxmap_swap_scheme(nside, nested, coordsys, region, axes):
    """NESTED<->RING conversion must preserve the value at every coordinate."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    original = HpxNDMap(geom)
    fill_poisson(original, mu=1.0, random_state=0)
    swapped = original.to_swapped()
    coords = original.geom.get_coord(flat=True)
    assert_allclose(original.get_by_coord(coords), swapped.get_by_coord(coords))
def test_hpxmap_swap_scheme(nside, nested, frame, region, axes):
    """NESTED<->RING conversion must preserve the value at every coordinate."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    original = HpxNDMap(geom)
    # Fill with a unique, deterministic value per pixel.
    original.data = np.arange(original.data.size).reshape(original.geom.data_shape)
    swapped = original.to_swapped()
    coords = original.geom.get_coord(flat=True)
    assert_allclose(original.get_by_coord(coords), swapped.get_by_coord(coords))
def test_hpx_nd_map_to_nside():
    """Resampling a constant map: downgrading sums pixels, upgrading splits them."""
    axis = MapAxis.from_edges([1, 2, 3], name="test-1")
    m = HpxNDMap(HpxGeom.create(nside=64, axes=[axis]), unit="m2")
    m.data += 1
    # Halving nside merges 4 child pixels into each parent.
    downgraded = m.to_nside(nside=32)
    assert_allclose(downgraded.data, 4)
    # Doubling nside splits each pixel into 4 children.
    upgraded = m.to_nside(nside=128)
    assert_allclose(upgraded.data, 0.25)
def test_hpxmap_downsample(nside, nested, coordsys, region, axes):
    """Downsampling with preserve_counts=True keeps the total and the unit."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    m = HpxNDMap(geom, unit="m2")
    m.set_by_pix(m.geom.get_idx(flat=True), 1.0)
    downsampled = m.downsample(2, preserve_counts=True)
    assert_allclose(np.nansum(m.data), np.nansum(downsampled.data))
    assert m.unit == downsampled.unit
def test_hpxmap_sum_over_axes(nside, nested, coordsys, region, axes):
    """Summing over non-spatial axes preserves the total for regular geometries."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    m = HpxNDMap(geom)
    coords = m.geom.get_coord(flat=True)
    # Fill each pixel with its own longitude value.
    m.fill_by_coord(coords, coords[0])
    summed = m.sum_over_axes()
    if m.geom.is_regular:
        assert_allclose(np.nansum(m.data), np.nansum(summed.data))
def test_hpx_nd_map_to_wcs_tiles():
    """Converting a constant all-sky HEALPix map to WCS tiles keeps the value."""
    image = HpxNDMap.create(nside=8, frame="galactic")
    image.data += 1
    tiles = image.to_wcs_tiles(nside_tiles=4)
    assert_allclose(tiles[0].data, 1)
    assert_allclose(tiles[32].data, 1)

    # Same check with an extra energy axis.
    energy = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)
    cube = HpxNDMap.create(nside=8, frame="galactic", axes=[energy])
    cube.data += 1
    tiles = cube.to_wcs_tiles(nside_tiles=4)
    assert_allclose(tiles[0].data, 1)
    assert_allclose(tiles[32].data, 1)
def test_hpx_map_weights_stack():
    """Stacking with reciprocal weights should leave one count per pixel."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)
    partial = HpxNDMap.create(
        nside=32, frame="galactic", axes=[axis], region="DISK(110.,75.,10.)"
    )
    partial.data += np.arange(90) + 1
    # Weights are the element-wise inverse of the data, so the product is 1.
    weights = partial.copy()
    weights.data = 1 / (np.arange(90) + 1)
    allsky = HpxNDMap.create(nside=32, frame="galactic", axes=[axis])
    allsky.stack(partial, weights=weights)
    assert_allclose(allsky.data.sum(), 90)
def test_hpxmap_init(nside, nested, coordsys, region, axes):
    """HpxNDMap allocates the right data shape and stores data passed at init."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    # Expected shape: non-spatial axes (reversed) first, HEALPix pixel axis last.
    expected_shape = [ax.nbin for ax in reversed(axes)] if axes else []
    expected_shape.append(int(np.max(geom.npix)))
    data = np.random.uniform(0, 1, expected_shape)
    empty = HpxNDMap(geom)
    assert empty.data.shape == data.shape
    filled = HpxNDMap(geom, data)
    assert_allclose(filled.data, data)
def test_hpxmap_interp_by_coord_quantities():
    """interp_by_coord accepts Quantity energies on a log-interpolated axis."""
    energy_axis = MapAxis(np.logspace(0.0, 3.0, 3), interp="log", name="energy", unit="TeV")
    m = HpxNDMap(geom=HpxGeom(nside=1, axes=[energy_axis]))
    coords_dict = {"lon": 99, "lat": 42, "energy": 1000 * u.GeV}
    coords = m.geom.get_coord(flat=True)
    # Each pixel stores its own latitude, so interpolation should return lat.
    m.set_by_coord(coords, coords["lat"])
    # Same physical energy, expressed in TeV instead of GeV.
    coords_dict["energy"] = 1 * u.TeV
    value = m.interp_by_coord(coords_dict)
    assert_allclose(value, 42, rtol=1e-2)
def extract_spectra_fermi(target_position, on_radius):
    """Extract 1d spectra for Fermi-LAT"""
    log.info("Extracting 1d spectra for Fermi-LAT")
    # Event list, exposure cube and PSF read from the repository data tree.
    events = EventList.read("data/fermi/events.fits.gz")
    exposure = HpxNDMap.read("data/fermi/exposure_cube.fits.gz")
    psf = EnergyDependentTablePSF.read("data/fermi/psf.fits.gz")

    # Log-spaced energy grid from 30 GeV to 2 TeV in steps of 0.1 dex.
    emin, emax, dex = 0.03, 2, 0.1
    num = int(np.log10(emax / emin) / dex)
    energy = np.logspace(start=np.log10(emin), stop=np.log10(emax), num=num) * u.TeV

    # NOTE(review): background estimated with a ring method around the target —
    # semantics of fermi_ring_background_extract not visible here; confirm.
    bkg_estimate = fermi_ring_background_extract(events, target_position, on_radius)

    extract = SpectrumExtractionFermi1D(
        events=events,
        exposure=exposure,
        psf=psf,
        bkg_estimate=bkg_estimate,
        target_position=target_position,
        on_radius=on_radius,
        energy=energy,
        containment_correction=True,
    )
    obs = extract.run()

    # Write spectra (Sherpa-compatible format) into the results tree.
    path = f"{config.repo_path}/results/spectra/fermi"
    log.info(f"Writing to {path}")
    obs.write(path, use_sherpa=True, overwrite=True)
def create_map(nside, nested, coordsys, region, axes):
    """Build an empty HpxNDMap on the requested HEALPix geometry."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    return HpxNDMap(geom)
def create_map(nside, nested, frame, region, axes):
    """Build an empty HpxNDMap on the requested HEALPix geometry."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    return HpxNDMap(geom)
def test_partial_hpx_map_stack():
    """Stacking two overlapping partial-sky maps: regression on pixel sums."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)
    disk_a = HpxNDMap.create(
        nside=128, frame="galactic", axes=[axis], region="DISK(110.,75.,20.)"
    )
    disk_a.data += 1
    disk_b = HpxNDMap.create(
        nside=128, frame="galactic", axes=[axis], region="DISK(130.,75.,20.)"
    )
    disk_b.stack(disk_a)
    assert_allclose(disk_a.data.sum(), 5933)
    assert_allclose(disk_b.data.sum(), 4968)
def extract_spectra_fermi(target_position, on_radius):
    """Extract 1d spectra for Fermi-LAT"""
    log.info("Extracting 1d spectra for Fermi-LAT")
    # Event list, exposure cube and PSF read from the repository data tree.
    events = EventList.read("data/fermi/events.fits.gz")
    exposure = HpxNDMap.read("data/fermi/exposure_cube.fits.gz")
    psf = EnergyDependentTablePSF.read("data/fermi/psf.fits.gz")

    # Restrict the shared energy binning to the Fermi-LAT range 30 GeV - 2 TeV.
    valid_range = (config.energy_bins >= 30 * u.GeV) * (config.energy_bins <= 2 * u.TeV)
    energy = config.energy_bins[valid_range]

    # Ring background: OFF counts taken from a 1-2 deg annulus around the target.
    bkg_estimate = ring_background_estimate(
        pos=target_position,
        on_radius=on_radius,
        inner_radius=1 * u.deg,
        outer_radius=2 * u.deg,
        events=events,
    )

    extract = SpectrumExtractionFermi1D(
        events=events,
        exposure=exposure,
        psf=psf,
        bkg_estimate=bkg_estimate,
        target_position=target_position,
        on_radius=on_radius,
        energy=energy,
    )
    obs = extract.run()

    # Write spectra (Sherpa-compatible format) into the results tree.
    path = "results/spectra/fermi"
    log.info(f"Writing to {path}")
    obs.write(path, use_sherpa=True, overwrite=True)
def test_hpxmap_pad(nside, nested, coordsys, region, axes):
    """Padding fills new pixels with cval and leaves original pixels intact."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    m = HpxNDMap(geom)
    m.set_by_pix(m.geom.get_idx(flat=True), 1.0)
    cval = 2.2
    padded = m.pad(1, mode="constant", cval=cval)
    coords_pad = padded.geom.get_coord(flat=True)
    inside = m.geom.contains(coords_pad)
    # Pixels outside the original geometry carry the padding value.
    coords_outside = tuple(c[~inside] for c in coords_pad)
    assert_allclose(padded.get_by_coord(coords_outside), cval * np.ones_like(coords_outside[0]))
    # Pixels inside keep their original value of 1.
    coords_inside = tuple(c[inside] for c in coords_pad)
    assert_allclose(padded.get_by_coord(coords_inside), np.ones_like(coords_inside[0]))
def test_hpxndmap_resample_axis():
    """Resampling a non-spatial axis sums bin contents into the new binning."""
    axis_1 = MapAxis.from_edges([1, 2, 3, 4, 5], name="test-1")
    axis_2 = MapAxis.from_edges([1, 2, 3, 4], name="test-2")
    cube = HpxNDMap(HpxGeom.create(nside=16, axes=[axis_1, axis_2]), unit="m2")
    cube.data += 1

    # Edges aligned with the original binning: bins merge 1-to-1 and 2-to-1.
    coarse = MapAxis.from_edges([2, 3, 5], name="test-1")
    resampled = cube.resample_axis(axis=coarse)
    assert resampled.data.shape == (3, 2, 3072)
    assert_allclose(resampled.data[0, :, 0], [1, 2])

    # Test without all interval covered
    partial = MapAxis.from_edges([1.7, 4], name="test-1")
    resampled_partial = cube.resample_axis(axis=partial)
    assert resampled_partial.data.shape == (3, 1, 3072)
    assert_allclose(resampled_partial.data, 2)
def test_from_wcs_tiles():
    """Reassembling WCS tiles of a constant map yields a constant HEALPix map."""
    hpx_geom = HpxGeom.create(nside=8)
    tiles = [Map.from_geom(g, data=1) for g in hpx_geom.to_wcs_tiles(nside_tiles=4)]
    reassembled = HpxNDMap.from_wcs_tiles(wcs_tiles=tiles)
    assert_allclose(reassembled.data, 1)
def __init__(self, evt_file="$JOINT_CRAB/data/fermi/events.fits.gz", exp_file="$JOINT_CRAB/data/fermi/exposure_cube.fits.gz", psf_file="$JOINT_CRAB/data/fermi/psf.fits.gz", max_psf_radius='0.5 deg'):
    """Load Fermi-LAT event list, exposure cube and PSF from disk.

    The "$JOINT_CRAB" prefix in the default paths is presumably expanded
    by the readers or the environment — TODO confirm.  ``max_psf_radius``
    is stored nowhere in this method; it is likely consumed elsewhere in
    the class (not visible here).
    """
    # Read data
    self.events = EventList.read(evt_file)
    self.exposure = HpxNDMap.read(exp_file)
    self.exposure.unit = u.Unit('cm2s')  # no unit stored on map...
    self.psf = EnergyDependentTablePSF.read(psf_file)
def test_coadd_unit():
    """fill_by_idx and coadd convert quantities into the target map's unit."""
    geom = HpxGeom.create(nside=128)
    map_m2 = HpxNDMap(geom, unit="m2")
    map_cm2 = HpxNDMap(geom, unit="cm2")
    idx = geom.get_idx()

    # 1 cm2 is converted to 1e-4 m2 before being accumulated.
    cm2_weights = u.Quantity(np.ones_like(idx[0]), unit="cm2")
    map_m2.fill_by_idx(idx, weights=cm2_weights)
    assert_allclose(map_m2.data, 0.0001)

    m2_weights = u.Quantity(np.ones_like(idx[0]), unit="m2")
    map_m2.fill_by_idx(idx, weights=m2_weights)
    map_m2.coadd(map_cm2)
    assert_allclose(map_m2.data, 1.0001)
def test_hpx_map_cutout():
    """Cutout of an all-sky map: regression on shape, sum and edge values."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)
    allsky = HpxNDMap.create(nside=32, frame="galactic", axes=[axis])
    # Each pixel holds its own index so values identify the selected pixels.
    allsky.data += np.arange(12288)
    cutout = allsky.cutout(SkyCoord("0d", "0d"), width=10 * u.deg)
    assert cutout.data.shape == (1, 25)
    assert_allclose(cutout.data.sum(), 239021)
    assert_allclose(cutout.data[0, 0], 8452)
    assert_allclose(cutout.data[0, -1], 9768)
def __init__(
    self,
    evt_file="../data/joint-crab/fermi/events.fits.gz",
    exp_file="../data/joint-crab/fermi/exposure_cube.fits.gz",
    psf_file="../data/joint-crab/fermi/psf.fits.gz",
):
    """Load Fermi-LAT event list, exposure cube and PSF map from disk.

    Default paths point at the joint-crab dataset relative to the
    working directory — presumably the notebook/script location;
    confirm before running elsewhere.
    """
    # Read data
    self.events = EventList.read(evt_file)
    self.exposure = HpxNDMap.read(exp_file)
    self.exposure.unit = u.Unit("cm2s")  # no unit stored on map...
    self.psf = PSFMap.read(psf_file, format="gtpsf")
def test_hpx_nd_map_pad_axis():
    """Padding along the energy axis appends cval bins on both ends."""
    energy = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=2)
    cube = HpxNDMap.create(nside=2, frame="galactic", axes=[energy])
    cube.data += [[1], [2]]
    padded = cube.pad(axis_name="energy", pad_width=(1, 1), mode="constant", cval=3)
    # One cval=3 bin added before and after the original [1, 2] bins.
    assert_allclose(padded.data[:, 0], [3, 1, 2, 3])
def __init__(
    self,
    evt_file="../data/joint-crab/fermi/events.fits.gz",
    exp_file="../data/joint-crab/fermi/exposure_cube.fits.gz",
    psf_file="../data/joint-crab/fermi/psf.fits.gz",
    max_psf_radius="0.5 deg",
):
    """Load Fermi-LAT event list, exposure cube and PSF from disk.

    ``max_psf_radius`` is not stored by this method; it is likely used
    elsewhere in the class (not visible here).  Default paths point at
    the joint-crab dataset relative to the working directory.
    """
    # Read data
    self.events = EventList.read(evt_file)
    self.exposure = HpxNDMap.read(exp_file)
    self.exposure.unit = u.Unit("cm2s")  # no unit stored on map...
    self.psf = EnergyDependentTablePSF.read(psf_file)
def test_partial_hpx_map_cutout():
    """Cutout of a partial-sky map: regression on shape, sum and edge values."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)
    partial = HpxNDMap.create(
        nside=32, frame="galactic", axes=[axis], region="DISK(110.,75.,10.)"
    )
    partial.data += np.arange(90)
    cutout = partial.cutout(SkyCoord("0d", "0d"), width=10 * u.deg)
    assert cutout.data.shape == (1, 25)
    assert_allclose(cutout.data.sum(), 2225)
    assert_allclose(cutout.data[0, 0], 89)
    assert_allclose(cutout.data[0, -1], 89)
def _make_residual_map_hpx(self, prefix, **kwargs):
    """Build a smoothed HEALPix residual-significance map.

    Accumulates Gaussian-smoothed counts, model and excess maps over all
    analysis components, then derives a TS and a signed significance map.
    Returns a dict bundling the output maps with the input configuration.
    """
    # kwargs.setdefault both reads the option and normalizes kwargs,
    # which is stored in the returned dict under 'config'.
    src_dict = copy.deepcopy(kwargs.setdefault('model', {}))
    exclude = kwargs.setdefault('exclude', None)
    loge_bounds = kwargs.setdefault('loge_bounds', None)
    use_weights = kwargs.setdefault('use_weights', False)

    # Fill missing energy bounds with the full analysis range (log10 E).
    if loge_bounds:
        if len(loge_bounds) != 2:
            raise Exception('Wrong size of loge_bounds array.')
        loge_bounds[0] = (loge_bounds[0] if loge_bounds[0] is not None
                          else self.log_energies[0])
        loge_bounds[1] = (loge_bounds[1] if loge_bounds[1] is not None
                          else self.log_energies[-1])
    else:
        loge_bounds = [self.log_energies[0], self.log_energies[-1]]

    kernel = None
    # Fixed smoothing width of 0.3 deg (in radians).
    gauss_width = np.radians(0.3)

    # 2D (image) geometry of the counts map; all accumulators live on it.
    hpxsky = self.counts_map().geom.to_image()

    mmst = HpxNDMap.from_geom(hpxsky)   # summed smoothed model counts
    cmst = HpxNDMap.from_geom(hpxsky)   # summed smoothed counts
    emst = HpxNDMap.from_geom(hpxsky)   # summed smoothed excess
    ts = HpxNDMap.from_geom(hpxsky)
    sigma = HpxNDMap.from_geom(hpxsky)
    excess = HpxNDMap.from_geom(hpxsky)

    for i, c in enumerate(self.components):
        # Energy-bin index range for this component's bounds.
        imin = utils.val_to_edge(c.log_energies, loge_bounds[0])[0]
        imax = utils.val_to_edge(c.log_energies, loge_bounds[1])[0]

        cc = c.counts_map()
        mc = c.model_counts_map(exclude=exclude)
        # Per-component excess = counts - model.
        ec = HpxNDMap(cc.geom, cc.data - mc.data)

        if use_weights:
            wmap = c.weight_map()
            # Binary mask: 1 where any energy bin has positive weight.
            mask = wmap.sum_over_axes()
            mask.data = np.where(mask.data > 0., 1., 0.)
        else:
            wmap = None
            mask = None

        # Same smoothing width in every energy plane.
        sigmas = gauss_width * np.ones(cc.data.shape[0])
        ccs = convolve_map_hpx_gauss(
            cc, sigmas, imin=imin, imax=imax, wmap=wmap)
        mcs = convolve_map_hpx_gauss(
            mc, sigmas, imin=imin, imax=imax, wmap=wmap)
        ecs = convolve_map_hpx_gauss(
            ec, sigmas, imin=imin, imax=imax, wmap=wmap)

        cms = ccs.sum_over_axes()
        mms = mcs.sum_over_axes()
        ems = ecs.sum_over_axes()

        # Bring component maps to the accumulator resolution if needed.
        if cms.geom.order != hpxsky.order:
            cms = cms.to_ud_graded(hpxsky.nside, preserve_counts=True)
            mms = mms.to_ud_graded(hpxsky.nside, preserve_counts=True)
            ems = ems.to_ud_graded(hpxsky.nside, preserve_counts=True)

        cmst.data += cms.data
        mmst.data += mms.data
        emst.data += ems.data

    # Likelihood-ratio TS of data vs. model under Poisson statistics.
    ts.data = 2.0 * (poisson_lnl(cmst.data, cmst.data) -
                     poisson_lnl(cmst.data, mmst.data))
    # Significance, signed by the sign of the smoothed excess.
    sigma.data = np.sqrt(ts.data)
    sigma.data[emst.data < 0] *= -1

    modelname = 'gauss_0p3'
    # NOTE(review): 'mask' holds the value from the LAST loop iteration and
    # is unbound if self.components is empty — confirm intended.
    o = {'name': utils.join_strings([prefix, modelname]),
         'projtype': 'HPX',
         'file': None,
         'sigma': sigma,
         'model': mmst,
         'data': cmst,
         'excess': emst,
         'mask': mask,
         'config': kwargs}

    return o