def test_hpxmap_init(nside, nested, coordsys, region, axes):
    """HpxNDMap must allocate the expected shape and accept explicit data."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    npix = int(np.max(geom.npix))
    axis_bins = [ax.nbin for ax in axes] if axes else []
    # Data layout is reversed relative to (npix, *axis_bins).
    expected_shape = tuple(reversed([npix] + axis_bins))
    values = np.random.uniform(0, 1, expected_shape)
    empty_map = HpxNDMap(geom)
    assert empty_map.data.shape == values.shape
    filled_map = HpxNDMap(geom, values)
    assert_allclose(filled_map.data, values)
def create_map(nside, nested, coordsys, region, axes):
    """Build an empty HpxNDMap on the requested HEALPix geometry."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    return HpxNDMap(geom)
def create_map(nside, nested, frame, region, axes):
    """Build an empty HpxNDMap on the requested HEALPix geometry (frame API)."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    return HpxNDMap(geom)
def test_coadd_unit():
    """Filling and coadding maps with different units must convert values."""
    geom = HpxGeom.create(nside=128)
    map_m2 = HpxNDMap(geom, unit="m2")
    map_cm2 = HpxNDMap(geom, unit="cm2")
    idx = geom.get_idx()
    ones = np.ones_like(idx[0])
    # 1 cm2 filled into an m2 map becomes 1e-4.
    map_m2.fill_by_idx(idx, weights=u.Quantity(ones, unit="cm2"))
    assert_allclose(map_m2.data, 0.0001)
    map_m2.fill_by_idx(idx, weights=u.Quantity(ones, unit="m2"))
    # Coadding an empty cm2 map must not change the m2 values.
    map_m2.coadd(map_cm2)
    assert_allclose(map_m2.data, 1.0001)
def test_hpxmap_ud_grade(nside, nested, coordsys, region, axes):
    """Smoke test: to_ud_graded runs on an empty map of this geometry."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    HpxNDMap(geom).to_ud_graded(4)
def test_hpxmap_crop(nside, nested, frame, region, axes):
    """Smoke test: cropping by one pixel runs on this geometry."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    HpxNDMap(geom).crop(1)
def test_hpxmap_crop(nside, nested, coordsys, region, axes):
    """Smoke test: cropping by one pixel runs on this geometry (coordsys API)."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    HpxNDMap(geom).crop(1)
def test_hpxmap_ud_grade(nside, nested, frame, region, axes):
    """Smoke test: to_ud_graded runs on an empty map (frame API)."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    HpxNDMap(geom).to_ud_graded(4)
def test_hpxmap_to_wcs(nside, nested, coordsys, region, axes):
    """Smoke test: WCS projection works both with and without band summing."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    hpx_map = HpxNDMap(geom)
    for sum_bands in (False, True):
        hpx_map.to_wcs(sum_bands=sum_bands, oversample=2, normalize=False)
def test_hpxmap_swap_scheme(nside, nested, frame, region, axes):
    """Swapping pixel ordering must keep values at each sky position unchanged."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    hpx_map = HpxNDMap(geom)
    hpx_map.data = np.arange(hpx_map.data.size).reshape(hpx_map.geom.data_shape)
    swapped = hpx_map.to_swapped()
    coords = hpx_map.geom.get_coord(flat=True)
    assert_allclose(hpx_map.get_by_coord(coords), swapped.get_by_coord(coords))
def test_hpxmap_interp_by_coord(nside, nested, coordsys, region, axes):
    """Linear interpolation at pixel centers must reproduce the stored values."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    hpx_map = HpxNDMap(geom)
    coords = hpx_map.geom.get_coord(flat=True)
    hpx_map.set_by_coord(coords, coords[1])
    interpolated = hpx_map.interp_by_coord(coords, interp="linear")
    assert_allclose(hpx_map.get_by_coord(coords), interpolated)
def test_hpxmap_swap_scheme(nside, nested, coordsys, region, axes):
    """Swapping pixel ordering of a Poisson-filled map must preserve values."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    hpx_map = HpxNDMap(geom)
    fill_poisson(hpx_map, mu=1.0, random_state=0)
    swapped = hpx_map.to_swapped()
    coords = hpx_map.geom.get_coord(flat=True)
    assert_allclose(hpx_map.get_by_coord(coords), swapped.get_by_coord(coords))
def test_hpx_nd_map_to_nside():
    """to_nside must rescale values by the pixel-area ratio."""
    axis = MapAxis.from_edges([1, 2, 3], name="test-1")
    hpx_map = HpxNDMap(HpxGeom.create(nside=64, axes=[axis]), unit="m2")
    hpx_map.data += 1
    # nside 64 -> 32: four pixels merge into one.
    assert_allclose(hpx_map.to_nside(nside=32).data, 4)
    # nside 64 -> 128: each pixel splits into four.
    assert_allclose(hpx_map.to_nside(nside=128).data, 0.25)
def test_hpxmap_sum_over_axes(nside, nested, coordsys, region, axes):
    """Summing over non-spatial axes must conserve the total (regular geoms)."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    hpx_map = HpxNDMap(geom)
    coords = hpx_map.geom.get_coord(flat=True)
    hpx_map.fill_by_coord(coords, coords[0])
    summed = hpx_map.sum_over_axes()
    if hpx_map.geom.is_regular:
        assert_allclose(np.nansum(hpx_map.data), np.nansum(summed.data))
def test_hpxmap_downsample(nside, nested, coordsys, region, axes):
    """Downsampling with preserve_counts must conserve the total and the unit."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    hpx_map = HpxNDMap(geom, unit="m2")
    hpx_map.set_by_pix(hpx_map.geom.get_idx(flat=True), 1.0)
    downsampled = hpx_map.downsample(2, preserve_counts=True)
    assert_allclose(np.nansum(hpx_map.data), np.nansum(downsampled.data))
    assert hpx_map.unit == downsampled.unit
def test_smooth(kernel):
    """Smoothing must conserve total counts for RING and NESTED maps alike.

    Fixes: the original bound ``nest=False`` (RING ordering) to a variable
    named ``geom_nest`` and ``nest=True`` to ``geom_ring`` — the labels were
    inverted.  Variables are renamed to match the actual scheme; the same
    keyword arguments reach the same calls, so behavior is unchanged.
    A dead commented-out ``pytest.raises`` line is removed.
    """
    axes = [
        MapAxis(np.logspace(0.0, 3.0, 3), interp="log"),
        MapAxis(np.logspace(1.0, 3.0, 4), interp="lin"),
    ]
    geom_ring = HpxGeom.create(nside=256, nest=False, frame="galactic", axes=axes)
    geom_nest = HpxGeom.create(nside=256, nest=True, frame="galactic", axes=axes)
    m_ring = HpxNDMap(geom_ring, data=np.ones(geom_ring.data_shape), unit="m2")
    m_nest = HpxNDMap(geom_nest, data=np.ones(geom_nest.data_shape), unit="m2")
    desired_ring = m_ring.data.sum()
    desired_nest = m_nest.data.sum()

    smoothed_ring = m_ring.smooth(0.2 * u.deg, kernel)
    smoothed_nest = m_nest.smooth(0.2 * u.deg, kernel)

    assert_allclose(smoothed_ring.data.sum(), desired_ring)
    assert smoothed_ring.data.dtype == float
    assert_allclose(smoothed_nest.data.sum(), desired_nest)
    assert smoothed_nest.data.dtype == float

    # Smoothing a cutout must approximately conserve the cutout's counts.
    cutout = m_ring.cutout(position=(0, 0), width=15 * u.deg)
    smoothed_cutout = cutout.smooth(0.1 * u.deg, kernel)
    assert_allclose(cutout.data.sum(), smoothed_cutout.data.sum(), rtol=0.01)

    # "box" is not a valid kernel for HEALPix smoothing.
    with pytest.raises(ValueError):
        m_ring.smooth(0.2 * u.deg, "box")
def make_diff_maps(comp_map, input_pref):
    """Write absolute and fractional difference maps between ST and Gardian models.

    For each entry ``name -> gardian_name`` in ``comp_map``, reads the ST model
    cube ``mcube_<input_pref>_<name>.fits`` and the matching Gardian cube,
    regrids the Gardian map to the ST nside, and writes:

    - ``diff_gard_<name>.fits``       — per-energy-plane difference map
    - ``diff_gard_frac_<name>.fits``  — energy-summed fractional residual

    Fix: the original caught ``KeyError`` and re-raised ``KeyError(msg)``,
    discarding the traceback and wrapping the exception in its own repr;
    a bare ``raise`` preserves both.
    """
    for key, gard_name in sorted(comp_map.items()):
        fname_st = "mcube_%s_%s.fits" % (input_pref, key)
        fname_gard = "../../GardianResults_local/%s.fits.gz" % gard_name
        map_st = Map.read(fname_st, 'SKYMAP')
        try:
            map_gard = Map.read(fname_gard, 'SKYMAP2')
        except KeyError:
            print("Failed to read %s " % fname_gard)
            raise  # preserve the original exception and traceback
        # Regrid the Gardian map onto the ST map's resolution.
        map_gard_cast = map_gard.to_ud_graded(map_st.geom.nside, True)
        diff = map_st.data - map_gard_cast.data
        HpxNDMap(map_st.geom, diff).write("diff_gard_%s.fits" % key)
        # Energy-summed fractional residual relative to the ST model.
        frac = diff.sum(0) / map_st.data.sum(0)
        HpxNDMap(map_st.geom.to_image(), frac).write("diff_gard_frac_%s.fits" % key)
def test_hpxmap_upsample(nside, nested, frame, region, axes):
    """Upsampling conserves the total when preserve_counts, else scales by 4."""
    geom = HpxGeom(nside=nside, nest=nested, frame=frame, region=region, axes=axes)
    hpx_map = HpxNDMap(geom, unit="m2")
    hpx_map.set_by_pix(hpx_map.geom.get_idx(flat=True), 1.0)
    upsampled = hpx_map.upsample(2, preserve_counts=True)
    assert_allclose(np.nansum(hpx_map.data), np.nansum(upsampled.data))
    # Without count preservation each pixel keeps its value, so the sum
    # grows by the factor-4 pixel multiplication.
    upsampled = hpx_map.upsample(2, preserve_counts=False)
    assert_allclose(4.0 * np.nansum(hpx_map.data), np.nansum(upsampled.data))
    assert hpx_map.unit == upsampled.unit
def test_hpxmap_interp_by_coord_quantities():
    """interp_by_coord must accept Quantity-valued coordinate dicts."""
    energy_axis = MapAxis(np.logspace(0.0, 3.0, 3), interp="log", name="energy", unit="TeV")
    hpx_map = HpxNDMap(geom=HpxGeom(nside=1, axes=[energy_axis]))
    coords = hpx_map.geom.get_coord(flat=True)
    # Store the latitude value in every pixel so interpolation is predictable.
    hpx_map.set_by_coord(coords, coords["lat"])
    query = {"lon": 99, "lat": 42, "energy": 1 * u.TeV}
    assert_allclose(hpx_map.interp_by_coord(query), 42, rtol=1e-2)
def convolve_map_hpx_gauss(m, sigmas, imin=0, imax=None, wmap=None):
    """Perform an energy-dependent Gaussian convolution of a HEALPix cube.

    Fix: removed the unused local ``nside``.

    Parameters
    ----------
    m : `HpxMap`
        2-D map containing a sequence of 1-D HEALPix maps. First dimension
        should be energy.
    sigmas : `~numpy.ndarray`
        1-D array of Gaussian widths for smoothing, one per energy plane
        (radians, per the healpy ``smoothing`` API).
    imin : int
        Minimum index in energy dimension.
    imax : int
        Maximum index in energy dimension (exclusive); ``None`` means all.
    wmap : `~numpy.ndarray`, optional
        2-D map containing a sequence of 1-D HEALPix maps of weights, with
        the same dimensions as ``m``; applied multiplicatively after
        smoothing.

    Returns
    -------
    `HpxNDMap`
        New map on ``m``'s geometry with the selected energy planes smoothed;
        planes outside the range remain zero.
    """
    islice = slice(imin, imax)
    o = np.zeros(m.data.shape)
    nest = m.geom.nest

    # Loop over the selected energy planes.
    for i, ms in enumerate(m.data[islice, ...]):
        sigma = sigmas[islice][i]
        # healpy's spherical-harmonic smoothing requires RING ordering,
        # so convert NESTED maps in and back out.
        if nest:
            ms = hp.pixelfunc.reorder(ms, n2r=True)
        o[islice, ...][i] = hp.sphtfunc.smoothing(ms, sigma=sigma)
        if nest:
            o[islice, ...][i] = hp.pixelfunc.reorder(o[islice, ...][i], r2n=True)
        if wmap is not None:
            o[islice, ...][i] *= wmap.data[islice, ...][i]

    return HpxNDMap(m.geom, o)
def test_hpxmap_pad(nside, nested, coordsys, region, axes):
    """Padding fills new pixels with cval and leaves original pixels intact."""
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    hpx_map = HpxNDMap(geom)
    hpx_map.set_by_pix(hpx_map.geom.get_idx(flat=True), 1.0)
    fill_value = 2.2
    padded = hpx_map.pad(1, mode="constant", cval=fill_value)
    coords_pad = padded.geom.get_coord(flat=True)
    inside = hpx_map.geom.contains(coords_pad)
    # Pixels outside the original geometry carry the pad value ...
    outside_coords = tuple(c[~inside] for c in coords_pad)
    assert_allclose(
        padded.get_by_coord(outside_coords),
        fill_value * np.ones_like(outside_coords[0]),
    )
    # ... while the original pixels keep their filled value of 1.
    inside_coords = tuple(c[inside] for c in coords_pad)
    assert_allclose(padded.get_by_coord(inside_coords), np.ones_like(inside_coords[0]))
def create_map(nside, nested, coordsys, region, axes, sparse):
    """Create an empty HEALPix map, sparse or dense.

    Fix: the original duplicated the HpxGeom construction in both branches;
    the geometry is now built once and only the map class is selected.

    Parameters are forwarded unchanged to `HpxGeom`; ``sparse`` selects
    `HpxSparseMap` over `HpxNDMap`.
    """
    geom = HpxGeom(nside=nside, nest=nested, coordsys=coordsys, region=region, axes=axes)
    map_cls = HpxSparseMap if sparse else HpxNDMap
    return map_cls(geom)
def test_hpxndmap_resample_axis():
    """resample_axis must rebin counts onto a coarser non-spatial axis."""
    axis_1 = MapAxis.from_edges([1, 2, 3, 4, 5], name="test-1")
    axis_2 = MapAxis.from_edges([1, 2, 3, 4], name="test-2")
    hpx_map = HpxNDMap(HpxGeom.create(nside=16, axes=[axis_1, axis_2]), unit="m2")
    hpx_map.data += 1
    resampled = hpx_map.resample_axis(axis=MapAxis.from_edges([2, 3, 5], name="test-1"))
    assert resampled.data.shape == (3, 2, 3072)
    assert_allclose(resampled.data[0, :, 0], [1, 2])
    # New axis that does not cover the full original range.
    partial = hpx_map.resample_axis(axis=MapAxis.from_edges([1.7, 4], name="test-1"))
    assert partial.data.shape == (3, 1, 3072)
    assert_allclose(partial.data, 2)
def test_map_reproject_hpx_to_wcs():
    """Reprojecting a HEALPix cube onto a WCS grid must interpolate values."""
    energy_axis = MapAxis.from_bounds(
        1.0, 10.0, 3, interp="log", name="energy", node_type="center"
    )
    geom_wcs = WcsGeom.create(
        skydir=(0, 0), npix=(11, 11), binsz=10, axes=[energy_axis], coordsys="GAL"
    )
    geom_hpx = HpxGeom.create(binsz=10, coordsys="GAL", axes=[energy_axis])
    values = np.arange(3 * 768).reshape(geom_hpx.data_shape)
    reprojected = HpxNDMap(geom_hpx, data=values).reproject(geom_wcs)
    actual = reprojected.get_by_coord(
        {"lon": 0, "lat": 0, "energy": [1.0, 3.16227766, 10.0]}
    )
    assert_allclose(actual, [287.5, 1055.5, 1823.5], rtol=1e-3)
def _make_residual_map_hpx(self, prefix, **kwargs):
    """Build a smoothed residual significance map on the HEALPix counts geometry.

    For each analysis component, forms counts, model-counts and excess maps,
    convolves each energy plane with a fixed-width Gaussian, sums over energy,
    regrids to the summed sky geometry if needed, and accumulates totals.
    From the accumulated counts/model maps it derives a TS map and a signed
    significance map.

    Parameters
    ----------
    prefix : str
        Prefix joined into the output ``name`` field.
    **kwargs :
        Recognized keys (defaults are written back into ``kwargs`` via
        ``setdefault`` and returned under ``'config'``): ``model``,
        ``exclude``, ``loge_bounds``, ``use_weights``.

    Returns
    -------
    dict
        Keys: ``name``, ``projtype`` ('HPX'), ``file`` (None), ``sigma``,
        ``model``, ``data``, ``excess``, ``mask``, ``config``.
    """
    # NOTE(review): src_dict is computed but never used in this method.
    src_dict = copy.deepcopy(kwargs.setdefault('model', {}))
    exclude = kwargs.setdefault('exclude', None)
    loge_bounds = kwargs.setdefault('loge_bounds', None)
    use_weights = kwargs.setdefault('use_weights', False)

    # Normalize loge_bounds: replace missing endpoints with the full range.
    if loge_bounds:
        if len(loge_bounds) != 2:
            raise Exception('Wrong size of loge_bounds array.')
        loge_bounds[0] = (loge_bounds[0] if loge_bounds[0] is not None
                          else self.log_energies[0])
        loge_bounds[1] = (loge_bounds[1] if loge_bounds[1] is not None
                          else self.log_energies[-1])
    else:
        loge_bounds = [self.log_energies[0], self.log_energies[-1]]

    # NOTE(review): kernel is assigned but never used.
    kernel = None
    # Smoothing width: 0.3 converted with np.radians (i.e. 0.3 treated as
    # degrees); matches the 'gauss_0p3' model name below.
    gauss_width = np.radians(0.3)

    # Accumulator maps on the 2-D (energy-summed) sky geometry.
    hpxsky = self.counts_map().geom.to_image()
    mmst = HpxNDMap.from_geom(hpxsky)   # summed smoothed model counts
    cmst = HpxNDMap.from_geom(hpxsky)   # summed smoothed counts
    emst = HpxNDMap.from_geom(hpxsky)   # summed smoothed excess
    ts = HpxNDMap.from_geom(hpxsky)
    sigma = HpxNDMap.from_geom(hpxsky)
    # NOTE(review): excess is created but never filled or returned.
    excess = HpxNDMap.from_geom(hpxsky)

    for i, c in enumerate(self.components):
        # Energy-plane index range covered by loge_bounds for this component.
        imin = utils.val_to_edge(c.log_energies, loge_bounds[0])[0]
        imax = utils.val_to_edge(c.log_energies, loge_bounds[1])[0]

        cc = c.counts_map()
        mc = c.model_counts_map(exclude=exclude)
        # Excess = data minus model, on the component's geometry.
        ec = HpxNDMap(cc.geom, cc.data - mc.data)

        if use_weights:
            wmap = c.weight_map()
            # Binary mask: 1 where any energy plane has positive weight.
            mask = wmap.sum_over_axes()
            mask.data = np.where(mask.data > 0., 1., 0.)
        else:
            wmap = None
            mask = None

        # One smoothing width per energy plane (constant here).
        sigmas = gauss_width * np.ones(cc.data.shape[0])
        ccs = convolve_map_hpx_gauss(cc, sigmas, imin=imin, imax=imax, wmap=wmap)
        mcs = convolve_map_hpx_gauss(mc, sigmas, imin=imin, imax=imax, wmap=wmap)
        ecs = convolve_map_hpx_gauss(ec, sigmas, imin=imin, imax=imax, wmap=wmap)

        cms = ccs.sum_over_axes()
        mms = mcs.sum_over_axes()
        ems = ecs.sum_over_axes()

        # Regrid to the accumulator resolution if the component differs.
        if cms.geom.order != hpxsky.order:
            cms = cms.to_ud_graded(hpxsky.nside, preserve_counts=True)
            mms = mms.to_ud_graded(hpxsky.nside, preserve_counts=True)
            ems = ems.to_ud_graded(hpxsky.nside, preserve_counts=True)

        cmst.data += cms.data
        mmst.data += mms.data
        emst.data += ems.data

    # Likelihood-ratio TS of data vs model; sqrt gives significance, signed
    # by the sign of the excess.
    ts.data = 2.0 * (poisson_lnl(cmst.data, cmst.data) -
                     poisson_lnl(cmst.data, mmst.data))
    sigma.data = np.sqrt(ts.data)
    sigma.data[emst.data < 0] *= -1

    modelname = 'gauss_0p3'
    # NOTE(review): 'mask' holds the value from the LAST loop iteration only —
    # verify this is intended when there are multiple components.
    o = {
        'name': utils.join_strings([prefix, modelname]),
        'projtype': 'HPX',
        'file': None,
        'sigma': sigma,
        'model': mmst,
        'data': cmst,
        'excess': emst,
        'mask': mask,
        'config': kwargs
    }
    return o