def query_disc(nside, vec, radius, inclusive=False, fact=4, nest=False): """ Wrapper around healpy.query_disc to deal with old healpy implementation. nside : int The nside of the Healpix map. vec : float, sequence of 3 elements The coordinates of unit vector defining the disk center. radius : float The radius (in degrees) of the disc inclusive : bool, optional If False, return the exact set of pixels whose pixel centers lie within the disk; if True, return all pixels that overlap with the disk, and maybe a few more. Default: False fact : int, optional Only used when inclusive=True. The overlapping test will be done at the resolution fact*nside. For NESTED ordering, fact must be a power of 2, else it can be any positive integer. Default: 4. nest: bool, optional if True, assume NESTED pixel ordering, otherwise, RING pixel ordering """ try: # New-style call (healpy 1.6.3) return hp.query_disc(nside, vec, np.radians(radius), inclusive, fact, nest) except Exception as e: print(e) # Old-style call (healpy 0.10.2) return hp.query_disc(nside, vec, np.radians(radius), nest, deg=False)
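# --- Added usage sketch (not from the original sources): sanity-check a disc query.
# All values below are arbitrary; only standard healpy/numpy calls are used, with the
# radius converted to radians as in the wrapper above.
import numpy as np
import healpy as hp

nside = 64
center = hp.ang2vec(30.0, 45.0, lonlat=True)      # lon = 30 deg, lat = 45 deg
radius_deg = 5.0

pix_exact = hp.query_disc(nside, center, np.radians(radius_deg))
pix_incl = hp.query_disc(nside, center, np.radians(radius_deg), inclusive=True, fact=4)

# The inclusive query returns a superset of the exact one.
assert np.all(np.isin(pix_exact, pix_incl))

# The number of exact pixels times the pixel area should roughly match
# the disc solid angle 2*pi*(1 - cos(r)).
disc_sr = 2 * np.pi * (1 - np.cos(np.radians(radius_deg)))
print(len(pix_exact) * hp.nside2pixarea(nside), "~", disc_sr)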
def compute_luminosity(self, SZ_map):
    self.compute_FWHM_in_degrees(SZ_map)
    NSIDE = hp.get_nside(SZ_map)
    pixels_in_disk = hp.query_disc(NSIDE, hp.pix2vec(NSIDE, self.peak_index),
                                   np.radians(self.FWHM * 1.5))
    nb_pixels_disk = len(pixels_in_disk)
    vania_luminosity = 0
    for pixel in pixels_in_disk:
        vania_luminosity += SZ_map[pixel]
    inner_ring = hp.query_disc(NSIDE, hp.pix2vec(NSIDE, self.peak_index),
                               np.radians(self.FWHM * 3.5))
    outer_ring = hp.query_disc(NSIDE, hp.pix2vec(NSIDE, self.peak_index),
                               np.radians(self.FWHM * 5.5))
    pixels_values_ring = list()
    nb_pixels_background = len(outer_ring) - len(inner_ring)
    for pixel in outer_ring:
        if pixel not in inner_ring:
            pixels_values_ring.append(SZ_map[pixel])
    self.luminosity_error = np.std(pixels_values_ring)
    local_noise_ring = sum(pixels_values_ring)
    # Subtract the local background estimated in the ring, scaled to the disk area.
    self.luminosity = vania_luminosity - local_noise_ring * (nb_pixels_disk / nb_pixels_background)
    print('luminosity = %s +/- %s' % (self.luminosity, self.luminosity_error))
def test_combine(tmpdir): """Test ligo-skymap-combine.""" fn1 = str(tmpdir / 'skymap1.fits.gz') fn2 = str(tmpdir / 'skymap2.fits.gz') fn3 = str(tmpdir / 'joint_skymap.fits.gz') # generate a hemisphere of constant probability nside1 = 32 npix1 = ah.nside_to_npix(nside1) m1 = np.zeros(npix1) disc_idx = hp.query_disc(nside1, (1, 0, 0), np.pi / 2) m1[disc_idx] = 1 m1 /= m1.sum() hp.write_map(fn1, m1, column_names=['PROBABILITY'], extra_header=[('INSTRUME', 'X1')]) # generate another hemisphere of constant probability # but with higher resolution and rotated 90 degrees nside2 = 64 npix2 = ah.nside_to_npix(nside2) m2 = np.zeros(npix2) disc_idx = hp.query_disc(nside2, (0, 1, 0), np.pi / 2) m2[disc_idx] = 1 m2 /= m2.sum() hp.write_map(fn2, m2, column_names=['PROBABILITY'], extra_header=[('INSTRUME', 'Y1')]) run_entry_point('ligo-skymap-combine', fn1, fn2, fn3) m3 = hp.read_map(fn3, nest=True) npix3 = len(m3) nside3 = ah.npix_to_nside(npix3) pix_area3 = ah.nside_to_pixel_area(nside3).to_value(u.sr) # resolution must match the highest original resolution assert npix3 == npix2 # probability must be normalized to 1 assert m3.sum() == pytest.approx(1) # support must be ¼ of the sphere tolerance = 10 * ah.nside_to_pixel_area(nside1).to_value(u.sr) assert sum(m3 > 0) * pix_area3 == pytest.approx(np.pi, abs=tolerance) # generate a BAYESTAR-like map with mock distance information d_mu = np.zeros_like(m1) d_sigma = np.ones_like(m1) d_norm = np.ones_like(m1) io.write_sky_map(fn1, [m1, d_mu, d_sigma, d_norm]) run_entry_point('ligo-skymap-combine', fn1, fn2, fn3) m3, meta3 = io.read_sky_map(fn3, nest=True, distances=True) # check that marginal distance moments match what was simulated mean, std, _ = distance.parameters_to_moments(d_mu[0], d_sigma[0]) assert meta3['distmean'] == pytest.approx(mean) assert meta3['diststd'] == pytest.approx(std)
def get_hem_Cls(skymap, direction, LMAX=256, deg=90.):
    """
    from the given healpix skymap, return Cls for two hemispheres defined by
    the direction given, useful to study the possible scale dependence of
    power modulation

    direction should be a unit vector
    """
    # generate hemispherical mask
    NPIX = len(skymap)
    NSIDE = hp.npix2nside(NPIX)
    maskp = np.array([0.] * NPIX)
    disc = hp.query_disc(nside=NSIDE, vec=direction, radius=0.0174532925 * deg)
    maskp[disc] = 1.
    #skymap=hp.remove_monopole(skymap)
    map1 = hp.ma(skymap)
    map1.mask = maskp
    Clsp = hp.anafast(map1, lmax=2 * LMAX)  # lmax must be an integer
    if deg < 90.:
        maskm = np.array([0.] * NPIX)
        disc = hp.query_disc(nside=NSIDE, vec=-direction, radius=0.0174532925 * deg)
        maskm[disc] = 1.
        map1.mask = maskm
    else:
        map1.mask = np.logical_not(maskp)
    Clsm = hp.anafast(map1, lmax=2 * LMAX)
    return [Clsp[0:LMAX + 1], Clsm[0:LMAX + 1]]
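# --- Added standalone sketch (not from the original module) of the hemispherical-mask
# pattern above: query_disc selects one hemisphere and hp.ma masks it out (in healpy
# masked arrays, mask=True means the pixel is excluded). Map and lmax are arbitrary.
import numpy as np
import healpy as hp

nside = 32
npix = hp.nside2npix(nside)
rng = np.random.default_rng(0)
skymap = rng.standard_normal(npix)

direction = np.array([0.0, 0.0, 1.0])             # north pole
disc = hp.query_disc(nside, direction, np.radians(90.0))

mask = np.zeros(npix, dtype=bool)
mask[disc] = True                                  # True = pixel is masked out

m = hp.ma(skymap)
m.mask = mask
cls_south = hp.anafast(m, lmax=64)                 # spectrum of the unmasked hemisphere
print(cls_south[:5])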
def csi_compute(param):
    """worker function"""
    get_var_from_file(os.path.join(GRATOOLS_CONFIG, 'Csi_config.py'))
    th_bins = data.TH_BINNING
    i, veci, dI, R, nside = param
    if i % 10000 == 0:
        print(i)
    dIi = dI[i]
    Ri = R[i]
    dIij_list = [[] for l in range(0, len(th_bins) - 1)]
    counts_list = [[] for l in range(0, len(th_bins) - 1)]
    Rij_list = [[] for l in range(0, len(th_bins) - 1)]
    for th, (thmin, thmax) in enumerate(zip(th_bins[:-1], th_bins[1:])):
        pixintorad_min = hp.query_disc(nside, veci, thmin)
        pixintorad_max = hp.query_disc(nside, veci, thmax)
        pixintoring = np.setxor1d(pixintorad_max, pixintorad_min)
        Rj = R[pixintoring]
        Rj = Rj[Rj > hp.UNSEEN]
        dIj = dI[pixintoring]
        dIj = dIj[dIj > hp.UNSEEN]
        dIij = np.sum(dIi * dIj)  # -Imean**2
        Rij = np.sum(Ri * Rj)
        counts = len(dIj)
        dIij_list[th].append(dIij)
        counts_list[th].append(counts)
        Rij_list[th].append(Rij)
    return dIij_list, counts_list, Rij_list
def get_index_list(nside, nest, region): """ Returns the list of pixels indices for all the pixels in a region nside : HEALPix nside parameter nest : True for 'NESTED', False = 'RING' region : HEALPix region string """ tokens = parse_hpxregion(region) if tokens[0] == 'DISK': vec = coords_to_vec(float(tokens[1]), float(tokens[2])) ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])), inclusive=False, nest=nest) elif tokens[0] == 'DISK_INC': vec = coords_to_vec(float(tokens[1]), float(tokens[2])) ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])), inclusive=True, fact=int(tokens[4]), nest=nest) elif tokens[0] == 'HPX_PIXEL': nside_pix = int(tokens[2]) if tokens[1] == 'NESTED': ipix_ring = hp.nest2ring(nside_pix, int(tokens[3])) elif tokens[1] == 'RING': ipix_ring = int(tokens[3]) else: raise Exception( "Did not recognize ordering scheme %s" % tokens[1]) ilist = match_hpx_pixel(nside, nest, nside_pix, ipix_ring) else: raise Exception( "HPX.get_index_list did not recognize region type %s" % tokens[0]) return ilist
def mask_extsrc(cat_file, MASK_S_RAD, NSIDE):
    """Returns the 'bad pixels' defined by the position of a source and a
       certain radius away from that point.

       cat_file: str
           .fits file of the source catalog
       MASK_S_RAD: float
           radius around each source defining bad pixels to mask
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Mask for extended sources activated')
    src_cat = pf.open(cat_file)
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    CAT_EXTENDED = src_cat['ExtendedSources']
    BAD_PIX_SRC = []
    EXT_SOURCES = CAT_EXTENDED.data
    src_cat.close()
    for i, src in enumerate(EXT_SOURCES):
        NAME = EXT_SOURCES.field('Source_Name')[i]
        GLON = EXT_SOURCES.field('GLON')[i]
        GLAT = EXT_SOURCES.field('GLAT')[i]
        if 'LMC' in NAME or 'CenA Lobes' in NAME:
            x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
            b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
            BAD_PIX_SRC.append(b_pix)
            radintpix = hp.query_disc(NSIDE, (x, y, z), np.radians(10))
            BAD_PIX_SRC.extend(radintpix)
        else:
            x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
            b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
            BAD_PIX_SRC.append(b_pix)
            radintpix = hp.query_disc(NSIDE, (x, y, z), np.radians(5))
            BAD_PIX_SRC.extend(radintpix)
    return BAD_PIX_SRC
def get_disk(mask, pos_src, out_edge, in_edge, tol=1): ''' Return unmasked pixel indices of disks centred at a source. For usage purpose the input includes two disks. Input: mask: mask healpix map; pos_src: position of source (with shape=3); out_edge: outer disk centred at each source(in arcmin); in_edge: inner disk centred at each source(in arcmin); tol: tolerance between 0 and 1 indicating a threshold of fraction of unmasked pixels in a disk Output: pixel indices of both disks. ''' Ns = hp.npix2nside(mask.size) list_out = hp.query_disc(Ns, pos_src, np.radians(out_edge / 60.)) npix_out = len(list_out) list_out_unmasked = list_out[mask[list_out] > 0] if (list_out_unmasked.size < tol * npix_out): return [0, 0] lon, lat = hp.vec2dir(pos_src, lonlat=True) list_in = hp.query_disc(Ns, pos_src, np.radians(in_edge / 60.)) list_in_unmasked = list_in[mask[list_in] > 0] return list_out_unmasked, list_in_unmasked
def get_index_list(nside, nest, region): """ Returns the list of pixels indices for all the pixels in a region nside : HEALPix nside parameter nest : True for 'NESTED', False = 'RING' region : HEALPix region string """ import healpy as hp tokens = re.split('\(|\)|,', region) if tokens[0] == 'DISK': vec = coords_to_vec(float(tokens[1]), float(tokens[2])) ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])), inclusive=False, nest=nest) elif tokens[0] == 'DISK_INC': vec = coords_to_vec(float(tokens[1]), float(tokens[2])) ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])), inclusive=True, fact=int(tokens[4]), nest=nest) elif tokens[0] == 'HPX_PIXEL': nside_pix = int(tokens[2]) if tokens[1] == 'NESTED': ipix_ring = hp.nest2ring(nside_pix, int(tokens[3])) elif tokens[1] == 'RING': ipix_ring = int(tokens[3]) else: raise Exception( "Did not recognize ordering scheme %s" % tokens[1]) ilist = match_hpx_pixel(nside, nest, nside_pix, ipix_ring) else: raise Exception( "HPX.get_index_list did not recognize region type %s" % tokens[0]) return ilist
def get_hem_Cls(skymap, direction, LMAX=256, deg=90.): """ from the given healpix skymap, return Cls for two hemispheres defined by the direction given, useful to study the possible scale dependence of power modulation direction should be a unit vector """ # generate hemispherical mask NPIX=len(skymap) NSIDE=hp.npix2nside(NPIX) maskp=np.array([0.]*NPIX) disc=hp.query_disc(nside=NSIDE, vec=direction, radius=0.0174532925*deg) maskp[disc]=1. #skymap=hp.remove_monopole(skymap) map1=hp.ma(skymap) map1.mask=maskp Clsp=hp.anafast(map1, lmax=LMAX) if (deg<90.): maskm=np.array([0.]*NPIX) disc=hp.query_disc(nside=NSIDE, vec=-direction, radius=0.0174532925*deg) maskm[disc]=1. map1.mask=maskm else: map1.mask=np.logical_not(maskp) Clsm=hp.anafast(map1, lmax=LMAX) return [Clsp, Clsm]
def get_disk(self, src_ind, out_edge, in_edge, tol=1): ''' Return unmasked pixel indices of disks centred at a source. For usage purpose the input includes two disks. Input: src_ind: index of source; out_edge: outer disk centred at each source(in arcmin); in_edge: inner disk centred at each source(in arcmin); tol: tolerance between 0 and 1 indicating a threshold of fraction of unmasked pixels in a disk Output: pixel indices of both disks. ''' mask = self.mask Ns = self.Nside pos_src = self.pos_src_all[src_ind] list_out = hp.query_disc(Ns, pos_src, np.radians(out_edge / 60.)) npix_out = len(list_out) list_out_unmasked = list_out[mask[list_out] > 0] if(list_out_unmasked.size < tol * npix_out): return [0, 0] lon, lat = hp.vec2dir(pos_src, lonlat=True) list_in = hp.query_disc(Ns, pos_src, np.radians(in_edge / 60.)) list_in_unmasked = list_in[mask[list_in] > 0] return list_out_unmasked, list_in_unmasked
def find_biggest_pixel(ra, dec, radius, root_nside=1, max_nside=32): from astropy.coordinates import SkyCoord from astropy import units as u import healpy as hp import numpy as np nside = root_nside radius = np.radians(radius) sc = SkyCoord(ra=ra * u.degree, dec=dec * u.degree, frame='icrs') theta = sc.galactic.l.degree phi = sc.galactic.b.degree vec = hp.ang2vec(theta=theta, phi=phi, lonlat=True) pixels = hp.query_disc(vec=vec, nside=nside, radius=radius, inclusive=False, nest=True) while len(pixels) <= 1: if nside == max_nside: break nside *= 2 pixels = hp.query_disc(vec=vec, nside=nside, radius=radius, inclusive=False, nest=True) if nside > 1: nside //= 2 return nside, hp.vec2pix(nside, *vec, nest=True)
def CreateAnafastPartialSky_(cl, nside, lmin, lmax, delta_ell, f_sky=2 / 100, plot_results=False, noise_rms=200): import NamasterLib as nam # Determine SEEN pixels from f_sky using query_disc vec = hp.pixelfunc.ang2vec(np.pi / 2, np.pi * 3 / 4) radius = f_sky * np.pi #print(np.array([cl.T[0,:]]).shape) ipix_disc = hp.query_disc(nside=nside, vec=vec, radius=radius, nest=False) while len(ipix_disc) < f_sky * 12 * nside**2: radius += 0.01 * np.pi ipix_disc = hp.query_disc(nside=nside, vec=vec, radius=radius, nest=False) #print("npix_partial_sky: ", len(ipix_disc)) m = np.arange(12 * nside**2) m = np.delete(m, ipix_disc, axis=None) # Define the seen pixels seenpix = ipix_disc ### Making mask - it will be automaticall apodized when instanciating the object with default (tunable) parameters mask = np.zeros(12 * nside**2) mask[seenpix] = 1 Namaster = nam.Namaster(mask, lmin=lmin, lmax=lmax, delta_ell=delta_ell) ell_binned, b = Namaster.get_binning(nside) # Get binned input spectra cl_theo_binned = np.zeros(shape=(4, ell_binned.shape[0])) for i in range(4): cl_theo_binned[i, :] = Namaster.bin_spectra(np.array([cl.T[i, :]]), nside) map_ = hp.synfast(cl.T, nside, pixwin=False, verbose=False, new=True) npix = 12 * nside**2 noise = np.random.randn(npix) * noise_rms map_partial = map_ + noise # Anafast spectrum of this map # Set UNSEEN pixels to hp.UNSEEN for Anafast map_partial[:, m] = hp.UNSEEN cl_ana, alm_ana = hp.anafast(map_partial, alm=True, lmax=lmax) # Get binned input spectra cl_ana_binned = np.zeros(shape=(4, ell_binned.shape[0])) for i in range(4): cl_ana_binned[i, :] = Namaster.bin_spectra(np.array([cl_ana[i, :]]), nside) return alm_ana, cl_ana_binned, cl_theo_binned
def getAMatrix(self): # A = lil_matrix((self.Nd, self.Np), dtype=np.float32) data = [] rows = [] columns = [] start = time.time() pixarea = hp.nside2pixarea(self.Nside, False) # get relevant pair of pixels # for j in range(1000): for j in range(self.Nd): qtheta1=self.q.theta[self.d.i1[j]] qphi1=self.q.phi[self.d.i1[j]] qtheta2=self.q.theta[self.d.i2[j]] qphi2=self.q.phi[self.d.i2[j]] q1 = self.Ang2Vec(qtheta1, qphi1) q2 = self.Ang2Vec(qtheta2, qphi2) # rad = np.arccos(np.dot(q1,q2)) # if(rad <= self.resolution): # s = np.array([self.d.hi1[j]]) # else: neipixels1=hp.query_disc(self.Nside, q1, self.sradius) neipixels2=hp.query_disc(self.Nside, q2, self.sradius) s = np.union1d(neipixels1, neipixels2) ss = set(s) smols = np.array([*ss.intersection(self.setpix)]) jthrow = [self.pixdict[l] for l in smols] if (smols.shape[0] == 0): continue ms = np.array([self.hpix[x] for x in smols]) d1 = q1 - ms d2 = q2 - ms norm1 = np.sqrt(np.einsum('ij,ij->i', d1, d1)) #Faster way to calculate norms norm2 = np.sqrt(np.einsum('ij,ij->i', d2, d2)) resp1=1/norm1*(1-np.exp(-norm1**2/(self.sigweight))) resp2=1/norm2*(1-np.exp(-norm2**2/(self.sigweight))) drr = (d1.T*resp1).T - (d2.T*resp2).T dr = d1 - d2 totresponse = 2*pixarea*np.einsum('ij,ij->i', drr, drr)/np.einsum('ij,ij->i',dr,dr) totresponse *= np.sqrt(self.d.weight[j]) data += list(totresponse) rows += [j]*len(jthrow) columns += jthrow # A[j,jthrow] = totresponse if(j%1000==0): iteration = time.time() print("%i/%i with time: %f" % (j,self.Nd, iteration - start)) print("Creating A matrix...") # A = scipy.sparse.csr_matrix((data, (rows, columns)), shape=(self.Nd, self.Np)) A = csr_matrix((data, (rows, columns)), shape=(self.Nd, self.Np)) return A
def get_index_list(nside, nest, region): """Get list of pixels indices for all the pixels in a region. Parameters ---------- nside : int HEALPIX nside parameter nest : bool True for 'NESTED', False = 'RING' region : str HEALPIX region string Returns ------- ilist : `~numpy.ndarray` List of pixel indices. """ import healpy as hp # TODO: this should return something more friendly than a tuple # e.g. a namedtuple or a dict tokens = parse_hpxregion(region) reg_type = tokens[0] if reg_type == "DISK": lon, lat = float(tokens[1]), float(tokens[2]) radius = np.radians(float(tokens[3])) vec = coords_to_vec(lon, lat)[0] ilist = hp.query_disc(nside, vec, radius, inclusive=False, nest=nest) elif reg_type == "DISK_INC": lon, lat = float(tokens[1]), float(tokens[2]) radius = np.radians(float(tokens[3])) vec = coords_to_vec(lon, lat)[0] fact = int(tokens[4]) ilist = hp.query_disc(nside, vec, radius, inclusive=True, nest=nest, fact=fact) elif reg_type == "HPX_PIXEL": nside_pix = int(tokens[2]) if tokens[1] == "NESTED": ipix_ring = hp.nest2ring(nside_pix, int(tokens[3])) elif tokens[1] == "RING": ipix_ring = int(tokens[3]) else: raise ValueError(f"Invalid ordering scheme: {tokens[1]!r}") ilist = match_hpx_pix(nside, nest, nside_pix, ipix_ring) else: raise ValueError(f"Invalid region type: {reg_type!r}") return ilist
def get_annulus_galactic(phi,theta,R,hp_map,nside): vec = hp.ang2vec(theta,phi) R_rad_inner = (np.pi/180.)*R R_rad_outer = R_rad_inner*np.sqrt(2) pix_disc_out = hp.query_disc(nside,vec,R_rad_outer,inclusive = True) pix_disc_inner = hp.query_disc(nside,vec,R_rad_inner, inclusive = True) pix_annulus = np.setdiff1d(pix_disc_out,pix_disc_inner) vals_annulus = hp_map[pix_annulus] return pix_annulus, vals_annulus
def get_annulus(RA, Dec, R, hp_map, nside): theta, phi = DeclRaToThetaPhi(Dec, RA) vec = hp.ang2vec(theta, phi) R_rad_inner = (np.pi / 180.) * R R_rad_outer = R_rad_inner * np.sqrt(2) pix_disc_out = hp.query_disc(nside, vec, R_rad_outer, inclusive=True) pix_disc_inner = hp.query_disc(nside, vec, R_rad_inner, inclusive=True) pix_annulus = np.setdiff1d(pix_disc_out, pix_disc_inner) vals_annulus = hp_map[pix_annulus] return pix_annulus, vals_annulus
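# --- Added sketch (not from the original sources): the setdiff1d annulus used above,
# roughly checked against the expected solid angle of the ring. Direction and radius
# are arbitrary test values.
import numpy as np
import healpy as hp

nside = 128
vec = hp.ang2vec(10.0, -30.0, lonlat=True)
r_in = np.radians(2.0)
r_out = r_in * np.sqrt(2)

outer = hp.query_disc(nside, vec, r_out, inclusive=True)
inner = hp.query_disc(nside, vec, r_in, inclusive=True)
annulus = np.setdiff1d(outer, inner)

ring_sr = 2 * np.pi * ((1 - np.cos(r_out)) - (1 - np.cos(r_in)))
print(len(annulus) * hp.nside2pixarea(nside), "~", ring_sr)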
def get_probability_coverage(ft2file, ligo_map_file, met_t1, met_t2, theta_cut, zenith_cut): ft2data = pyfits.getdata(ft2file) ligo_map = hp.read_map(ligo_map_file) # Probe NSIDE nside = hp.get_nside(ligo_map) # Get entries from the FT2 file every 10 s (cadence) start, ra_scz, dec_scz, ra_zenith, dec_zenith = _gtmktime(ft2data, met_t1, met_t2, cadence=30) coverage = np.zeros_like(start) for i, (t, rz, dz, rz2, dz2) in enumerate( zip(start, ra_scz, dec_scz, ra_zenith, dec_zenith)): # Find the pixels inside the LAT FoV (theta < theta_cut) vec = hp.rotator.dir2vec(rz, dz, lonlat=True) idx_z = hp.query_disc(nside, vec, np.deg2rad(theta_cut), inclusive=False) # Find the pixels at Zenith angles less # than zenith_cut vec = hp.rotator.dir2vec(rz2, dz2, lonlat=True) idx_z2 = hp.query_disc(nside, vec, np.deg2rad(zenith_cut), inclusive=False) # Intersect the two lists of pixels to find pixels which are at the same time # inside the FoV and at Zenith < zenith_cut idx = np.intersect1d(idx_z, idx_z2) # Compute the incremental probability coverage coverage[i] = np.sum(ligo_map[idx]) # Now put the pixel I counted to zero so I don't count them twice ligo_map[idx] = 0 sys.stdout.write("\r%.1f percent completed" % ((i + 1) / float(coverage.shape[0]) * 100.0)) return start, coverage
def makeMasks(nside=64, nested=False, ISWDir='/Data/PSG/hundred_point/'):
    """
    Purpose:
        Makes a set of masks around GNS coordinates of various apertures
    Args:
        nside:
        nested:
        ISWDir:
    Returns:
        writes healpix files to ISWDir containing masks
    """
    # load HEALpix coordinates file
    print('NSIDE=', nside, ' NESTED=', nested)
    longitudes, latitudes = getMapCoords(nside, nested)

    # load GNS catalog coordinates
    cgl, cgb, vgl, vgb = getGNScoords()

    # set radii for apertures around coordinate locations
    radiiDeg = np.array([
        4, 4.5, 5, 5.5, 6, 6.5, 7, 7.5, 8, 8.5, 9, 9.5, 10, 10.5, 11, 11.5, 12
    ])  # degrees
    #radiiDeg = np.array([5.0])
    radii = radiiDeg * np.pi / 180.  # converted to radians
    numCV = 50  # number of clusters and voids in catalog

    for radNum, radius in enumerate(radii):
        print('starting radius ', radiiDeg[radNum], ' degrees: ')
        mask = np.zeros(hp.nside2npix(nside))
        cCentralVec = glgb2vec(cgl, cgb)  # returns array of unit vectors
        vCentralVec = glgb2vec(vgl, vgb)  # returns array of unit vectors
        for cvNum in np.arange(numCV):
            #print('starting (cluster,void) number ', cvNum + 1)
            # cluster
            myPixels = hp.query_disc(nside, cCentralVec[cvNum], radius, nest=nested)
            mask[myPixels] = 1
            # void
            myPixels = hp.query_disc(nside, vCentralVec[cvNum], radius, nest=nested)
            mask[myPixels] = 1
        radString = str("%04.1f" % radiiDeg[radNum])
        pixNumStr = str(int(np.sum(mask)))
        print('number of pixels for radius ' + radString + ': ' + pixNumStr)
        maskFile = 'ISWmask_' + radString + 'deg_' + pixNumStr + 'pix.fits'
        hp.write_map(ISWDir + maskFile, mask, nest=nested, coord='GALACTIC')
def plot_exposures(pointings, Aeff_fact, index=1, lat=0., lon=np.radians(260.),
                   Earth=True, antiEarth=False, NSIDE=32, doplot=True):
    npointings = len(pointings)
    sc = Spacecraft(pointings, lat=lat, lon=lon)
    exposure_positions_hp = np.arange(hp.nside2npix(NSIDE))
    exposure_positions_pix = hp.pix2ang(NSIDE, exposure_positions_hp, lonlat=True)
    exposure_positions = np.vstack(exposure_positions_pix)
    exposures = np.array([[detector.exposure(position[0], position[1], alt=-90., index=index)
                           for position in exposure_positions.T]
                          for detector in sc.detectors])
    exps = exposures.sum(axis=0) * Aeff_fact
    fs = exps  # (-min(gbm_exps))/max(gbm_exps)
    if Earth:
        vec = hp.ang2vec(180, 0, lonlat=True)
        i = hp.query_disc(NSIDE, vec, 67 * np.pi / 180.)
        fs[i] = 0
        exposures[:, i] = 0
    if antiEarth:
        vec = hp.ang2vec(np.degrees(lon) - 260. + 180., 0, lonlat=True)
        i = hp.query_disc(NSIDE, vec, 67 * np.pi / 180.)
        fs[i] = 0
        exposures[:, i] = 0
    if doplot:
        plot.figure(figsize=(20, npointings))
        s = np.argsort(list(pointings.keys()))
        for j in range(npointings):
            i = s[j]
            hp.mollview(exposures[i] / max(exposures[i]) * Aeff_fact, title='Detector ',
                        sub=[np.round(npointings / 3. + 0.5), 3, int(str(j + 1))])  # +pointings.keys()[i]
        hp.mollview(fs, title='Sum of All Detectors')
        # plot.savefig(biadir+'exposure_maps_'+str(ang)+'.png')
    return sc, fs, exposure_positions, pointings, exposures
def addring(nside, ra, dec, anga, angb): """ take a map, and add 1 to elements between anga and angb from ra, dec """ theta = np.deg2rad(90-dec) phi = np.deg2rad(ra) temp_map = np.zeros(hp.nside2npix(nside)) assert angb > anga # else error # Everything from 0 to angb = 1 pixlist = hp.query_disc(nside, hp.ang2vec(theta, phi), np.deg2rad(angb)) temp_map[pixlist] += 1 # now delete everything from 0 to anga pixlist = hp.query_disc(nside, hp.ang2vec(theta, phi), np.deg2rad(anga)) temp_map[pixlist] -= 1 return temp_map
def gen_map_disc(radec_cen, rad, nside): '''Generates a Healpix map with the only non-zero values in the pixels inside the input disc. Parameters ---------- radec_cen : array-like with shape (2,) The center ra,dec of the disc in degrees rad : float The radius of the disc in degrees nside : int The nside of the output Healpix map Returns ------- hpx_map : array-like A Healpix map with non-zero values inside the disc ''' theta = np.pi / 2 - np.radians(radec_cen[1]) phi = np.radians(radec_cen[0]) vec = H.ang2vec(theta, phi) ipix = H.query_disc(nside, vec, np.radians(rad)) hpx_map = np.zeros(H.nside2npix(nside)) hpx_map[ipix] = 1.0 return hpx_map
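# --- Added hedged example (arbitrary coordinates): the RA/Dec -> (theta, phi)
# convention used above, cross-checked against healpy's lonlat keyword.
import numpy as np
import healpy as hp

ra, dec = 150.0, -20.0                             # degrees
theta = np.pi / 2 - np.radians(dec)                # colatitude
phi = np.radians(ra)

v1 = hp.ang2vec(theta, phi)
v2 = hp.ang2vec(ra, dec, lonlat=True)
assert np.allclose(v1, v2)

nside = 64
disc = hp.query_disc(nside, v1, np.radians(3.0))
hpx_map = np.zeros(hp.nside2npix(nside))
hpx_map[disc] = 1.0
print(hpx_map.sum(), "pixels inside the 3 deg disc")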
def grow_hp(inmap, hpids, radius=1.75, replace_val=np.nan): """ grow a healpix mask Parameters ---------- inmap : np.array A HEALpix map hpids : array The healpixel values to grow around radius : float (1.75) The radius to grow around each point (degrees) replace_val : float (np.nan) The value to plug into the grown areas """ nside = hp.npix2nside(np.size(inmap)) theta, phi = hp.pix2ang(nside=nside, ipix=hpids) vec = hp.ang2vec(theta, phi) ipix_disc = [ hp.query_disc(nside=nside, vec=vector, radius=np.radians(radius)) for vector in vec ] ipix_disc = np.unique(np.concatenate(ipix_disc)) outmap = inmap + 0 outmap[ipix_disc] = replace_val return outmap
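# --- Added usage sketch, assuming grow_hp is defined as above: flag everything within
# 1.75 degrees of two arbitrary (hypothetical) healpixel ids.
import numpy as np
import healpy as hp

nside = 64
inmap = np.zeros(hp.nside2npix(nside))
seeds = np.array([100, 2000])                      # hypothetical healpixel ids

grown = grow_hp(inmap, seeds, radius=1.75, replace_val=np.nan)
print(np.sum(np.isnan(grown)), "pixels flagged around the seeds")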
def query_disc(self, lon, lat, radius, fact=4): """ Find pixels that overlap with discs centered at (lon,lat) Inputs ------ lon : float, ndarray longitude (degr) lat : float, ndarray latitude (degr) radius : float, ndarray radius of disk (degrees) fact : float supersampling factor to find overlapping pixels (see healpy.query_disc doc) Returns ------ ndarray : pixel indices """ phi = np.array(lon) * self.deg2rad theta = (90 - np.array(lat)) * self.deg2rad vec = healpy.ang2vec(theta, phi) pix = healpy.query_disc(self.nside, vec, radius * self.deg2rad, inclusive=True, fact=fact, nest=self.nest) return pix
def find_field_visits(visits, ra, decl, nside=512, field_radius_deg=1.75):
    """Return visits centered near a pointing

    Parameters
    ----------
    visits : `pandas.DataFrame`
        The visits in which to look for fields
    ra : `float`
        The RA around which to search.
    decl : `float`
        The declination around which to search.
    nside : `int`
        The nside for the healpix search.
    field_radius_deg : `float`
        The radius around which to search, in degrees.

    Returns
    -------
    field_visits : `pandas.DataFrame`
        The visits on the field.
    """
    field_hpxs = healpy.query_disc(
        nside,
        healpy.ang2vec(ra, decl, lonlat=True),
        np.radians(field_radius_deg),
    )
    visit_hpxs = healpy.ang2pix(nside, visits["fieldRA"].values,
                                visits["fieldDec"].values, lonlat=True)
    field_visits = visits.loc[np.isin(visit_hpxs, field_hpxs)]
    return field_visits
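# --- Added standalone sketch (synthetic pointings, hypothetical field centre) of the
# membership test above: points are mapped to healpixels with ang2pix and matched
# against a query_disc footprint with np.isin.
import numpy as np
import healpy as hp

nside = 512
field_ra, field_dec = 60.0, -30.0                  # hypothetical field centre (deg)
field_hpxs = hp.query_disc(nside, hp.ang2vec(field_ra, field_dec, lonlat=True),
                           np.radians(1.75))

rng = np.random.default_rng(1)
ra = field_ra + rng.uniform(-3, 3, 100)            # fake pointings around the field
dec = field_dec + rng.uniform(-3, 3, 100)
point_hpxs = hp.ang2pix(nside, ra, dec, lonlat=True)

inside = np.isin(point_hpxs, field_hpxs)
print(inside.sum(), "of 100 pointings fall on the field")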
def pencil(self, theta, phi, angrad, **kwargs): """Query a pencil beam from this `SkyMap` Returns the subset of this `SkyMap` that covers an angular disc on the sky (i.e., a pencil beam) Parameters ---------- theta : `float` zenith angle (radians) at the center of the disc phi : `float` azimuth angle (radians) at the center of the disc angrad : `float` or `~astropy.units.Quantity` angular radius (radians) subtended by the disc **kwargs : `dict`, optional additional keyword arguments to `~healpy.query_disc` Returns ------- out : `SkyMap` the subset of `SkyMap` subtended by this pencil beam """ if isinstance(angrad, units.Quantity): angrad = angrad.to("rad").value direction = healpy.ang2vec(theta, phi) indices = healpy.query_disc(self.nside, direction, angrad, nest=self.nest, **kwargs) return self[indices]
def handling_exception(params, constraints): NSIDEmax = params['NSIDE max'] vec = hp.ang2vec(params["ang"][0], params["ang"][1], lonlat=True) pixels = hp.query_disc(NSIDEmax, vec, np.radians(params['r'] / 3600.), inclusive=True) subjobs = mastcasjobs.MastCasJobs(context="PanSTARRS_DR2") for pixel in pixels: ang, r = qr.parameters(NSIDEmax, pixel) subquery = sub_query_string(ang[0], ang[1], r) accept = True while accept: try: subtab = subjobs.quick(subquery, task_name="python cone search") accept = False except Exception: from time import sleep sleep(60) pass subtab = qr.fixcolnames(ascii.read(subtab)) subtab = qr.query_constraints(subtab, constraints) if pixel == pixels[0]: table = subtab else: table = vstack([table, subtab]) return table
def mask_src(cat_file, MASK_S_RAD, NSIDE):
    """Returns the 'bad pixels' defined by the position of a source and a
       certain radius away from that point.

       cat_file: str
           .fits file of the source catalog
       MASK_S_RAD: float
           radius around each source defining bad pixels to mask
       NSIDE: int
           healpix nside parameter
    """
    logger.info('Mask for sources activated')
    src_cat = pf.open(cat_file)
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    CAT = src_cat['LAT_Point_Source_Catalog']
    BAD_PIX_SRC = []
    SOURCES = CAT.data
    RADrad = MASK_S_RAD * np.pi / 180.
    # flag the pixel at the position of every catalog source
    for i in range(len(SOURCES)):
        GLON = SOURCES.field('GLON')[i]
        GLAT = SOURCES.field('GLAT')[i]
        x, y, z = hp.rotator.dir2vec(GLON, GLAT, lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
    # then flag every pixel within MASK_S_RAD of each flagged pixel
    BAD_PIX_inrad = []
    for bn in BAD_PIX_SRC:
        pixVec = hp.pix2vec(NSIDE, bn)
        radintpix = hp.query_disc(NSIDE, pixVec, RADrad)
        BAD_PIX_inrad.extend(radintpix)
    BAD_PIX_SRC.extend(BAD_PIX_inrad)
    src_cat.close()
    return BAD_PIX_SRC
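# --- Added sketch with a hypothetical source list (not the catalogue used above):
# mask all pixels within a fixed radius of each source position.
import numpy as np
import healpy as hp

nside = 128
radius = np.radians(2.0)
glon = np.array([10.0, 120.0, 250.0])              # made-up source longitudes (deg)
glat = np.array([5.0, -30.0, 60.0])                # made-up source latitudes (deg)

bad_pix = []
for lon, lat in zip(glon, glat):
    vec = hp.ang2vec(lon, lat, lonlat=True)
    bad_pix.extend(hp.query_disc(nside, vec, radius))

mask = np.ones(hp.nside2npix(nside))
mask[np.unique(bad_pix)] = 0                       # 0 = masked
print(int((mask == 0).sum()), "masked pixels")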
def generate_inmaskcat(self, out_edge, tol=1):
    '''
    Return unmasked sources.
    Input:
    out_edge: outer edge of the disk centred at each source (in arcmin);
    tol: tolerance between 0 and 1 indicating a threshold of fraction of
    unmasked pixels in a disk
    Output: indices of unmasked sources.
    '''
    mask = self.mask
    Ns = self.Nside
    Nsource = self.Nsource
    pos_src_all = self.pos_src_all
    inmask_ind = np.zeros(Nsource)
    for i in range(Nsource):
        if i % 10000 == 0:
            print(i)
        list_out = hp.query_disc(Ns, pos_src_all[i], np.radians(out_edge / 60.))
        npix_out = list_out.size
        neff_out = np.sum(mask[list_out])
        if neff_out < tol * npix_out:
            continue
        else:
            inmask_ind[i] = 1
    print(str(inmask_ind.sum()) + ' sources in mask.')
    return inmask_ind
def get_hpmask_subpix_indices(submask_nside, submask_hpix, submask_border, nside_mask, hpix):
    """
    """
    nside_cutref = np.clip(submask_nside * 4, 256, nside_mask)

    # Find out which cutref pixels are inside the main pixel
    theta, phi = hp.pix2ang(nside_cutref, np.arange(hp.nside2npix(nside_cutref)))
    ipring_coarse = hp.ang2pix(submask_nside, theta, phi)
    inhpix, = np.where(ipring_coarse == submask_hpix)

    # If there is a border, we need to find the boundary pixels
    if submask_border > 0.0:
        boundaries = hp.boundaries(submask_nside, submask_hpix,
                                   step=nside_cutref // submask_nside)
        # These are all the pixels that touch the boundary
        for i in range(boundaries.shape[1]):
            pixint = hp.query_disc(nside_cutref, boundaries[:, i],
                                   np.radians(submask_border), inclusive=True, fact=8)
            inhpix = np.append(inhpix, pixint)
        # Need to uniqify here because of overlapping pixels
        inhpix = np.unique(inhpix)

    # And now choose just those depthmap pixels that are in the inhpix region
    theta, phi = hp.pix2ang(nside_mask, hpix)
    ipring = hp.ang2pix(nside_cutref, theta, phi)
    _, use = esutil.numpy_util.match(inhpix, ipring)

    return use
def calculate_efficiency(hpx, d0, s0, s1, nfields, fieldop): '''This function determines the score of a given set of tiling parameters for the optimization. The figure of merit is the sum value of the for the <nfields> higher fields of the tiling''' nside = hp.npix2nside(len(hpx)) keptfields = build_fields(hpx, d0, s0, s1, nfields, fieldop) # totaldots = np.sum(keptfields["prob"]) total = 0 prob_integral = [] for indec in range(0, len(keptfields)): # cornerra, cornerdec = getcorners(keptfields["ra"][indec],keptfields["dec"][indec],field=4.2) # xyz = hp.ang2vec(checktheta(dectotheta(cornerdec)),checkphi(ratophi(cornerdec))) # hp.query_polygon(nside,xyz) xyz = hp.ang2vec(dectotheta(keptfields["dec"][indec]), ratophi(keptfields["ra"][indec])) ipix_disc = hp.query_disc(nside, xyz, np.deg2rad( fieldop)) #here radius seems to be a diameter instead * (sq2+1)/4) totdisc = hpx[ipix_disc].sum() prob_integral.append(totdisc) total += totdisc # efficiency = total / nfields #print (total) return total, prob_integral
def disk_plot(value, dir_vec, ang_size, nside_var): coord = hp.query_disc(nside=nside_var, vec=dir_vec, radius=np.deg2rad(ang_size), inclusive=True, fact=16) m[coord] = value
def label_visits(visits, wfd_footprint, nside=64): # Set up DD names. d = set() for p in visits['note'].unique(): if p.startswith('DD'): d.add(define_ddname(p)) # Define dictionary of proposal tags. propTags = {'Other': 0, 'WFD': 1} for i, field in enumerate(d): propTags[field] = i + 2 # Identify Healpixels associated with each visit. vec = hp.dir2vec(visits['fieldRA'], visits['fieldDec'], lonlat=True) vec = vec.swapaxes(0, 1) radius = np.radians(1.75) # fov radius #pointings = [] propId = np.zeros(len(visits), int) for i, (v, note) in enumerate(zip(vec, visits['note'])): # Identify the healpixels which would be inside this pointing pointing_healpix = hp.query_disc(nside, v, radius, inclusive=False) # This can be useful for debugging/plotting #pointings.append(pointing_healpix) # The wfd_footprint consists of values of 0/1 if out/in WFD footprint in_wfd = wfd_footprint[pointing_healpix].sum() # So in_wfd = the number of healpixels which were in the WFD footprint # .. in the # in / total # > limit (0.4) then "yes" it's in WFD propId[i] = np.where(in_wfd / len(pointing_healpix) > 0.4, propTags['WFD'], 0) # BUT override - if the visit was taken for DD, use that flag instead. if note.startswith('DD'): propId[i] = propTags[define_ddname(note)] return visits, propTags, propId
def dust_vals_disk(self,lcen,bcen,dist,radius): """ NAME: dust_vals_disk PURPOSE: return the distribution of extinction within a small disk as samples INPUT: lcen, bcen - Galactic longitude and latitude of the center of the disk (deg) dist - distance in kpc radius - radius of the disk (deg) OUTPUT: (pixarea,extinction) - arrays of pixel-area in sq rad and extinction value HISTORY: 2015-03-07 - Written - Bovy (IAS) """ # Convert the disk center to a HEALPIX vector vec= healpy.pixelfunc.ang2vec((90.-bcen)*_DEGTORAD,lcen*_DEGTORAD) # We pixelize the map with a HEALPIX grid with nside=256, to somewhat # oversample the Drimmel resolution nside= 256 # Find the pixels at this resolution that fall within the disk ipixs= healpy.query_disc(nside,vec,radius*_DEGTORAD, inclusive=False,nest=False) # Query the HEALPIX map for pixels that lie within the disk pixarea= healpy.pixelfunc.nside2pixarea(nside)+numpy.zeros(len(ipixs)) extinction= [] for ii, ipix in enumerate(ipixs): # Get glon and glat b9, l= healpy.pixelfunc.pix2ang(nside,ipix,nest=False) b= 90.-b9/_DEGTORAD l/= _DEGTORAD # Now evaluate extinction.append(self._evaluate(l,b,dist)) extinction= numpy.array(extinction) return (pixarea,extinction)
def _get_mask(self): if self.config.get('mask_file', None) is not None: mask = hp.read_map(self.config['mask_file']) mask = hp.ud_grade(rotate_mask(mask, self.rot), nside_out=self.nside) mask[mask > 0.5] = 1. mask[mask <= 0.5] = 0. else: mask = np.ones(self.npix) r = hp.Rotator(coord=['C', 'G']) RApix, DEpix = hp.pix2ang(self.nside, np.arange(self.npix), lonlat=True) lpix, bpix = r(RApix, DEpix, lonlat=True) # angular conditions mask[(DEpix < self.config.get('DEC_min_deg', -40)) | (np.fabs(bpix) < self.config.get('GLAT_max_deg', 5))] = 0 if self.file_sourcemask is not None: # holes catalog RAmask, DEmask, radmask = np.loadtxt(self.file_sourcemask, unpack=True) vecmask = hp.ang2vec(RAmask, DEmask, lonlat=True) for vec, radius in zip(vecmask, radmask): ipix_hole = hp.query_disc(self.nside, vec, np.radians(radius), inclusive=True) mask[ipix_hole] = 0 mask = rotate_mask(mask, self.rot, binarize=True) return mask
def compute_circle(N, t, p, r, r_in): # Compute query (outer circle) and query_in (inner circle) query = hp.query_disc(nside=N, vec=hp.ang2vec(np.pi / 2 - t, p), radius=r, inclusive=False, nest=False) query_in = hp.query_disc(nside=N, vec=hp.ang2vec(np.pi / 2 - t, p), radius=r_in, inclusive=False, nest=False) # Inverse intersection of query (outer circle) and query_in (inner circle) inner = minus(query, query_in) return inner
def test_inclusive(self): #HIDL> query_disc, 8, [ 0.17101007, 0.03015369, 0.98480775],6,listpix,/DEG,NESTED=0,/inclusive #HIDL> print,listpix # 0 3 4 5 11 12 13 23 np.testing.assert_array_equal( query_disc(self.NSIDE, self.vec, self.radius, inclusive=True), np.array([ 0, 3, 4, 5, 11, 12, 13, 23 ]) )
def test_not_inclusive(self): #HIDL> query_disc, 8, [ 0.17101007, 0.03015369, 0.98480775],6,listpix,/DEG,NESTED=0 #HIDL> print,listpix # 4 np.testing.assert_array_equal( query_disc(self.NSIDE, self.vec, self.radius, inclusive=False), np.array([4]) )
def get_disc(pix, disc_size, nside): vec = hp.pix2vec(nside, pix, nest=False) in_disc = hp.query_disc( nside=nside, vec=vec, radius=np.deg2rad(disc_size), nest=False) return in_disc
def get_pixels(centr_ra, centr_decl, fov_radius): """ Get a list of HEALPIX zones that contain a given image. """ vector = healpy.ang2vec(radians(90.0 - centr_decl), radians(centr_ra)) pixels = healpy.query_disc(32, vector, radians(fov_radius), inclusive=True, nest=True) return str(pixels.tolist())[1:-1]
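# --- Added example (arbitrary values): an inclusive, NESTED query for a field of view,
# formatted as the comma-separated string the function above returns.
from math import radians
import healpy

vector = healpy.ang2vec(radians(90.0 - 45.0), radians(120.0))   # Dec = 45, RA = 120
pixels = healpy.query_disc(32, vector, radians(2.5), inclusive=True, nest=True)
print(str(pixels.tolist())[1:-1])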
def gaussian_on_a_sphere(mean_th, mean_phi, sigma):
    """
    This function returns a 2D normal pdf on a discretized healpy grid.

    To choose the function values correctly in spherical coordinates, the
    true angular distances to the mean are used.

    Pixels farther away from the mean than clip * sigma are clipped because
    the normal distribution falls quickly to zero. The error made by this can
    be easily estimated and a discussion can be found in
    [arXiv:1005.1929](https://arxiv.org/abs/1005.1929v2).

    Parameters
    ----------
    mean_th : float
        Position of the mean in healpy coordinate `theta`. `theta` is in
        [0, pi] going from north to south pole.
    mean_phi : float
        Position of the mean in healpy coordinate `phi`. `phi` is in
        [0, 2pi] and is equivalent to the azimuth angle.
    sigma : float
        Standard deviation of the 2D normal distribution. Only symmetric
        normal pdfs are used here.

    Returns
    -------
    kernel : array
        Values of the 2D normal distribution at the selected pixels. If clip
        is False, kernel is a valid healpy map with resolution NSIDE.
    keep_idx : array
        Pixel indices that are kept from the full healpy map after clipping.
        If clip is False this is a sorted integer array with values
        [0, 1, ..., NPIX-1] with NPIX the number of pixels in the full healpy
        map with resolution NSIDE.
    """
    # Clip unnecessary pixels, just keep clip*sigma (radians) around the mean.
    # Using inclusive=True to make sure at least one pixel gets returned.
    # Always returns an array, so no manual np.array() required.
    keep_idx = hp.query_disc(
        self._nside, hp.ang2vec(mean_th, mean_phi),
        self._clip * sigma, inclusive=True)

    # Create only the needed pixel healpy coordinates
    th, phi = hp.pix2ang(self._nside, keep_idx)
    sinTh = np.sin(th)

    # For each pixel get the distance to the (mean_th, mean_phi) direction
    dist = angdist(mean_phi, mean_th, phi, sinTh)

    # Get the 2D gaussian values at those distances -> kernel function.
    # Because the kernel is radially symmetric we use a simplified
    # version -> 1D gaussian, properly normed
    sigma2 = 2 * sigma**2
    kernel = np.exp(-dist**2 / sigma2) / (np.pi * sigma2)
    return kernel, keep_idx
def map_generator(): #Constructing maps............................... #if not os.path.exists('./map'): # os.makedirs('./map') #Star map if(map_list['star'] == True): map['star'] = hp.read_map(star_map_file)[mpix2hpix] #Galaxy map if((map_list['gal'] == True) or (map_list['odds'] == True)): for bin in zbin: map['gal'][zbintag(bin)] = {} i_eff = 0 for od in od_cut: gal_map = np.zeros(N_mpix) mask = cut(bin, od, cat['p']['val']['zp'], cat['p']['val']['od']) mpix = cat['p']['val']['mpix'][mask] for i in mpix: if(i >= 0): gal_map[i] += 1 map['gal'][zbintag(bin)][odtag(eff_cut[i_eff])] = gal_map if(map_list['gal'] == True): hp.write_map(folder_out + 'map/map_gal' + nside_tag + '_' + zbintag(bin) + '_' + odtag(eff_cut[i_eff]) + '.fits', np.append(gal_map, 0)[hpix2mpix]) i_eff += 1 #Odds map if(map_list['odds'] == True): for bin in zbin: mask = cut(bin, 0., cat['p']['val']['zp'], cat['p']['val']['od']) od_map = np.zeros(N_mpix) n_map = map['gal'][zbintag(bin)][odtag(0.0)] mpix = cat['p']['val']['mpix'][mask] odds = cat['p']['val']['od'][mask] for i in range(len(mpix)): if(mpix[i] >= 0): od_map[mpix[i]] += odds[i] for i in range(N_mpix): if(n_map[i] != 0): od_map[i] /= n_map[i] od_map_corr = np.copy(od_map) for i in range(N_mpix): if(n_map[i] == 0): i_vec = hp.pix2vec(nside, mpix2hpix[i]) nest = hpix2mpix[hp.query_disc(nside, i_vec, 1 * (np.pi/ 180))] nest = nest[nest >= 0] o = 0 n = 0 for j in nest: o += od_map[j] if (od_map[j] > 0.00000001): n += 1 o /= float(n) od_map_corr[i] = o map['odds'][zbintag(bin)] = od_map_corr hp.write_map(folder_out + 'Map/od_map' + nside_tag + zbintag(bin) + '.fits', np.append(od_map_corr,0)[hpix2mpix])
def dust_vals_disk(self,lcen,bcen,dist,radius): """ NAME: dust_vals_disk PURPOSE: return the distribution of extinction within a small disk as samples INPUT: lcen, bcen - Galactic longitude and latitude of the center of the disk (deg) dist - distance in kpc radius - radius of the disk (deg) OUTPUT: (pixarea,extinction) - arrays of pixel-area in sq rad and extinction value HISTORY: 2015-03-06 - Written - Bovy (IAS) """ # Convert the disk center to a HEALPIX vector vec= healpy.pixelfunc.ang2vec((90.-bcen)*_DEGTORAD,lcen*_DEGTORAD) distmod= 5.*numpy.log10(dist)+10. # Query the HEALPIX map for pixels that lie within the disk pixarea= [] extinction= [] for nside in self._nsides: # Find the pixels at this resolution that fall within the disk ipixs= healpy.query_disc(nside,vec,radius*_DEGTORAD, inclusive=False,nest=True) # Get indices of all pixels within the disk at current nside level nsideindx= self._pix_info['nside'] == nside potenIndxs= self._indexArray[nsideindx] nsidepix= self._pix_info['healpix_index'][nsideindx] # Loop through the pixels in the (small) disk tout= [] for ii,ipix in enumerate(ipixs): lbIndx= potenIndxs[ipix == nsidepix] if numpy.sum(lbIndx) == 0: continue if self._intps[lbIndx] != 0: tout.append(self._intps[lbIndx][0](distmod)) else: interpData=\ interpolate.InterpolatedUnivariateSpline(self._distmods, self._best_fit[lbIndx], k=self._interpk) tout.append(interpData(distmod)) self._intps[lbIndx]= interpData tarea= healpy.pixelfunc.nside2pixarea(nside) tarea= [tarea for ii in range(len(tout))] pixarea.extend(tarea) extinction.extend(tout) pixarea= numpy.array(pixarea) extinction= numpy.array(extinction) if not self._filter is None: extinction= extinction*aebv(self._filter,sf10=self._sf10) return (pixarea,extinction)
def get_healsparse_subpix_indices(subpix_nside, subpix_hpix, subpix_border, coverage_nside):
    """
    Retrieve the coverage pixels that intersect the region, with a border.

    Parameters
    ----------
    subpix_nside: `int`
       Nside for the subregion
    subpix_hpix: `int`
       Pixel number for the subregion (ring format)
    subpix_border: `float`
       Border radius to cover outside subpix_hpix
    coverage_nside: `int`
       Nside of the healsparse coverage map
    """
    # First, we need to know which pixel(s) from nside_coverage are covered by
    # subpix_hpix
    if subpix_nside == coverage_nside:
        # simply convert to nest
        covpix = hp.ring2nest(subpix_nside, subpix_hpix)
    elif subpix_nside > coverage_nside:
        # what pixel is this contained in?
        theta, phi = hp.pix2ang(subpix_nside, subpix_hpix, nest=False)
        covpix = hp.ang2pix(coverage_nside, theta, phi, nest=True)
    else:
        # This is subpix_nside < coverage_nside
        # what coverage pixels are contained in subpix_hpix?
        subpix_hpix_nest = hp.ring2nest(subpix_nside, subpix_hpix)
        bit_shift = 2 * int(np.round(np.log(coverage_nside / subpix_nside) / np.log(2)))
        n_pix = 2**bit_shift
        covpix = np.left_shift(subpix_hpix_nest, bit_shift) + np.arange(n_pix)

    # And now if we have a border...
    if subpix_border > 0.0:
        nside_testing = max([coverage_nside * 4, subpix_nside * 4])
        boundaries = hp.boundaries(subpix_nside, subpix_hpix,
                                   step=nside_testing // subpix_nside)
        extrapix = np.zeros(0, dtype=np.int64)

        # These are pixels that touch the boundary
        for i in range(boundaries.shape[1]):
            pixint = hp.query_disc(nside_testing, boundaries[:, i],
                                   np.radians(subpix_border), inclusive=True, fact=8)
            extrapix = np.append(extrapix, pixint)

        extrapix = np.unique(extrapix)
        theta, phi = hp.pix2ang(nside_testing, extrapix)
        covpix = np.unique(np.append(covpix, hp.ang2pix(coverage_nside, theta, phi, nest=True)))

    return covpix
def local_mean_map(map1, mask1, deg): """ return the local mean map for map1 with mask mask1 """ mp1=hp.ma(map1) mp1.mask=np.logical_not(mask1) NSIDE1=hp.npix2nside(len(map1)) NSIDE2=nside(deg) NPIX2=hp.nside2npix(NSIDE2) mp2=np.array([0.]*NPIX2) for pix2 in range(0, NPIX2): disc1=hp.query_disc(nside=NSIDE1, vec=hp.pix2vec(NSIDE2, pix2), radius=deg2rad(deg)) mp2[pix2]=np.mean(mp1[disc1]) return mp2
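# --- Added self-contained sketch (synthetic map, arbitrary resolutions) of the
# local-statistics pattern above: for every coarse pixel, average the fine map over a
# disc centred on that pixel.
import numpy as np
import healpy as hp

nside_fine, nside_coarse = 64, 8
rng = np.random.default_rng(2)
fine_map = rng.standard_normal(hp.nside2npix(nside_fine))

deg = 10.0
coarse = np.zeros(hp.nside2npix(nside_coarse))
for pix in range(coarse.size):
    disc = hp.query_disc(nside_fine, hp.pix2vec(nside_coarse, pix), np.radians(deg))
    coarse[pix] = fine_map[disc].mean()
print(coarse.min(), coarse.max())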
def find_available_galaxies(fiber_set, tile_set, object_set, tile_ID, filename="available_galaxies"): assert tile_set.healpix_n_side==object_set.healpix_n_side, "healpix n_side is different for tiles and objects" assert tile_set.healpix_n_side>0, "healpix n_side is negative" tile_theta = tile_set.theta[tile_ID] tile_phi = tile_set.phi[tile_ID] tile_center = np.array([tile_theta,tile_phi]) tile_vector = hp.rotator.dir2vec(tile_center) pix = hp.query_disc(tile_set.healpix_n_side, tile_vector, fiber_set.plate_radius, inclusive=True) # stores the galaxies that fall into the tile objects_in_id = np.empty((0)) for i_pix in pix: tmp_id_in = np.where(object_set.healpix_pixels==i_pix) tmp_id_in = tmp_id_in[0] objects_in_id = np.append(objects_in_id, tmp_id_in) objects_in_id = np.int_(objects_in_id) #if(not(i_tiling%(n_tilings/20))): selected_objects = object_set.select(objects_in_id) #these selected objects must have new a x,y coordinates in the focal plane selected_x, selected_y = radec2xy(selected_objects.ra, selected_objects.dec, tile_set.ra[tile_ID], tile_set.dec[tile_ID]) n_fibers = np.size(fiber_set.x) fiber_list = np.arange(n_fibers) #np.random.shuffle(fiber_list) #print fiber_list out = open("tile_%d_%s.dat"%(tile_ID, filename), "w") for fiber_i in fiber_list: fiber_x = fiber_set.x[fiber_i] fiber_y = fiber_set.y[fiber_i] radius = np.sqrt((selected_x - fiber_x)**2 + (selected_y-fiber_y)**2) inside = np.where((radius>fiber_set.patrol_radius_min) & (radius<fiber_set.patrol_radius_max)) inside = inside[0] n_available = np.size(inside) if(n_available): out.write("%d %d %d "%(tile_ID, fiber_i, n_available)) id_available = ''.join('%d ' % i for i in selected_objects.ID[inside]) out.write("%s\n"%(id_available)) else: out.write("%d %d %d \n"%(tile_ID, fiber_i, n_available)) out.close() return selected_x, selected_y
def probability_inside_circle(self, ra, dec, radius): """Return the probability inside a circle.""" prob = hp.read_map(self.skymap, verbose=False) theta = 0.5 * np.pi - np.deg2rad(dec) phi = np.deg2rad(ra) radius = np.deg2rad(radius) xyz = hp.ang2vec(theta, phi) ipix_disc = hp.query_disc(self.nside, xyz, radius) probability_inside_disc = prob[ipix_disc].sum() return "%.1e" % probability_inside_disc
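# --- Added sketch (synthetic map, not a real skymap file): the probability contained
# in a disc is just the sum of the normalised map over the query_disc pixels.
import numpy as np
import healpy as hp

nside = 64
prob = np.random.default_rng(3).random(hp.nside2npix(nside))
prob /= prob.sum()                                 # normalise to a probability map

ra, dec, radius_deg = 197.45, -23.38, 5.0          # arbitrary test position
xyz = hp.ang2vec(np.pi / 2 - np.radians(dec), np.radians(ra))
ipix_disc = hp.query_disc(nside, xyz, np.radians(radius_deg))
print("P(disc) = %.3e" % prob[ipix_disc].sum())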
def correlations(i_ang):
    ang_low = (pi / 180) * (ang[i_ang] - (ang_res / 2))
    ang_high = (pi / 180) * (ang[i_ang] + (ang_res / 2))
    n = np.zeros(N_jkmpix)
    c = np.zeros((n_corr, N_jkmpix))
    c_out = np.zeros((n_corr, N_jkmpix))
    #c_gal = 0
    #n_gal = 0
    for i in range(N_mpix):
        i_vec = hp.pix2vec(nside, mpix2hpix[i])
        disc_low = hp.query_disc(nside, i_vec, ang_low, inclusive=False)
        disc_high = hp.query_disc(nside, i_vec, ang_high, inclusive=False)
        disc = hpix2mpix[np.setdiff1d(disc_high, disc_low)]
        disc = disc[disc >= 0]
        k = hp.vec2pix(nside_jk, i_vec[0], i_vec[1], i_vec[2])
        k = jkhpix2jkmpix[k]
        for l in range(n_corr):
            dmap1 = dmap[l][0]
            dmap2 = dmap[l][1]
            c[l][k] += dmap1[i] * dmap2[disc].sum()
        n[k] += len(disc)
        #c_gal += dmap[0][0][i] * dmap[0][1][disc].sum()
        #n_gal += len(disc)
    for l in range(n_corr):
        for k in range(N_jkmpix):
            c_out[l][k] = (c[l].sum() - c[l][k]) / (n.sum() - n[k])
    print("theta = %.2f Done!" % ang[i_ang])
    return c_out
def make_dom_map(pmt_directions, values, nside=512, d=0.2, smoothing=0.1):
    """Create a mollweide projection of a DOM with given PMTs.

    The output can be used to call the `healpy.mollview` function.
    """
    import healpy as hp

    # use the `d` parameter as the disc radius (was hard-coded to 0.2)
    discs = [hp.query_disc(nside, dir, d) for dir in pmt_directions]
    npix = hp.nside2npix(nside)
    pixels = np.zeros(npix)
    for disc, value in zip(discs, values):
        for pix in disc:
            pixels[pix] = value
    if smoothing > 0:
        return hp.sphtfunc.smoothing(pixels, fwhm=smoothing, iter=1)
    return pixels
def local_variance_map(map1, mask1, deg):
    """
    return the local variance map for map1 with mask mask1, computed in discs
    of radius deg (degrees)
    """
    mp1 = hp.ma(map1)
    mp1.mask = np.logical_not(mask1)
    NSIDE1 = hp.npix2nside(len(map1))
    NSIDE2 = nside(deg)
    NPIX2 = hp.nside2npix(NSIDE2)
    #mask2=np.round(hp.ud_grade(mask1, nside_out=NSIDE2)+tol)
    mp2 = np.array([0.] * NPIX2)
    for pix2 in range(0, NPIX2):
        disc1 = hp.query_disc(nside=NSIDE1, vec=hp.pix2vec(NSIDE2, pix2),
                              radius=deg2rad(deg))
        mp2[pix2] = np.var(mp1[disc1])
    #varmp2.mask=np.logical_not(mask2)
    return mp2
def smoothMap(map, smooth=5.): if smooth == 0: return map npix = len(map) nside = hp.npix2nside(npix) smooth_rad = smooth * np.pi/180. smooth_map = np.zeros(map.shape) vec = np.transpose(hp.pix2vec(nside, np.arange(npix))) for i in range(npix): neighbors = hp.query_disc(nside, vec[i], smooth_rad) smooth_map[i] += np.sum(map[neighbors], axis=0) return smooth_map
def hp_interpolator(map_, el, az, n_pix=4): NSide = hp.pixelfunc.get_nside(map_) direction = np.array([np.pi/2.-el.to_rad(),az.to_rad()]) steplength = hp.pixelfunc.max_pixrad(NSide) for i, r in enumerate(np.arange(steplength, np.pi, steplength)): pixels = np.array(hp.query_disc(NSide,hp.ang2vec(direction[0], direction[1]), r)) filled = np.where(map_[pixels] > -1.)[0] l = len(filled) if l >= n_pix: # print(i, l) filled_pixel = pixels[filled] filled_pixel_directions = hp.pix2vec(NSide, filled_pixel) angular_distance = hp.rotator.angdist(direction, filled_pixel_directions) if angular_distance.min() == 0.: # do we really want this? return map_[filled_pixel[angular_distance.argmin()]] return np.average(map_[filled_pixel], weights=np.power(1./angular_distance, 2))
def doPreCalcs(self):
    """
    Perform the precalculations necessary to set up the sparse matrix.
    """
    self.opsimdf['hids'] = [hp.query_disc(self.nside, vec, self._fieldRadius,
                                          inclusive=self.inclusive,
                                          fact=self.fact,
                                          nest=self.nest)
                            for vec in self.opsimdf[self.vecColName]]
    # materialize the lengths as a list so they can be indexed below
    # (`map` returns an iterator in Python 3)
    lens = [len(hids) for hids in self.opsimdf.hids.values]
    rowdata = []
    for i in range(len(self.opsimdf)):
        rowdata.extend(repeat(i, lens[i]))
    coldata = np.concatenate(self.opsimdf.hids.values)
    self._rowdata = rowdata
    self._coldata = coldata
def mask_src_weighted_custom(cat_file, ENERGY, NSIDE):
    """Returns the 'bad pixels' defined by the position of a source and a
       certain radius away from that point. The radii increase with the
       brightness and are rescaled by a factor between 1 and 0.3 shaped as
       the PSF.

       cat_file: str
           .fits file with the source catalog
       ENERGY: float
           Mean energy of the map to be masked
       NSIDE: int
           healpix nside parameter
    """
    psf_ref_file = os.path.join(GRATOOLS_CONFIG, 'ascii/PSF_UCV_PSF1.txt')
    src_cat = pf.open(cat_file)
    NPIX = hp.pixelfunc.nside2npix(NSIDE)
    CAT = src_cat[1]
    BAD_PIX_SRC = []
    SOURCES = CAT.data
    src_cat.close()
    psf_ref = get_psf_ref(psf_ref_file)
    psf_en = psf_ref(ENERGY)
    psf_min, psf_max = psf_ref.y[5], psf_ref.y[-1]
    norm_min, norm_max = 1, 0.3
    norm = norm_min + psf_en * ((norm_max - norm_min) / (psf_max - psf_min)) - \
        psf_min * ((norm_max - norm_min) / (psf_max - psf_min))
    logger.info('Normalization of radii due to energy: %.3f' % norm)
    logger.info('Psf(%.2f)= %.2f' % (ENERGY, psf_en))
    FLUX = np.log10(SOURCES.field('eflux1000'))
    flux_min, flux_max = min(FLUX), max(FLUX)
    rad_min, rad_max = 1, 5.
    RADdeg = rad_min + FLUX * ((rad_max - rad_min) / (flux_max - flux_min)) - \
        flux_min * ((rad_max - rad_min) / (flux_max - flux_min))
    RADrad = np.radians(RADdeg)
    logger.info('Flux-weighted mask for sources activated')
    TS = SOURCES.field('ts')
    indTS25 = TS > 25.
    GLON = SOURCES.field('GLON')[indTS25]
    GLAT = SOURCES.field('GLAT')[indTS25]
    logger.info('Num Src: %i' % len(TS))
    logger.info('Num Src TS>25: %i' % len(TS[indTS25]))
    for i, src in enumerate(SOURCES[indTS25]):
        x, y, z = hp.rotator.dir2vec(GLON[i], GLAT[i], lonlat=True)
        b_pix = hp.pixelfunc.vec2pix(NSIDE, x, y, z)
        BAD_PIX_SRC.append(b_pix)
        radintpix = hp.query_disc(NSIDE, (x, y, z), RADrad[i] * norm)
        BAD_PIX_SRC.extend(radintpix)
    return BAD_PIX_SRC
def local_power_spectrum(map1, mask1, deg, LMAX=256):
    """
    return the local power spectrum for each disk given by the radius [deg]
    """
    mp1 = hp.ma(map1)
    mp1.mask = np.logical_not(mask1)
    NSIDE1 = hp.npix2nside(len(map1))
    NSIDE2 = nside(deg)
    NPIX2 = hp.nside2npix(NSIDE2)
    NPIX1 = hp.nside2npix(NSIDE1)
    mp2 = [0] * NPIX2
    for pix2 in range(0, NPIX2):
        newmask = np.array([0.] * NPIX1)
        disc1 = hp.query_disc(nside=NSIDE1, vec=hp.pix2vec(NSIDE2, pix2),
                              radius=deg2rad(deg))
        newmask[disc1] = 1.
        # keep only the disc pixels that are also unmasked in the input mask
        newmask = newmask * mask1
        # mask everything outside the disc (the original assigned mask1 here,
        # leaving newmask unused)
        mp1.mask = np.logical_not(newmask)
        mp2[pix2] = hp.anafast(mp1, lmax=LMAX)
    return mp2
def get_subpixel_indices(galtable, hpix=None, border=0.0, nside=0):
    """
    Routine to get subpixel indices from a galaxy table.

    Parameters
    ----------
    galtable: `redmapper.Catalog`
       A redmapper galaxy table master catalog
    hpix: `int`, optional
       Healpix number (ring format) of sub-region.  Default is 0 (full catalog).
    border: `float`, optional
       Border around hpix (in degrees) to find pixels.  Default is 0.0.
    nside: `int`, optional
       Nside of healpix subregion.  Default is 0 (full catalog).

    Returns
    -------
    indices: `np.array`
       Integer array of indices of galaxy table pixels in the subregion.
    """
    if hpix is None or nside == 0:
        return np.arange(galtable.filenames.size)

    theta, phi = hp.pix2ang(galtable.nside, galtable.hpix)
    ipring_big = hp.ang2pix(nside, theta, phi)
    indices, = np.where(ipring_big == hpix)
    if border > 0.0:
        # now we need to find the extra boundary...
        boundaries = hp.boundaries(nside, hpix, step=galtable.nside // nside)
        inhpix = galtable.hpix[indices]
        for i in range(boundaries.shape[1]):
            pixint = hp.query_disc(galtable.nside, boundaries[:, i],
                                   border * np.pi / 180., inclusive=True, fact=8)
            inhpix = np.append(inhpix, pixint)
        inhpix = np.unique(inhpix)
        _, indices = esutil.numpy_util.match(inhpix, galtable.hpix)

    return indices
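# --- Added sketch of the border trick used above (all parameters are arbitrary): pad a
# low-resolution pixel with every high-resolution pixel within `border` degrees of its
# boundary by running inclusive disc queries on the boundary sample points.
import numpy as np
import healpy as hp

nside_lo, nside_hi = 8, 128
hpix = 42                                          # hypothetical coarse pixel (ring)
border = 0.5                                       # degrees

boundaries = hp.boundaries(nside_lo, hpix, step=nside_hi // nside_lo)
padded = []
for i in range(boundaries.shape[1]):
    padded.append(hp.query_disc(nside_hi, boundaries[:, i],
                                np.radians(border), inclusive=True, fact=8))
padded = np.unique(np.concatenate(padded))
print(padded.size, "fine pixels within", border, "deg of the coarse-pixel boundary")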
def make_disc(nside, radius, direction=[1, 0, 0], radians=False, minus=False):
    import numpy as np
    import healpy as hp

    info_message('nside = ' + str(nside))
    info_message('radius = ' + str(radius))

    npix = 12 * nside**2  # the Python 2 long literal (12l) is not valid in Python 3
    mask = np.zeros(npix)

    d2r = np.pi / 180.
    if not radians:
        radius *= d2r

    listpix = hp.query_disc(nside, direction, radius)
    mask[listpix] = 1.

    if minus:
        mask = 1. - mask

    return mask
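# --- Added standalone sketch of the disc/complement masks built above (no logging
# helper needed; the direction and radius are arbitrary).
import numpy as np
import healpy as hp

nside = 64
radius_deg = 15.0
direction = [1, 0, 0]

mask = np.zeros(12 * nside**2)
mask[hp.query_disc(nside, direction, np.radians(radius_deg))] = 1.0
anti_mask = 1.0 - mask                             # the `minus=True` case above
print(mask.sum(), anti_mask.sum(), 12 * nside**2)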