def test_upgrade_healpix(self):
        """Test correctness of healpix upgrading

        """
        nside_in = 2
        nside_out = nside_in * 2  # must differ by 1 order for this test
        npix_in = hp.nside2npix(nside_in)
        npix_out = hp.nside2npix(nside_out)
        pix_i = 5
        # Upgrade pix_i at nside_in using upgrade_healpix
        # Downgrade all pixels at nside_out back to nside_in
        # Check that the mappings between nside_in and nside_out match
        # Output is always NESTED
        # Test 1: Input pix_i is in NESTED
        # "visual" checks with https://healpix.jpl.nasa.gov/html/intronode4.htm
        actual = obs_utils.upgrade_healpix(pix_i, True, nside_in, nside_out)
        desired_all = np.arange(npix_out).reshape((npix_in, 4))
        desired = np.sort(desired_all[pix_i, :])  # NESTED
        np.testing.assert_array_equal(desired, [20, 21, 22, 23], "visual")
        np.testing.assert_array_equal(actual, desired, "input in NESTED")
        # Test 2: Input pix_i is in RING
        actual = obs_utils.upgrade_healpix(pix_i, False, nside_in, nside_out)
        # See https://stackoverflow.com/a/56675901
        # `reorder` reorders RING IDs in NESTED order
        # `reshape` is possible because the ordering is NESTED
        # indexing should be done with a NESTED ID because ordering is NESTED
        # but the output is in RING ID, which was reordered in the first place
        desired_all = hp.reorder(np.arange(npix_out), r2n=True).reshape(
            (npix_in, 4))
        desired_ring = desired_all[hp.ring2nest(nside_in, pix_i), :]
        np.testing.assert_array_equal(np.sort(desired_ring), [14, 26, 27, 43],
                                      "visual")
        desired_nest = hp.ring2nest(nside_out, desired_ring)
        np.testing.assert_array_equal(np.sort(actual), np.sort(desired_nest),
                                      "input in RING")
Example 2
def get_healsparse_subpix_indices(subpix_nside, subpix_hpix, subpix_border, coverage_nside):
    """
    Retrieve the coverage pixels that intersect the region, with a border.

    Parameters
    ----------
    subpix_nside: `int`
       Nside for the subregion
    subpix_hpix: `int`
       Pixel number for the subregion (ring format)
    subpix_border: `float`
       Border radius to cover outside subpix_hpix
    coverage_nside: `int`
       Nside of the healsparse coverage map
    """

    # First, we need to know which pixel(s) from nside_coverage are covered by
    # subpix_hpix

    if subpix_nside == coverage_nside:
        # simply convert to nest
        covpix = hp.ring2nest(subpix_nside, subpix_hpix)
    elif subpix_nside > coverage_nside:
        # what pixel is this contained in?
        theta, phi = hp.pix2ang(subpix_nside, subpix_hpix, nest=False)
        covpix = hp.ang2pix(coverage_nside, theta, phi, nest=True)
    else:
        # This is subpix_nside < coverage_nside
        # what coverage pixels are contained in subpix_hpix?
        subpix_hpix_nest = hp.ring2nest(subpix_nside, subpix_hpix)
        bit_shift = 2 * int(np.round(np.log(coverage_nside / subpix_nside) / np.log(2)))
        n_pix = 2**bit_shift
        covpix = np.left_shift(subpix_hpix_nest, bit_shift) + np.arange(n_pix)

    # And now if we have a border...
    if subpix_border > 0.0:
        nside_testing = max([coverage_nside * 4, subpix_nside * 4])
        boundaries = hp.boundaries(subpix_nside, subpix_hpix, step=nside_testing // subpix_nside)

        extrapix = np.zeros(0, dtype=np.int64)

        # These are pixels that touch the boundary
        for i in range(boundaries.shape[1]):
            pixint = hp.query_disc(nside_testing, boundaries[:, i],
                                   np.radians(subpix_border), inclusive=True, fact=8)
            extrapix = np.append(extrapix, pixint)

        extrapix = np.unique(extrapix)
        theta, phi = hp.pix2ang(nside_testing, extrapix)
        covpix = np.unique(np.append(covpix, hp.ang2pix(coverage_nside, theta, phi, nest=True)))

    return covpix
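The bit-shift branch above relies on the NESTED hierarchy: each extra order appends two bits, so the children of a NESTED parent form a contiguous block starting at the left-shifted parent index. A minimal self-contained check of that relation (illustrative NSIDE values and pixel, assuming healpy and numpy are installed):

import numpy as np
import healpy as hp

nside_parent, nside_child = 8, 32
parent = 100  # arbitrary NESTED parent pixel
bit_shift = 2 * int(np.round(np.log2(nside_child // nside_parent)))
children = np.left_shift(parent, bit_shift) + np.arange(2**bit_shift)
# every child centre must map back to the parent pixel
theta, phi = hp.pix2ang(nside_child, children, nest=True)
assert np.all(hp.ang2pix(nside_parent, theta, phi, nest=True) == parent)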
Example 3
def patches(ind, NSIDEin, NSIDEout, nest=False):
    """Daughter pixel indices in a low resolution HEALPix patch.

    Return HEALPix indices for all pixels of a higher resolution map 
    contained inside the pixel(s) of a lower resolution map. Output pixels 
    are always in the RING ordering scheme.

    Parameters
    ----------
    ind: int or array of ints
        Index of the parent HEALPix patch(es).
    NSIDEin: int
        NSIDE resolution of the parent HEALPix patch(es).
    NSIDEout: int
        NSIDE resolution of the daughter HEALPix pixels.
    nest: bool, optional
        If True, assume ``ind`` are given in NESTED pixel ordering. 
        Otherwise, assume RING ordering. Default: False.

    Returns
    -------
    ipix: 1d-array of int
        Indices of all pixels contained with the parent patch(es). Output 
        is always in RING ordering.

    """
    if NSIDEout/2 == NSIDEin: # Base case
        inds = np.array(ind)
        if nest:
            return hp.nest2ring(NSIDEout, np.tile(np.arange(4), inds.size) + \
                4*inds.repeat(4))
        else:
            return hp.nest2ring(NSIDEout, np.tile(np.arange(4), inds.size) + \
                4*hp.ring2nest(NSIDEin, inds).repeat(4))
    else:
        inds = np.array(ind)
        s = inds.size
        if nest:
            ipix = np.tile(np.arange(4), s) + 4*inds.repeat(4)
        else:
            ipix = np.tile(np.arange(4), s) + \
                4*hp.ring2nest(NSIDEin, inds).repeat(4)
    return np.concatenate((patches(ipix[:s], NSIDEin*2, NSIDEout, True), 
                            patches(ipix[s:2*s], NSIDEin*2, \
                                NSIDEout, True), 
                            patches(ipix[2*s:3*s], NSIDEin*2, \
                                NSIDEout, True), 
                            patches(ipix[3*s:], NSIDEin*2, \
                                NSIDEout, True), 
                            ))
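A quick illustrative call, assuming the `patches` function above is in scope (the output is RING-ordered):

import numpy as np
import healpy as hp

sub = patches(5, 2, 8)          # RING pixel 5 at NSIDE=2 -> 16 pixels at NSIDE=8
assert sub.size == 16
# every subpixel centre lies inside the parent pixel
theta, phi = hp.pix2ang(8, sub)
assert np.all(hp.ang2pix(2, theta, phi) == 5)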
Example 4
def gen_random_fast(nrandom, mask):
    """ This method approximates using a higher resolution healpix map
    to place the random points. It should be fine for measurements larger
    than the mask pixel scale. We take advantage of the equal area nature
    of the healpixels. The downside of this method is that it needs a lot
    of memory for large masks"""

    nside = hp.get_nside(mask)
    nside2 = 4 * nside
    ra = []
    th = []
    filled_pixels = np.where((mask > 0) & (np.isnan(mask) == False))[0]
    densities = mask[filled_pixels]
    kpix = np.random.choice(filled_pixels,
                            size=nrandom,
                            p=densities / np.sum(densities))
    bincounts = np.bincount(kpix)
    kpix2 = np.unique(kpix)
    counts = bincounts[bincounts > 0]
    hh = nside2**2 // nside**2
    i = 0
    for i, c in enumerate(counts):
        rpix = np.random.randint(0, high=hh, size=c)
        nestpix = hp.ring2nest(nside, kpix2[i])
        theta, phi = hp.pix2ang(nside2, hh * nestpix + rpix, nest=True)
        theta = 90. - theta * 180. / np.pi
        phi = phi * 180. / np.pi
        for j in range(0, len(theta)):
            ra.append(phi[j])
            th.append(theta[j])
    ra = np.array(ra)
    dec = np.array(th)
    return ra, dec
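An illustrative call with a toy mask; the disc position, radius and NSIDE below are arbitrary, and `gen_random_fast` is assumed to be in scope:

import numpy as np
import healpy as hp

nside = 64
mask = np.zeros(hp.nside2npix(nside))
disc = hp.query_disc(nside, hp.ang2vec(np.radians(30.0), 0.0), np.radians(6.0))
mask[disc] = 1.0
ra, dec = gen_random_fast(10000, mask)   # both returned in degrees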
Example 5
def u_grade_ipix(ipix, nside_in, nside_out, nest=False):
    """
    Return the indices of sub-pixels (resolution nside_subpix) within
    the super-pixel(s) (resolution nside_superpix).
    
    Parameters:
    -----------
    ipix      : index of the input superpixel(s)
    nside_in  : nside of the input superpixel
    nside_out : nside of the desired subpixels

    Returns:
    --------
    ipix_out : subpixels for each superpixel
    """

    if nside_in == nside_out: return ipix
    if not (nside_in < nside_out):
        raise ValueError("nside_in must be less than nside_out")

    if nest: nest_ipix = ipix
    else: nest_ipix = hp.ring2nest(nside_in, ipix)

    factor = (nside_out // nside_in)**2
    if np.isscalar(ipix):
        nest_ipix_out = factor * nest_ipix + np.arange(factor)
    else:
        nest_ipix_out = factor * np.asarray(
            nest_ipix)[:, np.newaxis] + np.arange(factor)

    if nest: return nest_ipix_out
    else: return hp.nest2ring(nside_out, nest_ipix_out)
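Example calls, assuming the function above is importable: a scalar input yields a flat array of subpixels, an array input yields one row of subpixels per superpixel.

import numpy as np

sub = u_grade_ipix(10, 4, 8)                 # 4 RING subpixels of RING pixel 10
subs = u_grade_ipix(np.array([10, 11]), 4, 8)
assert sub.shape == (4,) and subs.shape == (2, 4)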
Example 6
    def _get_converted_data(self, scheme):
        """
        internal routine to get the data converted to the requested
        scheme

        If the scheme would be unchanged, a reference to the data is returned
        """
        import healpy

        scheme_num = get_scheme_num(scheme)
        if scheme_num == self.hpix.scheme_num:
            return self.data
        
        if scheme_num==NESTED:
            ipring=numpy.arange(self.hpix.npix,dtype='i8')
            ipnest=healpy.ring2nest(self.hpix.nside, ipring)

            newdata=self.data.copy()
            newdata[ipnest]=self.data
        else:
            ipnest=numpy.arange(self.hpix.npix,dtype='i8')
            ipring=healpy.nest2ring(self.hpix.nside, ipnest)

            newdata=self.data.copy()
            newdata[ipring]=self.data

        return newdata
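The scatter assignments above can be cross-checked against `healpy.reorder`; a minimal sketch for the RING-to-NESTED direction, assuming healpy and numpy are installed:

import numpy as np
import healpy as hp

nside = 16
ring_map = np.arange(hp.nside2npix(nside), dtype=float)
nest_map = ring_map.copy()
nest_map[hp.ring2nest(nside, np.arange(ring_map.size))] = ring_map
assert np.array_equal(nest_map, hp.reorder(ring_map, r2n=True))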
Example 7
def random_u_grade_ang(m_inds, nside_in=0, nside_out=16384, is_nest=False):
    """
    Random upscaling of the PS positions, given the pixel centres at resolution nside_in.
    Each PS is moved to one of the high-resolution pixel centres at resolution nside_out.
    For example, for nside_in = 128 and nside_out = 16384, there are npix_out / npix_in = 16384
    possible PS locations within each pixel.
    :param m_inds: indices of the PSs w.r.t. nside_in, in RING ordering
    :param nside_in: nside in
    :param nside_out: nside to use for upscaling
    :param is_nest: if True: indices are assumed to correspond to NEST format instead of RING
    :return: theta, phi of randomly placed PSs within each pixel
    """
    if len(m_inds) == 0:
        return m_inds
    n_ps = len(m_inds)  # number of point sources
    if is_nest:
        m_inds_nest = m_inds
    else:
        m_inds_nest = hp.ring2nest(nside_in,
                                   m_inds)  # convert to NEST ordering
    hp.isnsideok(nside_out, nest=True)  # check that nside_out is okay
    npix_in = hp.nside2npix(nside_in)
    npix_out = hp.nside2npix(nside_out)
    rat2 = npix_out // npix_in
    # For each PS, draw a random fine pixel within the coarse pixel
    inds_fine = np.random.choice(rat2, size=n_ps)
    # Set indices w.r.t. upscaled nside (NEST): rat2 * m_inds_nest -> high-res. pixel 0, inds_fine adds 0 ... rat2 - 1
    inds_out = rat2 * m_inds_nest + inds_fine
    # Calculate angles
    th_out, ph_out = hp.pix2ang(nside_out, inds_out, nest=True, lonlat=False)
    # Note: for rat2 = 16384 and 150 PSs, the chance that there exists at least a pair of equal th/ph_out is > 50%!
    return th_out, ph_out
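An illustrative call with hypothetical pixel indices, assuming the surrounding module's imports (`hp` for healpy, `np` for numpy) and the function above in scope:

import numpy as np

m_inds = np.array([0, 10, 100])            # RING pixels at nside_in
theta, phi = random_u_grade_ang(m_inds, nside_in=128, nside_out=16384)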
Example 8
 def _get_nz(self):
     c_p = self.get_catalog()
     c_s = self._get_specsample(c_p)
     # Sort spec sample by nested pixel index so jackknife
     # samples are spatially correlated.
     ip_s = hp.ring2nest(
         self.nside,
         hp.ang2pix(self.nside,
                    c_s[self.ra_name],
                    c_s[self.dec_name],
                    lonlat=True))
     idsort = np.argsort(ip_s)
     c_s = c_s[idsort]
     # Compute DIR N(z)
     z, nz, nz_jk = get_DIR_Nz(
         c_s,
         c_p, [
             'JCORR', 'KCORR', 'HCORR', 'W1MCORR', 'W2MCORR', 'BCALCORR',
             'RCALCORR', 'ICALCORR'
         ],
         zflag='ZSPEC',
         zrange=[0, 0.4],
         nz=100,
         njk=self.config.get('n_jk_dir', 100))
     zm = 0.5 * (z[1:] + z[:-1])
     return {'z_mid': zm, 'nz': nz, 'nz_jk': nz_jk}
Example 9
    def _sample_from_ipix(self, ipix, nest=False):
        """
        Sample vectors from a uniform distribution within a HEALpixel.

        Credit goes to
        https://git.rwth-aachen.de/astro/astrotools/blob/master/
        astrotools/healpytools.py

        :param ipix: pixel number(s)
        :param nest: set True in case you work with healpy's nested scheme
        :return: vectors containing events from the pixel(s) specified in ipix

        Parameters
        ----------
        ipix : int, list of int
            Healpy pixels.
        nest : bool, optional
            Set to True in case healpy's nested scheme is used.

        Returns
        -------
        np.array, np.array, np.array
            The sampled direction vector components.
        """
        if not nest:
            ipix = hp.ring2nest(self.nside, ipix=ipix)

        n_up = 29 - self._n_order
        i_up = ipix * 4**n_up
        i_up += self._random_state.randint(0, 4**n_up, size=np.size(ipix))
        return hp.pix2vec(nside=2**29, ipix=i_up, nest=True)
Example 11
def upgrade_healpix(pix_id, nested, nside_in, nside_out):
    """Upgrade (superresolve) a healpix into finer ones

    Parameters
    ----------
    pix_id : int
        coarse healpix ID to upgrade
    nested : bool
        whether `pix_id` is given in NESTED scheme
    nside_in : int
        NSIDE of `pix_id`
    nside_out : int
        desired NSIDE of finer healpix

    Returns
    -------
    np.array
        the upgraded healpix IDs in the NESTED scheme

    """
    if not nested:
        pix_id = hp.ring2nest(nside_in, pix_id)
    order_diff = np.log2(nside_out) - np.log2(nside_in)
    factor = 4**order_diff
    upgraded_ids = pix_id * factor + np.arange(factor)
    return upgraded_ids.astype(int)
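Illustrative calls mirroring the test in Example 1, assuming `upgrade_healpix` is importable:

import numpy as np
import healpy as hp

ids = upgrade_healpix(5, True, 2, 4)        # NESTED input
assert np.array_equal(ids, [20, 21, 22, 23])
ids = upgrade_healpix(5, False, 2, 4)       # RING input, output still NESTED
assert np.array_equal(ids, 4 * hp.ring2nest(2, 5) + np.arange(4))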
Example 12
def patchToPixels(nsidePat, nsidePix, patch, nest=False, sort=False):
    """
  Return all pixel ring or nest numbers related to a given patch.
  
  Parameters
  ----------
  nsidePat : int
    nside of the patch
  nsidePix : int
    nside of the child patch
  patch : int
    ring or nest number of the patch
  nest : bool, optional
    consider nest
  sort : bool, optional
    sort the returned array
  
  Returns
  -------
  pix : int array
    ring or nest numbers of all pixels
  """
    patch = patch if nest == True else hp.ring2nest(nsidePat, patch)
    length = (nsidePix // nsidePat)**2
    pix = np.arange(patch * length, (patch + 1) * length)
    pix = pix if nest == True else hp.nest2ring(nsidePix, pix)
    if sort == True:
        pix.sort()
    return pix
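A short usage sketch, assuming `patchToPixels` above is in scope:

pix = patchToPixels(4, 16, 7)     # RING patch 7 at nside 4 -> its RING pixels at nside 16
assert len(pix) == (16 // 4) ** 2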
Example 13
def hp_in_dec_range(nside, decmin, decmax, inclusive=True):
    """HEALPixels in a specified range of Declination.

    Parameters
    ----------
    nside : :class:`int`
        (NESTED) HEALPixel nside.
    decmin, decmax : :class:`float`
        Declination range (degrees).
    inclusive : :class:`bool`, optional, defaults to ``True``
        see documentation for `healpy.query_strip()`.

    Returns
    -------
    :class:`list`
        (Nested) HEALPixels at `nside` in the specified Dec range.

    Notes
    -----
        - Just syntactic sugar around `healpy.query_strip()`.
        - `healpy.query_strip()` isn't implemented for the NESTED scheme
          in early healpy versions, so this queries in the RING scheme
          and then converts to the NESTED scheme.
    """
    # ADM convert Dec to co-latitude in radians.
    # ADM remember that, min/max swap because of the -ve sign.
    thetamin = np.radians(90.-decmax)
    thetamax = np.radians(90.-decmin)

    # ADM determine the pixels that touch the box.
    pixring = hp.query_strip(nside, thetamin, thetamax,
                             inclusive=inclusive, nest=False)
    pixnest = hp.ring2nest(nside, pixring)

    return pixnest
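An illustrative call with an arbitrary NSIDE and Dec range (the function above is assumed importable; with `inclusive=True` the returned pixel centres can sit slightly outside the strip):

import numpy as np
import healpy as hp

pix = hp_in_dec_range(64, -30.0, -20.0)
theta, _ = hp.pix2ang(64, pix, nest=True)
dec = 90.0 - np.degrees(theta)
assert dec.min() > -35.0 and dec.max() < -15.0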
Example 14
def read_map2(filename,field=0,dtype=np.float64,nest=False,hdu=1,h=False,verbose=True,memmap=False):
    hdr=fitsio.read_header(filename,ext=hdu)
    
    fullsky = False
    try:
        if (hdr['OBJECT'].strip() == 'PARTIAL') :
            # partial sky format
            fullsky=False
        else:
            fullsky=True
    except:
        # if no OBJECT in header, assume full sky
        fullsky=True

    if fullsky:
        m=hp.read_map(filename,field=field,dtype=dtype,nest=nest,hdu=hdu,h=h,verbose=verbose,memmap=memmap)
    else:
        # partial sky
        st=fitsio.read(filename,ext=1)
        nside=hdr['NSIDE']

        m=np.zeros(12*nside*nside,dtype=dtype) + hp.UNSEEN

        if ((hdr['ORDERING'].strip() == 'NESTED') and (not nest)) :
            # change from nest to ring...
            m[hp.nest2ring(nside,st['PIXEL'])] = st['SIGNAL']
        elif ((hdr['ORDERING'].strip() == 'RING') and (nest)):
            # change from ring to nest...
            m[hp.ring2nest(nside,st['PIXEL'])] = st['SIGNAL']
        else :
            # straight up
            m[st['PIXEL']] = st['SIGNAL']

    return m
Example 15
def get_subpixels(idx, nside_superpix, nside_subpix, nest=True):
    """Compute the indices of subpixels contained within superpixels.

    This function returns an output array with one additional
    dimension of size N for subpixel indices where N is the maximum
    number of subpixels for any pair of ``nside_superpix`` and
    ``nside_subpix``.  If the number of subpixels is less than N the
    remaining subpixel indices will be set to -1.

    Parameters
    ----------
    idx : `~numpy.ndarray`
        Array of HEALPix pixel indices for superpixels of NSIDE
        ``nside_superpix``.
    nside_superpix : int or `~numpy.ndarray`
        NSIDE of superpixel.
    nside_subpix  : int or `~numpy.ndarray`
        NSIDE of subpixel.
    nest : bool
        If True, assume NESTED pixel ordering, otherwise, RING pixel
        ordering.

    Returns
    -------
    idx_sub : `~numpy.ndarray`
        Indices of HEALpix pixels of nside ``nside_subpix`` contained
        within pixel indices ``idx`` of nside ``nside_superpix``.
    """
    import healpy as hp

    if not nest:
        idx = hp.ring2nest(nside_superpix, idx)

    idx = np.asarray(idx)
    nside_superpix = np.asarray(nside_superpix)
    nside_subpix = np.asarray(nside_subpix)

    if np.any(~is_power2(nside_superpix)) or np.any(~is_power2(nside_subpix)):
        raise ValueError("NSIDE must be a power of 2.")

    # number of subpixels in each superpixel
    npix = np.array((nside_subpix // nside_superpix)**2, ndmin=1)
    x = np.arange(np.max(npix), dtype=int)
    idx = idx * npix

    if not np.all(npix[0] == npix):
        x = np.broadcast_to(x, idx.shape + x.shape)
        idx = idx[..., None] + x
        idx[x >= np.broadcast_to(npix[..., None], x.shape)] = INVALID_INDEX.int
    else:
        idx = idx[..., None] + x

    if not nest:
        m = idx == INVALID_INDEX.int
        idx[m] = 0
        idx = hp.nest2ring(nside_subpix[..., None], idx)
        idx[m] = INVALID_INDEX.int

    return idx
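A small usage sketch with uniform NSIDE values, assuming the module-level helpers the function references (`is_power2`, `INVALID_INDEX`) are available alongside it:

import numpy as np

idx = np.array([0, 1])
sub = get_subpixels(idx, nside_superpix=4, nside_subpix=8)   # NESTED in, NESTED out
assert sub.shape == (2, 4)
assert np.array_equal(sub[0], [0, 1, 2, 3])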
Example 16
    def calc_areas(self, mags):
        """
        Calculate total area from the depth map as a function of magnitude.

        Parameters
        ----------
        mags: `np.array`
           Float array of magnitudes at which to compute area

        Returns
        -------
        areas: `np.array`
           Float array of total areas for each of the mags
        """

        pixsize = hp.nside2pixarea(self.nside, degrees=True)

        if (self.w < 0.0):
            # This is just constant area
            areas = np.zeros(mags.size) + self.config_area
            return areas

        if self.subpix_hpix > 0:
            # for the subregion, we need the area covered in the main pixel
            # I'm not sure what to do about border...but you shouldn't
            # be running this with a subregion with a border
            if self.subpix_border > 0.0:
                raise RuntimeError(
                    "Cannot run calc_areas() with a subregion with a border")

            bitShift = 2 * int(
                np.round(np.log(self.nside / self.subpix_nside) / np.log(2)))
            nFinePerSub = 2**bitShift
            ipnest = np.left_shift(
                hp.ring2nest(self.subpix_nside, self.subpix_hpix),
                bitShift) + np.arange(nFinePerSub)
        else:
            ipnest = self.sparse_depthmap.validPixels

        areas = np.zeros(mags.size)

        values = self.sparse_depthmap.getValuePixel(ipnest)

        gd, = np.where(values['m50'] > 0.0)

        depths = values['m50'][gd]
        st = np.argsort(depths)
        depths = depths[st]

        fracgoods = values['fracgood'][gd[st]]

        inds = np.clip(np.searchsorted(depths, mags) - 1, 1, depths.size - 1)

        lo = (inds < 0)
        areas[lo] = np.sum(fracgoods) * pixsize
        carea = pixsize * np.cumsum(fracgoods)
        areas[~lo] = carea[carea.size - inds[~lo]]

        return areas
Example 17
    def getFilePixels(self, r):
        """Given a healpix cell and radius for a given nside, figure out which
        lightcone pixels we need to read

        Parameters
        ----------
        r : int
            radial bin to read

        Returns
        -------
        pix_file : list
            List of the lightcone file pixels that need to be read
        peano_idx : list
            List of the peano indices of particles that need to be read
        """

        partpath = self.nbody.partpath[self.nbody.boxnum]
        nside = self.nbody.domain.nside
        pix = self.nbody.domain.pix

        header_fmt = ['Np', 'nside_index', 'nside_file', 'box_rmin',
                      'box_rmax', 'void', 'Lbox', 'Mpart', 'Omega_m', 'Omega_l', 'h']

        f = '{}/snapshot_Lightcone_{}_0'.format(partpath, r)
        hdr, idx = read_radial_bin(f)
        hdr = dict(zip(header_fmt, hdr))
        self.part_mass = hdr['Mpart'] * 1e10

        if not self.nbody.domain.nest:
            pix = hp.ring2nest(nside, pix)

        # this assumes that nside < nside_index which should always be true
        idxmap = hp.ud_grade(np.arange(12 * nside**2), hdr['nside_index'],
                             order_in='NESTED', order_out='NESTED')

        # get peano cells corresponding to pix
        # first get nside=nside_index, nest ordered cells corresponding to pix

        peano_idx = nest2peano(np.where(idxmap == pix)[0],
                               int(np.log2(hdr['nside_index'])))

        if nside < hdr['nside_file']:
            udmap = hp.ud_grade(np.arange(12 * nside**2), hdr['nside_file'],
                                order_in='NESTED', order_out='NESTED')
            pix_file, = np.where(udmap == pix)

        elif nside > hdr['nside_file']:
            udmap = hp.ud_grade(np.arange(12 * hdr['nside_file']**2), nside,
                                order_in='NESTED', order_out='NESTED')
            pix_file = [udmap[pix]]

        else:
            pix_file = [pix]

        return pix_file, peano_idx
Example 18
def ring2nest(testcase):
    cs = []
    for norder in range(16):
        nside = 1 << norder
        for i in range(1000):
            ipix = random.randrange(12 * nside * nside)
            args = (nside, ipix)
            cs.append(
                dict(args=args, expected=healpy.ring2nest(*args).tolist()))
    testcase['ring2nest'] = cs
Example 19
def downsize(a):
    """For a HEALPix RING array with nside a power of two,
    return an nside/2 array of the average for each group of four pixels
    """
    import healpy
    nside = int(np.sqrt(len(a) / 12.))
    a1 = a[healpy.nest2ring(nside, np.arange(len(a)))]
    t = a1.reshape(len(a) // 4, 4)
    a2 = t.mean(axis=1)
    return a2[healpy.ring2nest(nside // 2, np.arange(len(a2)))]
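Quick check of the averaging, assuming `downsize` above and healpy/numpy installed (the total mean is preserved because all groups have equal area):

import numpy as np
import healpy as hp

nside = 8
m = np.arange(hp.nside2npix(nside), dtype=float)   # RING map
m_lo = downsize(m)
assert m_lo.size == hp.nside2npix(nside // 2)
assert np.isclose(m_lo.mean(), m.mean())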
Example 21
def build_cov(cl,
              nside,
              mask=None,
              tree_depth=0,
              lmax=None,
              apply_pixwin=False,
              ninterp=10000,
              log=False,
              shift=None):
    tree_depth = 2**(tree_depth)

    if mask is None:
        mask = np.ones(hp.nside2npix(nside), dtype=bool)

    if lmax is not None and lmax > len(cl) - 1:
        lmax = len(cl) - 1

        cl = cl[:lmax + 1]

    if apply_pixwin:
        pw = hp.pixwin(nside * tree_depth, lmax=lmax)

        cl = cl[:len(pw)] * (pw**2)

    npix = int(mask.sum())
    mask_inds = hp.ring2nest(nside, np.arange(hp.nside2npix(nside))[mask])

    thetas = np.linspace(0, np.pi, ninterp)
    xis = cl2xi_theta(cl, thetas)

    exe_path = os.path.join(os.path.dirname(__file__), 'healcov')
    proc = subprocess.Popen(exe_path,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)

    proc.stdin.write(nside.to_bytes(8, 'little'))
    proc.stdin.write(npix.to_bytes(8, 'little'))
    proc.stdin.write(mask_inds.tobytes())
    proc.stdin.write(tree_depth.to_bytes(8, 'little'))
    proc.stdin.write(ninterp.to_bytes(8, 'little'))
    proc.stdin.write(thetas.tobytes())
    proc.stdin.write(xis.tobytes())
    proc.stdin.close()

    cov = np.frombuffer(proc.stdout.read()).reshape([npix, npix])

    info = proc.wait()

    if info != 0:
        raise Exception()

    if log is True:
        cov = np.log(cov / (shift**2) + 1)

    return cov
Example 22
def compute_cl(m, nside=1024, nest=True, lmax=1500):
    logging.info('compute_cl -> lmax=%d' % lmax)
    if nest:
        npix = hp.nside2npix(nside)
        mm = np.zeros(npix)
        i = np.arange(npix)
        mm[:] = m[hp.ring2nest(nside, i)]
    else:
        mm = m
    c = hp.anafast(mm, lmax=lmax)
    return c
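Illustrative call with a NESTED random map (toy NSIDE and lmax; `compute_cl` and its module imports are assumed available):

import numpy as np
import healpy as hp

nside = 64
m_ring = np.random.standard_normal(hp.nside2npix(nside))
m_nest = hp.reorder(m_ring, r2n=True)
cl = compute_cl(m_nest, nside=nside, nest=True, lmax=128)
assert cl.size == 129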
Example 23
def read_skymap_rotate(skymap_filename, theta=0, phi=0):
    '''Read the healpix skymap'''

    skymap, metadata = fits.read_sky_map(skymap_filename, nest=True)
    nside = hp.get_nside(skymap)
    npix = hp.nside2npix(nside)
    skymap = skymap[hp.ring2nest(nside, np.arange(npix))]
    if (not theta == 0) or (not phi == 0):
        skymap = rotate_map(skymap, np.deg2rad(theta), np.deg2rad(phi))

    return skymap
Example 24
def pix_in_pix(nside_low, nside_high, ipix, ord_in='Ring', ord_out='Ring'):

    rat = int(nside_high // nside_low)**2
    if ord_in == 'Ring':
        ipix = hp.ring2nest(nside_low, ipix)
    ipix *= rat
    liste = np.arange(ipix, ipix + rat)
    if ord_out == 'Ring':
        liste = hp.nest2ring(nside_high, liste)

    return liste
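Example call, assuming `pix_in_pix` above is in scope:

children = pix_in_pix(4, 8, 3)    # RING pixel 3 at nside 4 -> its 4 RING children at nside 8
assert children.size == 4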
Example 25
def anafast_patch(i, ps, kap):
    t = time.time()
    #print "Starting " +str(i+1)+ " / " +str(N)
    n1 = hp.get_nside(kap_down)
    n2 = hp.npix2nside(len(cib))
    r = n2 // n1
    pixn = hp.ring2nest(n1, i)
    ind = np.arange(r**2 * pixn, r**2 * (pixn + 1))

    ind = hp.nest2ring(n2, ind)
    ps[i] = np.mean(cib[ind]**power) - (np.mean(cib[ind]))**power
    kap[i] = np.mean(kappa[ind])
Example 26
def inpaint(m,num_degrades=1,nside_in=2048):
    """
    Inpaints missing pixels by degrading the map (while ignoring missing pixels),
    then setting the missing values corresponding to their value in the degraded map.
    """
    import numpy as np
    import healpy as H
    nside_deg = nside_in // (2**num_degrades)
    badpix = np.arange(12*nside_in**2)[m == H.UNSEEN]
    badpix_deg = H.nest2ring(nside_deg, H.ring2nest(nside_in, badpix) >> 2*num_degrades)
    m_deg = H.ud_grade(m, nside_deg)
    m2 = m.copy()
    m2[badpix] = m_deg[badpix_deg]
    return m2
Example 27
    def crosscheck_prob(self):

        try:
            nside = self.ligo_nside
        except AttributeError:
            nside = self.nside

        class MNS:
            def __init__(self, data):
                self.data = pandas.DataFrame(
                    data, columns=["field", "ra", "dec", "datetime"])

        data = []

        for f in self.overlap_fields:
            ra, dec = ztfquery_fields.field_to_coords(float(f))[0]
            t = Time(self.t_min.jd, format="jd").utc
            t.format = "isot"
            t = t.value
            data.append([f, ra, dec, t])

            mns = MNS(data)

        data = mns.data.copy()

        self.logger.info("Unpacking observations")
        field_prob = 0.0

        ps = []

        for index, row in tqdm(data.iterrows()):
            pix = get_quadrant_ipix(nside, row["ra"], row["dec"])

            flat_pix = []

            for sub_list in pix:
                for p in sub_list:
                    flat_pix.append(p)

            flat_pix = list(set(flat_pix))
            ps += flat_pix

        ps = list(set(ps))

        for p in hp.ring2nest(nside, ps):
            field_prob += self.data[self.key][int(p)]

        self.logger.info(
            f"Intergrating all fields overlapping 90% contour gives {100*field_prob:.2g}%"
        )
Example 28
    def add_weights(self, filename):
        """Add weights to the photon data
        """
        
        # load a pickle containing weights, generated by pointlike
        assert os.path.exists(filename),f'File {filename} not found.'
        with open(filename, 'rb') as file:
            wtd = pickle.load(file, encoding='latin1')
        assert type(wtd)==dict, 'Expect a dictionary'
        test_elements = 'energy_bins pixels weights nside model_name radius order roi_name'.split()
        assert np.all([x in wtd.keys() for x in test_elements]),f'Dict missing one of the keys {test_elements}'

        pos = wtd['source_lb']
        if self.verbose>0:
            print(f'Adding weights from file {os.path.realpath(filename)}')
            print(f'Found weights for {wtd["source_name"]} at ({pos[0]:.2f}, {pos[1]:.2f})')
        # extract pixel ids and nside used
        wt_pix   = wtd['pixels']
        nside_wt = wtd['nside']
    
        # merge the weights into a table, with default nans
        # indexing is band id rows by weight pixel columns
        # append one empty column for photons not in a weight pixel
        # calculated weights are in a dict with band id keys        
        wts = np.full((32, len(wt_pix)+1), np.nan, dtype=np.float32)    
        weight_dict = wtd['weights']
        for k in weight_dict.keys():
            wts[k,:-1] = weight_dict[k]   

        # get the photon pixel ids, convert to NEST and right shift them 
        photons = self.photon_data
        photon_pix = healpy.ring2nest(self.nside, photons.pixel.values)
        to_shift = 2*int(np.log2(self.nside/nside_wt))
        shifted_pix =   np.right_shift(photon_pix, to_shift)
        bad = np.logical_not(np.isin(shifted_pix, wt_pix)) 
        if self.verbose>0:
            print(f'\t{sum(bad)} / {len(bad)} photon pixels are outside weight region')
        if sum(bad)==len(bad):
            raise Exception('No weights found')
        shifted_pix[bad] = 12*nside_wt**2 # set index to be beyond pixel indices

        # find indices with search and add a "weights" column
        # (expect that wt_pix are NEST ordering and sorted) 
        weight_index = np.searchsorted(wt_pix,shifted_pix)
        band_index = photons.band.values
        # final grand lookup -- isn't numpy wonderful!
        photons.loc[:,'weight'] = wts[tuple([band_index, weight_index])] 
        if self.verbose>0:
            print(f'\t{sum(np.isnan(photons.weight.values))} weights set to NaN')
        return wtd # for reference   
Example 29
    def test_get_healpix_centers(self):
        """Test if correct sky locations are returned in the cosmoDC2 convention

        """
        # Correct answers hardcoded with known cosmoDC2 catalog values
        # Input i_pix is in nested scheme
        ra, dec = obs_utils.get_healpix_centers(hp.ring2nest(32, 10450),
                                                32,
                                                nest=True)
        np.testing.assert_array_almost_equal(ra, [67.5], decimal=1)
        np.testing.assert_array_almost_equal(dec, [-45.0], decimal=1)
        # Input i_pix is in ring scheme
        ra, dec = obs_utils.get_healpix_centers(10450, 32, nest=False)
        np.testing.assert_array_almost_equal(ra, [67.5], decimal=1)
        np.testing.assert_array_almost_equal(dec, [-45.0], decimal=1)
Example 30
def ncmap_sample_positions(nc_map, ip_good, nside, fgoodmap, nside_up=4):
    """
    Samples positions of galaxies inside a footprint from a number counts map
    """
    ip_good_nest = hp.ring2nest(nside, ip_good)
    ipix_nest = [
        rd.sample(
            range(ip_good_nest[i] * 4**nside_up,
                  (ip_good_nest[i] + 1) * 4**nside_up), nc_map[i])
        for i in range(len(ip_good))
    ]
    ipix_nest = [ip for sub in ipix_nest for ip in sub]

    hpix = hu.HealPix('nest', nside * 2**nside_up)
    ra, dec = hpix.pix2eq(ipix_nest)
    return ra, dec
Example 31
def rand_exposure_vec_in_pix(nside,
                             ipix,
                             a0=-35.25,
                             zmax=60,
                             coord_system='gal',
                             deviation=0.5,
                             nest=False):
    """
    Draw vectors from a distribution within a HEALpixel that follow the exposure
    distribution within the pixel. It is much slower than rand_vec_in_pix() and
    should therefore only be used for problematic pixels (close to zero exposure).

    :param nside: nside of the healpy pixelization
    :param ipix: pixel number(s)
    :param a0: latitude of detector (-90, 90) in degrees (default: Auger)
    :param zmax: maximum acceptance zenith angle (0, 90) degrees
    :param coord_system: choose between different coordinate systems - gal, eq, sgal, ecl
    :param deviation: maximum relative deviation between exposure values in pixel corners
    :param nest: set True in case you work with healpy's nested scheme
    :return: vectors containing events from the pixel(s) specified in ipix
    """
    ipix = np.atleast_1d(ipix)
    vecs = np.zeros((3, ipix.size))
    mask = check_problematic_pixel(nside, ipix, a0, zmax, deviation)
    vecs[:, ~mask] = rand_vec_in_pix(nside, ipix[~mask], nest)
    if not nest:
        ipix = hp.ring2nest(nside, ipix=ipix)

    for pix in np.unique(ipix[mask]):
        n = np.sum(ipix == pix)
        # increase resolution of healpy schemes corresponding to number of crs per pixel
        n_up = max(3, int(np.ceil(np.log10(10 * n) / np.log10(4))))
        pix_new = pix * 4**n_up + np.arange(4**n_up)
        v = pix2vec(nside=nside * 2**n_up, ipix=pix_new, nest=True)
        if coord_system != 'eq':
            v = getattr(coord, '%s2eq' % coord_system)(v)
        p = coord.exposure_equatorial(coord.vec2ang(v)[1], a0, zmax)
        pixel = np.random.choice(pix_new,
                                 size=n,
                                 replace=False,
                                 p=p / np.sum(p))
        vecs[:, ipix == pix] = pix2vec(nside=nside * 2**n_up,
                                       ipix=pixel,
                                       nest=True)

    return np.array(vecs)
Example 32
def multiply(a, b):
    """ multiply HEALPix array a by b, element by element. Both must have power-of-2 nside.
      b must have smaller or equal nside. Return the new array, with dimension of a.
    """
    alen = len(a); blen = len(b)
    if alen == blen: return a*b
    assert alen > blen, 'Expect len(a)>len(b)'
    anside = int(np.sqrt(alen/12.))
    bnside = int(np.sqrt(blen/12.))
    # reorder a and b to NEST order, reshape a, b to 2D with first dimension blen
    anest = a[healpy.nest2ring(anside, range(alen))].reshape(blen, alen//blen)
    bnest = b[healpy.nest2ring(bnside, range(blen))].reshape(blen, 1)
    # broadcast the multiply, back to 1D
    cnest = (anest * bnest).reshape(alen)
    # back to RING
    c = cnest[healpy.ring2nest(anside, range(alen))]
    return c
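A minimal sanity check (a `b` of all ones leaves `a` unchanged), assuming `multiply` above with numpy and healpy imported:

import numpy as np
import healpy

a = np.arange(healpy.nside2npix(8), dtype=float)   # RING, nside 8
b = np.ones(healpy.nside2npix(4))                  # RING, nside 4
c = multiply(a, b)
assert np.allclose(c, a)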
Example 33
def _add_weights(config, wts, wt_pix, nside_wt, photon_data):
    """ get the photon pixel ids, convert to NEST (if not already) and right shift them
        add 'weight', remove 'band', 'pixel'
    """
    if not config.nest:
        # data are RING
        photon_pix = healpy.ring2nest(config.nside,
                                      photon_data.nest_index.values)
    else:
        photon_pix = photon_data.nest_index.values
    to_shift = 2 * int(np.log2(config.nside / nside_wt))
    shifted_pix = np.right_shift(photon_pix, to_shift)
    bad = np.logical_not(np.isin(shifted_pix, wt_pix))
    if config.verbose > 0 and sum(bad) > 0:
        print(
            f'\tApplying weights: {sum(bad)} / {len(bad)} photon pixels are outside weight region'
        )
    if sum(bad) == len(bad):
        a = np.array(healpy.pix2ang(nside_wt, wt_pix, nest=True,
                                    lonlat=True)).mean(axis=1).round(1)
        b = np.array(
            healpy.pix2ang(nside_wt, shifted_pix, nest=True,
                           lonlat=True)).mean(axis=1).round(1)

        raise Exception(
            f'There was no overlap of the photon data at {b} and the weights at {a}'
        )
    shifted_pix[bad] = 12 * nside_wt**2  # set index to be beyond pixel indices

    # find indices with search and add a "weights" column
    # (expect that wt_pix are NEST ordering and sorted)
    weight_index = np.searchsorted(wt_pix, shifted_pix)
    band_index = np.fmin(
        31, photon_data.band.values)  #all above 1 TeV into last bin

    # final grand lookup -- isn't numpy wonderful!
    photon_data.loc[:, 'weight'] = wts[tuple([band_index, weight_index])]

    # don't need these columns now (add flag to config to control??)
    #     photon_data.drop(['band', 'pixel'], axis=1)

    if config.verbose > 1:
        print(
            f'\t{sum(np.isnan(photon_data.weight.values))} events without weight'
        )
Example 34
    def to_swapped(self):
        import healpy as hp
        hpx_out = self.geom.to_swapped()
        map_out = self.__class__(hpx_out, meta=copy.deepcopy(self.meta))
        idx = self.geom.get_idx(flat=True)
        vals = self.get_by_idx(idx)
        if self.geom.nside.size > 1:
            nside = self.geom.nside[idx[1:]]
        else:
            nside = self.geom.nside

        if self.geom.nest:
            idx_new = tuple([hp.nest2ring(nside, idx[0])]) + idx[1:]
        else:
            idx_new = tuple([hp.ring2nest(nside, idx[0])]) + idx[1:]

        map_out.set_by_idx(idx_new, vals)
        return map_out
Example 35
def randVecInPix(nside, ipix, nest=False):
    """
    Draw vectors from a uniform distribution within a HEALpixel.
    nside : healpix nside parameter
    ipix  : pixel number(s)
    """
    if not(nest):
        ipix = healpy.ring2nest(nside, ipix=ipix)

    norder = nside2norder(nside)
    nUp = 29 - norder
    iUp = ipix * 4**nUp

    if np.iterable(ipix):
        iUp += np.random.randint(0, 4**nUp, size=np.size(ipix))
    else:
        iUp += np.random.randint(0, 4**nUp)
    vec = healpy.pix2vec(nside=2**29, ipix=iUp, nest=True)
    return vec
Example 36
    def to_swapped(self):
        import healpy as hp

        hpx_out = self.geom.to_swapped()
        map_out = self._init_copy(geom=hpx_out, data=None)
        idx = self.geom.get_idx(flat=True)
        vals = self.get_by_idx(idx)
        if self.geom.nside.size > 1:
            nside = self.geom.nside[idx[1:]]
        else:
            nside = self.geom.nside

        if self.geom.nest:
            idx_new = tuple([hp.nest2ring(nside, idx[0])]) + idx[1:]
        else:
            idx_new = tuple([hp.ring2nest(nside, idx[0])]) + idx[1:]

        map_out.set_by_idx(idx_new, vals)
        return map_out
Example 37
def read_map(filename, HDU=0, field=0, nest=False):
    """Read Healpix map
    all columns of the specified HDU are read into a compound numpy MASKED array
    if nest is not None, the map is converted if need to NEST or RING ordering.
    this function requires healpy"""
    m, h = read(filename, HDU=HDU, return_header=True)
    try:
        m = m[field]
    except exceptions.KeyError:
        m = m.values()[field]
    nside = healpy.npix2nside(m.size)
    if not nest is None:
        if h.get('ORDERING', False):
            if h['ORDERING'] == 'NESTED' and not nest:
                idx = healpy.ring2nest(nside,np.arange(m.size,dtype=np.int32))
                m = m[idx]
            elif h['ORDERING'] == 'RING' and nest:
                idx = healpy.nest2ring(nside,np.arange(m.size,dtype=np.int32))
                m = m[idx]
    return healpy.ma(m)
Example 38
    def generateStarCatalog(self, nstars,nside2=65536):
        """ 
        Generates  star catalog  by sampling from healpix map.
        nstars : number of stars to generate
        nside2 : healpix grid used to subsample pixels

        note that since nside2 is never allocated it can be arbitrarily large.
        """

        nside=hp.get_nside(self.smap.data)
        ra=[]
        th=[]
        filled_pixels = np.where(self.smap>0)[0]
        densities = self.smap[filled_pixels]
        kpix = np.random.choice(filled_pixels,size=nstars,p=densities/np.sum(densities))
        bincounts = np.bincount(kpix)
        kpix2 = np.unique(kpix)
        counts=bincounts[bincounts>0]
        hh=nside2**2//nside**2
        i=0
        for i,c in enumerate(counts):
            rpix=np.random.randint(0,high=hh,size=c)
            nestpix=hp.ring2nest(nside,kpix2[i])
            theta, phi = hp.pix2ang(nside2,hh*nestpix+rpix,nest=True)
            theta=90.-theta*180./np.pi
            phi=phi*180./np.pi
            for j in range(0,len(theta)):
                ra.append(phi[j])
                th.append(theta[j])
        ra=np.array(ra)
        dec=np.array(th)
        z=np.random.normal(self.zmean, self.zsigma, nstars)
        catalog=cat.Catalog(nstars)
        catalog['ra']=ra
        catalog['dec']=dec
        catalog['z']=z
        return catalog
Example 39
    map_cmb_true[:,ipol]=hp.read_map("../../r0p00s4321/cmb_r0p00_ns256s4321.fits",field=ipol)

maps_obs=np.zeros([par.n_pix,par.n_pol,par.n_nu])
for inu in np.arange(par.n_nu) :
    for ipol in np.arange(par.n_pol) :
        maps_obs[:,ipol,inu]=hp.read_map("../../r0p00s4321/obs_r0p00_nu%03d.fits"%(inu+1),field=ipol)

amin2_per_pix=4*np.pi*(180*60/np.pi)**2/par.n_pix
sigma2_per_pix=par.noise_list**2/amin2_per_pix
maps_s2_noise=np.ones([par.n_pix,par.n_pol,par.n_nu])*sigma2_per_pix[np.newaxis,np.newaxis,:]
if par.include_polarization :
    maps_s2_noise[:,1:,:]*=2
maps_noise_weights=1./maps_s2_noise

npix_spec=hp.nside2npix(nside_spec)
ipix0=hp.ring2nest(par.nside_spec,hp.ang2pix(par.nside_spec,theta_patch*np.pi/180,phi_patch*np.pi/180))
print "pixel", ipix0
map_mean=np.zeros([par.n_pix,par.n_pol,par.n_comp])
map_sigma=np.zeros([par.n_pix,par.n_pol,par.n_comp])
map_xspec_mean=np.zeros([npix_spec,par.n_spec_vary])
map_xspec_sigma=np.zeros([npix_spec,par.n_spec_vary])

#for ipix in [ipix0] :
for ipix in np.arange(npix_spec) :
    if ipix!=ipix0 :
        continue
    ipix_list=hp.nest2ring(par.nside,ipix*par.n_sub+np.arange(par.n_sub))

    if plot_stuff :
        map_show=np.zeros(par.n_pix); map_show[ipix_list]=1.0; hp.mollview(map_show); plt.show()
        
Example 40
def computeHPXpix_sequ_new(nside, propertyArray, pixoffset=0, ratiores=4, coadd_cut=False): 
    #return 'ERROR'
    #img_ras, img_decs = [propertyArray[v] for v in ['ra0', 'ra1', 'ra2','ra3']],[propertyArray[v] for v in ['dec0', 'dec1', 'dec2','dec3']]
    #x = [1+pixoffset, propertyArray['NAXIS1']-pixoffset, propertyArray['NAXIS1']-pixoffset, 1+pixoffset, 1+pixoffset]
    #y = [1+pixoffset, 1+pixoffset, propertyArray['NAXIS2']-pixoffset, propertyArray['NAXIS2']-pixoffset, 1+pixoffset]

    #if np.any(img_ras > 360.0):
    #    img_ras[img_ras > 360.0] -= 360.0
    #if np.any(img_ras < 0.0):
    #    img_ras[img_ras < 0.0] += 360.0
    #print 'in here'
    #print len(img_ras)#,len(img_ras[0])
    #plt.plot(img_ras[0],img_decs[0],'k,')
    #plt.show()
    img_ras, img_decs = computeCorners_WCS_TPV(propertyArray, pixoffset)
    # Coordinates of coadd corners
    # RALL, t.DECLL, t.RAUL, t.DECUL, t.RAUR, t.DECUR, t.RALR, t.DECLR, t.URALL, t.UDECLL, t.URAUR, t.UDECUR
    if coadd_cut:
        #coadd_ras = [propertyArray[v] for v in ['URAUL', 'URALL', 'URALR', 'URAUR']]
        #coadd_decs = [propertyArray[v] for v in ['UDECUL', 'UDECLL', 'UDECLR', 'UDECUR']]
        coadd_ras = [propertyArray[v] for v in ['ra0', 'ra1', 'ra2', 'ra3']]
        coadd_decs = [propertyArray[v] for v in ['dec0', 'dec1', 'dec2', 'dec3']]
        coadd_phis = np.multiply(coadd_ras, np.pi/180)
        coadd_thetas =  np.pi/2  - np.multiply(coadd_decs, np.pi/180)
    else:
        coadd_phis = 0.0
        coadd_thetas = 0.0
    # Coordinates of image corners
    #print img_ras
    img_phis = np.multiply(img_ras , np.pi/180)
    img_thetas =  np.pi/2  - np.multiply(img_decs , np.pi/180)
    img_pix = hp.ang2pix(nside, img_thetas, img_phis, nest=False)
    pix_thetas, pix_phis = hp.pix2ang(nside, img_pix, nest=False)
    #img_phis = np.mod( img_phis + np.pi, 2*np.pi ) # Enable these two lines to rotate everything by 180 degrees
    #coadd_phis = np.mod( coadd_phis + np.pi, 2*np.pi ) # Enable these two lines to rotate everything by 180 degrees
    ind_U = 0
    ind_L = 2
    ind_R = 3
    ind_B = 1
    ipix_list = np.zeros(0, dtype=np.int64)
    weight_list = np.zeros(0, dtype=float)
    # loop over rings until reached bottom
    iring_U = ring_num(nside, np.cos(img_thetas.min()), shift=0)
    iring_B = ring_num(nside, np.cos(img_thetas.max()), shift=0)
    ipixs_ring = []
    pmax = np.max(img_phis)
    pmin = np.min(img_phis)
    if (pmax - pmin > np.pi):
        ipixs_ring = np.int64(np.concatenate([in_ring(nside, iring, pmax, pmin, conservative=True) for iring in range(iring_U-1, iring_B+1)]))
    else:
        ipixs_ring = np.int64(np.concatenate([in_ring(nside, iring, pmin, pmax, conservative=True) for iring in range(iring_U-1, iring_B+1)]))

    ipixs_nest = hp.ring2nest(nside, ipixs_ring)
    npixtot = hp.nside2npix(nside)
    if ratiores > 1:
        subipixs_nest = np.concatenate([np.arange(ipix*ratiores**2, ipix*ratiores**2+ratiores**2, dtype=np.int64) for ipix in ipixs_nest])
        nsubpixperpix = ratiores**2
    else:
        subipixs_nest = ipixs_nest
        nsubpixperpix = 1

    rangepix_thetas, rangepix_phis = hp.pix2ang(nside*ratiores, subipixs_nest, nest=True)
    #subipixs_ring = hp.ang2pix(nside*ratiores, rangepix_thetas, rangepix_phis, nest=False).reshape(-1, nsubpixperpix)

    if (pmax - pmin > np.pi) or (np.max(coadd_phis) - np.min(coadd_phis) > np.pi):
        img_phis= np.mod( img_phis + np.pi, 2*np.pi )
        coadd_phis= np.mod( coadd_phis + np.pi, 2*np.pi )
        rangepix_phis = np.mod( rangepix_phis + np.pi, 2*np.pi )

    subweights = in_region(rangepix_thetas, rangepix_phis,
                                   img_thetas[ind_U], img_phis[ind_U], img_thetas[ind_L], img_phis[ind_L],
                                   img_thetas[ind_R], img_phis[ind_R], img_thetas[ind_B], img_phis[ind_B])
    if coadd_cut:
        subweights_coadd = in_region(rangepix_thetas, rangepix_phis,
                                   coadd_thetas[ind_U], coadd_phis[ind_U], coadd_thetas[ind_L], coadd_phis[ind_L],
                                   coadd_thetas[ind_R], coadd_phis[ind_R], coadd_thetas[ind_B], coadd_phis[ind_B])
        resubweights = np.logical_and(subweights, subweights_coadd).reshape(-1, nsubpixperpix)
    else:
        resubweights = subweights.reshape(-1, nsubpixperpix)

    sweights = resubweights.sum(axis=1) / float(nsubpixperpix)
    ind = (sweights > 0.0)
    
    return ipixs_ring[ind], sweights[ind], img_thetas, img_phis, resubweights[ind,:]
Example 41
elif targetsys == 'MS':
    correctallhpxindices_ring = healpy.ang2pix(
        targetnside, np.pi / 2 - mlat, mlon, nest=False
        )
else:
    print('not implemented')
    sys.exit(1)

# if targetnside<2048:
    # print "removing duplicate pixels for targetnside",targetnside,"<",2048
    # print "size before",len(correctallhpxindices_ring)
    # correctallhpxindices_ring=np.unique(correctallhpxindices_ring)
    # print "size after",len(correctallhpxindices_ring)

correctallhpxindices = healpy.ring2nest(targetnside, correctallhpxindices_ring)
print('done')

# print infodict.keys()

print "I'm pretty sure it's datetime.now()..."
starttime = datetime.datetime.now()
print starttime.isoformat()

hpx4indices = range(healpy.nside2npix(4))

# we want to split the 2048 final (gridded) hpx db to be split into pieces
# according to the 192 hpx indices of nside=4
# using the lookup, we can easily find out, which files contribute, however,
# we need to include adjacent pixels
# idea: for each of the 192 pixels, take all 49152/192 subpixels
Example 42
def getchildren(ipix,nside):
    ipixnest = healpy.ring2nest(nside, ipix)
    # regardless of resolution the next children are always the same: 4 * ipix + 0,1,2,3
    children = [ipixnest*4, ipixnest*4+1, ipixnest*4+2, ipixnest*4+3]
    return children
Example 43
def radec2healpix(ra,dec,nside):
    (theta,phi) = radec2polar(ra,dec)
    healpixring = polar2healpix(theta,phi,nside)
    return healpy.ring2nest(nside,healpixring)
Example 44
def ipix2tuple(pixelring,nside):
    ipixnest = healpy.ring2nest(nside,pixelring)
    npix = healpy.nside2npix(nside)
    return (ipixnest // (npix // 12), ipixnest % (npix // 12))
Example 45
def project_and_write_maps(mode, propertiesweightsoperations, tbdata, catalogue_name, outrootdir, sample_names, inds, nside, ratiores, pixoffset, nsidesout=None):

    resol_prefix = 'nside'+str(nside)+'_oversamp'+str(ratiores)
    outroot = outrootdir + '/' + catalogue_name + '/' + resol_prefix + '/'
    mkdir_p(outroot)
    if mode == 1: # Fully sequential
        for sample_name, ind in zip(sample_names, inds):
            #print len(tbdata[ind]['ra1'])
            #plt.plot(tbdata[ind]['ra1'],tbdata[ind]['dec1'],'k,')
            #plt.show()
            treemap = makeHealTree( (catalogue_name+'_'+sample_name, nside, ratiores, pixoffset, np.array(tbdata[ind])) )
            for property, weights, operation in propertiesweightsoperations:
                cutmap_indices, cutmap_signal = makeHpxMap_partial( (treemap, property, weights, operation) )
                if nsidesout is None:
                    fname = outroot + '_'.join([catalogue_name, sample_name, resol_prefix, property, weights, operation]) + '.fits'
                    print('Creating and writing', fname)
                    write_partial_map(fname, cutmap_indices, cutmap_signal, nside, nest=False)
                else:
                    cutmap_indices_nest = hp.ring2nest(nside, cutmap_indices)
                    outmap_hi = np.zeros(hp.nside2npix(nside))
                    outmap_hi.fill(0.0) #outmap_hi.fill(hp.UNSEEN)
                    outmap_hi[cutmap_indices_nest] = cutmap_signal
                    for nside_out in nsidesout:
                        if nside_out == nside:
                            outmap_lo = outmap_hi
                        else:
                            outmap_lo = hp.ud_grade(outmap_hi, nside_out, order_in='NESTED', order_out='NESTED')
                        resol_prefix2 = 'nside'+str(nside_out)+'from'+str(nside)+'o'+str(ratiores)
                        outroot2 = outrootdir + '/' + catalogue_name + '/' + resol_prefix2 + '/'
                        mkdir_p(outroot2)
                        fname = outroot2 + '_'.join([catalogue_name, sample_name, resol_prefix2, property, weights, operation]) + '.fits'
                        print('Writing', fname)
                        hp.write_map(fname, outmap_lo, nest=True)
                        subprocess.call("gzip "+fname,shell=True)


    if mode == 3: # Fully parallel
        pool = Pool(len(inds))
        print('Creating HealTrees')
        treemaps = pool.map( makeHealTree,
                         [ (catalogue_name+'_'+samplename, nside, ratiores, pixoffset, np.array(tbdata[ind]))
                           for samplename, ind in zip(sample_names, inds) ] )

        for property, weights, operation in propertiesweightsoperations:
            print('Making maps for', property, weights, operation)
            outmaps = pool.map( makeHpxMap_partial,
                            [ (treemap, property, weights, operation) for treemap in treemaps ] )
            for sample_name, outmap in zip(sample_names, outmaps):
                fname = outroot + '_'.join([catalogue_name, sample_name, resol_prefix, property, weights, operation]) + '.fits'
                print('Writing', fname)
                cutmap_indices, cutmap_signal = outmap
                write_partial_map(fname, cutmap_indices, cutmap_signal, nside, nest=False)


    if mode == 2:  # Parallel tree making and sequential writing
        pool = Pool(len(inds))
        print('Creating HealTrees')
        treemaps = pool.map( makeHealTree,
                         [ (catalogue_name+'_'+samplename, nside, ratiores, pixoffset, np.array(tbdata[ind]))
                           for samplename, ind in zip(sample_names, inds) ] )

        for property, weights, operation in propertiesweightsoperations:
            for sample_name, treemap in zip(sample_names, treemaps):
                fname = outroot + '_'.join([catalogue_name, sample_name, resol_prefix, property, weights, operation]) + '.fits'
                print('Writing', fname)
                #outmap = makeHpxMap( (treemap, property, weights, operation) )
                #hp.write_map(fname, outmap, nest=False)
                cutmap_indices, cutmap_signal = makeHpxMap_partial( (treemap, property, weights, operation) )
                write_partial_map(fname, cutmap_indices, cutmap_signal, nside, nest=False)
Example 46
    def _stage_pixels(self, data, detectors, nsamp, ndet, obs_period_ranges, nside):
        """ Stage pixels

        """
        auto_timer = timing.auto_timer(type(self).__name__)
        self._madam_pixels = self._cache.create(
            "pixels", madam.PIXEL_TYPE, (nsamp * ndet,)
        )
        self._madam_pixels[:] = -1

        global_offset = 0
        for iobs, obs in enumerate(data.obs):
            tod = obs["tod"]
            period_ranges = obs_period_ranges[iobs]

            commonflags = None
            for idet, det in enumerate(detectors):
                # Optionally get the flags, otherwise they are
                # assumed to have been applied to the pixel numbers.

                if self._apply_flags:
                    detflags = tod.local_flags(det, self._flag_name)
                    commonflags = tod.local_common_flags(self._common_flag_name)
                    flags = np.logical_or(
                        (detflags & self._flag_mask) != 0,
                        (commonflags & self._common_flag_mask) != 0,
                    )
                    del detflags

                # get the pixels for the valid intervals from the cache

                pixelsname = "{}_{}".format(self._pixels, det)
                pixels = tod.cache.reference(pixelsname)
                pixels_dtype = pixels.dtype

                if not self._pixels_nested:
                    # Madam expects the pixels to be in nested ordering
                    pixels = pixels.copy()
                    good = pixels >= 0
                    pixels[good] = hp.ring2nest(nside, pixels[good])

                if self._apply_flags:
                    pixels = pixels.copy()
                    pixels[flags] = -1

                offset = global_offset
                for istart, istop in period_ranges:
                    nn = istop - istart
                    dslice = slice(idet * nsamp + offset, idet * nsamp + offset + nn)
                    self._madam_pixels[dslice] = pixels[istart:istop]
                    offset += nn

                del pixels
                if self._apply_flags:
                    del flags

            # Always purge the pixels but restore them from the Madam
            # buffers when purge_pixels=False
            for idet, det in enumerate(detectors):
                pixelsname = "{}_{}".format(self._pixels, det)
                tod.cache.clear(pattern=pixelsname)
                if self._name is not None and (
                    self._purge_tod or self._name == self._name_out
                ):
                    cachename = "{}_{}".format(self._name, det)
                    tod.cache.clear(pattern=cachename)
                if self._purge_flags and self._flag_name is not None:
                    cacheflagname = "{}_{}".format(self._flag_name, det)
                    tod.cache.clear(pattern=cacheflagname)

            del commonflags
            if self._purge_flags and self._common_flag_name is not None:
                tod.cache.clear(pattern=self._common_flag_name)
            global_offset = offset

        return pixels_dtype
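
# --- Illustrative sketch (not part of the TOAST/Madam code above) ---
# The pixel-conversion step in isolation: only the valid (>= 0) RING pixel
# numbers are passed to hp.ring2nest, the -1 sentinel is preserved, and flagged
# samples are then marked invalid. nside and the toy arrays are assumptions.
import numpy as np
import healpy as hp

nside = 64
pixels = np.array([0, 5, -1, 123, -1, 4000], dtype=np.int64)   # RING, -1 = invalid
flags = np.array([False, True, False, False, False, False])

pixels = pixels.copy()
good = pixels >= 0
pixels[good] = hp.ring2nest(nside, pixels[good])   # Madam expects NESTED ordering
pixels[flags] = -1                                 # flagged samples are discarded
print(pixels)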
Example 47
    def getFilePixels(self, nside):
        """
        Get the healpix cells occupied by galaxies
        in each file. Assumes files have already been
        sorted correctly by parseFileStruct
        """
        fpix = []

        # BCC catalogs have pixels in filenames
        if ("BCC" in self.__class__.__name__) & (self.filenside is not None) & (self.filenside >= self.groupnside):
            fk = self.filestruct.keys()

            for f in self.filestruct[fk[0]]:
                p = int(f.split(".")[-2])

                if self.filenside == self.groupnside:
                    fpix.append([p])
                else:
                    if not self.nest:
                        while p > 12 * self.filenside ** 2:
                            p = p - 1000
                        p = hp.ring2nest(self.filenside, p)

                    o1 = int(np.log2(self.filenside))
                    o2 = int(np.log2(self.groupnside))

                    base = int(p >> 2 * o1)
                    hosubpix = int(p & ((1 << (2 * o1)) - 1))
                    losubpix = int(hosubpix // (1 << 2 * (o1 - o2)))
                    p = int(base * (1 << (2 * o2)) + losubpix)

                    fpix.append([p])

        else:
            ct = ["galaxycatalog"]

            pmetric = PixMetric(self.ministry, self.groupnside, catalog_type=ct, nest=self.nest)
            mg = self.ministry.genMetricGroups([pmetric])
            ms = mg[0][1]
            fm = mg[0][0]

            mappables = self.ministry.genMappables(mg[0])

            if self.ministry.parallel:
                from mpi4py import MPI

                comm = MPI.COMM_WORLD
                rank = comm.Get_rank()
                size = comm.Get_size()

                mappables = mappables[rank::size]

            for i, mappable in enumerate(mappables):

                mapunit = self.ministry.readMappable(mappable, fm)
                print("converting before getting file pixels")
                if (not hasattr(ms, "__iter__")) and ("only" in ms.aschema):
                    mapunit = self.ministry.scListToDict(mapunit)
                    mapunit = self.ministry.convert(mapunit, ms)
                    mapunit = self.ministry.filter(mapunit)

                elif "only" in ms[0].aschema:
                    mapunit = self.ministry.scListToDict(mapunit)
                    mapunit = self.ministry.convert(mapunit, ms)
                    mapunit = self.ministry.filter(mapunit)

                if (ms[0].aschema == "galaxygalaxy") | (ms[0].aschema == "halohalo"):
                    mapunit = self.ministry.dcListToDict(mapunit)
                    mapunit = self.ministry.convert(mapunit, ms)
                    mapunit = self.ministry.filter(mapunit)

                fpix.append(pmetric.map(mapunit))

                del mapunit

            if self.ministry.parallel:
                gfpix = comm.allgather(fpix)
                fpix = []
                for fp in gfpix:
                    fpix.extend(fp)

        return fpix
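
# --- Illustrative check (not part of the catalog code above) ---
# The bit arithmetic above degrades a NESTED pixel index from filenside to
# groupnside; in the NESTED scheme this is the same as dropping the
# 2*(o1 - o2) lowest bits. The nsides and pixel number below are assumptions
# for illustration, and a healpy pix2ang/ang2pix round trip confirms the parent.
import numpy as np
import healpy as hp

filenside, groupnside = 8, 2
o1, o2 = int(np.log2(filenside)), int(np.log2(groupnside))
p_hi = 123                              # NESTED pixel at nside = filenside
p_lo = p_hi >> (2 * (o1 - o2))          # parent NESTED pixel at nside = groupnside

theta, phi = hp.pix2ang(filenside, p_hi, nest=True)
assert p_lo == hp.ang2pix(groupnside, theta, phi, nest=True)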
Example 48
def read_sky_map(filename, nest=False, distances=False, moc=False, **kwargs):
    """
    Read a LIGO/Virgo-type sky map and return a tuple of the HEALPix array
    and a dictionary of metadata from the header.

    Parameters
    ----------

    filename: string
        Path to the optionally gzip-compressed FITS file.

    nest: bool, optional
        If omitted or False, then detect the pixel ordering in the FITS file
        and rearrange if necessary to RING indexing before returning.

        If True, then detect the pixel ordering and rearrange if necessary to
        NESTED indexing before returning.

        If None, then preserve the ordering from the FITS file.

        Regardless of the value of this option, the ordering used in the FITS
        file is indicated as the value of the 'nest' key in the metadata
        dictionary.

    distances: bool, optional
        If true, then also read the additional HEALPix layers representing
        the conditional mean and standard deviation of distance as a function
        of sky location.

    moc: bool, optional
        If true, then preserve multi-order structure if present.

    Example
    -------

    Test that we can read a legacy IDL-compatible file
    (https://bugs.ligo.org/redmine/issues/5168):

    >>> import tempfile
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     nside = 512
    ...     npix = hp.nside2npix(nside)
    ...     ipix_nest = np.arange(npix)
    ...     hp.write_map(f.name, ipix_nest, nest=True, column_names=['PROB'])
    ...     m, meta = read_sky_map(f.name)
    ...     np.testing.assert_array_equal(m, hp.ring2nest(nside, ipix_nest))
    """
    m = Table.read(filename, format='fits', **kwargs)

    # Remove some keys that we do not need
    for key in (
            'PIXTYPE', 'EXTNAME', 'NSIDE', 'FIRSTPIX', 'LASTPIX', 'INDXSCHM'):
        m.meta.pop(key, None)

    if m.meta.pop('COORDSYS', 'C') != 'C':
        raise ValueError('LALInference only reads and writes sky maps in '
                         'equatorial coordinates.')

    try:
        value = m.meta.pop('ORDERING')
    except KeyError:
        pass
    else:
        if value == 'RING':
            m.meta['nest'] = False
        elif value == 'NESTED':
            m.meta['nest'] = True
        elif value == 'NUNIQ':
            pass
        else:
            raise ValueError(
                'ORDERING card in header has unknown value: {0}'.format(value))

    for fits_key, rows in itertools.groupby(
            FITS_META_MAPPING, lambda row: row[1]):
        try:
            value = m.meta.pop(fits_key)
        except KeyError:
            pass
        else:
            for row in rows:
                key, _, _, _, from_fits = row
                if from_fits is not None:
                    m.meta[key] = from_fits(value)

    if 'UNIQ' not in m.colnames:
        m = Table([col.ravel() for col in m.columns.values()], meta=m.meta)

    if 'UNIQ' in m.colnames and not moc:
        from ..bayestar.sky_map import rasterize
        m = rasterize(m)
        m.meta['nest'] = True
    elif 'UNIQ' not in m.colnames and moc:
        from ..bayestar.sky_map import derasterize
        if not m.meta['nest']:
            npix = len(m)
            nside = hp.npix2nside(npix)
            m = m[hp.nest2ring(nside, np.arange(npix))]
        m = derasterize(m)
        m.meta.pop('nest', None)

    if 'UNIQ' not in m.colnames:
        npix = len(m)
        nside = hp.npix2nside(npix)

        if nest is None:
            pass
        elif m.meta['nest'] and not nest:
            m = m[hp.ring2nest(nside, np.arange(npix))]
        elif not m.meta['nest'] and nest:
            m = m[hp.nest2ring(nside, np.arange(npix))]

    if moc:
        return m
    elif distances:
        return tuple(
            np.asarray(m[name]) for name in DEFAULT_NESTED_NAMES), m.meta
    else:
        return np.asarray(m[DEFAULT_NESTED_NAMES[0]]), m.meta
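
# --- Hypothetical usage sketch (the filename is illustrative only) ---
# read_sky_map defaults to RING ordering, nest=True reorders to NESTED, and
# nest=None keeps whatever ordering the FITS file uses; per the docstring,
# meta['nest'] records the ordering found in the file header in every case.
prob_ring, meta = read_sky_map('bayestar.fits.gz')             # RING ordering
prob_nest, meta = read_sky_map('bayestar.fits.gz', nest=True)  # NESTED ordering
print(meta['nest'])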
Example 49
phiinc = 0.98 * (2.0 * np.pi / angperring)

theta = np.zeros(nring * angperring)
phi = np.zeros_like(theta)

n = 0

for t in range(nring):
    for p in range(angperring):
        theta[n] = t * thetainc
        phi[n] = p * phiinc
        n += 1

pixring = hp.ang2pix(nside, theta, phi)
pixnest = hp.ring2nest(nside, pixring)

ringtheta, ringphi = hp.pix2ang(nside, pixring)

print("    int64_t nside = {};".format(nside))
print("")

print("    int64_t ntest = {};".format(n))
print("")

print("    double theta[{}] = {{".format(n))
for i in range(n):
    print("        {},".format(theta[i]))
print("    };")
print("")