Example #1
def read_map2(filename,field=0,dtype=np.float64,nest=False,hdu=1,h=False,verbose=True,memmap=False):
    hdr=fitsio.read_header(filename,ext=hdu)
    
    # partial-sky maps are flagged with OBJECT = 'PARTIAL' in the header;
    # if the keyword is missing, assume a full-sky map
    fullsky = True
    if 'OBJECT' in hdr:
        fullsky = (hdr['OBJECT'].strip() != 'PARTIAL')

    if fullsky:
        m=hp.read_map(filename,field=field,dtype=dtype,nest=nest,hdu=hdu,h=h,verbose=verbose,memmap=memmap)
    else:
        # partial sky
        st=fitsio.read(filename,ext=1)
        nside=hdr['NSIDE']

        m=np.zeros(12*nside*nside,dtype=dtype) + hp.UNSEEN

        if ((hdr['ORDERING'].strip() == 'NESTED') and (not nest)) :
            # change from nest to ring...
            m[hp.nest2ring(nside,st['PIXEL'])] = st['SIGNAL']
        elif ((hdr['ORDERING'].strip() == 'RING') and (nest)):
            # change from ring to nest...
            m[hp.ring2nest(nside,st['PIXEL'])] = st['SIGNAL']
        else :
            # straight up
            m[st['PIXEL']] = st['SIGNAL']

    return m
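A hedged usage sketch (not part of the original snippet; the filename is hypothetical): the function returns a full-length map whether the FITS file stores a full-sky column or a partial-sky PIXEL/SIGNAL table, with missing pixels set to hp.UNSEEN.

import numpy as np
import healpy as hp
import fitsio

m = read_map2('example_depth_map.fits', field=0, dtype=np.float64, nest=False)
nside = hp.npix2nside(m.size)
seen = m != hp.UNSEEN          # pixels that actually carry data
print(nside, seen.sum())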
Example #2
    def _get_converted_data(self, scheme):
        """
        internal routine to get the data converted to the requested
        scheme

        If the scheme would be unchanged, a reference to the data is returned
        """
        import healpy

        scheme_num = get_scheme_num(scheme)
        if scheme_num == self.hpix.scheme_num:
            return self.data
        
        if scheme_num==NESTED:
            ipring=numpy.arange(self.hpix.npix,dtype='i8')
            ipnest=healpy.ring2nest(self.hpix.nside, ipring)

            newdata=self.data.copy()
            newdata[ipnest]=self.data
        else:
            ipnest=numpy.arange(self.hpix.npix,dtype='i8')
            ipring=healpy.nest2ring(self.hpix.nside, ipnest)

            newdata=self.data.copy()
            newdata[ipring]=self.data

        return newdata
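The indexing trick above also works standalone; a minimal sketch (assuming a plain RING-ordered numpy array), which agrees with healpy's built-in reorder:

import numpy as np
import healpy as hp

nside = 8
npix = hp.nside2npix(nside)
data_ring = np.arange(npix, dtype='f8')        # toy RING-ordered map

ipnest = hp.ring2nest(nside, np.arange(npix))  # NEST index of each RING pixel
data_nest = np.empty_like(data_ring)
data_nest[ipnest] = data_ring                  # same map in NESTED ordering

assert np.array_equal(data_nest, hp.reorder(data_ring, r2n=True))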
Example #3
    def get_index_list(nside, nest, region):
        """ Returns the list of pixels indices for all the pixels in a region

        nside    : HEALPix nside parameter
        nest     : True for 'NESTED', False = 'RING'
        region   : HEALPix region string
        """

        tokens = parse_hpxregion(region)
        if tokens[0] == 'DISK':
            vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
            ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])),
                                  inclusive=False, nest=nest)
        elif tokens[0] == 'DISK_INC':
            vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
            ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])),
                                  inclusive=True, fact=int(tokens[4]),
                                  nest=nest)
        elif tokens[0] == 'HPX_PIXEL':
            nside_pix = int(tokens[2])
            if tokens[1] == 'NESTED':
                ipix_ring = hp.nest2ring(nside_pix, int(tokens[3]))
            elif tokens[1] == 'RING':
                ipix_ring = int(tokens[3])
            else:
                raise Exception(
                    "Did not recognize ordering scheme %s" % tokens[1])
            ilist = match_hpx_pixel(nside, nest, nside_pix, ipix_ring)
        else:
            raise Exception(
                "HPX.get_index_list did not recognize region type %s" % tokens[0])
        return ilist
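For the 'DISK' branch, the equivalent standalone call looks like the sketch below (hedged: coords_to_vec is assumed to behave like healpy's ang2vec with lon/lat in degrees; the centre and radius are made up):

import numpy as np
import healpy as hp

nside = 64
vec = hp.ang2vec(110.0, 75.0, lonlat=True)      # disk centre (lon, lat) in degrees
ilist = hp.query_disc(nside, vec, np.radians(2.0), inclusive=False, nest=False)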
Example #4
def _main():

    """
    This is the main routine.
    """

    data = np.loadtxt("all_ts_data_0.1GeVplus.gz",skiprows=32,unpack=True)

    print(len(data[0]))
    nside=int(round(mt.sqrt(len(data[9])/12)))
    tsarray=data[0]*0
    RA=data[8]
    decl=data[9]
#    index=DeclRaToIndex(decl,RA,nside)
    index=hp.nest2ring(nside,np.array(data[0]).astype(int))
    tsarray[index]=data[22]
    tsarray = hp.sphtfunc.smoothing(tsarray,sigma = 0.008)

 #   tsmap=0*tsarray


    hp.mollview(np.arcsinh(tsarray)/mt.log(10.0),coord='C', title='TS Map', unit='prob',xsize = 2048)
#    hp.graticule()
    plt.savefig("ts.png")
    plt.show()
Example #5
def inpaint(m, num_degrades=1, nside_in=2048):
    """
    Inpaints missing pixels by degrading the map (while ignoring missing pixels),
    then setting the missing values to their values in the degraded map.
    """
    import numpy as np
    import healpy as H
    nside_deg = nside_in // (2**num_degrades)
    badpix = np.arange(12*nside_in**2)[m == H.UNSEEN]
    badpix_deg = H.nest2ring(nside_deg, H.ring2nest(nside_in, badpix) >> 2*num_degrades)
    m_deg = H.ud_grade(m, nside_deg)
    m2 = m.copy()
    m2[badpix] = m_deg[badpix_deg]
    return m2
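A hedged usage sketch with a toy RING-ordered map (the hole positions and nside are made up); any hole whose entire degraded super-pixel is missing would remain hp.UNSEEN:

import numpy as np
import healpy as hp

nside = 2048
m = np.random.rand(hp.nside2npix(nside))
m[1000:2000] = hp.UNSEEN                        # fake missing pixels
m_filled = inpaint(m, num_degrades=1, nside_in=nside)
print('remaining UNSEEN pixels:', np.sum(m_filled == hp.UNSEEN))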
Example #6
def convert_pixel_to_map(pixel, valkey='SIGNAL', pixkey='PIXEL',
                         nested=False, nside=4096, return_seen=True):
    ycoord = pixel[pixkey]
    if nested:
        # convert to ring
        ycoord = hp.nest2ring(nside, ycoord)

    m = np.zeros(hp.nside2npix(nside), dtype=pixel[pixkey].dtype) + hp.UNSEEN
    m[ycoord] = pixel[valkey]

    if return_seen:
        indices, = np.where(m != hp.UNSEEN)
        m = m[indices]
        return indices, m
    else:
        return np.arange(m.size), m
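A hedged usage sketch with a small structured array of (PIXEL, SIGNAL) pairs; nside is reduced from the 4096 default purely for illustration:

import numpy as np
import healpy as hp

pixel = np.zeros(10, dtype=[('PIXEL', 'i8'), ('SIGNAL', 'f8')])
pixel['PIXEL'] = np.arange(10)                  # NESTED pixel indices
pixel['SIGNAL'] = np.random.rand(10)

indices, values = convert_pixel_to_map(pixel, nested=True, nside=64)
print(indices.size, 'seen pixels')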
Example #7
    def to_swapped(self):
        import healpy as hp
        hpx_out = self.geom.to_swapped()
        map_out = self.__class__(hpx_out, meta=copy.deepcopy(self.meta))
        idx = self.geom.get_idx(flat=True)
        vals = self.get_by_idx(idx)
        if self.geom.nside.size > 1:
            nside = self.geom.nside[idx[1:]]
        else:
            nside = self.geom.nside

        if self.geom.nest:
            idx_new = tuple([hp.nest2ring(nside, idx[0])]) + idx[1:]
        else:
            idx_new = tuple([hp.ring2nest(nside, idx[0])]) + idx[1:]

        map_out.set_by_idx(idx_new, vals)
        return map_out
Example #8
    def to_swapped(self):
        import healpy as hp

        hpx_out = self.geom.to_swapped()
        map_out = self._init_copy(geom=hpx_out, data=None)
        idx = self.geom.get_idx(flat=True)
        vals = self.get_by_idx(idx)
        if self.geom.nside.size > 1:
            nside = self.geom.nside[idx[1:]]
        else:
            nside = self.geom.nside

        if self.geom.nest:
            idx_new = tuple([hp.nest2ring(nside, idx[0])]) + idx[1:]
        else:
            idx_new = tuple([hp.ring2nest(nside, idx[0])]) + idx[1:]

        map_out.set_by_idx(idx_new, vals)
        return map_out
Example #9
    def query(self, sources, chunksize=1000, method='array'):
        """
        Returns the selection function at the requested coordinates.

        Args:
            sources (:obj:`selectionfunctions.source.Source`): The coordinates, magnitude and colour to query.

        Returns:
            Selection function at the specified coordinates, as a fraction.

        """

        # Convert coordinates to healpix indices
        if method == 'array': nside = hp.npix2nside(self.x.shape[2])
        else: nside = self.nside
        hpxidx = coord2healpix(sources.coord, 'icrs', nside, nest=True)

        # Extract Gaia G magnitude
        mag = sources.photometry.measurement['gaia_g']
        try:
            color = sources.photometry.measurement['gaia_g_gaia_rp']
        except KeyError:
            color = np.zeros(len(mag))

        # Evaluate selection function
        if method == 'array':
            selection_function = self._selection_function(mag, color, hpxidx)
        elif method == 'gp':
            # Load spherical basis
            if not hasattr(self, 'basis'):
                print(f'Spherical Basis: {self.spherical_basis_file}')
                self._load_spherical_basis()
            # Switch positions to ring ordering
            pix = hp.nest2ring(self.nside, hpxidx)
            # Don't evaluate for outside range
            selection_function = np.zeros(len(pix))
            subset = (mag > self.Mbins[0]) & (mag < self.Mbins[-1])
            selection_function[subset] = self._selection_function_gp(
                mag[subset], color[subset], pix[subset])
        elif method == 'basis':
            raise ValueError('basis method not implemented yet.')

        return selection_function
Example #10
    def to_swapped(self):
        import healpy as hp

        hpx_out = self.geom.to_swapped()
        map_out = self._init_copy(geom=hpx_out, data=None)
        idx = self.geom.get_idx(flat=True)
        vals = self.get_by_idx(idx)
        if self.geom.nside.size > 1:
            nside = self.geom.nside[idx[1:]]
        else:
            nside = self.geom.nside

        if self.geom.nest:
            idx_new = tuple([hp.nest2ring(nside, idx[0])]) + idx[1:]
        else:
            idx_new = tuple([hp.ring2nest(nside, idx[0])]) + idx[1:]

        map_out.set_by_idx(idx_new, vals)
        return map_out
Example #11
def read_map(filename, HDU=0, field=0, nest=False):
    """Read Healpix map
    all columns of the specified HDU are read into a compound numpy MASKED array
    if nest is not None, the map is converted if need to NEST or RING ordering.
    this function requires healpy"""
    m, h = read(filename, HDU=HDU, return_header=True)
    try:
        m = m[field]
    except KeyError:
        m = m.values()[field]
    nside = healpy.npix2nside(m.size)
    if nest is not None:
        if h.get('ORDERING', False):
            if h['ORDERING'] == 'NESTED' and not nest:
                idx = healpy.ring2nest(nside,np.arange(m.size,dtype=np.int32))
                m = m[idx]
            elif h['ORDERING'] == 'RING' and nest:
                idx = healpy.nest2ring(nside,np.arange(m.size,dtype=np.int32))
                m = m[idx]
    return healpy.ma(m)
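A hedged usage sketch (the filename is hypothetical, and `read` is a helper from the same module): read the first field of HDU 1 as a masked array, converted to RING ordering if the file is NESTED.

m = read_map('example_map.fits', HDU=1, field=0, nest=False)
print(m.size, 'pixels')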
Example #12
def convert_pixel_to_map(pixel,
                         valkey='SIGNAL',
                         pixkey='PIXEL',
                         nested=False,
                         nside=4096,
                         return_seen=True):
    ycoord = pixel[pixkey]
    if nested:
        # convert to ring
        ycoord = hp.nest2ring(nside, ycoord)

    m = np.zeros(hp.nside2npix(nside), dtype=pixel[pixkey].dtype) + hp.UNSEEN
    m[ycoord] = pixel[valkey]

    if return_seen:
        indices, = np.where(m != hp.UNSEEN)
        m = m[indices]
        return indices, m
    else:
        return np.arange(m.size), m
Example #13
def face_pix(nside, face=0, ordering='Ring'):

    while face > 11:
        face -= 12

    if nside == 1:
        subpix = np.array([face])
    else:
        tmpnside = 2

        subpix = np.array([[3, 1], [2, 0]])
        #subpix = np.array([[2,0],[3,1]])

        ## Enlarge if necessary
        ##----------------------
        while tmpnside < nside:
            tmpnside *= 2
            pix = np.zeros((tmpnside, tmpnside), dtype=np.int64)

            pix[0:tmpnside // 2,
                0:tmpnside // 2] = subpix + 3 * (tmpnside**2) // 4
            pix[tmpnside // 2:tmpnside, tmpnside // 2:tmpnside] = subpix
            pix[tmpnside // 2:tmpnside,
                0:tmpnside // 2] = subpix + 2 * (tmpnside**2) // 4
            pix[0:tmpnside // 2,
                tmpnside // 2:tmpnside] = subpix + (tmpnside**2) // 4
            '''
            pix[0:tmpnside/2,0:tmpnside/2] = subpix + 2*(tmpnside**2)/4
            pix[tmpnside/2:tmpnside,tmpnside/2:tmpnside] = subpix + (tmpnside**2)/4
            pix[tmpnside/2:tmpnside,0:tmpnside/2] = subpix + 3*(tmpnside**2)/4
            pix[0:tmpnside/2,tmpnside/2:tmpnside] = subpix
            '''
            subpix = pix

        subpix += face * (nside**2)

        if ordering == 'Ring':
            subpix = hp.nest2ring(nside, subpix)

    return subpix
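A hedged usage sketch: the function returns an nside x nside block of pixel indices for one of the 12 base faces, here requested in RING ordering.

import numpy as np
import healpy as hp

pix = face_pix(4, face=5, ordering='Ring')
print(pix.shape, pix.min(), pix.max())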
Example #14
    def get_index_list(nside, nest, region):
        """ Returns the list of pixels indices for all the pixels in a region

        nside    : HEALPix nside parameter
        nest     : True for 'NESTED', False = 'RING'
        region   : HEALPix region string
        """

        tokens = parse_hpxregion(region)
        if tokens[0] == 'DISK':
            vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
            ilist = hp.query_disc(nside,
                                  vec[0],
                                  np.radians(float(tokens[3])),
                                  inclusive=False,
                                  nest=nest)
        elif tokens[0] == 'DISK_INC':
            vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
            ilist = hp.query_disc(nside,
                                  vec[0],
                                  np.radians(float(tokens[3])),
                                  inclusive=True,
                                  fact=int(tokens[4]),
                                  nest=nest)
        elif tokens[0] == 'HPX_PIXEL':
            nside_pix = int(tokens[2])
            if tokens[1] == 'NESTED':
                ipix_ring = hp.nest2ring(nside_pix, int(tokens[3]))
            elif tokens[1] == 'RING':
                ipix_ring = int(tokens[3])
            else:
                raise Exception("Did not recognize ordering scheme %s" %
                                tokens[1])
            ilist = match_hpx_pixel(nside, nest, nside_pix, ipix_ring)
        else:
            raise Exception(
                "HPX.get_index_list did not recognize region type %s" %
                tokens[0])
        return ilist
Example #15
    def to_swapped_scheme(self):

        import healpy as hp
        hpx_out = self.geom.to_swapped()
        map_out = self.__class__(hpx_out)
        idx = list(self.geom.get_idx())
        vals = self.get_by_idx(idx)
        msk = vals > 0
        idx = [t[msk] for t in idx]
        vals = vals[msk]

        if self.geom.nside.size > 1:
            nside = self.geom.nside[idx[1:]]
        else:
            nside = self.geom.nside

        if self.geom.nest:
            idx_new = tuple([hp.nest2ring(nside, idx[0])] + idx[1:])
        else:
            idx_new = tuple([hp.ring2nest(nside, idx[0])] + idx[1:])

        map_out.set_by_pix(idx_new, vals)
        return map_out
Example #16
    def query(self, sources, chunksize=1000):
        """
        Returns the selection function at the requested coordinates.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to query.

        Returns:
            Selection function at the specified coordinates, as a fraction.

        """

        # Convert coordinates to healpix indices
        hpxidx = coord2healpix(sources.coord, 'icrs', self.nside, nest=True)

        # Extract Gaia G magnitude
        mag = sources.photometry.measurement['gaia_g']
        try:
            color = sources.photometry.measurement['gaia_bp_gaia_rp']
        except KeyError:
            color = np.zeros(len(mag))

        # Switch positions to ring ordering
        pix = hp.nest2ring(self.nside, hpxidx)

        # Evaluate selection function
        selection_function = self._selection_function(mag,
                                                      color,
                                                      pix,
                                                      chunksize=chunksize)

        if self._bounds:
            _outside_bounds = np.where((mag < self._g_min)
                                       | (mag > self._g_max))
            selection_function[_outside_bounds] = 0.0

        return selection_function
Example #17
    def spread_pixels(self, Nside_low, Nside_high, ID, order='nest'):
        """
        returns a list of pixel IDs in the Nside_high resolution
        from a pixel ID in the Nside_low resolution.
        """
        from math import log
        import numpy as np
        import healpy as hp

        if order != 'nest' and order != 'ring':
            raise KeyError('ERROR: check order in spread_pixels')

        if Nside_low == Nside_high:
            if isinstance(ID, list):
                return ID
            else:
                return [ID]

        if Nside_low > Nside_high:
            raise KeyError('ERROR using spread_pixels')

        Llow = int(log(Nside_low, 2))
        Lhigh = int(log(Nside_high, 2))

        if isinstance(ID, list) or isinstance(ID, np.ndarray):
            pixids = []
            if order == 'ring':
                for ipix in ID:
                    j = hp.ring2nest(Nside_low, ipix)
                    pixids.append(j)
            else:
                pixids = ID

            pix_IDs = []
            for id in pixids:
                b = bin(id)
                DN = Lhigh - Llow
                a = [bin(i)[2:].zfill(2 * DN) for i in range(4**DN)]
                for i in a:
                    x = (b[2:].zfill(Llow) + i)
                    pix_IDs.append(int(x, 2))
        elif isinstance(ID, int) or isinstance(ID, np.int64):

            if order == 'ring':
                pixids = hp.ring2nest(Nside_low, ID)
            else:
                pixids = ID

            b = bin(pixids)
            DN = Lhigh - Llow
            a = [bin(i)[2:].zfill(2 * DN) for i in range(4**DN)]
            pix_IDs = []
            for i in a:
                x = (b[2:].zfill(Llow) + i)
                pix_IDs.append(int(x, 2))
        else:
            print('wtf')
            pix_IDs = 0

        if order == 'ring':
            for i in range(len(pix_IDs)):
                pix_IDs[i] = hp.nest2ring(Nside_high, pix_IDs[i])

        return (pix_IDs)
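A hedged, equivalent-in-spirit sketch (not from the original source): in NESTED ordering the children of a low-resolution pixel form a contiguous index block, so the string/bit manipulation above can be replaced by simple arithmetic.

import numpy as np
import healpy as hp

def spread_pixels_fast(Nside_low, Nside_high, ID, order='nest'):
    # children of NESTED pixel p at a finer Nside are p*4**k ... p*4**k + 4**k - 1
    k = int(np.log2(Nside_high // Nside_low))
    ipnest = hp.ring2nest(Nside_low, ID) if order == 'ring' else ID
    children = ipnest * 4**k + np.arange(4**k)
    return hp.nest2ring(Nside_high, children) if order == 'ring' else children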
Example #18
    def _unstage_data(self, comm, data, nsamp, nnz, nnz_full,
                      obs_period_ranges, detectors, pixels_dtype, nside,
                      weight_dtype):
        """ Clear Madam buffers, restore pointing into TOAST caches
        and cache the destriped signal.

        """
        auto_timer = timing.auto_timer(type(self).__name__)
        self._madam_timestamps = None
        self._cache.destroy('timestamps')

        if self._conserve_memory:
            nodecomm = comm.Split_type(MPI.COMM_TYPE_SHARED, comm.rank)
            nread = nodecomm.size
        else:
            nodecomm = MPI.COMM_SELF
            nread = 1

        for iread in range(nread):
            nodecomm.Barrier()
            if nodecomm.rank % nread != iread:
                continue
            if self._name_out is not None:
                global_offset = 0
                for obs, period_ranges in zip(data.obs, obs_period_ranges):
                    tod = obs['tod']
                    nlocal = tod.local_samples[1]
                    for idet, det in enumerate(detectors):
                        signal = np.ones(nlocal) * np.nan
                        offset = global_offset
                        for istart, istop in period_ranges:
                            nn = istop - istart
                            dslice = slice(idet * nsamp + offset,
                                           idet * nsamp + offset + nn)
                            signal[istart:istop] = self._madam_signal[dslice]
                            offset += nn
                        cachename = "{}_{}".format(self._name_out, det)
                        tod.cache.put(cachename, signal, replace=True)
                    global_offset = offset
            self._madam_signal = None
            self._cache.destroy('signal')

            if not self._purge_pixels:
                # restore the pixels from the Madam buffers
                global_offset = 0
                for obs, period_ranges in zip(data.obs, obs_period_ranges):
                    tod = obs['tod']
                    nlocal = tod.local_samples[1]
                    for idet, det in enumerate(detectors):
                        pixels = -np.ones(nlocal, dtype=pixels_dtype)
                        offset = global_offset
                        for istart, istop in period_ranges:
                            nn = istop - istart
                            dslice = slice(idet * nsamp + offset,
                                           idet * nsamp + offset + nn)
                            pixels[istart:istop] = self._madam_pixels[dslice]
                            offset += nn
                        npix = 12 * nside**2
                        good = np.logical_and(pixels >= 0, pixels < npix)
                        if not self._pixels_nested:
                            pixels[good] = hp.nest2ring(nside, pixels[good])
                        pixels[np.logical_not(good)] = -1
                        cachename = "{}_{}".format(self._pixels, det)
                        tod.cache.put(cachename, pixels, replace=True)
                    global_offset = offset
            self._madam_pixels = None
            self._cache.destroy('pixels')

            if not self._purge_weights and nnz == nnz_full:
                # restore the weights from the Madam buffers
                global_offset = 0
                for obs, period_ranges in zip(data.obs, obs_period_ranges):
                    tod = obs['tod']
                    nlocal = tod.local_samples[1]
                    for idet, det in enumerate(detectors):
                        weights = np.zeros([nlocal, nnz], dtype=weight_dtype)
                        offset = global_offset
                        for istart, istop in period_ranges:
                            nn = istop - istart
                            dwslice = slice((idet * nsamp + offset) * nnz,
                                            (idet * nsamp + offset + nn) * nnz)
                            weights[istart:istop] = self._madam_pixweights[
                                dwslice].reshape([-1, nnz])
                            offset += nn
                        cachename = "{}_{}".format(self._weights, det)
                        tod.cache.put(cachename, weights, replace=True)
                    global_offset = offset
            self._madam_pixweights = None
            self._cache.destroy('pixweights')
        del nodecomm
        return
Example #19
def read_sky_map(filename, nest=False, distances=False, moc=False, **kwargs):
    """
    Read a LIGO/Virgo-type sky map and return a tuple of the HEALPix array
    and a dictionary of metadata from the header.

    Parameters
    ----------

    filename: string
        Path to the optionally gzip-compressed FITS file.

    nest: bool, optional
        If omitted or False, then detect the pixel ordering in the FITS file
        and rearrange if necessary to RING indexing before returning.

        If True, then detect the pixel ordering and rearrange if necessary to
        NESTED indexing before returning.

        If None, then preserve the ordering from the FITS file.

        Regardless of the value of this option, the ordering used in the FITS
        file is indicated as the value of the 'nest' key in the metadata
        dictionary.

    distances: bool, optional
        If true, then also read the additional HEALPix layers representing
        the conditional mean and standard deviation of distance as a function
        of sky location.

    moc: bool, optional
        If true, then preserve multi-order structure if present.

    Example
    -------

    Test that we can read a legacy IDL-compatible file
    (https://bugs.ligo.org/redmine/issues/5168):

    >>> import tempfile
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     nside = 512
    ...     npix = hp.nside2npix(nside)
    ...     ipix_nest = np.arange(npix)
    ...     hp.write_map(f.name, ipix_nest, nest=True, column_names=['PROB'])
    ...     m, meta = read_sky_map(f.name)
    ...     np.testing.assert_array_equal(m, hp.ring2nest(nside, ipix_nest))
    """
    m = Table.read(filename, format='fits', **kwargs)

    # Remove some keys that we do not need
    for key in (
            'PIXTYPE', 'EXTNAME', 'NSIDE', 'FIRSTPIX', 'LASTPIX', 'INDXSCHM'):
        m.meta.pop(key, None)

    if m.meta.pop('COORDSYS', 'C') != 'C':
        raise ValueError('LALInference only reads and writes sky maps in '
                         'equatorial coordinates.')

    try:
        value = m.meta.pop('ORDERING')
    except KeyError:
        pass
    else:
        if value == 'RING':
            m.meta['nest'] = False
        elif value == 'NESTED':
            m.meta['nest'] = True
        elif value == 'NUNIQ':
            pass
        else:
            raise ValueError(
                'ORDERING card in header has unknown value: {0}'.format(value))

    for fits_key, rows in itertools.groupby(
            FITS_META_MAPPING, lambda row: row[1]):
        try:
            value = m.meta.pop(fits_key)
        except KeyError:
            pass
        else:
            for row in rows:
                key, _, _, _, from_fits = row
                if from_fits is not None:
                    m.meta[key] = from_fits(value)

    if 'UNIQ' not in m.colnames:
        m = Table([col.ravel() for col in m.columns.values()], meta=m.meta)

    if 'UNIQ' in m.colnames and not moc:
        from ..bayestar.sky_map import rasterize
        m = rasterize(m)
        m.meta['nest'] = True
    elif 'UNIQ' not in m.colnames and moc:
        from ..bayestar.sky_map import derasterize
        if not m.meta['nest']:
            npix = len(m)
            nside = hp.npix2nside(npix)
            m = m[hp.nest2ring(nside, np.arange(npix))]
        m = derasterize(m)
        m.meta.pop('nest', None)

    if 'UNIQ' not in m.colnames:
        npix = len(m)
        nside = hp.npix2nside(npix)

        if nest is None:
            pass
        elif m.meta['nest'] and not nest:
            m = m[hp.ring2nest(nside, np.arange(npix))]
        elif not m.meta['nest'] and nest:
            m = m[hp.nest2ring(nside, np.arange(npix))]

    if moc:
        return m
    elif distances:
        return tuple(
            np.asarray(m[name]) for name in DEFAULT_NESTED_NAMES), m.meta
    else:
        return np.asarray(m[DEFAULT_NESTED_NAMES[0]]), m.meta
Example #20
print('pix_nest =', pix_nest)

all_pix_ring = np.arange(192)
all_pix_nest = hp.ring2nest(Nside, all_pix_ring)
hp.mollview(all_pix_nest, title='Nest scheme', unit='Pix index')
pyplot.savefig('mollweide_view_nest.png')

#%%
"""
STEP 3: Nest to ring

"""

# Nside = 4
print('pix_nest =', pix_nest)
pix_ring = hp.nest2ring(Nside, pix_nest)
print('pix_ring =', pix_ring)

all_pix_ring = hp.nest2ring(Nside, all_pix_nest)
hp.mollview(all_pix_ring, title='Ring scheme', unit='Pix index')
pyplot.savefig('mollweide_view_ring.png')

#%%
"""
STEP 4: Reordering a map

"""
mapa_ring = hp.read_map('COM_CMB_IQU-smica_1024_R2.02_full.fits')
hp.mollview(mapa_ring, title='CMB map - Ring', unit='K')
pyplot.savefig('mollweide_view_cmb.png')
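The snippet ends before the actual reordering of STEP 4; a hedged continuation using healpy's built-in reorder (the output filename is made up):

mapa_nest = hp.reorder(mapa_ring, r2n=True)
hp.mollview(mapa_nest, nest=True, title='CMB map - Nest', unit='K')
pyplot.savefig('mollweide_view_cmb_nest.png')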
Example #21
    def test_lookup(self):
        """
        Test lookup functionality
        """
        np.random.seed(12345)

        nside_coverage = 32
        nside_map = 1024

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0:200000] = np.random.random(size=200000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map,
                                              nside_coverage=nside_coverage)

        n_rand = 100000
        ra = np.random.random(n_rand) * 360.0
        dec = np.random.random(n_rand) * 180.0 - 90.0

        theta = np.radians(90.0 - dec)
        phi = np.radians(ra)
        ipnest = hp.ang2pix(nside_map, theta, phi, nest=True)

        test_values = full_map[ipnest]

        # Test the pixel lookup
        comp_values = sparse_map.get_values_pix(ipnest)
        testing.assert_almost_equal(comp_values, test_values)

        # Test pixel lookup (valid pixels)
        # Note that this tests all the downstream functions
        valid_mask = sparse_map.get_values_pix(ipnest, valid_mask=True)
        testing.assert_equal(valid_mask, comp_values > hp.UNSEEN)

        # Test pixel lookup (ring)
        ipring = hp.nest2ring(nside_map, ipnest)
        comp_values = sparse_map.get_values_pix(ipring, nest=False)
        testing.assert_almost_equal(comp_values, test_values)

        # Test pixel lookup (higher nside)
        comp_values = sparse_map.get_values_pix(hp.ang2pix(4096,
                                                           ra,
                                                           dec,
                                                           lonlat=True,
                                                           nest=True),
                                                nside=4096)
        testing.assert_almost_equal(comp_values, test_values)

        # Test pixel lookup (lower nside)
        lowres_pix = hp.ang2pix(256, ra, dec, lonlat=True, nest=True)
        self.assertRaises(ValueError,
                          sparse_map.get_values_pix,
                          lowres_pix,
                          nside=256)

        # Test the theta/phi lookup
        comp_values = sparse_map.get_values_pos(theta, phi, lonlat=False)
        testing.assert_almost_equal(comp_values, test_values)

        # Test the ra/dec lookup
        comp_values = sparse_map.get_values_pos(ra, dec, lonlat=True)
        testing.assert_almost_equal(comp_values, test_values)

        # Test the list of valid pixels
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(valid_pixels, np.where(full_map > hp.UNSEEN)[0])

        # Test the position of valid pixels
        ra_sp, dec_sp = sparse_map.valid_pixels_pos(lonlat=True)
        _ra_sp, _dec_sp = hp.pix2ang(nside_map,
                                     np.where(full_map > hp.UNSEEN)[0],
                                     lonlat=True,
                                     nest=True)
        testing.assert_equal(ra_sp, _ra_sp)
        testing.assert_equal(dec_sp, _dec_sp)

        # Test position of valid pixels and valid pixels
        valid_pixels, ra_sp, dec_sp = sparse_map.valid_pixels_pos(
            lonlat=True, return_pixels=True)
        _ra_sp, _dec_sp = hp.pix2ang(nside_map,
                                     np.where(full_map > hp.UNSEEN)[0],
                                     lonlat=True,
                                     nest=True)
        testing.assert_equal(ra_sp, _ra_sp)
        testing.assert_equal(dec_sp, _dec_sp)
        testing.assert_equal(valid_pixels, np.where(full_map > hp.UNSEEN)[0])
Example #22
def convert_transmission_to_deltas(obj_path,
                                   out_dir,
                                   in_dir=None,
                                   in_filenames=None,
                                   lambda_min=3600.,
                                   lambda_max=5500.,
                                   lambda_min_rest_frame=1040.,
                                   lambda_max_rest_frame=1200.,
                                   delta_log_lambda=None,
                                   delta_lambda=None,
                                   lin_spaced=False,
                                   max_num_spec=None,
                                   nproc=None,
                                   use_old_weights=False,
                                   out_healpix_order='RING'):
    """Convert transmission files to picca delta files

    Args:
        obj_path: str
            Path to the catalog of object to extract the transmission from
        out_dir: str
            Path to the directory where delta files will be written
        in_dir: str or None - default: None
            Path to the directory containing the transmission files directory.
            If 'None', then in_filenames must be a non-empty array
        in_filenames: array of str or None - default: None
            List of the filenames for the transmission files. Ignored if in_dir
            is not 'None'
        lambda_min: float - default: 3600.
            Minimum observed wavelength in Angstrom
        lambda_max: float - default: 5500.
            Maximum observed wavelength in Angstrom
        lambda_min_rest_frame: float - default: 1040.
            Minimum Rest Frame wavelength in Angstrom
        lambda_max_rest_frame: float - default: 1200.
            Maximum Rest Frame wavelength in Angstrom
        delta_log_lambda: float - default: None
            Variation of the logarithm of the wavelength between two pixels
        delta_lambda: float - default: None
            Variation of the wavelength between two pixels
        lin_spaced: bool - default: False
            Whether to use linear spacing for the wavelength binning
        max_num_spec: int or None - default: None
            Maximum number of spectra to read. 'None' for no maximum
        nproc: int or None - default: None
            Number of cpus to use for I/O operations. If None, defaults to os.cpu_count().
        use_old_weights: boolean - default: False
            Whether to use the old weights based only on the bin size
        out_healpix_order: string: 'RING' or 'NEST' - default: 'RING'
            Healpix numbering scheme for output files.
    """
    # read catalog of objects
    hdul = fitsio.FITS(obj_path)
    key_val = np.char.strip(
        np.array([
            hdul[1].read_header()[key] for key in hdul[1].read_header().keys()
        ]).astype(str))
    if 'TARGETID' in key_val:
        objs_thingid = hdul[1]['TARGETID'][:]
    elif 'THING_ID' in key_val:
        objs_thingid = hdul[1]['THING_ID'][:]
    w = hdul[1]['Z'][:] > max(0., lambda_min / lambda_max_rest_frame - 1.)
    w &= hdul[1]['Z'][:] < max(0., lambda_max / lambda_min_rest_frame - 1.)
    objs_ra = hdul[1]['RA'][:][w].astype('float64') * np.pi / 180.
    objs_dec = hdul[1]['DEC'][:][w].astype('float64') * np.pi / 180.
    objs_thingid = objs_thingid[w]
    hdul.close()
    userprint('INFO: Found {} quasars'.format(objs_ra.size))

    # Load list of transmission files
    if ((in_dir is None and in_filenames is None)
            or (in_dir is not None and in_filenames is not None)):
        userprint(("ERROR: No transmisson input files or both 'in_dir' and "
                   "'in_filenames' given"))
        sys.exit()
    elif in_dir is not None:
        files = sorted(glob.glob(in_dir + '/*/*/transmission*.fits*'))
        files = np.sort(np.array(files))
        hdul = fitsio.FITS(files[0])
        in_nside = hdul['METADATA'].read_header()['HPXNSIDE']
        nest = hdul['METADATA'].read_header()['HPXNEST']
        hdul.close()
        in_healpixs = healpy.ang2pix(in_nside,
                                     np.pi / 2. - objs_dec,
                                     objs_ra,
                                     nest=nest)
        if files[0].endswith('.gz'):
            end_of_file = '.gz'
        else:
            end_of_file = ''
        files = np.sort(
            np.array([("{}/{}/{healpix}/transmission-{}-{healpix}"
                       ".fits{}").format(in_dir,
                                         int(healpix // 100),
                                         in_nside,
                                         end_of_file,
                                         healpix=healpix)
                      for healpix in np.unique(in_healpixs)]))
    else:
        files = np.sort(np.array(in_filenames))
        nest = None
    userprint('INFO: Found {} files'.format(files.size))

    # Check if we should compute linear or log spaced deltas
    # Use the x_min/x_max/delta_x variables to stand in for either
    # linear of log spaced parameters
    if lin_spaced:
        x_min = lambda_min
        x_max = lambda_max
        delta_x = delta_lambda if delta_lambda is not None else 3.
    else:
        x_min = np.log10(lambda_min)
        x_max = np.log10(lambda_max)
        delta_x = delta_log_lambda if delta_log_lambda is not None else 3.e-4
    num_bins = int((x_max - x_min) / delta_x) + 1

    # Read the transmission files in parallel
    arguments = [(f, num_bins, objs_thingid, lambda_min, lambda_max,
                  lambda_min_rest_frame, lambda_max_rest_frame,
                  delta_log_lambda, delta_lambda, lin_spaced) for f in files]
    pool = Pool(processes=nproc)
    read_results = pool.starmap(read_transmission_file, arguments)
    pool.close()

    # Read and merge the results
    stack_flux = np.zeros(num_bins)
    stack_weight = np.zeros(num_bins)
    deltas = {}
    for res in read_results:
        if res is not None:
            healpix_deltas = res[0]
            healpix_stack_flux = res[1]
            healpix_stack_weight = res[2]

            for key in healpix_deltas.keys():
                if key not in deltas.keys():
                    deltas[key] = []
                deltas[key] += healpix_deltas[key]

            stack_flux += healpix_stack_flux
            stack_weight += healpix_stack_weight

            num_spec = np.sum([len(deltas[healpix]) for healpix in deltas])
            if (max_num_spec is not None and num_spec >= max_num_spec):
                break

    userprint('\n')

    # normalize stacked transmission
    w = stack_weight > 0.
    mean_flux = stack_flux
    mean_flux[w] /= stack_weight[w]

    #  save results
    out_filenames = {}
    for healpix in sorted(deltas):
        if nest is None:
            if out_healpix_order is None:
                out_healpix = healpix
            else:
                raise ValueError(
                    'Input HEALPix scheme not known, cannot '
                    'convert to scheme {}'.format(out_healpix_order))
        else:
            if nest:
                if out_healpix_order.lower() == 'nest':
                    out_healpix = healpix
                elif out_healpix_order.lower() == 'ring':
                    out_healpix = healpy.nest2ring(int(in_nside), int(healpix))
                else:
                    raise ValueError('HEALPix scheme {} not recognised'.format(
                        out_healpix_order))
            else:
                if out_healpix_order.lower() == 'nest':
                    out_healpix = healpy.ring2nest(int(in_nside), int(healpix))
                elif out_healpix_order.lower() == 'ring':
                    out_healpix = healpix
                else:
                    raise ValueError('HEALPix scheme {} not recognised'.format(
                        out_healpix_order))

        print('Input nested? {} // in_healpix={} // out_healpix={}'.format(
            nest, healpix, out_healpix))
        out_filenames[healpix] = out_dir + '/delta-{}'.format(
            out_healpix) + '.fits.gz'

    if use_old_weights:
        flux_variance = None
    else:
        # Compute variance
        stack_variance = np.zeros(len(mean_flux))
        var_weights = np.zeros(len(mean_flux))
        for hpix_deltas in deltas.values():
            for delta in hpix_deltas:
                lambda_array = delta.log_lambda
                if lin_spaced:
                    lambda_array = 10**(lambda_array)

                norm_lambda = (lambda_array - x_min) / delta_x + 0.5
                bins = np.floor(np.around(norm_lambda, decimals=3)).astype(int)

                stack_variance[bins] += (delta.delta - mean_flux[bins])**2
                var_weights[bins] += np.ones(len(bins))

        w = var_weights > 0.
        flux_variance = stack_variance
        flux_variance[w] /= var_weights[w]

    arguments = [(deltas[hpix], mean_flux, flux_variance, hpix,
                  out_filenames[hpix], x_min, delta_x, lin_spaced)
                 for hpix in deltas.keys()]
    pool = Pool(processes=nproc)
    _ = pool.starmap(write_delta_from_transmission, arguments)
    pool.close()

    userprint("")

    # Output the mean flux and other info
    dir_name = os.path.basename(os.path.normpath(out_dir))
    rebin_lambda = (x_min + np.arange(num_bins) * delta_x)
    results = fitsio.FITS(out_dir + '/../{}-stats.fits.gz'.format(dir_name),
                          'rw',
                          clobber=True)
    cols = [rebin_lambda, mean_flux, stack_weight, flux_variance, var_weights]
    names = ['LAMBDA', 'MEANFLUX', 'WEIGHTS', 'VAR', 'VARWEIGHTS']
    header = {}
    header['L_MIN'] = lambda_min
    header['L_MAX'] = lambda_max
    header['LR_MIN'] = lambda_min_rest_frame
    header['LR_MAX'] = lambda_max_rest_frame
    header['DEL_LL'] = delta_log_lambda
    header['DEL_L'] = delta_lambda
    header['LINEAR'] = lin_spaced
    results.write(cols, names=names, header=header, extname='STATS')
    results.close()

    userprint("")
Example #23
def plot_sp_info(datadir, maptype, xlabel, insetlabel, threshold,
                 generate_maps, histmin, histmax):
    footprint = fitsio.read(datadir +
                            'masks/y3a2_footprint_griz_1exp_v2.0.fits.gz',
                            ext=1)['I'].ravel()
    tdataSP_foot = np.empty(hp.nside2npix(nside))
    tdataSP_foot.fill(hp.UNSEEN)
    PIX_TO_ARCSEC = 0.263

    for i, (band, v) in enumerate(BAND_COLORS.items()):
        if band == 'u':
            continue
        if band == 'Y' and (maptype == 'sof' or maptype == 'mof'):
            continue
        print('Processing band', band)
        if maptype == 'maglim':
            filename = 'sp_maps/y3a2_' + band + '_o.4096_t.32768_maglim_EQU.fits'
            tdataSP = fitsio.read(datadir + filename)
        elif maptype == 'sof':
            filename = 'sp_maps/y3a2_gold_2_2_1_sof_nside4096_nest_' + band + '_depth.fits'
            tdataSP = fitsio.read(datadir + filename)['I'].ravel()
        elif maptype == 'auto':
            filename = 'sp_maps/y3a2_gold_2_2_1_auto_nside4096_nest_' + band + '_depth.fits.gz'
            tdataSP = fitsio.read(datadir + filename)['I'].ravel()
        elif maptype == 'fwhm':
            filename = 'sp_maps/y3a2_' + band + '_o.4096_t.32768_FWHM.WMEAN_EQU.fits.gz'
            tdataSP = fitsio.read(datadir + filename)
        elif maptype == 'skybrite':
            filename = 'sp_maps/y3a2_' + band + '_o.4096_t.32768_SKYBRITE.WMEAN_EQU.fits.gz'
            tdataSP = fitsio.read(datadir + filename)
        elif maptype == 'airmass':
            filename = 'sp_maps/y3a2_' + band + '_o.4096_t.32768_AIRMASS.WMEAN_EQU.fits.gz'
            tdataSP = fitsio.read(datadir + filename)
        elif maptype == 'exptime':
            filename = 'sp_maps/y3a2_' + band + '_o.4096_t.32768_EXPTIME.SUM_EQU.fits.gz'
            tdataSP = fitsio.read(datadir + filename)
        elif maptype == 'skyvar':
            filename = 'sp_maps/y3a2_' + band + '_o.4096_t.32768_SKYVAR.UNCERTAINTY_EQU.fits'
            tdataSP = fitsio.read(datadir + filename)
        elif maptype == 'sbcontrast':
            filename = 'sp_maps/Y3A1_SURFACE_BRIGHTNESS-v1.fits'
            tdataSP = fitsio.read(datadir + filename)
            if band == 'g':
                sel = (tdataSP['band'] == b'g') & (tdataSP['size'] == 10)
            elif band == 'r':
                sel = (tdataSP['band'] == b'r') & (tdataSP['size'] == 10)
            elif band == 'i':
                sel = (tdataSP['band'] == b'i') & (tdataSP['size'] == 10)
            elif band == 'z':
                sel = (tdataSP['band'] == b'z') & (tdataSP['size'] == 10)
            else:
                sel = (tdataSP['band'] == b'Y') & (tdataSP['size'] == 10)
            tdataSP = tdataSP[sel]
        else:
            print('Map type', maptype, 'not found')
            sys.exit(1)

        if len(tdataSP) != hp.nside2npix(nside):
            tdataSP_foot[tdataSP['PIXEL']] = tdataSP['SIGNAL']
            tdataSP_foot = tdataSP_foot * footprint
        else:
            tdataSP_foot = tdataSP * footprint

        tdataSP_foot[tdataSP_foot > threshold] = hp.UNSEEN
        tdataSP_foot[tdataSP_foot < 0] = hp.UNSEEN
        mask = (tdataSP_foot != hp.UNSEEN)

        if generate_maps:
            fig = plt.figure(figsize=(12., 4.))
            gs = plt.GridSpec(1, 4, wspace=0.001)

            fig.add_subplot(gs[:2])
            smap = DESSkymap()
            smap.draw_hpxmap(tdataSP['SIGNAL'],
                             hp.nest2ring(nside, tdataSP['PIXEL']), nside)
            if maptype == 'skybrite':
                smap.draw_inset_colorbar(fontsize=10,
                                         format='%.1f',
                                         label=insetlabel)
            else:
                smap.draw_inset_colorbar(fontsize=10, label=insetlabel)
            smap.draw_des()

            fig.add_subplot(gs[3])

        upper, lower = np.percentile(tdataSP_foot[mask], [84.075, 15.825])
        med = np.median(tdataSP_foot[mask])
        print('Median', med, '+', upper - med, '-', med - lower)
        if maptype == 'skyvar':
            medmag = -2.5 * np.log10(med / PIX_TO_ARCSEC**2) + 30
            print('Median (mag/arcsec**2)', medmag)

        value = tdataSP_foot[mask]
        plt.hist(value,
                 density=True,
                 histtype='step',
                 bins=100,
                 linewidth=2,
                 color=v,
                 label=band,
                 range=[histmin, histmax])
        plt.xlabel(xlabel)
        plt.xticks(fontsize=12)
        plt.ylabel('PDF')
        plt.tight_layout()
        plt.legend(loc='upper right')
        if generate_maps:
            #plt.savefig('/Users/nsevilla/y3gold-paper/figs/y3gold_'+maptype+'_'+band+'_map.png')
            plt.savefig('/Users/nsevilla/y3gold-paper/figs/y3gold_' + maptype +
                        '_' + band + '_map.pdf')

    if not generate_maps:
        #plt.savefig('/Users/nsevilla/y3gold-paper/figs/y3gold_'+maptype+'_hist.png')
        plt.savefig('/Users/nsevilla/y3gold-paper/figs/y3gold_' + maptype +
                    '_hist.pdf')
Example #24
def read_sky_map(filename, nest=False, distances=False, moc=False, **kwargs):
    """
    Read a LIGO/Virgo-type sky map and return a tuple of the HEALPix array
    and a dictionary of metadata from the header.

    Parameters
    ----------

    filename: string
        Path to the optionally gzip-compressed FITS file.

    nest: bool, optional
        If omitted or False, then detect the pixel ordering in the FITS file
        and rearrange if necessary to RING indexing before returning.

        If True, then detect the pixel ordering and rearrange if necessary to
        NESTED indexing before returning.

        If None, then preserve the ordering from the FITS file.

        Regardless of the value of this option, the ordering used in the FITS
        file is indicated as the value of the 'nest' key in the metadata
        dictionary.

    distances: bool, optional
        If true, then also read the additional HEALPix layers representing
        the conditional mean and standard deviation of distance as a function
        of sky location.

    moc: bool, optional
        If true, then preserve multi-order structure if present.

    Example
    -------

    Test that we can read a legacy IDL-compatible file
    (https://bugs.ligo.org/redmine/issues/5168):

    >>> import tempfile
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     nside = 512
    ...     npix = hp.nside2npix(nside)
    ...     ipix_nest = np.arange(npix)
    ...     hp.write_map(f.name, ipix_nest, nest=True, column_names=['PROB'])
    ...     m, meta = read_sky_map(f.name)
    ...     np.testing.assert_array_equal(m, hp.ring2nest(nside, ipix_nest))
    """
    m = Table.read(filename, format='fits', **kwargs)

    # Remove some keys that we do not need
    for key in ('PIXTYPE', 'EXTNAME', 'NSIDE', 'FIRSTPIX', 'LASTPIX',
                'INDXSCHM', 'MOCORDER'):
        m.meta.pop(key, None)

    if m.meta.pop('COORDSYS', 'C') != 'C':
        raise ValueError('LALInference only reads and writes sky maps in '
                         'equatorial coordinates.')

    try:
        value = m.meta.pop('ORDERING')
    except KeyError:
        pass
    else:
        if value == 'RING':
            m.meta['nest'] = False
        elif value == 'NESTED':
            m.meta['nest'] = True
        elif value == 'NUNIQ':
            pass
        else:
            raise ValueError(
                'ORDERING card in header has unknown value: {0}'.format(value))

    for fits_key, rows in itertools.groupby(FITS_META_MAPPING,
                                            lambda row: row[1]):
        try:
            value = m.meta.pop(fits_key)
        except KeyError:
            pass
        else:
            for row in rows:
                key, _, _, _, from_fits = row
                if from_fits is not None:
                    m.meta[key] = from_fits(value)

    if 'UNIQ' not in m.colnames:
        m = Table([col.ravel() for col in m.columns.values()], meta=m.meta)

    if 'UNIQ' in m.colnames and not moc:
        from ..bayestar.sky_map import rasterize
        m = rasterize(m)
        m.meta['nest'] = True
    elif 'UNIQ' not in m.colnames and moc:
        from ..bayestar.sky_map import derasterize
        if not m.meta['nest']:
            npix = len(m)
            nside = hp.npix2nside(npix)
            m = m[hp.nest2ring(nside, np.arange(npix))]
        m = derasterize(m)
        m.meta.pop('nest', None)

    if 'UNIQ' not in m.colnames:
        npix = len(m)
        nside = hp.npix2nside(npix)

        if nest is None:
            pass
        elif m.meta['nest'] and not nest:
            m = m[hp.ring2nest(nside, np.arange(npix))]
        elif not m.meta['nest'] and nest:
            m = m[hp.nest2ring(nside, np.arange(npix))]

    if moc:
        return m
    elif distances:
        return tuple(np.asarray(m[name])
                     for name in DEFAULT_NESTED_NAMES), m.meta
    else:
        return np.asarray(m[DEFAULT_NESTED_NAMES[0]]), m.meta
Example #25
                 overwrite=True,
                 nest=True,
                 fits_IDL=False,
                 partial=True)

    #Make plot of the healpix map
    upix, uval = [], []
    for jj, vv in enumerate(hp_aux):
        if vv != hp.UNSEEN:
            upix.append(jj)
            uval.append(vv)

    upix = np.array(upix)
    uval = np.array(uval)

    k = hp.nest2ring(4096, upix)

    m = Skymap(projection='cass',
               lon_0=67.,
               lat_0=-60.,
               celestial=False,
               llcrnrlon=56.68,
               urcrnrlon=77.46,
               llcrnrlat=-64.2,
               urcrnrlat=-55.40,
               parallels=False,
               meridians=False)

    m.draw_hpxmap(uval, k, nside=4096, xsize=1000)
    plt.savefig('healpix_imageLow.png')
Example #26
else:
    print('Exist\nReading\n')
    read = np.loadtxt('PIXELS/PixelList.txt')
    pixel = pd.DataFrame(read, columns=['HPIX_' + str(params['NSIDE'])])
    strips = pixel['HPIX_' + str(params['NSIDE'])].astype(int)
    len_strips = len(strips)

print("\n\n")
time0 = time()
for num, pix in enumerate(strips):
    timei = time()
    theta, phi = hp.pix2ang(params['NSIDE'], pix, lonlat=True, nest=False)

    params['pixel'] = pix

    if not ver.exist_or_not_file(params, restart):
        print(ver.exist_or_not_file(params, restart))
        tab = qr.query_function(params)

        timef = strftime('%H:%M:%S', gmtime(time() - timei))

        print("Program's time (hh:mm:ss): {}".format(timef))
        print("Pixel {}".format(pix))
        params['pixel'] = hp.nest2ring(params['NSIDE'], pix)
        if len(tab) > 0: wr.write_fits(tab, params)
        else: print("Not saved: 0 galaxies")

        print("\n\n")

print("End Programm: {0}".format(time0 - time()))
Example #27
    assert(x[i] == i)
assert(x[1501] == 0)

x.indexedsparse = True # Ring to indexed
assert(x.nside == 64)
assert(x.npix_allocated == 1500)
for i in range(1,1500):
    assert(x[i] == i)
assert(x[1501] == 0)

k,v = x.nonzero_pixels()
assert(len(k) == len(v) == 1500)
assert(set(v) == set(a)) # Lazy test that doesn't care about order

import healpy as hp
p0 = hp.nest2ring(64, (hp.ring2nest(32, 0) * 4 + np.arange(4)).astype(int))
v0 = sum([x[int(i)] for i in p0])

x2 = x.rebin(2, norm=False)
assert(x2[0] == v0)
assert(np.sum(x2) == np.sum(v))

x.shift_ra = True

# check attributes
xc = x.clone()
for attr in attrs:
    assert getattr(xc, attr) == getattr(x, attr), "Attribute {} mismatched on clone()".format(attr)
xc = x.clone(False)
for attr in attrs:
    assert getattr(xc, attr) == getattr(x, attr), "Attribute {} mismatched on clone(False)".format(attr)
Example #28
        for ip in [0, 1]:
            mp_sky = hp.smoothing((mpfg + mpc)[ip, :, inu],
                                  fwhm=beam_max,
                                  verbose=False)
            mps_d[0, ip, :, inu] = mp_sky + mps_no1[ip, :, inu]
            mps_d[1, ip, :, inu] = mp_sky + mps_no2[ip, :, inu]
    mps_d_rw = np.array([mps_no1, mps_no2]) + (mpfg + mpc)[None, :, :, :]
else:
    mps_d = np.array([mps_no1, mps_no2]) + (mpfg + mpc)[None, :, :, :]
if OUTPUT_LEVEL > 0:
    hp.write_map(predir + 'cmb_true.fits', amc, overwrite=True)

#Domain decomposition
ipnest_sub = hp.ring2nest(NSIDE_SPEC_DEFAULT,
                          np.arange(hp.nside2npix(NSIDE_SPEC_DEFAULT)))
ipring = hp.nest2ring(NSIDE_DEFAULT, np.arange(hp.nside2npix(NSIDE_DEFAULT)))
ip_patches_good = []
for ip_sub_ring in np.arange(hp.nside2npix(NSIDE_SPEC_DEFAULT)):
    ips_ring = ipring[ipnest_sub[ip_sub_ring] * NSIDE_RATIO**2 +
                      np.arange(NSIDE_RATIO**2)]
    if np.sum(msk[ips_ring] > ZER0) > 0:
        ip_patches_good.append(ips_ring[msk[ips_ring] > ZER0])

plotmap = -1. * np.ones(hp.nside2npix(NSIDE_DEFAULT))
for i, ips in enumerate(ip_patches_good):
    plotmap[ips] = i

#Zero mask outside edges for safety
ids_bad = np.where(msk < ZER0)[0]
msk[ids_bad] = 0.
if OUTPUT_LEVEL > 0:
Example #29
Z_unmatch = z_nbr_rm[~ind]
Z_match = z_nbr_rm[ind]
Zerr_match = zerr_nbr_rm[ind]
memid_match = memid_rm[ind]
z_spt_tem = z_spt_match[ind]
SNR_spt_tem = xi_spt_match[ind]
m500_spt_tem = m500_spt_match[ind]
z_spt_unmatch = z_spt_match[~ind]
zerr_spt_unmatch = zerr_spt_match[~ind]
zerr_spt_tem = zerr_spt_match[ind]
separation_match = separation[ind]
ra_spt_mat = ra_spt_match[ind]
dec_spt_mat = dec_spt_match[ind]
pix_spt_match = hp_pix_spt_match[ind]
pix_spt_unmatch = hp_pix_spt_match[~ind]
pix_spt_match = hp.nest2ring(4096, pix_spt_match)
pix_spt_unmatch = hp.nest2ring(4096, pix_spt_unmatch)

print(len(z_spt_tem), 'after ra/dec matching <3Mpc/h')
print(len(ind) - len(z_spt_tem), 'number of unmatched clusters')
'''
lambda_match = lam_act[ind]
Z_unmatch = Ztile_des[~ind]
Z_match = Ztile_des[ind]
rades_match = rades[ind]
decdes_match = decdes[ind]
Zerr_match = Zerrtile_des[ind]
memid_match = memid_des[ind]
z_act_tem = z_act_match4[ind]
z_act_unmatch = z_act_match4[~ind]
zerr_act_tem = zerr_act_match4[ind]
Example #30
def get_position(Nsides, pixel_nest):
    pix_r = hp.nest2ring(Nsides, pixel_nest)
    theta, phi = hp.pixelfunc.pix2ang(Nsides, pix_r)
    return (pix_r, theta, phi)
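A hedged usage sketch; note that hp.pix2ang(Nsides, pixel_nest, nest=True) would give the same angles without the explicit nest2ring step.

import healpy as hp

pix_r, theta, phi = get_position(64, 1234)
print(pix_r, theta, phi)          # RING index, colatitude and longitude in radians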
Example #31
    def __init__(self, config):
        # record for posterity
        self.depthfile = config.depthfile

        depthinfo, hdr = fitsio.read(self.depthfile, ext=1, header=True, upper=True)
        # convert into catalog for convenience...
        depthinfo = Catalog(depthinfo)

        #self.npix = depthinfo.hpix.size
        nside_mask = hdr['NSIDE']
        nest = hdr['NEST']
        self.nsig = hdr['NSIG']
        self.zp = hdr['ZP']
        self.nband = hdr['NBAND']
        self.w = hdr['W']
        self.eff = hdr['EFF']

        self.config_area = config.area
        self.submask_hpix = config.hpix
        self.submask_nside = config.nside
        self.submask_border = config.border
        self.galfile_nside = config.galfile_nside

        if nest != 1:
            hpix_ring = depthinfo.hpix
        else:
            hpix_ring = hp.nest2ring(nside_mask, depthinfo.hpix)

        # if we have a sub-region of the sky, cut down mask to save memory
        if self.submask_hpix > 0:
            border = np.radians(self.submask_border) + hp.nside2resol(nside_mask)
            theta, phi = hp.pix2ang(self.submask_nside, self.submask_hpix)
            radius = np.sqrt(2) * (hp.nside2resol(self.submask_nside)/2. + border)
            pixint = hp.query_disc(nside_mask, hp.ang2vec(theta, phi),
                                   radius, inclusive=False)
            suba, subb = esutil.numpy_util.match(pixint, hpix_ring)
            hpix_ring = hpix_ring[subb]
            duse = subb
        else:
            duse = np.arange(hpix_ring.size, dtype='i4')

        self.nside = nside_mask
        self.offset = np.min(hpix_ring) - 1
        self.ntot = np.max(hpix_ring) - np.min(hpix_ring) + 3

        self.npix = hpix_ring.size

        self.fracgood = np.zeros(self.npix + 1, dtype='f4')
        try:
            self.fracgood_float = 1
            self.fracgood[0:self.npix] = depthinfo[duse].fracgood
        except AttributeError:
            self.fracgood_float = 0
            self.fracgood[0:self.npix] = 0

        self.exptime = np.zeros(self.npix + 1, dtype='f4')
        self.exptime[0:self.npix] = depthinfo[duse].exptime
        self.limmag = np.zeros(self.npix + 1, dtype='f4')
        self.limmag[0:self.npix] = depthinfo[duse].limmag
        self.m50 = np.zeros(self.npix + 1, dtype='f4')
        self.m50[0:self.npix] = depthinfo[duse].m50

        # And the overflow bins
        self.fracgood[self.npix] = hp.UNSEEN
        self.exptime[self.npix] = hp.UNSEEN
        self.limmag[self.npix] = hp.UNSEEN
        self.m50[self.npix] = hp.UNSEEN

        # The look-up table
        #  Set default to overflow bin
        self.hpix_to_index = np.zeros(self.ntot, dtype='i4') + self.npix
        self.hpix_to_index[hpix_ring - self.offset] = np.arange(self.npix)
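
    # The look-up table above is presumably meant to be queried by subtracting
    # `offset` from a RING pixel number; anything outside the stored range lands
    # in the overflow bin and returns hp.UNSEEN. A hypothetical helper (not part
    # of the original class) illustrating that pattern:
    def get_depth_values(self, hpix_ring_query):
        idx = np.clip(hpix_ring_query - self.offset, 0, self.ntot - 1)
        indices = self.hpix_to_index[idx]   # unknown pixels hit the overflow bin
        return self.limmag[indices], self.fracgood[indices]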
Example #32
0
    def calculate_overlap_with_observations(self,
                                            fields=None,
                                            pid=None,
                                            first_det_window_days=3.0,
                                            min_sep=0.01):
        if fields is None:
            mns = self.get_multi_night_summary(first_det_window_days)

        else:

            class MNS:
                def __init__(self, data):
                    self.data = pandas.DataFrame(
                        data, columns=["field", "ra", "dec", "datetime"])

            data = []

            for f in fields:
                ra, dec = ztfquery_fields.get_field_centroid(f)[0]
                for i in range(2):
                    t = Time(self.t_min.jd + 0.1 * i, format="jd").utc
                    t.format = "isot"
                    t = t.value
                    data.append([f, ra, dec, t])

            mns = MNS(data)

        # Skip all 64 simultaneous quadrant entries, we only need one per observation for qa log
        # data = mns.data.copy().iloc[::64]

        data = mns.data.copy()

        ras = np.ones_like(data["field"]) * np.nan
        decs = np.ones_like(data["field"]) * np.nan

        # Actually load up ra/dec

        veto_fields = []

        for field in list(set(data["field"])):

            mask = data["field"] == field

            res = ztfquery_fields.get_field_centroid(field)

            if len(res) > 0:

                ras[mask] = res[0][0]
                decs[mask] = res[0][1]

            else:
                veto_fields.append(field)

        if len(veto_fields) > 0:
            self.logger.info(
                f"No RA/Dec found by ztfquery for fields {veto_fields}. "
                f"These observation have to be ignored.")

        data["ra"] = ras
        data["dec"] = decs

        mask = np.array([~np.isnan(x) for x in data["ra"]])

        data = data[mask]

        if pid is not None:
            pid_mask = data["pid"] == str(pid)
            data = data[pid_mask]

        obs_times = np.array([
            Time(
                data["datetime"].iat[i].replace(" ", "T"),
                format="isot",
                scale="utc",
            ) for i in range(len(data))
        ])

        if first_det_window_days is not None:
            first_det_mask = [
                x < Time(self.t_min.jd + first_det_window_days,
                         format="jd").utc for x in obs_times
            ]
            data = data[first_det_mask]
            obs_times = obs_times[first_det_mask]

        self.logger.info(f"Most recent observation found is {obs_times[-1]}")
        self.logger.info("Unpacking observations")

        pix_map = dict()
        pix_obs_times = dict()

        infile = os.path.join("nuztf", "data",
                              f"ztf_fields_ipix_nside={self.nside}.pickle")

        # Generate a lookup table for field healpix
        # if none exists (because this is computationally costly)
        if not os.path.isfile(infile):
            self.generate_flatpix_file()

        with open(infile, "rb") as f:
            field_pix = pickle.load(f)

        for i, obs_time in enumerate(tqdm(obs_times)):

            field = data["field"].iat[i]

            flat_pix = field_pix[field]

            t = obs_time.jd

            for p in flat_pix:
                if p not in pix_obs_times.keys():
                    pix_obs_times[p] = [t]
                else:
                    pix_obs_times[p] += [t]

                if p not in pix_map.keys():
                    pix_map[p] = [field]
                else:
                    pix_map[p] += [field]

        npix = hp.nside2npix(self.nside)
        theta, phi = hp.pix2ang(self.nside, np.arange(npix), nest=False)
        radecs = SkyCoord(ra=phi * u.rad, dec=(0.5 * np.pi - theta) * u.rad)
        idx = np.where(np.abs(radecs.galactic.b.deg) <= 10.0)[0]

        double_in_plane_pixels = []
        double_in_plane_probs = []
        single_in_plane_pixels = []
        single_in_plane_prob = []
        veto_pixels = []
        plane_pixels = []
        plane_probs = []
        times = []
        double_no_plane_prob = []
        double_no_plane_pixels = []
        single_no_plane_prob = []
        single_no_plane_pixels = []

        overlapping_fields = []

        for i, p in enumerate(tqdm(hp.nest2ring(self.nside, self.pixel_nos))):

            if p in pix_obs_times.keys():

                if p in idx:
                    plane_pixels.append(p)
                    plane_probs.append(self.map_probs[i])

                obs = pix_obs_times[p]

                # check which healpix are observed twice
                if max(obs) - min(obs) > min_sep:
                    # is it in galactic plane or not?
                    if p not in idx:
                        double_no_plane_prob.append(self.map_probs[i])
                        double_no_plane_pixels.append(p)
                    else:
                        double_in_plane_probs.append(self.map_probs[i])
                        double_in_plane_pixels.append(p)

                else:
                    if p not in idx:
                        single_no_plane_pixels.append(p)
                        single_no_plane_prob.append(self.map_probs[i])
                    else:
                        single_in_plane_prob.append(self.map_probs[i])
                        single_in_plane_pixels.append(p)

                overlapping_fields += pix_map[p]

                times += list(obs)
            else:
                veto_pixels.append(p)

        overlapping_fields = sorted(list(set(overlapping_fields)))

        try:
            self.first_obs = Time(min(times), format="jd")
            self.first_obs.utc.format = "isot"
            self.last_obs = Time(max(times), format="jd")
            self.last_obs.utc.format = "isot"

        except ValueError:
            err = (
                f"No observations of this field were found at any time between {self.t_min} and"
                f"{obs_times[-1]}. Coverage overlap is 0%, but recent observations might be missing!"
            )
            self.logger.error(err)
            raise ValueError(err)

        self.logger.info(f"Observations started at {self.first_obs.jd}")

        return (
            double_in_plane_pixels,
            double_in_plane_probs,
            single_in_plane_pixels,
            single_in_plane_prob,
            veto_pixels,
            plane_pixels,
            plane_probs,
            times,
            double_no_plane_prob,
            double_no_plane_pixels,
            single_no_plane_prob,
            single_no_plane_pixels,
            overlapping_fields,
        )
Example #33
0
    hpx2048index = [int(a, 2) for a in hpx2048index_bin]
    hpx2048index_enlarged = [int(a, 2) for a in hpx2048index_enlarged_bin]

    mask_enlarged = np.in1d(correctallhpxindices, hpx2048index_enlarged)
    numspecs_enlarged = len(mask_enlarged[mask_enlarged])
    if numspecs_enlarged < 1:
        continue

    print('number of specs we want', len(hpx2048index))
    print('number of specs we will use for gridding', numspecs_enlarged)

    fileindices = np.unique(infodict['allfileindices'][mask_enlarged])
    filenames = np.array(infodict['filenames'])[fileindices]
    print('number of files contributing', len(filenames))

    targethpx = healpy.nest2ring(targetnside, hpx2048index)
    theta, phi = healpy.pix2ang(targetnside, targethpx, nest=False)
    header = pf.getheader(datapath + filenames[0], 1)
    # bugfix:
    header['CDELT3'] = 1288.2149691241

    if targetres is not None:
        print('computing new gridding kernel, to match target resolution of',
              targetres * 60, 'arcmin')
        if targetres < header['BMAJ']:
            print('error, target resolution smaller than original resolution')
            sys.exit(1)
        if targetres < header['BMAJ'] * np.sqrt(1 + 0.05 ** 2):
            print('warning, target resolution too small (gridding kernel would be '
                  'less than 5% of original resolution)')
        kernelsizefwhm = np.sqrt(targetres ** 2 - header['BMAJ'] ** 2)
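
The kernel size above follows from adding Gaussian beams in quadrature: smoothing a beam of FWHM BMAJ with a Gaussian kernel of FWHM k yields an effective resolution sqrt(BMAJ**2 + k**2), so k = sqrt(targetres**2 - BMAJ**2). A small numeric sketch (the values are illustrative, not taken from the survey above):

import numpy as np

bmaj = 10.8 / 60.        # original beam FWHM in degrees
targetres = 16.0 / 60.   # requested resolution in degrees
kernelsizefwhm = np.sqrt(targetres ** 2 - bmaj ** 2)
print(kernelsizefwhm * 60., 'arcmin')   # ~11.8 arcmin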
Example #34
0
def tile_geometry(polygon, nside, ring=False, return_coords=False, inclusive=True):
    """Returns a list of pixels that tile a Shapely polygon.

    While `healpy.query_polygon` provides an efficient method to tile a
    polygon, that function assumes that the lines joining the vertices of the
    polygon are the segments of the great circles that intersect the vertices.
    This matters little for polygons that cover a small area on the
    sky, but for large polygons the result can differ significantly from what
    one would expect.

    This function tiles a `Polygon <shapely:Polygon>` assuming a Euclidean
    distance between the vertices. The polygon must have longitude
    limits defined between -360 and 720 degrees.

    If ``inclusive=True`` the function will return all pixels that overlap with
    the region. The area of the pixel is approximated as a circle with the
    same area as a pixel. This approximation is good for low latitudes but
    becomes worse for higher latitudes, where the shape of the pixel cannot
    be approximated by a circle. If ``inclusive=False`` only pixels whose
    centre is inside the region will be returned.

    Parameters
    ----------
    polygon : shapely:Polygon or ~numpy.ndarray
        The polygon to tile. If an array, it must be a collection of ``Nx2``
        points defining the position of the vertices. The polygon must be
        convex.
    nside : int
        The nside of the pixels used to tile the polygon.
    ring : bool
        By default the function returns the values in the nested pixel
        ordering. If ``ring=True`` the equivalent ring pixels will be returned.
    return_coords : bool
        If `True`, returns an array with the longitude and latitude of the
        pixels in degrees.
    inclusive : bool
        Whether to return pixels that only partially overlap with the region.

    Returns
    -------
    tiling : `~numpy.ndarray`
        An array with the list of pixels that tile the geometry or (if
        ``return_coords=True``) the longitude and latitude of the pixels.

    """

    # nside = 2^k

    assert numpy.log2(nside).is_integer(), 'nside is not a power of 2.'
    k_end = int(numpy.log2(nside))

    if not isinstance(polygon, shapely.geometry.Polygon):
        polygon = shapely.geometry.Polygon(polygon.tolist())

    # Create a prepared polygon. This allows only contained and intersect
    # operations but that's all we need and it's more efficient.
    prep_polygon = shapely.prepared.PreparedGeometry(polygon)

    pixels = []
    intersect = []

    for kk in range(0, k_end + 1):

        nside_k = 2**kk

        # Approximates the pixel as a circle of radius r.
        rr = numpy.sqrt(healpy.nside2pixarea(nside_k, degrees=True) / numpy.pi)

        # If k=0 (first HealPix level) we test all the 12 pixels. Otherwise we
        # take the pixels that overlapped in the previous level and test each
        # one of their children.
        if kk == 0 and len(intersect) == 0:
            pix_to_test = list(range(0, 12))
        else:
            pix_to_test = nested_regrade(intersect, 2**(kk - 1), nside_k).flatten().tolist()
            intersect = []

        for pix in pix_to_test:

            lon, lat = healpy.pix2ang(nside_k, pix, nest=True, lonlat=True)

            # We offset the pixel to check so that if the polygon wraps around
            # [0, 360] we still overlap with it.
            for offset in [0, -360, 360]:

                # Create a Point object with a radius dd centred at the
                # position of the pixel +/- the offset.
                point = shapely.geometry.Point(lon + offset, lat).buffer(rr)

                # If a pixel is completely contained by the polygon, adds
                # all the nested pixels at the nside resolution and we are done.
                if prep_polygon.contains(point):
                    if nside_k < nside:
                        pixels += nested_regrade(pix, nside_k, nside).flatten().tolist()
                    else:
                        pixels.append(pix)
                    break

                # If we are at the final nside level and the pixel intersects
                # with the polygon, we include it.
                if nside_k == nside and inclusive and prep_polygon.intersects(point):
                    pixels.append(pix)
                    break

                # If we are not yet at the final nside level we want to be greedy.
                # We create a pixel with twice the radius and check if it intersects.
                # If it does we add it to the list of pixels to test in the next
                # nside level. We do this to compensate for the fact that pixels
                # at high latitudes are significantly non-circular and if we
                # just use point we may be missing some pixels.
                if nside_k < nside:
                    point_2r = shapely.geometry.Point(lon + offset, lat).buffer(2. * rr)
                    if prep_polygon.intersects(point_2r):
                        intersect.append(pix)
                        break

        intersect = numpy.unique(intersect).tolist()

    if len(pixels) == 0:
        raise ValueError('the list of tiling pixels is empty.')

    pixels = numpy.unique(pixels)

    if return_coords:
        lon, lat = healpy.pix2ang(nside, pixels, nest=True, lonlat=True)
        return numpy.array([lon, lat]).T

    if ring:
        return healpy.nest2ring(nside, pixels)

    return pixels
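
A minimal usage sketch, assuming shapely and healpy are installed and that the nested_regrade helper used above is importable from the same module; the rectangle is illustrative:

import numpy

# A roughly 10 x 10 degree box given by its vertices (lon, lat in degrees).
box = numpy.array([[30., -5.], [40., -5.], [40., 5.], [30., 5.]])

pix_nest = tile_geometry(box, nside=32)                     # NESTED pixels (default)
pix_ring = tile_geometry(box, nside=32, ring=True)          # same pixels in RING ordering
lon_lat = tile_geometry(box, nside=32, return_coords=True)  # pixel centres in degrees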
Example #35
0
        stars_color=opts.stars_color,
        stars_alpha=opts.stars_alpha,
        arms=arms,
        arms_color=opts.arms_color,
        arms_linewidth=opts.arms_linewidth,
        arms_alpha=opts.arms_alpha,
    )

for label in labels:
    fits = maps[label]['fits']
    if opts.verbose:
        print "reading map from", fits
    post, header = hp.read_map(fits, h=True, verbose=False)
    npix = len(post)
    nside = hp.npix2nside(npix)
    if (dict(header)['ORDERING'] == 'NEST'):  ### convert to RING ordering
        post = hp.reorder(post, n2r=True)
    if opts.verbose:
        print "    nside=%d" % nside

    fig, ax = ct.genCR_fig_ax(figind,
                              figwidth=opts.figwidth,
                              figheight=opts.figheight,
                              grid=opts.grid)
    figind += 1

    ct.heatmap(post,
               ax,
               xlim,
               ylim,
               color_map=opts.color_map,
Example #36
0
    maps_s2_noise[:,1:,:]*=2
maps_noise_weights=1./maps_s2_noise

npix_spec=hp.nside2npix(nside_spec)
ipix0=hp.ring2nest(par.nside_spec,hp.ang2pix(par.nside_spec,theta_patch*np.pi/180,phi_patch*np.pi/180))
print "pixel", ipix0
map_mean=np.zeros([par.n_pix,par.n_pol,par.n_comp])
map_sigma=np.zeros([par.n_pix,par.n_pol,par.n_comp])
map_xspec_mean=np.zeros([npix_spec,par.n_spec_vary])
map_xspec_sigma=np.zeros([npix_spec,par.n_spec_vary])

#for ipix in [ipix0] :
for ipix in np.arange(npix_spec) :
    if ipix!=ipix0 :
        continue
    ipix_list=hp.nest2ring(par.nside,ipix*par.n_sub+np.arange(par.n_sub))

    if plot_stuff :
        map_show=np.zeros(par.n_pix); map_show[ipix_list]=1.0; hp.mollview(map_show); plt.show()
        
    patch_cmb_true=map_cmb_true[ipix_list,:]
    patch_obs=maps_obs[ipix_list,:,:]
    patch_noise_weights=maps_noise_weights[ipix_list,:,:]

    if plot_stuff :
        for ipol in np.arange(par.n_pol) :
            plt.title("True CMB %d"%ipol)
            plt.imshow(np.reshape(patch_cmb_true[:,ipol],(par.n_side_sub,par.n_side_sub)),interpolation='none',origin='lower');
            plt.show()

    if plot_stuff==2 :
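
The ipix_list line above exploits the NESTED hierarchy: the high-resolution children of a low-resolution NESTED pixel occupy a contiguous block of indices, which can then be converted to RING. A self-contained sketch of the same trick (resolutions are illustrative):

import numpy as np
import healpy as hp

nside_spec, nside = 8, 64
n_sub = (nside // nside_spec) ** 2                  # high-res pixels per low-res pixel
ipix = 42                                           # a low-resolution NESTED pixel
children_nest = ipix * n_sub + np.arange(n_sub)     # contiguous block in NESTED ordering
children_ring = hp.nest2ring(nside, children_nest)  # the same pixels in RING ordering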
Example #37
0
def worker(image, return_dict, procnum):
    #Read image header and get coordinates of the image

    print(i, 'working with image', image)
    hdr = pf.open(image)
    w = astrowcs.WCS(hdr[1].header)
    corners_image = w.calc_footprint(center=False)
    corners_pixels = w.calc_footprint(center=True)
    nx = hdr[1].header['naxis1']
    ny = hdr[1].header['naxis2']

    coords1 = corners_image[0]
    coords2 = corners_image[1]
    coords3 = corners_image[2]
    coords4 = corners_image[3]
    #Read image data
    image_data = pf.getdata(image)

    #Define healsparse polygon within the image
    ra = [coords1[0], coords4[0], coords3[0], coords2[0]]
    dec = [coords1[1], coords4[1], coords3[1], coords2[1]]
    raC = (np.max(ra) + np.min(ra)) / 2.
    decC = (np.max(dec) + np.min(dec)) / 2.

    nside = 2**17
    poly = hs.Polygon(ra=ra, dec=dec, value=1)
    smap = poly.get_map(nside=nside, dtype=np.int16)
    a = smap.validPixels

    b = hp.nest2ring(nside, a)
    #a are pixels in NEST, b in RING
    #Get center coordinates of each pixel
    raF, decF = hp.pix2ang(nside, a, lonlat=True, nest=True)

    #Get nx, ny in image from healsparse pixels
    myhdr = hdr[1].header
    wcs = wc.WCS(myhdr)
    xn, yn = wcs.sky2image(raF, decF)
    #Get associated weight values
    values = []
    for x, y in zip(xn, yn):

        values.append(image_data[int(y - 1), int(x - 1)])

    values = np.array(values)
    #Define healsparse map
    hsp_map_2 = hs.HealSparseMap.makeEmpty(512, nside, dtype=np.int16)

    hsp_map_2.updateValues(a, values)

    #Degrade to nside=4096
    low_res_hsp = hsp_map_2.degrade(4096)

    j = low_res_hsp.validPixels

    test_values_2 = low_res_hsp.getValuePixel(j)

    #Uncomment if you want in RING format
    #k=hp.nest2ring(4096,j)

    #hp_aux = np.zeros(hp.nside2npix(4096))+hp.UNSEEN
    #hp_aux[j] = test_values_2

    hdr.close()

    return_dict[procnum] = np.array([j, test_values_2]).transpose()
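
Since worker fills a shared dictionary keyed by the process number, it is presumably driven by a multiprocessing loop along these lines (the image list and file pattern are hypothetical):

import glob
import multiprocessing as mp

if __name__ == '__main__':
    images = sorted(glob.glob('coadd_tile_*.fits.fz'))   # hypothetical input images
    manager = mp.Manager()
    return_dict = manager.dict()
    jobs = []
    for procnum, image in enumerate(images):
        p = mp.Process(target=worker, args=(image, return_dict, procnum))
        p.start()
        jobs.append(p)
    for p in jobs:
        p.join()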
Example #38
0
def healpix2radec(ipix,nside):
    ipix = healpy.nest2ring(nside,ipix)
    (theta,phi) = healpy.pix2ang(nside,ipix)
    return polar2radec(theta,phi)    
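
polar2radec is defined elsewhere in the same module; healpy can return the equivalent result directly with lonlat=True, assuming an equatorial pixelisation so that longitude and latitude correspond to RA and Dec in degrees:

import healpy

nside = 64
ipix_nest = 1234                                         # illustrative NESTED pixel
ipix_ring = healpy.nest2ring(nside, ipix_nest)
ra, dec = healpy.pix2ang(nside, ipix_ring, lonlat=True)  # degrees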
Example #39
0
def read_sky_map(filename, nest=False, distances=False, moc=False):
    """
    Read a LIGO/Virgo-type sky map and return a tuple of the HEALPix array
    and a dictionary of metadata from the header.

    Parameters
    ----------

    filename: string
        Path to the optionally gzip-compressed FITS file.

    nest: bool, optional
        If omitted or False, then detect the pixel ordering in the FITS file
        and rearrange if necessary to RING indexing before returning.

        If True, then detect the pixel ordering and rearrange if necessary to
        NESTED indexing before returning.

        If None, then preserve the ordering from the FITS file.

        Regardless of the value of this option, the ordering used in the FITS
        file is indicated as the value of the 'nest' key in the metadata
        dictionary.

    distances: bool, optional
        If true, then also read the additional HEALPix layers representing
        the conditional mean and standard deviation of distance as a function
        of sky location.

    moc: bool, optional
        If true, then preserve multi-order structure if present.
    """
    m = Table.read(filename, format='fits')

    del m.meta['PIXTYPE']

    if m.meta.pop('COORDSYS', 'C') != 'C':
        raise ValueError('LALInference only reads and writes sky maps in equatorial coordinates.')

    try:
        value = m.meta.pop('ORDERING')
    except KeyError:
        pass
    else:
        if value == 'RING':
            m.meta['nest'] = False
        elif value == 'NESTED':
            m.meta['nest'] = True
        elif value == 'NUNIQ':
            pass
        else:
            raise ValueError(
                'ORDERING card in header has unknown value: {0}'.format(value))

    for fits_key, rows in itertools.groupby(FITS_META_MAPPING, lambda row: row[1]):
        try:
            value = m.meta.pop(fits_key)
        except KeyError:
            pass
        else:
            for row in rows:
                key, _, _, _, from_fits = row
                if from_fits is not None:
                    m.meta[key] = from_fits(value)

    if 'UNIQ' in m.colnames and not moc:
        from ..bayestar.sky_map import rasterize
        m = rasterize(m)
        m.meta['nest'] = True

    if 'UNIQ' not in m.colnames:
        npix = len(m)
        nside = hp.npix2nside(npix)

        if nest is None:
            pass
        elif m.meta['nest'] and not nest:
            m = m[hp.ring2nest(nside, np.arange(npix))]
        elif not m.meta['nest'] and nest:
            m = m[hp.nest2ring(nside, np.arange(npix))]

    if not moc:
        if distances:
            return tuple(np.asarray(m[name]).ravel() for name in DEFAULT_NESTED_NAMES), m.meta
        else:
            return np.asarray(m[DEFAULT_NESTED_NAMES[0]]).ravel(), m.meta

    return m
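
A minimal usage sketch; the file name is hypothetical and should point at a LIGO/Virgo-style FITS sky map:

prob, meta = read_sky_map('bayestar.fits.gz', nest=False)
print(meta.get('nest'), prob.sum())   # ordering flag from the file; probabilities should sum to ~1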
Example #40
0
def hsp2hpx(hspmap):
    """ Convert a healsparse map to a sparse healpix map """
    nside = hspmap.nside_sparse
    pixels = hp.nest2ring(nside, hspmap.valid_pixels)
    hpxmap = hspmap.get_values_pix(pixels, nest=False)  # pixels are RING after nest2ring above
    return hpxmap, pixels, nside
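
A usage sketch assuming healsparse is installed; the input file name is hypothetical:

import healsparse as hsp
import healpy as hp
import numpy as np

hspmap = hsp.HealSparseMap.read('coverage_hsp.fits')
values, pixels, nside = hsp2hpx(hspmap)

# Expand into an ordinary full-sky RING-ordered healpy map if needed.
full = np.full(hp.nside2npix(nside), hp.UNSEEN)
full[pixels] = values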
Example #41
0
def read_map(filename, nest=False, hdu=None, h=False, verbose=True):
    """Read a healpix map from a fits file.  Partial-sky files,
    if properly identified, are expanded to full size and filled with UNSEEN.
    Uses fitsio to mirror much (but not all) of the functionality of healpy.read_map
    
    Parameters
    ----------
    filename : str 
      the fits file name
    nest : bool, optional
      If True return the map in NEST ordering, otherwise in RING ordering;
      use fits keyword ORDERING to decide whether conversion is needed or not.
      If None, no conversion is performed.
    hdu : int, optional
      the header number to look at (start at 0)
    h : bool, optional
      If True, also return the header. Default: False.
    verbose : bool, optional
      If True, print a number of diagnostic messages
    
    Returns
    -------
    m [, header] : array, optionally with header appended
      The map read from the file, and the header if *h* is True.
    """

    data, hdr = fitsio.read(filename, header=True, ext=hdu)

    nside = int(hdr.get('NSIDE'))
    if verbose: print('NSIDE = {0:d}'.format(nside))

    if not healpy.isnsideok(nside):
        raise ValueError('Wrong nside parameter.')
    sz = healpy.nside2npix(nside)

    ordering = hdr.get('ORDERING', 'UNDEF').strip()
    if verbose: print('ORDERING = {0:s} in fits file'.format(ordering))

    schm = hdr.get('INDXSCHM', 'UNDEF').strip()
    if verbose: print('INDXSCHM = {0:s}'.format(schm))
    partial = (schm == 'EXPLICIT')

    # column names of the table
    fields = data.dtype.names

    # Could be done more efficiently (but complicated) by reordering first
    if partial:
        m = healpy.UNSEEN * np.ones(sz, dtype=data[fields[1]].dtype)
        m[data[fields[0]]] = data[fields[1]]
    else:
        m = data[fields[0]].ravel()

    if not healpy.isnpixok(m.size) or (sz > 0 and sz != m.size):
        if verbose:
            print('nside={0:d}, sz={1:d}, m.size={2:d}'.format(nside, sz, m.size))
        raise ValueError('Wrong nside parameter.')
    if nest is not None:
        if nest and ordering.startswith('RING'):
            idx = healpy.nest2ring(nside, np.arange(m.size, dtype=np.int32))
            m = m[idx]
            if verbose: print('Ordering converted to NEST')
        elif (not nest) and ordering.startswith('NESTED'):
            idx = healpy.ring2nest(nside, np.arange(m.size, dtype=np.int32))
            m = m[idx]
            if verbose: print('Ordering converted to RING')

    if h: return m, hdr
    else: return m
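
A minimal usage sketch; the file name is hypothetical:

m, hdr = read_map('partial_depth_map.fits.gz', nest=False, hdu=1, h=True, verbose=False)
print(healpy.npix2nside(m.size))   # full-sky RING map, partial coverage filled with UNSEEN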
Example #42
0
    def _unstage_data(
        self,
        comm,
        data,
        nsamp,
        nnz,
        nnz_full,
        obs_period_ranges,
        detectors,
        signal_type,
        pixels_dtype,
        nside,
        weight_dtype,
    ):
        """ Clear Madam buffers, restore pointing into TOAST caches
        and cache the destriped signal.

        """
        auto_timer = timing.auto_timer(type(self).__name__)
        self._madam_timestamps = None
        self._cache.destroy("timestamps")

        if self._conserve_memory:
            nodecomm = comm.Split_type(MPI.COMM_TYPE_SHARED, comm.rank)
            nread = nodecomm.size
        else:
            nodecomm = MPI.COMM_SELF
            nread = 1

        for iread in range(nread):
            nodecomm.Barrier()
            if nodecomm.rank % nread != iread:
                continue
            if self._name_out is not None:
                global_offset = 0
                for obs, period_ranges in zip(data.obs, obs_period_ranges):
                    tod = obs["tod"]
                    nlocal = tod.local_samples[1]
                    for idet, det in enumerate(detectors):
                        signal = np.ones(nlocal, dtype=signal_type) * np.nan
                        offset = global_offset
                        for istart, istop in period_ranges:
                            nn = istop - istart
                            dslice = slice(
                                idet * nsamp + offset, idet * nsamp + offset + nn
                            )
                            signal[istart:istop] = self._madam_signal[dslice]
                            offset += nn
                        cachename = "{}_{}".format(self._name_out, det)
                        tod.cache.put(cachename, signal, replace=True)
                    global_offset = offset
            self._madam_signal = None
            self._cache.destroy("signal")

            if not self._purge_pixels:
                # restore the pixels from the Madam buffers
                global_offset = 0
                for obs, period_ranges in zip(data.obs, obs_period_ranges):
                    tod = obs["tod"]
                    nlocal = tod.local_samples[1]
                    for idet, det in enumerate(detectors):
                        pixels = -np.ones(nlocal, dtype=pixels_dtype)
                        offset = global_offset
                        for istart, istop in period_ranges:
                            nn = istop - istart
                            dslice = slice(
                                idet * nsamp + offset, idet * nsamp + offset + nn
                            )
                            pixels[istart:istop] = self._madam_pixels[dslice]
                            offset += nn
                        npix = 12 * nside ** 2
                        good = np.logical_and(pixels >= 0, pixels < npix)
                        if not self._pixels_nested:
                            pixels[good] = hp.nest2ring(nside, pixels[good])
                        pixels[np.logical_not(good)] = -1
                        cachename = "{}_{}".format(self._pixels, det)
                        tod.cache.put(cachename, pixels, replace=True)
                    global_offset = offset
            self._madam_pixels = None
            self._cache.destroy("pixels")

            if not self._purge_weights and nnz == nnz_full:
                # restore the weights from the Madam buffers
                global_offset = 0
                for obs, period_ranges in zip(data.obs, obs_period_ranges):
                    tod = obs["tod"]
                    nlocal = tod.local_samples[1]
                    for idet, det in enumerate(detectors):
                        weights = np.zeros([nlocal, nnz], dtype=weight_dtype)
                        offset = global_offset
                        for istart, istop in period_ranges:
                            nn = istop - istart
                            dwslice = slice(
                                (idet * nsamp + offset) * nnz,
                                (idet * nsamp + offset + nn) * nnz,
                            )
                            weights[istart:istop] = self._madam_pixweights[
                                dwslice
                            ].reshape([-1, nnz])
                            offset += nn
                        cachename = "{}_{}".format(self._weights, det)
                        tod.cache.put(cachename, weights, replace=True)
                    global_offset = offset
            self._madam_pixweights = None
            self._cache.destroy("pixweights")
        del nodecomm
        return