Example #1
def getSquarePixels(ra_pointing, dec_pointing, tileSide, nside, alpha = 0.2, color='#6c71c4'):

    area = tileSide*tileSide

    decCorners = (dec_pointing - tileSide / 2.0, dec_pointing + tileSide / 2.0)
 
    radecs = []
    for d in decCorners:
        if d > 90.:
            d = 180. - d
        elif d < -90.:
            d = -180 - d

        raCorners = (ra_pointing - (tileSide / 2.0) / np.cos(np.deg2rad(d)) , ra_pointing + (tileSide / 2.0) / np.cos(np.deg2rad(d)))

        for r in raCorners:
            if r > 360.:
                r = 720. - r
            elif r < 0.:
                r = 360. + r
            radecs.append([r,d])

    radecs = np.array(radecs)
    idx1 = np.where(radecs[:,0]>=180.0)[0] 
    idx2 = np.where(radecs[:,0]<180.0)[0]
    idx3 = np.where(radecs[:,0]>300.0)[0]
    idx4 = np.where(radecs[:,0]<60.0)[0]
    if (len(idx1)>0 and len(idx2)>0) and not (len(idx3)>0 and len(idx4)>0):
        alpha = 0.0

    idx1 = np.where((radecs[:,1]>=87.0) | (radecs[:,1]<=-87.0))[0]
    if len(idx1)>0:
        radecs = np.delete(radecs, idx1[0], 0)

    xyz = []
    for r, d in radecs:
        xyz.append(hp.ang2vec(r, d, lonlat=True))

    npts, junk = radecs.shape
    if npts == 4:
        xyz = [xyz[0], xyz[1],xyz[3], xyz[2]]
        ipix = hp.query_polygon(nside, np.array(xyz))
    else:    
        ipix = hp.query_polygon(nside, np.array(xyz))

    #idx1 = np.where((radecs[:,1]>=70.0) | (radecs[:,1]<=-70.0))[0]
    #idx2 = np.where((radecs[:,0]>300.0) | (radecs[:,0]<60.0))[0]
    #if (len(idx1) == 0) or (len(idx2) > 0):
    #    return [], [], [], []

    xyz = np.array(xyz)
    proj = hp.projector.MollweideProj(rot=None, coord=None) 
    x,y = proj.vec2xy(xyz[:,0],xyz[:,1],xyz[:,2])
    xy = np.zeros(radecs.shape)
    xy[:,0] = x
    xy[:,1] = y
    path = matplotlib.path.Path(xy)
    patch = matplotlib.patches.PathPatch(path, alpha=alpha, color=color, fill=True, zorder=3,)
    
    return ipix, radecs, patch, area
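A minimal usage sketch for the function above (hypothetical inputs; assumes the function is in scope together with the numpy, healpy, matplotlib.path and matplotlib.patches imports it relies on):

import numpy as np
import healpy as hp

# Hypothetical pointing: a 1 deg x 1 deg tile centred on RA = 30 deg, Dec = 10 deg
nside = 256
ipix, radecs, patch, area = getSquarePixels(30.0, 10.0, 1.0, nside)

# The summed area of the returned pixels should come out close to the nominal tile area
pix_area = hp.nside2pixarea(nside, degrees=True)
print(len(ipix) * pix_area, area)   # both roughly 1 deg^2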
Example #2
    def ipix_in_box(self, ra, dec, width, height, nside, nest):
        """finding the healpix indices of a given box   

        Parameters
        ----------
        ra, dec, width, height : sequence, or single number
           the center and size of the box
        nside :      int
            healpix nside parameter, must be a power of 2, less than 2**30
        nest :       bool
            healpix ordering option:
            if True, the healpix map uses `nest` ordering; otherwise `ring` is used

        Returns
        -------
        ipix_in_box : list
           a sequence of healpix indices    
        """

        v1_ra, v2_ra, v3_ra, v4_ra, v1_dec, v2_dec, v3_dec, v4_dec = \
            self.vertices(ra, dec, width, height)
        ra_vertices, dec_vertices = ([v1_ra, v2_ra, v4_ra, v3_ra],\
                                     [v1_dec, v2_dec, v4_dec, v3_dec])
        theta = 0.5 * np.pi - np.deg2rad(dec_vertices)
        phi = np.deg2rad(ra_vertices)
        xyz = hp.ang2vec(theta, phi)
        if self.is_seq(ra) and self.is_seq(dec) and \
           self.is_seq(width) and self.is_seq(height):
            ipix_fov_box = []
            for _xyz in xyz:
                ipix_fov_box.append(hp.query_polygon(nside, _xyz, nest=nest))
        else:
            ipix_fov_box = hp.query_polygon(nside, xyz, nest=nest)
        return ipix_fov_box
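The method above delegates the corner computation to a `self.vertices` helper that is not shown. A plausible standalone stand-in (hypothetical, not the project's actual implementation), returning the eight values in the order the method expects and widening the RA span by 1/cos(dec), could look like:

import numpy as np

def vertices(ra, dec, width, height):
    # corners of a width x height (deg) box centred on (ra, dec); the RA span is
    # divided by cos(dec) so the box keeps its angular width away from the equator
    half_ra = 0.5 * width / np.cos(np.deg2rad(dec))
    half_dec = 0.5 * height
    v1_ra, v1_dec = ra - half_ra, dec - half_dec
    v2_ra, v2_dec = ra + half_ra, dec - half_dec
    v3_ra, v3_dec = ra - half_ra, dec + half_dec
    v4_ra, v4_dec = ra + half_ra, dec + half_dec
    return v1_ra, v2_ra, v3_ra, v4_ra, v1_dec, v2_dec, v3_dec, v4_dec

With this corner layout the [v1, v2, v4, v3] shuffle in `ipix_in_box` traces the box perimeter, which is the ordering `query_polygon` needs for a convex polygon.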
Example #3
def make_hpmoll(d, hpx0, recov, band, f, p, sub, nside, argsMV):

    hpx = np.copy(hpx0)
    hpx[hpx0 != hp.UNSEEN] = hpx0[hpx0 != hp.UNSEEN] + 1
    hpx[hpx > recov] = hp.UNSEEN

    ra, dec = np.array([]), np.array([])

    for b in band:
        ra  = np.concatenate((ra,  d[d['band'] == b]['Ra']))
        dec = np.concatenate((dec, d[d['band'] == b]['Dec']))

    if len(ra) > 0:

        for r, d in zip(ra, dec):
            cells = p.copy()
            f.to_uv(cells, (r, d))

            for c in cells:
                poly = np.vstack([c['p0'], c['p1'], c['p2'], c['p3']])

                try:
                    ipix = hp.query_polygon(nside, poly, nest=True, inclusive=False)
                    hpx[ipix] = 0
                except:
                    continue

    argsMV['title'] = band
    argsMV['sub'] = sub

    hp.mollview(hpx, min=0, max=recov, **argsMV)

    return hpx
Example #4
    def extract(self, ra, dec, width, height, keep=0):
        if not self.valid:
            return
        if keep == 0:
            self.data = Table()

        polygon = SkyCoord( [ra-width/2., ra+width/2., ra+width/2., ra-width/2.],\
                [dec-height/2., dec-height/2., dec+height/2., dec+height/2.],\
                unit = 'deg' ).cartesian.get_xyz().T
        pix = hp.query_polygon(self.NSIDE, polygon, inclusive=True, nest=True)
        rangelist = self._get_tgasptyc_zone_file(pix)

        f = open(self.datafile, "r")
        lines = []
        for r in rangelist:
            f.seek(r[0] * self.linelength)
            lines.append(f.read((r[1] - r[0]) * self.linelength))
        f.close()
        content = ''.join(lines)

        reader = ascii.get_reader(Reader=ascii.Cds,
                                  fill_values=[('', 0)],
                                  readme=self.readmefile)
        reader.data.table_name = "tgasptyc.dat"
        catalog = reader.read(content)
        p = catalog[ np.where( ( catalog['RAdeg'] > ra - width/2. )\
                & ( catalog['RAdeg'] < ra + width/2. )\
                & ( catalog['DEdeg'] > dec - height/2. )\
                & ( catalog['DEdeg'] < dec + height/2. ) ) ]
        self.data = astropy.table.vstack([self.data, p])

        return (len(self.data))
Example #5
    def _get_box_pix(self, nside, ra_cent, dec_cent, width, height):
        """
        Get healpix pixels overlapping a box.

        Parameters
        ----------
        nside : `int`
        ra_cent : `float`
        dec_cent : `float`
        width : `float`
        height : `float`

        Returns
        -------
        pixels : `np.ndarray`
        """
        wid = width / np.cos(np.deg2rad(dec_cent))
        vertices = hp.ang2vec(np.array([ra_cent - wid/2.,
                                        ra_cent - wid/2.,
                                        ra_cent + wid/2.,
                                        ra_cent + wid/2.]),
                              np.array([dec_cent - height/2.,
                                        dec_cent + height/2.,
                                        dec_cent + height/2.,
                                        dec_cent - height/2.]),
                              lonlat=True)
        return hp.query_polygon(nside, vertices, nest=True)
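A rough sanity check of the same pattern outside the class, with made-up numbers: the summed area of the returned pixels should be close to width * height for a small box.

import numpy as np
import healpy as hp

nside = 512
width, height = 2.0, 1.0           # degrees
ra_cent, dec_cent = 150.0, -30.0   # degrees

wid = width / np.cos(np.deg2rad(dec_cent))
vertices = hp.ang2vec(np.array([ra_cent - wid/2., ra_cent - wid/2.,
                                ra_cent + wid/2., ra_cent + wid/2.]),
                      np.array([dec_cent - height/2., dec_cent + height/2.,
                                dec_cent + height/2., dec_cent - height/2.]),
                      lonlat=True)
pixels = hp.query_polygon(nside, vertices, nest=True)
print(len(pixels) * hp.nside2pixarea(nside, degrees=True), width * height)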
Example #6
    def add_poly(self, positions, depth=None):
        """
        Add a single polygon to this region.

        Parameters
        ----------
        positions : [[ra, dec], ...]
            Positions for the vertices of the polygon. The polygon needs to be convex and non-intersecting.

        depth : int
            The depth at which the polygon will be inserted.
        """
        if not (len(positions) >= 3):
            raise AssertionError(
                "A minimum of three coordinate pairs are required")

        if depth is None or depth > self.maxdepth:
            depth = self.maxdepth

        ras, decs = np.array(list(zip(*positions)))
        sky = self.radec2sky(ras, decs)
        pix = hp.query_polygon(2**depth,
                               self.sky2vec(sky),
                               inclusive=True,
                               nest=True)
        self.add_pixels(pix, depth)
        self._renorm()
        return
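Here the resolution is tied to the MOC-style depth via nside = 2**depth, and inclusive=True asks for every pixel that merely overlaps the polygon rather than only those whose centres fall inside it. A small self-contained sketch of both points (hypothetical coordinates):

import numpy as np
import healpy as hp

depth = 8
nside = hp.order2nside(depth)          # equivalent to 2**depth

# a small triangle on the sky (lon, lat in degrees) converted to unit vectors
vertices = hp.ang2vec(np.array([10.0, 12.0, 11.0]),
                      np.array([20.0, 20.0, 22.0]), lonlat=True)

strict = hp.query_polygon(nside, vertices, inclusive=False, nest=True)
padded = hp.query_polygon(nside, vertices, inclusive=True, nest=True)

# the inclusive query returns a superset of the strict one
print(len(strict), len(padded), set(strict).issubset(set(padded)))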
Example #7
def main_map(fn='all_fields.npy'):
    nside = 1024
    m = np.zeros(hp.nside2npix(nside))

    # opsim log
    log = np.load(fn)
    mjd_min = DateTimeFrom('2022-01-01').mjd
    mjd_max = DateTimeFrom('2023-01-01').mjd
    idx = (log['band'] == 'LSSTPG::i')
    idx &= (log['mjd'] >= mjd_min)
    idx &= (log['mjd'] <= mjd_max)
    log = log[idx]
    coords = np.vstack(
        (log['Ra'] * DEGREE_PER_RADIAN, log['Dec'] * DEGREE_PER_RADIAN)).T
    print coords.shape

    f = FocalPlane()
    p = f.pixellize(1, 1)

    for i, (r, d) in enumerate(coords):
        if i % 100 == 0:
            print i, r, d
        cells = p.copy()
        f.to_uv(cells, (r, d))

        for c in cells:
            poly = np.vstack([c['p0'], c['p1'], c['p2'], c['p3']])
            ipix = hp.query_polygon(nside, poly, nest=True)
            m[ipix] += 1.

    return m, p
Example #8
def write_k2_moc(campaign=0, norder_moc=NORDER_MOC, output_fn=None):
    if output_fn is None:
        fieldinfo = getFieldInfo(campaign)
        if "preliminary" in fieldinfo:
            output_fn = "../moc/k2-footprint-c{:02d}-proposed.moc".format(campaign)
        else:
            output_fn = "../moc/k2-footprint-c{:02d}.moc".format(campaign)
    # Obtain the footprint corners in polar coordinates
    log.info("Preparing footprint polygons for C{}".format(campaign))
    polygons = []
    for _, channel in FOOTPRINT["c{}".format(campaign)]["channels"].items():
        polygon = [np.pi/2. - np.radians(channel["corners_dec"]),
                   np.radians(channel["corners_ra"])]
        polygons.append(polygon)
    # Obtain the healpix diamonds that cover the polygons entirely
    # and add these to a `MOC` object
    log.info("Converting polygons into healpix format")
    moc = mocpy.MOC(moc_order=norder_moc)
    for p in polygons:
        pix_list = hp.query_polygon(2**norder_moc,
                                    hp.ang2vec(p[0], p[1]),
                                    inclusive=True, nest=True)
        for pix in pix_list:
            moc.add_pix(norder_moc, pix)
    # Finally, write the resulting MOC file to disk
    log.info("Writing {}".format(output_fn))
    moc.plot()  # IMPORTANT! moc.write is corrupt if plot is not called first
    moc.write(output_fn)
Example #9
    def enclosed_pixel_indices(self, nside_out):

        # Sanity
        if nside_out < self.nside:
            raise ValueError(
                "Can't get enclosed pixel indices for lower resolution pixels!"
            )

        if len(self.__epi) == 0:

            # Start with the central pixel, in case the size of the FOV is <= the pixel size
            self.__epi = np.asarray([
                hp.ang2pix(self.nside, 0.5 * np.pi - self.__coord.dec.radian,
                           self.__coord.ra.radian)
            ])

            pixel_xyz_vertices = hp.boundaries(self.nside, pix=self.index)
            internal_pix = hp.query_polygon(nside_out,
                                            pixel_xyz_vertices,
                                            inclusive=False)

            if len(internal_pix) > 0:
                self.__epi = internal_pix

        return self.__epi
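The pattern used above, pixel corner vectors from hp.boundaries fed into query_polygon, can be sketched outside the class. hp.boundaries returns the corners with shape (3, 4*step), so they generally need transposing into the (N, 3) layout query_polygon expects (made-up pixel numbers below):

import numpy as np
import healpy as hp

nside_coarse, nside_fine = 64, 256
coarse_pix = 1234

# corner vectors of the coarse pixel, transposed to (4, 3)
corners = hp.boundaries(nside_coarse, coarse_pix, step=1).T
fine_pix = hp.query_polygon(nside_fine, corners)

# roughly (nside_fine / nside_coarse)**2 fine pixels fall inside each coarse
# pixel; the count can differ slightly because the four pixel edges are
# approximated by geodesics
print(len(fine_pix), (nside_fine // nside_coarse) ** 2)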
Example #10
def readhealcat_polygon(ra,dec,nside=32,outfile=None,path='/line12/Pan-STARRS/chunks-qz-star-v2/',silent=True,prefix='ps1-',postfix='.fits'):
    """
    read the healpix catalog within a spherical polygon
    ra, dec: arrays of the polygon vertices, in degrees
    """
    
    ra=np.deg2rad(ra)
    dec=np.deg2rad(dec)
    if len(ra) != len(dec) or len(ra) < 3:
        raise ValueError('ra and dec must have the same size, and at least 3 vertices are required to define a polygon')
    x,y,z=c.spherical_to_cartesian(1.0,dec,ra)
    
    vertices=np.array([x,y,z]).transpose()
    pix=query_polygon(nside, vertices, inclusive=True)
    npix=pix.size
    nd=len(str(healpy.nside2npix(nside)))
    if not silent:
        print('Total '+str(npix)+'  healpix pixels:')
    cat=[]
    for ipix in pix:
        ipixstr='{ipix:0{width}}'.format(ipix=ipix,width=nd)
        catname=os.path.join(path,prefix+ipixstr+postfix)
        if not os.path.isfile(catname):
            if not silent: print(catname + ' does not exist!')
            continue
        if not silent:
            print('reading '+catname)
        cat.append(table.Table.read(catname,format='fits'))
    totalcat = []
    if cat != []:
        totalcat=table.vstack(cat)
        if outfile is not None:
            totalcat.write(outfile,format='fits',overwrite=True)
    return totalcat
Example #11
    def pix_in_fields(self):
        """
        Returns the array of pixel indices contained within the fields.
        """
        return np.array([
            hp.query_polygon(self.nside, coords)
            for coords in self.field_coords_array
        ])
Example #12
def makeGauss(nside, ra, dec, amp,
              varparam=np.sqrt(2 * np.pi)):  #bounds=None):#radius=6447797.0):
    print('RA length:' + str(len(ra)))
    print('dec length:' + str(len(dec)))
    print('amp length:' + str(len(amp)))
    raMin = min(ra) - 2.0
    raMax = max(ra) + 2.0
    decMin = min(dec) - 2.0
    decMax = max(dec) + 2.0
    #m = np.arange(hp.nside2npix(nside))
    vertices = hp.ang2vec([raMin, raMin, raMax, raMax],
                          [decMin, decMax, decMax, decMin],
                          lonlat=True)
    m = hp.query_polygon(nside, vertices)
    m_ra, m_dec = hp.pix2ang(nside, m, lonlat=True)
    m_amp = np.zeros_like(m, dtype='float64')
    #ra = ra*1.0
    #dec = dec*1.0
    numpix = m.size

    #if True: #bounds != None:
    #    raMin = min(ra)-2.0
    #    raMax = max(ra)+2.0
    #    decMin = min(dec)-2.0
    #    decMax = min(dec)+2.0
    # Compute distance of center of data to each pixel
    # centDist = haversine(np.full_like(m,raCent),np.full_like(m,decCent),m_ra,m_dec)
    # Get indices of pixels to delete
    #    toDelete = [ k for (i,j,k) in zip(m_ra,m_dec,m) if i < raMin or i > raMax or j < decMin or j > decMax ]
    #    #m = np.delete(m, toDelete)
    #    m_ra = np.delete(m_ra, toDelete)
    #    m_dec = np.delete(m_dec, toDelete)
    #    m_amp = np.delete(m_amp, toDelete)
    #    numdeleted = str(len(toDelete))
    #    print(numdeleted + ' uninteresting pixels deleted of '+str(numpix))

    ### TO IMPLEMENT: Check ra, dec, amp are of same size
    #if isinstance(ra, np.ndarray):
    for rai, deci, ampi in itertools.izip(ra, dec, amp):
        # Used to convert from ra/dec to pixel to vec; should not be necessary
        #nearpix = hp.pix2vec(nside,hp.ang2pix(nside,rai,deci,lonlat=True))
        nearpix = hp.ang2vec(rai, deci, lonlat=True)
        # Calculate Gaussian on pixels within following radius (in degrees)
        radius = 3.0 / 60.0  # Hard-coded; remove eventually
        pxls = hp.query_disc(nside, nearpix, radius * np.pi / 180.0)
        indices = np.searchsorted(m, pxls)
        near_ra, near_dec = hp.pix2ang(nside, pxls, lonlat=True)
        #print('Using nearest '+str(len(near_ra))+' pixels to source.')
        #near_ra = m_ra[pxls]
        #near_dec = m_dec[pxls]
        m_dist = np.zeros_like(near_ra)
        var = varparam * 1.0
        m_dist = haversine(np.full_like(near_ra, rai),
                           np.full_like(near_ra, deci), near_ra, near_dec)
        m_amp[indices] += computeGauss(ampi, m_dist, var)
    print('makeGauss output length: ' + str(m_ra.size))
    return m_ra, m_dec, m_dist, m_amp
Example #13
def integrate_intensity_map(Imap,nside,latmin=-2,latmax=2. ,nsteps_long=500,rad_units=False,planck_map=False):
	"""
	Compute the integral of the intensity map along latitude and longitude, to compare the observed
	intensity map with the model one.
	To check consistency of the model we compute the integral as in eqs.(6) and (7) of
	`Puglisi+ 2017 <http://arxiv.org/abs/1701.07856>`_.

	*Parameters*

	- `Imap`:{array}
		intensity map
	- `nside`: {int}
		:mod:`healpy` gridding parameter
	- `latmin`, `latmax`:{double}
		minimum and maximum latitudes in `degrees` over which to perform the integral (default :math:`\pm 2\, deg`);
		if the angles are in radians, set `rad_units` to `True`.
	- `nsteps_long`:{int}
		number of longitudinal bins, (default 500)
	- `planck_map`:{bool}
		if set to `True`, it sets to zero all the `healpy.UNSEEN` masked pixels of the map
		(useful when dealing with observational maps).

	**Returns**

	- `I_tot`:{double}
		integration of `I_l` over :math:`\ell \in [0, 2\pi]`.
	- `I_l` :{array}
		latitude integration within the set interval :math:`[b_{min}, b_{max}]`

	"""
	if planck_map:
		arr=np.ma.masked_equal(Imap,hp.UNSEEN)
		Imap[arr.mask]=0.


	if not rad_units:
		latmin=np.pi/2.+(np.deg2rad(latmin))
		latmax=np.pi/2.+(np.deg2rad(latmax))

	nbins_long=nsteps_long-1
	long_edges=np.linspace(0.,2*np.pi,num=nsteps_long)
	long_centr=[.5*(long_edges[i]+ long_edges[i+1]) for i in range(nbins_long)]
	listpix=[]
	for i in range(nbins_long):
		v=[ hp.ang2vec(latmax, long_edges[i]),
			hp.ang2vec(latmax, long_edges[i+1]),
			hp.ang2vec(latmin, long_edges[i+1]),
			hp.ang2vec(latmin, long_edges[i])]
		listpix.append(hp.query_polygon(nside,v))
	delta_b=pixelsize(nside,arcmin=False)
	delta_l=2*np.pi/nbins_long

	I_l=[sum(Imap[l])*delta_b for l in listpix ]
	Itot= sum(I_l)*delta_l
	return Itot,I_l
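A usage sketch on synthetic data, assuming the function above (and the `pixelsize` helper it calls) is in scope: integrating a map that is 1 everywhere over |b| < 2 deg.

import numpy as np
import healpy as hp

nside = 128
Imap = np.ones(hp.nside2npix(nside))

Itot, I_l = integrate_intensity_map(Imap, nside, latmin=-2., latmax=2., nsteps_long=500)
print(Itot, len(I_l))   # I_l holds nsteps_long - 1 longitude bins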
Example #14
def query_polygon(RA_right, RA_left, DEC_bottom, DEC_top, NSIDE):
    
    Ra_arr = np.array([RA_left, RA_right, RA_right, RA_left])  # arrays of coordinates defining the vertices of the rectangle on the sphere
    Dec_arr = np.array([DEC_top, DEC_top, DEC_bottom, DEC_bottom])
    
    astropy_array = astropy.coordinates.spherical_to_cartesian(1, np.deg2rad(Dec_arr), np.deg2rad(Ra_arr))
    vertex_array = np.array(astropy_array)  # Cartesian coordinates of the rectangle's vertices
    
    heal_indexes = hp.query_polygon(NSIDE, vertex_array.T, nest=True)
    
    return(heal_indexes)
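A usage sketch for the wrapper above (hypothetical box edges, in degrees; assumes the function and its healpy/astropy imports are in scope):

import healpy as hp

NSIDE = 256
pix = query_polygon(RA_right=31.0, RA_left=29.0, DEC_bottom=-0.5, DEC_top=0.5, NSIDE=NSIDE)

# nested-ordering pixel indices covering roughly a 2 x 1 deg box
print(len(pix), len(pix) * hp.nside2pixarea(NSIDE, degrees=True))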
Example #15
def natural_order(nside, ind, subn):
    assert nside <= subn
    if subn == nside:
        return np.array([ind])
    sub = hp.query_polygon(2 * nside,
                           hp.boundaries(nside, ind, nest=True).T,
                           nest=True)
    assert len(sub) == 4

    r = [natural_order(nside * 2, s, subn) for s in np.sort(sub)]
    return np.vstack((np.hstack((r[0], r[1])), np.hstack((r[2], r[3]))))
Example #16
    def get_tile_pixels(self, nside, corners):
        """Return pixels contained in a tile defined by its corner coordinates"""

        #conversion to vectors using healpy function
        xyz = []
        for i in range(len(corners)):
            xyz.append(hp.ang2vec(corners[i][0], corners[i][1], lonlat=True))
    
        tile_pixels = hp.query_polygon(nside, np.array(xyz), inclusive=False)

        return tile_pixels
Example #17
    def __ipix_sum(self, ra_vertices, dec_vertices):
        """Return the ipix sum inside a polygon."""

        theta = 0.5 * np.pi - np.deg2rad(dec_vertices)
        phi = np.deg2rad(ra_vertices)
        xyz = hp.ang2vec(theta, phi)

        ipix_poly = hp.query_polygon(self.nside, xyz)

        ipix_sum_polygon = self.prob[ipix_poly].sum()

        return ipix_sum_polygon
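The same computation without the class plumbing, on a synthetic, flat, normalised "skymap" (hypothetical box vertices):

import numpy as np
import healpy as hp

nside = 64
prob = np.ones(hp.nside2npix(nside))
prob /= prob.sum()                            # flat, normalised probability map

ra_vertices = np.array([30., 40., 40., 30.])  # degrees
dec_vertices = np.array([-5., -5., 5., 5.])

theta = 0.5 * np.pi - np.deg2rad(dec_vertices)
phi = np.deg2rad(ra_vertices)
xyz = hp.ang2vec(theta, phi)

ipix_poly = hp.query_polygon(nside, xyz)
print(prob[ipix_poly].sum())                  # fraction of the sky inside the box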
Example #18
    def probability_inside_box(self, infile, ra_vertices, dec_vertices):
        """Return the probability inside a polygon."""

        prob = hp.read_map(self.skymap, verbose=False)

        theta = 0.5 * np.pi - np.deg2rad(dec_vertices)
        phi = np.deg2rad(ra_vertices)
        xyz = hp.ang2vec(theta, phi)

        ipix_poly = hp.query_polygon(self.nside, xyz)

        probability_inside_polygon = prob[ipix_poly].sum()

        return probability_inside_polygon
Example #19
    def enclosed_pixel_indices(self):

        if len(self.__enclosed_pixel_indices) == 0:

            # Start with the central pixel, in case the size of the FOV is <= the pixel size
            self.__enclosed_pixel_indices = np.asarray(
                [hp.ang2pix(self.nside, 0.5 * np.pi - self.dec_rad, self.ra_rad)])
            internal_pix = hp.query_polygon(self.nside, self.corner_xyz, inclusive=False)

            # However, if there is at least one pixel returned from query_polygon, use that array
            if len(internal_pix) > 0:
                self.__enclosed_pixel_indices = internal_pix

        return self.__enclosed_pixel_indices
Example #20
    def probability_inside_box(self, infile, ra_vertices, dec_vertices):
        """Return the probability inside a polygon."""

        prob = hp.read_map(self.skymap, verbose=False)

        theta = 0.5 * np.pi - np.deg2rad(dec_vertices)
        phi = np.deg2rad(ra_vertices)
        xyz = hp.ang2vec(theta, phi)

        ipix_poly = hp.query_polygon(self.nside, xyz)

        probability_inside_polygon = prob[ipix_poly].sum()

        return probability_inside_polygon
Example #21
def warp_source_files(fnames, work_dir, pixel_order, tile_order):
    tile_nside = healpy.order2nside(tile_order)
    for fname in fnames:
        with afits.open(fname) as hdul:
            src = Source(find_image_hdu(hdul))
            tile_indices = healpy.query_polygon(tile_nside,
                                                ad2xyz(*src.polygon).T,
                                                nest=True)
            logging.info('{} tiles'.format(len(tile_indices)))

            def warp(tile_index):
                logging.info('warping {}:{}...'.format(fname, tile_index))
                src.warp(work_dir, tile_order, tile_index, pixel_order)

            parallel.map(warp, tile_indices)
Example #22
def pixInTile(tile_corners, nside):
    """Return the pixels inside a Tile."""

    ra = np.array([
        tile_corners[0][0], tile_corners[1][0], tile_corners[3][0],
        tile_corners[2][0]
    ])
    dec = np.array([
        tile_corners[0][1], tile_corners[1][1], tile_corners[3][1],
        tile_corners[2][1]
    ])
    phi, theta = ra * np.pi / 180., (90. - dec) * np.pi / 180.
    vertices = hp.ang2vec(theta, phi)
    pixIn = hp.query_polygon(nside, vertices, inclusive=False, nest=False)
    return pixIn
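The corner shuffle above (indices 0, 1, 3, 2) makes the vertices trace the tile's perimeter; fed in an order that crosses itself, healpy typically rejects the polygon as non-convex. A small illustration with made-up corners:

import numpy as np
import healpy as hp

nside = 64
# corners of a ~2 deg square, listed so consecutive vertices share an edge
ra = np.array([10., 12., 12., 10.])
dec = np.array([20., 20., 22., 22.])
phi, theta = np.deg2rad(ra), np.deg2rad(90. - dec)
vertices = hp.ang2vec(theta, phi)

print(len(hp.query_polygon(nside, vertices)))

# swapping two vertices so the outline crosses itself usually raises the
# (rather uninformative) RuntimeError mentioned in Example #34
try:
    hp.query_polygon(nside, vertices[[0, 2, 1, 3]])
except RuntimeError as err:
    print("rejected:", err)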
Example #23
def get_quadrant_ipix(nside, ra, dec):

    ccd_coords = get_decam_ccds(ra, dec)

    skyoffset_frames = SkyCoord(ra, dec, unit=u.deg).skyoffset_frame()
    ccd_coords_icrs = SkyCoord(
        *np.tile(ccd_coords[:, np.newaxis, ...], (1, 1, 1)),
        unit=u.deg,
        frame=skyoffset_frames[:, np.newaxis, np.newaxis]).transform_to(ICRS)
    ccd_xyz = np.moveaxis(ccd_coords_icrs.cartesian.xyz.value, 0, -1)[0]

    ipixs = []
    for subfield_id, xyz in enumerate(ccd_xyz):
        ipix = hp.query_polygon(nside, xyz)
        ipixs.append(ipix.tolist())
    return ipixs
Example #24
def readhealcat_rectangle(ra,dec,width=(0.6,0.6),nside=32,outfile=None,path='/line12/Pan-STARRS/chunks-qz-star-v2/',silent=True,prefix='ps1-',postfix='.fits',slim=False):
    """
    read the healpix catalog within a spherical rectangle
    ra, dec: center of the rectangle, in degrees
    """
    if np.isscalar(width) or len(width) == 1:
        width=(width,width)
    ra=np.deg2rad(ra)
    dec=np.deg2rad(dec)
    wra=np.deg2rad(width[0]/np.cos(dec))
    wdec=np.deg2rad(width[1])
    ra1=ra-wra/2.0
    dec1=dec-wdec/2.0
    ra2=ra-wra/2.0
    dec2=dec+wdec/2.0
    ra3=ra+wra/2.0
    dec3=dec+wdec/2.0
    ra4=ra+wra/2.0
    dec4=dec-wdec/2.0
    x,y,z=c.spherical_to_cartesian(1.0,[dec1,dec2,dec3,dec4],[ra1,ra2,ra3,ra4])
    
    vertices=np.array([x,y,z]).transpose()
    pix=query_polygon(nside, vertices, inclusive=True)
    npix=pix.size
    nd=len(str(healpy.nside2npix(nside)))
    if not silent:
        print('Total '+str(npix)+'  healpix pixels:')
    cat=[]
    for ipix in pix:
        ipixstr='{ipix:0{width}}'.format(ipix=ipix,width=nd)
        catname=os.path.join(path,prefix+ipixstr+postfix)
        if not os.path.isfile(catname):
            if not silent: print(catname + ' does not exist!')
            continue
        if not silent:
            print('reading '+catname)
        cat.append(table.Table.read(catname,format='fits'))
    totalcat = []
    if cat != []:
        totalcat=table.vstack(cat)
        if slim: 
            mask=(totalcat['RA'] > np.rad2deg(ra1)) & (totalcat['RA'] < np.rad2deg(ra3)) & (totalcat['DEC'] > np.rad2deg(dec1)) & (totalcat['DEC'] < np.rad2deg(dec3))
            totalcat=totalcat[mask]
        if totalcat != [] and outfile is not None:
            totalcat.write(outfile,format='fits',overwrite=True)
    return totalcat
Example #25
    def add_poly(self,positions,depth=None):
        """
        Add a single polygon to this region
        :param positions: list of [ (ra,dec), ... ] positions that form the polygon
        :param depth: The depth at which we wish to represent the polygon (forced to be <= maxdepth)
        :return: None
        """
        assert len(positions)>=3, "A minimum of three coordinate pairs are required"

        if depth==None or depth>self.maxdepth:
            depth=self.maxdepth

        ras,decs =zip(*positions)
        sky=self.radec2sky(ras,decs)
        pix=hp.query_polygon(2**depth,self.sky2vec(sky),inclusive=True,nest=True)
        self.add_pixels(pix,depth)
        self._renorm()
        return
Example #26
def modify_map(hpmap, value_to_fill):
    global fig, cid, coords
    coords = []
    fig = plt.figure()
    hp.mollview(hpmap, fig=fig.number)
    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    raw_input()
    print coords
    line, = plt.plot(coords[0][0], coords[0][1])  # empty line
    linebuilder = LineBuilder(line)
    raw_input()
    coords = np.array(coords)
    coords = np.delete(coords, -1, 0)
    vec = transform_coords(coords)
    nside = hp.npix2nside(len(hpmap))
    to_mask = hp.query_polygon(nside, vec)
    map_out = np.copy(hpmap)
    map_out[to_mask] = value_to_fill
    return map_out
Example #27
def get_quadrant_ipix(nside, field_id, ra, dec):

    ipixs = []

    tile = QuadProb(field_id, ra, dec)
    Z = ZTFtile(ra, dec)
    quad_cents_RA, quad_cents_Dec = Z.quadrant_centers()
    quadIndices = np.arange(64)
    for quadrant_id in quadIndices:
        thisQuad = tile.getWCS(quad_cents_RA[quadrant_id],
                               quad_cents_Dec[quadrant_id])
        footprint = thisQuad.calc_footprint(axes=tile.quadrant_size)

        xyz = []
        for r, d in footprint:
            xyz.append(hp.ang2vec(r, d, lonlat=True))
        ipix = hp.query_polygon(nside, np.array(xyz)).tolist()
        ipixs.append(ipix)

    return ipixs
Example #28
def get_quadrant_ipix(nside, ra, dec, subfield_ids=None):

    quadrant_coords = get_ztf_quadrants()

    skyoffset_frames = SkyCoord(ra, dec, unit=u.deg).skyoffset_frame()
    quadrant_coords_icrs = SkyCoord(
        *np.tile(quadrant_coords[:, np.newaxis, ...], (1, 1, 1)),
        unit=u.deg,
        frame=skyoffset_frames[:, np.newaxis, np.newaxis]).transform_to(ICRS)
    quadrant_xyz = np.moveaxis(quadrant_coords_icrs.cartesian.xyz.value, 0,
                               -1)[0]

    ipixs = []
    for subfield_id, xyz in enumerate(quadrant_xyz):
        if not subfield_ids is None:
            if not subfield_id in subfield_ids:
                continue
        ipix = hp.query_polygon(nside, xyz)
        ipixs.append(ipix.tolist())
    return ipixs
Example #29
def gen_catalog_bin(ngal):
    """
    Generate a random catalog for a single bin using healpy routines
    
    :param ngal: The number of galaxies per pixel
    :type ngal: :class:`lsssys.Map`
    :return: The right ascension and declination of the generated catalog
    :rtype: ``tuple`` of 2 :class:`numpy.ndarray` of ``float``
    """
    pix = np.where(ngal.data > 0.)[0]
    n_gal = ngal.data[pix].astype(int)
    highres_nside = ngal.nside * next_power_of_2(2 * n_gal.max())
    pix_nest = hp.ring2nest(ngal.nside, pix)
    corners = np.array(
        [c.T for c in hp.boundaries(ngal.nside, pix_nest, nest=True)])
    high_res_pix = np.concatenate([
        np.random.choice(hp.query_polygon(highres_nside, corn, nest=True), n)
        for corn, n in zip(corners, n_gal)
    ])
    hpix_highres = hu.HealPix("nest", highres_nside)
    return hpix_highres.pix2eq(high_res_pix)
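gen_catalog_bin leans on project helpers (lsssys.Map, next_power_of_2, hu.HealPix). Its core trick, drawing random high-resolution pixels inside a coarse pixel and turning them into coordinates, can be sketched with healpy and numpy alone (made-up pixel numbers):

import numpy as np
import healpy as hp

nside, nside_hi = 64, 1024
coarse_pix_nest = 4321        # a NESTED-ordering pixel index
n_points = 50                 # random positions to draw inside it

# corner vectors of the coarse pixel, transposed to the (N, 3) shape
# query_polygon expects
corners = hp.boundaries(nside, coarse_pix_nest, nest=True).T
hi_pix = hp.query_polygon(nside_hi, corners, nest=True)

# pick random high-resolution pixels and use their centres as sky positions
chosen = np.random.choice(hi_pix, n_points)
ra, dec = hp.pix2ang(nside_hi, chosen, nest=True, lonlat=True)
print(ra[:5], dec[:5])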
Example #30
def readps1_rectangle(ra,dec,width=(0.6,0.6),outfile=None,path='/line12/Pan-STARRS/chunks-qz-star-v2/',silent=True):
    """
    read the PS1 catalog within a spherical rectangle
    ra, dec: center of the rectangle, in degrees
    """
    if np.isscalar(width) or len(width) == 1:
        width=(width,width)
    ra=np.deg2rad(ra)
    dec=np.deg2rad(dec)
    wra=np.deg2rad(width[0]/np.cos(dec))
    wdec=np.deg2rad(width[1])
    ra1=ra-wra/2.0
    dec1=dec-wdec/2.0
    ra2=ra-wra/2.0
    dec2=dec+wdec/2.0
    ra3=ra+wra/2.0
    dec3=dec+wdec/2.0
    ra4=ra+wra/2.0
    dec4=dec-wdec/2.0
    x,y,z=c.spherical_to_cartesian(1.0,[dec1,dec2,dec3,dec4],[ra1,ra2,ra3,ra4])
    
    vertices=np.array([x,y,z]).transpose()
    pix=query_polygon(32, vertices, inclusive=True)
    npix=pix.size
    if not silent:
        print('Total '+str(npix)+'  healpix pixels:')
    cat=[]
    for ipix in pix:
        catname=os.path.join(path,'ps1-'+'%5.5d'%ipix+'.fits')
        if not os.path.isfile(catname):
            continue
        if not silent:
            print('reading '+catname)
        cat.append(table.Table.read(catname,format='fits'))
    totalcat = []
    if cat != []:
        totalcat=table.vstack(cat)
        if outfile is not None:
            totalcat.write(outfile,format='fits',overwrite=True)
    return totalcat
Example #31
    def add_poly(self, positions, depth=None):
        """
        Add a single polygon to this region
        :param positions: list of [ (ra,dec), ... ] positions that form the polygon
        :param depth: The depth at which we wish to represent the polygon (forced to be <= maxdepth)
        :return: None
        """
        assert len(
            positions) >= 3, "A minimum of three coordinate pairs are required"

        if depth == None or depth > self.maxdepth:
            depth = self.maxdepth

        ras, decs = zip(*positions)
        sky = self.radec2sky(ras, decs)
        pix = hp.query_polygon(2**depth,
                               self.sky2vec(sky),
                               inclusive=True,
                               nest=True)
        self.add_pixels(pix, depth)
        self._renorm()
        return
Example #32
    def extract(self, ra, dec, width, height, keep=0):
        if not self.valid:
            return
        if keep == 0:
            self.data = Table()

        polygon = SkyCoord( [ra-width/2., ra+width/2., ra+width/2., ra-width/2.],\
                [dec-height/2., dec-height/2., dec+height/2., dec+height/2.],\
                unit = 'deg' ).cartesian.get_xyz().T
        pix = hp.query_polygon(self.NSIDE, polygon, inclusive=True, nest=True)
        source_id_min = pix * 2**35 * 4**(12 - self.LEVEL)
        source_id_max = (pix + 1) * 2**35 * 4**(12 - self.LEVEL)
        filelist = self._get_gaia1_zone_file(source_id_min, source_id_max)

        for f in filelist:
            catalog = Table.read(f['col0'], format=self.fmt)
            p = catalog[ np.where( ( catalog['ra'] > ra - width/2. )\
                    & ( catalog['ra'] < ra + width/2. )\
                    & ( catalog['dec'] > dec - height/2. )\
                    & ( catalog['dec'] < dec + height/2. ) ) ]
            self.data = astropy.table.vstack([self.data, p])
        return (len(self.data))
Example #33
def gen_map_polygon(vertices, nside):
    '''Generates a Healpix map with the only non-zero values in the pixels
    inside the input polygon

    Parameters
    ----------
    vertices : array-like with shape (n,2) or (2,n)
        The lon,lat vertices of the polygon in degrees. n >= 3

    nside : int
        The nside of the output Healpix map

    Returns
    -------
    hpx_map : array-like
        A Healpix map with non-zero values inside the polygon
    '''

    vertices = np.array(vertices)

    if vertices.shape[1] != 2:
        vertices = np.transpose(vertices)
        if vertices.shape[1] != 2:
            raise ValueError("Need a n x 2 or 2 x n input vertices array")

    thetas = np.pi / 2 - np.radians(vertices[:, 1])
    phis = np.radians(vertices[:, 0])

    vecs = H.ang2vec(thetas, phis)

    ipix = H.query_polygon(nside, vecs)

    hpx_map = np.zeros(H.nside2npix(nside))
    hpx_map[ipix] = 1.0

    return hpx_map
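A usage sketch with hypothetical vertices (lon, lat in degrees), assuming the function above is in scope and healpy is imported as H, as it expects:

import healpy as H

vertices = [[40., -5.], [60., -5.], [60., 10.], [40., 10.]]
hpx_map = gen_map_polygon(vertices, nside=128)

# number of flagged pixels and the sky area they cover in deg^2
print(int(hpx_map.sum()), hpx_map.sum() * H.nside2pixarea(128, degrees=True))
# H.mollview(hpx_map)   # uncomment to visualise the footprint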
Example #34
    def get_pixels(self, *, nside):
        """
        get the pixels associated with this polygon

        Parameters
        ----------
        nside: int
            Nside for the pixels
        """
        try:

            pixels = hp.query_polygon(
                nside,
                self._vertices,
                nest=True,
                inclusive=False,
            )

        except RuntimeError:
            # healpy raises a RuntimeError with no information attached in the
            # string, but this seems to always be a non-convex polygon
            raise ValueError('polygon is not convex: %s' % repr(self))

        return pixels
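query_polygon only accepts convex polygons, which is why the opaque RuntimeError above is translated into a ValueError. When a footprint is simple but concave, one workaround (a general sketch, not part of this class) is to split it into triangles at the reflex vertex and take the union of the per-triangle queries:

import numpy as np
import healpy as hp

nside = 256
# a simple but concave quadrilateral (lon, lat in degrees, hypothetical);
# the third vertex is the reflex corner, so query_polygon would reject it whole
lon = np.array([0.0, 4.0, 1.0, 0.0])
lat = np.array([0.0, 0.0, 1.0, 4.0])
vecs = hp.ang2vec(lon, lat, lonlat=True)

# split along the diagonal through the reflex vertex; triangles are always convex
tri1 = hp.query_polygon(nside, vecs[[0, 1, 2]])
tri2 = hp.query_polygon(nside, vecs[[0, 2, 3]])
pixels = np.union1d(tri1, tri2)
print(len(tri1), len(tri2), len(pixels))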
Example #35
def gen_map_polygon(vertices, nside):
    '''Generates a Healpix map with the only non-zero values in the pixels
    inside the input polygon

    Parameters
    ----------
    vertices : array-like with shape (n,2) or (2,n)
        The lon,lat vertices of the polygon in degrees. n >= 3

    nside : int
        The nside of the output Healpix map

    Returns
    -------
    hpx_map : array-like
        A Healpix map with non-zero values inside the polygon
    '''

    vertices = np.array(vertices)

    if vertices.shape[1] != 2:
        vertices = np.transpose(vertices)
        if vertices.shape[1] != 2:
            raise ValueError("Need a n x 2 or 2 x n input vertices array")

    thetas = np.pi/2 - np.radians(vertices[:, 1])
    phis = np.radians(vertices[:, 0])

    vecs = H.ang2vec(thetas, phis)

    ipix = H.query_polygon(nside, vecs)

    hpx_map = np.zeros(H.nside2npix(nside))
    hpx_map[ipix] = 1.0

    return hpx_map
Example #36
        # check that the cutout is entirely within the quadrant
        patchFits = np.sum(latEdges < 0.) + np.sum(latEdges > 90.) + np.sum(
            lonEdges < 0.) + np.sum(lonEdges > 90.)
        patchFits = patchFits == 0.

        # if it fits
        #if patchFits and nPatches>=17:
        if patchFits:
            nPatches += 1

            #if nPatches>=17:
            #if nPatches>=30:

            # plot the footprint
            xyz = hp.ang2vec(lonEdges, latEdges, lonlat=True)
            I = hp.query_polygon(nSide, xyz)
            hMap[I] += 1.

            # extract the cutouts
            pos = np.array([lonCenter, latCenter, 0.])
            # Official T map
            cutSehgalTMap = hp.visufunc.cartview(sehgalTMap,
                                                 rot=pos,
                                                 lonra=lonRange,
                                                 latra=latRange,
                                                 xsize=xSize,
                                                 ysize=ySize,
                                                 return_projected_map=True,
                                                 norm='hist')
            plt.close()
            # T map
Example #37
def create_all():
    db.create_all(bind=None)
    telescopes = ["ZTF", "Gattini", "DECam", "KPED", "GROWTH-India"]
    available_filters = {
        "ZTF": ["g", "r", "i"],
        "Gattini": ["J"],
        "DECam": ["g", "r", "i", "z"],
        "KPED": ["U", "g", "r", "i"],
        "GROWTH-India": ["g", "r", "i", "z"]
    }

    plan_args = {
        'ZTF': {
            'filt': ['g', 'r', 'g'],
            'exposuretimes': [300.0, 300.0, 300.0],
            'doReferences': True,
            'doUsePrimary': True,
            'doBalanceExposure': False,
            'doDither': False,
            'usePrevious': False,
            'doCompletedObservations': False,
            'doPlannedObservations': False,
            'cobs': [None, None],
            'schedule_type': 'greedy',
            'filterScheduleType': 'block',
            'airmass': 2.5,
            'schedule_strategy': 'tiling',
            'mindiff': 30. * 60.,
            'doMaxTiles': False,
            'max_nb_tiles': 1000,
            'doRASlice': True,
            'raslice': [0.0, 24.0],
        },
        'DECam': {
            'filt': ['g', 'z'],
            'exposuretimes': [25.0, 25.0],
            'doReferences': True,
            'doUsePrimary': False,
            'doBalanceExposure': False,
            'doDither': True,
            'usePrevious': False,
            'doCompletedObservations': False,
            'doPlannedObservations': False,
            'cobs': [None, None],
            'schedule_type': 'greedy_slew',
            'filterScheduleType': 'integrated',
            'airmass': 2.5,
            'schedule_strategy': 'tiling',
            'mindiff': 30. * 60.,
            'doMaxTiles': False,
            'max_nb_tiles': 1000,
            'doRASlice': True,
            'raslice': [0.0, 24.0],
        },
        'Gattini': {
            'filt': ['J'],
            'exposuretimes': [300.0],
            'doReferences': False,
            'doUsePrimary': False,
            'doBalanceExposure': False,
            'doDither': False,
            'usePrevious': False,
            'doCompletedObservations': False,
            'doPlannedObservations': False,
            'cobs': [None, None],
            'schedule_type': 'greedy',
            'filterScheduleType': 'block',
            'airmass': 2.5,
            'schedule_strategy': 'tiling',
            'mindiff': 30. * 60.,
            'doMaxTiles': False,
            'max_nb_tiles': 1000,
            'doRASlice': True,
            'raslice': [0.0, 24.0]
        },
        'KPED': {
            'filt': ['r'],
            'exposuretimes': [300.0],
            'doReferences': False,
            'doUsePrimary': False,
            'doBalanceExposure': False,
            'doDither': False,
            'usePrevious': False,
            'doCompletedObservations': False,
            'doPlannedObservations': False,
            'cobs': [None, None],
            'schedule_type': 'greedy',
            'filterScheduleType': 'integrated',
            'airmass': 2.5,
            'schedule_strategy': 'catalog',
            'mindiff': 30. * 60.,
            'doMaxTiles': False,
            'max_nb_tiles': 1000,
            'doRASlice': True,
            'raslice': [0.0, 24.0]
        },
        'GROWTH-India': {
            'filt': ['r'],
            'exposuretimes': [300.0],
            'doReferences': False,
            'doUsePrimary': False,
            'doBalanceExposure': False,
            'doDither': False,
            'usePrevious': False,
            'doCompletedObservations': False,
            'doPlannedObservations': False,
            'cobs': [None, None],
            'schedule_type': 'greedy',
            'filterScheduleType': 'integrated',
            'airmass': 2.5,
            'schedule_strategy': 'catalog',
            'mindiff': 30. * 60.,
            'doMaxTiles': False,
            'max_nb_tiles': 1000,
            'doRASlice': True,
            'raslice': [0.0, 24.0]
        }
    }

    with tqdm(telescopes) as telescope_progress:
        for tele in telescope_progress:
            telescope_progress.set_description('populating {}'.format(tele))

            filename = pkg_resources.resource_filename(__name__,
                                                       'input/%s.ref' % tele)
            if os.path.isfile(filename):
                refstable = table.Table.read(filename,
                                             format='ascii',
                                             data_start=2,
                                             data_end=-1)
                refs = table.unique(refstable, keys=['field', 'fid'])
                if "maglimcat" not in refs.columns:
                    refs["maglimcat"] = np.nan

                reference_images = {
                    group[0]['field']: group['fid'].astype(int).tolist()
                    for group in refs.group_by('field').groups
                }
                reference_mags = {
                    group[0]['field']: group['maglimcat'].tolist()
                    for group in refs.group_by('field').groups
                }

            else:
                reference_images = {}
                reference_mags = {}

            tesspath = 'input/%s.tess' % tele
            try:
                tessfile = app.open_instance_resource(tesspath)
            except IOError:
                tessfile = pkg_resources.resource_stream(__name__, tesspath)
            tessfilename = tessfile.name
            tessfile.close()
            fields = np.recfromtxt(tessfilename,
                                   usecols=range(3),
                                   names=['field_id', 'ra', 'dec'])

            with pkg_resources.resource_stream(__name__,
                                               'config/%s.config' % tele) as g:
                config_struct = {}
                for line in g.readlines():
                    line_without_return = line.decode().split("\n")
                    line_split = line_without_return[0].split(" ")
                    line_split = list(filter(None, line_split))
                    if line_split:
                        try:
                            config_struct[line_split[0]] = float(line_split[1])
                        except ValueError:
                            config_struct[line_split[0]] = line_split[1]

            db.session.merge(
                Telescope(telescope=tele,
                          lat=config_struct["latitude"],
                          lon=config_struct["longitude"],
                          elevation=config_struct["elevation"],
                          timezone=config_struct["timezone"],
                          filters=available_filters[tele],
                          default_plan_args=plan_args[tele]))

            for field_id, ra, dec in tqdm(fields, 'populating fields'):
                ref_filter_ids = reference_images.get(field_id, [])
                ref_filter_mags = []
                for val in reference_mags.get(field_id, []):
                    ref_filter_mags.append(val)
                bands = {1: 'g', 2: 'r', 3: 'i', 4: 'z', 5: 'J'}
                ref_filter_bands = [bands.get(n, n) for n in ref_filter_ids]

                if config_struct["FOV_type"] == "square":
                    ipix, radecs, patch, area = gwemopt.utils.getSquarePixels(
                        ra, dec, config_struct["FOV"], Localization.nside)
                elif config_struct["FOV_type"] == "circle":
                    ipix, radecs, patch, area = gwemopt.utils.getCirclePixels(
                        ra, dec, config_struct["FOV"], Localization.nside)
                if len(radecs) == 0:
                    continue
                corners = np.vstack((radecs, radecs[0, :]))
                if corners.size == 10:
                    corners_copy = copy.deepcopy(corners)
                    corners[2] = corners_copy[3]
                    corners[3] = corners_copy[2]
                contour = {
                    'type': 'Feature',
                    'geometry': {
                        'type': 'MultiLineString',
                        'coordinates': [corners.tolist()]
                    },
                    'properties': {
                        'telescope': tele,
                        'field_id': int(field_id),
                        'ra': ra,
                        'dec': dec,
                        'depth': dict(zip(ref_filter_bands, ref_filter_mags))
                    }
                }
                db.session.merge(
                    Field(telescope=tele,
                          field_id=int(field_id),
                          ra=ra,
                          dec=dec,
                          contour=contour,
                          reference_filter_ids=ref_filter_ids,
                          reference_filter_mags=ref_filter_mags,
                          ipix=ipix.tolist()))

            if tele == "ZTF":
                quadrant_coords = get_ztf_quadrants()

                skyoffset_frames = coordinates.SkyCoord(
                    fields['ra'], fields['dec'], unit=u.deg).skyoffset_frame()

                quadrant_coords_icrs = coordinates.SkyCoord(
                    *np.tile(quadrant_coords[:, np.newaxis, ...],
                             (len(fields), 1, 1)),
                    unit=u.deg,
                    frame=skyoffset_frames[:, np.newaxis,
                                           np.newaxis]).transform_to(
                                               coordinates.ICRS)

                quadrant_xyz = np.moveaxis(
                    quadrant_coords_icrs.cartesian.xyz.value, 0, -1)

                for field_id, xyz in zip(
                        tqdm(fields['field_id'], 'populating subfields'),
                        quadrant_xyz):
                    for ii, xyz in enumerate(xyz):
                        ipix = hp.query_polygon(Localization.nside, xyz)
                        db.session.merge(
                            SubField(telescope=tele,
                                     field_id=int(field_id),
                                     subfield_id=int(ii),
                                     ipix=ipix.tolist()))
Example #38
import pandas as pd

# data file created by get_data.py
df = pd.read_csv('/global/u2/z/zdu863/notebooks/project/butler_all.csv',
                 index_col=0)
# ra and dec of run 1.2i WFD field corners
wfd_ra = np.array([52.25, 52.11, 58.02, 57.87])
wfd_dec = np.array([-27.25, -32.25, -32.25, -27.25])

nsideCoverage = 32
nsideSparse = 2048

# get the the pixel indices within the DC2 1.2 field
field_ang = np.array([np.radians(90 - wfd_dec), np.radians(wfd_ra)])
ipix_infield = hp.query_polygon(nsideSparse,
                                hp.ang2vec(*field_ang),
                                nest=True,
                                inclusive=True)
Nmpix = len(ipix_infield)

datanames = [
    'fiveSigmaDepth', 'airmass', 'rawSeeing', 'finSeeing', 'bg_mean', 'bg_var',
    'zp', 'zp_err'
]
Nname = len(datanames)
bands = ['u', 'g', 'r', 'i', 'z', 'y']
nvisit = {b: np.zeros(Nmpix) for b in bands}
mp = {b: np.zeros([Nname, Nmpix]) for b in bands}

percent = 10
print('Computing systematic maps...')
# loop over all visits in the data file
    def annotate_exposures(
        self,
        exposures,
        pointingSide
    ):
        """
        *generate the likelihood coverage of a set of exposures*

        **Key Arguments:**
            - ``exposures`` -- a dictionary of exposures with the unique exposure IDs as keys and (ra, dec) tuples as values.
            - ``pointingSide`` -- the side length of the square FOV of the exposure/skycell

        **Return:**
            - ``exposureIDs`` -- a list of the exposure IDs as they appear in the original input exposure dictionary
            - ``probs`` -- a list of the total likelihood coverage of each exposure

        **Usage:**

            See class docstring
        """
        self.log.debug('starting the ``annotate`` method')

        nside, hpixArea, aMap, healpixIds, wr, wd = self._create_healpixid_coordinate_grid()

        exposureIDs = []
        exposureIDs[:] = [t for t in exposures.keys()]
        ra = []
        dec = []
        ra[:] = [r[0] for r in exposures.values()]
        dec[:] = [d[1] for d in exposures.values()]

        probs = []
        for e, pra, pdec in zip(exposureIDs, ra, dec):
            # DETERMINE THE CORNERS FOR EACH ATLAS EXPOSURE AS MAPPED TO THE
            # SKY
            decCorners = (pdec - pointingSide / 2,
                          pdec + pointingSide / 2)
            corners = []
            for d in decCorners:
                if d > 90.:
                    d = 180. - d
                elif d < -90.:
                    d = -180 - d
                raCorners = (pra - (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR),
                             pra + (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR))
                for r in raCorners:
                    if r > 360.:
                        r = 720. - r
                    elif r < 0.:
                        r = 360. + r
                    corners.append(hp.ang2vec(r, d, lonlat=True))

            # FLIP CORNERS 3 & 4 SO HEALPY UNDERSTANDS POLYGON SHAPE
            corners = [corners[0], corners[1],
                       corners[3], corners[2]]

            # RETURN HEALPIXELS IN EXPOSURE AREA
            expPixels = hp.query_polygon(nside, np.array(
                corners))

            expProb = []
            expProb[:] = [aMap[i] for i in expPixels]
            expProb = sum(expProb)
            probs.append(expProb)

        self.log.debug('completed the ``annotate`` method')
        return exposureIDs, probs
    def get(self):
        """
        *get the survey footprint stats and print to screen/file*

        **Return:**
            - ``None``
        """
        self.log.debug('starting the ``get`` method')

        # GRAB METADATA FROM THE DATABASES
        this = plot_wave_observational_timelines(
            log=self.log, settings=self.settings)
        plotParameters, ps1Transients, ps1Pointings, altasPointings, atlasTransients = this.get_gw_parameters_from_settings(
            gwid=self.gwid,
            stackOnly=False)

        if self.telescope == "atlas":
            pointings = altasPointings
            pointingSide = 5.46
        if self.telescope == "ps1":
            pointings = ps1Pointings
            pointingSide = 0.4
        telescope = self.telescope.upper()

        # SORT ALL POINTINGS VIA MJD
        pointings = sorted(list(pointings),
                           key=itemgetter('mjd'), reverse=False)

        nside, hpixArea, aMap, healpixIds, wr, wd = self._create_healpixid_coordinate_grid()

        print "EXPID, RA, DEC, MJD, EXPTIME, FILTER, LIM-MAG, EXP-AREA, EXP-LIKELIHOOD, CUM-AREA, CUM-LIKELIHOOD" % locals()

        allHealpixIds = np.array([])
        dictList = []
        iindex = 0
        count = len(pointings)
        cumArea = 0
        cumProb = 0
        for pti, pt in enumerate(pointings):
            pti = pti + 1

            if pti > 1:
                # Cursor up one line and clear line
                sys.stdout.write("\x1b[1A\x1b[2K")

            percent = (float(pti) / float(count)) * 100.
            print '%(pti)s/%(count)s (%(percent)1.1f%% done): summing total area and likelihood covered by %(telescope)s' % locals()

            thisDict = collections.OrderedDict(sorted({}.items()))

            pra = pt["raDeg"]
            pdec = pt["decDeg"]
            pmjd = pt["mjd"]
            pexpid = pt["exp_id"]
            pexptime = pt["exp_time"]
            pfilter = pt["filter"]
            plim = pt["limiting_magnitude"]

            # DETERMINE THE CORNERS FOR EACH ATLAS EXPOSURE AS MAPPED TO THE
            # SKY
            decCorners = (pdec - pointingSide / 2,
                          pdec + pointingSide / 2)
            corners = []
            for d in decCorners:
                if d > 90.:
                    d = 180. - d
                elif d < -90.:
                    d = -180 - d
                raCorners = (pra - (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR),
                             pra + (pointingSide / 2) / np.cos(d * self.DEG_TO_RAD_FACTOR))
                for r in raCorners:
                    if r > 360.:
                        r = 720. - r
                    elif r < 0.:
                        r = 360. + r
                    corners.append(hp.ang2vec(r, d, lonlat=True))

            # FLIP CORNERS 3 & 4 SO HEALPY UNDERSTANDS POLYGON SHAPE
            corners = [corners[0], corners[1],
                       corners[3], corners[2]]

            # RETURN HEALPIXELS IN EXPOSURE AREA
            expPixels = hp.query_polygon(nside, np.array(
                corners))

            expProb = []
            expProb[:] = [aMap[i] for i in expPixels]
            expProb = sum(expProb)
            expArea = len(expPixels) * hpixArea
            if expProb / expArea < 2e-6:
                continue

            pindex = "%(iindex)05d" % locals()
            iindex += 1

            allHealpixIds = np.append(allHealpixIds, expPixels)
            allHealpixIds = np.unique(allHealpixIds)
            cumProb = []
            cumProb[:] = [aMap[int(i)] for i in allHealpixIds]
            cumProb = sum(cumProb)
            cumArea = len(allHealpixIds) * hpixArea
            thisDict["INDEX"] = pindex
            thisDict["EXPID"] = pexpid
            thisDict["RA"] = "%(pra)5.5f" % locals()
            thisDict["DEC"] = "%(pdec)5.5f" % locals()
            thisDict["MJD"] = "%(pmjd)6.6f" % locals()
            thisDict["EXPTIME"] = "%(pexptime)02.1f" % locals()
            thisDict["FILTER"] = pfilter
            try:
                thisDict["LIM-MAG"] = "%(plim)5.2f" % locals()
            except:
                thisDict["LIM-MAG"] = "NaN"
            # thisDict["EXP-AREA"] = expArea
            # thisDict["EXP-LIKELIHOOD"] = expProb
            thisDict["CUM-AREA"] = "%(cumArea)05.2f" % locals()
            thisDict["CUM-LIKELIHOOD"] = "%(cumProb)05.2f" % locals()
            dictList.append(thisDict)

        if not len(dictList):
            thisDict = {}
            thisDict["INDEX"] = "NULL"
            thisDict["EXPID"] = "NULL"
            thisDict["RA"] = "NULL"
            thisDict["DEC"] = "NULL"
            thisDict["MJD"] = "NULL"
            thisDict["EXPTIME"] = "NULL"
            thisDict["FILTER"] = "NULL"
            thisDict["LIM-MAG"] = "NULL"
            dictList.append(thisDict)

        print "AREA: %(cumArea)0.2f. PROB: %(cumProb)0.5f" % locals()

        printFile = self.settings["output directory"] + "/" + \
            self.gwid + "/" + self.gwid + "-" + self.telescope + "-coverage-stats.csv"

        # RECURSIVELY CREATE MISSING DIRECTORIES
        if not os.path.exists(self.settings["output directory"] + "/" + self.gwid):
            os.makedirs(self.settings["output directory"] + "/" + self.gwid)

        dataSet = list_of_dictionaries(
            log=self.log,
            listOfDictionaries=dictList,
        )
        csvData = dataSet.csv(filepath=printFile)

        print "The coverage stats file was written to `%(printFile)s`" % locals()

        self.log.debug('completed the ``get`` method')
        return None
width = 10
poles_margin = 0.1
halfwidth = width/2

for theta in range(halfwidth, 180, width):
    for phi in range(halfwidth, 360, width):

        filename = "plancktest/submaps/%03d_%03d_%03d" % (freq, theta, phi)
        print filename

        vertex_angles = np.radians(
                        [[theta-halfwidth, phi-halfwidth],
                         [theta+halfwidth, phi-halfwidth],
                         [theta+halfwidth, phi+halfwidth],
                         [theta-halfwidth, phi+halfwidth]])
        
        if theta - halfwidth == 0:
            vertex_angles[0,0] = np.radians(poles_margin)
            vertex_angles[3,0] = np.radians(poles_margin)

        if theta + halfwidth == 180:
            vertex_angles[1,0] = np.radians(180-poles_margin)
            vertex_angles[2,0] = np.radians(180-poles_margin)

        pix = hp.query_polygon(nside, hp.ang2vec(vertex_angles[:,0], vertex_angles[:,1]), inclusive=True)
        submap = np.zeros(len(pix), dtype=[("pix",np.int64), ("temp", np.float64)])
        submap["pix"] = pix
        submap["temp"] = m[pix]
        np.save(filename, submap)