Code example #1
def doImagesOverlap(head1, head2):
    """ Do the two images overlap."""

    if isinstance(head1, WCS):
        wcs1 = head1
    else:
        wcs1 = WCS(head1)
    ny1, nx1 = wcs1.array_shape
    ra1, dec1 = wcs1.wcs_pix2world(nx1 / 2, ny1 / 2, 0)
    vra1, vdec1 = wcs1.wcs_pix2world([0, nx1 - 1, nx1 - 1, 0],
                                     [0, 0, ny1 - 1, ny1 - 1], 0)
    vlon1, vlat1 = coords.rotsphcen(vra1, vdec1, ra1, dec1, gnomic=True)

    if isinstance(head2, WCS):
        wcs2 = head2
    else:
        wcs2 = WCS(head2)
    ny2, nx2 = wcs2.array_shape
    vra2, vdec2 = wcs2.wcs_pix2world([0, nx2 - 1, nx2 - 1, 0],
                                     [0, 0, ny2 - 1, ny2 - 1], 0)
    vlon2, vlat2 = coords.rotsphcen(vra2, vdec2, ra1, dec1, gnomic=True)

    olap = coords.doPolygonsOverlap(vlon1, vlat1, vlon2, vlat2)

    return olap
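
doImagesOverlap leans on coords.rotsphcen and coords.doPolygonsOverlap from the surrounding package. Below is a minimal self-contained sketch of the same corner-vertex idea, with astropy's SkyOffsetFrame standing in for the gnomonic rotation and matplotlib's Path for the polygon test; the two toy tangent-plane WCSs are invented for illustration.

import numpy as np
import astropy.units as u
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from matplotlib.path import Path

def make_wcs(crval, nx=2048, ny=2048, scale=0.27 / 3600.0):
    # Toy tangent-plane WCS centered at crval (deg)
    w = WCS(naxis=2)
    w.wcs.ctype = ['RA---TAN', 'DEC--TAN']
    w.wcs.crval = crval
    w.wcs.crpix = [nx / 2, ny / 2]
    w.wcs.cdelt = [-scale, scale]
    w.pixel_shape = (nx, ny)
    return w

def corner_offsets(wcs, center):
    # Corner pixels -> RA/Dec -> tangent-plane offsets (deg) about `center`
    ny, nx = wcs.array_shape
    vra, vdec = wcs.wcs_pix2world([0, nx - 1, nx - 1, 0],
                                  [0, 0, ny - 1, ny - 1], 0)
    off = SkyCoord(vra, vdec, unit='deg').transform_to(center.skyoffset_frame())
    return np.column_stack((off.lon.wrap_at(180 * u.deg).deg, off.lat.deg))

wcs1 = make_wcs([180.0, 30.0])
wcs2 = make_wcs([180.1, 30.05])               # shifted field that still overlaps
cen = SkyCoord(wcs1.wcs.crval[0], wcs1.wcs.crval[1], unit='deg')
poly1 = Path(corner_offsets(wcs1, cen))
poly2 = Path(corner_offsets(wcs2, cen))
print(poly1.intersects_path(poly2, filled=True))   # -> True for these two fields
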
Code example #2
def plotfit(cat, pars, cov, savefig=None):
    """ Plot a figure of the data and the proper motion/parallax fit."""

    plt.rcParams.update({'font.size': 12})

    # Compute relative positions
    cenra = np.mean(cat['ra'])
    cendec = np.mean(cat['dec'])
    lon, lat = dcoords.rotsphcen(cat['ra'],
                                 cat['dec'],
                                 cenra,
                                 cendec,
                                 gnomic=True)
    lon *= d2a
    lat *= d2a

    # Array of MJDs for model curve
    mjd = np.linspace(np.min(cat['mjd']), np.max(cat['mjd']), 100)
    out = astrometryfunc([cenra, cendec, mjd], pars[0], pars[1], pars[2],
                         pars[3], pars[4])
    ll = out[0:100]
    bb = out[100:]

    # Plot the model and data
    plt.plot(ll, bb)
    plt.errorbar(lon,
                 lat,
                 xerr=cat['raerr'],
                 yerr=cat['decerr'],
                 fmt='o',
                 color='black',
                 markersize=5,
                 ecolor='lightgray',
                 elinewidth=2,
                 linestyle='none',
                 capsize=0)
    plt.xlabel('dRA (arcsec)')
    plt.ylabel('dDEC (arcsec)')
    xr = dln.minmax(np.concatenate((lon, ll)))
    xr = [xr[0] - 0.05 * dln.valrange(xr), xr[1] + 0.05 * dln.valrange(xr)]
    yr = dln.minmax(np.concatenate((lat, bb)))
    yr = [yr[0] - 0.05 * dln.valrange(yr), yr[1] + 0.05 * dln.valrange(yr)]
    plt.xlim(xr)
    plt.ylim(yr)
    perr = np.sqrt(np.diag(cov))
    plt.annotate(
        r'$\mu_\alpha$ = %5.3f $\pm$ %5.3f mas/yr' %
        (pars[2] * 1e3, perr[2] * 1e3) + '\n' +
        r'$\mu_\delta$ = %5.3f $\pm$ %5.3f mas/yr' %
        (pars[3] * 1e3, perr[3] * 1e3) + '\n' +
        r'$\pi$ = %5.3f $\pm$ %5.3f mas' % (pars[4] * 1e3, perr[4] * 1e3),
        xy=(xr[0] + 0.05 * dln.valrange(xr), yr[1] - 0.20 * dln.valrange(yr)),
        ha='left')
    if savefig is not None:
        plt.savefig(savefig)
Code example #3
def fit(cat):
    """ Fit proper motion and parallax to ra/dec/mjd data in a table."""
    mjd = cat['mjd']
    ra = cat['ra']
    raerr = cat['raerr']
    dec = cat['dec']
    decerr = cat['decerr']

    # Compute relative positions
    cenra = np.mean(ra)
    cendec = np.mean(dec)
    lon, lat = dcoords.rotsphcen(ra, dec, cenra, cendec, gnomic=True)
    lon *= d2a
    lat *= d2a

    # Fit proper motion and parallax
    pars, cov = curve_fit(astrometryfunc, [ra, dec, mjd],
                          np.concatenate([lon, lat]).flatten(),
                          sigma=np.concatenate([raerr, decerr]).flatten())

    return pars, cov
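
curve_fit needs the model function astrometryfunc, which lives elsewhere in the module and is not shown here. A hypothetical proper-motion-only stand-in is sketched below to illustrate the expected call signature and the concatenated lon/lat output; the real model also adds the parallax term (built from the observation dates and the Earth's position) that supplies the fifth parameter used in plotfit.

import numpy as np

def astrometryfunc_pm(x, lon0, lat0, pmra, pmdec):
    # Hypothetical proper-motion-only model (no parallax term):
    # x = [cenra, cendec, mjd]; offsets in arcsec, motions in arcsec/yr
    cenra, cendec, mjd = x
    dt = (mjd - np.min(mjd)) / 365.25       # years since the first epoch
    lon = lon0 + pmra * dt
    lat = lat0 + pmdec * dt
    return np.concatenate([lon, lat])
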
Code example #4
File: plugmap.py  Project: sdss/apogee_drp
def load(plugfile, verbose=False, fixfiberid=None):
    """
    This program loads an APOGEE plugmap file.

    Parameters
    ----------
    plugfile : str
           The absolute path of the plugmap file
    verbose : bool, optional
           Print extra information to the screen.  Default is False.
    fixfiberid : int, optional
           Apply a custom fiber-ID fix (1 or 2).  Default is None.

    Returns
    -------
    plugmap : dict
           The plugmap structure with all the relevant information

    Example
    -------
    pmap = plugmap.load(plugfile)

    By D.Nidever  May 2010
    converted to python, Oct 2020
    """

    # Check that the plug file exists
    if os.path.exists(plugfile) == False:
        raise ValueError(plugfile + ' NOT FOUND')

    # Load the plugmap yanny file
    plugmap = yanny.yanny(plugfile, np=True)

    # Add ETA/ZETA to plugmap structure
    fiberdata = plugmap['PLUGMAPOBJ']
    del plugmap['PLUGMAPOBJ']  # gets replaced with fiberdata below
    fiberdata = dln.addcatcols(
        fiberdata, np.dtype([('zeta', np.float64), ('eta', np.float64)]))
    zeta, eta = coords.rotsphcen(fiberdata['ra'],
                                 fiberdata['dec'],
                                 np.float64(plugmap['raCen']),
                                 np.float64(plugmap['decCen']),
                                 gnomic=True)
    fiberdata['zeta'] = zeta
    fiberdata['eta'] = eta

    # Fix bit 6 erroneously set in plugmap files for tellurics
    ind, = np.where((fiberdata['spectrographId'] == 2)
                    & (fiberdata['holeType'].astype(str) == 'OBJECT')
                    & (fiberdata['objType'].astype(str) == 'HOT_STD'))
    if len(ind) > 0:
        fiberdata['secTarget'][ind] = np.int32(fiberdata['secTarget'][ind]
                                               & 0xFFFFFFDF)
        #fiberdata['secTarget'][ind] = np.uint64(fiberdata['secTarget'][ind] & 0xFFFFFFDF)

    # Custom errors in mapping?
    if fixfiberid is not None:
        if fixfiberid == 1:
            starind, = np.where(fiberdata['spectrographId'] == 2)
            for istar in range(len(starind)):
                fiberid = fiberdata['fiberId'][starind[istar]]
                if fiberid >= 0:
                    subid = (fiberid - 1) % 30
                    bundleid = (fiberid - subid) // 30
                    fiberdata['fiberId'][
                        starind[istar]] = (9 - bundleid) * 30 + subid + 1
                    print(istar, fiberid, subid, bundleid,
                          fiberdata['fiberId'][starind[istar]])
        if fixfiberid == 2:
            # MTP#2 rotated
            starind, = np.where((fiberdata['spectrographId'] == 2)
                                & (fiberdata['holeType'] == 'OBJECT'))
            fiberid = fiberdata['fiberId'][starind]
            j, = np.where((fiberid >= 31) & (fiberid <= 36))
            fiberdata['fiberId'][starind[j]] = fiberid[j] + 23
            j, = np.where((fiberid >= 37) & (fiberid <= 44))
            fiberdata['fiberId'][starind[j]] = fiberid[j] + 8
            j, = np.where((fiberid >= 45) & (fiberid <= 52))
            fiberdata['fiberId'][starind[j]] = fiberid[j] - 8
            j, = np.where((fiberid >= 54) & (fiberid <= 59))
            fiberdata['fiberId'][starind[j]] = fiberid[j] - 23
            # Missing fibers from unpopulated 2 of MTP
            j, = np.where((fiberdata['fiberId'][starind] == 53)
                          | (fiberdata['fiberId'][starind] == 60))
            fiberdata['fiberId'][starind[j]] = -1

    # Plug fixed fiberdata back in
    plugmap['fiberdata'] = fiberdata

    return plugmap
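
The telluric fix above clears one targeting flag by AND-ing secTarget with 0xFFFFFFDF, i.e. every bit set except the value-32 bit. A quick standalone check of that masking with made-up flag values:

import numpy as np

sec = np.array([32, 33, 96, 7], dtype=np.uint32)   # made-up secTarget values
fixed = sec & np.uint32(0xFFFFFFDF)                # clear only the value-32 bit
print(fixed)                                       # -> [ 0  1 64  7]
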
Code example #5
def image_interp(imagefile, outhead, weightfile=None, masknan=False):
    """ Interpolate a single image (can be multi-extension) to the output WCS."""

    if os.path.exists(imagefile) is False:
        raise ValueError(imagefile + " NOT FOUND")
    if weightfile is not None:
        if os.path.exists(weightfile) is False:
            raise ValueError(weightfile + " NOT FOUND")

    # Output vertices
    bricknx = outhead['NAXIS1']
    brickny = outhead['NAXIS2']
    brickwcs = WCS(outhead)
    brickra, brickdec = brickwcs.wcs_pix2world(bricknx / 2, brickny / 2, 0)
    brickvra, brickvdec = brickwcs.wcs_pix2world(
        [0, bricknx - 1, bricknx - 1, 0], [0, 0, brickny - 1, brickny - 1], 0)
    brickvlon, brickvlat = coords.rotsphcen(brickvra,
                                            brickvdec,
                                            brickra,
                                            brickdec,
                                            gnomic=True)

    # How many extensions
    hdulist = fits.open(imagefile)
    nimhdu = len(hdulist)
    hdulist.close()
    if weightfile is not None:
        hdulist = fits.open(weightfile)
        nwthdu = len(hdulist)
        hdulist.close()
        if nimhdu != nwthdu:
            raise ValueError(imagefile + ' and ' + weightfile +
                             ' do NOT have the same number of extensions.')

    # Open the files
    imhdulist = fits.open(imagefile)
    if weightfile is not None:
        wthdulist = fits.open(weightfile)

    # Initialize final images
    fnx = outhead['NAXIS1']
    fny = outhead['NAXIS2']
    fim = np.zeros((fny, fnx), float)
    fwt = np.zeros((fny, fnx), float)
    fbg = np.zeros((fny, fnx), float)

    # Loop over the HDUs
    for i in range(nimhdu):
        # Just get the header
        head = imhdulist[i].header
        if head['NAXIS'] == 0:  # no image
            continue
        wcs = WCS(head)
        nx1 = head['NAXIS1']
        ny1 = head['NAXIS2']

        # Check that it overlaps the final area
        if doImagesOverlap(brickwcs, wcs) is False:
            continue

        mask = None
        # Flux image
        im = imhdulist[i].data
        head = imhdulist[i].header
        im = inNativeByteOrder(im)  # for sep need native byte order
        ny1, nx1 = im.shape
        # Weight image
        if weightfile is not None:
            wt = wthdulist[i].data
            whead = wthdulist[i].header
            wt = inNativeByteOrder(wt)
            mask = (wt <= 0)

        # Mask NaNs/Infs
        if masknan is True:
            if mask is not None:
                mask = (mask == True) | ~np.isfinite(im)
            else:
                mask = ~np.isfinite(im)
            im[mask] = np.median(im[~mask])

        # Step 1. Background subtract the image
        bkg = sep.Background(im, mask=mask, bw=64, bh=64, fw=3, fh=3)
        bkg_image = bkg.back()
        im -= bkg_image
        if mask is not None:
            im[mask] = 0

        # Step 2. Reproject the image
        newim, footprint = reproject_interp((im, head), outhead)
        if weightfile is not None:
            newwt, wfootprint = reproject_interp((wt, whead), outhead)
        newbg, bfootprint = reproject_interp((bkg_image, head), outhead)

        # Step 3. Add to final images
        fim += newim
        if weightfile is not None:
            fwt += newwt
        fbg += newbg

    return fim, fwt, fbg
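
Most of the work in image_interp is the reproject-and-accumulate step built on reproject_interp from the reproject package. Here is a small self-contained sketch of just that step with two invented tangent-plane WCSs and random data; the sep background subtraction and weight masking are left out.

import numpy as np
from astropy.wcs import WCS
from reproject import reproject_interp

def tan_wcs(crval, nx, ny, scale=1.0 / 3600.0):
    # Simple TAN projection centered at crval (deg) with square pixels
    w = WCS(naxis=2)
    w.wcs.ctype = ['RA---TAN', 'DEC--TAN']
    w.wcs.crval = crval
    w.wcs.crpix = [nx / 2, ny / 2]
    w.wcs.cdelt = [-scale, scale]
    return w

nx, ny = 200, 200
inwcs = tan_wcs([180.0, 30.0], nx, ny)
outwcs = tan_wcs([180.01, 30.01], nx, ny)       # slightly offset output grid

im = np.random.normal(1000.0, 10.0, (ny, nx))   # fake flux image
wt = np.ones((ny, nx))                          # fake weight map

fim = np.zeros((ny, nx))
fwt = np.zeros((ny, nx))
newim, footprint = reproject_interp((im, inwcs), outwcs, shape_out=(ny, nx))
newwt, _ = reproject_interp((wt, inwcs), outwcs, shape_out=(ny, nx))
fim += np.nan_to_num(newim)     # pixels outside the input footprint come back NaN
fwt += np.nan_to_num(newwt)
print(fim.shape, int(np.sum(footprint > 0)), 'overlapping pixels')
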
Code example #6
def getbrickexposures(brick, band=None, version='v3'):
    """ Get exposures information that overlap a brick."""

    # Directories
    dldir, mssdir, localdir = rootdirs()
    # Get brick information
    brickdata = getbrickinfo(brick, version=version)

    # Healpix information
    pix128 = hp.ang2pix(128, brickdata['ra'], brickdata['dec'], lonlat=True)
    # neighbors
    neipix = hp.get_all_neighbours(128, pix128)

    # Get all of the exposures overlapping this region
    meta_dbfile = dldir + '/dnidever/nsc/instcal/' + version + '/lists/nsc_meta.db'
    allpix = np.hstack((neipix.flatten(), pix128))
    whr = ' or '.join(['ring128==' + h for h in allpix.astype(str)])
    chipdata = db.query(meta_dbfile, table='chip', cols='*', where=whr)

    # Do more overlap checking
    brickvra = np.hstack((brickdata['ra1'], brickdata['ra2'], brickdata['ra2'],
                          brickdata['ra1']))
    brickvdec = np.hstack((brickdata['dec1'], brickdata['dec1'],
                           brickdata['dec2'], brickdata['dec2']))
    brickvlon, brickvlat = coords.rotsphcen(brickvra,
                                            brickvdec,
                                            brickdata['ra'],
                                            brickdata['dec'],
                                            gnomic=True)
    olap = np.zeros(len(chipdata), bool)
    for i in range(len(chipdata)):
        vra = np.hstack((chipdata['vra1'][i], chipdata['vra2'][i],
                         chipdata['vra2'][i], chipdata['vra1'][i]))
        vdec = np.hstack((chipdata['vdec1'][i], chipdata['vdec1'][i],
                          chipdata['vdec2'][i], chipdata['vdec2'][i]))
        vlon, vlat = coords.rotsphcen(vra,
                                      vdec,
                                      brickdata['ra'],
                                      brickdata['dec'],
                                      gnomic=True)
        olap[i] = coords.doPolygonsOverlap(vlon, vlat, brickvlon, brickvlat)
    ngdch = np.sum(olap)
    if ngdch == 0:
        print('No exposures overlap brick ' + brick)
        return None
    chipdata = chipdata[olap]
    exposure = np.unique(chipdata['exposure'])

    # Get the exposure data
    whr = ' or '.join(['exposure=="' + e + '"' for e in exposure.astype(str)])
    expdata = db.query(meta_dbfile, table='exposure', cols='*', where=whr)

    # Check band
    if band is not None:
        gband, = np.where(expdata['filter'] == band)
        if len(gband) == 0:
            print('No ' + band + ' exposures overlap brick ' + brick)
            return None
        expdata = expdata[gband]

    nexp = len(expdata)
    print(str(nexp) + ' exposures overlap brick ' + brick)

    return expdata
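
The exposure search first restricts the chip table to the nside=128 ring pixel containing the brick center plus its eight neighbours, then ORs those pixel numbers into the where clause. A standalone sketch of that HEALPix step, using a made-up brick center:

import numpy as np
import healpy as hp

ra, dec = 180.3, -35.2                         # made-up brick center (deg)
pix128 = hp.ang2pix(128, ra, dec, lonlat=True)
neipix = hp.get_all_neighbours(128, pix128)    # 8 neighbours, -1 if missing
allpix = np.hstack((neipix[neipix >= 0], pix128))
whr = ' or '.join(['ring128==' + str(p) for p in allpix])
print(whr)
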
Code example #7
def loadmeas(metafile, buffdict=None, verbose=False):

    if os.path.exists(metafile) is False:
        print(metafile + ' NOT FOUND')
        return np.array([])
    meta = fits.getdata(metafile, 1)
    chmeta = fits.getdata(metafile, 2)

    fdir = os.path.dirname(metafile)
    fbase, ext = os.path.splitext(os.path.basename(metafile))
    fbase = fbase[:-5]  # remove _meta at end

    # Loop over the chip files
    cat = None
    for j in range(len(chmeta)):
        # Check that this chip was astrometrically calibrated
        #   and falls inside the HEALPix region
        if chmeta[j]['ngaiamatch'] == 0:
            if verbose: print('This chip was not astrometrically calibrated')

        # Check that this overlaps the healpix region
        inside = True
        if buffdict is not None:
            vra = chmeta[j]['vra']
            vdec = chmeta[j]['vdec']
            if (np.max(vra) - np.min(vra)) > 100:  # deal with RA=0 wraparound
                bd, = np.where(vra > 180)
                if len(bd) > 0: vra[bd] -= 360
            if coords.doPolygonsOverlap(buffdict['ra'], buffdict['dec'], vra,
                                        vdec) is False:
                if verbose:
                    print(
                        'This chip does NOT overlap the HEALPix region+buffer')
                inside = False

        # Check if the chip-level file exists
        chfile = fdir + '/' + fbase + '_' + str(
            chmeta[j]['ccdnum']) + '_meas.fits'
        if os.path.exists(chfile) is False:
            print(chfile + ' NOT FOUND')

        # Load this one
        if (os.path.exists(chfile) is
                True) and (inside is True) and (chmeta[j]['ngaiamatch'] > 1):
            # Load the chip-level catalog
            cat1 = fits.getdata(chfile, 1)
            ncat1 = len(cat1)
            print('  ' + str(ncat1) + ' sources')

            # Make sure it's in the right format
            if len(cat1.dtype.fields) != 32:
                if verbose:
                    print(
                        '  This catalog does not have the right format. Skipping'
                    )
                del (cat1)
                ncat1 = 0

            # Only include sources inside Boundary+Buffer zone
            #  -use ROI_CUT
            #  -reproject to tangent plane first so we don't have to deal
            #     with RA=0 wrapping or pole issues
            if buffdict is not None:
                lon, lat = coords.rotsphcen(cat1['ra'],
                                            cat1['dec'],
                                            buffdict['cenra'],
                                            buffdict['cendec'],
                                            gnomic=True)
                ind0, ind1 = utils.roi_cut(buffdict['lon'], buffdict['lat'],
                                           lon, lat)
                nmatch = len(ind1)
                # Only want sources inside this pixel
                if nmatch > 0:
                    cat1 = cat1[ind1]
                ncat1 = len(cat1)
                if verbose:
                    print('  ' + str(nmatch) +
                          ' sources are inside this pixel')

            # Combine the catalogs
            if ncat1 > 0:
                if cat is None:
                    dtype_cat = cat1.dtype
                    cat = np.zeros(np.sum(chmeta['nsources']), dtype=dtype_cat)
                    catcount = 0
                cat[catcount:catcount + ncat1] = cat1
                catcount += ncat1

            #BOMB1:
    if cat is not None: cat = cat[0:catcount]  # trim excess
    if cat is None: cat = np.array([])  # empty cat

    return cat
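
The boundary+buffer cut relies on utils.roi_cut from the surrounding package to keep only the sources whose tangent-plane coordinates fall inside the polygon. A rough stand-in for that cut, using matplotlib's Path.contains_points with an invented polygon and random source positions:

import numpy as np
from matplotlib.path import Path

# Invented buffer polygon and source positions, already on the tangent plane (deg)
polylon = np.array([-0.3, 0.3, 0.3, -0.3])
polylat = np.array([-0.3, -0.3, 0.3, 0.3])
lon = np.random.uniform(-0.5, 0.5, 1000)
lat = np.random.uniform(-0.5, 0.5, 1000)

poly = Path(np.column_stack((polylon, polylat)))
inside = poly.contains_points(np.column_stack((lon, lat)))
ind1, = np.where(inside)                 # plays the role of roi_cut's second output
print(len(ind1), 'sources are inside this pixel')
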
Code example #8
    # Get the boundary coordinates
    #   healpy.boundaries but not sure how to do it in IDL
    #   pix2vec_ring/nest can optionally return vertices but only 4
    #     maybe subsample myself between the vectors
    # Expand the boundary to include a "buffer" zone
    #  to deal with edge cases
    vecbound = hp.boundaries(nside, pix, step=100)
    rabound, decbound = hp.vec2ang(np.transpose(vecbound), lonlat=True)

    # Expand the boundary by the buffer size
    cenra, cendec = hp.pix2ang(nside, pix, lonlat=True)
    # reproject onto tangent plane
    lonbound, latbound = coords.rotsphcen(rabound,
                                          decbound,
                                          cenra,
                                          cendec,
                                          gnomic=True)
    # expand by a fraction, it's not an exact boundary but good enough
    buffsize = 10.0 / 3600.  # in deg
    radbound = np.sqrt(lonbound**2 + latbound**2)
    frac = 1.0 + 1.5 * np.max(buffsize / radbound)
    lonbuff = lonbound * frac
    latbuff = latbound * frac
    rabuff, decbuff = coords.rotsphcen(lonbuff,
                                       latbuff,
                                       cenra,
                                       cendec,
                                       gnomic=True,
                                       reverse=True)
    if (np.max(rabuff) - np.min(rabuff)) > 100:  # deal with RA=0 wraparound
        bd, = np.where(rabuff > 180)
        if len(bd) > 0:
            rabuff[bd] -= 360.0
Code example #9
def get_meas(pix,nside=128):
    """ Get the measurements for a particular healpix."""
    # objid, ra, raerr, dec, decerr, mjd

    t0 = time.time()

    connection = pq.connect(user="******",host="db01.datalab.noao.edu",
                            password="",port = "5432",database = "tapdb")
    cur = connection.cursor()

    if nside==128:
        cmd = """SELECT m.objectid,m.ra,m.raerr,m.dec,m.decerr,m.mjd from nsc_dr2.meas as m join
             nsc_dr2.object as obj on m.objectid=obj.objectid where obj.pix={0};""".format(pix)
    if nside==256:
        cmd = """SELECT m.objectid,m.ra,m.raerr,m.dec,m.decerr,m.mjd from nsc_dr2.meas as m join
             nsc_dr2.object as obj on m.objectid=obj.objectid where obj.ring256={0};""".format(pix)

    if (nside==128) | (nside==256):
        cur.execute(cmd)
        data = cur.fetchall()
        # Convert to numpy structured array
        dtype = np.dtype([('objectid',str,50),('ra',np.float64),('raerr',float),
                          ('dec',np.float64),('decerr',float),('mjd',np.float64)])
        meas = np.zeros(len(data),dtype=dtype)
        meas[...] = data
        del(data)

    # nside>256
    else:
        ra,dec = hp.pix2ang(nside,pix,lonlat=True)
        radius = hp.nside2resol(nside,arcmin=True)/60.*1.5
        # First get object ra/dec and figure out the radius
        #cmd = """SELECT objectid,ra,dec from nsc_dr2.object where
        #         q3c_radial_query(ra,dec,{0},{1},{2});""".format(ra,dec,radius)
        #cur.execute(cmd)
        #data = cur.fetchall()
        ## Convert to numpy structured array
        #dtype = np.dtype([('objectid',np.str,50),('ra',np.float64),('dec',np.float64)])
        #obj = np.zeros(len(data),dtype=dtype)
        #obj[...] = data
        #del(data)

        # https://github.com/segasai/q3c
        # The polygonal query, i.e. the query of the objects which lie inside the region bounded by the polygon on the sphere.
        # To query the objects in the polygon ((0,0),(2,0),(2,1),(0,1)) ) (this is the spherical polygon with following vertices:
        #  (ra=0, dec=0) ; (ra=2, dec=0); (ra=2, dec=1); (ra=0, dec=1)):
        # my_db# SELECT * FROM mytable WHERE q3c_poly_query(ra, dec, '{0, 0, 2, 0, 2, 1, 0, 1}');
        
        vecbound = hp.boundaries(nside,pix)
        rabound, decbound = hp.vec2ang(np.transpose(vecbound),lonlat=True)
        # Expand the boundary by the buffer size
        cenra, cendec = hp.pix2ang(nside,pix,lonlat=True)
        # reproject onto tangent plane
        lonbound, latbound = coords.rotsphcen(rabound,decbound,cenra,cendec,gnomic=True)
        # expand by a fraction, it's not an exact boundary but good enough
        frac = 1.05
        lonbuff = lonbound*frac
        latbuff = latbound*frac
        rabuff, decbuff = coords.rotsphcen(lonbuff,latbuff,cenra,cendec,gnomic=True,reverse=True)
        if (np.max(rabuff)-np.min(rabuff))>100:  # deal with RA=0 wraparound
            bd,nbd = dln.where(rabuff>180)
            if nbd>0:rabuff[bd] -=360.0
        bnd = (rabuff[0],decbuff[0], rabuff[1],decbuff[1], rabuff[2],decbuff[2], rabuff[3],decbuff[3])

        cmd = "SELECT m.objectid,m.ra,m.raerr,m.dec,m.decerr,m.mjd from nsc_dr2.meas as m join "
        cmd += "nsc_dr2.object as obj on m.objectid=obj.objectid where "
        cmd += "q3c_poly_query(obj.ra,obj.dec,'{%10.6f,%10.6f, %10.6f,%10.6f, %10.6f,%10.6f, %10.6f,%10.6f}'::double precision[]);" % (bnd)

        print(bnd)

        #cmd = """SELECT m.objectid,m.ra,m.raerr,m.dec,m.decerr,m.mjd from nsc_dr2.meas as m join
        #     nsc_dr2.object as obj on m.objectid=obj.objectid where q3c_radial_query(obj.ra,obj.dec,{0},{1},{2});""".format(ra,dec,radius)
        #print("""RA={0} DEC={1} RADIUS={2}""".format(ra,dec,radius))
        
        cur.execute(cmd)
        data = cur.fetchall()
        # Convert to numpy structured array
        dtype = np.dtype([('objectid',str,50),('ra',np.float64),('raerr',float),
                          ('dec',np.float64),('decerr',float),('mjd',np.float64)])
        meas = np.zeros(len(data),dtype=dtype)
        meas[...] = data
        del(data)

    cur.close()
    connection.close()

    dt = time.time()-t0
    print('Retrieved '+str(len(meas))+' measurements in '+str(dt)+' seconds')

    return meas
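
Both branches of get_meas use the same numpy idiom to turn the list of row tuples returned by fetchall() into a structured array: allocate an array with the target dtype and assign the whole list at once. A tiny standalone demonstration with fake rows:

import numpy as np

rows = [('obj1', 180.001, 0.02, -35.20, 0.03, 57000.5),
        ('obj2', 180.002, 0.01, -35.21, 0.02, 57001.5)]   # fake fetchall() output
dtype = np.dtype([('objectid', str, 50), ('ra', np.float64), ('raerr', float),
                  ('dec', np.float64), ('decerr', float), ('mjd', np.float64)])
meas = np.zeros(len(rows), dtype=dtype)
meas[...] = rows                 # each tuple fills one structured record
print(meas['objectid'], meas['mjd'])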