Example #1
def symmetric_match_sky_coords(coord1, coord2, tolerance):
    '''produce the symmetric match of coord1 to coord2, such that objects are only a match if
       distance is < tolerance AND
       each is the closest match in the other list  
       output:
       index1_matched: index into coord1 for matched objects
       index2_matched: index into coord2 for matches of objects in index1_matched
       index1_unmatch: indices for unmatched objects in coord1
       index2_unmatch: indices for unmatched objects in coord2
    '''
    closest_2to1, sep2d_2to1, sep3d = match_coordinates_sky(coord1, coord2) # location in coord2 for closest match to each coord1. len = len(coord1)
    closest_1to2, sep2d_1to2, sep3d = match_coordinates_sky(coord2, coord1) # location in coord1 for closest match to each coord2. len = len(coord2)

    index1_matched = []
    index2_matched = []
    index1_unmatched = []
    index2_unmatched = []

    for i in range(0, len(coord1)): # doubtless there is a more Pythonic way to do this..
        # symmetric test: coord1[i]'s nearest neighbour in coord2 must point back to i
        # (NB: should put in an assertion to check that tolerance is an angular unit)
        if sep2d_2to1[i] < tolerance and i == closest_1to2[closest_2to1[i]]:
            index1_matched.append(i)
            index2_matched.append(closest_2to1[i])
        else:
            index1_unmatched.append(i)

    for j in range(0, len(coord2)):
        if j not in index2_matched:
            index2_unmatched.append(j)
                        
    return(index1_matched, index2_matched, index1_unmatched, index2_unmatched)
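A minimal usage sketch (made-up coordinates; assumes the astropy imports the function itself uses are in scope):

from astropy import units as u
from astropy.coordinates import SkyCoord

coord1 = SkyCoord(ra=[10.0, 20.0, 30.0] * u.deg, dec=[0.0, 0.0, 0.0] * u.deg)
coord2 = SkyCoord(ra=[10.0001, 20.0001] * u.deg, dec=[0.0, 0.0] * u.deg)
i1m, i2m, i1u, i2u = symmetric_match_sky_coords(coord1, coord2, 1.0 * u.arcsec)
# expected: i1m == [0, 1], i2m == [0, 1], i1u == [2], i2u == []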
Example #2
    def symmetric_match_sky_coords(self, coord1, coord2, tolerance):
        '''produce the symmetric match of coord1 to coord2
           output:
           index1_matched: index into coord1 for matched objects
           index2_matched: index into coord2 for matches of objects in index1_matched
           index1_unmatch: indices for unmatched objects in coord1
           index2_unmatch: indices for unmatched objects in coord2
        '''
        closest_2to1, sep2d_2to1, sep3d = match_coordinates_sky(
            coord1, coord2
        )  # indices for "coord2" for closest match to each coord1. len = len(coord1)
        # location in coord1 for closest match to each coord2. len = len(coord2)
        closest_1to2, sep2d_1to2, sep3d = match_coordinates_sky(coord2, coord1)

        index1_matched = []
        index2_matched = []
        index1_unmatched = []
        index2_unmatched = []

        logging.debug("DEBUG STATEMENTS:")
        logging.debug("tolerance = {}".format(tolerance))
        logging.debug("len(sep2d_2to1) = {}".format(len(sep2d_2to1)))
        logging.debug("len(sep2d_1to2) = {}".format(len(sep2d_1to2)))
        logging.debug("len(closest_2to1) = {}".format(len(closest_2to1)))
        logging.debug("len(closest_1to2) = {}".format(len(closest_1to2)))
        logging.debug("len(coord1) = {}".format(len(coord1)))
        logging.debug("len(coord2) = {}".format(len(coord2)))

        for i in range(
                0, len(coord1)
        ):  # doubtless there is a more Pythonic way to do this..
            # symmetric test: coord1[i]'s nearest neighbour in coord2 must point back to i
            logging.debug("-------------------")
            logging.debug("iteration i = {}".format(i))
            logging.debug("-------------------")
            # logging.debug("sep2d_1to2[i] = {}".format(sep2d_1to2[i]))
            # logging.debug("closest_1to2[i] = {}".format(closest_1to2[i]))

            if sep2d_2to1[i] < tolerance and i == closest_1to2[
                    closest_2to1[i]]:
                index1_matched.append(i)
                index2_matched.append(closest_2to1[i])
            else:
                index1_unmatched.append(i)

        for j in range(0, len(coord2)):
            if j not in index2_matched:
                index2_unmatched.append(j)

        return (index1_matched, index2_matched, index1_unmatched,
                index2_unmatched)
Example #3
def get_chandra_2003_now():
    c_chandra = SkyCoord(ra=ra_xray * u.degree, dec=dec_xray * u.degree)
    c_Grind = SkyCoord(ra=ra_Grind * u.degree, dec=dec_Grind * u.degree)

    idx, d2d, d3d = match_coordinates_sky(matchcoord=c_Grind,
                                          catalogcoord=c_chandra,
                                          nthneighbor=1)

    a = [
        245, 453, 245, 182, 224, 206, 453, 223, 211, 462, 402, 294, 364, 304,
        485, 182, 258, 283, 345
    ]

    a_opt03 = np.array([
        12, 14, 15, 18, 26, 27, 29, 30, 34, 36, 38, 41, 42, 44, 47, 72, 73, 75,
        1, 2, 3, 8, 11, 23, 33, 45, 69, 70, 76, 92, 66, 9, 21, 22, 25, 59, 68,
        94
    ])
    a_opt03_extend = [167, 163, 182, 120, 121, 197, 184]
    # print(ID_xray[idx])
    print(d2d.arcsec)
    print(np.intersect1d(a, ID_xray[idx]))
    print(ID_xray[idx[a_opt03 - 1]])

    # max_sep = 3.0 * u.arcsec
    # sep_constraint = d2d < max_sep
    # c_matches = c_Grind[sep_constraint]
    # catalog_matches = c_chandra[idx[sep_constraint]]

    return None
Example #4
def get_chandra_HST2001():
    c_chandra = SkyCoord(ra=ra_xray * u.degree, dec=dec_xray * u.degree)
    c_HST = SkyCoord(ra=ra_HST * u.degree, dec=dec_HST * u.degree)

    idx, d2d, d3d = match_coordinates_sky(matchcoord=c_HST,
                                          catalogcoord=c_chandra,
                                          nthneighbor=1)
    max_sep = 5.0 * u.arcsec
    sep_constraint = d2d < max_sep
    c_matches = c_HST[sep_constraint]
    catalog_matches = c_chandra[idx[sep_constraint]]
    d2d_matches = d2d[sep_constraint]

    print(Name[sep_constraint])
    print(ID_xray[idx[sep_constraint]])
    print(period[sep_constraint] * 86400)
    # print(d2d_matches.arcsec)
    a = [
        245, 453, 245, 182, 224, 206, 453, 223, 211, 462, 402, 294, 364, 304,
        485, 182, 258, 283, 345
    ]
    print(np.intersect1d(a, ID_xray[idx[sep_constraint]]))
    print(
        period[sep_constraint][np.where(ID_xray[idx[sep_constraint]] == 345)] *
        86400)
    print(d2d_matches.arcsec[np.where(ID_xray[idx[sep_constraint]] == 345)])
    print(Name[sep_constraint][np.where(ID_xray[idx[sep_constraint]] == 345)])

    return None
Example #5
def source_lightcurve(rel_phot_shlv, ra, dec, matchr=1.0):
    """Crossmatch ra and dec to a PTF shelve file, to return light curve of a given star"""
    shelf = shelve.open(rel_phot_shlv)
    ref_coords = coords.SkyCoord(shelf["ref_coords"].ra,
                                 shelf["ref_coords"].dec,
                                 frame='icrs',
                                 unit='deg')

    source_coords = coords.SkyCoord(ra, dec, frame='icrs', unit='deg')
    idx, sep, dist = coords.match_coordinates_sky(source_coords, ref_coords)

    wmatch = (sep <= matchr * u.arcsec)

    if sum(wmatch) == 1:
        mjds = shelf["mjds"]
        mags = shelf["mags"][idx]
        magerrs = shelf["magerrs"][idx]

        # filter so we only return good points
        wgood = (mags.mask == False)

        if (np.sum(wgood) == 0):
            raise ValueError("No good photometry at this position.")

        return mjds[wgood], mags[wgood], magerrs[wgood]

    else:
        raise ValueError(
            "There are no matches to the provided coordinates within %.1f arcsec"
            % (matchr))
Example #6
def dla_stat(DLAs, qsos, vprox=None, buff=3000.*u.km/u.s,
             zem_min=0., flg_zsrch=0, vmin=0.*u.km/u.s,
             LLS_CUT=None, partial=False, prox=False,
             zem_tol=0.03):
    """ Identify the statistical DLA in a survey
    Note that this algorithm ignores any existing mask

    Parameters
    ----------
    DLAs : DLASurvey
    qsos : Table
      keys must include RA, DEC, ZEM, Z_START
    vmin : Quantity
    vprox
    maxdz
    zem_min
    buff : Quantity
      Buffer velocity in Proximate analysis [not ready for this]
    NHI_cut
    flg_zsrch
    dz_toler
    partial : bool, optional
      Analyze partial LLS? [pLLS]
    prox : bool, optional
      Proximate LLS? [PLLS]
    zem_tol : float, optional
      Tolerance in zem

    Returns
    -------
    msk_smpl : bool array
      True = statistical
    """
    import warnings
    from astropy.coordinates import SkyCoord, match_coordinates_sky
    # Check for mask
    if DLAs.mask is not None:
        warnings.warn("Resetting mask to None.  Be careful here")
        DLAs.mask = None
    # DLA
    msk_smpl = DLAs.zem != DLAs.zem
    #zmax = ltu.z_from_dv(vprox, qsos['ZEM'])
    zmin = ltu.z_from_dv(vmin*np.ones(len(qsos)), qsos['Z_START'].data) # vmin must be array-like to be applied to each individual qsos['Z_START']

    # Make some lists
    qsos_coord = SkyCoord(ra=qsos['RA'], dec=qsos['DEC'])
    dla_coord = DLAs.coord

    idx, d2d, d3d = match_coordinates_sky(dla_coord, qsos_coord, nthneighbor=1)
    close = d2d < 1.*u.arcsec

    for qq, idla in enumerate(DLAs._abs_sys):
        # In stat?
        if close[qq]:
            if np.abs(idla.zem-qsos['ZEM'][idx[qq]]) < zem_tol:
                if ((idla.zabs >= zmin[idx[qq]]) &
                        (idla.zabs <= qsos['Z_END'][idx[qq]]) &
                        (qsos[idx[qq]]['FLG_BAL'] != 2)):
                    msk_smpl[qq] = True
    # Return
    return msk_smpl
Example #7
def find_matching_stars(catalog_stars, image_stars, max_sep=0.5 * u.deg):
    """This function compares star positions.
    Parameters
    -----------
    catalog_stars: SkyCoord(frame='altaz')
        Stars from a catalog.
    image_stars: SkyCoord(frame='altaz')
        Found blobs converted to altaz frame
    max_sep: astropy.units.Quantity[deg]
        maximum angular separation for the matches
    Returns
    -------
    matches: array
        indices of the blobs that were matched to a catalog star
    mask: array[bool]
        True where a star could be matched to a blob
    """
    if len(image_stars) == 0:
        return np.array([], dtype=int), np.zeros(len(catalog_stars),
                                                 dtype=bool)

    idx, d2d, d3d = match_coordinates_sky(catalog_stars, image_stars)
    mask = d2d < max_sep

    return idx[mask], mask
Example #8
    def match_coord(self, cat_coords, toler=0.5*u.arcsec, verbose=True):
        """ Match an input set of SkyCoords to the catalog within a given radius

        Parameters
        ----------
        cat_coords : SkyCoord
          Single or array
        toler : Angle or Quantity, optional
          Tolerance for a match
        verbose : bool, optional

        Returns
        -------
        indices : bool array
          True = match

        """
        # Checks
        if not isinstance(toler, (Angle, Quantity)):
            raise IOError("Input radius must be an Angle type, e.g. 10.*u.arcsec")
        # Match
        idx, d2d, d3d = match_coordinates_sky(self.coords, cat_coords, nthneighbor=1)
        good = d2d < toler
        # Return
        if verbose:
            print("Your search yielded {:d} matches".format(np.sum(good)))
        return self.cat['IGM_ID'][good]
Example #9
def add_vsx_names_to_star_descriptions(stars: List[StarDescription], max_separation=0.01):
    logging.info("Adding VSX names to star descriptions")
    result = stars  # no deep copy for now
    # copy.deepcopy(star_descriptions)
    vsx_catalog, vsx_dict = create_vsx_astropy_catalog()
    star_catalog = create_star_descriptions_catalog(stars)
    # vsx catalog is bigger in this case than star_catalog, but if we switch then different stars can be
    # matched with the same variable which is wrong.
    #
    # idx : integer array
    # Indices into catalogcoord to get the matched points for each matchcoord. Shape matches matchcoord.
    #
    # sep2d : Angle
    # The on-sky separation between the closest match for each matchcoord and the matchcoord. Shape matches matchcoord.
    idx, d2d, _ = match_coordinates_sky(star_catalog, vsx_catalog)
    logging.debug(len(idx))
    results_dict = {} # have temp results_dict so we can remove duplicates
    for index_star_catalog, entry in enumerate(d2d):
        if entry.value < max_separation:
            index_vsx = idx[index_star_catalog]
            # if it's a new vsx star or a better match than the last match, write into dict
            if index_vsx not in results_dict or results_dict[index_vsx][2] > entry.value:
                results_dict[index_vsx] = (index_star_catalog, index_vsx, entry.value)
    # loop over dict and add the new vsx matches to the star descriptions
    for keys, values in results_dict.items():
        _add_catalog_match_to_entry('VSX', result[values[0]], vsx_dict,
                                    values[1], values[2])
        logging.debug(f"Adding vsx match: {values[0]},{values[1]}, {values[2]}\n\n")
    logging.info(f"Added {len(results_dict)} vsx stars.")
    return result
Example #10
    def pairs(self, sep, dv):
        """ Generate a pair catalog
        Parameters
        ----------
        sep : Angle or Quantity
        dv : Quantity
          Offset in velocity.  Positive for projected pairs (i.e. dz > input value)

        Returns
        -------

        """
        # Checks
        if not isinstance(sep, (Angle, Quantity)):
            raise IOError("Input radius must be an Angle type, e.g. 10.*u.arcsec")
        if not isinstance(dv, (Quantity)):
            raise IOError("Input velocity must be a quantity, e.g. u.km/u.s")
        # Match
        idx, d2d, d3d = match_coordinates_sky(self.coords, self.coords, nthneighbor=2)
        close = d2d < sep
        # Cut on redshift
        if dv > 0.:  # Desire projected pairs
            zem1 = self.cat['zem'][close]
            zem2 = self.cat['zem'][idx[close]]
            dv12 = ltu.v_from_z(zem1,zem2)
            gdz = np.abs(dv12) > dv
            # f/g and b/g
            izfg = dv12[gdz] < 0*u.km/u.s
            ID_fg = self.cat['IGM_ID'][close][gdz][izfg]
            ID_bg = self.cat['IGM_ID'][idx[close]][gdz][izfg]
        else:
            pdb.set_trace()
        # Reload
        return ID_fg, ID_bg
Example #11
def search_2MASS():
    '''
    Searches the 2MASS catalog using the given celestial coordinates
    '''
    
    w = WCS(fits.getheader(UPLOAD_FOLDER+'/'+session['name']))
    r = session['r']
    o = SkyCoord(w.wcs_pix2world([(0,0)],1), unit='deg')
    opr = SkyCoord(w.wcs_pix2world([(r,r)],1), unit='deg')
    rw = o.separation(opr)[0]
    print('Separation', rw)

    req = request.get_json()

    data = pd.DataFrame(req)

    src = SkyCoord(ra=data['ra'], dec=data['dec'], unit='deg', frame='icrs')
    crval = SkyCoord(ra=np.mean(data['ra']), dec=np.mean(data['dec']), unit='deg', frame='icrs')
    r = 1.1*crval.separation(src).max()

    Q = Irsa.query_region(crval,catalog='fp_psc',spatial='Cone', radius=r,selcols=['ra','dec','j_m','k_m']).to_pandas()
    print(Q)
    m = SkyCoord(ra=Q['ra'],dec=Q['dec'], unit=('deg','deg'), frame='icrs')
    idx, d2, _ = match_coordinates_sky(src,m)

    Q.loc[idx[d2>=rw]] = None  # drop stars without a close enough match

    data[['j','k']] = Q[['j_m','k_m']].loc[idx].values
    print(data)
    res = make_response(data.to_json(), 200)

    return res
Example #12
    def __add__(self, other, toler=2 * u.arcsec):
        """ Combine one or more IGMSurvey objects

        Routine does a number of checks on the abstype,
        the uniqueness of the sightlines and systems, etc.

        Parameters
        ----------
        other : IGMSurvey
        toler : Angle or Quantity
          Tolerance for uniqueness

        Returns
        -------
        combined : IGMSurvey

        """
        # Check the Surveys are the same type
        if self.abs_type != other.abs_type:
            raise IOError("Combined surveys need to be same abs_type")

        # Init
        combined = IGMSurvey(self.abs_type)
        if self.ref is not None:
            combined.ref = self.ref + ',' + other.ref
        else:
            combined.ref = None

        # Check for unique systems
        other_coord = other.coord
        for abssys in self._abs_sys:
            if np.sum((abssys.coord.separation(other_coord) < toler)
                      & (np.abs(abssys.zabs - other.zabs) <
                         (1000 * (1 + abssys.zabs) / 3e5))) > 0:
                raise NotImplementedError("Need to deal with this")
        # Combine systems
        combined._abs_sys = self._abs_sys + other._abs_sys
        if self.mask is not None:
            combined.mask = np.concatenate((self.mask, other.mask)).flatten()
        else:
            combined.mask = None

        # Sightlines?
        if self.sightlines is not None:
            slf_scoord = SkyCoord(ra=self.sightlines['RA'] * u.deg,
                                  dec=self.sightlines['DEC'] * u.deg)
            oth_scoord = SkyCoord(ra=other.sightlines['RA'] * u.deg,
                                  dec=other.sightlines['DEC'] * u.deg)
            idx, d2d, d3d = coords.match_coordinates_sky(slf_scoord,
                                                         oth_scoord,
                                                         nthneighbor=1)
            mt = d2d < toler
            if np.sum(mt) > 0:
                raise NotImplementedError("Need to deal with this")
            else:
                # Combine systems
                combined.sightlines = vstack(
                    [self.sightlines, other.sightlines])
        # Return
        return combined
Example #13
def match_cats(RA, Dec, refRA, refDec):
    """Match a catalog of RA's and Dec's to a reference catalog
    (refRA and refDec).
    
    Return the indices of the reference catalog that match each
    source in the input catalog, and the on-sky
    separation between each source's closest match in arcsec"""
    # create SkyCoord objects to use with the matching
    SC_cat = SkyCoord(ra=RA, dec=Dec, frame='icrs', unit=(u.deg, u.deg))
    SC_refcat = SkyCoord(ra=refRA,
                         dec=refDec,
                         frame='icrs',
                         unit=(u.deg, u.deg))

    #    idx - indices of matched sources in reference cat
    #  sep2d - on-sky angular separation between closest match
    # dist3d - 3D distance between closest matches
    idx, sep2d, dist3d = match_coordinates_sky(SC_cat, SC_refcat)
    # convert separation to arcsecs
    #separc = sep2d * u.arcsec
    #separc = sep2d.to(u.arcsec)
    #separc = sep2d.is_within_bounds(upper=threshold*u.arcsec)
    #print threshold
    #return (idx,separc)
    return (idx, sep2d)
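A short usage sketch with made-up arrays, applying the arcsecond cut that the commented-out lines hint at:

import numpy as np
from astropy import units as u

RA, Dec = np.array([150.10, 150.20]), np.array([2.10, 2.20])
refRA, refDec = np.array([150.1001, 150.50]), np.array([2.1001, 2.50])
idx, sep2d = match_cats(RA, Dec, refRA, refDec)
good = sep2d < 1.0 * u.arcsec  # keep only matches closer than 1"
print(idx[good])  # reference-catalog rows of the accepted matches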
Example #14
File: hosts.py  Project: lao19881213/FRB
def random_separation(catalog, wcs, npix, trim=1*units.arcmin, ntrial=100):

    # Catalog
    cat_coord = SkyCoord(ra=catalog['ra'], dec=catalog['dec'], unit='deg')

    # Trim
    bottom_corner = wcs.pixel_to_world(0, 0)
    bottom_offset = bottom_corner.directional_offset_by(-45.*units.deg, trim*np.sqrt(2))
    x0,y0 = [float(i) for i in wcs.world_to_pixel(bottom_offset)]

    top_corner = wcs.pixel_to_world(npix-1, npix-1)
    top_offset = top_corner.directional_offset_by(135.*units.deg, trim*np.sqrt(2))
    x1,y1 = [float(i) for i in wcs.world_to_pixel(top_offset)]


    # Generate a uniform grid
    ndim = int(np.sqrt(ntrial))

    xval = np.outer(np.linspace(x0, x1, ndim), np.ones(ndim))
    yval = np.outer(np.ones(ndim), np.linspace(y0, y1, ndim))

    # Coordinates now
    grid_coord = wcs.pixel_to_world(xval.flatten(), yval.flatten())

    # Match
    idx, d2d, d3d = match_coordinates_sky(grid_coord, cat_coord, nthneighbor=1)

    return d2d
Example #15
def write_sdss_sightlines():
    """ Writes the SDSS DR5 sightlines that have no (or very few) DLAs
    Returns
    -------
    None : Writes to Dropbox

    """
    import os
    import h5py
    outfile = os.getenv(
        'DROPBOX_DIR') + '/MachineLearning/DR5/SDSS_DR5_noDLAs.hdf5'
    # Load
    sdss = DLASurvey.load_SDSS_DR5(sample='all')
    slines, sdict = grab_sightlines(sdss, flg_bal=0)
    coords = SkyCoord(ra=slines['RA'], dec=slines['DEC'], unit='deg')
    # Load spectra -- RA/DEC in igmsp is not identical to RA_GROUP, DEC_GROUP in SDSS_DR7
    igmsp = IgmSpec()
    sdss_meta = igmsp['SDSS_DR7'].meta
    qso_coord = SkyCoord(ra=sdss_meta['RA_GROUP'],
                         dec=sdss_meta['DEC_GROUP'],
                         unit='deg')
    idxq, d2dq, d3dq = match_coordinates_sky(coords, qso_coord, nthneighbor=1)
    in_igmsp = d2dq < 1 * u.arcsec  # Check
    # Cut meta
    cut_meta = sdss_meta[idxq[in_igmsp]]
    assert len(slines) == len(cut_meta)
    # Grab
    spectra = igmsp['SDSS_DR7'].spec_from_meta(cut_meta)
    # Write
    hdf = h5py.File(outfile, 'w')
    spectra.write_to_hdf5(outfile, hdf5=hdf, clobber=True, fill_val=0.)
    # Add table (meta is already used)
    hdf['cut_meta'] = cut_meta
    hdf.close()
Example #16
    def pairs(self, sep, dv):
        """ Generate a pair catalog
        Parameters
        ----------
        sep : Angle or Quantity
        dv : Quantity
          Offset in velocity.  Positive for projected pairs (i.e. dz > input value)

        Returns
        -------

        """
        # Checks
        if not isinstance(sep, (Angle, Quantity)):
            raise IOError("Input radius must be an Angle type, e.g. 10.*u.arcsec")
        if not isinstance(dv, (Quantity)):
            raise IOError("Input velocity must be a quantity, e.g. u.km/u.s")
        # Match
        idx, d2d, d3d = match_coordinates_sky(self.coords, self.coords, nthneighbor=2)
        close = d2d < sep
        # Cut on redshift
        if dv > 0.:  # Desire projected pairs
            zem1 = self.cat['zem'][close]
            zem2 = self.cat['zem'][idx[close]]
            dv12 = ltu.dv_from_z(zem1,zem2)
            gdz = np.abs(dv12) > dv
            # f/g and b/g
            izfg = dv12[gdz] < 0*u.km/u.s
            ID_fg = self.cat[self.idkey][close][gdz][izfg]
            ID_bg = self.cat[self.idkey][idx[close]][gdz][izfg]
        else:
            pdb.set_trace()
        # Reload
        return ID_fg, ID_bg
Example #17
def removeCrossMatched(refCatalog, matchCatalog, radiusArcmin=2.5):
    """Cross matches matchCatalog onto refCatalog for objects found within some angular radius 
    (specified in arcmin), and returns refCatalog with the matching entries removed.
    
    Args:
        refCatalog (:obj:`astropy.table.Table`): The reference catalog.
        matchCatalog (:obj:`astropy.table.Table`): The catalog to match onto the reference catalog.
        radiusArcmin (float, optional): Cross-match radius in arcmin.
    
    Returns:
        Cross-matched reference catalog (:obj:`astropy.table.Table`) with matches to matchCatalog removed.
        
    """

    inTab = refCatalog
    outTab = matchCatalog
    RAKey1, decKey1 = getTableRADecKeys(inTab)
    RAKey2, decKey2 = getTableRADecKeys(outTab)
    cat1 = SkyCoord(ra=inTab[RAKey1].data, dec=inTab[decKey1].data, unit='deg')
    xMatchRadiusDeg = radiusArcmin / 60.
    cat2 = SkyCoord(ra=outTab[RAKey2].data,
                    dec=outTab[decKey2].data,
                    unit='deg')
    xIndices, rDeg, sep3d = match_coordinates_sky(cat1, cat2, nthneighbor=1)
    mask = np.greater(rDeg.value, xMatchRadiusDeg)
    inTab = inTab[mask]

    return inTab
Example #18
def match_twofits():
    path = '/Users/baotong/Desktop/period_Tuc/'

    file1 = fits.open(path + 'cheng2019.fit')
    ra_1 = file1[1].data['RAJ2000']
    dec_1 = file1[1].data['DEJ2000']

    file2 = fits.open(path + '2019spectra.fit')
    ra_2 = file2[1].data['RAJ2000']
    dec_2 = file2[1].data['DEJ2000']

    c1 = SkyCoord(ra=ra_2 * u.degree, dec=dec_2 * u.degree)
    c2 = SkyCoord(ra=ra_1 * u.degree, dec=dec_1 * u.degree)
    idx, d2d, d3d = match_coordinates_sky(matchcoord=c1,
                                          catalogcoord=c2,
                                          nthneighbor=1)

    max_sep = 1 * u.arcsec
    sep_constraint = d2d < max_sep
    c_matches = c1[sep_constraint]
    catalog_matches = c2[idx[sep_constraint]]
    d2d_matches = d2d[sep_constraint]
    print(idx)
    judge = np.array(sep_constraint).astype('int')
    print(idx[sep_constraint])
    match_result = np.column_stack((np.arange(1,
                                              len(ra_2) + 1,
                                              1), idx + 1, judge))

    np.savetxt(path + 'match_spectra.txt', match_result, fmt='%10d %10d %10d')
    return idx[sep_constraint]
Example #19
def crossMatch(refCatalog, matchCatalog, radiusArcmin=2.5):
    """Cross matches matchCatalog onto refCatalog for objects found within some angular radius 
    (specified in arcmin).
    
    Args:
        refCatalog (:obj:`astropy.table.Table`): The reference catalog.
        matchCatalog (:obj:`astropy.table.Table`): The catalog to match onto the reference catalog.
        radiusArcmin (float, optional): Cross-match radius in arcmin.
    
    Returns:
        Cross-matched reference catalog, matchCatalog, and array of angular separation in degrees, for 
        objects in common within the matching radius. The cross matched columns are sorted such that rows in
        each correspond to the matched objects.
    
    """

    inTab = refCatalog
    outTab = matchCatalog
    RAKey1, decKey1 = getTableRADecKeys(inTab)
    RAKey2, decKey2 = getTableRADecKeys(outTab)
    cat1 = SkyCoord(ra=inTab[RAKey1].data, dec=inTab[decKey1].data, unit='deg')
    xMatchRadiusDeg = radiusArcmin / 60.
    cat2 = SkyCoord(ra=outTab[RAKey2].data,
                    dec=outTab[decKey2].data,
                    unit='deg')
    xIndices, rDeg, sep3d = match_coordinates_sky(cat1, cat2, nthneighbor=1)
    mask = np.less(rDeg.value, xMatchRadiusDeg)
    matched_outTab = outTab[xIndices]

    inTab = inTab[mask]
    matched_outTab = matched_outTab[mask]
    rDeg = rDeg.value[mask]

    return inTab, matched_outTab, rDeg
Example #20
def check_for_catalog_duplicates(cat,
                                 cat2=None,
                                 match_thresh=0.1,
                                 nthneighbor=2,
                                 ra_errors=None,
                                 dec_errors=None,
                                 zscore=1):

    cat_src_coord = SkyCoord(ra=cat['ra'] * u.degree,
                             dec=cat['dec'] * u.degree,
                             frame='icrs')

    if cat2 is None:
        cat2_src_coord = cat_src_coord
    else:
        cat2_src_coord = SkyCoord(ra=cat2['ra'] * u.degree,
                                  dec=cat2['dec'] * u.degree,
                                  frame='icrs')

    # choose nthneighbor=2 to not just include the same source
    idx, d2d, _ = match_coordinates_sky(
        cat_src_coord, cat2_src_coord,
        nthneighbor=nthneighbor)  # there is an order specific element to this

    if ra_errors is not None:
        match_thresh = zscore * np.sqrt(ra_errors**2 + dec_errors**2)
        print('match threshes is ', match_thresh)

    no_dup_mask = np.where(d2d.arcsec > match_thresh)
    no_dup_cat = cat.iloc[no_dup_mask].copy()

    return no_dup_cat, d2d, idx
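A quick sketch of the intended call (hypothetical pandas catalog; with the default 0.1" threshold the first pair counts as a duplicate):

import pandas as pd

cat = pd.DataFrame({'ra': [10.0, 10.00001, 50.0], 'dec': [0.0, 0.0, 0.0]})
no_dup_cat, d2d, idx = check_for_catalog_duplicates(cat)
# rows 0 and 1 are ~0.036" apart, so only row 2 survives in no_dup_cat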
Example #21
def _get_knownplanet_names_transits(df, is_kane_list=False):
    '''
    get names and "is_transiting" for known planets by crossmatching
    coordinates to NASA exoplanet archive

    The astroquery call below, which is needed to know whether they transit,
    requires bleeding-edge astroquery dev branch.

    args:
        df: a dataframe with RAs and decs for which you want the corresponding
        names and "is_transiting" exoarchive column.
    '''
    from astroquery.nasa_exoplanet_archive import NasaExoplanetArchive
    ea = NasaExoplanetArchive.get_confirmed_planets_table(all_columns=True)

    if not is_kane_list:
        ticra, ticdec = np.array(df['RA']), np.array(df['DEC'])
    else:
        ticra, ticdec = np.array(df['ra']), np.array(df['dec'])
    neara, neadec = np.array(ea['ra']), np.array(ea['dec'])

    c_tic = SkyCoord(ra=ticra * u.deg, dec=ticdec * u.deg, frame='icrs')
    c_nea = SkyCoord(ra=neara * u.deg, dec=neadec * u.deg, frame='icrs')
    from astropy.coordinates import match_coordinates_sky
    idx_nea, d2d, _ = match_coordinates_sky(c_tic, c_nea)

    names = np.array(ea[idx_nea]['pl_hostname'])
    is_transiting = np.array(ea[idx_nea]['pl_tranflag'])
    return names, is_transiting
Example #22
def catalog_match(pubcat_file, erscat_file, match_out_file, match_tol = 1.0):
    ''' matches combined NED/SIMBAD file to ERS source list
    '''
    pubcat = Table.read(pubcat_file, format = 'fits')
    erscat = Table.read(erscat_file, format='ascii.commented_header')

    # construct coordinates needed for matching
    pub_coo = SkyCoord(ra=pubcat['RA'], dec=pubcat['Dec'])
    ers_coo = SkyCoord(ra=erscat['ra']*u.degree, dec=erscat['dec']*u.degree) 

    # do the matching
#    closest_2to1, sep2d_2to1, sep3d = match_coordinates_sky(coord1, coord2) # location in coord2 for closest match to each coord1. len = len(coord1)
    closest, sep2d, sep3d = match_coordinates_sky(pub_coo, ers_coo) # location in coord2 for closest match to each coord1. len = len(coord1)
    matched  = sep2d < match_tol*u.arcsec
#    matched_ers, matched_pub, ers_only, pub_only = symmetric_match_sky_coords(ers_coo, pub_coo, match_tol*u.arcsec)

    # generate the matched table
    keeplist = ['id_','ra','dec']
    tmpcat = Table(erscat[keeplist])
    matchtab = hstack([tmpcat[closest][matched], pubcat[matched]], join_type = 'outer')

    # write the matched catalog to a file
    matchtab.write(match_out_file, format = 'ascii.commented_header')

    return
Example #23
File: utils.py  Project: yzhenggit/pyigm
def check_dup_coord(coords, tol=1 * u.arcsec):
    """ Checks for duplicates in a list or array
    of coordinates, within an angular distance
    tolerance `tol`.

    Parameters
    ----------
    coords : SkyCoord array or list
        Coordinates to check for duplicates
    tol : Angle, optional
        Angular tolerance

    Returns
    -------
    isdup : boolean array, shape(len(coords),)
        Whether a given coordinate in `coords` is a duplicate
    idx : array, shape(len(coords),)
        Indices of the first nearest neighbour as
        given by astropy.coord.match_coordinates_sky()

    """

    idx, d2d, d3d = match_coordinates_sky(coords, coords, nthneighbor=2)
    isdup = d2d < tol

    return isdup, idx
Example #24
def source_lightcurve(rel_phot_shlv, ra, dec, matchr = 1.0):
    """Crossmatch ra and dec to a PTF shelve file, to return light curve of a given star"""
    shelf = shelve.open(rel_phot_shlv)
    ref_coords = coords.SkyCoord(shelf["ref_coords"].ra, shelf["ref_coords"].dec,frame='icrs',unit='deg')    
    
    source_coords = coords.SkyCoord(ra, dec,frame='icrs',unit='deg')
    idx, sep, dist = coords.match_coordinates_sky(source_coords, ref_coords)        
    
    wmatch = (sep <= matchr*u.arcsec)
    
    if sum(wmatch) == 1:
        mjds = shelf["mjds"]
        mags = shelf["mags"][idx]
        magerrs = shelf["magerrs"][idx]

        # filter so we only return good points
        wgood = (mags.mask == False)

        if (np.sum(wgood) == 0):
            raise ValueError("No good photometry at this position.")
        
        return mjds[wgood], mags[wgood], magerrs[wgood]

    else:
        raise ValueError("There are no matches to the provided coordinates within %.1f arcsec" % (matchr))
Example #25
def match_ra_dec_asteca(names_ra_dec, cat_ra_dec, ra, dec):
    '''
    Receive cluster center (ra, dec) coordinates in decimal degrees and use
    them to match with the closest cluster in the ASteCA database, within
    some predefined tolerance.
    '''

    # Store (ra, dec) as valid coordinate object.
    cl_coord = SkyCoord(ra*u.degree, dec*u.degree, frame='icrs')
    # Find closest match in ASteCA catalog to (ra, dec) coordinates.
    i, sep2d, dist3d = match_coordinates_sky(cl_coord, cat_ra_dec,
                                             nthneighbor=1)

    # Distance to closest match in degrees.
    dist_deg = float(sep2d[0].to_string(decimal=True))

    # Match within a given tolerance.
    # 1 arcsec ~ 0.000278 deg
    # if dist_deg < 0.004167:  # 15 arcsec ~ 0.004167
    # if dist_deg < 0.00833:  # 30 arcsec ~ 0.00833
    # if dist_deg < 0.0167:  # 1 arcmin ~ 0.0167
    # if dist_deg < 0.002778:  # 10 arcsec ~ 0.002778
    if dist_deg < 0.00556:  # 20 arcsec ~ 0.00556
        name = str(names_ra_dec[i])
    else:
        name = ''

    return name, dist_deg
Example #26
def query_survey(dataframe, idx, features, survey):
    series = dataframe.loc[idx]
    coord = SkyCoord(ra=series.RA, dec=series.Dec, unit=(u.deg, u.deg))
    try:
        if survey == '2MASS':
            temp = Irsa.query_region(coord,
                                     radius=search_radius,
                                     catalog='fp_psc').to_pandas()

        elif survey == 'GAIA':
            temp = Gaia.query_object(coordinate=coord,
                                     width=search_radius,
                                     height=search_radius).to_pandas()

        else:
            print('Invalid Survey')

        catalog = SkyCoord(ra=temp.ra, dec=temp.dec, unit=(u.deg, u.deg))

        i, _, _ = match_coordinates_sky(coord, catalog)

        for feature in features:
            dataframe.at[idx, feature] = temp.at[int(i), feature]

    except Exception as e:
        print(e)
Example #27
def sdss_redshifts():
    """
    Enter the directory and build a redshift table
    based on the spectra present

    Returns:

    """
    embed(header='THIS NEEDS HELP')
    #
    all_folders = glob.glob(db_path + '/SDSS/*')
    for folder in all_folders:
        Jnames = []
        # Grab the list of spectra files
        spec_files = glob.glob(os.path.join(folder, 'J*_spec.fits'))
        # Generate the name list
        Jnames += [
            os.path.basename(ifile).split('_')[0] for ifile in spec_files
        ]
        # Coords
        coords = SkyCoord(Jnames,
                          unit=(units.hourangle, units.deg))  # from DES
        # Setup
        done = np.zeros_like(coords.ra.value, dtype=bool)
        zs = np.zeros_like(coords.ra.value)

        # Loop me
        while np.any(~done):
            # Grab the first not done
            i0 = np.where(~done)[0][0]
            coord = coords[i0]
            # Grab the SDSS data
            sdssSurvey = sdss.SDSS_Survey(coord, 10 * units.arcmin)
            #
            sub_idx = np.where(~done)[0]
            sub_coords = coords[sub_idx]
            sep = coord.separation(sub_coords)
            doidx = sub_idx[sep < 10 * units.arcmin]  # indices into the full coords array
            dothem = coords[doidx]
            # Now match
            catalog = sdssSurvey.get_catalog()
            sdss_coords = SkyCoord(ra=catalog['ra'],
                                   dec=catalog['dec'],
                                   unit='deg')
            idx, d2d, d3d = match_coordinates_sky(dothem,
                                                  sdss_coords,
                                                  nthneighbor=1)
            # Fill
            zs[doidx] = catalog['z_spec'][idx]
            done[doidx] = True

        # Write the catalog
        tbl = Table()
        tbl['RA'] = coords.ra.value
        tbl['DEC'] = coords.dec.value
        tbl['ZEM'] = zs
        tbl['ZEM_SOURCE'] = 'SDSS'
        tbl['ZQ'] = 4
        tbl.write(os.path.join(folder, 'z_SDSS.ascii'),
                  overwrite=True,
                  format='ascii.fixed_width')
Example #28
def get_star_data(sdata, psf_data, data, sem):
    colname = 'MAG_APER_' + sem

    ### Define coordinates ###
    refcoord = SkyCoord(psf_data['ALPHA_J2000_1'] * u.degree,
                        psf_data['DELTA_J2000_1'] * u.degree)
    semcoord = SkyCoord(sdata['ALPHA_J2000_' + sem] * u.degree,
                        sdata['DELTA_J2000_' + sem] * u.degree)

    ### Match catalogues and create new table ###
    idx, d2d, _ = match_coordinates_sky(
        refcoord, semcoord)  #match these 'good' stars to create table
    tempsdata = sdata[idx]

    #    # limit magnitude range used in PSF
    #    mag = sdata[colname][:,4]
    #    mask1 = mag > 15
    #    mask2 = mag < 19
    #    mask = mask1*mask2
    #
    #    tempsdata = sdata[mask]

    x = tempsdata[colname][:, 4]
    y = tempsdata[colname][:, 4] - tempsdata[colname][:, 1]

    allx = data[colname][:, 4]
    ally = data[colname][:, 4] - data[colname][:, 1]

    return x, y, allx, ally
Example #29
def known_source(sources: pd.DataFrame) -> int:
    '''
    Find the source closest to PSR J2129-04

    Parameters
    ----------
    sources : pd.DataFrame
        The sources to search through. Indices must be reset.

    Returns
    -------
    id_match : int
        The index of the closest source.
    '''
    # from SIMBAD
    coords = SkyCoord(
        "21 29 45.29", "-04 29 11.9",
        frame='icrs',
        unit=('hourangle', 'deg')
    )

    # find PSR J2129-04 by matching coordinates in sources
    source_coords = SkyCoord(
        sources['wavg_ra'],
        sources['wavg_dec'],
        unit=('deg', 'deg')
    )
    id_match, *_ = match_coordinates_sky(coords, source_coords)

    return id_match
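A tiny sketch with a made-up source table (PSR J2129-04 sits near RA 322.4387 deg, Dec -4.4866 deg; assumes the astropy imports used above are in scope):

import pandas as pd

sources = pd.DataFrame({'wavg_ra': [322.4387, 100.0], 'wavg_dec': [-4.4866, 10.0]})
print(known_source(sources))  # -> 0, the row closest to the pulsar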
Example #30
File: utils.py  Project: ninoc/pyigm
def check_dup_coord(coords, tol=1*u.arcsec):
    """ Checks for duplicates in a list or array
    of coordinates, within an angular distance
    tolerance `tol`.

    Parameters
    ----------
    coords : SkyCoord array or list
        Coordinates to check for duplicates
    tol : Angle, optional
        Angular tolerance

    Returns
    -------
    isdup : boolean array, shape(len(coords),)
        Whether a given coordinate in `coords` is a duplicate
    idx : array, shape(len(coords),)
        Indices of the first nearest neighbour as
        given by astropy.coord.match_coordinates_sky()

    """

    idx, d2d, d3d = match_coordinates_sky(coords, coords, nthneighbor=2)
    isdup = d2d < tol

    return isdup, idx
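A minimal usage sketch (made-up coordinates):

from astropy import units as u
from astropy.coordinates import SkyCoord, match_coordinates_sky

coords = SkyCoord(ra=[10.0, 10.0001, 50.0] * u.deg, dec=[0.0, 0.0, 0.0] * u.deg)
isdup, idx = check_dup_coord(coords)
# isdup -> [True, True, False]: the first two entries lie within 1" of each other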
Example #31
def cutout_from_local_files2(fitscut, ra, dec, imsize, local_file_path="", local_file_list="", clobber=False):
    """
args:
    fitscut - name of cutout
    ra      - degrees
    dec     - degrees
    imsize  - size of cutout in degrees
kwargs:
    local_file_path - directory containing the fits images
    local_file_list - table listing the fits images with 'name', 'ra', 'dec' columns
    """

    if os.path.exists(fitscut):
        if clobber:
            os.system('rm '+fitscut)
        else:
            print('file exists and clobber is False: ', fitscut)
            return

    import astropy.coordinates as ac
    from astropy.table import Table
    
    t = Table.read(local_file_list, format='ascii')
    C = ac.SkyCoord(ra,dec,unit='deg')
    idx, sep, dist =  ac.match_coordinates_sky(C, ac.SkyCoord(t['ra'],t['dec'],unit='deg'))
    fitsname = t['name'][idx]
    
    print('nearest fitsimage is', fitsname)
    
    cutout_from_local_file(fitscut, ra, dec, imsize, local_file=local_file_path+'/'+fitsname)
    
    return
Example #32
File: TAROT_PL.py  Project: Noysena/TAROT
    def Data_match_catalog(self):
        if self.coordinate_catalog == [] and self.coordinate_data == []:
            self.SExdata, self.catalog, self.coordinate_catalog, self.coordinate_data = TarotPIP.readData(self)
        else:
            print("Matching will be done in seconds")
        self.idx, self.d2d, self.d3d = match_coordinates_sky(self.coordinate_data, self.coordinate_catalog)
        self.matches = self.coordinate_catalog[self.idx]
        return self.idx, self.d2d, self.d3d, self.matches
Example #33
File: igmsurvey.py  Project: pyigm/pyigm
    def __add__(self, other, toler=2*u.arcsec):
        """ Combine one or more IGMSurvey objects

        Routine does a number of checks on the abstype,
        the uniqueness of the sightlines and systems, etc.

        Parameters
        ----------
        other : IGMSurvey
        toler : Angle or Quantity
          Tolerance for uniqueness

        Returns
        -------
        combined : IGMSurvey

        """
        # Check the Surveys are the same type
        if self.abs_type != other.abs_type:
            raise IOError("Combined surveys need to be same abs_type")

        # Init
        combined = IGMSurvey(self.abs_type)
        if self.ref is not None:
            combined.ref = self.ref + ',' + other.ref
        else:
            combined.ref = None

        # Check for unique systems
        other_coord = other.coord
        for abssys in self._abs_sys:
            if np.sum((abssys.coord.separation(other_coord) < toler) & (
                        np.abs(abssys.zabs-other.zabs) < (1000*(1+abssys.zabs)/3e5))) > 0:
                raise NotImplementedError("Need to deal with this")
        # Combine systems
        combined._abs_sys = self._abs_sys + other._abs_sys
        if self.mask is not None:
            combined.mask = np.concatenate((self.mask, other.mask)).flatten()
        else:
            combined.mask = None

        # Sightlines?
        if self.sightlines is not None:
            slf_scoord = SkyCoord(ra=self.sightlines['RA']*u.deg,
                                  dec=self.sightlines['DEC']*u.deg)
            oth_scoord = SkyCoord(ra=other.sightlines['RA']*u.deg,
                                  dec=other.sightlines['DEC']*u.deg)
            idx, d2d, d3d = coords.match_coordinates_sky(slf_scoord,
                                                         oth_scoord, nthneighbor=1)
            mt = d2d < toler
            if np.sum(mt) > 0:
                raise NotImplementedError("Need to deal with this")
            else:
                # Combine systems
                combined.sightlines = vstack([self.sightlines,
                                              other.sightlines])
        # Return
        return combined
Example #34
File: utils.py  Project: rongmon/linetools
def unique_components(comps1, comps2, tol=5 * u.arcsec):
    """ Identify which AbsComponent members of the comps1 list
    are *not* within the comps2 list, to given tolerances.
    Note, AbsComponent objects in the comps1 list are not examined
    against each other for uniqueness.

    Unique if any apply (test is done in this order)
      1) coord.separation > tol
      2) Z,ion,Ej set is unique
      3) redshift limits do not overlap

    Parameters
    ----------
    comps1 : list of AbsComponent objects
    comps2 : list of AbsComponent objects

    Returns
    -------
    unique : bool array
      True = members of comps1 that are not currently in comps2

    """
    c_mks = const.c.to('km/s').value
    unique = np.array([True] * len(comps1))
    # Coordinates
    ras = [icomp.coord.ra.value for icomp in comps1]
    decs = [icomp.coord.dec.value for icomp in comps1]
    coords1 = SkyCoord(ra=ras, dec=decs, unit='deg')
    ras = [icomp.coord.ra.value for icomp in comps2]
    decs = [icomp.coord.dec.value for icomp in comps2]
    coords2 = SkyCoord(ra=ras, dec=decs, unit='deg')
    # Compare
    idx, d2d, d3d = match_coordinates_sky(coords1, coords2, nthneighbor=1)
    close_enough = d2d < tol
    if np.sum(close_enough) == 0:
        return unique
    # Next step (Z, ion, Ej)
    ZiE1 = np.array([(icomp.Zion[0], icomp.Zion[1], icomp.Ej.value)
                     for icomp in comps1])
    ZiE2 = np.array([(icomp.Zion[0], icomp.Zion[1], icomp.Ej.value)
                     for icomp in comps2])
    indices = np.where(close_enough)[0]
    for idx in indices:  # comp1 indices
        # Match on coords
        coord_mt = np.where(coords1[idx].separation(coords2) < tol)[0]
        # Match on ZiE
        mtZiE = np.where((ZiE2[coord_mt] == ZiE1[idx]).all(axis=1))[0]
        if len(mtZiE) > 0:  # Lastly redshift
            zlim_comp1 = comps1[idx].limits.zlim
            for idx2 in coord_mt[mtZiE]:
                zlim_comp2 = comps2[idx2].limits.zlim
                # Redshift overlap?
                if (zlim_comp1[0] < zlim_comp2[1]) & (zlim_comp1[1] >
                                                      zlim_comp2[0]):
                    unique[idx] = False
    # Return
    return unique
Example #35
def combine_catalog_dfs_no_duplicates(catalog_dfs,
                                      match_threshes=None,
                                      verbose=True):
    ''' Make sure catalog_dfs is ordered where first catalog df is union crossmatch across all catalogs'''

    src_coords = []
    if match_threshes is None:
        match_threshes = [0.1 for x in range(len(catalog_dfs) - 1)]

    for cat_df in catalog_dfs:
        cat_src_coord = SkyCoord(ra=cat_df['ra'] * u.degree,
                                 dec=cat_df['dec'] * u.degree,
                                 frame='icrs')
        src_coords.append(cat_src_coord)

    if verbose:
        print('initial df has length ', len(catalog_dfs[0]))

    df_list = [catalog_dfs[0]]  # this is where order matters

    for j in range(len(catalog_dfs) - 1):

        if j == 0:
            idx_xmatch, d2d_xmatch, _ = match_coordinates_sky(
                src_coords[j + 1], src_coords[0])
        else:
            idx_xmatch, d2d_xmatch, _ = match_coordinates_sky(
                src_coords[j + 1], df_merge_coords)

        nodup_mask = np.where(d2d_xmatch.arcsec > match_threshes[j])[
            0]  # find all non-duplicates
        nodup_xmatch = catalog_dfs[j + 1].iloc[nodup_mask].copy()
        if verbose:
            print('nodup xmatch has length ', len(nodup_xmatch))
        df_list.append(nodup_xmatch)

        df_merge = pd.concat(df_list, ignore_index=True)
        df_merge_coords = SkyCoord(ra=df_merge['ra'] * u.degree,
                                   dec=df_merge['dec'] * u.degree,
                                   frame='icrs')
        if verbose:
            print('df merge has length ', len(df_merge))

    return df_merge
Example #36
    def match(self, CAT, regime='band'):
        """
        INPUT: > list or single instance of ALS_DATA()
               > regime is 'band', 'epoch', or 'tile' matching
               >> for now, all epoch matching should be done first
        FUNC: adds additional data onto the sources in the source list
        (should averages be retaken between matches? that would be a lot
        slower, but maybe more representative)
        """
        if type(CAT) == CATALOG: CAT = [CAT]

        for cat in CAT:
            logging.info('...%s' % cat)
            radec = np.array([[s.RA, s.DEC] for s in self.sourcelist])
            radec = SkyCoord(ra=Angle(radec[:, 0], unit=u.deg),
                             dec=Angle(radec[:, 1], unit=u.deg))
            for s in cat.sourcelist:
                s._voidCalcTileMeans()
            catradec = np.array([[s.RA, s.DEC] for s in cat.sourcelist])
            catradec = SkyCoord(ra=Angle(catradec[:, 0], unit=u.deg),
                                dec=Angle(catradec[:, 1], unit=u.deg))

            idx, d2d, d3d = match_coordinates_sky(radec, catradec)
            bad = np.where(d2d.arcsec > 1.0)[0]
            for i, IDX in enumerate(idx):
                if i not in bad:
                    ## need to put more conditions here
                    if d2d[i] == min(d2d[np.where(idx == IDX)]):
                        if regime == 'epoch':
                            self.sourcelist[i].append_Epoch(
                                cat.sourcelist[IDX])
                        elif regime == 'band':
                            self.sourcelist[i].append_Band(cat.sourcelist[IDX])
                        elif regime == 'tile':
                            self.sourcelist[i].append_Tile(cat.sourcelist[IDX])
                        self.sourcelist[i].quality = True
                if i in bad:
                    if regime == 'epoch':
                        self.sourcelist[i].append_Epoch(bad=True)
                    elif regime == 'band':
                        self.sourcelist[i].append_Band(bad=True)
                    elif regime == 'tile':
                        self.sourcelist[i].append_Tile(bad=True)
                        #cat.sourcelist[IDX].append_Tile(bad=True)
                        #self.sourcelist = np.append(self.sourcelist, cat.sourcelist[IDX] )
            if regime == 'tile':
                for i in range(len(cat.sourcelist)):
                    if i not in idx:
                        self.sourcelist = np.append(self.sourcelist,
                                                    cat.sourcelist[i])
                #If i dont crop out the bad sources i will NEED to append 9999 values to sourcelist objects..
            #self.sourcelist = [s for s in self.sourcelist if s.quality]
            for s in self.sourcelist:
                s._voidCalcTileMeans()
            logging.info("Sources: %d" % len(self.sourcelist))
            logging.info("Not Matched: %d" % len(bad))
Example #37
def flagTileBoundarySplits(tab, xMatchRadiusArcmin=2.5):
    """Flag objects that are closer than some matching radius but which appear in different tiles. These are
    potentially objects that have been de-blended across tile boundaries (in which case one entry in the
    catalog should be kept and the other discarded), but some objects in this category are genuine close
    pairs. At the moment, this is best resolved by visual inspection. This routine adds a flag column named
    `tileBoundarySplit` to the catalog to make it easy to spot these cases.
        
    Args:
        tab (:obj:`astropy.table.Table`): The object catalog to be checked.
        xMatchRadiusArcmin (float, optional): Cross-match radius in arcmin.

    Returns:
        Catalog (:obj:`astropy.table.Table`) with `tileBoundarySplit` column added - this is True for objects
        that may have been deblended across tiles, and require visual inspection.
    
    """

    if len(tab) == 1:
        return tab

    xMatchRadiusDeg = xMatchRadiusArcmin / 60.

    # Find all potential duplicates within a given matching radius
    cat = SkyCoord(ra=tab['RADeg'].data, dec=tab['decDeg'].data, unit='deg')
    xIndices, rDeg, sep3d = match_coordinates_sky(cat, cat, nthneighbor=2)
    mask = np.less(rDeg.value, xMatchRadiusDeg)
    noDupMask = np.greater_equal(rDeg.value, xMatchRadiusDeg)
    dupTab = tab[mask]
    noDupTab = tab[noDupMask]

    # Identify pairs split across tile boundaries
    tileBoundarySplit = np.zeros(len(dupTab), dtype=bool)
    for i in range(len(dupTab)):
        # NOTE: astCoords does not like atpy.Columns sometimes...
        rDeg = astCoords.calcAngSepDeg(dupTab['RADeg'][i], dupTab['decDeg'][i],
                                       dupTab['RADeg'].data,
                                       dupTab['decDeg'].data)
        mask = np.less_equal(rDeg, xMatchRadiusDeg)
        if mask.sum() == 0:  # This ought not to be possible but catch anyway
            bestIndex = i
        else:
            indices = np.where(mask == True)[0]
            bestIndex = indices[np.equal(dupTab['SNR'][mask],
                                         dupTab['SNR'][mask].max())][0]
            if np.unique(dupTab['tileName'][indices]).shape[0] > 1:
                tileBoundarySplit[indices] = True
    dupTab['tileBoundarySplit'] = tileBoundarySplit
    dupTab = dupTab[tileBoundarySplit]

    # Flag in the main table
    tab['tileBoundarySplit'] = np.zeros(len(tab), dtype=bool)
    for row in dupTab:
        mask = (tab['name'] == row['name'])
        tab['tileBoundarySplit'][mask] = True

    return tab
Example #38
def unique_components(comps1, comps2, tol=5*u.arcsec):
    """ Identify which AbsComponent members of the comps1 list
    are *not* within the comps2 list, to given tolerances.
    Note, AbsComponent objects in the comps1 list are not examined
    against each other for uniqueness.

    Unique if any apply (test is done in this order)
      1) coord.separation > tol
      2) Z,ion,Ej set is unique
      3) redshift limits do not overlap

    Parameters
    ----------
    comps1 : list of AbsComponent objects
    comps2 : list of AbsComponent objects

    Returns
    -------
    unique : bool array
      True = members of comps1 that are not currently in comps2

    """
    c_mks = const.c.to('km/s').value
    unique = np.array([True]*len(comps1))
    # Coordinates
    ras = [icomp.coord.ra.value for icomp in comps1]
    decs = [icomp.coord.dec.value for icomp in comps1]
    coords1 = SkyCoord(ra=ras, dec=decs, unit='deg')
    ras = [icomp.coord.ra.value for icomp in comps2]
    decs = [icomp.coord.dec.value for icomp in comps2]
    coords2 = SkyCoord(ra=ras, dec=decs, unit='deg')
    # Compare
    idx, d2d, d3d = match_coordinates_sky(coords1, coords2, nthneighbor=1)
    close_enough = d2d < tol
    if np.sum(close_enough) == 0:
        return unique
    # Next step (Z, ion, Ej)
    ZiE1 = np.array([(icomp.Zion[0], icomp.Zion[1], icomp.Ej.value) for icomp in comps1])
    ZiE2 = np.array([(icomp.Zion[0], icomp.Zion[1], icomp.Ej.value) for icomp in comps2])
    indices = np.where(close_enough)[0]
    for idx in indices:  # comp1 indices
        # Match on coords
        coord_mt = np.where(coords1[idx].separation(coords2) < tol)[0]
        # Match on ZiE
        mtZiE = np.where((ZiE2[coord_mt] == ZiE1[idx]).all(axis=1))[0]
        if len(mtZiE) > 0: # Lastly redshift
            zlim_comp1 = comps1[idx].limits.zlim
            for idx2 in coord_mt[mtZiE]:
                zlim_comp2 = comps2[idx2].limits.zlim
                # Redshift overlap?
                if (zlim_comp1[0] < zlim_comp2[1]) & (zlim_comp1[1] > zlim_comp2[0]):
                    unique[idx] = False
    # Return
    return unique
Example #39
def match_to_dr7_or_dr9(ra_dec_pairs,file_to_match, **keyword_parameter):

    """
    """

    plt.ion()
    catalogue_file = Table.read(file_to_match)
    cat_RA = catalogue_file['RA_dr7qso']
    cat_DEC = catalogue_file['DEC_dr7qso']
    object_table = Table([ra_dec_pairs[0], ra_dec_pairs[1], ra_dec_pairs[2], ra_dec_pairs[3]],
                         names=('obj1_RA', 'obj1_DEC', 'obj2_RA', 'obj2_DEC'))
    object_RA = object_table['obj1_RA']
    object_DEC = object_table['obj1_DEC']

    skycoord_cat = SkyCoord(cat_RA*u.degree,cat_DEC*u.degree, frame='icrs')
    skycoord_object = SkyCoord(object_RA*u.degree,object_DEC*u.degree, frame='icrs')
    idx, d2d, d3d = match_coordinates_sky(skycoord_cat, skycoord_object)
    separations = np.asarray(d2d)*3600.0
    upperlimit = 30.0
    separations_reduced = separations[(separations<=upperlimit)]

    masked_list_ra = np.asarray(skycoord_object.ra)[(idx)]
    masked_list_dec = np.asarray(skycoord_object.dec)[(idx)]

    masked_list_ra_cat = np.asarray(skycoord_cat.ra)
    masked_list_dec_cat = np.asarray(skycoord_cat.dec)

    difference_ra = ((masked_list_ra_cat-masked_list_ra)*np.cos(np.radians(masked_list_dec_cat)))*3600.0
    difference_dec = (masked_list_dec_cat-masked_list_dec)*3600.0

    fig = plt.figure(1, figsize=(8,6))
    ax1=fig.add_subplot(1,2,1)
    ndata = len(separations_reduced)
    ax1.hist(separations_reduced, bins=int(upperlimit/0.5), label=str(ndata))
    ax1.set_title("MATCH TO DR7")
    ax1.set_xlabel('Separation (arcseconds)')
    ax1.set_ylabel('Frequency')
    ax1.legend(loc='upper right')


    ax2 = fig.add_subplot(1,2,2)
    ax2.plot(difference_ra,difference_dec,'oc',markersize=5.0,alpha=0.3)
    ax2.locator_params(axis='x',nbins=4)
    ax2.set_title("MATCHED PAIRS")
    ax2.set_xlabel('DELTA RA')
    ax2.set_ylabel('DELTA DEC')
    plt.tight_layout(pad=0.4, w_pad=0.5)

    if ('save' in keyword_parameter):
        path_to_save = str(keyword_parameter['save'])
        plt.savefig(path_to_save,dpi=150)
    else:
        plt.show()
Example #40
def remove_duplicates(file_cat='spidx-cat.txt'):
    """
    Remove duplicates from overlapping regions.
    For each source check if the closest file-center is the one from which is extracted.
    If not it means the same source is closer in another file, delete it.
    """
    from astropy.table import Table
    from astropy.coordinates import match_coordinates_sky
    from astropy.coordinates import SkyCoord
    import astropy.units as u

    # get all file centers
    print "Collecting centers..."
    centers = Table([[],[],[]], names=('Mask','RA','DEC'), dtype=['S100',float,float])
    centers['RA'].unit = 'deg'
    centers['DEC'].unit = 'deg'
    for i, mask_file in enumerate(glob.glob('masks/mask*.fits')):
        with pyfits.open(mask_file) as fits:
            head = fits[0].header
            ra = head['CRVAL1']
            dec = head['CRVAL2']
            centers.add_row([os.path.basename(mask_file), ra, dec])

    print "Matching catalogues..."
    sources = Table.read('spidx-cat.fits', format='fits')
    idx, _, _ = match_coordinates_sky(SkyCoord(sources['RA']*u.deg, sources['DEC']*u.deg),\
                                      SkyCoord(centers['RA'], centers['DEC']))

    print "Removing duplicates..."
    idx_duplicates = [] 
    for i, source in enumerate(sources):
        # check if closest
#        print idx[i], source['Mask'], centers[int(idx[i])]['Mask']
        if source['Mask'] != centers[int(idx[i])]['Mask']:
            idx_duplicates.append(i)
#            print "Removing source ", i
    print "Removing a total of", len(idx_duplicates), "sources over", len(sources)
    sources.remove_rows(idx_duplicates)

    print "Add unique Isl_id..."
    # add unique island idx based on combination of Mask name and blob idx
    last_isl_id = 0
    for mask in set(sources['Mask']):
        # first cycle adds 0 to the blob ids; later cycles add the highest Isl_id assigned so far (blob ids are > 0)
        incr = np.max(sources['Isl_id'][ np.where(sources['Mask'] == mask) ])
        sources['Isl_id'][ np.where(sources['Mask'] == mask) ] += last_isl_id
        last_isl_id += incr

    sources.remove_column('Mask')
    sources['Source_id'] = range(len(sources)) # set id after removal
    sources.write('spidx-cat-nodup.fits', format='fits', overwrite=True)
Example #41
File: cfhtw4.py Project: imcgreer/QLFz4
def match_target_tab(paperdir=None,obsdir=None):
	zcat = read_s82_catalog()
	if obsdir is None:
		obsdir = os.path.join(os.environ['HOME'],'research','LSST',
		                      'Stripe82','2014October')
	cans = Table.read(os.path.join(obsdir,'cfhtw4_candidates_v3.fits'))
	cans = cans[ (cans['dec']<1.25) & (cans['mags'][:,3]>21.5) & 
	             (cans['mags'][:,3]<22.5) ]
	c1 = SkyCoord(cans['ra'],cans['dec'],unit=(u.deg,u.deg))
	c2 = SkyCoord(zcat['ra'],zcat['dec'],unit=(u.deg,u.deg))
	idx, d2d, d3d = match_coordinates_sky(c1, c2)
	ii = np.where(d2d.arcsec < 3.0)[0]
	assert np.all(ii == np.arange(len(cans)) )
	cfhtw4 = hstack([cans,zcat['M1450','z','obsdate','name'][idx[ii]]])
	cfhtw4.write('cfhtw4qsos.fits',overwrite=True)
Example #42
def match_cats(RA, Dec, refRA, refDec):
    """Match a catalog of RA's and Dec's to a reference catalog.

       Return the indices of the reference catalog that mach each
       source in the input catalog, and the on-sky separation
       between each source's closest match in arcsec.
    """
    # create SkyCoord objects to use with the matching
    SCcat = SkyCoord(ra=RA, dec=Dec, frame='icrs', unit=(u.deg,u.deg))
    SCrefcat = SkyCoord(ra=refRA, dec=refDec, frame='icrs', unit=(u.deg,u.deg))

    # idx    - indices of matched sources in reference cat
    # sep2d  - on-sky angular separation between closest match
    # dist3d - 3D distance between closest matches
    idx,sep2d,dist3d = match_coordinates_sky(SCcat, SCrefcat)

    return (idx, sep2d)
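A minimal usage sketch for match_cats (the arrays are invented for illustration); a typical follow-up is to threshold sep2d to keep only real matches:

import numpy as np
import astropy.units as u

# Invented catalogs, in degrees.
ra, dec = np.array([150.1, 150.2]), np.array([2.2, 2.3])
ref_ra, ref_dec = np.array([150.1001, 150.3]), np.array([2.2001, 2.4])

idx, sep2d = match_cats(ra, dec, ref_ra, ref_dec)
good = sep2d < 1.0 * u.arcsec          # keep only close matches
print(idx[good], sep2d[good].arcsec)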
Example #43
def find_closest_source2(star,table,rad=3):
    star_c = SkyCoord(ra=star.RAd*u.deg,dec=star.DECd*u.deg,frame='icrs')

    table_c = []
    for row in table:
        table_c.append((row['ra'],row['dec']))
    table_ra,table_dec = zip(*table_c)
    table_c = SkyCoord(ra=table_ra*u.deg,dec=table_dec*u.deg,frame='icrs')

    idx,sep2d,dist3d = match_coordinates_sky(star_c,table_c,
                                             storekdtree=u'_kdtree_sky')

    if sep2d.is_within_bounds(upper=rad*u.arcsec):
        return table[int(idx)]
    else:
        return None
Example #44
    def get_nearest_idx(self, ra, dec, wcs_coords=True, mask=None, infov=True):
        """ get the index of the nearest (masked-)catalogue entry
        
        Parameters
        ----------
        ra, dec : [float (or array-of), float (or array-of)]
            Coordinates that should be matched

        wcs_coords: [bool] -optional-
            True if ra and dec are given in degrees. Set to False
            if you provided pixel coordinates.

        mask: [bool-array] -optional-
            Mask the catalogue so that only the selected entries are
            considered when finding the nearest match.
            ===
            **CAUTION** The idx will then index the **masked catalogue**
            ===
            (use np.argwhere(mask)[get_nearest_idx(..)[0]] to get the index
            in the unmasked catalogue)
            
        Returns
        -------
        idx, sep2d
        """
        # --------------
        # - Input 
        if not wcs_coords and not self.has_wcs():
            raise AttributeError("Needs a wcs solution to get pixel coordinates")
        if not wcs_coords:
            ra,dec = np.asarray(self.wcs.pix2world(ra,dec)).T
        if "__iter__" not in dir(ra):
            ra = [ra]
            dec = [dec]
        skytarget = coordinates.SkyCoord(ra*units.degree,dec*units.degree)
        
        # -------------
        # - Cat matching
        catsky = self.sky_radec if infov else self._sky_radec
        if mask is not None:
            catsky = catsky[mask]
            
        return coordinates.match_coordinates_sky(skytarget, catsky)[:2]
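To make the mask caveat above concrete, a small self-contained sketch (catalogue, target, and mask are all invented):

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord, match_coordinates_sky

cat = SkyCoord(ra=[10., 11., 12.] * u.deg, dec=[0., 0., 0.] * u.deg)
target = SkyCoord(ra=[11.9] * u.deg, dec=[0.] * u.deg)
mask = np.array([True, False, True])   # hypothetical selection

idx, sep2d, _ = match_coordinates_sky(target, cat[mask])
# idx indexes the *masked* catalogue; map it back to the full one:
full_idx = np.argwhere(mask)[idx][0]
print(full_idx)   # -> [2], i.e. entry 2 of the unmasked catalogue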
Example #45
File: sources.py Project: panoptes/PIAA
def get_catalog_match(point_sources, wcs, table='full_catalog', **kwargs):
    assert point_sources is not None

    # Get coords from detected point sources
    stars_coords = SkyCoord(
        ra=point_sources['ra'].values * u.deg,
        dec=point_sources['dec'].values * u.deg
    )

    # Lookup stars in catalog
    logger.debug('Getting catalog stars')
    catalog_stars = helpers.get_stars_from_footprint(
        wcs.calc_footprint(),
        cursor_only=False,
        table=table,
        **kwargs
    )
    if catalog_stars is None:
        logger.warning('No catalog matches, returning table without ids')
        return point_sources

    logger.debug(f'Found {len(catalog_stars)} catalog stars in the footprint')

    # Get coords for catalog stars
    catalog_coords = SkyCoord(
        ra=catalog_stars['ra'] * u.deg,
        dec=catalog_stars['dec'] * u.deg
    )

    # Do catalog matching
    logger.debug('Doing actual match')
    idx, d2d, d3d = match_coordinates_sky(stars_coords, catalog_coords)
    logger.debug('Got matched sources')

    # Get some properties from the catalog
    point_sources['id'] = catalog_stars[idx]['id']
    # point_sources['twomass'] = catalog_stars[idx]['twomass']
    point_sources['tmag'] = catalog_stars[idx]['tmag']
    point_sources['vmag'] = catalog_stars[idx]['vmag']
    point_sources['catalog_sep_arcsec'] = d2d.to(u.arcsec).value

    return point_sources
Example #46
File: chk_pairs.py Project: pyigm/igmspec
def chk_for_pairs(maindb, pair_sep=10*u.arcsec):
    """ Generate new IGM_IDs for an input DB

    Parameters
    ----------
    maindb : Table

    Return
    ------
    result : bool
      * True = pass
      * False = fail
    """
    c_main = SkyCoord(ra=maindb['RA'], dec=maindb['DEC'], unit='deg')
    # Find candidate dups
    idx, d2d, d3d = match_coordinates_sky(c_main, c_main, nthneighbor=2)
    cand_pairs = np.where(d2d < pair_sep)[0]
    # Finish
    print("There are {:d} potential pairs with separation theta<{:g}".format(len(cand_pairs)//2,pair_sep))
    return cand_pairs
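A self-contained sketch of the nthneighbor=2 trick used above (sources invented): matching a catalogue against itself with nthneighbor=1 would only return trivial zero-separation self-matches, so the second-nearest neighbour is used to find real pairs.

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord, match_coordinates_sky

# Three invented sources; the first two form a close pair.
c = SkyCoord(ra=[150.0, 150.001, 200.0] * u.deg,
             dec=[2.0, 2.0, -10.0] * u.deg)

idx, d2d, _ = match_coordinates_sky(c, c, nthneighbor=2)
pairs = np.where(d2d < 10 * u.arcsec)[0]
print(pairs)   # -> [0 1]: each member of the pair is flagged once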
Example #47
File: arflux.py Project: EdwardBetts/PYPIT
def load_extinction_data(slf, toler=1.*u.deg):
    """
    Find the best extinction file to use, based on longitude and latitude
    Loads it and returns a Table

    Parameters
    ----------
    slf : class
      Includes mosaic lon/lat
    toler : Angle
      Tolerance for matching detector to site (1 deg)

    Returns
    -------
    ext_file : Table
      astropy Table containing the 'wavelength', 'extinct' data for AM=1.
    """
    # Mosaic coord
    mosaic_coord = SkyCoord(slf._spect['mosaic']['longitude'],
        slf._spect['mosaic']['latitude'], frame='gcrs', unit=u.deg)
    # Read list
    extinct_path = slf._argflag['run']['pypitdir']+'/data/extinction/'
    extinct_summ = extinct_path+'README'
    extinct_files = Table.read(extinct_summ,comment='#',format='ascii')
    # Coords
    ext_coord = SkyCoord(extinct_files['Lon'], extinct_files['Lat'], frame='gcrs', unit=u.deg)
    # Match
    idx, d2d, d3d = coords.match_coordinates_sky(mosaic_coord, ext_coord, nthneighbor=1)
    if d2d < toler:
        extinct_file = extinct_files[int(idx)]['File']
        msgs.info("Using {:s} for extinction corrections.".format(extinct_file))
    else:
        msgs.warn("No file found for extinction corrections.  Applying none") 
        msgs.warn("You should generate a site-specific file")
        return None
    # Read
    extinct = Table.read(extinct_path+extinct_file,comment='#',format='ascii', names=('iwave','mag_ext'))
    wave = Column(np.array(extinct['iwave'])*u.AA, name='wave')
    extinct.add_column(wave)
    # Return
    return extinct[['wave','mag_ext']]
Example #48
File: mosaic.py Project: revoltek/scripts
    def calc_shift(self, ref_cat, separation=15):
        """
        Find a shift cross-matching source extracted from the image and a given catalog
        separation in arcsec
        """
        import bdsf
        from astropy.coordinates import match_coordinates_sky
        from astropy.coordinates import SkyCoord
        import astropy.units as u

        img_cat = self.imagefile+'.cat'
        if not os.path.exists(img_cat):
            bdsf_img = bdsf.process_image(self.imagefile, rms_box=(100,30), \
                thresh_pix=5, thresh_isl=3, atrous_do=False, \
                adaptive_rms_box=True, adaptive_thresh=100, rms_box_bright=(30,10), quiet=True)
            bdsf_img.write_catalog(outfile=img_cat, catalog_type='srl', format='fits', clobber=True)

        # read catalogues
        ref_t = Table.read(ref_cat)
        img_t = Table.read(img_cat)

        # cross match
        idx_match, sep, _ = match_coordinates_sky(SkyCoord(ref_t['RA'], ref_t['DEC']),\
                                                  SkyCoord(img_t['RA'], img_t['DEC']))
        idx_match_img = idx_match[sep<separation*u.arcsec]
        idx_match_ref = np.arange(0,len(ref_t))[sep<separation*u.arcsec]
        
        # find & apply shift
        if len(idx_match_img) == 0:
            logging.warning('No match found in TGSS.')
            return
        dra = ref_t['RA'][idx_match_ref] - img_t['RA'][idx_match_img]
        dra[ dra>180 ] -= 360
        dra[ dra<-180 ] += 360
        ddec = ref_t['DEC'][idx_match_ref] - img_t['DEC'][idx_match_img]
        self.apply_shift(np.mean(dra), np.mean(ddec))

        # clean up
        if not args.save:
            os.system('rm '+img_cat)
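The dra wrap-around above matters whenever the field straddles RA = 0; a standalone illustration with invented offsets:

import numpy as np

# Raw RA differences jump by ~360 deg across the 0h/24h boundary.
dra = np.array([0.002, 359.997])   # second pair straddles RA = 0
dra[dra > 180] -= 360
dra[dra < -180] += 360
print(dra)   # -> [ 0.002 -0.003]: both are now small, physical offsets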
Example #49
File: sdss.py Project: Waelthus/igmspec
def meta_for_build():
    """ Load the meta info
    JXP made DR7 -- Should add some aspect of the official list..
      Am worried about the coordinates some..

    Returns
    -------
    meta : Table
    """
    sdss_meta = grab_meta()
    # Cut down to unique sources
    coord = SkyCoord(ra=sdss_meta['RA'], dec=sdss_meta['DEC'], unit='deg')
    idx, d2d, d3d = match_coordinates_sky(coord, coord, nthneighbor=2)
    dups = np.where(d2d < 0.5*u.arcsec)[0]
    keep = np.array([True]*len(sdss_meta))
    for idup in dups:
        dcoord = SkyCoord(ra=sdss_meta['RA'][idup], dec=sdss_meta['DEC'][idup], unit='deg')
        sep = dcoord.separation(coord)
        isep = np.where(sep < 0.5*u.arcsec)[0]
        keep[isep] = False
        keep[np.min(isep)] = True  # Only keep 1
    sdss_meta = sdss_meta[keep]
    # Cut one more (pair of QSOs)
    bad_dup_c = SkyCoord(ra=193.96678*u.deg, dec=37.099741*u.deg)
    coord = SkyCoord(ra=sdss_meta['RA'], dec=sdss_meta['DEC'], unit='deg')
    sep = bad_dup_c.separation(coord)
    assert np.sum(sep < 2*u.arcsec) == 2
    badi = np.argmin(bad_dup_c.separation(coord))
    keep = np.array([True]*len(sdss_meta))
    keep[badi] = False
    sdss_meta = sdss_meta[keep]
    #
    nqso = len(sdss_meta)
    meta = Table()
    for key in ['RA', 'DEC', 'zem', 'sig_zem']:
        meta[key] = sdss_meta[key]
    meta['flag_zem'] = [str('SDSS')]*nqso
    meta['STYPE'] = [str('QSO')]*nqso
    # Return
    return meta
Example #50
def match_cats(RA, Dec, refRA, refDec):
    """Match a catalog of RA's and Dec's to a reference catalog
    (refRA and refDec).
    
    Return the indices of the reference catalog that match each
    source in the input catalog, and the on-sky
    separation between each source's closest match in arcsec"""
    # create SkyCoord objects to use with the matching
    SC_cat = SkyCoord(ra=RA, dec=Dec, frame="icrs", unit=(u.deg, u.deg))
    SC_refcat = SkyCoord(ra=refRA, dec=refDec, frame="icrs", unit=(u.deg, u.deg))

    #    idx - indices of matched sources in reference cat
    #  sep2d - on-sky angular separation between closest match
    # dist3d - 3D distance between closest matches
    idx, sep2d, dist3d = match_coordinates_sky(SC_cat, SC_refcat)
    # convert separation to arcsecs
    # separc = sep2d * u.arcsec
    # separc = sep2d.to(u.arcsec)
    # separc = sep2d.is_within_bounds(upper=threshold*u.arcsec)
    # print threshold
    # return (idx,separc)
    return (idx, sep2d)
Example #51
File: xmatch.py Project: chbrandt/booq
def nn_serial(A_coord, B_coord):
    """
    Nearest-Neighbor search

    Input:
     - A_coord : ~astropy.coordinates.SkyCoord
            reference catalog (catalog "A")
     - B_coord : ~astropy.coordinates.SkyCoord
            matching catalog (catalog "B")

    Output:
     - tuple of (~numpy.ndarray, ~astropy.units.Quantity):
            array of indices into 'B_coord' for each entry of 'A_coord',
            and the corresponding pair distances
    """

    from astropy.coordinates import SkyCoord
    assert isinstance(A_coord,SkyCoord), "Was expecting a ~astropy.coordinates.SkyCoord instance."
    assert isinstance(B_coord,SkyCoord), "Was expecting a ~astropy.coordinates.SkyCoord instance."

    logging.info("Searching among {0} neighbors, {1} reference objects.".format(len(B_coord),len(A_coord)))
    _prau = A_coord.ra.unit
    _pdecu = A_coord.dec.unit
    _nrau = B_coord.ra.unit
    _ndecu = B_coord.dec.unit
    logging.debug("Unit of coordinates being matched: ({0},{1}) and ({2},{3})".format(_prau,_pdecu,_nrau,_ndecu))

    from astropy.coordinates import match_coordinates_sky
    match_A_nn_idx, match_A_nn_sep, _d3d = match_coordinates_sky(A_coord,B_coord)

    from booq.utils import stats
    _sts = stats.basic(match_A_nn_sep.value)
    logging.info("Basic stats of distances between matchings: {}".format(_sts))

    assert len(match_A_nn_idx) == len(A_coord)
    assert match_A_nn_idx.max() < len(B_coord)

    return (match_A_nn_idx, match_A_nn_sep)
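A minimal call sketch for nn_serial, assuming the booq package (whose stats helper it imports) is available; the coordinates are invented:

import astropy.units as u
from astropy.coordinates import SkyCoord

# Invented catalogues: "A" is the reference, "B" holds the candidates.
A = SkyCoord(ra=[10.0, 20.0] * u.deg, dec=[0.0, 5.0] * u.deg)
B = SkyCoord(ra=[10.0005, 19.999, 50.0] * u.deg,
             dec=[0.0, 5.001, 5.0] * u.deg)

idx, sep = nn_serial(A, B)       # one match in B per entry of A
print(idx, sep.to(u.arcsec))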
Example #52
File: hst_qso.py Project: pyigm/igmspec
def grab_meta():
    """ Grab HSTQSO meta Table
    Returns
    -------
    hstqso_meta : Table
    """
    summ_file = os.getenv('RAW_IGMSPEC')+'/HSTQSO/hstqso.lst'
    hstqso_meta = Table.read(summ_file, format='ascii')
    spec_files = [str(ii) for ii in hstqso_meta['SPEC_FILE'].data]
    nspec = len(hstqso_meta)
    # RA/DEC
    radec_file = os.getenv('RAW_IGMSPEC')+'/HSTQSO/all_qso_table.txt'
    radec = Table.read(radec_file, format='ascii')
    # DATE-OBS
    date_files = glob.glob(os.getenv('RAW_IGMSPEC')+'/HSTQSO/date_obs*')
    for ss,date_file in enumerate(date_files):
        if ss == 0:
            tab_date = Table.read(date_file, format='ascii')
        else:
            tab_date = vstack([tab_date, Table.read(date_file, format='ascii')])
    # RA/DEC, DATE
    hstqso_meta.add_column(Column(['2000-01-01']*nspec, name='DATE-OBS'))
    for jj,row in enumerate(hstqso_meta):
        if row['INST'] == 'COS':
            spec_files[jj] = str(row['QSO_ALT_NAME']+'_hsla.fits')
            continue
        # DATE
        spec = row['SPEC_FILE'].split('.')[0]
        mt1 = np.where(tab_date['SPEC'] == spec)[0]
        if len(mt1) == 0:
            print("NO DATE MATCH for {:s}!".format(spec))
            pdb.set_trace()
        else:
            mt1 = mt1[0] # TAKING THE FIRST ONE
        joe_date = tab_date['DATE-OBS'][mt1].split('-')
        hstqso_meta[jj]['DATE-OBS'] = '{:s}-{:02d}-{:02d}'.format(joe_date[0], int(joe_date[1]), int(joe_date[2]))
        if int(joe_date[1]) > 12:
            pdb.set_trace()
        # RA/DEC
        if row['INST'] != 'FOS':
            continue
        mt = np.where(radec['File_ID'] == row['QSO_ALT_NAME'])[0]
        if len(mt) == 0:
            mt = np.where(radec['File_ID'] == row['QSO_NAME'])[0]
            if len(mt) == 0:
                print("NO RA/DEC MATCH!")
                pdb.set_trace()
            else:
                mt = mt[0]
        else:
            mt = mt[0]
        hstqso_meta[jj]['RA'] = radec['RA'][mt]
        hstqso_meta[jj]['DEC'] = radec['DEC'][mt]
    # Deal with Dups (mainly bad FOS coords)
    coord = SkyCoord(ra=hstqso_meta['RA'], dec=hstqso_meta['DEC'], unit='deg')
    idx, d2d, d3d = match_coordinates_sky(coord, coord, nthneighbor=2)
    dups = np.where(d2d < 2.0*u.arcsec)[0]  # Closest lens is ~2"
    flag_dup = np.array([False]*len(hstqso_meta))
    for idup in dups:
        if flag_dup[idup]:
            continue
        dcoord = SkyCoord(ra=hstqso_meta['RA'][idup], dec=hstqso_meta['DEC'][idup], unit='deg')
        sep = dcoord.separation(coord)
        isep = np.where(sep < 2.0*u.arcsec)[0]
        # Search for COS first
        icos = np.where(hstqso_meta['INST'][isep] == 'COS')[0]
        if len(icos) > 0:
            hstqso_meta['RA'][isep] = hstqso_meta['RA'][isep[icos[0]]]
            hstqso_meta['DEC'][isep] = hstqso_meta['DEC'][isep[icos[0]]]
            flag_dup[isep] = True
        else: # STIS
            istis = np.where(hstqso_meta['INST'][isep] == 'STIS')[0]
            if len(istis) > 0:
                hstqso_meta['RA'][isep] = hstqso_meta['RA'][isep[istis[0]]]
                hstqso_meta['DEC'][isep] = hstqso_meta['DEC'][isep[istis[0]]]
                flag_dup[isep] = True
            else: # FOS only -- taking first value
                hstqso_meta['RA'][isep] = hstqso_meta['RA'][isep[0]]
                hstqso_meta['DEC'][isep] = hstqso_meta['DEC'][isep[0]]
    # REPLACE
    hstqso_meta.rename_column('SPEC_FILE', 'ORIG_SPEC_FILE')
    hstqso_meta['SPEC_FILE'] = spec_files
    # RENAME
    hstqso_meta.rename_column('GRATE', 'DISPERSER')
    hstqso_meta.rename_column('QSO_ZEM', 'zem_GROUP')
    hstqso_meta.rename_column('INST', 'INSTR')
    hstqso_meta['STYPE'] = str('QSO')
    hstqso_meta.rename_column('RA', 'RA_GROUP')
    hstqso_meta.rename_column('DEC', 'DEC_GROUP')
    # ADD
    hstqso_meta.add_column(Column(['HST']*nspec, name='TELESCOPE'))
    hstqso_meta['sig_zem'] = 0.
    hstqso_meta['flag_zem'] = str('UNKWN')
    # Check
    assert chk_meta(hstqso_meta, chk_cat_only=True)
    # Return
    return hstqso_meta
Example #53
File: hst_z2.py Project: Waelthus/igmspec
def hdf5_adddata(hdf, IDs, sname, debug=False, chk_meta_only=False,
                 mk_test_file=False):
    """ Append HST_z2 data to the h5 file

    Parameters
    ----------
    hdf : hdf5 pointer
    IDs : ndarray
      int array of IGM_ID values in mainDB
    sname : str
      Survey name
    chk_meta_only : bool, optional
      Only check meta file;  will not write
    mk_test_file : bool, optional
      Generate the debug test file for Travis??

    Returns
    -------

    """
    # Add Survey
    print("Adding {:s} survey to DB".format(sname))
    hstz2_grp = hdf.create_group(sname)
    # Load up
    meta = grab_meta()
    bmeta = meta_for_build()
    # Checks
    if sname != 'HST_z2':
        raise IOError("Not expecting this survey..")
    if np.sum(IDs < 0) > 0:
        raise ValueError("Bad ID values")
    # Open Meta tables
    if len(bmeta) != len(IDs):
        raise ValueError("Wrong sized table..")

    # Generate ID array from RA/DEC
    c_cut = SkyCoord(ra=bmeta['RA'], dec=bmeta['DEC'], unit='deg')
    c_all = SkyCoord(ra=meta['RA'], dec=meta['DEC'], unit='deg')
    # Find new sources
    idx, d2d, d3d = match_coordinates_sky(c_all, c_cut, nthneighbor=1)
    if np.sum(d2d > 0.1*u.arcsec):
        raise ValueError("Bad matches in HST_z2")
    meta_IDs = IDs[idx]

    # Loop to build the full survey catalog
    meta.add_column(Column(meta_IDs, name='IGM_ID'))

    # Build spectra (and parse for meta)
    nspec = len(meta)
    max_npix = 300  # Just needs to be large enough
    data = np.ma.empty((1,),
                       dtype=[(str('wave'), 'float64', (max_npix)),
                              (str('flux'), 'float32', (max_npix)),
                              (str('sig'),  'float32', (max_npix)),
                              #(str('co'),   'float32', (max_npix)),
                             ])
    # Init
    spec_set = hdf[sname].create_dataset('spec', data=data, chunks=True,
                                         maxshape=(None,), compression='gzip')
    spec_set.resize((nspec,))
    Rlist = []
    wvminlist = []
    wvmaxlist = []
    gratinglist = []
    npixlist = []
    speclist = []
    # Loop
    #path = os.getenv('RAW_IGMSPEC')+'/KODIAQ_data_20150421/'
    path = os.getenv('RAW_IGMSPEC')+'/HST_z2/'
    maxpix = 0
    for jj,row in enumerate(meta):
        # Generate full file
        if row['INSTR'] == 'ACS':
            full_file = path+row['qso']+'.fits.gz'
        elif row['INSTR'] == 'WFC3':
            coord = ltu.radec_to_coord((row['RA'],row['DEC']))
            full_file = path+'/J{:s}{:s}_wfc3.fits.gz'.format(coord.ra.to_string(unit=u.hour,sep='',precision=2,pad=True),
                                               coord.dec.to_string(sep='',pad=True,alwayssign=True,precision=1))
        # Extract
        print("HST_z2: Reading {:s}".format(full_file))
        hduf = fits.open(full_file)
        head = hduf[0].header
        spec = lsio.readspec(full_file)
        # Parse name
        fname = full_file.split('/')[-1]
        # npix
        npix = spec.npix
        if npix > max_npix:
            raise ValueError("Not enough pixels in the data... ({:d})".format(npix))
        else:
            maxpix = max(npix,maxpix)
        # Some fiddling about
        for key in ['wave','flux','sig']:
            data[key] = 0.  # Important to init (for compression too)
        data['flux'][0][:npix] = spec.flux.value
        data['sig'][0][:npix] = spec.sig.value
        data['wave'][0][:npix] = spec.wavelength.value
        # Meta
        speclist.append(str(fname))
        wvminlist.append(np.min(data['wave'][0][:npix]))
        wvmaxlist.append(np.max(data['wave'][0][:npix]))
        npixlist.append(npix)
        if chk_meta_only:
            continue
        # Only way to set the dataset correctly
        spec_set[jj] = data

    #
    print("Max pix = {:d}".format(maxpix))
    # Add columns
    meta.add_column(Column([2000.]*nspec, name='EPOCH'))
    meta.add_column(Column(speclist, name='SPEC_FILE'))
    meta.add_column(Column(npixlist, name='NPIX'))
    meta.add_column(Column(wvminlist, name='WV_MIN'))
    meta.add_column(Column(wvmaxlist, name='WV_MAX'))
    meta.add_column(Column(np.arange(nspec,dtype=int),name='SURVEY_ID'))

    # Add HST_z2 meta to hdf5
    if iiu.chk_meta(meta):
        if chk_meta_only:
            pdb.set_trace()
        hdf[sname]['meta'] = meta
    else:
        raise ValueError("meta file failed")
    # References
    refs = [dict(url='http://adsabs.harvard.edu/abs/2011ApJS..195...16O',
                 bib='omeara11')
            ]
    jrefs = ltu.jsonify(refs)
    hdf[sname]['meta'].attrs['Refs'] = json.dumps(jrefs)
    #
    return
Example #54
File: kodiaq.py Project: Waelthus/igmspec
def hdf5_adddata(hdf, IDs, sname, debug=False, chk_meta_only=False):
    """ Append KODIAQ data to the h5 file

    Parameters
    ----------
    hdf : hdf5 pointer
    IDs : ndarray
      int array of IGM_ID values in mainDB
    sname : str
      Survey name
    chk_meta_only : bool, optional
      Only check meta file;  will not write

    Returns
    -------

    """
    # Add Survey
    print("Adding {:s} survey to DB".format(sname))
    kodiaq_grp = hdf.create_group(sname)
    # Load up
    meta = grab_meta()
    bmeta = meta_for_build()
    # Checks
    if sname != 'KODIAQ_DR1':
        raise IOError("Not expecting this survey..")
    if np.sum(IDs < 0) > 0:
        raise ValueError("Bad ID values")
    # Open Meta tables
    if len(bmeta) != len(IDs):
        raise ValueError("Wrong sized table..")

    # Generate ID array from RA/DEC
    c_cut = SkyCoord(ra=bmeta['RA'], dec=bmeta['DEC'], unit='deg')
    c_all = SkyCoord(ra=meta['RA'], dec=meta['DEC'], unit='deg')
    # Find new sources
    idx, d2d, d3d = match_coordinates_sky(c_all, c_cut, nthneighbor=1)
    if np.sum(d2d > 0.1*u.arcsec):
        raise ValueError("Bad matches in KODIAQ")
    meta_IDs = IDs[idx]

    # Loop to build the full survey catalog
    meta.add_column(Column(meta_IDs, name='IGM_ID'))

    # Build spectra (and parse for meta)
    nspec = len(meta)
    max_npix = 200000  # Just needs to be large enough
    data = np.ma.empty((1,),
                       dtype=[(str('wave'), 'float64', (max_npix)),
                              (str('flux'), 'float32', (max_npix)),
                              (str('sig'),  'float32', (max_npix)),
                              #(str('co'),   'float32', (max_npix)),
                             ])
    # Init
    spec_set = hdf[sname].create_dataset('spec', data=data, chunks=True,
                                         maxshape=(None,), compression='gzip')
    spec_set.resize((nspec,))
    Rlist = []
    wvminlist = []
    wvmaxlist = []
    gratinglist = []
    npixlist = []
    speclist = []
    # Loop
    #path = os.getenv('RAW_IGMSPEC')+'/KODIAQ_data_20150421/'
    path = os.getenv('RAW_IGMSPEC')+'/KODIAQ_data_20160618/'  # BZERO FIXED
    maxpix = 0
    for jj,row in enumerate(meta):
        # Generate full file
        full_file = path+row['qso']+'/'+row['pi_date']+'/'+row['spec_prefix']+'_f.fits'
        # Extract
        print("KODIAQ: Reading {:s}".format(full_file))
        hduf = fits.open(full_file)
        head = hduf[0].header
        spec = lsio.readspec(full_file)
        # Parse name
        fname = full_file.split('/')[-1]
        # npix
        npix = spec.npix
        if npix > max_npix:
            raise ValueError("Not enough pixels in the data... ({:d})".format(npix))
        else:
            maxpix = max(npix,maxpix)
        # Some fiddling about
        for key in ['wave','flux','sig']:
            data[key] = 0.  # Important to init (for compression too)
        data['flux'][0][:npix] = spec.flux.value
        data['sig'][0][:npix] = spec.sig.value
        data['wave'][0][:npix] = spec.wavelength.value
        # Meta
        speclist.append(str(fname))
        wvminlist.append(np.min(data['wave'][0][:npix]))
        wvmaxlist.append(np.max(data['wave'][0][:npix]))
        if head['XDISPERS'].strip() == 'UV':
            gratinglist.append('BLUE')
        else:
            gratinglist.append('RED')
        npixlist.append(npix)
        try:
            Rlist.append(iiu.set_resolution(head))
        except ValueError:
            pdb.set_trace()
        # Only way to set the dataset correctly
        if chk_meta_only:
            continue
        spec_set[jj] = data

    #
    print("Max pix = {:d}".format(maxpix))
    # Add columns
    meta.add_column(Column([2000.]*nspec, name='EPOCH'))
    meta.add_column(Column(speclist, name='SPEC_FILE'))
    meta.add_column(Column(npixlist, name='NPIX'))
    meta.add_column(Column(wvminlist, name='WV_MIN'))
    meta.add_column(Column(wvmaxlist, name='WV_MAX'))
    meta.add_column(Column(Rlist, name='R'))
    meta.add_column(Column(gratinglist, name='GRATING'))
    meta.add_column(Column(np.arange(nspec,dtype=int),name='SURVEY_ID'))

    # Add KODIAQ meta to hdf5
    if iiu.chk_meta(meta):
        if chk_meta_only:
            pdb.set_trace()
        hdf[sname]['meta'] = meta
    else:
        raise ValueError("meta file failed")
    # References
    refs = [dict(url='http://adsabs.harvard.edu/abs/2015AJ....150..111O',
                 bib='kodiaq')
            ]
    jrefs = ltu.jsonify(refs)
    hdf[sname]['meta'].attrs['Refs'] = json.dumps(jrefs)
    #
    return
Example #55
File: arflux.py Project: EdwardBetts/PYPIT
def find_standard_file(argflag, radec, toler=20.*u.arcmin, check=False):
    """
    Find a match for the input coordinates among the archived
    standard star files (hopefully).  Priority is by order of search.

    Parameters
    ----------
    argflag : dict
      Arguments and flags used for reduction
    radec : tuple
      ra, dec in string format ('05:06:36.6','52:52:01.0')
    toler : Angle
      Tolerance on matching archived standards to input
    check : bool
      If True, the routine will only check to see if a
      standard star exists within the input ra, dec, and toler range.

    Returns
    -------
    sdict : dict
      'file': str -- Filename
      'fmt': int -- Format flag
           1=Calspec style FITS binary table
      'name': str -- Star name
      'ra': str -- RA(2000)
      'dec': str -- DEC(2000)
    """
    # Priority
    std_sets = [load_calspec]
    std_file_fmt = [1]  # 1=Calspec style FITS binary table

    # SkyCoord
    obj_coord = SkyCoord(radec[0], radec[1], unit=(u.hourangle, u.deg))

    # Loop on standard sets
    closest = dict(sep=999*u.deg)
    for qq,sset in enumerate(std_sets):
        # Stars
        path, star_tbl = sset(argflag)
        star_coords = SkyCoord(star_tbl['RA_2000'], star_tbl['DEC_2000'], 
            unit=(u.hourangle, u.deg))
        # Match
        idx, d2d, d3d = coords.match_coordinates_sky(obj_coord, star_coords, nthneighbor=1)
        if d2d < toler:
            if check: return True
            else:
                # Generate a dict
                std_dict = dict(file=path+star_tbl[int(idx)]['File'],
                    name=star_tbl[int(idx)]['Name'], fmt=std_file_fmt[qq],
                    ra=star_tbl[int(idx)]['RA_2000'],
                    dec=star_tbl[int(idx)]['DEC_2000'])
                # Return
                msgs.info("Using standard star {:s}".format(std_dict['name']))
                return std_dict
        else: # Save the closest standard, if it beats the current best
            imind2d = np.argmin(d2d)
            mind2d = d2d[imind2d]
            if mind2d < closest['sep']:
                closest['sep'] = mind2d
                closest.update(dict(name=star_tbl[int(idx)]['Name'], 
                    ra=star_tbl[int(idx)]['RA_2000'], 
                    dec=star_tbl[int(idx)]['DEC_2000']))
    # Standard star not found
    if check: return False
    msgs.warn("No standard star was found within a tolerance of {:g}".format(toler))
    msgs.info("Closest standard was {:s} at separation {:g}".format(closest['name'],closest['sep'].to('arcmin')))
    msgs.warn("Flux calibration will not be performed")
    return None
Example #56
def cross_match_ffi(reference_sources, input_sources):
    # TODO sort out the matchings from the tuple, do something with the
    # duplicates
    matching = match_coordinates_sky(reference_sources, input_sources)

    return matching
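One hedged way to act on the TODO above: unpack the returned tuple and flag input sources claimed by more than one reference source (catalogues invented):

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord, match_coordinates_sky

ref = SkyCoord(ra=[10.0, 10.001, 40.0] * u.deg, dec=[0., 0., 0.] * u.deg)
inp = SkyCoord(ra=[10.0005, 40.0] * u.deg, dec=[0., 0.] * u.deg)

idx, d2d, _ = match_coordinates_sky(ref, inp)
uniq, counts = np.unique(idx, return_counts=True)
duplicated = uniq[counts > 1]
print(duplicated)   # -> [0]: input source 0 is matched by two references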
Example #57
File: xq100.py Project: pyigm/igmspec
def grab_meta():
    """ Grab XQ-100 meta Table

    Returns
    -------
    xq100_meta : Table
    """
    from specdb.specdb import IgmSpec
    igmsp = IgmSpec()
    #
    xq100_table = Table.read(os.getenv('RAW_IGMSPEC')+'/XQ-100/XQ100_v1_2.fits.gz')
    nqso = len(xq100_table)
    # ESO meta
    eso_tbl = Table.read(os.getenv('RAW_IGMSPEC')+'/XQ-100/metadata_eso_XQ100.csv', format='ascii.csv')
    ar_files = eso_tbl['ARCFILE'].data
    # Spectral files
    spec_files = glob.glob(os.getenv('RAW_IGMSPEC')+'/XQ-100/ADP.*')
    # Dummy column
    xq100_coords = SkyCoord(ra=xq100_table['RA'], dec=xq100_table['DEC'], unit='deg')
    matches = []
    sv_spec_files = []
    sv_orig_files = []
    sv_rescale_files = []
    for spec_file in spec_files:
        if 'ADP.2016-07-15T08:22:40.682.fits' in spec_file:
            print("XQ-100: Skipping summary file")
            continue
        # ESO file
        ssfile = spec_file[spec_file.rfind('/')+1:-5]
        eso_mt = np.where(ar_files == ssfile)[0]
        try:
            ofile = eso_tbl['ORIGFILE'][eso_mt][0]
        except IndexError:
            print("XQ-100: File {:s} not really in XQ100!".format(spec_file))
            continue
        if ('_1' in ofile) or ('_2' in ofile) or ('_3' in ofile) or ('_4' in ofile):
            print("XQ-100: Skipping additional file: {:s}".format(ofile))
            continue
        # Match
        hdu = fits.open(spec_file)
        head0 = hdu[0].header
        if head0['DISPELEM'] == 'UVB,VIS,NIR':
            print("XQ-100: Skipping merged spectrum file")
            if 'rescale' not in ofile:
                print('no rescale')
                pdb.set_trace()
            continue
        try:
            coord = SkyCoord(ra=head0['RA'], dec=head0['DEC'], unit='deg')
        except KeyError:
            pdb.set_trace()
        sep = coord.separation(xq100_coords)
        imt = np.argmin(sep)
        if sep[imt] > 0.1*u.arcsec:
            pdb.set_trace()
            raise ValueError("Bad offset")
        # Save
        matches.append(imt)
        sv_spec_files.append(spec_file)
        sv_orig_files.append(ofile)
    # Finish up
    xq100_meta = xq100_table[np.array(matches)]
    nspec = len(xq100_meta)
    # Add spec_files
    xq100_meta['SPEC_FILE'] = sv_spec_files
    xq100_meta['ORIG_FILE'] = sv_orig_files
    # Add zem
    xq100_meta['zem_GROUP'] = xq100_meta['Z_QSO']
    xq100_meta['sig_zem'] = xq100_meta['ERR_ZQSO']
    xq100_meta['flag_zem'] = [str('XQ-100')]*nspec
    # Rename
    xq100_meta.rename_column('RA','RA_GROUP')
    xq100_meta.rename_column('DEC','DEC_GROUP')
    # Match to Myers
    myers = Table(igmsp.hdf['quasars'].value)
    myers_coord = SkyCoord(ra=myers['RA'], dec=myers['DEC'], unit='deg')
    xq100_coord = SkyCoord(ra=xq100_meta['RA_GROUP'], dec=xq100_meta['DEC_GROUP'], unit='deg')
    idx, d2d, _ = match_coordinates_sky(xq100_coord, myers_coord, nthneighbor=1)
    xq100_meta['RA_GROUP'] = myers_coord.ra.value[idx]
    xq100_meta['DEC_GROUP'] = myers_coord.dec.value[idx]
    # One bad one (Taking RA/DEC from Simbad)
    bad_c = d2d.to('arcsec') > 20*u.arcsec
    xq100_meta['RA_GROUP'][bad_c] = 215.2823
    xq100_meta['DEC_GROUP'][bad_c] = -6.73232
    # DATE-OBS
    meanmjd = []
    for row in xq100_meta:
        gdm = row['MJD_OBS'] > 0.
        meanmjd.append(np.mean(row['MJD_OBS'][gdm]))
    t = Time(meanmjd, format='mjd', out_subfmt='date')  # Fixes to YYYY-MM-DD
    xq100_meta.add_column(Column(t.iso, name='DATE-OBS'))
    #
    xq100_meta.add_column(Column([2000.]*nspec, name='EPOCH'))
    xq100_meta['STYPE'] = str('QSO')
    # Sort
    xq100_meta.sort('RA_GROUP')
    # Check
    assert chk_meta(xq100_meta, chk_cat_only=True)
    #
    return xq100_meta
Example #58
File: sdss.py Project: Waelthus/igmspec
def hdf5_adddata(hdf, IDs, sname, debug=False, chk_meta_only=False):
    """ Add SDSS data to the DB

    Parameters
    ----------
    hdf : hdf5 pointer
    IDs : ndarray
      int array of IGM_ID values in mainDB
    sname : str
      Survey name
    chk_meta_only : bool, optional
      Only check meta file;  will not write

    Returns
    -------

    """
    # Add Survey
    print("Adding {:s} survey to DB".format(sname))
    sdss_grp = hdf.create_group(sname)
    # Load up
    meta = grab_meta()
    bmeta = meta_for_build()
    # Checks
    if sname != 'SDSS_DR7':
        raise IOError("Not expecting this survey..")
    if np.sum(IDs < 0) > 0:
        raise ValueError("Bad ID values")
    # Open Meta tables
    if len(bmeta) != len(IDs):
        raise ValueError("Wrong sized table..")

    # Generate ID array from RA/DEC
    c_cut = SkyCoord(ra=bmeta['RA'], dec=bmeta['DEC'], unit='deg')
    c_all = SkyCoord(ra=meta['RA'], dec=meta['DEC'], unit='deg')
    # Find new sources
    idx, d2d, d3d = match_coordinates_sky(c_all, c_cut, nthneighbor=1)
    if np.sum(d2d > 1.2*u.arcsec):  # There is one system offset by 1.1"
        raise ValueError("Bad matches in SDSS")
    meta_IDs = IDs[idx]
    meta.add_column(Column(meta_IDs, name='IGM_ID'))

    # Add zem

    # Build spectra (and parse for meta)
    nspec = len(meta)
    max_npix = 4000  # Just needs to be large enough
    data = np.ma.empty((1,),
                       dtype=[(str('wave'), 'float64', (max_npix)),
                              (str('flux'), 'float32', (max_npix)),
                              (str('sig'),  'float32', (max_npix)),
                              #(str('co'),   'float32', (max_npix)),
                             ])
    # Init
    spec_set = hdf[sname].create_dataset('spec', data=data, chunks=True,
                                         maxshape=(None,), compression='gzip')
    spec_set.resize((nspec,))
    wvminlist = []
    wvmaxlist = []
    npixlist = []
    speclist = []
    # Loop
    maxpix = 0
    for jj,row in enumerate(meta):
        full_file = get_specfil(row)
        # Extract
        print("SDSS: Reading {:s}".format(full_file))
        # Parse name
        fname = full_file.split('/')[-1]
        if debug:
            if jj > 500:
                speclist.append(str(fname))
                if not os.path.isfile(full_file):
                    raise IOError("SDSS file {:s} does not exist".format(full_file))
                wvminlist.append(np.min(data['wave'][0][:npix]))
                wvmaxlist.append(np.max(data['wave'][0][:npix]))
                npixlist.append(npix)
                continue
        # Generate full file
        spec = lsio.readspec(full_file)
        # npix
        npix = spec.npix
        if npix > max_npix:
            raise ValueError("Not enough pixels in the data... ({:d})".format(npix))
        else:
            maxpix = max(npix,maxpix)
        # Some fiddling about
        for key in ['wave','flux','sig']:
            data[key] = 0.  # Important to init (for compression too)
        data['flux'][0][:npix] = spec.flux.value
        data['sig'][0][:npix] = spec.sig.value
        data['wave'][0][:npix] = spec.wavelength.value
        # Meta
        speclist.append(str(fname))
        wvminlist.append(np.min(data['wave'][0][:npix]))
        wvmaxlist.append(np.max(data['wave'][0][:npix]))
        npixlist.append(npix)
        # Only way to set the dataset correctly
        if chk_meta_only:
            continue
        spec_set[jj] = data

    #
    print("Max pix = {:d}".format(maxpix))
    # Add columns
    meta.add_column(Column(speclist, name='SPEC_FILE'))
    meta.add_column(Column(npixlist, name='NPIX'))
    meta.add_column(Column(wvminlist, name='WV_MIN'))
    meta.add_column(Column(wvmaxlist, name='WV_MAX'))
    meta.add_column(Column(np.arange(nspec,dtype=int),name='SURVEY_ID'))

    # Add SDSS meta to hdf5
    if iiu.chk_meta(meta):
        if chk_meta_only:
            pdb.set_trace()
        hdf[sname]['meta'] = meta
    else:
        raise ValueError("meta file failed")
    # References
    refs = [dict(url='http://adsabs.harvard.edu/abs/2010AJ....139.2360S',
                 bib='sdss_qso_dr7'),
            ]
    jrefs = ltu.jsonify(refs)
    hdf[sname]['meta'].attrs['Refs'] = json.dumps(jrefs)
    #
    return