Example 1
def Gaia_adql(query, upload=None):
    """Run query on gaia archive and return results as a pandas dataframe.
    If upload is a pandas dataframe then this is uploaded to the archive
    and available as the table tap_upload.uploadedtable"""

    # We take and return pandas DataFrames but use astropy tables to make a votable of coordinates to upload

    with tempfile.NamedTemporaryFile(suffix='.xml',
                                     mode='w') as file_to_upload:
        if upload is not None:
            table = Table.from_pandas(upload)
            table.write(file_to_upload, format='votable')
            file_to_upload.flush()  # make sure the VOTable is on disk before the upload

        # It seems Gaia DR2 source table produces many votable warnings that aren't important
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')
            with suppress_stdout():
                if upload is not None:
                    job = Gaia.launch_job_async(
                        query=query,
                        upload_resource=file_to_upload.name,
                        upload_table_name="uploadedtable")
                else:
                    job = Gaia.launch_job_async(query=query)

        result = job.get_results()
        result_df = result.to_pandas()

    return result_df
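
A minimal usage sketch for the helper above (the source_id values and ADQL text are purely illustrative, and the snippet's own imports — tempfile, warnings, astropy.table.Table, astroquery.gaia.Gaia and the suppress_stdout helper — are assumed to be in scope):

import pandas as pd

# Hypothetical DR2 source_id values, only for illustration.
ids = pd.DataFrame({'source_id': [4295806720, 38655544960]})
adql = ("SELECT u.source_id, g.ra, g.dec, g.phot_g_mean_mag "
        "FROM tap_upload.uploadedtable AS u "
        "JOIN gaiadr2.gaia_source AS g ON g.source_id = u.source_id")
stars_df = Gaia_adql(adql, upload=ids)  # pandas DataFrame with ra, dec, G mag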
Example 2
    def add_gaia(self):
        """Retrieve and add Gaia DR2 or DR1 parameters."""

        if not hasattr(self, 'IDS'):
            raise RuntimeError('RUN set_simbad_fields() first!')
        try:
            self.simbad_identifiers = self.IDS.decode().split('|')
        except AttributeError:
            self.simbad_identifiers = self.IDS.split('|')

        try:
            gaia_dr2_id = [id for id in self.simbad_identifiers if 'Gaia DR2 ' in id][0].replace("'", "")
            # print(gaia_dr2_id)
            gacs_query = "SELECT * FROM gaiadr2.gaia_source WHERE source_id={}".format(gaia_dr2_id.split(' ')[-1])
            job = Gaia.launch_job_async(gacs_query)
            self.gaiadr2_table = job.get_results()
        except IndexError:
            try:
                gaia_dr1_id = [id for id in self.simbad_identifiers if 'Gaia DR1 ' in id][0]
                # return_gacs_query_as_table(query_string, output_file_seed, overwrite=False, verbose=True):
                gacs_query = "SELECT * FROM gaiadr1.gaia_source WHERE source_id={}".format(gaia_dr1_id.split(' ')[-1])
                job = Gaia.launch_job_async(gacs_query)
                self.gaiadr1_table = job.get_results()
            except IndexError:
                print('No Gaia identifier listed in Simbad!')
Example 3
def getGaiaBox(ra, dec, width, height=None):
    """
    Acquire table of all Gaia stars in a box centered at ra, dec
    and having half-side-length equal to radius.
    All three arguments should be in degrees.
    Returns an astropy Table object.
    """

    if height is None:
        height = width

    query ="SELECT s.source_id, s.ra, s.ra_error, s.dec, s.dec_error, s.parallax, s.parallax_error, " + \
               "s.pmra, s.pmra_error, s.pmdec, s.pmdec_error, " + \
               "s.ra_dec_corr, s.ra_parallax_corr, s.ra_pmra_corr, s.ra_pmdec_corr, " + \
               "s.dec_parallax_corr, s.dec_pmra_corr, s.dec_pmdec_corr, " + \
               "s.parallax_pmra_corr, s.parallax_pmdec_corr, " + \
               "s.pmra_pmdec_corr, " + \
               "s.phot_g_mean_mag as Gmag, s.phot_g_mean_flux_over_error as g_sn, " + \
               "s.phot_bp_mean_mag as bpmag, s.phot_rp_mean_mag as rpmag " + \
               "FROM gaiadr2.gaia_source s " + \
               "INNER JOIN gaiadr2.ruwe r on s.source_id=r.source_id " + \
               "WHERE s.astrometric_params_solved=31 " + \
               " AND r.ruwe<1.4 AND s.visibility_periods_used>8 AND " + \
               "CONTAINS(POINT('ICRS',s.ra,s.dec),BOX('ICRS',{:f},{:f},{:f},{:f}))=1;".format(\
                                                                ra, dec, \
                                                                width/np.cos(dec*np.pi/180.), height)
    job = Gaia.launch_job_async(query, dump_to_file=False)
    #print("Job launched")
    #print(job)
    return job.get_data()
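
A quick call sketch for getGaiaBox (pointing and box size are illustrative; numpy and astroquery.gaia.Gaia are assumed to be imported as in the original module):

# Well-measured Gaia DR2 sources in a 0.5 x 0.5 degree box around an arbitrary pointing.
box_stars = getGaiaBox(150.1, 2.2, 0.5)
print(len(box_stars))  # astropy Table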
Example 4
def GaiaTable(gaia_ra, gaia_dec, starname):
    global radsec
    global info
    global limmag

    fov = radsec / 3600

    try:
        job = Gaia.launch_job_async("SELECT * FROM gaiadr2.gaia_source \
            WHERE CONTAINS(POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec), \
            CIRCLE('ICRS',%f,%f,%f))=1 AND  phot_g_mean_mag<%f \
            AND pmra IS NOT NULL AND abs(pmra)>0 AND pmdec IS NOT NULL AND abs(pmdec)>0;" \
                                    % (gaia_ra, gaia_dec, fov, limmag), dump_to_file=False)

        gaiat = job.get_results()[
            ['source_id'] + list(info.columns)[:-2] +
            ['pmra', 'pmdec', 'pmra_error', 'pmdec_error']]
        gaiat['phot_g_mean_mag'].name = 'mag'

        df = gaiat.to_pandas()
        df = df.set_index('source_id')
        # Drop the target star itself from the neighbour list, if present
        df = df.drop(index=starname, errors='ignore')
        return df, True
    except Exception as e:
        print('Gaia \t' + ' ERROR:', e)
        return ['NaN', False]
Example 5
def download_gaia_data(colname: str, xindex: Union[str, Table], outfile_path: Path) -> None:
    """Query and download Gaia data."""

    if isinstance(xindex, str):
        xindex_name = cast(str, xindex)
        upload_resource = None
        upload_table_name = None
    else:
        xindex_name = "TAP_UPLOAD.cel_xindex"
        upload_resource = cast(Table, xindex)
        upload_table_name = 'cel_xindex'

    query = f"""SELECT
    x.source_id, x.original_ext_source_id AS {colname},
    g.ra, g.dec, g.parallax, g.parallax_error, g.pmra,
    g.pmdec, g.phot_g_mean_mag, g.bp_rp, g.teff_val,
    d.r_est, d.r_lo, d.r_hi
FROM
    {xindex_name} x
    JOIN gaiadr2.gaia_source g ON g.source_id = x.source_id
    LEFT JOIN external.gaiadr2_geometric_distance d ON d.source_id = x.source_id"""

    print(query)
    job = Gaia.launch_job_async(
        query,
        upload_resource=upload_resource,
        upload_table_name=upload_table_name,
        dump_to_file=True,
        output_file=outfile_path,
        output_format='csv',
    )
    try:
        job.save_results()
    finally:
        Gaia.remove_jobs(job.jobid)
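
A hedged call sketch for download_gaia_data; the crossmatch table name, output column label and file name below are assumptions for illustration (passing an astropy Table instead of a string would exercise the upload branch):

from pathlib import Path

# String form of xindex: read an archive-side crossmatch table directly and
# label its original_ext_source_id column 'hip' in the CSV output.
download_gaia_data('hip', 'gaiadr2.hipparcos2_best_neighbour',
                   Path('gaia_hip_data.csv'))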
Example 6
def get_gaia_catalog(ra,
                     dec,
                     radius=4.,
                     limit=200,
                     catalog_fname='',
                     database='edr3'):
    from astroquery.gaia import Gaia
    query_args = {
        'limit': limit,
        'ra': ra,
        'dec': dec,
        'radius': radius / 60.,
        'dr': database
    }
    query = """SELECT TOP {limit} ra, dec, phot_g_mean_mag FROM gaia{dr}.gaia_source
    WHERE CONTAINS(POINT('ICRS', gaia{dr}.gaia_source.ra, gaia{dr}.gaia_source.dec),
                   CIRCLE('ICRS', {ra}, {dec}, {radius}))=1;""".format(
        **query_args)
    job = Gaia.launch_job_async(query,
                                dump_to_file=True,
                                output_format='csv',
                                verbose=False,
                                output_file=catalog_fname)
    result = job.get_results()
    return result
Example 7
    def query_gaia_KiDS_testpatch(
            self, pointing):  # pointing = [central_ra, central_dec]
        patch_ra = [pointing[0] - 0.5, pointing[0] + 0.5]
        patch_dec = [pointing[1] - 0.5, pointing[1] + 0.5]
        query_KiDS_testpatch = "select * from gaiadr2.gaia_source where ra<{} and ra>{} and dec<{} and dec>{} and parallax IS NOT NULL".format(
            patch_ra[1], patch_ra[0], patch_dec[1], patch_dec[0])
        print("Start query_gaia_KiDS_testpatch()")

        if not os.path.exists(
                "../../data/gaia/gaia_KiDS_testpatch[{}_{}].csv".format(
                    pointing[0], pointing[1])):
            filename = "gaia_KiDS_testpatch[{}_{}].csv".format(
                pointing[0], pointing[1])
        else:
            print(
                "PATH ../../data/gaia/gaia_KiDS_testpatch[{}_{}].csv already exists.\n"
                "Overwrite existing file? [y/n]".format(pointing[0], pointing[1]))
            answer = input()
            if answer == "y" or answer == "yes":
                filename = "gaia_KiDS_testpatch[{}_{}].csv".format(
                    pointing[0], pointing[1])
            elif answer == "n" or answer == "no":
                print(
                    "Give another filename (not gaia_KiDS_testpatch) to save the data"
                )
                filename = input()
            else:
                print(
                    "Your answer: {} is not valid. Please answer y, yes, n or no"
                    .format(answer))
                return self.query_gaia_KiDS_testpatch(pointing)
        job = Gaia.launch_job_async(query_KiDS_testpatch)
        r = job.get_results()
        ascii.write(r, "../../data/gaia/{}".format(filename), delimiter=',')
        return None
Example 8
File: data.py Project: profjsb/pyia
    def from_query(cls, query_str, login_info=None):
        """
        Run the specified query and return a `GaiaData` instance with the
        returned data.

        This is meant only to be used for quick queries to the main Gaia
        science archive. For longer queries and more customized usage, use
        TAP access to any of the Gaia mirrors with, e.g., astroquery or pyvo.

        This requires ``astroquery`` to be installed.

        Parameters
        ----------
        query_str : str
            The string ADQL query to execute.
        login_info : dict, optional
            Username and password for the Gaia science archive as keys "user"
            and "password". If not specified, will use anonymous access, subject
            to the query limits.

        Returns
        -------
        gaiadata : `GaiaData`
            An instance of this object.

        """
        from astroquery.gaia import Gaia

        if login_info is not None:
            Gaia.login(**login_info)

        job = Gaia.launch_job_async(query_str)
        tbl = job.get_results()

        return cls(tbl)
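
A short usage sketch of the classmethod above, assuming the surrounding class is pyia's GaiaData; the query text is illustrative:

from pyia import GaiaData  # assumed import path for the class shown above

g = GaiaData.from_query(
    "SELECT TOP 100 * FROM gaiadr2.gaia_source WHERE parallax > 50")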
Example 9
    def gaia_params(self):
        """Retrieve parallax, radius, teff and lum from Gaia."""
        # If a Gaia DR2 id is provided, query by id
        fields = np.array([
            'parallax', 'parallax_error', 'teff_val',
            'teff_percentile_lower', 'teff_percentile_upper',
            'radius_val', 'radius_percentile_lower',
            'radius_percentile_upper', 'lum_val',
            'lum_percentile_lower', 'lum_percentile_upper'
        ])
        query = 'select '
        for f in fields[:-1]:
            query += 'gaia.' + f + ', '
        query += 'gaia.' + fields[-1]
        query += ' from gaiadr2.gaia_source as gaia'
        query += ' where gaia.source_id={0}'.format(self.g_id)
        j = Gaia.launch_job_async(query)
        res = j.get_results()
        self.plx, self.plx_e = self._get_parallax(res)
        self.temp, self.temp_e = self._get_teff(res)
        self.rad, self.rad_e = self._get_radius(res)
        self.lum, self.lum_e = self._get_lum(res)
        self.dist, self.dist_e = self._get_distance(self.ra, self.dec,
                                                    self.radius, self.g_id)
Example 10
    def query_gaia_limEUCLIDSKY(self):
        query = "select * from gaiadr2.gaia_source where abs(b)>{} and abs(ecl_lat)>{}".format(
            self.galactic_latitude, self.celestial_latitude)
        print("Start query_gaia_limEUCLIDSKY()")

        if not os.path.exists("../../data/gaia_euclid.csv"):
            filename = "gaia_euclid"
        else:
            print(
                "PATH ../../data/gaia_euclid.csv already exists \n Overwrite existing file? [y/n]"
            )
            answer = input()
            if answer == "y" or answer == "yes":
                filename = "gaia_euclid"
            elif answer == "n" or answer == "no":
                print(
                    "Give another filename (no gaia_euclid) to save the data")
                filename = input()
            else:
                print(
                    "Your answer: {} is not valid. Please answer y, yes, n or no"
                    .format(answer))
                return self.query_gaia_limEUCLIDSKY()
        job = Gaia.launch_job_async(query)
        r = job.get_results()
        ascii.write(r,
                    "../../data/gaia/{}.csv".format(filename),
                    delimiter=',')
        return None
Example 11
    def get(self, **kwargs):
        """Build and performe query.

        Parameters
        ----------
        Parameters that are passed through **kwargs to
        astroquery.gaia.Gaia.launch_job_async
        For example:
        dump_to_file : bool
            If True, results will be stored in file
            (default is False).
        output_file : str
            Name of the output file

        Returns
        -------
        octable : opencluster.OCTable
            Instance with query results,
            None if dump_to_file is True
        """
        query = self.build()
        job = Gaia.launch_job_async(query=query, **kwargs)
        if not kwargs.get("dump_to_file"):
            table = job.get_results()
            return table
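
A usage sketch, assuming q is an instance of the query-builder class this method belongs to:

# In-memory result as an astropy Table:
table = q.get()

# Or dump straight to disk, in which case get() returns None:
q.get(dump_to_file=True, output_file='cluster.vot')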
Example 12
    def __init__(self, gaia_num, hiplogprob, dr='dr2'):

        self.gaia_num = gaia_num
        self.hiplogprob = hiplogprob
        self.dr = dr

        if self.dr == 'edr3':
            self.gaia_epoch = 2016.0
        elif self.dr == 'dr2':
            self.gaia_epoch = 2015.5
        else:
            raise ValueError("`dr` must be either `dr2` or `edr3`")
        self.hipparcos_epoch = 1991.25


        query = """SELECT
        TOP 1
        ra, dec, ra_error, dec_error
        FROM gaia{}.gaia_source
        WHERE source_id = {}
        """.format(self.dr, self.gaia_num)

        job = Gaia.launch_job_async(query)
        gaia_data = job.get_results()

        self.ra = gaia_data['ra']
        self.ra_err = gaia_data['ra_error']
        self.dec = gaia_data['dec']
        self.dec_err = gaia_data['dec_error']

        # keep this number on hand for use in lnlike computation 
        self.mas2deg = (u.mas).to(u.degree)
Example 13
def query_gaia(ra_deg,
               dec_deg,
               radius_deg,
               minmag=10,
               maxmag=20,
               maxsources=10000,
               catalogname='gaiacatalog'):
    job = Gaia.launch_job_async(
        "SELECT * FROM gaiadr2.gaia_source AS g, gaiadr2.panstarrs1_best_neighbour \
    AS pbest, gaiadr2.panstarrs1_original_valid AS ps1 WHERE g.source_id = pbest.source_id AND \
    pbest.original_ext_source_id = ps1.obj_id AND CONTAINS(POINT('ICRS', g.ra, g.dec), \
        CIRCLE('ICRS', %.4f, %.4f, %.4f))=1 AND ps1.r_mean_psf_mag > %.2f AND ps1.r_mean_psf_mag \
        < %.2f AND pmra IS NOT NULL AND pmdec IS NOT NULL AND abs(pmdec) > 0 AND \
        abs(pmdec) < 40 AND abs(pmra)>0 AND abs(pmra) < 40 AND ps1.n_detections > 6 \
        AND pbest.number_of_mates=0 AND pbest.number_of_neighbours=1;" %
        (ra_deg, dec_deg, radius_deg, minmag, maxmag))

    p = job.get_results()
    p['ra_errdeg'] = p['ra_error'] / 3.6e6
    p['dec_errdeg'] = p['dec_error'] / 3.6e6
    p['FLAGS'] = 0
    if os.path.exists(catalogname + '.ldac'):
        os.remove(catalogname + '.ldac')

    if os.path.exists('gaiacatalog.dat'):
        os.remove('gaiacatalog.dat')

    save_table_as_ldac(p, catalogname + '.ldac')
Example 14
def get_data_subset(ra_deg,
                    dec_deg,
                    rad_deg,
                    dist,
                    dist_span=None,
                    rv_only=False):
    if dist_span is not None:
        max_parallax = 1e3 / (max(dist - dist_span, 1.))
        min_parallax = 1e3 / (dist + dist_span)
    else:
        min_parallax = -1
        max_parallax = 100
    gaia_query = "SELECT source_id,ra,dec,parallax,parallax_error,pmra,pmra_error,pmdec,pmdec_error,phot_g_mean_mag,phot_bp_mean_mag,phot_rp_mean_mag,radial_velocity,radial_velocity_error,phot_variable_flag,a_g_val " +\
                 "FROM gaiadr2.gaia_source " +\
                 "WHERE parallax >= {:.4f} AND parallax <= {:.4f} ".format(min_parallax, max_parallax) +\
                 "AND CONTAINS(POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec),CIRCLE('ICRS',{:.7f},{:.7f},{:.7f}))=1 ".format(ra_deg, dec_deg, rad_deg)
    if rv_only:
        gaia_query += 'AND (radial_velocity IS NOT NULL) '
    # print(' QUERY:', gaia_query)
    try:
        gaia_job = Gaia.launch_job_async(gaia_query, dump_to_file=False)
        gaia_data = gaia_job.get_results()
    except Exception:
        print(' Problem querying data.')
        return list([])
    for g_c in gaia_data.colnames:
        gaia_data[g_c].unit = ''
    gaia_data['radial_velocity'].name = 'rv'
    gaia_data['radial_velocity_error'].name = 'rv_error'
    # print(gaia_data)
    # print(' QUERY complete')
    print(' Retrieved lines:', len(gaia_data))
    return gaia_data
Example 15
def jobGaiaDR2(ra0, dec0, limgmag, fov, yr):
    ssl._create_default_https_context = ssl._create_unverified_context
    job = Gaia.launch_job_async(
        "SELECT * "
        "FROM gaiadr2.gaia_source "
        "WHERE CONTAINS(POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec),"
        "CIRCLE('ICRS',%f,%f,%f))=1 "
        "AND pmra IS NOT NULL AND abs(pmra)>0 "
        "AND pmdec IS NOT NULL AND abs(pmdec)>0 "
        "AND phot_g_mean_mag<%f;" % (ra0, dec0, fov, limgmag),
        dump_to_file=False)

    gaiat = job.get_results()
    # print(gaiat)
    ra = np.array(gaiat['ra'])
    dec = np.array(gaiat['dec'])
    pmra = np.array(gaiat['pmra'])
    pmdec = np.array(gaiat['pmdec'])

    gmag = np.array(gaiat['phot_g_mean_mag'])
    bpmag = np.array(gaiat['phot_bp_mean_mag'])
    rpmag = np.array(gaiat['phot_rp_mean_mag'])

    des = np.array(gaiat['designation'])

    N = np.size(gmag)

    for k in range(N):
        alpha, delta = RADecFromTang((yr - 2015.5) * math.pi * pmra[k] / 648000000.0, \
                                          (yr - 2015.5) * math.pi * pmdec[k] / 648000000.0, \
                                          math.radians(ra[k]), math.radians(dec[k]))
        print(ra[k], dec[k], math.degrees(alpha), math.degrees(delta))
        ra[k], dec[k] = math.degrees(alpha), math.degrees(delta)

    return ra, dec, gmag, bpmag, rpmag, des
Example 16
def download_votable(tile,
                     offset=0.005,
                     output_dir=dirconfig.raw_gaia,
                     gaia_source='gaiadr2.gaia_source'):
    """
    This function automatically download gaia-data from gaia-archive using AstroQuery module.
    tile: a Tile objects.
    Implemented query extract all the sources within the coordinates
    offset: is a safe-margin (in degrees) added to the borders of each tile
    output_dir : where vo-tables are saved
    gaia_source: gaiaedr3.gaia_source or gaiadr2.gaia_source
    """

    print('---------------------------------------------')
    lmin = tile.lmin - offset
    lmax = tile.lmax + offset
    bmin = tile.bmin - offset
    bmax = tile.bmax + offset
    query = f'SELECT * FROM {gaia_source} WHERE l > {lmin:.4f} AND l < {lmax:.4f} AND b > {bmin:.4f} AND b < {bmax:.4f} '
    print(query)
    file_path = path.join(output_dir, tile.name + '_gaia.vot.gz')
    t1 = time.time()
    job = Gaia.launch_job_async(query,
                                dump_to_file=True,
                                output_file=file_path)
    t2 = time.time()
    print(f'Delta t: {t2 - t1:.4f} s')
Example 17
def gaiadr2xdr3(source_idlist, nearest=True):
    """
	returns the dr2 to dr3 cross matches for the given source_idlist
	"""

    upload_tablename, upload_resource, sidcol = source_id_to_xmlfile(
        source_idlist)

    query_str = ' '.join([
        f'SELECT tu.{sidcol}, dr2.*',
        f'from tap_upload.{upload_tablename} tu left join gaiaedr3.dr2_neighbourhood dr2',
        f'on tu.{sidcol} = dr2.dr2_source_id'
    ])
    try:
        job = Gaia.launch_job_async(query=query_str,
                                    upload_resource=upload_resource,
                                    upload_table_name=upload_tablename)

        df = job.get_results().to_pandas()
    finally:
        os.remove(upload_resource)

    if nearest:
        #just return the nearest dr3 source id based on angular distance
        ret_df = df.sort_values(['dr2_source_id', 'angular_distance']).groupby(
            'dr2_source_id', as_index=False).first().set_index('source_id')
    else:
        ret_df = df.set_index('source_id')

    return ret_df
Example 18
def gaia_query(n, distance=200, **kwargs):
    """
    Sends an archive query for d < `distance` pc (200 pc by default), with additional filters taken from
    Gaia Data Release 2: Observational Hertzsprung-Russell diagrams (Sect. 2.1)
    Gaia Collaboration, Babusiaux et al. (2018)
    (https://doi.org/10.1051/0004-6361/201832843)

    NOTE: 10000000 is a maximum query size (~76 MB / column)

    Additional keyword arguments are passed to TapPlus.launch_job_async method.
    """
    return Gaia.launch_job_async(
        "select top {}".format(n) +
        #" lum_val, teff_val,"
        #" ra, dec, parallax,"
        " bp_rp, phot_g_mean_mag+5*log10(parallax)-10 as mg"
        " from gaiadr2.gaia_source"
        " where parallax_over_error > 10"
        " and visibility_periods_used > 8"
        " and phot_g_mean_flux_over_error > 50"
        " and phot_bp_mean_flux_over_error > 20"
        " and phot_rp_mean_flux_over_error > 20"
        " and phot_bp_rp_excess_factor <"
        " 1.3+0.06*power(phot_bp_mean_mag-phot_rp_mean_mag,2)"
        " and phot_bp_rp_excess_factor >"
        " 1.0+0.015*power(phot_bp_mean_mag-phot_rp_mean_mag,2)"
        " and astrometric_chi2_al/(astrometric_n_good_obs_al-5)<"
        "1.44*greatest(1,exp(-0.4*(phot_g_mean_mag-19.5)))" +
        " and 1000/parallax <= {}".format(distance),
        **kwargs)
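
A brief call sketch (the numbers are illustrative):

# Colour and absolute magnitude for 10,000 stars within 100 pc, pulled out of
# the finished job as an astropy Table with columns bp_rp and mg.
job = gaia_query(10000, distance=100)
cmd = job.get_results()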
Example 19
def download_gaia(tbl: Table) -> None:
    """Downloads Gaia data for stars not in catalog."""

    if os.path.isfile(GAIA_PATH):
        print('Gaia data already downloaded, skipping')
        return

    print("Querying Gaia")

    source_ids = Table([
        tbl[np.logical_not(np.logical_or(tbl['cel_exists'],
                                         tbl['gaia'].mask))]['gaia']
    ])
    query = r"""
        SELECT
            g.source_id, g.ra, g.dec, g.phot_g_mean_mag, g.bp_rp, d.r_est
        FROM
            TAP_UPLOAD.source_ids s
            JOIN gaiadr2.gaia_source g ON g.source_id = s.gaia
            LEFT JOIN external.gaiadr2_geometric_distance d ON d.source_id = g.source_id
    """

    job = Gaia.launch_job_async(query,
                                output_file=GAIA_PATH,
                                output_format='votable',
                                dump_to_file=True,
                                upload_resource=source_ids,
                                upload_table_name='source_ids')
    job.wait_for_job_end()
Example 20
def fetch_gaia(tmass_ids):
    from astroquery.gaia import Gaia

    # Can't prefix columns with the table name, so have to settle on using 'gaia_start' and
    # 'wise_start' as marker columns
    query = """
        select
            mine.tmass_id as tmass_id,
            '' as gaia_start,
            gaia.*, 
            '' as wise_start,
            allwise.*
        from gaiadr2.gaia_source as gaia

            inner join gaiadr2.tmass_best_neighbour as tmass_xmatch
                on gaia.source_id = tmass_xmatch.source_id
            inner join tap_upload.mine as mine 
                on tmass_xmatch.original_ext_source_id = mine.tmass_id

            inner join gaiadr2.allwise_best_neighbour as allwise_xmatch
                on gaia.source_id = allwise_xmatch.source_id
            inner join gaiadr1.allwise_original_valid as allwise
                on allwise_xmatch.original_ext_source_id = allwise.designation"""

    with tempfile.NamedTemporaryFile(suffix='.xml') as tmp:
        os.remove(tmp.name)  # astropy will complain if the file already exists
        (astropy.table.Table(tmass_ids[:, None].astype(bytes),
                             names=['tmass_id']).write(tmp.name,
                                                       format='votable'))

        log.info(f'Launching job for {len(tmass_ids)} 2MASS IDs')
        job = Gaia.launch_job_async(query,
                                    upload_resource=tmp.name,
                                    upload_table_name='mine')
        log.info(f'Job ID is {job.get_jobid()}')

    while True:
        time.sleep(5)
        log.info(f'Job is {job.get_phase()}')
        if job.get_phase() == 'COMPLETED':
            table = job.get_results()
            break

    df = table.to_pandas().pipe(stringify)

    gaia_start = list(df.columns).index('gaia_start')
    wise_start = list(df.columns).index('wise_start')
    indices = sp.arange(len(df.columns))
    masks = {
        'tmass': df.columns == 'tmass_id',
        'gaia': (gaia_start < indices) & (indices < wise_start),
        'wise': (wise_start < indices)
    }
    df = pd.concat({k: df.loc[:, m] for k, m in masks.items()}, axis=1)

    # Some fields have a _2 suffix because they're replicated in Gaia and WISE
    df = df.rename(columns=lambda c: c.split('_2')[0])

    return df
Example 21
File: query.py Project: linj24/ypp
def gaia_query(ra_0, ra_1, dec_0, dec_1, threshold=20):
    jobquery = (
        "SELECT gaia_source.source_id,gaia_source.ra,gaia_source.dec,gaia_source.parallax,gaia_source.parallax_error,gaia_source.pmra,gaia_source.pmdec,gaia_source.phot_g_mean_mag,gaia_source.phot_bp_mean_mag,gaia_source.phot_rp_mean_mag,gaia_source.bp_rp,gaia_source.radial_velocity,gaia_source.phot_variable_flag,gaia_source.teff_val,gaia_source.lum_val FROM gaiadr2.gaia_source  WHERE CONTAINS(POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec),BOX('ICRS',"
        + str((ra_0 + ra_1) / 2) + "," + str((dec_0 + dec_1) / 2) + "," +
        str(abs(ra_1 - ra_0)) + "," + str(abs(dec_1 - dec_0)) +
        "))=1    AND  (phot_bp_mean_mag<= " + str(threshold) + ")")
    job = Gaia.launch_job_async(jobquery, dump_to_file=True)
    return job.get_results()
Example 22
def query_gaia_match(user_name,table_2_match,radius_arc):
	'''
	This function queries Gaia DR2 to find the match with table
	table_2_match. This table needs to be uploaded to the Gaia
	Archive in advance. The steps to upload the table can be found
	in http://gea.esac.esa.int/archive-help/index.html.

	Input:
	user_name(str): User name in the Gaia Archive.
	table_2_match(str): Name of the table uploaded to the Gaia 
	Archive.
	radius_arc(float): Radius of search in arcsec.

	Output:
	file.vot: Output file from the Archive with all the columns
	in table_2_match plus all Gaia DR2 columns. 
	'''

	Gaia.login_gui() 
	#Ask for userName and userPassword to authenticated access mode
	#This could be done with: 
	#Gaia.login(user='******', password='******') 

	#Cross-match user table and gaia source
	job = Gaia.launch_job_async("""\
		SELECT crossmatch_positional(\
		'user_{}','{}',\
		'gaiadr2','gaia_source',\
		{},\
		'xmatch')\
		FROM dual;\
		""".format(user_name,table_2_match,radius_arc))

	#For the matches saved in test_xmatch, get the information from Gaia. 
	#The last line saves the information into a vot table
	job2 = Gaia.launch_job_async("""\
		SELECT c."dist", a.*, b.* \
		FROM user_{}.{} AS a, \
		gaiadr2.gaia_source AS b, \
		user_{}.xmatch AS c \
		WHERE (c.{}_{}_oid = a.{}_oid AND \
		c.gaia_source_source_id = b.source_id)\
		""".format(user_name,table_2_match,user_name,
			table_2_match,table_2_match,table_2_match), dump_to_file=True)

	Gaia.logout()
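
An illustrative call; 'jdoe' and 'my_targets' stand in for a real Gaia Archive user name and a table previously uploaded to that account:

# Cross-match the uploaded table against gaiadr2.gaia_source within 1 arcsec;
# the second job in the function writes the matched rows to a VOTable.
query_gaia_match('jdoe', 'my_targets', 1.0)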
Example 23
def get_gaia(df):

    import time
    import numpy as np
    import pandas as pd
    from astroquery.vizier import Vizier
    from astropy.coordinates import SkyCoord
    from astropy.coordinates import Angle
    from astropy import units as u
    from astropy.table import Column, Table, join
    from astroquery.gaia import Gaia
    Gaia.login(user='******',password='******')
    
    qry = """
        SELECT TOP 10 g.*, t.*
        FROM gaiadr1.tmass_original_valid AS t
        JOIN gaiadr2.tmass_neighbourhood AS xt ON xt.tmass_oid=t.tmass_oid
        JOIN gaiadr2.gaia_source AS g ON g.source_id=xt.source_id
        WHERE g.phot_g_mean_mag IS NOT NULL
        """

    bkg=Gaia.launch_job_async(qry).get_results().to_pandas()
    bkg['abs_g']=bkg.phot_g_mean_mag-5*np.log10(1000./bkg.parallax)+5.
    columns=bkg.columns.tolist()
    
    GAIAdf=pd.DataFrame(index=df.index,columns=columns)
    
    for k,row in df.iterrows():
        qry="""
            SELECT g.*, t.*
            FROM gaiadr1.tmass_original_valid AS t
            LEFT OUTER JOIN gaiadr2.tmass_neighbourhood AS xt ON xt.tmass_oid = t.tmass_oid
            LEFT OUTER JOIN gaiadr2.gaia_source AS g ON xt.source_id = g.source_id
            where 1=CONTAINS(POINT('ICRS', t.ra, t.dec),CIRCLE('ICRS', {}, {}, 5./3600))
            """.format(row['RA (deg)'],row['Dec (deg)'])
        
        data=Gaia.launch_job_async(qry).get_results().to_pandas()
        
        data['INDEX']=k
        print(k,data.shape)
        GAIAdf=GAIAdf.append(data).dropna(how='all')

    # Log out once, after all per-row queries have finished
    Gaia.logout()
    
    return GAIAdf
Example 24
    def query_gaia(self):
        """
        Query Gaia EDR3 catalogue for bright stars (G < 12) and 
        store results in a FITS file.

        """

        from astroquery.gaia import Gaia

        gaia_dir = self.index_dir

        # Query in two parts to overcome the 3-million-row limit
        query = ('SELECT ra,dec,pmra,pmdec,phot_g_mean_mag,'
                 'phot_bp_mean_mag,phot_rp_mean_mag '
                 'FROM gaiaedr3.gaia_source '
                 'WHERE phot_g_mean_mag<11.5 '
                 'AND astrometric_params_solved=31')
        fn_tab1 = os.path.join(gaia_dir, 'gaiaedr3_pyplate_1.fits')
        job = Gaia.launch_job_async(query,
                                    output_file=fn_tab1,
                                    output_format='fits',
                                    dump_to_file=True)

        query = ('SELECT ra,dec,pmra,pmdec,phot_g_mean_mag,'
                 'phot_bp_mean_mag,phot_rp_mean_mag '
                 'FROM gaiaedr3.gaia_source '
                 'WHERE phot_g_mean_mag BETWEEN 11.5 AND 12 '
                 'AND astrometric_params_solved=31')
        fn_tab2 = os.path.join(gaia_dir, 'gaiaedr3_pyplate_2.fits')
        job = Gaia.launch_job_async(query,
                                    output_file=fn_tab2,
                                    output_format='fits',
                                    dump_to_file=True)

        # Read two tables and concatenate them
        tab1 = Table.read(fn_tab1)
        tab2 = Table.read(fn_tab2)
        tab = vstack([tab1, tab2], join_type='exact')
        fn_tab = os.path.join(gaia_dir, 'gaiaedr3_pyplate.fits')
        tab.write(fn_tab, format='fits', overwrite=True)

        # Remove partial tables
        os.remove(fn_tab1)
        os.remove(fn_tab2)
Example 25
def getdata(RA, DEC, SR):
    job = Gaia.launch_job_async(
        "SELECT * "
        "FROM gaiadr2.gaia_source "
        "WHERE CONTAINS(POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec),"
        "CIRCLE('ICRS'," + str(RA) + "," + str(DEC) + "," + str(SR) + "))=1 "
        "ORDER BY phot_g_mean_mag ASC;",
        dump_to_file=True)

    r = job.get_results()

    return r
Example 26
def target_contam_gaia(ra, dec, srad=15.5 * 20):
    c = SkyCoord(ra=ra * u.degree, dec=dec * u.degree)
    # Pgaia = Gaia.query_object_async(coordinate=c, radius=(5.0*u.arcsec))
    sqltext = "SELECT * FROM gaiaedr3.gaia_source WHERE CONTAINS( \
               POINT('ICRS',gaiaedr3.gaia_source.ra,gaiaedr3.gaia_source.dec), \
               CIRCLE('ICRS'," + str(c.ra.value) + "," + str(
        c.dec.value) + "," + str(srad / 3600.0) + "))=1;"
    job = Gaia.launch_job_async(sqltext, dump_to_file=False)
    obs_table_gaia = job.get_results()
    return (obs_table_gaia)
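
A quick call sketch with illustrative coordinates:

# Gaia EDR3 sources within ~310 arcsec (the default srad) of the target position.
neighbours = target_contam_gaia(84.291, -80.469)
print(len(neighbours))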
Example 27
def genSimulation(configpath, star, orders = [-1,0,1], seeing = 1):
   """"Generates the arguments for plot_all to simulate the spectrogram of star through a slitless spectrogram which properties are contained in the configuration file.
   
   Parameters
   ----------
   configpath : string
      path to the slitless spectrograph configuration file
   star : string
      the studied star identifier
   orders : list of integers
      the list of orders to represent.
   seeing : float
      the wanted seeing for the spectrogram in arcseconds, 1 arcsec by default
   
   Returns
   -------
   stars : list
      a list of star_class objects, containing the stars to take into account with their positions on the detector,
   img : 2d ndarray
      a background image for the stars, 
   angle : float
      the angle of dispersion used for the spectrogram, 
   config : configuration
      the configuration used for the spectrogram, 
   LTh : list
      a list of angles for Overlap_list, 
   Overlap_list : list
      the list of the total overlap caused by surrounding stars on the studied star spectrum
   """
   config = configuration(configpath)
   imsize, pix2ars = config.ccd_imsize, config.pixel2arcsec
   
   query = customSimbad.query_object(star)
   ra0, dec0, mag0 = query["RA_d"][0], query["DEC_d"][0], query["FLUX_V"][0]
   r = 2*imsize*config.pixel2arcsec/3600
   
   maglim = mag0 + 7.5
   job = Gaia.launch_job_async("SELECT ra, dec, DISTANCE(POINT('ICRS', ra, dec),POINT('ICRS', {0}, {1})) AS dist, phot_g_mean_mag AS flux FROM gaiadr2.gaia_source WHERE CONTAINS(POINT('ICRS', ra, dec),CIRCLE('ICRS', {0}, {1}, {2})) = 1 AND phot_g_mean_mag < {3} ORDER BY dist".format(ra0, dec0, r, maglim))
   result = job.get_results()
   
   ra0, dec0 = result['ra'][0], result['dec'][0]
   config.wcs.wcs.crval = [ra0,dec0]
   
   cra, cdec = config.wcs.wcs_pix2world([[imsize/2, imsize/2]], 0)[0]
   stars = list_stars(result,config,maglim,seeing, orders)
   angle, LTh, Overlap_list = best_angle(stars, config)
   
   for s in stars:
      s.rotate_orders(angle, use_radians = True)
   
   LTh = LTh*180/np.pi
   img = SkyView.get_images(position = '{}, {}'.format(cra, cdec), survey = 'DSS', width = imsize*pix2ars*u.arcsec, height = imsize*pix2ars*u.arcsec)[0][0].data
   
   
   return (stars, img, angle, config, LTh, Overlap_list)
Example 28
def make_gaia_catalog(ra, dec, radius_deg, catalog_min_mag, catalog_max_mag,
                      catname):
    job = Gaia.launch_job_async(
        "SELECT * FROM gaiadr2.gaia_source AS g, gaiadr2.panstarrs1_best_neighbour AS pbest, gaiadr2.panstarrs1_original_valid AS ps1 WHERE g.source_id = pbest.source_id AND pbest.original_ext_source_id = ps1.obj_id AND CONTAINS(POINT('ICRS', g.ra, g.dec), CIRCLE('ICRS', %.4f, %.4f, %.4f))=1 AND ps1.r_mean_psf_mag > %.2f AND ps1.r_mean_psf_mag < %.2f AND pmra IS NOT NULL AND pmdec IS NOT NULL AND abs(pmdec) > 0 AND abs(pmdec) < 40 AND abs(pmra)>0 AND abs(pmra) < 40 AND ps1.n_detections > 6 AND pbest.number_of_mates=0 AND pbest.number_of_neighbours=1;"
        % (ra, dec, radius_deg, catalog_min_mag, catalog_max_mag),
        dump_to_file=False)

    p = job.get_results()

    # convert RA and DEC errors from mas(milli arc second) to degrees

    p['ra_errdeg'] = p['ra_error'] / 3.6e6
    p['dec_errdeg'] = p['dec_error'] / 3.6e6
    p['FLAGS'] = 0

    p.remove_columns([
        'astrometric_n_obs_al', 'astrometric_n_obs_ac',
        'astrometric_n_good_obs_al', 'astrometric_n_bad_obs_al',
        'astrometric_gof_al', 'astrometric_chi2_al',
        'astrometric_excess_noise', 'astrometric_excess_noise_sig',
        'astrometric_params_solved', 'astrometric_primary_flag',
        'astrometric_weight_al', 'astrometric_pseudo_colour',
        'astrometric_pseudo_colour_error', 'mean_varpi_factor_al',
        'astrometric_matched_observations', 'visibility_periods_used',
        'astrometric_sigma5d_max', 'frame_rotator_object_type',
        'matched_observations', 'duplicated_source', 'phot_g_n_obs',
        'phot_g_mean_flux', 'phot_g_mean_flux_error',
        'phot_g_mean_flux_over_error', 'phot_g_mean_mag', 'phot_bp_n_obs',
        'phot_bp_mean_flux', 'phot_bp_mean_flux_error',
        'phot_bp_mean_flux_over_error', 'phot_bp_mean_mag', 'phot_rp_n_obs',
        'phot_rp_mean_flux', 'phot_rp_mean_flux_error',
        'phot_rp_mean_flux_over_error', 'phot_rp_mean_mag',
        'phot_bp_rp_excess_factor', 'phot_proc_mode', 'bp_rp', 'bp_g', 'g_rp',
        'radial_velocity', 'radial_velocity_error', 'rv_nb_transits',
        'rv_template_teff', 'rv_template_logg', 'rv_template_fe_h', 'l', 'b',
        'ecl_lon', 'ecl_lat', 'priam_flags', 'teff_val',
        'teff_percentile_lower', 'teff_percentile_upper', 'a_g_val',
        'a_g_percentile_lower', 'a_g_percentile_upper', 'e_bp_min_rp_val',
        'e_bp_min_rp_percentile_lower', 'e_bp_min_rp_percentile_upper',
        'flame_flags', 'radius_val', 'radius_percentile_lower',
        'radius_percentile_upper', 'lum_val', 'lum_percentile_lower',
        'lum_percentile_upper', 'gaia_astrometric_params', 'obj_name',
        'obj_id', 'ra_2', 'dec_2', 'ra_error_2', 'dec_error_2', 'epoch_mean',
        'zone_id', 'obj_info_flag', 'quality_flag', 'designation',
        'phot_variable_flag', 'datalink_url', 'epoch_photometry_url',
        'original_ext_source_id'
    ])

    if os.path.exists(catname + '.txt'):
        os.remove(catname + '.txt')
    ascii.write(p, catname + '.txt')

    if os.path.exists(catname + '.ldac'):
        os.remove(catname + '.ldac')
    save_table_as_ldac(p, catname + '.ldac')
Example 29
    def gaia_query(self):
        """
        Performs a query on gaia DR2 using self.skycoord and self.radius.
        Note: "Exception: 500" tends to happen when the coordinates used in search_string is not formatted correctly.

        Arguments:
            [none]

        Returns:
            CatalogTable with query result.
        """

        catalog = ["gaia"]

        ra = self.skycoord.ra.degree

        dec = self.skycoord.dec.degree

        #TODO: include code to write diagnostic info to log file incl. ra, dec, radius, search string

        search_string = "SELECT {} FROM gaiadr2.gaia_source WHERE CONTAINS(POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec),CIRCLE('ICRS',{},{},{}))=1;".format(
            gaia_cols, ra, dec, self.radius)

        #try:

        out("Creating query...")
        out(search_string)
        job = Gaia.launch_job_async(search_string, dump_to_file=False)

        #except gaierror as e:

        #    if str(e) != "[Errno 11001] getaddrinfo failed":

        #        raise

        #    else:

        #        out("This error is typically raised when there is no internet connection.")

        #        raise

        out("Retrieving results...")
        query_results = job.get_results()

        out("Results retrieved.")
        info_out(str(len(query_results['designation'])) + " sources detected.")

        print(query_results.colnames)

        # write Gaia query results to file
        fname = tpath + "/gaia_query.dat"
        query_results.write(fname, format='ascii.ecsv')

        return (CatalogTable(catalog, query_results))
Example 30
    def run_cone_search(self,
                        qpar_SN=10,
                        qpar_vis=7,
                        qpar_ruwe=1.40,
                        verbose=True):
        """
        Run a cone-search ADQL query on the Gaia DR2 archive. By default the code applies selection criteria: objects with
        parallax S/N < 10, visibility periods used < 7, or RUWE > 1.40 are excluded.
        """
        # 1.- Define ADQL query ================================
        query = (
            "SELECT " + self.gaia_cols + " "
            ",sqrt(gaia.astrometric_chi2_al/(gaia.astrometric_n_good_obs_al-5)) as unit_weight_e, g_ruwe.ruwe as ruwe "
            "FROM gaiadr2.gaia_source as gaia "
            "LEFT OUTER JOIN gaiadr2.ruwe  AS g_ruwe ON gaia.source_id = g_ruwe.source_id "
            "WHERE 1=CONTAINS( "
            "POINT('ICRS',ra,dec), "
            f"CIRCLE('ICRS',{self.ADQL['ra']:5.2F}, {self.ADQL['dec']:5.2F}, {self.ADQL['radii']:5.2F})) "
            f"AND parallax >= {self.ADQL['para_m']:5.2F} AND parallax <= {self.ADQL['para_M']:5.2F} "
            f"AND gaia.source_id IS NOT NULL AND gaia.parallax/gaia.parallax_error >{qpar_SN} "
            f"AND gaia.visibility_periods_used >{qpar_vis} AND g_ruwe.ruwe <{qpar_ruwe}"
        )
        # 2.- Run ADQL query ===================================
        warnings.simplefilter('ignore', category=AstropyWarning)
        print('RUNNING ADQL ASYNCHRONOUS QUERY ' + '=' * 57)
        job = Gaia.launch_job_async(query=query, verbose=True)
        self.cat = job.get_results()

        flag_psn, flag_vis, flag_ruwe = '', '', ''
        if qpar_SN == 10: flag_psn = '(Default)'
        if qpar_vis == 7: flag_vis = '(Default)'
        if qpar_ruwe == 1.40: flag_ruwe = '(Default)'

        if qpar_SN < 10:
            text = 'WARNING: Parallax->Distance conversion as d = 1/parallax is not reliable if parallax S/N < 10'
            print('!' * len(text))
            print(text)
            print('!' * len(text))

        if verbose:
            print('=' * 90)
            print(
                f'Selection Criteria in Parallax S/N:             Parallax S/N > {qpar_SN}   {flag_psn}'
            )
            print(
                f'Selection Criteria in Visibility Periods Used:  Vis          > {qpar_vis}    {flag_vis}'
            )
            print(
                f'Selection Criteria in RUWE:                     RUWE         < {qpar_ruwe} {flag_ruwe}'
            )
            print()
            print(f'SAMPLE OUTPUT  N_els = {len(self.cat):3.0f}')
            print('=' * 90)
            print()
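
A hedged usage sketch, assuming cs is an instance of the class this method belongs to, with gaia_cols and the ADQL dictionary already set:

# Default DR2 quality cuts: parallax S/N > 10, > 7 visibility periods, RUWE < 1.40.
cs.run_cone_search()

# Relax the parallax S/N cut; the method prints a warning about 1/parallax distances.
cs.run_cone_search(qpar_SN=5, verbose=False)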
Example 31
#~ INNER JOIN gaiadr2.tmass_best_neighbour AS tmass
    #~ ON gaia.source_id = tmass.source_id
#~ WHERE tmass.original_ext_source_id IN %s
#~ '''%str(tuple(tmass))

# Search with Gaia IDs
#gaiaid=[5283961585534643712, 5283965296387249920]
gaiaid = np.load('../data/bpmg_sourceids.npy')

query='''
SELECT gaia.*
FROM gaiadr2.gaia_source AS gaia
WHERE gaia.source_id IN %s
'''%str(tuple(gaiaid))

job = Gaia.launch_job_async(query, dump_to_file=True)

# Your astropy table with results
r = job.get_results()

#keys=['source_id', 'phot_bp_mean_flux','ra_pmdec_corr','ra_error','ra','pmra_error','ecl_lon','designation','l','phot_rp_mean_mag','parallax_pmdec_corr','ra_parallax_corr','pmdec_error','phot_g_mean_mag','pmra','parallax','radial_velocity','radial_velocity_error','ra_dec_corr','parallax_error','dec_pmdec_corr','dec_error','pmdec','parallax_over_error','b','ref_epoch','ra_pmra_corr','dec_parallax_corr','phot_bp_mean_mag','dec','dec_pmra_corr','pmra_pmdec_corr','parallax_pmra_corr','bp_rp','ecl_lat']
keys=['source_id','phot_bp_mean_flux','ra_error','ra','phot_rp_mean_mag',
      'phot_g_mean_mag','phot_bp_mean_mag','bp_rp']

r2=r[keys]
d=dict(zip(r2['source_id'], r2)) # for easier crossmatch with source_id

print(r2)

# for source_id in gaiaid:
#     try: