Ejemplo n.º 1
0
    def test_list_async_jobs(self):
        """A 500 response makes list_async_jobs raise; a 200 response
        yields the two jobs described in data/jobs_list.xml."""
        conn_handler = DummyConnHandler()
        tap_plus = TapPlus("http://test:1111/tap", connhandler=conn_handler)
        resp = DummyResponse()
        resp.set_status_code(500)
        resp.set_message("ERROR")
        job_data = utils.read_file_content(data_path('jobs_list.xml'))
        resp.set_data(method='GET',
                      context=None,
                      body=job_data,
                      headers=None)
        conn_handler.set_response("async", resp)
        # Server error must surface as an exception.
        with pytest.raises(Exception):
            tap_plus.list_async_jobs()

        # Same canned body, now with a success status.
        resp.set_status_code(200)
        resp.set_message("OK")
        job_list = tap_plus.list_async_jobs()
        assert len(job_list) == 2, \
            "Wrong jobs number. Expected: %d, found %d" % \
            (2, len(job_list))
        for index, expected_id, expected_phase in ((0, '12345', 'COMPLETED'),
                                                   (1, '77777', 'ERROR')):
            job = job_list[index]
            assert job.get_jobid() == expected_id, \
                "Wrong job id. Expected: %s, found %s" % \
                (expected_id, job.get_jobid())
            assert job.get_phase() == expected_phase, \
                "Wrong job phase for job %s. Expected: %s, found %s" % \
                (job.get_jobid(), expected_phase, job.get_phase())
Ejemplo n.º 2
0
def tap_query(url: str, query: str) -> Union[Table, None]:
    """
    Query via a TapPlus query

    :param url: str, the URL to the SQL database
    :param query: str, the SQL query to run

    :return: astropy.table.Table or None - the results of the gaia TAP query
    """
    # ------------------------------------------------------------------
    # run the gaia query, suppressing any warnings it emits
    with warnings.catch_warnings(record=True):
        service = TapPlus(url=url, verbose=False)
        sync_job = service.launch_job(query=query)
        table = sync_job.get_results()
    # ------------------------------------------------------------------
    # an empty result table means the object was not found -> None
    return table if len(table) > 0 else None
Ejemplo n.º 3
0
def gaia_search(coords, radius):
    """
    Gaia ADQL query cone searcher
    Returns tables from both DR2 and EDR3
    """
    # Convert the radius (given in arcsec) to degrees for ADQL CIRCLE().
    radius_deg = (radius * u.arcsec).to(u.deg).value
    tap_service = TapPlus(url="https://gea.esac.esa.int/tap-server/tap",
                          verbose=False)

    query_dr2 = f"""SELECT *
                         FROM gaiadr2.gaia_source
                         WHERE 
                             1=CONTAINS(POINT('ICRS', ra, dec),
                                        CIRCLE('ICRS', {coords.ra.deg}, {coords.dec.deg}, {radius_deg}))
                        """

    dr2_table = tap_service.launch_job(query_dr2).get_results()
    print(f'DR2: Found {len(dr2_table)} sources')

    query_edr3 = f"""SELECT *
                          FROM gaiaedr3.gaia_source
                          WHERE 
                              1=CONTAINS(POINT('ICRS', ra, dec),
                                         CIRCLE('ICRS', {coords.ra.deg}, {coords.dec.deg}, {radius_deg}))
                        """

    edr3_table = tap_service.launch_job(query_edr3).get_results()
    print(f'DR3: Found {len(edr3_table)} sources')
    return dr2_table, edr3_table
Ejemplo n.º 4
0
def _download_ps_table():
    """Download the full NASA Exoplanet Archive 'ps' table as a DataFrame."""
    tap_service = TapPlus(url="https://exoplanetarchive.ipac.caltech.edu/TAP")
    async_job = tap_service.launch_job_async("select * from ps")
    # TODO: fix dtype conversion
    frame = async_job.get_results().to_pandas()
    # Tag the frame so downstream code can recognise its origin.
    setattr(frame, "_is_ps_table", True)
    return frame
Ejemplo n.º 5
0
def twomass_from_gaia(df):
    """
    Get 2MASS IDs from Gaia DR2 ID and add in column of dataframe
    Args:
        df (pd.Dataframe): dataframe with object list
    Returns:
        df with updated 2mass IDs
    """
    id_strings = df['GAIADR2ID'].astype(str).tolist()
    # Build the SQL IN-list, e.g. "(123, 456, ...)".
    id_list_sql = '(' + ', '.join(id_strings) + ')'
    query = ('SELECT source_id, original_ext_source_id '
             'FROM gaiadr2.tmass_best_neighbour '
             'WHERE source_id IN {}').format(id_list_sql)
    tap_service = TapPlus(url=GAIA_2MASS_URL)
    crossmatch = tap_service.launch_job(query=query).get_results().to_pandas()

    # Re-index the crossmatch on Gaia source_id so rows line up with df.
    crossmatch = (crossmatch.astype(str)
                  .set_index('source_id', drop=False)
                  .reindex(id_strings))
    two_mass_ids = crossmatch['original_ext_source_id']

    # Weird encoding workaround
    df['TWOMASSID'] = two_mass_ids.str.strip("b\'\"").values

    return df
Ejemplo n.º 6
0
def test_list_async_jobs():
    """A 500 response makes list_async_jobs raise; a 200 response yields
    the two jobs described in data/jobs_list.xml."""
    conn_handler = DummyConnHandler()
    tap_plus = TapPlus("http://test:1111/tap", connhandler=conn_handler)
    resp = DummyResponse()
    resp.set_status_code(500)
    resp.set_message("ERROR")
    job_data = utils.read_file_content(data_path('jobs_list.xml'))
    resp.set_data(method='GET',
                  context=None,
                  body=job_data,
                  headers=None)
    conn_handler.set_response("async", resp)
    # Server error -> the call must raise.
    with pytest.raises(Exception):
        tap_plus.list_async_jobs()

    # Same canned body, now with a success status.
    resp.set_status_code(200)
    resp.set_message("OK")
    job_list = tap_plus.list_async_jobs()
    assert len(job_list) == 2
    assert job_list[0].jobid == '12345'
    assert job_list[0].get_phase() == 'COMPLETED'
    assert job_list[1].jobid == '77777'
    assert job_list[1].get_phase() == 'ERROR'
Ejemplo n.º 7
0
def getColumns(catalog):
    """Return the column metadata of *catalog* by fetching its first row."""
    from astroquery.utils.tap.core import TapPlus
    vizier_tap = TapPlus(url="http://TAPVizieR.u-strasbg.fr/TAPVizieR/tap")
    first_row_query = """SELECT Top 1 * FROM {}""".format(catalog)
    async_job = vizier_tap.launch_job_async(first_row_query, dump_to_file=False)
    return async_job.get_results().columns
Ejemplo n.º 8
0
    def __init__(self, tap_handler=None):
        """Create the client; build a default TAP handler when none given."""
        super(ISOClass, self).__init__()

        # Fall back to a TapPlus instance pointed at the metadata service.
        self._tap = tap_handler if tap_handler is not None \
            else TapPlus(url=self.metadata_url)
Ejemplo n.º 9
0
    def __init__(self, tap_handler=None):
        """Create the client; build a default TAP handler when none given."""
        super(ESAHubbleClass, self).__init__()

        if tap_handler is not None:
            self._tap = tap_handler
        else:
            # Default to the ESA Hubble archive TAP endpoint.
            self._tap = TapPlus(url="http://hst.esac.esa.int"
                                    "/tap-server/tap/")
Ejemplo n.º 10
0
def master_table_query(args):
    """Query the SkyMapper DR1 master table for a range of object_ids.

    Runs an asynchronous TAP query against the SkyMapper archive for rows
    with object_id between args.object_id_lo and args.object_id_hi, sorts
    the result by object_id and writes it to args.outputFile.

    :param args: argparse-style namespace with attributes outputFile,
        object_id_lo, object_id_hi and verbose (int).
    :return: 0 on success.
    """
    # Python-2 print statements converted to print() calls so the function
    # is valid Python 3 like the rest of the file; unused local imports
    # (numpy, pandas, time) removed.
    import datetime
    from astroquery.utils.tap.core import TapPlus

    if args.verbose > 0:
        print()
        print('* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *')
        print('master_table_query')
        print('* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *')
        print()

    print(datetime.datetime.now())

    # Extract info from argument list...
    outputFile = args.outputFile
    object_id_lo = args.object_id_lo
    object_id_hi = args.object_id_hi

    # Establish TAP connection to SkyMapper database...
    skymapper = TapPlus(url="http://skymappertap.asvo.nci.org.au/ncitap/tap")

    # Formulate the query...
    query = """SELECT * FROM dr1.master where object_id between %d and %d""" \
        % (object_id_lo, object_id_hi)
    if args.verbose > 0:
        print("Query:  ", query)

    # Submit the query as an asynchronous job...
    if args.verbose > 0:
        print("Query start:      ", datetime.datetime.now())
    job = skymapper.launch_job_async(query)
    if args.verbose > 0:
        print("Query completed:  ", datetime.datetime.now())

    # Retrieve the results as an astropy Table...
    if args.verbose > 0:
        print("Retrieval start:      ", datetime.datetime.now())
    master_table = job.get_results()
    if args.verbose > 0:
        print("Retrieval completed:  ", datetime.datetime.now())

    # Sort table by object_id
    master_table.sort('object_id')

    # Save table to outputFile...
    if args.verbose > 0:
        print("File output start:      ", datetime.datetime.now())
    master_table.write(outputFile)
    if args.verbose > 0:
        print("File output completed:  ", datetime.datetime.now())

    if args.verbose > 0:
        print()

    return 0
Ejemplo n.º 11
0
def VizierTables():
    """Print qualified names of VizieR TAP tables matching survey keywords."""
    from astroquery.utils.tap.core import TapPlus
    vizier_tap = TapPlus(url="http://TAPVizieR.u-strasbg.fr/TAPVizieR/tap")
    survey_keywords = ("sdss", "2mass", "gaia", "tess", "usno")
    for table in vizier_tap.load_tables(only_names=True):
        lowered = table.get_qualified_name().lower()
        # Note: a name matching several keywords is printed once per match,
        # mirroring the original nested-loop behaviour.
        for keyword in survey_keywords:
            if keyword in lowered:
                print(table.get_qualified_name())
Ejemplo n.º 12
0
    def test_launch_sync_job(self):
        """Synchronous launch_job: a 500 response raises; a 200 response
        yields a COMPLETED job whose result table has the expected columns."""
        connHandler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
        responseLaunchJob = DummyResponse()
        responseLaunchJob.set_status_code(500)
        responseLaunchJob.set_message("ERROR")
        jobDataFile = data_path('job_1.vot')
        jobData = utils.read_file_content(jobDataFile)
        responseLaunchJob.set_data(method='POST',
                                   context=None,
                                   body=jobData,
                                   headers=None)
        query = 'select top 5 * from table'
        # Reproduce the client's URL-encoding of the query (text after '=').
        dTmp = {"q": query}
        dTmpEncoded = connHandler.url_encode(dTmp)
        p = dTmpEncoded.find("=")
        q = dTmpEncoded[p + 1:]
        dictTmp = {
            "REQUEST": "doQuery",
            "LANG": "ADQL",
            "FORMAT": "votable",
            "tapclient": str(TAP_CLIENT_ID),
            "PHASE": "RUN",
            "QUERY": str(q)
        }
        sortedKey = taputils.taputil_create_sorted_dict_key(dictTmp)
        jobRequest = "sync?" + sortedKey
        connHandler.set_response(jobRequest, responseLaunchJob)

        with pytest.raises(Exception):
            tap.launch_job(query)

        responseLaunchJob.set_status_code(200)
        responseLaunchJob.set_message("OK")
        job = tap.launch_job(query)
        assert job is not None, "Expected a valid job"
        assert job.async_ is False, "Expected a synchronous job"
        assert job.get_phase() == 'COMPLETED', \
            "Wrong job phase. Expected: %s, found %s" % \
            ('COMPLETED', job.get_phase())
        assert job.failed is False, "Wrong job status (set Failed = True)"
        # results
        results = job.get_results()
        assert len(results) == 3, \
            "Wrong job results (num rows). Expected: %d, found %d" % \
            (3, len(results))
        self.__check_results_column(results, 'alpha', 'alpha', None,
                                    np.float64)
        self.__check_results_column(results, 'delta', 'delta', None,
                                    np.float64)
        # np.object was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin object is the equivalent dtype marker.
        self.__check_results_column(results, 'source_id', 'source_id', None,
                                    object)
        self.__check_results_column(results, 'table1_oid', 'table1_oid', None,
                                    np.int32)
Ejemplo n.º 13
0
 def connect(self, url=""):
     """Return a TapPlus connector, (re)creating it when needed.

     A connector is built on first call (url required) or whenever a
     non-empty url is supplied; otherwise the cached connector is reused.

     :param url: TAP server URL; mandatory on the first call.
     :return: the active TapPlus connector.
     :raises Exception: if no connector exists yet and url is empty.
     """
     # 'is None' instead of '== None' (identity test for the singleton).
     if self.__tap_connector is None:
         if url == "":
             raise Exception("provide URL TAP Server")
         self.__tap_connector = TapPlus(url=url)
     elif url != "":
         # An explicit URL on a later call replaces the cached connector.
         self.__tap_connector = TapPlus(url=url)
     # Always hand back the current connector; previously the creation
     # branches fell through and implicitly returned None.
     return self.__tap_connector
Ejemplo n.º 14
0
    def test_load_tables(self):
        """load_tables: a 500 response raises; a 200 response yields the two
        tables described in data/test_tables.xml, checked column by column
        (this variant uses the attribute-style table API)."""
        connHandler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
        responseLoadTable = DummyResponse()
        responseLoadTable.set_status_code(500)
        responseLoadTable.set_message("ERROR")
        tableDataFile = data_path('test_tables.xml')
        tableData = utils.read_file_content(tableDataFile)
        responseLoadTable.set_data(method='GET',
                                   context=None,
                                   body=tableData,
                                   headers=None)
        tableRequest = "tables"
        connHandler.set_response(tableRequest, responseLoadTable)
        # Server error must surface as an exception.
        with pytest.raises(Exception):
            tap.load_tables()

        # Same canned body, now with a success status.
        responseLoadTable.set_status_code(200)
        responseLoadTable.set_message("OK")
        res = tap.load_tables()
        assert len(res) == 2, \
            "Number of tables expected: %d, found: %d" % (2, len(res))
        # Table 1
        table = self.__find_table('public', 'table1', res)
        assert table.description == 'Table1 desc', \
            "Wrong description for table1. Expected: %s, found %s" % \
            ('Table1 desc', table.description)
        columns = table.columns
        assert len(columns) == 2, \
            "Number of columns for table1. Expected: %d, found: %d" % \
            (2, len(columns))
        col = self.__find_column('table1_col1', columns)
        self.__check_column(col, 'Table1 Column1 desc', '', 'VARCHAR',
                            'indexed')
        col = self.__find_column('table1_col2', columns)
        self.__check_column(col, 'Table1 Column2 desc', '', 'INTEGER', None)
        # Table 2
        table = self.__find_table('public', 'table2', res)
        assert table.description == 'Table2 desc', \
            "Wrong description for table2. Expected: %s, found %s" % \
            ('Table2 desc', table.description)
        columns = table.columns
        assert len(columns) == 3, \
            "Number of columns for table2. Expected: %d, found: %d" % \
            (3, len(columns))
        col = self.__find_column('table2_col1', columns)
        self.__check_column(col, 'Table2 Column1 desc', '', 'VARCHAR',
                            'indexed')
        col = self.__find_column('table2_col2', columns)
        self.__check_column(col, 'Table2 Column2 desc', '', 'INTEGER', None)
        col = self.__find_column('table2_col3', columns)
        self.__check_column(col, 'Table2 Column3 desc', '', 'INTEGER', None)
Ejemplo n.º 15
0
    def test_abort_job(self):
        """Abort an async job created with autorun=False: the phase moves
        from PENDING to ABORT, and a second abort raises."""
        connHandler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
        jobid = '12345'
        # Phase POST response
        responsePhase = DummyResponse()
        responsePhase.set_status_code(200)
        responsePhase.set_message("OK")
        responsePhase.set_data(method='POST',
                               context=None,
                               body=None,
                               headers=None)
        req = "async/" + jobid + "/phase?PHASE=ABORT"
        connHandler.set_response(req, responsePhase)
        # Launch response
        responseLaunchJob = DummyResponse()
        responseLaunchJob.set_status_code(303)
        responseLaunchJob.set_message("OK")
        # list of list (httplib implementation for headers in response)
        launchResponseHeaders = [[
            'location', 'http://test:1111/tap/async/' + jobid
        ]]
        responseLaunchJob.set_data(method='POST',
                                   context=None,
                                   body=None,
                                   headers=launchResponseHeaders)
        query = 'query'
        dictTmp = {
            "REQUEST": "doQuery",
            "LANG": "ADQL",
            "FORMAT": "votable",
            "tapclient": str(TAP_CLIENT_ID),
            "QUERY": str(query)
        }
        sortedKey = taputils.taputil_create_sorted_dict_key(dictTmp)
        req = "async?" + sortedKey
        connHandler.set_response(req, responseLaunchJob)

        # autorun=False leaves the job PENDING until explicitly started.
        job = tap.launch_job_async(query, autorun=False)
        assert job is not None, "Expected a valid job"
        assert job.get_phase() == 'PENDING', \
            "Wrong job phase. Expected: %s, found %s" % \
            ('PENDING', job.get_phase())
        # abort job
        job.abort()
        assert job.get_phase() == 'ABORT', \
            "Wrong job phase. Expected: %s, found %s" % \
            ('ABORT', job.get_phase())
        # try to abort again
        with pytest.raises(Exception):
            job.abort()
Ejemplo n.º 16
0
    def test_load_tables(self):
        """load_tables: a 500 response raises; a 200 response yields the two
        tables described in data/test_tables.xml, checked column by column
        (this variant uses the getter-style table API)."""
        connHandler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
        responseLoadTable = DummyResponse()
        responseLoadTable.set_status_code(500)
        responseLoadTable.set_message("ERROR")
        tableDataFile = data_path('test_tables.xml')
        tableData = utils.read_file_content(tableDataFile)
        responseLoadTable.set_data(method='GET',
                                   context=None,
                                   body=tableData,
                                   headers=None)
        tableRequest = "tables"
        connHandler.set_response(tableRequest, responseLoadTable)
        # Server error must surface as an exception.
        with pytest.raises(Exception):
            tap.load_tables()

        # Same canned body, now with a success status.
        responseLoadTable.set_status_code(200)
        responseLoadTable.set_message("OK")
        res = tap.load_tables()
        assert len(res) == 2, \
            "Number of tables expected: %d, found: %d" % (2, len(res))
        # Table 1
        table = self.__find_table('public', 'table1', res)
        assert table.get_description() == 'Table1 desc', \
            "Wrong description for table1. Expected: %s, found %s" % \
            ('Table1 desc', table.get_description())
        columns = table.get_columns()
        assert len(columns) == 2, \
            "Number of columns for table1. Expected: %d, found: %d" % \
            (2, len(columns))
        col = self.__find_column('table1_col1', columns)
        self.__check_column(col, 'Table1 Column1 desc', '', 'VARCHAR', 'indexed')
        col = self.__find_column('table1_col2', columns)
        self.__check_column(col, 'Table1 Column2 desc', '', 'INTEGER', None)
        # Table 2
        table = self.__find_table('public', 'table2', res)
        assert table.get_description() == 'Table2 desc', \
            "Wrong description for table2. Expected: %s, found %s" % \
            ('Table2 desc', table.get_description())
        columns = table.get_columns()
        assert len(columns) == 3, \
            "Number of columns for table2. Expected: %d, found: %d" % \
            (3, len(columns))
        col = self.__find_column('table2_col1', columns)
        self.__check_column(col, 'Table2 Column1 desc', '', 'VARCHAR', 'indexed')
        col = self.__find_column('table2_col2', columns)
        self.__check_column(col, 'Table2 Column2 desc', '', 'INTEGER', None)
        col = self.__find_column('table2_col3', columns)
        self.__check_column(col, 'Table2 Column3 desc', '', 'INTEGER', None)
Ejemplo n.º 17
0
def bailerjones_new(src_id):
    """Fetch Bailer-Jones EDR3 geometric and photogeometric distance
    estimates for a single Gaia source id from the GAVO TAP service."""
    tap_service = TapPlus(url="http://dc.zah.uni-heidelberg.de/tap",
                          verbose=False)

    distance_query = f"""SELECT 
                                source_id,
                                r_med_geo, r_lo_geo, r_hi_geo,
                                r_med_photogeo, r_lo_photogeo, r_hi_photogeo 
                           FROM gedr3dist.main
                              WHERE source_id = {src_id}
                        """

    return tap_service.launch_job(distance_query).get_results()
Ejemplo n.º 18
0
def doquery_from_table(table):
    """Execute a TAP+ query using information from a single row of an
    astropy table (columns: queryurl, query, outfile, and optionally
    upload_table_name / upload_resource)."""
    tap_server = TapPlus(url=table['queryurl'],
                         default_protocol_is_https=True,
                         verbose=True)
    launch_kwargs = dict(query=table['query'], verbose=True,
                         dump_to_file=True, output_file=table['outfile'])
    if table['upload_table_name'] != '':
        # Attach the user-supplied upload table when one is named.
        launch_kwargs.update(upload_table_name=table['upload_table_name'],
                             upload_resource=table['upload_resource'])
    return tap_server.launch_job_async(**launch_kwargs)
Ejemplo n.º 19
0
def get_nasa_exoplanet_archive_pscomppars(ver=TODAYSTR, N_max=int(1e4)):
    """
    If newestpossible is True, will download the latest NEA pscomppars table,
    from today. Otherwise, it'll take the most recent from those already
    downloaded.

    :param ver: version string embedded in the cached CSV file name.
    :param N_max: maximum number of rows requested from the archive.
    :return: pandas DataFrame of the selected pscomppars columns.
    """

    from astroquery.utils.tap.core import TapPlus

    savedir = os.path.join(LOCALDIR, 'catalogs')

    if not os.path.exists(savedir):
        # Catch only OS-level failures: the original bare except also
        # swallowed KeyboardInterrupt/SystemExit.
        try:
            os.mkdir(savedir)
        except OSError:
            raise NotImplementedError(f'Tried to make {savedir} and failed.')

    nea_path = os.path.join(savedir,
                            f'nasaexoplanetarchive-pscomppars-{ver}.csv')

    if not os.path.exists(nea_path):

        tap = TapPlus(url="https://exoplanetarchive.ipac.caltech.edu/TAP/")
        query = (
            f'select top {N_max} ' +
            'pl_name, hostname, pl_letter, gaia_id, tic_id, ra, dec, ' +
            'discoverymethod, disc_year, pl_orbper, pl_orbsmax, pl_rade, ' +
            'pl_radeerr1, pl_radeerr2, ' + 'pl_radjerr1, pl_radjerr2, ' +
            'pl_radj, pl_bmasse, pl_bmasseerr1, pl_bmasseerr2, pl_bmassj, ' +
            'pl_bmassjerr1, pl_bmassjerr2, pl_orbeccen, pl_imppar, ' +
            'pl_insol, pl_insolerr1, pl_insolerr2, ' +
            'pl_eqt, pl_eqt, pl_eqt, ' +
            'st_teff, st_rad, st_mass, st_met, st_logg, st_rotp, sy_dist, ' +
            'sy_disterr1, sy_disterr2, sy_plx, sy_plxerr1, sy_plxerr2, ' +
            'sy_vmag, sy_tmag, ' + 'tran_flag, ' +
            'st_age, st_ageerr1, st_ageerr2 from pscomppars')
        print(query)
        j = tap.launch_job(query=query)
        r = j.get_results()

        # Exactly N_max rows would mean the TOP clause truncated the table.
        assert len(r) != N_max

        df = r.to_pandas()
        df.to_csv(nea_path, index=False)

    else:
        df = pd.read_csv(nea_path, sep=',')

    return df
Ejemplo n.º 20
0
def test_launch_sync_job():
    """Synchronous launch_job: a 500 response raises; a 200 response yields
    a COMPLETED job whose table has the expected columns."""
    conn_handler = DummyConnHandler()
    tap = TapPlus("http://test:1111/tap", connhandler=conn_handler)
    launch_response = DummyResponse()
    launch_response.set_status_code(500)
    launch_response.set_message("ERROR")
    vot_data = utils.read_file_content(data_path('job_1.vot'))
    launch_response.set_data(method='POST',
                             context=None,
                             body=vot_data,
                             headers=None)
    query = 'select top 5 * from table'
    # Reproduce the client's URL-encoding of the query (text after '=').
    encoded = conn_handler.url_encode({"q": query})
    encoded_query = encoded[encoded.find("=") + 1:]
    request_params = {
        "REQUEST": "doQuery",
        "LANG": "ADQL",
        "FORMAT": "votable",
        "tapclient": str(TAP_CLIENT_ID),
        "PHASE": "RUN",
        "QUERY": str(encoded_query)
    }
    sorted_key = taputils.taputil_create_sorted_dict_key(request_params)
    conn_handler.set_response(f"sync?{sorted_key}", launch_response)

    # Server error -> launch_job must raise.
    with pytest.raises(Exception):
        tap.launch_job(query)

    launch_response.set_status_code(200)
    launch_response.set_message("OK")
    job = tap.launch_job(query)

    assert job is not None
    assert job.async_ is False
    assert job.get_phase() == 'COMPLETED'
    assert job.failed is False

    # results
    results = job.get_results()
    assert len(results) == 3
    __check_results_column(results, 'ra', 'ra', None, np.float64)
    __check_results_column(results, 'dec', 'dec', None, np.float64)
    __check_results_column(results, 'source_id', 'source_id', None, object)
    __check_results_column(results, 'table1_oid', 'table1_oid', None, np.int32)
Ejemplo n.º 21
0
 def test_get_epic_metadata(self):
     """get_epic_metadata returns four tables (EPIC source, 4XMM catalogue,
     4XMM stacked catalogue, slew source catalogue); each must match the
     equivalent direct cone-search TAP query against the XSA views."""
     tap_url = "http://nxsadev.esac.esa.int/tap-server/tap/"
     target_name = "4XMM J122934.7+015657"
     radius = 0.01
     epic_source_table = "xsa.v_epic_source"
     epic_source_column = "epic_source_equatorial_spoint"
     cat_4xmm_table = "xsa.v_epic_source_cat"
     cat_4xmm_column = "epic_source_cat_equatorial_spoint"
     stack_4xmm_table = "xsa.v_epic_xmm_stack_cat"
     stack_4xmm_column = "epic_stack_cat_equatorial_spoint"
     slew_source_table = "xsa.v_slew_source_cat"
     slew_source_column = "slew_source_cat_equatorial_spoint"
     xsa = XMMNewtonClass(TapPlus(url=tap_url))
     epic_source, cat_4xmm, stack_4xmm, slew_source = xsa.get_epic_metadata(
         target_name=target_name, radius=radius)
     # Resolve the target to coordinates for the reference cone searches.
     c = SkyCoord.from_name(target_name, parse=True)
     query = ("select * from {} "
              "where 1=contains({}, circle('ICRS', {}, {}, {}));")
     table = xsa.query_xsa_tap(
         query.format(epic_source_table, epic_source_column, c.ra.degree,
                      c.dec.degree, radius))
     assert report_diff_values(epic_source, table)
     table = xsa.query_xsa_tap(
         query.format(cat_4xmm_table, cat_4xmm_column, c.ra.degree,
                      c.dec.degree, radius))
     assert report_diff_values(cat_4xmm, table)
     table = xsa.query_xsa_tap(
         query.format(stack_4xmm_table, stack_4xmm_column, c.ra.degree,
                      c.dec.degree, radius))
     assert report_diff_values(stack_4xmm, table)
     table = xsa.query_xsa_tap(
         query.format(slew_source_table, slew_source_column, c.ra.degree,
                      c.dec.degree, radius))
     assert report_diff_values(slew_source, table)
Ejemplo n.º 22
0
def get_exoplanetarchive_planetarysystems(tabletype="ps", overwrite=1,
                                          n_max=int(3e5), verbose=1):
    """
    Args:
        overwrite: if true, will download the LATEST tables from the NASA
        exoplanet archive. otherwise, uses the cache.

        tabletype: "ps" or "pscomppars"

    Returns:
        dataframe with the Planetary Systems, "ps" table. (Nb. there is a
        composite parameters table too, "pscomppars").
        https://exoplanetarchive.ipac.caltech.edu/docs/API_PS_columns.html
    """

    dlpath = os.path.join(CACHEDIR, f'{tabletype}_exoplanetarchive.xml.gz')

    # Drop any stale cache when a fresh download is requested.
    if overwrite and os.path.exists(dlpath):
        os.remove(dlpath)

    if not os.path.exists(dlpath):
        tap = TapPlus(url="https://exoplanetarchive.ipac.caltech.edu/TAP/")

        query = (
        f'''
        SELECT top {n_max:d} *
        FROM {tabletype}
        '''
        )

        if verbose:
            print(20*'.')
            print('Executing:')
            print(query)
            print(20*'.')

        # Dump straight to the cache path; the job return value is unused.
        tap.launch_job(query=query, verbose=True, dump_to_file=True,
                       output_file=dlpath)

    return given_votable_get_df(dlpath, assert_equal=None)
Ejemplo n.º 23
0
def test_datalink():
    """get_datalinks: missing ids raises on a server error; with a 200
    response the same links resolve for string and list id arguments."""
    conn_handler = DummyConnHandler()
    tap = TapPlus("http://test:1111/tap",
                  datalink_context="datalink",
                  connhandler=conn_handler)
    datalink_response = DummyResponse()
    datalink_response.set_status_code(200)
    datalink_response.set_message("OK")
    vot_data = utils.read_file_content(data_path('job_1.vot'))
    datalink_response.set_data(method='GET',
                               context=None,
                               body=vot_data,
                               headers=None)
    conn_handler.set_response("links?ID=1,2", datalink_response)

    # error
    datalink_response.set_status_code(500)
    datalink_response.set_message("ERROR")
    with pytest.raises(Exception):
        # missing IDS parameter
        tap.get_datalinks(ids=None)

    # OK
    datalink_response.set_status_code(200)
    datalink_response.set_message("OK")
    # The same canned VOTable (3 rows) must be returned whether ids is a
    # comma string, a list of ints, or a list of strings.
    for ids in ("1,2", [1, 2], ['1', '2']):
        assert len(tap.get_datalinks(ids)) == 3
Ejemplo n.º 24
0
 def test_logout(self, mock_logout):
     """logout triggers two mocked logout calls; it is invoked again after
     the mock is set to raise HTTPError, reaching a count of three."""
     dummy_handler = DummyConnHandler()
     tap_plus = TapPlus("http://test:1111/tap", connhandler=dummy_handler)
     gaia = GaiaClass(dummy_handler, tap_plus, show_server_messages=False)
     gaia.logout()
     assert mock_logout.call_count == 2
     mock_logout.side_effect = HTTPError("Login error")
     gaia.logout()
     assert mock_logout.call_count == 3
Ejemplo n.º 25
0
 def test_login_gui(self, mock_login_gui, mock_login):
     """Check the mocked call counts for login_gui and login."""
     dummy_handler = DummyConnHandler()
     tap_plus = TapPlus("http://test:1111/tap", connhandler=dummy_handler)
     gaia = GaiaClass(dummy_handler, tap_plus, show_server_messages=False)
     gaia.login_gui()
     assert mock_login_gui.call_count == 1
     # login is exercised separately after login_gui is made to raise.
     mock_login_gui.side_effect = HTTPError("Login error")
     gaia.login("user", "password")
     assert mock_login.call_count == 1
Ejemplo n.º 26
0
    def download_extended_catalog(self, merge=True):
        """ Downloads smaller catalog source from PS:
        https://outerspace.stsci.edu/display/PANSTARRS/PS1+Source+extraction+and+catalogs
        
        Here merging:
        StackModelFitDeV: 
            Contains the de Vaucouleurs fit parameters for stack
            detections brighter than some limit (is this a S/N cut or a mag limit?) 
            outside the galactic plane. 
            All filters are matched into a single row. 
            Given are mag, radius, axial ratio, position angle, RA, Dec, chisq of fit.
        
        StackModelFitSer: 
            Contains the Sersic fit parameters for stack 
            detections brighter than magnitude 21.5 outside the galactic plane. 
            All filters are matched into a single row. 
            Given are mag, radius, axial ratio, Sersic index, position angle, RA, Dec, chisq of fit.

        Parameters
        ----------
        merge : bool
            if True, merge the fetched rows into self.catdata (on objID) and
            set the extended-catalog flag; if False, return the raw results.
        """
        # Make sure the base catalog is present before cross-matching.
        if self.catdata is None:
            self.download_catalog(update=True)
        # Comma-separated PS1 object IDs for the SQL IN clause below.
        objid = ",".join(["%s" % s for s in self.catdata["objID"].values])

        from astroquery.utils.tap.core import TapPlus
        tap_service = TapPlus(
            url="http://vao.stsci.edu/PS1DR2/tapservice.aspx")

        job = tap_service.launch_job_async("""
        SELECT *
        FROM dbo.StackModelFitDeV AS dev
        LEFT JOIN dbo.StackModelFitSer AS ser
            ON dev.objID = ser.objID
        WHERE
        dev.objID IN ({})
        """.format(objid))
        results = job.get_results().to_pandas()

        if merge:
            self._catdata = pandas.merge(self.catdata, results, on="objID")
            self._is_extended_cat_set = True
        else:
            self._is_extended_cat_set = False
            return results
    def test_query_object(self):
        """query_object requires width+height (or radius); successful calls
        return a 3-row table with the expected columns."""
        connHandler = DummyConnHandler()
        tapplus = TapPlus("http://test:1111/tap", connhandler=connHandler)
        tap = GaiaClass(connHandler, tapplus)
        # Launch response: we use default response because the query contains
        # decimals
        responseLaunchJob = DummyResponse()
        responseLaunchJob.set_status_code(200)
        responseLaunchJob.set_message("OK")
        jobDataFile = data_path('job_1.vot')
        jobData = utils.read_file_content(jobDataFile)
        responseLaunchJob.set_data(method='POST',
                                   context=None,
                                   body=jobData,
                                   headers=None)
        # The query contains decimals: force default response
        connHandler.set_default_response(responseLaunchJob)
        sc = SkyCoord(ra=29.0,
                      dec=15.0,
                      unit=(u.degree, u.degree),
                      frame='icrs')
        with pytest.raises(ValueError) as err:
            tap.query_object(sc)
        assert "Missing required argument: 'width'" in err.value.args[0]

        width = Quantity(12, u.deg)

        with pytest.raises(ValueError) as err:
            tap.query_object(sc, width=width)
        assert "Missing required argument: 'height'" in err.value.args[0]

        height = Quantity(10, u.deg)
        table = tap.query_object(sc, width=width, height=height)
        assert len(table) == 3, \
            "Wrong job results (num rows). Expected: %d, found %d" % \
            (3, len(table))
        self.__check_results_column(table, 'alpha', 'alpha', None, np.float64)
        self.__check_results_column(table, 'delta', 'delta', None, np.float64)
        # np.object was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin object is the equivalent dtype marker.
        self.__check_results_column(table, 'source_id', 'source_id', None,
                                    object)
        self.__check_results_column(table, 'table1_oid', 'table1_oid', None,
                                    np.int32)
        # by radius
        radius = Quantity(1, u.deg)
        table = tap.query_object(sc, radius=radius)
        assert len(table) == 3, \
            "Wrong job results (num rows). Expected: %d, found %d" % \
            (3, len(table))
        self.__check_results_column(table, 'alpha', 'alpha', None, np.float64)
        self.__check_results_column(table, 'delta', 'delta', None, np.float64)
        self.__check_results_column(table, 'source_id', 'source_id', None,
                                    object)
        self.__check_results_column(table, 'table1_oid', 'table1_oid', None,
                                    np.int32)
Ejemplo n.º 28
0
 def __init__(self, service_url, schema, master_table, natural_join=None):
     """Bind a TAP service and resolve the master table's metadata.

     :param service_url: URL of the TAP service to connect to.
     :param schema: schema name containing the tables of interest.
     :param master_table: name of the central table within the schema.
     :param natural_join: optional join specification forwarded to
         GraphControl.get_table (semantics defined by that helper).
     """
     self.service_url = service_url
     self.schema = schema
     self.natural_join = natural_join
     self.service = TapPlus(url=self.service_url)
     # Resolve the master table via the shared GraphControl helper.
     self.master_table = GraphControl.get_table(self.service,
                                                schema,
                                                master_table,
                                                None,
                                                None,
                                                natural_join=natural_join)
Ejemplo n.º 29
0
def test_rename_table():
    """Check rename_table argument validation and a successful rename."""
    table_name = 'user_test.table_test_rename'
    renamed_name = 'user_test.table_test_rename_new'
    column_mapping = {'ra': 'alpha', 'dec': 'delta'}
    conn_handler = DummyConnHandler()
    tap = TapPlus("http://test:1111/tap", connhandler=conn_handler)

    # Canned 200/OK answer carrying the renamed-table metadata XML.
    metadata_response = DummyResponse()
    metadata_response.set_status_code(200)
    metadata_response.set_message("OK")
    table_xml = utils.read_file_content(data_path('test_table_rename.xml'))
    metadata_response.set_data(method='GET', context=None, body=table_xml,
                               headers=None)

    # Missing or None arguments must be rejected up front.
    with pytest.raises(Exception):
        tap.rename_table()
    with pytest.raises(Exception):
        tap.rename_table(table_name=table_name)
    with pytest.raises(Exception):
        tap.rename_table(table_name=table_name,
                         new_table_name=None,
                         new_column_names_dict=None)

    # Test OK.
    rename_response = DummyResponse()
    rename_response.set_status_code(200)
    rename_response.set_message("OK")
    expected_args = {
        "action": "rename",
        "new_column_names": "ra:alpha,dec:delta",
        "new_table_name": renamed_name,
        "table_name": table_name,
    }
    encoded = conn_handler.url_encode(expected_args)
    conn_handler.set_response(f"TableTool?{encoded}", rename_response)
    tap.rename_table(table_name=table_name,
                     new_table_name=renamed_name,
                     new_column_names_dict=column_mapping)
Ejemplo n.º 30
0
def query_gaia(url: str, query: str) -> Union[Table, None]:
    """
    Query Gaia via a TapPlus query

    :param url: str, the URL to the SQL database
    :param query: str, the SQL query to run

    :return: astropy.table.Table or None - the results of the gaia TAP query
    """
    # set function name (reported in the warning log entries below)
    func_name = __NAME__ + '.query_gaia()'
    # check for astroquery and return a fail and warning if not installed
    try:
        from astroquery.utils.tap.core import TapPlus

    except Exception as e:
        # astroquery is optional: log a warning and bail out instead of
        # crashing the caller
        eargs = [type(e), str(e), func_name]
        WLOG(params, 'warning', TextEntry('10-016-00009', args=eargs))
        return None
    # ------------------------------------------------------------------
    # try running gaia query; warnings from the TAP machinery are
    # captured and discarded
    try:
        with warnings.catch_warnings(record=True) as _:
            # construct gaia TapPlus instance
            gaia = TapPlus(url=url)
            # launch gaia job (synchronous)
            job = gaia.launch_job(query=query)
            # get gaia table
            table = job.get_results()
    except Exception as e:
        # any failure (network, bad ADQL, server error) is downgraded to
        # a warning; NOTE(review): 'params' and WLOG are module globals
        # defined outside this view
        wargs = [url, query, type(e), e, func_name]
        WLOG(params, 'warning', TextEntry('10-016-00008', args=wargs))
        # return No row and True to fail
        return None
    # ------------------------------------------------------------------
    # if we have no entries we did not find object
    if len(table) == 0:
        # return None
        return None
    # else we return result
    return table
Ejemplo n.º 31
0
def query_gaia():
    """
    Fetch the first 100k Gaia DR1 sources and scatter-plot a random
    subset in 3D cartesian coordinates.

    Relies on module-level ``ax`` (a 3D axes) and ``plt`` being set up by
    the caller; shows the figure as a side effect and returns None.
    """
    gaia = TapPlus(url='http://gea.esac.esa.int/tap-server/tap')

    job = gaia.launch_job_async('select top 100000 ra, dec, phot_g_mean_mag \
        from gaiadr1.gaia_source order by source_id')

    results = job.get_results()

    results = np.array(
        [result.as_void().view((float, 3)) for result in results])

    # BUG FIX: Gaia publishes ra/dec in degrees, but np.cos/np.sin expect
    # radians -- convert before projecting to cartesian coordinates.
    ra = np.radians(results[:, 0])
    dec = np.radians(results[:, 1])
    # NOTE(review): the G magnitude is used as a stand-in radial distance;
    # not a physical distance, just a visualisation choice -- confirm.
    mag = results[:, 2]

    x = mag * np.cos(ra) * np.cos(dec)
    y = mag * np.sin(ra) * np.cos(dec)
    z = mag * np.sin(dec)

    # Plot only a random subset so the scatter stays readable.
    sample_size = 1000
    random_idns = np.random.choice(results.shape[0], sample_size)

    ax.scatter(x[random_idns], y[random_idns], z[random_idns], s=3)

    plt.show()
Ejemplo n.º 32
0
    def test_load_table(self):
        """Load a single table definition and verify its metadata."""
        conn_handler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=conn_handler)

        # Prime the canned response with the table XML but a 500 status:
        # load_table must raise on a server error.
        response = DummyResponse()
        response.set_status_code(500)
        response.set_message("ERROR")
        table_xml = utils.read_file_content(data_path('test_table1.xml'))
        response.set_data(method='GET', context=None, body=table_xml,
                          headers=None)

        schema_name = "public"
        table_name = "table1"
        qualified_name = schema_name + "." + table_name
        conn_handler.set_response("tables?tables=" + qualified_name,
                                  response)

        with pytest.raises(Exception):
            tap.load_table(qualified_name)

        # Flip the very same response object to 200/OK and load for real.
        response.set_status_code(200)
        response.set_message("OK")
        table = tap.load_table(qualified_name)
        assert table is not None, \
            "Table '%s' not found" % (qualified_name)
        assert table.get_description() == 'Table1 desc', \
            "Wrong description for table1. Expected: %s, found %s" % \
            ('Table1 desc', table.get_description())
        columns = table.get_columns()
        assert len(columns) == 2, \
            "Number of columns for table1. Expected: %d, found: %d" % \
            (2, len(columns))
        # Validate both columns via a single data-driven loop.
        for name, desc, dbtype, flag in (
                ('table1_col1', 'Table1 Column1 desc', 'VARCHAR', 'indexed'),
                ('table1_col2', 'Table1 Column2 desc', 'INTEGER', None)):
            col = self.__find_column(name, columns)
            self.__check_column(col, desc, '', dbtype, flag)
Ejemplo n.º 33
0
 def test_load_tables_parameters(self):
     """
     Verify load_tables builds the correct request URL for every
     combination of the only_names / include_shared_tables flags.
     """
     connHandler = DummyConnHandler()
     tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
     responseLoadTable = DummyResponse()
     responseLoadTable.set_status_code(200)
     responseLoadTable.set_message("OK")
     tableDataFile = data_path('test_tables.xml')
     tableData = utils.read_file_content(tableDataFile)
     responseLoadTable.set_data(method='GET',
                                context=None,
                                body=tableData,
                                headers=None)

     def check(tableRequest, label, **kwargs):
         # Register the single canned response under this URL, run the
         # query and confirm the client issued exactly that request.
         connHandler.set_response(tableRequest, responseLoadTable)
         tap.load_tables(**kwargs)
         request = connHandler.get_last_request()
         assert request == tableRequest, \
             "%s. Expected: '%s', found: '%s'" % \
             (label, tableRequest, request)

     # empty request
     check("tables", "Empty request")
     # flag only_names=false & share_accessible=false: equals to empty request
     check("tables", "Empty request",
           only_names=False, include_shared_tables=False)
     # flag only_names
     check("tables?only_tables=true", "Flag only_names",
           only_names=True)
     # flag share_accessible=true
     # BUG FIX: message previously misspelled "share_accessigle".
     check("tables?share_accessible=true", "Flag share_accessible",
           include_shared_tables=True)
     # flag only_names=true & share_accessible=true
     check("tables?only_tables=true&share_accessible=true",
           "Flags only_names and share_accessible",
           only_names=True, include_shared_tables=True)
Ejemplo n.º 34
0
    def test_launc_async_job(self):
        """
        Launch an asynchronous job end to end against canned responses.

        Every stage (launch, phase poll, results fetch) starts as a 500
        error to prove launch_job_async raises, then is flipped to success
        one stage at a time until the whole pipeline completes.

        NOTE(review): method name is missing an 'h' (launc -> launch);
        kept as-is so the test id stays stable for callers/CI.
        """
        connHandler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
        jobid = '12345'
        # Launch response
        responseLaunchJob = DummyResponse()
        responseLaunchJob.set_status_code(500)
        responseLaunchJob.set_message("ERROR")
        # list of list (httplib implementation for headers in response)
        launchResponseHeaders = [
            ['location', 'http://test:1111/tap/async/' + jobid]
            ]
        responseLaunchJob.set_data(method='POST',
                                   context=None,
                                   body=None,
                                   headers=launchResponseHeaders)
        query = 'query'
        # The request key must match the sorted parameter dict the client
        # builds internally.
        dictTmp = {
            "REQUEST": "doQuery",
            "LANG": "ADQL",
            "FORMAT": "votable",
            "tapclient": str(TAP_CLIENT_ID),
            "PHASE": "RUN",
            "QUERY": str(query)}
        sortedKey = taputils.taputil_create_sorted_dict_key(dictTmp)
        req = "async?" + sortedKey
        connHandler.set_response(req, responseLaunchJob)
        # Phase response
        responsePhase = DummyResponse()
        responsePhase.set_status_code(500)
        responsePhase.set_message("ERROR")
        responsePhase.set_data(method='GET',
                               context=None,
                               body="COMPLETED",
                               headers=None)
        req = "async/" + jobid + "/phase"
        connHandler.set_response(req, responsePhase)
        # Results response
        responseResultsJob = DummyResponse()
        responseResultsJob.set_status_code(500)
        responseResultsJob.set_message("ERROR")
        jobDataFile = data_path('job_1.vot')
        jobData = utils.read_file_content(jobDataFile)
        responseResultsJob.set_data(method='GET',
                                    context=None,
                                    body=jobData,
                                    headers=None)
        req = "async/" + jobid + "/results/result"
        connHandler.set_response(req, responseResultsJob)

        # All three stages error -> the launch itself fails.
        with pytest.raises(Exception):
            tap.launch_job_async(query)

        # Launch succeeds (303 redirect) but the phase poll still errors.
        responseLaunchJob.set_status_code(303)
        responseLaunchJob.set_message("OK")
        with pytest.raises(Exception):
            tap.launch_job_async(query)

        # Phase poll succeeds but fetching the results still errors.
        responsePhase.set_status_code(200)
        responsePhase.set_message("OK")
        with pytest.raises(Exception):
            tap.launch_job_async(query)

        # Everything succeeds end to end.
        responseResultsJob.set_status_code(200)
        responseResultsJob.set_message("OK")
        job = tap.launch_job_async(query)
        assert job is not None, "Expected a valid job"
        assert job.is_sync() is False, "Expected an asynchronous job"
        assert job.get_phase() == 'COMPLETED', \
            "Wrong job phase. Expected: %s, found %s" % \
            ('COMPLETED', job.get_phase())
        assert job.is_failed() is False, "Wrong job status (set Failed = True)"
        # results
        results = job.get_results()
        assert len(results) == 3, \
            "Wrong job results (num rows). Expected: %d, found %d" % \
            (3, len(results))
        # BUG FIX: np.object was removed in NumPy 1.24; the builtin
        # ``object`` is the documented replacement.
        for name, dtype in (('alpha', np.float64),
                            ('delta', np.float64),
                            ('source_id', object),
                            ('table1_oid', np.int32)):
            self.__check_results_column(results, name, name, None, dtype)
Ejemplo n.º 35
0
    def test_launch_sync_job(self):
        """
        Launch a synchronous job against a canned response and verify the
        returned job object plus the parsed VOTable results.
        """
        connHandler = DummyConnHandler()
        tap = TapPlus("http://test:1111/tap", connhandler=connHandler)
        responseLaunchJob = DummyResponse()
        responseLaunchJob.set_status_code(500)
        responseLaunchJob.set_message("ERROR")
        jobDataFile = data_path('job_1.vot')
        jobData = utils.read_file_content(jobDataFile)
        responseLaunchJob.set_data(method='POST',
                                   context=None,
                                   body=jobData,
                                   headers=None)
        query = 'select top 5 * from table'
        # URL-encode the query the same way the client will, so the
        # registered request key matches byte for byte.
        dTmp = {"q": query}
        dTmpEncoded = connHandler.url_encode(dTmp)
        p = dTmpEncoded.find("=")
        q = dTmpEncoded[p+1:]
        dictTmp = {
            "REQUEST": "doQuery",
            "LANG": "ADQL",
            "FORMAT": "votable",
            "tapclient": str(TAP_CLIENT_ID),
            "PHASE": "RUN",
            "QUERY": str(q)}
        sortedKey = taputils.taputil_create_sorted_dict_key(dictTmp)
        jobRequest = "sync?" + sortedKey
        connHandler.set_response(jobRequest, responseLaunchJob)

        # Server error -> launch_job must raise.
        with pytest.raises(Exception):
            tap.launch_job(query)

        # Same response flipped to 200/OK -> job completes synchronously.
        responseLaunchJob.set_status_code(200)
        responseLaunchJob.set_message("OK")
        job = tap.launch_job(query)
        assert job is not None, "Expected a valid job"
        assert job.is_sync(), "Expected a synchronous job"
        assert job.get_phase() == 'COMPLETED', \
            "Wrong job phase. Expected: %s, found %s" % \
            ('COMPLETED', job.get_phase())
        assert job.is_failed() is False, "Wrong job status (set Failed = True)"
        # results
        results = job.get_results()
        assert len(results) == 3, \
            "Wrong job results (num rows). Expected: %d, found %d" % \
            (3, len(results))
        # BUG FIX: np.object was removed in NumPy 1.24; the builtin
        # ``object`` is the documented replacement.
        for name, dtype in (('alpha', np.float64),
                            ('delta', np.float64),
                            ('source_id', object),
                            ('table1_oid', np.int32)):
            self.__check_results_column(results, name, name, None, dtype)