Example 1
 def __init__(self, coord, radius, **kwargs):
     surveycoord.SurveyCoord.__init__(self, coord, radius, **kwargs)
     self.survey = 'WISE'
     self.bands = WISE_bands
     self.service = TAPService('https://irsa.ipac.caltech.edu/TAP')
     self.query = None
     self.database = "allwise_p3as_psd"
Example 2
    def conesearch(self, ra, dec, radius, **kwargs):
        ## ToDo: rewrite to use Gaia Archive, not tap service.  Need to ditch distance param and just use parallax
        # input parameters in degrees:
        ra_ = ra.to(u.degree)
        dec_ = dec.to(u.degree)
        rad_ = radius.to(u.degree)

        maxrec = kwargs.get('maxrec', 20000)

        columnlist = self._get_col_list() + ', gd.r_est'
        dbsource = '\n\t'.join(['\nFROM gaiadr2_complements.geometric_distance gd',
                    'INNER JOIN gaiadr2.gaia_source gs using (source_id) '])

        constraints =  '\n\t'.join(['\nWHERE ', 
                'CONTAINS(POINT(\'\', gs.ra, gs.dec), ',
                '\tCIRCLE(\'\', {ra}, {dec}, {rad})) = 1 '.format(ra=ra_.value, dec=dec_.value, rad=rad_.value)])

        if self.source_constraints is not None:
            constraints = constraints + ' AND '+ self.source_constraints

        self.tap_query_string = 'SELECT \n\t\t'+ columnlist + dbsource + constraints
        
        tap_service = TAPService(self.tap_service_url)
        tap_results = tap_service.search(self.tap_query_string, maxrec=maxrec)

        self.objs = tap_results.to_table().to_pandas()
        self.objs.set_index('source_id', inplace=True)
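
For orientation, a hedged sketch of the query string that conesearch() assembles (the coordinates are placeholders and the real column list comes from _get_col_list()):

# Illustrative only: roughly the ADQL produced by the string building above.
example_tap_query_string = """SELECT
        source_id, ra, dec, parallax, pmra, pmdec, gd.r_est
FROM gaiadr2_complements.geometric_distance gd
    INNER JOIN gaiadr2.gaia_source gs using (source_id)
WHERE
    CONTAINS(POINT('', gs.ra, gs.dec),
    CIRCLE('', 132.8, 11.8, 0.5)) = 1"""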
Example 3
 def connect(self, login, password):
     '''
     Authenticate the service through CEFCA portal.
     
     Parameters
     ----------
     
         login : str
             User login, usually an email.
         
         password : str
             User password.
     '''
     self.auth = CEFCA_authenticate(login, password)
     self.service = TAPService(self.serviceUrl, self.auth)
Example 4
 def from_source_idlist(self, source_idlist, source_idcol=None, filters=False):
     #xml-ify the source_idlist to a file
     if isinstance(source_idlist, Table):
         #guess which column contains the source ids
         if source_idcol is None:
             if 'source_id' in source_idlist.colnames:
                 sidcol = 'source_id'
             elif 'source' in source_idlist.colnames:
                 sidcol = 'source'
             else:
                 raise ValueError('no column to use as source_id')
         else:
             if source_idcol not in source_idlist.colnames:
                 raise ValueError(f'invalid column specified as source id column: {source_idcol}')
             sidcol = source_idcol
             
         tbl = source_idlist
     elif isinstance(source_idlist, np.ndarray) or isinstance(source_idlist, list):
         sidcol = 'source_id'
         tbl = Table({sidcol:source_idlist})
     else:
         raise ValueError(f'invalid source_idlist type: {type(source_idlist)}')
     # write the ids as a VOTable; only needed for the commented-out
     # astroquery upload path below (the TAP call uploads `tbl` directly)
     xml_path = 'source_idlist.xml'
     tbl.write(xml_path, table_id='source_idlist', format='votable', overwrite=True)
     
     #build the query:
     col_list = self._get_col_list() + ', gd.r_est'
     
     dbsource =  ''.join([' FROM tap_upload.source_idlist sidl',
                         f' LEFT JOIN gaiadr2.gaia_source gs ON gs.source_id = sidl.{sidcol}',
                         f' LEFT JOIN gaiadr2_complements.geometric_distance gd on gs.source_id = gd.source_id' ])
     
     query_str = f'SELECT sidl.{sidcol} as "source", '+col_list+dbsource
     if filters:
         query_str = query_str + ' WHERE '+ self.source_constraints
     self.tap_query_string = query_str
     
     #fetch the data
     #job = Gaia.launch_job_async(query=query_str, upload_resource=xml_path,upload_table_name='source_idlist')
     #self.objs = job.get_results().to_pandas()
     
     #fetch data via tap query
     tap_service = TAPService(self.tap_service_url)
     tap_results = tap_service.search(self.tap_query_string, maxrec=len(tbl),uploads={'source_idlist':tbl})
     self.objs = tap_results.to_table().to_pandas()
     
     
     self.objs.set_index('source', inplace=True)
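
The key mechanism above is the inline table upload. A minimal, self-contained sketch of the same idea with pyvo (the service URL matches the other Gaia examples in this collection; the source ids are placeholders):

# Minimal sketch of a TAP table upload: a table passed via `uploads` becomes
# queryable as tap_upload.<name>. The ids below are placeholders.
from astropy.table import Table
from pyvo.dal import TAPService

ids = Table({'source_id': [1000000000000000000, 1000000000000000001]})
svc = TAPService('http://gaia.ari.uni-heidelberg.de/tap')
res = svc.search(
    'SELECT sidl.source_id, gs.ra, gs.dec'
    ' FROM tap_upload.source_idlist sidl'
    ' LEFT JOIN gaiadr2.gaia_source gs ON gs.source_id = sidl.source_id',
    uploads={'source_idlist': ids}, maxrec=len(ids))
print(res.to_table())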
Example 5
 def __init__(self):
     self.service = TAPService(nancay_tap)
     log.info(f'TAP service {nancay_tap} accessed.')
     self.meta_names = [
         'target_name', 'obs_creator_did', 's_ra', 's_dec', 't_min',
         't_max', 'em_min', 'em_max'
     ]
     self._conditions = {
         'time': '',
         'freq': '',
         'pos': '',
     }
     self.time_range = [None, None]
     self.freq_range = [None, None]
     self.fov_radius = 180 * u.deg
     self.fov_center = None
Example 6
class WISE_Survey(surveycoord.SurveyCoord):
    """
    Class to handle queries on the WISE survey

    Child of SurveyCoord which uses the IRSA TAP service

    Args:
        coord (SkyCoord): Coordinate for surveying around
        radius (Angle): Search radius around the coordinate

    """
    def __init__(self, coord, radius, **kwargs):
        surveycoord.SurveyCoord.__init__(self, coord, radius, **kwargs)
        self.survey = 'WISE'
        self.bands = WISE_bands
        self.service = TAPService('https://irsa.ipac.caltech.edu/TAP')
        self.query = None
        self.database = "allwise_p3as_psd"

    def get_catalog(self,
                    query=None,
                    query_fields=_DEFAULT_query_fields,
                    print_query=False,
                    system='AB'):
        """
        Grab a catalog of sources around the input coordinate to the search radius

        Args:
            query: Not used
            query_fields (list, optional): Over-ride list of items to query
            print_query (bool): Print the SQL query generated
            system (str): Magnitude system ['AB', 'Vega']

        Returns:
            astropy.table.Table:  Catalog of sources returned.  Includes WISE
            photometry for matched sources.

            Magnitudes are in AB by default
        """
        # Main WISE query
        if query is None:
            self._gen_cat_query(query_fields)
        if print_query:
            print(self.query)
        main_cat = self.service.run_async(self.query).to_table()
        main_cat.meta['radius'] = self.radius
        main_cat.meta['survey'] = self.survey
        if len(main_cat) == 0:
            main_cat = catalog_utils.clean_cat(main_cat,
                                               photom['WISE'],
                                               fill_mask=-999.)
            return main_cat

        main_cat = catalog_utils.clean_cat(main_cat,
                                           photom['WISE'],
                                           fill_mask=-999.)

        # Convert to AB mag
        if system == 'AB':
            fnu0 = {
                'WISE_W1': 309.54,
                'WISE_W2': 171.787,
                'WISE_W3': 31.674,
                'WISE_W4': 8.363
            }
            for item in ['1', '2', '3', '4']:
                filt = 'WISE_W' + item
                main_cat[filt] -= 2.5 * np.log10(fnu0[filt] / 3631.)
        elif system == 'Vega':
            pass

        # Finish
        self.catalog = main_cat
        self.validate_catalog()
        return self.catalog.copy()

    def get_cutout(self, imsize, filter, timeout=120):
        """
        Download an image from IRSA
        Args:
            imsize(Quantity): Size of the cutout in angular units.
            filter(str): One of "W1", "W2", "W3" or "W4"
            timeout(float): Number of seconds to wait to hear a response from
                the IRSA SIA server.
        Returns:
            imghdu(fits.HDU): Fits HDU with image
        """
        assert filter.upper() in self.bands, "Invalid filter name " + filter
        # First get a table with the image coadd_id
        meta_url = "https://irsa.ipac.caltech.edu/ibe/search/wise/allsky/4band_p3am_cdd?POS={:f},{:f}".format(
            self.coord.ra.value, self.coord.dec.value)
        img_metatab = Table.read(utils.data.download_file(meta_url,
                                                          cache=True,
                                                          show_progress=False,
                                                          timeout=timeout),
                                 format="ascii")
        coadd_id = img_metatab['coadd_id'][img_metatab['band'] == int(
            filter[1])][0]

        # Now generate the image url
        img_url = "https://irsa.ipac.caltech.edu/ibe/data/wise/allsky/4band_p3am_cdd/{:s}/{:s}/{:s}/{:s}-{:s}-int-3.fits".format(
            coadd_id[:2], coadd_id[:4], coadd_id, coadd_id, filter.lower())
        img_url += "?center={:f},{:f}&size={:f}pix".format(
            self.coord.ra.value, self.coord.dec.value,
            imsize.to("arcsec").value / 2.75)
        self.cutout = io.fits.open(
            utils.data.download_file(img_url,
                                     cache=True,
                                     show_progress=False,
                                     timeout=timeout))[0]
        self.cutout_size = imsize
        return self.cutout.copy()

    def _gen_cat_query(self, query_fields=_DEFAULT_query_fields):
        """
        Generate ADQL query for catalog search

        self.query is modified in place

        Args:
            query_fields (list):  Override the default list for the SQL query

        """
        query_field_str = ""
        for field in query_fields:
            query_field_str += " {:s},".format(field)
        # Remove last comma
        query_field_str = query_field_str[:-1]
        self.query = """SELECT{:s}
        FROM {:s}
        WHERE CONTAINS(POINT('ICRS',ra, dec), CIRCLE('ICRS',{:f},{:f},{:f}))=1""".format(
            query_field_str, self.database, self.coord.ra.value,
            self.coord.dec.value,
            self.radius.to(units.deg).value)
        return self.query
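
A hedged usage sketch of WISE_Survey (WISE_bands, photom, catalog_utils, _DEFAULT_query_fields and the other module-level names come from the surrounding package and are not shown here):

# Hedged usage sketch; module-level names are assumed from the surrounding package.
from astropy.coordinates import SkyCoord
from astropy import units

coord = SkyCoord(ra=210.01, dec=54.31, unit='deg')
survey = WISE_Survey(coord, radius=10 * units.arcsec)
catalog = survey.get_catalog(print_query=True)                 # astropy Table, AB mags
cutout_hdu = survey.get_cutout(imsize=30 * units.arcsec, filter='W1')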
Example 7
import numpy as np
import pandas as pd
import pickle

import astropy.units as u
import astropy.coordinates as coord
from astropy.coordinates.sky_coordinate import SkyCoord
from astropy.units import Quantity
from pyvo.dal import TAPService

import matplotlib.pyplot as plt
from matplotlib.pyplot import cm


tap_service_url = "http://gaia.ari.uni-heidelberg.de/tap"
tap_service = TAPService(tap_service_url)

#default query columns:

column_list = ['source_id', 'ra','dec','parallax','pmra','pmdec','radial_velocity',
                    'phot_g_mean_mag','phot_bp_mean_mag', 'phot_rp_mean_mag','r_est']

class fieldstars():


    def __init__(self, name:str):
        self.name=name
        self.coords = None
        self.tap_query_string = None

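A minimal sketch of querying the module-level service directly with the default column list, assuming the service publishes gaiadr2.gaia_source as the cone-search example above suggests (r_est is left out because it lives in the geometric-distance table):

# Minimal sketch: run a small synchronous query with the default columns.
adql = 'SELECT TOP 5 {} FROM gaiadr2.gaia_source'.format(', '.join(column_list[:-1]))
result = tap_service.search(adql)
print(result.to_table())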
Example 8
class TAPQueueManager(object):
    '''
    Submit, run, review and fetch results of TAP jobs.
    This is a little overcomplicated because, for some reason, the job list
    returned by JPAS TAPService is always empty.
    Here we keep the job list in a local file.
    
    Parameters
    ----------
    
        service_url : str
            TAP service to connect.
            Example: https://archive.cefca.es/catalogues/vo/tap/minijpas-idr201910
        
        tables_dir : str, optional
            Directory to save tables if no path is specified when downloading.
            Default: current directory.
    '''
    def __init__(self, service_url, tables_dir='.'):
        self.serviceUrl = service_url
        self.tablesDir = tables_dir
        self.auth = None
        self.service = None
        self.jobs = {}

    def connect(self, login, password):
        '''
        Authenticate the service through CEFCA portal.
        
        Parameters
        ----------
        
            login : str
                User login, usually an email.
            
            password : str
                User password.
        '''
        self.auth = CEFCA_authenticate(login, password)
        self.service = TAPService(self.serviceUrl, self.auth)

    def loadJobList(self, filename):
        '''
        Recover job information from a list kept in an ascii table in `filename`.
        The lines must contain:

            table_name job_id PHASE
            
        where job_id is an integer and PHASE is an (informative-only) string.
        
        Parameters
        ----------
        
            filename : str
                Ascii file containing the submitted jobs.
        '''
        with open(filename, 'r') as f:
            for l in f.readlines():
                w = l.split()
                if len(w) != 3:
                    continue
                tablename, job_id, phase = w
                log.debug('Recovering table %s (job id %s, phase %s)' %
                          (tablename, job_id, phase))
                self._recoverJob(tablename, job_id)

    @suppress_spec_warnings
    def saveJobList(self, filename):
        '''
        Save current submitted jobs information.
        NOTE: this will overwrite the file.
        
        Parameters
        ----------
            filename : str
                Ascii file containing the submitted jobs.
        
        '''
        with open(filename, 'w') as f:
            f.writelines('%s %s %s\n' % (t, j.job_id, j.phase)
                         for t, j in self.jobs.items())

    def _query(self, tablename):
        return 'select * from minijpas.%s' % tablename

    def _filePath(self, tablename):
        return path.join(self.tablesDir, '%s.fits' % tablename)

    @suppress_spec_warnings
    def requestTable(self, tablename, force=False, maxrec=1000000):
        '''
        Submit an asynchronous TAP job that selects an entire table.

        Parameters
        ----------
        
            tablename : str
                Name of the table, must be present in the database.
            
            force : bool, optional
                If the table is already in the queue, the default
                behaviour is to reuse that job. If `force=True`, submit another job.
            
            maxrec : int, optional
                Maximum number of records to return.
                Default: 1000000
        '''
        if tablename in self.jobs and not force:
            log.debug(
                'Table %s already submitted. Use force=True to resubmit.' %
                tablename)
            return
        query = self._query(tablename)
        j = self.service.submit_job(query, maxrec=maxrec)
        log.debug('Created job %s for table %s.' % (j.job_id, tablename))
        self.jobs[tablename] = j
        return j

    @suppress_spec_warnings
    def runNextJob(self):
        '''
        Run next pending job.
        '''
        for tab, j in self.jobs.items():
            if j.phase == 'PENDING':
                j.run()
                log.info('Started job %s (%s).' % (j.job_id, tab))
                return j
        return None

    @suppress_spec_warnings
    def _listFiltered(self, phases):
        return [t for t, j in self.jobs.items() if j.phase in phases]

    def listComplete(self):
        '''
        List complete jobs, which have results ready for download.
        
        Returns
        -------

            complete_jobs : list
                List containing the table names of the complete jobs.
        '''
        return self._listFiltered(['COMPLETED'])

    def listRunning(self):
        '''
        List jobs currently running.

        Returns
        -------

            running_jobs : list
                List containing the table names of the running jobs.
        '''
        return self._listFiltered(['RUN', 'EXECUTING'])

    def listPending(self):
        '''
        List jobs currently in queue.

        Returns
        -------

            pending_jobs : list
                List containing the table names of the queued jobs.
        '''
        return self._listFiltered(['PENDING'])

    def _downloaded(self, tablename):
        filename = self._filePath(tablename)
        return path.exists(filename)

    def removeDownload(self, tablename):
        filename = self._filePath(tablename)
        if path.exists(filename):
            log.debug('Deleting file %s.' % filename)
            # actually remove the file (assumes `os` is imported at module level)
            os.remove(filename)

    def listDownloadPending(self):
        '''
        List complete jobs, not yet downloaded to the default path.
        

        Returns
        -------

            available_jobs : list
                List containing the table names of the downloadable jobs.
        '''
        return [
            tab for tab in self.listComplete() if not self._downloaded(tab)
        ]

    @suppress_spec_warnings
    def download(self, tablename, filename=None, overwrite=False):
        '''
        Download results of job. The job must be already completed.
        The table will be saved as an `astropy.table.Table`, in FITS format.
        
        Parameters
        ----------
            tablename : str
                Name of the table to download.
            
            filename : str, optional
                Path to save the table.
                Default: `'table_dir/tablename.fits'`, where
                `table_dir` may be set in the constructor (falls back to `'.'`).
                
            overwrite : bool, optional
                Overwrite the file if it already exists.
                Will skip the download otherwise.
                Default: `False`
                
        '''
        if filename is None:
            filename = self._filePath(tablename)
        if path.exists(filename) and not overwrite:
            log.debug('File %s already exists, skipping download.' % filename)
            return

        j = self.jobs[tablename]
        if j.phase != 'COMPLETED':
            raise Exception(
                'Job %s (table %s) is in phase %s. Download unavailable.' %
                (j.job_id, tablename, j.phase))

        log.debug('Fetching job %s results...' % j.job_id)
        res = j.fetch_result()

        j.raise_if_error()
        t = res.to_table()
        log.debug('Fixing table structure.')
        fix_names(t)
        convert_dtype(t)
        del t.meta['description']
        log.info('Saving table to %s.' % filename)
        t.write(filename, overwrite=overwrite)

    def downloadPending(self, overwrite=False):
        '''
        Download all completed jobs to their default paths.
        
        Parameters
        ----------
        
            overwrite : bool, optional
                Overwrite the files if they already exist.
                Will skip the download otherwise.
                Default: `False`
            
        '''
        for tab in self.listDownloadPending():
            log.info('Downloading table %s.' % tab)
            self.download(tab, overwrite=overwrite)

    @suppress_spec_warnings
    def _recoverJob(self, tablename, job_id):
        url = '%s/async/%s' % (self.serviceUrl, job_id)
        job = AsyncTAPJob(url, self.auth)
        self.jobs[tablename] = job
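
A hedged usage sketch of TAPQueueManager (CEFCA_authenticate, log, fix_names and convert_dtype come from the surrounding module; the credentials and table name are placeholders):

# Hedged usage sketch; credentials and the table name below are placeholders.
mgr = TAPQueueManager(
    'https://archive.cefca.es/catalogues/vo/tap/minijpas-idr201910',
    tables_dir='tables')
mgr.connect('user@example.com', 'password')
mgr.requestTable('some_table')       # placeholder table name
mgr.runNextJob()
mgr.saveJobList('jobs.txt')
# later, once the job reports COMPLETED:
mgr.loadJobList('jobs.txt')
mgr.downloadPending(overwrite=False)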
Example 9
from pyvo.dal import TAPService
import astropy
from astropy.table import Table, vstack
import sys

tap_service = TAPService("http://dc.g-vo.org/tap")
tap_results = tap_service.search("SELECT TOP 10 * FROM ivoa.obscore")
print(tap_results)
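
The results object can also be converted to an astropy Table (vstack, imported above, is handy when combining the tables from several queries):

table = tap_results.to_table()
print(table.colnames)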
Example 10
class ObsDatabase(object):
    """ Class to access NenuFAR BST TAP service.
    """
    def __init__(self):
        self.service = TAPService(nancay_tap)
        log.info(f'TAP service {nancay_tap} accessed.')
        self.meta_names = [
            'target_name', 'obs_creator_did', 's_ra', 's_dec', 't_min',
            't_max', 'em_min', 'em_max'
        ]
        self._conditions = {
            'time': '',
            'freq': '',
            'pos': '',
        }
        self.time_range = [None, None]
        self.freq_range = [None, None]
        self.fov_radius = 180 * u.deg
        self.fov_center = None

    # --------------------------------------------------------- #
    # --------------------- Getter/Setter --------------------- #
    @property
    def meta_names(self):
        """ Column names (observation properties) to return using
            :meth:`~nenupy.observation.tapdatabase.ObsDatabase.search`.

            :setter: `list` of column names.

            :getter: Properties to query.
            
            :type: `str`

            :seealso:
                `Database description
                <http://vogate.obs-nancay.fr/__system__/dc_tables/show/tableinfo/nenufar.bst>`_
        
        """
        return ', '.join(self._meta_names)

    @meta_names.setter
    def meta_names(self, m):
        if not isinstance(m, list):
            raise TypeError('meta_names must be a list')
        unknown_mask = ~np.isin(m, colnames)
        if unknown_mask.any():
            unknown = np.array(m)[unknown_mask]
            raise ValueError('Unknown meta_names: {}, available: {}'.format(
                unknown, colnames))
        self._meta_names = m
        return

    @property
    def time_range(self):
        """ Time range selection for the ADQL query using
            :meth:`~nenupy.observation.tapdatabase.ObsDatabase.search`.
            Default is ``[None, None]`` which means that no
            condition based on observation time will be applied.

            :setter: Length-2 list of ``[start, stop]``. ``start``
                and ``stop`` may be passed as :class:`~astropy.time.Time`
                instances or as ISO/ISOT `str`.

            :getter: ``[start, stop]`` list.
            
            :type: `list`
        """
        return self._time_range

    @time_range.setter
    def time_range(self, t):
        if t == [None, None]:
            self._conditions['time'] = ''
            self._time_range = t
            return
        if not isinstance(t, list):
            raise TypeError('time_range must be a list')
        if not len(t) == 2:
            raise ValueError('time_range must be of length 2')
        if not all([isinstance(ti, Time) for ti in t]):
            t = [Time(ti) for ti in t]
        self._conditions['time'] = f'(t_min >= {t[0].mjd} '\
            f'AND t_max <= {t[1].mjd})'
        log.info(f'time_range set to {t}.')
        self._time_range = t
        return

    @property
    def freq_range(self):
        """ Frequency range selection for the ADQL query using
            :meth:`~nenupy.observation.tapdatabase.ObsDatabase.search`.
            Default is ``[None, None]`` which means that no
            condition based on observation frequencies will be applied.

            :setter: Length-2 list of ``[fmin, fmax]``. ``fmin``
                and ``fmax`` may be passed as :class:`~astropy.units.Quantity`
                instances or as `float` (assumed to be expressed
                in MHz).

            :getter: ``[fmin, fmax]`` list.
            
            :type: `list`
        """
        return self._freq_range

    @freq_range.setter
    def freq_range(self, f):
        if f == [None, None]:
            self._conditions['freq'] = ''
            self._freq_range = f
            return
        if not isinstance(f, list):
            raise TypeError('freq_range must be a list')
        if not len(f) == 2:
            raise ValueError('freq_range must be of length 2')
        if not all([isinstance(fi, u.Quantity) for fi in f]):
            f = [fi * u.MHz for fi in f]
        lmax = wavelength(f[0]).to(u.m).value
        lmin = wavelength(f[1]).to(u.m).value
        self._conditions['freq'] = f'(em_min >= {lmin} AND '\
            f'em_max <= {lmax})'
        log.info(f'freq_range set to {f}.')
        self._freq_range = f
        return

    @property
    def fov_radius(self):
        """ Radius of the query, in combination with the query
            center :attr:`~nenupy.observation.tapdatabase.ObsDatabase.fov_center`.
    
            :setter: Radius (in degrees if no unit is provided). Default is ``180 deg``.

            :getter: Radius in degrees.
            
            :type: `float` or :class:`~astropy.units.Quantity`

            .. warning::
                Must be set **before**
                :attr:`~nenupy.observation.tapdatabase.ObsDatabase.fov_center`.
        """
        return self._fov_radius

    @fov_radius.setter
    def fov_radius(self, r):
        if r is None:
            r = 180 * u.deg
        if not isinstance(r, u.Quantity):
            r *= u.deg
        if not r.isscalar:
            raise ValueError('FOV radius must be a scalar.')
        self._fov_radius = r
        return

    @property
    def fov_center(self):
        """ Center of the field of view queried, in comination
            with the radius :attr:`~nenupy.observation.tapdatabase.ObsDatabase.fov_radius`.

            :setter: Center of the field of view.
            
            :getter: Center of the field of view.

            :type: :class:`~astropy.coordinates.SkyCoord`

            .. warning::
                Must be set **after**
                :attr:`~nenupy.observation.tapdatabase.ObsDatabase.fov_radius`.
        """
        return self._fov_center

    @fov_center.setter
    def fov_center(self, f):
        if f is None:
            self._conditions['pos'] = ''
            self._fov_center = f
            return
        if not isinstance(f, SkyCoord):
            raise TypeError('fov_center must be a SkyCoord instance.')
        if not f.isscalar:
            raise ValueError('fov_center must be scalar.')
        radius = self.fov_radius.to(u.deg).value
        self._conditions['pos'] = f'1 = CONTAINS'\
            f"(POINT('ICRS', s_ra, s_dec), CIRCLE('ICRS', "\
            f'{f.ra.deg}, {f.dec.deg}, {radius}))'
        log.info(f'fov_center set to {f}, radius={radius} deg.')
        self._fov_center = f
        return

    @property
    def conditions(self):
        """ Conditions summary of the query.

            :getter: Query conditions.

            :type: `str`
        """
        conds = []
        for key in self._conditions.keys():
            if self._conditions[key] != '':
                conds.append(self._conditions[key])
        conditions = ' AND '.join(conds)
        return conditions

    @property
    def query(self):
        """ Full query, combining returned parameteres
            :attr:`~nenupy.observation.tapdatabase.ObsDatabase.meta_names`
            and the conditions
            :attr:`~nenupy.observation.tapdatabase.ObsDatabase.conditions`.

            :getter: Query.

            :type: `str`
        """
        q = f'SELECT {self.meta_names} from nenufar.bst '\
            f'WHERE ({self.conditions})'
        return q

    # --------------------------------------------------------- #
    # ------------------------ Methods ------------------------ #
    def search(self):
        """ Run the TAP :attr:`~nenupy.observation.tapdatabase.ObsDatabase.query`
            on the `NenuFAR BST service <http://vogate.obs-nancay.fr/tap>`_.

            :returns:
                NenuFAR observation properties resulting from the
                ADQL query.
            :rtype: :class:`~astropy.table.Table`
        """
        if self.conditions == '':
            raise ValueError('Empty query, fill out some attributes first.')
        log.info(f'TAP service columns to return: {self._meta_names}.')
        log.debug(f"Querying: '{self.query}'")
        result = self.service.search(self.query)
        return result.to_table()

    def reset(self):
        """ Reset query parameters to default values.
        """
        self.meta_names = [
            'target_name', 'obs_creator_did', 's_ra', 's_dec', 't_min',
            't_max', 'em_min', 'em_max'
        ]
        self.time_range = [None, None]
        self.freq_range = [None, None]
        self.fov_radius = 180 * u.deg
        self.fov_center = None
        log.info('Query parameters reset to default values.')
        return
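
A hedged usage sketch of ObsDatabase (nancay_tap, wavelength, colnames and log are defined elsewhere in the module; the ranges and coordinates below are placeholders):

# Hedged usage sketch; module-level names come from the surrounding module
# and the values below are placeholders.
from astropy.coordinates import SkyCoord
import astropy.units as u

db = ObsDatabase()
db.time_range = ['2019-04-01 12:00:00', '2019-04-02 12:00:00']
db.freq_range = [30, 60]                      # floats are interpreted as MHz
db.fov_radius = 5 * u.deg                     # must be set before fov_center
db.fov_center = SkyCoord(299.87 * u.deg, 40.73 * u.deg)
observations = db.search()                    # astropy Table of matching rows
db.reset()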