def _get_catalog(self):
    """Return the GAIA catalog for the current telescope coordinates."""
    if self._catalog_coords is None or self._catalog_coords.separation(
            self.telescope.real_pos) > 10. * u.arcmin:
        self._catalog = Gaia.query_object_async(
            coordinate=self.telescope.real_pos, radius=1. * u.deg)
        # Remember where the catalog was queried so the cache check above can
        # work on later calls (assumed fix; the original snippet never
        # refreshed the cached position).
        self._catalog_coords = self.telescope.real_pos
    return self._catalog
def get_gaia_cat(ims, cat_name='gaia'):
    """Get the Gaia catalog for the area of the input images."""
    from calc_bounds import bounds, get_footprints

    print('Calculating coordinate ranges for Gaia query:')
    footprint_list = list(map(get_footprints, ims))
    ras, decs = bounds(footprint_list)
    ra_midpt = (np.amax(ras) + np.amin(ras)) / 2.
    dec_midpt = (np.amax(decs) + np.amin(decs)) / 2.
    ra_width = np.amax(ras) - np.amin(ras)
    dec_height = np.amax(decs) - np.amin(decs)

    print('\nPerforming Gaia query:')
    coord = SkyCoord(ra=ra_midpt, dec=dec_midpt,
                     unit=(u.degree, u.degree), frame='icrs')
    width = Quantity(ra_width, u.deg)
    height = Quantity(dec_height, u.deg)
    r = Gaia.query_object_async(coordinate=coord, width=width, height=height)

    print('Sources returned: {}'.format(len(r)))
    assert len(r) > 0, 'No sources found in Gaia query\n'

    cat_file_name = '{}.cat'.format(cat_name)
    print('Writing Gaia source catalog: {}\n'.format(cat_file_name))
    Table([r['ra'], r['dec']]).write(cat_file_name,
                                     format='ascii.fast_commented_header')
    return cat_file_name
def radec_gaia_stars(ra, dec, radius=1.0, width=None, height=None,
                     verbose=False, tap_url=None):
    """Search for bright stars using the GAIA catalog.

    TODO: Should be absorbed by the object for image later.
    TODO: Should have a version that just uses the local catalog.
    """
    # Central coordinate
    radec = SkyCoord(ra, dec, unit=('deg', 'deg'), frame='icrs')

    # Default search radius is 1.0 deg
    r_search = Quantity(radius, u.degree)

    if width is not None:
        w_search = Quantity(width, u.degree)
        if height is None:
            # Search in a square box
            h_search = w_search
        else:
            h_search = Quantity(height, u.degree)

    # Search for stars
    if tap_url is not None:
        from astroquery.gaia import TapPlus, GaiaClass
        Gaia = GaiaClass(TapPlus(url=tap_url))
    else:
        from astroquery.gaia import Gaia

    if width is not None:
        gaia_results = Gaia.query_object_async(
            coordinate=radec, width=w_search, height=h_search, verbose=verbose)
    else:
        gaia_results = Gaia.query_object_async(
            coordinate=radec, radius=r_search, verbose=verbose)

    return gaia_results
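# A minimal usage sketch for radec_gaia_stars() above (the coordinates are
# arbitrary illustrative values, not from the original code): a cone search by
# default, or a box search when width/height are supplied.
cone_results = radec_gaia_stars(150.1, 2.2, radius=0.5)
box_results = radec_gaia_stars(150.1, 2.2, width=0.5, height=0.3)
print(len(cone_results), len(box_results))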
def get_gaia_stars(name, width):
    result_table = Simbad.query_object(name)
    ra, dec = result_table["RA"][0], result_table["DEC"][0]
    # Simbad returns sexagesimal strings with RA in hours, so parse with
    # (hourangle, deg) rather than (degree, degree).
    coord = SkyCoord(ra=ra, dec=dec, unit=(u.hourangle, u.deg), frame='icrs')
    r = Gaia.query_object_async(coordinate=coord, width=width, height=width)
    return r
def query_gaia():
    # Westerlund 1 (RA given in hours, so use hourangle for the RA unit)
    coord = SkyCoord(ra="16 47 04.00", dec="-45 51 04.9",
                     unit=(u.hourangle, u.deg), frame='icrs')
    width = 1 * u.deg
    height = 1 * u.deg
    r = Gaia.query_object_async(coordinate=coord, width=width, height=height)
    return r
def searchGaiaArchives(ra, dec, height, width):
    coord = SkyCoord(ra=ra * u.deg, dec=dec * u.deg)
    width = u.Quantity(width * u.deg)
    height = u.Quantity(height * u.deg)
    results = Gaia.query_object_async(coordinate=coord, width=width,
                                      height=height)
    return np.array(list(results['ra'])), np.array(list(results['dec']))
def gaia_xmatch_results():
    # get a sample of stars to cross match
    center_coord = SkyCoord(ra=280, dec=-60, unit=(u.degree, u.degree),
                            frame='icrs')
    width = u.Quantity(0.05, u.deg)
    height = width
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore')
        sample_of_stars = Gaia.query_object_async(coordinate=center_coord,
                                                  width=width, height=height)

    # cross match the coordinates and check we get back the same thing
    coordinates_of_stars = pd.DataFrame({'ra': sample_of_stars['ra'],
                                         'de': sample_of_stars['dec']})
    xmatch_results = mwtools.xmatch.Gaia_DR2_Xmatch(coordinates_of_stars)
    return xmatch_results
def get_gaia_cat(input_images, cat_name='gaia'):
    """
    Get the Gaia catalog for the area of the input images.

    This function queries Gaia for a table of sources.  It determines the
    dimensions to use for the query by finding the sky positions of the
    corners of each of the images.
    """
    print('Calculating coordinate ranges for Gaia query:')
    footprint_list = map(get_footprints, input_images)

    merged = []
    for im in footprint_list:
        for ext in im:
            merged.append(ext)
    merged = np.vstack(merged)
    ras = merged[:, 0]
    decs = merged[:, 1]

    ra_midpt = (max(ras) + min(ras)) / 2.
    dec_midpt = (max(decs) + min(decs)) / 2.
    ra_width = np.amax(ras) - np.amin(ras)
    dec_height = np.amax(decs) - np.amin(decs)

    coord = SkyCoord(ra=ra_midpt, dec=dec_midpt, unit=(u.degree, u.degree),
                     frame='icrs')
    width = Quantity(ra_width, u.deg) * 1.1
    height = Quantity(dec_height, u.deg) * 1.1

    r = Gaia.query_object_async(coordinate=coord, width=width, height=height)
    assert len(r) > 0, 'No sources found in Gaia query\n'
    print('Sources returned: {}'.format(len(r)))

    cat = r['ra', 'dec']
    cat.write('{}.cat'.format(cat_name), format='ascii.commented_header')
    return '{}.cat'.format(cat_name)
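# Hypothetical usage of get_gaia_cat() above, assuming a list of FITS images
# for which get_footprints() can compute sky footprints (the filenames are
# placeholders, not from the original code):
input_images = ['image1_flt.fits', 'image2_flt.fits']
gaia_cat_file = get_gaia_cat(input_images, cat_name='gaia')
print('Reference catalog written to', gaia_cat_file)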
def gaia_results(image, saveas=None):
    # image wcs and frame, for conversions pixels/skycoord
    wcs, frame = WCS(image.header), image.header['RADESYS'].lower()

    # coord of strong lensing galaxy
    ra = image.header['CAT-RA']
    dec = image.header['CAT-DEC']
    coord = SkyCoord(ra, dec, unit=(u.hourangle, u.deg))

    # the pixscale is the same along x & y and rotated to default
    # (N up, E left), so cd_ij ~ delta_ij
    cdi_i = image.header['CD1_1']   # deg/pixel
    naxis = image.header['NAXIS1']  # naxis1 = naxis2
    radius = 3600 * cdi_i * naxis / 2  # approx 800 arcsec, entire image
    radius *= .75  # use 3/4 of that

    # do the search
    r = Gaia.query_object_async(coordinate=coord, radius=radius * u.arcsec)

    if saveas:
        if not os.path.exists(saveas):
            os.makedirs(saveas)
        pickle.dump(r, open(saveas + "/gaia_results.pkl", "wb"))

    return r, image
def get_gaia_star(coord_center, radius_arcsec, g_mag_cut=15.0):
    '''Return the list of coordinates of GAIA stars in the field.'''
    # set up coordinates
    coord = SkyCoord(ra=coord_center[0][0], dec=coord_center[0][1],
                     unit=(u.degree, u.degree), frame='icrs')
    radius = u.Quantity(radius_arcsec, u.arcsec)

    # query GAIA stars
    gaia_stars = Gaia.query_object_async(coordinate=coord, radius=radius)

    # choose stars with proper motion and mag cut
    prop_motion = np.sqrt(gaia_stars['pmra'].data.data**2 +
                          gaia_stars['pmdec'].data.data**2)
    idx_good_stars = (
        (gaia_stars['astrometric_excess_noise'].data.data < 1.0) &
        (prop_motion < 20.0).data &
        (gaia_stars['phot_g_mean_mag'].data.data > g_mag_cut)).astype(bool)
    print('number of stars found in GAIA :', np.sum(idx_good_stars))

    coord_gaia = SkyCoord(ra=gaia_stars[idx_good_stars]['ra'],
                          dec=gaia_stars[idx_good_stars]['dec'],
                          unit=(u.deg, u.deg))
    return coord_gaia
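# Usage sketch for get_gaia_star() above.  Because the function indexes
# coord_center[0][0] and coord_center[0][1], it is assumed to take a nested
# sequence like [[ra_deg, dec_deg]] (the values below are arbitrary examples).
coord_center = [[150.1, 2.2]]
star_coords = get_gaia_star(coord_center, radius_arcsec=600., g_mag_cut=15.0)
print(len(star_coords), 'stars passed the cuts')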
def query_one_star(ra_deg, dec_deg, radius_arcsec=10):
    # get gaia data
    coord = SkyCoord(ra=ra_deg, dec=dec_deg, unit=(u.degree, u.degree),
                     frame='icrs')
    rad = u.Quantity(float(radius_arcsec), u.arcsec)
    rG = Gaia.query_object_async(coordinate=coord, radius=rad)

    # return NaN if no GAIA star is found
    if len(rG) == 0:
        return np.repeat(np.nan, 18).reshape(9, 2)

    # get 2MASS data
    r2 = Vizier.query_region(coord, radius=rad, catalog='II/246')

    # return NaN if no 2MASS star is found
    if (r2 is None) or (len(r2) == 0):
        return np.repeat(np.nan, 18).reshape(9, 2)

    # step through possible matches
    for i in range(len(rG)):
        for j in range(len(r2)):

            # get gaia parameters
            par, epar = rG['parallax'][i] + .03, rG['parallax_error'][i]
            Gmag = rG['phot_g_mean_mag'][i]
            GBPmag = rG['phot_bp_mean_mag'][i]
            FBP = rG['phot_bp_mean_flux'][i]
            eFBP = rG['phot_bp_mean_flux_error'][i]
            eGBPmag = -2.5 * np.log10(FBP / (FBP + eFBP))
            GRPmag = rG['phot_rp_mean_mag'][i]
            FRP = rG['phot_rp_mean_flux'][i]
            eFRP = rG['phot_rp_mean_flux_error'][i]
            eGRPmag = -2.5 * np.log10(FRP / (FRP + eFRP))

            # get 2MASS photometry
            Hmag, Kmag, eKmag = r2[j]['Hmag'][0], r2[j]['Kmag'][0], \
                r2[j]['e_Kmag'][0]

            # change bad values to NaN
            if np.ma.is_masked(par) or par <= 0:
                par, epar = np.nan, np.nan
            if np.ma.is_masked(epar) or epar <= 0:
                par, epar = np.nan, np.nan
            if np.ma.is_masked(Gmag):
                Gmag = np.nan
            if np.ma.is_masked(GBPmag):
                GBPmag, eGBPmag = np.nan, np.nan
            if np.ma.is_masked(eGBPmag):
                GBPmag, eGBPmag = np.nan, np.nan
            if np.ma.is_masked(GRPmag):
                GRPmag, eGRPmag = np.nan, np.nan
            if np.ma.is_masked(eGRPmag):
                GRPmag, eGRPmag = np.nan, np.nan
            if np.ma.is_masked(Hmag):
                Hmag = np.nan
            if np.ma.is_masked(Kmag):
                Kmag, eKmag = np.nan, np.nan
            if np.ma.is_masked(eKmag):
                Kmag, eKmag = np.nan, np.nan

            # check that GAIA and 2MASS photometry approximately match
            if np.ma.is_masked(par) or np.ma.is_masked(Gmag):
                match = False
            else:
                matches = np.zeros(4).astype(bool)
                matches[0], _, _ = does_G_K_match(Gmag, Hmag, Kmag)
                matches[1], _, _ = does_GBP_K_match(GBPmag, Hmag, Kmag)
                matches[2], _, _ = does_GRP_K_match(GRPmag, Hmag, Kmag)
                matches[3], _, _ = does_GBP_GRP_match(GBPmag, GRPmag,
                                                      Hmag, Kmag)
                match = np.all(matches)

            if match:
                dist, mu = compute_distance_modulus(unp.uarray(par, epar))
                print(unp.nominal_values(dist))
                l, b = coord.galactic.l.deg, coord.galactic.b.deg
                AK = compute_AK_mwdust(l, b, unp.nominal_values(dist),
                                       unp.std_devs(dist))
                MK = compute_MK(unp.uarray(Kmag, eKmag), mu, AK)
                Rs = MK2Rs(MK)
                Teff = gaia2Teff(unp.uarray(GBPmag, eGBPmag),
                                 unp.uarray(GRPmag, eGRPmag))
                Ms = MK2Ms(MK)
                return [par, epar], [Kmag, eKmag], \
                    [unp.nominal_values(dist), unp.std_devs(dist)], \
                    [unp.nominal_values(mu), unp.std_devs(mu)], \
                    [unp.nominal_values(AK), unp.std_devs(AK)], \
                    [unp.nominal_values(MK), unp.std_devs(MK)], \
                    [unp.nominal_values(Rs), unp.std_devs(Rs)], \
                    [unp.nominal_values(Teff), unp.std_devs(Teff)], \
                    [unp.nominal_values(Ms), unp.std_devs(Ms)]

    # return NaN if no match has been found by now
    return np.repeat(np.nan, 18).reshape(9, 2)
print(" - RA = {:0.2f} +/- {:0.2f}; Dec = {:0.2f} +/- {:0.2f}".format( RA, dRA, DEC, dDEC)) print(GAIAcoord) #; sys.exit() print("") #Check if we already have the Gaia catalog if os.path.exists(GaiaTable): gaia_cat = ascii.read(GaiaTable, format="ipac") print("## Already have a Gaia catalog with " + str(len(gaia_cat)) + " sources.") print("## To get a new one rename or remove " + GaiaTable) else: #Get the GAIA catalog print(">> Querying the GAIA catalog ....") gaia_cat = Gaia.query_object_async(coordinate=GAIAcoord, width=dRA, height=dDEC) print(" - Downloaded GAIA-DR2 catalog with " + str(len(gaia_cat)) + ' sources') #Add fluxes in uJy gaia_cat['g'] = 10**((gaia_cat['phot_g_mean_mag'] - 23.9) / -2.5) gaia_cat['bp'] = 10**((gaia_cat['phot_bp_mean_mag'] - 23.9) / -2.5) gaia_cat['rp'] = 10**((gaia_cat['phot_rp_mean_mag'] - 23.9) / -2.5) ascii.write(gaia_cat, GaiaTable, format="ipac", overwrite=True) #save the catalog print(" ==> Wrote GAIA-DR2 catalog to ", GaiaTable) #fix a bug in the ipac table writer fixcmd = "sed -i -e \"s/'null'/ null /g\" " + GaiaTable
print(i, name)
sleep(1)
if True:  # try:
    radec = dat["pos"][sysi]
    sep = dat["sep"][sysi]
    sp = dat["sp"][sysi]
    print(radec)
    c = SkyCoord("J" + radec, unit=(u.hourangle, u.deg))
    width = u.Quantity(5, u.arcsec)
    height = u.Quantity(5, u.arcsec)

    # GAIA
    r = Gaia.query_object_async(coordinate=c, width=width, height=height)
    plx = None
    if len(r["parallax"]) == 0:
        sw = False
    elif type(r["parallax"][0]) == np.float64:
        plx = r["parallax"][0]
        sw = True
    else:
        sw = False
    print("GAIA", plx)

    Simbad.SIMBAD_URL = "http://simbad.u-strasbg.fr/simbad/sim-script"
    Simbad.add_votable_fields("parallax", "flux(V)", "flux(R)", "flux(J)",
                              "flux(H)", "flux(K)", "pmra", "pmdec")
    result_table = Simbad.query_region(c, radius='0d0m5s')
def query_one_TIC(theta, radius_arcsec=10):
    # get gaia data
    assert len(theta) == 9
    ra_deg, dec_deg, GAIAmag, Jmag, e_Jmag, Hmag, e_Hmag, Kmag, e_Kmag = theta
    coord = SkyCoord(ra=ra_deg, dec=dec_deg, unit=(u.degree, u.degree),
                     frame='icrs')
    rad = u.Quantity(float(radius_arcsec), u.arcsec)
    rG = Gaia.query_object_async(coordinate=coord, radius=rad)

    # return NaN if no GAIA star is found
    if len(rG) == 0:
        return np.repeat(np.nan, 20).reshape(10, 2)

    # step through possible matches
    for i in range(len(rG)):

        # get gaia parameters
        par, epar = rG['parallax'][i] + .029, rG['parallax_error'][i]
        GAIAmag_dr2 = rG['phot_g_mean_mag'][i]
        GBPmag = rG['phot_bp_mean_mag'][i]
        FBP = rG['phot_bp_mean_flux'][i]
        eFBP = rG['phot_bp_mean_flux_error'][i]
        eGBPmag = -2.5 * np.log10(FBP / (FBP + eFBP))
        GRPmag = rG['phot_rp_mean_mag'][i]
        FRP = rG['phot_rp_mean_flux'][i]
        eFRP = rG['phot_rp_mean_flux_error'][i]
        eGRPmag = -2.5 * np.log10(FRP / (FRP + eFRP))

        # change bad values to NaN
        if np.ma.is_masked(par) or par <= 0:
            par, epar = np.nan, np.nan
        if np.ma.is_masked(epar) or epar <= 0:
            par, epar = np.nan, np.nan
        if np.ma.is_masked(GBPmag):
            GBPmag, eGBPmag = np.nan, np.nan
        if np.ma.is_masked(eGBPmag):
            GBPmag, eGBPmag = np.nan, np.nan
        if np.ma.is_masked(GRPmag):
            GRPmag, eGRPmag = np.nan, np.nan
        if np.ma.is_masked(eGRPmag):
            GRPmag, eGRPmag = np.nan, np.nan

        # check that GAIA and 2MASS photometry approximately match
        if np.isnan(par) or np.isnan(GBPmag) or np.isnan(GRPmag):
            match = False
        elif GAIAmag == GAIAmag_dr2:
            match = True
        else:
            matches = np.zeros(4).astype(bool)
            matches[0], _, _ = does_G_K_match(GAIAmag_dr2, Hmag, Kmag)
            matches[1], _, _ = does_GBP_K_match(GBPmag, Hmag, Kmag)
            matches[2], _, _ = does_GRP_K_match(GRPmag, Hmag, Kmag)
            matches[3], _, _ = does_GBP_GRP_match(GBPmag, GRPmag, Hmag, Kmag)
            match = np.all(matches)

        if match:
            dist, mu = compute_distance_modulus(unp.uarray(par, epar))
            l, b = coord.galactic.l.deg, coord.galactic.b.deg
            AK = compute_AK_mwdust(l, b, unp.nominal_values(dist),
                                   unp.std_devs(dist))
            MK = compute_MK(unp.uarray(Kmag, e_Kmag), mu, AK)
            Rs = MK2Rs(MK)
            Teff = gaia2Teff(unp.uarray(GBPmag, eGBPmag),
                             unp.uarray(GRPmag, eGRPmag),
                             unp.uarray(Jmag, e_Jmag),
                             unp.uarray(Hmag, e_Hmag))
            Ms = MK2Ms(MK)
            # ***gbp and grp used to have switched positions here
            return [par, epar], [GBPmag, eGBPmag], [GRPmag, eGRPmag], \
                [unp.nominal_values(dist), unp.std_devs(dist)], \
                [unp.nominal_values(mu), unp.std_devs(mu)], \
                [unp.nominal_values(AK), unp.std_devs(AK)], \
                [unp.nominal_values(MK), unp.std_devs(MK)], \
                [unp.nominal_values(Rs), unp.std_devs(Rs)], \
                [unp.nominal_values(Teff), unp.std_devs(Teff)], \
                [unp.nominal_values(Ms), unp.std_devs(Ms)]

    # return NaN if no match has been found by now
    return np.repeat(np.nan, 20).reshape(10, 2)
ixf = 0
for line in lines:
    cos = line.split()
    ra1 = float(cos[1])
    dec1 = float(cos[2])
    ra2 = float(cos[12])
    dec2 = float(cos[13])
    bp1 = float(cos[-4])
    bp2 = float(cos[-2])
    # print(ra1, dec1, ra2, dec2)
    if dec1 < 30 and dec2 < 30:
        coord1 = SkyCoord(ra=ra1, dec=dec1, unit=(u.deg, u.deg), frame='icrs')
        coord2 = SkyCoord(ra=ra2, dec=dec2, unit=(u.deg, u.deg), frame='icrs')
        r1 = Gaia.query_object_async(coordinate=coord1,
                                     radius=u.Quantity(10. / 3600., u.deg))
        print(len(r1))
        pmra = r1[0]['pmra']
        pmdec = r1[0]['pmdec']
        parallax = r1[0]['parallax']
        epmra = r1[0]['pmra_error']
        epmdec = r1[0]['pmdec_error']
        eparallax = r1[0]['parallax_error']
        drop1, drop2 = False, False
        drop3, drop4 = False, False
        if len(r1) > 1:
            for r in r1[1:]:
                if (np.absolute(r['pmra'] - pmra) / max(epmra, r['pmra_error']) < 5) and \
                   (np.absolute(r['pmdec'] - pmdec) / max(epmdec, r['pmdec_error']) < 5) and \
                   (np.absolute(r['parallax'] - parallax) / eparallax < 5):
                    drop1 = True
def querycat_gaia(ralist=None, declist=None,
                  cone_search=False,
                  width=10.0, height=10.0, radius=5.0,
                  dr2=False,
                  test=False, debug=False):
    """ """
    import time

    from astropy.table import Table, vstack
    import astropy.units as u
    from astropy.coordinates import SkyCoord

    from astroquery.gaia import Gaia

    if dr2:
        help(Gaia)
        gaiadr2_table = Gaia.load_table('gaiadr2.gaia_source')
        for column in gaiadr2_table.get_columns():
            print(column.get_name())

    if debug:
        help(Gaia)

    width = u.Quantity(width / 3600.0, u.degree)
    height = u.Quantity(height / 3600.0, u.degree)
    radius = u.Quantity(radius / 3600.0, u.degree)

    if test is True:
        ralist = [180.0]
        declist = [0.0]
        width = u.Quantity(30.0, u.arcsec)
        height = u.Quantity(30.0, u.arcsec)
        radius = u.Quantity(15.0, u.arcsec)

    result_nrows = 0
    for isource, (ra, dec) in enumerate(zip(ralist, declist)):

        coord = SkyCoord(ra=ra, dec=dec, unit=(u.degree, u.degree),
                         frame='icrs')

        t0 = time.time()

        if not cone_search:
            result = Gaia.query_object_async(coordinate=coord,
                                             width=width, height=height)
            # help(result)
            if debug:
                result.pprint()
                result.info('stats')

        if cone_search:
            job = Gaia.cone_search_async(coord, radius)
            # help(job)
            result = job.get_results()

        print('Number of rows:', len(result))
        print('Elapsed time(secs):', time.time() - t0)

        if debug:
            help(result)
            result.pprint()
            result.info('stats')

        result_nrows = result_nrows + len(result)
        if isource == 0:
            result_all = result
        if isource > 0:
            result_all = vstack([result_all, result])

    # def fix_vot_object():
    table = result_all
    print('icol, format, dtype')
    # help(table)
    # help(table.columns)
    for (icol, column) in enumerate(table.columns):
        print(icol, table.columns[icol].name,
              table.columns[icol].format,
              table.columns[icol].dtype)
        # convert columns with dtype = object (not supported by FITS) to bool
        if table.columns[icol].dtype == 'object':
            colname = table.columns[icol].name
            NewColumn = Table.Column(table[colname].data, dtype='bool')
            table.replace_column(colname, NewColumn)
    print()

    result_all = table
    print('Number of Gaia sources returned:', result_nrows, len(result_all))

    return result_all
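# Usage sketch for querycat_gaia() above.  test=True uses its built-in test
# position (RA=180, Dec=0 deg) with a 30 arcsec box search; the second call
# exercises the cone-search branch (the positions here are arbitrary examples
# and the radius is in arcsec).
quick_look = querycat_gaia(test=True)
quick_look.pprint(max_lines=10)

cone_table = querycat_gaia(ralist=[180.0], declist=[0.0],
                           cone_search=True, radius=15.0)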
def search(image, headinfo, target_coords, syntax, catalog_syntax, filter_):
    """
    Search area around transient/target location in photometric catalogs.

    Current catalogs (selectable in syntax):
        - Skymapper: Southern Hemisphere
        - Pan Starrs: North of declination -30 degree
        - Apass: All-sky survey
        - 2MASS: JHK all-sky survey

    Future:
        - SDSS: Future implementation
        - Ability to make custom catalog from different surveys

    Input:
        - image: Numpy 2D array
        - headinfo: astropy.io.fits.header.Header
        - target_coords: astropy.coordinates.sky_coordinate.SkyCoord
        - syntax: dict
        - catalog_syntax: dict
        - filter_: str

    Output:
        - data: pandas DataFrame
    """
    import warnings
    if not syntax['catalog_warnings'] or syntax['master_warnings']:
        warnings.filterwarnings("ignore")

    import numpy as np
    import os, sys
    import requests
    import pathlib
    import shutil
    import os.path
    import logging

    from functools import reduce
    from astropy.table import Table
    from astropy.wcs import wcs
    from astroquery.vizier import Vizier
    from astropy.io.votable import parse_single_table
    from astropy.coordinates import Angle

    from autophot.packages.functions import r_dist

    logger = logging.getLogger(__name__)

    try:
        # Get wcs information
        w1 = wcs.WCS(headinfo)

        # Radius around target
        radius = float(syntax['radius'])

        # Target name, if applicable
        target = syntax['target_name']

        # Get work directory location, create directory if needed
        dirname = os.path.join(syntax['wdir'], 'catalog_queries')
        pathlib.Path(dirname).mkdir(parents=True, exist_ok=True)

        '''
        Getting target RA and Dec

        - if target is None but an RA and Dec are given, create a new target name
        - if RA and Dec are not given, use the center of the image as the
          location - for quick reduction of the image
        '''

        # if target or its RA/Dec - set target name
        if target is None:
            if syntax['target_ra'] is not None and syntax['target_dec'] is not None:
                target = 'target_ra_' + str(round(syntax['target_ra'])) + \
                    '_dec_' + str(round(syntax['target_dec']))
                logger.info('New target name: %s' % target)
            else:
                # if not, just call it target
                target = 'target'

        # Search limitation with Pan Starrs limited to 0.5 deg
        if radius > 0.5 and syntax['catalog'] == 'pan_starrs':
            logger.warning('Search limitation with PanStarrs API -> Radius = 0.5 [deg]')
            radius = 0.5

        # Chosen catalog from input.yml, create directory for catalog if needed
        catalog_dir = syntax['catalog']
        pathlib.Path(os.path.join(dirname, catalog_dir)).mkdir(parents=True,
                                                               exist_ok=True)

        # Folder for target, create directory if needed
        target_dir = reduce(os.path.join, [dirname, catalog_dir, target.lower()])
        pathlib.Path(target_dir).mkdir(parents=True, exist_ok=True)

        # Filename of fetched catalog
        fname = str(target) + '_r_' + str(radius)

        # Can force to use a certain catalog - untested 03-10-19
        if syntax['force_catalog_csv']:
            logger.info('Using ' + syntax['force_catalog_csv_name'] + ' as catalog')
            fname = str(syntax['force_catalog_csv_name']) + '_r_' + str(radius)

        # if catalog is found via its filename - use this and return data
        if os.path.isfile(os.path.join(target_dir, fname + '.csv')):
            logger.info('Catalog found for Target: %s\nCatalog: %s \nFile: %s' %
                        (target, str(catalog_dir).upper(), fname))
            data = Table.read(os.path.join(target_dir, fname + '.csv'),
                              format='csv')
            data = data.to_pandas()

        else:
            # If no previous catalog found - look for one
            logger.info('Searching for new catalog: %s ' % syntax['catalog'])

            if syntax['catalog'] in ['gaia']:

                import astropy.units as u
                from astroquery.gaia import Gaia

                import warnings
                warnings.filterwarnings('ignore')

                width = u.Quantity(radius, u.deg)
                height = u.Quantity(radius, u.deg)

                data = Gaia.query_object_async(coordinate=target_coords,
                                               width=width,
                                               height=height)
                data = data.to_pandas()
                data.to_csv(fname + '.csv', sep=',', index=False)

                # Move file to new location - 'catalog queries'
                shutil.move(os.path.join(os.getcwd(), fname + '.csv'),
                            os.path.join(target_dir, fname + '.csv'))

                warnings.filterwarnings('default')

            if syntax['catalog'] in ['apass', '2mass']:

                # No row limit
                Vizier.ROW_LIMIT = -1
                catalog_search = Vizier.query_region(target_coords,
                                                     radius=Angle(radius, 'deg'),
                                                     catalog=syntax['catalog'])

                # Select first catalog from list
                data = catalog_search[0].to_pandas()
                data.to_csv(fname + '.csv', sep=',', index=False)

                # Move file to new location - 'catalog queries'
                shutil.move(os.path.join(os.getcwd(), fname + '.csv'),
                            os.path.join(target_dir, fname + '.csv'))

            # some catalogs need specific download path using 'requests'
            if syntax['catalog'] in ['pan_starrs', 'skymapper']:

                mindet = 1

                if syntax['catalog'] == 'pan_starrs':
                    server = ('https://archive.stsci.edu/' +
                              'panstarrs/search.php')
                    params = {'RA': target_coords.ra.degree,
                              'DEC': target_coords.dec.degree,
                              'SR': radius,
                              'max_records': 10000,
                              'outputformat': 'VOTable',
                              'ndetections': ('>%d' % mindet)}

                if syntax['catalog'] == 'skymapper':
                    server = ('http://skymapper.anu.edu.au/sm-cone/public/query?')
                    params = {'RA': target_coords.ra.degree,
                              'DEC': target_coords.dec.degree,
                              'SR': radius,
                              'RESPONSEFORMAT': 'VOTABLE'}

                with open('temp.xml', "wb") as f:
                    logger.info('Downloading from %s' % syntax['catalog'])
                    response = requests.get(server, params=params)
                    f.write(response.content)

                # Parse local file into astropy.table object
                data = parse_single_table('temp.xml')

                # Delete temporary file
                os.remove('temp.xml')

                # Convert table to dataframe
                data_table = data.to_table(use_names_over_ids=True)
                data = data_table.to_pandas()

                # invalid entries in panstarrs are -999 - change to nans
                if syntax['catalog'] == 'pan_starrs':
                    data = data.replace(-999, np.nan)

                # No sources in field - temporary fix - will add
                # "check different catalog"
                if len(data) == 0:
                    logging.critical('Catalog: %s : does not cover field' %
                                     syntax['catalog'])
                    sys.exit()

                # Save to csv and move to 'catalog_queries'
                data.to_csv(fname + '.csv', index=False)

                shutil.move(os.path.join(os.getcwd(), fname + '.csv'),
                            os.path.join(target_dir, fname + '.csv'))

        # Add in x and y pixel locations under wcs
        x_pix, y_pix = w1.wcs_world2pix(data[catalog_syntax['RA']],
                                        data[catalog_syntax['DEC']], 1)

        data.insert(loc=5, column='x_pix', value=x_pix)
        data.insert(loc=6, column='y_pix', value=y_pix)

        # Remove boundary sources
        data = data[data.x_pix < image.shape[1] - syntax['pix_bound']]
        data = data[data.x_pix > syntax['pix_bound']]
        data = data[data.y_pix < image.shape[0] - syntax['pix_bound']]
        data = data[data.y_pix > syntax['pix_bound']]

        logger.info('Catalog length: %d' % len(data))

        warnings.filterwarnings("default")

    except Exception as e:
        logger.exception(e)
        data = None

    return data
def query_nearby_gaia(tic, ra_deg, dec_deg, Npixsearch=5, Npixplt=3,
                      pltt=True):
    '''Search nearby stars within some number of TESS pixels.'''
    # download TPF file
    bjds, tpfs = read_TESS_TPF(tic)
    TPFfname = glob.glob('MAST/TESS/TIC%i/tess*_tp.fits' % tic)
    assert len(TPFfname) > 0
    hdu = fits.open(TPFfname[0])

    # get reference pixel and pixel scale
    pixscale_ra, pixscale_dec = abs(hdu[1].header['1CDLT4']), \
        abs(hdu[1].header['2CDLT4'])
    refra, refdec = hdu[1].header['1CRVL4'], hdu[1].header['2CRVL4']
    # R = np.matrix([[hdu[1].header['11PC4'], hdu[1].header['12PC4']],
    #                [hdu[1].header['21PC4'], hdu[1].header['22PC4']]])

    # search gaia data
    coord = SkyCoord(ra=ra_deg, dec=dec_deg, unit=(u.degree, u.degree),
                     frame='icrs')
    radius_arcsec = float(Npixsearch * pixscale_ra * 3600)
    rad = u.Quantity(radius_arcsec, u.arcsec)
    rG = Gaia.query_object_async(coordinate=coord, radius=rad)
    assert len(rG) > 0

    # get source data
    ras = np.array(rG['ra'])
    e_ras = np.array(rG['ra_error'])
    decs = np.array(rG['dec'])
    e_decs = np.array(rG['dec_error'])
    Gmags = np.array(rG['phot_g_mean_mag'])

    # plotting
    if pltt:
        fig, ax = plt.subplots(1)
        ax.plot(ra_deg, dec_deg, 'bd', ms=8, label='Input source position')
        cax = ax.scatter(ras, decs, c=Gmags, s=50 * 10**(-.4 * (Gmags - 15)),
                         edgecolor='k', alpha=.9,
                         cmap=truncate_colormap(plt.get_cmap('hot_r'), .1, .9))
        ax.plot(ras, decs, 'k.')
        cbar_axes = fig.add_axes([.1, .1, .85, .04])
        cbar = fig.colorbar(cax, cax=cbar_axes, orientation='horizontal')
        cbar.set_label('GAIA mag')
        ax.set_xlabel('RA [deg]'), ax.set_ylabel('Dec [deg]')

        # plot search annulus
        r = np.repeat(radius_arcsec, 100) / 3600
        theta = np.linspace(0, 2 * np.pi, r.size)
        ax.plot(r * np.cos(theta) + ra_deg, r * np.sin(theta) + dec_deg,
                'k--', lw=.9)

        # add patches of TESS pixels
        assert Npixplt % 2 == 1
        Xcorners = range(-int(np.floor(Npixplt / 2.)),
                         int(np.floor(Npixplt / 2.)) + 1)
        Ycorners = range(-int(np.floor(Npixplt / 2.)),
                         int(np.floor(Npixplt / 2.)) + 1)
        for i in Xcorners:
            for j in Ycorners:
                lowercorner_ra, lowercorner_dec = (-.5 + i) * pixscale_ra + refra, \
                    (-.5 + j) * pixscale_dec + refdec
                rect = Rectangle((lowercorner_ra, lowercorner_dec),
                                 pixscale_ra, pixscale_dec, lw=1,
                                 edgecolor='k', facecolor='none')
                ax.add_patch(rect)

        ax.set_xlim((ras.min() - pixscale_ra, ras.max() + pixscale_ra))
        ax.set_ylim((decs.min() - pixscale_dec, decs.max() + pixscale_dec))
        ax.set_title('TIC %i (%i sources found)' % (tic, len(rG)), fontsize=10)
        ax.legend(loc='upper left', fontsize=10)
        fig.subplots_adjust(bottom=.22, top=.95)
        plt.savefig('plots/gaiasources_tic%i.png' % tic)
        plt.show()
        plt.close('all')

    return ras, e_ras, decs, e_decs, Gmags, radius_arcsec
# get image center
ra_targ = table['CRVAL1'][0]
print(table)
dec_targ = table['CRVAL2'][0]
logger.info(
    "Looking for Gaia sources around (%.3f, %.3f) with radius %.3f and proper motion less than %.1f mas/yr"
    % (ra_targ, dec_targ, search_radius.value, proper_motion_threshold))

# TODO: improve this using a box matching the image footprint, see
# https://astroquery.readthedocs.io/en/latest/gaia/gaia.html (width and height)
coord = SkyCoord(ra=ra_targ, dec=dec_targ, unit=(u.deg, u.deg))

# query Gaia sources and write the result
ref_cat = 'gaia_hst.csv'
max_rows = 3000
Gaia.ROW_LIMIT = max_rows
gaia_query = Gaia.query_object_async(coordinate=coord, radius=search_radius)

# https://gea.esac.esa.int/archive/documentation/GDR2/Gaia_archive/chap_datamodel/sec_dm_main_tables/ssec_dm_gaia_source.html
reduced_query = gaia_query['ra', 'dec', 'ra_error', 'dec_error',
                           'phot_g_mean_flux', 'ref_epoch', 'pmra', 'pmdec',
                           'pmra_error', 'pmdec_error', 'solution_id']
ngaia = len(reduced_query)

# np.abs was giving problems here so just filter twice with 10 and -10;
# proper motions are in mas/yr
filter = ((np.abs(reduced_query['pmdec']) < proper_motion_threshold) &
          (np.abs(reduced_query['pmra']) < proper_motion_threshold))
reduced_query = reduced_query[filter]

reduced_query.write(ref_cat, format='ascii.commented_header', delimiter='\t',
def hst_button(
    galaxies,
    skymethod='globalmin+match',
    instruments="ACS/WFC",
    prop_ids=None,
    filters=None,
    radius=None,
    filepath=None,
    download_data=True,
    correct_astrometry=True,
    create_mosaic=True,
    jy_conversion=True,
    verbose=False,
    log_filename='hst.log',
):
    """Create a HST mosaic, given a galaxy name.

    Using a galaxy name and radius, queries around that object, downloads
    available HST data and mosaics into a final product. It will create
    separate mosaics for each proposal ID, and the file structure will look
    like ``/galaxy/HST/proposal_id/galaxy_instrument_filter_proposal_id.fits``.

    N.B. I must confess to not being well-versed with HST data, so if anyone
    can help improve this please let me know.

    This data button uses a number of tools included in the drizzlepac Python
    package. This includes alignimages/tweakreg and astrodrizzle, which
    correct astrometry and are specifically tailored for the setup of HST
    data. This means that 1) creating mosaics with this will likely take a
    long time and 2) you will need a beefy computer (especially with regards
    to hard drive space).

    Args:
        galaxies (str or list): Names of galaxies to create mosaics for.
            Resolved by NED.
        skymethod (str, optional): Method used for AstroDrizzle's background
            matching step. In general, this can be left untouched but for
            mosaics with little overlap, it may be worth playing around with
            this. For instance, I've had some luck when there isn't much
            overlap between exposures using 'globalmin'. Options are
            'localmin', 'globalmin+match', 'globalmin', and 'match'. Defaults
            to 'globalmin+match'.
        instruments (str or list, optional): Instrument to download data for.
            Can be any combination of 'ACS/WFC', 'WFC3/IR', 'WFC3/UVIS',
            'WFPC2/PC', or 'WFPC2/WFC'. If you want all available data for
            all these instruments, select 'all', but this is not recommended!
            Defaults to 'ACS/WFC'.
        prop_ids (str or list, optional): Proposal IDs to download data for.
            Defaults to None, which will pull out all proposal IDs for each
            instrument.
        filters (str or list, optional): Filters to download data for. The
            script will look for each filter, for each instrument. Defaults
            to None, which will pull out all applicable filters for each
            instrument, for each proposal ID.
        radius (astropy.units.Quantity, optional): Radius around the galaxy
            to search for observations. Defaults to None, where it will query
            Ned to get size.
        filepath (str, optional): Path to save the working and output files
            to. If not specified, saves to current working directory.
        download_data (bool, optional): If True, will download data from
            MAST. Defaults to True.
        correct_astrometry (bool, optional): If True, will perform
            astrometric corrections to the downloaded data using alignimages.
            Defaults to True.
        create_mosaic (bool, optional): Switching this to True will mosaic
            the data using astrodrizzle as appropriate. Defaults to True.
        jy_conversion (bool, optional): Convert the mosaicked file from raw
            units to Jy/pix. Defaults to True.
        verbose (bool, optional): Can be used to suppress most of the output
            messages produced during the process. Mainly useful for
            debugging. Defaults to False.
        log_filename (str, optional): Will produce a stripped down log of
            what data the code is reducing. By default, will save to
            galaxy/hst.log.
""" if isinstance(galaxies, str): galaxies = [galaxies] if isinstance(instruments,str): instruments = [instruments] if instruments == 'all': instruments = ['ACS/WFC', 'WFC3/IR','WFC3/UVIS', 'WFPC2/PC','WFPC2/WFC'] if isinstance(filters,str): filters = [filters] if isinstance(prop_ids,str): prop_ids = [prop_ids] if filepath is not None: os.chdir(filepath) orig_dir = os.getcwd() if radius is not None: original_radius = radius.copy() else: original_radius = None steps = [] if download_data: steps.append(1) if correct_astrometry: steps.append(2) if create_mosaic: steps.append(3) if jy_conversion: steps.append(4) # Set up folders for various corrections os.environ['CRDS_SERVER_URL'] = 'https://hst-crds.stsci.edu' os.environ['CRDS_PATH'] = orig_dir+'/reference_files' os.environ['iref'] = orig_dir+'/reference_files/references/hst/wfc3/' os.environ['jref'] = orig_dir+'/reference_files/references/hst/acs/' os.environ['uref'] = orig_dir+'/reference_files/references/hst/wfpc2/' # For large proposals, astrodrizzle can run into file open # issues so raise the max file open amount. _, hard = resource.getrlimit(resource.RLIMIT_NOFILE) resource.setrlimit(resource.RLIMIT_NOFILE,(hard,hard)) # Change the temp directory -- if this gets filled up it can cause # problems. orig_tmpdir = os.environ['TMPDIR'] if not os.path.exists('tmp'): os.mkdir('tmp') os.environ['TMPDIR'] = orig_dir+'/tmp' for galaxy in galaxies: if not os.path.exists(galaxy): os.mkdir(galaxy) if not os.path.exists(galaxy+'/HST'): os.mkdir(galaxy+'/HST') if not verbose: # Various packages used here put out a lot of messages. Silence info messages. loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict] for logger in loggers: logger.setLevel(logging.ERROR) # Even if verbose is not True, still print out some useful messages to the # console. hst_logger = logging.getLogger('data_buttons') handler = logging.FileHandler(galaxy+'/'+log_filename,mode='w') hst_logger.addHandler(handler) hst_logger.addHandler(logging.StreamHandler()) hst_logger.setLevel(logging.INFO) hst_logger.info('Beginning '+galaxy) hst_logger.info(' ') hst_logger.info(' ') if radius is None: try: size_query = Ned.get_table(galaxy,table='diameters') radius = np.max(size_query['NED Major Axis'])/2*u.arcsec radius = radius.to(u.deg) except: hst_logger.warning(galaxy+' not resolved by Ned, using 0.2deg radius.') radius = 0.2*u.degree obs_table = Observations.query_criteria(objectname=galaxy, radius=radius, obs_type='all', obs_collection='HST') # Ignore any calibration observations. obs_table = obs_table[obs_table['intentType'] == 'science'] for instrument in instruments: # Pixel sizes for final mosaics selected to match the HLA. pix_size = {'ACS/HRC':0.025, 'ACS/SBC':0.03, 'ACS/WFC':0.05, 'NICMOS/NIC1':0.025, 'NICMOS/NIC2':0.05, 'NICMOS/NIC3':0.1, 'WFC3/IR':0.09, 'WFC3/UVIS':0.04, 'WFPC2/PC':0.05, 'WFPC2/WFC':0.1}[instrument] # Bits to consider good for drizzling. bits = {'ACS/HRC':256, 'ACS/SBC':256, 'ACS/WFC':256, 'NICMOS/NIC1':0, 'NICMOS/NIC2':0, 'NICMOS/NIC3':0, 'WFC3/IR':768, 'WFC3/UVIS':256, 'WFPC2/PC':'8,1024', 'WFPC2/WFC':'8,1024'}[instrument] # Filename extension, in order of preference. suffixes = {'ACS/WFC':['FLC','FLT'], 'WFC3/IR':['FLT'], 'WFC3/UVIS':['FLC','FLT'], 'WFPC2/PC':[['C0M','C1M']], 'WFPC2/WFC':[['C0M','C1M']], }[instrument] # The instruments often have / in the name, so account for # this in making folders and files. 
            hst_logger.info('Beginning ' + instrument)

            if not os.path.exists(galaxy + '/HST/' + instrument.replace('/', '_')):
                os.mkdir(galaxy + '/HST/' + instrument.replace('/', '_'))

            reset_filters = False

            instrument_table = obs_table[obs_table['instrument_name'] == instrument]

            reset_prop_ids = False

            if not prop_ids:
                prop_ids = list(np.unique(instrument_table['proposal_id']))
                reset_prop_ids = True

            hst_logger.info('Available proposal IDs: ' + ','.join(prop_ids))
            hst_logger.info(' ')

            for prop_id in prop_ids:

                hst_logger.info('Proposal ID: ' + str(prop_id))

                prop_table = instrument_table[instrument_table['proposal_id'] == prop_id]

                if not filters:
                    filters = list(np.unique(prop_table['filters']))
                    reset_filters = True

                hst_logger.info('Available filters: ' + ','.join(filters))

                for hst_filter in filters:

                    # If we have a highly illegal filter, just skip.
                    # TODO: This needs to be sorted for some fringe
                    # cases, probably.
                    if not hst_filter[0] == 'F':
                        continue

                    hst_logger.info('Filter: ' + str(hst_filter))

                    # Pull out available data and download.
                    filter_table = prop_table[prop_table['filters'] == hst_filter]

                    if len(filter_table) == 0:
                        hst_logger.warning('No available data to download. Skipping...')
                        continue

                    data_products_id = Observations.get_product_list(filter_table)

                    for suffix in suffixes:
                        download_table = Observations.filter_products(
                            data_products_id,
                            productSubGroupDescription=suffix,
                            mrp_only=False)

                        if len(download_table) > 0:
                            break

                    if isinstance(suffix, list):
                        filename_exts = [ext.lower() for ext in suffix]
                    else:
                        filename_exts = [suffix.lower()]

                    hst_logger.info(instrument + '/' + prop_id + '/' + hst_filter)

                    if not os.path.exists(galaxy +
                                          '/HST/' +
                                          instrument.replace('/', '_') +
                                          '/' +
                                          hst_filter):
                        os.mkdir(galaxy +
                                 '/HST/' +
                                 instrument.replace('/', '_') +
                                 '/' +
                                 hst_filter)

                    if not os.path.exists(galaxy + '/HST/' + prop_id):
                        os.mkdir(galaxy + '/HST/' + prop_id)

                    full_filepath = (galaxy +
                                     '/HST/' +
                                     instrument.replace('/', '_') +
                                     '/' +
                                     hst_filter +
                                     '/' +
                                     prop_id)

                    if not os.path.exists(full_filepath):
                        os.mkdir(full_filepath)

                    if 1 in steps:

                        # Download files
                        download_mast(download_table,
                                      download_dir="hst_temp/" + galaxy)

                        if not os.path.exists(full_filepath + '/raw'):
                            os.mkdir(full_filepath + '/raw')
                        if not os.path.exists(full_filepath + '/outputs'):
                            os.mkdir(full_filepath + '/outputs')

                        # Pull out the relevant files, and move to base folder.
                        for filename_ext in filename_exts:

                            matches = []
                            for root, _, filenames in os.walk("hst_temp/" + galaxy):
                                for filename in fnmatch.filter(
                                        filenames, "*_" + filename_ext + ".fits"):
                                    matches.append(os.path.join(root, filename))

                            for match in matches:
                                filename = match.split('/')
                                os.rename(match,
                                          full_filepath + '/raw/' + filename[-1])

                        # Clean up any temporary files.
                        shutil.rmtree("hst_temp/" + galaxy, ignore_errors=True)

                    filename_ext = filename_exts[0]

                    hst_files = glob.glob(full_filepath + '/raw/*_' +
                                          filename_ext + '.fits')

                    if 2 in steps:

                        # First, update the WCS information in case it's
                        # required.
                        for filename_ext in filename_exts:
                            hst_files = glob.glob(full_filepath + '/raw/*_' +
                                                  filename_ext + '.fits')

                            crds.assign_bestrefs(hst_files,
                                                 sync_references=True)

                            # For WFPC2, the CRDS doesn't download everything
                            # needed. Download the GEIS data files and
                            # rerun the bestrefs assignment.
                            if 'WFPC2' in instrument:
                                geis_hdrs = glob.glob(os.environ['uref'] + '/*h')

                                for geis_hdr in geis_hdrs:

                                    geis_data = geis_hdr[:-1] + 'd'

                                    if not os.path.exists(geis_data):

                                        geis_data = geis_data.split('/')[-1]

                                        print(geis_data)
                                        print(os.environ['uref'])

                                        wget.download(
                                            os.environ['CRDS_SERVER_URL'] +
                                            '/unchecked_get/references/hst/' +
                                            geis_data,
                                            out=os.environ['uref'])

                                crds.assign_bestrefs(hst_files,
                                                     sync_references=True)

                            for hst_file in hst_files:
                                stwcs.updatewcs.updatewcs(hst_file,
                                                          use_db=False)

                        os.chdir(full_filepath + '/raw')

                        filename_ext = filename_exts[0]
                        hst_files = glob.glob('*_' + filename_ext + '.fits')

                        # Normalize all files.
                        photeq.photeq(', '.join(hst_files), readonly=False)
                        os.rename('photeq.log', '../outputs/photeq.log')

                        if 'WFPC' in instrument:

                            # Using tweakreg, align each frame to GAIA.
                            gaia_table = Gaia.query_object_async(
                                coordinate=galaxy, radius=2 * radius)
                            ras = gaia_table['ra']
                            decs = gaia_table['dec']

                            source_table = Table([ras, decs])
                            source_table.write('gaia.cat',
                                               format='ascii.fast_commented_header')

                            tweakreg.TweakReg(hst_files,
                                              imagefindcfg={'threshold': 5,
                                                            'conv_width': 3},
                                              refcat='gaia.cat',
                                              # expand_refcat=True,
                                              enforce_user_order=False,
                                              shiftfile=True,
                                              outshifts='shifts.txt',
                                              searchrad=10,
                                              minobj=5,
                                              separation=0,
                                              updatehdr=True,
                                              reusename=True,
                                              wcsname='TWEAK',
                                              interactive=False,
                                              fitgeometry='general',
                                              clean=True,
                                              see2dplot=False
                                              )

                            # Update the c1m files to use the TWEAK wcs
                            for hst_file in hst_files:
                                dq_file = hst_file.replace('c0', 'c1')
                                tweakback.tweakback(hst_file,
                                                    dq_file,
                                                    newname='TWEAK')

                            plot_files = glob.glob('*.png')
                            for plot_file in plot_files:
                                os.remove(plot_file)

                            cat_files = glob.glob('*.coo')
                            for cat_file in cat_files:
                                os.remove(cat_file)

                            os.rename('shifts_wcs.fits',
                                      '../outputs/shifts_wcs.fits')
                            os.rename('tweakreg.log',
                                      '../outputs/tweakreg.log')
                            os.rename('shifts.txt',
                                      '../outputs/shifts.txt')

                        elif 'ACS' in instrument or 'WFC3' in instrument:

                            # Correct astrometry using alignimages. First,
                            # correct each frame separately.
                            pool = mp.Pool(mp.cpu_count())

                            suitable_hst_files = pool.map(astrometric_correction,
                                                          hst_files)

                            pool.close()

                            suitable_hst_files = [x for x in suitable_hst_files
                                                  if x is not None]

                            if len(suitable_hst_files) == 0:
                                hst_logger.warning('Failure with astrometry corrections. Skipping')
                                os.chdir(orig_dir)
                                continue

                            # Now, align every suitable frame simultaneously.
                            output_table = astrometric_correction(suitable_hst_files)

                            with open('../outputs/astrometry.pkl', 'wb') as table_file:
                                pickle.dump(output_table, table_file)

                        else:
                            raise Exception('Unknown instrument!')

                        os.chdir(orig_dir)

                    os.chdir(full_filepath)

                    if 3 in steps:

                        os.chdir('raw')

                        if 'WFPC2' in instrument:
                            hst_files = glob.glob('*_c0m.fits')
                            wcskey = 'TWEAK'

                        elif 'ACS' in instrument or 'WFC3' in instrument:
                            with open('../outputs/astrometry.pkl', 'rb') as table_file:
                                output_table = pickle.load(table_file)

                            # We only want fits where an acceptable astrometric
                            # solution has been found.
                            suitable_fits = np.where(output_table['fit_qual'] < 5)
                            # (output_table['fit_qual'] >= 1)

                            hst_files = list(output_table[suitable_fits]['imageName'])

                            if len(output_table[suitable_fits]) == 0:
                                hst_logger.warning('Failure with astrometry corrections. Skipping')
                                os.chdir(orig_dir)
                                continue

                            wcskey = ' '

                        else:
                            raise Exception('Unknown instrument!')

                        # Following Dalcanton+ (2012), group exposures into
                        # long (>50s) and short (<=50s), and process for cosmic
                        # rays separately.
                        exp_times = []
                        for hst_file in hst_files:
                            hdu = fits.open(hst_file)[0]
                            exp_time = hdu.header['EXPTIME']
                            exp_times.append(exp_time)

                        for exp_group in ['short', 'long']:

                            hst_files_group = []
                            for i in range(len(exp_times)):
                                if exp_times[i] > 50 and exp_group == 'long':
                                    hst_files_group.append(hst_files[i])
                                elif exp_times[i] <= 50 and exp_group == 'short':
                                    hst_files_group.append(hst_files[i])

                            if len(hst_files_group) == len(hst_files):
                                exp_group = ''

                            if len(hst_files_group) == 0:
                                continue

                            if len(exp_group) > 0:
                                output_name = '../outputs/' + galaxy + '_' + exp_group
                                drizzle_log_name = '../outputs/astrodrizzle_' + exp_group + '.log'
                            else:
                                output_name = '../outputs/' + galaxy
                                drizzle_log_name = '../outputs/astrodrizzle.log'

                            # Perform the mosaicking. Generally, use iminmed.
                            # However, sometimes iminmed will fail so
                            # for the other instruments we'll use imedian as
                            # a fallback.
                            combine_types = ['iminmed', 'imedian']

                            if 'WFPC2' in instrument:
                                combine_nhigh = 1
                            else:
                                combine_nhigh = 0

                            for combine_type in combine_types:
                                try:
                                    astrodrizzle.AstroDrizzle(
                                        input=hst_files_group,
                                        output=output_name,
                                        preserve=False,
                                        clean=True,
                                        combine_type=combine_type,
                                        combine_nhigh=combine_nhigh,
                                        skymethod=skymethod,
                                        sky_bits=bits,
                                        driz_sep_bits=bits,
                                        driz_sep_fillval=99999,
                                        combine_hthresh=90000,
                                        final_scale=pix_size,
                                        final_bits=bits,
                                        final_fillval=0,
                                        wcskey=wcskey,
                                        final_rot=0,
                                    )
                                    break
                                except ValueError:
                                    pass

                            # Move the AstroDrizzle log.
                            os.rename('astrodrizzle.log', drizzle_log_name)

                        # Move back to the original directory.
                        os.chdir(orig_dir)

                    if 4 in steps:

                        mosaic_outputs = glob.glob(full_filepath + '/outputs/*_sci.fits')

                        for mosaic_output in mosaic_outputs:

                            # Replace any fillvals with NaNs.
                            hdu = fits.open(mosaic_output)[0]
                            hdu.data[hdu.data == 0] = np.nan
                            fits.writeto(mosaic_output,
                                         hdu.data, hdu.header,
                                         overwrite=True)

                            if '_long_' in mosaic_output.split('/')[-1]:
                                new_filename = (galaxy +
                                                '/HST/' +
                                                prop_id +
                                                '/' +
                                                galaxy +
                                                '_' +
                                                instrument.replace('/', '_') +
                                                '_' +
                                                hst_filter +
                                                '_' +
                                                prop_id +
                                                '_long.fits')
                            elif '_short_' in mosaic_output.split('/')[-1]:
                                new_filename = (galaxy +
                                                '/HST/' +
                                                prop_id +
                                                '/' +
                                                galaxy +
                                                '_' +
                                                instrument.replace('/', '_') +
                                                '_' +
                                                hst_filter +
                                                '_' +
                                                prop_id +
                                                '_short.fits')
                            else:
                                new_filename = (galaxy +
                                                '/HST/' +
                                                prop_id +
                                                '/' +
                                                galaxy +
                                                '_' +
                                                instrument.replace('/', '_') +
                                                '_' +
                                                hst_filter +
                                                '_' +
                                                prop_id +
                                                '.fits')

                            convert_to_jy(mosaic_output,
                                          new_filename)

                    if reset_filters:
                        filters = None

                    hst_logger.info(' ')

                if reset_prop_ids:
                    prop_ids = None

                hst_logger.info(' ')

            if original_radius is None:
                radius = None
            else:
                radius = original_radius.copy()

    # Clear out the tmp folder and reset to the original.
    shutil.rmtree('tmp/', ignore_errors=True)
    os.environ['TMPDIR'] = orig_tmpdir
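# Hypothetical call of hst_button() above (the galaxy name, filter, and
# radius are illustrative only; this also assumes the module-level imports
# used by the function, such as os, glob, logging, Ned, and Observations,
# are in place):
import astropy.units as u

hst_button(['M101'],
           instruments='ACS/WFC',
           filters=['F814W'],
           radius=10 * u.arcmin,
           download_data=True,
           correct_astrometry=True,
           create_mosaic=True,
           jy_conversion=True)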
import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery.gaia import Gaia
import numpy as np

Gaia.ROW_LIMIT = -1

# Two coordinate components, so two units (the original also passed u.pc,
# which SkyCoord rejects).
coord = SkyCoord(ra=301, dec=-60, unit=(u.degree, u.degree), frame='icrs')
width = u.Quantity(0.1, u.deg)
height = u.Quantity(0.1, u.deg)
radius = u.Quantity(0.02, u.deg)

r = Gaia.query_object_async(coordinate=coord, width=width, height=height,
                            radius=radius)

print(r['dist'])
print(type(r['designation']))
print(r.colnames)
# print(r['source_id'])

arr = np.array(r)
print(arr.shape)
def image_gaia_stars(image, wcs, radius=None, center=None, pixel=0.168,
                     mask_a=694.7, mask_b=4.04, verbose=False, visual=False,
                     size_buffer=1.4, tap_url=None, img_size=(8, 8)):
    """Search for bright stars using GAIA catalog.

    TODO: Should be absorbed by the object for image later.
    TODO: Should have a version that just uses the local catalog.
    """
    # Central coordinate
    if center is None:
        ra_cen, dec_cen = wcs.wcs_pix2world(image.shape[0] / 2,
                                            image.shape[1] / 2,
                                            0)
        img_cen_ra_dec = SkyCoord(ra_cen, dec_cen, unit=('deg', 'deg'),
                                  frame='icrs')
        if verbose:
            print("# The center of the search: RA={:9.5f}, DEC={:9.5f}".format(
                ra_cen, dec_cen))
    else:
        if not isinstance(center, SkyCoord):
            raise TypeError(
                "# The center coordinate should be a SkyCoord object")
        img_cen_ra_dec = center
        if verbose:
            print("# The center of the search: RA={:9.5f}, DEC={:9.5f}".format(
                center.ra, center.dec))

    # Width and height of the search box
    if radius is None:
        img_search_x = Quantity(pixel * (image.shape)[0] * size_buffer,
                                u.arcsec)
        img_search_y = Quantity(pixel * (image.shape)[1] * size_buffer,
                                u.arcsec)
        if verbose:
            print("# The width of the search: {:7.1f}".format(img_search_x))
            print("# The height of the search: {:7.1f}".format(img_search_y))
    else:
        if not isinstance(radius, Quantity):
            raise TypeError(
                "# Searching radius needs to be an Astropy Quantity.")
        if verbose:
            print("# The searching radius is: {:7.2f}".format(radius))

    # Search for stars
    if tap_url is not None:
        with suppress_stdout():
            from astroquery.gaia import TapPlus, GaiaClass
            Gaia = GaiaClass(TapPlus(url=tap_url))

            if radius is not None:
                gaia_results = Gaia.query_object_async(
                    coordinate=img_cen_ra_dec, radius=radius, verbose=verbose)
            else:
                gaia_results = Gaia.query_object_async(
                    coordinate=img_cen_ra_dec, width=img_search_x,
                    height=img_search_y, verbose=verbose)
    else:
        with suppress_stdout():
            from astroquery.gaia import Gaia

            if radius is not None:
                gaia_results = Gaia.query_object_async(
                    coordinate=img_cen_ra_dec, radius=radius, verbose=verbose)
            else:
                gaia_results = Gaia.query_object_async(
                    coordinate=img_cen_ra_dec, width=img_search_x,
                    height=img_search_y, verbose=verbose)

    if gaia_results:
        # Convert the (RA, Dec) of stars into pixel coordinates
        ra_gaia = np.asarray(gaia_results['ra'])
        dec_gaia = np.asarray(gaia_results['dec'])
        x_gaia, y_gaia = wcs.wcs_world2pix(ra_gaia, dec_gaia, 0)

        # Generate mask for each star
        rmask_gaia_arcsec = mask_a * np.exp(
            -gaia_results['phot_g_mean_mag'] / mask_b)

        # Update the catalog
        gaia_results.add_column(Column(data=x_gaia, name='x_pix'))
        gaia_results.add_column(Column(data=y_gaia, name='y_pix'))
        gaia_results.add_column(
            Column(data=rmask_gaia_arcsec, name='rmask_arcsec'))

        if visual:
            fig = plt.figure(figsize=img_size)
            ax1 = fig.add_subplot(111)
            ax1 = display_single(image, ax=ax1)

            # Plot an ellipse for each object
            for star in gaia_results:
                smask = mpl_ellip(xy=(star['x_pix'], star['y_pix']),
                                  width=(2.0 * star['rmask_arcsec'] / pixel),
                                  height=(2.0 * star['rmask_arcsec'] / pixel),
                                  angle=0.0)
                smask.set_facecolor('coral')
                smask.set_edgecolor('coral')
                smask.set_alpha(0.3)
                ax1.add_artist(smask)

            # Show stars
            ax1.scatter(gaia_results['x_pix'], gaia_results['y_pix'],
                        c='orangered', s=100, alpha=0.9, marker='+')

            ax1.set_xlim(0, image.shape[0])
            ax1.set_ylim(0, image.shape[1])

            return gaia_results, fig

        return gaia_results

    return None
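# Usage sketch for image_gaia_stars() above, assuming a FITS image with a
# valid WCS and that the module-level helpers it relies on (suppress_stdout,
# Quantity, Column, etc.) are available.  The filename is a placeholder and
# the 0.168 arcsec/pix scale is simply the function default.
from astropy.io import fits
from astropy.wcs import WCS

hdu = fits.open('coadd_image.fits')[0]
stars = image_gaia_stars(hdu.data, WCS(hdu.header),
                         pixel=0.168, verbose=True, visual=False)
if stars is not None:
    print(len(stars), 'GAIA stars; mask radii stored in column rmask_arcsec')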