Example #1
def galaxies_in_box(center, ra_span, dec_span):

    """
    This function ...
    :param center:
    :param ra_span:
    :param dec_span:
    :return:
    """

    # Initialize a list to contain the galaxies
    names = []

    # Alternative approach (NED cone search)? It seemed to return many more results:
    #ra_radius = 0.5 * ra_span.value
    #dec_radius = 0.5 * dec_span.value
    #radius = math.sqrt(ra_radius**2 + dec_radius**2)
    #result_table = Ned.query_region(center, radius=radius)

    # Create a new Vizier object and set the row limit to -1 (unlimited)
    viz = Vizier(keywords=["galaxies", "optical"])
    viz.ROW_LIMIT = -1

    # Debugging
    log.debug("Querying the HYPERLEDA catalog ...")

    # Query Vizier and obtain the resulting table
    result = viz.query_region(center.to_astropy(), width=ra_span, height=dec_span, catalog=["VII/237"])

    # I once noticed something strange: the result contained no entries for the following parameters:
    #   center = (149.07614359, 69.24847936)
    #   ra_span = 1.600000128 deg
    #   dec_span = 1.3966667784 deg
    #   catalog = ["VII/237"]
    # When ra_span was changed only slightly (e.g. changing the last digit to a '7'), the output was normal.
    # Thus, the query seems to go wrong for specific values of the width (and/or height), in which
    # case changing the value very slightly resolves the problem...
    # I am baffled by this and I see no reasonable explanation.
    if len(result) == 0:

        ra_span *= 1.0 + 1e-5
        result = viz.query_region(center.to_astropy(), width=ra_span, height=dec_span, catalog=["VII/237"])

    table = result[0]

    # Loop over the rows in the table
    for entry in table:
        name = "PGC " + str(entry["PGC"])
        coordinate = SkyCoordinate(ra=entry["_RAJ2000"], dec=entry["_DEJ2000"], unit="deg", frame="fk5")
        namepluscoordinate = (name, coordinate)
        names.append(namepluscoordinate)

    # Return the list of galaxies
    return names
Example #2
def query_nvss(options, ra0, dec0, s=">0.0", proj='SIN'):
	'''
	query_nvss: module which queries the NVSS using the Vizier protocol. 
	inputs: ra0, dec0, s="<20"
	ra0 = the central ra in degrees
	dec0 = the central dec in degrees
	s = the flux cutoff
	returns L, M (relative coordinates in degrees), N (number of sources), S (1.4GHz Flux
	Density in mJy)
	'''
	v = Vizier(column_filters={"S1.4":s})
	v.ROW_LIMIT = 10000
	result = v.query_region(coord.SkyCoord(ra=ra0, dec=dec0, unit=(u.deg, u.deg), frame='icrs'), 
	    radius=Angle(1, "deg"), catalog='NVSS')
	ra = result[0]['_RAJ2000']
	dec = result[0]['_DEJ2000']
	N = len(result[0])
	if proj.upper()=='SIN':
		L = (ra-ra0)*pl.cos(dec*deg2rad)
		M = dec-dec0
	if proj.upper()=='NCP':
		L = 57.2957795*pl.cos(deg2rad*dec)*pl.sin(deg2rad*(ra-ra0))
		M = 57.2957795*(pl.cos(deg2rad*dec0) - pl.cos(deg2rad*dec)*pl.cos(deg2rad*(ra-ra0)))/pl.sin(deg2rad*dec0) 
	S = result[0]['S1.4']
	ascii.write(result[0], options.outfile+'.dat', format='tab') 
	ann_writer(options, result[0])
	return L, M, N, S
Example #3
def catalog_search(frame_wcs, shape, desired_catalog,
                   ra_column='RAJ2000',
                   dec_column='DEJ2000',
                   radius=0.5,
                   clip_by_frame=True):
    """
    Description: This function takes coordinate data from an image and a
    catalog name and returns the positions of those stars.
    Preconditions:frame_wcs is a WCS object, shape is tuple, list or array of
    numerical values, desired_catalog is a string and radius is a numerical
    value.
    Postconditions:
    """
    rad = radius * units.deg
    # Find the center of the frame
    center_coord = frame_wcs.all_pix2world([[shape[1] / 2, shape[0] / 2]], 0)
    center = SkyCoord(center_coord, frame='icrs', unit='deg')

    # Get catalog via cone search
    Vizier.ROW_LIMIT = -1  # Set row_limit to have no limit
    cat = Vizier.query_region(center, radius=rad, catalog=desired_catalog)
    # Vizier always returns list even if there is only one element. Grab that
    # element.
    cat = cat[0]
    cat_coords = SkyCoord(ra=cat[ra_column], dec=cat[dec_column])
    if clip_by_frame:
        in_fov = in_frame(frame_wcs, cat_coords)
    else:
        in_fov = np.ones([len(cat_coords)], dtype=bool)  # np.bool was removed in newer NumPy
    x, y = frame_wcs.all_world2pix(cat_coords.ra, cat_coords.dec, 0)
    return (cat[in_fov], x[in_fov], y[in_fov])
Example #4
def panstarrs_query(ra_deg, dec_deg, rad_deg, maxmag=20,
                    maxsources=10000):
    """
    Query PanSTARRS @ VizieR using astroquery.vizier
    :param ra_deg: RA in degrees
    :param dec_deg: Declination in degrees
    :param rad_deg: field radius in degrees
    :param maxmag: upper limit g-band magnitude (optional)
    :param maxsources: maximum number of sources
    :return: astropy.table object
    """
    vquery = Vizier(columns=['objID', 'RAJ2000', 'DEJ2000',
                             'e_RAJ2000', 'e_DEJ2000',
                             'gmag', 'e_gmag',
                             'rmag', 'e_rmag',
                             'imag', 'e_imag',
                             'zmag', 'e_zmag',
                             'ymag', 'e_ymag'],
                    column_filters={"gmag":
                                    ("<%f" % maxmag)},
                    row_limit=maxsources)

    field = coord.SkyCoord(ra=ra_deg, dec=dec_deg,
                           unit=(u.deg, u.deg),
                           frame='icrs')
    return vquery.query_region(field,
                               width=("%fd" % rad_deg),
                               catalog="II/349/ps1")[0]
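A minimal usage sketch (added here, not part of the original snippet); the field position and radius are arbitrary illustration values, and the astroquery/astropy imports used by panstarrs_query are assumed to be in scope:

# Hypothetical call: query a 0.1 deg field around an arbitrary position
ps1 = panstarrs_query(ra_deg=10.684, dec_deg=41.269, rad_deg=0.1, maxmag=18)
print(ps1['objID', 'gmag', 'rmag'][:5])  # peek at the first few sources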
Example #5
def selected_catalog( ra, dec, FOV_base, FOV_height, catalog ):

     '''

     Vizier.query_region in the selected FoV. The histogram of the apparent blue
     magnitudes and the sky coordinates are provided if the GWGC catalog is selected.

     '''

     from astroquery.vizier import Vizier 

     import astropy.coordinates as coord
     import astropy.units as u

     import aladinSAMP

     # setting rows limit
     Vizier.ROW_LIMIT = None

     # no reduced query size (rq)
     reduce_query_size = 1
     FOV_base_reduced, FOV_height_reduced = FOV_base * reduce_query_size, FOV_height * reduce_query_size

     FOV_base_str, FOV_height_str = str( FOV_base_reduced ) + 'd', str( FOV_height_reduced ) + 'd'

     result = Vizier.query_region(coord.SkyCoord(ra = ra, dec = dec, unit = (u.deg, u.deg), frame='icrs'),
                                  width = FOV_height_str, height = FOV_base_str, catalog = [ catalog ])

     result.pprint()
     print(result.values())
Example #6
 def query_vizier(self, catalog='APASS'):
     '''
     Uses the astroquery environment to get the data from Vizier
     around (self.ra, self.dec). The catalogue (e.g. 'APASS') can be
     selected with the catalog argument.
     '''
     result = Vizier.query_region("%.6f %.6f"%(self.ra, self.dec), radius=Angle(self.rad, "deg"), \
         catalog=catalog) #column_filters={"rmag":">%s"%self.minmag,"rmag":"<%s"%self.maxmag }
     return result[0]
Example #7
    def query_circle(self, ra, dec, radius, catalog):
        """Vizier.query_region in a circle FoV."""

        Vizier.ROW_LIMIT = None
        radius_str = str(radius) + "d"

        query_result = Vizier.query_region(
            SkyCoord(ra=ra, dec=dec, unit=(u.deg, u.deg), frame="icrs"), radius=radius_str, catalog=[catalog]
        )
        return query_result
Example #8
	def showVizierCatalogs(self):
		(ra, dec) = self.centre
		from astroquery.vizier import Vizier
		Vizier.ROW_LIMIT = 50
		from astropy import coordinates
		from astropy import units as u
		c = coordinates.SkyCoord(ra,dec,unit=('deg','deg'),frame='icrs')
		skyHeight= coordinates.Angle(self.raRange, unit = u.deg)
		results = Vizier.query_region(coordinates = c, radius= 1.0 * u.deg)
		print(results)
Example #9
def serendipitous_variablestars(catalogs, display=True):
    """match catalogs with VSX catalog using astroquery.Vizier
    """

    if display:
        print('# match frames with variable star database... ', end=' ',
              flush=True)
    logging.info('match frames with variable star database')

    # derive center and radius of field of view of all images
    ra_deg, dec_deg, rad_deg = skycenter(catalogs)
    logging.info('FoV center (%.7f/%+.7f) and radius (%.2f deg) derived' %
                 (ra_deg, dec_deg, rad_deg))

    # derive observation midtime of sequence
    midtime = np.average([cat.obstime[0] for cat in catalogs])

    # setup Vizier query
    # note: column filters uses original Vizier column names
    # -> green column names in Vizier

    logging.info(('query Vizier for VSX at %7.3f/%+8.3f in '
                  + 'a %.2f deg radius') %
                 (ra_deg, dec_deg, rad_deg))

    field = coord.SkyCoord(ra=ra_deg, dec=dec_deg, unit=(u.deg, u.deg),
                           frame='icrs')

    vquery = Vizier(columns=['Name', 'RAJ2000', 'DEJ2000'])
    try:
        data = vquery.query_region(field,
                                   width=("%fd" % rad_deg),
                                   catalog="B/vsx/vsx")[0]
    except IndexError:
        if display:
            print('no data available from VSX')
        logging.error('no data available from VSX')
        return []

    objects = []
    for cat_idx, cat in enumerate(catalogs):
        for star in data:
            objects.append({'ident': star['Name'],
                            'obsdate.jd': cat.obstime[0],
                            'cat_idx': cat_idx,
                            'ra_deg': star['RAJ2000'],
                            'dec_deg': star['DEJ2000']})

    if display:
        print(len(objects)/len(catalogs), 'variable stars found')

    return objects
Example #10
    def query_box(self, ra, dec, base, height, catalog):
        """Vizier.query_region in a square/rectangular FoV."""

        Vizier.ROW_LIMIT = None
        base_str, height_str = str(base) + "d", str(height) + "d"

        query_result = Vizier.query_region(
            SkyCoord(ra=ra, dec=dec, unit=(u.deg, u.deg), frame="icrs"),
            width=height_str,
            height=base_str,
            catalog=[catalog],
        )
        return query_result
Example #11
def fetch_objects_in_box(box, catalog, keywords, radius, limit=None, column_filters=None):

    """
    This function ...
    :param box:
    :param catalog:
    :param keywords:
    :param radius:
    :param limit:
    :param column_filters:
    :return:
    """

    # Define the center coordinate for the box
    coordinate = SkyCoordinate(ra=box[0], dec=box[1], unit="deg", frame="fk5") # frame: icrs, fk5... ?

    # Make a Vizier object
    if column_filters is None:
        viz = Vizier(columns=['_RAJ2000', '_DEJ2000','B-V', 'Vmag', 'Plx'], keywords=keywords)
    else:
        viz = Vizier(columns=['_RAJ2000', '_DEJ2000','B-V', 'Vmag', 'Plx'], column_filters=column_filters, keywords=keywords)

    # No limit on the number of entries
    viz.ROW_LIMIT = limit if limit is not None else -1

    # Query the box of our image frame
    result = viz.query_region(coordinate.to_astropy(), width=box[3] * Unit("deg"), height=box[2] * Unit("deg"), catalog=catalog)

    region_string = "# Region file format: DS9 version 3.0\n"
    region_string += "global color=green\n"

    # Result may contain multiple tables (for different catalogs)
    for table in result:

        # For every entry in the table
        for entry in table:

            # Get the right ascension and the declination
            ra = entry[0]
            dec = entry[1]

            # Create a string with the coordinates of the star
            regline = "fk5;circle(%s,%s,%.2f\")\n" % (ra, dec, radius)

            # Add the parameters of this star to the region string
            region_string += regline

    # Return the region
    return regions.parse(region_string)
Example #12
def getSDSS(galaxy):
    """
    Query SDSS through Vizier, pick out only the stellar sources,
    and put the SDSS magnitudes into AB
    """
    Vizier.ROW_LIMIT = -1 # Removes row limit on output table
    result = Vizier.query_region(galaxy, radius=Angle(0.1, "deg"), catalog='SDSS')
    # Only select stellar sources
    index = []
    for i, entry in enumerate(result[1]):
        if entry['cl'] != 6:
            index.append(i)
    result[1].remove_rows(index)

    # SDSS magnitudes are not exactly in AB so need to correct
    return result[1]
Example #13
	def getVizierObjects(self, catalogName):
		""" Make a request to Vizier to get an Astropy Table of catalog object for this field. """
		(ra, dec) = self.centre
		
		availableCatalogs = catalogMetadata.keys()
		if catalogName not in availableCatalogs:
			print "The definitions for this catalogue are unknown. Available catalogues are:", availableCatalogs
			return
		
		# First look for a cached copy of this data
		filenameParts = self.filename.split('.')
		catalogCache = filenameParts[0] + "_" + catalogName + "_cache.fits"
		cached = False
		if not self.ignorecache:
			print "Looking for a cached copy of the catalogue:", catalogCache, 
			if os.path.exists(catalogCache):
				print "FOUND"
				cached = True
			else: print "NOT FOUND"
	
		if cached:
			newCatalog = Table.read(catalogCache)
		else:			
			print "Going online to fetch %s results from Vizier with mag limit %f."%(catalogName, self.magLimit)
			from astroquery.vizier import Vizier
			Vizier.ROW_LIMIT = 1E5
			Vizier.column_filters={"r":"<%d"%self.magLimit}
			from astropy import coordinates
			from astropy import units as u
			c = coordinates.SkyCoord(ra,dec,unit=('deg','deg'),frame='icrs')
			skyRA  = coordinates.Angle(self.raRange, unit = u.deg)
			skyDEC = coordinates.Angle(self.decRange, unit = u.deg)
			print "Sky RA, DEC range:", skyRA, skyDEC
			print "going to Astroquery for:", catalogMetadata[catalogName]['VizierLookup']
			result = Vizier.query_region(coordinates = c, width = skyRA, height = skyDEC, catalog = catalogMetadata[catalogName]['VizierName'], verbose=True)
			print result
			newCatalog = result[catalogMetadata[catalogName]['VizierName']]
			newCatalog.pprint()
			
			
			# Write the new catalog to the cache file
			newCatalog.write(catalogCache, format='fits', overwrite=True)
		
		self.addCatalog(newCatalog, catalogName)
		
		return
Example #14
    def query_color(self,
                    ra,
                    dec,
                    radius=0.01,
                    min_mag=10,
                    max_mag=20,
                    max_sources=100):
        """

        Query NOMAD object
        @param ra: RA of field center for search, format: degrees or hh:mm:ss
        @type ra: str
        @param dec: DEC of field center for search, format: degrees or hh:mm:ss
        @type dec: str
        @param radius: Radius.
        @type radius: float
        @param min_mag: Minimum magnitude value of query.
        @type min_mag: float
        @param max_mag: Maximum magnitude value of query.
        @type max_mag: float
        @param max_sources: Maximum number of sources to be queried.
        @type max_sources: int
        @return: astropy.table
        """

        c = coord.SkyCoord(ra,
                           dec,
                           unit=(u.deg, u.deg),
                           frame='icrs')
        r = radius * u.deg

        vquery = Vizier(columns=['NOMAD1',
                                 'RAJ2000',
                                 'DEJ2000',
                                 'Bmag',
                                 'Vmag',
                                 'Rmag'],
                        column_filters={"Rmag":
                                        (">{:f}".format(min_mag)),
                                        "Rmag":
                                        ("<{:f}".format(max_mag))},
                        row_limit=max_sources)

        result = vquery.query_region(c, radius=r, catalog="NOMAD")[0]

        return(result)
Example #15
def getSDSS(galaxy):
    """
    Query SDSS through Vizier, pick out only the stellar sources,
    and put the SDSS magnitudes into AB
    """
    Vizier.ROW_LIMIT = -1 # Removes row limit on output table
    result = Vizier.query_region(galaxy, width=1.0*u.deg,
                                 height=1.0*u.deg, catalog='SDSS')
    # Get the most recent SDSS catalog (the last table in the result list)
    table = result[len(result) - 1]
    # Only select stellar sources (cl == 6)
    index = []
    for i, entry in enumerate(table):
        if entry['cl'] != 6:
            index.append(i)
    table.remove_rows(index)

    # SDSS magnitudes are not exactly in AB so need to correct (not doing this yet).
    return table
Example #16
def get_hip(ra, dec, mag):
    """
    Given an RA (in hours and decimals), and Dec (in
    degrees and decimals), and a magnitude (in
    visual magnitudes), queries VizieR and attempts
    to locate a Hipparcos star ID at the location.

    Returns an integer HIP ID if found, or None otherwise

    Maintains a .hip_cache file to speed up lookups;
    you can delete the .hip_cache file to perform
    fresh lookups.
    """
    coord = SkyCoord(ra=Angle("{} hours".format(ra)),
                     dec=Angle("{} degree".format(dec)),
                     obstime="J2000.0")

    # Search the Hipparcos catalog, and only return results that include
    # a HIP (Hipparcos) column, sorting the results by magnitude.  The
    # top result is almost certainly the star we want.
    # Constrain the search to stars close in Vmag to our target (mag +/- 0.5); the
    # constraint is given as a column filter so that it applies to query_region()
    # below (a bare v.query_constraints() call runs a separate query and is discarded).
    v = Vizier(catalog='I/239/hip_main', columns=["HIP", "+Vmag"],
               column_filters={"Vmag": "{}..{}".format(mag - 0.5, mag + 0.5)})

    # Start with a targeted search, which returns more quickly from the
    # API. If that fails to find a star, query a 3 degree diameter circle
    # around the ra/dec.  This is because Sky & Telescope has a convention
    # of stopping their constellation lines a degree or so away from the
    # star, if that star isn't actually part of the constellation
    # (example: Alpheratz, which is part of the Pegasus figure, but the
    # star is in Andromeda)
    for radius in (0.05, 1.5):
        result = v.query_region(coord, radius=radius*u.deg)
        try:
            table = result['I/239/hip_main']
        except TypeError:
            # A TypeError means that the results didn't include anything from
            # the I/239/hip_main catalog.  The "in" operator doesn't seem to
            # work with Table objects.
            continue
        else:
            return table['HIP'][0]
    return None
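A minimal usage sketch (an addition, not in the original); the values roughly correspond to Alpheratz, the star mentioned in the comments above (RA of about 0.14 h, Dec of about +29.09 deg, V of about 2.06):

# Hypothetical call; RA is in decimal hours, Dec in decimal degrees, mag in Vmag
hip_id = get_hip(0.14, 29.09, 2.06)
print(hip_id)  # an integer HIP identifier, or None if nothing was found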
Example #17
def calc_frame_phot_correction(objects, cat_file, vizier_var, phot_var, cat_name='SDSS', 
                                separation=2*u.arcsec, fields=['*']):
    positions = coords.SkyCoord(objects['ra'], objects['dec'], frame='icrs', unit='deg')
    
    print('Querying Vizier')
    v_cat = Vizier(columns=fields, catalog=catalog_info[cat_name]['vizier name'])
    result = v_cat.query_region(positions, radius=separation)
    # Vizier returns a table list with a table for each catalog
    # Since we have only chosen one catalog, we take the first (and only) table
    catalog = result[0] 
    catalog.rename_column('_RAJ2000','ra')
    catalog.rename_column('_DEJ2000','dec')
    catalog.rename_column(vizier_var, phot_var)
    # remove results that have bad photometry
    #catalog = catalog[catalog[phot_var]>-9000]
    
    # only keep the catalog entries that are primary stars
    catalog = catalog[catalog['cl']==catalog_info[cat_name]['class']['star']]
    catalog = (catalog[catalog[catalog_info[cat_name]['mode']] ==
                        catalog_info[cat_name]['modes']['primary']])

    print('Finding matches for catalog stars')
    # find the corresponding object for each entry in the catalog array
    dr = coords.Angle(separation).to('degree').value
    cat_coords = coords.SkyCoord(catalog['ra'], catalog['dec'], frame='icrs', unit='deg')
    matches, d2d, d3d = cat_coords.match_to_catalog_sky(positions)
    match_col = Column(matches)
    catalog['matches'] = match_col
    
    # Remove all sources with duplicate entries (there can be a difference between
    # the astropy catalog match and the match from Vizier in a few rare cases)
    duplicates = np.where(np.bincount(match_col)>1)[0]
    for duplicate in duplicates:
        match_col[np.where(match_col==duplicate)]=-1
    catalog = catalog[match_col>=0]    

    catalog = catalog.group_by('matches')
    print('Total matches:', len(catalog.groups.keys['matches']))
    
    cat_array=np.array(catalog)
    np.save(cat_file, cat_array)
    print('Saving file',cat_file)
    return catalog
Example #18
def get_table(name, radius=10, pick=None,
              pm_threshold=40, align_mag_bounds=(14, 19), guide_mag_bounds=(12, 17)):
    '''
    Parameters
    ----------
    name : str, galaxy name around which to query Vizier for align/guide stars,
           e.g., 'NGC 1052'
    radius : float, arcminutes around which to search for stars
    pick : if None, return all within radius, if "align", choose for align stars,
           if "guide", choose for guide stars
    pm_threshold : float, milliarcsec per year, the maximum proper motion allowed
    align_mag_bounds : tuple of (lower i mag, upper i mag) allowed for align stars
    guide_mag_bounds : tuple of (lower i mag, upper i mag) allowed for guide stars
    
    Returns
    -------
    table : astropy Table object with USNO entries
    '''

    if pick not in [None, 'align', 'guide']:
        raise ValueError('pick needs to be one of {None, "align", "guide"}')
    
    # to select all objects instead of just the first 50
    Vizier.ROW_LIMIT = -1 
    result = Vizier.query_region(name, radius=Angle(radius, 'arcmin'), catalog='USNO-B1.0')
    assert len(result) == 1
    table = result[0]
    if pick is None:
        return table
    pmRA = table['pmRA']
    pmDec = table['pmDE']
    # remove high PM objects
    pm_okay = np.sqrt(pmRA**2 + pmDec**2) < pm_threshold # units should be mas / yr
    table = table[pm_okay]
    
    imag = table['Imag']
    if pick == 'align':
        imag_okay = (align_mag_bounds[0] < imag) & (imag < align_mag_bounds[1])
    elif pick == 'guide':
        imag_okay = (guide_mag_bounds[0] < imag) & (imag < guide_mag_bounds[1])

    return table[imag_okay]
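A brief usage sketch (added for illustration); 'NGC 1052' is the example target named in the docstring:

# Hypothetical calls returning astropy Tables of USNO-B1.0 entries
all_stars = get_table('NGC 1052', radius=10)
align_stars = get_table('NGC 1052', radius=10, pick='align')
print(len(all_stars), len(align_stars))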
Example #19
def get_usnob1_cat(ra, dec, blim):
    ra_u = ra*u.degree
    dec_u = dec*u.degree
    coords = SkyCoord(ra_u, dec_u, frame='icrs') #Should this be ICRS or FK5
    #Only Class 0 (stars) - unable to implement this at the current time. Need to understand
    #USNO-B1 s/g classification
    v = Vizier(columns=['USNO-B1.0', '_RAJ2000', '_DEJ2000', 
                        'B1mag', 'R1mag', 'B2mag', 'R2mag', 
                        'pmRA', 'pmDE', 'Imag', 'B1s/g', '_r'],
                row_limit=500000,
               column_filters={"B2mag":">6", 'B2mag':'<{}'.format(blim)}) #B2mag fainter than 6, brighter than blim
    new_table_list = v.query_region(coords, 
                                 radius=900*u.arcsecond, #Search 900 arcseconds
                                 catalog = 'I/284')
    if len(new_table_list) ==0:
        return None
    else:
        new_table = new_table_list[0]
    #Get the 5000 closest
    new_table.sort('_r')
    if len(new_table) > 5000:
        new_table.remove_rows(np.arange(5000, len(new_table)))  # keep only the 5000 closest
    #Sort with brightest star first
    new_table.sort(['B2mag'])
    

    #Fill in blank values with 99.99
    new_table['B1mag'].fill_value = 99.99
    new_table['R1mag'].fill_value = 99.99
    new_table['B2mag'].fill_value = 99.99
    new_table['R2mag'].fill_value = 99.99
    new_table['Imag'].fill_value = 99.99
    new_table['pmRA'].fill_value = 99.99
    new_table['pmDE'].fill_value = 99.99
    filled_table = new_table.filled()
    filled_table.write('search.ub1', overwrite=True, format='ascii.fixed_width_no_header', delimiter=' ')
    
    searchcenter_ofile = open('searchcenter.ub1', 'w')
    searchcenter_ofile.write('{},{}'.format(ra, dec))
    searchcenter_ofile.close()
    return 'success'
Example #20
def GAIAplx(ra,de):
    v = Vizier(columns=["*", "+_r"], catalog='I/345/gaia2')
    pos=coord.SkyCoord(ra=ra, dec=de,unit=(u.hourangle,u.deg),frame='icrs',obstime='J2000')
    result=v.query_region(pos, radius="10s", catalog='I/345/gaia2')
    # Moving the positions to 2000
    try:
        nlines=len(result[0]['RA_ICRS'])
        deltat=-15.5
        sep=[]
        for ig,name in enumerate(result[0]['Source']):
            raold=result[0]['RA_ICRS'].data[ig]+(result[0]['pmRA'].data[ig] *deltat)/3600000.
            deold=result[0]['DE_ICRS'].data[ig]+(result[0]['pmDE'].data[ig] *deltat)/3600000.
            posold = coord.ICRS(ra=raold * u.deg, dec=deold * u.deg)
            sep.append(pos.separation(posold).arcsecond)
        indG=np.argmin(sep)
        if sep[indG]<1.5 and result[0]['Plx'].data[indG]>0:
            return str(round(result[0]['Plx'].data[indG],2)), str(round(result[0]['e_Plx'].data[indG],2))
    except:
        return 'NULL','NULL'

    return 'NULL','NULL'
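A minimal usage sketch (an addition); the sexagesimal position is arbitrary, with RA in hours and Dec in degrees as the function expects:

# Hypothetical call; returns the Gaia DR2 parallax and its error as strings, or 'NULL'
plx, e_plx = GAIAplx('05 35 17.3', '-05 23 28')
print(plx, e_plx)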
Example #21
    def gaia_query(self, ra_deg, dec_deg, rad_deg, max_mag=20,
                   max_coo_err=1,
                   max_sources=100):

        """
        Query Gaia DR1 @ VizieR using astroquery.vizier
        @param ra_deg: RA in degrees
        @type ra_deg: float
        @param dec_deg: DEC in degrees
        @type dec_deg: float
        @param rad_deg: field radius in degrees
        @type rad_deg: float
        @param max_mag: Limit G magnitude of queried object(s)
        @type max_mag: float
        @param max_coo_err: Max error of position
        @type max_coo_err: float
        @param max_sources: Maximum number of sources
        @type max_sources: int
        @returns: astropy.table object
        """

        vquery = Vizier(columns=['Source', 'RA_ICRS',
                                 'DE_ICRS', 'e_RA_ICRS',
                                 'e_DE_ICRS', 'phot_g_mean_mag',
                                 'pmRA', 'pmDE',
                                 'e_pmRA', 'e_pmDE',
                                 'Epoch', 'Plx'],
                        column_filters={"phot_g_mean_mag":
                                        ("<{:f}".format(max_mag)),
                                        "e_RA_ICRS":
                                        ("<{:f}".format(max_coo_err)),
                                        "e_DE_ICRS":
                                        ("<{:f}".format(max_coo_err))},
                        row_limit=max_sources)
 
        field = coord.SkyCoord(ra=ra_deg, dec=dec_deg,
                               unit=(u.deg, u.deg),
                               frame='icrs')
        return(vquery.query_region(field,
                                   width="{:f}d".format(rad_deg),
                                   catalog="I/337/gaia")[0])
Example #22
def gaia_query(ra_deg, dec_deg, rad_deg, maxmag=20, 
               maxsources=10000): 
    """
    Query Gaia DR1 @ VizieR using astroquery.vizier
    parameters: ra_deg, dec_deg, rad_deg: RA, Dec, field 
                                          radius in degrees
                maxmag: upper limit G magnitude (optional)
                maxsources: maximum number of sources
    returns: astropy.table object
    """
    vquery = Vizier(columns=['Source', 'RA_ICRS', 'DE_ICRS', 
                             'phot_g_mean_mag'], 
                    column_filters={"phot_g_mean_mag": 
                                    ("<%f" % maxmag)}, 
                    row_limit = maxsources) 
 
    field = coord.SkyCoord(ra=ra_deg, dec=dec_deg, 
                           unit=(u.deg, u.deg), 
                           frame='icrs')
    return vquery.query_region(field, 
                               width=("%fd" % rad_deg), 
                               catalog="I/337/gaia")[0] 
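A minimal usage sketch (not part of the original); the field centre and radius are illustrative:

# Hypothetical call; returns an astropy Table of Gaia DR1 sources in the field
gaia = gaia_query(ra_deg=56.75, dec_deg=24.12, rad_deg=0.2, maxmag=15)
print(gaia['Source', 'phot_g_mean_mag'][:10])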
Example #23
def query_cat(catalog, min_ra, max_ra, min_dec, max_dec, columns=None,
        column_filters=None):
    """
    Use vizquery to get a reference catalog from vizier
    """
    from astroquery.vizier import Vizier
    import numpy as np
    from astropy.coordinates import SkyCoord
    # Build vizquery statement
    width = int(np.ceil((max_ra-min_ra)*60))
    height = int(np.ceil((max_dec-min_dec)*60))
    center = SkyCoord((min_ra+max_ra)/2, (min_dec+max_dec)/2, unit='deg')
    
    # If no column filters are specified, use the defaults
    if column_filters is None:
        if catalog.startswith('SDSS'):
            column_filters = {
                'cl': '=6',
                'q_mode':'=+'
            }
        elif catalog.startswith('UKIDSS'):
            column_filters = {
                'cl': '=-1',
                'm': '=1'
            }
        else:
            column_filters = {}
    # Query the catalog in Vizier
    logger.info('columns:{0}'.format(columns))
    v = Vizier(columns=columns, column_filters=column_filters, 
        catalog=catalog_info[catalog]['info']['vizier_id'])
    v.ROW_LIMIT=200000
    result = v.query_region(center, width='{0}m'.format(width*1.25), 
        height='{0}m'.format(height*1.25))
    logger.warning(result[0].columns)
    refcat = result[0]
    
    return refcat
Example #24
def query(coords,catalog,cols,radius=6*u.arcsec,fill_val=-99.99,full=False):

    results = Vizier.query_region(coords,catalog=catalog,radius=radius)
    if len(results) == 0:
        return None

    if full:
        return results

    results = results[0]
    
    # if dict, remap colnames
    if isinstance(cols,dict):
        for k,v in cols.items():
            results.rename_column(k,v)
        names = cols.values()
    else:
        names = cols

    # make new columns one-to-one with coords
    newtable = Table(masked=True)
    for col in names:
        oldcol = results[col]
        newcol = MaskedColumn(data=np.zeros(len(coords),dtype=oldcol.dtype),unit=oldcol.unit,name=col,mask=np.ones(len(coords),dtype=bool),fill_value=fill_val)

        # copy data from results
        for row in results:
            if not row[col]:
                continue
            # _q IS 1-BASED INDEXING?!
            newcol[row['_q']-1] = row[col]
            newcol.mask[row['_q']-1] = False

        newtable.add_column(newcol)

    return newtable
Example #25
def plot_wise_cc(position, radius=5):
    """
    Plot a WISE color-color diagram with source at position location overlaid. (same as that in download module)

    Params:
    ----------
    position: SkyCoord, list or tuple
        position of interest
    radius: float or int, 5 by default
        crossmatch radius with WISE catalogue

    Returns:
        fig, ax
    """
    if isinstance(position,tuple) or isinstance(position,list):
        position = SkyCoord(*position, unit=u.deg)

    v = Vizier(columns=['*', '+_r'])
    tablelist = v.query_region(position, radius=radius * u.arcsec, catalog='II/328/allwise')
    fig = plt.figure(figsize=(6, 6))
    ax = fig.add_subplot(111)
    
    ccplot_path = pkg_resources.resource_filename(
        __name__, "./setups/wise_cc.png"
    )

    
    if len(tablelist) > 0:
        result = tablelist[0][0]
        resultcoord = SkyCoord(ra=result['RAJ2000'], dec=result['DEJ2000'], unit=u.deg)
        sep = resultcoord.separation(position).arcsec
        source = pd.Series({'3.4 micron': result['W1mag'],
                            '4.6 micron': result['W2mag'],
                            '12 micron': result['W3mag']})

        w4_12 = source['4.6 micron'] - source['12 micron']
        w3_4 = source['3.4 micron'] - source['4.6 micron']

        ax.scatter(w4_12, w3_4, marker='*', color='magenta', s=100, label=result['AllWISE'])
        im = plt.imread(ccplot_path)
        ax.imshow(im, extent=[-1, 7, -0.5, 4], aspect=2)

        ax.set_xticks([0, 2, 4, 6])
        ax.set_yticks([0, 1, 2, 3, 4])
        ax.set_xlim(-1, 7)
        ax.set_ylim(-0.5, 4)
        ax.set_xlabel(r'[$4.6\mu m] - [12\mu m$] mag')
        ax.set_ylabel(r'[$3.4\mu m] - [4.6\mu m$] mag')
        ax.set_title('{} - {:.1f} arcsec separation'.format(result["AllWISE"], sep))
        ax.legend()
    else:
        im = plt.imread(ccplot_path)
        ax.imshow(im, extent=[-1, 7, -0.5, 4], aspect=2)

        ax.set_xticks([0, 2, 4, 6])
        ax.set_yticks([0, 1, 2, 3, 4])
        ax.set_xlabel(r'[$4.6\mu m] - [12\mu m$] mag')
        ax.set_ylabel(r'[$3.4\mu m] - [4.6\mu m$] mag')
        ax.set_title(f"No WISE crossmatch")
        ax.legend()

    return fig, ax
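A short usage sketch (added for illustration); the position is arbitrary, and matplotlib's pyplot is assumed to be imported as plt, as in the function body:

# Hypothetical call with an (ra, dec) tuple in degrees
fig, ax = plot_wise_cc((83.82, -5.39), radius=5)
plt.show()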
Example #26
from astroquery.vizier import Vizier
from astropy import coordinates
from astropy import units as u

v = Vizier(keywords=['stars:white_dwarf'])

c = coordinates.SkyCoord(0, 0, unit=('deg', 'deg'), frame='icrs')
result = v.query_region(c, radius=2*u.deg)

print(len(result))
# 44

result[0].pprint()
"""
   LP    Rem Name  RA1950   DE1950  Rmag l_Pmag Pmag u_Pmag spClass     pm    pmPA  _RA.icrs   _DE.icrs
                  "h:m:s"  "d:m:s"  mag         mag                 arcs / yr deg              "d:m:s"
-------- --- ---- -------- -------- ---- ------ ---- ------ ------- --------- ---- ---------- ---------
584-0063          00 03 23 +00 01.8 18.1        18.3              f     0.219   93 00 05 56.8 +00 18 41
643-0083          23 50 40 +00 33.4 15.9        17.0              k     0.197   93 23 53 13.7 +00 50 15
584-0030          23 54 05 -01 32.3 16.6        17.7              k     0.199  193 23 56 38.8 -01 15 26
"""
Example #27
def makelist(ra, dec, ccd, date, lname, variable=0., transients=0.):

    vname = date + '/var/' + lname + '.var'
    tname = date + '/var/' + lname + '.trans'
    rname = date + '/reg/' + lname + '.reg'
    radec = date + '/radec/' + lname + '.txt'

    #Define size of camera in pixels:
    xsi = 8176.
    ysi = 6132.

    #Corresponding size in degs:
    wid = xsi * 1.24 / 3600.
    hei = ysi * 1.24 / 3600.

    #Define the columns to get from UCAC, magnitude filter and set unlimited rows:
    v = Vizier(columns=['_RAJ2000', '_DEJ2000', 'Vmag'],
               column_filters={"Vmag": "<17."})
    v.ROW_LIMIT = -1

    #Query UCAC4
    ucac = v.query_region(\
                            coord.SkyCoord(ra=ra, dec=dec,\
                            unit=(u.deg, u.deg),\
                            frame='icrs'),\
                            height=str(hei+1)+"d", width=str(wid+1)+"d",\
                            catalog=["UCAC4"])[0]

    #For SDSS, need to calculate bounding box:
    declim = dec + np.array([-hei, hei]) / 2.
    declim_r = np.pi * declim / 180.
    wid_r = np.pi * wid / 180.
    d_lim_r = 2. * np.arcsin(np.sin(wid_r / 4.) / np.cos(declim_r))
    ralim = ra + np.array([-1, 1]) * np.max(180. * d_lim_r / np.pi)

    #Query SDSS:
    query = "SELECT p.objid, p.ra, p.dec, p.g, "+\
    "p.deVRad_g, p.deVPhi_g, p.deVAB_g, "+\
    "p.type, p.flags_g, (flags & dbo.fPhotoFlags('SATURATED')) as SAT "+\
    "FROM PhotoPrimary AS p "+\
    "WHERE  p.ra BETWEEN "+str(ralim[0])+" AND "+str(ralim[1])+" AND "+\
    "p.dec BETWEEN "+str(declim[0])+" AND "+str(declim[1])+" AND "+\
    "p.g BETWEEN 16 AND 22"# AND "+\
    #"p.htmid*37 & 0x000000000000FFFF < (650 * 10)"
    sdss = SDSS.query_sql(query)

    #Remove saturated sources and separate gals from stars:
    o = np.where(sdss['SAT'] == 0)
    sdss = sdss[o]

    #Find and remove matching sources:
    c = coord.SkyCoord(ra=sdss['ra'] * u.degree, dec=sdss['dec'] * u.degree)
    cs = coord.SkyCoord(ra=np.array(ucac['_RAJ2000'])*u.degree, \
                        dec=np.array(ucac['_DEJ2000'])*u.degree)
    idx, d2d, d3d = cs.match_to_catalog_sky(c)
    match = np.where(d2d.value * 3600. > 1.0)
    ucac = ucac[match]

    #Group stars from SDSS and UCAC together:
    o = np.where(sdss['type'] == 6)
    sdss_stars = sdss[o]
    stars = np.append(np.array([ucac['_RAJ2000'], ucac['_DEJ2000'], ucac['Vmag']]),\
                      np.array([sdss_stars['ra'], sdss_stars['dec'], sdss_stars['g']]),\
                      axis=1)

    #Extract gals from SDSS:
    o = np.where(sdss['type'] != 6)
    sdss_gals = sdss[o]
    gals = np.array([sdss_gals['ra'], sdss_gals['dec'],\
                     sdss_gals['g'],\
                     sdss_gals['deVRad_g'], sdss_gals['deVAB_g'], sdss_gals['deVPhi_g']])

    #Generate WCS to convert (RA,Dec) to (x,y)
    w = wcs.WCS(naxis=2)
    w.wcs.crpix = np.array([4088, 3066]) + np.random.normal(scale=3., size=2)
    w.wcs.cdelt = np.array([3.444e-4, 3.444e-4])
    w.wcs.crval = [ra, dec]  #Pointing position of telescope.
    w.wcs.ctype = ["RA---TAN", "DEC--TAN"]

    pixcrds = np.column_stack((stars[0, :], stars[1, :]))
    pixcrdg = np.column_stack((gals[0, :], gals[1, :]))

    worldg = w.wcs_world2pix(pixcrdg, 1)
    worlds = w.wcs_world2pix(pixcrds, 1)

    gals[0, :] = worldg[:, 0]
    gals[1, :] = worldg[:, 1]

    stars[0, :] = worlds[:, 0]
    stars[1, :] = worlds[:, 1]

    ind = [-1]
    if variable > 0:
        #Select a set proportion of m<19 stars and add random variation:
        bright = np.squeeze(np.where((stars[2,:]<19) &\
                                      (stars[0,:]>0) & (stars[0,:]<xsi) &\
                                      (stars[1,:]>0) & (stars[1,:]<ysi)))
        n_vary = int(np.round(variable * (np.size(bright))))
        if n_vary > 0:
            ind = np.random.choice(bright, n_vary, replace=False)
            orig = stars[2, ind]
            stars[2, ind] = stars[2, ind] + np.random.normal(0, 1, ind.size)

            np.savetxt(vname, np.c_[pixcrds[ind,0], pixcrds[ind,1], orig, stars[2,ind]], \
                       header = 'RA Dec Orig_Mag New_Mag', \
                       fmt='%9.5f %8.5f %5.2f %5.2f')

    if transients > 0:
        #Randomly distribute transients around image:
        tx = np.random.uniform(0, xsi, transients)
        ty = np.random.uniform(0, ysi, transients)
        tm = np.random.uniform(14, 19, transients)

        t = np.array([tx, ty, tm])
        stars = np.append(stars, t, axis=1)
        txy = (np.array([tx, ty])).transpose()
        pixcrdt = w.wcs_pix2world(txy, 1)

        #Append transients to stars:
        pixcrds = np.append(pixcrds, pixcrdt, axis=0)

        np.savetxt(tname, np.c_[pixcrdt[:,0], pixcrdt[:,1], tm], \
                   header = 'RA Dec Mag', \
                   fmt='%9.5f %8.5f %5.2f')

    #Write stars to file:
    myfile = open('templist' + ccd + '.list', 'w')
    regfile = open(rname, 'w')
    regfile.write('image\n')

    obj = np.concatenate(
        (100 * np.ones(pixcrds[:, 0].size), 200 * np.ones(pixcrdg[:, 0].size)))
    ras = np.concatenate((pixcrds[:, 0], pixcrdg[:, 0]))
    decs = np.concatenate((pixcrds[:, 1], pixcrdg[:, 1]))
    mags = np.concatenate((stars[2, :], gals[2, :]))

    np.savetxt(radec, \
               np.c_[obj, ras, decs, mags], \
               fmt='%3i %9.5f %8.5f %5.2f', \
               header='Type RA DEC Mag')

    for i in range(0, (stars.shape)[1]):
        myfile.write((str(100) + ' ' + str(stars[0, i]) + ' ' +
                      str(stars[1, i]) + ' ' + str(stars[2, i])) + '\n')
        if np.any(ind == i):
            regfile.write('circle(' + str(stars[0, i]) + ',' +
                          str(stars[1, i]) + ',3) #color=blue\n')
        else:
            regfile.write('circle(' + str(stars[0, i]) + ',' +
                          str(stars[1, i]) + ',3)\n')

    #Write gals to file:
    for i in range(0, (gals.shape)[1]):
        myfile.write((str(200)+' '+str(gals[0,i])+' '+str(gals[1,i])+' '+str(gals[2,i])+' ' +\
                      str(0)+' ' +str(0)+' ' +str(0)+' ' +str(0)+' ' + str(gals[3,i])+' '+\
                      str(gals[4,i])+' '+str(gals[5,i])+'\n'))
        regfile.write('circle(' + str(gals[0, i]) + ',' + str(gals[1, i]) +
                      ',3) #color=red\n')

    myfile.close()
    regfile.close()
Example #28
def get_wcs(pattern):
    for filename in pattern:

        which_hdu = choose_hdu(filename)
        header = fits.getheader(filename, which_hdu)

        # Gets file data and rotates the image
        data = fits.getdata(filename, ext=0)
        rot = rotate(data, -90, reshape=False)

        # extracts the light sources from the image (based on sigma, FWHM, thresholds...)
        threshold = detect_threshold(rot, nsigma=2.)
        sigma = 3.0 * gaussian_fwhm_to_sigma  # FWHM = 3.
        kernel = Gaussian2DKernel(sigma, x_size=3, y_size=3)
        kernel.normalize()
        mean, median, std = sigma_clipped_stats(rot, sigma=3)
        daofind = DAOStarFinder(fwhm=3.0, threshold=5. * std)
        sources = daofind(rot - median)
        for col in sources.colnames:
            sources[col].info.format = '%.8g'  # for consistent table output

    # Pixel coordinates of the sources
        x1 = np.array(sources['xcentroid'])
        y1 = np.array(sources['ycentroid'])

        # Gets the coordinates (deg) of the target
        catalog_object_coor = SkyCoord.from_name(header['OBJECT'], parse=True)
        xobj = catalog_object_coor.ra.degree
        yobj = catalog_object_coor.dec.degree

        # Transforms pixels into degs
        Bin = float(header['CCDSUM'][0])
        Res = 0.25  #arcsec/px
        f_arcsec = Bin * Res  #arcsec/px
        f = f_arcsec / 3600

        # Offsets (in pixels) from a reference detected source, converted to degrees
        dx = x1 - x1[30]
        x_deg = xobj + (f * dx)

        dy = y1 - y1[30]
        y_deg = yobj + (f * dy)

        # plots the file's coordinates
        fig1, ax = plt.subplots()
        ax.plot(x_deg, y_deg, 'ok', ms=5)
        ax.plot(xobj, yobj, 'oy', mfc='none', ms=10)

        # Gets a catalog by name and its objects coordinates
        coo = SkyCoord.from_name('GJ3470')
        rad = 40 * u.arcmin
        cat_id = 'I/284/out'
        v = Vizier(catalog=cat_id,
                   columns=["RAJ2000", "DEJ2000", "Plx", "RAJ2000", "DEJ2000"],
                   column_filters={
                       'RAJ2000': '!null',
                       'DEJ2000': '!null'
                   })
        v.ROW_LIMIT = -1
        tab = v.query_region(coo, radius=rad, catalog=cat_id)[0]
        x2 = tab['RAJ2000']
        y2 = tab['DEJ2000']

        # plots the file's coordinates (blue), the target (yellow) and the catalog (red & black)
        fig2, ax = plt.subplots()
        ax.set_aspect('equal')
        ax.plot(x2, y2, '.k', ms=1)
        pmdd = .5
        cond = ((x2 - xobj)**2 + (y2 - yobj)**2) < pmdd**2
        ax.plot(x2[cond], y2[cond], '.r', ms=1)
        ax.plot(x_deg, y_deg, 'ob', ms=2)
        ax.plot(xobj, yobj, 'oy', ms=3)
        plt.show()
        print(coo.galactic)
Example #29
def queryGAIA2(ra, dec, boxdeg, maxsources=10000):
    """
    Queries GAIA2 table at Vizier (I/345/gaia2)
      ra  = center RA of field
      dec = center DEC of field
      boxdeg = box size around ra/dec for source retrieval (degrees)

    Returns:  array of stars with format
              IDa IDb RA DEC ...
    """

    # Absolute floor to photometric errors (mag)
    min_e = 0.01  # mag

    # GAIA2 has ICRS coords precessed to Epoch=2015.5
    vquery = Vizier(columns=[
        'Source', 'RA_ICRS', 'DE_ICRS', 'Gmag', 'e_Gmag', '_RAJ2000',
        '_DEJ2000', 'Plx', 'e_Plx', 'pmRA', 'pmDE', 'BP-RP', 'Teff', 'AG',
        'E(BP-RP)'
    ],
                    row_limit=maxsources)

    field = coord.SkyCoord(ra=ra, dec=dec, unit=(u.deg, u.deg), frame='fk5')
    D = vquery.query_region(field,
                            width=("%fd" % boxdeg),
                            catalog="I/345/gaia2")
    try:
        Data = D[0]
    except Exception:
        return np.array([])

    # dummy g-i color, not used any more
    g_i = 1.

    # Output tuple
    oo = []
    for i, obj in enumerate(Data['Source']):

        oid_a = Data['Source'][i]
        oid_b = 0

        # Preserve these values as-is
        ra = Data['_RAJ2000'][i]
        dec = Data['_DEJ2000'][i]
        pmra = Data['pmRA'][i]
        pmde = Data['pmDE'][i]
        teff = Data['Teff'][i]

        # GAIA "G" mag after extinction correction
        Gc = Data['Gmag'][i] - Data['AG'][i]
        Gc_e = Data['e_Gmag'][i]

        # GAIA BP-RP mag after extinction correction
        # --> Note: the parentheses in the VizieR column name 'E(BP-RP)' become
        #     underscores in the astropy table key, hence 'E_BP-RP_'
        BmRc = Data['BP-RP'][i] - Data['E_BP-RP_'][i]

        # Former g-band magnitude calculation using g-i=1 mag fixed
        # --> this looks like an equation from GAIA documentation for (G-g) vs (g-i)
        # g = G + 0.0939 + 0.6758 * g_i + 0.04 * g_i**2 - 0.003 * g_i**3

        # New g-band magnitude calculation!!

        # Reference: Table A.2 from DR2 paper A&A 616, A4 (2018)
        #  "Gaia Data Release 2: Photometric content and validation"
        #   https://ui.adsabs.harvard.edu/abs/2018A%26A...616A...4E/abstract
        g = Gc - 0.13518 + BmRc * (0.46245 + BmRc *
                                   (0.25171 - BmRc * 0.021349))

        # Use Parallax (in mas) to calculate M_g
        plx = Data['Plx'][i]
        plx_e = Data['e_Plx'][i]

        # Definition of "good parallax" measurement from Gaia
        if plx > 0 and plx_e < 0.5 * plx:

            # Distance modulus + uncertainty
            dmod = 10.0 - 5 * np.log10(plx)
            dmod_e = 2.1715 * (plx_e / plx)

            # M_g:  g-band absolute magnitude + uncertainty
            gabs = g - dmod
            gabs_e = np.sqrt(Gc_e**2 + dmod_e**2 + min_e**2)

        else:

            # Insufficient parallax measurement: dmod->0, gabs->g
            # (also set:  dmod_e==0, gabs_e==0)
            dmod, dmod_e = 0, 0
            gabs, gabs_e = g, 0

        # Check against GAIA2 flags
        if np.any([j for j in Data.mask[i]]):
            continue

        # Construct output tuple
        oo.append([
            oid_a, oid_b, ra, dec, pmra, pmde, teff, Gc, Gc_e, BmRc, g, plx,
            plx_e, dmod, dmod_e, gabs, gabs_e
        ])

    # Done
    return np.array(oo)
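A minimal usage sketch (an addition); the field centre and box size are illustrative:

# Hypothetical call; returns a NumPy array with one row per accepted Gaia DR2 star
stars = queryGAIA2(ra=150.0, dec=2.2, boxdeg=0.25)
print(stars.shape)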
Example #30
def find_source(obs, return_all_sources=True):
    """Find a source given some coordinates.

    Args:
        obs (dict): an Observation object, created in build_source_object().

    Returns:
        out (dict): a dictionary of the RA/dec coords we are observing and the
                    Henry Draper number of the star we think is most likely.
    """
    ra_dec = altaz_to_radec(obs)

    coords = coordinates.SkyCoord(ra=ra_dec[0],
                                  dec=ra_dec[1],
                                  unit=(u.deg, u.deg),
                                  frame='icrs')
    # Get the actual results
    # For some reason, if this goes too big it stops seeing the actual source?!
    r = 100 * u.arcminute
    results = Vizier.query_region(coords, radius=r, catalog='V/50')[0]
    df = results.to_pandas()

    candidate_sources = filter(None, [n for n in df['HD']])
    sources = []
    dmax, vmax = 0, 0
    for s in candidate_sources:
        source_info = df.loc[df['HD'] == s]
        mag = round(float(source_info['Vmag']), 2)

        temp_ra = source_info['RAJ2000'].tolist()[0]
        temp_dec = source_info['DEJ2000'].tolist()[0]
        source_ra_hms = tuple(map(float, temp_ra.split()))
        source_dec_dms = tuple(map(float, temp_dec.split()))
        source_ra = Angle(source_ra_hms, unit='hourangle').degree
        source_dec = Angle(source_dec_dms, unit=u.deg).degree

        dist_from_center = np.sqrt((source_ra - ra_dec[0])**2 +
                                   (source_dec - ra_dec[1])**2)

        score = float(c1 * mag + c2 * dist_from_center)
        source_dict = {
            'HD': source_info['HD'].values[0],
            'Name': source_info['Name'].values[0],
            'RA': source_ra,
            'DEC': source_dec,
            'Distance': dist_from_center,
            'Vmag': source_info['Vmag'],
            'Score': score
        }

        sources.append(source_dict)

        dmax = dist_from_center if dist_from_center > dmax else dmax
        vmax = mag if mag > vmax else vmax

    for s in range(len(sources)):
        d = sources[s]['Distance'] / dmax
        mag = sources[s]['Vmag'].values[0] / vmax
        score = c1 * mag + c2 * d
        sources[s]['Score'] = score
        sources[s]['Scaled-Distance'] = d
        sources[s]['Scaled-Mag'] = mag

    sources_df = pd.DataFrame(sources)

    # This loop is supremely janky, but df.loc'ing (below) wasn't working.
    # best_source = sources_df.loc[sources_df['Score'] == sources_df['Score'].min]
    best_source_idx = 0
    best_score = 10000
    for i in range(len(sources)):
        score = sources[i]['Score']
        if score < best_score:
            best_source_idx = i
            best_score = score

    print(score)
    out = {
        'Coords': ra_dec,
        'HD-Name': 'HD' + str(int(sources[best_source_idx]['HD'])),
    }
    return out
Example #31
def add_gaia_figure_elements(tpf, fig, magnitude_limit=18):
    """Make the Gaia Figure Elements"""
    # Get the positions of the Gaia sources
    c1 = SkyCoord(tpf.ra, tpf.dec, frame='icrs', unit='deg')
    # Use pixel scale for query size
    pix_scale = 4.0  # arcseconds / pixel for Kepler, default
    if tpf.mission == 'TESS':
        pix_scale = 21.0
    # We are querying with a diameter as the radius, overfilling by 2x.
    from astroquery.vizier import Vizier
    Vizier.ROW_LIMIT = -1
    result = Vizier.query_region(c1,
                                 catalog=["I/345/gaia2"],
                                 radius=Angle(
                                     np.max(tpf.shape[1:]) * pix_scale,
                                     "arcsec"))
    no_targets_found_message = ValueError(
        'Either no sources were found in the query region '
        'or Vizier is unavailable')
    too_few_found_message = ValueError(
        'No sources found brighter than {:0.1f}'.format(magnitude_limit))
    if result is None:
        raise no_targets_found_message
    elif len(result) == 0:
        raise too_few_found_message
    result = result["I/345/gaia2"].to_pandas()
    result = result[result.Gmag < magnitude_limit]
    if len(result) == 0:
        raise no_targets_found_message
    radecs = np.vstack([result['RA_ICRS'], result['DE_ICRS']]).T
    coords = tpf.wcs.all_world2pix(radecs, 0)
    year = ((tpf.astropy_time[0].jd - 2457206.375) * u.day).to(u.year)
    pmra = (
        (np.nan_to_num(np.asarray(result.pmRA)) * u.milliarcsecond / u.year) *
        year).to(u.arcsec).value
    pmdec = (
        (np.nan_to_num(np.asarray(result.pmDE)) * u.milliarcsecond / u.year) *
        year).to(u.arcsec).value
    result.RA_ICRS += pmra
    result.DE_ICRS += pmdec

    # Gently size the points by their Gaia magnitude
    sizes = 64.0 / 2**(result['Gmag'] / 5.0)
    one_over_parallax = 1.0 / (result['Plx'] / 1000.)
    source = ColumnDataSource(data=dict(ra=result['RA_ICRS'],
                                        dec=result['DE_ICRS'],
                                        source=result['Source'].astype(str),
                                        Gmag=result['Gmag'],
                                        plx=result['Plx'],
                                        one_over_plx=one_over_parallax,
                                        x=coords[:, 0] + tpf.column,
                                        y=coords[:, 1] + tpf.row,
                                        size=sizes))

    r = fig.circle('x',
                   'y',
                   source=source,
                   fill_alpha=0.3,
                   size='size',
                   line_color=None,
                   selection_color="firebrick",
                   nonselection_fill_alpha=0.0,
                   nonselection_line_color=None,
                   nonselection_line_alpha=0.0,
                   fill_color="firebrick",
                   hover_fill_color="firebrick",
                   hover_alpha=0.9,
                   hover_line_color="white")

    fig.add_tools(
        HoverTool(tooltips=[("Gaia source", "@source"), ("G", "@Gmag"),
                            ("Parallax (mas)",
                             "@plx (~@one_over_plx{0,0} pc)"),
                            ("RA", "@ra{0,0.00000000}"),
                            ("DEC", "@dec{0,0.00000000}"), ("x", "@x"),
                            ("y", "@y")],
                  renderers=[r],
                  mode='mouse',
                  point_policy="snap_to_data"))
    return fig, r
Example #32
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 30 13:16:57 2018

@author: Наталия 
"""
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery.vizier import Vizier

center_coords = SkyCoord('02h21m00s +57d07m42s')
vizier = Vizier(column_filters={'Bmag': '<13'}, row_limit=10000)
usno_sources = vizier.query_region(
    center_coords,
    width=90 * u.arcmin,
    height=90 * u.arcmin,
    catalog='USNO-A2.0',
)[0]
print(usno_sources)

ra = usno_sources['RAJ2000']._data
dec = usno_sources['DEJ2000']._data
x = np.vstack(
    ((ra - ra.mean()) * np.cos(dec / 180 * np.pi) + ra.mean(), dec)).T

plt.plot(*x.T, '*')
print(x)
Example #33
from astroquery.vizier import Vizier
import astropy.units as u
from astropy.io import fits

#query setup
Vizier.ROW_LIMIT = -1
target = 'M4'
r = 0.4

#making query
tmc = Vizier.query_region(target, radius=r * u.deg,
                          catalog='II/246/out')  #2MASS catalog
ppmxl = Vizier.query_region(target, radius=r * u.deg,
                            catalog='I/317/sample')  # PPMXL proper motion

#get catalog only
jhk = tmc[0]
pm = ppmxl[0]

# retrieve data only from 2MASS columns [3,4,9,11,13,15,17,19,21]
col1 = fits.Column(name=jhk.colnames[3],
                   format=jhk.dtype[3],
                   array=jhk[jhk.colnames[3]])
col2 = fits.Column(name=jhk.colnames[4],
                   format=jhk.dtype[4],
                   array=jhk[jhk.colnames[4]])
col3 = fits.Column(name=jhk.colnames[9],
                   format=jhk.dtype[9],
                   array=jhk[jhk.colnames[9]])
col4 = fits.Column(name=jhk.colnames[11],
                   format=jhk.dtype[11],
Example #34
    def load(self, ra=0.0, dec=90.0, radius=0.2, write=True, faintlimit=None):

        # select the columns that should be downloaded from UCAC
        catalog = 'UCAC4'
        ratag = '_RAJ2000'
        dectag = '_DEJ2000'
        if catalog == 'UCAC4':
            vcat = 'I/322A/out'
            rmagtag = 'f.mag'
            jmagtag = 'Jmag'
            vmagtag = 'Vmag'
            pmratag, pmdectag = 'pmRA', 'pmDE'
            columns = ['_RAJ2000', '_DEJ2000', 'pmRA', 'pmDE', 'f.mag', 'Jmag', 'Vmag', 'UCAC4']  # '_DEJ2000' (not '_DECJ2000') to match the dectag used below

        # create a query through Vizier
        v = Vizier(catalog=vcat, columns=columns)
        v.ROW_LIMIT = -1

        # either reload an existing catalog file or download to create a new one
        starsfilename = settings.intermediates + self.directory
        starsfilename += "{catalog}ra{ra:.4f}dec{dec:.4f}rad{radius:.4f}".format(
            catalog=catalog,
            ra=ra,
            dec=dec,
            radius=radius) + '.npy'

        try:
            # try to load a raw catalog file
            logger.info("loading a catalog of stars from {0}".format(starsfilename))
            t = np.load(starsfilename)
        except IOError:
            logger.info('could not load stars')
            # otherwise, make a new query
            logger.info("querying {catalog} "
                        "for ra = {ra}, dec = {dec}, radius = {radius}".format(
                catalog=catalog, ra=ra, dec=dec, radius=radius))
            # load via astroquery
            t = v.query_region(astropy.coordinates.ICRS(ra=ra, dec=dec,
                                                        unit=(astropy.units.deg, astropy.units.deg)),
                               radius='{:f}d'.format(radius), verbose=True)[0]

            # save the queried table
            np.save(starsfilename, t)

        # define the table
        self.table = astropy.table.Table(t)

        ras = np.array(t[:][ratag])
        decs = np.array(t[:][dectag])
        pmra = np.array(t[:][pmratag])
        pmdec = np.array(t[:][pmdectag])
        rmag = np.array(t[:][rmagtag])
        jmag = np.array(t[:][jmagtag])
        vmag = np.array(t[:][vmagtag])

        rbad = (np.isfinite(rmag) == False) * (np.isfinite(vmag))
        rmag[rbad] = vmag[rbad]
        rbad = (np.isfinite(rmag) == False) * (np.isfinite(jmag))
        rmag[rbad] = jmag[rbad]

        jbad = (np.isfinite(jmag) == False) * (np.isfinite(vmag))
        jmag[jbad] = vmag[jbad]
        jbad = (np.isfinite(jmag) == False) * (np.isfinite(rmag))
        jmag[jbad] = rmag[jbad]

        vbad = (np.isfinite(vmag) == False) * (np.isfinite(rmag))
        vmag[vbad] = rmag[vbad]
        vbad = (np.isfinite(vmag) == False) * (np.isfinite(jmag))
        vmag[vbad] = jmag[vbad]

        temperatures = relations.pickles(rmag - jmag)
        imag = rmag - relations.davenport(rmag - jmag)

        pmra[np.isfinite(pmra) == False] = 0.0
        pmdec[np.isfinite(pmdec) == False] = 0.0

        ok = np.isfinite(imag)
        if faintlimit is not None:
            ok *= imag <= faintlimit

        logger.info("found {0} stars with {1} < V < {2}".format(np.sum(ok), np.min(rmag[ok]), np.max(rmag[ok])))
        self.ra = ras[ok]
        self.dec = decs[ok]
        self.pmra = pmra[ok]
        self.pmdec = pmdec[ok]
        self.tmag = imag[ok]
        self.temperature = temperatures[ok]
        self.epoch = 2000.0
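
For reference, here is a minimal sketch of the same kind of column-selected Vizier query, written with a plain SkyCoord and a Quantity radius instead of the formatted 'd'-suffixed string used above; the centre coordinates are arbitrary, and only the UCAC4 table ID and column names are taken from the example.

import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery.vizier import Vizier

# Column-selected UCAC4 query; ROW_LIMIT = -1 lifts the default 50-row cap.
v = Vizier(catalog="I/322A/out",
           columns=["_RAJ2000", "_DEJ2000", "pmRA", "pmDE", "f.mag", "Jmag", "Vmag"])
v.ROW_LIMIT = -1

center = SkyCoord(ra=0.0 * u.deg, dec=90.0 * u.deg, frame="icrs")
tables = v.query_region(center, radius=0.2 * u.deg)
stars = tables[0] if len(tables) > 0 else None  # first returned table, if any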
Beispiel #35
0
    def _add_gaia_figure_elements(self, tpf, fig, magnitude_limit=18):
        """Make the Gaia Figure Elements"""
        # Get the positions of the Gaia sources
        c1 = SkyCoord(tpf.ra, tpf.dec, frame='icrs', unit='deg')
        # Use pixel scale for query size
        pix_scale = 21.0
        # We are querying with a diameter as the radius, overfilling by 2x.
        from astroquery.vizier import Vizier
        Vizier.ROW_LIMIT = -1
        result = Vizier.query_region(c1,
                                     catalog=["I/345/gaia2"],
                                     radius=Angle(
                                         np.max(tpf.shape[1:]) * pix_scale,
                                         "arcsec"))
        no_targets_found_message = ValueError(
            'Either no sources were found in the query region '
            'or Vizier is unavailable')
        too_few_found_message = ValueError(
            'No sources found brighter than {:0.1f}'.format(magnitude_limit))
        if result is None:
            raise no_targets_found_message
        elif len(result) == 0:
            raise too_few_found_message
        result = result["I/345/gaia2"].to_pandas()
        result = result[result.Gmag < magnitude_limit]
        if len(result) == 0:
            raise no_targets_found_message
        radecs = np.vstack([result['RA_ICRS'], result['DE_ICRS']]).T
        coords = tpf.wcs.all_world2pix(radecs, 0)
        try:
            year = ((tpf.time[0].jd - 2457206.375) * u.day).to(u.year)
        except:
            year = ((tpf.astropy_time[0].jd - 2457206.375) * u.day).to(u.year)
        pmra = ((np.nan_to_num(np.asarray(result.pmRA)) * u.milliarcsecond /
                 u.year) * year).to(u.arcsec).value
        pmdec = ((np.nan_to_num(np.asarray(result.pmDE)) * u.milliarcsecond /
                  u.year) * year).to(u.arcsec).value
        result.RA_ICRS += pmra
        result.DE_ICRS += pmdec

        # Gently size the points by their Gaia magnitude
        sizes = 10000.0 / 2**(result['Gmag'] / 2)

        target = tpf.wcs.world_to_pixel(c1)
        plt.scatter(target[0] + tpf.column,
                    target[1] + tpf.row,
                    s=50,
                    zorder=1000,
                    c='k',
                    marker='x')

        plt.scatter(coords[:, 0] + tpf.column,
                    coords[:, 1] + tpf.row,
                    c='firebrick',
                    alpha=0.5,
                    edgecolors='r',
                    s=sizes)
        plt.scatter(coords[:, 0] + tpf.column,
                    coords[:, 1] + tpf.row,
                    c='None',
                    edgecolors='r',
                    s=sizes)
        plt.xlim([tpf.column - 0.5, tpf.column + tpf.shape[1] - 0.5])
        plt.ylim([tpf.row - 0.5, tpf.row + tpf.shape[2] - 0.5])

        return fig
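
The proper-motion shift above converts the catalogued pmRA/pmDE (mas/yr) into an angular offset over the time elapsed since the Gaia DR2 reference epoch. A small standalone sketch of that arithmetic, with the offsets expressed in degrees and a cos(dec) term applied when converting the RA shift; the function name and inputs are illustrative only.

import numpy as np
import astropy.units as u

def shift_to_epoch(ra_deg, dec_deg, pmra_masyr, pmde_masyr, dt_years):
    # pmra_masyr is assumed to be mu_alpha* (already including cos(dec)),
    # as in the Gaia catalogues.
    dra = (pmra_masyr * u.mas / u.yr * dt_years * u.yr).to(u.deg).value
    dde = (pmde_masyr * u.mas / u.yr * dt_years * u.yr).to(u.deg).value
    ra_new = ra_deg + dra / np.cos(np.deg2rad(dec_deg))
    dec_new = dec_deg + dde
    return ra_new, dec_new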
Beispiel #36
0
def draw_fc(fc_params, bk_image=None, bk_lam=None, do_pdf=False, do_png=False):
    ''' 
   The finding chart master plotting function.
   
   Args:
      fc_params: a dictionary containing the OB parameters
      bk_image: the background image as string, either a SkyView entry, or local filename
      bk_lam: the wavelength of the chart as a string
      do_pdf (bool): save a pdf file, in addition to jpg
      do_png (bool): save a png file, in addition to jpg
   Returns:
      The finding chart filename as a string
   '''

    # Load all the fields dictionaries
    fields = fcm_id.get_fields_dict(fc_params)

    # Get the radius and center of the charts
    (left_radius, right_radius) = fcm_id.get_chart_radius(fc_params)
    (left_center, right_center) = fcm_id.get_chart_center(fc_params)

    # Get the background image in place
    (fn_bk_image_L, survey_L, bk_lam_L) = get_bk_image(bk_image, bk_lam,
                                                       left_center,
                                                       left_radius, fc_params)

    # If this is not the DSS2 Red, then download this as well for the right-hand-side plot
    if not (survey_L == 'DSS2 Red'):
        (fn_bk_image_R, survey_R,
         bk_lam_R) = get_bk_image('DSS2 Red', None, right_center, right_radius,
                                  fc_params)
    else:
        (fn_bk_image_R, survey_R, bk_lam_R) = copy.deepcopy(
            (fn_bk_image_L, survey_L, bk_lam_L))

    # Start the plotting
    plt.close(1)
    fig1 = plt.figure(
        1,
        figsize=(14.17, 7))  #14.17 inches, at 50% = 1 full page plot in A&A.

    ax1 = aplpy.FITSFigure(fn_bk_image_L,
                           figure=fig1,
                           north=fcm_m.set_North,
                           subplot=[0.12, 0.12, 0.35, 0.76])
    ax1.show_grayscale(invert=True,
                       stretch='linear',
                       pmin=fcm_id.get_pmin(survey_L),
                       pmax=99.9)

    ax2 = aplpy.FITSFigure(fn_bk_image_R,
                           figure=fig1,
                           north=fcm_m.set_North,
                           subplot=[0.59, 0.12, 0.38, 0.8])
    ax2.show_grayscale(invert=True,
                       stretch='linear',
                       pmin=fcm_id.get_pmin(survey_R))

    ax1.recenter(left_center.ra, left_center.dec, radius=left_radius / 3600.)
    ax2.recenter(right_center.ra,
                 right_center.dec,
                 radius=right_radius / 3600.)

    # I will be adding stuff to the left-hand-side plot ... start collecting them
    ax1_legend_items = []

    # Do I have the epoch of observation for the background image ?
    '''
   try:
      bk_obsdate = fits.getval(fn_bk_image, 'DATE-OBS')
      
      # If not specified, assume UTC time zone for bk image   
      if bk_obsdate.tzinfo is None:
      #   #warnings.warn(' "--obsdate" timezone not specified. Assuming UTC.')
         bk_obsdate = bk_obsdate.replace(tzinfo=pytz.utc)
      
      nowm_time = Time(bk_obsdate)
      pm_track_time = (fcm_obsdate - bk_obsdate).total_seconds()*u.s
      
   except:
      # If not, just shows the default length set in fcmaker_metadata
      nowm_time = Time(fcm_m.obsdate) - fcm_m.pm_track_time
      pm_track_time = fcm_m.pm_track_time
   '''

    # Just keep things simple. Same lookback time for all charts.
    nowm_time = Time(fcm_m.obsdate) - fcm_m.pm_track_time
    #pm_track_time = fcm_m.pm_track_time

    # If yes, then let's show where are the fastest stars moved from/to.
    #if do_GAIA_pm and (fcm_m.min_abs_GAIA_pm >=0):
    if (fcm_m.min_abs_GAIA_pm >= 0):

        # Query GAIA
        print(
            '   Querying GAIA DR2 to look for high proper motion stars in the area ...'
        )

        # Make it a sync search, because I don't think I need more than 2000 targets ...
        # 2020-04: for some reason the sync search fails ... run an async one for now.
        j = Gaia.cone_search_async(left_center,
                                   right_radius * u.arcsec,
                                   verbose=False)
        r = j.get_results()

        selected = np.sqrt(r['pmra']**2 + r['pmdec']**2
                           ) * u.mas / u.yr > fcm_m.min_abs_GAIA_pm

        # Show the fastest stars
        past_tracks = []
        future_tracks = []

        # Need to propagate their coords to the fc epoch and the obstime
        for s in range(len(r['ra'][selected])):

            star = SkyCoord(
                ra=r['ra'][selected][s],
                dec=r['dec'][selected][s],
                obstime=Time(r['ref_epoch'][selected][s],
                             format='decimalyear'),
                frame='icrs',
                unit=(u.deg, u.deg),
                pm_ra_cosdec=r['pmra'][selected][s] * u.mas / u.yr,
                pm_dec=r['pmdec'][selected][s] * u.mas / u.yr,
                # I must specify a generic distance to the target,
                # if I want to later on propagate the proper motions
                distance=fcm_m.default_pm_d,
            )

            now = star.apply_space_motion(new_obstime=Time(fcm_m.obsdate))
            nowm = star.apply_space_motion(new_obstime=nowm_time)

            past_tracks += [
                np.array([[nowm.ra.deg, now.ra.deg],
                          [nowm.dec.deg, now.dec.deg]])
            ]

            for ax in [ax1, ax2]:
                ax.show_markers([now.ra.deg], [now.dec.deg],
                                marker='.',
                                color='crimson',
                                facecolor='crimson',
                                edgecolor='crimson')

        #if len(r['ra'][selected])>0:
        #   # Prepare a dedicated legend entry
        #   ax1_legend_items += [mlines.Line2D([], [],color='crimson',
        #                         markerfacecolor='crimson',
        #                         markeredgecolor='crimson',
        #                         linestyle='-',
        #                         linewidth=0.75,
        #                         marker='.',
        #                         #markersize=10,
        #                         label='PM* (track$=-$%.1f yr)' % (pm_track_time.to(u.yr).value)) ]
        for ax in [ax1, ax2]:
            ax.show_lines(past_tracks,
                          color='crimson',
                          linewidth=0.75,
                          linestyle='-')

    # Query UCAC2 via Vizier over the finding chart area, to look for suitable Guide Stars
    print('   Querying UCAC2 via Vizier to look for possible Guide Stars ...')
    Vizier.ROW_LIMIT = 10000
    gs_table = Vizier.query_region(
        right_center,
        radius=right_radius * u.arcsec,
        inner_radius=fcm_id.get_inner_GS_search(fc_params) * u.arcsec,
        catalog="UCAC2")
    gs_table = gs_table[gs_table.keys()[0]]

    # Turn the table into a list of SkyCoord, that I can clean as I go.
    # Only select guide stars in the nominal GS mag range
    gs_list = [
        SkyCoord(
            ra=line['RAJ2000'],
            dec=line['DEJ2000'],
            obstime=Time('J2000'),
            equinox='J2000',
            frame='icrs',
            unit=(u.deg, u.deg),
            pm_ra_cosdec=line['pmRA'] / 1000. * u.mas / u.yr,
            pm_dec=line['pmDE'] / 1000. * u.mas / u.yr,
            # Assume a fixed distance, so that I can then propagate proper motions
            distance=100. *
            u.pc).apply_space_motion(new_obstime=Time(fcm_m.obsdate))
        for line in gs_table
        if fcm_m.gs_mag[0] <= line['UCmag'] <= fcm_m.gs_mag[1]
    ]

    # Here, I will show all the ephemeris points I have prepared, ahead of the
    # observations (If I have any)

    if len(fc_params['ephem_points_past']) > 0:
        ax1.show_markers(
            [item.ra.deg for item in fc_params['ephem_points_past']],
            [item.dec.deg for item in fc_params['ephem_points_past']],
            marker='*',
            color='crimson',
            s=100,
            facecolor='none',
            edgecolor='crimson')

        # Prepare a dedicated legend entry
        ax1_legend_items += [
            mlines.Line2D(
                [],
                [],
                color='crimson',
                markerfacecolor='none',
                markeredgecolor='crimson',
                linestyle='',
                #linewidth=0.75,
                marker='*',
                #markersize=10,
                label=r'Target ($\Delta T=-%.0f$ min)' %
                (fc_params['ephem_past_delta'].total_seconds() / 60.))
        ]

    if len(fc_params['ephem_points_future']) > 0:
        ax1.show_markers(
            [item.ra.deg for item in fc_params['ephem_points_future']],
            [item.dec.deg for item in fc_params['ephem_points_future']],
            marker='*',
            color='crimson',
            s=100,
            facecolor='crimson',
            edgecolor='crimson')

        # Prepare a dedicated legend entry
        ax1_legend_items += [
            mlines.Line2D(
                [],
                [],
                color='crimson',
                markerfacecolor='crimson',
                markeredgecolor='crimson',
                linestyle='',
                #linewidth=0.75,
                marker='*',
                #markersize=10,
                label=r'Target ($\Delta T=+%.0f$ min)' %
                (fc_params['ephem_future_delta'].total_seconds() / 60.))
        ]

    # Show the observation footprint
    for f in fields:

        # Call the function that will plot all the important stuff for this field.
        fcm_id.plot_field(ax1, ax2, fc_params, fields[f])

        # Keep all the possible guide stars in the area.
        gs_list = [
            star for star in gs_list
            if (fields[f][2].separation(star) >
                (fcm_id.get_inner_GS_search(fc_params) / 3600. * u.deg)) and (
                    fields[f][2].separation(star) <
                    (fcm_id.get_GS_outer_radius(fc_params) / 3600. * u.deg))
        ]

    # Show all the suitable Guide Star in the area
    if len(gs_list) > 0:
        ax2.show_markers([np.array(star.ra) for star in gs_list],
                         [np.array(star.dec) for star in gs_list],
                         marker='o',
                         edgecolor='crimson',
                         s=50,
                         linewidth=1.0)
    else:
        warnings.warn('Watch out ... no suitable Guide Star found in UCAC2 !')

    # Add orientation arrows to the large view plot
    add_orient(ax2,
               right_center,
               radius=(right_radius * 0.82) * u.arcsec,
               arrow_width=(right_radius * 0.82) / 4.5 * u.arcsec,
               usetex=fcm_m.fcm_usetex)
    add_orient(ax1,
               left_center,
               radius=(left_radius * 0.82) * u.arcsec,
               arrow_width=(left_radius * 0.82) / 4.5 * u.arcsec,
               usetex=fcm_m.fcm_usetex)

    # Add a scale bar
    (scl, scll) = fcm_id.get_scalebar(fc_params['inst'],
                                      ins_mode=fc_params['ins_mode'])

    ax1.add_scalebar(scl)  # Length in degrees
    ax1.scalebar.show(scl,
                      label=scll,
                      corner='bottom left',
                      color='k',
                      frame=1)
    ax1.scalebar.set_font_size(12)

    scl2 = np.floor(right_radius / 60 / 6) / 60.
    scll2 = r'%.0f$^{\prime}$' % (scl2 * 60)
    ax2.add_scalebar(scl2)
    ax2.scalebar.show(scl2,
                      label=scll2,
                      corner='bottom left',
                      color='k',
                      frame=1)
    ax2.scalebar.set_font_size(12)

    for ax in [ax1, ax2]:
        ax.scalebar.set_linewidth(2)

    # Fine tune things a bit further, just because I can ...
    for ax in [ax1, ax2]:
        ax.tick_labels.set_xformat('hh:mm:ss')
        ax.axis_labels.set_xpad(10)
        ax.ticks.set_linewidth(1.5)
        ax.ticks.set_length(10)
        ax.ticks.set_color('k')
        ax.axis_labels.set_xtext('R.A. [J2000]')

    ax1.axis_labels.set_ytext('Dec. [J2000]')
    #ax1.axis_labels.set_ypad(-10)
    ax2.axis_labels.set_ytext(' ')

    # Add the required OB information to comply with ESO requirements ...
    # ... and make the life of the night astronomer a lot easier !
    ax1.add_label(0.0,
                  1.11,
                  'Run ID: ' + fc_params['prog_id'] + ' | ' + fc_params['pi'],
                  relative=True,
                  horizontalalignment='left',
                  size=14)

    # Fix some bugs for anyone not using LaTeX ... sigh ...
    if fcm_m.fcm_usetex:
        lab = fc_params['ob_name'].replace('_', '\_')
    else:
        lab = fc_params['ob_name']

    ax1.add_label(0.0,
                  1.06,
                  'OB: %i | %s' % (fc_params['ob_id'], lab),
                  relative=True,
                  horizontalalignment='left',
                  size=14)
    #ax1.add_label(0.0,1.08, r'$\lambda_{fc}$: %s (%s)' % (bk_lam_L, survey_L), relative=True,
    #              horizontalalignment='left')

    # Display the Wavelength of the plots
    ax1.add_label(0.02,
                  0.965,
                  r'%s (%s)' % (bk_lam_L, survey_L),
                  relative=True,
                  fontsize=12,
                  horizontalalignment='left',
                  verticalalignment='center',
                  bbox=dict(edgecolor='w', facecolor='w', alpha=0.85))
    ax2.add_label(0.02,
                  0.965,
                  r'%s (%s)' % (bk_lam_R, survey_R),
                  relative=True,
                  fontsize=12,
                  horizontalalignment='left',
                  verticalalignment='center',
                  bbox=dict(edgecolor='w', facecolor='w', alpha=0.85))

    # Add a legend (if warranted) for the left plot
    if len(ax1_legend_items) > 0:
        ax1.ax.legend(
            handles=ax1_legend_items,
            bbox_to_anchor=(-0.03, 0.82, 0.4, .1),  #loc='lower right',
            ncol=1,  #mode="expand", 
            borderaxespad=0.,
            fontsize=10,
            borderpad=0.3,
            handletextpad=0.,
            handlelength=2.0)

    # Start keeping track of any tags I need to show
    tag_string = r' '

    # Show the obsdate
    ax1.add_label(1.0,
                  1.02,
                  r'Date: ' +
                  datetime.strftime(fcm_m.obsdate, '%Y-%m-%d %H:%M %Z'),
                  relative=True,
                  color='k',
                  horizontalalignment='right',
                  fontsize=10)

    # Show the OB tags
    if 'moving_target' in fc_params['tags']:
        tag_string += r'$\leadsto$ '

    if 'parallactic_angle' in fc_params['tags']:
        tag_string += r'$\measuredangle$ '

    if len(tag_string) > 1:  # only show the tag if it has a non-zero length
        ax1.add_label(
            -0.18,
            1.08,
            tag_string,
            relative=True,
            color='k',
            horizontalalignment='center',
            verticalalignment='center',
            fontsize=30,
            bbox=dict(edgecolor='k',
                      facecolor='lightsalmon',
                      alpha=1,
                      linewidth=1,
                      boxstyle="sawtooth,pad=0.2,tooth_size=0.075"),
        )

    # Finally include the version of fcmaker in there
    ax1.add_label(1.01,
                  0.00,
                  r'Created with fcmaker v%s' % (fcm_v.__version__),
                  relative=True,
                  horizontalalignment='left',
                  verticalalignment='bottom',
                  fontsize=10,
                  rotation=90)

    # Save it all, both jpg for upload to p2, and pdf for nice quality.
    fn_out = os.path.join(
        fcm_m.plot_loc, fc_params['ob_name'].replace(' ', '_') + '_' +
        survey_L.replace(' ', '-'))

    fig1.savefig(fn_out + '.jpg')
    if do_pdf:
        fig1.savefig(fn_out + '.pdf')
    if do_png:
        fig1.savefig(fn_out + '.png')

    plt.close()

    return fn_out + '.jpg'
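
The guide-star selection above keeps UCAC2 entries whose magnitude falls in the allowed range and whose separation from each field centre lies between an inner and an outer search radius. A compact standalone sketch of that annulus-plus-magnitude cut, with hypothetical argument names (candidates is a list of SkyCoord, mags the matching magnitudes, radii in arcsec):

import astropy.units as u

def select_guide_stars(candidates, mags, centre, r_in_as, r_out_as, mag_range=(8.0, 12.0)):
    keep = []
    for star, mag in zip(candidates, mags):
        sep = centre.separation(star)
        if (sep > r_in_as * u.arcsec and sep < r_out_as * u.arcsec
                and mag_range[0] <= mag <= mag_range[1]):
            keep.append(star)
    return keep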
Beispiel #37
0
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 27 09:38:38 2016

@author: ebachelet
"""
from astroquery.vizier import Vizier
import astropy.units as u
import astropy.coordinates as coord
 
 
v = Vizier(columns=['_RAJ2000', '_DEJ2000', 'Vmag'],column_filters={'Vmag':'<20'})
result=v.query_region(coord.SkyCoord(ra=270, dec=-28,unit=(u.deg, u.deg),frame='icrs'),width="2400m",catalog=['Tycho'])
RA=[]
DEC=[]
V=[]
for i in result['I/239/hip_main'] :
    RA.append(i['_RAJ2000'])
    DEC.append(i['_DEJ2000'])
    V.append(i['Vmag'])

import pdb; pdb.set_trace()
Beispiel #38
0
while st <= sra+rad:
	st = st + step
	ntiles += 1
	

##########################################################
### Hipparcos
print('Querying Hipparcos')
t0 = time.time()
v = Vizier(columns=['_RAJ2000', '_DEJ2000','HIP', 'Plx', 'e_Plx', 'pmRA', 'e_pmRA', 'pmDE', 'e_pmDE', 'Hpmag', 'e_Hpmag'])

# for some reason we need this first to get more than 50 entries 
Vizier.ROW_LIMIT = -1
v.ROW_LIMIT = -1

result = v.query_region(coord.ICRS(ra=sra, dec=sde, unit=(u.deg, u.deg)), radius=Angle(rad, "deg"), catalog='I/311')

#print(result)
#print(sra,sde,rad)
#pdb.set_trace()

table=result[0]
#table.colnames
#pdb.set_trace()	
#x=np.array(table[:]['Plx'])
#y=np.array(table[:]['e_Plx'])

ra = np.array(table[:]['_RAJ2000'])
dec = np.array(table[:]['_DEJ2000'])
dis = np.sqrt( np.power((ra-sra),2) + np.power((dec-sde),2) )
keep = np.where(dis < rad)
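
The comment above notes that both the class-level Vizier.ROW_LIMIT and the instance's ROW_LIMIT had to be set to get more than 50 rows; setting the limit on the instance that actually runs the query is normally sufficient. A minimal sketch of such a Hipparcos (I/311) query with the cap lifted; the centre coordinates here are arbitrary.

import astropy.units as u
from astropy.coordinates import Angle, SkyCoord
from astroquery.vizier import Vizier

v = Vizier(columns=['_RAJ2000', '_DEJ2000', 'HIP', 'Plx', 'e_Plx'])
v.ROW_LIMIT = -1  # -1 removes the default 50-row cap for this instance

centre = SkyCoord(ra=56.75 * u.deg, dec=24.12 * u.deg, frame='icrs')
result = v.query_region(centre, radius=Angle(1.0, "deg"), catalog='I/311')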
Beispiel #39
0
def do_lamost(catalog):
    """Import spectra from LAMOST."""
    task_str = catalog.get_current_task_str()

    # Set preferred names, calculate some columns based on imported data,
    # sanitize some fields
    keys = list(catalog.entries.keys())

    viz = Vizier(columns=["**"])

    fureps = {'erg/cm2/s/A': 'erg/s/cm^2/Angstrom'}

    c_kms = con.c.cgs.value / 1.0e5

    for oname in pbar(keys, task_str):
        # Some events may be merged in cleanup process, skip them if
        # non-existent.

        if (FASTSTARS.RA not in catalog.entries[oname]
                or FASTSTARS.DEC not in catalog.entries[oname]):
            continue
        else:
            result = viz.query_region(coord.SkyCoord(
                ra=catalog.entries[oname][FASTSTARS.RA][0]['value'],
                dec=catalog.entries[oname][FASTSTARS.DEC][0]['value'],
                unit=(un.hourangle, un.deg),
                frame='icrs'),
                                      width="2s",
                                      catalog="V/149/dr2")

            if not result.keys():
                continue
            tab = result['V/149/dr2']

            star = None
            for row in tab:
                if (row['objType'] == 'Star'
                        and row['Class'].lower() in ['star', 'unknown']):
                    star = row
                    break
            if not star:
                continue

            try:
                name, source = catalog.new_entry(oname,
                                                 bibcode='2016yCat.5149....0L',
                                                 srcname='LAMOST',
                                                 url='http://dr3.lamost.org/')
            except Exception:
                catalog.log.warning(
                    '"{}" was not found, suggests merge occurred in cleanup '
                    'process.'.format(oname))
                continue

            if row['SubClass'] != 'Non':
                #catalog.entries[name].add_quantity(
                #    FASTSTARS.SPECTRAL_TYPE, row['SubClass'], source=source)

                ST, SCfull = row['SubClass'][:2], row['SubClass'][2:]
                if len(SCfull) > 0:
                    if 'IV' in SCfull:
                        SC = 'sg'
                    elif 'III' in SCfull:
                        SC = 'g'
                    elif 'V' in SCfull:
                        SC = 'd'
                    elif 'I' in SCfull:
                        SC = 'Sg'
                    catalog.entries[name].add_quantity(FASTSTARS.STELLAR_CLASS,
                                                       SC,
                                                       source=source)
                catalog.entries[name].add_quantity(FASTSTARS.SPECTRAL_TYPE,
                                                   ST,
                                                   source=source)

            if row['z'] and is_number(row['z']):
                catalog.entries[name].add_quantity(FASTSTARS.REDSHIFT,
                                                   str(row['z']),
                                                   e_value=str(row['e_z']),
                                                   source=source)
                catalog.entries[name].add_quantity(
                    FASTSTARS.VELOCITY,
                    pretty_num(float(row['z']) * c_kms, sig=5),
                    e_value=pretty_num(float(row['e_z'] * c_kms), sig=5),
                    source=source)

            mag_types = list(row['magType'].replace('psf_', ''))

            nmt = []
            nmi = 0
            for mt in mag_types:
                if is_number(mt):
                    nmt[nmi - 1] += mt
                else:
                    nmt += mt
                    nmi += 1
            mag_types = [
                x.upper() if x in ['b', 'v', 'j', 'h'] else x for x in nmt
            ]

            for mi, mt in enumerate(mag_types):
                snrf = 'snr' + mt.lower()
                if snrf in row.columns and float(row[snrf]) < 3:
                    continue
                photodict = {
                    PHOTOMETRY.TIME: str(row['MJD']),
                    PHOTOMETRY.U_TIME: 'MJD',
                    PHOTOMETRY.BAND: mt,
                    PHOTOMETRY.TELESCOPE: 'LAMOST',
                    PHOTOMETRY.MAGNITUDE: str(row['mag' + str(mi + 1)]),
                    PHOTOMETRY.SOURCE: source
                }
                if snrf in row.columns:
                    photodict[PHOTOMETRY.E_MAGNITUDE] = str(
                        Decimal('2.5') *
                        (Decimal('1') +
                         Decimal('1') / Decimal(str(row[snrf]))).log10())[:5]
                catalog.entries[name].add_photometry(**photodict)

            vname = row['PlanId']

            ffile = ('spec-' + row['LMJD'] + '-' + vname + '_sp' +
                     row['spId'] + '-' + row['FiberId'] + '.fits.gz')

            furl = 'http://dr3.lamost.org/sas/fits/' + vname + '/' + ffile

            datafile = os.path.join(catalog.get_current_task_repo(), 'LAMOST',
                                    ffile)

            if not os.path.exists(datafile):
                fr = requests.get(furl)

                open(datafile, 'wb').write(fr.content)

            hdulist = fits.open(datafile)
            for oi, obj in enumerate(hdulist[0].header):
                if any(x in ['.', '/'] for x in obj):
                    del (hdulist[0].header[oi])
            hdulist[0].verify('silentfix')
            hdrkeys = list(hdulist[0].header.keys())
            # print(hdrkeys)
            # for key in hdulist[0].header.keys():
            #     print(key, hdulist[0].header[key])
            if hdulist[0].header['SIMPLE']:
                if 'JD' in hdrkeys:
                    mjd = str(jd_to_mjd(Decimal(str(hdulist[0].header['JD']))))
                elif 'MJD' in hdrkeys:
                    mjd = str(hdulist[0].header['MJD'])
                elif 'DATE-OBS' in hdrkeys:
                    if 'T' in hdulist[0].header['DATE-OBS']:
                        dateobs = hdulist[0].header['DATE-OBS'].strip()
                    elif 'UTC-OBS' in hdrkeys:
                        dateobs = hdulist[0].header['DATE-OBS'].strip(
                        ) + 'T' + hdulist[0].header['UTC-OBS'].strip()
                    mjd = str(astrotime(dateobs, format='isot').mjd)
                else:
                    raise ValueError("Couldn't find JD/MJD for spectrum.")
                if hdulist[0].header['NAXIS'] == 2:
                    waves = [str(x) for x in list(hdulist[0].data)[2]]
                    fluxes = [str(x) for x in list(hdulist[0].data)[0]]
                else:
                    print('Warning: Skipping FITS spectrum `{}`.'.format(
                        datafile))
                    continue
            else:
                raise ValueError('Non-simple FITS import not yet supported.')
            if 'BUNIT' in hdrkeys:
                fluxunit = hdulist[0].header['BUNIT']
                if fluxunit in fureps:
                    fluxunit = fureps[fluxunit]
            else:
                if max([float(x) for x in fluxes]) < 1.0e-5:
                    fluxunit = 'erg/s/cm^2/Angstrom'
                else:
                    fluxunit = 'Uncalibrated'
            specdict = {
                SPECTRUM.U_WAVELENGTHS: 'Angstrom',
                SPECTRUM.WAVELENGTHS: waves,
                SPECTRUM.TIME: mjd,
                SPECTRUM.U_TIME: 'MJD',
                SPECTRUM.FLUXES: fluxes,
                SPECTRUM.U_FLUXES: fluxunit,
                SPECTRUM.FILENAME: ffile,
                SPECTRUM.SOURCE: source
            }
            if 'TELESCOP' in hdrkeys:
                specdict[SPECTRUM.TELESCOPE] = hdulist[0].header['TELESCOP']
            if 'INSTRUME' in hdrkeys:
                specdict[SPECTRUM.INSTRUMENT] = hdulist[0].header['INSTRUME']
            if 'SITENAME' in hdrkeys:
                specdict[SPECTRUM.OBSERVATORY] = hdulist[0].header['SITENAME']
            elif 'OBSERVAT' in hdrkeys:
                specdict[SPECTRUM.OBSERVATORY] = hdulist[0].header['OBSERVAT']
            if 'OBSERVER' in hdrkeys:
                specdict[SPECTRUM.OBSERVER] = hdulist[0].header['OBSERVER']
            if 'AIRMASS' in hdrkeys:
                specdict[SPECTRUM.AIRMASS] = hdulist[0].header['AIRMASS']
            catalog.entries[name].add_spectrum(**specdict)
            hdulist.close()
            catalog.journal_entries()

    return
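
The photometric uncertainty above is derived from the catalogue signal-to-noise ratio as sigma_m = 2.5 * log10(1 + 1/SNR). A short worked sketch of that conversion:

import math

def mag_err_from_snr(snr):
    # sigma_m = 2.5 * log10(1 + 1/SNR)
    return 2.5 * math.log10(1.0 + 1.0 / snr)

print(round(mag_err_from_snr(10.0), 3))   # ~0.103 mag at SNR = 10
print(round(mag_err_from_snr(100.0), 3))  # ~0.011 mag at SNR = 100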
Beispiel #40
0
def astrometric_checking():
    dra = (astrometry.comb['ra_1']-astrometry.comb['ra_2'])*3600
    ddec = (astrometry.comb['dec_1']-astrometry.comb['dec_2'])*3600
    pl.clf()
    sp = pl.subplot(221)
    sp.scatter(astrometry.comb['ra_1'], 
               dra, 
               alpha=0.5)
    sp = pl.subplot(222)
    sp.scatter(astrometry.comb['dec_1'], 
               dra, 
               alpha=0.5)
    sp = pl.subplot(223)
    sp.scatter(astrometry.comb['ra_1'], 
               ddec, 
               alpha=0.5)
    sp = pl.subplot(224)
    sp.scatter(astrometry.comb['dec_1'], 
               ddec, 
               alpha=0.5)
    
    fig = pl.figure(num=3)
    fig.clf()
    sp = fig.add_subplot(111)
    sp.scatter(astrometry.comb['ra_1'],
               astrometry.comb['dec_1'],
               c=np.hypot(dra, ddec), cmap='jet')
    
    fig = pl.figure(num=2)
    fig.clf()
    sp = fig.add_subplot(111)
    db = DBSCAN(eps=0.15)
    x = np.zeros((len(dra), 2))
    x[:, 0] = dra
    x[:, 1] = ddec
    db.fit(x)
    sp.scatter(dra,
               ddec,
               c=db.labels_)
    p = np.where(db.labels_ >= 0)[0]
    print(round(np.median(dra), 2), round(np.mean(dra), 2), round(np.std(dra), 2))
    print(round(np.median(ddec), 2), round(np.mean(ddec), 2), round(np.std(ddec), 2))
    dra = dra[p]
    ddec = ddec[p]
    print(len(dra))
    print(round(np.median(dra), 2), round(np.mean(dra), 2), round(np.std(dra), 2))
    print(round(np.median(ddec), 2), round(np.mean(ddec), 2), round(np.std(ddec), 2))
    
    apass = Vizier.query_region(s, catalog='APASS', radius=30*u.arcmin)
    apass = apass[-1]
    
    
    fig = pl.figure(num=4)
    fig.clf()
    sp = fig.add_subplot(211)
    
    sources = astrometry.sources
    comb = combine_w_apass(sources, apass)
    
    p = np.where((comb['mag'] > -99) &
                 (comb['Vmag'] > -99) &
                 (comb['e_Vmag'] > 0))[0]
    comb = comb[p]
    
    fit = np.polyfit(comb['mag'], comb['Vmag'], 3, w=1./comb['e_Vmag'])
    poly = np.poly1d(fit)
    
    sp.scatter(comb['mag'], comb['Vmag'])
    comb.sort('mag')
    sp.plot(comb['mag'], poly(comb['mag']), '--k')
    
    sp2 = pl.subplot(212)
    sp2.scatter(comb['Vmag'], comb['Vmag']-poly(comb['mag']))
    print(np.median(np.abs(comb['Vmag']-poly(comb['mag']))))
    
    phot = identify_sources(path+file_name)
    db = DBSCAN(eps=2, min_samples=2)
    x = np.zeros((len(comb)+len(phot), 2))
    x[:len(comb), 0] = comb['xcentroid']
    x[:len(comb), 1] = comb['ycentroid']
    x[len(comb):, 0] = phot['xcentroid']
    x[len(comb):, 1] = phot['ycentroid']
    
    db.fit(x)
    comb['label'] = db.labels_[:len(comb)]
    phot['label'] = db.labels_[len(comb):]
    p = np.where(comb['label'] >= 0)[0]
    comb = comb[p]
    p = np.where(phot['label'] >= 0)[0]
    phot = phot[p]
    
    comb2 = join(comb, phot, keys='label')
    comb2.rename_column('mag_2', 'mag')
    
#    comb2 = combine_w_apass(phot, apass)
    
    sp.scatter(comb2['mag'], comb2['Vmag'])
    fit = np.polyfit(comb2['mag'], comb2['Vmag'], 3, w=1./comb2['e_Vmag'])
    poly = np.poly1d(fit)
    comb2.sort('mag')
    sp.plot(comb2['mag'], poly(comb2['mag']), '--k')
    
    sp2.scatter(comb2['Vmag'], comb2['Vmag']-poly(comb2['mag']))
    sp.scatter(comb2['mag_1'], comb2['mag'])
    
    x1_col = 'ra'
    y1_col = 'dec'
    
    x2_col = 'ra_1'
    y2_col = 'dec_1'
#    x2_col = 'ra_2'
#    y2_col = 'dec_2'
    
    wcs = AWCS(path+file_name)
    print(wcs)
    comb2 = astrometry.comb
    print(comb2.colnames)
    comb2['ra'], comb2['dec'] = wcs.all_pix2world(comb2['xcentroid'],
                                                  comb2['ycentroid'],1)
    dra = (comb2[x1_col]-comb2[x2_col])*3600
    ddec = (comb2[y1_col]-comb2[y2_col])*3600
    print(round(np.median(dra), 2), round(np.mean(dra), 2), round(np.std(dra), 2))
    print(round(np.median(ddec), 2), round(np.mean(ddec), 2), round(np.std(ddec), 2))
    db = DBSCAN(eps=0.15)
    x = np.zeros((len(dra), 2))
    x[:, 0] = dra
    x[:, 1] = ddec
    db.fit(x)
    
    p = np.where(db.labels_ >= 0)[0]
    print(len(p)/len(comb2), len(comb2), len(p))
    comb2 = comb2[p]
    dra = (comb2[x1_col]-comb2[x2_col])*3600
    ddec = (comb2[y1_col]-comb2[y2_col])*3600
    
    pl.clf()
    sp = pl.subplot(221)
    sp.scatter(comb2[x1_col], ddec)
    fit = np.polyfit(comb2[x1_col]-np.median(comb2[x1_col]),
                     (comb2[x1_col]-comb2[x2_col])*3600, 1)
    fit = np.polyfit(comb2[y1_col]-np.median(comb2[y1_col]),
                     ddec, 1)
    dec_rel = comb2[y1_col]-np.median(comb2[y1_col])
    
    dec_cor = np.poly1d(fit)
    print(round(np.median(dra), 2), round(np.mean(dra), 2), round(np.std(dra), 2))
    print(round(np.median(ddec), 2), round(np.mean(ddec), 2), round(np.std(ddec), 2))
    print(fit)
    sp = pl.subplot(222)
    sp.scatter(comb2[y1_col], dra)
    sp.scatter(comb2[y1_col], dec_cor(comb2[y1_col]-np.median(comb2[y1_col])))
    sp = pl.subplot(223)
    sp.scatter(comb2[x1_col],comb2[y1_col], c=ddec, cmap='jet')
    sp.scatter(comb2[x1_col],comb2[y1_col], c=ddec-dec_cor(dec_rel), cmap='jet')
    sp = pl.subplot(224)
    
    sp.scatter(comb2[x1_col],comb2[y1_col], c=dra, cmap='jet')
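
The residual cleaning above relies on DBSCAN: cross-match offsets assigned to a cluster (label >= 0) are kept, while noise points (label -1) are rejected. A minimal standalone sketch of that clipping step:

import numpy as np
from sklearn.cluster import DBSCAN

def clip_offsets(dra_as, ddec_as, eps=0.15):
    # dra_as, ddec_as: offsets in arcsec; returns indices of clustered points
    x = np.column_stack([dra_as, ddec_as])
    labels = DBSCAN(eps=eps).fit(x).labels_
    return np.where(labels >= 0)[0]  # label -1 marks noise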
Beispiel #41
0
def get_vizier_cat(image='RXJ2248-IR_sci.fits', ext=0, catalog="II/246"):
    """
    Get a list of RA/Dec coords from a Vizier catalog that can be used
    for WCS alignment.
    
    `catalog` is any catalog ID recognized by Vizier, e.g.: 
        "II/328/allwise": WISE
        "II/246": 2MASS
        "V/139": SDSS DR9
    """
    import threedhst.dq
    import astropy.wcs as pywcs
    from astropy.table import Table as table
    import astropy.io.fits as pyfits
    
    import astroquery
    from astroquery.vizier import Vizier
    import astropy.coordinates as coord
    import astropy.units as u
    
    im = pyfits.open(image)
    
    wcs = pywcs.WCS(im[ext].header)
    #wcs = pywcs.WCS(pyfits.getheader('Q0821+3107-F140W_drz.fits', 1))

    Vizier.ROW_LIMIT = -1
            
    r0, d0 = wcs.wcs_pix2world([[im[ext].header['NAXIS1']/2., im[ext].header['NAXIS2']/2.]], 1)[0]
    foot = wcs.calc_footprint()
    
    corner_radius = np.sqrt((foot[:,0]-r0)**2/np.cos(d0/360.*2*np.pi)**2 + (foot[:,1]-d0)**2).max()*60*1.1

    try:
        c = coord.ICRS(ra=r0, dec=d0, unit=(u.deg, u.deg))
    except:
        c = coord.ICRSCoordinates(ra=r0, dec=d0, unit=(u.deg, u.deg))
        
    #### something with astropy.coordinates
    # c.icrs.ra.degree = c.icrs.ra.degrees
    # c.icrs.dec.degree = c.icrs.dec.degrees
    #
    vt = Vizier.query_region(c, radius=u.Quantity(corner_radius, u.arcminute), catalog=[catalog])
    if not vt:
        threedhst.showMessage('No matches found in Vizier %s @ (%.6f, %.6f).\n\nhttp://vizier.u-strasbg.fr/viz-bin/VizieR?-c=%.6f+%.6f&-c.rs=8' %(catalog, r0, d0, r0, d0), warn=True)
        return False
    
    vt = vt[0]
            
    #### Make a region file
    ra_list, dec_list = vt['RAJ2000'], vt['DEJ2000']
    print('Vizier, found %d objects in %s.' % (len(ra_list), catalog))
    
    fp = open('%s.vizier.radec' %(image.split('.fits')[0]), 'w')
    fpr = open('%s.vizier.reg' %(image.split('.fits')[0]), 'w')
    
    fp.write('# %s, r=%.1f\'\n' %(catalog, corner_radius))
    fpr.write('# %s, r=%.1f\'\nfk5\n' %(catalog, corner_radius))
    for ra, dec in zip(ra_list, dec_list):
        fp.write('%.7f %.7f\n' %(ra, dec))
        fpr.write('circle(%.6f, %.6f, 0.5")\n' %(ra, dec))
    
    fpr.close()
    fp.close()
    
    return True
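
The search radius above is the largest angular distance from the image centre to any footprint corner, with the RA term divided by cos(dec) (which keeps the estimate conservative) and a 10% pad, expressed in arcminutes. The same computation as a standalone sketch:

import numpy as np

def corner_radius_arcmin(footprint_deg, ra0, dec0, pad=1.1):
    # footprint_deg: (4, 2) array of corner (RA, Dec) in degrees
    dra = (footprint_deg[:, 0] - ra0) / np.cos(np.deg2rad(dec0))
    ddec = footprint_deg[:, 1] - dec0
    return np.sqrt(dra**2 + ddec**2).max() * 60.0 * pad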
Beispiel #42
0
ra_hhmmss = df_obj_radec['RA_hhmmss'][idx_source]
dec_ddmmss = df_obj_radec['DEC_ddmmss'][idx_source]
obj_name = df_obj_radec['Object'][idx_source]
print('[OBJ]', obj_name, '[RA]', ra0_deg, ' [DEC]', dec0_deg, '[RA]',
      ra_hhmmss, ' [DEC]', dec_ddmmss)

#sys.exit(0)

r1_deg = 0.001
r1_as = r1_deg * 3600
print('... will query the data archive with radius =', r1_as, '[as] ...')
input("Press Enter to continue...")

result = Vizier.query_region(coord.SkyCoord(ra=ra0_deg,
                                            dec=dec0_deg,
                                            unit=(u.deg, u.deg),
                                            frame='icrs'),
                             radius=r1_deg * u.deg)
'''
result = Vizier.query_region(coord.SkyCoord
                             (ra=ra0_deg, dec=dec0_deg,unit=(u.deg, u.deg),
                              frame='icrs'),width=90, height=360,catalog='II/183A/table2') 


#Landlot: II/183A/table2
#UCAC4: I/322A
'''

print(result)
#print(result[-1][0])
Beispiel #43
0
    def load_catalog(self):

        coordinates = self.settings.catalog.coordinates
        image = self.settings.catalog.image
        obs_year = self.settings.image.obs_year
        ra, dec = self.settings.catalog.ra, self.settings.catalog.dec
        radius = self.settings.catalog.radius
        max_stars = self.settings.catalog.max_stars

        coordinate, radius = SkyCoord(ra * u.deg, dec * u.deg), radius * u.deg

        if image == 'None':

            data, wcs = np.zeros((2,2)), WCS()
            wcs.wcs.cdelt = -radius.value, radius.value
            wcs.wcs.crpix = 2, 2
            wcs.wcs.crval = ra, dec

        elif image == 'Custom':

            fn = askopenfilename(title='Image', filetypes=[('FITS', '.fits')])

            if not fn:
                return

            try:
                with fits.open(fn) as hdul:
                    data, wcs = hdul[0].data, WCS(hdul[0].header)
            except:
                showerror('Error', 'Failed to open catalog image %s' % fn)
                return

        else:

            try:
                with SkyView.get_images(position=coordinate, survey=image, radius=radius)[0] as hdul:
                    data, wcs = hdul[0].data, WCS(hdul[0].header)
            except:
                showerror('Error', 'Failed to get catalog image')
                return

        if coordinates == 'Custom':

            fn = askopenfilename(title='Catalog')

            if not fn:
                return

            try:
                c = np.loadtxt(fn, unpack=True)
            except:
                showerror('Error', 'Failed to open catalog coordinates %s' % fn)
                return

            try:
                c[0][0]
            except:
                c = [np.array([r]) for r in c]

            if len(c) < 2:
                return

            if len(c) >= 5:
                ra = (c[0] * u.deg + (obs_year - 2000) * c[3] * u.marcsec).value
                dec = (c[1] * u.deg + (obs_year - 2000) * c[4] * u.marcsec).value

            x, y = map(np.array, zip(*wcs.wcs_world2pix(list(zip(ra, dec)), 0)))
            i = np.where((x >= 0) & (x <= data.shape[1]) & (y >= 0) & (y <= data.shape[0]))
            x, y = x[i], y[i]

            if len(c) > 2:
                mag = c[2]
                i = np.argsort(mag)
                x, y = x[i], y[i]

            catalog_xy = list(zip(x[:max_stars], y[:max_stars]))

        else:

            try:
                c = Vizier.query_region(coordinate, radius=radius, catalog=coordinates.name)[coordinates.id]
            except:
                showerror('Error', 'Failed to get catalog coordinates')
                return

            ra, dec = c[coordinates.ra], c[coordinates.dec]
            if hasattr(coordinates, 'pm_ra'):
                ra = c[coordinates.ra] + (obs_year - 2000) * c[coordinates.pm_ra] * u.marcsec
            if hasattr(coordinates, 'pm_dec'):
                dec = c[coordinates.dec] + (obs_year - 2000) * c[coordinates.pm_dec] * u.marcsec

            x, y = map(np.array, zip(*wcs.wcs_world2pix(list(zip(ra, dec)), 0)))
            i = np.where((x < 0) | (x > data.shape[1]) | (y < 0) | (y > data.shape[0]))
            c.remove_rows(i); c.sort(coordinates.mag)
            catalog = list(zip(c[coordinates.ra][:max_stars], c[coordinates.dec][:max_stars]))
            catalog_xy = wcs.wcs_world2pix(catalog, 0)

        if self.axr:

            if np.all(data == self.axr.image.get_array().data):

                self.axr.artist.set_offsets(catalog_xy)
                self.canvas.draw()

                return

            self.clear_connections()
            self.axr.remove()

        self.axr = self.add_subplot(122)
        self.axr.set_axis_off()

        clim = np.nanquantile(data, (.01, .99))
        self.axr.image = self.axr.imshow(data, cmap='gray_r', clim=clim)
        self.axr.wcs = wcs
        self.axr.artist = self.axr.scatter(*zip(*catalog_xy), c=[], s=200, edgecolors=(1, 0, 0, .25), picker=True)

        self.axr.artist.point = self.axr.scatter([], [], c=[], s=200, edgecolors=(.25, .25, 1))
        self.add_artist(self.axr.artist.point)

        self.axr.set_xlim(0, data.shape[1])
        self.axr.set_ylim(0, data.shape[0])

        self.canvas.draw()
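
The catalogue branch above projects the queried coordinates onto the image with wcs_world2pix, removes rows that fall outside the data array, sorts by magnitude and keeps at most max_stars entries. A standalone sketch of that filtering, with hypothetical argument names:

import numpy as np

def in_frame_sources(wcs, shape, ra_deg, dec_deg, mag, max_stars=100):
    # shape is the image (ny, nx); returns pixel (x, y) pairs of the brightest sources
    xy = wcs.wcs_world2pix(np.column_stack([ra_deg, dec_deg]), 0)
    x, y = xy[:, 0], xy[:, 1]
    ok = (x >= 0) & (x <= shape[1]) & (y >= 0) & (y <= shape[0])
    order = np.argsort(np.asarray(mag)[ok])
    return list(zip(x[ok][order][:max_stars], y[ok][order][:max_stars]))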
Beispiel #44
0
def catalog_search(ra, dec, radius=10):
    """
    Search Simbad, 2MASS, and WISE catalogs for object
    
    Parameters
    ----------
    ra: float
        The right ascension
    dec: float
        The declination
    radius: float
        The search radius
        
    Returns
    -------
    astropy.table.Table
        A table of the search results
    """
    c = coords.ICRS(ra=ra * q.deg, dec=dec * q.deg)

    # Query 2MASS and WISE
    WISE = Vizier.query_region(c,
                               radius=radius * q.arcsec,
                               catalog=['II/328/allwise'])
    MASS = Vizier.query_region(c,
                               radius=radius * q.arcsec,
                               catalog=['II/246/out'])
    SDSS = Vizier.query_region(c,
                               radius=radius * q.arcsec,
                               catalog=['V/139/sdss9'])
    SIMB = Simbad.query_region(c, radius=(radius + 10) * q.arcsec)

    # Get the bands
    SDSSbands = [
        'umag', 'e_umag', 'gmag', 'e_gmag', 'rmag', 'e_rmag', 'imag', 'e_imag',
        'zmag', 'e_zmag'
    ]
    MASSbands = ['Jmag', 'e_Jmag', 'Hmag', 'e_Hmag', 'Kmag', 'e_Kmag']
    WISEbands = [
        'W1mag', 'e_W1mag', 'W2mag', 'e_W2mag', 'W3mag', 'e_W3mag', 'W4mag',
        'e_W4mag'
    ]

    # Magnitude arrays
    SDSSmags = [np.nan] * len(SDSSbands)
    MASSmags = [np.nan] * len(MASSbands)
    WISEmags = [np.nan] * len(WISEbands)

    if SDSS:
        SDSSmags = list(
            map(lambda x: x
                if x != '--' else np.nan, SDSS[0][SDSSbands][0].as_void()))

    if MASS:
        MASSmags = list(
            map(lambda x: x
                if x != '--' else np.nan, MASS[0][MASSbands][0].as_void()))

    if WISE:
        WISEmags = list(
            map(lambda x: x
                if x != '--' else np.nan, WISE[0][WISEbands][0].as_void()))

    if SIMB:
        name = SIMB['MAIN_ID'][0]
    else:
        name = '--'

    result = at.Table(
        np.array([name, ra, dec] + SDSSmags + MASSmags + WISEmags),
        masked=True,
        names=['name', 'ra', 'dec'] +
        [b.replace('mag', '') for b in SDSSbands + MASSbands + WISEbands],
        dtype=[str] + [float] * 26)

    return result
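
A hypothetical call to the function above; the coordinates are illustrative only, and any survey without a match simply contributes NaN magnitudes to the returned row:

tbl = catalog_search(83.822, -5.391, radius=5)
print(tbl)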
########################################################################################################

output_path = '/mnt/dwf/archive_NOAO_data/data_outputs/'
output_cats = '/mnt/dwf/archive_NOAO_data/data_outputs/' + year + '/' + month + '/' + field + '/vizier_cats/'

if not os.path.exists(output_cats):
    os.makedirs(output_cats)
else:
    pass

AAVOS = 'II/336'
#v = Vizier(columns = ['mag', '_RAJ2000', '_DEJ2000'], catalog = AAVOS)
#result = v.query_region(field_RA_DEC, radius=Angle(1.5, "deg"))
result = Vizier.query_region(field_RA_DEC,
                             radius=Angle(1.5, "deg"),
                             catalog=AAVOS)

AAVOS_table = Table()
AAVOS_table['RA'] = result[0]['RAJ2000']
AAVOS_table['DEC'] = result[0]['DEJ2000']
AAVOS_table['g_mag'] = result[0]['g_mag']
AAVOS_table['g_mag_err'] = result[0]['e_g_mag']
AAVOS_table['r_mag'] = result[0]['r_mag']
AAVOS_table['r_mag_err'] = result[0]['e_r_mag']
AAVOS_table['i_mag'] = result[0]['i_mag']
AAVOS_table['i_mag_err'] = result[0]['e_i_mag']

AAVOS_output = 'AAVOS.ascii'
AAVOS_table.write(output_cats + AAVOS_output, format='ascii', overwrite=True)
Beispiel #46
0
                         obstime=sun_to_stereo.obstime,
                         frame='hcrs')

###############################################################################
# Let's look up bright stars using the Vizier search capability provided by
# astroquery.
# We will search the GAIA2 star catalog for stars with magnitude
# brighter than 7.

vv = Vizier(columns=['**'],
            row_limit=-1,
            column_filters={'Gmag': '<7'},
            timeout=1200)
vv.ROW_LIMIT = -1
result = vv.query_region(stereo_to_sun,
                         radius=4 * u.deg,
                         catalog='I/345/gaia2')

###############################################################################
# Let's see how many stars we've found.

print(len(result[0]))

###############################################################################
# Now we load all stars into an array coordinate.  The reference epoch for the
# star positions is J2015.5, so we update these positions to the date of the
# COR2 observation using :meth:`astropy.coordinates.SkyCoord.apply_space_motion`.

tbl_crds = SkyCoord(ra=result[0]['RA_ICRS'],
                    dec=result[0]['DE_ICRS'],
                    distance=Distance(parallax=u.Quantity(result[0]['Plx'])),
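
As the comment above explains, the SkyCoord built here carries proper motions and a parallax-based distance so that the J2015.5 positions can be propagated to the observation date with apply_space_motion. A minimal self-contained sketch of that step, with made-up values:

import astropy.units as u
from astropy.coordinates import SkyCoord, Distance
from astropy.time import Time

star = SkyCoord(ra=10.0 * u.deg, dec=20.0 * u.deg,
                distance=Distance(parallax=5.0 * u.mas),
                pm_ra_cosdec=30.0 * u.mas / u.yr,
                pm_dec=-12.0 * u.mas / u.yr,
                obstime=Time(2015.5, format='decimalyear'))
star_now = star.apply_space_motion(new_obstime=Time('2021-01-01'))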
Beispiel #47
0
def __zero_point(image_file, psf_sources, sep_max=2.0,
                 plot_corr=True, corr_plotname=None,
                 plot_source_offsets=True, source_offs_plotname=None,
                 plot_field_offsets=False, field_offs_plotname=None, 
                 gaussian_blur_sigma=30.0, cat_num=None, write=False, 
                 output=None):
    
    """
    Input: 
        - filename for **background-subtracted** image
        - an astropy table of sources, ideally fit for their PSF, with at least
          the following columns: "x_0", "y_0", ra", "dec", "mag_fit", 
          "mag_unc", where "mag_fit" is the instrumental magnitude in the 
          photometric system of <image_file> and "mag_unc" is the associated 
          uncertainty
        - maximum allowed separation when cross-matching sources (optional;
          default 2.0 pix ~ 0.6" for WIRCam and ~ 0.37" for MegaPrime)
          
        - whether or not to plot the correlation with linear fit (optional; 
          default True)
        - name for the output correlation plot (optional; default set below)
        - whether to plot the offsets in RA and Dec of each catalog-matched 
          source (optional; default True)
        - name for the output offsets plot (optional; default set below)
        - whether to show the overall offsets as an image with a Gaussian blur 
          to visualize large-scale structure (optional; default False)
        - name for the output field offsets plot (optional; default set below)
        - sigma to apply to the Gaussian filter (optional; default 30.0)
        - Vizier catalog number to choose which catalog to cross-match 
          (optional; defaults are PanStarrs 1, SDSS DR12, and 2MASS for 
          relevant filters)
        - whether to write the table of calibrated sources (optional; default 
          False)
        - name for the output fits table (optional; default set below)
    
    Uses astroquery and Vizier to query an online catalog for sources which 
    match those previously detected and fit for their ePSF. Computes the offset 
    between the apparent and instrumental magnitudes of the queried sources for 
    photometric calibration. Computes the mean, median and standard deviation.
    
    Output: ePSF-fit sources 
    """   

    # load in data 
    image_data = fits.getdata(image_file)
    image_header = fits.getheader(image_file) 
    
    # don't necessarily need:
    try:
        instrument = image_header["INSTRUME"]
    except KeyError:
        instrument = "Unknown"        
    # mandatory:
    pixscale = image_header["PIXSCAL1"]    
    try: filt = image_header["FILTER"][0] 
    except KeyError: filt = image_header["HIERARCH FPA.FILTER"][0] # for PS1
    try: t_MJD = image_header["MJDATE"] 
    except KeyError: t_MJD = image_header["MJD-OBS"]
    
    # determine the catalog to compare to for photometry
    if cat_num: # if a Vizier catalog number is given 
        ref_cat = cat_num
        ref_cat_name = cat_num
    else:  
        if filt in ['g','r','i','z','Y']:
            zp_filter = (filt).lower() # lowercase needed for PS1
            ref_cat = "II/349/ps1" # PanStarrs 1
            ref_cat_name = "PS1" 
        elif filt == 'u':
            zp_filter = 'u' # closest option right now 
            ref_cat = "V/147" 
            ref_cat_name = "SDSS DR12"
        else: 
            zp_filter = filt[0] # Ks must be K for 2MASS 
            ref_cat = "II/246/out" # 2MASS
            ref_cat_name = "2MASS"
        
    w = wcs.WCS(image_header) # WCS object and coords of centre 
    xsize = image_data.shape[1]
    ysize = image_data.shape[0]          
    wcs_centre = np.array(w.all_pix2world(xsize/2.0, ysize/2.0, 1)) 

    ra_centre = wcs_centre[0]
    dec_centre = wcs_centre[1]
    radius = pixscale*np.max([xsize,ysize])/60.0 #arcmins
    minmag = 13.0 # magnitude minimum
    maxmag = 20.0 # magnitude maximum
    max_emag = 0.4 # maximum allowed error 
    nd = 5 # minimum no. of detections for a source (across all filters)
     
    # actual querying (internet connection needed)
    print(f"\nQuerying Vizier {ref_cat} ({ref_cat_name}) "+
          f"around RA {ra_centre:.4f}, Dec {dec_centre:.4f} "+
          f"with a radius of {radius:.4f} arcmin")
    
    v = Vizier(columns=["*"], column_filters={
            zp_filter+"mag":str(minmag)+".."+str(maxmag),
            "e_"+zp_filter+"mag":"<"+str(max_emag),
            "Nd":">"+str(nd)}, row_limit=-1) # no row limit 
    Q = v.query_region(SkyCoord(ra=ra_centre, dec=dec_centre, 
                        unit=(u.deg, u.deg)), radius=f'{radius}m', 
                        catalog=ref_cat, cache=False)

    if len(Q) == 0: # if no matches
        print("\nNo matches were found in the "+ref_cat_name+
              " catalog. The requested region may be in an unobserved"+
              " region of this catalog. Exiting.")
        return 
        
    
    # pixel coords of found sources
    cat_coords = w.all_world2pix(Q[0]['RAJ2000'], Q[0]['DEJ2000'], 1)
    
    # mask out edge sources
    # a bounding circle for WIRCam, rectangle for MegaPrime/other instruments
    if "WIRCam" in instrument:
        rad_limit = xsize/2.0
        dist_to_center = np.sqrt((cat_coords[0]-xsize/2.0)**2 + 
                                 (cat_coords[1]-ysize/2.0)**2)
        mask = dist_to_center <= rad_limit
        good_cat_sources = Q[0][mask]
    else:
        x_lims = [int(0.05*xsize), int(0.95*xsize)] 
        y_lims = [int(0.05*ysize), int(0.95*ysize)]
        mask = (cat_coords[0] > x_lims[0]) & (
                cat_coords[0] < x_lims[1]) & (
                cat_coords[1] > y_lims[0]) & (
                cat_coords[1] < y_lims[1])
        good_cat_sources = Q[0][mask] 
    
    # cross-matching coords of sources found by astrometry
    source_coords = SkyCoord(ra=psf_sources['ra'], 
                             dec=psf_sources['dec'], 
                             frame='icrs', unit='degree')
    # and coords of valid sources in the queried catalog 
    cat_source_coords = SkyCoord(ra=good_cat_sources['RAJ2000'], 
                                 dec=good_cat_sources['DEJ2000'], 
                                 frame='icrs', unit='degree')
    
    # indices of matching sources (within <sep_max> pixels of each other) 
    idx_image, idx_cat, d2d, d3d = cat_source_coords.search_around_sky(
            source_coords, sep_max*pixscale*u.arcsec)

    if len(idx_image) <= 3:
        print(f"\nFound {len(idx_image)} matches between image and "+
              f"{ref_cat_name} and >3 matches are required. Exiting.")
        return
   
    nmatches = len(idx_image) # store number of matches 
    sep_mean = np.mean(d2d.value*3600.0) # store mean separation in "
    print(f'\nFound {nmatches:d} sources in {ref_cat_name} within '+
          f'{sep_max} pix of sources detected by astrometry, with average '+
          f'separation {sep_mean:.3f}" ')
    
    # get coords for sources which were matched
    source_matches = source_coords[idx_image]
    cat_matches = cat_source_coords[idx_cat]
    source_matches_ra = [i.ra.value for i in source_matches]
    cat_matches_ra = [i.ra.value for i in cat_matches]
    source_matches_dec = [i.dec.value for i in source_matches]
    cat_matches_dec = [i.dec.value for i in cat_matches]
    # compute offsets 
    ra_offsets = np.subtract(source_matches_ra, cat_matches_ra)*3600.0 # arcsec
    dec_offsets = np.subtract(source_matches_dec, cat_matches_dec)*3600.0
    ra_offsets_mean = np.mean(ra_offsets)
    dec_offsets_mean = np.mean(dec_offsets)

    # plot the correlation
    if plot_corr:
        # fit a straight line to the correlation
        from scipy.optimize import curve_fit
        def f(x, m, b):
            return b + m*x
        
        xdata = good_cat_sources[zp_filter+'mag'][idx_cat] # catalog
        xdata = [float(x) for x in xdata]
        ydata = psf_sources['mag_fit'][idx_image] # instrumental 
        ydata = [float(y) for y in ydata]
        popt, pcov = curve_fit(f, xdata, ydata) # obtain fit
        m, b = popt # fit parameters
        perr = np.sqrt(np.diag(pcov))
        m_err, b_err = perr # errors on parameters 
        fitdata = [m*x + b for x in xdata] # plug fit into data 
        
        # plot correlation
        fig, ax = plt.subplots(figsize=(10,10))
        ax.errorbar(good_cat_sources[zp_filter+'mag'][idx_cat], 
                 psf_sources['mag_fit'][idx_image], 
                 psf_sources['mag_unc'][idx_image],
                 marker='.', mec="#fc5a50", mfc="#fc5a50", ls="", color='k', 
                 markersize=12, label=f"Data [{filt}]", zorder=1) 
        ax.plot(xdata, fitdata, color="blue", 
                 label=r"$y = mx + b $"+"\n"+r"$ m=$%.3f$\pm$%.3f, $b=$%.3f$\pm$%.3f"%(
                         m, m_err, b, b_err), zorder=2) # the linear fit 
        ax.set_xlabel(f"Catalog magnitude [{ref_cat_name}]", fontsize=15)
        ax.set_ylabel("Instrumental PSF-fit magnitude", fontsize=15)
        ax.set_title("PSF Photometry", fontsize=15)
        ax.legend(loc="upper left", fontsize=15, framealpha=0.5)
        
        if not(corr_plotname):
            corr_plotname=image_file.replace(".fits", "_PSF_photometry.png")
        plt.savefig(corr_plotname, bbox_inches="tight")
        plt.close()        
    
    # plot the RA, Dec offset for each matched source 
    if plot_source_offsets:             
        # plot
        plt.figure(figsize=(10,10))
        plt.plot(ra_offsets, dec_offsets, marker=".", linestyle="", 
                color="#ffa62b", mec="black", markersize=5)
        plt.xlabel('RA (J2000) offset ["]', fontsize=15)
        plt.ylabel('Dec (J2000) offset ["]', fontsize=15)
        plt.title(f"Source offsets from {ref_cat_name} catalog", fontsize=15)
        plt.axhline(0, color="k", linestyle="--", alpha=0.3) # (0,0)
        plt.axvline(0, color="k", linestyle="--", alpha=0.3)
        plt.plot(ra_offsets_mean, dec_offsets_mean, marker="X", 
                 color="blue", label = "Mean", linestyle="") # mean
        plt.legend(fontsize=15)
        plt.rc("xtick",labelsize=14)
        plt.rc("ytick",labelsize=14)
        
        if not(source_offs_plotname):
            source_offs_plotname = image_file.replace(".fits", 
                                       "_source_offsets_astrometry.png")
        plt.savefig(source_offs_plotname, bbox_inches="tight")        
        plt.close()
    
    # plot the overall offset across the field 
    if plot_field_offsets:
        from scipy.ndimage import gaussian_filter
        # add offsets to a 2d array
        offsets_image = np.zeros(image_data.shape)
        for i in range(len(d2d)): 
            x = psf_sources[idx_image][i]["x_0"]
            y = psf_sources[idx_image][i]["y_0"]
            intx, inty = int(x), int(y)
            offsets_image[inty, intx] = d2d[i].value*3600.0    
        # apply a gaussian blur to visualize large-scale structure
        blur_sigma = gaussian_blur_sigma
        offsets_image_gaussian = gaussian_filter(offsets_image, blur_sigma)
        # rescale so the blurred map keeps the peak value of the unblurred offsets
        offsets_image_gaussian *= np.max(offsets_image)/np.max(offsets_image_gaussian)
        
        # plot
        if "WIRCam" in instrument:
            plt.figure(figsize=(10,9))
        else:
            plt.figure(figsize=(9,13))                
        ax = plt.subplot(projection=w)
        plt.imshow(offsets_image_gaussian, cmap="magma", 
                   interpolation="nearest", origin="lower")
        # textbox indicating the gaussian blur and mean separation
        textstr = r"Gaussian blur: $\sigma = %.1f$"%blur_sigma+"\n"
        textstr += r'$\overline{offset} = %.3f$"'%sep_mean
        box = dict(boxstyle="square", facecolor="white", alpha=0.8)
        if "WIRCam" in instrument:
            plt.text(0.6, 0.91, transform=ax.transAxes, s=textstr, 
                     bbox=box, fontsize=15)
        else:
            plt.text(0.44, 0.935, transform=ax.transAxes, s=textstr, 
                     bbox=box, fontsize=15)    
        plt.xlabel("RA (J2000)", fontsize=16)
        plt.ylabel("Dec (J2000)", fontsize=16)
        plt.title(f"Field offsets from {ref_cat_name} catalog", fontsize=15)
        ax.coords["ra"].set_ticklabel(size=15)
        ax.coords["dec"].set_ticklabel(size=15)
        
        if not(field_offs_plotname):
            field_offs_plotname = image_file.replace(".fits", 
                                       "_field_offsets_astrometry.png")
            
        plt.savefig(field_offs_plotname, bbox_inches="tight")        
        plt.close()
    
    # compute magnitude differences and zero point mean, median and error
    mag_offsets = ma.array(good_cat_sources[zp_filter+'mag'][idx_cat] - 
                  psf_sources['mag_fit'][idx_image])

    zp_mean, zp_med, zp_std = sigma_clipped_stats(mag_offsets)
    
    # add these to the header of the image file 
    f = fits.open(image_file, mode="update")
    f[0].header["ZP_MEAN"] = zp_mean
    f[0].header["ZP_MED"] = zp_med
    f[0].header["ZP_STD"] = zp_std
    f.close()
    
    # add a mag_calib and mag_calib_unc column to psf_sources
    mag_calib = psf_sources['mag_fit'] + zp_mean
    mag_calib.name = 'mag_calib'
    # propagate errors 
    mag_calib_unc = np.sqrt(psf_sources['mag_unc']**2 + zp_std**2)
    mag_calib_unc.name = 'mag_calib_unc'
    psf_sources['mag_calib'] = mag_calib
    psf_sources['mag_calib_unc'] = mag_calib_unc
    
    # add flag indicating if source is in a catalog and which catalog 
    in_cat = [i in idx_image for i in range(len(psf_sources))]
    in_cat_col = Column(data=in_cat, name="in_catalog")
    psf_sources[f"in {ref_cat_name}"] = in_cat_col
    
    # add new columns 
    nstars = len(psf_sources)
    col_filt = Column([filt for i in range(nstars)], "filter",
                       dtype = np.dtype("U2"))
    col_mjd = Column([t_MJD for i in range(nstars)], "MJD")
    psf_sources["filter"] = col_filt
    psf_sources["MJD"] = col_mjd
    
    # compute magnitude differences between catalog and calibration 
    # diagnostic for quality of zero point determination 
    sources_mags = psf_sources[idx_image]["mag_calib"]
    cat_mags = good_cat_sources[idx_cat][zp_filter+"mag"]
    mag_diff_mean = np.mean(sources_mags - cat_mags)
    print("\nMean difference between calibrated magnitudes and "+
          f"{ref_cat_name} magnitudes = {mag_diff_mean}")
    
    if write: # write the table of sources w calibrated mags, if desired
        if not(output):
            output = image_file.replace(".fits", "_PSF_photometry.fits")
        psf_sources.write(output, overwrite=True, format="ascii")    
        
    return psf_sources
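
A minimal sketch of the zero-point step used above, with sigma_clipped_stats applied to the difference between matched catalog and instrumental magnitudes (the arrays and the 0.05 mag error below are illustrative assumptions, not values from the pipeline):

import numpy as np
from astropy.stats import sigma_clipped_stats

cat_mag  = np.array([15.2, 16.1, 14.8, 15.9, 16.4])   # catalog magnitudes of matched stars (assumed)
inst_mag = np.array([-9.9, -9.0, -10.3, -9.2, -8.7])  # instrumental PSF-fit magnitudes (assumed)

# zero point = sigma-clipped mean of (catalog - instrumental)
zp_mean, zp_med, zp_std = sigma_clipped_stats(cat_mag - inst_mag)

mag_calib = inst_mag + zp_mean                        # calibrated magnitudes
mag_calib_unc = np.sqrt(0.05**2 + zp_std**2)          # assumed 0.05 mag instrumental error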
Beispiel #48
0
def search(image, headinfo, target_coords, syntax, catalog_syntax, filter_):
    """
    Search area around transient/target location in photometric catalogs

    Current catalogs (selectable in syntax):

        - Skymapper: Southern Hemisphere
        - Pan Starrs: North of declination -30 degrees
        - Apass: All-sky survey
        - 2MASS: JHK all sky survey

    Future:
        - SDSS: Future implementation
        - Ability to make custom catalog from different surveys


    Input:

        - Image: Numpy 2D array
        - headinfo: astropy.io.fits.header.Header
        - target_coords: astropy.coordinates.sky_coordinate.SkyCoord
        - syntax: dict
        - catalog_syntax: dict
        - filter_: str

    Output:

        - data:  pandas DataFrame

    """

    import warnings

    if not syntax['catalog_warnings'] or syntax['master_warnings']:
        warnings.filterwarnings("ignore")

    import numpy as np
    import os, sys
    import requests
    import pathlib
    import shutil
    import os.path
    import logging
    from functools import reduce
    import pandas as pd
    from autophot.packages.functions import gauss_sigma2fwhm, gauss_2d, gauss_fwhm2sigma

    from autophot.packages.functions import moffat_2d, moffat_fwhm

    from astropy.table import Table
    from astropy.wcs import wcs
    from astroquery.vizier import Vizier
    from astropy.io.votable import parse_single_table
    from astropy.coordinates import Angle

    # from autophot.packages.functions import pix_dist
    logger = logging.getLogger(__name__)

    try:

        # Get WCS information
        w1 = wcs.WCS(headinfo)

        # Radius around target
        radius = float(syntax['radius'])

        # Target name, if applicable
        target = syntax['target_name']

        # Get working directory location, create directory if needed
        dirname = os.path.join(syntax['wdir'], 'catalog_queries')
        pathlib.Path(dirname).mkdir(parents=True, exist_ok=True)
        '''
        Getting target RA and Dec

        - if target is None but an RA and Dec are given, create a new target name

        - if RA and Dec are not given, use the center of the image as the location - for a quick reduction of the image

        '''
        # if target or its RA/Dec is given - set target name
        if target == None:
            if syntax['target_ra'] != None and syntax['target_dec'] != None:
                target = 'target_ra_' + str(round(
                    syntax['target_ra'])) + '_dec_' + str(
                        round(syntax['target_dec']))
                logger.info('New target name: %s' % target)
            else:
                #  if not just call target
                target = 'target'

        # Search limitation with Pan Starrs limited to 0.5 deg
        if radius > 0.5 and syntax['catalog'] == 'pan_starrs':
            logger.warning(
                'Search Limitation with PanStarrs API -> Radius = 0.5 [deg] ')
            radius = 0.5

        # Catalog chosen in input.yml, create directory for catalog if needed
        catalog_dir = syntax['catalog']
        pathlib.Path(os.path.join(dirname, catalog_dir)).mkdir(parents=True,
                                                               exist_ok=True)

        # Folder for target, create directory if needed
        target_dir = reduce(
            os.path.join,
            [dirname, catalog_dir, target.lower()])
        pathlib.Path(target_dir).mkdir(parents=True, exist_ok=True)

        # Filename of fetched catalog
        fname = str(target) + '_r_' + str(radius)

        # Can force to use certain catalog - untested 03-10-19
        if syntax['force_catalog_csv']:
            logger.info('Using ' + syntax['force_catalog_csv_name'] +
                        ' as catalog')
            fname = str(syntax['force_catalog_csv_name']) + '_r_' + str(radius)

        # if syntax['catalog'] == 'custom':
        #     dir_name = os.path.join(syntax['wdir'],'catalog_queries')
        #     catalog_dir = syntax['catalog']
        #     target = syntax['target_name']
        #     target_dir =  dir_name + '/' + catalog_dir+'/'+target.lower()
        #     fname = str(target) + '_RAD_' + str(float(syntax['radius']))
        #     data =pd.read_csv(target_dir +'/'+ fname+'.csv')

        # If catalog set to custom
        if syntax['catalog'] == 'custom':
            target = syntax['target_name']
            fname = str(target) + '_RAD_' + str(float(syntax['radius']))

            if not syntax['catalog_custom_fpath']:
                logger.critical(
                    'Custom catalog selected but "catalog_custom_fpath" not defined'
                )
                exit()
            else:
                fname = syntax['catalog_custom_fpath']

            data = pd.read_csv(fname)

        # if catalog is found via its filename - use this and return data
        if os.path.isfile(os.path.join(target_dir, fname + '.csv')):
            logger.info(
                'Catalog found for Target: %s\nCatalog: %s \nFile: %s' %
                (target, str(catalog_dir).upper(), fname))
            data = Table.read(os.path.join(target_dir, fname + '.csv'),
                              format='csv')
            data = data.to_pandas()

        else:
            # If no previous catalog found - look for one
            logger.info('Searching for new catalog: %s ' % syntax['catalog'])

            if syntax['catalog'] in ['gaia']:

                import astropy.units as u
                from astroquery.gaia import Gaia
                import warnings
                warnings.filterwarnings('ignore')

                width = u.Quantity(radius, u.deg)
                height = u.Quantity(radius, u.deg)

                data = Gaia.query_object_async(coordinate=target_coords,
                                               width=width,
                                               height=height)

                data = data.to_pandas()
                data.to_csv(fname + '.csv', sep=',', index=False)

                # Move file to new location - 'catalog queries'
                shutil.move(os.path.join(os.getcwd(), fname + '.csv'),
                            os.path.join(target_dir, fname + '.csv'))

                warnings.filterwarnings('default')

            if syntax['catalog'] in ['apass', '2mass', 'sdss']:

                # No row limit
                Vizier.ROW_LIMIT = -1
                catalog_search = Vizier.query_region(target_coords,
                                                     radius=Angle(
                                                         radius, 'deg'),
                                                     catalog=syntax['catalog'])

                # Select first catalog from list
                data = catalog_search[0].to_pandas()
                data.to_csv(fname + '.csv', sep=',', index=False)

                # Move file to new location - 'catalog queries'
                shutil.move(os.path.join(os.getcwd(), fname + '.csv'),
                            os.path.join(target_dir, fname + '.csv'))

            # some catalogs need specific download path using 'requests'
            if syntax['catalog'] in ['pan_starrs', 'skymapper']:

                mindet = 1

                if syntax['catalog'] == 'pan_starrs':

                    server = ('https://archive.stsci.edu/' +
                              'panstarrs/search.php')
                    params = {
                        'RA': target_coords.ra.degree,
                        'DEC': target_coords.dec.degree,
                        'SR': radius,
                        'max_records': 10000,
                        'outputformat': 'VOTable',
                        'ndetections': ('>%d' % mindet)
                    }

                if syntax['catalog'] == 'skymapper':

                    server = (
                        'http://skymapper.anu.edu.au/sm-cone/public/query?')
                    params = {
                        'RA': target_coords.ra.degree,
                        'DEC': target_coords.dec.degree,
                        'SR': radius,
                        'RESPONSEFORMAT': 'VOTABLE'
                    }

                with open('temp.xml', "wb") as f:

                    logger.info('Downloading from %s' % syntax['catalog'])
                    response = requests.get(server, params=params)
                    f.write(response.content)

                # Parse local file into astropy.table object
                data = parse_single_table('temp.xml')

                # Delete temporary file
                os.remove('temp.xml')

                # Convert table to dataframe
                data_table = data.to_table(use_names_over_ids=True)
                data = data_table.to_pandas()

                # invalid entries in panstarrs are -999 - change to nans
                if syntax['catalog'] == 'pan_starrs':
                    data = data.replace(-999, np.nan)

                # No sources in field - temporary fix - will add "check different catalog"
                if len(data) == 0:
                    logging.critical('Catalog: %s : does not cover field' %
                                     syntax['catalog'])
                    sys.exit()

                # Save to csv and move to 'catalog_queries'
                data.to_csv(fname + '.csv', index=False)

                shutil.move(os.path.join(os.getcwd(), fname + '.csv'),
                            os.path.join(target_dir, fname + '.csv'))

        # Add in x and y pixel locations under wcs
        x_pix, y_pix = w1.wcs_world2pix(data[catalog_syntax['RA']],
                                        data[catalog_syntax['DEC']], 1)

        data.insert(loc=5, column='x_pix', value=x_pix)
        data.insert(loc=6, column='y_pix', value=y_pix)

        # Remove boundary sources
        data = data[data.x_pix < image.shape[1] - syntax['pix_bound']]
        data = data[data.x_pix > syntax['pix_bound']]
        data = data[data.y_pix < image.shape[0] - syntax['pix_bound']]
        data = data[data.y_pix > syntax['pix_bound']]

        logger.info('Catalog length: %d' % len(data))

        warnings.filterwarnings("default")

    except Exception as e:
        logger.exception(e)
        data = None

    return data
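
A minimal sketch of the query-and-project pattern used in search() above: fetch a catalog around a position with astroquery's Vizier and convert the returned sky coordinates to pixel coordinates with the image WCS (the file name, position, and column names are illustrative assumptions):

from astropy.io import fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord, Angle
from astroquery.vizier import Vizier

header = fits.getheader('image.fits')                 # assumed image with a valid WCS
w = WCS(header)

Vizier.ROW_LIMIT = -1                                 # no row limit
pos = SkyCoord(150.0, 2.2, unit='deg', frame='icrs')  # assumed field center
result = Vizier.query_region(pos, radius=Angle(0.25, 'deg'), catalog='apass')

data = result[0].to_pandas()                          # first returned table as a DataFrame
x_pix, y_pix = w.wcs_world2pix(data['RAJ2000'], data['DEJ2000'], 1)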
Beispiel #49
0
def findfriends(targname,radial_velocity,velocity_limit=5.0,search_radius=25.0,rvcut=5.0,radec=[None,None],output_directory = None,showplots=False,verbose=False,DoGALEX=True,DoWISE=True,DoROSAT=True):
    
    radvel= radial_velocity * u.kilometer / u.second
    
    if output_directory == None:
        outdir = './' + targname.replace(" ", "") + '_friends/'
    else: 
        outdir = output_directory
    if os.path.isdir(outdir) == True:
        print('Output directory ' + outdir +' Already Exists!!')
        print('Either Move it, Delete it, or input a different [output_directory] Please!')
        return
    os.mkdir(outdir)
    
    if velocity_limit < 0.00001 : 
        print('input velocity_limit is too small, try something else')
        print('velocity_limit: ' + str(velocity_limit))
    if search_radius < 0.0000001:
        print('input search_radius is too small, try something else')
        print('search_radius: ' + str(search_radius))
     
    # Search parameters
    vlim=velocity_limit * u.kilometer / u.second
    searchradpc=search_radius * u.parsec

    if (radec[0] != None) & (radec[1] != None):
        usera,usedec = radec[0],radec[1]
    else:  ##use the target name to get simbad ra and dec.
        print('Asking Simbad for RA and DEC')
        result_table = Simbad.query_object(targname)
        usera,usedec = result_table['RA'][0],result_table['DEC'][0]
    
    if verbose == True:
        print('Target name: ',targname)
        print('Coordinates: ' + str(usera) +' '+str(usedec))
        print()

    c = SkyCoord( ra=usera , dec=usedec , unit=(u.hourangle, u.deg) , frame='icrs')
    if verbose == True: print(c)

    # Find precise coordinates and distance from Gaia, define search radius and parallax cutoff
    print('Asking Gaia for precise coordinates')
    sqltext = "SELECT * FROM gaiaedr3.gaia_source WHERE CONTAINS( \
               POINT('ICRS',gaiaedr3.gaia_source.ra,gaiaedr3.gaia_source.dec), \
               CIRCLE('ICRS'," + str(c.ra.value) +","+ str(c.dec.value) +","+ str(6.0/3600.0) +"))=1;"
    job = Gaia.launch_job_async(sqltext , dump_to_file=False)
    Pgaia = job.get_results()
    if verbose == True:
        print(sqltext)
        print()
        print(Pgaia['source_id','ra','dec','phot_g_mean_mag','parallax','ruwe'].pprint_all())
        print()

    minpos = Pgaia['phot_g_mean_mag'].tolist().index(min(Pgaia['phot_g_mean_mag']))
    Pcoord = SkyCoord( ra=Pgaia['ra'][minpos]*u.deg , dec=Pgaia['dec'][minpos]*u.deg , \
                      distance=(1000.0/Pgaia['parallax'][minpos])*u.parsec , frame='icrs' , \
                      radial_velocity=radvel , \
                      pm_ra_cosdec=Pgaia['pmra'][minpos]*u.mas/u.year , pm_dec=Pgaia['pmdec'][minpos]*u.mas/u.year )

    searchraddeg = np.arcsin(searchradpc/Pcoord.distance).to(u.deg)
    minpar = (1000.0 * u.parsec) / (Pcoord.distance + searchradpc) * u.mas
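    # searchraddeg is the angle subtended by the physical search radius at the target's
    # distance; minpar is the parallax of a star at (distance + search radius), so any
    # source with a smaller parallax lies behind the far edge of the search sphere.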
    if verbose == True:
        print(Pcoord)
        print()
        print('Search radius in deg: ',searchraddeg)
        print('Minimum parallax: ',minpar)


    # Query Gaia with search radius and parallax cut
    # Note, a cut on parallax_error was added because searches at low galactic latitude 
    # return an overwhelming number of noisy sources that scatter into the search volume - ALK 20210325
    print('Querying Gaia for neighbors')

    Pllbb     = bc.radec_to_lb(Pcoord.ra.value , Pcoord.dec.value , degree=True)
    if ( np.abs(Pllbb[1]) > 10.0): plxcut = max( 0.5 , (1000.0/Pcoord.distance.value/10.0) )
    else: plxcut = 0.5
    print('Parallax cut: ',plxcut)

    if (searchradpc < Pcoord.distance):
        sqltext = "SELECT * FROM gaiaedr3.gaia_source WHERE CONTAINS( \
            POINT('ICRS',gaiaedr3.gaia_source.ra,gaiaedr3.gaia_source.dec), \
            CIRCLE('ICRS'," + str(Pcoord.ra.value) +","+ str(Pcoord.dec.value) +","+ str(searchraddeg.value) +"))\
            =1 AND parallax>" + str(minpar.value) + " AND parallax_error<" + str(plxcut) + ";"
    if (searchradpc >= Pcoord.distance):
        sqltext = "SELECT * FROM gaiaedr3.gaia_source WHERE parallax>" + str(minpar.value) + " AND parallax_error<" + str(plxcut) + ";"
        print('Note, using all-sky search')
    if verbose == True:
        print(sqltext)
        print()

    job = Gaia.launch_job_async(sqltext , dump_to_file=False)
    r = job.get_results()
   
    if verbose == True: print('Number of records: ',len(r['ra']))


    # Construct coordinates array for all stars returned in cone search

    gaiacoord = SkyCoord( ra=r['ra'] , dec=r['dec'] , distance=(1000.0/r['parallax'])*u.parsec , \
                         frame='icrs' , \
                         pm_ra_cosdec=r['pmra'] , pm_dec=r['pmdec'] )

    sep = gaiacoord.separation(Pcoord)
    sep3d = gaiacoord.separation_3d(Pcoord)

    if verbose == True:
        print('Printing angular separations in degrees as sanity check')
        print(sep.degree)



    Pllbb     = bc.radec_to_lb(Pcoord.ra.value , Pcoord.dec.value , degree=True)
    Ppmllpmbb = bc.pmrapmdec_to_pmllpmbb( Pcoord.pm_ra_cosdec.value , Pcoord.pm_dec.value , \
                                         Pcoord.ra.value , Pcoord.dec.value , degree=True )
    Pvxvyvz   = bc.vrpmllpmbb_to_vxvyvz(Pcoord.radial_velocity.value , Ppmllpmbb[0] , Ppmllpmbb[1] , \
                                   Pllbb[0] , Pllbb[1] , Pcoord.distance.value/1000.0 , XYZ=False , degree=True)

    if verbose == True:
        print('Science Target Name: ',targname)
        print('Science Target RA/DEC: ',Pcoord.ra.value,Pcoord.dec.value)
        print('Science Target Galactic Coordinates: ',Pllbb)
        print('Science Target UVW: ',Pvxvyvz)
        print()

    Gllbb = bc.radec_to_lb(gaiacoord.ra.value , gaiacoord.dec.value , degree=True)
    Gxyz = bc.lbd_to_XYZ( Gllbb[:,0] , Gllbb[:,1] , gaiacoord.distance/1000.0 , degree=True)
    Gvrpmllpmbb = bc.vxvyvz_to_vrpmllpmbb( \
                    Pvxvyvz[0]*np.ones(len(Gxyz[:,0])) , Pvxvyvz[1]*np.ones(len(Gxyz[:,1])) , Pvxvyvz[2]*np.ones(len(Gxyz[:,2])) , \
                    Gxyz[:,0] , Gxyz[:,1] , Gxyz[:,2] , XYZ=True)
    Gpmrapmdec = bc.pmllpmbb_to_pmrapmdec( Gvrpmllpmbb[:,1] , Gvrpmllpmbb[:,2] , Gllbb[:,0] , Gllbb[:,1] , degree=True)

    # Code in case I want to do chi^2 cuts someday
    Gvtanerr = 1.0 * np.ones(len(Gxyz[:,0]))
    Gpmerr = Gvtanerr * 206265000.0 * 3.154e7 / (gaiacoord.distance.value * 3.086e13)
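    # Gpmerr is the proper motion (in mas/yr) produced by a 1 km/s tangential velocity at
    # each star's distance (206265000 mas/rad * 3.154e7 s/yr / distance in km), so the
    # Gchi2 quantity below is effectively a tangential velocity difference in km/s.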


    Gchi2 = ( (Gpmrapmdec[:,0]-gaiacoord.pm_ra_cosdec.value)**2 + (Gpmrapmdec[:,1]-gaiacoord.pm_dec.value)**2 )**0.5
    Gchi2 = Gchi2 / Gpmerr
    if verbose == True:
        print('Predicted PMs if comoving:')
        print(Gpmrapmdec , "\n")
        print('Actual PMRAs from Gaia:')
        print(gaiacoord.pm_ra_cosdec.value , "\n")
        print('Actual PMDECs from Gaia:')
        print(gaiacoord.pm_dec.value , "\n")
        print('Predicted PM errors:')
        print(Gpmerr , "\n")
        print('Chi^2 values:')
        print(Gchi2)


    # Query external list(s) of RVs

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) )
    yy = zz[0][np.argsort(sep3d[zz])]
    
    RV    = np.empty(np.array(r['ra']).size)
    RVerr = np.empty(np.array(r['ra']).size)
    RVsrc = np.array([ '                             None' for x in range(np.array(r['ra']).size) ])
    RV[:]    = np.nan
    RVerr[:] = np.nan

    print('Populating RV table')
    for x in range(0 , np.array(yy).size):
        if np.isnan(r['dr2_radial_velocity'][yy[x]]) == False:        # First copy over DR2 RVs
            RV[yy[x]]    = r['dr2_radial_velocity'][yy[x]]
            RVerr[yy[x]] = r['dr2_radial_velocity_error'][yy[x]]
            RVsrc[yy[x]] = 'Gaia DR2'
    if os.path.isfile('LocalRV.csv'):
        with open('LocalRV.csv') as csvfile:                          # Now check for a local RV that would supersede
            readCSV = csv.reader(csvfile, delimiter=',')
            for row in readCSV:
                ww = np.where(r['designation'] == row[0])[0]
                if (np.array(ww).size == 1):
                    RV[ww]    = row[2]
                    RVerr[ww] = row[3]
                    RVsrc[ww] = row[4]
                    if verbose == True: 
                        print('Using stored RV: ',row)
                        print(r['ra','dec','phot_g_mean_mag'][ww])
                        print(RV[ww])
                        print(RVerr[ww])
                        print(RVsrc[ww])



    # Create Gaia CMD plot

    mamajek  = np.loadtxt(datapath+'/sptGBpRp.txt')
    pleiades = np.loadtxt(datapath+'/PleGBpRp.txt')
    tuchor   = np.loadtxt(datapath+'/TucGBpRp.txt')
    usco     = np.loadtxt(datapath+'/UScGBpRp.txt')
    chai     = np.loadtxt(datapath+'/ChaGBpRp.txt')

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (np.isnan(r['bp_rp']) == False) ) # Note, this causes an error because NaNs
    yy = zz[0][np.argsort(sep3d[zz])]
    zz2= np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) & \
                 (r['phot_bp_rp_excess_factor'] < (1.3 + 0.06*r['bp_rp']**2)) & \
                 (np.isnan(r['bp_rp']) == False) )                                                              # Note, this causes an error because NaNs
    yy2= zz2[0][np.argsort((-Gchi2)[zz2])]


    figname=outdir + targname.replace(" ", "") + "cmd.png"
    if verbose == True: print(figname)

    fig,ax1 = plt.subplots(figsize=(12,8))

    ax1.axis([ math.floor(min(r['bp_rp'][zz])) , \
               math.ceil(max(r['bp_rp'][zz])), \
               math.ceil(max((r['phot_g_mean_mag'][zz] - (5.0*np.log10(gaiacoord.distance[zz].value)-5.0))))+1, \
               math.floor(min((r['phot_g_mean_mag'][zz] - (5.0*np.log10(gaiacoord.distance[zz].value)-5.0))))-1 ] )
    ax1.set_xlabel(r'$B_p-R_p$ (mag)' , fontsize=16)
    ax1.set_ylabel(r'$M_G$ (mag)' , fontsize=16)
    ax1.tick_params(axis='both',which='major',labelsize=12)

    ax2 = ax1.twiny()
    ax2.set_xlim(ax1.get_xlim())
    spttickvals = np.array([ -0.037 , 0.377 , 0.782 , 0.980 , 1.84 , 2.50 , 3.36 , 4.75 ])
    sptticklabs = np.array([ 'A0' , 'F0' , 'G0' , 'K0' , 'M0' , 'M3' , 'M5' , 'M7' ])
    xx = np.where( (spttickvals >= math.floor(min(r['bp_rp'][zz]))) & (spttickvals <= math.ceil(max(r['bp_rp'][zz]))) )[0]
    ax2.set_xticks(spttickvals[xx])
    ax2.set_xticklabels( sptticklabs[xx] )
    ax2.set_xlabel('SpT' , fontsize=16, labelpad=15)
    ax2.tick_params(axis='both',which='major',labelsize=12)

    ax1.plot(    chai[:,1] ,     chai[:,0]  , zorder=1 , label='Cha-I (0-5 Myr)')
    ax1.plot(    usco[:,1] ,     usco[:,0]  , zorder=2 , label='USco (11 Myr)')
    ax1.plot(  tuchor[:,1] ,   tuchor[:,0]  , zorder=3 , label='Tuc-Hor (40 Myr)')
    ax1.plot(pleiades[:,1] , pleiades[:,0]  , zorder=4 , label='Pleiades (125 Myr)')
    ax1.plot( mamajek[:,2] ,  mamajek[:,1]  , zorder=5 , label='Mamajek MS')

    for x in range(0 , np.array(yy2).size):
        msize  = (17-12.0*(sep3d[yy2[x]].value/searchradpc.value))**2
        mcolor = Gchi2[yy2[x]]
        medge  = 'black'
        mzorder= 7
        if (r['ruwe'][yy2[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy2[x]] >= 1.2):
            mshape='s'
        if (np.isnan(rvcut) == False): 
            if (np.isnan(RV[yy2[x]])==False) & (np.abs(RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) > rvcut):
                mshape='+'
                mcolor='black'
                mzorder=6
            if (np.isnan(RV[yy2[x]])==False) & (np.abs(RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) <= rvcut):
                medge='blue'

        ccc = ax1.scatter(r['bp_rp'][yy2[x]] , (r['phot_g_mean_mag'][yy2[x]] - (5.0*np.log10(gaiacoord.distance[yy2[x]].value)-5.0)) , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )

    temp1 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='o' , s=12**2 , label = 'RUWE < 1.2')
    temp2 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='s' , s=12**2 , label = 'RUWE >= 1.2')
    temp3 = ax1.scatter([] , [] , c='white' , edgecolors='blue' , marker='o' , s=12**2 , label = 'RV Comoving')
    temp4 = ax1.scatter([] , [] , c='black' , marker='+' , s=12**2 , label = 'RV Outlier')

    ax1.plot(r['bp_rp'][yy[0]] , (r['phot_g_mean_mag'][yy[0]] - (5.0*np.log10(gaiacoord.distance[yy[0]].value)-5.0)) , \
             'rx' , markersize=18 , mew=3 , markeredgecolor='red' , zorder=10 , label=targname)

    ax1.arrow( 1.3 , 2.5 , 0.374, 0.743 , length_includes_head=True , head_width=0.07 , head_length = 0.10 )
    ax1.text(  1.4 , 2.3, r'$A_V=1$' , fontsize=12)



    ax1.legend(fontsize=11)
    cb = plt.colorbar(ccc , ax=ax1)
    cb.set_label(label='Velocity Difference (km/s)',fontsize=14)
    plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)
    if showplots == True: plt.show()
    plt.close('all')


    # Create PM plot


    zz2= np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) )
    yy2= zz2[0][np.argsort((-Gchi2)[zz2])]
    zz3= np.where( (sep3d.value < searchradpc.value) & (sep.degree > 0.00001) )

    figname=outdir + targname.replace(" ", "") + "pmd.png"

    fig,ax1 = plt.subplots(figsize=(12,8))

    ax1.axis([ (max(r['pmra'][zz2]) + 0.05*np.ptp(r['pmra'][zz2]) ) , \
           (min(r['pmra'][zz2]) - 0.05*np.ptp(r['pmra'][zz2]) ) , \
           (min(r['pmdec'][zz2])- 0.05*np.ptp(r['pmra'][zz2]) ) , \
           (max(r['pmdec'][zz2])+ 0.05*np.ptp(r['pmra'][zz2]) ) ] )
    ax1.tick_params(axis='both',which='major',labelsize=16)

    if  ((max(r['pmra'][zz2]) + 0.05*np.ptp(r['pmra'][zz2])) > 0.0) & \
            ((min(r['pmra'][zz2]) - 0.05*np.ptp(r['pmra'][zz2])) < 0.0) & \
            ((min(r['pmdec'][zz2])- 0.05*np.ptp(r['pmra'][zz2])) < 0.0) & \
            ((max(r['pmdec'][zz2])+ 0.05*np.ptp(r['pmra'][zz2])) > 0.0):
        ax1.plot( [0.0,0.0] , [-1000.0,1000.0] , 'k--' , linewidth=1 )
        ax1.plot( [-1000.0,1000.0] , [0.0,0.0] , 'k--' , linewidth=1 )

    ax1.errorbar( (r['pmra'][yy2]) , (r['pmdec'][yy2]) , \
            yerr=(r['pmdec_error'][yy2]) , xerr=(r['pmra_error'][yy2]) , fmt='none' , ecolor='k' )

    ax1.scatter( (r['pmra'][zz3]) , (r['pmdec'][zz3]) , \
              s=(0.5)**2 , marker='o' , c='black' , zorder=2 , label='Field' )

    for x in range(0 , np.array(yy2).size):
        msize  = (17-12.0*(sep3d[yy2[x]].value/searchradpc.value))**2
        mcolor = Gchi2[yy2[x]]
        medge  = 'black'
        mzorder= 7
        if (r['ruwe'][yy2[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy2[x]] >= 1.2):
            mshape='s'
        if (np.isnan(rvcut) == False): 
            if (np.isnan(RV[yy2[x]])==False) & (np.abs(RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) > rvcut):
                mshape='+'
                mcolor='black'
                mzorder=6
            if (np.isnan(RV[yy2[x]])==False) & (np.abs(RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) <= rvcut):
                medge='blue'
        ccc = ax1.scatter(r['pmra'][yy2[x]] , r['pmdec'][yy2[x]] , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )

    temp1 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='o' , s=12**2 , label = 'RUWE < 1.2')
    temp2 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='s' , s=12**2 , label = 'RUWE >= 1.2')
    temp3 = ax1.scatter([] , [] , c='white' , edgecolors='blue' , marker='o' , s=12**2 , label = 'RV Comoving')
    temp4 = ax1.scatter([] , [] , c='black' , marker='+' , s=12**2 , label = 'RV Outlier')

    ax1.plot( Pgaia['pmra'][minpos] , Pgaia['pmdec'][minpos] , \
         'rx' , markersize=18 , mew=3 , markeredgecolor='red' , zorder=3 , label=targname)

    ax1.set_xlabel(r'$\mu_{RA}$ (mas/yr)' , fontsize=22 , labelpad=10)
    ax1.set_ylabel(r'$\mu_{DEC}$ (mas/yr)' , fontsize=22 , labelpad=10)
    ax1.legend(fontsize=12)

    cb = plt.colorbar(ccc , ax=ax1)
    cb.set_label(label='Tangential Velocity Difference (km/s)',fontsize=18 , labelpad=10)
    plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)
    if showplots == True: plt.show()
    plt.close('all')


    # Create RV plot

    zz2= np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) & \
             (np.isnan(RV) == False) )
    yy2= zz2[0][np.argsort((-Gchi2)[zz2])]

    zz3= np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) & \
             (np.isnan(RV) == False) & (np.isnan(r['phot_g_mean_mag']) == False) & \
             (np.abs(RV-Gvrpmllpmbb[:,0]) < 20.0) ) # Just to set Y axis

    fig,ax1 = plt.subplots(figsize=(12,8))
    ax1.axis([ -20.0 , +20.0, \
           max( np.append( np.array(r['phot_g_mean_mag'][zz3] - (5.0*np.log10(gaiacoord.distance[zz3].value)-5.0)) ,  0.0 )) + 0.3 , \
           min( np.append( np.array(r['phot_g_mean_mag'][zz3] - (5.0*np.log10(gaiacoord.distance[zz3].value)-5.0)) , 15.0 )) - 0.3   ])
    ax1.tick_params(axis='both',which='major',labelsize=16)

    ax1.plot( [0.0,0.0] , [-20.0,25.0] , 'k--' , linewidth=1 )

    ax1.errorbar( (RV[yy2]-Gvrpmllpmbb[yy2,0]) , \
           (r['phot_g_mean_mag'][yy2] - (5.0*np.log10(gaiacoord.distance[yy2].value)-5.0)) , \
            yerr=None,xerr=(RVerr[yy2]) , fmt='none' , ecolor='k' )

    for x in range(0 , np.array(yy2).size):
        msize  = (17-12.0*(sep3d[yy2[x]].value/searchradpc.value))**2
        mcolor = Gchi2[yy2[x]]
        medge  = 'black'
        mzorder= 2
        if (r['ruwe'][yy2[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy2[x]] >= 1.2):
            mshape='s'
        ccc = ax1.scatter( (RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) , \
                (r['phot_g_mean_mag'][yy2[x]] - (5.0*np.log10(gaiacoord.distance[yy2[x]].value)-5.0)) , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )

    temp1 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='o' , s=12**2 , label = 'RUWE < 1.2')
    temp2 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='s' , s=12**2 , label = 'RUWE >= 1.2')
    temp3 = ax1.scatter([] , [] , c='white' , edgecolors='blue' , marker='o' , s=12**2 , label = 'RV Comoving')

    if ( (Pgaia['phot_g_mean_mag'][minpos] - (5.0*np.log10(Pcoord.distance.value)-5.0)) < \
                                     (max( np.append( np.array(r['phot_g_mean_mag'][zz3] - (5.0*np.log10(gaiacoord.distance[zz3].value)-5.0)) , 0.0 )) + 0.3) ):
        ax1.plot( [0.0] , (Pgaia['phot_g_mean_mag'][minpos] - (5.0*np.log10(Pcoord.distance.value)-5.0)) , \
                  'rx' , markersize=18 , mew=3 , markeredgecolor='red' , zorder=3 , label=targname)


    ax1.set_ylabel(r'$M_G$ (mag)' , fontsize=22 , labelpad=10)
    ax1.set_xlabel(r'$v_{r,obs}-v_{r,pred}$ (km/s)' , fontsize=22 , labelpad=10)
    ax1.legend(fontsize=12)

    cb = plt.colorbar(ccc , ax=ax1)
    cb.set_label(label='Tangential Velocity Difference (km/s)',fontsize=18 , labelpad=10)

    figname=outdir + targname.replace(" ", "") + "drv.png"
    plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)
    if showplots == True: plt.show()
    plt.close('all')



    
    # Create XYZ plot

    Pxyz = bc.lbd_to_XYZ( Pllbb[0] , Pllbb[1] , Pcoord.distance.value/1000.0 , degree=True)

    fig,axs = plt.subplots(2,2)
    fig.set_figheight(16)
    fig.set_figwidth(16)
    fig.subplots_adjust(hspace=0.03,wspace=0.03)

    zz2= np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) )
    yy2= zz2[0][np.argsort((-Gchi2)[zz2])]

    for x in range(0 , np.array(yy2).size):
        msize  = (17-12.0*(sep3d[yy2[x]].value/searchradpc.value))**2
        mcolor = Gchi2[yy2[x]]
        medge  = 'black'
        mzorder= 3
        if (r['ruwe'][yy2[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy2[x]] >= 1.2):
            mshape='s'
        if (np.isnan(rvcut) == False): 
            if (np.isnan(RV[yy2[x]])==False) & (np.abs(RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) > rvcut):
                mshape='+'
                mcolor='black'
                mzorder=2
            if (np.isnan(RV[yy2[x]])==False) & (np.abs(RV[yy2[x]]-Gvrpmllpmbb[yy2[x],0]) <= rvcut):
                medge='blue'
        ccc = axs[0,0].scatter( 1000.0*Gxyz[yy2[x],0] , 1000.0*Gxyz[yy2[x],1] , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )
        ccc = axs[0,1].scatter( 1000.0*Gxyz[yy2[x],2] , 1000.0*Gxyz[yy2[x],1] , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )
        ccc = axs[1,0].scatter( 1000.0*Gxyz[yy2[x],0] , 1000.0*Gxyz[yy2[x],2] , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )

    temp1 = axs[0,0].scatter([] , [] , c='white' , edgecolors='black', marker='o' , s=12**2 , label = 'RUWE < 1.2')
    temp2 = axs[0,0].scatter([] , [] , c='white' , edgecolors='black', marker='s' , s=12**2 , label = 'RUWE >= 1.2')
    temp3 = axs[0,0].scatter([] , [] , c='white' , edgecolors='blue' , marker='o' , s=12**2 , label = 'RV Comoving')
    temp4 = axs[0,0].scatter([] , [] , c='black' , marker='+' , s=12**2 , label = 'RV Outlier')

    axs[0,0].plot( 1000.0*Pxyz[0] , 1000.0*Pxyz[1] , 'rx' , markersize=18 , mew=3 , markeredgecolor='red')
    axs[0,1].plot( 1000.0*Pxyz[2] , 1000.0*Pxyz[1] , 'rx' , markersize=18 , mew=3 , markeredgecolor='red')
    axs[1,0].plot( 1000.0*Pxyz[0] , 1000.0*Pxyz[2] , 'rx' , markersize=18 , mew=3 , markeredgecolor='red' , zorder=1 , label = targname)

    axs[0,0].set_xlim( [1000.0*Pxyz[0]-(search_radius+1.0) , 1000.0*Pxyz[0]+(search_radius+1.0)] )
    axs[0,0].set_ylim( [1000.0*Pxyz[1]-(search_radius+1.0) , 1000.0*Pxyz[1]+(search_radius+1.0)] )
    axs[0,1].set_xlim( [1000.0*Pxyz[2]-(search_radius+1.0) , 1000.0*Pxyz[2]+(search_radius+1.0)] )
    axs[0,1].set_ylim( [1000.0*Pxyz[1]-(search_radius+1.0) , 1000.0*Pxyz[1]+(search_radius+1.0)] )
    axs[1,0].set_xlim( [1000.0*Pxyz[0]-(search_radius+1.0) , 1000.0*Pxyz[0]+(search_radius+1.0)] )
    axs[1,0].set_ylim( [1000.0*Pxyz[2]-(search_radius+1.0) , 1000.0*Pxyz[2]+(search_radius+1.0)] )
    
    axs[0,0].set_xlabel(r'$X$ (pc)',fontsize=20,labelpad=10)
    axs[0,0].set_ylabel(r'$Y$ (pc)',fontsize=20,labelpad=10)

    axs[1,0].set_xlabel(r'$X$ (pc)',fontsize=20,labelpad=10)
    axs[1,0].set_ylabel(r'$Z$ (pc)',fontsize=20,labelpad=10)

    axs[0,1].set_xlabel(r'$Z$ (pc)',fontsize=20,labelpad=10)
    axs[0,1].set_ylabel(r'$Y$ (pc)',fontsize=20,labelpad=10)

    axs[0,0].xaxis.set_ticks_position('top')
    axs[0,1].xaxis.set_ticks_position('top')
    axs[0,1].yaxis.set_ticks_position('right')

    axs[0,0].xaxis.set_label_position('top')
    axs[0,1].xaxis.set_label_position('top')
    axs[0,1].yaxis.set_label_position('right')

    for aa in [0,1]:
        for bb in [0,1]:
            axs[aa,bb].tick_params(top=True,bottom=True,left=True,right=True,direction='in',labelsize=18)

    fig.delaxes(axs[1][1])
    strsize = 26
    if (len(targname) > 12.0): strsize = np.floor(24 / (len(targname)/14.5))
    fig.legend( bbox_to_anchor=(0.92,0.37) , prop={'size':strsize})

    cbaxes = fig.add_axes([0.55,0.14,0.02,0.34])
    cb = plt.colorbar( ccc , cax=cbaxes )
    cb.set_label( label='Velocity Difference (km/s)' , fontsize=24 , labelpad=20 )
    cb.ax.tick_params(labelsize=18)

    figname=outdir + targname.replace(" ", "") + "xyz.png"
    plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)

    if showplots == True: plt.show()
    plt.close('all')



    # Create sky map
    # Hacked from cartopy.mpl.gridliner
    _DEGREE_SYMBOL = u'\u00B0'
    def _east_west_formatted(longitude, num_format='g'):
        fmt_string = u'{longitude:{num_format}}{degree}'
        return fmt_string.format(longitude=(longitude if (longitude >= 0) else (longitude + 360)) , \
                                            num_format=num_format,degree=_DEGREE_SYMBOL)
    def _north_south_formatted(latitude, num_format='g'):
        fmt_string = u'{latitude:{num_format}}{degree}'
        return fmt_string.format(latitude=latitude, num_format=num_format,degree=_DEGREE_SYMBOL)
    LONGITUDE_FORMATTER = mticker.FuncFormatter(lambda v, pos:
                                                _east_west_formatted(v))
    LATITUDE_FORMATTER = mticker.FuncFormatter(lambda v, pos:
                                               _north_south_formatted(v))

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) )
    yy = zz[0][np.argsort((-Gchi2)[zz])]

    searchcircle = Pcoord.directional_offset_by( (np.arange(0,360)*u.degree) , searchraddeg*np.ones(360))
    circleRA = searchcircle.ra.value
    circleDE = searchcircle.dec.value
    ww = np.where(circleRA > 180.0)
    circleRA[ww] = circleRA[ww] - 360.0

    RAlist = gaiacoord.ra[yy].value
    DElist = gaiacoord.dec[yy].value
    ww = np.where( RAlist > 180.0 )
    RAlist[ww] = RAlist[ww] - 360.0

    polelat = ((Pcoord.dec.value+90) if (Pcoord.dec.value<0) else (90-Pcoord.dec.value))
    polelong= (Pcoord.ra.value if (Pcoord.dec.value<0.0) else (Pcoord.ra.value+180.0))
    polelong= (polelong if polelong < 180 else polelong - 360.0)
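    # The rotated pole is placed 90 degrees from the science target, so the target
    # sits on the rotated equator, near the centre of the sky plot drawn below.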

    if verbose == True:
        print('Alignment variables: ',polelat,polelong,Pcoord.ra.value)
        print(Pcoord.dec.value+searchraddeg.value)
    rotated_pole = ccrs.RotatedPole( \
        pole_latitude=polelat , \
        pole_longitude=polelong , \
        central_rotated_longitude=90.0 )#\
    #    (Pcoord.ra.value if (Pcoord.dec.value > 0.0) else (Pcoord.ra.value+180.0)) )

    fig = plt.figure(figsize=(8,8))
    ax = fig.add_subplot(1, 1, 1, projection=rotated_pole)

    ax.gridlines(draw_labels=True,x_inline=True,y_inline=True, \
                 xformatter=LONGITUDE_FORMATTER,yformatter=LATITUDE_FORMATTER)
    ax.plot( circleRA , circleDE , c="gray" , ls="--" , transform=ccrs.Geodetic())
    
    figname=outdir + targname.replace(" ", "") + "sky.png"

    base=plt.cm.get_cmap('cubehelix')

    for x in range(0 , np.array(yy).size):
        msize  = (17-12.0*(sep3d[yy[x]].value/searchradpc.value))
        mcolor = base(Gchi2[yy[x]]/vlim.value)
        medge  = 'black'
        mzorder= 3
        if (r['ruwe'][yy[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy[x]] >= 1.2):
            mshape='s'
        if (np.isnan(rvcut) == False): 
            if (np.isnan(RV[yy[x]])==False) & (np.abs(RV[yy[x]]-Gvrpmllpmbb[yy[x],0]) > rvcut):
                mshape='+'
                mcolor='black'
                mzorder=2
            if (np.isnan(RV[yy[x]])==False) & (np.abs(RV[yy[x]]-Gvrpmllpmbb[yy[x],0]) <= rvcut):
                medge='blue'
        ccc = ax.plot( RAlist[x] , DElist[x] , marker=mshape ,  \
                markeredgecolor=medge , ms = msize , mfc = mcolor , transform=ccrs.Geodetic() )
        
    ax.plot( (Pcoord.ra.value-360.0) , Pcoord.dec.value , \
            'rx' , markersize=18 , mew=3 , transform=ccrs.Geodetic())

    plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)
    
    if showplots == True: plt.show()
    plt.close('all')

    ## Query GALEX and 2MASS data

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) )
    yy = zz[0][np.argsort((-Gchi2)[zz])]
    
    NUVmag = np.empty(np.array(r['ra']).size)
    NUVerr = np.empty(np.array(r['ra']).size)
    NUVmag[:] = np.nan
    NUVerr[:] = np.nan

    print('Searching on neighbors in GALEX')
    ##suppress the stupid noresultswarning from the catalogs package
    warnings.filterwarnings("ignore",category=NoResultsWarning)

    for x in range(0 , np.array(yy).size):
        querystring=((str(gaiacoord.ra[yy[x]].value) if (gaiacoord.ra[yy[x]].value > 0) \
                      else str(gaiacoord.ra[yy[x]].value+360.0)) + " " + str(gaiacoord.dec[yy[x]].value))
        print('GALEX query ',x,' of ',np.array(yy).size, end='\r')
        if verbose == True: print('GALEX query ',x,' of ',np.array(yy).size)
        if verbose == True: print(querystring)
        if (DoGALEX == True): 
            galex = Catalogs.query_object(querystring , catalog="Galex" , radius=0.0028 , TIMEOUT=600)
            if ((np.where(galex['nuv_magerr'] > 0.0)[0]).size > 0):
                ww = np.where( (galex['nuv_magerr'] == min(galex['nuv_magerr'][np.where(galex['nuv_magerr'] > 0.0)])))
                NUVmag[yy[x]] = galex['nuv_mag'][ww][0]
                NUVerr[yy[x]] = galex['nuv_magerr'][ww][0]
                if verbose == True: print(galex['distance_arcmin','ra','nuv_mag','nuv_magerr'][ww])

        
    Jmag = np.empty(np.array(r['ra']).size)
    Jerr = np.empty(np.array(r['ra']).size)
    Jmag[:] = np.nan
    Jerr[:] = np.nan

    print('Searching on neighbors in 2MASS')

    for x in range(0 , np.array(yy).size):
        if ( np.isnan(NUVmag[yy[x]]) == False ):
            querycoord = SkyCoord((str(gaiacoord.ra[yy[x]].value) if (gaiacoord.ra[yy[x]].value > 0) else \
                     str(gaiacoord.ra[yy[x]].value+360.0)) , str(gaiacoord.dec[yy[x]].value) , \
                     unit=(u.deg,u.deg) , frame='icrs')
            print('2MASS query ',x,' of ',np.array(yy).size, end='\r')
            if verbose == True: print('2MASS query ',x,' of ',np.array(yy).size)
            if verbose == True: print(querycoord)
            tmass = []
            if (DoGALEX == True): 
                tmass = Irsa.query_region(querycoord , catalog='fp_psc' , radius='0d0m10s' )
                if ((np.where(tmass['j_m'] > -10.0)[0]).size > 0):
                    ww = np.where( (tmass['j_m'] == min(tmass['j_m'][np.where(tmass['j_m'] > 0.0)])))
                    Jmag[yy[x]] = tmass['j_m'][ww][0]
                    Jerr[yy[x]] = tmass['j_cmsig'][ww][0]
                    if verbose == True: print(tmass['j_m','j_cmsig'][ww])
        


    # Create GALEX plots
    mamajek = np.loadtxt(datapath+'/sptGBpRp.txt')
    f = interp1d( mamajek[:,2] , mamajek[:,0] , kind='cubic')

    zz2 = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) )
    yy2 = zz2[0][np.argsort(sep3d[zz2])]
    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) )
    yy = zz[0][np.argsort((-Gchi2)[zz])]

    fnuvj = (3631.0 * 10**6 * 10**(-0.4 * NUVmag)) / (1594.0 * 10**6 * 10**(-0.4 * Jmag))
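    # F_NUV/F_J flux ratio: magnitudes are converted to fluxes using zero points of
    # 3631 Jy (AB, GALEX NUV) and 1594 Jy (2MASS J); the common 10**6 factors cancel.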
    spt = f(r['bp_rp'].filled(np.nan))
    sptstring = ["nan" for x in range(np.array(r['bp_rp']).size)]
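    # 'spt' is a numeric spectral-type code interpolated from the Mamajek table
    # (0 = G0, 10 = K0, 17 = M0; negative values run back through F, A and B);
    # the loop below converts it to a human-readable string.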
    for x in range(0 , np.array(zz2).size):
        if (round(spt[yy2[x]],1) >= 17.0) and (round(spt[yy2[x]],1) < 27.0):
            sptstring[yy2[x]] = 'M' + ('% 3.1f' % (round(spt[yy2[x]],1)-17.0)).strip()
        if (round(spt[yy2[x]],1) >= 16.0) and (round(spt[yy2[x]],1) < 17.0):
            sptstring[yy2[x]] = 'K' + ('% 3.1f' % (round(spt[yy2[x]],1)-9.0)).strip()
        if (round(spt[yy2[x]],1) >= 10.0) and (round(spt[yy2[x]],1) < 16.0):
            sptstring[yy2[x]] = 'K' + ('% 3.1f' % (round(spt[yy2[x]],1)-10.0)).strip()
        if (round(spt[yy2[x]],1) >= 0.0) and (round(spt[yy2[x]],1) < 10.0):
            sptstring[yy2[x]] = 'G' + ('% 3.1f' % (round(spt[yy2[x]],1)-0.0)).strip()
        if (round(spt[yy2[x]],1) >= -10.0) and (round(spt[yy2[x]],1) < 0.0):
            sptstring[yy2[x]] = 'F' + ('% 3.1f' % (round(spt[yy2[x]],1)+10.0)).strip()
        if (round(spt[yy2[x]],1) >= -20.0) and (round(spt[yy2[x]],1) < -10.0):
            sptstring[yy2[x]] = 'A' + ('% 3.1f' % (round(spt[yy2[x]],1)+20.0)).strip()       
        if (round(spt[yy2[x]],1) >= -30.0) and (round(spt[yy2[x]],1) < -20.0):
            sptstring[yy2[x]] = 'B' + ('% 3.1f' % (round(spt[yy2[x]],1)+30.0)).strip()  
    


    figname=outdir + targname.replace(" ", "") + "galex.png"
    if verbose == True: print(figname)
    ##Muck with the axis to get two x axes

    fig,ax1 = plt.subplots(figsize=(12,8))
    ax1.set_yscale('log')
    ax1.axis([5.0 , 24.0 , 0.000004 , 0.02])
    ax2 = ax1.twiny()
    ax2.set_xlim(ax1.get_xlim())
    ax1.set_xticks(np.array([5.0 , 10.0 , 15.0 , 17.0 , 22.0 , 24.0]))
    ax1.set_xticklabels(['G5','K0','K5','M0','M5','M7'])
    ax1.set_xlabel('SpT' , fontsize=20, labelpad=15)
    ax1.tick_params(axis='both',which='major',labelsize=16)
    ax2.set_xticks(np.array([5.0 , 10.0 , 15.0 , 17.0 , 22.0 , 24.0]))
    ax2.set_xticklabels(['0.85','0.98','1.45','1.84','3.36','4.75'])
    ax2.set_xlabel(r'$B_p-R_p$ (mag)' , fontsize=20, labelpad=15)
    ax2.tick_params(axis='both',which='major',labelsize=16)
    ax1.set_ylabel(r'$F_{NUV}/F_{J}$' , fontsize=22, labelpad=0)

    ##Hyades
    hyades = readsav(datapath +'/HYsaved.sav')
    hyadesfnuvj = (3631.0 * 10**6 * 10**(-0.4 * hyades['clnuv'])) / (1594.0 * 10**6 * 10**(-0.4 * hyades['clJ']))
    ax1.plot(hyades['clspt'] , hyadesfnuvj , 'x' , markersize=4 , mew=1 , markeredgecolor='black' , zorder=1 , label='Hyades' )

    for x in range(0 , np.array(yy).size):
        msize  = (17-12.0*(sep3d[yy[x]].value/searchradpc.value))**2
        mcolor = Gchi2[yy[x]]
        medge  = 'black'
        mzorder= 3
        if (r['ruwe'][yy[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy[x]] >= 1.2):
            mshape='s'
        if (np.isnan(rvcut) == False): 
            if (np.isnan(RV[yy[x]])==False) & (np.abs(RV[yy[x]]-Gvrpmllpmbb[yy[x],0]) > rvcut):
                mshape='+'
                mcolor='black'
                mzorder=2
            if (np.isnan(RV[yy[x]])==False) & (np.abs(RV[yy[x]]-Gvrpmllpmbb[yy[x],0]) <= rvcut):
                medge='blue'
        ccc = ax1.scatter( spt[yy[x]] , fnuvj[yy[x]] , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )

    temp1 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='o' , s=12**2 , label = 'RUWE < 1.2')
    temp2 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='s' , s=12**2 , label = 'RUWE >= 1.2')
    temp3 = ax1.scatter([] , [] , c='white' , edgecolors='blue' , marker='o' , s=12**2 , label = 'RV Comoving')
    temp4 = ax1.scatter([] , [] , c='black' , marker='+' , s=12**2 , label = 'RV Outlier')



    # Plot science target
    if (spt[yy[0]] > 5): ax1.plot(spt[yy[0]] , fnuvj[yy[0]] , 'rx' , markersize=18 , mew=3 , markeredgecolor='red' , zorder=3 , label=targname )

    ax1.legend(fontsize=16 , loc='lower left')
    cb = fig.colorbar(ccc , ax=ax1)
    cb.set_label(label='Velocity Offset (km/s)',fontsize=13)
    if (DoGALEX == True): plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)
    if showplots == True: plt.show()
    plt.close('all')
    
    
    # Query CatWISE for W1+W2 and AllWISE for W3+W4

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) )
    yy = zz[0][np.argsort((-Gchi2)[zz])]

    WISEmag = np.empty([np.array(r['ra']).size,4])
    WISEerr = np.empty([np.array(r['ra']).size,4])
    WISEmag[:] = np.nan
    WISEerr[:] = np.nan

    print('Searching on neighbors in WISE')
    ##there's an annoying nan warning here, hide it for now as it's not a problem
    warnings.filterwarnings("ignore",category=UserWarning)

    for x in range(0 , np.array(yy).size):
        querycoord = SkyCoord((str(gaiacoord.ra[yy[x]].value) if (gaiacoord.ra[yy[x]].value > 0) else \
                     str(gaiacoord.ra[yy[x]].value+360.0)) , str(gaiacoord.dec[yy[x]].value) , \
                     unit=(u.deg,u.deg) , frame='icrs')
        print('WISE query ',x,' of ',np.array(yy).size, end='\r')
        if verbose == True: print('WISE query ',x,' of ',np.array(yy).size)
        if verbose == True: print(querycoord)
    
        wisecat = []
        if (DoWISE == True): 
            wisecat = Irsa.query_region(querycoord,catalog='catwise_2020' , radius='0d0m10s')
            if ((np.where(wisecat['w1mpro'] > -10.0)[0]).size > 0):
                ww = np.where( (wisecat['w1mpro'] == min( wisecat['w1mpro'][np.where(wisecat['w1mpro'] > -10.0)]) ))
                WISEmag[yy[x],0] = wisecat['w1mpro'][ww][0]
                WISEerr[yy[x],0] = wisecat['w1sigmpro'][ww][0]
            if ((np.where(wisecat['w2mpro'] > -10.0)[0]).size > 0):
                ww = np.where( (wisecat['w2mpro'] == min( wisecat['w2mpro'][np.where(wisecat['w2mpro'] > -10.0)]) ))
                WISEmag[yy[x],1] = wisecat['w2mpro'][ww][0]
                WISEerr[yy[x],1] = wisecat['w2sigmpro'][ww][0]
 
        if (DoWISE == True): 
            wisecat = Irsa.query_region(querycoord,catalog='allwise_p3as_psd' , radius='0d0m10s')
            if ((np.where(wisecat['w1mpro'] > -10.0)[0]).size > 0):
                ww = np.where( (wisecat['w1mpro'] == min( wisecat['w1mpro'][np.where(wisecat['w1mpro'] > -10.0)]) ))
                if (np.isnan(WISEmag[yy[x],0]) == True) | (wisecat['w1mpro'][ww][0] < 11.0):				# Note, only if CatWISE absent/saturated
                    WISEmag[yy[x],0] = wisecat['w1mpro'][ww][0]
                    WISEerr[yy[x],0] = wisecat['w1sigmpro'][ww][0]
            if ((np.where(wisecat['w2mpro'] > -10.0)[0]).size > 0):
                ww = np.where( (wisecat['w2mpro'] == min( wisecat['w2mpro'][np.where(wisecat['w2mpro'] > -10.0)]) ))
                if (np.isnan(WISEmag[yy[x],1]) == True) | (wisecat['w2mpro'][ww][0] < 11.0):				# Note, only if CatWISE absent/saturated
                    WISEmag[yy[x],1] = wisecat['w2mpro'][ww][0]
                    WISEerr[yy[x],1] = wisecat['w2sigmpro'][ww][0]
            if ((np.where(wisecat['w3mpro'] > -10.0)[0]).size > 0):
                ww = np.where( (wisecat['w3mpro'] == min( wisecat['w3mpro'][np.where(wisecat['w3mpro'] > -10.0)]) ))
                WISEmag[yy[x],2] = wisecat['w3mpro'][ww][0]
                WISEerr[yy[x],2] = wisecat['w3sigmpro'][ww][0]
            if ((np.where(wisecat['w4mpro'] > -10.0)[0]).size > 0):
                ww = np.where( (wisecat['w4mpro'] == min( wisecat['w4mpro'][np.where(wisecat['w4mpro'] > -10.0)]) ))
                WISEmag[yy[x],3] = wisecat['w4mpro'][ww][0]
                WISEerr[yy[x],3] = wisecat['w4sigmpro'][ww][0]
        
        if verbose == True: print(yy[x],WISEmag[yy[x],:],WISEerr[yy[x],:])

    # Create WISE plots

    W13 = WISEmag[:,0]-WISEmag[:,2]
    W13err = ( WISEerr[:,0]**2 + WISEerr[:,2]**2 )**0.5
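    # W1-W3 color: sources lying above the field photosphere sequence plotted further
    # below indicate an infrared excess; only colors with errors < 0.15 mag are kept.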

    zz = np.argwhere( np.isnan(W13err) )
    W13[zz] = np.nan
    W13err[zz] = np.nan

    zz = np.where( (W13err > 0.15) )
    W13[zz] = np.nan
    W13err[zz] = np.nan
    warnings.filterwarnings("default",category=UserWarning)




    zz2 = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value))
    yy2 = zz2[0][np.argsort(sep3d[zz2])]
    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) & (sep.degree > 0.00001) )
    yy = zz[0][np.argsort((-Gchi2)[zz])]

    figname=outdir + targname.replace(" ", "") + "wise.png"
    if verbose == True: print(figname)
    plt.figure(figsize=(12,8))

    if (verbose == True) & ((np.where(np.isfinite(W13+W13err))[0]).size > 0): print('Max y value: ' , (max((W13+W13err)[np.isfinite(W13+W13err)])+0.1) )
    plt.axis([ 5.0 , 24.0 , \
              max( [(min(np.append((W13-W13err)[ np.isfinite(W13-W13err) ],-0.1))-0.1) , -0.3]) , \
              max( [(max(np.append((W13+W13err)[ np.isfinite(W13+W13err) ],+0.0))+0.2) , +0.6]) ])

    ax1 = plt.gca()
    ax2 = ax1.twiny()
    ax2.set_xlim(5.0,24.0)

    ax1.set_xticks(np.array([5.0 , 10.0 , 15.0 , 17.0 , 22.0 , 24.0]))
    ax1.set_xticklabels(['G5','K0','K5','M0','M5','M7'])
    ax1.set_xlabel('SpT' , fontsize=20, labelpad=15)
    ax1.tick_params(axis='both',which='major',labelsize=16)

    ax2.set_xticks(np.array([5.0 , 10.0 , 15.0 , 17.0 , 22.0 , 24.0]))
    ax2.set_xticklabels(['0.85','0.98','1.45','1.84','3.36','4.75'])
    ax2.set_xlabel(r'$B_p-R_p$ (mag)' , fontsize=20, labelpad=15)
    ax2.tick_params(axis='both',which='major',labelsize=16)

    ax1.set_ylabel(r'$W1-W3$ (mag)' , fontsize=22, labelpad=0)

    # Plot field sequence from Tuc-Hor (Kraus et al. 2014)
    fldspt = [ 5 , 7 , 10 , 12 , 15 , 17 , 20 , 22 , 24 ]
    fldW13 = [ 0 , 0 ,  0 , .02, .06, .12, .27, .40, .60]
    plt.plot(fldspt , fldW13  , zorder=0 , label='Photosphere')

    # Plot neighbors
    ax1.errorbar( spt[yy] , W13[yy] , yerr=W13err[yy] , fmt='none' , ecolor='k')


    for x in range(0 , np.array(yy).size):
        msize  = (17-12.0*(sep3d[yy[x]].value/searchradpc.value))**2
        mcolor = Gchi2[yy[x]]
        medge  = 'black'
        mzorder= 3
        if (r['ruwe'][yy[x]] < 1.2):
            mshape='o'
        if (r['ruwe'][yy[x]] >= 1.2):
            mshape='s'
        if (np.isnan(rvcut) == False): 
            if (np.isnan(RV[yy[x]])==False) & (np.abs(RV[yy[x]]-Gvrpmllpmbb[yy[x],0]) > rvcut):
                mshape='+'
                mcolor='black'
                mzorder=2
            if (np.isnan(RV[yy[x]])==False) & (np.abs(RV[yy[x]]-Gvrpmllpmbb[yy[x],0]) <= rvcut):
                medge='blue'
        ccc = ax1.scatter( spt[yy[x]] , W13[yy[x]] , \
                s=msize , c=mcolor , marker=mshape , edgecolors=medge , zorder=mzorder , \
                vmin=0.0 , vmax=vlim.value , cmap='cubehelix' , label='_nolabel' )

    temp1 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='o' , s=12**2 , label = 'RUWE < 1.2')
    temp2 = ax1.scatter([] , [] , c='white' , edgecolors='black', marker='s' , s=12**2 , label = 'RUWE >= 1.2')
    temp3 = ax1.scatter([] , [] , c='white' , edgecolors='blue' , marker='o' , s=12**2 , label = 'RV Comoving')
    temp4 = ax1.scatter([] , [] , c='black' , marker='+' , s=12**2 , label = 'RV Outlier')


    # Plot science target
    if (spt[yy2[0]] > 5):
        plt.plot(spt[yy2[0]] , W13[yy2[0]] , 'rx' , markersize=18 , mew=3 , markeredgecolor='red' , zorder=3 , label=targname )

    plt.legend(fontsize=16 , loc='upper left')
    cb = plt.colorbar(ccc , ax=ax1)
    cb.set_label(label='Velocity Offset (km/s)',fontsize=14)
    if (DoWISE == True): plt.savefig(figname , bbox_inches='tight', pad_inches=0.2 , dpi=200)
    if showplots == True: plt.show()
    plt.close('all')

    # Cross-reference with ROSAT

    v = Vizier(columns=["**", "+_R"] , catalog='J/A+A/588/A103/cat2rxs' )
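    # "**" asks VizieR for every column of the 2RXS catalog; the leading "+" requests sorting by the computed distance column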

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) )
    yy = zz[0][np.argsort(sep3d[zz])]

    ROSATflux = np.empty([np.array(r['ra']).size])
    ROSATflux[:] = np.nan

    print('Searching on neighbors in ROSAT')
    for x in range(0 , np.array(yy).size):
        querycoord = SkyCoord((str(gaiacoord.ra[yy[x]].value) if (gaiacoord.ra[yy[x]].value > 0) else \
                     str(gaiacoord.ra[yy[x]].value+360.0)) , str(gaiacoord.dec[yy[x]].value) , \
                     unit=(u.deg,u.deg) , frame='icrs')
        print('ROSAT query ',x,' of ',np.array(yy).size, end='\r')
        if verbose == True: print('ROSAT query ',x,' of ',np.array(yy).size)
        if verbose == True: print(querycoord)
        if (DoROSAT == True): 
            rosatcat = v.query_region(querycoord , radius='0d1m0s' )
            if (len(rosatcat) > 0):
                rosatcat = rosatcat['J/A+A/588/A103/cat2rxs']
                if verbose == True: print(rosatcat)
                if ((np.where(rosatcat['CRate'] > -999)[0]).size > 0):
                    ww = np.where( (rosatcat['CRate'] == max(rosatcat['CRate'][np.where(rosatcat['CRate'] > -999)])))
                    ROSATflux[yy[x]] = rosatcat['CRate'][ww][0]
                if verbose == True: print(x,yy[x],ROSATflux[yy[x]])


    # Create output table with results
    print('Creating Output Tables with Results')
    if verbose == True: 
        print('Reminder, there were this many input entries: ',len(Gxyz[:,0]))
        print('The search radius in velocity space is: ',vlim)
        print()

    zz = np.where( (sep3d.value < searchradpc.value) & (Gchi2 < vlim.value) )
    sortlist = np.argsort(sep3d[zz])
    yy = zz[0][sortlist]

    fmt1 = "%11.7f %11.7f %6.3f %6.3f %11.3f %8.4f %8.4f %8.2f %8.2f %8.2f %8.3f %4s %8.6f %6.2f %7.3f %7.3f %35s"
    fmt2 = "%11.7f %11.7f %6.3f %6.3f %11.3f %8.4f %8.4f %8.2f %8.2f %8.2f %8.3f %4s %8.6f %6.2f %7.3f %7.3f %35s"
    filename=outdir + targname.replace(" ", "") + ".txt"
    
    warnings.filterwarnings("ignore",category=UserWarning)
    if verbose == True: 
        print('Also creating SIMBAD query table')
        print(filename)
        print('RA            DEC        Gmag   Bp-Rp  Voff(km/s) Sep(deg)   3D(pc) Vr(pred)  Vr(obs)    Vrerr Plx(mas)  SpT    FnuvJ  W1-W3    RUWE  XCrate RVsrc')
    with open(filename,'w') as file1:
        file1.write('RA            DEC        Gmag   Bp-Rp  Voff(km/s) Sep(deg)   3D(pc) Vr(pred)  Vr(obs)    Vrerr Plx(mas)  SpT    FnuvJ  W1-W3    RUWE  XCrate RVsrc \n')
    for x in range(0 , np.array(zz).size):
            if verbose == True:
                print(fmt1 % (gaiacoord.ra[yy[x]].value,gaiacoord.dec[yy[x]].value, \
                  r['phot_g_mean_mag'][yy[x]], r['bp_rp'][yy[x]] , \
                  Gchi2[yy[x]] , sep[yy[x]].value , sep3d[yy[x]].value , \
                  Gvrpmllpmbb[yy[x],0] , RV[yy[x]] , RVerr[yy[x]] , \
                  r['parallax'][yy[x]], \
                  sptstring[yy[x]] , fnuvj[yy[x]] , W13[yy[x]] , r['ruwe'][yy[x]] , ROSATflux[yy[x]] , RVsrc[yy[x]]) )
            with open(filename,'a') as file1:
                  file1.write(fmt2 % (gaiacoord.ra[yy[x]].value,gaiacoord.dec[yy[x]].value, \
                      r['phot_g_mean_mag'][yy[x]], r['bp_rp'][yy[x]] , \
                      Gchi2[yy[x]],sep[yy[x]].value,sep3d[yy[x]].value , \
                      Gvrpmllpmbb[yy[x],0] , RV[yy[x]] , RVerr[yy[x]] , \
                      r['parallax'][yy[x]], \
                      sptstring[yy[x]] , fnuvj[yy[x]] , W13[yy[x]] , r['ruwe'][yy[x]] , ROSATflux[yy[x]] , RVsrc[yy[x]]) )
                  file1.write("\n")

    filename=outdir + targname.replace(" ", "") + ".csv"
    with open(filename,mode='w') as result_file:
        wr = csv.writer(result_file)
        wr.writerow(['RA','DEC','Gmag','Bp-Rp','Voff(km/s)','Sep(deg)','3D(pc)','Vr(pred)','Vr(obs)','Vrerr','Plx(mas)','SpT','FnuvJ','W1-W3','RUWE','XCrate','RVsrc'])
        for x in range(0 , np.array(zz).size):
            wr.writerow(( "{0:.7f}".format(gaiacoord.ra[yy[x]].value) , "{0:.7f}".format(gaiacoord.dec[yy[x]].value) , \
                      "{0:.3f}".format(r['phot_g_mean_mag'][yy[x]]), "{0:.3f}".format(r['bp_rp'][yy[x]]) , \
                      "{0:.3f}".format(Gchi2[yy[x]]) , "{0:.4f}".format(sep[yy[x]].value) , "{0:.4f}".format(sep3d[yy[x]].value) , \
                      "{0:.2f}".format(Gvrpmllpmbb[yy[x],0]) , "{0:.2f}".format(RV[yy[x]]) , "{0:.2f}".format(RVerr[yy[x]]) , \
                      "{0:.3f}".format(r['parallax'][yy[x]]), \
                      sptstring[yy[x]] , "{0:.6f}".format(fnuvj[yy[x]]) , "{0:.2f}".format(W13[yy[x]]) , \
                      "{0:.3f}".format(r['ruwe'][yy[x]]) , "{0:.3f}".format(ROSATflux[yy[x]]) , RVsrc[yy[x]].strip()) )

    if verbose == True: print('All output can be found in ' + outdir)



    return outdir
Beispiel #50
0
def create_star_catalog(coordinate_box, pixelscale, catalogs, check_in_box=False):

    """
    This function ...
    :param coordinate_box:
    :param pixelscale:
    :param catalogs:
    :param check_in_box:
    :return:
    """

    # Initialize empty lists for the table columns
    catalog_column = []
    id_column = []
    ra_column = []
    dec_column = []
    ra_error_column = []
    dec_error_column = []
    magnitude_columns = {}
    magnitude_error_columns = {}
    on_galaxy_column = []
    confidence_level_column = []

    # Get the range of right ascension and declination of this image
    #center, ra_span, dec_span = frame.coordinate_range

    center = coordinate_box.center
    ra_span = 2.0 * coordinate_box.radius.ra
    dec_span = 2.0 * coordinate_box.radius.dec

    # Create a new Vizier object and set the row limit to -1 (unlimited)
    viz = Vizier(keywords=["stars", "optical"])
    viz.ROW_LIMIT = -1

    # Loop over the different catalogs
    for catalog in catalogs:

        # Get catalog code
        code = get_stellar_catalog_code_for_name(catalog)

        # Initialize a list to specify which of the stars added to the columns from other catalogs is already
        # matched to a star of the current catalog
        encountered = [False] * len(catalog_column)

        # Inform the user
        log.debug("Querying the " + catalog + " catalog ...")

        # Query Vizier and obtain the resulting table
        result = viz.query_region(center.to_astropy(), width=ra_span, height=dec_span, catalog=code)
        table = result[0]

        number_of_stars = 0
        number_of_stars_in_frame = 0
        number_of_new_stars = 0

        magnitudes = {}
        magnitude_errors = {}

        # Get the magnitude in different bands
        for name in table.colnames:

            # If this column name does not end with "mag", skip it
            if not name.endswith("mag"): continue

            # If the column name contains more than one character before "mag", skip it
            if len(name.split("mag")[0]) > 1: continue

            # Get the name of the band
            band = name.split("mag")[0]

            # Create empty lists for the magnitudes and errors
            magnitudes[band] = []
            magnitude_errors[band] = []

        # Loop over all entries in the table
        for i in range(len(table)):

            # Debugging
            log.debug("Processing entry " + str(i+1) + " ...")

            # -- General information --

            # Get the ID of the star in the catalog
            star_id = get_star_id(catalog, table, i)

            # -- Positional information --

            # Get the position of the star as a SkyCoord object and as pixel coordinate
            position = SkyCoordinate(ra=table["RAJ2000"][i], dec=table["DEJ2000"][i], unit="deg", frame="fk5")
            #pixel_position = position.to_pixel(frame.wcs)

            # Get the right ascension and declination for the current star
            star_ra = table["RAJ2000"][i]
            star_dec = table["DEJ2000"][i]

            number_of_stars += 1

            # Optional because this takes a lot of time
            if check_in_box:

                # If this star does not lie within the frame, skip it
                #if not frame.contains(position): continue
                if not coordinate_box.contains(position): continue

            number_of_stars_in_frame += 1

            # DOESN'T WORK ANYMORE!
            # Get the mean error on the right ascension and declination
            #if catalog == "UCAC4" or catalog == "NOMAD":
            #    ra_error = table["e_RAJ2000"][i] * u("mas")
            #    dec_error = table["e_DEJ2000"][i] * u("mas")
            #elif catalog == "II/246":
            #    error_maj = table["errMaj"][i] * u("arcsec")
            ##    error_min = table["errMin"][i] * u("arcsec")
            #    error_theta = Angle(table["errPA"][i], "deg")
            #    # Temporary: use only the major axis error (convert the error ellipse into a circle)
            #    ra_error = error_maj.to("mas")
            #    dec_error = error_maj.to("mas")
            #else: raise ValueError("Catalogs other than 'UCAC4', 'NOMAD' or 'II/246' are currently not supported")
            ra_error = dec_error = None

            # -- Magnitudes --

            # Loop over the different bands for which a magnitude is defined
            for band in magnitudes:

                # Determine the column name
                column_name = band + "mag"

                value = table[column_name][i]

                if isinstance(value, np.ma.core.MaskedConstant):

                    magnitudes[band].append(None)
                    magnitude_errors[band].append(None)

                else:

                    # Add the magnitude value
                    magnitudes[band].append(Magnitude(value))

                    # Check for presence of error on magnitude
                    error_column_name = "e_" + column_name
                    if error_column_name in table.colnames:
                        error = table[error_column_name][i]
                        if isinstance(error, np.ma.core.MaskedConstant): magnitude_errors[band].append(None)
                        else: magnitude_errors[band].append(Magnitude(error))
                    else: magnitude_errors[band].append(None)

            # -- Cross-referencing with previous catalogs --

            # If there are already stars in the list, check for correspondences with the current stars
            for index in range(len(encountered)):

                # Skip stars that have already been matched to a star of the current catalog (we assume there can only
                # be one match between a star of one catalog and a star of another catalog, within a radius of 3 pixels)
                if encountered[index]: continue

                saved_star_position = SkyCoordinate(ra=ra_column[index].value, dec=dec_column[index].value, unit="deg", frame="fk5")
                #saved_star_pixel_position = saved_star_position.to_pixel(frame.wcs)

                # Calculate the distance between the star already in the list and the new star
                #difference = saved_star_pixel_position - pixel_position

                difference_ra = saved_star_position.ra - position.ra
                difference_dec = saved_star_position.dec - position.dec
                difference = Extent((difference_ra * pixelscale.average).to("").value, (difference_dec * pixelscale.average).to("").value)

                # Check whether the distance is less than 3 pixels
                if difference.norm < 3.0:

                    # Inform the user
                    log.debug("Star " + star_id + " could be identified with star " + id_column[index] + " from the " + catalog_column[index] + " catalog")

                    # Increment the confidence level for the 'saved' star
                    confidence_level_column[index] += 1

                    # Set the 'encountered' flag to True for the 'saved' star
                    encountered[index] = True

                    # Break, because the current star does not have to be saved again (it is already in the lists)
                    break

            # If the list is still empty or no corresponding star was found (no break was
            # encountered), add the current star of this catalog as a new entry
            else:

                number_of_new_stars += 1

                # Inform the user
                #print("DEBUG: Adding star " + star_id + " at " + str(position.to_string("hmsdms")))

                # Fill in the column lists
                catalog_column.append(catalog)
                id_column.append(star_id)
                ra_column.append(star_ra * u("deg"))
                dec_column.append(star_dec * u("deg"))
                ra_error_column.append(ra_error.value if ra_error is not None else None)
                dec_error_column.append(dec_error.value if dec_error is not None else None)
                confidence_level_column.append(1)

        # Debug messages
        log.debug("Number of stars that were in the catalog: " + str(number_of_stars))
        log.debug("Number of stars that fell within the frame: " + str(number_of_stars_in_frame))
        log.debug("Number of stars that were only present in this catalog: " + str(number_of_new_stars))

    # Create and return the table
    #data = [catalog_column, id_column, ra_column, dec_column, ra_error_column, dec_error_column, confidence_level_column]
    #names = ['Catalog', 'Id', 'Right ascension', 'Declination', 'Right ascension error', 'Declination error', 'Confidence level']

    # TODO: add magnitudes to the table ?

    #magnitude_column_names = []
    #for band in magnitudes:

        # Values
        ##column = MaskedColumn(magnitudes[band], mask=[mag is None for mag in magnitudes[band]])
        ##data.append(column)
        #data.append(magnitudes[band])
        #column_name = band + " magnitude"
        #names.append(column_name)
        #magnitude_column_names.append(column_name)

        # Errors
        ##column = MaskedColumn(magnitude_errors[band], mask=[mag is None for mag in magnitude_errors[band]])
        ##data.append(column)
        #data.append(magnitude_errors[band])
        #column_name = band + " magnitude error"
        #names.append(column_name)
        #magnitude_column_names.append(column_name)

    # Create the catalog
    #meta = {'name': 'stars'}
    #catalog = tables.new(data, names, meta)

    # Set units
    #catalog["Right ascension"].unit = "deg"
    #catalog["Declination"].unit = "deg"
    #catalog["Right ascension error"].unit = "mas"
    #catalog["Declination error"].unit = "mas"
    #for name in magnitude_column_names:
    #    self.catalog[name].unit = "mag"

    # Return the catalog
    #return catalog

    return catalog_column, id_column, ra_column, dec_column, ra_error_column, dec_error_column, confidence_level_column
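A minimal, standalone sketch of the core call that the loop above repeats for each catalog (this is not part of the original module: the coordinates are placeholders and "II/246" is the 2MASS code referenced in the commented-out error handling above):

from astropy.coordinates import SkyCoord
import astropy.units as u
from astroquery.vizier import Vizier

# unlimited rows, same keywords as create_star_catalog
viz = Vizier(keywords=["stars", "optical"])
viz.ROW_LIMIT = -1

center = SkyCoord(ra=149.0 * u.deg, dec=69.2 * u.deg, frame="fk5")
result = viz.query_region(center, width=0.2 * u.deg, height=0.2 * u.deg,
                          catalog="II/246")
if len(result) > 0:
    table = result[0]
    print(table.colnames)  # e.g. RAJ2000, DEJ2000, Jmag, Hmag, Kmag, ...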
Filter = str(values[3])

Area = str(sys.argv[1])
Num = str(sys.argv[2])
lowlim = float(sys.argv[3])
highlim = float(sys.argv[4])

Vizier.ROW_LIMIT = int(Num)  # ROW_LIMIT expects an integer row count
Search = Vizier(columns=[
    "+_r", 'UCAC4', '_RAJ2000', '_DEJ2000', 'Bmag', 'Vmag', 'gmag', 'rmag',
    'imag', 'e_Bmag', 'e_Vmag', 'e_gmag', 'e_rmag', 'e_imag'
])

result = Search.query_region(coord.SkyCoord(RA,
                                            DEC,
                                            unit=(u.deg, u.deg),
                                            frame='icrs'),
                             width=str(Area) + "m",
                             catalog=["UCAC4"])
Catalog = result['I/322A/out']
print(Catalog)
Catalog.write('table.dat', format='ascii')

CURRENT_DIR = os.path.dirname(__file__)
fullcat_name_path = os.path.join(CURRENT_DIR, "Full_UCAC4.dat")
fullcat = open(fullcat_name_path, 'w')
Catalog.write(fullcat_name_path, format='ascii')

trimmedcat_name_path = os.path.join(CURRENT_DIR, "Trimmed_UCAC4.dat")
trimmedcat = open(trimmedcat_name_path, 'w')

data = numpy.genfromtxt(fullcat_name_path,
Beispiel #52
0
# "img" is now a fits.HDUList object; the 0th entry is the image
mywcs = WCS(img[0].header)

fig = plt.figure(1)
fig.clf()  # Just in case one was open before
# Use astropy's wcsaxes tool to create an RA/Dec image
ax = fig.add_axes([0.15, 0.1, 0.8, 0.8], projection=mywcs)
ax.set_xlabel("RA")
ax.set_ylabel("Dec")

ax.imshow(img[0].data, cmap="gray_r", interpolation="none", origin="lower",
          norm=plt.matplotlib.colors.LogNorm())

# Retrieve a specific table from Vizier to overplot
tablelist = Vizier.query_region(
    center, radius=5*u.arcmin, catalog="J/ApJ/826/16/table1")
# Again, the result is a list of tables, so we"ll get the first one
result = tablelist[0]

# Convert the ra/dec entries in the table to astropy coordinates
tbl_crds = SkyCoord(result["RAJ2000"], result["DEJ2000"],
                    unit=(u.hour, u.deg), frame="fk5")

# We want this table too:
tablelist2 = Vizier(row_limit=10000).query_region(
    center, radius=5*u.arcmin, catalog="J/ApJ/540/236")
result2 = tablelist2[0]
tbl_crds2 = SkyCoord(result2["RAJ2000"], result2["DEJ2000"],
                     unit=(u.hour, u.deg), frame="fk5")

# Overplot the data in the image
    def query_stars(self, ra, dec):
        c = coordinates.SkyCoord(ra=ra, dec=dec, unit=(u.hourangle, u.deg))
        r = 0.1 * u.deg
        # single combined constraint; a dict cannot hold two 'Rmag' keys (the second would overwrite the first)
        v = Vizier(column_filters={'Rmag': ">5.0 & <15.0"})
        result = v.query_region(c, radius=r, catalog="NOMAD")
        return (result[0]['_RAJ2000', '_DEJ2000', 'Bmag', 'Vmag', 'Rmag'])
    def Photometry(self):
        ### perform aperture photometry
        hdr,img = self.hdr,self.img
        egain = float(hdr['EGAIN'])

        bkg = sep.Background(img) # get background noise from image (maybe need to look into issues with this?)
        img_sub = img - bkg # subtract background
        flux, fluxerr, flag = sep.sum_circle(img_sub, self.source['X'], self.source['Y'], self.aperture_size, err=bkg.globalrms)
        # get flux values from source extraction package
        instmag = -2.5*np.log10(flux) # convert flux to instrumental magnitude
        snr = np.sqrt(flux*egain)
        instmag_err = 1/snr
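        # 1/SNR approximates the magnitude uncertainty; the exact relation is sigma_mag = 2.5/(ln(10)*SNR) ~ 1.0857/SNR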

        for j in flux:
            if j<0:
                print('Negative')

        # snr = np.sqrt(final_sum*egain) 

        # instmag_err = 1/snr
        # instmag = -2.5*np.log10(final_sum)


        ### retrieve magnitudes from catalog
        time = hdr['DATE-OBS'] # time image was taken
        time = datetime.strptime(time, '%Y-%m-%dT%H:%M:%S.%f') # convert string to datetime object
        filt = hdr['FILTER']
        objects = self.world

        # lookup data in the UCAC4 catalog by querying Vizier
        v = Vizier(columns=['UCAC4','+_r','RAJ2000','DEJ2000','Bmag','Vmag','rmag'])
        output = OrderedDict([('id',[]),('RA_C',[]),('DEC_C',[]),('RA_M',[]),('DEC_M',[]),('DIF',[]),('MAG_R',[]),('MAG_V',[]),('MAG_B',[]),('MAG_err',[]),('CMAG_R',[]),('CMAG_V',[]),('CMAG_B',[]),('DATETIME',[]),('IMGNAME',[])])
        output['MAG_err'] = instmag_err
        cmags = [] # catalog magnitudes list
        misfires = 0 # number of errors
        
        objects_indices_matched = []

        for n in range(len(objects)):
            catalog = 'UCAC4' # specify catalog 
            #(if needed, we can implement a method to change this based on the object, which is why it is defined *inside* the loop)
            
            result = v.query_region(coord.SkyCoord(ra=objects[n,0], dec=objects[n,1],
            unit=(u.degree, u.degree), frame='fk5'),radius='2s',catalog=catalog) # submit query at object coordinates with a radius of 2 arcseconds
            
            try:
                # query_region returns result which is a TableList and we only need the first Table in the List
                result = result[0] # try to get the first result from the list of results (which is usually just 1 element)
                # but if there are NO tables in the list...
            except: # !! important, if we do not find a match we still save the data as this may be an anomaly or an object like an asteroid
                prnt(self.filename,'No star match within 2 arcseconds')
                misfires += 1 
                output['id'].append('nan')
                output['RA_C'].append('nan')
                output['DEC_C'].append('nan')
                output['RA_M'].append(objects[n,0])
                output['DEC_M'].append(objects[n,1])
                output['DIF'].append('nan')
                output['DATETIME'].append(time)
                output['IMGNAME'].append(self.filename)
                cmags.append('nan') 
                continue # skip to the next object by moving to the next iteration of the loop

            ids = np.array(result['UCAC4'],str) # get array of all the stars identified
            ra = np.array(result['RAJ2000'],float)
            dec = np.array(result['DEJ2000'],float) # catalog RA and Dec
            dif = np.array(result['_r'],float)
            
            fluxtype = filt+'mag' # get a variable for fluxtype to match to catalog magnitude types
            if filt=='R':
                fluxtype = 'rmag'
            
            flux = np.array(result[fluxtype],float)

            for i in range(len(ids)): # for all the stars matched, 
                if dif[i] <= 2 and i==np.argmin(dif) and ids[i] not in output['id']: # pick the star with the min residual value and less than 2 arcsec off
                    prnt(self.filename,'Star match in %s, mag %s, residual %s arcsec' % (catalog,flux[i],dif[i]))
                    output['id'].append(ids[i]) # add this data to the output dictionary 
                    output['RA_C'].append(ra[i])
                    output['DEC_C'].append(dec[i])
                    output['RA_M'].append(objects[n,0])
                    output['DEC_M'].append(objects[n,1])
                    output['DIF'].append(dif[i])
                    output['DATETIME'].append(time)
                    output['IMGNAME'].append(self.filename)
                    cmags.append(flux[i])
                    objects_indices_matched.append(n)

                else:
                    prnt(self.filename,'No star match within 2 arcseconds')
                    misfires += 1
                    output['id'].append('nan')
                    output['RA_C'].append('nan')
                    output['DEC_C'].append('nan')
                    output['RA_M'].append(objects[n,0])
                    output['DEC_M'].append(objects[n,1])
                    output['DIF'].append('nan')
                    output['DATETIME'].append(time)
                    output['IMGNAME'].append(self.filename)
                    cmags.append('nan') 
                    continue


        prnt(self.filename,'Output %s stars' % len(output['id']))
        prnt(self.filename,'Output %s unique stars' % len(set(output['id'])))
        prnt(self.filename,'Missed %s objects' % misfires)
        

        instmags_to_median = [instmag[m] for m in objects_indices_matched]
        cmags_nonan = [k for k in cmags if not math.isnan(float(k))] 
        
        if not len(instmags_to_median)==len(cmags_nonan):
            raise Exception('Catalog comparison list not same length as instrumental magnitude list')

        # with open(self.filename+'.csv', 'a') as outfile:
        #     writer = csv.writer(outfile)
        #     writer.writerows(magnitudes)

        # median_i = np.median(np.array(instmags_to_median))
        # median_c = np.median(np.array(cmags_nonan)) # median of catalog magnitude values
        d = np.array(cmags_nonan) - np.array(instmags_to_median)
        d = float(np.median(d))
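        # d is the photometric zero-point offset: the median of (catalog - instrumental) magnitudes over the matched stars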

        # d = float(median_c) - float(median_i) # difference is the difference between the medians


        for i in ['R','V','B']:
            magtype = 'MAG_'+i
            if i==filt:
                output[magtype] = instmag
            else:
                output[magtype] = np.full(np.shape(instmag),'---',dtype="S3")

        for i in ['R','V','B']:
            magtype = 'CMAG_'+i
            if i==filt:
                output[magtype] = cmags
            else:
                output[magtype] = np.full(np.shape(cmags),'---',dtype="S3")

        output['MAG_'+filt] += d # offset magnitudes by that difference 
        
        prnt(self.filename,'Wrote magnitude data to sources.csv')
        sleep(3)
        print(' ')
        sleep(1)
        print("\033c")
        header('Calibration & Source Extraction')
        
        return output
# 'img' is now a fits.HDUList object; the 0th entry is the image
mywcs = wcs.WCS(img[0].header)

fig = pl.figure(1)
fig.clf() # just in case one was open before
# use astropy's wcsaxes tool to create an RA/Dec image
ax = fig.add_axes([0.15, 0.1, 0.8, 0.8], projection=mywcs)
ax.set_xlabel("RA")
ax.set_ylabel("Dec")

ax.imshow(img[0].data, cmap='gray_r', interpolation='none', origin='lower',
          norm=pl.matplotlib.colors.LogNorm())


# retrieve a specific table from Vizier to overplot
tablelist = Vizier.query_region(center, radius=5*u.arcmin,
                                catalog='J/ApJ/826/16/table1')
# again, the result is a list of tables, so we'll get the first one
result = tablelist[0]

# convert the ra/dec entries in the table to astropy coordinates
tbl_crds = coordinates.SkyCoord(result['RAJ2000'], result['DEJ2000'],
                                unit=(u.hour, u.deg), frame='fk5')

# we want this table too:
tablelist2 = Vizier(row_limit=10000).query_region(center, radius=5*u.arcmin,
                                                  catalog='J/ApJ/540/236')
result2 = tablelist2[0]
tbl_crds2 = coordinates.SkyCoord(result2['RAJ2000'], result2['DEJ2000'],
                                 unit=(u.hour, u.deg), frame='fk5')
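
Both plotting fragments stop short of the overplot itself. As a minimal sketch of that last step (assuming the ax, tbl_crds and tbl_crds2 objects defined above), wcsaxes' coordinate transform lets the sky coordinates be plotted directly:

ax.scatter(tbl_crds.ra.deg, tbl_crds.dec.deg, transform=ax.get_transform('fk5'),
           edgecolors='red', facecolors='none', s=40, label='J/ApJ/826/16/table1')
ax.scatter(tbl_crds2.ra.deg, tbl_crds2.dec.deg, transform=ax.get_transform('fk5'),
           marker='x', color='blue', s=30, label='J/ApJ/540/236')
ax.legend(loc='upper right')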

Beispiel #56
0
def add_gaia_figure_elements(tpf, fig, magnitude_limit=18):
    """Make the Gaia Figure Elements"""
    # Get the positions of the Gaia sources
    try:
        c1 = SkyCoord(tpf.ra, tpf.dec, frame="icrs", unit="deg")
    except Exception as err:
        msg = ("Cannot get nearby stars in GAIA because TargetPixelFile has no valid coordinate. "
               f"ra: {tpf.ra}, dec: {tpf.dec}")
        raise LightkurveError(msg) from err

    # Use pixel scale for query size
    pix_scale = 4.0  # arcseconds / pixel for Kepler, default
    if tpf.mission == "TESS":
        pix_scale = 21.0
    # We query with the frame's full extent (a diameter) as the radius, so the region is overfilled by ~2x.
    from astroquery.vizier import Vizier

    Vizier.ROW_LIMIT = -1
    result = Vizier.query_region(
        c1,
        catalog=["I/345/gaia2"],
        radius=Angle(np.max(tpf.shape[1:]) * pix_scale, "arcsec"),
    )
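    # e.g. an 11 x 11 pixel TESS TPF gives radius = 11 * 21 arcsec ~ 3.9 arcmin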
    no_targets_found_message = ValueError(
        "Either no sources were found in the query region " "or Vizier is unavailable"
    )
    too_few_found_message = ValueError(
        "No sources found brighter than {:0.1f}".format(magnitude_limit)
    )
    if result is None:
        raise no_targets_found_message
    elif len(result) == 0:
        raise too_few_found_message
    result = result["I/345/gaia2"].to_pandas()
    result = result[result.Gmag < magnitude_limit]
    if len(result) == 0:
        raise no_targets_found_message

    ra_corrected, dec_corrected, _ = _correct_with_proper_motion(
            np.nan_to_num(np.asarray(result.RA_ICRS)) * u.deg, np.nan_to_num(np.asarray(result.DE_ICRS)) * u.deg,
            np.nan_to_num(np.asarray(result.pmRA)) * u.milliarcsecond / u.year,
            np.nan_to_num(np.asarray(result.pmDE)) * u.milliarcsecond / u.year,
            Time(2457206.375, format="jd", scale="tdb"),
            tpf.time[0])
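    # 2457206.375 (JD, TDB) is J2015.5, the Gaia DR2 reference epoch; positions are propagated to the TPF's first cadence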
    result.RA_ICRS = ra_corrected.to(u.deg).value
    result.DE_ICRS = dec_corrected.to(u.deg).value

    # Convert to pixel coordinates
    radecs = np.vstack([result["RA_ICRS"], result["DE_ICRS"]]).T
    coords = tpf.wcs.all_world2pix(radecs, 0)

    # Gently size the points by their Gaia magnitude
    sizes = 64.0 / 2 ** (result["Gmag"] / 5.0)
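    # 64 / 2**(Gmag/5): each 5 magnitudes fainter halves the plotted marker size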
    one_over_parallax = 1.0 / (result["Plx"] / 1000.0)
    source = ColumnDataSource(
        data=dict(
            ra=result["RA_ICRS"],
            dec=result["DE_ICRS"],
            pmra=result["pmRA"],
            pmde=result["pmDE"],
            source=result["Source"].astype(str),
            Gmag=result["Gmag"],
            plx=result["Plx"],
            one_over_plx=one_over_parallax,
            x=coords[:, 0] + tpf.column,
            y=coords[:, 1] + tpf.row,
            size=sizes,
        )
    )

    tooltips = [
        ("Gaia source", "@source"),
        ("G", "@Gmag"),
        ("Parallax (mas)", "@plx (~@one_over_plx{0,0} pc)"),
        ("RA", "@ra{0,0.00000000}"),
        ("DEC", "@dec{0,0.00000000}"),
        ("pmRA", "@pmra{0,0.000} mas/yr"),
        ("pmDE", "@pmde{0,0.000} mas/yr"),
        ("column", "@x{0.0}"),
        ("row", "@y{0.0}"),
        ]

    try:
        source, tooltips = _add_nearby_tics_if_tess(tpf, source, tooltips)
    except Exception as err:
        warnings.warn(
            f"interact_sky() - cannot obtain nearby TICs. Skip it. The error: {err}",
            LightkurveWarning,
        )

    r = fig.circle(
        "x",
        "y",
        source=source,
        fill_alpha=0.3,
        size="size",
        line_color=None,
        selection_color="firebrick",
        nonselection_fill_alpha=0.0,
        nonselection_line_color=None,
        nonselection_line_alpha=0.0,
        fill_color="firebrick",
        hover_fill_color="firebrick",
        hover_alpha=0.9,
        hover_line_color="white",
    )

    fig.add_tools(
        HoverTool(
            tooltips=tooltips,
            renderers=[r],
            mode="mouse",
            point_policy="snap_to_data",
        )
    )

    # mark the target's position too
    target_ra, target_dec, pm_corrected = _get_corrected_coordinate(tpf)
    if target_ra is not None and target_dec is not None:
        target_x, target_y = tpf.wcs.all_world2pix([(target_ra, target_dec)], 0)[0]
        fig.cross(x=tpf.column + target_x, y=tpf.row + target_y, size=20, color="black", line_width=1)
        if not pm_corrected:
            warnings.warn(("Proper motion correction cannot be applied to the target, as none is available. "
                           "Thus the target (the cross) might be noticeably away from its actual position, "
                           "if it has large proper motion."),
                           category=LightkurveWarning)

    # a widget that displays some of the selected star's metadata
    # so that they can be copied (e.g., GAIA ID).
    # It is a workaround, because bokeh's hover tooltip disappears as soon as the mouse is away from the star.
    message_selected_target = Div(text="")

    def show_target_info(attr, old, new):
        # the following is essentially redoing the bokeh tooltip template above in plain HTML
        # with some slight tweak, mainly to add some helpful links.
        #
        # Note: in source, columns "x" and "y" are ndarrays while the other columns are pandas Series,
        # so the access api is slightly different.
        if len(new) > 0:
            msg = "Selected:<br><table>"
            for idx in new:
                tic_id = source.data['tic'].iat[idx] if source.data.get('tic') is not None else None
                if tic_id is not None and tic_id != "":  # TESS-specific meta data, if available
                    msg += f"""
<tr><td>TIC</td><td>{tic_id}
(<a target="_blank" href="https://exofop.ipac.caltech.edu/tess/target.php?id={tic_id}">ExoFOP</a>)</td></tr>
<tr><td>TESS Mag</td><td>{source.data['TESSmag'].iat[idx]}</td></tr>
<tr><td>Separation (")</td><td>{source.data['separation'].iat[idx]}</td></tr>
"""
                # the main meta data
                msg += f"""
<tr><td>Gaia source</td><td>{source.data['source'].iat[idx]}
(<a target="_blank"
    href="http://vizier.u-strasbg.fr/viz-bin/VizieR-S?Gaia DR2 {source.data['source'].iat[idx]}">Vizier</a>)</td></tr>
<tr><td>G</td><td>{source.data['Gmag'].iat[idx]:.3f}</td></tr>
<tr><td>Parallax (mas)</td>
    <td>{source.data['plx'].iat[idx]:,.3f} (~ {source.data['one_over_plx'].iat[idx]:,.0f} pc)</td>
</tr>
<tr><td>RA</td><td>{source.data['ra'].iat[idx]:,.8f}</td></tr>
<tr><td>DEC</td><td>{source.data['dec'].iat[idx]:,.8f}</td></tr>
<tr><td>pmRA</td><td>{source.data['pmra'].iat[idx]} mas/yr</td></tr>
<tr><td>pmDE</td><td>{source.data['pmde'].iat[idx]} mas/yr</td></tr>
<tr><td>column</td><td>{source.data['x'][idx]:.1f}</td></tr>
<tr><td>row</td><td>{source.data['y'][idx]:.1f}</td></tr>
<tr><td colspan="2">Search
<a target="_blank"
   href="http://simbad.u-strasbg.fr/simbad/sim-id?Ident=Gaia DR2 {source.data['source'].iat[idx]}">
SIMBAD by Gaia ID</a></td></tr>
<tr><td colspan="2">
<a target="_blank"
   href="http://simbad.u-strasbg.fr/simbad/sim-coo?Coord={source.data['ra'].iat[idx]}+{source.data['dec'].iat[idx]}&Radius=2&Radius.unit=arcmin">
SIMBAD by coordinate</a></td></tr>
<tr><td colspan="2">&nbsp;</td></tr>
"""

            msg += "\n<table>"
            message_selected_target.text = msg
        # else do nothing (not clearing the widget) for now.

    source.selected.on_change("indices", show_target_info)

    return fig, r, message_selected_target
    FF.set_tick_labels_xformat('d.dd')
    FF.set_tick_labels_yformat('d.dd')
    FF.set_title(sp.specname)
    FF.save('../figures/mom0_gfit_{0}.png'.format(cubename))

    l_rounded, el_rounded = rounded(fitcoord.galactic.l.deg, dx_deg)
    b_rounded, eb_rounded = rounded(fitcoord.galactic.b.deg, dy_deg)
    v_rounded, ev_rounded = rounded(sp.specfit.parinfo[1].value, sp.specfit.parinfo[1].error)

    if 'v0' not in cubename:
        results[cubename] = [l_rounded, b_rounded, el_rounded, eb_rounded, v_rounded, ev_rounded,]


other_masers = Table.read('../data/other_masers.tbl', format='ascii.ecsv')
walshtbl = Vizier.query_region(coordinates.SkyCoord(0.38*u.deg, +0.04*u.deg,
                                                    frame='galactic'),
                               radius=0.5*u.arcmin, catalog='J/MNRAS/442/2240')[0]
h2ocoords = coordinates.SkyCoord(walshtbl['_RAJ2000'], walshtbl['_DEJ2000'],
                                 frame='fk5', unit=(u.deg, u.deg))

tbl = Table([Column([names[name] for name in results], name='Line'),
             Column([results[name][0] for name in results], name=r'$\ell$', unit=u.deg),
             Column([results[name][1] for name in results], name='$b$', unit=u.deg),
             Column([results[name][2] for name in results], name=r'$\sigma(\ell)$', unit=u.deg),
             Column([results[name][3] for name in results], name=r'$\sigma(b)$', unit=u.deg),
             Column([results[name][4] for name in results], name='$v_{LSR}$', unit=u.km/u.s),
             Column([results[name][5] for name in results], name=r'$\sigma(v_{LSR})$', unit=u.km/u.s),
             Column(['This Work' for name in results], name='Measurement', dtype='S20'),
            ])

latexdict['header_start'] = '\label{tab:measurements}'
Beispiel #58
0
            keys = ['_r','_RAJ2000', '_DEJ2000','Vmag','Bmag','pmRA','pmDE']
        for key in keys:
            target_dict[key] = np.array(viz_result[key])

    else:
        for key in viz_result.keys():
            #target_dict[key] = np.array(viz_result[key].filled(999).tolist())
            target_dict[key] = viz_result[key][0]

    targ_ra = target_dict['_RAJ2000']
    targ_dec = target_dict['_DEJ2000']

if not args.tc:
    search_radius = str(search_radius)+'m'

    comparisons = v.query_region(SkyCoord(targ_ra,targ_dec,unit=(u.deg,u.deg),frame='icrs'),radius=search_radius,catalog=[catalog]) # Query UCAC4 for comparison objects within search radius

    if args.apass:
        table =  comparisons[u'II/336/apass9']
    else:
        table =  comparisons[u'I/322A/out']


    table.sort('_r')
    print(table)
    print("===")
    which_comp = int(input("Which comparison to use?     ")) # Need to make a selection for which comparison object

    if which_comp == 0:
        cont = input("WARNING! Target is indexed as 0 in this table, you have selected 0 as the comparison also. Do you wish to continue? [y/n] ")
        if cont == 'y':
Beispiel #59
0
def make_rcsample(parser):
    options,args= parser.parse_args()
    savefilename= options.savefilename
    if savefilename is None:
        #Create savefilename if not given
        savefilename= os.path.join(appath._APOGEE_DATA,
                                   'rcsample_'+appath._APOGEE_REDUX+'.fits')
        print "Saving to %s ..." % savefilename
    #Read the base-sample
    data= apread.allStar(adddist=_ADDHAYDENDIST,rmdups=options.rmdups)
    #Remove a bunch of fields that we do not want to keep
    data= esutil.numpy_util.remove_fields(data,
                                          ['TARGET_ID',
                                           'FILE',
                                           'AK_WISE',
                                           'SFD_EBV',
                                           'SYNTHVHELIO_AVG',
                                           'SYNTHVSCATTER',
                                           'SYNTHVERR',
                                           'SYNTHVERR_MED',
                                           'RV_TEFF',
                                           'RV_LOGG',
                                           'RV_FEH',
                                           'RV_CCFWHM',
                                           'RV_AUTOFWHM',
                                           'SYNTHSCATTER',
                                           'CHI2_THRESHOLD',
                                           'APSTAR_VERSION',
                                           'ASPCAP_VERSION',
                                           'RESULTS_VERSION',
                                           'REDUCTION_ID',
                                           'SRC_H',
                                           'PM_SRC'])
    if not appath._APOGEE_REDUX.lower() == 'current' \
            and int(appath._APOGEE_REDUX[1:]) < 500:
        data= esutil.numpy_util.remove_fields(data,
                                              ['ELEM'])
    #Select red-clump stars
    jk= data['J0']-data['K0']
    z= isodist.FEH2Z(data['METALS'],zsolar=0.017)
    if appath._APOGEE_REDUX.lower() == 'current' \
            or int(appath._APOGEE_REDUX[1:]) > 600:
        from apogee.tools import paramIndx
        if False:
            #Use my custom logg calibration that's correct for the RC
            logg= (1.-0.042)*data['FPARAM'][:,paramIndx('logg')]-0.213
            lowloggindx= data['FPARAM'][:,paramIndx('logg')] < 1.
            logg[lowloggindx]= data['FPARAM'][lowloggindx,paramIndx('logg')]-0.255
            hiloggindx= data['FPARAM'][:,paramIndx('logg')] > 3.8
            logg[hiloggindx]= data['FPARAM'][hiloggindx,paramIndx('logg')]-0.3726
        else:
            #Use my custom logg calibration that's correct on average
            logg= (1.+0.03)*data['FPARAM'][:,paramIndx('logg')]-0.37
            lowloggindx= data['FPARAM'][:,paramIndx('logg')] < 1.
            logg[lowloggindx]= data['FPARAM'][lowloggindx,paramIndx('logg')]-0.34
            hiloggindx= data['FPARAM'][:,paramIndx('logg')] > 3.8
            logg[hiloggindx]= data['FPARAM'][hiloggindx,paramIndx('logg')]-0.256
    else:
        logg= data['LOGG']
    indx= (jk < 0.8)*(jk >= 0.5)\
        *(z <= 0.06)\
        *(z <= rcmodel.jkzcut(jk,upper=True))\
        *(z >= rcmodel.jkzcut(jk))\
        *(logg >= rcmodel.loggteffcut(data['TEFF'],z,upper=False))\
        *(logg <= rcmodel.loggteffcut(data['TEFF'],z,upper=True))
    data= data[indx]
    #Add more aggressive flag cut
    data= esutil.numpy_util.add_fields(data,[('ADDL_LOGG_CUT',numpy.int32)])
    data['ADDL_LOGG_CUT']= ((data['TEFF']-4800.)/1000.+2.75) > data['LOGG']
    if options.loggcut:
        data= data[data['ADDL_LOGG_CUT'] == 1]
    print "Making catalog of %i objects ..." % len(data)
    #Add distances
    data= esutil.numpy_util.add_fields(data,[('RC_DIST', float),
                                             ('RC_DM', float),
                                             ('RC_GALR', float),
                                             ('RC_GALPHI', float),
                                             ('RC_GALZ', float)])
    rcd= rcmodel.rcdist()
    jk= data['J0']-data['K0']
    z= isodist.FEH2Z(data['METALS'],zsolar=0.017)
    data['RC_DIST']= rcd(jk,z,appmag=data['K0'])*options.distfac
    data['RC_DM']= 5.*numpy.log10(data['RC_DIST'])+10.
    XYZ= bovy_coords.lbd_to_XYZ(data['GLON'],
                                data['GLAT'],
                                data['RC_DIST'],
                                degree=True)
    R,phi,Z= bovy_coords.XYZ_to_galcencyl(XYZ[:,0],
                                          XYZ[:,1],
                                          XYZ[:,2],
                                          Xsun=8.,Zsun=0.025)
    data['RC_GALR']= R
    data['RC_GALPHI']= phi
    data['RC_GALZ']= Z
    #Save
    fitsio.write(savefilename,data,clobber=True)
    if not options.nostat:
        #Determine statistical sample and add flag
        apo= apogee.select.apogeeSelect()
        statIndx= apo.determine_statistical(data)
        mainIndx= apread.mainIndx(data)
        data= esutil.numpy_util.add_fields(data,[('STAT',numpy.int32),
                                                 ('INVSF',float)])
        data['STAT']= 0
        data['STAT'][statIndx*mainIndx]= 1
        for ii in range(len(data)):
            if (statIndx*mainIndx)[ii]:
                data['INVSF'][ii]= 1./apo(data['LOCATION_ID'][ii],
                                          data['H'][ii])
            else:
                data['INVSF'][ii]= -1.
    if options.nopm:
        fitsio.write(savefilename,data,clobber=True)       
        return None
    #Get proper motions
    from astroquery.vizier import Vizier
    import astroquery
    from astropy import units as u
    import astropy.coordinates as coord
    pmfile= savefilename.split('.')[0]+'_pms.fits'
    if os.path.exists(pmfile):
        pmdata= fitsio.read(pmfile,1)
    else:
        pmdata= numpy.recarray(len(data),
                               formats=['f8','f8','f8','f8','f8','f8','i4'],
                               names=['RA','DEC','PMRA','PMDEC',
                                      'PMRA_ERR','PMDEC_ERR','PMMATCH'])
        rad= u.Quantity(4./3600.,u.degree)
        v= Vizier(columns=['RAJ2000','DEJ2000','pmRA','pmDE','e_pmRA','e_pmDE'])
        for ii in range(len(data)):
            #if ii > 100: break
            sys.stdout.write('\r'+"Getting pm data for point %i / %i" % (ii+1,len(data)))
            sys.stdout.flush()
            pmdata.RA[ii]= data['RA'][ii]
            pmdata.DEC[ii]= data['DEC'][ii]
            co= coord.ICRS(ra=data['RA'][ii],
                           dec=data['DEC'][ii],
                           unit=(u.degree, u.degree))
            trying= True
            while trying:
                try:
                    tab= v.query_region(co,rad,catalog='I/322') #UCAC-4 catalog
                except astroquery.exceptions.TimeoutError:
                    pass
                else:
                    trying= False
            if len(tab) == 0:
                pmdata.PMMATCH[ii]= 0
                print "Didn't find a match for %i ..." % ii
                continue
            else:
                pmdata.PMMATCH[ii]= len(tab)
                if len(tab[0]['pmRA']) > 1:
                    print "Found more than 1 match for %i ..." % ii
            try:
                pmdata.PMRA[ii]= float(tab[0]['pmRA'])
            except TypeError:
                jj= 1
                while len(tab[0]['pmRA']) > 1 and jj < 4: 
                    trad= u.Quantity((4.-jj)/3600.,u.degree)
                    trying= True
                    while trying:
                        try:
                            tab= v.query_region(co,trad,catalog='I/322') #UCAC-4 catalog
                        except astroquery.exceptions.TimeoutError:
                            pass
                        else:
                            trying= False
                    jj+= 1
                if len(tab) == 0:
                    pmdata.PMMATCH[ii]= 0
                    print "Didn't find a unambiguous match for %i ..." % ii
                    continue               
                pmdata.PMRA[ii]= float(tab[0]['pmRA'])
            pmdata.PMDEC[ii]= float(tab[0]['pmDE'])
            pmdata.PMRA_ERR[ii]= float(tab[0]['e_pmRA'])
            pmdata.PMDEC_ERR[ii]= float(tab[0]['e_pmDE'])
            if numpy.isnan(float(tab[0]['pmRA'])): pmdata.PMMATCH[ii]= 0
        sys.stdout.write('\r'+_ERASESTR+'\r')
        sys.stdout.flush()
        fitsio.write(pmfile,pmdata,clobber=True)
        #To make sure we're using the same format below
        pmdata= fitsio.read(pmfile,1)
    #Match proper motions
    try: #These already exist currently, but may not always exist
        data= esutil.numpy_util.remove_fields(data,['PMRA','PMDEC'])
    except ValueError:
        pass
    data= esutil.numpy_util.add_fields(data,[('PMRA', numpy.float),
                                             ('PMDEC', numpy.float),
                                             ('PMRA_ERR', numpy.float),
                                             ('PMDEC_ERR', numpy.float),
                                             ('PMMATCH',numpy.int32)])
    data['PMMATCH']= 0
    h=esutil.htm.HTM()
    m1,m2,d12 = h.match(pmdata['RA'],pmdata['DEC'],
                        data['RA'],data['DEC'],
                        2./3600.,maxmatch=1)
    data['PMRA'][m2]= pmdata['PMRA'][m1]
    data['PMDEC'][m2]= pmdata['PMDEC'][m1]
    data['PMRA_ERR'][m2]= pmdata['PMRA_ERR'][m1]
    data['PMDEC_ERR'][m2]= pmdata['PMDEC_ERR'][m1]
    data['PMMATCH'][m2]= pmdata['PMMATCH'][m1].astype(numpy.int32)
    pmindx= data['PMMATCH'] == 1
    data['PMRA'][True-pmindx]= -9999.99
    data['PMDEC'][True-pmindx]= -9999.99
    data['PMRA_ERR'][True-pmindx]= -9999.99
    data['PMDEC_ERR'][True-pmindx]= -9999.99
    #Calculate Galactocentric velocities
    data= esutil.numpy_util.add_fields(data,[('GALVR', numpy.float),
                                             ('GALVT', numpy.float),
                                             ('GALVZ', numpy.float)])
    lb= bovy_coords.radec_to_lb(data['RA'],data['DEC'],degree=True)
    XYZ= bovy_coords.lbd_to_XYZ(lb[:,0],lb[:,1],data['RC_DIST'],degree=True)
    pmllpmbb= bovy_coords.pmrapmdec_to_pmllpmbb(data['PMRA'],data['PMDEC'],
                                                data['RA'],data['DEC'],
                                                degree=True)
    vxvyvz= bovy_coords.vrpmllpmbb_to_vxvyvz(data['VHELIO_AVG'],
                                             pmllpmbb[:,0],
                                             pmllpmbb[:,1],
                                             lb[:,0],lb[:,1],data['RC_DIST'],
                                             degree=True)
    vR, vT, vZ= bovy_coords.vxvyvz_to_galcencyl(vxvyvz[:,0],
                                                vxvyvz[:,1],
                                                vxvyvz[:,2],
                                                8.-XYZ[:,0],
                                                XYZ[:,1],
                                                XYZ[:,2]+0.025,
                                                vsun=[-11.1,30.24*8.,7.25])#Assumes proper motion of Sgr A* and R0=8 kpc, zo= 25 pc
    data['GALVR']= vR
    data['GALVT']= vT
    data['GALVZ']= vZ
    data['GALVR'][True-pmindx]= -9999.99
    data['GALVT'][True-pmindx]= -9999.99
    data['GALVZ'][True-pmindx]= -9999.99
    #Get proper motions
    pmfile= savefilename.split('.')[0]+'_pms_ppmxl.fits'
    if os.path.exists(pmfile):
        pmdata= fitsio.read(pmfile,1)
    else:
        pmdata= numpy.recarray(len(data),
                               formats=['f8','f8','f8','f8','f8','f8','i4'],
                               names=['RA','DEC','PMRA','PMDEC',
                                      'PMRA_ERR','PMDEC_ERR','PMMATCH'])
        rad= u.Quantity(4./3600.,u.degree)
        v= Vizier(columns=['RAJ2000','DEJ2000','pmRA','pmDE','e_pmRA','e_pmDE'])
        for ii in range(len(data)):
            #if ii > 100: break
            sys.stdout.write('\r'+"Getting pm data for point %i / %i" % (ii+1,len(data)))
            sys.stdout.flush()
            pmdata.RA[ii]= data['RA'][ii]
            pmdata.DEC[ii]= data['DEC'][ii]
            co= coord.ICRS(ra=data['RA'][ii],
                           dec=data['DEC'][ii],
                           unit=(u.degree, u.degree))
            trying= True
            while trying:
                try:
                    tab= v.query_region(co,rad,catalog='I/317') #PPMXL catalog
                except astroquery.exceptions.TimeoutError:
                    pass
                else:
                    trying= False
            if len(tab) == 0:
                pmdata.PMMATCH[ii]= 0
                print "Didn't find a match for %i ..." % ii
                continue
            else:
                pmdata.PMMATCH[ii]= len(tab)
                if len(tab[0]['pmRA']) > 1:
                    pass
                    #print "Found more than 1 match for %i ..." % ii
            try:
                pmdata.PMRA[ii]= float(tab[0]['pmRA'])
            except TypeError:
                #Find nearest
                cosdists= numpy.zeros(len(tab[0]['pmRA']))
                for jj in range(len(tab[0]['pmRA'])):
                    cosdists[jj]= cos_sphere_dist(tab[0]['RAJ2000'][jj],
                                                  tab[0]['DEJ2000'][jj],
                                                  data['RA'][ii],
                                                  data['DEC'][ii])
                closest= numpy.argmax(cosdists)
                pmdata.PMRA[ii]= float(tab[0]['pmRA'][closest])
                pmdata.PMDEC[ii]= float(tab[0]['pmDE'][closest])
                pmdata.PMRA_ERR[ii]= float(tab[0]['e_pmRA'][closest])
                pmdata.PMDEC_ERR[ii]= float(tab[0]['e_pmDE'][closest])
                if numpy.isnan(float(tab[0]['pmRA'][closest])): pmdata.PMMATCH[ii]= 0
            else:
                pmdata.PMDEC[ii]= float(tab[0]['pmDE'])
                pmdata.PMRA_ERR[ii]= float(tab[0]['e_pmRA'])
                pmdata.PMDEC_ERR[ii]= float(tab[0]['e_pmDE'])
                if numpy.isnan(float(tab[0]['pmRA'])): pmdata.PMMATCH[ii]= 0
        sys.stdout.write('\r'+_ERASESTR+'\r')
        sys.stdout.flush()
        fitsio.write(pmfile,pmdata,clobber=True)
        #To make sure we're using the same format below
        pmdata= fitsio.read(pmfile,1)
    #Match proper motions to ppmxl
    data= esutil.numpy_util.add_fields(data,[('PMRA_PPMXL', numpy.float),
                                             ('PMDEC_PPMXL', numpy.float),
                                             ('PMRA_ERR_PPMXL', numpy.float),
                                             ('PMDEC_ERR_PPMXL', numpy.float),
                                             ('PMMATCH_PPMXL',numpy.int32)])
    data['PMMATCH_PPMXL']= 0
    h=esutil.htm.HTM()
    m1,m2,d12 = h.match(pmdata['RA'],pmdata['DEC'],
                        data['RA'],data['DEC'],
                        2./3600.,maxmatch=1)
    data['PMRA_PPMXL'][m2]= pmdata['PMRA'][m1]
    data['PMDEC_PPMXL'][m2]= pmdata['PMDEC'][m1]
    data['PMRA_ERR_PPMXL'][m2]= pmdata['PMRA_ERR'][m1]
    data['PMDEC_ERR_PPMXL'][m2]= pmdata['PMDEC_ERR'][m1]
    data['PMMATCH_PPMXL'][m2]= pmdata['PMMATCH'][m1].astype(numpy.int32)
    pmindx= data['PMMATCH_PPMXL'] == 1
    data['PMRA_PPMXL'][True-pmindx]= -9999.99
    data['PMDEC_PPMXL'][True-pmindx]= -9999.99
    data['PMRA_ERR_PPMXL'][True-pmindx]= -9999.99
    data['PMDEC_ERR_PPMXL'][True-pmindx]= -9999.99
    #Calculate Galactocentric velocities
    data= esutil.numpy_util.add_fields(data,[('GALVR_PPMXL', numpy.float),
                                             ('GALVT_PPMXL', numpy.float),
                                             ('GALVZ_PPMXL', numpy.float)])
    lb= bovy_coords.radec_to_lb(data['RA'],data['DEC'],degree=True)
    XYZ= bovy_coords.lbd_to_XYZ(lb[:,0],lb[:,1],data['RC_DIST'],degree=True)
    pmllpmbb= bovy_coords.pmrapmdec_to_pmllpmbb(data['PMRA_PPMXL'],
                                                data['PMDEC_PPMXL'],
                                                data['RA'],data['DEC'],
                                                degree=True)
    vxvyvz= bovy_coords.vrpmllpmbb_to_vxvyvz(data['VHELIO_AVG'],
                                             pmllpmbb[:,0],
                                             pmllpmbb[:,1],
                                             lb[:,0],lb[:,1],data['RC_DIST'],
                                             degree=True)
    vR, vT, vZ= bovy_coords.vxvyvz_to_galcencyl(vxvyvz[:,0],
                                                vxvyvz[:,1],
                                                vxvyvz[:,2],
                                                8.-XYZ[:,0],
                                                XYZ[:,1],
                                                XYZ[:,2]+0.025,
                                                vsun=[-11.1,30.24*8.,7.25])#Assumes proper motion of Sgr A* and R0=8 kpc, zo= 25 pc
    data['GALVR_PPMXL']= vR
    data['GALVT_PPMXL']= vT
    data['GALVZ_PPMXL']= vZ
    data['GALVR_PPMXL'][True-pmindx]= -9999.99
    data['GALVT_PPMXL'][True-pmindx]= -9999.99
    data['GALVZ_PPMXL'][True-pmindx]= -9999.99
    #Save
    fitsio.write(savefilename,data,clobber=True)
    return None
Beispiel #60
0
def photometry(header, data, name, RA_bound, DEC_bound, thresh_factor,
               results_file, im=True):
    """
    Input: the header of a reduced object's .fits file, the image data of the 
    file, the name to be used when creating the segmented image and a csv 
    containing all detected sources, a threshold factor to be used in image 
    segmentation, 2 arrays giving the RA and DEC (in degrees) boundaries on the 
    desired source, the name of the results textfile to which the photometry 
    will be appended, and a boolean indicating whether or not to save the image 
    of the segmentation test (optional; default True)
    Output: None
    
    Obtains a stack of images in the form of a header and data from a .fits 
    file. Estimates the background photon count of this image. Sets a threshold 
    above which we declare a cluster of pixels to be a source: this threshold 
    is defined as the background + the input thresh_factor*the RMS deviation of 
    the background. 
    
    Scans the input image and looks for sources which are at least 7 pixels in 
    area and above this threshold. Saves an image of the sources found in the 
    original field to the working directory. Uses a pixel coordinate to WCS 
    coordinate transformation, via a previously known reference pixel (see 
    PESTO_lib.WCS_merge()) to obtain WCS coords of all detected sources. 
    Outputs a .csv containing the properties of all detected sources.
    
    A tab-delimited results file is appended to. The number of images used in 
    the stack, the total exposure time of the stack and its error, the 
    timestamp (averaged across all images) and its error are already included.    
    IF a source is found, this script appends the x and y minima of the 
    source's centroid, the pixel area of the source, the photon count, and the 
    error on the photon count. If not, the flag NO SOURCE FOUND is appended to 
    the image. 
    """
    
    import numpy as np   
    import numpy.ma as ma 
    import os
    from astropy.stats import (SigmaClip, gaussian_fwhm_to_sigma, 
                               sigma_clipped_stats)
    from astropy.convolution import Gaussian2DKernel
    from astropy.table import Table, Column
    from astroquery.vizier import Vizier
    from astropy.coordinates import SkyCoord
    import astropy.units as u
    from photutils import Background2D, MedianBackground
    from photutils.utils import calc_total_error
    from photutils import detect_sources
    
    # perform 20 iterations of sigma clipping where needed 
    sigma_clip = SigmaClip(sigma=3.0, iters=20) 
    # background is estimated as the median of the entire image 
    bkg_estimator = MedianBackground() 
    mask = (data == 0) # mask all pixels where the ADU is 0  
    bkg = Background2D(data, (50,50), filter_size=(3,3), sigma_clip=sigma_clip, 
                       bkg_estimator=bkg_estimator, mask=mask)

    ### find sources using image segmentation

    # set the threshold for source detection 
    threshold = bkg.background + (thresh_factor*bkg.background_rms)
    sigma = 3.0*gaussian_fwhm_to_sigma
    kernel = Gaussian2DKernel(sigma, x_size=3.0, y_size=3.0)
    kernel.normalize()
    segm = detect_sources(data, threshold, npixels=7, filter_kernel=kernel)
    segm.remove_masked_labels(mask)
    
    try: 
        segm.remove_border_labels(10, partial_overlap=True, relabel=True)
    except: 
        print("The background threshold factor is too large; sources are "+
              "being ignored during image segmentation.\nPlease try a smaller"+
              " value.\n")
        return
 
    # pictures to see what's going on
    if(im):
        import matplotlib.pyplot as plt
        plt.switch_backend('agg') # stop matplotlib from trying to show image
        from astropy.visualization import SqrtStretch
        from astropy.visualization.mpl_normalize import ImageNormalize
        
        norm = ImageNormalize(stretch=SqrtStretch()) # normalize the image 
        fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(90, 90)) # 2 plots 

        # show the (data - background) image with a greyscale colourmap 
        # using the above normalization
        ax1.imshow(data-bkg.background, origin='lower', cmap='Greys_r', 
                   norm=norm) 
        ax2.imshow(segm, origin='lower', cmap=segm.cmap(random_state=12345)) 
        plt.savefig('segmentationtest_'+name+'.png')
    
    # find source properties (centroid, source pixel area, etc.) 
    from photutils import source_properties
    from astropy.wcs import WCS
    
    # calculate the error on the photon counts
#    tf = open('/data/irulan/omm_transients/'+results_file,'r')
#    contents = tf.readlines()
#    tf.close()
#    tf_last = contents[len(contents)-1]
#    tf_data = tf_last.split("\t")  
#    # gain*exposure*stack: 
#    effective_gain = 13.522*(float(tf_data[1])/1000.0)*float(tf_data[0])  
    effective_gain = 13.522
    # compute photon count error :
    error = calc_total_error(data, bkg.background_rms, effective_gain) 
    
    cat = source_properties(data-bkg.background, segm, wcs='all_pix2world', 
                            error=error)
    segm_tbl = cat.to_table() # construct a table of source properties 
    
    # WCS object
    w = WCS(header)
    # get WCS of all sources, add to segm_tbl, and write the table
    ra, dec = w.all_pix2world(segm_tbl['xcentroid'], segm_tbl['ycentroid'],1)
    segm_tbl["ra"] = ra
    segm_tbl["dec"] = dec
    segm_tbl.write('segmentation_table_'+name+'.csv', format = 'csv', 
              overwrite=True)
    
    # build a new table with only the parameters we care about 
    tbl = Table()
    tbl["id"] = segm_tbl["id"] # id 
    tbl["xcentroid"] = segm_tbl["xcentroid"] # x coord
    tbl["ycentroid"] = segm_tbl["ycentroid"] # y coord 
    tbl["area"] = segm_tbl["area"] # area in pixels
    tbl["ra"] = ra # ra 
    tbl["dec"] = dec # dec
    tbl["pc"] = segm_tbl["source_sum"] # flux 
    tbl["pc_err"] = segm_tbl["source_sum_err"] # error on flux 
    tbl["mag_fit"] = -2.5*np.log10(tbl["pc"]) # instrumental magnitude
    tbl["mag_fit_unc"] = 2.5/(tbl["pc"]*np.log(10)) # error on magnitude 
    
    ### query Vizier to match sources and do aperture photometry
    
    # set the catalogue and filter of the image
    ref_catalog = "II/349/ps1"
    ref_catalog_name = "PS1" # PanStarrs 1
    filt = header["filtre"][0]
    # get the centre of the image and its RA, Dec
    x_size = data.shape[1]
    y_size = data.shape[0]
    ra_centre, dec_centre = np.array(w.all_pix2world(x_size/2.0, 
                                                     y_size/2.0, 1))
    # set radius to search in, minimum, maximum magnitudes
    minmag = 10.0
    maxmag = 22.0 
    max_emag = 0.3 # maximum error on magnitude 
    pixscale = np.mean(np.abs([header["CDELT1"], header["CDELT2"]])) # in deg
    pixscale = pixscale*3600.0 # in arcsec
    radius = pixscale*y_size/60.0 # radius in arcmin 
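    # worked example with hypothetical numbers: a 0.47 arcsec/pix scale and 
    # y_size = 1024 pix give a search radius of 0.47*1024/60 ~ 8 arcmin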
    
    # query print statement
    #print('Querying Vizier %s around RA %.4f, Dec %.4f with a radius of %.4f arcmin\n'%(
    #        ref_catalog, ra_centre, dec_centre, radius))
    
    # querying 
    v = Vizier(columns=["*"], column_filters={
            filt+"mag":str(minmag)+".."+str(maxmag),
            "e_"+filt+"mag":"<"+str(max_emag)}, row_limit=-1) # no row limit
    
    Q = v.query_region(SkyCoord(ra=ra_centre, dec=dec_centre, 
                    unit = (u.deg, u.deg)), radius = str(radius)+'m', 
                    catalog=ref_catalog, cache=False)
    cat_coords = w.all_world2pix(Q[0]['RAJ2000'], Q[0]['DEJ2000'], 1)
    # mask out edge sources
    x_lims = [int(0.05*x_size), int(0.95*x_size)] 
    y_lims = [int(0.05*y_size), int(0.95*y_size)]
    mask = (cat_coords[0] > x_lims[0]) & (
            cat_coords[0] < x_lims[1]) & (
            cat_coords[1] > y_lims[0]) & (
            cat_coords[1] < y_lims[1])
    good_cat_sources = Q[0][mask] # sources in catalogue 
    
    # cross-matching coords of sources found by astrometry
    source_coords = SkyCoord(ra=tbl['ra'], dec=tbl['dec'], frame='fk5', 
                             unit='degree')
    # and coords of valid sources in the queried catalog 
    cat_source_coords = SkyCoord(ra=good_cat_sources['RAJ2000'], 
                                     dec=good_cat_sources['DEJ2000'], 
                                     frame='fk5', unit='degree')
        
    # indices of matching sources (within 5.0 pix of each other) 
    idx_image, idx_cat, d2d, d3d = cat_source_coords.search_around_sky(
            source_coords, 5.0*pixscale*u.arcsec)
    
    # compute magnitude offsets and zero point
    mag_offsets = ma.array(good_cat_sources[filt+'mag'][idx_cat] - 
                      tbl['mag_fit'][idx_image])
    zp_mean, zp_med, zp_std = sigma_clipped_stats(mag_offsets) # zero point
    
    mag_calib = tbl['mag_fit'] + zp_mean # compute magnitudes 
    mag_calib.name = 'mag_calib'
    mag_calib_unc = np.sqrt(tbl['mag_fit_unc']**2 + zp_std**2) # propagate errs
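    # i.e. the instrumental magnitude error and the zero-point scatter added in quadrature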
    mag_calib_unc.name = 'mag_calib_unc'
    tbl['mag_calib'] = mag_calib
    tbl['mag_calib_unc'] = mag_calib_unc
    
    #print(zp_mean)
    #print(zp_std)
    
    # add flag indicating if source is in catalog
    #in_cat = []
    #for i in range(len(tbl)):
    #    if i in idx_image:
    #        in_cat.append(True)
    #    else:
    #        in_cat.append(False)
    #in_cat_col = Column(data=in_cat, name="in "+ref_catalog_name)
    #tbl["in "+ref_catalog_name] = in_cat_col
    
    #return tbl

    # boundaries on the desired source
    RA_min, RA_max = RA_bound
    DEC_min, DEC_max = DEC_bound

    # parse a list of all sources for a source within the RA, Dec bounds
    cwd = os.getcwd() # current working dir
    for i in range(len(tbl['id'])):
        # if source is found:
        if (RA_min <= tbl[i]['ra'] <= RA_max) and (
                DEC_min <= tbl[i]['dec'] <= DEC_max):                      
            print("\nFound a source.\n")

            # Write xcentroid and ycentroid, the pixel area of the source, 
            # the photon count, photon count error, calibrated magnitude, 
            # calibrated magnitude error, and filter used to the file.
            # If xcentroid and ycentroid change drastically from one stack to 
            # another, the sources are not the same or astrometric calibration 
            # may have failed. 
            xcentroid = tbl["xcentroid"].data[i]
            ycentroid = tbl["ycentroid"].data[i]
            area = tbl["area"].data[i]
            pc = tbl["pc"].data[i]
            pc_err = tbl["pc_err"].data[i] 
            mag = tbl["mag_calib"].data[i]
            mag_err = tbl["mag_calib_unc"][i]
            # line to write to the file: 
            line = str(xcentroid)+"\t"+str(ycentroid)+"\t"+str(area)
            line += "\t"+str(pc)+"\t"+str(pc_err) 
            line += "\t"+str(mag)+"\t"+str(mag_err)+"\t"+filt+"\n"
            tf = open(cwd+"/"+results_file,'a')
            tf.write(line)
            tf.close()
            return tbl

    # if no source is found:
    line = "NO SOURCE FOUND.\n" 
    print("No source found.\n")
    tf = open(cwd+"/"+results_file,'a')
    tf.write(line)
    tf.close()

    return tbl
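
# A minimal usage sketch of the photometry() function above, assuming a reduced
# FITS stack on disk. The file name, RA/Dec bounds, threshold factor, and results
# file name are hypothetical placeholders, not values from the original example.
from astropy.io import fits

hdu = fits.open("reduced_stack.fits")[0]              # hypothetical reduced stack
RA_bound = [210.910, 210.916]                         # hypothetical RA window (deg)
DEC_bound = [54.309, 54.315]                          # hypothetical Dec window (deg)
source_table = photometry(hdu.header, hdu.data, "reduced_stack",
                          RA_bound, DEC_bound, thresh_factor=3.0,
                          results_file="photometry_results.txt", im=False)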