Example #1
def get_tic_name(name):
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        customSimbad = Simbad()
        customSimbad.add_votable_fields(
            'ra(2;A;ICRS;J2000;2000)', 'dec(2;D;ICRS;J2000;2000)')
        customSimbad.remove_votable_fields('coordinates')
        result_table = customSimbad.query_object(name)
    if result_table is None:
        logger.error("Target name failed to resolve, please check")
        sys.exit(1)

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        ra_sex = result_table['RA_2_A_ICRS_J2000_2000'][0]
        dec_sex = result_table['DEC_2_D_ICRS_J2000_2000'][0]
        catalogData = Catalogs.query_region(SkyCoord
                                            (ra_sex, dec_sex, unit=(u.hour, u.deg)),
                                            catalog='Tic', radius=0.006)

    try:
        return catalogData['ID'][0]
    except IndexError:
        logger.error("No TIC target at those coordiantes")
        sys.exit(1)
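A minimal usage sketch (not part of the original example): it assumes roughly the imports below, which the snippet relies on but does not show, plus network access to Simbad and MAST; 'WASP-18' is just an illustrative Simbad-resolvable name.

# Imports assumed by the example above (it does not show them itself).
import sys
import logging
import warnings
import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery.simbad import Simbad
from astroquery.mast import Catalogs

logger = logging.getLogger(__name__)  # stand-in for the example's configured logger

# Resolve the name via Simbad, then cone-search the TIC around that position.
tic_id = get_tic_name('WASP-18')
print('TIC', tic_id)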
Example #2
def get_TIC_data(ra, dec):
    radii = np.linspace(start=0.0001, stop=0.001, num=19)
    #for i,row in self.df.iterrows():
    tic_found = False
    for rad in radii:
        if not tic_found:
            # query_string = str(ra) + " " + str(dec)  # note the space between the strings
            sc = SkyCoord(ra=ra * u.deg, dec=dec * u.deg)
            obs_table = Catalogs.query_region(coordinates=sc,
                                              radius=rad * u.deg,
                                              catalog="TIC")
            obs_df = obs_table.to_pandas()
            if len(obs_table['ID']) == 1:
                tic = obs_table['ID'][0]
                tic_found = True
                continue
            if len(obs_df[obs_df['GAIA'].to_numpy(dtype='str') != '']) == 1:
                temp_obs_df = obs_df[obs_df['GAIA'].to_numpy(
                    dtype='str') != '']
                tic = temp_obs_df['ID'].iloc[0]
                tic_found = True
                continue
            # if len(np.unique(obs_df[obs_df['HIP'].to_numpy(dtype = 'str') != '']['HIP'])) == 1:
            #     tic = obs_table['ID'][0]
            #     tic_found = True
            #     continue
    if not tic_found:
        tic = np.nan
        print("Didn't find TIC for this object.")
    else:
        print("Found TIC Data for TIC " + str(tic) + "!")
        #self.tic = tic
    return (obs_df, tic)
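A brief usage sketch for the function above, assuming numpy as np, astropy.units as u, SkyCoord, and astroquery.mast.Catalogs are imported as the function body requires; the coordinates are reused from Example #13.

# Adaptive-radius TIC lookup: widens the search radius until a single match is pinned down.
obs_df, tic = get_TIC_data(ra=157.03728055645, dec=-64.50521068147)
print(tic, len(obs_df))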
Example #3
def load_catalog(filename=False,
                 header=False,
                 wcs=False,
                 ra_key=False,
                 dec_key=False):
    '''
    From Anna Marini: get positions from catalog.
    '''

    if filename and not header:
        header = get_fits_header(filename)
    if header and not wcs:
        wcs = WCS(header)

    # Prefer explicit header keywords when given; otherwise fall back to the
    # WCS reference value.
    if header and ra_key and dec_key:
        ra = header[ra_key]
        dec = header[dec_key]
    else:
        ra = wcs.wcs.crval[0]
        dec = wcs.wcs.crval[1]

    # Diagonal
    diag_bound = wcs.pixel_to_world_values([[0, 0], wcs.pixel_shape])
    radius = np.mean(diag_bound[1] - diag_bound[0]) / 2

    catalog = Catalogs.query_region(
        f'{ra} {dec}',
        # frame='icrs',
        # unit="deg",
        radius=radius,
        catalog='Gaia',
        version=2)

    return catalog
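A hedged usage sketch: 'image.fits' is a hypothetical filename standing in for any FITS image whose primary HDU carries the image data and a celestial WCS; it also assumes numpy, astropy.wcs.WCS, and astroquery.mast.Catalogs are imported as the function requires.

from astropy.io import fits

# Query Gaia DR2 around the WCS reference point of an image, with a radius of
# roughly half the field of view.
header = fits.getheader('image.fits')  # hypothetical file name
gaia_cat = load_catalog(header=header)
print(len(gaia_cat))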
Example #4
    def get_TIC(ra, dec, radius):
        """Retrieve TIC from MAST."""
        cat = Catalogs.query_region(
            SkyCoord(
                ra=ra, dec=dec, unit=(u.deg, u.deg), frame='icrs'
            ), radius=radius, catalog='TIC'
        )

        return cat
Example #5
def get_catalog(self, image):
    max_fov = image.fov.max() * np.sqrt(2) / 2
    table = Catalogs.query_region(image.skycoord,
                                  max_fov,
                                  "TIC",
                                  verbose=False)
    table["ra"].unit = "deg"
    table["dec"].unit = "deg"
    table.rename_column('ID', 'id')
    return table
Example #6
def find_tic(target_ID, from_file = True):
    if from_file:
        try:
            table_data = Table.read("Original_BANYAN_XI-III_xmatch_TIC.csv" , format='ascii.csv')
            #table_data = Table.read("Original VCA Members.csv" , format='ascii.csv') 
            #table_data = Table.read("Original Argus members info.csv" , format='ascii.csv')
            
            # Obtains ra and dec for object from target_ID
            i = list(table_data['main_id']).index(target_ID)
            ra = table_data['ra'][i]
            dec = table_data['dec'][i]
            tic = table_data['MatchID'][i]
        except:
            try:
                TIC_table = Catalogs.query_object(target_ID, catalog = "TIC")
                ra = TIC_table['ra'][0]
                dec = TIC_table['dec'][0]
                tic = TIC_table['ID'][0] 
            except:
                table_data = Table.read('BANYAN_XI-III_combined_members.csv')
                i = list(table_data['main_id']).index(target_ID)
                ra = table_data['ra'][i]
                dec = table_data['dec'][i]
                object_coord = SkyCoord(ra, dec, unit="deg")
                TIC_table = Catalogs.query_region(object_coord, radius = '1 deg', catalog = 'TIC')
                tic = TIC_table['ID'][0]
    else:
        # Find ra, dec and tic # via the TIC (typically based on Gaia DR2)
        try:
            TIC_table = Catalogs.query_object(target_ID, catalog = "TIC")
            ra = TIC_table['ra'][0]
            dec = TIC_table['dec'][0]
            tic = TIC_table['ID'][0] 
        except:
            table_data = Table.read('BANYAN_XI-III_combined_members.csv')
            i = list(table_data['main_id']).index(target_ID)
            ra = table_data['ra'][i]
            dec = table_data['dec'][i]
            object_coord = SkyCoord(ra, dec, unit="deg")
            TIC_table = Catalogs.query_region(object_coord, radius = '1 deg', catalog = 'TIC')
            tic = TIC_table['ID'][0]
    
    return ra, dec, tic
Example #7
def get_tic_radec(ra, dec):
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        catalogData = Catalogs.query_region('{} {}'.format(
            ra, dec),
            catalog='Tic', radius=1 * u.arcsec)

    try:
        return catalogData['ID'][0]
    except IndexError:
        logger.error("No TIC target at those coordiantes")
        sys.exit(1)
Example #8
def get_gaia_id(ra, dec):
    radii = np.linspace(start=0.0001, stop=0.001, num=19)
    #for i,row in self.df.iterrows():
    gaia_id_found = False
    try:
        for rad in radii:
            if not gaia_id_found:
                query_string = str(ra) + " " + str(
                    dec
                )  # make sure to have a space between the strings!#SkyCoord(ra = row['ra'], dec = row['dec'], frame = 'icrs') str(row['ra']) + " " + str(row['dec']) # make sure to have a space between the strings!
                obs_table = Catalogs.query_region(coordinates=query_string,
                                                  radius=rad * u.deg,
                                                  catalog="TIC")
                obs_df = obs_table.to_pandas()
                if len(obs_table['ID']) == 1:
                    gaia_id = str(obs_table['GAIA'][0])
                    gaia_id_found = True
                    continue
                if len(obs_df[obs_df['GAIA'].to_numpy(
                        dtype='str') != '']) == 1:
                    temp_obs_df = obs_df[obs_df['GAIA'].to_numpy(
                        dtype='str') != '']
                    gaia_id = str(temp_obs_df['GAIA'].iloc[0])
                    gaia_id_found = True
                    continue
                if len(
                        np.unique(obs_df[obs_df['HIP'].to_numpy(
                            dtype='str') != '']['HIP'])) == 1:
                    gaia_id = str(obs_table['GAIA'][0])
                    gaia_id_found = True
                    continue
        if not gaia_id_found:
            gaia_id = np.nan
            print("Didn't find GAIA ID for this object.")
        else:
            if gaia_id == '--':
                gaia_id = np.nan
                print("Found object, but no Gaia ID availabel through TIC.")
            else:
                print("Found GAIA ID " + str(gaia_id) + "!")
    except:
        print("Issue finding GAIA ID for this object.")
        gaia_id = np.nan
    return (gaia_id)
Example #9
def get_panstarrs_catalog(imgwcs,
                          imgfile,
                          radius=0.2,
                          rfaint=17,
                          region=False):
    from astroquery.mast import Catalogs
    ra0, dec0 = imgwcs.wcs.crval
    print(
        'Querying Pan-STARRS catalog with radius={:.3f} deg and central coordinates RA,Dec={:.5f},{:.5f}'
        .format(radius, ra0, dec0))
    if region:
        allcat = Catalogs.query_region('{} {}'.format(ra0, dec0),
                                       radius=radius,
                                       catalog='PANSTARRS',
                                       data_release='dr2',
                                       table='mean')  #, rMeanPSFMag=[12, 22])
    else:
        allcat = Catalogs.query_criteria(coordinates='{} {}'.format(ra0, dec0),
                                         radius=radius,
                                         catalog='PANSTARRS',
                                         data_release='dr2',
                                         table='mean',
                                         columns=[
                                             'objID', 'raMean', 'decMean',
                                             'gMeanPSFMag', 'rMeanPSFMag',
                                             'iMeanPSFMag', 'zMeanPSFMag'
                                         ],
                                         gMeanPSFMag=[('lte', 18),
                                                      ('gte', 12)],
                                         rMeanPSFMag=[('lte', 18),
                                                      ('gte', 12)],
                                         iMeanPSFMag=[('lte', 18),
                                                      ('gte', 12)],
                                         zMeanPSFMag=[('lte', 18),
                                                      ('gte', 12)],
                                         sort_by=[("asc", "rMeanPSFMag")])

    #rmag = allcat['rMeanPSFMag']
    #good = np.isfinite(rmag) * rmag < rfaint
    #cat = allcat[good]
    #print('Keeping {}/{} Pan-STARRS sources.'.format(len(cat), len(allcat)))
    return allcat
Example #10
def circular_aperture_catalog(pattern):
    for filename in pattern:
        header = get_fits_header(filename)
        catalog = Catalogs.query_region(f'{header["CRVAL1"]} {header["CRVAL2"]}',
                                        radius = '0.2 deg',
                                        catalog = 'TIC')
        dattab = Table(catalog)
        radec = dattab['ra', 'dec', 'Bmag']
        mask = radec['Bmag'] < 19.0
        mag_radec = radec[mask]
        positions_cat = SkyCoord(mag_radec['ra'],
                             mag_radec['dec'],
                             frame='icrs',
                             unit=(u.deg,u.deg))
        aperture_cat = SkyCircularAperture(positions_cat,
                                           r=4.5*u.arcsec) 
        annulus_cat = SkyCircularAnnulus(positions_cat,
                                         r_in=5*u.arcsec,
                                         r_out=8*u.arcsec)
        apers = [aperture_cat, annulus_cat]
        return(apers)
Example #11
def coord_to_tic(ra, dec):
    """
    Function to convert input RA and Dec coordinates to the nearest TIC ID from 
    the TESS Input Catalog (TIC).

    Parameters
    ----------
    ra : float
       The RA of the target source.
    dec : float
       The Dec of the target source.

    Returns
    -------
    tic : int
       TIC ID of the source nearest to the input RA and Dec from the TIC.
    """
    cat = Catalogs.query_region(str(ra) + ' ' + str(dec), catalog="TIC")
    cat.sort('dstArcSec')  # make sure the nearest source comes first
    tic = int(cat[0]['ID'])
    
    return tic
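Usage sketch (assumes `from astroquery.mast import Catalogs`); the coordinates are reused from Example #13.

# Nearest TIC ID for a given sky position (network call to MAST).
tic = coord_to_tic(157.03728055645, -64.50521068147)
print('TIC', tic)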
Example #12
def _show_tics(data, header=None, telescope_kw="TELESCOP", r=12 * u.arcminute):
    if header is None:
        header = fits.getheader(data)
        data = fits.getdata(data)

    telescope = Telescope.from_name(header[telescope_kw])
    ra = header["RA"]
    dec = header["DEC"]
    skycoord = SkyCoord(ra,
                        dec,
                        frame='icrs',
                        unit=(telescope.ra_unit, telescope.dec_unit))

    coord = skycoord
    tic_data = Catalogs.query_region(coord, r, "TIC", verbose=False)
    tic_data.sort("Jmag")

    skycoords = SkyCoord(ra=tic_data['ra'], dec=tic_data['dec'], unit="deg")

    x, y = np.array(wcsutils.skycoord_to_pixel(skycoords, WCS(header)))
    _ = show_stars(data, contrast=0.5)
    plot_marks(x, y)
Example #13
def get_nbhr_stars():

    ra, dec = 157.03728055645, -64.50521068147
    c_obj = SkyCoord(ra, dec, unit=(u.deg), frame='icrs')

    #
    # get the neighbor stars, and their positions
    #
    Tmag_cutoff = 16
    radius = 6.0 * u.arcminute

    nbhr_stars = Catalogs.query_region("{} {}".format(float(c_obj.ra.value),
                                                      float(c_obj.dec.value)),
                                       catalog="TIC",
                                       radius=radius)

    sel = (nbhr_stars['Tmag'] < Tmag_cutoff)

    sra, sdec, ticids = (nbhr_stars[sel]['ra'], nbhr_stars[sel]['dec'],
                         nbhr_stars[sel]['ID'])

    return nbhr_stars, sra, sdec, ticids
Example #14
def generate_verification_page(lcd, ls, freq, power, cutoutpaths, c_obj,
                               outvppath, outd, show_binned=True):
    """
    Make the verification page, which consists of:

    top row: entire light curve (with horiz bar showing rotation period)

    bottom row:
        lomb scargle periodogram  |  phased light curve  |  image w/ aperture

    ----------
    args:

        lcd (dict): has the light curve, aperture positions, some lomb
        scargle results.

        ls: LombScargle instance with everything passed.

        cutoutpaths (list): FFI cutout FITS paths.

        c_obj (SkyCoord): astropy sky coordinate of the target

        outvppath (str): path to save verification page to
    """
    cutout_wcs = lcd['cutout_wcss'][0]

    mpl.rcParams['xtick.direction'] = 'in'
    mpl.rcParams['ytick.direction'] = 'in'

    plt.close('all')

    fig = plt.figure(figsize=(12,12))

    #ax0 = plt.subplot2grid((3, 3), (0, 0), colspan=3)
    #ax1 = plt.subplot2grid((3, 3), (1, 0), colspan=3)
    #ax2 = plt.subplot2grid((3, 3), (2, 0))
    #ax3 = plt.subplot2grid((3, 3), (2, 1))
    #ax4 = plt.subplot2grid((3, 3), (2, 2), projection=cutout_wcs)

    ax0 = plt.subplot2grid((3, 3), (1, 0), colspan=3)
    ax1 = plt.subplot2grid((3, 3), (2, 0), colspan=3)
    ax2 = plt.subplot2grid((3, 3), (0, 0))
    ax3 = plt.subplot2grid((3, 3), (0, 1))
    ax4 = plt.subplot2grid((3, 3), (0, 2), projection=cutout_wcs)

    #
    # row 0: entire light curve, pre-detrending (with horiz bar showing
    # rotation period). plot model LC too.
    #
    try:
        ax0.scatter(lcd['predetrending_time'], lcd['predetrending_rel_flux'],
                    c='k', alpha=1.0, zorder=3, s=10, rasterized=True,
                    linewidths=0)
    except KeyError as e:
        print('ERR! {}\nReturning.'.format(e))
        return


    try:
        model_flux = nparr(lcd['predetrending_rel_flux']/lcd['rel_flux'])
    except ValueError:
        model_flux = 0

    if isinstance(model_flux, np.ndarray):
        ngroups, groups = find_lc_timegroups(lcd['predetrending_time'], mingap=0.5)
        for group in groups:
            ax0.plot(lcd['predetrending_time'][group], model_flux[group], c='C0',
                     alpha=1.0, zorder=2, rasterized=True, lw=2)

    # add the bar showing the derived period
    ymax = np.percentile(lcd['predetrending_rel_flux'], 95)
    ymin = np.percentile(lcd['predetrending_rel_flux'], 5)
    ydiff = 1.15*(ymax-ymin)

    epoch = np.nanmin(lcd['predetrending_time']) + lcd['ls_period']
    ax0.plot([epoch, epoch+lcd['ls_period']], [ymax, ymax], color='red', lw=2,
             zorder=4)

    ax0.set_ylim((ymin-ydiff,ymax+ydiff))

    #ax0.set_xlabel('Time [BJD$_{\mathrm{TDB}}$]')
    ax0.set_xticklabels('')
    ax0.set_ylabel('Raw flux')

    name = outd['name']
    group_id = outd['group_id']
    if name=='nan':
        nstr = 'Group {}'.format(group_id)
    else:
        nstr = '{}'.format(name)


    if not np.isfinite(outd['teff']):
        outd['teff'] = 0

    ax0.text(0.98, 0.97,
        'Teff={:d}K. {}'.format(int(outd['teff']), nstr),
             ha='right', va='top', fontsize='large', zorder=2,
             transform=ax0.transAxes
    )

    #
    # row 1: entire light curve (with horiz bar showing rotation period)
    #
    ax1.scatter(lcd['time'], lcd['rel_flux'], c='k', alpha=1.0, zorder=2, s=10,
                rasterized=True, linewidths=0)

    # add the bar showing the derived period
    ymax = np.percentile(lcd['rel_flux'], 95)
    ymin = np.percentile(lcd['rel_flux'], 5)
    ydiff = 1.15*(ymax-ymin)

    epoch = np.nanmin(lcd['time']) + lcd['ls_period']
    ax1.plot([epoch, epoch+lcd['ls_period']], [ymax, ymax], color='red', lw=2)

    ax1.set_ylim((ymin-ydiff,ymax+ydiff))

    ax1.set_xlabel('Time [BJD$_{\mathrm{TDB}}$]')
    ax1.set_ylabel('Detrended flux')

    #
    # row 2, col 0: lomb scargle periodogram
    #
    ax2.plot(1/freq, power, c='k')
    ax2.set_xscale('log')
    ax2.text(0.03, 0.97, 'FAP={:.1e}\nP={:.1f}d'.format(
        lcd['ls_fap'], lcd['ls_period']), ha='left', va='top',
        fontsize='large', zorder=2, transform=ax2.transAxes
    )
    ax2.set_xlabel('Period [day]', labelpad=-1)
    ax2.set_ylabel('LS power')

    #
    # row 2, col 1: phased light curve 
    #
    phzd = phase_magseries(lcd['time'], lcd['rel_flux'], lcd['ls_period'],
                           lcd['time'][np.argmin(lcd['rel_flux'])], wrap=False,
                           sort=True)

    ax3.scatter(phzd['phase'], phzd['mags'], c='k', rasterized=True, s=10,
                linewidths=0, zorder=1)

    if show_binned:
        try:
            binphasedlc = phase_bin_magseries(phzd['phase'], phzd['mags'],
                                              binsize=1e-2, minbinelems=5)
            binplotphase = binphasedlc['binnedphases']
            binplotmags = binphasedlc['binnedmags']

            ax3.scatter(binplotphase, binplotmags, s=10, c='darkorange',
                        linewidths=0, zorder=3, rasterized=True)
        except TypeError as e:
            print(e)
            pass

    xlim = ax3.get_xlim()
    ax3.hlines(1.0, xlim[0], xlim[1], colors='gray', linestyles='dotted',
               zorder=2)
    ax3.set_xlim(xlim)

    ymax = np.percentile(lcd['rel_flux'], 95)
    ymin = np.percentile(lcd['rel_flux'], 5)
    ydiff = 1.15*(ymax-ymin)
    ax3.set_ylim((ymin-ydiff,ymax+ydiff))

    ax3.set_xlabel('Phase', labelpad=-1)
    ax3.set_ylabel('Flux', labelpad=-0.5)

    #
    # row2, col2: image w/ aperture. put on the nbhr stars as dots too, to
    # ensure the wcs isn't wonky!
    #

    # acquire neighbor stars.
    radius = 2.0*u.arcminute

    nbhr_stars = Catalogs.query_region(
        "{} {}".format(float(c_obj.ra.value), float(c_obj.dec.value)),
        catalog="TIC",
        radius=radius
    )

    try:
        Tmag_cutoff = 15
        px,py = cutout_wcs.all_world2pix(
            nbhr_stars[nbhr_stars['Tmag'] < Tmag_cutoff]['ra'],
            nbhr_stars[nbhr_stars['Tmag'] < Tmag_cutoff]['dec'],
            0
        )
    except Exception as e:
        print('ERR! wcs all_world2pix got {}'.format(repr(e)))
        return

    tmags = nbhr_stars[nbhr_stars['Tmag'] < Tmag_cutoff]['Tmag']

    sel = (px > 0) & (px < 19) & (py > 0) & (py < 19)
    px,py = px[sel], py[sel]
    tmags = tmags[sel]

    ra, dec = float(c_obj.ra.value), float(c_obj.dec.value)
    target_x, target_y = cutout_wcs.all_world2pix(ra,dec,0)

    #
    # finally make it
    #

    img = lcd['median_imgs'][0]

    # some images come out as nans.
    if np.all(np.isnan(img)):
        img = np.ones_like(img)

    interval = vis.PercentileInterval(99.9)
    vmin,vmax = interval.get_limits(img)
    norm = vis.ImageNormalize(
        vmin=vmin, vmax=vmax, stretch=vis.LogStretch(1000))

    cset = ax4.imshow(img, cmap='YlGnBu_r', origin='lower', zorder=1,
                      norm=norm)

    ax4.scatter(px, py, marker='x', c='r', s=5, rasterized=True, zorder=2,
                linewidths=1)
    ax4.plot(target_x, target_y, mew=0.5, zorder=5, markerfacecolor='yellow',
             markersize=7, marker='*', color='k', lw=0)

    #ax4.coords.grid(True, color='white', ls='dotted', lw=1)
    lon = ax4.coords['ra']
    lat = ax4.coords['dec']

    lon.set_ticks(spacing=1*u.arcminute)
    lat.set_ticks(spacing=1*u.arcminute)

    lon.set_ticklabel(exclude_overlapping=True)
    lat.set_ticklabel(exclude_overlapping=True)

    ax4.coords.grid(True, color='white', alpha=0.3, lw=0.3, ls='dotted')

    #cb0 = fig.colorbar(cset, ax=ax4, extend='neither', fraction=0.046, pad=0.04)

    # overplot aperture
    radius_px = 3
    circle = plt.Circle((target_x, target_y), radius_px,
                         color='C1', fill=False, zorder=5)
    ax4.add_artist(circle)

    #
    # cleanup
    # 
    for ax in [ax0,ax1,ax2,ax3,ax4]:
        ax.get_yaxis().set_tick_params(which='both', direction='in',
                                       labelsize='small', top=True, right=True)
        ax.get_xaxis().set_tick_params(which='both', direction='in',
                                       labelsize='small', top=True, right=True)

    fig.tight_layout(w_pad=0.5, h_pad=0)

    #
    # save
    #
    fig.savefig(outvppath, dpi=300, bbox_inches='tight')
    print('made {}'.format(outvppath))
Example #15
def get_tic(ra, dec, radiusDegrees):
    return Catalogs.query_region(str(ra) + " " + str(dec),
                                 radius=radiusDegrees,
                                 catalog="TIC")
Example #16
def plot_gaia_sources_on_tpf(
    tpf,
    target_gaiaid,
    gaia_sources=None,
    sap_mask="pipeline",
    depth=None,
    kmax=1,
    dmag_limit=8,
    fov_rad=None,
    cmap="viridis",
    figsize=None,
    ax=None,
    invert_xaxis=False,
    invert_yaxis=False,
    pix_scale=TESS_pix_scale,
    verbose=True,
    **mask_kwargs,
):
    """
    Plot gaia sources brighter than dmag_limit; only sources bright enough
    to reproduce the transit depth are annotated with starids;
    starids are ordered by increasing separation.

    dmag_limit : float
        maximum delta mag to consider; computed based on depth if None

    TODO: correct for proper motion difference between
    survey image and gaia DR2 positions
    """
    if verbose:
        print("Plotting nearby gaia sources on tpf.")
    assert target_gaiaid is not None
    img = np.nanmedian(tpf.flux, axis=0)
    # make aperture mask
    mask = parse_aperture_mask(tpf, sap_mask=sap_mask, **mask_kwargs)
    ax = plot_aperture_outline(img,
                               mask=mask,
                               imgwcs=tpf.wcs,
                               figsize=figsize,
                               cmap=cmap,
                               ax=ax)
    # nx, ny are needed later for the axis limits, so compute them unconditionally
    nx, ny = tpf.shape[1:]
    if fov_rad is None:
        diag = np.sqrt(nx**2 + ny**2)
        fov_rad = (0.4 * diag * pix_scale).to(u.arcmin).round(0)

    if gaia_sources is None:
        print(
            "Querying Gaia sometimes hangs. Provide `gaia_sources` if you can."
        )
        target_coord = SkyCoord(ra=tpf.header["RA_OBJ"],
                                dec=tpf.header["DEC_OBJ"],
                                unit="deg")
        gaia_sources = Catalogs.query_region(target_coord,
                                             radius=fov_rad,
                                             catalog="Gaia",
                                             version=2).to_pandas()
    assert len(gaia_sources) > 1, "gaia_sources contains single entry"
    # find sources within mask
    # target is assumed to be the first row
    idx = gaia_sources["source_id"].astype(int).isin([target_gaiaid])
    target_gmag = gaia_sources.loc[idx, "phot_g_mean_mag"].values[0]
    # sources_inside_aperture = []
    if depth is not None:
        # compute delta mag limit given transit depth
        dmag_limit = (np.log10(kmax / depth -
                               1) if dmag_limit is None else dmag_limit)

        # get min_gmag inside mask
        ra, dec = gaia_sources[["ra", "dec"]].values.T
        pix_coords = tpf.wcs.all_world2pix(np.c_[ra, dec], 0)
        contour_points = measure.find_contours(mask, level=0.1)[0]
        isinside = [
            is_point_inside_mask(contour_points, pix) for pix in pix_coords
        ]
        # sources_inside_aperture.append(isinside)
        min_gmag = gaia_sources.loc[isinside, "phot_g_mean_mag"].min()
        if (target_gmag - min_gmag) != 0:
            print(
                f"target Gmag={target_gmag:.2f} is not the brightest within aperture (Gmag={min_gmag:.2f})"
            )
    else:
        min_gmag = gaia_sources.phot_g_mean_mag.min()  # brightest
        dmag_limit = (gaia_sources.phot_g_mean_mag.max()
                      if dmag_limit is None else dmag_limit)

    base_ms = 128.0  # base marker size
    starid = 1
    # if very crowded, plot only top N
    gmags = gaia_sources.phot_g_mean_mag
    dmags = gmags - target_gmag
    rank = np.argsort(dmags.values)
    for index, row in gaia_sources.iterrows():
        # FIXME: why some indexes are missing?
        ra, dec, gmag, id = row[["ra", "dec", "phot_g_mean_mag", "source_id"]]
        dmag = gmag - target_gmag
        pix = tpf.wcs.all_world2pix(np.c_[ra, dec], 0)[0]
        contour_points = measure.find_contours(mask, level=0.1)[0]

        color, alpha = "red", 1.0
        # change marker color and transparency depending on the location and dmag
        if is_point_inside_mask(contour_points, pix):
            if int(id) == int(target_gaiaid):
                # plot x on target
                ax.plot(
                    pix[1],
                    pix[0],
                    marker="x",
                    ms=base_ms / 16,
                    c="k",
                    zorder=3,
                )
            if depth is not None:
                # compute flux ratio with respect to brightest star
                gamma = 1 + 10**(0.4 * (min_gmag - gmag))
                if depth > kmax / gamma:
                    # orange if flux is insignificant
                    color = "C1"
        else:
            # outside aperture
            color, alpha = "C1", 0.5

        ax.scatter(
            pix[1],
            pix[0],
            s=base_ms / 2**dmag,  # fainter -> smaller
            c=color,
            alpha=alpha,
            zorder=2,
            edgecolor=None,
        )
        # choose which star to annotate
        if len(gmags) < 20:
            # sparse: annotate all
            ax.text(pix[1], pix[0], str(starid), color="white", zorder=100)
        elif len(gmags) > 50:
            # crowded: annotate only 15 smallest dmag ones
            if rank[starid - 1] < 15:
                ax.text(pix[1], pix[0], str(starid), color="white", zorder=100)
            elif (color == "red") & (dmag < dmag_limit):
                # plot if within aperture and significant source of dilution
                ax.text(pix[1], pix[0], str(starid), color="white", zorder=100)
        elif color == "red":
            # neither sparse nor crowded
            # annotate if inside aperture
            ax.text(pix[1], pix[0], str(starid), color="white", zorder=100)
        starid += 1
    # Make legend with 4 sizes representative of delta mags
    dmags = dmags[dmags < dmag_limit]
    _, dmags = pd.cut(dmags, 3, retbins=True)
    for dmag in dmags:
        size = base_ms / 2**dmag
        # -1, -1 is outside the fov
        # dmag = 0 if float(dmag)==0 else 0
        ax.scatter(
            -1,
            -1,
            s=size,
            c="red",
            alpha=0.6,
            edgecolor=None,
            zorder=10,
            clip_on=True,
            label=r"$\Delta m= $" + f"{dmag:.1f}",
        )
    ax.legend(fancybox=True, framealpha=0.5)
    # set img limits
    xdeg = (nx * pix_scale).to(u.arcmin)
    ydeg = (ny * pix_scale).to(u.arcmin)
    # orient such that north is up; east is left
    if invert_yaxis:
        # ax.invert_yaxis()  # increasing upward
        raise NotImplementedError()
    if invert_xaxis:
        # ax.invert_xaxis() #decresing rightward
        raise NotImplementedError()
    if hasattr(ax, "coords"):
        ax.coords[0].set_major_formatter("dd:mm")
        ax.coords[1].set_major_formatter("dd:mm")
    pl.setp(ax,
            xlim=(0, nx),
            ylim=(0, ny),
            xlabel=f"({xdeg:.2f} x {ydeg:.2f})")
    return ax
Example #17
def plot_gaia_sources_on_survey(
    tpf,
    target_gaiaid,
    gaia_sources=None,
    fov_rad=None,
    depth=0.0,
    kmax=1.0,
    sap_mask="pipeline",
    survey="DSS2 Red",
    ax=None,
    color_aper="C0",  # pink
    figsize=None,
    invert_xaxis=False,
    invert_yaxis=False,
    pix_scale=TESS_pix_scale,
    verbose=True,
    **mask_kwargs,
):
    """Plot (superpose) Gaia sources on archival image

    Parameters
    ----------
    target_coord : astropy.coordinates
        target coordinate
    gaia_sources : pd.DataFrame
        gaia sources table
    fov_rad : astropy.unit
        FOV radius
    survey : str
        image survey; see from astroquery.skyview import SkyView;
        SkyView.list_surveys()
    verbose : bool
        print texts
    ax : axis
        subplot axis
    color_aper : str
        aperture outline color (default=C0)
    kwargs : dict
        keyword arguments for aper_radius, percentile
    Returns
    -------
    ax : axis
        subplot axis

    TODO: correct for proper motion difference between
    survey image and gaia DR2 positions
    """
    if verbose:
        print("Plotting nearby gaia sources on survey image.")
    assert target_gaiaid is not None
    ny, nx = tpf.flux.shape[1:]
    if fov_rad is None:
        diag = np.sqrt(nx**2 + ny**2)
        fov_rad = (0.4 * diag * pix_scale).to(u.arcmin).round(0)
    target_coord = SkyCoord(ra=tpf.ra * u.deg, dec=tpf.dec * u.deg)
    if gaia_sources is None:
        print(
            "Querying Gaia sometimes hangs. Provide `gaia_sources` if you can."
        )
        gaia_sources = Catalogs.query_region(target_coord,
                                             radius=fov_rad,
                                             catalog="Gaia",
                                             version=2).to_pandas()
    assert len(gaia_sources) > 1, "gaia_sources contains single entry"
    # make aperture mask
    mask = parse_aperture_mask(tpf, sap_mask=sap_mask, **mask_kwargs)
    maskhdr = tpf.hdu[2].header
    # make aperture mask outline
    contour = np.zeros((ny, nx))
    contour[np.where(mask)] = 1
    contour = np.lib.pad(contour, 1, PadWithZeros)
    highres = zoom(contour, 100, order=0, mode="nearest")
    extent = np.array([-1, nx, -1, ny])

    if verbose:
        print(
            f"Querying {survey} ({fov_rad:.2f} x {fov_rad:.2f}) archival image"
        )
    # -----------create figure---------------#
    if ax is None:
        # get img hdu for subplot projection
        try:
            hdu = SkyView.get_images(
                position=target_coord.icrs.to_string(),
                coordinates="icrs",
                survey=survey,
                radius=fov_rad,
                grid=False,
            )[0][0]
        except Exception:
            errmsg = "survey image not available"
            raise FileNotFoundError(errmsg)
        fig = pl.figure(figsize=figsize)
        # define scaling in projection
        ax = fig.add_subplot(111, projection=WCS(hdu.header))
    # plot survey img
    if str(target_coord.distance) == "nan":
        target_coord = SkyCoord(ra=target_coord.ra, dec=target_coord.dec)
    nax, hdu = plot_finder_image(target_coord,
                                 ax=ax,
                                 fov_radius=fov_rad,
                                 survey=survey,
                                 reticle=False)
    imgwcs = WCS(hdu.header)
    mx, my = hdu.data.shape
    # plot mask
    _ = ax.contour(
        highres,
        levels=[0.5],
        extent=extent,
        origin="lower",
        linewidths=[3],
        colors=color_aper,
        transform=ax.get_transform(WCS(maskhdr)),
    )
    idx = gaia_sources["source_id"].astype(int).isin([target_gaiaid])
    target_gmag = gaia_sources.loc[idx, "phot_g_mean_mag"].values[0]

    for index, row in gaia_sources.iterrows():
        marker, s = "o", 100
        r, d, mag, id = row[["ra", "dec", "phot_g_mean_mag", "source_id"]]
        pix = imgwcs.all_world2pix(np.c_[r, d], 1)[0]
        if int(id) != int(target_gaiaid):
            gamma = 1 + 10**(0.4 * (mag - target_gmag))
            if depth > kmax / gamma:
                # too deep to have originated from secondary star
                edgecolor = "C1"
                alpha = 1  # 0.5
            else:
                # possible NEBs
                edgecolor = "C3"
                alpha = 1
        else:
            s = 200
            edgecolor = "C2"
            marker = "s"
            alpha = 1
        nax.scatter(
            pix[0],
            pix[1],
            marker=marker,
            s=s,
            edgecolor=edgecolor,
            alpha=alpha,
            facecolor="none",
        )
    # orient such that north is up; left is east
    if invert_yaxis:
        # ax.invert_yaxis()
        raise NotImplementedError()
    if invert_xaxis:
        # ax.invert_xaxis()
        raise NotImplementedError()
    if hasattr(ax, "coords"):
        ax.coords[0].set_major_formatter("dd:mm")
        ax.coords[1].set_major_formatter("dd:mm")
    # set img limits
    pl.setp(
        nax,
        xlim=(0, mx),
        ylim=(0, my),
        title="{0} ({1:.2f}' x {1:.2f}')".format(survey, fov_rad.value),
    )
    return ax
Example #18
def query_TIC(target,
              target_coord,
              tic_id=None,
              search_radius=600. * u.arcsec,
              **kwargs):
    """
            Source: Courtesy of Dr. Timothy Van Reeth
            Note: I modified the behaviour when `tic_id` is given
            Retrieving information from the TESS input catalog. 
            
            Parameters:
                target: target name
                target_coord (optional): target coordinates (astropy Skycoord)
                search_radius: TIC entries around the target coordinates within this radius are considered.
                **kwargs: dict; to be passed to astroquery.Catalogs.query_object or query_region.
        """
    def _tic_handler(signum, frame):
        '''Supporting function of `query_TIC`'''
        print(
            'the query of the TIC is taking a long time... Something may be wrong with the database right now...'
        )

    deg_radius = float(search_radius / u.deg)
    arc_radius = float(search_radius / u.arcsec)

    tic = None
    tess_coord = None
    tmag = None
    nb_coords = []
    nb_tmags = []
    tic_index = -1

    try:
        # The TIC query should finish relatively fast, but has sometimes taken (a lot!) longer.
        # Setting a timer to warn the user if this is the case...
        signal.signal(signal.SIGALRM, _tic_handler)
        signal.alarm(
            30
        )  # This should be finished after 30 seconds, but it may take longer...

        catalogTIC = Catalogs.query_region(target_coord,
                                           catalog="TIC",
                                           radius=deg_radius,
                                           **kwargs)
        signal.alarm(0)

    except:
        print(f"no entry could be retrieved from the TIC around {target}.")
        catalogTIC = []

    if (len(catalogTIC) == 0):
        print(
            f"no entry around {target} was found in the TIC within a {deg_radius:5.3f} degree radius."
        )

    else:
        if not (tic_id is None):
            # tic_index = np.argmin((np.array(catalogTIC['ID'],dtype=int) - int(tic_id))**2.)
            # Stefano added:
            tic_index = np.argwhere(catalogTIC['ID'] == str(tic_id))
            if tic_index.size == 0:
                return '-1', None, None, None, None

            else:
                tic_index = tic_index.item()
        else:
            tic_index = np.argmin(catalogTIC['dstArcSec'])

        if (tic_index < 0):
            print(
                f"the attempt to retrieve target {target} from the TIC failed."
            )

        else:
            tic = int(catalogTIC[tic_index]['ID'])
            ra = catalogTIC[tic_index]['ra']
            dec = catalogTIC[tic_index]['dec']
            tmag = catalogTIC[tic_index]['Tmag']

            # Retrieve the coordinates
            tess_coord = SkyCoord(ra, dec, unit="deg")

            # Collecting the neighbours
            if (len(catalogTIC) > 1):
                for itic, tic_entry in enumerate(catalogTIC):
                    if (itic != tic_index):
                        nb_coords.append(
                            SkyCoord(tic_entry['ra'],
                                     tic_entry['dec'],
                                     unit="deg"))
                        nb_tmags.append(tic_entry['Tmag'])

    nb_tmags = np.array(nb_tmags)

    return tic, tess_coord, tmag, nb_coords, nb_tmags
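Usage sketch for query_TIC, assuming signal, numpy, and astroquery.mast.Catalogs are imported as the function requires; the target label is a placeholder and the coordinates are reused from Example #13.

import astropy.units as u
from astropy.coordinates import SkyCoord

# Query the TIC around a position and unpack the target plus its neighbours.
target_coord = SkyCoord(157.03728055645, -64.50521068147, unit='deg')
tic, tess_coord, tmag, nb_coords, nb_tmags = query_TIC(
    'my target', target_coord, search_radius=120. * u.arcsec)
print(tic, tmag, len(nb_coords))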
Example #19
def get_teff_rstar_logg(hdr):
    #
    # Given CDIPS header, acquire estimates of Teff, Rstar, logg from TICv8. If
    # Teff fails, go with the Gaia DR2 Teff.  If Rstar fails, go with Gaia DR2
    # Rstar.  If Rstar still fails, use Teff and Mamajek relation to
    # interpolate Rstar.
    #
    # If logg fails, but you have Gaia DR2 Rstar, then go from Rstar to Mstar
    # using Mamajek relation, and combine to estimate a ratty logg.
    #
    identifier = hdr['TICID']
    ra, dec = hdr['RA_OBJ'], hdr['DEC_OBJ']
    targetcoord = SkyCoord(ra=ra,
                           dec=dec,
                           unit=(u.degree, u.degree),
                           frame='icrs')
    radius = 10.0 * u.arcsec
    try:
        stars = Catalogs.query_region("{} {}".format(
            float(targetcoord.ra.value), float(targetcoord.dec.value)),
                                      catalog="TIC",
                                      radius=radius)
    except requests.exceptions.ConnectionError:
        print('ERR! TIC query failed. trying again...')
        pytime.sleep(30)
        stars = Catalogs.query_region("{} {}".format(
            float(targetcoord.ra.value), float(targetcoord.dec.value)),
                                      catalog="TIC",
                                      radius=radius)

    Tmag_pred = (hdr['phot_g_mean_mag'] - 0.00522555 *
                 (hdr['phot_bp_mean_mag'] - hdr['phot_rp_mean_mag'])**3 +
                 0.0891337 *
                 (hdr['phot_bp_mean_mag'] - hdr['phot_rp_mean_mag'])**2 -
                 0.633923 *
                 (hdr['phot_bp_mean_mag'] - hdr['phot_rp_mean_mag']) +
                 0.0324473)
    Tmag_cutoff = 1.2

    selstars = stars[np.abs(stars['Tmag'] - Tmag_pred) < Tmag_cutoff]

    if len(selstars) >= 1:

        seltab = selstars[np.argmin(selstars['dstArcSec'])]

        if not int(seltab['GAIA']) == int(hdr['GAIA-ID']):
            raise ValueError(
                'TIC result doesnt match hdr target gaia-id. ' +
                'Should just instead query selstars by Gaia ID you want.')

        teff = seltab['Teff']
        teff_err = seltab['e_Teff']
        logg = seltab['logg']
        logg_err = seltab['e_logg']
        rstar = seltab['rad']
        rstar_err = seltab['e_rad']

        if type(teff) == np.ma.core.MaskedConstant:
            print('WRN! TIC teff nan. why? trying gaia value...')
            teff = hdr['teff_val']
            teff_err = 100

        if type(rstar) == np.ma.core.MaskedConstant:

            print('WRN! TIC rstar nan. why? trying gaia value...')
            rstar = hdr['radius_val']

            if rstar == 'NaN':
                print('WRN! Gaia rstar also nan. Trying to interpolate from '
                      'Teff...')

                if teff == 'NaN':
                    raise NotImplementedError(
                        'need rstar somehow. didnt get for {}'.format(
                            identifier))

                rstar = get_interp_rstar_from_teff(teff)

            if rstar != 'NaN':
                # given rstar, get mass, so that you can get logg
                rstar_err = 0.3 * rstar
                mstar = get_interp_mass_from_rstar(rstar)

            if not isinstance(rstar, float):
                raise NotImplementedError('got unexpected value for rstar! '
                                          'manual debug required')

            _Mstar = mstar * u.Msun
            _Rstar = rstar * u.Rsun

            if type(logg) == np.ma.core.MaskedConstant:
                logg = np.log10((const.G * _Mstar / _Rstar**2).cgs.value)
                logg_err = 0.3 * logg

    else:
        raise ValueError('bad xmatch for {}'.format(hdr['GAIA-ID']))

    return teff, teff_err, rstar, rstar_err, logg, logg_err
Example #20
def plot_fov(target_coord,res,fov_rad=60*u.arcsec,ang_dist=15*u.arcsec,
             survey='DSS',verbose=True,outdir=None,savefig=False):
    """Plot FOV indicating the query position (magenta reticle) and nearby HARPS
    target (colored triangle), query radius (green circle) and Gaia DR2 sources
    (red squares)

    Parameters
    ----------
    targ_coord : astropy.coordinates.SkyCoord
        targ_coord
    res : pandas.DataFrame
        masked dataframe from `query_target`
    fov_rad : astropy.unit
        field of view radius
    ang_dist : astropy.unit
        angular distance within which to find nearest HARPS object
    survey : str
        survey name of archival image
    outdir : str
        download directory location
    verbose : bool
        print texts
    savefig : bool
        save figure

    Returns
    -------
    None
    """
    if verbose:
        print('\nGenerating FOV ...\n')

    nearest_obj = res['Target'].values[0]
    tic = res['ticid'].values[0]
    if outdir is None:
        outdir = nearest_obj
    else:
        #save with folder name==ticid
        if len(res['ticid'].dropna())>0:
            outdir = join(outdir,'tic'+str(tic))
#         elif res['toi'] is not None:
#             outdir = join(outdir,str(res['toi']).split('.')[0])
        else:
            outdir = join(outdir,nearest_obj)
    if not isdir(outdir):
        makedirs(outdir)

    nearest_obj_ra,nearest_obj_dec =res[['RA_deg','DEC_deg']].values[0]
    nearest_obj_coord = SkyCoord(ra=nearest_obj_ra, dec=nearest_obj_dec, unit=u.deg)

    #indicate target location with magenta reticle
    ax,hdu=plot_finder_image(target_coord,fov_radius=fov_rad,reticle=True,
        survey=survey,reticle_style_kwargs={'label':'target'})
    c = SphericalCircle((nearest_obj_ra, nearest_obj_dec)*u.deg, ang_dist,
        edgecolor='C2', transform=ax.get_transform('icrs'),
        facecolor='none', label='query radius')
    ax.set_title('{} ({})'.format(survey,nearest_obj))
    ax.add_patch(c)

    #harps objects within angular distance
    coords = SkyCoord(ra=res['RA_deg'], dec=res['DEC_deg'], unit=u.deg)
    sep2d = target_coord.separation(coords)

    #get indices that satisfy the criterion
    idxs = sep2d < ang_dist
    colors = cm.rainbow(np.linspace(0, 1, idxs.sum()))

    if len(coords[idxs])>1:
        #plot each star match within search radius
        for n,(coord,color) in enumerate(zip(coords[idxs],colors)):
            ax.scatter(coord.ra.deg, coord.dec.deg, transform=ax.get_transform('icrs'), s=300,
               marker='^', edgecolor=color, facecolor='none',label=res.loc[idxs,'Target'].values[n])
    else:
        ax.scatter(coords.ra.deg, coords.dec.deg, transform=ax.get_transform('icrs'), s=300,
               marker='^', edgecolor='blue', facecolor='none',label=res['Target'].values[0])

    #gaia dr2 sources
    wcs = WCS(hdu.header)
    mx, my = hdu.data.shape

    #query gaia sources within region centered at target_coord
    gaia_sources = Catalogs.query_region(target_coord, radius=fov_rad,
                                         catalog="Gaia", version=2).to_pandas()
    for r,d in gaia_sources[['ra','dec']].values:
        pix = wcs.all_world2pix(np.c_[r,d],1)[0]
        ax.scatter(pix[0], pix[1], marker='s', s=50, edgecolor='C1',
            facecolor='none', label='gaia source')
    pl.setp(ax, xlim=(0,mx), ylim=(0,my))

    #remove redundant labels due to 4 reticles
    handles, labels = pl.gca().get_legend_handles_labels()
    by_label = OrderedDict(zip(labels, handles))
    pl.legend(by_label.values(), by_label.keys())
    if savefig:
        fp = join(outdir,'tic{}_{}_fov.png'.format(tic,nearest_obj))
        ax.figure.savefig(fp,bbox_inches='tight')
        print('Saved: {}'.format(fp))
    return None
Example #21
def main(kc19_groupid=113, Tmag_cutoff=14, clean_gaia_cache=False):

    #
    # get info needed to query gaia for comparison stars
    #
    source_df = pd.read_csv('../data/kounkel_table1_sourceinfo.csv')
    sdf = source_df[(source_df['Tmag_pred'] < Tmag_cutoff)
                    & (source_df['group_id'] == kc19_groupid)]
    n_sel_sources_in_group = len(sdf)

    df2 = pd.read_csv('../data/string_table2.csv')

    gdf = df2[df2['group_id'] == kc19_groupid]

    group_coord = SkyCoord(float(gdf['l']) * u.deg,
                           float(gdf['b']) * u.deg,
                           frame='galactic')
    ra = group_coord.icrs.ra
    dec = group_coord.icrs.dec
    plx_mas = float(gdf['parallax'])

    #
    # define relevant directories / paths
    #
    gaiadir = os.path.join(basedir, 'gaia_queries')
    if not os.path.exists(gaiadir):
        os.mkdir(gaiadir)

    outfile = os.path.join(
        gaiadir, 'group{}_comparison_sample.xml.gz'.format(kc19_groupid))

    #
    # run the gaia query. require the same cuts imposed by Kounkel & Covey 2019
    # on stellar quality. also require close on-sky (within 5 degrees of KC19
    # group position), and close in parallax space (within +/-20% of KC19
    # parallax).
    #
    if clean_gaia_cache and os.path.exists(outfile):
        os.remove(outfile)

    if not os.path.exists(outfile):

        Gaia.login(credentials_file=os.path.join(homedir, '.gaia_credentials'))

        jobstr = ('''
        SELECT *
        FROM gaiadr2.gaia_source
        WHERE 1=CONTAINS(
          POINT('ICRS', ra, dec),
            CIRCLE('ICRS', {ra:.8f}, {dec:.8f}, {sep_deg:.1f}))
        AND parallax < {plx_upper:.2f} AND parallax > {plx_lower:.2f}
        AND parallax > 1
        AND parallax_error < 0.1
        AND 1.0857/phot_g_mean_flux_over_error < 0.03
        AND astrometric_sigma5d_max < 0.3
        AND visibility_periods_used > 8
        AND (
                (astrometric_excess_noise < 1)
                OR
                (astrometric_excess_noise > 1 AND astrometric_excess_noise_sig < 2)
        )
        ''')

        query = jobstr.format(sep_deg=5.0,
                              ra=ra.value,
                              dec=dec.value,
                              plx_upper=1.3 * plx_mas,
                              plx_lower=0.7 * plx_mas)

        if not os.path.exists(outfile):
            print(42 * '-')
            print('launching\n{}'.format(query))
            print(42 * '-')
            j = Gaia.launch_job(query=query,
                                verbose=True,
                                dump_to_file=True,
                                output_file=outfile)

        Gaia.logout()

    vot = parse(outfile)
    tab = vot.get_first_table().to_table()
    field_df = tab.to_pandas()

    #
    # require the same Tmag cutoff for the nbhd stars. ensure no overlap w/
    # sample of stars from the group itself. then randomly sample the
    # collection of stars.
    #

    Tmag_pred = (
        field_df['phot_g_mean_mag'] - 0.00522555 *
        (field_df['phot_bp_mean_mag'] - field_df['phot_rp_mean_mag'])**3 +
        0.0891337 *
        (field_df['phot_bp_mean_mag'] - field_df['phot_rp_mean_mag'])**2 -
        0.633923 *
        (field_df['phot_bp_mean_mag'] - field_df['phot_rp_mean_mag']) +
        0.0324473)

    field_df['Tmag_pred'] = Tmag_pred

    sfield_df = field_df[field_df['Tmag_pred'] < Tmag_cutoff]
    common = sfield_df.merge(sdf, on='source_id', how='inner')
    sfield_df = sfield_df[~sfield_df.source_id.isin(common.source_id)]

    n_field = len(sfield_df)

    if 2 * n_sel_sources_in_group > n_field:
        errmsg = (
            'ngroup: {}. nfield: {}. plz tune gaia query to get >2x the stars'.
            format(n_sel_sources_in_group, n_field))
        raise AssertionError(errmsg)

    srfield_df = sfield_df.sample(n=n_sel_sources_in_group)

    #
    # now given the gaia ids, get the rotation periods
    #
    for ix, r in srfield_df.iterrows():

        source_id = np.int64(r['source_id'])
        ra, dec = float(r['ra']), float(r['dec'])
        group_id = kc19_groupid
        name = str(gdf['name'].iloc[0])

        c_obj = SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs')

        #
        # require that we are on-silicon. for year 1, this roughly means --
        # we are in the southern ecliptic hemisphere
        #
        if c_obj.barycentrictrueecliptic.lat > 0 * u.deg:
            print('group{}, {}: found in northern hemisphere. skip!'.format(
                group_id, name))
            continue

        workingdir = os.path.join(
            basedir, 'fits_pkls_results_pngs',
            'field_star_comparison_group{}_name{}'.format(group_id, name))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)
        workingdir = os.path.join(workingdir, str(source_id))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)

        outvppath = os.path.join(workingdir,
                                 'verification_page_{}.png'.format(source_id))
        if os.path.exists(outvppath):
            print('found {}, continue'.format(outvppath))
            continue

        #
        # if you already downloaded ffi cutouts for this object, dont get any
        # more. otherwise, get them
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            print('found {} cutouts in {}, skip'.format(
                len(cutouts), workingdir))
        else:
            gfc.get_fficutout(c_obj, cutoutdir=workingdir)

        #
        # given the FFI cutouts, make simple light curves.
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            d = glgf.get_lc_given_fficutout(workingdir,
                                            cutouts,
                                            c_obj,
                                            return_pkl=False)
        else:
            d = np.nan
            print('WRN! did not find fficutout for {}'.format(workingdir))

        if not isinstance(d, dict):
            print('WRN! got bad light curve for {}. skipping.'.format(
                workingdir))
            continue

        outpath = os.path.join(workingdir, 'GLS_rotation_period.results')

        #
        # do Lomb scargle w/ uniformly weighted points.
        #
        ls = LombScargle(d['time'], d['rel_flux'])
        period_min = 0.1
        period_max = np.min(
            [0.9 * (np.max(d['time']) - np.min(d['time'])), 16])
        freq, power = ls.autopower(minimum_frequency=1 / period_max,
                                   maximum_frequency=1 / period_min)
        try:
            _ = power.max()
        except ValueError:
            print('WRN! got bad Lomb-Scargle for {}. skipping.'.format(
                workingdir))
            continue

        ls_fap = ls.false_alarm_probability(power.max(), method='baluev')
        ls_period = 1 / freq[np.argmax(power)]

        d['ls_fap'] = ls_fap
        d['ls_period'] = ls_period

        #
        # try to get TIC Teff. search TIC within 5 arcseconds, then take the
        # Gaia-ID match.  (removing sources with no gaia ID, which do exist in
        # TICv8).
        #
        radius = 5.0 * u.arcsecond

        stars = Catalogs.query_region("{} {}".format(float(c_obj.ra.value),
                                                     float(c_obj.dec.value)),
                                      catalog="TIC",
                                      radius=radius)

        nbhr_source_ids = np.array(stars['GAIA'])

        stars = stars[nbhr_source_ids != '']
        nbhr_source_ids = nbhr_source_ids[nbhr_source_ids != '']

        sel = nbhr_source_ids.astype(int) == source_id

        if len(sel[sel]) == 1:
            star = stars[sel]
        else:
            raise NotImplementedError('did not get any TIC match. why?')

        teff = float(star['Teff'])
        if not (isinstance(teff, float) and np.isfinite(teff)):
            raise NotImplementedError('got nan TIC teff. what do?')

        #
        # make "check plot" analog for visual inspection
        #
        outd = {
            'ls_fap': d['ls_fap'],
            'ls_period': d['ls_period'],
            'source_id': source_id,
            'ra': ra,
            'dec': dec,
            'name': name,
            'group_id': group_id,
            'teff': teff
        }
        pu.save_status(outpath, 'lomb-scargle', outd)

        vp.generate_verification_page(d, ls, freq, power, cutouts, c_obj,
                                      outvppath, outd)
Example #22
    source_id = r['source_id']

    # get the gaia information
    gaia_d = objectid_search(source_id, columns=columns)
    gaia_df = pd.read_csv(gaia_d['result'])

    ra, dec = float(gaia_df['ra']), float(gaia_df['dec'])
    targetcoord = SkyCoord(ra=ra,
                           dec=dec,
                           unit=(u.degree, u.degree),
                           frame='icrs')
    radius = 1.0 * u.arcminute

    stars = Catalogs.query_region("{} {}".format(float(targetcoord.ra.value),
                                                 float(targetcoord.dec.value)),
                                  catalog="TIC",
                                  radius=radius)

    # require finite dr2 value
    sel = ~stars['GAIA'].mask
    stars = stars[sel]

    # TICv8: just get whichever star matches the source_id!
    target_star = stars[stars['GAIA'] == str(source_id)]

    if len(target_star) != 1:
        raise AssertionError('didnt get match for {}'.format(source_id))

    cols = [
        'ID', 'Bmag', 'Vmag', 'Jmag', 'Hmag', 'Kmag', 'Tmag', 'Teff', 'logg',
        'rad', 'mass'
Example #23
def plot_rotationcheck(a, lsp, objectinfo):

    ap = int(a['ap'])
    APSIZE = APSIZEDICT[ap]
    phdr = a['dtr_infos'][0][0]
    TESSMAG = phdr['TESSMAG']

    time, flux, err = a['STIME'], a[f'SPCA{ap}'], a[f'SPCAE{ap}']
    rawtime, rawmag = a['TMID_BJD'], a['vec_dict'][f'IRM{ap}']

    ra, dec = phdr['RA_OBJ'], phdr['DEC_OBJ']
    dss, dss_hdr, sizepix = _get_dss(ra, dec)

    # Count how many stars inside the aperture are brighter.
    radius = APSIZE * 21.0 * u.arcsec
    nbhr_stars = Catalogs.query_region("{} {}".format(float(ra), float(dec)),
                                       catalog="TIC",
                                       radius=radius)
    n_in_ap_equal = len(nbhr_stars[nbhr_stars['Tmag'] < TESSMAG])
    n_in_ap_close = len(nbhr_stars[nbhr_stars['Tmag'] < (TESSMAG + 1.25)])
    n_in_ap_faint = len(nbhr_stars[nbhr_stars['Tmag'] < (TESSMAG + 2.5)])
    n_dict = {
        'equal': n_in_ap_equal,
        'close': n_in_ap_close,
        'faint': n_in_ap_faint
    }

    #
    # make the plot!
    #

    figsize = (30, 10)
    plt.close('all')

    fig = plt.figure(figsize=figsize)
    ax0 = plt.subplot2grid((1, 3), (0, 0))
    ax1 = plt.subplot2grid((1, 3), (0, 1))
    ax2 = plt.subplot2grid((1, 3), (0, 2), projection=WCS(dss_hdr))
    # fig, axs = plt.subplots(nrows=nrows, ncols=ncols, figsize=figsize)

    #
    # First: periodogram
    #
    ls_period_0 = lsp['bestperiod']
    period = lsp['periods']
    power = lsp['lspvals']

    ax0.plot(period, power)
    ax0.axvline(ls_period_0, alpha=0.4, lw=1, color='C0', ls='-')
    ax0.axvline(2 * ls_period_0, alpha=0.4, lw=1, color='C0', ls='--')
    ax0.axvline(0.5 * ls_period_0, alpha=0.4, lw=1, color='C0', ls='--')
    ax0.set_title(f'P = {ls_period_0:.3f} d')
    ax0.set_ylabel('LS Power')
    ax0.set_xlabel('Period [days]')
    ax0.set_xscale('log')

    #
    # Next: Prot vs (extinction corrected) Bp-Rp color.
    #
    E_BpmRp = a['E_BpmRp']
    if E_BpmRp is None:
        include_extinction = False
    else:
        include_extinction = True

    BpmRp = phdr['phot_bp_mean_mag'] - phdr['phot_rp_mean_mag']
    if include_extinction:
        BpmRp -= E_BpmRp

    rotdir = os.path.join(DATADIR)

    classes = ['pleiades', 'praesepe']
    colors = ['k', 'gray']
    zorders = [3, 2]
    markers = ['o', 'x']
    lws = [0, 0.]
    mews = [0.5, 0.5]
    ss = [3.0, 6]
    labels = ['Pleiades', 'Praesepe']

    for _cls, _col, z, m, l, lw, s, mew in zip(classes, colors, zorders,
                                               markers, labels, lws, ss, mews):

        t = Table.read(os.path.join(rotdir, 'Curtis_2020_apjabbf58t5_mrt.txt'),
                       format='cds')
        if _cls == 'pleiades':
            df = t[t['Cluster'] == 'Pleiades'].to_pandas()
        elif _cls == 'praesepe':
            df = t[t['Cluster'] == 'Praesepe'].to_pandas()
        else:
            raise NotImplementedError

        xval = df['(BP-RP)0']

        ax1.plot(xval,
                 df['Prot'],
                 c=_col,
                 alpha=1,
                 zorder=z,
                 markersize=s,
                 rasterized=False,
                 lw=lw,
                 label=l,
                 marker=m,
                 mew=mew,
                 mfc=_col)

    ax1.plot(BpmRp,
             ls_period_0,
             alpha=1,
             mew=0.5,
             zorder=8,
             label='Target',
             markerfacecolor='yellow',
             markersize=18,
             marker='*',
             color='black',
             lw=0)

    ax1.legend(loc='best', handletextpad=0.1, framealpha=0.7)
    ax1.set_ylabel(r'P$_\mathrm{rot}$ [days]')
    ax1.set_xlabel(r'($G_{\mathrm{BP}}-G_{\mathrm{RP}}$)$_0$ [mag]')
    ax1.set_ylim((0, 14))

    #
    # Finally: blending check. DSS finder.
    #

    # standard tick formatting fails for these images.
    import matplotlib as mpl
    mpl.rcParams['xtick.direction'] = 'in'
    mpl.rcParams['ytick.direction'] = 'in'

    cset2 = ax2.imshow(dss, origin='lower', cmap=plt.cm.gray_r)

    ax2.grid(ls='--', alpha=0.5)
    ax2.set_title('DSS2 Red', fontsize='xx-large')
    showcolorbar = False
    if showcolorbar:
        cb = fig.colorbar(cset2,
                          ax=ax2,
                          extend='neither',
                          fraction=0.046,
                          pad=0.04)

    # DSS is ~1 arcsecond per pixel. overplot aperture that was used.
    px_to_arcsec = 21
    circle = plt.Circle((sizepix / 2, sizepix / 2),
                        APSIZE * px_to_arcsec,
                        color='C0',
                        fill=False,
                        zorder=5)
    ax2.add_artist(circle)

    ax2.set_xlabel(r'$\alpha_{2000}$')
    ax2.set_ylabel(r'$\delta_{2000}$')

    #
    # clean figure
    #

    for ax in [ax0, ax1, ax2]:
        ax.get_yaxis().set_tick_params(which='both',
                                       direction='in',
                                       labelsize='xx-large')
        ax.get_xaxis().set_tick_params(which='both',
                                       direction='in',
                                       labelsize='xx-large')

    fig.tight_layout(w_pad=0.5, h_pad=0.5)

    return fig, n_dict
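The contamination count above reduces to a single TIC cone search plus three magnitude cuts. Below is a minimal standalone sketch of that step; the count_brighter_neighbors name and the 2-pixel default aperture are illustrative assumptions, while the 21 arcsec plate scale and the Tmag thresholds follow the code above.

import astropy.units as u
from astroquery.mast import Catalogs

def count_brighter_neighbors(ra, dec, tessmag, apsize_px=2):
    # TESS pixels are roughly 21 arcsec on a side, so the aperture radius in
    # arcsec is the aperture size in pixels times the plate scale.
    radius = apsize_px * 21.0 * u.arcsec
    nbhr = Catalogs.query_region("{} {}".format(float(ra), float(dec)),
                                 catalog="TIC", radius=radius)
    # Count neighbors at the same three brightness thresholds as above.
    return {
        'equal': int((nbhr['Tmag'] < tessmag).sum()),
        'close': int((nbhr['Tmag'] < tessmag + 1.25).sum()),
        'faint': int((nbhr['Tmag'] < tessmag + 2.5).sum()),
    }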
Example #24
0
def get_panstarrs(cfg,
                  field_name,
                  pointing,
                  filter,
                  radius=0.40,
                  maglimit=None,
                  log=None):
    catalogname = cfg['Photometry'].get('calibration_catalog')
    band = {'PSi': 'i', 'PSr': 'r'}[filter]
    if maglimit is None: maglimit = 25

    ## First check if we have a pre-downloaded catalog for this field
    local_catalog_path = Path(cfg['Photometry'].get('local_catalog_path', '.'))
    local_catalog_file = local_catalog_path.joinpath(
        f'{field_name}_{band}{maglimit*10:03.0f}.cat')
    if local_catalog_file.exists():
        ## Read local file
        if log: log.debug(f'  Reading {local_catalog_file}')
        pscat = Table.read(local_catalog_file, format='ascii.csv')
    else:
        ## Download
        if log: log.debug(f'  Downloading from Mast')
        from astroquery.mast import Catalogs
        #         cols = ['objName', 'objID', 'objInfoFlag', 'qualityFlag', 'raMean',
        #                 'decMean', 'raMeanErr', 'decMeanErr', 'epochMean', 'nDetections',
        #                 'ng', 'nr', 'ni', 'gMeanApMag', 'gMeanApMagErr', 'gMeanApMagStd',
        #                 'gMeanApMagNpt', 'gFlags', 'rMeanApMag', 'rMeanApMagErr',
        #                 'rMeanApMagStd', 'rMeanApMagNpt', 'rFlags', 'iMeanApMag',
        #                 'iMeanApMagErr', 'iMeanApMagStd', 'iMeanApMagNpt', 'iFlags']
        cols = [
            'objName', 'objID', 'raMean', 'decMean', 'raMeanErr', 'decMeanErr',
            'gMeanApMag', 'gMeanApMagErr', 'gMeanApMagStd', 'gMeanApMagNpt',
            'gFlags', 'rMeanApMag', 'rMeanApMagErr', 'rMeanApMagStd',
            'rMeanApMagNpt', 'rFlags', 'iMeanApMag', 'iMeanApMagErr',
            'iMeanApMagStd', 'iMeanApMagNpt', 'iFlags'
        ]

        if band in ['i', 'r', 'g']:
            pscat = Catalogs.query_region(
                pointing,
                radius=radius,
                catalog="Panstarrs",
                table="mean",
                data_release="dr2",
                sort_by=[("desc", f"{band}MeanApMag")],
                columns=cols,
                iMeanApMag=[("gte", 0), ("lte", maglimit)],
            )
        else:
            pscat = Catalogs.query_region(
                pointing,
                radius=radius,
                catalog="Panstarrs",
                table="mean",
                data_release="dr2",
                columns=cols,
            )

#         if band == 'i':
#             pscat = Catalogs.query_region(pointing, radius=radius,
#                              catalog="Panstarrs", table="mean", data_release="dr2",
#                              sort_by=[("desc", f"{band}MeanApMag")], columns=cols,
#                              iMeanApMag=[("gte", 0), ("lte", maglimit)],
#                              )
#         elif band == 'r':
#             pscat = Catalogs.query_region(pointing, radius=radius,
#                              catalog="Panstarrs", table="mean", data_release="dr2",
#                              sort_by=[("desc", f"{band}MeanApMag")], columns=cols,
#                              rMeanApMag=[("gte", 0), ("lte", maglimit)],
#                              )
#         elif band == 'g':
#             pscat = Catalogs.query_region(pointing, radius=radius,
#                              catalog="Panstarrs", table="mean", data_release="dr2",
#                              sort_by=[("desc", f"{band}MeanApMag")], columns=cols,
#                              gMeanApMag=[("gte", 0), ("lte", maglimit)],
#                              )
#         else:
#             pscat = Catalogs.query_region(pointing, radius=radius,
#                              catalog="Panstarrs", table="mean", data_release="dr2",
#                              columns=cols,
#                              )
        if log: log.debug(f'  Got {len(pscat)} entries total')
        if log:
            log.debug(
                f'  Got {len(pscat)} entries with {band}-band magnitudes')
        if log: log.debug(f'  Writing {local_catalog_file}')
        pscat.write(local_catalog_file, format='ascii.csv')

    # Filter based on magnitude
    if maglimit is not None:
        pscat = pscat[pscat[f'{band}MeanApMag'] <= maglimit]

    return pscat
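Stripped of the caching and logging, the Pan-STARRS branch above is a single call. A minimal sketch follows, assuming the same DR2 mean-object table and server-side iMeanApMag filter used in the example; the pointing and maglimit values are placeholders.

import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery.mast import Catalogs

pointing = SkyCoord(10.684, 41.269, unit='deg')   # hypothetical field center
maglimit = 18.0

# Mean-photometry objects from Pan-STARRS DR2 with 0 <= iMeanApMag <= maglimit.
pscat = Catalogs.query_region(pointing,
                              radius=0.4 * u.deg,
                              catalog="Panstarrs",
                              table="mean",
                              data_release="dr2",
                              columns=['objID', 'raMean', 'decMean',
                                       'iMeanApMag'],
                              iMeanApMag=[("gte", 0), ("lte", maglimit)])
print('{} sources with 0 <= iMeanApMag <= {}'.format(len(pscat), maglimit))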
Example #25
0
def search_catalogs(coord, catnames, search_radius=2.564*u.arcmin,
    match_radius=2.0*u.arcsec, outfile=''):

    if outfile and os.path.exists(outfile):
        m = '{0} exists.  Do you want to use this catalog?'
        m = m.format(outfile) + '([y]/n): '
        y = input(m)
        if not y or y=='y' or y=='yes':
            outtable = ascii.read(outfile)
            return(outtable)
        else:
            print('Redoing search...')

    outtable = None
    for name in catnames:

        table = None
        print('Searching {0} catalog...'.format(name))
        if name in viziercat.keys():
            table = searchVizier(coord, name, radius=search_radius)
            table_match_ra = 'RAJ2000'
            table_match_dec = 'DEJ2000'
        elif name=='ps1dr2':
            table = Catalogs.query_region(coord, catalog='Panstarrs',
                radius=search_radius.to_value('degree'),
                data_release='dr2', table='mean')
            table_match_ra = 'raMean'
            table_match_dec = 'decMean'
        elif name=='strm':
            table = docasjobsstrm(coord, size=search_radius.to_value('degree'))
            table_match_ra = 'raMean'
            table_match_dec = 'decMean'

        if table and len(table)>0:
            print('{0} records in {1} catalog'.format(len(table), name))
            if not outtable:
                outtable = table
                for key in table.keys(): table.rename_column(key, name+'_'+key)
            else:
                out_match_ra = ''
                out_match_dec = ''
                if 'ra' in outtable.keys() and 'dec' in outtable.keys():
                    out_match_ra = 'ra'
                    out_match_dec = 'dec'
                elif any(['ps1dr2' in key for key in outtable.keys()]):
                    out_match_ra = 'ps1dr2_raMean'
                    out_match_dec = 'ps1dr2_decMean'
                elif any(['strm' in key for key in outtable.keys()]):
                    out_match_ra = 'strm_raMean'
                    out_match_dec = 'strm_decMean'
                else:
                    for key in outtable.keys():
                        if 'raj2000' in key.lower() and not out_match_ra:
                            out_match_ra = key
                        if 'dej2000' in key.lower() and not out_match_dec:
                            out_match_dec = key

                outtable = crossmatch_tables(outtable,table,'',name,
                    radius=match_radius,
                    cat1ra=out_match_ra, cat1dec=out_match_dec,
                    cat2ra=table_match_ra, cat2dec=table_match_dec)

            n=len(outtable)
            print('{0} total records in catalog'.format(n))

        else:
            print('0 records in {0} catalog'.format(name))

    if not outfile:
        if not os.path.exists('data/'):
            os.makedirs('data/')
        # Definitely gonna want to write this out, so write out to outfile.cat
        ascii.write(outtable, 'data/outfile.cat')
    else:
        outdir, base = os.path.split(outfile)
        if outdir and not os.path.exists(outdir):
            os.makedirs(outdir)
        ascii.write(outtable, outfile)

    return(outtable)
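The crossmatch_tables helper is not shown in this example; conceptually it performs a positional match within match_radius and joins the matched rows. Below is a rough illustration of that step using astropy's match_to_catalog_sky; it is a sketch only, not the actual helper, and the simple_crossmatch name is made up.

import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.table import hstack

def simple_crossmatch(t1, t2, ra1, dec1, ra2, dec2, radius=2.0 * u.arcsec):
    # Nearest neighbour in t2 for every row of t1, then keep pairs closer
    # than the match radius and stack their columns side by side.
    c1 = SkyCoord(t1[ra1], t1[dec1], unit='deg')
    c2 = SkyCoord(t2[ra2], t2[dec2], unit='deg')
    idx, sep, _ = c1.match_to_catalog_sky(c2)
    matched = sep < radius
    return hstack([t1[matched], t2[idx[matched]]])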
Example #26
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""


.. codeauthor:: Rasmus Handberg <*****@*****.**>
"""

import numpy as np
import matplotlib.pyplot as plt
from astroquery.mast import Catalogs
import astropy.units as u

if __name__ == '__main__':

	buffer_coord = 0.1

	radius = np.sqrt(6**2 + 6**2) + buffer_coord
	radius = u.Quantity(radius, u.deg)
	print(radius)

	catalogData = Catalogs.query_region("352.49324 12.16683", radius=radius, catalog="Tic") # disposition=None

	print(catalogData)
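A cone search that wide returns a very large table; one way to make the printout digestible (purely illustrative, with a smaller radius to keep it quick) is to keep just the bright end and sort by TESS magnitude:

import astropy.units as u
from astroquery.mast import Catalogs

catalogData = Catalogs.query_region("352.49324 12.16683",
                                    radius=0.5 * u.deg, catalog="Tic")
bright = catalogData[catalogData['Tmag'] < 10]   # keep only Tmag < 10 stars
bright.sort('Tmag')                              # brightest first
print(bright['ID', 'Tmag'][:20])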
Example #27
0
def main():

    source_df = pd.read_csv('../data/kounkel_table1_sourceinfo.csv')

    sel = ((source_df['Tmag_pred'] < 14) & (source_df['parallax'] > 5))

    # sel = (
    #     (source_df['Tmag_pred'] < 14)
    #     &
    #     (source_df['age'] < 9.2)
    #     &
    #     (source_df['age'] > 7.5)
    # )

    sdf = source_df[sel]
    print(
        'after making cuts on Tmag_pred<14 and parallax>5mas, got {} stars, {} groups'
        .format(len(sdf), len(np.unique(sdf['group_id']))))

    df2 = pd.read_csv('../data/string_table2.csv')
    sdf2 = df2[df2['parallax'] > 5]
    # sdf2 = df2[(df2['age']>7.5) & (df2['age']<9.2)]
    # sdf2_str = sdf2[sdf2['string']=='y']

    # require that we only look at close, middle-aged objects as flagged
    # from glue visualizations
    close_middle_aged = [
        1005, 208, 506, 424, 676, 507, 594, 209, 425, 595, 677, 905, 45, 7, 63
    ]

    # older (like age >~8.4), and a bit further... like max
    subset4 = [
        1345, 1273, 1274, 1346, 906, 784, 1089, 508, 678, 509, 1006, 907, 785,
        786
    ]

    # now given the gaia ids, get the rotation periods
    for ix, r in sdf.iterrows():

        source_id = np.int64(r['source_id'])
        ra, dec = float(r['ra_x']), float(r['dec_x'])
        name = str(r['name'])
        group_id = str(r['group_id'])

        ##########################################
        # NOTE: change often

        #if int(group_id) not in subset4:
        #    continue
        #if int(group_id) not in close_middle_aged:
        #    continue
        #if int(group_id) != 113:
        #    continue
        #if source_id != 5220404075366707584:
        #    continue
        #if int(group_id) not in np.array(sdf2_str['group_id']).astype(int):
        #    # require that we only look at things Kounkel labelled as strings
        #    continue
        #if name != 'AB_Dor':
        #    continue
        #if source_id != 5579169050153502976:
        #    continue
        ##########################################

        c_obj = SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs')

        #
        # require that we are on-silicon. for year 1, this roughly means --
        # we are in the southern ecliptic hemisphere
        #
        if c_obj.barycentrictrueecliptic.lat > 0 * u.deg:
            print('group{}, {}: found in northern hemisphere. skip!'.format(
                group_id, name))
            continue

        workingdir = os.path.join(basedir, 'fits_pkls_results_pngs',
                                  'group{}_name{}'.format(group_id, name))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)
        workingdir = os.path.join(workingdir, str(source_id))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)

        outvppath = os.path.join(workingdir,
                                 'verification_page_{}.png'.format(source_id))
        if os.path.exists(outvppath):
            print('found {}, continue'.format(outvppath))
            continue
        if os.path.exists(os.path.join(workingdir, 'failed.bool')):
            print('found {}, continue'.format(
                os.path.join(workingdir, 'failed.bool')))
            continue

        #
        # if you already downloaded ffi cutouts for this object, don't get any
        # more. otherwise, get them
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            print('found {} cutouts in {}, skip'.format(
                len(cutouts), workingdir))
        else:
            try:
                gfc.get_fficutout(c_obj, cutoutdir=workingdir)
            except requests.exceptions.HTTPError as e:
                print('ERR! {}: {} failed to get FFI cutout'.format(
                    repr(e), workingdir))

        #
        # given the FFI cutouts, make simple light curves.
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            d = glgf.get_lc_given_fficutout(workingdir,
                                            cutouts,
                                            c_obj,
                                            return_pkl=True)
        else:
            d = np.nan
            print('WRN! did not find fficutout for {}'.format(workingdir))

        if not isinstance(d, dict) or len(d['time']) == 0:
            print('WRN! got bad light curve for {}. skipping.'.format(
                workingdir))
            os.mknod(os.path.join(workingdir, 'failed.bool'))
            continue

        outpath = os.path.join(workingdir, 'GLS_rotation_period.results')

        #
        # do Lomb scargle w/ uniformly weighted points.
        #
        ls = LombScargle(d['time'], d['rel_flux'])
        period_min = 0.1
        period_max = np.min(
            [0.9 * (np.max(d['time']) - np.min(d['time'])), 16])
        freq, power = ls.autopower(minimum_frequency=1 / period_max,
                                   maximum_frequency=1 / period_min)
        try:
            _ = power.max()
        except ValueError:
            print('WRN! got bad Lomb-Scargle for {}. skipping.'.format(
                workingdir))
            continue

        ls_fap = ls.false_alarm_probability(power.max(), method='baluev')
        ls_period = 1 / freq[np.argmax(power)]

        d['ls_fap'] = ls_fap
        d['ls_period'] = ls_period

        #
        # collect standard variability info
        #
        rel_flux_rms = np.std(d['rel_flux'])
        rel_flux_iqr = iqr(d['rel_flux'], rng=(25, 75))
        rel_flux_15_to_85 = iqr(d['rel_flux'], rng=(15, 85))
        rel_flux_5_to_95 = iqr(d['rel_flux'], rng=(5, 95))
        rel_flux_median = np.median(d['rel_flux'])
        rel_flux_mad = np.median(np.abs(d['rel_flux'] - rel_flux_median))

        #
        # try to get TIC Teff. search TIC within 5 arcseconds, then take the
        # Gaia-ID match (removing sources with no Gaia ID, which do exist in
        # TICv8).
        #
        radius = 5.0 * u.arcsecond

        stars = Catalogs.query_region("{} {}".format(float(c_obj.ra.value),
                                                     float(c_obj.dec.value)),
                                      catalog="TIC",
                                      radius=radius)

        nbhr_source_ids = np.array(stars['GAIA'])

        stars = stars[nbhr_source_ids != '']
        nbhr_source_ids = nbhr_source_ids[nbhr_source_ids != '']

        sel = nbhr_source_ids.astype(int) == source_id

        if len(sel[sel]) == 1:
            star = stars[sel]
        else:
            raise NotImplementedError('did not get any TIC match. why?')

        teff = float(star['Teff'])
        if not np.isfinite(teff):
            raise NotImplementedError('got nan TIC teff. what do?')

        #
        # make "check plot" analog for visual inspection
        #
        outd = {
            'ls_fap': d['ls_fap'],
            'ls_period': d['ls_period'],
            'source_id': source_id,
            'ra': ra,
            'dec': dec,
            'name': name,
            'group_id': group_id,
            'teff': teff,
            'rel_flux_rms': rel_flux_rms,
            'rel_flux_iqr': rel_flux_iqr,
            'rel_flux_15_to_85': rel_flux_15_to_85,
            'rel_flux_5_to_95': rel_flux_5_to_95,
            'rel_flux_median': rel_flux_median,
            'rel_flux_mad': rel_flux_mad
        }
        pu.save_status(outpath, 'variability_info', outd)
        pu.save_status(outpath, 'starinfo', dict(r))

        vp.generate_verification_page(d, ls, freq, power, cutouts, c_obj,
                                      outvppath, outd)
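The period measurement buried in the loop above is easy to factor out. Below is a minimal sketch of that Lomb-Scargle step; the measure_prot name is a hypothetical helper, the import assumes astropy's LombScargle (consistent with the call signatures used above), and the period bounds and Baluev false-alarm probability follow the example.

import numpy as np
from astropy.timeseries import LombScargle  # assumed implementation

def measure_prot(time, flux, period_min=0.1, period_max_cap=16.0):
    # Cap the longest searched period at 90% of the baseline (or 16 d),
    # mirroring the loop body above.
    baseline = np.max(time) - np.min(time)
    period_max = min(0.9 * baseline, period_max_cap)
    ls = LombScargle(time, flux)
    freq, power = ls.autopower(minimum_frequency=1 / period_max,
                               maximum_frequency=1 / period_min)
    fap = ls.false_alarm_probability(power.max(), method='baluev')
    return 1 / freq[np.argmax(power)], fap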