Example #1
import numpy as np
from astropy.timeseries import LombScargle


def periodcheck(thistime, thisflux, mflags):
    #dates,flux,flux_pcor,flux_ptcor,mflags = readpsfk2cor(k2name)
    ig = (mflags == 0)  # keep only good-quality cadences
    #
    # k2sc documentation:
    # https://github.com/OxES/k2sc/blob/master/relase_readme.txt
    # indicates that mflags == 0 marks good data.
    #
    # This section, not used, shows how to do sigma clipping. Unnecessary since mflags already
    # applies a ~4-5 sigma clip.
    #sigma clipping stuff ; see http://docs.astropy.org/en/stable/stats/robust.html#sigma-clipping
    #from astropy.stats import sigma_clip
    #filtered_data = sigma_clip(flux_ptcor, sigma=3, iters=10)
    # that would be a mask
    #
    # DISCOVERY: WILL CRASH IF ALL DATA FLAGGED AS BAD!!!
    # SOLUTION: DON'T GIVE IT THOSE FILES!
    #
    # Periodogram stuff
    # search good data only in period range 1 hour to 10 days
    ls = LombScargle(thistime[ig], thisflux[ig])
    frequency, power = ls.autopower(maximum_frequency=24.0,
                                    minimum_frequency=0.1)
    #
    best_frequency = frequency[np.argmax(power)]
    best_fap = ls.false_alarm_probability(power.max())
    #
    # Calculate the model if desired
    #y_fit = ls.model(dates, best_frequency)
    #plt.plot(t_fit,y_fit,'k-')
    #
    return frequency, power, best_frequency, best_fap
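A usage sketch on synthetic data (the sinusoid parameters are arbitrary), assuming the imports above:

rng = np.random.default_rng(0)
t = np.sort(rng.uniform(0, 30, 500))      # 30 days of uneven sampling
flux = 1.0 + 0.01 * np.sin(2 * np.pi * t / 2.5) \
           + 0.002 * rng.standard_normal(500)
mflags = np.zeros(500, dtype=int)         # all cadences flagged as good

frequency, power, best_frequency, best_fap = periodcheck(t, flux, mflags)
print(1 / best_frequency, best_fap)       # should recover a period near 2.5 days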
Example #2

def perform_and_return_lombs(x, y):
    import numpy as np
    from astropy.stats import LombScargle

    # frequency grid to search
    f = np.linspace(0.01, 3, 10_000)

    lombS = LombScargle(x, y)
    power = lombS.power(f)
    fmax = f[np.argmax(power)]
    period = 1 / fmax

    # bootstrap FAP: accurate but slow (re-computes the periodogram many times)
    pval = lombS.false_alarm_probability(power.max(), method='bootstrap')

    return power, f, period, pval, fmax
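A usage sketch (synthetic data, arbitrary parameters); note that the bootstrap FAP re-computes the periodogram for many resampled datasets and can be slow:

import numpy as np

rng = np.random.default_rng(1)
x = np.sort(rng.uniform(0, 100, 300))
y = np.sin(2 * np.pi * 0.5 * x) + 0.5 * rng.standard_normal(300)

power, f, period, pval, fmax = perform_and_return_lombs(x, y)
print(period, pval)   # expect a period near 2.0 and a very small p-value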
Example #3
import numpy as np
from astropy.timeseries import LombScargle


def periodogram(bjd, flux, flux_err):
    # work in hours, measured from the first observation
    t = (bjd - bjd[0]) * 24.0
    mean = np.mean(flux)
    flux = flux - mean
    # median spacing over two cadences sets the highest frequency searched
    dt = [t[i + 1] - t[i - 1] for i in range(1, len(t) - 1)]
    fmax = 1.0 / np.median(dt)
    fmin = 2.0 / max(t)
    ls = LombScargle(t, flux, flux_err)
    # oversample by a factor of 10 to achieve good frequency resolution
    freq, power = ls.autopower(minimum_frequency=fmin,
                               maximum_frequency=fmax,
                               samples_per_peak=10)
    best_f = freq[np.argmax(power)]
    period = 1.0 / best_f  # period from the LS periodogram, in hours
    fap_p = ls.false_alarm_probability(power.max())

    # rough fractional amplitude spectrum
    amp = np.sqrt(power) / mean
    return np.array(freq), np.array(amp), period, fap_p
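A usage sketch with synthetic BJD-stamped data (arbitrary values); the function works in hours internally, so the returned period is in hours:

import numpy as np

rng = np.random.default_rng(2)
bjd = 2458000.0 + np.sort(rng.uniform(0, 10, 400))   # 10 days of data
flux = 1000.0 * (1 + 0.02 * np.sin(2 * np.pi * (bjd - bjd[0]) * 24.0 / 6.0))
flux = flux + rng.standard_normal(400)
flux_err = np.ones(400)

freq, amp, period, fap = periodogram(bjd, flux, flux_err)
print(period, fap)   # expect a period near 6 hours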
Example #4
def assert_lsperiod_is_approx(time,
                              flux,
                              err,
                              target_period,
                              significant=4,
                              verbose=True):
    """
    Given a light curve, require the Lomb Scargle period to be near a target
    value.

    args:
        time, flux, err: np.ndarrays
        target_period: float
        significant: int, number of significant digits used in assertion
        statement.
    """
    import numpy as np
    from astropy.stats import LombScargle
    from numpy.testing import assert_approx_equal

    period_min = target_period / 10
    period_max = target_period * 10

    ls = LombScargle(time, flux, err)
    freq, power = ls.autopower(minimum_frequency=1 / period_max,
                               maximum_frequency=1 / period_min,
                               samples_per_peak=20)

    # FAP of the highest peak; computed for reference, not used in the assertion
    ls_fap = ls.false_alarm_probability(power.max())

    ls_period = 1 / freq[np.argmax(power)]

    if verbose:
        msg = (
            f'LS Period: got {ls_period:.4f} d, target {target_period:.4f} d')
        print(msg)

    assert_approx_equal(ls_period, target_period, significant=significant)
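A usage sketch on a clean synthetic sinusoid. The frequency grid limits how exactly the peak period is recovered, so a loose tolerance is used here (significant=2 rather than the default 4):

import numpy as np

rng = np.random.default_rng(3)
time = np.sort(rng.uniform(0, 30, 1000))
flux = 1 + 0.05 * np.sin(2 * np.pi * time / 3.0)
err = np.full_like(time, 1e-3)

assert_lsperiod_is_approx(time, flux, err, target_period=3.0, significant=2)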
Example #5
# In[ ]:


import numpy as np
import astropy.units as u
from astropy.timeseries import LombScargle

rand = np.random.RandomState(42)  # arbitrary seed; used below to generate noise


def fourier_periodogram(t, y):
    N = len(t)
    frequency = np.fft.fftfreq(N, t[1] - t[0])
    y_fft = np.fft.fft(y.value) * y.unit
    positive = (frequency > 0)
    return frequency[positive], (1. / N) * abs(y_fft[positive]) ** 2


# In[ ]:


t_days = np.arange(100) * u.day
y_mags = rand.randn(100) * u.mag
frequency, PSD_fourier = fourier_periodogram(t_days, y_mags)


# In[ ]:


ls = LombScargle(t_days, y_mags, normalization='psd')


# In[ ]:


# power must be computed before evaluating the FAP; use a standard-normalized
# periodogram for the false-alarm statistics
ls_std = LombScargle(t_days, y_mags)
freq_std, power = ls_std.autopower()
ls_std.false_alarm_probability(power.max(), method='baluev')
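
For evenly sampled data, the Lomb-Scargle periodogram with normalization='psd' reduces to the classical Fourier periodogram, so a quick check (a sketch, assuming the cells above have run) is to evaluate the LS power on the same frequency grid:

# In[ ]:


PSD_LS = ls.power(frequency)
# expect close agreement for evenly sampled data (up to the fitted mean term)
print(np.allclose(u.Quantity(PSD_fourier).value, u.Quantity(PSD_LS).value))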

Example #6
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from astropy.table import Table
from astropy.timeseries import LombScargle
from sklearn.utils import resample

radio = Table.read('target.phot.ll.txt', format='ascii')
period_bs = fits.getdata('period_bs_fullsamp.fits')
period_mc = fits.getdata('period_mc_1e5samples.fits')

minfreq = 2.0
maxfreq = 20
samppeak = 100

ls = LombScargle(radio['mjd'], radio['re'])

frequency, power = ls.autopower(minimum_frequency=minfreq,
                                maximum_frequency=maxfreq,
                                samples_per_peak=samppeak)
best_frequency = frequency[np.argmax(power)]
best_period = (1. / best_frequency) * 24.  # frequency in 1/day -> period in hours
fap = ls.false_alarm_probability(power.max())

fig = plt.figure(figsize=(20, 10))
fig.tight_layout()

grid = plt.GridSpec(2, 2, wspace=0.4, hspace=0.4)

ax1 = fig.add_subplot(grid[:, 0])
ax1.plot((1. / frequency) * 24., power,
         label='peak period: {:10.3f} hours'.format(best_period),
         color='orange')
ax1.set_xlabel('period (hr)', fontsize=30)
ax1.set_ylabel('periodogram power', fontsize=30)
ax1.set_xlim((1, 6))
ax1.text(0.5, 0.037, 'A', size=40)

ax1.legend(fontsize=15, bbox_to_anchor=(0.3,1.005))    
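The resample import and the period_bs file above suggest a bootstrap estimate of the period uncertainty. A minimal sketch of how such a sample might be built (the variable names and number of draws below are hypothetical):

n_boot = 1000  # hypothetical number of bootstrap draws
boot_periods = np.empty(n_boot)
for i in range(n_boot):
    # resample observations with replacement, keeping (mjd, re) pairs together
    mjd_b, re_b = resample(np.array(radio['mjd']), np.array(radio['re']))
    f_b, p_b = LombScargle(mjd_b, re_b).autopower(minimum_frequency=minfreq,
                                                  maximum_frequency=maxfreq,
                                                  samples_per_peak=samppeak)
    boot_periods[i] = (1. / f_b[np.argmax(p_b)]) * 24.
print(np.std(boot_periods))  # spread of the bootstrap peak periods, in hours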
Example #7
import numpy as np
import scipy.io as sio
import matplotlib.pyplot as plt
from astropy.timeseries import LombScargle

mat_contents = sio.loadmat(
    '/home/aldo/Documents/Projects/Avtivemeter/Data/code_cruze_paper/Re actimetry/data_acti_4days.mat'
)

t = mat_contents['tt_4']
y = mat_contents['yy_4']

t = t.flatten()
y = y.flatten()
dy = 0.1

ls = LombScargle(t, y, dy)
freq, power = ls.autopower()
print(power.max())
print(ls.false_alarm_probability(power.max()))

plt.plot(freq, power)

best_frequency = freq[np.argmax(power)]
t_fit = np.linspace(0, 1)
y_fit = LombScargle(t, y, dy).model(t_fit, best_frequency)
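The model curve computed above is never displayed; a minimal sketch to plot it against the data (note that t_fit spans [0, 1], which may need adjusting to the actual time baseline):

plt.figure()
plt.plot(t, y, '.', alpha=0.3, label='data')
plt.plot(t_fit, y_fit, 'k-', label='LS model at best frequency')
plt.legend()
plt.show()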

print("Now using fastchi2")
frequency, power = LombScargle(t, y).autopower(method='fastchi2')

print(power.max())
print(ls.false_alarm_probability(power.max()))
Example #8
import numpy as np
import matplotlib.pyplot as plt
from astropy.timeseries import LombScargle


def LSfindP(file_name,
            data,
            plots=False,
            file_type='.png',
            min_period=0.1,
            max_period=30,
            root_dir='/Volumes/Zoe Bell Backup/'):
    '''
    Takes the name of a .fits file (without the extension) from the Everest K2
    data and the data from that file read into a table. Optionally takes
    whether you want a plot saved, the file type you want it saved as, the
    minimum and maximum periods you want to look for, and the root directory
    in which to save the plot within the folder LSPlotOutputs.
    Uses the Lomb-Scargle periodogram method to return the file name, the
    period that best fits the corrected flux data in that range, the power at
    that period, and the associated false alarm probability.
    '''
    #start = file_name.rfind('/') + 1
    #name = file_name[start:-5]
    name = file_name

    #data = Table.read(file_name, format='fits')
    ok = np.where((data['QUALITY'] == 0) & np.isfinite(data['TIME'])
                  & np.isfinite(data['FCOR']) & np.isfinite(data['FRAW_ERR']))

    t = np.array(data['TIME'][ok])
    fcor = np.array(data['FCOR'][ok])
    frawErr = np.array(data['FRAW_ERR'][ok])

    ls = LombScargle(t, fcor, frawErr)
    freq, power = ls.autopower(minimum_frequency=1 / max_period,
                               maximum_frequency=1 / min_period)
    best_freq = freq[np.argmax(power)]
    max_power = np.max(power)

    if (plots):
        plt.figure(figsize=(10, 7))

        plt.subplot(211)
        plt.plot(1 / freq, power)
        plt.title('Periodogram')
        plt.xlabel('Period')
        plt.ylabel('Power')
        plt.annotate('best period',
                     xy=(1 / best_freq, max_power),
                     xytext=(1 / best_freq * 0.5, max_power * 0.9),
                     arrowprops=dict(facecolor='black', width=1, headwidth=5))

        plt.subplot(212)
        t_fit = np.linspace(np.min(t), np.max(t))  # could just use the first and last times
        f_fit = ls.model(t_fit, best_freq)
        plt.plot(t, fcor)
        plt.plot(t_fit, f_fit)
        plt.title('Comparison of Data and Model')
        plt.xlabel('Time')
        plt.ylabel('Flux')

        plt.suptitle(name)
        plt.tight_layout()
        plt.subplots_adjust(top=0.9)
        plt.savefig(root_dir + 'LSPlotOutputs/' + name + file_type, dpi=150)
        plt.close()

    return [
        name, 1 / best_freq, max_power,
        ls.false_alarm_probability(max_power)
    ]
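A usage sketch, assuming an Everest K2 light curve downloaded locally (the file name below is hypothetical):

from astropy.table import Table

# hypothetical Everest K2 light-curve file
data = Table.read('hlsp_everest_k2_llc_201367065-c01_kepler_v2.0_lc.fits',
                  format='fits')
name, best_period, max_power, fap = LSfindP('hlsp_everest_k2_llc_201367065-c01',
                                            data, plots=False)
print(name, best_period, max_power, fap)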
Example #9
import numpy as np
import matplotlib.pyplot as plt
from astropy.table import Table
from astropy.timeseries import LombScargle

# search parameters, as in Example #6
minfreq = 2.0
maxfreq = 20
samppeak = 100

data = Table.read('2M1047_calibch2_ap3_epoch2.fits', hdu=2)
jd2 = data['MJD']
flux2 = data['FLUX'] / np.nanmedian(data['FLUX'])

# keep only plausible normalized fluxes
mask = (flux2 > 0.0) & (flux2 < 1.05)

jd2 = jd2[mask]
flux2 = flux2[mask]

ls2 = LombScargle(jd2, flux2)
frequency2, power2 = ls2.autopower(minimum_frequency=minfreq,
                                   maximum_frequency=maxfreq,
                                   samples_per_peak=samppeak)
best_frequency2 = frequency2[np.argmax(power2)]
best_period = (1. / best_frequency2) * 24.  # frequency in 1/day -> period in hours
fap2 = ls2.false_alarm_probability(power2.max())

# plot results
fig = plt.figure(figsize=(20, 10))
fig.tight_layout()

grid = plt.GridSpec(2, 2, wspace=0.4, hspace=0.4)

ax2 = fig.add_subplot(grid[:, 0])
ax2.tick_params(labelsize=20,
                labeltop=True,
                labelright=True,
                which="both",
                top=True,
                right=True)
ax2.plot((1. / frequency2) * 24., power2,
         label='peak period: {:10.3f} hours'.format(best_period),
         color='orange')
Example #10
#     model.optimizer.period_range = (.2, 25)
#     model.fit(data['TIME'][ok][ok2], data['FCOR'][ok][ok2]- medflux[ok2], data['FRAW_ERR'][ok][ok2]);
    periods = np.linspace(.2, 25, 10000)
#     scores = model.score(periods)
    model = LombScargle(data['TIME'][ok][ok2], data['FCOR'][ok][ok2] - medflux[ok2],
                        data['FRAW_ERR'][ok][ok2])
    pwr = model.power(1 / periods)  # evaluate the power directly on the period grid

    fig, ax = plt.subplots(1,2)
    fig.set_size_inches(13,5)

    ax[0].plot(data['TIME'][ok], data['FCOR'][ok], alpha=.5, label='FCOR')
    ax[0].plot(data['TIME'][ok][ok2], medflux[ok2], alpha=.5, label='medflux')
    #ax[0].plot(data['TIME'][ok], data['FLUX'][ok], alpha=.5, label='FLUX')
    #ax[0].plot(data['TIME'][ok], data['FRAW'][ok], alpha=.5, label='FRAW')
    #ax[0].set_xlim(2.0, 3.6)
    #ax[0].set_ylim(0, 700)
    ax[0].set_xlabel("Time (days)")
    ax[0].set_ylabel("Flux")
    ax[0].legend()

    ax[1].plot(periods, pwr)
    ax[1].set_xscale('log')
    ax[1].set_xlabel("Period (days)")
    ax[1].set_ylabel("Power")
    plt.show()

    best_period = periods[np.argmax(pwr)]
    print(best_period)
    best_periods[i] = best_period
    fap[i] = model.false_alarm_probability(pwr.max())
Example #11
def main(kc19_groupid=113, Tmag_cutoff=14, clean_gaia_cache=False):

    #
    # get info needed to query gaia for comparison stars
    #
    source_df = pd.read_csv('../data/kounkel_table1_sourceinfo.csv')
    sdf = source_df[(source_df['Tmag_pred'] < Tmag_cutoff)
                    & (source_df['group_id'] == kc19_groupid)]
    n_sel_sources_in_group = len(sdf)

    df2 = pd.read_csv('../data/string_table2.csv')

    gdf = df2[df2['group_id'] == kc19_groupid]

    group_coord = SkyCoord(float(gdf['l']) * u.deg,
                           float(gdf['b']) * u.deg,
                           frame='galactic')
    ra = group_coord.icrs.ra
    dec = group_coord.icrs.dec
    plx_mas = float(gdf['parallax'])

    #
    # define relevant directories / paths
    #
    gaiadir = os.path.join(basedir, 'gaia_queries')
    if not os.path.exists(gaiadir):
        os.mkdir(gaiadir)

    outfile = os.path.join(
        gaiadir, 'group{}_comparison_sample.xml.gz'.format(kc19_groupid))

    #
    # run the gaia query. require the same cuts imposed by Kounkel & Covey 2019
    # on stellar quality. also require close on-sky (within 5 degrees of KC19
    # group position), and close in parallax space (within +/-20% of KC19
    # parallax).
    #
    if clean_gaia_cache and os.path.exists(outfile):
        os.remove(outfile)

    if not os.path.exists(outfile):

        Gaia.login(credentials_file=os.path.join(homedir, '.gaia_credentials'))

        jobstr = ('''
        SELECT *
        FROM gaiadr2.gaia_source
        WHERE 1=CONTAINS(
          POINT('ICRS', ra, dec),
            CIRCLE('ICRS', {ra:.8f}, {dec:.8f}, {sep_deg:.1f}))
        AND parallax < {plx_upper:.2f} AND parallax > {plx_lower:.2f}
        AND parallax > 1
        AND parallax_error < 0.1
        AND 1.0857/phot_g_mean_flux_over_error < 0.03
        AND astrometric_sigma5d_max < 0.3
        AND visibility_periods_used > 8
        AND (
                (astrometric_excess_noise < 1)
                OR
                (astrometric_excess_noise > 1 AND astrometric_excess_noise_sig < 2)
        )
        ''')

        query = jobstr.format(sep_deg=5.0,
                              ra=ra.value,
                              dec=dec.value,
                              plx_upper=1.3 * plx_mas,
                              plx_lower=0.7 * plx_mas)

        if not os.path.exists(outfile):
            print(42 * '-')
            print('launching\n{}'.format(query))
            print(42 * '-')
            j = Gaia.launch_job(query=query,
                                verbose=True,
                                dump_to_file=True,
                                output_file=outfile)

        Gaia.logout()

    vot = parse(outfile)
    tab = vot.get_first_table().to_table()
    field_df = tab.to_pandas()

    #
    # require the same Tmag cutoff for the nbhd stars. ensure no overlap w/
    # sample of stars from the group itself. then randomly sample the
    # collection of stars.
    #

    Tmag_pred = (
        field_df['phot_g_mean_mag'] - 0.00522555 *
        (field_df['phot_bp_mean_mag'] - field_df['phot_rp_mean_mag'])**3 +
        0.0891337 *
        (field_df['phot_bp_mean_mag'] - field_df['phot_rp_mean_mag'])**2 -
        0.633923 *
        (field_df['phot_bp_mean_mag'] - field_df['phot_rp_mean_mag']) +
        0.0324473)

    field_df['Tmag_pred'] = Tmag_pred

    sfield_df = field_df[field_df['Tmag_pred'] < Tmag_cutoff]
    common = sfield_df.merge(sdf, on='source_id', how='inner')
    sfield_df = sfield_df[~sfield_df.source_id.isin(common.source_id)]

    n_field = len(sfield_df)

    if 2 * n_sel_sources_in_group > n_field:
        errmsg = (
            'ngroup: {}. nfield: {}. please tune the gaia query to get >2x the stars'.
            format(n_sel_sources_in_group, n_field))
        raise AssertionError(errmsg)

    srfield_df = sfield_df.sample(n=n_sel_sources_in_group)

    #
    # now given the gaia ids, get the rotation periods
    #
    for ix, r in srfield_df.iterrows():

        source_id = np.int64(r['source_id'])
        ra, dec = float(r['ra']), float(r['dec'])
        group_id = kc19_groupid
        name = str(gdf['name'].iloc[0])

        c_obj = SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs')

        #
        # require that we are on-silicon. for year 1, this roughly means --
        # we are in the southern ecliptic hemisphere
        #
        if c_obj.barycentrictrueecliptic.lat > 0 * u.deg:
            print('group{}, {}: found in northern hemisphere. skip!'.format(
                group_id, name))
            continue

        workingdir = os.path.join(
            basedir, 'fits_pkls_results_pngs',
            'field_star_comparison_group{}_name{}'.format(group_id, name))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)
        workingdir = os.path.join(workingdir, str(source_id))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)

        outvppath = os.path.join(workingdir,
                                 'verification_page_{}.png'.format(source_id))
        if os.path.exists(outvppath):
            print('found {}, continue'.format(outvppath))
            continue

        #
        # if you already downloaded FFI cutouts for this object, don't get any
        # more. otherwise, get them
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            print('found {} cutouts in {}, skip'.format(
                len(cutouts), workingdir))
        else:
            gfc.get_fficutout(c_obj, cutoutdir=workingdir)

        #
        # given the FFI cutouts, make simple light curves.
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            d = glgf.get_lc_given_fficutout(workingdir,
                                            cutouts,
                                            c_obj,
                                            return_pkl=False)
        else:
            d = np.nan
            print('WRN! did not find fficutout for {}'.format(workingdir))

        if not isinstance(d, dict):
            print('WRN! got bad light curve for {}. skipping.'.format(
                workingdir))
            continue

        outpath = os.path.join(workingdir, 'GLS_rotation_period.results')

        #
        # do Lomb-Scargle w/ uniformly weighted points.
        #
        ls = LombScargle(d['time'], d['rel_flux'])
        period_min = 0.1
        period_max = np.min(
            [0.9 * (np.max(d['time']) - np.min(d['time'])), 16])
        freq, power = ls.autopower(minimum_frequency=1 / period_max,
                                   maximum_frequency=1 / period_min)
        try:
            _ = power.max()
        except ValueError:
            print('WRN! got bad Lomb-Scargle for {}. skipping.'.format(
                workingdir))
            continue

        ls_fap = ls.false_alarm_probability(power.max(), method='baluev')
        ls_period = 1 / freq[np.argmax(power)]

        d['ls_fap'] = ls_fap
        d['ls_period'] = ls_period

        #
        # try to get TIC Teff. search TIC within 5 arcseconds, then take the
        # Gaia-ID match.  (removing sources with no gaia ID, which do exist in
        # TICv8.)
        #
        radius = 5.0 * u.arcsecond

        stars = Catalogs.query_region("{} {}".format(float(c_obj.ra.value),
                                                     float(c_obj.dec.value)),
                                      catalog="TIC",
                                      radius=radius)

        nbhr_source_ids = np.array(stars['GAIA'])

        stars = stars[nbhr_source_ids != '']
        nbhr_source_ids = nbhr_source_ids[nbhr_source_ids != '']

        sel = nbhr_source_ids.astype(int) == source_id

        if len(sel[sel]) == 1:
            star = stars[sel]
        else:
            raise NotImplementedError('did not get any TIC match. why?')

        teff = float(star['Teff'])
        if not np.isfinite(teff):
            raise NotImplementedError('got nan TIC teff. what do?')

        #
        # make "check plot" analog for visual inspection
        #
        outd = {
            'ls_fap': d['ls_fap'],
            'ls_period': d['ls_period'],
            'source_id': source_id,
            'ra': ra,
            'dec': dec,
            'name': name,
            'group_id': group_id,
            'teff': teff
        }
        pu.save_status(outpath, 'lomb-scargle', outd)

        vp.generate_verification_page(d, ls, freq, power, cutouts, c_obj,
                                      outvppath, outd)
Example #12
ax[0].errorbar(star_time, star_flux, star_flux_err, fmt='.', elinewidth=1.5, capsize=0)
ax[0].set(xlim=(np.min(star_time) -20, np.max(star_time)+20),
          xlabel='Observation time (days)',
          ylabel='Observed Flux')

#Plot the periodogram
ax[1].plot(1. / freq, PLS)
ax[1].set(xlabel='period (days)',
          ylabel='Lomb-Scargle Power',
          xlim=(0.1, 50.0),
          ylim=(0, 1));

#Plot the false-alarm levels
z_false = ls.false_alarm_level(0.001)
ax[1].axhline(z_false, linestyle='dotted', color='black')
print("Peak at: " + str(best_freq) + "\nFAP: " + str(ls.false_alarm_probability(PLS.max())))

#Plot the phased data & model in the inset
inset.errorbar(phase, star_flux, star_flux_err, fmt='.k', capsize=0)
inset.plot(phase_fit, mag_fit)
inset.invert_yaxis()
inset.set_xlabel('phase')
inset.set_ylabel('Normalized Flux [Counts]')
plt.savefig(DIR_SAVE+'filename to save')
plt.show()

#Plot the periodogram in a single figure
fig, ax = plt.subplots()
ax.plot(1. / freq, PLS)
ax.set(xlabel=r'Period (days)',
       ylabel=r'Lomb-Scargle Power')
Example #13
def main():

    source_df = pd.read_csv('../data/kounkel_table1_sourceinfo.csv')

    sel = ((source_df['Tmag_pred'] < 14) & (source_df['parallax'] > 5))

    # sel = (
    #     (source_df['Tmag_pred'] < 14)
    #     &
    #     (source_df['age'] < 9.2)
    #     &
    #     (source_df['age'] > 7.5)
    # )

    sdf = source_df[sel]
    print(
        'after making cuts on Tmag_pred<14 and parallax>5mas, got {} stars, {} groups'
        .format(len(sdf), len(np.unique(sdf['group_id']))))

    df2 = pd.read_csv('../data/string_table2.csv')
    sdf2 = df2[df2['parallax'] > 5]
    # sdf2 = df2[(df2['age']>7.5) & (df2['age']<9.2)]
    # sdf2_str = sdf2[sdf2['string']=='y']

    # require that we only look at close, middle-aged objects as flagged
    # from glue visualizations
    close_middle_aged = [
        1005, 208, 506, 424, 676, 507, 594, 209, 425, 595, 677, 905, 45, 7, 63
    ]

    # older (like age >~8.4), and a bit further... like max
    subset4 = [
        1345, 1273, 1274, 1346, 906, 784, 1089, 508, 678, 509, 1006, 907, 785,
        786
    ]

    # now given the gaia ids, get the rotation periods
    for ix, r in sdf.iterrows():

        source_id = np.int64(r['source_id'])
        ra, dec = float(r['ra_x']), float(r['dec_x'])
        name = str(r['name'])
        group_id = str(r['group_id'])

        ##########################################
        # NOTE: change often

        #if int(group_id) not in subset4:
        #    continue
        #if int(group_id) not in close_middle_aged:
        #    continue
        #if int(group_id) != 113:
        #    continue
        #if source_id != 5220404075366707584:
        #    continue
        #if int(group_id) not in np.array(sdf2_str['group_id']).astype(int):
        #    # require that we only look at things Kounkel labelled as strings
        #    continue
        #if name != 'AB_Dor':
        #    continue
        #if source_id != 5579169050153502976:
        #    continue
        ##########################################

        c_obj = SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs')

        #
        # require that we are on-silicon. for year 1, this roughly means --
        # we are in the southern ecliptic hemisphere
        #
        if c_obj.barycentrictrueecliptic.lat > 0 * u.deg:
            print('group{}, {}: found in northern hemisphere. skip!'.format(
                group_id, name))
            continue

        workingdir = os.path.join(basedir, 'fits_pkls_results_pngs',
                                  'group{}_name{}'.format(group_id, name))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)
        workingdir = os.path.join(workingdir, str(source_id))
        if not os.path.exists(workingdir):
            os.mkdir(workingdir)

        outvppath = os.path.join(workingdir,
                                 'verification_page_{}.png'.format(source_id))
        if os.path.exists(outvppath):
            print('found {}, continue'.format(outvppath))
            continue
        if os.path.exists(os.path.join(workingdir, 'failed.bool')):
            print('found {}, continue'.format(
                os.path.join(workingdir, 'failed.bool')))
            continue

        #
        # if you already downloaded FFI cutouts for this object, don't get any
        # more. otherwise, get them
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            print('found {} cutouts in {}, skip'.format(
                len(cutouts), workingdir))
        else:
            try:
                gfc.get_fficutout(c_obj, cutoutdir=workingdir)
            except requests.exceptions.HTTPError as e:
                print('ERR! {}: {} failed to get FFI cutout'.format(
                    repr(e), workingdir))

        #
        # given the FFI cutouts, make simple light curves.
        #
        cutouts = glob(os.path.join(workingdir, '*.fits'))
        if len(cutouts) >= 1:
            d = glgf.get_lc_given_fficutout(workingdir,
                                            cutouts,
                                            c_obj,
                                            return_pkl=True)
        else:
            d = np.nan
            print('WRN! did not find fficutout for {}'.format(workingdir))

        if not isinstance(d, dict) or len(d['time']) == 0:
            print('WRN! got bad light curve for {}. skipping.'.format(
                workingdir))
            os.mknod(os.path.join(workingdir, 'failed.bool'))
            continue

        outpath = os.path.join(workingdir, 'GLS_rotation_period.results')

        #
        # do Lomb-Scargle w/ uniformly weighted points.
        #
        ls = LombScargle(d['time'], d['rel_flux'])
        period_min = 0.1
        period_max = np.min(
            [0.9 * (np.max(d['time']) - np.min(d['time'])), 16])
        freq, power = ls.autopower(minimum_frequency=1 / period_max,
                                   maximum_frequency=1 / period_min)
        try:
            _ = power.max()
        except ValueError:
            print('WRN! got bad Lomb-Scargle for {}. skipping.'.format(
                workingdir))
            continue

        ls_fap = ls.false_alarm_probability(power.max(), method='baluev')
        ls_period = 1 / freq[np.argmax(power)]

        d['ls_fap'] = ls_fap
        d['ls_period'] = ls_period

        #
        # collect standard variability info
        #
        rel_flux_rms = np.std(d['rel_flux'])
        rel_flux_iqr = iqr(d['rel_flux'], rng=(25, 75))
        rel_flux_15_to_85 = iqr(d['rel_flux'], rng=(15, 85))
        rel_flux_5_to_95 = iqr(d['rel_flux'], rng=(5, 95))
        rel_flux_median = np.median(d['rel_flux'])
        rel_flux_mad = np.median(np.abs(d['rel_flux'] - rel_flux_median))

        #
        # try to get TIC Teff. search TIC within 5 arcseconds, then take the
        # Gaia-ID match.  (removing sources with no gaia ID, which do exist in
        # TICv8.)
        #
        radius = 5.0 * u.arcsecond

        stars = Catalogs.query_region("{} {}".format(float(c_obj.ra.value),
                                                     float(c_obj.dec.value)),
                                      catalog="TIC",
                                      radius=radius)

        nbhr_source_ids = np.array(stars['GAIA'])

        stars = stars[nbhr_source_ids != '']
        nbhr_source_ids = nbhr_source_ids[nbhr_source_ids != '']

        sel = nbhr_source_ids.astype(int) == source_id

        if len(sel[sel]) == 1:
            star = stars[sel]
        else:
            raise NotImplementedError('did not get any TIC match. why?')

        teff = float(star['Teff'])
        if not np.isfinite(teff):
            raise NotImplementedError('got nan TIC teff. what do?')

        #
        # make "check plot" analog for visual inspection
        #
        outd = {
            'ls_fap': d['ls_fap'],
            'ls_period': d['ls_period'],
            'source_id': source_id,
            'ra': ra,
            'dec': dec,
            'name': name,
            'group_id': group_id,
            'teff': teff,
            'rel_flux_rms': rel_flux_rms,
            'rel_flux_iqr': rel_flux_iqr,
            'rel_flux_15_to_85': rel_flux_15_to_85,
            'rel_flux_5_to_95': rel_flux_5_to_95,
            'rel_flux_median': rel_flux_median,
            'rel_flux_mad': rel_flux_mad
        }
        pu.save_status(outpath, 'variability_info', outd)
        pu.save_status(outpath, 'starinfo', dict(r))

        vp.generate_verification_page(d, ls, freq, power, cutouts, c_obj,
                                      outvppath, outd)