Example #1
def get_input_light_curves(path_to_input_files):
    '''
    This can be modified as desired to allow for arbitrary input,
    as long as the output format matches what is here.
    '''
    input_files = glob.glob(path_to_input_files + "/*")
    input_files = input_files[:10]
    print("Number of input files: " + str(len(input_files)))

    return_dict = {}
    number_skipped = 0

    for f in input_files:
        with open(f, "rb") as infile:
            t, lc, err = pickle.load(infile, encoding='latin')
        if len(t) < 800:
            number_skipped += 1
            continue
        t, lc, err = sigclip_magseries(t,
                                       lc,
                                       err,
                                       sigclip=3,
                                       iterative=True,
                                       niterations=3)
        return_dict[f.split("/")[-1].split("_")[0]] = \
            (t,lc,err)

    print("Number skipped for too few points: " + str(number_skipped))

    # return format: a dictionary of form ID:(t,lc,err)
    return return_dict
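
A minimal sketch of how the returned dictionary (ID -> (t, lc, err)) might be consumed; the path below is a placeholder, not part of the original code:

light_curves = get_input_light_curves("/path/to/input/pickles")
for obj_id, (t, lc, err) in light_curves.items():
    # each value is a (time, magnitude, error) tuple of arrays
    print(obj_id, len(t))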
Example #2
def reduce_WASP_4b(plname='WASP-4b_dataset0'):

    # options when running
    try_to_recover_periodograms = False
    make_lc_plots = True

    # table 1 of Hellier et al 2009 discovery paper
    period, epoch = 1.3382282, 2454365.91464
    # decimal ra, dec of target used only for BJD conversion
    ra, dec = 353.56283333, -42.06141667

    # file parsing
    lcdir = '../data/WASP_lightcurves/'
    wasp_lcs = [f for f in glob(lcdir+'*.tbl') if plname in f]
    if len(wasp_lcs) != 1:
        raise AssertionError(
            'expected exactly one WASP light curve for {}'.format(plname))
    lcfile = wasp_lcs[0]

    fit_savdir = '../results/WASP_lightcurves/'
    chain_savdir = '/home/luke/local/emcee_chains/'
    savdir='../results/WASP_lightcurves/'
    for sdir in [fit_savdir, chain_savdir, savdir]:
        if not os.path.exists(sdir):
            os.mkdir(sdir)

    #########
    # begin #
    #########
    tempdf = read_WASP_lightcurve(lcfile)
    df = wrangle_WASP_lightcurve(tempdf, ra, dec, plname)

    times, mags, errs = (nparr(df['BJD_TDB']), nparr(df['RELMAG']),
                         nparr(df['ERR']))

    stimes, smags, serrs = sigclip_magseries(times, mags, errs, sigclip=[5,5],
                                             magsarefluxes=False)

    phzd = phase_magseries(stimes, smags, period, epoch, wrap=True, sort=True)

    # convert from mags to relative fluxes for fitting
    # m_x - m_x0 = -5/2 log10( f_x / f_x0 )
    # so
    # f_x = f_x0 * 10 ** ( -2/5 (m_x - m_x0) )
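    # e.g. a point 0.5 mag fainter than m_x0 has f_x/f_x0 = 10**(-0.4*0.5) ~= 0.63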
    m_x0, f_x0 = 10, 1e3 # arbitrary
    sfluxs = f_x0 * 10**( -0.4 * (smags - m_x0) )
    sfluxs /= np.nanmedian(sfluxs)

    if try_to_recover_periodograms:
        run_wasp_periodograms(stimes, smags, serrs)

    if make_lc_plots:
        plot_old_lcs(times, mags, stimes, smags, phzd, period, epoch, sfluxs,
                     plname, savdir=savdir, telescope='WASP')

    ####################################################################
    # fit the lightcurve, show the phased result, get the transit time #
    ####################################################################

    n_mcmc_steps = 500
    overwrite=1

    savstr = 'wasp_errs_1d'
    # use physical parameters from Wilson+ 2008 as fixed parameters
    empirical_errs = fit_lightcurve_get_transit_time(stimes, sfluxs, serrs,
                                                     savstr, plname, period, epoch,
                                                     n_mcmc_steps=n_mcmc_steps,
                                                     overwriteexistingsamples=overwrite)


    # the WASP errors are good for fitting an initial model to the data, but
    # they may be over/under-estimates. instead use the "empirical errors",
    # which are the measured 1-sigma standard deviations of the residual.
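    # a minimal sketch of what such an "empirical error" could look like,
    # assuming a model flux array `model_fluxs` aligned with sfluxs
    # (illustrative only; the actual estimate is computed inside
    # fit_lightcurve_get_transit_time):
    #   residuals = sfluxs - model_fluxs
    #   empirical_errs = np.nanstd(residuals)  # scalar, broadcast below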

    savstr = 'wasp_empirical_errs_1d'
    eerrs = np.ones_like(serrs)*empirical_errs

    _ = fit_lightcurve_get_transit_time(stimes, sfluxs, eerrs,
                                        savstr, plname, period, epoch,
                                        n_mcmc_steps=n_mcmc_steps,
                                        overwriteexistingsamples=overwrite)
def reduce_all():
    # options when running
    make_lc_plots = True

    df = pd.read_csv('../data/asas_all_well-studied_HJs_depthgt1pct_'
                     'Vlt11_Plt10_manual_points.csv')
    df = df.drop(columns='System.1')

    sel = (df['asas_N_obs'] > 0)

    print('{:d} HJs from TEPCAT with V<11, P<10day, depth>1%'.format(len(df)))
    df = df[sel]
    print('{:d} with >0 ASAS data points'.format(len(df)))

    df['plname'] = nparr(df['System']) + 'b'
    df = df.rename(index=str,
                   columns={
                       'Period(day)': 'period',
                       'T0 (HJD or BJD)': 'epoch_HJD_or_BJD'
                   })
    c = SkyCoord(nparr(df['asas_query_str']), unit=(u.hourangle, u.deg))
    df['ra_decimal'] = c.ra.value
    df['dec_decimal'] = c.dec.value

    lcdir = '../data/ASAS_lightcurves/'
    df['lcpath'] = lcdir + nparr(df['asas_lc_name'])

    #FIXME: super janky that the epoch is in HJD OR BJD. which is it, for each
    #case?...
    df['epoch_is_BJD'] = np.ones(len(df), dtype=int)

    fit_savdir = '../results/ASAS_lightcurves/'
    chain_savdir = '/home/luke/local/emcee_chains/'
    savdir = '../results/ASAS_lightcurves/'

    for plname, period, epoch_HJD_or_BJD, ra, dec, lcfile, epoch_is_BJD in list(
            zip(
                nparr(df['plname']),
                nparr(df['period']),
                nparr(df['epoch_HJD_or_BJD']),
                nparr(df['ra_decimal']),
                nparr(df['dec_decimal']),
                nparr(df['lcpath']),
                nparr(df['epoch_is_BJD']),
            )):

        tempdf, dslices = read_ASAS_lightcurve(lcfile)
        df = wrangle_ASAS_lightcurve(tempdf, dslices, ra, dec)

        times, mags, errs = (nparr(df['BJD_TDB']), nparr(df['SMAG_bestap']),
                             nparr(df['SERR_bestap']))

        stimes, smags, serrs = sigclip_magseries(times,
                                                 mags,
                                                 errs,
                                                 sigclip=[5, 5],
                                                 magsarefluxes=False)

        if epoch_is_BJD:
            epoch = epoch_HJD_or_BJD
        else:
            raise NotImplementedError

        phzd = phase_magseries(stimes,
                               smags,
                               period,
                               epoch,
                               wrap=True,
                               sort=True)

        # convert from mags to relative fluxes for fitting
        # m_x - m_x0 = -5/2 log10( f_x / f_x0 )
        # so
        # f_x = f_x0 * 10 ** ( -2/5 (m_x - m_x0) )
        m_x0, f_x0 = 10, 1e3  # arbitrary
        sfluxs = f_x0 * 10**(-0.4 * (smags - m_x0))
        sfluxs /= np.nanmedian(sfluxs)

        if make_lc_plots:
            plot_old_lcs(times, mags, stimes, smags, phzd, period, epoch,
                         sfluxs, plname)
def reduce_WASP_121b():

    # options when running
    try_to_recover_periodograms = False
    make_lc_plots = True

    plname = 'WASP-121b'
    # table 1 of Delrez et al 2016 discovery paper. (BJD_TDB)
    period = 1.2749255
    epoch = 2456636.345762 + period / 2
    # decimal ra, dec of target used only for BJD conversion
    ra, dec = 107.60023116745, -39.09727045928

    # file parsing
    lcdir = '../data/ASAS_lightcurves/'
    asas_lcs = [f for f in glob(lcdir + '*.txt') if 'WASP-121' in f]
    lcfile = asas_lcs[0]

    fit_savdir = '../results/ASAS_lightcurves/'
    chain_savdir = '/home/luke/local/emcee_chains/'
    savdir = '../results/ASAS_lightcurves/'

    #########
    # begin #
    #########
    tempdf, dslices = read_ASAS_lightcurve(lcfile)
    df = wrangle_ASAS_lightcurve(tempdf, dslices, ra, dec)

    times, mags, errs = (nparr(df['BJD_TDB']), nparr(df['SMAG_bestap']),
                         nparr(df['SERR_bestap']))

    stimes, smags, serrs = sigclip_magseries(times,
                                             mags,
                                             errs,
                                             sigclip=[5, 5],
                                             magsarefluxes=False)

    phzd = phase_magseries(stimes, smags, period, epoch, wrap=True, sort=True)

    # convert from mags to relative fluxes for fitting
    # m_x - m_x0 = -5/2 log10( f_x / f_x0 )
    # so
    # f_x = f_x0 * 10 ** ( -2/5 (m_x - m_x0) )
    m_x0, f_x0 = 10, 1e3  # arbitrary
    sfluxs = f_x0 * 10**(-0.4 * (smags - m_x0))
    sfluxs /= np.nanmedian(sfluxs)

    if try_to_recover_periodograms:
        run_asas_periodograms(stimes, smags, serrs)

    if make_lc_plots:
        plot_old_lcs(times, mags, stimes, smags, phzd, period, epoch, sfluxs,
                     'WASP-121b')

    savdf = pd.DataFrame({
        'time_BJDTDB': stimes,
        'sigclipped_mag_bestap': smags,
        'err_mag_from_ASAS': serrs
    })
    savdfpath = '../results/ASAS_lightcurves/wasp121b_asas_mag_time_err.csv'
    savdf.to_csv(savdfpath, index=False)
    print('made {}'.format(savdfpath))

    ####################################################################
    # fit the lightcurve, show the phased result, get the transit time #
    ####################################################################

    savstr = 'asas_errs_1d'

    empirical_errs = fit_lightcurve_get_transit_time(
        stimes,
        sfluxs,
        serrs,
        savstr,
        plname,
        period,
        epoch,
        n_mcmc_steps=1000,
        overwriteexistingsamples=True,
        true_b=0.16,
        true_t0=epoch,
        true_rp=np.sqrt(0.01551),
        true_sma=3.754,
        sma_au=None,
        rstar=None,
        u_linear=0.4081,
        u_quad=0.2533)

    # the ASAS errors are good for fitting an initial model to the data, but
    # they may be over/under-estimates. instead use the "empirical errors",
    # which are the measured 1-sigma standard deviations of the residual.

    savstr = 'empirical_errs_1d'
    eerrs = np.ones_like(serrs) * empirical_errs

    _ = fit_lightcurve_get_transit_time(stimes,
                                        sfluxs,
                                        eerrs,
                                        savstr,
                                        plname,
                                        period,
                                        epoch,
                                        n_mcmc_steps=1000,
                                        overwriteexistingsamples=True,
                                        true_b=0.16,
                                        true_t0=epoch,
                                        true_rp=np.sqrt(0.01551),
                                        true_sma=3.754,
                                        sma_au=None,
                                        rstar=None,
                                        u_linear=0.4081,
                                        u_quad=0.2533)
def reduce_WASP_18b():

    # options when running
    try_to_recover_periodograms = False
    make_lc_plots = True

    plname = 'WASP-18b'
    # table 1 of Hellier et al 2009 discovery paper
    period, epoch = 0.94145299, 2454221.48163
    # decimal ra, dec of target used only for BJD conversion
    ra, dec = 24.354311, -45.677887

    # file parsing
    lcdir = '../data/ASAS_lightcurves/'
    asas_lcs = [f for f in glob(lcdir + '*.txt') if 'WASP-18' in f]
    lcfile = asas_lcs[0]

    fit_savdir = '../results/ASAS_lightcurves/'
    chain_savdir = '/home/luke/local/emcee_chains/'
    savdir = '../results/ASAS_lightcurves/'

    #########
    # begin #
    #########
    tempdf, dslices = read_ASAS_lightcurve(lcfile)
    df = wrangle_ASAS_lightcurve(tempdf, dslices, ra, dec)

    times, mags, errs = (nparr(df['BJD_TDB']), nparr(df['SMAG_bestap']),
                         nparr(df['SERR_bestap']))

    stimes, smags, serrs = sigclip_magseries(times,
                                             mags,
                                             errs,
                                             sigclip=[5, 5],
                                             magsarefluxes=False)

    phzd = phase_magseries(stimes, smags, period, epoch, wrap=True, sort=True)

    # convert from mags to relative fluxes for fitting
    # m_x - m_x0 = -5/2 log10( f_x / f_x0 )
    # so
    # f_x = f_x0 * 10 ** ( -2/5 (m_x - m_x0) )
    m_x0, f_x0 = 10, 1e3  # arbitrary
    sfluxs = f_x0 * 10**(-0.4 * (smags - m_x0))
    sfluxs /= np.nanmedian(sfluxs)

    if try_to_recover_periodograms:
        run_asas_periodograms(stimes, smags, serrs)

    if make_lc_plots:
        plot_old_lcs(times, mags, stimes, smags, phzd, period, epoch, sfluxs,
                     'WASP-18b')

    savdf = pd.DataFrame({
        'time_BJDTDB': stimes,
        'sigclipped_mag_bestap': smags,
        'err_mag_from_ASAS': serrs
    })
    savdfpath = '../results/ASAS_lightcurves/wasp18b_asas_mag_time_err.csv'
    savdf.to_csv(savdfpath, index=False)
    print('made {}'.format(savdfpath))
    #FIXME
    assert 0

    ####################################################################
    # fit the lightcurve, show the phased result, get the transit time #
    ####################################################################

    savstr = 'asas_errs_1d'
    # use physical parameters from Hellier+ 2009 as fixed parameters
    plname = 'WASP-18b'
    period = 0.94145299
    epoch = 2454221.48163
    empirical_errs = fit_lightcurve_get_transit_time(
        stimes,
        sfluxs,
        serrs,
        savstr,
        plname,
        period,
        epoch,
        n_mcmc_steps=10,
        overwriteexistingsamples=False)

    # the ASAS errors are good for fitting an initial model to the data, but
    # they may be over/under-estimates. instead use the "empirical errors",
    # which are the measured 1-sigma standard deviations of the residual.

    savstr = 'empirical_errs_1d'
    eerrs = np.ones_like(serrs) * empirical_errs

    _ = fit_lightcurve_get_transit_time(stimes,
                                        sfluxs,
                                        eerrs,
                                        savstr,
                                        plname,
                                        period,
                                        epoch,
                                        n_mcmc_steps=100,
                                        overwriteexistingsamples=False)
Example #6
def _fit_transit_model_single_sector(tfa_sr_path,
                                     lcpath,
                                     outpath,
                                     mdf,
                                     source_id,
                                     supprow,
                                     suppfulldf,
                                     pfdf,
                                     pfrow,
                                     toidf,
                                     ctoidf,
                                     sector,
                                     nworkers,
                                     cdipsvnum=1,
                                     overwrite=1):
    try_mcmc = True
    identifier = source_id
    #
    # read and re-detrend lc if needed. (recall: these planet candidates were
    # found using a penalized spline detrending in most cases).
    #
    hdul_sr = fits.open(tfa_sr_path)
    hdul = fits.open(lcpath)

    lc_sr = hdul_sr[1].data
    lc, hdr = hdul[1].data, hdul[0].header

    # FIXME: logic needs updating in >=S14 processing
    raise NotImplementedError
    is_pspline_dtr = bool(pfrow['pspline_detrended'].iloc[0])

    fluxap = 'IRM2' if is_pspline_dtr else 'TFASR2'

    time, mag = lc_sr['TMID_BJD'], lc_sr[fluxap]
    try:
        time, mag = moe.mask_orbit_start_and_end(time,
                                                 mag,
                                                 raise_expectation_error=False)
    except AssertionError:
        raise AssertionError(
            'moe.mask_orbit_start_and_end failed for {}'.format(tfa_sr_path))

    flux = vp._given_mag_get_flux(mag)
    err = np.ones_like(flux) * 1e-4

    time, flux, err = sigclip_magseries(time,
                                        flux,
                                        err,
                                        magsarefluxes=True,
                                        sigclip=[50, 5])

    if is_pspline_dtr or identifier in KNOWN_EXTRA_DETREND:
        flux, _ = dtr.detrend_flux(time, flux)

    if identifier in KNOWN_EXTRA_DETREND:
        fit_savdir = os.path.dirname(outpath)
        dtrpath = os.path.join(fit_savdir, 'extra_detrend_lc.png')

        if not os.path.exists(dtrpath):
            plt.close('all')
            f, ax = plt.subplots(figsize=(6, 3))
            ax.scatter(time,
                       flux,
                       c='black',
                       alpha=0.9,
                       zorder=2,
                       s=8,
                       rasterized=True,
                       linewidths=0)
            ax.set_xlabel('bjdtdb')
            ax.set_ylabel('detrended flux')
            f.savefig(dtrpath, bbox_inches='tight')
            raise AssertionError(
                'U NEED TO MANUALLY LOOK AT {} AND VERIFY ITS OK'.format(
                    dtrpath))
        else:
            print('WRN! found {}. continuing to fit.'.format(dtrpath))

    #
    # define the paths. get the stellar parameters, and do the fit!
    #
    fit_savdir = os.path.dirname(outpath)
    chain_savdir = os.path.dirname(outpath).replace('fitresults', 'samples')

    try:
        teff, teff_err, rstar, rstar_err, logg, logg_err = (
            get_teff_rstar_logg(hdr))
    except (NotImplementedError, ValueError) as e:
        print(e)
        print('did not get rstar for {}. MUST MANUALLY FIX.'.format(source_id))
        try_mcmc = False

    #
    # initialize status file
    #
    status_file = os.path.join(fit_savdir, 'run_status.stat')
    fittype = 'fivetransitparam_fit'
    if not os.path.exists(status_file):
        save_status(status_file, fittype, {
            'is_converged': False,
            'n_steps_run': 0
        })
    status = load_status(status_file)[fittype]

    #
    # if not converged and no steps previously run:
    #   run 4k steps. write status file.
    #
    # reload status file.
    # if not converged and 4k steps previously run and in long ID list:
    #   run 25k steps, write status file.
    #
    # reload status file.
    # if not converged:
    #   print a warning.
    #
    if identifier in KNOWN_MCMC_FAILS:
        print('WRN! identifier {} requires manual fixing.'.format(identifier))
        try_mcmc = False

    if (not str2bool(status['is_converged'])
            and int(status['n_steps_run']) == 0 and try_mcmc):

        n_mcmc_steps = 4000

        mafr, tlsr, is_converged = fivetransitparam_fit_magseries(
            time,
            flux,
            err,
            teff,
            rstar,
            logg,
            identifier,
            fit_savdir,
            chain_savdir,
            n_mcmc_steps=n_mcmc_steps,
            overwriteexistingsamples=False,
            n_transit_durations=5,
            make_tlsfit_plot=True,
            exp_time_minutes=30,
            bandpass='******',
            magsarefluxes=True,
            nworkers=nworkers)

        status = {'is_converged': is_converged, 'n_steps_run': n_mcmc_steps}
        save_status(status_file, fittype, status)

    status = load_status(status_file)[fittype]
    if (not str2bool(status['is_converged'])
            and int(status['n_steps_run']) != 25000
            and int(identifier) in LONG_RUN_IDENTIFIERS and try_mcmc):

        n_mcmc_steps = 25000

        # NOTE: hard-code nworkers, since we don't get a multithreading
        # improvement anyway (this is some kind of bug)
        mafr, tlsr, is_converged = fivetransitparam_fit_magseries(
            time,
            flux,
            err,
            teff,
            rstar,
            logg,
            identifier,
            fit_savdir,
            chain_savdir,
            n_mcmc_steps=n_mcmc_steps,
            overwriteexistingsamples=True,
            n_transit_durations=5,
            make_tlsfit_plot=True,
            exp_time_minutes=30,
            bandpass='******',
            magsarefluxes=True,
            nworkers=4)

        status = {'is_converged': is_converged, 'n_steps_run': n_mcmc_steps}
        save_status(status_file, fittype, status)

    #
    # if converged, or in the list of IDs for which it's fine to skip convergence
    # (because by-eye, the fits are converged), convert fit results to ctoi csv
    # format
    #
    status = load_status(status_file)[fittype]

    if (str2bool(status['is_converged'])
            or int(identifier) in SKIP_CONVERGENCE_IDENTIFIERS):

        try:
            _ = isinstance(mafr, dict)
        except UnboundLocalError:
            #
            # get the MCMC results from the pickle file; regenerate the TLS
            # result.
            #

            fitparamdir = os.path.dirname(status_file)
            fitpklsavpath = os.path.join(
                fitparamdir,
                '{}_phased_fivetransitparam_fit_empiricalerrs.pickle'.format(
                    identifier))
            with open(fitpklsavpath, 'rb') as f:
                mafr = pickle.load(f)

            tlsp = htls.tls_parallel_pfind(time,
                                           flux,
                                           err,
                                           magsarefluxes=True,
                                           tls_rstar_min=0.1,
                                           tls_rstar_max=10,
                                           tls_mstar_min=0.1,
                                           tls_mstar_max=5.0,
                                           tls_oversample=8,
                                           tls_mintransits=1,
                                           tls_transit_template='default',
                                           nbestpeaks=5,
                                           sigclip=None,
                                           nworkers=nworkers)
            tlsr = tlsp['tlsresult']

        ticid = int(hdr['TICID'])
        ra, dec = hdr['RA_OBJ'], hdr['DEC_OBJ']
        print('{} converged. writing ctoi csv.'.format(identifier))
        fit_results_to_ctoi_csv(ticid,
                                ra,
                                dec,
                                mafr,
                                tlsr,
                                outpath,
                                toidf,
                                ctoidf,
                                teff,
                                teff_err,
                                rstar,
                                rstar_err,
                                logg,
                                logg_err,
                                cdipsvnum=cdipsvnum)
    else:
        print('WRN! {} did not converge, after {} steps. MUST MANUALLY FIX.'.
              format(identifier, status['n_steps_run']))
def explore_flux_lightcurves(
    data, ticid, outdir=None, period=None, epoch=None, pipeline=None,
    detrend=False, window_length=None, do_phasefold=0, badtimewindows=None,
    get_lc=False, require_quality_zero=1, forceylim=None, normstitch=True,
    slideclipdict={'window_length':1, 'high':3, 'low':8},
    mask_orbit_edges=False
):
    """
    Given a list of SPOC 2 minute data FITS tables, stitch them across sectors
    and make diagnostic plots. (A usage sketch follows the function definition
    below.)

    Args:

        data (list): from `get_tess_data`; contents like [hdulistA[1].data,
            hdulistB[1].data], etc.

        ticid (str): TIC ID.

        pipeline (str): one of ['cdips', 'spoc', 'eleanor', 'cdipspre',
            'kepler', 'qlp']; of these, only 'spoc', 'kepler', 'qlp', and
            'cdips' are currently handled below. This is used to access the
            flux, provenance, etc.

        outdir (str): diagnostic plots are written here. If None, goes to
        cdips_followup results directory.

    Optional kwargs:

        period, epoch (float): optional ephemeris; if given, transit times are
            over-plotted and used for phase-folding.

        detrend (bool or str): one of 'biweight', 'pspline', 'minimal',
            'median', or 'best'. Default parameters are assumed for each.

        badtimewindows (list): to manually mask out, [(1656, 1658), (1662,
            1663)], for instance.

        get_lc (bool): if True, returns time and flux arrays.

        require_quality_zero (bool): if True, keeps only QUALITY==0 cadences,
            which can throw out a lot of (potentially good) data.

        normstitch (bool): normalize flux across sectors s.t. the relative
        amplitude remains fixed.

        slideclipdict (dict): e.g., {'window_length':1, 'high':3, 'low':8}
            clips points more than 3 MAD above or 8 MAD below the running
            median in a 1 day sliding window.
    """

    assert isinstance(data, list), 'Expected a list of FITS tables.'

    if pipeline not in ['spoc', 'kepler', 'qlp', 'cdips']:
        raise NotImplementedError

    if isinstance(epoch, float):
        if epoch < 2450000:
            raise ValueError(f'Expected epoch in BJDTDB. Got epoch={epoch:.6f}.')

    ykey = LCKEYDICT[pipeline]['flux']
    xkey = LCKEYDICT[pipeline]['time']
    qualkey = LCKEYDICT[pipeline]['quality']
    prov = LCKEYDICT[pipeline]['prov']
    inst = LCKEYDICT[pipeline]['inst']
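    # LCKEYDICT is assumed to map each pipeline to its column names and time
    # offset; an illustrative (not authoritative) SPOC-like entry might be:
    # LCKEYDICT = {
    #     'spoc': {'flux': 'PDCSAP_FLUX', 'time': 'TIME', 'quality': 'QUALITY',
    #              'prov': 'spoc', 'inst': 'tess', 'time_offset': 2457000},
    #     ...
    # }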

    if outdir is None:
        outdir = os.path.join(RESULTSDIR, 'quicklooklc', f'TIC{ticid}')

    times, fluxs= [], []
    for ix, d in enumerate(data):

        savpath = os.path.join(
            outdir, f'TIC{ticid}_{prov}_{inst}_lightcurve_{str(ix).zfill(2)}.png'
        )
        if detrend:
            savpath = os.path.join(
                outdir, f'TIC{ticid}_{prov}_{inst}_lightcurve_{detrend}_{str(ix).zfill(2)}.png'
            )

        plt.close('all')
        f,ax = plt.subplots(figsize=(16*2,4*1.5))

        if require_quality_zero:
            okkey = 0 if pipeline in 'spoc,kepler,qlp'.split(',') else 'G'
            sel = (d[qualkey] == okkey) & (d[ykey] > 0)
            print(42*'.')
            print('WRN!: omitting all non-zero quality flags. throws out good data!')
            print(42*'.')
        else:
            sel = (d[ykey] > 0)
        if badtimewindows is not None:
            for w in badtimewindows:
                sel &= ~(
                    (d[xkey] > w[0])
                    &
                    (d[xkey] < w[1])
                )

        # correct time column to BJD_TDB
        x_offset = LCKEYDICT[pipeline]['time_offset']
        x_obs = d[xkey][sel] + x_offset

        # get the median-normalized flux
        y_obs = d[ykey][sel]
        if pipeline == 'cdips':
            y_obs, _ = _given_mag_get_flux(y_obs, y_obs*1e-3)
        y_obs /= np.nanmedian(y_obs)

        if mask_orbit_edges:
            x_obs, y_obs, _ = moe.mask_orbit_start_and_end(
                x_obs, y_obs, raise_expectation_error=False, orbitgap=0.7,
                orbitpadding=12/(24),
                return_inds=True
            )

        # slide clip -- remove outliers with a windowed MAD clip about the median
        if isinstance(slideclipdict, dict):
            y_obs = slide_clip(x_obs, y_obs, slideclipdict['window_length'],
                               low=slideclipdict['low'],
                               high=slideclipdict['high'], method='mad',
                               center='median')


        if detrend:
            ax.scatter(x_obs, y_obs, c='k', s=4, zorder=2)

            # default pspline detrending
            if detrend=='pspline':
                y_obs, y_trend = dtr.detrend_flux(x_obs, y_obs)
                x_trend = deepcopy(x_obs)

            # in some cases, might prefer the biweight
            elif detrend == 'biweight':
                y_obs, y_trend = dtr.detrend_flux(x_obs, y_obs,
                                                  method='biweight', cval=5,
                                                  window_length=0.5,
                                                  break_tolerance=0.5)
                x_trend = deepcopy(x_obs)

            elif detrend == 'minimal':
                y_obs, y_trend = dtr.detrend_flux(x_obs, y_obs,
                                                  method='biweight', cval=2,
                                                  window_length=3.5,
                                                  break_tolerance=0.5)
                x_trend = deepcopy(x_obs)

            elif detrend == 'median':
                y_obs, y_trend = dtr.detrend_flux(x_obs, y_obs,
                                                  method='median',
                                                  window_length=0.6,
                                                  break_tolerance=0.5,
                                                  edge_cutoff=0.)
                x_trend = deepcopy(x_obs)

            elif detrend == 'best':
                from cdips.lcproc.find_planets import run_periodograms_and_detrend
                dtr_dict = {'method':'best', 'break_tolerance':0.5, 'window_length':0.5}
                lsp_options = {'period_min':0.1, 'period_max':20}

                # r = [source_id, ls_period, ls_fap, ls_amplitude, tls_period, tls_sde,
                #      tls_t0, tls_depth, tls_duration, tls_distinct_transit_count,
                #      tls_odd_even, dtr_method]

                r, search_time, search_flux, dtr_stages_dict = run_periodograms_and_detrend(
                    ticid, x_obs, y_obs, dtr_dict,
                    period_min=lsp_options['period_min'],
                    period_max=lsp_options['period_max'], dtr_method='best',
                    return_extras=True,
                    magisflux=True
                )
                y_trend, x_trend, dtr_method = (
                    dtr_stages_dict['trend_flux'],
                    dtr_stages_dict['trend_time'],
                    dtr_stages_dict['dtr_method_used']
                )
                x_obs, y_obs = deepcopy(search_time), deepcopy(search_flux)
                print(f'TIC{ticid} TLS results')
                print(f'dtr_method_used: {dtr_method}')
                print(r)

            else:
                raise NotImplementedError

        if detrend:
            ax.plot(x_trend, y_trend, c='r', lw=0.5, zorder=3)
        else:
            ax.scatter(x_obs, y_obs, c='k', s=4, zorder=2)

        times.append( x_obs )
        fluxs.append( y_obs )

        ax.set_xlabel('time [bjdtdb]')
        ax.set_ylabel(ykey)
        ylim = ax.get_ylim()

        ax.set_title(ix)

        if detrend:
            _ylim = _get_ylim(y_trend)
        else:
            _ylim = _get_ylim(y_obs)

        ax.set_ylim(_ylim)
        if isinstance(forceylim, (list, tuple)):
            ax.set_ylim(forceylim)

        if epoch is not None:
            tra_times = epoch + np.arange(-1000,1000,1)*period

            xlim = ax.get_xlim()
            ylim = ax.get_ylim()

            ax.set_ylim((min(ylim), max(ylim)))
            ax.vlines(tra_times, min(ylim), max(ylim), color='orangered',
                      linestyle='--', zorder=-2, lw=0.5, alpha=0.3)
            ax.set_ylim((min(ylim), max(ylim)))
            ax.set_xlim(xlim)

        f.savefig(savpath, dpi=300, bbox_inches='tight')
        print('made {}'.format(savpath))

    if normstitch:
        times, fluxs, _ = lcu.stitch_light_curves(
            times, fluxs, fluxs, magsarefluxes=True, normstitch=True
        )
    else:
        times = np.hstack(times)
        fluxs = np.hstack(fluxs)

    # NOTE: this call is deprecated
    stimes, smags, _ = lcmath.sigclip_magseries(
        times, fluxs, np.ones_like(fluxs), sigclip=[20,20], iterative=True,
        magsarefluxes=True
    )

    savpath = os.path.join(
        outdir, f'TIC{ticid}_{prov}_{inst}_lightcurve_{str(ykey).zfill(2)}_allsector.png'
    )
    if detrend:
        savpath = os.path.join(
            outdir, f'TIC{ticid}_{prov}_{inst}_lightcurve_{detrend}_{str(ykey).zfill(2)}_allsector.png'
        )

    plt.close('all')
    f,ax = plt.subplots(figsize=(16,4))

    ax.scatter(stimes, smags, c='k', s=1)

    if epoch is not None:
        tra_times = epoch + np.arange(-1000,1000,1)*period

        xlim = ax.get_xlim()
        ylim = ax.get_ylim()

        ax.set_ylim((min(ylim), max(ylim)))
        ax.vlines(tra_times, min(ylim), max(ylim), color='orangered',
                  linestyle='--', zorder=-2, lw=0.5, alpha=0.3)
        ax.set_ylim((min(ylim), max(ylim)))
        ax.set_xlim(xlim)

    ax.set_xlabel('time [bjdtdb]')
    ax.set_ylabel('relative '+ykey)

    ax.set_title(ix)

    f.savefig(savpath, dpi=400, bbox_inches='tight')
    print('made {}'.format(savpath))

    csvpath = savpath.replace('.png','_sigclipped.csv')
    pd.DataFrame({
        'time': stimes, 'flux': smags,
    }).to_csv(csvpath, index=False)
    print(f'made {csvpath}')


    if do_phasefold:

        assert (
            isinstance(period, (float,int)) and isinstance(epoch, (float,int))
        )

        #
        # ax: primary transit
        #
        if inst == 'kepler':
            phasebin = 1e-3
        elif inst == 'tess':
            phasebin = 5e-3
        minbinelems = 2
        plotxlims = [(-0.5, 0.5), (-0.05,0.05)]
        xlimstrs = ['xwide','xnarrow']
        plotylim = [0.9, 1.08]  # or None, or e.g. [0.9, 1.1]
        do_vlines = False

        for plotxlim, xstr in zip(plotxlims, xlimstrs):

            plt.close('all')
            fig, ax = plt.subplots(figsize=(4,3))

            # use times and fluxs, rather than the sigma-clipped arrays.
            _make_phased_magseries_plot(ax, 0, times, fluxs,
                                        np.ones_like(fluxs)/1e4, period, epoch,
                                        True, True, phasebin, minbinelems,
                                        plotxlim, '', xliminsetmode=False,
                                        magsarefluxes=True, phasems=0.8,
                                        phasebinms=4.0, verbose=True)
            if isinstance(plotylim, (list, tuple)):
                ax.set_ylim(plotylim)
            else:
                plotylim = _get_ylim(fluxs)
                ax.set_ylim(plotylim)

            if do_vlines:
                ax.vlines(1/6, min(plotylim), max(plotylim), color='orangered',
                          linestyle='--', zorder=-2, lw=1, alpha=0.8)
                ax.set_ylim(plotylim)

            dstr = detrend if detrend else ''
            savpath = os.path.join(
                outdir, f'TIC{ticid}_{prov}_{inst}_lightcurve_{dstr}_{ykey}_{xstr}_allsector_phasefold.png'
            )

            fig.savefig(savpath, dpi=400, bbox_inches='tight')
            print(f'made {savpath}')

        csvpath = savpath.replace('png','csv')
        pd.DataFrame({
            'time': times, 'flux': fluxs
        }).to_csv(csvpath, index=False)
        print(f'made {csvpath}')

    if get_lc:
        return times, fluxs
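
A hedged usage sketch for explore_flux_lightcurves (referenced in its docstring above); the file names, TIC ID, and ephemeris below are placeholders rather than values from the original code:

from astropy.io import fits

# hypothetical per-sector SPOC 2-minute light curve files
fnames = ['sector14_spoc_lc.fits', 'sector15_spoc_lc.fits']
data = [fits.open(f)[1].data for f in fnames]

times, fluxs = explore_flux_lightcurves(
    data, '1234567890', pipeline='spoc', detrend='biweight',
    period=3.5, epoch=2458325.0, do_phasefold=1, get_lc=True
)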
def explore_mag_lightcurves(data, ticid, period=None, epoch=None):

    for ykey in ['IRM1','IRM2','IRM3','PCA1','PCA2','PCA3','TFA1','TFA2','TFA3',]:

        times, mags= [], []
        for ix, d in enumerate(data):

            savpath = (
                '../results/quicklooklc/TIC{}/mag_lightcurve_{}_{}.png'.
                format(ticid, ykey, ix)
            )
            if os.path.exists(savpath):
                print('found {}, rewriting'.format(savpath))

            plt.close('all')
            f,ax = plt.subplots(figsize=(16,4))

            ax.scatter(d['TMID_BJD'], d[ykey], c='k', s=5)
            times.append(d['TMID_BJD'])
            mags.append( d[ykey] - np.nanmedian(d[ykey]) )

            ax.set_xlabel('time [bjdtdb]')
            ax.set_ylabel(ykey)
            ylim = ax.get_ylim()
            ax.set_ylim((max(ylim), min(ylim)))

            if epoch is not None:
                if np.min(d['TMID_BJD']) < 2450000 and epoch > 2450000:
                    epoch -= 2457000
                if np.min(d['TMID_BJD']) > 2450000 and epoch < 2450000:
                    epoch += 2457000

                tra_times = epoch + np.arange(-1000,1000,1)*period

                xlim = ax.get_xlim()
                ylim = ax.get_ylim()

                ax.vlines(tra_times, max(ylim), min(ylim), color='orangered',
                          linestyle='--', zorder=-2, lw=0.5, alpha=0.3)
                ax.set_ylim((max(ylim), min(ylim)))
                ax.set_xlim(xlim)

            ax.set_title(ix)

            f.savefig(savpath, dpi=300, bbox_inches='tight')
            print('made {}'.format(savpath))

        times = np.hstack(times)
        mags = np.hstack(mags)

        stimes, smags, _ = lcmath.sigclip_magseries(
            times, mags, np.ones_like(mags), sigclip=[20,3], iterative=True
        )

        savpath = (
            '../results/quicklooklc/TIC{}/mag_lightcurve_{}_allsector.png'.
            format(ticid, ykey)
        )
        if os.path.exists(savpath):
            print('found {}, rewriting'.format(savpath))

        plt.close('all')
        f,ax = plt.subplots(figsize=(16,4))

        ax.scatter(stimes, smags, c='k', s=5)

        xlim = ax.get_xlim()
        ylim = ax.get_ylim()

        if epoch is not None:
            if np.min(d['TMID_BJD']) < 2450000 and epoch > 2450000:
                epoch -= 2457000
            if np.min(d['TMID_BJD']) > 2450000 and epoch < 2450000:
                epoch += 2457000

            tra_times = epoch + np.arange(-1000,1000,1)*period

            xlim = ax.get_xlim()
            ylim = ax.get_ylim()

            ax.vlines(tra_times, max(ylim), min(ylim), color='orangered',
                      linestyle='--', zorder=-2, lw=0.5, alpha=0.3)
            ax.set_ylim((max(ylim), min(ylim)))
            ax.set_xlim(xlim)

        ax.set_xlabel('time [bjdtdb]')
        ax.set_ylabel('relative '+ykey)
        ylim = ax.get_ylim()
        ax.set_ylim((max(ylim), min(ylim)))

        ax.set_title(ix)

        f.savefig(savpath, dpi=300, bbox_inches='tight')
        print('made {}'.format(savpath))
def explore_eleanor_lightcurves(data, ticid, period=None, epoch=None,
                                require_quality_zero=0, detrend=0,
                                do_phasefold=0):

    ykey = 'CORR_FLUX'

    times, fluxs= [], []
    for ix, d in enumerate(data):

        outdir = '../results/quicklooklc/TIC{}'.format(ticid)
        savpath = os.path.join(outdir, 'eleanor_lightcurve_{}.png'.format(ix))
        if detrend:
            savpath = os.path.join(outdir, 'eleanor_lightcurve_detrended_{}.png'.format(ix))

        plt.close('all')
        f,ax = plt.subplots(figsize=(16,4))

        sel = (d['QUALITY'] == 0) & (d[ykey] > 0)
        if require_quality_zero:
            if 'QUALITY' in d.names:
                qualkey = 'QUALITY'
            else:
                qualkey = 'SAP_QUALITY'
            sel = (d[qualkey] == 0) & (d[ykey] > 0)
            print(42*'.')
            print('WRN!: omitting all non-zero quality flags. throws out good data!')
            print(42*'.')
        else:
            sel = (d[ykey] > 0)

        x_obs = d['TIME'][sel]
        y_obs = d[ykey][sel]

        if detrend:
            ax.scatter(x_obs, y_obs, c='k', s=4, zorder=2)

            # default pspline detrending
            if detrend=='pspline':
                y_obs, y_trend = dtr.detrend_flux(x_obs, y_obs)

            # in some cases, might prefer the biweight
            elif detrend == 'biweight':
                y_obs, y_trend = dtr.detrend_flux(x_obs, y_obs,
                                                  method='biweight', cval=5,
                                                  window_length=0.5,
                                                  break_tolerance=0.5)

            else:
                raise NotImplementedError

        if detrend:
            ax.plot(x_obs, y_trend, c='r', lw=0.5, zorder=3)
        else:
            ax.scatter(x_obs, y_obs, c='k', s=4, zorder=2)

        ax.set_xlabel('time [bjdtdb]')
        ax.set_ylabel(ykey)
        ylim = ax.get_ylim()

        ax.set_title(ix)

        f.savefig(savpath, dpi=300, bbox_inches='tight')
        print('made {}'.format(savpath))

        times.append(x_obs)
        fluxs.append( y_obs / np.nanmedian(y_obs) )


    times = np.hstack(times)
    fluxs = np.hstack(fluxs)

    stimes, smags, _ = lcmath.sigclip_magseries(
        times, fluxs, np.ones_like(fluxs), sigclip=[5,2.5], iterative=True,
        magsarefluxes=True
    )

    savpath = os.path.join(
        outdir, f'eleanor_lightcurve_{ykey}_allsector.png'
    )
    if detrend:
        savpath = os.path.join(
            outdir, f'eleanor_lightcurve_detrended_{ykey}_allsector.png'
        )

    # do the sigma clipped
    x_obs, y_obs = stimes, smags

    plt.close('all')
    f,ax = plt.subplots(figsize=(16,4))

    ax.scatter(x_obs, y_obs, c='k', s=4, zorder=2)
    if epoch is not None:
        tra_times = epoch + np.arange(-1000,1000,1)*period - 2457000

        xlim = ax.get_xlim()
        ylim = ax.get_ylim()

        ax.set_ylim((min(ylim), max(ylim)))
        ax.vlines(tra_times, min(ylim), max(ylim), color='orangered',
                  linestyle='--', zorder=-2, lw=0.5, alpha=0.3)
        ax.set_ylim((min(ylim), max(ylim)))
        ax.set_xlim(xlim)

    ax.set_xlabel('time [bjdtdb]')
    ax.set_ylabel('relative '+ykey)

    ax.set_title(ix)

    f.savefig(savpath, dpi=400, bbox_inches='tight')
    print('made {}'.format(savpath))

    if do_phasefold:

        assert isinstance(period, float) and isinstance(epoch, float)

        #
        # ax: primary transit
        #
        phasebin = 1e-2
        minbinelems = 2
        plotxlims = [(-0.5, 0.5), (-0.05,0.05)]
        xlimstrs = ['xwide','xnarrow']
        plotylim = None # (0.994, 1.005)
        do_vlines = False

        for plotxlim, xstr in zip(plotxlims, xlimstrs):

            plt.close('all')
            fig, ax = plt.subplots(figsize=(4,3))

            _make_phased_magseries_plot(ax, 0, x_obs, y_obs,
                                        np.ones_like(y_obs)/1e4, period, epoch,
                                        True, True, phasebin, minbinelems,
                                        plotxlim, '', xliminsetmode=False,
                                        magsarefluxes=True, phasems=0.8,
                                        phasebinms=4.0, verbose=True)
            if plotylim is not None:
                ax.set_ylim(plotylim)

            if do_vlines:
                ax.vlines(1/6, min(plotylim), max(plotylim), color='orangered',
                          linestyle='--', zorder=-2, lw=1, alpha=0.8)
                ax.set_ylim(plotylim)

            dstr = 'detrended' if detrend else ''
            savpath = os.path.join(
                outdir, f'eleanor_lightcurve_{dstr}_{ykey}_{xstr}_allsector_phasefold.png'
            )

            fig.savefig(savpath, dpi=400, bbox_inches='tight')
            print(f'made {savpath}')

        csvpath = savpath.replace('png','csv')
        # sigma clipped and detrended
        pd.DataFrame({
            'time': x_obs, 'flux': y_obs
        }).to_csv(csvpath, index=False)
        print(f'made {csvpath}')
Example #10
def explore_mag_lightcurves(data):

    for yval in ['TFA1', 'TFA2', 'TFA3', 'IRM1', 'IRM2', 'IRM3']:

        times, mags = [], []
        for ix, d in enumerate(data):

            savpath = (
                '../../results/TIC308538095/mag_lightcurve_{}_{}.png'.format(
                    yval, ix))
            if os.path.exists(savpath):
                print('found {}, continue'.format(savpath))
                continue

            plt.close('all')
            f, ax = plt.subplots(figsize=(16, 4))

            ax.scatter(d['TMID_BJD'], d[yval], c='k', s=5)
            times.append(d['TMID_BJD'])
            mags.append(d[yval] - np.nanmedian(d[yval]))

            ax.set_xlabel('time [bjdtdb]')
            ax.set_ylabel(yval)
            ylim = ax.get_ylim()
            ax.set_ylim((max(ylim), min(ylim)))

            ax.set_title(ix)

            f.savefig(savpath, dpi=300, bbox_inches='tight')
            print('made {}'.format(savpath))

        times = np.hstack(times)
        mags = np.hstack(mags)

        stimes, smags, _ = lcmath.sigclip_magseries(times,
                                                    mags,
                                                    np.ones_like(mags),
                                                    sigclip=[20, 3],
                                                    iterative=True)

        savpath = ('../../results/TIC308538095/mag_lightcurve_{}_allsector.png'
                   .format(yval))
        if os.path.exists(savpath):
            print('found {}, continue'.format(savpath))
            continue

        plt.close('all')
        f, ax = plt.subplots(figsize=(16, 4))

        ax.scatter(stimes, smags, c='k', s=5)

        period = 11.69201165
        epoch = 2458642.44550000

        tra_times = epoch + np.arange(-100, 100, 1) * period

        xlim = ax.get_xlim()
        ylim = ax.get_ylim()

        ax.set_ylim((min(ylim), max(ylim)))
        ax.vlines(tra_times,
                  min(ylim),
                  max(ylim),
                  color='orangered',
                  linestyle='--',
                  zorder=-2,
                  lw=2,
                  alpha=0.3)
        ax.set_ylim((min(ylim), max(ylim)))
        ax.set_xlim(xlim)

        ax.set_xlabel('time [bjdtdb]')
        ax.set_ylabel('relative ' + yval)
        ylim = ax.get_ylim()
        ax.set_ylim((max(ylim), min(ylim)))

        ax.set_title(ix)

        f.savefig(savpath, dpi=300, bbox_inches='tight')
        print('made {}'.format(savpath))
Example #11
def explore_eleanor_lightcurves(data):

    yval = 'PCA_FLUX'

    times, fluxs = [], []
    for ix, d in enumerate(data):

        savpath = (
            '../../results/TIC308538095/eleanor_lightcurve_{}.png'.format(ix))
        #if os.path.exists(savpath):
        #    print('found {}, continue'.format(savpath))
        #    continue

        plt.close('all')
        f, ax = plt.subplots(figsize=(16, 4))

        sel = (d['QUALITY'] == 0) & (d[yval] > 0)
        if ix == 2:
            # hack for a wonky ramp
            sel = (d['QUALITY'] == 0) & (d[yval] > 1600)

        ax.scatter(d['TIME'][sel], d[yval][sel], c='k', s=5)
        times.append(d['TIME'][sel])
        fluxs.append(d[yval][sel] / np.nanmedian(d[yval][sel]))

        ax.set_xlabel('time [bjdtdb]')
        ax.set_ylabel(yval)
        ylim = ax.get_ylim()

        ax.set_title(ix)

        f.savefig(savpath, dpi=300, bbox_inches='tight')
        print('made {}'.format(savpath))

    times = np.hstack(times)
    fluxs = np.hstack(fluxs)

    stimes, smags, _ = lcmath.sigclip_magseries(times,
                                                fluxs,
                                                np.ones_like(fluxs),
                                                sigclip=[15, 3],
                                                iterative=True,
                                                magsarefluxes=True)

    savpath = ('../../results/TIC308538095/eleanor_lightcurve_{}_allsector.png'
               .format(yval))
    #if os.path.exists(savpath):
    #    print('found {}, continue'.format(savpath))
    #    return

    plt.close('all')
    f, ax = plt.subplots(figsize=(16, 4))

    ax.scatter(stimes, smags, c='k', s=5)

    period = 11.69201165
    epoch = 2458642.44550000

    tra_times = epoch + np.arange(-100, 100, 1) * period - 2457000

    xlim = ax.get_xlim()
    ylim = ax.get_ylim()

    ax.set_ylim((min(ylim), max(ylim)))
    ax.vlines(tra_times,
              min(ylim),
              max(ylim),
              color='orangered',
              linestyle='--',
              zorder=-2,
              lw=2,
              alpha=0.3)
    ax.set_ylim((min(ylim), max(ylim)))
    ax.set_xlim(xlim)

    ax.set_xlabel('time [bjdtdb]')
    ax.set_ylabel('relative ' + yval)

    ax.set_title(ix)

    f.savefig(savpath, dpi=300, bbox_inches='tight')
    print('made {}'.format(savpath))