Example #1
def test_search_k2sff():
    """Can we search and download a K2SFF light curve?"""
    # Try an early campaign
    search = search_lightcurve("K2-18", author="K2SFF", campaign=1)
    assert len(search) == 1
    assert search.table["author"][0] == "K2SFF"
    lc = search.download()
    assert type(lc).__name__ == "KeplerLightCurve"
    assert lc.campaign == 1
    # Try a late campaign
    lc = search_lightcurve("GJ 9827", author="K2SFF", campaign=19).download()
    assert type(lc).__name__ == "KeplerLightCurve"
    assert lc.targetid == 246389858
    assert lc.campaign == 19
Example #2
def get_lk_LCs(tic):
    #search the MAST archive for the given TIC using search_lightcurve
    #(note: .download_all() from search_lightcurve has had issues in the past)
    lcf_search = search_lightcurve('TIC ' + str(tic))

    #save search table and remove all HLSP from results
    #(so far, only QLP HLSP seem compatible with lightkurve)
    lcf_search_table = lcf_search.table
    lcf_search_table = lcf_search_table[lcf_search_table['obs_collection'] ==
                                        'TESS']

    # lcf_df = lcf_search_table.to_pandas()
    # all_lcs = []
    # for i,row in lcf_df.iterrows():
    #     temp_search_table = lcf_search_table[lcf_search_table['obs_id'] == row['obs_id']]
    #     temp_search = lk.SearchResult(table = temp_search_table)
    #     try:
    #         all_LCs.append(temp_search.download())
    #     except:
    #         print("Error lk download.")

    #download all lightcurves from search result
    lcf_search = lk.SearchResult(table=lcf_search_table)
    lcf = lcf_search.download_all()
    all_lcs = lcf.data  #save all lightcurves to all_lcs list

    spoc120_lc = spoc120(lcf_search, all_lcs)  #extract spoc120 LC if available

    return (all_lcs, spoc120_lc, lcf_search_table)
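
A minimal usage sketch for the helper above. The TIC ID is illustrative, and the sketch assumes `import lightkurve as lk`, `from lightkurve import search_lightcurve`, and a two-argument spoc120 helper are available (a one-argument variant of spoc120 appears in a later example).

import lightkurve as lk
from lightkurve import search_lightcurve

# Illustrative TIC ID; any target with TESS light curves on MAST would work.
all_lcs, spoc120_lc, search_table = get_lk_LCs(tic=261136679)
print("Downloaded", len(all_lcs), "light curves")
print(search_table)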
Example #3
def _from_mast_K2(targetid,
                  mode,
                  c,
                  flux_type="PDCSAP_FLUX",
                  cadence="long",
                  aperture_mask="default",
                  download_dir=None):
    mission = "K2"
    if mode == "TPF":

        tpffilelist = search_targetpixelfile(targetid,
                                             mission=mission,
                                             campaign=c,
                                             cadence=cadence)
        tpf = tpffilelist.download(download_dir=download_dir)

        if aperture_mask == "default":
            aperture_mask = tpf.pipeline_mask

        lc = tpf.to_lightcurve(aperture_mask=aperture_mask)

        flc = _convert_TPF_to_FLC(tpf, lc)

        return flc

    elif mode == "LC":

        flcfilelist = search_lightcurve(targetid,
                                        mission=mission,
                                        campaign=c,
                                        cadence=cadence,
                                        author="K2")

        return _handle_missions(flcfilelist, mission, flux_type, cadence,
                                download_dir, targetid, c)
Example #4
def test_search_tasoc():
    """Can we search and download a TASOC light curve?"""
    search = search_lightcurve("TIC 412064070", author="TASOC", sector=1)
    assert len(search) == 1
    assert search.table["author"][0] == "TASOC"
    lc = search.download()
    assert type(lc).__name__ == "TessLightCurve"
    assert lc.sector == 1
Example #5
def test_search_everest():
    """Can we search and download an EVEREST light curve?"""
    search = search_lightcurve("GJ 9827", author="EVEREST", campaign=12)
    assert len(search) == 1
    assert search.table["author"][0] == "EVEREST"
    lc = search.download()
    assert type(lc).__name__ == "KeplerLightCurve"
    assert lc.campaign == 12
Example #6
def test_search_cdips():
    """Can we search and download a cdips light curve?"""
    search = search_lightcurve("TIC 93270923", author="CDIPS", sector=8)
    assert len(search) == 1
    assert search.table["author"][0] == "CDIPS"
    lc = search.download()
    assert type(lc).__name__ == "TessLightCurve"
    assert lc.sector == 8
Example #7
def kepler_prime_LC(kic):
    #search light curve for given KIC ID
    search_res = lk.search_lightcurve('KIC ' + str(kic))
    #initialize flags: Kepler data found, first light curve found
    lc_found = False
    lc_first = False

    try:
        authors = search_res.table['author']
    except KeyError:  #an empty search result has no 'author' column
        kepler_avail = False
        kepler_lc = pd.DataFrame()
        return (kepler_lc, kepler_avail)

    if 'Kepler' in authors:
        kepler_avail = True
    else:
        kepler_avail = False
        kepler_lc = pd.DataFrame()
        return (kepler_lc, kepler_avail)

    lc_holder = []
    for i in range(len(search_res)):
        #select search result object
        search_i = search_res[i]
        #skip if not produced by the Kepler pipeline
        if (search_i.author.data[0] == 'Kepler'
            ):  # & (search_i.exptime.data[0] == 120):
            print("Found " + str(search_i.mission[0]) + " data for KIC " +
                  str(kic) + "!")
            lc_found = True
            if not lc_first:
                lc_first = True
        else:
            continue
        lk_lc = search_res[i].download()
        lk_lc = lk_lc.remove_outliers(sigma=5.0)
        lk_lc_df = lk_lc.to_pandas().reset_index(drop=False)

        lk_lc_df['quarter'] = np.repeat(
            a=lk_lc.QUARTER,
            repeats=len(lk_lc))  #add quarter label for my plotting functions
        lc_holder.append(lk_lc_df)  #store in lc_holder

        #delete stuff
        fn = lk_lc.FILENAME
        del lk_lc
        os.remove(path=fn)

    if lc_found == False:
        print("No Kepler data found for KIC " + str(kic) + ".")
        kepler_lc = pd.DataFrame()
    else:
        kepler_lc = pd.concat(lc_holder)  #combine lc into 1 pandas dataframe

    return (kepler_lc, kepler_avail)
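
A minimal usage sketch for the helper above. The KIC ID is illustrative; the sketch assumes the same imports the function relies on (os, numpy, pandas, lightkurve).

import os
import numpy as np
import pandas as pd
import lightkurve as lk

# Illustrative KIC ID; any target with Kepler prime-mission light curves would work.
kepler_lc, kepler_avail = kepler_prime_LC(kic=8462852)
if kepler_avail:
    print(kepler_lc.head())
else:
    print("No Kepler data available for this target.")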
Example #8
def _from_mast_Kepler(targetid,
                      c,
                      flux_type="PDCSAP_FLUX",
                      cadence="long",
                      download_dir=None):

    mission = "Kepler"
    flcfilelist = search_lightcurve(targetid,
                                    mission=mission,
                                    quarter=c,
                                    cadence=cadence)

    return _handle_missions(flcfilelist, mission, flux_type, cadence,
                            download_dir, targetid, c)
Example #9
def _getMASTidentifier(ID, lkwargs):
    """ return KIC/TIC/EPIC for given ID.
    
    If the input ID is not a KIC/TIC/EPIC identifier, the target is looked up
    on MAST and the identifier is retrieved. If a mission is not specified, the
    set of observations with the most quarters/sectors etc. will be used. 
    
    Parameters
    ----------
    ID : str
        Target ID
    lkwargs : dict
        Dictionary with arguments to be passed to lightkurve. In this case
        mission and exptime.
    
    Returns
    -------
    ID : str
        The KIC/TIC/EPIC ID of the target.    
    """

    if not any([x in ID for x in ['KIC', 'TIC', 'EPIC']]):

        search = lk.search_lightcurve(ID,
                                      exptime=lkwargs['exptime'],
                                      mission=lkwargs['mission'])

        if len(search) == 0:
            raise ValueError(f'No results for {ID} found on MAST')

        maxFreqName = max(set(list(search.table['target_name'])),
                          key=list(search.table['target_name']).count)
        maxFreqObsCol = max(set(list(search.table['obs_collection'])),
                            key=list(search.table['obs_collection']).count)

        if maxFreqObsCol == 'TESS':
            prefix = 'TIC'
        else:
            prefix = ''

        temp_id = prefix + maxFreqName

        ID = _format_name(temp_id).replace(' ', '')
        lkwargs['mission'] = maxFreqObsCol
    else:
        ID = ID.replace(' ', '')
    return ID
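
The lookup above resolves an arbitrary name by taking the most frequently occurring target_name and obs_collection in the search table. A small self-contained sketch of that selection idiom, using toy values rather than real MAST output:

# Toy illustration of the max(set(values), key=values.count) idiom used above.
names = ["307210830", "307210830", "307210830", "12345678"]
collections = ["TESS", "TESS", "TESS", "Kepler"]

most_common_name = max(set(names), key=names.count)                     # "307210830"
most_common_collection = max(set(collections), key=collections.count)  # "TESS"

prefix = "TIC" if most_common_collection == "TESS" else ""
print((prefix + most_common_name).replace(" ", ""))  # -> TIC307210830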
Example #10
def _from_mast_TESS(targetid,
                    c,
                    flux_type="PDCSAP_FLUX",
                    cadence="long",
                    download_dir=None,
                    **kwargs):

    mission = "TESS"
    # print(targetid, mission, c, cadence, kwargs)
    flcfilelist = search_lightcurve(targetid,
                                    mission=mission,
                                    sector=c,
                                    cadence=cadence,
                                    **kwargs)
    # print(flcfilelist)
    return _handle_missions(flcfilelist, mission, flux_type, cadence,
                            download_dir, targetid, c)
Example #11
def retrieve(args: Args) -> List[LightCurve]:
    """ Retrieve a collection of lightcurves.

    Retrieve a collection of light curves.
    This is a wrapper for [lightkurve.search_lightcurve](https://docs.lightkurve.org/reference/api/lightkurve.search_lightcurve.html?highlight=search_lightcurve).

    Args:
        args (Args): an instance of the Args class

    Returns:
        lcc: a list of light curves
    """
    quarters = list(args.quarters)

    lcc = lk.search_lightcurve(args.target,
                               mission=args.mission,
                               quarter=quarters,
                               cadence=args.cadence).download_all()

    return list(lcc)
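
A minimal usage sketch, assuming a hypothetical stand-in for the Args container with the fields the wrapper reads (target, mission, quarters, cadence); the real Args class is not shown in this example.

from dataclasses import dataclass
from typing import Tuple

@dataclass
class FakeArgs:  # hypothetical stand-in for the Args class used above
    target: str = "Kepler-10"
    mission: str = "Kepler"
    quarters: Tuple[int, ...] = (2, 3)
    cadence: str = "long"

lcs = retrieve(FakeArgs())
print(len(lcs), "light curves downloaded")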
Example #12
def test_underfit_metric_neighbors():
    """Sanity checks for `underfit_metric_neighbors`."""
    # PDCSAP_FLUX has a very good score (>0.99) because it has been corrected
    lc_pdcsap = search_lightcurve(
        "Proxima Cen", sector=11,
        author="SPOC").download(flux_column="pdcsap_flux")
    assert underfit_metric_neighbors(lc_pdcsap, min_targets=3,
                                     max_targets=3) > 0.99

    # SAP_FLUX has a worse score (<0.9) because it hasn't been corrected
    lc_sap = lc_pdcsap.copy()
    lc_sap.flux = lc_pdcsap.sap_flux
    lc_sap.flux_err = lc_pdcsap.sap_flux_err
    assert underfit_metric_neighbors(lc_sap, min_targets=3,
                                     max_targets=3) < 0.9

    # A flat light curve should have a perfect score (1)
    notnan = ~np.isnan(lc_sap.flux)
    lc_sap.flux.value[notnan] = np.ones(notnan.sum())
    assert underfit_metric_neighbors(lc_sap, min_targets=3,
                                     max_targets=3) == 1.0
Example #13
def _search_and_dump(ID, lkwargs, search_cache):
    """ Get lightkurve search result online.
    
    Uses the lightkurve search_lightcurve to find the list of available
    data for a target ID. 
    
    Stores the result in the ~/.lightkurve-cache/searchResult directory as a 
    dictionary with the search result object and a timestamp.
    
    Parameters
    ----------
    ID : str
        ID string of the target
    lkwargs : dict
        Dictionary to be passed to LightKurve
    search_cache : str
        Directory to store the search results in. 
        
    Returns
    -------
    resultDict : dict
        Dictionary with the search result object and timestamp.    
    """

    current_date = datetime.now().isoformat()
    store_date = current_date[:current_date.index('T')].replace('-', '')

    search = lk.search_lightcurve(ID,
                                  exptime=lkwargs['exptime'],
                                  mission=lkwargs['mission'])

    resultDict = {'result': search, 'timestamp': store_date}

    fname = os.path.join(
        *[search_cache, f"{ID}_{lkwargs['exptime']}.lksearchresult"])

    pickle.dump(resultDict, open(fname, "wb"))

    return resultDict
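
A companion sketch for reading such a cached search result back, mirroring the file-name convention used above (the function name is illustrative, not part of the original module):

import os
import pickle

def _load_cached_search(ID, lkwargs, search_cache):
    """Return the cached {'result', 'timestamp'} dict if present, else None."""
    fname = os.path.join(search_cache, f"{ID}_{lkwargs['exptime']}.lksearchresult")
    if not os.path.exists(fname):
        return None
    with open(fname, "rb") as f:
        return pickle.load(f)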
Example #14
def test_one_lpp():
    """"Use case is to get values for one TCE."""

    period = 3.5224991 * u.day
    tzero = (54953.6193 + 2400000.5 - 2454833.0) * u.day
    duration = 3.1906 * u.hour
    depth = 0.009537 * const.frac_amp
    target_name = "Kepler-8"
    event_name = "Kepler-8 b"

    tce = Tce(period=period,
              epoch=tzero,
              duration=duration,
              target_name=target_name,
              depth=depth,
              event_name=event_name,
              epoch_offset=0 * u.day,
              snr=10)

    # Specify the lightcurve to vet
    mission = "Kepler"
    q = 4

    # Generic function that runs lightkurve and returns a lightkurve object
    lcf = search_lightcurve(target_name,
                            quarter=q,
                            mission=mission,
                            exptime=1800).download(flux_column="sap_flux")
    lc = lcf.remove_nans().remove_outliers()
    flat = lc.flatten(window_length=81)
    flat.flux = flat.flux.value - 1.0

    # Use default .mat file from SourceForge
    lpp = vetters.Lpp(lc_name="flux", map_filename=None)

    _ = lpp.run(tce, flat)

    # Accepted value if data doesn't change
    assert_allclose(lpp.norm_lpp, 0.17, atol=0.09)
Example #15
def test_pipeline_1():
    # TODO: Implement me
    ticid = 377780790
    sector = 14

    lcdata = lk.search_lightcurve("Kepler-10", mission='TESS',
                                  sector=14)[0].download()

    config = pipe.load_def_config()
    vetter_list = pipe.load_def_vetter()
    thresholds = {
        'snr': 1,
        'norm_lpp': 2.0,
        'tp_cover': 0.6,
        'oe_sigma': 3,
        'sweet': 3
    }

    tce_tces, result_strings, metrics_list = pipe.search_and_vet_one(
        ticid, sector, lcdata, config, vetter_list, thresholds, plot=False)

    assert lk.__version__ == '2.0b5'
    assert tce_tces[0]['snr'] < 1
Example #16
def hlsp(ticid, sector, author="tess-spoc", local_dir = None):
    """
    

    Parameters
    ----------
    ticid : int
        DESCRIPTION.
    sector : int
        Sector of observations to vet
    author : string, OPTIONAL
        options include tess-spoc and tess-qlp.
        The default is "tess-spoc".
    loocaldir : string
        local directory to read from None: Default

    Returns
    -------
    lc : lightkurve object
        lightkurve object of the data requested.

    """
    
    #print(f'TIC {ticid}')
    
    if local_dir is None:
    
        lc = lk.search_lightcurve(f"TIC {ticid}", sector=sector,
                              cadence="ffi",author=author).download()
    else:
        
        filename  = get_hlsp_filename(ticid, sector, author)
        
        lc = lk.io.read(local_dir + "/" + filename)
    
    
    return lc
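
A minimal usage sketch for the hlsp helper above; the TIC/sector pair is illustrative and may need adjusting to a target with TESS-SPOC coverage.

import lightkurve as lk

# Fetch a TESS-SPOC FFI light curve from MAST (illustrative TIC/sector).
lc = hlsp(ticid=261136679, sector=1, author="tess-spoc")
lc.plot()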
def Periodogram(target, targetquarter):
    lc = search_lightcurve(target, author="Kepler", quarter=targetquarter, cadence="long").download().remove_nans()
    
    pg = lc.to_periodogram(oversample_factor=10)
    pg.plot();
    plt.title("Periodogram Oversample Factor [10]");

    pg = lc.to_periodogram(oversample_factor=1)
    pg.plot();
    plt.title("Periodogram Oversample Factor [1]");

    pg.plot(view='period', scale='log');
    plt.title("Log Scale Period Space");

    lc.fold(period=pg.period_at_max_power).scatter();
    plt.title("Period at Max Power");

    lc.fold(period=2*pg.period_at_max_power, wrap_phase=0.2).scatter();
    plt.title("Plot With Double the Period");

    lc.fold(period=4*pg.period_at_max_power, wrap_phase=0.2).scatter();
    plt.title("Plot With Quadruple the Period");

    print("\nPeriod at Max Power\n" + str(pg.period_at_max_power) + "\n")
def test_sff_nan_centroids():
    """Regression test for #827: SFF failed if light curve contained
    NaNs in its `centroid_col` or `centroid_row` columns."""
    lc = search_lightcurve("EPIC 211083408", author="K2").download()
    # This previously raised a ValueError:
    lc[200:500].remove_nans().to_corrector("sff").correct()

###################################  003  #########################################
# %%
print("\nSection 003\n")

t = """
Tabby's Star (also known as Boyajian's Star and WTF Star, and designated KIC 8462852 
in the Kepler Input Catalog) is an F-type main-sequence star in the constellation Cygnus 
approximately 1,470 light-years (450 pc) from Earth. 
The star shows unusual light fluctuations, including dips in brightness of up to 22%.
"""

print(t + "\n")

search_result = lk.search_lightcurve('KIC 8462852', author='Kepler')
print(search_result)
print("\n")

ProcessTarget("KIC 8462852", 1, 1800, )

Periodogram("KIC 8462852", 1)

print("\nThe plot reveals a short-lived 20% dip in the brightness of the star. It looks like we re-discovered one of the intriguing dips in Tabby's star.\n")


pixelfile = lk.search_targetpixelfile("KIC 8462852")[1].download();
lc = pixelfile.to_lightcurve(method="pld").remove_outliers().flatten()
period = lc.to_periodogram("bls").period_at_max_power
lc.fold(period).scatter();
plt.title("KIC 8462852 (1)");
from astropy import units as u
import lightkurve as lk
import numpy as np
'''
4955894425631686912 160222069
4955894528710881408 160222069
6756793523028737024 425561347
5801881413893258112 311179742
6560686965549688960 140067837
6518496707230129920 139456051
6518497016468468224 139456051
5801881413893258112 311179742
5019047693471033088 32451836
6507063646023467520 161169240
'''
lc = lk.search_lightcurve('TIC 160222069', mission='TESS').download()
#lc[0].plot()
#print(np.nanmedian(lc[0].flux.value), np.nanmedian(lc[2].flux.value), np.nanmedian(lc[4].flux.value))
#plt.scatter(lc[0].time.value,lc[0].flux.value/np.nanmedian(lc[0].flux.value), c='k', marker='.')
#plt.scatter(lc[2].time.value,lc[2].flux.value/np.nanmedian(lc[2].flux.value), c='k', marker='.')
#plt.scatter(lc[4].time.value,lc[4].flux.value/np.nanmedian(lc[4].flux.value), c='k', marker='.')
#plt.show()

#time = np.concatenate((lc[0].time.value,lc[2].time.value,lc[4].time.value))
#flux = np.concatenate((lc[0].flux.value/np.nanmedian(lc[0].flux.value),lc[2].flux.value/np.nanmedian(lc[2].flux.value),lc[4].flux.value/np.nanmedian(lc[4].flux.value)))

#plt.scatter(time, flux)
#plt.show()

new_lc = lk.LightCurve(time=lc.time.value, flux=lc.flux.value)
folded = new_lc.fold(period=5.850943)  #, epoch_time = 2459135.870906)
    lc.plot();
    print("\nMask Done!")

    """lc = pixelfile.to_lightcurve(aperture_mask='all');
    lc.plot();
    print("\nAll Done!")"""

    flat_lc = lc.flatten(window_length=401);
    flat_lc.plot();
    print("\nFlat Done!")
    
# %%

# %%
print("\nSection 003\n")
search_result = lk.search_lightcurve('KIC 8462852', author='Kepler')
print(search_result)

ProcessTarget("KIC 8462852", 16, 1800, )

print("\nThe plot reveals a short-lived 20% dip in the brightness of the star. It looks like we re-discovered one of the intriguing dips in Tabby's star.\n")
# %%




# %%
print("\nSection 004\n")
# https://docs.lightkurve.org/tutorials/1-getting-started/what-are-lightcurve-objects.html

search_result = lk.search_lightcurve('KIC 6922244', author='Kepler')
import matplotlib.pyplot as plt
from scipy.signal import argrelextrema, savgol_filter

# lcf = lightkurve.search_lightcurve("TIC 352315023", mission="TESS", cadence="short", sector=[13, 27], author="SPOC")\
#     .download_all()
# lc = lcf.stitch().remove_nans()
# lc = lc.remove_outliers(sigma_lower=float('inf'), sigma_upper=3)
# model = tls.transitleastsquares(lc.time.value, lc.flux.value)
# results = model.power(period_min=0.45, period_max=40, use_threads=multiprocessing.cpu_count(),
#                       oversampling_factor=1.1119355997446583, T0_fit_margin=0.05, duration_grid_step=1.1)
# print(results)
from scipy import stats, signal
from scipy.interpolate import interp1d
import time
t0 = time.time()
lcf = lightkurve.search_lightcurve("TIC 251848941", mission="TESS", cadence="short", sector=[2], author="SPOC")\
    .download_all()
lc = lcf.stitch().remove_nans()
lc = lc.remove_outliers(sigma_lower=float('inf'), sigma_upper=3)
lc_time = lc.time.value
flux = lc.flux.value
cadence = 2
window_length = 25 / cadence
flux = savgol_filter(flux, 11, 3)
R_s = 1.1
M_s = 1.3
P_min = 0.5
P_max = 22
ld_coefficients = [0.2, 0.1]
min_duration = wotan.t14(R_s, M_s, P_min, True)
max_duration = wotan.t14(R_s, M_s, P_max, True)
duration_grid = np.arange(min_duration * 24 * 60 // cadence,
Example #23
def get_mastlc(name=None,
               coords=None,
               tic=None,
               sectors='all',
               author=['SPOC', 'TESS-SPOC'],
               cadence='shortest',
               out_sec=False):
    #!!!Need to do some comprehension if multiple authors are specified so
    #   that their list order also denotes their priority order!!!
    #!!!mix_cadence keyword!!!
    #!!!ffi_only or no_ffi flags!!
    #!!!Add ability to bin sectors to a given cadence!!!
    """
    Function to retrieve a TESS light curve for a given target and sectors at
    the requested cadence. Returns a combined light curve in the form of a
    lightkurve object. If light curves from multiple sectors are combined, each
    light curve is individually normalized prior to combining.

    Parameters
    ----------
    name : str
       Name of the target requested.
    coords : tuple, 2-element list, or SkyCoord coordinates
       RA and Dec of the target being queried. Can be in decimal or sexagesimal.
    tic : integer
       TESS Input Catalog ID for desired target. At this time, common names are
       not accepted input, only TIC IDs.
    sectors : list, numpy array, or 'all'
       List of desired sectors to include when fetching the SPOC-processed light
       curve. If 'all' is specified, all available light curves
       will be downloaded.
    author : str or list
       Specifies the author of the light curves. Useful for retrieving light
       curves created by non-SPOC entities that have been posted on MAST.
       Default ['SPOC', 'TESS-SPOC'].
    cadence : str or int
       Specifies the desired cadence of the light curves. Options are 'shortest'
       for the shortest cadence in each sector, '20sec' or 20, '2min' or 120,
       '10min' or 600, and '30min' or 1800. Default is 'shortest'.
    out_sec : bool
       A flag to determine whether the sectors from which light curves were
       downloaded are included as an output. If True, command will provide two
       outputs, the light curve object and a numpy array of sectors, in that 
       order.

    Returns
    -------
    lc : 'LightCurve'
       Combined light curve of all available light curves for the specified
       target at the requested cadence.
    secs : numpy array, optional
       List of sectors from which light curve was gathered.
    """
    if coords is None and tic is None and name is None:
        raise ValueError("Valid coordinates, a TIC ID, or a name must be " +
                         "specified!")

    if cadence == '2min' or cadence == 120 or cadence == '2minutes':
        exptime = 120  #add def_authors here for short or long cadence flags?
        #add flag for only short cadence and no ffis
    elif cadence == '20sec' or cadence == 20 or cadence == '20seconds':
        exptime = 20
    elif cadence == '30min' or cadence == 1800 or cadence == '30minutes':
        exptime = 1800
    elif cadence == '10min' or cadence == 600 or cadence == '10minutes':
        exptime = 600

    if tic is not None:
        name = "TIC " + str(tic)

    if name is not None:
        search_result = lk.search_lightcurve(name, author=author)
    else:
        search_result = lk.search_lightcurve(str(coords[0]) + " " + str(coords[1]),
                                             author=author)

    if len(search_result) == 0:
        raise ValueError('No valid sectors found for target with specified ' +
                         'inputs!')

    secs = list(map(int, [row[12:] for row in search_result.table['mission']]))

    #remove unwanted sectors
    if sectors != 'all':
        if not isinstance(sectors, list):
            sectors = list(sectors)
        search_result = search_result[:][[(element in sectors)
                                          for element in secs]]
        if len(search_result) == 0:
            raise ValueError('None of the specified sectors found for the ' +
                             'specified target!')

    #remove duplicate sectors based on shortest cadence or specify exptime
    if cadence != 'shortest':
        search_result = search_result[search_result.table['exptime'] ==
                                      exptime]
    else:
        df = pd.DataFrame([[int(row['mission'][12:]),
                            int(row['exptime'])]
                           for row in search_result.table],
                          columns=['sector', 'exptime'])

        keep = np.zeros(len(df['sector']), dtype=bool)
        keep[df.loc[df.groupby('sector').exptime.idxmin()].index] = True

        search_result = search_result[keep]

    if len(search_result) == 0:
        raise ValueError('No light curves found at the specified cadence!')

    secs = list(map(int, [row[12:] for row in search_result.table['mission']]))

    #download light curves
    lc_col = search_result.download_all()
    lc = lc_col.stitch()

    if out_sec:
        return lc, secs
    else:
        return lc
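
A minimal usage sketch for get_mastlc; the TIC ID is illustrative, and the sketch assumes the imports the function relies on (lightkurve as lk, numpy as np, pandas as pd).

import numpy as np
import pandas as pd
import lightkurve as lk

# Stitch all available SPOC/TESS-SPOC light curves and report the sectors used.
lc, secs = get_mastlc(tic=261136679, sectors='all', cadence='2min', out_sec=True)
print("Sectors used:", secs)
lc.scatter()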
Example #24
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 23 21:09:54 2021

@author: smullally
"""

import numpy
import matplotlib
import exovetter as exo
from exovetter import const
from exovetter import utils
import exovetter.vetters as vet
import lightkurve as lk

candidate = "TOI 565.01"
tce = utils.get_mast_tce(candidate)
lc = lk.search_lightcurve(candidate, exptime=120)[0].download()
lc.plot()
tpf = lk.search_targetpixelfile(candidate, exptime=120)[0].download()

cent = vet.Centroid()
cent.run(tce[0], tpf, plot=True)
Example #25
def test_tasoc_ppm_units():
    """Regression test for #956."""
    lc = lk.search_lightcurve('HV 2112', author='TASOC', sector=1, exptime=1800).download()
    assert lc['flux_corr'].unit == "ppm"
    assert "Unrecognized" not in repr(lc['flux_corr'].unit)
Example #26
display(r1_select_variable)


# %%


##################################  004  #########################################
# %%
print("\nSection 004\n")

# Print a list of all target entries

target1 = r1_select_variable.value
print("Target: " + str(target1))

search_result = lk.search_lightcurve(target1)
print(search_result)
print("\n")

Target_Name = " "

# %%


##################################  004B  #########################################
# %%
print("\nSection 004B\n")

# Print the first entry
print("\nThe first entry\n")
Example #27
def spoc120(tic):
    # '''
    # Extracts 2-minute cadence SPOC LCs from a lightkurve query.

    # Parameters
    # ----------
    # lcf_search : lk.search_lightcurvefile object
    #     DESCRIPTION.
    # all_lcs : lk.search_lightcurvefile.download_all().data object
    #     DESCRIPTION.

    # Returns
    # -------
    # spoc_120_lc : pandas dataframe
    #     SPOC 2 minute cadence for all sectors with sector label column

    # '''
    # search_df = lcf_search.table.to_pandas()
    # spoc120_index = list(search_df[(search_df['author'] == 'SPOC') & (search_df['exptime'] == 120)].index.to_numpy(dtype = 'int'))
    # if (len(spoc120_index) > 0) & (len(all_lcs) > 0):
    #     spoc120_LCs = [all_lcs[i] for i in spoc120_index]

    #     for i,lc in enumerate(spoc120_LCs):
    #         sector = lc.sector

    #         temp_lc = lc.to_pandas().reset_index()[['time','pdcsap_flux','pdcsap_flux_err','sap_flux','sap_flux_err','quality']]

    #         sector_repeats = np.repeat(a = sector, repeats = len(temp_lc))
    #         temp_lc['sector'] = sector_repeats

    #         if i == 0:
    #             spoc120_full_LC = temp_lc
    #         else:
    #             spoc120_full_LC = pd.concat([spoc120_full_LC,temp_lc])

    #     return(spoc120_full_LC)
    # else:
    #     return(pd.DataFrame())

    #search light curve for given TIC ID
    search_res = lk.search_lightcurve('TIC ' + str(tic))
    #initialize SPOC found,first found
    spoc_found = False
    spoc_first = False

    lc_holder = []
    for i in range(len(search_res)):
        #select search result object
        search_i = search_res[i]
        #skip if not SPOC, 120 s exposure
        if (search_i.author.data[0] == 'SPOC') & (search_i.exptime.data[0]
                                                  == 120):
            print("Found SPOC " + str(search_i.mission[0]) + " data for TIC " +
                  str(tic) + "!")
            spoc_found = True
            if not spoc_first:
                spoc_first = True
        else:
            continue
        lk_lc = search_res[i].download()
        lk_lc = lk_lc.remove_outliers(sigma=5.0)
        lk_lc_df = lk_lc.to_pandas().reset_index(drop=False)

        lk_lc_df['sector'] = np.repeat(
            a=lk_lc.sector,
            repeats=len(lk_lc))  #add sector label for my plotting functions
        lc_holder.append(lk_lc_df)  #store in lc_holder

        #delete stuff
        fn = lk_lc.FILENAME
        del lk_lc
        os.remove(path=fn)

    if spoc_found == False:
        print("No SPOC data found for TIC " + str(tic) + ".")
        spoc_lc = pd.DataFrame()
    else:
        spoc_lc = pd.concat(lc_holder)  #combine lc into 1 pandas dataframe

    return (spoc_lc)
        try:
            search_result = lk.search_lightcurve(f'TOI{TOI}', mission='TESS', cadence="short", author='SPOC')
            #tpf_file = lk.search_targetpixelfile(f'TIC {TIC}', mission='TESS', cadence="short", author='SPOC').download_all(quality_bitmask='default')
            #tpf_file.plot()
            #plt.show()

        except HTTPError:
            print('HTTPError, retry.')
        else:
            break
    '''
    #folding_lc_from_csv(homedir, TOInumber)
    #print(f'Analysis completed: {TOInumber}')
    #import pdb;pdb.set_trace()
    search_result = lk.search_lightcurve(f'TOI{TOI}',
                                         mission='TESS',
                                         cadence="short",
                                         author='SPOC')

    lc_collection = search_result.download_all()
    try:
        lc = lc_collection.stitch().remove_nans()  #initialize lc
    except AttributeError:
        continue
    '''
    """bls analysis"""
    bls_period = np.linspace(10, 50, 10000)
    bls = lc.to_periodogram(method='bls',period=bls_period)#oversample_factor=1)\
    print('bls period = ', bls.period_at_max_power)
    print(f'period = {period}')
    print('bls transit time = ', bls.transit_time_at_max_power)
    print(f'transit time = {transit_time}')
Example #29
 def build(self, object_info: MissionObjectInfo, sherlock_dir, caches_root_dir):
     mission_id = object_info.mission_id()
     sherlock_id = object_info.sherlock_id()
     logging.info("Retrieving star catalog info...")
     mission, mission_prefix, id = super().parse_object_id(mission_id)
     if mission_prefix not in self.star_catalogs:
         raise ValueError("Wrong object id " + mission_id)
     cadence = object_info.cadence if object_info.cadence is not None else "short"
     author = object_info.author if object_info.author is not None else self.authors[mission]
     star_info = starinfo.StarInfo(sherlock_id, *self.star_catalogs[mission_prefix].catalog_info(id))
     logging.info("Downloading lightcurve files...")
     sectors = None if object_info.sectors == 'all' or mission != constants.MISSION_TESS else object_info.sectors
     campaigns = None if object_info.sectors == 'all' or mission != constants.MISSION_K2 else object_info.sectors
     quarters = None if object_info.sectors == 'all' or mission != constants.MISSION_KEPLER else object_info.sectors
     tokens = sectors if sectors is not None else campaigns if campaigns is not None else quarters
     tokens = tokens if tokens is not None else "all"
     apertures = {}
     tpf_search_results = lk.search_targetpixelfile(str(mission_id))
     for tpf_search_result in tpf_search_results:
         logging.info("There is data for Mission: %s, Year %.0f, Author: %s, ExpTime: %.0f",
                      tpf_search_result.mission[0], tpf_search_result.year[0], tpf_search_result.author[0],
                      tpf_search_result.exptime[0].value)
     tpfs_dir = sherlock_dir + "/tpfs/"
     if not os.path.exists(tpfs_dir):
         os.mkdir(tpfs_dir)
     if object_info.apertures is None:
         lcf_search_results = lk.search_lightcurve(str(mission_id), mission=mission, cadence=cadence,
                                        sector=sectors, quarter=quarters,
                                        campaign=campaigns, author=author)
         lcf = lcf_search_results.download_all(download_dir=caches_root_dir + LIGHTKURVE_CACHE_DIR)
         tpfs = lk.search_targetpixelfile(str(mission_id), mission=mission, cadence=cadence,
                                        sector=sectors, quarter=quarters,
                                        campaign=campaigns, author=author)\
             .download_all(download_dir=caches_root_dir + LIGHTKURVE_CACHE_DIR,
                           cutout_size=(CUTOUT_SIZE, CUTOUT_SIZE))
         if lcf is None:
             raise ObjectProcessingError("The target " + str(mission_id) + " is not available for the author " + author +
                              ", cadence " + str(cadence) + "s and sectors " + str(tokens))
         lc_data = self.extract_lc_data(lcf)
         lc = None
         matching_objects = []
         for tpf in tpfs:
             shutil.copy(tpf.path, tpfs_dir + os.path.basename(tpf.path))
             if mission_prefix == self.MISSION_ID_KEPLER:
                 sector = tpf.quarter
             elif mission_prefix == self.MISSION_ID_TESS:
                 sector = tpf.sector
             elif mission_prefix == self.MISSION_ID_KEPLER_2:
                 sector = tpf.campaign
             apertures[sector] = ApertureExtractor.from_boolean_mask(tpf.pipeline_mask, tpf.column, tpf.row)
         for i in range(0, len(lcf.data)):
             if lcf.data[i].label == mission_id:
                 if lc is None:
                     lc = lcf.data[i].normalize()
                 else:
                     lc = lc.append(lcf.data[i].normalize())
             else:
                 matching_objects.append(lcf.data[i].label)
         matching_objects = set(matching_objects)
         if len(matching_objects) > 0:
             logging.warning("================================================")
             logging.warning("TICS IN THE SAME PIXEL: " + str(matching_objects))
             logging.warning("================================================")
         if lc is None:
             tokens = sectors if sectors is not None else campaigns if campaigns is not None else quarters
             tokens = tokens if tokens is not None else "all"
             raise ObjectProcessingError("The target " + str(mission_id) + " is not available for the author " + author +
                              ", cadence " + str(cadence) + "s and sectors " + str(tokens))
         lc = lc.remove_nans()
         transits_min_count = self.__calculate_transits_min_count(len(lcf))
         if mission_prefix == self.MISSION_ID_KEPLER:
             sectors = [lcfile.quarter for lcfile in lcf]
         elif mission_prefix == self.MISSION_ID_TESS:
             sectors = [file.sector for file in lcf]
         elif mission_prefix == self.MISSION_ID_KEPLER_2:
             logging.info("Correcting K2 motion in light curve...")
             sectors = [lcfile.campaign for lcfile in lcf]
             lc = lc.to_corrector("sff").correct(windows=20)
         source = "tpf"
     else:
         logging.info("Using user apertures!")
         tpf_search_results = lk.search_targetpixelfile(str(mission_id), mission=mission, cadence=cadence,
                                          sector=sectors, quarter=quarters, campaign=campaigns,
                                          author=author)
         tpfs = tpf_search_results.download_all(download_dir=caches_root_dir + LIGHTKURVE_CACHE_DIR,
                                                cutout_size=(CUTOUT_SIZE, CUTOUT_SIZE))
         source = "tpf"
         apertures = object_info.apertures
         lc = None
         for tpf in tpfs:
             shutil.copy(tpf.path, tpfs_dir + os.path.basename(tpf.path))
             if mission_prefix == self.MISSION_ID_KEPLER:
                 sector = tpf.quarter
             elif mission_prefix == self.MISSION_ID_TESS:
                 sector = tpf.sector
             elif mission_prefix == self.MISSION_ID_KEPLER_2:
                 sector = tpf.campaign
             boolean_aperture = ApertureExtractor.from_pixels_to_boolean_mask(apertures[sector], tpf.column, tpf.row,
                                                                      CUTOUT_SIZE, CUTOUT_SIZE)
             tpf.plot(aperture_mask=boolean_aperture, mask_color='red')
             plt.savefig(sherlock_dir + "/fov/Aperture_[" + str(sector) + "].png")
             plt.close()
             if mission_prefix == self.MISSION_ID_KEPLER:
                 corrector = lk.KeplerCBVCorrector(tpf)
                 corrector.plot_cbvs([1, 2, 3, 4, 5, 6, 7])
                 raw_lc = tpf.to_lightcurve(aperture_mask=boolean_aperture).remove_nans()
                 plt.savefig(sherlock_dir + "/Corrector_components[" + str(sector) + "].png")
                 plt.close()
                 it_lc = corrector.correct([1, 2, 3, 4, 5])
                 ax = raw_lc.plot(color='C3', label='SAP Flux', linestyle='-')
                 it_lc.plot(ax=ax, color='C2', label='CBV Corrected SAP Flux', linestyle='-')
                 plt.savefig(sherlock_dir + "/Raw_vs_CBVcorrected_lc[" + str(sector) + "].png")
                 plt.close()
             elif mission_prefix == self.MISSION_ID_KEPLER_2:
                 raw_lc = tpf.to_lightcurve(aperture_mask=boolean_aperture).remove_nans()
                 it_lc = raw_lc.to_corrector("sff").correct(windows=20)
                 ax = raw_lc.plot(color='C3', label='SAP Flux', linestyle='-')
                 it_lc.plot(ax=ax, color='C2', label='CBV Corrected SAP Flux', linestyle='-')
                 plt.savefig(sherlock_dir + "/Raw_vs_SFFcorrected_lc[" + str(sector) + "].png")
                 plt.close()
             elif mission_prefix == self.MISSION_ID_TESS:
                 temp_lc = tpf.to_lightcurve(aperture_mask=boolean_aperture)
                 where_are_NaNs = np.isnan(temp_lc.flux)
                 temp_lc = temp_lc[np.where(~where_are_NaNs)]
                 regressors = tpf.flux[np.argwhere(~where_are_NaNs), ~boolean_aperture]
                 temp_token_lc = [temp_lc[i: i + 2000] for i in range(0, len(temp_lc), 2000)]
                 regressors_token = [regressors[i: i + 2000] for i in range(0, len(regressors), 2000)]
                 it_lc = None
                 raw_it_lc = None
                 item_index = 0
                 for temp_token_lc_item in temp_token_lc:
                     regressors_token_item = regressors_token[item_index]
                     design_matrix = lk.DesignMatrix(regressors_token_item, name='regressors').pca(5).append_constant()
                     corr_lc = lk.RegressionCorrector(temp_token_lc_item).correct(design_matrix)
                     if it_lc is None:
                         it_lc = corr_lc
                         raw_it_lc = temp_token_lc_item
                     else:
                         it_lc = it_lc.append(corr_lc)
                         raw_it_lc = raw_it_lc.append(temp_token_lc_item)
                     item_index = item_index + 1
                 ax = raw_it_lc.plot(label='Raw light curve')
                 it_lc.plot(ax=ax, label='Corrected light curve')
                 plt.savefig(sherlock_dir + "/Raw_vs_DMcorrected_lc[" + str(sector) + "].png")
                 plt.close()
             if lc is None:
                 lc = it_lc.normalize()
             else:
                 lc = lc.append(it_lc.normalize())
         lc = lc.remove_nans()
         lc.plot(label="Normalized light curve")
         plt.savefig(sherlock_dir + "/Normalized_lc[" + str(sector) + "].png")
         plt.close()
         transits_min_count = self.__calculate_transits_min_count(len(tpfs))
         if mission_prefix == self.MISSION_ID_KEPLER or mission_prefix == self.MISSION_ID_KEPLER_2:
             sectors = [lcfile.quarter for lcfile in tpfs]
         elif mission_prefix == self.MISSION_ID_TESS:
             sectors = [file.sector for file in tpfs]
         if mission_prefix == self.MISSION_ID_KEPLER_2:
             logging.info("Correcting K2 motion in light curve...")
             sectors = [lcfile.campaign for lcfile in tpfs]
         sectors = None if sectors is None else np.unique(sectors)
         lc_data = None
     return LcBuild(lc, lc_data, star_info, transits_min_count, cadence, None, sectors, source, apertures)
def test_CBVCorrector_retrieval():
    """Tests CBVCorrector by retrieving some sample Kepler/TESS light curves
    and correcting them
    """

    # ***
    # A good TESS example of both over- and under-fitting
    # The "over-fitted" curve looks better to the eye, but eyes can be deceiving!
    lc = search_lightcurve("TIC 357126143",
                           mission="tess",
                           author="spoc",
                           sector=10).download(flux_column="sap_flux")
    cbvCorrector = CBVCorrector(lc)
    assert isinstance(cbvCorrector, CBVCorrector)

    cbv_type = ["SingleScale", "Spike"]
    cbv_indices = [np.arange(1, 9), "ALL"]

    # Gaussian Prior correction
    lc = cbvCorrector.correct_gaussian_prior(cbv_type=cbv_type,
                                             cbv_indices=cbv_indices,
                                             alpha=1e-2)
    assert isinstance(lc, TessLightCurve)
    # Check that returned lightcurve is in flux units
    assert lc.flux.unit == u.Unit("electron / second")
    ax = cbvCorrector.diagnose()
    assert len(ax) == 2 and isinstance(ax[0], matplotlib.axes._subplots.Axes)

    # ElasticNet corrections
    lc = cbvCorrector.correct_elasticnet(cbv_type=cbv_type,
                                         cbv_indices=cbv_indices,
                                         alpha=1e1,
                                         l1_ratio=0.5)
    assert isinstance(lc, TessLightCurve)
    assert lc.flux.unit == u.Unit("electron / second")
    ax = cbvCorrector.diagnose()
    assert len(ax) == 2 and isinstance(ax[0], matplotlib.axes._subplots.Axes)

    # Correction optimizer
    lc = cbvCorrector.correct(
        cbv_type=cbv_type,
        cbv_indices=cbv_indices,
        alpha_bounds=[1e-4, 1e4],
        target_over_score=0.5,
        target_under_score=0.8,
    )
    assert isinstance(lc, TessLightCurve)
    assert lc.flux.unit == u.Unit("electron / second")
    ax = cbvCorrector.diagnose()
    assert len(ax) == 2 and isinstance(ax[0], matplotlib.axes._subplots.Axes)

    # Goodness metric scan plot
    ax = cbvCorrector.goodness_metric_scan_plot(cbv_type=cbv_type,
                                                cbv_indices=cbv_indices)
    assert isinstance(ax, matplotlib.axes.Axes)

    # Try multi-scale basis vectors
    cbv_type = ["MultiScale.1", "MultiScale.2", "MultiScale.3"]
    cbv_indices = ["ALL", "ALL", "ALL"]
    lc = cbvCorrector.correct_gaussian_prior(cbv_type=cbv_type,
                                             cbv_indices=cbv_indices,
                                             alpha=1e-2)
    assert isinstance(lc, TessLightCurve)

    # ***
    # A Kepler and K2 example
    lc = search_lightcurve("KIC 6508221",
                           mission="kepler",
                           author="kepler",
                           quarter=5).download(flux_column="sap_flux")
    cbvCorrector = CBVCorrector(lc)
    lc = cbvCorrector.correct_gaussian_prior(alpha=1.0)
    assert isinstance(lc, KeplerLightCurve)
    assert lc.flux.unit == u.Unit("electron / second")

    lc = search_lightcurve("EPIC 247887989", mission="k2",
                           author="k2").download(flux_column="sap_flux")
    cbvCorrector = CBVCorrector(lc)
    lc = cbvCorrector.correct_gaussian_prior(alpha=1.0)
    assert isinstance(lc, KeplerLightCurve)
    assert lc.flux.unit == u.Unit("electron / second")

    lc = cbvCorrector.correct()
    assert isinstance(lc, KeplerLightCurve)
    assert lc.flux.unit == u.Unit("electron / second")

    # ***
    # Try some expected failures

    # cbv_type and cbv_indices not the same list lengths
    with pytest.raises(AssertionError):
        lc = cbvCorrector.correct_gaussian_prior(
            cbv_type=["SingleScale", "Spike"], cbv_indices=["all"], alpha=1e-2)

    # cbv_type is not a list
    with pytest.raises(AssertionError):
        lc = cbvCorrector.correct_gaussian_prior(cbv_type="SingleScale",
                                                 cbv_indices=["all"],
                                                 alpha=1e-2)

    # cbv_indices is not a list
    with pytest.raises(AssertionError):
        lc = cbvCorrector.correct_gaussian_prior(cbv_type=["SingleScale"],
                                                 cbv_indices="all",
                                                 alpha=1e-2)

    #***
    # Test the need for extrapolation with Kepler data
    lc = search_lightcurve("KIC 2437317",
                           mission="Kepler",
                           author="kepler",
                           cadence='long',
                           quarter=6).download(flux_column="sap_flux")
    cbv_type = ['SingleScale']
    cbv_indices = [np.arange(1, 9)]
    # This will generate a warning about the need for extrapolation
    cbvCorrector = CBVCorrector(lc,
                                interpolate_cbvs=True,
                                extrapolate_cbvs=False)
    # This will generate a light curve with the last value well below the median corrected flux
    cbvCorrector.correct_gaussian_prior(cbv_type=cbv_type,
                                        cbv_indices=cbv_indices,
                                        alpha=1e-4)
    assert (cbvCorrector.corrected_lc.flux[-1] -
            np.median(cbvCorrector.corrected_lc.flux)).value < -300
    # This will generate a light curve with the last value at about the median corrected flux
    cbvCorrector = CBVCorrector(lc,
                                interpolate_cbvs=True,
                                extrapolate_cbvs=True)
    cbvCorrector.correct_gaussian_prior(cbv_type=cbv_type,
                                        cbv_indices=cbv_indices,
                                        alpha=1e-4)
    assert ((cbvCorrector.corrected_lc.flux[-1] -
             np.median(cbvCorrector.corrected_lc.flux)).value > 0.0
            and (cbvCorrector.corrected_lc.flux[-1] -
                 np.median(cbvCorrector.corrected_lc.flux)).value < 20)