def __getitem__(self, index):
    example = self.examples[index]
    light_curve_file_name = example.file_name
    if example.label:
        example_directory = positive_data_directory
    else:
        example_directory = negative_data_directory
    light_curve = lightkurve.open(
        os.path.join(example_directory, light_curve_file_name))
    # FITS stores the flux big-endian; convert to native byte order.
    flux = light_curve.hdu[1].columns['FLUX'].array
    flux = flux.byteswap().newbyteorder()
    flux = (2 * (flux - flux.min()) /
            (flux.max() - flux.min())) - 1  # Normalize flux to [-1, 1].
    padding_required = padded_example_length - flux.size
    if padding_required < 0:
        # Example is too long: keep a random window of the target length.
        trimming_start = random.randint(0, abs(padding_required))
        trimming_end = -(abs(padding_required) - trimming_start)
        if trimming_end == 0:
            trimming_end = None
        padded_flux = flux[trimming_start:trimming_end]
    else:
        # Example is too short: reflect-pad a random split of the deficit.
        pre_padding = random.randint(0, padding_required)
        post_padding = padding_required - pre_padding
        padded_flux = np.pad(flux, (pre_padding, post_padding),
                             mode='reflect')
    return torch.tensor(padded_flux), torch.tensor(example.label,
                                                   dtype=torch.float32)
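A minimal usage sketch for a dataset with this __getitem__ (the class name KoiCatalogDataset is taken from the statistics helper below; the batch size and shuffling are illustrative). Because every item is trimmed or padded to padded_example_length, the default collate function can stack a batch directly:

from torch.utils.data import DataLoader

dataset = KoiCatalogDataset()
loader = DataLoader(dataset, batch_size=32, shuffle=True)
for flux_batch, label_batch in loader:
    # flux_batch: (batch, padded_example_length); label_batch: (batch,)
    pass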
def print_dataset_statistics():
    """Prints various statistics about the dataset."""
    dataset = KoiCatalogDataset()
    element_counts = []
    max_values = []
    min_values = []
    min_to_max_differences = []
    min_to_max_ratios = []
    for example in dataset.examples:
        light_curve_file_name = example.file_name
        if example.label:
            example_directory = positive_data_directory
        else:
            example_directory = negative_data_directory
        light_curve = lightkurve.open(
            os.path.join(example_directory, light_curve_file_name))
        flux = light_curve.hdu[1].columns['FLUX'].array
        element_counts.append(len(flux))
        max_values.append(flux.max())
        min_values.append(flux.min())
        min_to_max_differences.append(flux.max() - flux.min())
        min_to_max_ratios.append(flux.min() / flux.max())
    print(f'Max elements: {np.max(element_counts)}')
    print(f'Min elements: {np.min(element_counts)}')
    print(f'Std elements: {np.std(element_counts)}')
    print(f'Mean elements: {np.mean(element_counts)}')
    print(f'Max ratio: {np.max(min_to_max_ratios)}')
    print(f'Min ratio: {np.min(min_to_max_ratios)}')
    print(f'Std ratio: {np.std(min_to_max_ratios)}')
    print(f'Mean ratio: {np.mean(min_to_max_ratios)}')
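The element-count statistics above are a natural input for choosing the fixed padded_example_length that __getitem__ trims and pads to. A sketch of one such rule; the percentile cutoff is an illustrative assumption, not taken from the source:

import numpy as np

def choose_padded_length(element_counts, percentile=95):
    # Pick a length most examples already fit within; the 95th
    # percentile is an assumed, tunable choice.
    return int(np.percentile(element_counts, percentile))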
Example 3
def _query_lightkurve(id, download_dir, use_cached, lkwargs):
    """ Check cache for fits file, or download it.

    Based on the use_cached flag, looks in the cache for a fits file
    corresponding to the requested star ID. If nothing is found in the
    cache, it will be downloaded from the MAST server.

    Parameters
    ----------
    id : string
        Identifier for the requested star. Must be resolvable by Lightkurve.
    download_dir : str
        Path to the cache directory.
    use_cached : bool
        Whether or not to use data in the Lightkurve cache.
    lkwargs : dict
        Dictionary containing keywords for the Lightkurve search:
        cadence, quarter, campaign, sector, month.

    Notes
    -----
    Prioritizes long cadence over short cadence unless otherwise specified.

    """

    cache_dir = _set_cache_dir(download_dir)

    _set_mission(id, lkwargs)

    ext = _set_cadence(lkwargs)

    tgtfiles = _lookup_cached_files(id, cache_dir, ext)

    if use_cached and (len(tgtfiles) != 0):
        lc_col = [lk.open(n) for n in tgtfiles]
    else:
        # Cache miss or cache bypass: query MAST.
        if use_cached and (len(tgtfiles) == 0):
            warnings.warn(
                'Could not find %s cadence data for %s in cache, checking MAST...'
                % (lkwargs['cadence'], id))

        lc_col = _launch_query(id, cache_dir, lkwargs)

        if len(lc_col) == 0:
            raise ValueError(
                "Could not find %s cadence data for %s in cache or on MAST" %
                (lkwargs['cadence'], id))

    # Perform reduction on the first lc of the collection and append the rest
    lc0 = _clean_lc(lc_col[0].PDCSAP_FLUX)
    for lc in lc_col[1:]:
        lc0 = lc0.append(_clean_lc(lc.PDCSAP_FLUX))

    return lc0
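A usage sketch for this helper, assuming it is called the way its signature and docstring suggest (the target identifier and keyword values are illustrative, and _set_mission() may add further keys to lkwargs):

lkwargs = {'cadence': 'long', 'quarter': None, 'campaign': None,
           'sector': None, 'month': None}
lc = _query_lightkurve('KIC 4448777', download_dir=None,
                       use_cached=True, lkwargs=lkwargs)
lc.plot()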
Example 4
def __getitem__(self, index):
    observation_file_name = self.observation_file_names[index]
    observation = lightkurve.open(
        os.path.join(data_directory, observation_file_name))
    target_flare_data = self.catalog_data_frame.loc[observation.targetid]
    flare_frequency_coefficients = (target_flare_data['alpha_ffd'],
                                    target_flare_data['beta_ffd'])
    # byteswap().newbyteorder() keeps the values while converting the
    # big-endian FITS flux to native byte order for torch.
    return torch.tensor(observation.flux.byteswap().newbyteorder()), \
        torch.tensor(flare_frequency_coefficients)
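Both this __getitem__ and the one in the first example have to handle byte order: FITS stores arrays big-endian, and torch rejects NumPy arrays whose byte order differs from the machine's native one. A self-contained illustration of the conversion idiom (the array here is synthetic):

import numpy as np
import torch

big_endian = np.arange(5, dtype='>f4')         # big-endian float32, as in FITS
native = big_endian.byteswap().newbyteorder()  # same values, native byte order
tensor = torch.tensor(native)                  # now accepted by torch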
Example 5
def ffi_lowess_detrend(save_path='',
                       sector=1,
                       target_ID_list=[],
                       pipeline='2min',
                       multi_sector=False,
                       use_peak_cut=False,
                       binned=False,
                       transit_mask=False,
                       injected_planet='user_defined',
                       injected_rp=0.1,
                       injected_per=8.0,
                       detrending='lowess_partial',
                       single_target_ID=['HIP 1113'],
                       n_bins=30):
    for target_ID in target_ID_list:
        try:
            lc_30min = lightkurve.lightcurve.TessLightCurve(time=[], flux=[])
            if multi_sector:
                sap_lc, pdcsap_lc = two_min_lc_download(target_ID,
                                                        sector=multi_sector[0],
                                                        from_file=False)
                lc_30min = pdcsap_lc
                nancut = np.isnan(lc_30min.flux) | np.isnan(lc_30min.time)
                lc_30min = lc_30min[~nancut]
                clean_time, clean_flux, clean_flux_err = clean_tess_lc(
                    lc_30min.time, lc_30min.flux, lc_30min.flux_err, target_ID,
                    multi_sector[0], save_path)
                lc_30min.time = clean_time
                lc_30min.flux = clean_flux
                lc_30min.flux_err = clean_flux_err
                for sector_num in multi_sector[1:]:
                    sap_lc_new, pdcsap_lc_new = two_min_lc_download(
                        target_ID, sector_num, from_file=False)
                    lc_30min_new = pdcsap_lc_new
                    nancut = np.isnan(lc_30min_new.flux) | np.isnan(
                        lc_30min_new.time)
                    lc_30min_new = lc_30min_new[~nancut]
                    clean_time, clean_flux, clean_flux_err = clean_tess_lc(
                        lc_30min_new.time, lc_30min_new.flux,
                        lc_30min_new.flux_err, target_ID, sector_num,
                        save_path)
                    lc_30min_new.time = clean_time
                    lc_30min_new.flux = clean_flux
                    lc_30min_new.flux_err = clean_flux_err
                    lc_30min = lc_30min.append(lc_30min_new)
#                    lc_30min.flux = lc_30min.flux.append(lc_30min_new.flux)
#                    lc_30min.time = lc_30min.time.append(lc_30min_new.time)
#                    lc_30min.flux_err = lc_30min.flux_err.append(lc_30min_new.flux_err)
            else:
                try:
                    if pipeline == 'DIA':
                        lc_30min, filename = diff_image_lc_download(
                            target_ID,
                            sector,
                            plot_lc=True,
                            save_path=save_path,
                            from_file=True)
                    elif pipeline == '2min':
                        sap_lc, pdcsap_lc = two_min_lc_download(
                            target_ID, sector=sector, from_file=False)
                        lc_30min = pdcsap_lc
                        nancut = np.isnan(lc_30min.flux) | np.isnan(
                            lc_30min.time)
                        lc_30min = lc_30min[~nancut]
                    elif pipeline == 'eleanor':
                        raw_lc, corr_lc, pca_lc = eleanor_lc_download(
                            target_ID,
                            sector,
                            from_file=False,
                            save_path=save_path,
                            plot_pca=False)
                        lc_30min = pca_lc
                    elif pipeline == 'from_file':
                        lcf = lightkurve.open(
                            'tess2019140104343-s0012-0000000212461524-0144-s_lc.fits'
                        )
                        lc_30min = lcf.PDCSAP_FLUX
                    elif pipeline == 'from_pickle':
                        with open('Original_time.pkl', 'rb') as f:
                            original_time = pickle.load(f)
                        with open('Original_flux.pkl', 'rb') as f:
                            original_flux = pickle.load(f)
                        lc_30min = lightkurve.lightcurve.TessLightCurve(
                            time=original_time, flux=original_flux)
                    elif pipeline == 'raw':
                        lc_30min = raw_FFI_lc_download(target_ID,
                                                       sector,
                                                       plot_tpf=False,
                                                       plot_lc=True,
                                                       save_path=save_path,
                                                       from_file=False)
                        pipeline = "raw"
                    else:
                        print('Invalid pipeline')

                except Exception:
                    print('Lightcurve for {} not available'.format(target_ID))

            ################### Clean TESS lc pointing systematics ########################
            if not multi_sector:
                clean_time, clean_flux, clean_flux_err = clean_tess_lc(
                    lc_30min.time, lc_30min.flux, lc_30min.flux_err, target_ID,
                    sector, save_path)
                lc_30min.time = clean_time
                lc_30min.flux = clean_flux
                lc_30min.flux_err = clean_flux_err

            ######################### Find rotation period ################################
            normalized_flux = np.array(lc_30min.flux) / np.median(
                lc_30min.flux)

            # From Lomb-Scargle
            freq = np.arange(0.04, 4.1, 0.00001)
            power = LombScargle(lc_30min.time, normalized_flux).power(freq)
            ls_fig = plt.figure()
            plt.plot(freq, power, c='k', linewidth=1)
            plt.xlabel('Frequency')
            plt.ylabel('Power')
            plt.title(
                '{} LombScargle Periodogram for original lc'.format(target_ID))
            #ls_plot.show(block=True)
            #        ls_fig.savefig(save_path + '{} - Lomb-Scargle Periodogram for original lc.png'.format(target_ID))
            plt.close(ls_fig)
            i = np.argmax(power)
            freq_rot = freq[i]
            p_rot = 1 / freq_rot
            print('Rotation Period = {:.3f}d'.format(p_rot))

            # From BLS
            durations = np.linspace(0.05, 1, 22) * u.day
            model = BoxLeastSquares(lc_30min.time * u.day, normalized_flux)
            results = model.autopower(durations, frequency_factor=1.0)
            rot_index = np.argmax(results.power)
            rot_period = results.period[rot_index]
            print("Rotation Period from BLS of original = {}d".format(
                rot_period))

            ########################### batman stuff ######################################
            if injected_planet:
                params = batman.TransitParams(
                )  #object to store transit parameters
                params.t0 = -10.0  #time of inferior conjunction
                params.per = 8.0
                params.rp = 0.1
                table_data = Table.read("BANYAN_XI-III_members_with_TIC.csv",
                                        format='ascii.csv')
                i = list(table_data['main_id']).index(target_ID)
                m_star = table_data['Stellar Mass'][i] * m_Sun
                r_star = table_data['Stellar Radius'][i] * r_Sun * 1000
                params.a = (((G * m_star * (params.per * 86400.)**2) /
                             (4. * (np.pi**2)))**(1. / 3)) / r_star
                if np.isnan(params.a):
                    params.a = 17.  # semi-major axis (in units of stellar radii)
                params.inc = 90.
                params.ecc = 0.
                params.w = 90.  #longitude of periastron (in degrees)
                params.limb_dark = "nonlinear"  #limb darkening model
                params.u = [0.5, 0.1, 0.1, -0.1
                            ]  #limb darkening coefficients [u1, u2, u3, u4]

                if injected_planet == 'user_defined':
                    # Build planet from user specified parameters
                    params.per = injected_per  #orbital period (days)
                    params.rp = injected_rp  #planet radius (in units of stellar radii)
                    params.a = (((G * m_star * (params.per * 86400.)**2) /
                                 (4. * (np.pi**2)))**(1. / 3)) / r_star
                    if np.isnan(params.a):
                        params.a = 17  # fall back if the recalculated a is nan
                    params.inc = 90.  #orbital inclination (in degrees)
                    params.ecc = 0.  #eccentricity
                else:
                    raise NameError('Invalid input for injected planet')

                # Define times at which to calculate the model light curve,
                # centred on the mid-point of the observations
                index = int(len(lc_30min.time) // 2)
                mid_point = lc_30min.time[index]
                t = lc_30min.time - lc_30min.time[index]
                m = batman.TransitModel(params, t)
                t += lc_30min.time[index]
                batman_flux = m.light_curve(params)
                batman_model_fig = plt.figure()
                plt.scatter(lc_30min.time, batman_flux, s=2, c='k')
                plt.xlabel("Time - 2457000 (BTJD days)")
                plt.ylabel("Relative flux")
                plt.title("batman model transit for {}R ratio".format(
                    params.rp))
                #batman_model_fig.savefig(save_path + "batman model transit for {}d {}R planet.png".format(params.per,params.rp))
                #plt.close(batman_model_fig)
                plt.show()

            ################################# Combining ###################################
            if injected_planet:
                combined_flux = np.array(lc_30min.flux) / np.median(
                    lc_30min.flux) + batman_flux - 1

                injected_transit_fig = plt.figure()
                plt.scatter(lc_30min.time, combined_flux, s=2, c='k')
                plt.xlabel("Time - 2457000 (BTJD days)")
                plt.ylabel("Relative flux")
                plt.title(
                    "{} with injected transits for a {}R {}d planet to star ratio."
                    .format(target_ID, params.rp, params.per))
                ax = plt.gca()
                for n in range(int(-1 * 8 / params.per),
                               int(2 * 8 / params.per + 2)):
                    ax.axvline(params.t0 + n * params.per + mid_point,
                               ymin=0.1,
                               ymax=0.2,
                               lw=1,
                               c='r')
                ax.axvline(params.t0 + lc_30min.time[index],
                           ymin=0.1,
                           ymax=0.2,
                           lw=1,
                           c='r')
                ax.axvline(params.t0 + params.per + lc_30min.time[index],
                           ymin=0.1,
                           ymax=0.2,
                           lw=1,
                           c='r')
                ax.axvline(params.t0 + 2 * params.per + lc_30min.time[index],
                           ymin=0.1,
                           ymax=0.2,
                           lw=1,
                           c='r')
                #            injected_transit_fig.savefig(save_path + "{} - Injected transits fig - Period {} - {}R transit.png".format(target_ID, params.per, params.rp))
                #            plt.close(injected_transit_fig)
                plt.show()

        ############################## Removing peaks #################################
            if not injected_planet:
                combined_flux = np.array(lc_30min.flux) / np.median(
                    lc_30min.flux)
#            combined_flux = lc_30min.flux
            if use_peak_cut:
                peaks, peak_info = find_peaks(combined_flux,
                                              prominence=0.001,
                                              width=15)
                # prominence must be positive; the flux is negated to find troughs
                troughs, trough_info = find_peaks(-combined_flux,
                                                  prominence=0.001,
                                                  width=15)
                flux_peaks = combined_flux[peaks]
                flux_troughs = combined_flux[troughs]
                amplitude_peaks = ((flux_peaks[0] - 1) +
                                   (1 - flux_troughs[0])) / 2
                print("Absolute amplitude of main variability = {}".format(
                    amplitude_peaks))
                peak_location_fig = plt.figure()
                plt.scatter(lc_30min.time, combined_flux, s=2, c='k')
                plt.plot(lc_30min.time[peaks], combined_flux[peaks], "x")
                plt.plot(lc_30min.time[troughs],
                         combined_flux[troughs],
                         "x",
                         c='r')
                #peak_location_fig.savefig(save_path + "{} - Peak location fig.png".format(target_ID))
                peak_location_fig.show()
                #                plt.close(peak_location_fig)

                near_peak_or_trough = [False] * len(combined_flux)

                for i in peaks:
                    for j in range(len(lc_30min.time)):
                        if abs(lc_30min.time[j] - lc_30min.time[i]) < 0.1:
                            near_peak_or_trough[j] = True

                for i in troughs:
                    for j in range(len(lc_30min.time)):
                        if abs(lc_30min.time[j] - lc_30min.time[i]) < 0.1:
                            near_peak_or_trough[j] = True

                near_peak_or_trough = np.array(near_peak_or_trough)

                t_cut = lc_30min.time[~near_peak_or_trough]
                flux_cut = combined_flux[~near_peak_or_trough]
                flux_err_cut = lc_30min.flux_err[~near_peak_or_trough]

                # Plot new cut version
                peak_cut_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, c='k', s=2)
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel("Relative flux")
                plt.title(
                    '{} lc after removing peaks/troughs'.format(target_ID))
                ax = plt.gca()
                #peak_cut_fig.savefig(save_path + "{} - Peak cut fig.png".format(target_ID))
                peak_cut_fig.show()
#                plt.close(peak_cut_fig)
            else:
                t_cut = lc_30min.time
                flux_cut = combined_flux
                flux_err_cut = lc_30min.flux_err
                print('Flux cut skipped')

        ############################## Apply transit mask #########################

            if transit_mask:
                period = 8.138
                epoch = 1332.31
                duration = 0.15
                phase = np.mod(t_cut - epoch - period / 2, period) / period

                near_transit = [False] * len(flux_cut)

                for i in range(len(t_cut)):
                    if abs(phase[i] - 0.5) < duration / period:
                        near_transit[i] = True

                near_transit = np.array(near_transit)

                t_masked = t_cut[~near_transit]
                flux_masked = flux_cut[~near_transit]
                flux_err_masked = flux_err_cut[~near_transit]
                t_new = t_cut[near_transit]

                f = interpolate.interp1d(t_masked,
                                         flux_masked,
                                         kind='quadratic')

                flux_new = f(t_new)
                interpolated_fig = plt.figure()
                #                plt.scatter(t_masked, flux_masked, s = 2, c = 'k')
                plt.scatter(t_cut, flux_cut, s=2, c='k')
                plt.scatter(t_new, flux_new, s=2, c='r')
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                #                interpolated_fig.savefig(save_path + "{} - Interpolated over transit mask fig.png".format(target_ID))

                t_transit_mask = np.concatenate((t_masked, t_new), axis=None)
                flux_transit_mask = np.concatenate((flux_masked, flux_new),
                                                   axis=None)

                sorted_order = np.argsort(t_transit_mask)
                t_transit_mask = t_transit_mask[sorted_order]
                flux_transit_mask = flux_transit_mask[sorted_order]

        ############################## LOWESS detrending ##############################

        # Full lc
            if detrending == 'lowess_full':
                full_lowess_flux = np.array([])
                if transit_mask:
                    lowess = sm.nonparametric.lowess(flux_transit_mask,
                                                     t_transit_mask,
                                                     frac=0.03)
                else:
                    lowess = sm.nonparametric.lowess(flux_cut,
                                                     t_cut,
                                                     frac=0.03)

                overplotted_lowess_full_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, c='k', s=2)
                plt.plot(lowess[:, 0], lowess[:, 1])
                plt.title(
                    '{} lc with overplotted lowess full lc detrending'.format(
                        target_ID))
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                #overplotted_lowess_full_fig.savefig(save_path + "{} lc with overplotted LOWESS full lc detrending.png".format(target_ID))
                plt.show()
                #                plt.close(overplotted_lowess_full_fig)

                residual_flux_lowess = flux_cut / lowess[:, 1]
                full_lowess_flux = np.concatenate(
                    (full_lowess_flux, lowess[:, 1]))

                lowess_full_residuals_fig = plt.figure()
                plt.scatter(t_cut, residual_flux_lowess, c='k', s=2)
                plt.title(
                    '{} lc after lowess full lc detrending'.format(target_ID))
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                ax = plt.gca()
                #ax.axvline(params.t0+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #ax.axvline(params.t0+params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #ax.axvline(params.t0+2*params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #ax.axvline(params.t0-params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #lowess_full_residuals_fig.savefig(save_path + "{} lc after LOWESS full lc detrending.png".format(target_ID))
                plt.show()
                #plt.close(lowess_full_residuals_fig)

            # Partial lc
            if detrending == 'lowess_partial':
                time_diff = np.diff(t_cut)
                residual_flux_lowess = np.array([])
                time_from_lowess_detrend = np.array([])
                full_lowess_flux = np.array([])

                overplotted_detrending_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, c='k', s=2)
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel("Normalized flux")
                plt.title(
                    '{} lc with overplotted detrending'.format(target_ID))

                low_bound = 0
                if pipeline == '2min':
                    n_bins = 450
                for i in range(len(t_cut) - 1):
                    if time_diff[i] > 0.1:
                        high_bound = i + 1

                        t_section = t_cut[low_bound:high_bound]
                        flux_section = flux_cut[low_bound:high_bound]
                        if len(t_section) >= n_bins:
                            if transit_mask:
                                lowess = sm.nonparametric.lowess(
                                    flux_transit_mask[low_bound:high_bound],
                                    t_transit_mask[low_bound:high_bound],
                                    frac=n_bins / len(t_section))
                            else:
                                lowess = sm.nonparametric.lowess(
                                    flux_section,
                                    t_section,
                                    frac=n_bins / len(t_section))
                            lowess_flux_section = lowess[:, 1]
                            plt.plot(t_section, lowess_flux_section, '-')

                            residuals_section = flux_section / lowess_flux_section
                            residual_flux_lowess = np.concatenate(
                                (residual_flux_lowess, residuals_section))
                            time_from_lowess_detrend = np.concatenate(
                                (time_from_lowess_detrend, t_section))
                            full_lowess_flux = np.concatenate(
                                (full_lowess_flux, lowess_flux_section))
                            low_bound = high_bound
                        else:
                            print('LOWESS skipped one gap at {}'.format(
                                t_section[-1]))

                # Carries out same process for final line (up to end of data)
                high_bound = len(t_cut)
                t_section = t_cut[low_bound:high_bound]
                flux_section = flux_cut[low_bound:high_bound]
                if transit_mask:
                    lowess = sm.nonparametric.lowess(
                        flux_transit_mask[low_bound:high_bound],
                        t_transit_mask[low_bound:high_bound],
                        frac=n_bins / len(t_section))
                else:
                    lowess = sm.nonparametric.lowess(flux_section,
                                                     t_section,
                                                     frac=n_bins /
                                                     len(t_section))
                lowess_flux_section = lowess[:, 1]
                plt.plot(t_section, lowess_flux_section, '-')
                #                if injected_planet != False:
                #                    overplotted_detrending_fig.savefig(save_path + "{} - Overplotted lowess detrending - partial lc - {}R {}d injected planet.png".format(target_ID, params.rp, params.per))
                #                else:
                #                    overplotted_detrending_fig.savefig(save_path + "{} - Overplotted lowess detrending - partial lc".format(target_ID))
                overplotted_detrending_fig.show()
                #                plt.close(overplotted_detrending_fig)

                residuals_section = flux_section / lowess_flux_section
                residual_flux_lowess = np.concatenate(
                    (residual_flux_lowess, residuals_section))
                time_from_lowess_detrend = np.concatenate(
                    (time_from_lowess_detrend, t_section))
                full_lowess_flux = np.concatenate(
                    (full_lowess_flux, lowess_flux_section))

                residuals_after_lowess_fig = plt.figure()
                plt.scatter(time_from_lowess_detrend,
                            residual_flux_lowess,
                            c='k',
                            s=2)
                plt.title('{} lc after LOWESS partial lc detrending'.format(
                    target_ID))
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                #ax = plt.gca()
                #ax.axvline(params.t0+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #ax.axvline(params.t0+params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #ax.axvline(params.t0+2*params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #ax.axvline(params.t0-params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r')
                #                if injected_planet != False:
                #                    residuals_after_lowess_fig.savefig(save_path + "{} lc after LOWESS partial lc detrending - {}R {}d injected planet.png".format(target_ID, params.rp, params.per))
                #                else:
                #                    residuals_after_lowess_fig.savefig(save_path + "{} lc after LOWESS partial lc detrending".format(target_ID))
                residuals_after_lowess_fig.show()
#                plt.close(residuals_after_lowess_fig)

#    ###################### Periodogram Construction ##################

# Create periodogram
            durations = np.linspace(0.05, 1, 22) * u.day
            if detrending in ('lowess_full', 'lowess_partial'):
                BLS_flux = residual_flux_lowess
            else:
                BLS_flux = combined_flux
            model = BoxLeastSquares(t_cut * u.day, BLS_flux)
            results = model.autopower(durations,
                                      minimum_n_transit=3,
                                      frequency_factor=1.0)

            # Find the period and epoch of the peak
            index = np.argmax(results.power)
            period = results.period[index]
            #print(results.period)
            t0 = results.transit_time[index]
            duration = results.duration[index]
            transit_info = model.compute_stats(period, duration, t0)
            print(transit_info)

            epoch = transit_info['transit_times'][0]

            periodogram_fig, ax = plt.subplots(1, 1)

            # Highlight the harmonics of the peak period
            ax.axvline(period.value, alpha=0.4, lw=3)
            for n in range(2, 10):
                ax.axvline(n * period.value,
                           alpha=0.4,
                           lw=1,
                           linestyle="dashed")
                ax.axvline(period.value / n,
                           alpha=0.4,
                           lw=1,
                           linestyle="dashed")

            # Plot and save the periodogram
            ax.plot(results.period, results.power, "k", lw=0.5)
            ax.set_xlim(results.period.min().value, results.period.max().value)
            ax.set_xlabel("period [days]")
            ax.set_ylabel("log likelihood")
            #            ax.set_title('{} - BLS Periodogram after {} detrending - {}R {}d injected planet'.format(target_ID, detrending, params.rp, params.per))
            ax.set_title('{} - BLS Periodogram after {} detrending'.format(
                target_ID, detrending))
            #            periodogram_fig.savefig(save_path + '{} - BLS Periodogram after lowess partial detrending - {}R {}d injected planet.png'.format(target_ID, params.rp, params.per))
            #            periodogram_fig.savefig(save_path + '{} - BLS Periodogram after lowess partial detrending.png'.format(target_ID))
            #            plt.close(periodogram_fig)
            periodogram_fig.show()

            ################################## Phase folding ##########################
            # Find indices of 2nd and 3rd peaks of periodogram
            all_peaks = scipy.signal.find_peaks(results.power,
                                                width=5,
                                                distance=10)[0]
            all_peak_powers = results.power[all_peaks]
            sorted_power_indices = np.argsort(all_peak_powers)
            sorted_peak_powers = all_peak_powers[sorted_power_indices]
            #        sorted_peak_periods = results.period[sorted_power_indices]

            # Find info for 2nd largest peak in periodogram
            index_peak_2 = np.where(results.power == sorted_peak_powers[-2])[0]
            period_2 = results.period[index_peak_2[0]]
            t0_2 = results.transit_time[index_peak_2[0]]

            # Find info for 3rd largest peak in periodogram
            index_peak_3 = np.where(results.power == sorted_peak_powers[-3])[0]
            period_3 = results.period[index_peak_3[0]]
            t0_3 = results.transit_time[index_peak_3[0]]

            phase_fold_plot(
                t_cut, BLS_flux, period.value, t0.value, target_ID, save_path,
                '{} {} residuals folded by Periodogram Max ({:.3f} days)'.
                format(target_ID, detrending, period.value))
            period_to_test = p_rot
            t0_to_test = 1332
            period_to_test2 = period_2.value
            t0_to_test2 = t0_2.value
            period_to_test3 = period_3.value
            t0_to_test3 = t0_3.value
            phase_fold_plot(
                t_cut, BLS_flux, period_to_test, t0_to_test, target_ID,
                save_path,
                '{} folded by rotation period ({} days)'.format(
                    target_ID, period_to_test))
            phase_fold_plot(
                t_cut, BLS_flux, period_to_test2, t0_to_test2, target_ID,
                save_path,
                '{} detrended lc folded by 2nd largest peak ({:0.4} days)'.
                format(target_ID, period_to_test2))
            phase_fold_plot(
                t_cut, BLS_flux, period_to_test3, t0_to_test3, target_ID,
                save_path,
                '{} detrended lc folded by 3rd largest peak ({:0.4} days)'.
                format(target_ID, period_to_test3))
            #variability_table.add_row([target_ID,p_rot,rot_period,amplitude_peaks])

            ############################# Eyeballing ##############################
            """
            Generate 2 x 2 eyeballing plot
            """
            eye_balling_fig, axs = plt.subplots(2,
                                                2,
                                                figsize=(16, 10),
                                                dpi=120)

            # Original DIA with injected transits setup
            axs[0, 0].scatter(lc_30min.time, combined_flux, s=1, c='k')
            axs[0, 0].set_ylabel('Normalized Flux')
            axs[0, 0].set_xlabel('Time')
            axs[0, 0].set_title('{} - {} light curve'.format(target_ID, 'DIA'))
            #for n in range(int(-1*8/params.per),int(2*8/params.per+2)):
            #    axs[0,0].axvline(params.t0+n*params.per+mid_point, ymin = 0.1, ymax = 0.2, lw=1, c = 'r')

            # Detrended figure setup
            axs[0, 1].scatter(t_cut,
                              BLS_flux,
                              c='k',
                              s=1,
                              label='{} residuals after {} detrending'.format(
                                  target_ID, detrending))
            #            axs[0,1].set_title('{} residuals after {} detrending - Sector {}'.format(target_ID, detrending, sector))
            axs[0, 1].set_title(
                '{} residuals after {} detrending - Sectors 14-18'.format(
                    target_ID, detrending))
            axs[0, 1].set_ylabel('Normalized Flux')
            axs[0, 1].set_xlabel('Time - 2457000 [BTJD days]')
            #            binned_time, binned_flux = bin(t_cut, BLS_flux, binsize=15, method='mean')
            #            axs[0,1].scatter(binned_time, binned_flux, c='r', s=4)
            #for n in range(int(-1*8/params.per),int(2*8/params.per+2)):
            #    axs[0,1].axvline(params.t0+n*params.per+mid_point, ymin = 0.1, ymax = 0.2, lw=1, c = 'r')

            # Periodogram setup
            axs[1, 0].plot(results.period, results.power, "k", lw=0.5)
            axs[1, 0].set_xlim(results.period.min().value,
                               results.period.max().value)
            axs[1, 0].set_xlabel("period [days]")
            axs[1, 0].set_ylabel("log likelihood")
            axs[1, 0].set_title(
                '{} - BLS Periodogram of residuals'.format(target_ID))
            axs[1, 0].axvline(period.value, alpha=0.4, lw=3)
            for n in range(2, 10):
                axs[1, 0].axvline(n * period.value,
                                  alpha=0.4,
                                  lw=1,
                                  linestyle="dashed")
                axs[1, 0].axvline(period.value / n,
                                  alpha=0.4,
                                  lw=1,
                                  linestyle="dashed")

            # Folded or zoomed plot setup
            epoch = t0.value
            period = period.value
            phase = np.mod(t_cut - epoch - period / 2, period) / period
            axs[1, 1].scatter(phase, BLS_flux, c='k', s=1)
            axs[1, 1].set_title('{} Lightcurve folded by {:0.4} days'.format(
                target_ID, period))
            axs[1, 1].set_xlabel('Phase')
            axs[1, 1].set_ylabel('Normalized Flux')
            #            binned_phase, binned_lc = bin(phase, BLS_flux, binsize=15, method='mean')
            #            plt.scatter(binned_phase, binned_lc, c='r', s=4)

            eye_balling_fig.tight_layout()
            #            eye_balling_fig.savefig(save_path + '{} - Full eyeballing fig.pdf'.format(target_ID))
            #            plt.close(eye_balling_fig)
            plt.show()

            ########################### ADDING INFO ROWS ######################


#            sensitivity_table.add_row([target_ID,sector,pipeline,params.per,params.a,params.rp,period,np.max(results.power),period_2.value,period_3.value])

        except RuntimeError:
            print('No DiffImage lc exists for {}'.format(target_ID))
        except Exception:
            print('Some other error for {}'.format(target_ID))
    return t_cut, BLS_flux, phase, epoch, period
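A usage sketch for the function above (the target name and save path are illustrative; passing injected_planet=False skips the batman injection step):

t_cut, BLS_flux, phase, epoch, period = ffi_lowess_detrend(
    save_path='./plots/',
    sector=1,
    target_ID_list=['HIP 1113'],
    pipeline='2min',
    detrending='lowess_partial',
    injected_planet=False)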
Example 6
observations = Observations.query_criteria(
    target_name=f"{tic}",
    radius=0.0001,
    project=["TESS"],
    obs_collection=["TESS"],
    provenance_name="SPOC",
    dataproduct_type="timeseries",
)
if not len(observations):
    raise RuntimeError("no 2-minute cadence data")
products = Observations.get_product_list(observations)
products = products[products["productSubGroupDescription"] == "LC"]
files = Observations.download_products(
    products, download_dir=tess_world.get_lightkurve_directory())
lcfs = lk.LightCurveCollection(
    [lk.open(file).PDCSAP_FLUX for file in files["Local Path"]])
lc = lcfs.stitch().remove_nans()

# Extract the data in the correct format
x = np.ascontiguousarray(lc.time, dtype=np.float64)
y = np.ascontiguousarray(1e3 * (lc.flux - 1), dtype=np.float64)
yerr = np.ascontiguousarray(1e3 * lc.flux_err, dtype=np.float64)

# Plot the light curve
plt.plot(x, y, "k", linewidth=0.5)
plt.xlabel("time [days]")
plt.ylabel("relative flux [ppt]")
plt.title(f"TOI {toi_num}; TIC {tic}", fontsize=14)

# Label the transits on the plot
for n in range(num_toi):
    ...  # (loop body elided in the original excerpt)
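The excerpt ends before the body of this loop. A hypothetical sketch of the labeling step, marking each TOI's transit times with vertical lines; the per-TOI ephemeris arrays periods and t0s are assumptions, not from the source:

for n in range(num_toi):
    period, t0 = periods[n], t0s[n]  # hypothetical per-TOI period and epoch
    n_transits = int((x.max() - t0) // period) + 1
    for tt in t0 + period * np.arange(n_transits):
        plt.axvline(tt, color="C{0}".format(n), alpha=0.3, lw=1)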
Example 7
def xmkpy3_tess_tpf_overlay_v6():
    """
Unit test
    """
    import argparse
    import ast
    import lightkurve as lk
    import mkpy3
    #
    #
    # ===== argparse:BEGIN ====================================================
    #
    parser = argparse.ArgumentParser()
    #
    parser.add_argument(
        '--tpf_filename', action="store", type=str, default=None,
        help="Filename of the Target Pixel File (TPF) [default: None]")
    parser.add_argument(
        '--frame', action="store", type=int, default=0,
        help='Frame number (integer) [default: 0]')
    parser.add_argument(
        '--survey', action="store", type=str, default='2MASS-J',
        help="Survey name (str) [default: '2MASS-J']")
    parser.add_argument(
        '--rotationAngle_deg', action="store",
        type=ast.literal_eval, default=None,
        help="Rotation angle in degrees (string) [default: None] "
        "[examples: None or 12.345 (float) or 'tpf'")
    parser.add_argument(
        '--width_height_arcmin', action="store", type=float, default=6.0,
        help='Width and height size in arcmin (float) [default: 6.0]')
    parser.add_argument(
        '--shrink', type=float, default=1.0,
        help='Survey search radius shrink factor (float) [default: 1.0]')
    parser.add_argument(
        '--show_plot', type=mkpy3.mkpy3_util_str2bool, default=True,
        help='If True, show the plot [default=True]')
    parser.add_argument(
        '--plot_file', action="store", type=str, default='mkpy3_plot.png',
        help='Filename of the output plot [default: "mkpy3_plot.png"]')
    parser.add_argument(
        '--overwrite', type=mkpy3.mkpy3_util_str2bool, default=False,
        help='If True, overwrite ("clobber") an existing output file '
        '[default: False]')
    parser.add_argument(
        '--figsize_str', action="store",
        type=ast.literal_eval, default="[9,9]",
        help="string of a 2-item list of figure width and height [Matplotlib] "
        "(str) [default: '[9,9]'")
    parser.add_argument(
        '--title', action="store", type=str, default=None,
        help='Title of the finder chart (str) [default: None]')
    parser.add_argument(
        '--percentile', action="store", type=float, default=99.5,
        help='Percentile [percentage of pixels to keep: 0.0 to 100.0] '
        '(float) [default: 99.5]')
    parser.add_argument(
        '--cmap', action="store", type=str, default=None,
        help="Colormap name [Matplotlib] (str) [default: 'gray_r']")
    parser.add_argument(
        '--colors_str', action="store",
        type=ast.literal_eval, default="[None,'dodgerblue','red']",
        help="string of a 3-item list of overlay color names [Matplotlib] "
        "(str) [default: \"['None','dodgerblue','red']\"")
    parser.add_argument(
        '--lws_str', action="store",
        type=ast.literal_eval, default="[0,3,4]",
        help="string of a 3-item list of overlay line widths [Matplotlib] "
        "(str) [default: \"[0,3,4]\"")
    parser.add_argument(
        '--zorders_str', action="store",
        type=ast.literal_eval, default="[0,2,4]",
        help="string of a 3-item list of overlay zorder values [Matplotlib] "
        "(str) [default: \"[0,2,4]\"")
    kwargs_ = "{'edgecolor':'yellow', 's':600, 'facecolor':'None', 'lw':3, "\
        "'zorder':10}"
    parser.add_argument(
        '--marker_kwargs_str', action="store",
        type=ast.literal_eval, default=kwargs_,
        help="marker kwargs (string of a dictonary) for ax.scatter() "
        "[Matplotlib] " + '(str) [default: "' + kwargs_ + '"')
    kwargs_ = "{'edgecolor':'cyan', 's':150, 'facecolor':'None', 'lw':3, "\
        "'zorder':20}"
    parser.add_argument(
        '--print_gaia_dr2', type=mkpy3.mkpy3_util_str2bool, default=True,
        help='If True, print the GAIA DR2 catalog results [default=True]')
    parser.add_argument(
        '--gaia_dr2_kwargs_str', action="store",
        type=ast.literal_eval, default=kwargs_,
        help="GAIA DR2 marker kwargs (string of a dictonary) for ax.scatter() "
        "[Matplotlib] "'(str) [default: "' + kwargs_ + '"')
    kwargs_ = "{'s':900, 'color':'lawngreen', 'marker':'x', 'lw':5, "\
        "'zorder':30}"
    parser.add_argument(
        '--print_vsx', type=mkpy3.mkpy3_util_str2bool, default=True,
        help='If True, print the VSX catalog results [default=True]')
    parser.add_argument(
        '--vsx_kwargs_str', action="store",
        type=ast.literal_eval, default=kwargs_,
        help="VSX marker kwargs (string of a dictonary) for ax.scatter() "
        "[Matplotlib] (str) [default: '" + kwargs_ + "'")
    parser.add_argument(
        '--sexagesimal', type=mkpy3.mkpy3_util_str2bool, default=False,
        help='Print catalog positions as sexagesimal [hms dms] if True (bool) '
        '[default=False]')
    parser.add_argument(
        '--verbose', type=mkpy3.mkpy3_util_str2bool, default=False,
        help='Print extra information if True (bool) [default=False]')
    #
    args = parser.parse_args()
    #
    # ===== argparse:END ======================================================
    #

    tpf_filename = args.tpf_filename
    frame = args.frame
    survey = args.survey
    rotationAngle_deg = args.rotationAngle_deg
    width_height_arcmin = args.width_height_arcmin
    shrink = args.shrink
    show_plot = args.show_plot
    plot_file = args.plot_file
    overwrite = args.overwrite
    figsize_str = str(args.figsize_str)
    title = args.title
    percentile = args.percentile
    cmap = args.cmap
    colors_str = str(args.colors_str)
    lws_str = str(args.lws_str)
    zorders_str = str(args.zorders_str)
    marker_kwargs_str = str(args.marker_kwargs_str)
    print_gaia_dr2 = args.print_gaia_dr2
    gaia_dr2_kwargs_str = str(args.gaia_dr2_kwargs_str)
    print_vsx = args.print_vsx
    vsx_kwargs_str = str(args.vsx_kwargs_str)
    sexagesimal = args.sexagesimal
    verbose = args.verbose

    if tpf_filename is not None:
        mkpy3.mkpy3_util_check_file_exists(tpf_filename, True)
        tpf = lk.open(tpf_filename, quality_bitmask=0)
    else:
        tpf = None
    # pass:if

    shrink = 0.4  # note: overrides the command-line --shrink value for this unit test
    ax = mkpy3_tess_tpf_overlay_v6(
      tpf=tpf,
      frame=frame,
      survey=survey,
      rotationAngle_deg=rotationAngle_deg,
      width_height_arcmin=width_height_arcmin,
      shrink=shrink,
      show_plot=show_plot,
      plot_file=plot_file,
      overwrite=overwrite,
      figsize_str=figsize_str,
      title=title,
      percentile=percentile,
      cmap=cmap,
      colors_str=colors_str,
      lws_str=lws_str,
      zorders_str=zorders_str,
      marker_kwargs_str=marker_kwargs_str,
      print_gaia_dr2=print_gaia_dr2,
      gaia_dr2_kwargs_str=gaia_dr2_kwargs_str,
      print_vsx=print_vsx,
      vsx_kwargs_str=vsx_kwargs_str,
      sexagesimal=sexagesimal,
      verbose=verbose
    )
Example 8
    title = args.title
    percentile = args.percentile
    cmap = args.cmap
    colors_str = str(args.colors_str)
    lws_str = str(args.lws_str)
    zorders_str = str(args.zorders_str)
    marker_kwargs_str = str(args.marker_kwargs_str)
    gaia_dr2_kwargs_str = str(args.gaia_dr2_kwargs_str)
    vsx_kwargs_str = str(args.vsx_kwargs_str)
    sexagesimal = args.sexagesimal
    verbose = args.verbose

    print()
    if (tpf_filename is not None):
        check_file_exists(tpf_filename, True)
        tpf = lk.open(tpf_filename)
    else:
        print('No TargetPixelFile (TPF) filename given.\n')
        tpf = lk.search_targetpixelfile(target='kepler-138b',
                                        mission='kepler',
                                        quarter=10).download(quality_bitmask=0)
        # ^--- exoplanet Kepler-138b is "KIC 7603200"
        print()
        print('Using default TPF [Kepler Q10 observations of exoplanet Kepler'
              '-138b (KIC 7603200)].')
        print()
        shrink *= 0.8
    # pass:if
    try:
        print('TPF filename:', ntpath.basename(tpf.path))
        print('TPF dirname: ', os.path.dirname(tpf.path))
Example 9
def xmkpy3_k2_tpf_overlay_v2():
    '''
    Unit test
    '''
    import os
    import sys
    import ntpath
    import argparse
    import ast
    import lightkurve as lk
    #
    import mkpy3
    #
    # ===== argparse:BEGIN ====================================================
    #
    parser = argparse.ArgumentParser()
    #
    parser.add_argument(
        '--tpf_filename', action="store", type=str, default=None,
        help="Filename of the Target Pixel File (TPF) [default: None]")
    parser.add_argument(
        '--frame', action="store", type=int, default=0,
        help='Frame number (integer) [default: 0]')
    parser.add_argument(
        '--survey', action="store", type=str, default='2MASS-J',
        help="Survey name (str) [default: '2MASS-J']")
    parser.add_argument(
        '--width_height_arcmin', action="store", type=float, default=2.0,
        help='Width and height size in arcmin (float) [default: 2.0]')
    parser.add_argument(
        '--shrink', type=float, default=1.0,
        help='Survey search radius shrink factor (float) [default: 1.0]')
    parser.add_argument(
        '--show_plot', type=mkpy3.mkpy3_util_str2bool, default=True,
        help='If True, show the plot [default=True]')
    parser.add_argument(
        '--plot_file', action="store", type=str, default='mkpy3_plot.png',
        help='Filename of the output plot [default: "mkpy3_plot.png"]')
    parser.add_argument(
        '--overwrite', type=mkpy3.mkpy3_util_str2bool, default=False,
        help='If True, overwrite ("clobber") an existing output file '
        '[default: False]')
    parser.add_argument(
        '--figsize_str', action="store",
        type=ast.literal_eval, default="[9,9]",
        help="string of a 2-item list of figure width and height [Matplotlib] "
        "(str) [default: '[9,9]'")
    parser.add_argument(
        '--title', action="store", type=str, default=None,
        help='Title of the finder chart (str) [default: None]')
    parser.add_argument(
        '--percentile', action="store", type=float, default=99.0,
        help='Percentile [percentage of pixels to keep: 0.0 to 100.0] '
        '(float) [default: 99.0]')
    parser.add_argument(
        '--cmap', action="store", type=str, default=None,
        help="Colormap name [Matplotlib] (str) [default: 'gray_r']")
    parser.add_argument(
        '--colors_str', action="store",
        type=ast.literal_eval, default="[None,'cornflowerblue','red']",
        help="string of a 3-item list of overlay color names [Matplotlib] "
        "(str) [default: \"['None','cornflowerblue','red']\"")
    parser.add_argument(
        '--lws_str', action="store",
        type=ast.literal_eval, default="[0,3,4]",
        help="string of a 3-item list of overlay line widths [Matplotlib] "
        "(str) [default: \"[0,3,4]\"")
    parser.add_argument(
        '--zorders_str', action="store",
        type=ast.literal_eval, default="[0,2,4]",
        help="string of a 3-item list of overlay zorder values [Matplotlib] "
        "(str) [default: \"[0,2,4]\"")
    kwargs_ = "{'edgecolor':'yellow', 's':600, 'facecolor':'None', 'lw':3, "\
        "'zorder':10}"
    parser.add_argument(
        '--marker_kwargs_str', action="store",
        type=ast.literal_eval, default=kwargs_,
        help="marker kwargs (string of a dictonary) for ax.scatter() "
        "[Matplotlib] " + '(str) [default: "' + kwargs_ + '"')
    kwargs_ = "{'edgecolor':'cyan', 's':300, 'facecolor':'None', 'lw':3, "\
        "'zorder':20}"
    parser.add_argument(
        '--gaia_dr2_kwargs_str', action="store",
        type=ast.literal_eval, default=kwargs_,
        help="GAIA DR2 marker kwargs (string of a dictonary) for ax.scatter() "
        "[Matplotlib] "'(str) [default: "' + kwargs_ + '"')
    kwargs_ = "{'s':900, 'color':'lawngreen', 'marker':'x', 'lw':5, "\
        "'zorder':30}"
    parser.add_argument(
        '--vsx_kwargs_str', action="store",
        type=ast.literal_eval, default=kwargs_,
        help="VSX marker kwargs (string of a dictonary) for ax.scatter() "
        "[Matplotlib] (str) [default: '" + kwargs_ + "'")
    parser.add_argument(
        '--sexagesimal', type=mkpy3.mkpy3_util_str2bool, default=False,
        help='Print catalog positions as sexagesimal [hms dms] if True (bool) '
        '[default=False]')
    parser.add_argument(
        '--verbose', type=mkpy3.mkpy3_util_str2bool, default=False,
        help='Print extra information if True (bool) [default=False]')
    #
    args = parser.parse_args()
    #
    # ===== argparse:END ======================================================
    #

    tpf_filename = args.tpf_filename
    frame = args.frame
    survey = args.survey
    width_height_arcmin = args.width_height_arcmin
    shrink = args.shrink
    show_plot = args.show_plot
    plot_file = args.plot_file
    overwrite = args.overwrite
    figsize_str = str(args.figsize_str)
    title = args.title
    percentile = args.percentile
    cmap = args.cmap
    colors_str = str(args.colors_str)
    lws_str = str(args.lws_str)
    zorders_str = str(args.zorders_str)
    marker_kwargs_str = str(args.marker_kwargs_str)
    gaia_dr2_kwargs_str = str(args.gaia_dr2_kwargs_str)
    vsx_kwargs_str = str(args.vsx_kwargs_str)
    sexagesimal = args.sexagesimal
    verbose = args.verbose

    print()
    if (tpf_filename is not None):
        mkpy3.mkpy3_util_check_file_exists(tpf_filename, True)
        tpf = lk.open(tpf_filename)
    else:
        print('No TargetPixelFile (TPF) filename given.\n')
        tpf = lk.search_targetpixelfile(
            target='k2-34b', mission='k2',
            campaign=18).download(quality_bitmask=0)
        # ^--- exoplanet K2-34b is "EPIC 212110888"
        print()
        print(
            'Using default TPF [K2 C18 observations of exoplanet K2-34b '
            '(EPIC 212110888)].')
        print()
        shrink *= 0.8
    # pass:if
    try:
        print('TPF filename:', ntpath.basename(tpf.path))
        print('TPF dirname: ', os.path.dirname(tpf.path))
        assert tpf.mission == 'K2'
        print()
    except Exception:
        print(tpf_filename, '=tpf_filename')
        print('^--- *** ERROR *** This file does not appear to be a K2 '
              'TargetPixelFile')
        print()
        print('Bye...\n', flush=True)
        sys.exit(1)
    # pass:try

    ax = mkpy3.mkpy3_tpf_overlay_v6(
      tpf=tpf,
      frame=frame,
      survey=survey,
      width_height_arcmin=width_height_arcmin,
      shrink=shrink,
      show_plot=show_plot,
      plot_file=plot_file,
      overwrite=overwrite,
      figsize_str=figsize_str,
      title=title,
      percentile=percentile,
      cmap=cmap,
      colors_str=colors_str,
      lws_str=lws_str,
      zorders_str=zorders_str,
      marker_kwargs_str=marker_kwargs_str,
      gaia_dr2_kwargs_str=gaia_dr2_kwargs_str,
      vsx_kwargs_str=vsx_kwargs_str,
      sexagesimal=sexagesimal,
      verbose=verbose
    )
Example 10
def generate_target(mag=12.,
                    roll=1.,
                    coords=None,
                    background_level=0.,
                    neighbor_magdiff=1.,
                    ncadences=1000,
                    apsize=7,
                    ID=205998445,
                    transit=False,
                    variable=False,
                    neighbor=False,
                    tpf_path=None,
                    no_sensitivity_variation=False,
                    signal=None,
                    **kwargs):
    """
    Parameters
    ----------
     `mag` :
         Magnitude of primary target PSF.
     `roll` :
         Coefficient on K2 motion vectors of target. roll=1 corresponds to current K2 motion.
     `coords` : tuple
         Coordinates of the PSF centroid.
     `background_level` :
         Constant background signal in each pixel. Defaults to 0.
     `neighbor_magdiff` :
         Difference between magnitude of target and neighbor. Only accessed if neighbor initialized as
         `True` or if AddNeighbor() function is called.
     `photnoise_conversion` :
         Conversion factor for photon noise, defaults to 0.000625 for consistency with benchmark.
     `ncadences` :
         Number of cadences in simulated light curve.
     `apsize` :
         Dimension of aperture on each side.

     Returns
     -------
     `Target`: :class:`Target` object
        A simulated CCD observation
    """

    aperture = np.ones((ncadences, apsize, apsize))

    # calculate PSF amplitude for given Kp Mag
    A = _calculate_PSF_amplitude(mag)

    if tpf_path is None:
        # read in K2 motion vectors for provided K2 target (EPIC ID #)
        try:
            tpf = lk.search_targetpixelfile(ID)[0].download()
        except OSError:
            raise ScopeError(
                'Unable to access internet. Please provide a path '
                '(str) to the desired file for motion using the '
                '`tpf_path` keyword.')
    else:
        tpf = lk.open(tpf_path)

    xpos = tpf.pos_corr1
    ypos = tpf.pos_corr2
    t = tpf.time

    # If a transit is included, create the model
    if transit:
        model = TransitModel(t)
        signal = model.create_starry_model(**kwargs)

    # throw out outliers
    for i in range(len(xpos)):
        if abs(xpos[i]) >= 50 or abs(ypos[i]) >= 50:
            xpos[i] = 0
            ypos[i] = 0
        if np.isnan(xpos[i]):
            xpos[i] = 0
        if np.isnan(ypos[i]):
            ypos[i] = 0
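    # (vectorized equivalent: bad = (np.abs(xpos) >= 50) | (np.abs(ypos) >= 50);
    #  xpos[bad] = 0; ypos[bad] = 0; then zero any remaining NaNs in each)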

    # crop to desired length and multiply by roll coefficient
    xpos = xpos[0:ncadences] * roll
    ypos = ypos[0:ncadences] * roll

    if no_sensitivity_variation:
        cx = [1., 0., 0.]
        cy = [1., 0., 0.]
        inter = np.ones((apsize, apsize))
    else:
        # create inter-pixel sensitivity variation matrix
        # random normal distribution centered at 0.975
        inter = np.zeros((apsize, apsize))
        for i in range(apsize):
            for j in range(apsize):
                inter[i][j] = (0.975 + 0.001 * np.random.randn())
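        # (equivalently: inter = 0.975 + 0.001 * np.random.randn(apsize, apsize))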

        # cx,cy: intra-pixel variation polynomial coefficients in x,y
        cx = [1.0, 0.0, -0.05]
        cy = [1.0, 0.0, -0.05]

    if coords is None:
        # x0,y0: center of PSF, half of aperture size plus random deviation
        x0 = (apsize / 2.0) + 0.2 * np.random.randn()
        y0 = (apsize / 2.0) + 0.2 * np.random.randn()
    else:
        x0, y0 = coords

    # sx,sy: standard deviation of Gaussian in x,y
    # rho: rotation angle between x and y dimensions of Gaussian
    sx = [0.5]
    sy = [0.5]
    rho = [0.0]

    psf_args = {
        'A': A,
        'x0': np.array([x0]),
        'y0': np.array([y0]),
        'sx': sx,
        'sy': sy,
        'rho': rho
    }

    ccd_args = {
        'cx': cx,
        'cy': cy,
        'apsize': apsize,
        'background_level': background_level,
        'inter': inter,
        # conversion factor for photon noise; 0.000625 for consistency
        # with the benchmark
        'photnoise_conversion': 0.000625
    }

    fpix, flux, ferr, target = calculate_pixel_values(ncadences=ncadences,
                                                      apsize=apsize,
                                                      psf_args=psf_args,
                                                      ccd_args=ccd_args,
                                                      xpos=xpos,
                                                      ypos=ypos,
                                                      signal=signal)

    t = t[:ncadences]

    return Target(fpix,
                  flux,
                  ferr,
                  target,
                  t,
                  mag=mag,
                  roll=roll,
                  neighbor_magdiff=neighbor_magdiff,
                  ncadences=ncadences,
                  apsize=apsize,
                  ccd_args=ccd_args,
                  psf_args=psf_args,
                  xpos=xpos,
                  ypos=ypos)
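
A minimal usage sketch (keyword values are illustrative; the local file path is hypothetical):

# Simulate a 12th-magnitude target with an injected transit, using motion
# vectors downloaded for the default EPIC 205998445 (requires network access
# and the transit model's default parameters).
target = generate_target(mag=12., ncadences=500, transit=True)

# Reuse a local TPF for the motion vectors instead of downloading one.
target = generate_target(mag=12., tpf_path='my_local_tpf.fits')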
Example n. 11
    objective = args.objective

    starname = args.name
    fname = '%s/%s_halo_lc_%s.fits' % (ddir_halo, starname, objective)
    f = fitsio.FITS(fname)
    hdr = fitsio.read_header(fname)

    # read in our halo work

    # load a lightkurve object with all the desired metadata

    if args.tpf_fname is not None:
        print('Loaded manually chosen TPF')
        tpf_fname = args.tpf_fname
        tpf = lightkurve.open('%s/%s' % (ddir_raw, tpf_fname))
        epic = args.epic
    else:
        all_stars = Table.read('../data/haloC%d.csv' % campaign_name,
                               format='ascii')
        star = all_stars[all_stars['Name'] == starname.replace('_', ' ')]
        epic = star['EPIC ID'].data.data[0]
        tpf_fname = '/ktwo%d-c%02d_lpd-targ.fits.gz' % (epic, campaign)

        tpf = lightkurve.open('%s/%s' % (ddir_raw, tpf_fname))
    lc = tpf.to_lightcurve('aperture')

    lc.pos_corr1 = tpf.pos_corr1
    lc.pos_corr2 = tpf.pos_corr2
    lc.primary_header = tpf.hdu[0].header
    lc.data_header = tpf.hdu[1].header
Example n. 12
def k2_cadence_events(
        filename=None,
        bitmask=None,
        from_archive=True,
        target=None,
        cadence=None,
        campaign=None,
        tag=None,
        plotfile=None,
        scatter=True,
        bars_yy=None,
        xlim=None,
        ylim=None,
        SAP_FLUX=True,
        report=False,
        report_filename=None,
        n_before=0,
        n_after=0,
        bitmask_decode=False,
        bitmask_flags=False,
        show_plot=True,
        new_filename=None,
        overwrite=False,
        useTPF=False,
        xcut=None,
        ycut=None):
    """
    Parameters
    ----------
    filename : str  [default: None]
        Filename of the KeplerLightCurveFile to be analyzed
    bitmask : int  [default: None]
        Bitmask value (integer) specifying the quality-flag bitmask of
        cadences for which to *show* events. See Table 2-3 of the
        Kepler Archive Manual (KDMC-10008-006) for more information.
    from_archive : bool  [default: True]
        If True, get the data from the Barbara A. Mikulski Archive for Space
        Telescopes (MAST) at the Space Telescope Science Institute (STScI).
    target : str or int  [default: None]
        Target name or EPIC number.
    cadence : str  [default: 'long']
        Type of Kepler/K2 cadence: 'short' or 'long'
    campaign : int  [default: None]
        The K2 Campaign number.
    tag : str  [default: None]
        String written at the start of the title of the plot.
    plotfile : str  [default: None]
        Filename of the output plotfile (if any).
    scatter : bool  [default: True]
        If True: the data is plotted as a scatter plot.
        If False: the data is plotted as a line plot.
    bars_yy : float  [default: None]
        Used to set the Y axis location of the gray vertical 
        lines showing events.
    xlim : 2-item tuple  [default: None]
        User-defined right and left limits for the X axis. 
        Example: xlim=(3360,3390)
    ylim : 2-item tuple  [default: None]
        User-defined bottom and top limits for the Y axis. 
        Example: ylim=(0.0,1.1)
    SAP_FLUX : bool  [default: True]
        If True: flux is SAP_FLUX
        If False: flux is PDCSAP_FLUX
    report : bool  [default: False]
        If True, print out the time, flux, cadence number, and
        the QUALITY value for each event.
    report_filename : str  [default: None]
        Filename of the report (if any).
    n_before : int [default: 0]
        Number of observations (cadences) before an event to mark as bad.
    n_after : int [default: 0]
        Number of observations (cadences) after an event to mark as bad.
    bitmask_decode : bool  [default: False]
        If True, decode (translate) the bitmask value to K2 Quality
        Flag Events.
    bitmask_flags : bool  [default: False]
        If True, show the QUALITY bit flags. See Table 2-3 of the Kepler
        Archive Manual (KDMC-10008-006) for more information.
    show_plot : bool [default: True]
        If True, show the plot
    new_filename : str  [default: None]
        Filename of the new long (llc) or short (slc) light curve file (or
        target pixel file) with the event and bad cadences removed.
    overwrite : bool [default: False]
        If True and new_filename is not None, 
        overwrite ("clobber") the new_filename if it exists. 
    useTPF : bool  [default: False]
        If False, return a KeplerLightCurveFile.
        If True, return a KeplerTargetPixelFile.
    xcut : 2-item tuple [default: None]
        Cadences with time (X axis) values within the xcut limits will be 
        flagged for removal.
    ycut : 2-item tuple [default: None]
        Cadences with normalized flux (Y axis) values within the ycut limits 
        will be flagged for removal.

    Returns
    -------
    ax : matplotlib.axes._subplots.AxesSubplot
        The matplotlib axes object created by the function
    n_events : int
        The number of events (cadences) with a QUALITY value
        featuring at least one bit in the bitmask.
    objf: KeplerLightCurveFile object or KeplerTargetPixelFile object
        The KeplerLightCurveFile object or KeplerTargetPixelFile object 
        created by the function.
    idx : numpy boolean array
        Normally: array of events created by the function.  If the keywords 
        n_before or n_after are greater than zero, then the idx array is a
        combination of events and bad cadences.  
    """
    #
    ftypes = ['Light Curve File', 'Target Pixel File']
    color = ['dodgerblue', 'red', 'slategrey', 'navy']
    #
    print('**********************************************')
    print('%s %s' % ('Kepler K2 Cadence Events (k2ce): Version', __version__))
    print('**********************************************')
    # if no target information given, use this default target:
    if ((filename is None) and (target is None) and (campaign is None)):
        if (command_line):
            print('\n*******************************************************')
            print('***** Use --help to see the command line options. *****')
            print('*******************************************************\n')
        from_archive = True
        target = '212803289'  # exoplanet K2-99b
        campaign = 17
        cadence = 'short'
        print('\nUsing default target (exoplanet K2-99b):\n')
        print('  from_archive=%s' % (from_archive))
        print('  target=%s' % (target))
        print('  campaign=%d' % (campaign))
        print('  cadence=%s' % (cadence))
        bitmask_decode = True
    #
    if (filename is not None):
        from_archive = False
    isLCF = False
    isTPF = False
    keplerData = False
    tessData = False
    objf = None
    ok = False
    if (from_archive):
        msg0 = '***** ERROR *****\n'
        msg1 = 'If the keyword from_archive=True,\n'
        msg2 = 'The keyword target must be an integer or string (KIC/EPIC ID '\
          +'or object name)'
        msg3 = "The keyword cadence must be a string: 'short' or 'long'"
        msg4 = 'The keyword campaign must be an integer (a valid K2 Campaign '\
          +'number)'
        assert (target is not None), msg0 + msg1 + msg2
        assert (cadence is not None), msg0 + msg1 + msg3
        assert (campaign is not None), msg0 + msg1 + msg4
        if (not useTPF):
            try:
                objf = lk.search_lightcurvefile(target=target,cadence=cadence,\
                  campaign=campaign).download(quality_bitmask=0)
            except Exception as e:
                print("[1] Exception raised: {}".format(e))
                sys.exit(1)
            isLCF = True
        else:
            try:
                objf = lk.search_targetpixelfile(target=target,\
                  cadence=cadence,
                  campaign=campaign).download(quality_bitmask=0)
            except Exception as e:
                print("[2] Exception raised: {}".format(e))
                sys.exit(1)
            isTPF = True
        keplerData = True
        ok = True
    else:
        assert (filename is not None),\
          '***** ERROR ***** A filename must be given'
        assert (os.path.isfile(filename)),\
          '***** ERROR ***** This file does not exist:\n %s' % (filename)
        try:
            objf = lk.open(filename, quality_bitmask=0)
        except Exception as e:
            print("[3] Exception raised: {}".format(e))
            sys.exit(1)
        if isinstance(objf, lk.lightcurvefile.KeplerLightCurveFile):
            keplerData = True
            isLCF = True
            ok = True
        elif isinstance(objf, lk.targetpixelfile.KeplerTargetPixelFile):
            keplerData = True
            isTPF = True
            ok = True
        elif isinstance(objf, lk.lightcurvefile.TessLightCurveFile):
            tessData = True
            isLCF = True
            ok = True
        elif isinstance(objf, lk.targetpixelfile.TessTargetPixelFile):
            tessData = True
            isTPF = True
            ok = True
        else:
            print('***** ERROR *****:  '
                  'lk.open returned an unknown type of object! %s'
                  % (type(objf)))
            sys.exit(1)
    pass  #}  if (from_archive):
    assert (ok)
    if (isLCF):
        lcf = objf
        ftype = ftypes[0]
    if (isTPF):
        tpf = objf
        ftype = ftypes[1]
    #
    filename = objf.path
    path, fn = os.path.split(filename)
    if (len(path) == 0):
        path = os.getcwd()
    print('\nfilename=%s/%s' % (path, fn))
    #
    try:
        telescop = objf.hdu[0].header['TELESCOP']
    except KeyError:
        print('\n***** ERROR *****\n\nMissing keyword: TELESCOP')
        sys.exit(1)
    if (telescop == 'Kepler'):
        assert (keplerData)
        assert (not tessData)
    elif (telescop == 'TESS'):
        assert (not keplerData)
        assert (tessData)
    #
    extname = objf.hdu[1].header['EXTNAME']  # FITS keyword
    if (isLCF):
        assert (extname == 'LIGHTCURVE')
    else:
        if (keplerData):
            assert (extname == 'TARGETTABLES')
        if (tessData):
            assert (extname == 'PIXELS')
    #
    obsmode = None
    if (keplerData):
        try:
            obsmode = objf.hdu[0].header['OBSMODE']
        except KeyError:
            obsmode = ''
    try:
        mission = objf.hdu[0].header['MISSION']
    except KeyError:
        mission = None
    if ((mission is not None) and (obsmode is not None)):
        if (mission == 'K2'):
            print('\n%s/%s %s %s\n' % (telescop, mission, obsmode, ftype))
        if (mission == 'Kepler'):
            print('\n%s %s %s\n' % (telescop, obsmode, ftype))
    elif (obsmode is not None):
        print('\n%s %s %s\n' % (telescop, obsmode, ftype))
    else:
        print('\n%s %s\n' % (telescop, ftype))
    #
    time = objf.hdu[1].data['time'].copy()
    cadenceno = objf.hdu[1].data['CADENCENO'].copy()
    if (SAP_FLUX):
        flux_type = 'SAP_FLUX'
    else:
        flux_type = 'PDCSAP_FLUX'
    if (isLCF):
        flux = lcf.hdu[1].data[flux_type].copy()
        if (keplerData):
            quality = lcf.hdu[1].data['SAP_QUALITY'].copy()
        else:
            quality = lcf.hdu[1].data['QUALITY'].copy()
    else:
        flux = tpf.to_lightcurve().flux.copy()
        quality = tpf.hdu[1].data['QUALITY'].copy()
        flux_type = 'SAP_FLUX'
    assert (time.size == cadenceno.size)
    assert (time.size == quality.size)
    assert (time.size == flux.size)
    flux_median = np.nanmedian(flux)
    flux /= flux_median  # Normalized flux
    #
    try:
        campaign = objf.hdu[0].header['CAMPAIGN']  # FITS keyword
        print('K2 Campaign %d\n' % campaign)
    except KeyError:
        campaign = None
    try:
        quarter = objf.hdu[0].header['QUARTER']  # FITS keyword
        print('Kepler Quarter %d\n' % quarter)
    except KeyError:
        quarter = None
    try:
        sector = objf.hdu[0].header['SECTOR']  # FITS keyword
        print('TESS Sector %d\n' % sector)
    except KeyError:
        sector = None
    bjdrefi = objf.hdu[1].header['BJDREFI']  # FITS keyword
    target_name = objf.hdu[0].header['OBJECT']  # FITS keyword
    print('target: %s\n' % (target_name))
    print('%d cadences\n' % (len(time)))
    if (bitmask is not None):
        mask = bitmask
    else:
        if (keplerData):
            bit21 = (1 << 20)  # thruster firing
            #assert (bit21 == 1048576)
            bit20 = (1 << 19)  # possible thruster firing
            #assert (bit20 == 524288)
            bit16 = (1 << 15)  # spacecraft not in fine point
            #assert (bit16 == 32768)
            bit03 = (1 << 2)  # spacecraft is in coarse point
            #assert (bit03 == 4)
            mask = bit21 + bit20 + bit16 + bit03
            #assert (mask == 1605636)
        else:
            # TESS data
            bit01 = (1 << 0)  # AttitudeTweak
            #assert (bit01 == 1)
            bit02 = (1 << 1)  # SafeMode
            #assert (bit02 == 2)
            bit03 = (1 << 2)  # CoarsePoint
            #assert (bit03 == 4)
            bit04 = (1 << 3)  # EarthPoint
            #assert (bit04 == 8)
            bit05 = (1 << 4)  # Argabrightening
            #assert (bit05 == 16)
            bit06 = (1 << 5)  # Desat
            #assert (bit06 == 32)
            bit08 = (1 << 7)  # ManualExclude
            #assert (bit08 == 128)
            bit10 = (1 << 9)  # ImpulsiveOutlier
            #assert (bit10 == 512)
            mask = bit01 + bit02 + bit03 + bit04 + bit05 + bit06 + bit08 + bit10
            #assert (mask == 703)
        bitmask = mask
        print('Using default bitmask value of %d.' % (bitmask))
    #
    if (bitmask_decode):
        if (keplerData):
            bitmask_str = '{0:021b}'.format(bitmask)
            print('\nThe bitmask value of %d = %s\n' % (bitmask, bitmask_str))
            print('translates as\n')
            print(lk.KeplerQualityFlags.decode(bitmask))
        if (tessData):
            bitmask_str = '{0:012b}'.format(bitmask)
            print('\nThe bitmask value of %d = %s\n' % (bitmask, bitmask_str))
            print('translates as\n')
            print(lk.TessQualityFlags.decode(bitmask))
        print('')
    #
    if (bitmask_flags):
        if (keplerData):
            d = lk.KeplerQualityFlags.STRINGS
            # supply missing dictionary item:
            d[512] = 'This bit unused by Kepler'
            print('\nName     Value   Explanation (Kepler/K2)')
        if (tessData):
            d = lk.TessQualityFlags.STRINGS
            print('\nName     Value   Explanation (TESS)')
        if (pyver == 27):
            list_sorted = sorted(((k, v) for k, v in d.iteritems()))
        if (pyver >= 30):
            list_sorted = sorted(((k, v) for k, v in d.items()))
        for j, (v, k) in enumerate(list_sorted):
            print('Bit%02d  %7d : %s' % ((j + 1), v, k))
        print('')
    #
    xx = time
    yy = flux
    cc = cadenceno
    qq = quality
    mtag = ''
    if (mission is not None):
        if (mission == 'K2'):
            mtag = 'C' + str(campaign)
        if (mission == 'Kepler'):
            mtag = 'Q' + str(quarter)
    if (keplerData):
        mask_str = '{0:021b}'.format(mask)
    else:
        mask_str = '{0:012b}'.format(mask)
    title = target_name + '   '
    if (keplerData):
        title += '[' + mtag + ']'
    title += '   [bitmask: ' + mask_str + ']'
    if (tag is not None):
        title = tag + title
    kwargs1 = dict(color=color[0], zorder=0)
    fig, ax = plt.subplots(figsize=(14, 5))
    if (not scatter):
        ax.plot(xx, yy, **kwargs1)
    else:
        ax.scatter(xx, yy, s=6, **kwargs1)
    xlabel = 'Time  [BJD - ' + str(bjdrefi) + ']  [days]'
    ax.set_xlabel(xlabel, size='x-large')
    ax.set_ylabel('Normalized ' + flux_type, size='x-large')
    ax.set_title(title, size='x-large')
    ax.grid(alpha=0.5)
    #
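    # Flag cadences whose QUALITY value shares at least one bit with the mask.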
    idx = (qq & mask) > 0
    n_events = np.count_nonzero(idx)
    #
    xxx = xx[idx].copy()
    yyy = yy[idx].copy()
    ccc = cc[idx].copy()
    qqq = qq[idx].copy()
    #
    if (report_filename is not None):
        assert (isinstance(report_filename, str))
        report = True
    if (report):
        if (report_filename is not None):
            check_file_exists(report_filename, overwrite)
            f = open(report_filename, 'w')
            f.write('# REPORT\n')
            f.write('#\n')
            f.write('# %s\n' % (filename))
            f.write('#\n')
            f.write('# %d events with bitmask value of %d (= %s)\n' %\
              (n_events,mask,mask_str))
            if (n_events > 0):
                f.write('#\n')
                f.write('#  Event         Time  Normalized_Flux  CADENCENO  '\
                  +'===========QUALITY============\n')
                for j, (xxx_, yyy_, ccc_, qqq_) in \
                  enumerate(zip(xxx,yyy,ccc,qqq)):
                    if (keplerData):
                        qqq_bitmask_str = '{0:021b}'.format(qqq_)
                    else:
                        qqq_bitmask_str = '{0:012b}'.format(qqq_)
                    jj = j + 1
                    f.write('%8d %12.6f %16.11f %9d %8d = %s\n' % \
                      (jj, xxx_, yyy_, ccc_, qqq_, qqq_bitmask_str))
            f.close()
            print('')
            print('%s <--- report written  :-)\n' % (report_filename))
            print('')
        else:
            print('# REPORT')
            print('#')
            print('# %s' % (filename))
            print('#')
            print('# %d events with bitmask value of %d (= %s)' %\
              (n_events,mask,mask_str))
            if (n_events > 0):
                print('#')
                print('#  Event         Time  Normalized_Flux  CADENCENO  '\
                  +'===========QUALITY============')
                for j, (xxx_, yyy_, ccc_, qqq_) in \
                  enumerate(zip(xxx,yyy,ccc,qqq)):
                    if (keplerData):
                        qqq_bitmask_str = '{0:021b}'.format(qqq_)
                    else:
                        qqq_bitmask_str = '{0:012b}'.format(qqq_)
                    jj = j + 1
                    print('%8d %12.6f %16.11f %9d %8d = %s' % \
                      (jj, xxx_, yyy_, ccc_, qqq_, qqq_bitmask_str))
    #
    assert (n_before >= 0)
    assert (n_after >= 0)
    clipit = (n_before > 0) or (n_after > 0)
    if (clipit):
        # mark bad cadences in the index array
        jdx = idx.copy()
        jje = jdx.size
        jj_min = 0
        jj_max = jje - 1
        for jj in range(jje):
            if (idx[jj]):
                ccm = cc[jj]
                ccb = ccm - n_before
                cce = ccm + n_after
                jb = np.clip(jj - n_before, jj_min, jj_max)
                assert (jb >= jj_min)
                je = np.clip(jj + n_after, jj_min, jj_max)
                assert (je <= jj_max)
                jn = je - jb + 1
                for j in range(jn):
                    k = jb + j
                    cck = cc[k]
                    if ((cck >= ccb) and (cck <= cce)):
                        jdx[k] = True
        idx = jdx.copy()
    #
    clipx = False
    if (xcut is not None):
        kdx = idx.copy()
        xcut_ = sorted(xcut)
        xmin = xcut_[0]
        xmax = xcut_[1]
        je = kdx.size
        for j in range(je):
            x_ = xx[j]
            if (np.isfinite(x_)):
                if ((x_ >= xmin) and (x_ <= xmax)):
                    kdx[j] = True
        idx = kdx.copy()
        clipx = True
    #
    clipy = False
    if (ycut is not None):
        kdx = idx.copy()
        ycut_ = sorted(ycut)
        ymin = ycut_[0]
        ymax = ycut_[1]
        je = kdx.size
        for j in range(je):
            y_ = yy[j]
            if (np.isfinite(y_)):
                if ((y_ >= ymin) and (y_ <= ymax)):
                    kdx[j] = True
        idx = kdx.copy()
        clipy = True
    #
    if (clipit or clipx or clipy):
        xxx = xx[idx].copy()
        yyy = yy[idx].copy()
        ccc = cc[idx].copy()
        qqq = qq[idx].copy()
    #
    kwargs2 = dict(s=10, color=color[1], zorder=10)
    ax.scatter(xxx, yyy, **kwargs2)
    #
    if (xlim is not None):
        ax.set_xlim(xlim)
    if (ylim is not None):
        ax.set_ylim(ylim)
    #
    # mark events near the top of the plot with grey vertical bars
    yyy = yy[idx].copy()
    if (bars_yy is None):
        bottom, top = ax.get_ylim()
        bars_yy = bottom + (0.94 * (top - bottom))
    yyy[:] = bars_yy
    kwargs3 = dict(marker='|', alpha=0.5, color=color[2], zorder=0, s=(20**2))
    ax.scatter(xxx, yyy, **kwargs3)
    #
    # show path and filename on the right side of plot
    path, fn = os.path.split(filename)
    if (len(path) == 0):
        path = os.getcwd()
    plt.figtext( 0.95, 0.05, path+'/', ha='right', va='bottom', \
      color=color[3], size='small', rotation=90)
    plt.figtext( 0.96, 0.05, fn, ha='right', va='bottom', \
      color=color[3], size='small', rotation=90)
    #
    if (plotfile is not None):
        check_file_exists(plotfile, overwrite)
        plt.savefig(plotfile, dpi=300)
        print('%s <--- plotfile written  :-)\n' % (plotfile))
        #plt.show()
        plt.close()
    if (show_plot):
        plt.ioff()
        plt.show()
    if (new_filename is not None):
        check_file_exists(new_filename, overwrite)
        hdul = fits.open(filename)
        hdul[1].data = hdul[1].data[~idx]
        hdul.writeto(new_filename, overwrite=overwrite)
        hdul.close()
        print('')
        print('%s  <--- new FITS file written  :-)\n' % (new_filename))
        print('')
    sys.stdout.flush()
    return (ax, n_events, objf, idx)
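
A usage sketch based on the defaults documented above (downloads the K2-99b short-cadence data from MAST and flags thruster-firing and pointing events with the default bitmask):

ax, n_events, objf, idx = k2_cadence_events(
    from_archive=True, target='212803289', campaign=17, cadence='short',
    bitmask_decode=True, show_plot=False)
print('%d flagged cadences' % (n_events))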
Example n. 13
def query_lightkurve(id, download_dir, use_cached, lkwargs):
    """ Check cache for fits file, or download it.

    Based on use_cached flag, will look in the cache for fits file
    corresponding to request id star. If nothing is found in cached it will be
    downloaded from the MAST server.

    Parameters
    ----------
    id : string
        Identifier for the requested star. Must be resolvable by Lightkurve.
    download_dir : str
        Path to the cache directory.
    use_cached : bool
        Whether or not to use data in the Lightkurve cache.
    lkwargs : dict
        Dictionary containing keywords for the Lightkurve search:
        cadence, quarter, campaign, sector, month.

    Note:
    -----
    Prioritizes long cadence over short cadence unless otherwise specified.

    """
    if not download_dir:
        cache = os.path.join(*[os.path.expanduser('~'), '.lightkurve-cache'])
    else:
        cache = download_dir

    # Strip common survey prefixes from the identifier. Note that
    # str.strip() removes matching *characters* from both ends, not a
    # prefix, so use str.replace() instead.
    if isinstance(id, str):
        for prefix in ['KIC', 'EPIC', 'TIC', 'kplr', 'tic']:
            id = id.replace(prefix, '')

    if not lkwargs['cadence']:
        lkwargs['cadence'] = 'long'
    if lkwargs['cadence'] == 'short':
        tgtfiles = glob.glob(
            os.path.join(*[
                cache, 'mastDownload', '*', f'*{str(int(id))}*', '*_slc.fits'
            ]))
    elif lkwargs['cadence'] == 'long':
        tgtfiles = glob.glob(
            os.path.join(*[
                cache, 'mastDownload', '*', f'*{str(int(id))}*', '*_llc.fits'
            ]))
    else:
        raise TypeError('Unrecognized cadence input for %s' % (id))

    if (not use_cached) or (use_cached and (len(tgtfiles) == 0)):
        if ((len(tgtfiles) == 0) and use_cached):
            warnings.warn(
                'Could not find %s cadence data for %s in cache, checking MAST...'
                % (lkwargs['cadence'], id))
        print(f'Querying MAST for {id}')
        lc_col = query_mast(id, cache, lkwargs)
        if len(lc_col) == 0:
            raise ValueError(
                "Could not find %s cadence data for %s in cache or on MAST" %
                (lkwargs['cadence'], id))

    elif (use_cached and (len(tgtfiles) != 0)):
        lc_col = [lk.open(n) for n in tgtfiles]
    else:
        raise ValueError('Unhandled Exception')
    lc0 = clean_lc(lc_col[0].PDCSAP_FLUX)
    for lc in lc_col[1:]:
        lc0 = lc0.append(clean_lc(lc.PDCSAP_FLUX))
    return lc0
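
A usage sketch (the KIC identifier is illustrative; clean_lc and query_mast are assumed to be defined elsewhere in the same module):

lc = query_lightkurve('KIC 4448777', download_dir=None, use_cached=True,
                      lkwargs={'cadence': 'long'})
print(lc.time[:5], lc.flux[:5])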
Example n. 14
python correct_halo.py -name Ascella -c 7 --do-plot
-----------------------------------------------------------------'''

if __name__ == '__main__':
    ap = ArgumentParser(
        description='halophot: K2 halo photometry with total variation.')
    ap.add_argument('epic', default='200000000', type=str, help='EPIC Number')
    ap.add_argument('--do-plot', action='store_true', default=False,
                    help='produce plots')

    args = ap.parse_args()

    campaign = 6
    epic = args.epic

    tpf = lightkurve.open('../data/normal/ktwo%s-c%02d_lpd-targ.fits.gz' %
                          (epic, campaign))
    lc = tpf.to_lightcurve('aperture')
    lc.pos_corr1 = tpf.pos_corr1
    lc.pos_corr2 = tpf.pos_corr2
    lc.primary_header = tpf.hdu[0].header
    lc.data_header = tpf.hdu[1].header

    lc_pipeline = lightkurve.open('../data/normal/ktwo%s-c%02d_llc.fits' %
                                  (epic, campaign))

    savedir = '../release/c%d' % campaign

    cdpp_pdc = lc_pipeline.get_lightcurve(
        'PDCSAP_FLUX').flatten().estimate_cdpp()
    cdpp_sap = lc_pipeline.get_lightcurve('SAP_FLUX').flatten().estimate_cdpp()
    print('CDPP: %.2f (PDC), %.2f (SAP)' % (cdpp_pdc, cdpp_sap))