def process_lightcurve(lc, lc_duration):
    """Run the astropy BLS periodogram over a lightcurve and return the
    results as a plain, JSON-serialisable dictionary.

    Parameters
    ----------
    lc : object
        Lightcurve object exposing ``times`` and ``fluxes`` arrays.
    lc_duration : float
        Duration of the lightcurve, in days; the period search extends up
        to half this value.

    Returns
    -------
    dict
        BLS periodogram output with astropy units stripped into
        ``[value, unit-string]`` pairs and ndarrays converted to lists.
    """
    times = lc.times * u.day
    fluxes = lc.fluxes

    # Trial transit durations between 0.05 and 0.2 days.
    trial_durations = np.linspace(0.05, 0.2, 10) * u.day
    bls = BoxLeastSquares(times, fluxes)
    raw_results = bls.autopower(trial_durations,
                                minimum_period=0.25 * u.day,
                                maximum_period=lc_duration / 2 * u.day,
                                minimum_n_transit=2,
                                frequency_factor=1.0)

    # Astropy Quantity objects are not serialisable: replace each Quantity
    # with a [numeric-value, unit-string] pair and each ndarray with a list.
    cleaned = {}
    for key, value in dict(raw_results).items():
        if isinstance(value, u.Quantity):
            numeric = value.value
            if isinstance(numeric, np.ndarray):
                numeric = list(numeric)
            cleaned[key] = [numeric, str(value.unit)]
        elif isinstance(value, np.ndarray):
            cleaned[key] = list(value)
        else:
            cleaned[key] = value
    return cleaned
def test_helper_functions():
    """Can we use all the functions in interact_bls?"""
    # Smoke test: exercise every datasource/figure helper in interact_bls
    # end-to-end on a real Kepler lightcurve.  Passing simply means none of
    # the calls raised; no return values are asserted.
    from ..interact_bls import (prepare_bls_datasource,
                                prepare_folded_datasource,
                                prepare_lightcurve_datasource)
    from ..interact_bls import (make_bls_figure_elements,
                                make_folded_figure_elements,
                                make_lightcurve_figure_elements)
    from ..interact_bls import (prepare_bls_help_source,
                                prepare_f_help_source, prepare_lc_help_source)
    # KEPLER10 is a module-level test fixture (path to a Kepler-10 file);
    # clean the lightcurve before building the datasources.
    lc = KeplerLightCurve.read(KEPLER10)
    lc = lc.normalize().remove_nans().flatten()
    lc_source = prepare_lightcurve_datasource(lc)
    f_source = prepare_folded_datasource(lc.fold(1))
    # A tiny 3-period BLS search is enough to produce a valid result object.
    model = BoxLeastSquares(lc.time, lc.flux)
    result = model.power([1, 2, 3], 0.3)
    bls_source = prepare_bls_datasource(result, 0)
    lc_help = prepare_lc_help_source(lc)
    f_help = prepare_f_help_source(lc.fold(1))
    bls_help = prepare_bls_help_source(bls_source, 1)
    make_lightcurve_figure_elements(lc, lc, lc_source, lc_source, lc_help)
    make_folded_figure_elements(lc.fold(1), lc.fold(1), f_source, f_source,
                                f_help)
    make_bls_figure_elements(result, bls_source, bls_help)
def bls(time, nflux):
    """Applies astropy Box Least-Squares to light curve to fit period

    Parameters
    ----------
    time: np.array
        Time array
    nflux: np.array
        Flux array

    Returns
    -------
    per_guess: float
        Period fit from BLS
    """
    from astropy.timeseries import BoxLeastSquares
    import astropy.units as u

    model = BoxLeastSquares(time * u.day, nflux, dy=0.01)
    # Single trial duration of 0.2 days; rank periods by signal-to-noise.
    pgram = model.autopower(0.2, objective="snr")
    # Index of the strongest peak in the periodogram.
    peak_index = int(np.median(np.argmax(pgram.power)))
    periods = np.asarray(pgram.period)
    return periods[peak_index]
def __call__(self, *args, **kwargs):
    # Run a BLS period search over this step's time series and store the
    # best candidate's ephemeris on both the step and the time series.
    # NOTE(review): *args/**kwargs are accepted but never used — presumably
    # to keep a uniform pipeline-step call signature; confirm with callers.
    self.logger = getLogger(
        f"{self.name}:{self.ts.name.lower().replace('_','-')}")
    self.logger.info("Running BLS periodogram")
    # Uniform period grid over [pmin, pmax] with nper samples.
    self._periods = linspace(self.ts.pmin, self.ts.pmax, self.ts.nper)
    self.bls = BoxLeastSquares(self.ts.time * u.day, self.ts.flux,
                               self.ts.ferr)
    self.result = self.bls.power(self._periods, self._durations,
                                 objective='snr')
    # Suppress known/unwanted periods: multiply both ranking metrics by a
    # notch mask (maskf) of fractional width 0.1 around each masked period.
    for p in self.ts.masked_periods:
        self.result.depth_snr *= maskf(self._periods, p, .1)
        self.result.log_likelihood *= maskf(self._periods, p, .1)
    # Best candidate = maximum depth SNR after masking.
    i = argmax(self.result.depth_snr)
    self.period = self.result.period[i].value
    self.snr = self.result.depth_snr[i]
    self.duration = self.result.duration[i].value
    self.depth = self.result.depth[i]
    # Advance the transit time to the first epoch after the series start.
    t0 = self.result.transit_time[i].value
    ep = epoch(self.ts.time.min(), t0, self.period)
    self.zero_epoch = t0 + ep * self.period
    self.ts.update_ephemeris(self.zero_epoch, self.period, self.duration,
                             self.depth)
    self.logger.info(
        f"BLS SNR {self.snr:.2f} period {self.period:.2f} d, duration {24*self.duration:.2f} h"
    )
def set_power_period(self, nt=5, min_p=1, max_p=100, n_f=10000, auto=True,
                     method='LS_astropy'):
    # Compute a periodogram (Lomb-Scargle or BLS) for the stored dataframe
    # (self.df with columns t, m, dflux) and set self.period to the
    # dominant period.  Exits the process on an unknown method name.
    self.pmin = min_p
    self.pmax = max_p
    self.method = method
    if self.method == 'LS_astropy':
        if auto:
            ls = LombScargle(self.df.t.values, self.df.m.values,
                             self.df.dflux.values, nterms=nt)
            # Frequency limits are the reciprocal of the period limits.
            self.frequency, self.power = ls.autopower(
                minimum_frequency=1. / self.pmax,
                maximum_frequency=1. / self.pmin)
        else:
            self.frequency = np.linspace(1. / self.pmax, 1. / self.pmin, n_f)
            self.power = LombScargle(self.df.t.values, self.df.m.values,
                                     self.df.dflux.values).power(self.frequency)
    elif self.method == 'BLS_astropy':
        model = BoxLeastSquares(self.df.t.values, self.df.m.values,
                                dy=self.df.dflux.values)
        if auto:
            # Fixed trial duration of 0.2 (same time units as t).
            periodogram = model.autopower(0.2)
            self.frequency = 1. / periodogram.period
            self.power = periodogram.power
        else:
            # NOTE(review): only 10 trial periods — very coarse; confirm
            # this manual grid is intentional.
            periods = np.linspace(self.pmin, self.pmax, 10)
            periodogram = model.power(periods, 0.2)
            self.frequency = 1. / periodogram.period
            self.power = periodogram.power
    else:
        print('Method should be chosen between these options:')
        print('LS_astropy, BLS_astropy')
        sys.exit()
    # setting_period: pick the period of the strongest peak, but reject it
    # if it is suspiciously close to an integer number of days (a common
    # sampling alias) and fall back to the second-strongest peak.
    period = (1. / self.frequency[np.argmax(self.power)])
    print("p f p-f", period, np.fix(period), period - np.fix(period))
    if period - np.fix(period) < 0.009:
        self.period = (1. /
                       self.frequency[(np.asarray(self.power).argsort()[-2])])
    else:
        self.period = period
def identifyTces(time, flux, bls_durs_hrs=(1, 2, 4, 8, 12), minSnr=3,
                 fracRemain=0.5, maxTces=10, minP=None, maxP=None):
    """Iteratively find transit-like signals (TCEs) with BLS.

    Find the highest peak in the BLS spectrum, record its parameters,
    mask out the in-transit points, median-detrend implicitly via the
    next search, and repeat.  Stop when less than ``fracRemain`` of the
    original data remains, when the signal SNR drops below ``minSnr``,
    or after ``maxTces`` detections.

    Parameters
    ----------
    time, flux : np.array
        Light curve time and flux arrays.
    bls_durs_hrs : sequence of float
        Trial transit durations in hours (converted to days for BLS).
        Default is an immutable tuple (the original list default was a
        mutable-default-argument hazard).
    minSnr : float
        Minimum simple SNR for a detection to keep searching.
    fracRemain : float
        Keep searching while more than this fraction of points remains.
    maxTces : int
        Maximum number of signals to return.
    minP, maxP : float or None
        Optional period search limits, passed through to findBlsSignal.

    Returns
    -------
    (np.array, np.array)
        Per-signal [period, t0, depth, duration, snr, ...] rows and the
        corresponding BLS compute_stats dictionaries.
    """
    keepLooking = True
    counter = 0
    results = []
    stats = []
    # BLS works in days; durations are supplied in hours.
    bls_durs_day = np.array(bls_durs_hrs) / 24
    t = time.copy()
    f = flux.copy()

    while keepLooking:
        bls_results = findBlsSignal(t, f, bls_durs_day, minP=minP, maxP=maxP)
        # Use a simple SNR estimate because the BLS depth SNR is unreliable.
        bls_results[4] = simpleSnr(t, f, bls_results)
        results.append(bls_results)

        bls = BoxLeastSquares(t, f)
        bls_stats = bls.compute_stats(bls_results[0], bls_results[3],
                                      bls_results[1])
        stats.append(bls_stats)

        # Mask the detected transits (duration padded by 10%) and repeat
        # the search on the remaining out-of-transit data.
        transit_mask = bls.transit_mask(t, bls_results[0],
                                        bls_results[3] * 1.1, bls_results[1])
        t = t[~transit_mask]
        f = f[~transit_mask]

        # Keep looking while enough data remains, the last signal was
        # significant, and we have not hit the TCE cap.
        if (len(t) / len(time) > fracRemain) and \
                (bls_results[4] >= minSnr) and (counter <= maxTces):
            counter = counter + 1
            keepLooking = True
        else:
            keepLooking = False

    return np.array(results), np.array(stats)
def boxleastsquares(time, relFlux, relFluxErr, acfBP):
    """Run BLS at half, one, and twice the ACF-derived period.

    Parameters are pandas Series (``.values`` is used) for time, relative
    flux and its error, plus ``acfBP``, the ACF best period.  Returns the
    trial periods, their powers, the best period, and the maximum power.
    """
    bls_model = BoxLeastSquares(time.values, relFlux.values,
                                dy=relFluxErr.values)
    # Trial durations: 20, 40 and 80 minutes (in days) plus 0.1 days.
    trial_durations = [20 / 1440, 40 / 1440, 80 / 1440, .1]
    trial_periods = [.5 * acfBP, acfBP, 2 * acfBP]
    pgram = bls_model.power(period=trial_periods, duration=trial_durations,
                            objective='snr')
    peak = np.argmax(pgram.power)
    return pgram.period, pgram.power, pgram.period[peak], np.max(pgram.power)
def calcBls(flux, time, bls_durs, minP=None, maxP=None, min_trans=3):
    """Run a BLS search and return the resulting power spectrum object."""
    bls = BoxLeastSquares(time, flux)
    # Let astropy choose the period grid appropriate for these durations.
    period_grid = bls.autoperiod(bls_durs,
                                 minimum_period=minP,
                                 maximum_period=maxP,
                                 minimum_n_transit=min_trans,
                                 frequency_factor=0.8)
    return bls.power(period_grid, bls_durs, oversample=20)
def simpleSnr(time, flux, results):
    """Estimate a simple SNR for a candidate: depth over the scatter that
    remains once the BLS transit model is removed from the data.

    ``results`` holds [period, t0, depth, duration, ...].
    """
    bls = BoxLeastSquares(time, flux)
    transit_model = bls.model(time, results[0], results[3], results[1])
    # Median-detrend the residuals before measuring the noise level.
    residual = median_subtract(flux - transit_model, 12)
    scatter = np.std(residual)
    return results[2] / scatter
def process_lightcurve(lc: LightcurveArbitraryRaster, lc_duration: float, search_settings: dict):
    """
    Perform a transit search on a light curve, using the bls_reference code.

    :param lc: The lightcurve object containing the input lightcurve.
    :type lc: LightcurveArbitraryRaster
    :param lc_duration: The duration of the lightcurve, in units of days.
    :type lc_duration: float
    :param search_settings: Dictionary of settings which control how we search for transits.
    :type search_settings: dict
    :return: dict containing the results of the transit search.
    """
    times = lc.times * u.day
    fluxes = lc.fluxes

    # Period search range, taken from the settings with sensible defaults.
    p_min = float(search_settings.get('period_min', 0.5)) * u.day
    p_max = float(search_settings.get('period_max', lc_duration / 2)) * u.day

    # Trial transit durations between 0.05 and 0.2 days.
    trial_durations = np.linspace(0.05, 0.2, 10) * u.day
    bls = BoxLeastSquares(times, fluxes)
    spectrum = bls.autopower(trial_durations,
                             minimum_period=p_min,
                             maximum_period=p_max,
                             minimum_n_transit=2,
                             frequency_factor=2.0)

    # Summarise: the period of the strongest peak and its power.
    peak = np.argmax(spectrum.power)
    results = {
        'period': float(spectrum.period[peak] / u.day),
        'power': np.max(spectrum.power)
    }

    # Extended results to save to disk (currently identical to the summary).
    results_extended = results

    return results, results_extended
def period_finder(self, nt=5, min_p=1, max_p=100, n_f=10000, auto=True,
                  method='LS_astropy'):
    # Compute a periodogram (Lomb-Scargle or BLS) for the stored dataframe
    # (self.df with columns t, m, e), then delegate period selection and
    # plotting to self.set_period() / self.plot_ls().
    # Exits the process on an unknown method name.
    self.pmin = min_p
    self.pmax = max_p
    self.method = method
    if self.method == 'LS_astropy':
        if auto:
            ls = LombScargle(self.df.t.values, self.df.m.values,
                             self.df.e.values, nterms=nt)
            # Frequency limits are the reciprocal of the period limits.
            self.frequency, self.power = ls.autopower(
                minimum_frequency=1. / self.pmax,
                maximum_frequency=1. / self.pmin)
        else:
            self.frequency = np.linspace(1. / self.pmax, 1. / self.pmin, n_f)
            self.power = LombScargle(self.df.t.values, self.df.m.values,
                                     self.df.e.values).power(self.frequency)
    elif self.method == 'BLS_astropy':
        # Note: unlike the LS branch, time is given explicit day units here.
        model = BoxLeastSquares(self.df.t.values * u.day, self.df.m.values,
                                dy=self.df.e.values)
        if auto:
            # Fixed trial duration of 0.2 days.
            periodogram = model.autopower(0.2)
            self.frequency = 1. / periodogram.period
            self.power = periodogram.power
        else:
            # NOTE(review): only 10 trial periods — very coarse; confirm
            # this manual grid is intentional.
            periods = np.linspace(self.pmin, self.pmax, 10)
            periodogram = model.power(periods, 0.2)
            self.frequency = 1. / periodogram.period
            self.power = periodogram.power
    else:
        print('Method should be chosen between these options:')
        print('LS_astropy, BLS_astropy')
        sys.exit()
    self.set_period()
    self.plot_ls()
def refine_ephem_BLS(self, filters=(1, 2), logtrange=-5, dur=None):
    """Refine the stored period/epoch with a narrow BLS search.

    Searches a small fractional window (+/- 10**logtrange) around the
    current period ``self.p`` using only the points whose filter id is in
    ``filters``, then updates ``self.p``, ``self.t0`` and ``self.dur``
    (duration as a fraction of the period) from the strongest peak.

    :param filters: filter ids to include (default photometric bands 1, 2).
    :param logtrange: log10 of the fractional half-width of the period window.
    :param dur: optional array of trial durations in days; if None a
        log-spaced grid tied to the period is built.  (The original code
        ignored this parameter and always rebuilt the grid.)
    """
    import astropy.units as u
    from astropy.timeseries import BoxLeastSquares

    # Period search window.
    # BUG FIX: the original assignments ended with trailing commas, which
    # silently turned pmin/pmax into 1-tuples.
    pmin = self.p * (1 - 10**logtrange)
    pmax = self.p * (1 + 10**logtrange)

    # Work in a median-subtracted time frame for numerical stability.
    t_med = np.median(self.t)
    _t = (self.t - t_med) * u.day

    # Restrict to the requested filters.
    mask = np.isin(self.fid, filters)
    model = BoxLeastSquares(_t[mask], self.yn[mask], self.dyn[mask])

    # Trial durations: honour a caller-supplied grid, otherwise search
    # between max(30 s, 0.5% of P) and min(10 d, 15% of P), log-spaced.
    if dur is None:
        durmin = np.max([30. / 3600 / 24, self.p * 0.005])
        durmax = np.min([10., self.p * 0.15])
        dur = np.logspace(np.log10(durmin), np.log10(durmax), num=5,
                          endpoint=True)

    # Run the constrained search.
    out = model.autopower(np.array(dur), minimum_period=pmin,
                          maximum_period=pmax)

    # Select the best period and shift t0 back to the original time frame.
    i = np.argmax(out.power)
    p = out.period[i].value
    t0 = out.transit_time[i].value + t_med
    print('period set to %12.12f, a %f fractional change' %
          (p, (self.p - p) / self.p))
    try:
        # self.t0 may not exist yet on a first call.
        print('t0 set to %g, %g fraction of the period' %
              (t0, (t0 - self.t0) / p))
    except Exception:
        pass
    self.p = p
    self.t0 = t0
    self.dur = out.duration[i].value / p
    self.astropyBLS = out
def __findBestSigma(self, maxperiod, window_length, sigma_step, sigma_max, debug_mode=False): i = 3 #starting point self.sigArr = [] self.lenArr = [] self.maxdataArr = [] while (sigma_max >= i): flat = self.rawlc.flatten( window_length=window_length).remove_outliers(sigma=i) model = BoxLeastSquares(flat.time, flat.flux, dy=0.01) testperiods = np.arange(1, maxperiod, 0.001) periodogram = model.power(testperiods, 0.16) maxID = np.argmax(periodogram.power) stat = model.compute_stats(periodogram.period[maxID], periodogram.duration[maxID], periodogram.transit_time[maxID]) self.sigArr.append(sum(stat['per_transit_log_likelihood'])) self.lenArr.append(len(stat['per_transit_log_likelihood'])) if debug_mode: print([ i, sum(stat['per_transit_log_likelihood']), len(stat['per_transit_log_likelihood']), periodogram.period[maxID] ]) #Debug i += sigma_step maxLLikeIndex = np.argwhere(self.lenArr == np.amax(self.lenArr)) for i in maxLLikeIndex: self.maxdataArr.append(self.sigArr[i.item(0)]) bestFit = np.argmax(self.maxdataArr) - 1 if debug_mode: print("Index of LogLikelihoods") print(maxLLikeIndex) print("Best Sigma") print(bestFit + i) return bestFit + i
def _bootstrap_max(t, y, dy, pmin, pmax, ffac, random_seed, n_bootstrap=1000):
    """Generate a sequence of bootstrap estimates of the max"""
    rng = np.random.RandomState(random_seed)
    trial_durations = [0.05, 0.10, 0.15, 0.20, 0.25, 0.33]
    maxima = []
    for _ in range(n_bootstrap):
        # Resample the data with replacement and rerun the BLS search.
        idx = rng.randint(0, len(y), len(y))
        boot_model = BoxLeastSquares(t, y[idx], dy[idx])
        spectrum = boot_model.autopower(
            trial_durations,
            minimum_period=pmin,
            maximum_period=pmax,
            frequency_factor=ffac,
        )
        maxima.append(spectrum.power.max())
    # Return the sorted distribution of bootstrap maxima as a Quantity.
    ordered = u.Quantity(maxima)
    ordered.sort()
    return ordered
def plot_bls_folds(time, flux, results, ticid, zoom=True):
    """Plot the phase-folded data and BLS model for each candidate signal.

    ``results`` rows hold [period, t0, depth, duration, ...] per candidate;
    at most four subplots are laid out.  When ``zoom`` is set, each panel
    is cropped to a few transit durations around phase zero.
    """
    num = 4  #max number of possible planet candidates
    plt.figure(figsize=(8, 12))
    scatter = np.std(flux)
    for i, row in enumerate(results):
        plt.subplot(num, 1, i + 1)
        # Phase relative to the transit epoch, centred on zero.
        phase = (time - row[1] + 0.5 * row[0]) % row[0] - 0.5 * row[0]
        bls = BoxLeastSquares(time, flux)
        fmodel = bls.model(time, results[i, 0], results[i, 3], results[i, 1])
        order = np.argsort(phase)
        # Model (green) under the data (black); phases shown in hours.
        plt.plot(phase[order] * 24, fmodel[order], 'g.-', ms=3)
        plt.plot(phase * 24, flux, 'k.',
                 label="TIC %u P=%6.2f d" % (ticid, results[i, 0]))
        if zoom:
            plt.xlim(-5.7 * results[i, 3] * 24, 5.7 * results[i, 3] * 24)
            plt.ylim(-2.9 * row[2], 4 * scatter)
        plt.legend(fontsize=8)
def get_ref_vals(lightcurve, p_ref=None):
    """Return (t0, period, n_transits) for the strongest BLS peak.

    With no reference period the full autopower grid is searched;
    otherwise the search is refined to +/-10% around ``p_ref``.
    """
    t = lightcurve.time
    y = lightcurve.flux
    dy = lightcurve.flux_err
    bls = BoxLeastSquares(t, y, dy)
    durations = [0.05, 0.1, 0.2]
    if p_ref is None:
        # No prior: let astropy build the period grid.
        pgram = bls.autopower(durations)
    else:
        # Fine grid around the reference period.
        refined_periods = np.linspace(p_ref * 0.9, p_ref * 1.1, 5000)
        pgram = bls.power(refined_periods, durations)
    best = np.argmax(pgram.power)
    stats = bls.compute_stats(pgram.period[best], pgram.duration[best],
                              pgram.transit_time[best])
    num_transits = len(stats['transit_times'])
    return (pgram.transit_time[best], pgram.period[best], num_transits)
def _create_interact_ui(doc, minp=minimum_period, maxp=maximum_period,
                        resolution=resolution):
    """Create BLS interact user interface.

    Builds the three linked bokeh figures (BLS periodogram, folded
    lightcurve, full lightcurve), the sliders/buttons that drive them,
    and wires up all callbacks before adding the layout to ``doc``.
    Relies on ``lc`` and the prepare_*/make_* helpers from the enclosing
    scope.
    """
    if minp is None:
        minp = 0.3
    if maxp is None:
        # Default maximum period: half the lightcurve baseline.
        maxp = (lc.time[-1] - lc.time[0]) / 2
    # Suffix describing the time offset of the mission's time format.
    time_format = ''
    if lc.time_format == 'bkjd':
        time_format = ' - 2454833 days'
    if lc.time_format == 'btjd':
        time_format = ' - 2457000 days'
    # Some sliders
    duration_slider = Slider(start=0.01,
                             end=0.5,
                             value=0.05,
                             step=0.01,
                             title="Duration [Days]",
                             width=400)
    npoints_slider = Slider(start=500,
                            end=10000,
                            value=resolution,
                            step=100,
                            title="BLS Resolution",
                            width=400)
    # Set up the period values, BLS model and best period.
    # Periods are log-spaced and must exceed the trial duration.
    period_values = np.logspace(np.log10(minp), np.log10(maxp),
                                npoints_slider.value)
    period_values = period_values[(period_values > duration_slider.value) &
                                  (period_values < maxp)]
    model = BoxLeastSquares(lc.time, lc.flux)
    result = model.power(period_values, duration_slider.value)
    loc = np.argmax(result.power)
    best_period = result.period[loc]
    best_t0 = result.transit_time[loc]
    # Some Buttons
    double_button = Button(label="Double Period",
                           button_type="danger",
                           width=100)
    half_button = Button(label="Half Period", button_type="danger", width=100)
    text_output = Paragraph(text="Period: {} days, T0: {}{}".format(
        np.round(best_period, 7), np.round(best_t0, 7), time_format),
                            width=350,
                            height=40)
    # Set up BLS source
    bls_source = prepare_bls_datasource(result, loc)
    bls_help_source = prepare_bls_help_source(bls_source,
                                              npoints_slider.value)
    # Set up the model LC: evaluate the BLS box model, normalise it, and
    # keep only the in-transit samples (the convolve/Box1DKernel trick
    # drops isolated at-median points so the model renders cleanly).
    mf = model.model(lc.time, best_period, duration_slider.value, best_t0)
    mf /= np.median(mf)
    mask = ~(convolve(np.asarray(mf == np.median(mf)), Box1DKernel(2)) > 0.9)
    model_lc = LightCurve(lc.time[mask], mf[mask])
    # Anchor points so the folded model spans a full period on each side.
    model_lc = model_lc.append(
        LightCurve([(lc.time[0] - best_t0) + best_period / 2], [1]))
    model_lc = model_lc.append(
        LightCurve([(lc.time[0] - best_t0) + 3 * best_period / 2], [1]))
    model_lc_source = ColumnDataSource(
        data=dict(time=np.sort(model_lc.time),
                  flux=model_lc.flux[np.argsort(model_lc.time)]))
    # Set up the LC (downsampled to at most ~5000 points for rendering).
    nb = int(np.ceil(len(lc.flux) / 5000))
    lc_source = prepare_lightcurve_datasource(lc[::nb])
    lc_help_source = prepare_lc_help_source(lc)
    # Set up folded LC (at most ~10000 points).
    nb = int(np.ceil(len(lc.flux) / 10000))
    f = lc.fold(best_period, best_t0)
    f_source = prepare_folded_datasource(f[::nb])
    f_help_source = prepare_f_help_source(f)
    f_model_lc = model_lc.fold(best_period, best_t0)
    # Pad the folded model to the full [-0.5, 0.5] phase range.
    f_model_lc = LightCurve([-0.5], [1]).append(f_model_lc)
    f_model_lc = f_model_lc.append(LightCurve([0.5], [1]))
    f_model_lc_source = ColumnDataSource(
        data=dict(phase=f_model_lc.time, flux=f_model_lc.flux))

    def _update_light_curve_plot(event):
        """If we zoom in on LC plot, update the binning."""
        mint, maxt = fig_lc.x_range.start, fig_lc.x_range.end
        inwindow = (lc.time > mint) & (lc.time < maxt)
        nb = int(np.ceil(inwindow.sum() / 5000))
        temp_lc = lc[inwindow]
        lc_source.data = {
            'time': temp_lc.time[::nb],
            'flux': temp_lc.flux[::nb]
        }

    def _update_folded_plot(event):
        # Rebin the folded plot for the current zoom window at the
        # currently-best period/t0 held in the BLS datasource.
        loc = np.argmax(bls_source.data['power'])
        best_period = bls_source.data['period'][loc]
        best_t0 = bls_source.data['transit_time'][loc]
        # Otherwise, we can just update the best_period index
        minphase, maxphase = fig_folded.x_range.start, fig_folded.x_range.end
        f = lc.fold(best_period, best_t0)
        inwindow = (f.time > minphase) & (f.time < maxphase)
        nb = int(np.ceil(inwindow.sum() / 10000))
        f_source.data = {
            'phase': f[inwindow].time[::nb],
            'flux': f[inwindow].flux[::nb]
        }

    # Function to update the widget
    def _update_params(all=False, best_period=None, best_t0=None):
        # Central update routine: with all=True the BLS spectrum is
        # recomputed for the current axis range and sliders; otherwise only
        # the folded/model views are refreshed at the given ephemeris.
        if all:
            # If we're updating everything, recalculate the BLS model
            minp, maxp = fig_bls.x_range.start, fig_bls.x_range.end
            period_values = np.logspace(np.log10(minp), np.log10(maxp),
                                        npoints_slider.value)
            ok = (period_values > duration_slider.value) & (period_values <
                                                            maxp)
            if ok.sum() == 0:
                # Nothing valid to search — leave the plots unchanged.
                return
            period_values = period_values[ok]
            result = model.power(period_values, duration_slider.value)
            ok = np.isfinite(result['power']) & np.isfinite(result['duration']) &\
                np.isfinite(result['transit_time']) & np.isfinite(result['period'])
            bls_source.data = dict(period=result['period'][ok],
                                   power=result['power'][ok],
                                   duration=result['duration'][ok],
                                   transit_time=result['transit_time'][ok])
            loc = np.nanargmax(bls_source.data['power'])
            best_period = bls_source.data['period'][loc]
            best_t0 = bls_source.data['transit_time'][loc]
            minpow, maxpow = bls_source.data['power'].min(
            ) * 0.95, bls_source.data['power'].max() * 1.05
            fig_bls.y_range.start = minpow
            fig_bls.y_range.end = maxpow
        # Otherwise, we can just update the best_period index
        minphase, maxphase = fig_folded.x_range.start, fig_folded.x_range.end
        f = lc.fold(best_period, best_t0)
        inwindow = (f.time > minphase) & (f.time < maxphase)
        nb = int(np.ceil(inwindow.sum() / 10000))
        f_source.data = {
            'phase': f[inwindow].time[::nb],
            'flux': f[inwindow].flux[::nb]
        }
        # Rebuild the model lightcurve for the new ephemeris (same
        # convolution trick as above to keep only in-transit samples).
        mf = model.model(lc.time, best_period, duration_slider.value, best_t0)
        mf /= np.median(mf)
        mask = ~(convolve(np.asarray(mf == np.median(mf)), Box1DKernel(2)) >
                 0.9)
        model_lc = LightCurve(lc.time[mask], mf[mask])
        model_lc_source.data = {
            'time': np.sort(model_lc.time),
            'flux': model_lc.flux[np.argsort(model_lc.time)]
        }
        f_model_lc = model_lc.fold(best_period, best_t0)
        f_model_lc = LightCurve([-0.5], [1]).append(f_model_lc)
        f_model_lc = f_model_lc.append(LightCurve([0.5], [1]))
        f_model_lc_source.data = {
            'phase': f_model_lc.time,
            'flux': f_model_lc.flux
        }
        vertical_line.update(location=best_period)
        fig_folded.title.text = 'Period: {} days \t T0: {}{}'.format(
            np.round(best_period, 7), np.round(best_t0, 7), time_format)
        text_output.text = "Period: {} days, \t T0: {}{}".format(
            np.round(best_period, 7), np.round(best_t0, 7), time_format)

    # Callbacks
    def _update_upon_period_selection(attr, old, new):
        """When we select a period we should just update a few things, but we should not recalculate model """
        if len(new) > 0:
            new = new[0]
            best_period = bls_source.data['period'][new]
            best_t0 = bls_source.data['transit_time'][new]
            _update_params(best_period=best_period, best_t0=best_t0)

    def _update_model_slider(attr, old, new):
        """If the duration slider is updated, then update the whole model set."""
        _update_params(all=True)

    def _update_model_slider_EVENT(event):
        """If we update the duration slider, we should update the whole model set. This is the same as the _update_model_slider but it has a different call signature... """
        _update_params(all=True)

    def _double_period_event():
        # Shift the BLS search window up an octave and recompute.
        fig_bls.x_range.start *= 2
        fig_bls.x_range.end *= 2
        _update_params(all=True)

    def _half_period_event():
        # Shift the BLS search window down an octave and recompute.
        fig_bls.x_range.start /= 2
        fig_bls.x_range.end /= 2
        _update_params(all=True)

    # Help Hover Call Backs: keep the "?" help glyph pinned near the
    # top-right corner of each figure as ranges change or reset.
    def _update_folded_plot_help_reset(event):
        f_help_source.data['phase'] = [
            (np.max(f.time) - np.min(f.time)) * 0.98 + np.min(f.time)
        ]
        f_help_source.data['flux'] = [
            (np.max(f.flux) - np.min(f.flux)) * 0.98 + np.min(f.flux)
        ]

    def _update_folded_plot_help(event):
        f_help_source.data['phase'] = [
            (fig_folded.x_range.end - fig_folded.x_range.start) * 0.95 +
            fig_folded.x_range.start
        ]
        f_help_source.data['flux'] = [
            (fig_folded.y_range.end - fig_folded.y_range.start) * 0.95 +
            fig_folded.y_range.start
        ]

    def _update_lc_plot_help_reset(event):
        lc_help_source.data['time'] = [
            (np.max(lc.time) - np.min(lc.time)) * 0.98 + np.min(lc.time)
        ]
        lc_help_source.data['flux'] = [
            (np.max(lc.flux) - np.min(lc.flux)) * 0.9 + np.min(lc.flux)
        ]

    def _update_lc_plot_help(event):
        lc_help_source.data['time'] = [
            (fig_lc.x_range.end - fig_lc.x_range.start) * 0.95 +
            fig_lc.x_range.start
        ]
        lc_help_source.data['flux'] = [
            (fig_lc.y_range.end - fig_lc.y_range.start) * 0.9 +
            fig_lc.y_range.start
        ]

    def _update_bls_plot_help_event(event):
        bls_help_source.data['period'] = [
            bls_source.data['period'][int(npoints_slider.value * 0.95)]
        ]
        bls_help_source.data['power'] = [
            (np.max(bls_source.data['power']) -
             np.min(bls_source.data['power'])) * 0.98 +
            np.min(bls_source.data['power'])
        ]

    def _update_bls_plot_help(attr, old, new):
        # Same as _update_bls_plot_help_event, but with the on_change
        # (attr, old, new) callback signature.
        bls_help_source.data['period'] = [
            bls_source.data['period'][int(npoints_slider.value * 0.95)]
        ]
        bls_help_source.data['power'] = [
            (np.max(bls_source.data['power']) -
             np.min(bls_source.data['power'])) * 0.98 +
            np.min(bls_source.data['power'])
        ]

    # Create all the figures.
    fig_folded = make_folded_figure_elements(f, f_model_lc, f_source,
                                             f_model_lc_source,
                                             f_help_source)
    fig_folded.title.text = 'Period: {} days \t T0: {}{}'.format(
        np.round(best_period, 7), np.round(best_t0, 5), time_format)
    fig_bls, vertical_line = make_bls_figure_elements(
        result, bls_source, bls_help_source)
    fig_lc = make_lightcurve_figure_elements(lc, model_lc, lc_source,
                                             model_lc_source, lc_help_source)
    # Map changes
    # If we click a new period, update
    bls_source.selected.on_change('indices', _update_upon_period_selection)
    # If we change the duration, update everything, including help button for BLS
    duration_slider.on_change('value', _update_model_slider)
    duration_slider.on_change('value', _update_bls_plot_help)
    # If we increase resolution, update everything
    npoints_slider.on_change('value', _update_model_slider)
    # Make sure the vertical line always goes to the best period.
    vertical_line.update(location=best_period)
    # If we pan in the BLS panel, update everything
    fig_bls.on_event(PanEnd, _update_model_slider_EVENT)
    fig_bls.on_event(Reset, _update_model_slider_EVENT)
    # If we pan in the LC panel, rebin the points
    fig_lc.on_event(PanEnd, _update_light_curve_plot)
    fig_lc.on_event(Reset, _update_light_curve_plot)
    # If we pan in the Folded panel, rebin the points
    fig_folded.on_event(PanEnd, _update_folded_plot)
    fig_folded.on_event(Reset, _update_folded_plot)
    # Deal with help button
    fig_bls.on_event(PanEnd, _update_bls_plot_help_event)
    fig_bls.on_event(Reset, _update_bls_plot_help_event)
    fig_folded.on_event(PanEnd, _update_folded_plot_help)
    fig_folded.on_event(Reset, _update_folded_plot_help_reset)
    fig_lc.on_event(PanEnd, _update_lc_plot_help)
    fig_lc.on_event(Reset, _update_lc_plot_help_reset)
    # Buttons
    double_button.on_click(_double_period_event)
    half_button.on_click(_half_period_event)
    # Layout the widget
    doc.add_root(
        layout([[fig_bls, fig_folded], fig_lc,
                [
                    Spacer(width=70), duration_slider,
                    Spacer(width=50), npoints_slider
                ],
                [
                    Spacer(width=70), double_button,
                    Spacer(width=70), half_button,
                    Spacer(width=300), text_output
                ]]))
def _package_results(
    tpf,
    target,
    contaminator,
    aper,
    contaminant_aper,
    transit_pixels,
    transit_pixels_err,
    period,
    t0,
    duration,
    plot=False,
):
    """Helper function for packaging up results

    Bundles bootstrapped sky positions and transit depths for the target
    (and, when present, the contaminant) into a result dictionary, along
    with the input ephemeris and per-pixel transit depths.
    """

    # def get_coords(thumb, err, aper, count=400):
    #     Y, X = np.mgrid[: tpf.shape[1], : tpf.shape[2]]
    #     cxs, cys = [], []
    #     for count in range(count):
    #         err1 = np.random.normal(0, err[aper])
    #         cxs.append(np.average(X[aper], weights=thumb[aper] + err1))
    #         cys.append(np.average(Y[aper], weights=thumb[aper] + err1))
    #     cxs, cys = np.asarray(cxs), np.asarray(cys)
    #     cras, cdecs = tpf.wcs.wcs_pix2world(np.asarray([cxs, cys]).T, 1).T
    #     return cras, cdecs

    def get_coords(thumb, err, aper=None):
        # Bootstrap the centroid of `thumb` within `aper` (500 draws with
        # per-pixel noise `err`) and convert pixel centroids to sky
        # coordinates via the TPF WCS.
        if aper is None:
            aper = np.ones(tpf.flux.shape[1:], bool)
        with np.errstate(divide="ignore"):
            Y, X = np.mgrid[:tpf.shape[1], :tpf.shape[2]]
            # Restrict to significant pixels (>3 sigma) inside the aperture.
            aper = create_threshold_mask(thumb / err, 3) & aper
            cxs, cys = [], []
            for count in range(500):
                w = np.random.normal(loc=np.abs(thumb[aper]), scale=err[aper])
                cxs.append(np.average(X[aper], weights=w))
                cys.append(np.average(Y[aper], weights=w))
            cxs, cys = np.asarray(cxs), np.asarray(cys)
            # Drop centroid samples that fell outside the postage stamp.
            k = (cxs > 0) & (cxs < tpf.shape[2]) & (cys > 0) & (cys <
                                                                tpf.shape[1])
            cxs, cys = cxs[k], cys[k]
            cras, cdecs = tpf.wcs.all_pix2world(np.asarray([cxs, cys]).T,
                                                1).T
        return cras, cdecs

    # Mean frame and its propagated per-pixel error.
    thumb = np.nanmean(np.nan_to_num(tpf.flux.value), axis=0)
    err = (np.sum(np.nan_to_num(tpf.flux_err.value)**2, axis=0)**0.5) / len(
        tpf.time)
    ra_target, dec_target = get_coords(thumb, err, aper=aper)

    # Bootstrap the target transit depth: 50 noisy realisations of the flux.
    bls = BoxLeastSquares(target.time, target.flux, target.flux_err)
    depths = []
    for i in range(50):
        bls.y = target.flux + np.random.normal(0, target.flux_err)
        depths.append(bls.power(period, duration)["depth"][0])
    target_depth = (np.mean(depths), np.std(depths))

    res = {"target_depth": target_depth}
    res["target_ra"] = np.median(ra_target), np.std(ra_target)
    res["target_dec"] = np.median(dec_target), np.std(dec_target)
    res["target_lc"] = target
    res["target_aper"] = aper

    if contaminant_aper.any():
        # Repeat the centroid and depth bootstrap for the contaminant.
        ra_contaminant, dec_contaminant = get_coords(transit_pixels,
                                                     transit_pixels_err)
        bls = BoxLeastSquares(contaminator.time, contaminator.flux,
                              contaminator.flux_err)
        depths = []
        for i in range(50):
            bls.y = contaminator.flux + np.random.normal(
                0, contaminator.flux_err)
            depths.append(bls.power(period, duration)["depth"][0])
        contaminator_depth = (np.mean(depths), np.std(depths))
        res["contaminator_ra"] = np.median(ra_contaminant), np.std(
            ra_contaminant)
        res["contaminator_dec"] = np.median(dec_contaminant), np.std(
            dec_contaminant)
        res["contaminator_depth"] = contaminator_depth
        res["contaminator_lc"] = contaminator
        res["contaminator_aper"] = contaminant_aper
        # Significance of the depth difference between the two sources.
        d, de = (contaminator_depth[0] - target_depth[0]), np.hypot(
            contaminator_depth[1], target_depth[1])
        res["delta_transit_depth[sigma]"] = d / de
        # Centroid shift between contaminant and target, with errors.
        dra = res["contaminator_ra"][0] - res["target_ra"][0]
        ddec = res["contaminator_dec"][0] - res["target_dec"][0]
        edra = (res["contaminator_ra"][1]**2 + res["target_ra"][1]**2)**0.5
        eddec = (res["contaminator_dec"][1]**2 + res["target_dec"][1]**2)**0.5
        centroid_shift = (((dra**2 + ddec**2)**0.5) * u.deg).to(u.arcsecond)
        ecentroid_shift = (centroid_shift * ((2 * edra / dra)**2 +
                                             (2 * eddec / ddec)**2)**0.5)
        res["centroid_shift"] = (centroid_shift, ecentroid_shift)

    res["period"] = period
    res["t0"] = t0
    res["duration"] = duration
    res["transit_depth"] = transit_pixels
    res["transit_depth_err"] = transit_pixels_err
    if plot:
        res["fig"] = _make_plot(tpf, res)
    return res
def generate_plots_s3(ticid, sector, cam, ccd, \
                      bls_bucket="tesssearchresults", \
                      detrend_bucket="tesssearchresults", \
                      ffilc_bucket="straw-lightcurves", \
                      outpath="/Users/smullally/TESS/lambdaSearch/strawTests/blsResults/s0001-kdwarf/plots/"):
    """
    Given a TICID, sector, camera, ccd and the bucket locations,
    generate a one page plot for each signal found by the bls
    using the information in those files.
    bls_bucket contains the search results csv file.
    detrend_bucket contains the detrend fits file.
    ffi_bucket contains the raw light curve fits files.
    Bucket arguments beginning with "/" are treated as local paths;
    otherwise they are fetched via loadFitsFromUri/loadCsvFromUri.
    Writes one PNG per signal into outpath.
    """
    # Standard file-name stems for this target/sector/camera/ccd.
    rootname = "tic%012u_s%04u-%1u-%1u" % (ticid, sector, cam, ccd)
    path = "tic%012u/" % ticid
    bls_name = "%s_plsearch.csv" % rootname
    det_name = "%s_detrend.fits" % rootname
    lc_name = "%s_stlc.fits" % rootname

    # Leading "/" means local filesystem; otherwise load from the bucket.
    if detrend_bucket[0] == "/":
        det_hdu = fits.open(detrend_bucket + path + det_name)
        bls = np.loadtxt(bls_bucket + path + bls_name, delimiter=',')
    else:
        det_hdu = loadFitsFromUri(detrend_bucket, path, det_name)
        bls = loadCsvFromUri(bls_bucket, path, bls_name)

    if ffilc_bucket[0] == "/":
        raw_hdu = fits.open(ffilc_bucket + path + lc_name)
    else:
        raw_hdu = loadFitsFromUri(ffilc_bucket, path, lc_name)

    #Get the number of signals found by the bls.
    # A single signal loads as a 1-D row; reshape to (1, 7) for uniformity.
    if len(bls.shape) == 1:
        N = 1
        bls = bls.reshape((1, 7))
        #print(bls.shape)
    else:
        N = bls.shape[0]

    time_raw = raw_hdu[1].data['TIME']
    raw = raw_hdu[1].data['SAP_FLUX']
    time_det = det_hdu[1].data['TIME']
    detrend = det_hdu[1].data['DETREND_FLUX']
    #plt.plot(time_det,detrend,'.')
    head = raw_hdu[1].header

    # Average image is optional in the file; fall back to a blank stamp.
    try:
        ave_im = raw_hdu[2].data
    except:
        ave_im = np.zeros((10, 10))

    # Metadata passed to the report plotter.
    meta = {}
    meta['sector'] = sector
    meta['cam'] = cam
    meta['ccd'] = ccd
    # Header keyword names differ between file versions.
    try:
        meta['imloc'] = (head['CUBECOL'], head['CUBEROW'])
    except:
        meta['imloc'] = (head['APCEN_Y'], head['APCEN_X'])
    meta['radius'] = head['AP_RAD']

    # One summary plot per detected signal.  BLS csv columns are
    # [period, epoch, depth, duration, snr, ntransits, ...].
    for i in range(N):
        #print(i)
        meta['period'] = bls[i, 0]
        meta['dur'] = bls[i, 3]
        meta['epoch'] = bls[i, 1]
        meta['snr'] = bls[i, 4]
        meta['depth'] = bls[i, 2]
        meta['ntrans'] = bls[i, 5]
        meta['id'] = ticid
        meta['pn'] = i + 1
        #print(meta)
        # Evaluate the box model on the detrended time stamps for overlay.
        bls_object = BoxLeastSquares(time_det, detrend)
        model = bls_object.model(time_det, meta['period'], \
                                 meta['dur'], meta['epoch'])
        out_name = "%s-%02i_plot.png" % (rootname, meta['pn'])
        output = outpath + out_name
        plt.figure(figsize=(10, 12))
        report.summaryPlot1(time_raw, raw, time_det, detrend, model, ave_im,
                            meta)
        plt.savefig(output)
        print(output)
def process_cell(matrix, cell, progress_indicator):
    """Evaluate one (alphabet_size, paa_division_integer) SAX parameter cell.

    Runs the PAA+SAX compression of one Kepler light curve, recovers its
    period with BoxLeastSquares, and folds the percentage error against the
    autocorrelation ground truth into the running mean stored in ``matrix``.

    Parameters
    ----------
    matrix : 2-D indexable
        Error matrix, indexed by [alphabet_size - MIN_SAX][paa_division_integer - MIN_PAA];
        updated in place.
    cell : sequence
        (alphabet_size, paa_division_integer) pair for this cell.
    progress_indicator : int
        Index of the light curve to process; -1 means start at the first.
    """
    alphabet_size = cell[0]
    paa_division_integer = cell[1]
    progression = 0 if progress_indicator == -1 else progress_indicator

    # Download or use downloaded lightcurve files
    time_flux_tuple_arr = pm.get_lightcurve_data()
    # Ground-truth periods for all light curves via autocorrelation
    ground_truth_arr = pm.get_ground_truth_values(time_flux_tuple_arr)
    # Transit durations from the exoplanet archive, converted hours -> days.
    # Kepler-2..8:
    # https://exoplanetarchive.ipac.caltech.edu/cgi-bin/TblView/nph-tblView?app=ExoTbls&config=cumulative
    actual_duration_arr = [
        3.88216 / 24, 2.36386 / 24, 3.98235 / 24, 4.56904 / 24, 3.60111 / 24,
        5.16165 / 24, 3.19843 / 24
    ]

    # Pick out time, flux, duration and ground truth for this light curve.
    ground_truth_period = ground_truth_arr[progression]
    actual_duration = actual_duration_arr[progression]
    time_flux_tuple = time_flux_tuple_arr[progression]
    time = time_flux_tuple[0]
    norm_fluxes = time_flux_tuple[1]
    dat_size = norm_fluxes.size

    # PAA transformation: number of segments = data size divided by the
    # number of points per segment.
    paa_points = int(dat_size / paa_division_integer)
    PAA_array = np.asarray(paa(norm_fluxes, paa_points), dtype=np.float32)

    # SAX conversion: breakpoints turn each PAA segment into a symbol ...
    breakPointsArray = pm.getBreakPointsArray(PAA_array, alphabet_size)
    sax_output = ts_to_string(PAA_array, breakPointsArray)
    # ... and each symbol is mapped back to a representative numeric value.
    numericSaxConversionArray = pm.getNumericSaxArray(breakPointsArray)
    numeric_SAX_flux = np.asarray([
        pm.getAlfabetToNumericConverter(symbol, numericSaxConversionArray)
        for symbol in sax_output
    ], dtype=np.float32)

    # Build matching time stamps: repeat each time paa_points times, chunk,
    # and take per-chunk means (same procedure as the original pipeline).
    repeated_x_array = np.repeat(time, paa_points)
    n = int(len(repeated_x_array) / paa_points)  # elements per chunk
    chunks = list(pm.divide_array_in_chunks(repeated_x_array, n))
    numeric_SAX_time = [np.mean(chunk) for chunk in chunks]

    # BoxLeastSquares applied to the numeric SAX representation
    BLS = BoxLeastSquares(numeric_SAX_time, numeric_SAX_flux)
    periodogram = BLS.autopower(actual_duration)
    # Index of the highest-power peak -> recovered period
    best_index = np.argmax(periodogram.power)
    period = periodogram.period[best_index]

    # Percentage error between recovered period and ground truth
    ground_truth_error = (abs(period - ground_truth_period) /
                          ground_truth_period) * 100

    # Update the running mean for this parameter combination.
    row = alphabet_size - MIN_SAX
    col = paa_division_integer - MIN_PAA
    if progression == 0:
        matrix[row][col] = ground_truth_error
    else:
        current_value = matrix[row][col]
        matrix[row][col] = (current_value * progression +
                            ground_truth_error) / (progression + 1)
def __generatePeriodogram(self, maxperiod): model = BoxLeastSquares(self.flat.time, self.flat.flux, dy=0.01) testperiods = np.arange(1, maxperiod, 0.001) self.periodogram = model.power(testperiods, 0.16)
def computeperiodbs(JDtime, targetflux): from astropy.timeseries import BoxLeastSquares model = BoxLeastSquares(JDtime, targetflux) results = model.autopower(0.16) period = results.period[np.argmax(results.power)] return period, 0, 0
def _create_interact_ui(doc, minp=minimum_period, maxp=maximum_period, resolution=resolution):
    """Create BLS interact user interface.

    Bokeh application callback (nested closure): builds the BLS periodogram,
    folded and full light-curve figures for ``lc`` from the enclosing scope,
    wires up sliders/buttons/pan events, and adds the final layout to ``doc``.

    NOTE(review): the defaults (minimum_period, maximum_period, resolution)
    are names from the enclosing scope, evaluated when this def executes.
    """
    if minp is None:
        minp = 0.3
    if maxp is None:
        # Default upper bound: half the light curve's time span.
        maxp = (lc.time[-1].value - lc.time[0].value) / 2

    # TODO: consider to accept Time as minp / maxp, and convert it to unitless days
    # Suffix appended to displayed T0 values, depending on the time format.
    time_format = ""
    if lc.time.format == "bkjd":
        time_format = " - 2454833 days"
    if lc.time.format == "btjd":
        time_format = " - 2457000 days"

    # Some sliders
    duration_slider = Slider(
        start=0.01,
        end=0.5,
        value=0.05,
        step=0.01,
        title="Duration [Days]",
        width=400,
    )
    npoints_slider = Slider(
        start=500,
        end=10000,
        value=resolution,
        step=100,
        title="BLS Resolution",
        width=400,
    )

    # Set up the period values, BLS model and best period.
    # Periods are log-spaced; drop any shorter than the transit duration.
    period_values = np.logspace(np.log10(minp), np.log10(maxp),
                                npoints_slider.value)
    period_values = period_values[(period_values > duration_slider.value)
                                  & (period_values < maxp)]
    model = BoxLeastSquares(lc.time, lc.flux)
    result = model.power(period_values, duration_slider.value)
    loc = np.argmax(result.power)
    best_period = result.period[loc]
    best_t0 = result.transit_time[loc]

    # Some Buttons
    double_button = Button(label="Double Period",
                           button_type="danger",
                           width=100)
    half_button = Button(label="Half Period", button_type="danger", width=100)
    text_output = Paragraph(
        text="Period: {} days, T0: {}{}".format(
            _round_strip_unit(best_period, 7),
            _round_strip_unit(best_t0, 7),
            time_format,
        ),
        width=350,
        height=40,
    )

    # Set up BLS source.
    # Units/format are stashed separately because ColumnDataSource values
    # are plain arrays with the astropy units stripped.
    bls_source = prepare_bls_datasource(result, loc)
    bls_source_units = dict(
        transit_time_format=result["transit_time"].format,
        transit_time_scale=result["transit_time"].scale,
        period=result["period"].unit,
    )
    bls_help_source = prepare_bls_help_source(bls_source, npoints_slider.value)

    # Set up the model LC: keep only points where the box model deviates
    # from its median (smoothed by a width-2 box kernel).
    mf = model.model(lc.time, best_period, duration_slider.value, best_t0)
    mf /= np.median(mf)
    mask = ~(convolve(np.asarray(mf == np.median(mf)), Box1DKernel(2)) > 0.9)
    model_lc = _to_lc(lc.time[mask], mf[mask])
    model_lc_source = _to_ColumnDataSource(
        data=dict(time=model_lc.time, flux=model_lc.flux))

    # Set up the LC (downsampled to at most ~5000 plotted points).
    nb = int(np.ceil(len(lc.flux) / 5000))
    lc_source = prepare_lightcurve_datasource(lc[::nb])
    lc_help_source = prepare_lc_help_source(lc)

    # Set up folded LC (downsampled to at most ~10000 plotted points).
    nb = int(np.ceil(len(lc.flux) / 10000))
    f = lc.fold(best_period, best_t0)
    f_source = prepare_folded_datasource(f[::nb])
    f_help_source = prepare_f_help_source(f)

    # Folded model, padded with flux=1 anchor points at both phase extremes.
    f_model_lc = model_lc.fold(best_period, best_t0)
    f_model_lc = _to_lc(_as_1d(f.time.min()), [1]).append(f_model_lc)
    f_model_lc = f_model_lc.append(_to_lc(_as_1d(f.time.max()), [1]))
    f_model_lc_source = _to_ColumnDataSource(
        data=dict(phase=f_model_lc.time, flux=f_model_lc.flux))

    def _update_light_curve_plot(event):
        """If we zoom in on LC plot, update the binning."""
        mint, maxt = fig_lc.x_range.start, fig_lc.x_range.end
        inwindow = (lc.time.value > mint) & (lc.time.value < maxt)
        nb = int(np.ceil(inwindow.sum() / 5000))
        temp_lc = lc[inwindow]
        _update_source(lc_source, {
            "time": temp_lc.time[::nb],
            "flux": temp_lc.flux[::nb]
        })

    def _update_folded_plot(event):
        # Refold at the currently best period/T0 and rebin to the visible
        # phase window.
        loc = np.argmax(bls_source.data["power"])
        best_period = bls_source.data["period"][loc]
        best_t0 = bls_source.data["transit_time"][loc]
        # Otherwise, we can just update the best_period index
        minphase, maxphase = fig_folded.x_range.start, fig_folded.x_range.end
        f = lc.fold(best_period, best_t0)
        inwindow = (f.time > minphase) & (f.time < maxphase)
        nb = int(np.ceil(inwindow.sum() / 10000))
        _update_source(
            f_source,
            {
                "phase": f[inwindow].time[::nb],
                "flux": f[inwindow].flux[::nb]
            },
        )

    # Function to update the widget
    # NOTE(review): the parameter name ``all`` shadows the builtin.
    def _update_params(all=False, best_period=None, best_t0=None):
        if all:
            # If we're updating everything, recalculate the BLS model
            minp, maxp = fig_bls.x_range.start, fig_bls.x_range.end
            period_values = np.logspace(np.log10(minp), np.log10(maxp),
                                        npoints_slider.value)
            ok = (period_values > duration_slider.value) & (period_values <
                                                            maxp)
            if ok.sum() == 0:
                return
            period_values = period_values[ok]
            result = model.power(period_values, duration_slider.value)
            # Drop non-finite entries before pushing to the data source.
            ok = (_isfinite(result["power"]) & _isfinite(result["duration"])
                  & _isfinite(result["transit_time"])
                  & _isfinite(result["period"]))
            ok_result = dict(
                period=result["period"]
                [ok],  # useful for accessing values with units needed later
                power=result["power"][ok],
                duration=result["duration"][ok],
                transit_time=result["transit_time"][ok],
            )
            _update_source(bls_source, ok_result)
            loc = np.nanargmax(ok_result["power"])
            best_period = ok_result["period"][loc]
            best_t0 = ok_result["transit_time"][loc]
            minpow, maxpow = (
                bls_source.data["power"].min() * 0.95,
                bls_source.data["power"].max() * 1.05,
            )
            fig_bls.y_range.start = minpow
            fig_bls.y_range.end = maxpow
        # Otherwise, we can just update the best_period index
        minphase, maxphase = fig_folded.x_range.start, fig_folded.x_range.end
        f = lc.fold(best_period, best_t0)
        inwindow = (f.time > minphase) & (f.time < maxphase)
        nb = int(np.ceil(inwindow.sum() / 10000))
        _update_source(
            f_source,
            {
                "phase": f[inwindow].time[::nb],
                "flux": f[inwindow].flux[::nb]
            },
        )
        # Rebuild the model light curve for the (possibly new) period/T0.
        mf = model.model(lc.time, best_period, duration_slider.value, best_t0)
        mf /= np.median(mf)
        mask = ~(convolve(np.asarray(mf == np.median(mf)), Box1DKernel(2)) >
                 0.9)
        model_lc = _to_lc(lc.time[mask], mf[mask])
        _update_source(model_lc_source, {
            "time": model_lc.time,
            "flux": model_lc.flux
        })
        f_model_lc = model_lc.fold(best_period, best_t0)
        f_model_lc = _to_lc(_as_1d(f.time.min()), [1]).append(f_model_lc)
        f_model_lc = f_model_lc.append(_to_lc(_as_1d(f.time.max()), [1]))
        _update_source(f_model_lc_source, {
            "phase": f_model_lc.time,
            "flux": f_model_lc.flux
        })
        vertical_line.update(location=best_period.value)
        fig_folded.title.text = "Period: {} days \t T0: {}{}".format(
            _round_strip_unit(best_period, 7),
            _round_strip_unit(best_t0, 7),
            time_format,
        )
        text_output.text = "Period: {} days, \t T0: {}{}".format(
            _round_strip_unit(best_period, 7),
            _round_strip_unit(best_t0, 7),
            time_format,
        )

    # Callbacks
    def _update_upon_period_selection(attr, old, new):
        """When we select a period we should just update a few things, but we should not recalculate model"""
        if len(new) > 0:
            new = new[0]
            # Re-attach the units/format stripped by the ColumnDataSource.
            best_period = (bls_source.data["period"][new] *
                           bls_source_units["period"])
            best_t0 = Time(
                bls_source.data["transit_time"][new],
                format=bls_source_units["transit_time_format"],
                scale=bls_source_units["transit_time_scale"],
            )
            _update_params(best_period=best_period, best_t0=best_t0)

    def _update_model_slider(attr, old, new):
        """If the duration slider is updated, then update the whole model set."""
        _update_params(all=True)

    def _update_model_slider_EVENT(event):
        """If we update the duration slider, we should update the whole model set.
        This is the same as the _update_model_slider but it has a different call signature...
        """
        _update_params(all=True)

    def _double_period_event():
        fig_bls.x_range.start *= 2
        fig_bls.x_range.end *= 2
        _update_params(all=True)

    def _half_period_event():
        fig_bls.x_range.start /= 2
        fig_bls.x_range.end /= 2
        _update_params(all=True)

    # Help Hover Call Backs
    def _update_folded_plot_help_reset(event):
        f_help_source.data["phase"] = [_at_ratio(f.time, 0.95)]
        f_help_source.data["flux"] = [_at_ratio(f.flux, 0.95)]

    def _update_folded_plot_help(event):
        f_help_source.data["phase"] = [_at_ratio(fig_folded.x_range, 0.95)]
        f_help_source.data["flux"] = [_at_ratio(fig_folded.y_range, 0.95)]

    def _update_lc_plot_help_reset(event):
        lc_help_source.data["time"] = [_at_ratio(lc.time, 0.98)]
        lc_help_source.data["flux"] = [_at_ratio(lc.flux, 0.95)]

    def _update_lc_plot_help(event):
        lc_help_source.data["time"] = [_at_ratio(fig_lc.x_range, 0.98)]
        lc_help_source.data["flux"] = [_at_ratio(fig_lc.y_range, 0.95)]

    def _update_bls_plot_help_event(event):
        # cannot use _at_ratio helper for period, because period is log scaled.
        bls_help_source.data["period"] = [
            bls_source.data["period"][int(npoints_slider.value * 0.95)]
        ]
        bls_help_source.data["power"] = [
            _at_ratio(bls_source.data["power"], 0.98)
        ]

    def _update_bls_plot_help(attr, old, new):
        # Same body as _update_bls_plot_help_event, but with the on_change
        # (attr, old, new) signature instead of the event signature.
        bls_help_source.data["period"] = [
            bls_source.data["period"][int(npoints_slider.value * 0.95)]
        ]
        bls_help_source.data["power"] = [
            _at_ratio(bls_source.data["power"], 0.98)
        ]

    # Create all the figures.
    fig_folded = make_folded_figure_elements(f, f_model_lc, f_source,
                                             f_model_lc_source, f_help_source)
    fig_folded.title.text = "Period: {} days \t T0: {}{}".format(
        _round_strip_unit(best_period, 7),
        _round_strip_unit(best_t0, 5),
        time_format,
    )
    fig_bls, vertical_line = make_bls_figure_elements(
        result, bls_source, bls_help_source)
    fig_lc = make_lightcurve_figure_elements(lc, model_lc, lc_source,
                                             model_lc_source, lc_help_source)

    # Map changes
    # If we click a new period, update
    bls_source.selected.on_change("indices", _update_upon_period_selection)
    # If we change the duration, update everything, including help button for BLS
    duration_slider.on_change("value", _update_model_slider)
    duration_slider.on_change("value", _update_bls_plot_help)
    # If we increase resolution, update everything
    npoints_slider.on_change("value", _update_model_slider)
    # Make sure the vertical line always goes to the best period.
    vertical_line.update(location=best_period.value)

    # If we pan in the BLS panel, update everything
    fig_bls.on_event(PanEnd, _update_model_slider_EVENT)
    fig_bls.on_event(Reset, _update_model_slider_EVENT)
    # If we pan in the LC panel, rebin the points
    fig_lc.on_event(PanEnd, _update_light_curve_plot)
    fig_lc.on_event(Reset, _update_light_curve_plot)
    # If we pan in the Folded panel, rebin the points
    fig_folded.on_event(PanEnd, _update_folded_plot)
    fig_folded.on_event(Reset, _update_folded_plot)
    # Deal with help button
    fig_bls.on_event(PanEnd, _update_bls_plot_help_event)
    fig_bls.on_event(Reset, _update_bls_plot_help_event)
    fig_folded.on_event(PanEnd, _update_folded_plot_help)
    fig_folded.on_event(Reset, _update_folded_plot_help_reset)
    fig_lc.on_event(PanEnd, _update_lc_plot_help)
    fig_lc.on_event(Reset, _update_lc_plot_help_reset)

    # Buttons
    double_button.on_click(_double_period_event)
    half_button.on_click(_half_period_event)

    # Layout the widget
    doc.add_root(
        layout([
            [fig_bls, fig_folded],
            fig_lc,
            [
                Spacer(width=70),
                duration_slider,
                Spacer(width=50),
                npoints_slider,
            ],
            [
                Spacer(width=70),
                double_button,
                Spacer(width=70),
                half_button,
                Spacer(width=300),
                text_output,
            ],
        ]))
def ffi_lowess_detrend(save_path='', sector=1, target_ID_list=[],
                       pipeline='2min', multi_sector=False,
                       use_peak_cut=False, binned=False, transit_mask=False,
                       injected_planet='user_defined', injected_rp=0.1,
                       injected_per=8.0, detrending='lowess_partial',
                       single_target_ID=['HIP 1113'], n_bins=30):
    """Download, clean, optionally inject transits into, LOWESS-detrend and
    BLS-search TESS light curves for every target in ``target_ID_list``.

    For each target the pipeline: downloads a light curve (per ``pipeline`` /
    ``multi_sector``), removes pointing systematics, estimates a rotation
    period (Lomb-Scargle and BLS), optionally injects a batman transit model,
    optionally removes variability peaks and/or masks a known transit,
    detrends with LOWESS ('lowess_full' or 'lowess_partial'), builds a BLS
    periodogram, and produces phase-fold and 2x2 "eyeballing" plots.

    Returns
    -------
    (t_cut, BLS_flux, phase, epoch, period) for the last target processed.
    NOTE(review): if ``target_ID_list`` is empty or every target fails
    before these are assigned, the return raises UnboundLocalError.
    NOTE(review): ``target_ID_list`` and ``single_target_ID`` are mutable
    default arguments (shared across calls); ``binned`` is unused here.
    """
    for target_ID in target_ID_list:
        try:
            lc_30min = lightkurve.lightcurve.TessLightCurve(time=[], flux=[])
            if multi_sector != False:
                # multi_sector is a list of sector numbers: download the
                # first sector, then append each subsequent one.
                sap_lc, pdcsap_lc = two_min_lc_download(
                    target_ID, sector=multi_sector[0], from_file=False)
                lc_30min = pdcsap_lc
                nancut = np.isnan(lc_30min.flux) | np.isnan(lc_30min.time)
                lc_30min = lc_30min[~nancut]
                clean_time, clean_flux, clean_flux_err = clean_tess_lc(
                    lc_30min.time, lc_30min.flux, lc_30min.flux_err,
                    target_ID, multi_sector[0], save_path)
                lc_30min.time = clean_time
                lc_30min.flux = clean_flux
                lc_30min.flux_err = clean_flux_err
                for sector_num in multi_sector[1:]:
                    sap_lc_new, pdcsap_lc_new = two_min_lc_download(
                        target_ID, sector_num, from_file=False)
                    lc_30min_new = pdcsap_lc_new
                    nancut = np.isnan(lc_30min_new.flux) | np.isnan(
                        lc_30min_new.time)
                    lc_30min_new = lc_30min_new[~nancut]
                    clean_time, clean_flux, clean_flux_err = clean_tess_lc(
                        lc_30min_new.time, lc_30min_new.flux,
                        lc_30min_new.flux_err, target_ID, sector_num,
                        save_path)
                    lc_30min_new.time = clean_time
                    lc_30min_new.flux = clean_flux
                    lc_30min_new.flux_err = clean_flux_err
                    lc_30min = lc_30min.append(lc_30min_new)
            else:
                # Single-sector download; source depends on ``pipeline``.
                try:
                    if pipeline == 'DIA':
                        lc_30min, filename = diff_image_lc_download(
                            target_ID,
                            sector,
                            plot_lc=True,
                            save_path=save_path,
                            from_file=True)
                    elif pipeline == '2min':
                        sap_lc, pdcsap_lc = two_min_lc_download(
                            target_ID, sector=sector, from_file=False)
                        lc_30min = pdcsap_lc
                        nancut = np.isnan(lc_30min.flux) | np.isnan(
                            lc_30min.time)
                        lc_30min = lc_30min[~nancut]
                    elif pipeline == 'eleanor':
                        raw_lc, corr_lc, pca_lc = eleanor_lc_download(
                            target_ID,
                            sector,
                            from_file=False,
                            save_path=save_path,
                            plot_pca=False)
                        lc_30min = pca_lc
                    elif pipeline == 'from_file':
                        # NOTE(review): hard-coded filename.
                        lcf = lightkurve.open(
                            'tess2019140104343-s0012-0000000212461524-0144-s_lc.fits'
                        )
                        lc_30min = lcf.PDCSAP_FLUX
                    elif pipeline == 'from_pickle':
                        with open('Original_time.pkl', 'rb') as f:
                            original_time = pickle.load(f)
                        with open('Original_flux.pkl', 'rb') as f:
                            original_flux = pickle.load(f)
                        lc_30min = lightkurve.lightcurve.TessLightCurve(
                            time=original_time, flux=original_flux)
                    elif pipeline == 'raw':
                        lc_30min = raw_FFI_lc_download(target_ID,
                                                       sector,
                                                       plot_tpf=False,
                                                       plot_lc=True,
                                                       save_path=save_path,
                                                       from_file=False)
                        pipeline = "raw"
                    else:
                        print('Invalid pipeline')
                # NOTE(review): bare except silently hides download errors.
                except:
                    print('Lightcurve for {} not available'.format(target_ID))

            ################### Clean TESS lc pointing systematics ########################
            if multi_sector == False:
                clean_time, clean_flux, clean_flux_err = clean_tess_lc(
                    lc_30min.time, lc_30min.flux, lc_30min.flux_err,
                    target_ID, sector, save_path)
                lc_30min.time = clean_time
                lc_30min.flux = clean_flux
                lc_30min.flux_err = clean_flux_err

            ######################### Find rotation period ################################
            normalized_flux = np.array(lc_30min.flux) / np.median(
                lc_30min.flux)

            # From Lomb-Scargle
            freq = np.arange(0.04, 4.1, 0.00001)
            power = LombScargle(lc_30min.time, normalized_flux).power(freq)
            ls_fig = plt.figure()
            plt.plot(freq, power, c='k', linewidth=1)
            plt.xlabel('Frequency')
            plt.ylabel('Power')
            plt.title(
                '{} LombScargle Periodogram for original lc'.format(target_ID))
            plt.close(ls_fig)
            # Highest-power frequency -> rotation period estimate.
            i = np.argmax(power)
            freq_rot = freq[i]
            p_rot = 1 / freq_rot
            print('Rotation Period = {:.3f}d'.format(p_rot))

            # From BLS
            durations = np.linspace(0.05, 1, 22) * u.day
            model = BoxLeastSquares(lc_30min.time * u.day, normalized_flux)
            results = model.autopower(durations, frequency_factor=1.0)
            rot_index = np.argmax(results.power)
            rot_period = results.period[rot_index]
            print("Rotation Period from BLS of original = {}d".format(
                rot_period))

            ########################### batman stuff ######################################
            if injected_planet != False:
                params = batman.TransitParams(
                )  # object to store transit parameters
                params.t0 = -10.0  # time of inferior conjunction
                params.per = 8.0
                params.rp = 0.1
                # Stellar mass/radius from the BANYAN membership table,
                # used to compute the scaled semi-major axis via Kepler's
                # third law.
                table_data = Table.read("BANYAN_XI-III_members_with_TIC.csv",
                                        format='ascii.csv')
                i = list(table_data['main_id']).index(target_ID)
                m_star = table_data['Stellar Mass'][i] * m_Sun
                r_star = table_data['Stellar Radius'][i] * r_Sun * 1000
                params.a = (((G * m_star * (params.per * 86400.)**2) /
                             (4. * (np.pi**2)))**(1. / 3)) / r_star
                if np.isnan(params.a) == True:
                    params.a = 17.  # semi-major axis (in units of stellar radii)
                params.inc = 90.
                params.ecc = 0.
                params.w = 90.  # longitude of periastron (in degrees)
                params.limb_dark = "nonlinear"  # limb darkening model
                params.u = [0.5, 0.1, 0.1,
                            -0.1]  # limb darkening coefficients [u1, u2, u3, u4]

                if injected_planet == 'user_defined':
                    # Build planet from user specified parameters
                    params.per = injected_per  # orbital period (days)
                    params.rp = injected_rp  # planet radius (in units of stellar radii)
                    params.a = (((G * m_star * (params.per * 86400.)**2) /
                                 (4. * (np.pi**2)))**(1. / 3)) / r_star
                    if np.isnan(params.a) == True:
                        params.a = 17  # Recalculates a if period has changed
                    params.inc = 90.  # orbital inclination (in degrees)
                    params.ecc = 0.  # eccentricity
                else:
                    raise NameError('Invalid inputfor injected planet')

                # Defines times at which to calculate lc and models batman lc.
                # Time axis is shifted so t=0 sits at the mid-point sample.
                t = np.linspace(-13.9165035, 13.9165035, len(lc_30min.time))
                index = int(len(lc_30min.time) // 2)
                mid_point = lc_30min.time[index]
                t = lc_30min.time - lc_30min.time[index]
                m = batman.TransitModel(params, t)
                t += lc_30min.time[index]
                batman_flux = m.light_curve(params)
                batman_model_fig = plt.figure()
                plt.scatter(lc_30min.time, batman_flux, s=2, c='k')
                plt.xlabel("Time - 2457000 (BTJD days)")
                plt.ylabel("Relative flux")
                plt.title("batman model transit for {}R ratio".format(
                    params.rp))
                plt.show()

            ################################# Combining ###################################
            if injected_planet != False:
                # Normalised flux plus the (near-1) model, re-centred on 1.
                combined_flux = np.array(lc_30min.flux) / np.median(
                    lc_30min.flux) + batman_flux - 1
                injected_transit_fig = plt.figure()
                plt.scatter(lc_30min.time, combined_flux, s=2, c='k')
                plt.xlabel("Time - 2457000 (BTJD days)")
                plt.ylabel("Relative flux")
                plt.title(
                    "{} with injected transits for a {}R {}d planet to star ratio."
                    .format(target_ID, params.rp, params.per))
                ax = plt.gca()
                # Mark expected transit times.
                for n in range(int(-1 * 8 / params.per),
                               int(2 * 8 / params.per + 2)):
                    ax.axvline(params.t0 + n * params.per + mid_point,
                               ymin=0.1,
                               ymax=0.2,
                               lw=1,
                               c='r')
                ax.axvline(params.t0 + lc_30min.time[index],
                           ymin=0.1,
                           ymax=0.2,
                           lw=1,
                           c='r')
                ax.axvline(params.t0 + params.per + lc_30min.time[index],
                           ymin=0.1,
                           ymax=0.2,
                           lw=1,
                           c='r')
                ax.axvline(params.t0 + 2 * params.per + lc_30min.time[index],
                           ymin=0.1,
                           ymax=0.2,
                           lw=1,
                           c='r')
                plt.show()

            ############################## Removing peaks #################################
            if injected_planet == False:
                combined_flux = np.array(lc_30min.flux) / np.median(
                    lc_30min.flux)
            if use_peak_cut == True:
                # Locate variability peaks/troughs and drop all cadences
                # within 0.1 d of any of them.
                peaks, peak_info = find_peaks(combined_flux,
                                              prominence=0.001,
                                              width=15)
                troughs, trough_info = find_peaks(-combined_flux,
                                                  prominence=-0.001,
                                                  width=15)
                flux_peaks = combined_flux[peaks]
                flux_troughs = combined_flux[troughs]
                amplitude_peaks = ((flux_peaks[0] - 1) +
                                   (1 - flux_troughs[0])) / 2
                print("Absolute amplitude of main variability = {}".format(
                    amplitude_peaks))
                peak_location_fig = plt.figure()
                plt.scatter(lc_30min.time, combined_flux, s=2, c='k')
                plt.plot(lc_30min.time[peaks], combined_flux[peaks], "x")
                plt.plot(lc_30min.time[troughs],
                         combined_flux[troughs],
                         "x",
                         c='r')
                peak_location_fig.show()
                near_peak_or_trough = [False] * len(combined_flux)
                for i in peaks:
                    for j in range(len(lc_30min.time)):
                        if abs(lc_30min.time[j] - lc_30min.time[i]) < 0.1:
                            near_peak_or_trough[j] = True
                for i in troughs:
                    for j in range(len(lc_30min.time)):
                        if abs(lc_30min.time[j] - lc_30min.time[i]) < 0.1:
                            near_peak_or_trough[j] = True
                near_peak_or_trough = np.array(near_peak_or_trough)
                t_cut = lc_30min.time[~near_peak_or_trough]
                flux_cut = combined_flux[~near_peak_or_trough]
                flux_err_cut = lc_30min.flux_err[~near_peak_or_trough]
                # Plot new cut version
                peak_cut_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, c='k', s=2)
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel("Relative flux")
                plt.title(
                    '{} lc after removing peaks/troughs'.format(target_ID))
                ax = plt.gca()
                peak_cut_fig.show()
            else:
                t_cut = lc_30min.time
                flux_cut = combined_flux
                flux_err_cut = lc_30min.flux_err
                print('Flux cut skipped')

            ############################## Apply transit mask #########################
            if transit_mask == True:
                # Hard-coded known transit ephemeris; cadences near the
                # transit are replaced by a quadratic interpolation over
                # the gap so they don't bias the detrending.
                period = 8.138
                epoch = 1332.31
                duration = 0.15
                phase = np.mod(t_cut - epoch - period / 2, period) / period
                near_transit = [False] * len(flux_cut)
                for i in range(len(t_cut)):
                    if abs(phase[i] - 0.5) < duration / period:
                        near_transit[i] = True
                near_transit = np.array(near_transit)
                t_masked = t_cut[~near_transit]
                flux_masked = flux_cut[~near_transit]
                flux_err_masked = flux_err_cut[~near_transit]
                t_new = t_cut[near_transit]
                f = interpolate.interp1d(t_masked,
                                         flux_masked,
                                         kind='quadratic')
                flux_new = f(t_new)
                interpolated_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, s=2, c='k')
                plt.scatter(t_new, flux_new, s=2, c='r')
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                t_transit_mask = np.concatenate((t_masked, t_new), axis=None)
                flux_transit_mask = np.concatenate((flux_masked, flux_new),
                                                   axis=None)
                sorted_order = np.argsort(t_transit_mask)
                t_transit_mask = t_transit_mask[sorted_order]
                flux_transit_mask = flux_transit_mask[sorted_order]

            ############################## LOWESS detrending ##############################
            # Full lc
            if detrending == 'lowess_full':
                full_lowess_flux = np.array([])
                if transit_mask == True:
                    lowess = sm.nonparametric.lowess(flux_transit_mask,
                                                     t_transit_mask,
                                                     frac=0.03)
                else:
                    lowess = sm.nonparametric.lowess(flux_cut,
                                                     t_cut,
                                                     frac=0.03)
                overplotted_lowess_full_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, c='k', s=2)
                plt.plot(lowess[:, 0], lowess[:, 1])
                plt.title(
                    '{} lc with overplotted lowess full lc detrending'.format(
                        target_ID))
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                plt.show()
                # Divide out the LOWESS trend.
                residual_flux_lowess = flux_cut / lowess[:, 1]
                full_lowess_flux = np.concatenate(
                    (full_lowess_flux, lowess[:, 1]))
                lowess_full_residuals_fig = plt.figure()
                plt.scatter(t_cut, residual_flux_lowess, c='k', s=2)
                plt.title(
                    '{} lc after lowess full lc detrending'.format(target_ID))
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                ax = plt.gca()
                plt.show()

            # Partial lc
            if detrending == 'lowess_partial':
                # Detrend each contiguous segment (split on gaps > 0.1 d)
                # separately, then detrend the final segment after the loop.
                time_diff = np.diff(t_cut)
                residual_flux_lowess = np.array([])
                time_from_lowess_detrend = np.array([])
                full_lowess_flux = np.array([])
                overplotted_detrending_fig = plt.figure()
                plt.scatter(t_cut, flux_cut, c='k', s=2)
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel("Normalized flux")
                plt.title(
                    '{} lc with overplotted detrending'.format(target_ID))
                low_bound = 0
                # NOTE(review): this overwrites the ``n_bins`` parameter for
                # the 2min pipeline.
                if pipeline == '2min':
                    n_bins = 450
                else:
                    n_bins = n_bins
                for i in range(len(t_cut) - 1):
                    if time_diff[i] > 0.1:
                        high_bound = i + 1
                        t_section = t_cut[low_bound:high_bound]
                        flux_section = flux_cut[low_bound:high_bound]
                        if len(t_section) >= n_bins:
                            if transit_mask == True:
                                lowess = sm.nonparametric.lowess(
                                    flux_transit_mask[low_bound:high_bound],
                                    t_transit_mask[low_bound:high_bound],
                                    frac=n_bins / len(t_section))
                            else:
                                lowess = sm.nonparametric.lowess(
                                    flux_section,
                                    t_section,
                                    frac=n_bins / len(t_section))
                            lowess_flux_section = lowess[:, 1]
                            plt.plot(t_section, lowess_flux_section, '-')
                            residuals_section = flux_section / lowess_flux_section
                            residual_flux_lowess = np.concatenate(
                                (residual_flux_lowess, residuals_section))
                            time_from_lowess_detrend = np.concatenate(
                                (time_from_lowess_detrend, t_section))
                            full_lowess_flux = np.concatenate(
                                (full_lowess_flux, lowess_flux_section))
                            low_bound = high_bound
                        else:
                            # Segment too short for the requested smoothing.
                            print('LOWESS skipped one gap at {}'.format(
                                t_section[-1]))
                # Carries out same process for final line (up to end of data)
                high_bound = len(t_cut)
                t_section = t_cut[low_bound:high_bound]
                flux_section = flux_cut[low_bound:high_bound]
                if transit_mask == True:
                    lowess = sm.nonparametric.lowess(
                        flux_transit_mask[low_bound:high_bound],
                        t_transit_mask[low_bound:high_bound],
                        frac=n_bins / len(t_section))
                else:
                    lowess = sm.nonparametric.lowess(flux_section,
                                                     t_section,
                                                     frac=n_bins /
                                                     len(t_section))
                lowess_flux_section = lowess[:, 1]
                plt.plot(t_section, lowess_flux_section, '-')
                overplotted_detrending_fig.show()
                residuals_section = flux_section / lowess_flux_section
                residual_flux_lowess = np.concatenate(
                    (residual_flux_lowess, residuals_section))
                time_from_lowess_detrend = np.concatenate(
                    (time_from_lowess_detrend, t_section))
                full_lowess_flux = np.concatenate(
                    (full_lowess_flux, lowess_flux_section))
                residuals_after_lowess_fig = plt.figure()
                plt.scatter(time_from_lowess_detrend,
                            residual_flux_lowess,
                            c='k',
                            s=2)
                plt.title('{} lc after LOWESS partial lc detrending'.format(
                    target_ID))
                plt.xlabel('Time - 2457000 [BTJD days]')
                plt.ylabel('Relative flux')
                residuals_after_lowess_fig.show()

            # ###################### Periodogram Construction ##################
            # Create periodogram
            durations = np.linspace(0.05, 1, 22) * u.day
            if detrending == 'lowess_full' or detrending == 'lowess_partial':
                BLS_flux = residual_flux_lowess
            else:
                BLS_flux = combined_flux
            model = BoxLeastSquares(t_cut * u.day, BLS_flux)
            results = model.autopower(durations,
                                      minimum_n_transit=3,
                                      frequency_factor=1.0)

            # Find the period and epoch of the peak
            index = np.argmax(results.power)
            period = results.period[index]
            t0 = results.transit_time[index]
            duration = results.duration[index]
            transit_info = model.compute_stats(period, duration, t0)
            print(transit_info)
            epoch = transit_info['transit_times'][0]

            periodogram_fig, ax = plt.subplots(1, 1)
            # Highlight the harmonics of the peak period
            ax.axvline(period.value, alpha=0.4, lw=3)
            for n in range(2, 10):
                ax.axvline(n * period.value,
                           alpha=0.4,
                           lw=1,
                           linestyle="dashed")
                ax.axvline(period.value / n,
                           alpha=0.4,
                           lw=1,
                           linestyle="dashed")
            # Plot and save the periodogram
            ax.plot(results.period, results.power, "k", lw=0.5)
            ax.set_xlim(results.period.min().value,
                        results.period.max().value)
            ax.set_xlabel("period [days]")
            ax.set_ylabel("log likelihood")
            ax.set_title('{} - BLS Periodogram after {} detrending'.format(
                target_ID, detrending))
            periodogram_fig.show()

            ################################## Phase folding ##########################
            # Find indices of 2nd and 3rd peaks of periodogram
            all_peaks = scipy.signal.find_peaks(results.power,
                                                width=5,
                                                distance=10)[0]
            all_peak_powers = results.power[all_peaks]
            sorted_power_indices = np.argsort(all_peak_powers)
            sorted_peak_powers = all_peak_powers[sorted_power_indices]

            # Find info for 2nd largest peak in periodogram
            index_peak_2 = np.where(results.power == sorted_peak_powers[-2])[0]
            period_2 = results.period[index_peak_2[0]]
            t0_2 = results.transit_time[index_peak_2[0]]

            # Find info for 3rd largest peak in periodogram
            index_peak_3 = np.where(results.power == sorted_peak_powers[-3])[0]
            period_3 = results.period[index_peak_3[0]]
            t0_3 = results.transit_time[index_peak_3[0]]

            phase_fold_plot(
                t_cut, BLS_flux, period.value, t0.value, target_ID, save_path,
                '{} {} residuals folded by Periodogram Max ({:.3f} days)'.
                format(target_ID, detrending, period.value))
            period_to_test = p_rot
            t0_to_test = 1332
            period_to_test2 = period_2.value
            t0_to_test2 = t0_2.value
            period_to_test3 = period_3.value
            t0_to_test3 = t0_3.value
            phase_fold_plot(
                t_cut, BLS_flux, p_rot, t0_to_test, target_ID, save_path,
                '{} folded by rotation period ({} days)'.format(
                    target_ID, period_to_test))
            phase_fold_plot(
                t_cut, BLS_flux, period_to_test2, t0_to_test2, target_ID,
                save_path,
                '{} detrended lc folded by 2nd largest peak ({:0.4} days)'.
                format(target_ID, period_to_test2))
            phase_fold_plot(
                t_cut, BLS_flux, period_to_test3, t0_to_test3, target_ID,
                save_path,
                '{} detrended lc folded by 3rd largest peak ({:0.4} days)'.
                format(target_ID, period_to_test3))

            ############################# Eyeballing ##############################
            """
            Generate 2 x 2 eyeballing plot
            """
            eye_balling_fig, axs = plt.subplots(2,
                                                2,
                                                figsize=(16, 10),
                                                dpi=120)

            # Original DIA with injected transits setup
            axs[0, 0].scatter(lc_30min.time, combined_flux, s=1, c='k')
            axs[0, 0].set_ylabel('Normalized Flux')
            axs[0, 0].set_xlabel('Time')
            axs[0, 0].set_title('{} - {} light curve'.format(target_ID, 'DIA'))

            # Detrended figure setup
            axs[0, 1].scatter(t_cut,
                              BLS_flux,
                              c='k',
                              s=1,
                              label='{} residuals after {} detrending'.format(
                                  target_ID, detrending))
            axs[0, 1].set_title(
                '{} residuals after {} detrending - Sectors 14-18'.format(
                    target_ID, detrending))
            axs[0, 1].set_ylabel('Normalized Flux')
            axs[0, 1].set_xlabel('Time - 2457000 [BTJD days]')

            # Periodogram setup
            axs[1, 0].plot(results.period, results.power, "k", lw=0.5)
            axs[1, 0].set_xlim(results.period.min().value,
                               results.period.max().value)
            axs[1, 0].set_xlabel("period [days]")
            axs[1, 0].set_ylabel("log likelihood")
            axs[1, 0].set_title(
                '{} - BLS Periodogram of residuals'.format(target_ID))
            axs[1, 0].axvline(period.value, alpha=0.4, lw=3)
            for n in range(2, 10):
                axs[1, 0].axvline(n * period.value,
                                  alpha=0.4,
                                  lw=1,
                                  linestyle="dashed")
                axs[1, 0].axvline(period.value / n,
                                  alpha=0.4,
                                  lw=1,
                                  linestyle="dashed")

            # Folded or zoomed plot setup
            # NOTE(review): ``epoch`` and ``period`` are rebound to plain
            # floats here; this is also what the function returns.
            epoch = t0.value
            period = period.value
            phase = np.mod(t_cut - epoch - period / 2, period) / period
            axs[1, 1].scatter(phase, BLS_flux, c='k', s=1)
            axs[1, 1].set_title('{} Lightcurve folded by {:0.4} days'.format(
                target_ID, period))
            axs[1, 1].set_xlabel('Phase')
            axs[1, 1].set_ylabel('Normalized Flux')

            eye_balling_fig.tight_layout()
            plt.show()

        except RuntimeError:
            print('No DiffImage lc exists for {}'.format(target_ID))
        # NOTE(review): bare except hides all other failures for a target.
        except:
            print('Some other error for {}'.format(target_ID))

    # Values from the LAST target processed (see docstring caveat).
    return t_cut, BLS_flux, phase, epoch, period
plt.plot(freq, power, c='k', linewidth=1) plt.xlabel('Frequency') plt.ylabel('Power') plt.title( '{} LombScargle Periodogram for original lc'.format(target_ID)) #ls_plot.show(block=True) # ls_fig.savefig(save_path + '{} - Lomb-Sacrgle Periodogram for original lc.png'.format(target_ID)) # plt.close(ls_fig) i = np.argmax(power) freq_rot = freq[i] p_rot = 1 / freq_rot print('Rotation Period = {:.3f}d'.format(p_rot)) # From BLS durations = np.linspace(0.05, 1, 22) * u.day model = BoxLeastSquares(lc_30min.time * u.day, normalized_flux) # model = BLS(lc_30min.time*u.day, BLS_flux) results = model.autopower(durations, frequency_factor=1.0) rot_index = np.argmax(results.power) rot_period = results.period[rot_index] rot_t0 = results.transit_time[rot_index] print("Rotation Period from BLS of original = {}d".format(rot_period)) ########################### batman stuff ###################################### if injected_planet != False: # type_of_planet = 'Hot Jupiter' # stellar_type = 'F or G' params = batman.TransitParams( ) #object to store transit parameters params.t0 = -4.5 #time of inferior conjunction params.per = 8.0
def from_lightcurve(lc, **kwargs):
    """Creates a Periodogram from a LightCurve using the Box Least Squares (BLS) method.

    Parameters
    ----------
    lc : LightCurve
        Light curve to search; NaNs are removed before the search because
        BoxLeastSquares cannot handle them.
    **kwargs :
        Recognized keys are popped and validated here: ``duration``
        (default 0.25), ``period``, ``minimum_period``, ``maximum_period``,
        ``time_unit`` (default ``"day"``), ``frequency_factor``
        (default 10). Anything left over is forwarded verbatim to
        ``BoxLeastSquares.power``.

    Returns
    -------
    BoxLeastSquaresPeriodogram
        Periodogram object wrapping the raw BLS result.

    Raises
    ------
    ImportError
        If AstroPy is older than v3.1 (no BoxLeastSquares available).
    ValueError
        If ``duration`` or ``period`` contain nan/inf, ``time_unit`` is not
        a valid astropy unit name, or the implied period grid exceeds 1e7
        points.
    """
    # BoxLeastSquares was added to `astropy.stats` in AstroPy v3.1 and then
    # moved to `astropy.timeseries` in v3.2, which makes the import below
    # somewhat complicated.
    try:
        from astropy.timeseries import BoxLeastSquares
    except ImportError:
        try:
            from astropy.stats import BoxLeastSquares
        except ImportError:
            raise ImportError("BLS requires AstroPy v3.1 or later")

    # Validate user input for `lc`
    # (BoxLeastSquares will not work if flux or flux_err contain NaNs)
    lc = lc.remove_nans()
    # Only pass uncertainties to BLS when they are all finite; otherwise run
    # the unweighted search.
    if np.isfinite(lc.flux_err).all():
        dy = lc.flux_err
    else:
        dy = None

    # Validate user input for `duration`
    duration = kwargs.pop("duration", 0.25)
    # NOTE: `~` is numpy bitwise-not on the np.bool_ returned by np.all here.
    if duration is not None and ~np.all(np.isfinite(duration)):
        raise ValueError("`duration` parameter contains illegal nan or inf value(s)")

    # Validate user input for `period`
    period = kwargs.pop("period", None)
    minimum_period = kwargs.pop("minimum_period", None)
    maximum_period = kwargs.pop("maximum_period", None)
    if period is not None and ~np.all(np.isfinite(period)):
        raise ValueError("`period` parameter contains illegal nan or inf value(s)")
    if minimum_period is None:
        if period is None:
            # Heuristic lower bound: the larger of 4 cadences and
            # (longest duration + 1 cadence), so a transit is resolvable.
            minimum_period = np.max([np.median(np.diff(lc.time)) * 4,
                                     np.max(duration) + np.median(np.diff(lc.time))])
        else:
            minimum_period = np.min(period)
    if maximum_period is None:
        if period is None:
            # Require at least ~3 transits across the baseline.
            maximum_period = (np.max(lc.time) - np.min(lc.time)) / 3.
        else:
            maximum_period = np.max(period)

    # Validate user input for `time_unit`
    # (dir(u) lists all unit names defined in astropy.units)
    time_unit = (kwargs.pop("time_unit", "day"))
    if time_unit not in dir(u):
        raise ValueError('{} is not a valid value for `time_unit`'.format(time_unit))

    # Validate user input for `frequency_factor`
    # Estimate the number of points in the frequency grid and refuse / warn
    # when the periodogram would be prohibitively large.
    frequency_factor = kwargs.pop("frequency_factor", 10)
    df = frequency_factor * np.min(duration) / (np.max(lc.time) - np.min(lc.time))**2
    npoints = int(((1/minimum_period) - (1/maximum_period))/df)
    if npoints > 1e7:
        raise ValueError('`period` contains {} points.'
                         'Periodogram is too large to evaluate. '
                         'Consider setting `frequency_factor` to a higher value.'
                         ''.format(np.round(npoints, 4)))
    elif npoints > 1e5:
        log.warning('`period` contains {} points.'
                    'Periodogram is likely to be large, and slow to evaluate. '
                    'Consider setting `frequency_factor` to a higher value.'
                    ''.format(np.round(npoints, 4)))

    # Create BLS object and run the BLS search
    bls = BoxLeastSquares(lc.time, lc.flux, dy)
    if period is None:
        period = bls.autoperiod(duration,
                                minimum_period=minimum_period,
                                maximum_period=maximum_period,
                                frequency_factor=frequency_factor)
    result = bls.power(period, duration, **kwargs)
    # Older astropy versions may return plain ndarrays; coerce the key
    # fields to Quantity so downstream code can rely on units.
    if not isinstance(result.period, u.quantity.Quantity):
        result.period = u.Quantity(result.period, time_unit)
    if not isinstance(result.power, u.quantity.Quantity):
        result.power = result.power * u.dimensionless_unscaled
    if not isinstance(result.duration, u.quantity.Quantity):
        result.duration = u.Quantity(result.duration, time_unit)

    return BoxLeastSquaresPeriodogram(frequency=1. / result.period,
                                      power=result.power,
                                      default_view='period',
                                      label=lc.label,
                                      targetid=lc.targetid,
                                      transit_time=result.transit_time,
                                      duration=result.duration,
                                      depth=result.depth,
                                      bls_result=result,
                                      snr=result.depth_snr,
                                      bls_obj=bls,
                                      time=lc.time,
                                      flux=lc.flux,
                                      time_unit=time_unit)
model = TLS(lc.time.value, lc.flux, lc.flux_err) result = model.power(n_transits_min=1, period_min=args.min_period, period_max=args.max_period, use_threads=args.ncpu, show_progress_bar=True) period = result.period t0 = result.T0 dur = result.duration depth = result.depth periods, power = result.periods, result.power elif args.method == 'BLS': model = BoxLeastSquares(lc.time.value, lc.flux, dy=lc.flux_err) result = model.autopower(0.15) periods, power = result.period, result.power idx = np.argmax(power) period = periods[idx] t0 = result.transit_time[idx] dur = result.duration[idx] depth = result.depth[idx] #if i==1: # period *= 2 2458339.018159 phase = (lc.time.value - t0 + 0.5 * period) % period - 0.5 * period fph = (lc.time.value - t0 + 0.5 * period) % period - 0.5 * period
def find_and_mask_transits(time, flux, flux_err, periods, durations,
                           nplanets=1, plot=False):
    """
    Iteratively find and mask transits in the flattened light curve.

    Args:
        time (array): The time array.
        flux (array): The flux array. You'll get the best results if this is
            flattened.
        flux_err (array): The array of flux uncertainties.
        periods (array): Retained for backward compatibility but currently
            unused: the period grid is generated internally by BLS
            ``autopower``. For example, periods = np.linspace(0.5, 20, 10)
        durations (array): The array of durations to search over for BLS.
            For example, durations = np.linspace(0.05, 0.2, 10)
        nplanets (Optional[int]): The number of planets you'd like to search
            for. This function will iteratively find and remove nplanets.
            Default is 1.
        plot (Optional[bool]): If True, plot the BLS periodogram at each
            iteration. Default is False.

    Returns:
        transit_masks (list): a list of masks that correspond to the in
            transit points of each light curve. To mask out transits do
            time[~transit_masks[index]], etc.
        t0s (array): best-fit transit epoch per planet.
        durs (array): best-fit transit duration per planet.
        porbs (array): best-fit orbital period per planet.
    """
    # Work on copies so the caller's arrays are never mutated.
    _time, _flux, _flux_err = time * 1, flux * 1, flux_err * 1
    t0s, durs, porbs = [np.zeros(nplanets) for i in range(3)]
    transit_masks = []
    for i in range(nplanets):
        bls = BoxLeastSquares(t=_time, y=_flux, dy=_flux_err)
        # Run the BLS search on an automatically generated period grid.
        # (Previously a redundant `bls.power(periods, durations)` and an
        # unused `bls.autoperiod(...)` were computed here and discarded.)
        results = bls.autopower(durations, frequency_factor=5.0)

        # Extract the parameters of the best-fit model at the periodogram peak.
        index = np.argmax(results.power)
        period = results.period[index]
        porbs[i] = period
        t0s[i] = results.transit_time[index]
        durs[i] = results.duration[index]

        if plot:
            # Plot the periodogram, highlighting the peak and its harmonics.
            fig, ax = plt.subplots(1, 1, figsize=(10, 5))
            ax.plot(results.period, results.power, "k", lw=0.5)
            ax.set_xlim(results.period.min(), results.period.max())
            ax.set_xlabel("period [days]")
            ax.set_ylabel("log likelihood")
            ax.axvline(period, alpha=0.4, lw=4)
            for n in range(2, 10):
                ax.axvline(n * period, alpha=0.4, lw=1, linestyle="dashed")
                ax.axvline(period / n, alpha=0.4, lw=1, linestyle="dashed")

        # Mask out the detected transit before searching for the next planet.
        in_transit = bls.transit_mask(_time, porbs[i], durs[i], t0s[i])
        transit_masks.append(in_transit)
        _time, _flux, _flux_err = _time[~in_transit], _flux[~in_transit], \
            _flux_err[~in_transit]
    return transit_masks, t0s, durs, porbs
def bls_estimator(
    x,
    y,
    yerr=None,
    duration=0.2,
    min_period=None,
    max_period=None,
    objective=None,
    method=None,
    oversample=10,
    **kwargs,
):
    """Estimate the period of a time series using box least squares

    All extra keyword arguments are passed directly to
    :func:`astropy.timeseries.BoxLeastSquares.autopower`.

    Args:
        x (ndarray[N]): The times of the observations
        y (ndarray[N]): The observations at times ``x``
        yerr (Optional[ndarray[N]]): The uncertainties on ``y``
        duration (Optional[float]): The transit duration to search with
        min_period (Optional[float]): The minimum period to consider
        max_period (Optional[float]): The maximum period to consider
        objective, method, oversample: Forwarded to ``autopower``

    Returns:
        A dictionary with the computed autocorrelation function and the
        estimated period. For compatibility with the
        :func:`lomb_scargle_estimator`, the period is returned as a list
        with the key ``peaks``.
    """
    # An explicit `minimum_period`/`maximum_period` kwarg wins; otherwise
    # fall back to the min_period/max_period arguments.
    # BUGFIX: this previously read kwargs.get("minimim_period", ...) — the
    # typo key never exists, so a caller-supplied `minimum_period` kwarg was
    # silently clobbered by min_period (usually None).
    kwargs["minimum_period"] = kwargs.get("minimum_period", min_period)
    kwargs["maximum_period"] = kwargs.get("maximum_period", max_period)

    # Center the times on the midpoint of the baseline; the offset is added
    # back to the transit times below.
    x_ref = 0.5 * (np.min(x) + np.max(x))
    bls = BoxLeastSquares(x - x_ref, y, yerr)

    # Estimate the frequency factor to not be insanely slow: keep doubling
    # it until the period grid is no larger than the data.
    if "frequency_factor" not in kwargs:
        kwargs["frequency_factor"] = 1.0
    periods = bls.autoperiod(duration, **kwargs)
    while len(periods) > len(x):
        kwargs["frequency_factor"] *= 2
        periods = bls.autoperiod(duration, **kwargs)

    # Compute the periodogram
    pg = bls.autopower(
        duration,
        objective=objective,
        method=method,
        oversample=oversample,
        **kwargs,
    )

    # Correct for the reference time offset
    pg.transit_time += x_ref

    # Find the peak
    peaks = find_peaks(1 / pg.period, pg.power, max_peaks=1)
    results = dict(bls=pg, peaks=peaks, peak_info=None)
    if not len(peaks):
        return results

    # Extract the relevant information at the peak
    ind = peaks[0]["index"]
    results["peak_info"] = dict(
        (k, v[ind]) for k, v in pg.items() if k != "objective")
    return results
def ffi_lowess_detrend( save_path='/Users/mbattley/Documents/PhD/New detrending methods/Smoothing/lowess/QLP lcs/', sector=1, target_ID_list=[], pipeline='2min', multi_sector=False, use_TESSflatten=False, use_peak_cut=False, binned=False, transit_mask=False, injected_planet='user_defined', injected_rp=0.1, injected_per=8.0, detrending='lowess_partial', single_target_ID=['HIP 1113'], n_bins=30, filename=''): try: lc_30min = lightkurve.lightcurve.TessLightCurve(time=[], flux=[]) if multi_sector != False: sap_lc, pdcsap_lc = two_min_lc_download(target_ID, sector=multi_sector[0], from_file=False) lc_30min = pdcsap_lc nancut = np.isnan(lc_30min.flux) | np.isnan(lc_30min.time) lc_30min = lc_30min[~nancut] clean_time, clean_flux, clean_flux_err = clean_tess_lc( lc_30min.time, lc_30min.flux, lc_30min.flux_err, target_ID, multi_sector[0], save_path) lc_30min.time = clean_time lc_30min.flux = clean_flux lc_30min.flux_err = clean_flux_err for sector_num in multi_sector[1:]: sap_lc_new, pdcsap_lc_new = two_min_lc_download( target_ID, sector_num, from_file=False) lc_30min_new = pdcsap_lc_new nancut = np.isnan(lc_30min_new.flux) | np.isnan( lc_30min_new.time) lc_30min_new = lc_30min_new[~nancut] clean_time, clean_flux, clean_flux_err = clean_tess_lc( lc_30min_new.time, lc_30min_new.flux, lc_30min_new.flux_err, target_ID, sector_num, save_path) lc_30min_new.time = clean_time lc_30min_new.flux = clean_flux lc_30min_new.flux_err = clean_flux_err lc_30min = lc_30min.append(lc_30min_new) # lc_30min.flux = lc_30min.flux.append(lc_30min_new.flux) # lc_30min.time = lc_30min.time.append(lc_30min_new.time) # lc_30min.flux_err = lc_30min.flux_err.append(lc_30min_new.flux_err) # nancut = np.isnan(lc_30min.flux) | np.isnan(lc_30min.time) # lc_30min = lc_30min[~nancut] else: try: # if pipeline == 'DIA': # lc_30min, filename = diff_image_lc_download(target_ID, sector, plot_lc = True, save_path = save_path, from_file = True) # elif pipeline == '2min': # sap_lc, pdcsap_lc = 
two_min_lc_download(target_ID, sector = sector, from_file = False) # lc_30min = pdcsap_lc # nancut = np.isnan(lc_30min.flux) | np.isnan(lc_30min.time) # lc_30min = lc_30min[~nancut] # elif pipeline == 'eleanor': # raw_lc, corr_lc, pca_lc = eleanor_lc_download(target_ID, sector, from_file = False, save_path = save_path, plot_pca = False) # lc_30min = pca_lc # elif pipeline == 'from_file': ## sap_lc, pdcsap_lc = two_min_lc_download(target_ID, sector = sector, from_file = False) ## lcf = lightkurve.open('tess2019140104343-s0012-0000000212461524-0144-s_lc.fits') ## lc_30min = lcf.PDCSAP_FLUX # #filename = 'tess2019247000000-0000000224225541-111-cr_llc.fits' # filename = 'tess2019247000000-0000000146520535-111-cr_llc.fits' # lc_30min, kspsap_flux = get_lc_from_fits(filename) # elif pipeline == 'from_pickle': # with open('Original_time.pkl','rb') as f: # original_time = pickle.load(f) # with open('Original_flux.pkl','rb') as f: # original_flux = pickle.load(f) # lc_30min = lightkurve.lightcurve.TessLightCurve(time = original_time,flux=original_flux) # elif pipeline == 'raw': # lc_30min = raw_FFI_lc_download(target_ID, sector, plot_tpf = False, plot_lc = True, save_path = save_path, from_file = False) if pipeline == 'CDIPS': lc_30min, target_ID, sector = get_lc_from_fits( filename, source=pipeline, save_path=save_path) print(target_ID) # elif pipeline == 'QLP': # lc_30min, kspsap_flux = get_lc_from_fits(filename, source = pipeline) else: print('Invalid pipeline') except: print('Lightcurve for {} not available'.format(target_ID)) # try: # raw_lc, corr_lc, pca_lc = eleanor_lc_download(target_ID, sector, from_file = False, save_path = save_path, plot_pca = False) # lc_30min = pca_lc # pipeline = 'eleanor' # except RuntimeError: # print('Lightcurve for {} not available'.format(target_ID)) # sap_lc, pdcsap_lc = two_min_lc_download(target_ID, sector) # lc_30min = pdcsap_lc # pipeline = '2min' ################### Clean TESS lc pointing systematics ######################## if 
multi_sector == False: clean_time, clean_flux, clean_flux_err = clean_tess_lc( lc_30min.time, lc_30min.flux, lc_30min.flux_err, target_ID, sector, save_path) lc_30min.time = clean_time lc_30min.flux = clean_flux lc_30min.flux_err = clean_flux_err ######################### Find rotation period ################################ # normalized_flux = np.array(lc_30min.flux)/np.median(lc_30min.flux) normalized_flux = lc_30min.flux # # From Lomb-Scargle freq = np.arange(0.04, 4.1, 0.00001) power = LombScargle(lc_30min.time, normalized_flux).power(freq) ls_fig = plt.figure() plt.plot(freq, power, c='k', linewidth=1) plt.xlabel('Frequency') plt.ylabel('Power') plt.title( '{} LombScargle Periodogram for original lc'.format(target_ID)) #ls_plot.show(block=True) # ls_fig.savefig(save_path + '{} - Lomb-Sacrgle Periodogram for original lc.png'.format(target_ID)) plt.close(ls_fig) i = np.argmax(power) freq_rot = freq[i] p_rot = 1 / freq_rot print('Rotation Period = {:.3f}d'.format(p_rot)) # # # From BLS # durations = np.linspace(0.05, 1, 22) * u.day # model = BoxLeastSquares(lc_30min.time*u.day, normalized_flux) ## model = BLS(lc_30min.time*u.day, BLS_flux) # results = model.autopower(durations, frequency_factor=1.0) # rot_index = np.argmax(results.power) # rot_period = results.period[rot_index] # rot_t0 = results.transit_time[rot_index] # print("Rotation Period from BLS of original = {}d".format(rot_period)) ########################### batman stuff ###################################### # if injected_planet != False: # # type_of_planet = 'Hot Jupiter' # # stellar_type = 'F or G' # params = batman.TransitParams() #object to store transit parameters # params.t0 = -10.0 #time of inferior conjunction # params.per = 8.0 # params.rp = 0.1 # table_data = Table.read("BANYAN_XI-III_members_with_TIC.csv" , format='ascii.csv') # i = list(table_data['main_id']).index(target_ID) # m_star = table_data['Stellar Mass'][i]*m_Sun # r_star = table_data['Stellar Radius'][i]*r_Sun*1000 # params.a = 
(((G*m_star*(params.per*86400.)**2)/(4.*(np.pi**2)))**(1./3))/r_star # if np.isnan(params.a) == True: # #For a: 25 for 10d; 17 for 8d; 10 for 4d; 4-8 (6) for 2 day; 2-5 for 1d; 1-3 (or 8?) for 0.5d # params.a = 17. #semi-major axis (in units of stellar radii) # params.inc = 90. # params.ecc = 0. # params.w = 90. #longitude of periastron (in degrees) # params.limb_dark = "nonlinear" #limb darkening model # params.u = [0.5, 0.1, 0.1, -0.1] #limb darkening coefficients [u1, u2, u3, u4] # # if injected_planet == 'user_defined': # # Build planet from user specified parameters # params.per = injected_per #orbital period (days) - try 0.5, 1, 2, 4, 8 & 10d periods # params.rp = injected_rp #planet radius (in units of stellar radii) - Try between 0.01 and 0.1 (F/G) or 0.025 to 0.18 (K/M) # params.a = (((G*m_star*(params.per*86400.)**2)/(4.*(np.pi**2)))**(1./3))/r_star # if np.isnan(params.a) == True: # params.a = 17 # Recalculates a if period has changed # params.inc = 90. #orbital inclination (in degrees) # params.ecc = 0. #eccentricity # # elif injected_planet == 'exo_archive': # # Randomly inject planet from exoplanet archive # exoplanet_data = Table.read("Exoplanet Archive Planets for injection.csv" , format='ascii.csv') # pl_index = 760#random.randrange(1,1972,1) # params.per = exoplanet_data['pl_orbper'][pl_index] # params.rp = exoplanet_data['pl_radj'][pl_index]*r_Jup/(exoplanet_data['st_rad'][pl_index]*r_Sun) # params.a = exoplanet_data['pl_orbsmax'][pl_index]*au/(exoplanet_data['st_rad'][pl_index]*r_Sun) # if not np.isnan(exoplanet_data['pl_orbincl'][pl_index]): # params.inc = exoplanet_data['pl_orbincl'][pl_index] # if not np.isnan(exoplanet_data['pl_orbeccen'][pl_index]): # params.ecc = exoplanet_data['pl_orbeccen'][pl_index] # # elif injected_planet == 'set_period': # params.per = 8.0 # params.rp = random.uniform(0,0.2) # params.a = 17. # params.inc = 90. # params.ecc = 0. 
# # elif injected_planet == 'set_depth': # params.per = random.uniform(0.15,13.5) # params.rp = 0.05 # params.a = 17. # params.inc = 90. # params.ecc = 0. # else: # raise NameError('Invalid inputfor injected planet') # # # Defines times at which to calculate lc and models batman lc # t = np.linspace(-13.9165035, 13.9165035, len(lc_30min.time)) # index = int(len(lc_30min.time)//2) # mid_point = lc_30min.time[index] # t = lc_30min.time - lc_30min.time[index] # m = batman.TransitModel(params, t) # t += lc_30min.time[index] # # print("About to compute flux") # batman_flux = m.light_curve(params) # # print("Computed flux") # batman_model_fig = plt.figure() # plt.scatter(lc_30min.time, batman_flux, s = 2, c = 'k') # plt.xlabel("Time - 2457000 (BTJD days)") # plt.ylabel("Relative flux") # plt.title("batman model transit for {}R ratio".format(params.rp)) # #batman_model_fig.savefig(save_path + "batman model transit for {}d {}R planet.png".format(params.per,params.rp)) # #plt.close(batman_model_fig) # plt.show() ################################# Combining ################################### # combined_flux = np.array(lc_30min.flux)/np.median(lc_30min.flux) + batman_flux -1 # injected_transit_fig = plt.figure() # plt.scatter(lc_30min.time, combined_flux, s = 2, c = 'k') # plt.xlabel("Time - 2457000 (BTJD days)") # plt.ylabel("Relative flux") # # plt.title("{} with injected transits for a {} around a {} Star.".format(target_ID, type_of_planet, stellar_type)) # plt.title("{} with injected transits for a {}R {}d planet to star ratio.".format(target_ID, params.rp, params.per)) # ax = plt.gca() # for n in range(int(-1*8/params.per),int(2*8/params.per+2)): # ax.axvline(params.t0+n*params.per+mid_point, ymin = 0.1, ymax = 0.2, lw=1, c = 'r') # ax.axvline(params.t0+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') # ax.axvline(params.t0+params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') # ax.axvline(params.t0+2*params.per+lc_30min.time[index], ymin = 
0.1, ymax = 0.2, lw=1, c = 'r') # #ax.axvline(params.t0-params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') ## injected_transit_fig.savefig(save_path + "{} - Injected transits fig - Period {} - {}R transit.png".format(target_ID, params.per, params.rp)) ## plt.close(injected_transit_fig) # plt.show() ############################## Removing peaks ################################# combined_flux = np.array(lc_30min.flux) / np.median(lc_30min.flux) # combined_flux = lc_30min.flux if use_peak_cut == True: peaks, peak_info = find_peaks(combined_flux, prominence=0.001, width=15) #peaks = np.array([64, 381, 649, 964, 1273]) troughs, trough_info = find_peaks(-combined_flux, prominence=-0.001, width=15) #troughs = np.array([211, 530, 795, 1113]) #troughs = np.append(troughs, [370,1031]) #print(troughs) flux_peaks = combined_flux[peaks] flux_troughs = combined_flux[troughs] amplitude_peaks = ((flux_peaks[0] - 1) + (1 - flux_troughs[0])) / 2 print("Absolute amplitude of main variability = {}".format( amplitude_peaks)) peak_location_fig = plt.figure() plt.scatter(lc_30min.time, combined_flux, s=2, c='k') plt.plot(lc_30min.time[peaks], combined_flux[peaks], "x") plt.plot(lc_30min.time[troughs], combined_flux[troughs], "x", c='r') #peak_location_fig.savefig(save_path + "{} - Peak location fig.png".format(target_ID)) peak_location_fig.show() # plt.close(peak_location_fig) near_peak_or_trough = [False] * len(combined_flux) for i in peaks: for j in range(len(lc_30min.time)): if abs(lc_30min.time[j] - lc_30min.time[i]) < 0.1: near_peak_or_trough[j] = True for i in troughs: for j in range(len(lc_30min.time)): if abs(lc_30min.time[j] - lc_30min.time[i]) < 0.1: near_peak_or_trough[j] = True near_peak_or_trough = np.array(near_peak_or_trough) t_cut = lc_30min.time[~near_peak_or_trough] flux_cut = combined_flux[~near_peak_or_trough] flux_err_cut = lc_30min.flux_err[~near_peak_or_trough] # # phase = np.mod(t-t0_rot,p_rot)/p_rot # plt.figure() # plt.scatter(phase,flux, c 
= 'k', s = 2) # near_trough = (phase<0.1/p_rot) | (phase>1-0.1/p_rot) # t_cut_bottom = t[~near_trough] # flux_cut_bottom = combined_flux[~near_trough] # flux_err_cut_bottom = lc_30min.flux_err[~near_trough] # # phase = np.mod(t_cut_bottom-t0_rot,p_rot)/p_rot # near_peak = (phase<0.5+0.1/p_rot) & (phase>0.5-0.1/p_rot) # t_cut = t_cut_bottom[~near_peak] # flux_cut = flux_cut_bottom[~near_peak] # flux_err_cut = flux_err_cut_bottom[~near_peak] # # cut_phase = np.mod(t_cut-t0_rot,p_rot)/p_rot # plt.figure() # plt.scatter(cut_phase, flux_cut, c='k', s=2) # # Plot new cut version peak_cut_fig = plt.figure() plt.scatter(t_cut, flux_cut, c='k', s=2) plt.xlabel('Time - 2457000 [BTJD days]') plt.ylabel("Relative flux") plt.title('{} lc after removing peaks/troughs'.format(target_ID)) ax = plt.gca() #ax.axvline(params.t0+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0+params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0+2*params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0-params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #peak_cut_fig.savefig(save_path + "{} - Peak cut fig.png".format(target_ID)) peak_cut_fig.show() # plt.close(peak_cut_fig) else: t_cut = lc_30min.time flux_cut = combined_flux flux_err_cut = lc_30min.flux_err print('Flux cut skipped') ############################## Apply transit mask ######################### if transit_mask == True: period = 8.138 epoch = 1332.31 duration = 0.15 phase = np.mod(t_cut - epoch - period / 2, period) / period near_transit = [False] * len(flux_cut) for i in range(len(t_cut)): if abs(phase[i] - 0.5) < duration / period: near_transit[i] = True near_transit = np.array(near_transit) t_masked = t_cut[~near_transit] flux_masked = flux_cut[~near_transit] flux_err_masked = flux_err_cut[~near_transit] t_new = t_cut[near_transit] f = interpolate.interp1d(t_masked, flux_masked, kind='quadratic') # f = 
interpolate.BarycentricInterpolator(t_masked,flux_masked) flux_new = f(t_new) interpolated_fig = plt.figure() # plt.scatter(t_masked, flux_masked, s = 2, c = 'k') plt.scatter(t_cut, flux_cut, s=2, c='k') plt.scatter(t_new, flux_new, s=2, c='r') plt.xlabel('Time - 2457000 [BTJD days]') plt.ylabel('Relative flux') # interpolated_fig.savefig(save_path + "{} - Interpolated over transit mask fig.png".format(target_ID)) t_transit_mask = np.concatenate((t_masked, t_new), axis=None) flux_transit_mask = np.concatenate((flux_masked, flux_new), axis=None) sorted_order = np.argsort(t_transit_mask) t_transit_mask = t_transit_mask[sorted_order] flux_transit_mask = flux_transit_mask[sorted_order] ############################## LOWESS detrending ############################## # Full lc if detrending == 'lowess_full': #t_cut = lc_30min.time #flux_cut = combined_flux full_lowess_flux = np.array([]) if transit_mask == True: lowess = sm.nonparametric.lowess(flux_transit_mask, t_transit_mask, frac=0.03) else: lowess = sm.nonparametric.lowess(flux_cut, t_cut, frac=0.03) # number of points = 20 at lowest, or otherwise frac = 20/len(t_section) overplotted_lowess_full_fig = plt.figure() plt.scatter(t_cut, flux_cut, c='k', s=2) plt.plot(lowess[:, 0], lowess[:, 1]) plt.title( '{} lc with overplotted lowess full lc detrending'.format( target_ID)) plt.xlabel('Time - 2457000 [BTJD days]') plt.ylabel('Relative flux') #overplotted_lowess_full_fig.savefig(save_path + "{} lc with overplotted LOWESS full lc detrending.png".format(target_ID)) plt.show() # plt.close(overplotted_lowess_full_fig) residual_flux_lowess = flux_cut / lowess[:, 1] full_lowess_flux = np.concatenate((full_lowess_flux, lowess[:, 1])) lowess_full_residuals_fig = plt.figure() plt.scatter(t_cut, residual_flux_lowess, c='k', s=2) plt.title( '{} lc after lowess full lc detrending'.format(target_ID)) plt.xlabel('Time - 2457000 [BTJD days]') plt.ylabel('Relative flux') ax = plt.gca() #ax.axvline(params.t0+lc_30min.time[index], ymin = 
0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0+params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0+2*params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0-params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') # lowess_full_residuals_fig.savefig(save_path + "{} lc after LOWESS full lc detrending.png".format(target_ID)) plt.show() # plt.close(lowess_full_residuals_fig) # Partial lc if detrending == 'lowess_partial': time_diff = np.diff(t_cut) residual_flux_lowess = np.array([]) time_from_lowess_detrend = np.array([]) full_lowess_flux = np.array([]) overplotted_detrending_fig = plt.figure() plt.scatter(t_cut, flux_cut, c='k', s=2) plt.xlabel('Time - 2457000 [BTJD days]') plt.ylabel("Normalized flux") #plt.title('{} lc with overplotted detrending'.format(target_ID)) low_bound = 0 if pipeline == '2min': n_bins = 450 else: n_bins = n_bins for i in range(len(t_cut) - 1): if time_diff[i] > 0.1: high_bound = i + 1 t_section = t_cut[low_bound:high_bound] flux_section = flux_cut[low_bound:high_bound] # print(t_section) if len(t_section) >= n_bins: if transit_mask == True: lowess = sm.nonparametric.lowess( flux_transit_mask[low_bound:high_bound], t_transit_mask[low_bound:high_bound], frac=n_bins / len(t_section)) else: lowess = sm.nonparametric.lowess(flux_section, t_section, frac=n_bins / len(t_section)) # lowess = sm.nonparametric.lowess(flux_section, t_section, frac=20/len(t_section)) lowess_flux_section = lowess[:, 1] plt.plot(t_section, lowess_flux_section, '-') residuals_section = flux_section / lowess_flux_section residual_flux_lowess = np.concatenate( (residual_flux_lowess, residuals_section)) time_from_lowess_detrend = np.concatenate( (time_from_lowess_detrend, t_section)) full_lowess_flux = np.concatenate( (full_lowess_flux, lowess_flux_section)) low_bound = high_bound else: print('Skipped one gap') # Carries out same process for final line (up to end of data) 
high_bound = len(t_cut) t_section = t_cut[low_bound:high_bound] flux_section = flux_cut[low_bound:high_bound] if transit_mask == True: lowess = sm.nonparametric.lowess( flux_transit_mask[low_bound:high_bound], t_transit_mask[low_bound:high_bound], frac=n_bins / len(t_section)) else: lowess = sm.nonparametric.lowess(flux_section, t_section, frac=n_bins / len(t_section)) # lowess = sm.nonparametric.lowess(flux_section, t_section, frac=20/len(t_section)) lowess_flux_section = lowess[:, 1] plt.plot(t_section, lowess_flux_section, '-') if injected_planet != False: overplotted_detrending_fig.savefig( save_path + "{} - Overplotted lowess detrending - partial lc - {}R {}d injected planet.png" .format(target_ID, params.rp, params.per)) else: overplotted_detrending_fig.savefig( save_path + "{} - Overplotted lowess detrending - partial lc.pdf". format(target_ID)) # overplotted_detrending_fig.show() plt.close(overplotted_detrending_fig) residuals_section = flux_section / lowess_flux_section residual_flux_lowess = np.concatenate( (residual_flux_lowess, residuals_section)) time_from_lowess_detrend = np.concatenate( (time_from_lowess_detrend, t_section)) full_lowess_flux = np.concatenate( (full_lowess_flux, lowess_flux_section)) # t_section = t_cut[83:133] residuals_after_lowess_fig = plt.figure() plt.scatter(time_from_lowess_detrend, residual_flux_lowess, c='k', s=2) plt.title( '{} lc after LOWESS partial lc detrending'.format(target_ID)) plt.xlabel('Time - 2457000 [BTJD days]') plt.ylabel('Relative flux') #ax = plt.gca() #ax.axvline(params.t0+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0+params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0+2*params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') #ax.axvline(params.t0-params.per+lc_30min.time[index], ymin = 0.1, ymax = 0.2, lw=1, c = 'r') if injected_planet != False: residuals_after_lowess_fig.savefig( save_path + "{} lc after LOWESS 
partial lc detrending - {}R {}d injected planet.png" .format(target_ID, params.rp, params.per)) else: residuals_after_lowess_fig.savefig( save_path + "{} lc after LOWESS partial lc detrending.pdf".format( target_ID)) # residuals_after_lowess_fig.show() plt.close(residuals_after_lowess_fig) # ########################## Periodogram Stuff ################################## # Create periodogram durations = np.linspace(0.05, 1, 22) * u.day if detrending == 'lowess_full' or detrending == 'lowess_partial': BLS_flux = residual_flux_lowess else: BLS_flux = combined_flux # with open('Detrended_time.pkl', 'wb') as f: # pickle.dump(t_cut, f, pickle.HIGHEST_PROTOCOL) # with open('Detrended_flux.pkl', 'wb') as f: # pickle.dump(BLS_flux, f, pickle.HIGHEST_PROTOCOL) model = BoxLeastSquares(t_cut * u.day, BLS_flux) #model = BLS(lc_30min.time*u.day,BLS_flux) results = model.autopower(durations, minimum_n_transit=3, frequency_factor=1.0) #results = model.autopower(durations, minimum_n_transit=2,frequency_factor=1.0) # Find the period and epoch of the peak index = np.argmax(results.power) period = results.period[index] #print(results.period) t0 = results.transit_time[index] duration = results.duration[index] transit_info = model.compute_stats(period, duration, t0) print(transit_info) epoch = transit_info['transit_times'][0] # periodogram_fig, ax = plt.subplots(1, 1, figsize=(8, 4)) periodogram_fig, ax = plt.subplots(1, 1) # Highlight the harmonics of the peak period ax.axvline(period.value, alpha=0.4, lw=3) for n in range(2, 10): ax.axvline(n * period.value, alpha=0.4, lw=1, linestyle="dashed") ax.axvline(period.value / n, alpha=0.4, lw=1, linestyle="dashed") # Plot and save the periodogram ax.plot(results.period, results.power, "k", lw=0.5) ax.set_xlim(results.period.min().value, results.period.max().value) ax.set_xlabel("period [days]") ax.set_ylabel("log likelihood") # ax.set_title('{} - BLS Periodogram after {} detrending - {}R {}d injected planet'.format(target_ID, detrending, 
params.rp, params.per)) ax.set_title('{} - BLS Periodogram after {} detrending'.format( target_ID, detrending)) # periodogram_fig.savefig(save_path + '{} - BLS Periodogram after lowess partial detrending - {}R {}d injected planet.png'.format(target_ID, params.rp, params.per)) periodogram_fig.savefig(save_path + '{} - BLS Periodogram after {} detrending.pdf'. format(target_ID, detrending)) plt.close(periodogram_fig) # periodogram_fig.show() ## ################################## Phase folding ########################## # Find indices of 2nd and 3rd peaks of periodogram all_peaks = scipy.signal.find_peaks(results.power, width=5, distance=10)[0] all_peak_powers = results.power[all_peaks] sorted_power_indices = np.argsort(all_peak_powers) sorted_peak_powers = all_peak_powers[sorted_power_indices] # sorted_peak_periods = results.period[sorted_power_indices] # Find info for 2nd largest peak in periodogram index_peak_2 = np.where(results.power == sorted_peak_powers[-2])[0] period_2 = results.period[index_peak_2[0]] t0_2 = results.transit_time[index_peak_2[0]] # Find info for 3rd largest peak in periodogram index_peak_3 = np.where(results.power == sorted_peak_powers[-3])[0] period_3 = results.period[index_peak_3[0]] t0_3 = results.transit_time[index_peak_3[0]] #phase_fold_plot(t_cut, BLS_flux, 8, mid_point+params.t0, target_ID, save_path, '{} with injected 8 day transit folded by transit period - {}R ratio'.format(target_ID, params.rp)) #phase_fold_plot(lc_30min.time, BLS_flux, rot_period.value, rot_t0.value, target_ID, save_path, '{} folded by rotation period'.format(target_ID)) #print('Max BLS Period = {} days, t0 = {}'.format(period.value, t0.value)) phase_fold_plot( t_cut, BLS_flux, period.value, t0.value, target_ID, save_path, '{} {} residuals folded by Periodogram Max ({:.3f} days)'.format( target_ID, detrending, period.value)) # period_to_test = p_rot # t0_to_test = 1332 period_to_test2 = period_2.value t0_to_test2 = t0_2.value period_to_test3 = period_3.value 
t0_to_test3 = t0_3.value # period_to_test4 = 10.26 # t0_to_test4 = 1447.06 # phase_fold_plot(t_cut, BLS_flux, p_rot, t0_to_test, target_ID, save_path, '{} folded by rotation period ({} days)'.format(target_ID,period_to_test)) phase_fold_plot( t_cut, BLS_flux, period_to_test2, t0_to_test2, target_ID, save_path, '{} detrended lc folded by 2nd largest peak ({:0.4} days)'.format( target_ID, period_to_test2)) phase_fold_plot( t_cut, BLS_flux, period_to_test3, t0_to_test3, target_ID, save_path, '{} detrended lc folded by 3rd largest peak ({:0.4} days)'.format( target_ID, period_to_test3)) # phase_fold_plot(t_cut, BLS_flux, period_to_test4, t0_to_test4, target_ID, save_path, '{} detrended lc folded by {:0.4} days'.format(target_ID,period_to_test4)) #print("Absolute amplitude of main variability = {}".format(amplitude_peaks)) #print('Main Variability Period from Lomb-Scargle = {:.3f}d'.format(p_rot)) #print("Main Variability Period from BLS of original = {}".format(rot_period)) #variability_table.add_row([target_ID,p_rot,rot_period,amplitude_peaks]) ############################# Eyeballing ############################## """ Generate 2 x 2 eyeballing plot """ eye_balling_fig, axs = plt.subplots(2, 2, figsize=(16, 10), dpi=120) # Original DIA with injected transits setup axs[0, 0].scatter(lc_30min.time, combined_flux, s=1, c='k') axs[0, 0].set_ylabel('Normalized Flux') axs[0, 0].set_xlabel('Time') axs[0, 0].set_title('{} - {} light curve'.format(target_ID, 'DIA')) #for n in range(int(-1*8/params.per),int(2*8/params.per+2)): # axs[0,0].axvline(params.t0+n*params.per+mid_point, ymin = 0.1, ymax = 0.2, lw=1, c = 'r') # Detrended figure setup axs[0, 1].scatter(t_cut, BLS_flux, c='k', s=1, label='{} residuals after {} detrending'.format( target_ID, detrending)) # axs[0,1].set_title('{} residuals after {} detrending - Sector {}'.format(target_ID, detrending, sector)) axs[0, 1].set_title( '{} residuals after {} detrending - Sectors 14-18'.format( target_ID, detrending)) axs[0, 
1].set_ylabel('Normalized Flux') axs[0, 1].set_xlabel('Time - 2457000 [BTJD days]') # binned_time, binned_flux = bin(t_cut, BLS_flux, binsize=15, method='mean') # axs[0,1].scatter(binned_time, binned_flux, c='r', s=4) #for n in range(int(-1*8/params.per),int(2*8/params.per+2)): # axs[0,1].axvline(params.t0+n*params.per+mid_point, ymin = 0.1, ymax = 0.2, lw=1, c = 'r') # Periodogram setup axs[1, 0].plot(results.period, results.power, "k", lw=0.5) axs[1, 0].set_xlim(results.period.min().value, results.period.max().value) axs[1, 0].set_xlabel("period [days]") axs[1, 0].set_ylabel("log likelihood") axs[1, 0].set_title('{} - BLS Periodogram of residuals'.format(target_ID)) axs[1, 0].axvline(period.value, alpha=0.4, lw=3) for n in range(2, 10): axs[1, 0].axvline(n * period.value, alpha=0.4, lw=1, linestyle="dashed") axs[1, 0].axvline(period.value / n, alpha=0.4, lw=1, linestyle="dashed") # Folded or zoomed plot setup epoch = t0.value # epoch = 1686.67 period = period.value #epoch = t0_3.value #period = period_3.value # print('Main epoch is {}'.format(t0.value+lc_30min.time[0])) phase = np.mod(t_cut - epoch - period / 2, period) / period axs[1, 1].scatter(phase, BLS_flux, c='k', s=1) axs[1, 1].set_title('{} Lightcurve folded by {:0.4} days'.format( target_ID, period)) axs[1, 1].set_xlabel('Phase') axs[1, 1].set_ylabel('Normalized Flux') #axs[1,1].set_xlim(0.4,0.6) # binned_phase, binned_lc = bin(phase, BLS_flux, binsize=15, method='mean') # plt.scatter(binned_phase, binned_lc, c='r', s=4) eye_balling_fig.tight_layout() eye_balling_fig.savefig( save_path + '{} - Full eyeballing fig.pdf'.format(target_ID)) plt.close(eye_balling_fig) # plt.show() ########################### ADDING INFO ROWS ###################### # sensitivity_table.add_row([target_ID,sector,pipeline,params.per,params.a,params.rp,period,np.max(results.power),period_2.value,period_3.value]) with open(save_path + 'Period_info_table.csv', 'a') as f: data_row = [ target_ID, sector, np.max(results.power), period, 
epoch, period_2.value, period_3.value, p_rot ] writer = csv.writer(f, delimiter=',') # writer.writerow(["your", "header", "foo"]) # write header writer.writerow(data_row) ###################### BONUS MULTI-PLOTTING STUFF ################# # orientation = 'vert' # # if orientation == 'vert': # fig, (ax1, ax2, ax3) = plt.subplots(3, 1) # elif orientation == 'horiz': # fig, (ax1, ax2, ax3) = plt.subplots(1, 3) # else: # print('Enter legitimate orientation') # ## fig, (ax1, ax2, ax3) = plt.subplots(3, 1) ## fig.subplots_adjust(hspace=0.3) # # ax1.scatter(t_cut,flux_cut, c = 'k', s = 1) # ax1.set_xlabel('Time - 2457000 [BTJD days]') # ax1.set_ylabel('Normalized Flux') # ax1.plot(t_cut, full_lowess_flux, '-') # ax1.set_xlim(t_cut[0],t_cut[-1]) # # ax2.plot(results.period, results.power, "k", lw=0.5) # ax2.set_xlim(results.period.min().value, results.period.max().value) # ax2.set_xlabel("period [days]") # ax2.set_ylabel("log likelihood") # ax2.axvline(period, alpha=0.4, lw=3) # for n in range(2, 10): # ax2.axvline(n*period, alpha=0.4, lw=1, linestyle="dashed") # ax2.axvline(period / n, alpha=0.4, lw=1, linestyle="dashed") # # ax3.scatter(phase, BLS_flux, c='k', s=1) # ax3.set_xlabel('Phase') # ax3.set_ylabel('Normalized Flux') # ax3.set_xlim(0,1) # plt.text(0.5,0.5,'Folded by {}d'.format(period), fontsize=12) # # plt.show() ################## Saving detrended lc to file ################### detrended_lc = lightkurve.lightcurve.TessLightCurve( time=t_cut, flux=BLS_flux, flux_err=lc_30min.flux_err) detrended_lc.to_csv( save_path + 'Detrended_lcs/{}_detrended_lc.csv'.format(target_ID)) ################################################################### except RuntimeError: print('No DiffImage lc exists for {}'.format(target_ID)) except: print('Some other error for {}'.format(target_ID)) return t_cut, BLS_flux, phase, epoch, period