def to_astropy_timeseries(self):
    """Save the event list to an Astropy timeseries.

    Array attributes (time, pi, energy, etc.) are converted
    into columns, while meta attributes (mjdref, gti, etc.)
    are saved into the ``meta`` dictionary.
    """
    from astropy.timeseries import TimeSeries
    from astropy.time import TimeDelta
    from astropy import units as u

    data = {}
    array_attrs = self.array_attrs()

    for attr in array_attrs:
        if attr == "time":
            continue
        data[attr] = np.asarray(getattr(self, attr))

    if data == {}:
        data = None

    if self.time is not None and self.time.size > 0:
        times = TimeDelta(self.time * u.s)
        ts = TimeSeries(data=data, time=times)
    else:
        ts = TimeSeries()

    ts.meta.update(self.get_meta_dict())

    return ts
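# --- Hedged usage sketch (not part of the class above): the method relies on
# passing a TimeDelta as the ``time`` argument to build a *relative* time
# series, which is supported in recent astropy releases (an assumption to
# verify for your version). Minimal self-contained demonstration:
import numpy as np
from astropy import units as u
from astropy.time import TimeDelta
from astropy.timeseries import TimeSeries

time_s = np.array([0.5, 1.1, 2.2])       # event arrival times in seconds
energy = np.array([3.0, 4.0, 5.0])       # one array attribute -> one column

ts = TimeSeries(data={"energy": energy}, time=TimeDelta(time_s * u.s))
ts.meta.update({"mjdref": 55000.0})      # meta attributes end up in ts.meta
print(ts.colnames)                       # ['time', 'energy']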
def to_timeseries(self):
    """
    Converts the internal `coord_list` of the trajectory to a table for
    easy manipulation and file output.

    Returns
    -------
    TimeSeries
        Coordinate list as `TimeSeries`
    """
    r = self.coord_list.cartesian

    # generate timeseries
    # add velocity info if possible
    if self._has_velocity:
        v = self.coord_list.velocity
        ts = TimeSeries(
            time=self.coord_list.obstime.isot,
            data={
                "r_X": r.x,
                "r_Y": r.y,
                "r_Z": r.z,
                "v_X": v.d_x,
                "v_Y": v.d_y,
                "v_Z": v.d_z,
            },
        )
    else:
        ts = TimeSeries(
            time=self.coord_list.obstime.isot,
            data={"r_X": r.x, "r_Y": r.y, "r_Z": r.z},
        )

    return ts
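# --- Standalone sketch of the conversion pattern above, with a cartesian
# SkyCoord standing in for ``self.coord_list`` (an assumption; the real
# trajectory class supplies its own coordinate list):
import numpy as np
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time
from astropy.timeseries import TimeSeries

obstime = Time('2021-01-01T00:00:00') + np.arange(3) * u.hour
coords = SkyCoord(x=[1, 2, 3] * u.km, y=[0, 0, 0] * u.km, z=[0, 1, 2] * u.km,
                  v_x=[1, 1, 1] * u.km / u.s, v_y=[0, 0, 0] * u.km / u.s,
                  v_z=[0, 0, 0] * u.km / u.s,
                  frame='gcrs', obstime=obstime,
                  representation_type='cartesian')
r, v = coords.cartesian, coords.velocity    # position / velocity components
ts = TimeSeries(time=coords.obstime.isot,
                data={'r_X': r.x, 'r_Y': r.y, 'r_Z': r.z,
                      'v_X': v.d_x, 'v_Y': v.d_y, 'v_Z': v.d_z})
print(ts.colnames)   # ['time', 'r_X', ..., 'v_Z']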
def to_timeseries(self):
    """
    Convert this object into an `astropy.timeseries.TimeSeries` instance.
    """
    from astropy.timeseries import TimeSeries
    ts = TimeSeries(time=self.t,
                    data={'rv': self.rv,
                          'rv_err': self.rv_err})
    ts.meta['t0'] = self.t0
    return ts
def _find_extrema_events(self, init_time):
    """
    Finds the extrema (max/min) events, contained within the time intervals.

    Parameters
    ----------
    init_time : Time
        Initial time

    Returns
    -------
    max_min_table : TimeSeries
        Timetable of max and min events
    """
    # *** Find max / min events ***
    min_max_events = self._deriv_interpolator.roots()

    # loop through each root and classify them as max / min events
    # (a positive value at a stationary point is taken to mean a maximum;
    # this assumes the interpolated quantity crosses zero between extrema)
    events_list = []
    value_list = []
    for event_time in min_max_events:
        value = self._interpolator(event_time)
        value_list.append(value)
        if value > 0:
            events_list.append("max")
        else:
            events_list.append("min")

    # init events table
    max_min_table = TimeSeries(
        time=init_time + min_max_events * u.day,
        data={"type": events_list, "value": value_list},
    )

    # Filter the events to those within the intervals
    remove_indexes = []
    for i, event_row in enumerate(max_min_table):
        event_within_interval = self.start_end_intervals.is_in_interval(
            event_row["time"]
        )
        if not event_within_interval:
            remove_indexes.append(i)

    # Remove the ones outside the intervals
    max_min_table.remove_rows(remove_indexes)

    return max_min_table
def _find_extrema_events(self):
    """
    Finds the extrema (max/min) events, contained within the time intervals.

    Returns
    -------
    max_min_table : TimeSeries
        Timetable of max and min events
    """
    # *** Find max / min events ***
    min_max_events = self.discrete_data.deriv_roots()

    # loop through each root and classify them as max / min events
    events_list = []
    value_list = self.discrete_data.interpolate(min_max_events)
    sec_deriv_list = self.discrete_data.sec_deriv_interpolate(min_max_events)
    for sec_deriv in sec_deriv_list:
        if sec_deriv < 0:
            events_list.append("max")
        else:
            events_list.append("min")

    # init events table
    max_min_table = TimeSeries(
        time=min_max_events,
        data={"type": events_list, "value": value_list},
    )

    # Filter the events to those within the intervals
    remove_indexes = []
    for i, event_row in enumerate(max_min_table):
        event_within_interval = self.start_end_intervals.is_in_interval(
            event_row["time"]
        )
        if not event_within_interval:
            remove_indexes.append(i)

    # Remove the ones outside the intervals
    max_min_table.remove_rows(remove_indexes)

    return max_min_table
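# --- Minimal standalone sketch of the second-derivative test above, with
# scipy's CubicSpline standing in for the project's interpolators (an
# assumption; the real ``discrete_data`` object may wrap a different scheme):
import numpy as np
from scipy.interpolate import CubicSpline

x = np.linspace(0, 10, 200)
spline = CubicSpline(x, np.sin(x))

roots = spline.derivative().roots(extrapolate=False)  # stationary points
second = spline.derivative(2)(roots)                  # second-derivative test
kinds = np.where(second < 0, "max", "min")            # concave down -> max
print(list(zip(np.round(roots, 3), kinds)))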
def test_from_timeseries_bad(self):
    from astropy.time import TimeDelta

    times = TimeDelta(np.arange(10) * u.s)
    ts = TimeSeries(time=times)
    with pytest.raises(ValueError) as excinfo:
        Lightcurve.from_astropy_timeseries(ts)
    assert "Input timeseries must contain at least" in str(excinfo.value)
def test_tooltip(self, tmpdir, deterministic_uuid):

    # Check that if a custom tooltip is specified, which uses columns other
    # than the ones used for the plotting, these other columns are included
    # in the output data file.

    self.ts['other1'] = [1, 2, 3, 4, 5]
    self.ts['other2'] = [1, 2, 3, 4, 5]
    self.ts['other3'] = [1, 2, 3, 4, 5]
    self.ts['other4'] = [1, 2, 3, 4, 5]

    figure = InteractiveTimeSeriesFigure()
    figure.add_markers(time_series=self.ts, column='flux',
                       label='Markers', tooltip=['time', 'flux', 'other1'])
    figure.add_markers(time_series=self.ts, column='flux',
                       label='Markers', tooltip={'other3': 'Other'})

    json_file = tmpdir.join('figure.json').strpath
    figure.save_vega_json(json_file)

    ts = TimeSeries.read(
        tmpdir.join('data_daea58ba-4c73-4942-8d87-78e7d340bbcd.csv').strpath,
        format='ascii.basic', delimiter=',')

    assert ts.colnames == ['time', 'flux', 'other1', 'other3']
def setup_method(self):
    self.ts = TimeSeries(time_start='2016-03-22T12:30:31',
                         time_delta=3 * u.s, n_samples=5)
    self.ts['flux'] = [1, 2, 3, 4, 5]
    self.ts['error'] = [1, 2, 3, 4, 5]
def setup_method(self, method):
    self.ts = TimeSeries(time_start='2016-03-22T12:30:31',
                         time_delta=3 * u.s, n_samples=5)
    self.ts['flux'] = [1, 2, 3, 4, 5]
    self.ts['error'] = [1, 2, 3, 4, 5]
    self.ts['flux_with_unit'] = [1, 2, 3, 4, 5] * u.Jy
    self.ts['error_with_unit'] = [1, 2, 3, 4, 5] * u.mJy
    self.figure = InteractiveTimeSeriesFigure()
def to_astropy_timeseries(self):
    """Save the ``StingrayTimeseries`` to an ``Astropy`` timeseries.

    Array attributes (time, pi, energy, etc.) are converted
    into columns, while meta attributes (mjdref, gti, etc.)
    are saved into the ``meta`` dictionary.

    Returns
    -------
    ts : `astropy.timeseries.TimeSeries`
        A ``TimeSeries`` object with the array attributes as columns,
        and the meta attributes in the ``meta`` dictionary
    """
    from astropy.timeseries import TimeSeries
    from astropy.time import TimeDelta
    from astropy import units as u

    data = {}
    array_attrs = self.array_attrs()

    for attr in array_attrs:
        if attr == "time":
            continue
        data[attr] = np.asarray(getattr(self, attr))

    if data == {}:
        data = None

    if self.time is not None and np.size(self.time) > 0:
        times = TimeDelta(self.time * u.s)
        ts = TimeSeries(data=data, time=times)
    else:
        ts = TimeSeries()

    ts.meta.update(self.get_meta_dict())

    return ts
def _classify_start_end_events(self, init_time, start_end_events,
                               neg_to_pos_is_start):
    """
    Classify the list of events into a timetable of start and end events.

    Parameters
    ----------
    init_time : Time
        Initial time
    start_end_events : ndarray
        Array of start and end event times in days, starting from `init_time`
    neg_to_pos_is_start : bool
        If `True`, a value turning from negative to positive marks a *start*
        event, otherwise it marks an *end* event

    Returns
    -------
    events_table : TimeSeries
        Timetable of start and end events
    """
    # loop through each root and classify them as start / end events
    events_list = []
    for event_time in start_end_events:
        deriv = self._deriv_interpolator(event_time)
        if neg_to_pos_is_start:
            if deriv > 0:
                events_list.append("start")
            else:
                events_list.append("end")
        else:
            if deriv < 0:
                events_list.append("start")
            else:
                events_list.append("end")

    # init events table
    events_table = TimeSeries(
        time=init_time + start_end_events * u.day,
        data={"type": events_list},
    )

    return events_table
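# --- Standalone sketch of the rising/falling classification above, with a
# cubic spline standing in for ``self._deriv_interpolator`` (an assumption;
# the real interpolator is built by the surrounding class):
import numpy as np
from scipy.interpolate import CubicSpline

x = np.linspace(0, 10, 200)
f = CubicSpline(x, np.sin(x))                 # f > 0 means "inside an interval"
crossings = f.roots(extrapolate=False)        # zero crossings of f
slope = f.derivative()(crossings)             # slope sign at each crossing
labels = np.where(slope > 0, "start", "end")  # rising edge opens an interval
print(list(zip(np.round(crossings, 3), labels)))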
def to_astropy_timeseries(self):
    from astropy.timeseries import TimeSeries
    from astropy.time import TimeDelta
    from astropy import units as u

    data = {}
    for attr in ['energy', 'pi']:
        if hasattr(self, attr) and getattr(self, attr) is not None:
            data[attr] = np.asarray(getattr(self, attr))

    if data == {}:
        data = None

    if self.time is not None and self.time.size > 0:
        times = TimeDelta(self.time * u.s)
        ts = TimeSeries(data=data, time=times)
    else:
        ts = TimeSeries()

    ts.meta['gti'] = self.gti
    ts.meta['mjdref'] = self.mjdref
    ts.meta['instr'] = self.instr
    ts.meta['mission'] = self.mission
    ts.meta['header'] = self.header

    return ts
def test_interactive_screenshot(tmpdir):

    ts = TimeSeries(time_start='2016-03-22T12:30:31',
                    time_delta=3 * u.s, n_samples=5)
    ts['flux'] = [1, 2, 3, 4, 5]

    filename_json = tmpdir.join('figure.json').strpath
    filename_png = tmpdir.join('figure').strpath

    figure = InteractiveTimeSeriesFigure()
    markers = figure.add_markers(time_series=ts, column='flux', label='Markers')
    figure.add_line(time_series=ts, column='flux', label='Line')
    figure.add_view(title="only markers", include=[markers])
    figure.save_vega_json(filename_json, embed_data=True)

    interactive_screenshot(filename_json, filename_png)

    assert os.path.exists(filename_png + '.png')
    assert os.path.exists(filename_png + '_view1.png')
def __init__(self, emitters, receiver, start_time, timestep, timespan,
             doppler=relativistic_doppler, only_visible=True, signal=None):
    """
    @param emitters: List of EarthLocation
    @param receiver: EarthLocation
    @param start_time: Time
    @param timestep: Quantity with time unit
    @param timespan: Quantity with time unit
    @param doppler: Function to calculate doppler shift; use default
    @param only_visible: Bool, filter doppler with moon visibility
    @param signal: None or Number; if None, plots the doppler factor,
        else plots the doppler shift.
    """
    self.emitters = emitters
    self.receiver = receiver
    self.only_visible = only_visible
    self.doppler = doppler
    self.start_time = start_time
    self.timestep = timestep
    self.timespan = timespan
    self.samples = ceil(timespan.to('hour') / timestep.to('hour'))
    self.times = TimeSeries(time_start=self.start_time,
                            time_delta=self.timestep,
                            n_samples=self.samples)['time']
    self._last_dopplers = None
    self._last_moon_emitters_altitudes = None
    self._last_moon_receiver_altitudes = None
    self.signal = signal
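# --- Sketch of the sampling grid built in __init__ above, in isolation
# (start/step/span values are illustrative): TimeSeries generates n_samples
# instants spaced time_delta apart, and indexing with ['time'] extracts the
# Time column.
from math import ceil
from astropy import units as u
from astropy.time import Time
from astropy.timeseries import TimeSeries

start = Time('2021-01-01T00:00:00')
step, span = 10 * u.min, 2 * u.hour
n = ceil((span.to('hour') / step.to('hour')).value)   # 12 samples
times = TimeSeries(time_start=start, time_delta=step, n_samples=n)['time']
print(times[0].isot, times[-1].isot, len(times))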
def main():
    root = tk.Tk()
    root.withdraw()
    filepath = filedialog.askopenfilename()

    hdulist = fits.open(filepath)
    telescope = hdulist[0].header['telescop'].lower()

    if telescope == 'tess':
        hdu = hdulist['LIGHTCURVE']
        print("TESS file recognized")
        tessID = str(hdulist[1].header['TICID'])
        print(tessID)
        ts = TimeSeries.read(filepath, format='tess.fits')
    elif telescope == 'kepler':
        hdu = hdulist[1]
        print("Kepler file recognized")
        keplerID = str(hdulist[0].header['KEPLERID'])
        print(keplerID)
        ts = TimeSeries.read(filepath, format='kepler.fits')
    else:
        raise NotImplementedError(
            "{} is not implemented, only KEPLER or TESS are "
            "supported through this reader".format(hdulist[0].header['telescop']))

    fig, axs = plt.subplots(2, 2)
    if telescope == 'tess':
        fig.suptitle('TESS Id:' + tessID, fontsize=15)
    if telescope == 'kepler':
        fig.suptitle('Kepler Id:' + keplerID, fontsize=15)
    fig.set_size_inches(18.5, 10.5)

    # full data plot
    axs[0, 0].plot(ts.time.jd, ts['sap_flux'], 'k.', markersize=1, label='whole')
    axs[0, 0].set_xlabel('Julian Date')
    axs[0, 0].set_ylabel('SAP Flux (e-/s)')
    axs[0, 0].set_title('full data')

    # folded time series
    periodogram = BoxLeastSquares.from_timeseries(ts, 'sap_flux')
    results = periodogram.autopower(0.1 * u.day)
    best = np.argmax(results.power)
    period = results.period[best]
    transit_time = results.transit_time[best]
    # note: newer astropy versions spell this keyword ``epoch_time``
    ts_folded = ts.fold(period=period, midpoint_epoch=transit_time)
    axs[0, 1].plot(ts_folded.time.jd, ts_folded['sap_flux'], 'k.', markersize=1)
    axs[0, 1].set_xlabel('Time (days)')
    axs[0, 1].set_ylabel('SAP FLUX (e-/s)')
    axs[0, 1].set_title('folded time series')

    # binned time series
    mean, median, stddev = sigma_clipped_stats(ts_folded['sap_flux'])
    ts_folded['sap_flux_norm'] = ts_folded['sap_flux'] / median
    ts_binned = aggregate_downsample(ts_folded, time_bin_size=0.003 * u.day)
    axs[1, 0].plot(ts_folded.time.jd, ts_folded['sap_flux_norm'], 'k.', markersize=1)
    axs[1, 0].plot(ts_binned.time_bin_start.jd, ts_binned['sap_flux_norm'],
                   'r-', drawstyle='default')
    axs[1, 0].set_xlabel('Time (days)')
    axs[1, 0].set_ylabel('Normalized Flux')
    axs[1, 0].set_title('binned time series')

    # final transit model
    axs[1, 1].plot(ts_folded.time.jd, ts_folded['sap_flux_norm'], 'k.', markersize=1)
    axs[1, 1].plot(ts_binned.time_bin_start.jd, ts_binned['sap_flux_norm'],
                   'r-', drawstyle='default')
    axs[1, 1].set_xlabel('Time (days)')
    axs[1, 1].set_ylabel('Normalized Flux')
    axs[1, 1].set_title('transit model')

    plt.show()
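# --- Hedged sketch of the period-search + fold step above, on synthetic data,
# so it runs without a TESS/Kepler file (the injected box transit is purely
# illustrative; ``epoch_time`` is the current-astropy spelling of
# ``midpoint_epoch``):
import numpy as np
from astropy import units as u
from astropy.timeseries import TimeSeries, BoxLeastSquares

rng = np.random.default_rng(0)
n = 1000
ts = TimeSeries(time_start='2019-01-01T00:00:00',
                time_delta=20 * u.min, n_samples=n)
phase = (np.arange(n) * 20 / (60 * 24)) % 2.5          # days into a 2.5 d cycle
flux = 1.0 + rng.normal(0, 1e-3, n)
flux[phase < 0.1] -= 0.01                              # box-shaped transit dip
ts['sap_flux'] = flux

periodogram = BoxLeastSquares.from_timeseries(ts, 'sap_flux')
results = periodogram.autopower(0.1 * u.day)
best = np.argmax(results.power)
folded = ts.fold(period=results.period[best],
                 epoch_time=results.transit_time[best])
print(results.period[best])                            # ~2.5 d expected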
@classmethod
def from_timeseries(cls, f):
    from astropy.timeseries import TimeSeries
    ts = TimeSeries.read(f)
    t0 = ts.meta.get('t0', None)
    return cls(t=ts['time'], rv=ts['rv'], rv_err=ts['rv_err'], t0=t0)
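# --- Round-trip sketch pairing to_timeseries()/from_timeseries() above.
# ECSV is used here because it preserves ts.meta (the file name and values
# are illustrative):
from astropy.time import Time
from astropy.timeseries import TimeSeries

ts = TimeSeries(time=Time([2458000.0, 2458001.0], format='jd'),
                data={'rv': [10.0, 11.5], 'rv_err': [0.3, 0.3]})
ts.meta['t0'] = 2458000.5
ts.write('rv.ecsv', overwrite=True)

back = TimeSeries.read('rv.ecsv')
print(back.meta['t0'], back['rv'])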
def __init__(self, name: str, dfile: Path = None, tic: int = None,
             zero_epoch: float = None, period: float = None,
             nsamples: int = 2, trdur: float = 0.125, bldur: float = 0.3,
             use_pdc=True, split_transits=True, separate_noise=False,
             tm: TransitModel = None, minpt=10):

    self.zero_epoch = zero_epoch
    self.period = period

    if tic is not None:
        from lightkurve import search_lightcurvefile
        print("Searching for TESS light curves")
        lcf = search_lightcurvefile(tic, mission='TESS')
        print(f"Found {len(lcf)} TESS light curves")
        print("Downloading TESS light curves")
        lcs = lcf.download_all()
        if use_pdc:
            ts = lcs.PDCSAP_FLUX.stitch().normalize().to_timeseries()
        else:
            ts = lcs.SAP_FLUX.stitch().normalize().to_timeseries()
    elif dfile is not None:
        ts = TimeSeries.read(dfile, format='tess.fits')
        if use_pdc:
            ts = ts['time', 'pdcsap_flux', 'pdcsap_flux_err']
        else:
            ts = ts['time', 'sap_flux', 'sap_flux_err']
        ts.rename_columns(ts.colnames, 'time flux flux_err'.split())
        m = ~isfinite(ts['flux'])
        self.normalization = (sigma_clipped_stats(ts['flux'], mask=m)[1]).value
        ts['flux_err'] /= self.normalization
        ts['flux'] /= self.normalization
        ts['flux'].mask = m
    else:
        raise NotImplementedError(
            "Need to give either a TIC or a SPOC light curve file")

    tref = floor(ts.time.jd.min())
    self.period = period = period if isinstance(period, u.Quantity) else u.d * period
    self.zero_epoch = zero_epoch = (
        zero_epoch if isinstance(zero_epoch, Time)
        else Time(zero_epoch, format='jd', scale='tdb'))

    ts_folded = ts.fold(period, zero_epoch)
    mwindow = abs(ts_folded.time.jd) < 0.5 * bldur
    mint = abs(ts_folded.time.jd) < 0.5 * trdur
    moot = mwindow & ~mint

    self.transit_duration = trdur
    self.baseline_duration = bldur

    bm = ~ts['flux'].mask & mwindow

    if split_transits:
        ep = epoch(ts.time.jd, zero_epoch.jd, period)
        ep -= ep.min()
        times, fluxes = [], []
        for e in unique(ep):
            m = bm & (ep == e)
            if m.sum() >= minpt:
                times.append(ts.time.jd[m].astype('d'))
                try:
                    fluxes.append(ts['flux'].data.data[m].astype('d'))
                except AttributeError:
                    fluxes.append(ts['flux'].value[m].astype('d'))
        pbids = len(times) * [0]
    else:
        times, fluxes = [ts.time.jd[bm]], [ts['flux'].data.data[bm].astype('d')]
        pbids = [0]

    wnids = arange(len(times)) if separate_noise else None

    BaseLPF.__init__(self, name, ['TESS'], times=times, fluxes=fluxes,
                     pbids=pbids, nsamples=nsamples, exptimes=[0.00139],
                     wnids=wnids, tref=tref, tm=tm)
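# --- Sketch of the per-transit indexing used above, assuming epoch() is the
# usual round((t - t0) / p) orbit-number helper (an assumption; the real
# function is imported elsewhere in the module):
import numpy as np

def epoch(time, zero_epoch, period):
    # orbit number of each observation, rounded to the nearest transit
    return np.round((time - zero_epoch) / period).astype(int)

t = np.array([2458001.0, 2458001.1, 2458003.5, 2458006.0])
print(epoch(t, 2458001.0, 2.5))   # [0 0 1 2]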
max_mag = np.max(mag)
max_time = date[mag == max_mag]

# define period from upsilon and convert to units of time
row_index = upsilon_class_df[upsilon_class_df['objid'] == i].index[0]  # get row number of object
period = upsilon_class_df.loc[row_index, 'period']  # get period of object based on upsilon
period = period * u.second

# Create time object from hjd data for star
t = Time(date, format='mjd', scale='ut1')

# Create time series object using time object 't'
# If scale = 'utc', gives: WARNING: ErfaWarning: ERFA function "utctai"
# yielded 561 of "dubious year (Note 3)" [astropy._erfa.core]
ts = TimeSeries(time=t)

# Plot light curve for star
fig_1 = plt.figure()
ts['mag'] = mag
title = "Light curve for object " + str(i)
plt.title(title)
plt.plot(ts.time.jd, mag, 'k.', markersize=1)
plt.xlabel("Julian Date")
plt.ylabel("Mag")
plt.gca().invert_yaxis()
plt.show()

name_file = str(i) + '.png'
# Uncomment line below to save file in eps format
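# --- Possible follow-up (not in the original script): folding the light curve
# on the upsilon period with TimeSeries.fold. Synthetic date/mag arrays stand
# in for the real data:
import numpy as np
from astropy import units as u
from astropy.time import Time
from astropy.timeseries import TimeSeries

date = np.linspace(58000, 58010, 500)              # MJD, illustrative
mag = 15 + 0.3 * np.sin(2 * np.pi * date / 1.7)    # fake 1.7-day variable
ts = TimeSeries(time=Time(date, format='mjd', scale='ut1'))
ts['mag'] = mag
folded = ts.fold(period=1.7 * u.day)               # epoch defaults to first time
print(folded.time.jd[:3])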
    ext=1)  # likely that TTYPE6 should be SAP_FLUX
# possible code edit: prompt the user to change

# filters the fits file and saves it as EverestFits-KepFiltered.fits
pyke.kepfilter(star.fitsfile,
               passband='high',
               outfile="EverestFits-KepFiltered.fits",
               overwrite=True,
               datacol='SAP_FLUX',
               function='boxcar',
               logfile='kepfilter.log')

# create two astropy TimeSeries that can be used
ts = TimeSeries.read(star.fitsfile, format='kepler.fits')
ts1 = TimeSeries.read("EverestFits-KepFiltered.fits", format='kepler.fits')

image = fits.open(star.fitsfile)
plt.imshow(image[5].data)  # shows the star

print("Pre KepFilter")
plt.plot(ts.time.jd, ts['sap_flux'], 'k.', markersize=1)
plt.xlabel('Julian Date')
plt.ylabel('Raw Flux (e-/s)')
plt.show()

print("Post KepFilter")
plt.plot(ts1.time.jd, ts1['sap_flux'], 'k.', markersize=1)
plt.xlabel('Julian Date')
df = pd.read_csv("https://github.com/tammojan/perseids2020/raw/master/meteor-overview.csv")
df.set_index(pd.DatetimeIndex(df['Time']), inplace=True)

# In[7]:

from astropy.timeseries import TimeSeries

# In[8]:

times_allsky = TimeSeries.from_pandas(df[df['DAARO! All Sky Camera'] > 0]).time

# In[9]:

times_lofar = TimeSeries.from_pandas(df[df['Watec optical Burlage+Dwingeloo Pointing LOFAR'] > 0]).time

# In[10]:

times_twist = TimeSeries.from_pandas(df[df['Watec optical Burlage+Dwingeloo Pointing Twist'] > 0]).time

# In[11]:
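# --- Self-contained sketch of the TimeSeries.from_pandas pattern used above
# (the column names here are made up; only the DatetimeIndex matters):
import pandas as pd
from astropy.timeseries import TimeSeries

df = pd.DataFrame({'Time': ['2020-08-12T01:00', '2020-08-12T02:00'],
                   'count': [3, 0]})
df.set_index(pd.DatetimeIndex(df['Time']), inplace=True)

times = TimeSeries.from_pandas(df[df['count'] > 0]).time
print(times.isot)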
def convert_timeseries(timeseries, name):
    vals = Table(data=[timeseries.values], dtype=[int], names=[name])
    str_times = timeseries.index.values
    # ``dt`` is assumed here to parse the index values into datetime objects
    times_array = [Time(dt(t)) for t in str_times]
    return TimeSeries(data=vals, time=times_array)
def setup_method(self):
    self.ts = TimeSeries.read(os.path.join(DATA, 'phase.csv'), format='ascii.csv')
    self.ts['error'] = 0.02  # reduce for now
    self.ts['relative_s'] = self.ts['relative'] * u.s
def get_body_ephemeris(times, body_id="501", step="1m"):

    start = times.isot[0]

    # Because the Horizons time range doesn't include the endpoint,
    # we need to add some extra time
    if step[-1] == "m":
        padding = 2 * float(step[:-1]) / (60 * 24)
    elif step[-1] == "h":
        padding = 2 * float(step[:-1]) / 24
    elif step[-1] == "d":
        padding = 2 * float(step[:-1])
    else:
        raise ValueError(
            "Unrecognized JPL Horizons step size. Use '1m' or '1h' for example."
        )

    end = Time(times.mjd[-1] + padding, format="mjd").isot

    # Query JPL Horizons
    epochs = {"start": start, "stop": end, "step": step}
    obj = Horizons(id=body_id, epochs=epochs, id_type="id")

    eph = obj.ephemerides(extra_precision=True)
    times_jpl = Time(eph["datetime_jd"], format="jd")

    # Store all data in a TimeSeries object
    data = TimeSeries(time=times)
    data["RA"] = np.interp(times.mjd, times_jpl.mjd, eph["RA"]) * eph["RA"].unit
    data["DEC"] = np.interp(times.mjd, times_jpl.mjd, eph["DEC"]) * eph["DEC"].unit
    data["ang_width"] = (
        np.interp(times.mjd, times_jpl.mjd, eph["ang_width"]) * eph["ang_width"].unit
    )
    data["phase_angle"] = (
        np.interp(times.mjd, times_jpl.mjd, eph["alpha_true"]) * eph["alpha_true"].unit
    )

    # Boolean flags for occultations/eclipses
    occ_sunlight = eph["sat_vis"] == "O"
    umbra = eph["sat_vis"] == "u"
    occ_umbra = eph["sat_vis"] == "U"
    partial = eph["sat_vis"] == "p"
    occ_partial = eph["sat_vis"] == "P"
    occulted = np.any([occ_umbra, occ_sunlight], axis=0)

    data["ecl_par"] = np.array(np.interp(times.mjd, times_jpl.mjd, partial), dtype=bool)
    data["ecl_tot"] = np.array(np.interp(times.mjd, times_jpl.mjd, umbra), dtype=bool)
    data["occ_umbra"] = np.array(np.interp(times.mjd, times_jpl.mjd, occ_umbra), dtype=bool)
    data["occ_sun"] = np.array(np.interp(times.mjd, times_jpl.mjd, occ_sunlight), dtype=bool)

    # Helper functions for dealing with angles and discontinuities
    subtract_angles = lambda x, y: np.fmod((x - y) + np.pi * 3, 2 * np.pi) - np.pi

    def interpolate_angle(x, xp, yp):
        """
        Interpolate an angular quantity on the domain [-pi, pi) while
        avoiding discontinuities.
        """
        cosy = np.interp(x, xp, np.cos(yp))
        siny = np.interp(x, xp, np.sin(yp))
        return np.arctan2(siny, cosy)

    # Inclination of the starry map = 90 - latitude of the central point of
    # the observed disc
    data["inc"] = interpolate_angle(
        times.mjd, times_jpl.mjd, np.pi / 2 * u.rad - eph["PDObsLat"].to(u.rad)
    ).to(u.deg)

    # Rotational phase of the starry map is the observer longitude
    data["theta"] = (
        interpolate_angle(
            times.mjd, times_jpl.mjd, eph["PDObsLon"].to(u.rad) - np.pi * u.rad
        ).to(u.deg)
        + 180 * u.deg
    )

    # Obliquity of the starry map is the CCW angle from the celestial
    # NP to the NP of the target body
    data["obl"] = interpolate_angle(
        times.mjd, times_jpl.mjd, eph["NPole_ang"].to(u.rad)
    ).to(u.deg)

    # Compute the location of the subsolar point relative to the central
    # point of the disc
    lon_subsolar = subtract_angles(
        np.array(eph["PDSunLon"].to(u.rad)), np.array(eph["PDObsLon"].to(u.rad))
    )
    lon_subsolar = 2 * np.pi - lon_subsolar  # positive lon. is to the east

    lat_subsolar = subtract_angles(
        np.array(eph["PDSunLat"].to(u.rad)), np.array(eph["PDObsLat"].to(u.rad))
    )

    # Location of the subsolar point in cartesian Starry coordinates
    xs = np.array(eph["r"]) * np.cos(lat_subsolar) * np.sin(lon_subsolar)
    ys = np.array(eph["r"]) * np.sin(lat_subsolar)
    zs = np.array(eph["r"]) * np.cos(lat_subsolar) * np.cos(lon_subsolar)

    data["xs"] = np.interp(times.mjd, times_jpl.mjd, xs) * u.AU
    data["ys"] = np.interp(times.mjd, times_jpl.mjd, ys) * u.AU
    data["zs"] = np.interp(times.mjd, times_jpl.mjd, zs) * u.AU

    return data
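# --- Why interpolate_angle above works: interpolating cos/sin separately and
# recombining with arctan2 avoids the 2*pi jump that naive np.interp produces
# when the angle wraps. Minimal standalone check (values illustrative):
import numpy as np

xp = np.array([0.0, 1.0])
yp = np.array([np.pi - 0.1, -np.pi + 0.1])   # wraps through +/- pi

naive = np.interp(0.5, xp, yp)                            # ~0.0 (wrong branch)
circular = np.arctan2(np.interp(0.5, xp, np.sin(yp)),
                      np.interp(0.5, xp, np.cos(yp)))     # ~pi (correct)
print(naive, circular)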