def lyra_ts():
    # Create sample TimeSeries
    lyra_ts = timeseries.TimeSeries(
        os.path.join(rootdir, 'lyra_20150101-000000_lev3_std_truncated.fits.gz'),
        source='LYRA')
    data = pandas.DataFrame(index=TIME,
                            data={"CHANNEL1": CHANNELS[0],
                                  "CHANNEL2": CHANNELS[1],
                                  "CHANNEL3": CHANNELS[0],
                                  "CHANNEL4": CHANNELS[1]})
    lyra_ts = timeseries.TimeSeries(data, lyra_ts.meta)
    return lyra_ts
def other():
    goes = ts.TimeSeries('go1520140611.fits')

    filename = '11jun14.lis.gz'
    rstn = np.genfromtxt(filename,
                         delimiter=2 * [4] + 5 * [2] + 8 * [7],
                         dtype=('|S10', int, int, int, int, int, int,
                                float, float, float, float, float, float,
                                float, float),
                         names=['sta', 'year', 'mon', 'day', 'hour', 'min',
                                'sec', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6',
                                'f7', 'f8'])
    times = list(map(datetime.datetime, rstn['year'], rstn['mon'], rstn['day'],
                     rstn['hour'], rstn['min'], rstn['sec']))
    data = np.transpose([rstn['f1'], rstn['f2'], rstn['f3'], rstn['f4'],
                         rstn['f5'], rstn['f6'], rstn['f7'], rstn['f8']])
    df = pd.DataFrame(data,
                      columns=['245 MHz', '410 MHz', '610 MHz', '1.4 GHz',
                               '2.7 GHz', '4.9 GHz', '8.8 GHz', '15.4 GHz'],
                      index=times)

    # Save the data to a csv
    df.sort_index(inplace=True)
    df.to_csv('san_vito_rstn_11062014.csv', header=True, index=True)

    new_df = df.truncate(flare_ts, flare_te)
    short_goes = goes.truncate(flare_ts, flare_te)
    gl = short_goes.data['xrsb']
    gs = short_goes.data['xrsa']
def units_attach(data, units, warn_missing_units=True):
    """
    Takes the units defined by the user and attaches them to the TimeSeries.

    Parameters
    ----------
    data : :class:`pandas.DataFrame`
        Input data. Takes the DataFrame which needs to have units attached.
    units : :class:`collections.OrderedDict`
        The units manually defined by the user.
    warn_missing_units : bool, optional
        If `True` (the default), warn about columns with no user-defined units.

    Returns
    -------
    out : :class:`~sunpy.timeseries.TimeSeries`
        DataFrame converted into TimeSeries with units attached.
    """
    unit_key = list(units.keys())
    for column_name in data.columns:
        if column_name not in unit_key:
            units[column_name] = u.dimensionless_unscaled
            if warn_missing_units:
                message = "{} column has missing units.".format(column_name)
                warnings.warn(message, Warning)
    with warnings.catch_warnings():
        # filterwarnings (not simplefilter) takes a message pattern as its
        # second argument
        warnings.filterwarnings(
            'ignore', 'Discarding nonzero nanoseconds in conversion')
        timeseries_data = ts.TimeSeries(data, units)
    return timeseries_data
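# A short usage sketch for units_attach. The column names and data here are
# illustrative; `ts`, `u`, and `warnings` are assumed to be imported as in the
# function above.
import collections

import pandas as pd
import astropy.units as u

df = pd.DataFrame({'Bx': [1.0, 2.0], 'unknown': [3.0, 4.0]},
                  index=pd.date_range('2015-01-01', periods=2, freq='min'))
units = collections.OrderedDict([('Bx', u.nT)])
# 'unknown' has no entry in `units`, so it gets u.dimensionless_unscaled
# and a warning is emitted.
ts_data = units_attach(df, units)
print(ts_data.units)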
def test_calculate_temperature_em():
    # Create an XRSTimeSeries object, then create a new one with
    # temperature & EM using calculate_temperature_em().
    goeslc = timeseries.TimeSeries(get_test_filepath("go1520110607.fits"))
    goeslc_new = goes.calculate_temperature_em(goeslc)

    # Test that the correct exception is raised if an XRSTimeSeries
    # object is not input.
    with pytest.raises(TypeError):
        goes.calculate_temperature_em([])

    # Find temperature and EM manually with _goes_chianti_tem()
    temp, em = goes._goes_chianti_tem(
        goeslc.quantity("xrsb"),
        goeslc.quantity("xrsa"),
        satellite=int(goeslc.meta.metas[0]["TELESCOP"].split()[1]),
        date="2014-01-01")

    # Check that the temperature and EM arrays from _goes_chianti_tem()
    # are the same as those in the new XRSTimeSeries object.
    assert goeslc_new.data.temperature.all() == temp.value.all()
    assert goeslc_new.data.em.all() == em.value.all()

    # Check the rest of the data frame of the new XRSTimeSeries object
    # is the same as that in the original object.
    goeslc_revert = copy.deepcopy(goeslc_new)
    del goeslc_revert.data["temperature"]
    del goeslc_revert.data["em"]
    assert_frame_equal(goeslc_revert.data, goeslc.data)
def test_calculate_radiative_loss_rate():
    # Define input variables.
    goeslc_input = timeseries.TimeSeries(get_test_filepath("go1520110607.fits"))
    not_goeslc = []
    goeslc_no_em = goes.calculate_temperature_em(goeslc_input)
    del goeslc_no_em.data["em"]

    # Check the correct exception is raised for incorrect inputs.
    with pytest.raises(TypeError):
        goes_test = goes.calculate_radiative_loss_rate(not_goeslc)

    # Check the function gives correct results.
    # Test case 1: GOESLightCurve object with only flux data
    goeslc_test = goes.calculate_radiative_loss_rate(goeslc_input)
    exp_data = np.array([1.78100055e+19, 1.66003113e+19, 1.71993065e+19,
                         1.60171768e+19, 1.71993065e+19])
    np.testing.assert_allclose(goeslc_test.data.rad_loss_rate[:5], exp_data)

    # Test case 2: GOESLightCurve object with flux and temperature
    # data, but no EM data.
    goes_test = goes.calculate_radiative_loss_rate(goeslc_no_em)
    # Test that the column has been added.
    assert "rad_loss_rate" in goes_test.columns
    # Compare every 50th value to save on file size.
    return np.array(goes_test.data[::50])
def units_attach(data, units, warn_missing_units=True):
    """
    Takes the units defined by the user and attaches them to the TimeSeries.

    Parameters
    ----------
    data : :class:`pandas.DataFrame`
        Input data. Takes the DataFrame which needs to have units attached.
    units : :class:`collections.OrderedDict`
        The units manually defined by the user.
    warn_missing_units : bool, optional
        If `True` (the default), warn about columns with no user-defined units.

    Returns
    -------
    out : :class:`~sunpy.timeseries.TimeSeries`
        DataFrame converted into TimeSeries with units attached.
    """
    missing_msg = ('If you are trying to automatically download data '
                   'with HelioPy this is a bug, please report it at '
                   'https://github.com/heliopython/heliopy/issues')
    unit_key = list(units.keys())
    for column_name in data.columns:
        if column_name not in unit_key:
            units[column_name] = u.dimensionless_unscaled
            if warn_missing_units:
                message = (f"{column_name} column has missing units."
                           f"\n{missing_msg}")
                warnings.warn(message, Warning)
    with warnings.catch_warnings():
        # filterwarnings (not simplefilter) takes a message pattern as its
        # second argument
        warnings.filterwarnings(
            'ignore', 'Discarding nonzero nanoseconds in conversion')
        timeseries_data = ts.TimeSeries(data, units)
    return timeseries_data
def AIATimeSeries():
    startDate = request.args.get('a', 0, type=str)
    endDate = request.args.get('b', 0, type=str)
    result = Fido.search(a.Time(startDate, endDate), a.Instrument('XRS'))
    try:
        downloaded_files = Fido.fetch(result)
        combined_goes_ts = ts.TimeSeries(downloaded_files, source='XRS',
                                         concatenate=True)
        combined_goes_ts.peek()
        filename = os.path.basename(downloaded_files[0])
        # Bundle the downloaded FITS files into a zip for download.
        zipf = zipfile.ZipFile('app/static/TSFits/' + filename + '.zip', 'w',
                               zipfile.ZIP_DEFLATED)
        for files in downloaded_files:
            zipf.write(files, os.path.basename(files))
        zipf.close()
        plt.savefig('app/static/images/' + filename + '_timeseries.png')
        return jsonify(
            result='<img id="img" src="static/images/' + filename +
                   '_timeseries.png" style="width: inherit; padding-bottom: 6px;">',
            download='<a href="static/images/' + filename +
                     '_timeseries.png" download="" id="btn-down" '
                     'class="glyphicon glyphicon-floppy-save" '
                     'style="font-size: 20px; color: black; text-decoration: none;" '
                     'data-toggle="tooltip" data-placement="top" '
                     'title="Download PNG file"></a>'
                     '<a href="static/TSFits/' + filename +
                     '.zip" download="" id="btn-down" '
                     'class="glyphicon glyphicon-save-file" '
                     'style="font-size: 20px; color: black; text-decoration: none;" '
                     'data-toggle="tooltip" data-placement="top" '
                     'title="Download FITS file"></a>'
        )
    except HTTPError:
        result = "error"
        return jsonify(result=result)
def plot_flares(i, transmitter='NRK'):
    tt = parse_time(events_to_download[i]).strftime("%Y%m%d")
    files_vlf = glob.glob("./vlf_bas_files/{:s}{:s}*".format(transmitter, tt))
    if len(files_vlf) == 0:
        print("No VLF data")
        return
    goes_file = goes_data_dir + "go15" + tt + ".fits"
    if not Path(goes_file).exists():
        print("No goes data")
        return
    goes_data = ts.TimeSeries(goes_file)
    gl = goes_data.data["xrsb"]
    gs = goes_data.data["xrsa"]

    flares_ind = np.where(
        daytime_flares["event_date"].isin([events_to_download[i]]))[0]
    flares = daytime_flares.iloc[flares_ind]

    vlf_amp, vlf_phase = read_vlf_data(files_vlf[0], tt)

    fig, ax = plt.subplots(3, sharex=True, figsize=(8, 10))
    ax1, ax2, ax3 = ax

    ax1.plot(gl, color="r", label=r"1-8 $\mathrm{\AA}$")
    ax1.plot(gs, color="b", label=r"0.5-4 $\mathrm{\AA}$")
    ax1.set_ylim(1e-9, 1e-3)
    ax1.set_yscale("log")
    ax1.tick_params(which="both", direction="in", right=True, top=True)
    ax1.set_ylabel(r"Flux (Wm$^{-2}$)")
    ax1.legend(loc="upper right")

    ax2.plot(vlf_amp, label='NAA', color='grey')
    ax2.set_ylabel('VLF Amplitude (dB)')

    ax3.plot(vlf_phase, label='NAA', color='k')
    ax3.set_ylabel('Phase (degrees)')
    ax3.set_xlabel("Time {:s} UT".format(events_to_download[i]))
    ax3.set_xlim(events_to_download[i] + " 00:00",
                 events_to_download[i] + " 23:59")
    ax3.xaxis.set_major_locator(dates.HourLocator(interval=3))
    ax3.xaxis.set_minor_locator(dates.HourLocator(interval=1))
    ax3.xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))
    ax3.tick_params(which="both", direction="in", right=True, top=True)

    for f in flares["peak_time"]:
        ax1.axvline(parse_time(f).datetime, color="k", ls="dashed")
        ax2.axvline(parse_time(f).datetime, color="k", ls="dashed")
        ax3.axvline(parse_time(f).datetime, color="k", ls="dashed")

    ax1.grid()
    ax2.grid()
    ax3.grid()
    plt.tight_layout()
    plt.subplots_adjust(hspace=0.05)
    plt.savefig(save_dir + transmitter +
                parse_time(events_to_download[i]).strftime("%Y%m%d.png"),
                dpi=200)
    plt.close()
def plot_flare(new_df, i, save=False):
    """
    Function to plot a flare from a pandas DataFrame.

    Parameters
    ----------
    new_df : ~`pd.DataFrame`
        DataFrame with each row a flare.
    i : ~`int`
        Row index to plot.
    save : ~`bool`, optional
        If True, save the plot to file rather than displaying it.
    """
    sid_file = glob.glob(pd.to_datetime(
        new_df["event_starttime"].iloc[i]).strftime(sid_file_dir))[0]
    new_ts = pd.to_datetime(
        new_df["event_starttime"].iloc[i]) - datetime.timedelta(minutes=5)
    new_te = pd.to_datetime(
        new_df["event_endtime"].iloc[i]) + datetime.timedelta(minutes=5)

    sid_data = sid_to_series(sid_file).truncate(new_ts, new_te)
    sid_resample = pd.Series(savgol_filter(sid_data, 2 * 60 + 1, 3),
                             index=sid_data.index)
    if len(sid_data) > 300:
        sid_resample2 = pd.Series(savgol_filter(sid_data, 5 * 60 + 1, 3),
                                  index=sid_data.index)
    tmax_sid = sid_resample.index[np.argmax(sid_resample)]

    goes_file = glob.glob(pd.to_datetime(
        new_df["event_starttime"].iloc[i]).strftime(goes_file_dir))[0]
    goes = ts.TimeSeries(goes_file).truncate(new_ts, new_te)
    gl = goes.to_dataframe()["xrsb"]
    gs = goes.to_dataframe()["xrsa"]

    fig, ax = plt.subplots(2, sharex=True)
    ax[0].plot(gl, color="r", label=r"1-8$\mathrm{\AA}$")
    ax[0].plot(gs, color="b", label=r"0.5-4$\mathrm{\AA}$")
    ax[0].set_ylabel(r"Flux (Wm$^{-2}$)")
    ax[0].legend(loc="upper left")
    ax[0].set_yscale("log")

    ax[1].plot(sid_data, color="grey", lw=0.5, label="raw")
    ax[1].plot(sid_resample, color="k", label="2 minute smooth")
    if len(sid_data) > 300:
        ax[1].plot(sid_resample2, color="g", label="5 minute smooth")
    ax[1].legend(loc="upper left")

    for a in ax:
        a.axvline(pd.to_datetime(new_df["event_peaktime"].iloc[i]),
                  color="k", lw=0.8, ls="dashed")
        a.axvline(pd.to_datetime(new_df["event_starttime"].iloc[i]),
                  color="k", lw=0.8, ls="dashed")
        a.axvline(pd.to_datetime(new_df["event_endtime"].iloc[i]),
                  color="k", lw=0.8, ls="dashed")
        a.axvline(tmax_sid, color="r")

    ax[0].xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))
    tstart_str = pd.to_datetime(
        new_df["event_starttime"].iloc[i]).strftime("%Y-%m-%dT%H:%M")
    ax[1].set_xlabel(new_df["event_peaktime"].iloc[i])
    plt.tight_layout()
    plt.subplots_adjust(hspace=0.01)
    if save:
        plt.savefig("./final_stats_study_tests/flare2_{:d}_{:s}.png".format(
            i, tstart_str))
        plt.close()

# for i in range(len(vlf_flares)):
#     plot_flare(vlf_flares, i, save=True)
#     print(i)
def plot(i):
    tt = parse_time(days_to_plot[i]).strftime("%Y%m%d")
    files_vlf = glob.glob(vlf_data_dir + tt + '*.csv')
    if len(files_vlf) == 0:
        print("No VLF data")
        return
    goes_file = goes_data_dir + "go15" + tt + ".fits"
    if not Path(goes_file).exists():
        print("No goes data")
        return
    data_vlf = read_files(files_vlf)
    goes_data = ts.TimeSeries(goes_file).to_dataframe()
    flares_ind = np.where(
        daytime_flares["event_date"].isin([days_to_plot[i]]))[0]
    flares = daytime_flares.iloc[flares_ind]

    fig, ax = plt.subplots(2, figsize=(8, 6), sharex=True)
    ax[0].plot(goes_data['xrsb'], color='b', label=r'1-8$\mathrm{\AA}$')
    ax[0].plot(goes_data['xrsa'], color='r', label=r'0.5-4$\mathrm{\AA}$')
    ax[0].set_yscale('log')
    ax[0].set_xlim(days_to_plot[i] + " 00:00", days_to_plot[i] + " 23:59")

    ax[1].plot(pd.to_datetime(data_vlf['time']), data_vlf['volts'],
               color='grey')

    for f in flares["peak_time"]:
        ax[0].axvline(parse_time(f).datetime, color="k", ls="dashed")
        ax[1].axvline(parse_time(f).datetime, color="k", ls="dashed")

    ax[1].xaxis.set_major_locator(dates.HourLocator(interval=3))
    ax[1].xaxis.set_minor_locator(dates.HourLocator(interval=1))
    ax[1].xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))
    ax[0].set_ylim(1e-9, 1e-3)
    # ax[1].set_ylim(-5, 5)
    for a in ax:
        a.tick_params(which='both', direction='in')
    ax[0].set_ylabel(r'Flux Wm$^{-2}$')
    ax[1].set_ylabel('Volts')
    ax[1].set_xlabel('Time ' + days_to_plot[i] + ' UT')
    plt.tight_layout()
    plt.subplots_adjust(hspace=0.05)
    plt.savefig(save_dir + 'birr_vlf_' + days_to_plot[i] + '.png', dpi=100)
    plt.close()
def second_runthrough():
    gg = glob.glob(
        "/Users/laurahayes/ionospheric_work/ionospheric-analysis/stats_study/plots_that_work/*.png"
    )
    gg.sort()
    flare_unique_day = [x.split("/")[-1][6:16] for x in gg]

    new_df = flare_list[flare_list["unique_day"].isin([flare_unique_day[0]])]
    for i in range(1, len(flare_unique_day)):
        new_df = new_df.append(
            flare_list[flare_list["unique_day"].isin([flare_unique_day[i]])])
    new_df.reset_index(inplace=True)

    for i in range(len(new_df)):
        print(i)
        sid_file = glob.glob(
            pd.to_datetime(
                new_df["event_starttime"].iloc[i]).strftime(sid_file_dir))[0]
        new_ts = pd.to_datetime(
            new_df["event_starttime"].iloc[i]) - datetime.timedelta(minutes=10)
        new_te = pd.to_datetime(
            new_df["event_endtime"].iloc[i]) + datetime.timedelta(minutes=10)
        sid_data = sid_to_series(sid_file).truncate(new_ts, new_te)
        goes_file = glob.glob(
            pd.to_datetime(
                new_df["event_starttime"].iloc[i]).strftime(goes_file_dir))[0]
        goes = ts.TimeSeries(goes_file).truncate(new_ts, new_te)
        gl = goes.to_dataframe()["xrsb"]

        fig, ax = plt.subplots(2, sharex=True)
        ax[0].plot(gl)
        ax[1].plot(sid_data)
        for a in ax:
            a.axvline(pd.to_datetime(new_df["event_peaktime"].iloc[i]))
            a.axvline(pd.to_datetime(new_df["event_starttime"].iloc[i]))
            a.axvline(pd.to_datetime(new_df["event_endtime"].iloc[i]))
        ax[0].xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))
        tstart_str = pd.to_datetime(
            new_df["event_starttime"].iloc[i]).strftime("%Y-%m-%dT%H:%M")
        ax[1].set_xlabel(new_df["event_peaktime"].iloc[i])
        plt.tight_layout()
        plt.savefig("./all_flares/flare_{:s}.png".format(tstart_str))
        plt.close()
def test_calculate_xray_luminosity():
    # Check the correct exception is raised for incorrect inputs.
    not_goeslc = []
    with pytest.raises(TypeError):
        goes_test = goes.calculate_xray_luminosity(not_goeslc)

    # Check the function gives correct results.
    goeslc_input = timeseries.TimeSeries(get_test_filepath("go1520110607.fits"))
    goeslc_test = goes.calculate_xray_luminosity(goeslc_input)
    exp_xrsa = u.Quantity([2.8962085e+14, 2.8962085e+14, 2.8962085e+14,
                           2.8962085e+14, 2.8962085e+14], "W")
    exp_xrsb = u.Quantity([5.4654352e+16, 5.3133844e+16, 5.3895547e+16,
                           5.2375035e+16, 5.3895547e+16], "W")
    assert_quantity_allclose(exp_xrsa,
                             goeslc_test.quantity("luminosity_xrsa")[:5])
    assert_quantity_allclose(exp_xrsb,
                             goeslc_test.quantity("luminosity_xrsb")[:5])
def get_lyradata(dtype):
    if dtype == 'lc':
        # Create sample LYRALightCurve
        lyrats = lightcurve.LYRALightCurve.create("2014-01-01")
        lyrats.data = pandas.DataFrame(index=TIME,
                                       data={"CHANNEL1": CHANNELS[0],
                                             "CHANNEL2": CHANNELS[1],
                                             "CHANNEL3": CHANNELS[0],
                                             "CHANNEL4": CHANNELS[1]})
    else:
        # Create sample TimeSeries
        lyrats = timeseries.TimeSeries(
            os.path.join(rootdir,
                         'lyra_20150101-000000_lev3_std_truncated.fits.gz'),
            source='LYRA')
        lyrats.data = pandas.DataFrame(index=TIME,
                                       data={"CHANNEL1": CHANNELS[0],
                                             "CHANNEL2": CHANNELS[1],
                                             "CHANNEL3": CHANNELS[0],
                                             "CHANNEL4": CHANNELS[1]})
    return lyrats
def units_attach(data, units):
    """
    Takes the units defined by the user and attaches them to the TimeSeries.

    Parameters
    ----------
    data : :class:`pandas.DataFrame`
        Input data. Takes the DataFrame which needs to have units attached.
    units : :class:`collections.OrderedDict`
        The units manually defined by the user.

    Returns
    -------
    out : sunpy.timeseries.timeseriesbase.GenericTimeSeries
        DataFrame converted into TimeSeries with units attached.
    """
    unit_key = list(units.keys())
    for column_name in data.columns:
        if column_name not in unit_key:
            units[column_name] = u.dimensionless_unscaled
            message = "{} column has missing units.".format(column_name)
            warnings.warn(message, Warning)
    timeseries_data = ts.TimeSeries(data, units)
    return timeseries_data
def plot_and_get_data(save=True):
    errors = []
    results = []
    for i in range(len(vlf_flares)):
        print(i)
        try:
            sid_file = glob.glob(
                vlf_flares.iloc[i]["event_starttime"].strftime(sid_file_dir))[0]
            new_ts = pd.to_datetime(
                vlf_flares["event_starttime"].iloc[i]) - datetime.timedelta(minutes=5)
            new_te = pd.to_datetime(
                vlf_flares["event_endtime"].iloc[i]) + datetime.timedelta(minutes=5)
            sid_data = sid_to_series(sid_file).truncate(new_ts, new_te)

            goes_file = glob.glob(pd.to_datetime(
                vlf_flares["event_starttime"].iloc[i]).strftime(goes_file_dir))[0]
            goes = ts.TimeSeries(goes_file).truncate(new_ts, new_te)
            gl = goes.to_dataframe()["xrsb"]
            gs = goes.to_dataframe()["xrsa"]
            gl_flare = gl.truncate(vlf_flares["event_starttime"].iloc[i],
                                   vlf_flares["event_endtime"].iloc[i])
            gs_flare = gs.truncate(vlf_flares["event_starttime"].iloc[i],
                                   vlf_flares["event_endtime"].iloc[i])

            # Smoothing window defined in terms of cadence; it must be odd
            # for the Savitzky-Golay filter.
            window_sec = (sid_data.index[1] - sid_data.index[0]).total_seconds()
            window = int((2 * 60) / window_sec)
            if window % 2 == 0:
                window = window + 1
            sid_resample = pd.Series(savgol_filter(sid_data, int(window), 3),
                                     index=sid_data.index)
            sid_resample_flare = sid_resample.truncate(
                vlf_flares["event_starttime"].iloc[i],
                vlf_flares["event_endtime"].iloc[i])

            p_vlf = np.max(sid_resample_flare)
            p_vlf2 = np.abs(np.max(sid_resample_flare) - sid_resample_flare[0])
from matplotlib.patches import ConnectionPatch
import numpy as np
import datetime
from astropy.io import fits
import pandas as pd
from sunpy import timeseries as ts  # needed for ts.TimeSeries below
from sunpy.time import parse_time
from scipy.io import readsav

flare_ts = '2014-06-11 05:30'
flare_te = '2014-06-11 05:40'

pul_ts = '2014-06-11 05:32:30'
pul_te = '2014-06-11 05:36:30'

goes = ts.TimeSeries('go1520140611.fits')
short_goes = goes.truncate(flare_ts, flare_te)
gl = short_goes.data['xrsb']
gs = short_goes.data['xrsa']

norp_df, norp1, norp2, norp3, norp9, norp17, norp35, norp80 = read_norp(
    'norp20140611_0534.xdr')

rhessi_time, rhessi_arr, rhessi_emin, rhessi_emax, atten_state = read_rhessi(
    './rhessi_data/hsi_spectrum_20140611_052832.fits')
rhessi_atten = pd.Series(atten_state[0], index=rhessi_time)
rhessi_atten[rhessi_atten == 0] = 1.1


def df_rhessi_kev(e_low, e_high):
    # Sum the RHESSI count rates over the energy bins between e_low and e_high.
    return pd.Series(
        np.sum(rhessi_arr[find_closest(rhessi_emin, e_low):
                          find_closest(rhessi_emax, e_high)], axis=0),
        index=rhessi_time)


rhessi_612 = df_rhessi_kev(6, 12)
rhessi_1225 = df_rhessi_kev(12, 25)
rhessi_2550 = df_rhessi_kev(25, 50)
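# `read_norp`, `read_rhessi`, and `find_closest` above are helpers assumed to
# be defined elsewhere in this project. A minimal sketch of what `find_closest`
# presumably does (it would need to be defined before the df_rhessi_kev calls
# above):
def find_closest(arr, value):
    # Index of the element of `arr` nearest to `value`.
    return int(np.argmin(np.abs(np.asarray(arr) - value)))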
flaremask = (labels == 3)

# Creating arrays containing time-series data
Int_Inc = []
tiempos = []
for i in range(20):
    diff = Mfiles[i + 1].data - Mfiles[i].data
    Mdiff = Map(np.nan_to_num(np.abs(diff)), Mfiles[i].meta)
    Mdiffrot = Mdiff.rotate(angle=Mdiff.meta['crota2'] * u.deg)
    Int_Inc.append((Mdiffrot.data * flaremask).sum())
    tiempos.append(datetime.datetime.strptime(Mdiffrot.meta["date-obs"],
                                              '%Y-%m-%dT%H:%M:%S.%f'))
Int_Inc = np.array(Int_Inc)  # as an array so it can be normalised below

tbl_meta = {'t_key': 't_value'}
table = Table([tiempos, Int_Inc / np.max(Int_Inc)],
              names=['time', 'Inclination'],
              meta=Mfiles[i].meta)
table.add_index('time')
ts_table = ts.TimeSeries(table)

# PLOT
fig, ax = plt.subplots(figsize=(10, 4))
ts_table.plot(marker='o', linewidth=3)
ax.axvline(tflare, color="gray", linestyle="--")
ax.text(tflare, np.min(Int_Inc / np.max(Int_Inc)), 'flare peak',
        fontsize=14, color='gray', rotation=90, rotation_mode='anchor')
ax.tick_params(axis='both', labelsize=14)
ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
ax.set_xlabel('Time [hour:min]', fontsize=16)
ax.set_ylabel('Normalized B-Inclination diff', fontsize=14)
ax.set_title(Mfiles[imask].meta['date-obs'], fontsize=24)
plt.legend('', frameon=False)  # fix bug with legend
fig.savefig('20131108T0422.pdf', dpi=150, bbox_inches='tight')
plt.show()
def plot_flare(i):
    new_ts = pd.to_datetime(
        vlf_flares["event_starttime"].iloc[i]) - datetime.timedelta(minutes=5)
    new_te = pd.to_datetime(
        vlf_flares["event_endtime"].iloc[i]) + datetime.timedelta(minutes=5)

    # SID data
    sid_file = glob.glob(
        vlf_flares.iloc[i]["event_starttime"].strftime(sid_file_dir))[0]
    sid_data = sid_to_series(sid_file).truncate(new_ts, new_te)
    sid_data_db = sid_to_series(sid_file, amp=True).truncate(new_ts, new_te)

    # Smoothing window defined in terms of cadence
    window_sec = (sid_data.index[1] - sid_data.index[0]).total_seconds()
    window = int((3 * 60) / window_sec)
    if window % 2 == 0:
        window = window + 1
    sid_resample = pd.Series(savgol_filter(sid_data, int(window), 3),
                             index=sid_data.index)
    sid_resample_flare = sid_resample.truncate(
        vlf_flares["event_starttime"].iloc[i],
        vlf_flares["event_endtime"].iloc[i])
    sid_resample_db = pd.Series(savgol_filter(sid_data_db, int(window), 3),
                                index=sid_data_db.index)
    sid_resample_flare_db = sid_resample_db.truncate(
        vlf_flares["event_starttime"].iloc[i],
        vlf_flares["event_endtime"].iloc[i])

    # GOES data
    goes_file = glob.glob(pd.to_datetime(
        vlf_flares["event_starttime"].iloc[i]).strftime(goes_file_dir))[0]
    goes = ts.TimeSeries(goes_file).truncate(new_ts, new_te)
    gl = goes.to_dataframe()["xrsb"]
    gs = goes.to_dataframe()["xrsa"]
    gl_flare = gl.truncate(vlf_flares["event_starttime"].iloc[i],
                           vlf_flares["event_endtime"].iloc[i])
    gs_flare = gs.truncate(vlf_flares["event_starttime"].iloc[i],
                           vlf_flares["event_endtime"].iloc[i])

    fig, ax = plt.subplots(2, figsize=(8, 6), sharex=True)

    ax[0].plot(gl, color="r", label=r"1-8$\mathrm{\AA}$")
    ax[0].plot(gs, color="b", label=r"0.5-4$\mathrm{\AA}$")
    ax[0].set_ylabel(r"Flux (Wm$^{-2}$)")
    ax[0].legend(loc="upper left")
    ax[0].set_yscale("log")

    ax[1].plot(sid_data_db - sid_data_db[0], label="VLF amp", color="grey")
    ax[1].plot(sid_resample_flare_db - sid_resample_flare_db[0],
               label="Smoothed VLF amp", color="k")
    ax[1].legend(loc="upper left")

    for a in ax:
        a.axvline(gl_flare.index[np.argmax(gl_flare)], color="r", lw=0.4)
        a.axvline(gs_flare.index[np.argmax(gs_flare)], color="b", lw=0.4)
        a.axvline(pd.to_datetime(vlf_flares["event_starttime"].iloc[i]),
                  ls="dashed", color="grey")
        a.axvline(pd.to_datetime(vlf_flares["event_endtime"].iloc[i]),
                  ls="dashed", color="grey")
        a.axvline(sid_resample_flare.index[np.argmax(sid_resample_flare)],
                  color="k", lw=0.4)

    tstart_str = pd.to_datetime(
        vlf_flares["event_starttime"].iloc[i]).strftime("%Y-%m-%dT%H:%M")
    ax[1].set_xlabel("Time {:s}".format(
        pd.to_datetime(
            vlf_flares["event_starttime"].iloc[i]).strftime("%Y-%m-%d %H:%M")))
    ax[1].xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))
    ax[1].set_xlim(gl.index[0], gl.index[-1])
    ax[1].tick_params(which="both", direction="in")
    ax[0].tick_params(which="both", direction="in")
    ax[1].set_ylabel("VLF amplitude excess (dB)")
    plt.tight_layout()
    ax[1].xaxis.set_major_locator(
        dates.MinuteLocator(byminute=[35, 40, 45, 50]))
    ax[1].xaxis.set_minor_locator(dates.MinuteLocator(interval=1))
    plt.subplots_adjust(hspace=0.01)
    plt.savefig("./paper_plots/example_flare_ana.png", dpi=300,
                facecolor="w", bbox_inches="tight")
    plt.close()
def plot_test(i):
    tt = parse_time(events_to_download[i]).strftime("%Y%m%d")
    files_magno = glob.glob("./magno_files/*{:s}*".format(tt))
    if len(files_magno) == 0:
        print("No magnetometer data")
        return
    goes_file = goes_data_dir + "go15" + tt + ".fits"
    if not Path(goes_file).exists():
        print("No goes data")
        return

    goes_data = ts.TimeSeries(goes_file)
    gl = goes_data.data["xrsb"]
    gs = goes_data.data["xrsa"]

    flares_ind = np.where(
        daytime_flares["event_date"].isin([events_to_download[i]]))[0]
    flares = daytime_flares.iloc[flares_ind]

    filey = files_magno[0]
    magno = pd.read_csv(filey, delim_whitespace=True, skiprows=1,
                        names=["Date", "Time", "Index", "Bx", "By", "Bz",
                               "E1", "E2", "E3", "E4", "T(FG)", "T(E)",
                               "volts"])
    magno_time = [
        datetime.datetime.strptime(
            magno.iloc[j]["Date"] + " " + magno.iloc[j]["Time"],
            "%d/%m/%Y %H:%M:%S") for j in range(len(magno))
    ]
    bx = pd.Series(np.array(magno["Bx"]), index=magno_time)
    by = pd.Series(np.array(magno["By"]), index=magno_time)
    bz = pd.Series(np.array(magno["Bz"]), index=magno_time)
    h = np.sqrt(np.array(bx)**2 + np.array(by)**2)
    H = pd.Series(h, index=magno_time)

    fig, ax = plt.subplots(2, sharex=True, figsize=(10, 8))
    ax1, ax2 = ax

    ax1.plot(gl, color="r", label=r"1-8 $\mathrm{\AA}$")
    ax1.plot(gs, color="b", label=r"0.5-4 $\mathrm{\AA}$")
    ax1.set_ylim(1e-9, 1e-3)
    ax1.set_yscale("log")
    ax1.tick_params(which="both", direction="in", right=True, top=True)
    ax1.set_ylabel(r"Flux (Wm$^{-2}$)")
    ax1.legend(loc="upper right")

    ax2.plot(magno_time, magno["Bx"], label="Bx", color="k")
    # Dummy lines so By and Bz (plotted on twin axes below) appear in the
    # legend.
    ax2.plot(np.nan, color="grey", label="By")
    ax2.plot(np.nan, color="green", label="Bz")
    ax2.legend(loc="lower right")
    ax3 = ax2.twinx()
    ax3.plot(magno_time, magno["By"], label="By", color="grey")
    ax4 = ax2.twinx()
    ax4.plot(magno_time, magno["Bz"], label="Bz", color="green")

    ax2.set_xlabel("Time {:s} UT".format(events_to_download[i]))
    ax2.set_xlim(events_to_download[i] + " 00:00",
                 events_to_download[i] + " 23:59")
    ax2.xaxis.set_major_locator(dates.HourLocator(interval=3))
    ax2.xaxis.set_minor_locator(dates.HourLocator(interval=1))
    ax2.xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))
    ax2.tick_params(which="both", direction="in", right=True, top=True)

    for f in flares["peak_time"]:
        ax1.axvline(parse_time(f).datetime, color="k", ls="dashed")
        ax2.axvline(parse_time(f).datetime, color="k", ls="dashed")

    ax1.grid()
    ax2.grid()
    plt.tight_layout()
    plt.subplots_adjust(hspace=0.05)
    plt.savefig(save_dir +
                parse_time(events_to_download[i]).strftime("%Y%m%d.png"),
                dpi=200)
    plt.close()
from matplotlib import dates
import numpy as np
from sunpy import timeseries as ts
from sunpy.time import parse_time

from read_files import euve_to_series, mag_to_series, sid_to_series

# tstart = '2017-09-10 15:00'
# tend = '2017-09-10 22:00'

tstart = '2015-11-04 08:00'
tend = '2015-11-04 20:00'

euve_data = euve_to_series("./magno_codes/euve_data/g15_euve_{:s}.txt".format(
    parse_time(tstart).strftime('%Y%m%d')))
goes_data = ts.TimeSeries(
    "/Users/laurahayes/QPP/stats_study/TEBBS/goes_rawdata/go15{:s}.fits".format(
        parse_time(tstart).strftime('%Y%m%d')))
magno_data = mag_to_series(
    "./magno_codes/magno_files/birr_mag_{:s}_000001.txt".format(
        parse_time(tstart).strftime('%Y%m%d')))
sid_data = sid_to_series(
    "./vlf_codes/vlf_files/BIR_sid_{:s}_000000.txt".format(
        parse_time(tstart).strftime('%Y%m%d')))

# euve_flare = euve_data.truncate(tstart, tend)
bx = magno_data[0].truncate(tstart, tend)
by = magno_data[1].truncate(tstart, tend)
bz = magno_data[2].truncate(tstart, tend)
gl = goes_data.to_dataframe().truncate(tstart, tend)['xrsb']
gs = goes_data.to_dataframe().truncate(tstart, tend)['xrsa']
euve_data = euve_data.truncate(tstart, tend)
===============

This example shows the current and possible next solar cycle.
"""
import datetime

import matplotlib.pyplot as plt

import sunpy.timeseries as ts
from sunpy.data.sample import NOAAINDICES_TIMESERIES, NOAAPREDICT_TIMESERIES

###############################################################################
# For this example we will use the SunPy sample data. This code snippet grabs
# the most current NOAA solar cycle data as a ``TimeSeries``
# (see :ref:`timeseries_code_ref`).

noaa = ts.TimeSeries(NOAAINDICES_TIMESERIES, source='noaaindices')
noaa_predict = ts.TimeSeries(NOAAPREDICT_TIMESERIES,
                             source='noaapredictindices')

###############################################################################
# Next, we grab a new copy of the data and shift it forward 12 years to
# simulate the next solar cycle. We will also truncate the data to ensure
# that we only plot what is necessary.

noaa2 = ts.TimeSeries(NOAAINDICES_TIMESERIES, source='noaaindices')
noaa2.data = noaa2.data.shift(2, freq=datetime.timedelta(days=365 * 12))
noaa2 = noaa2.truncate('2021/04/01', '2030/01/01')

###############################################################################
# Finally, we plot both ``noaa`` and ``noaa2`` together, with an arbitrary
# range for the strength of the next solar cycle.
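###############################################################################
# A minimal sketch of that final plot. The 'sunspot RI' column (the S.I.D.C.
# International Sunspot Number) is assumed to be present in the NOAA indices
# source; check ``noaa.columns`` if unsure.

fig, ax = plt.subplots()
ax.plot(noaa.data['sunspot RI'], label='Current Solar Cycle')
ax.plot(noaa2.data['sunspot RI'], ls='--', label='Possible Next Solar Cycle')
ax.set_ylabel('Sunspot Number')
ax.legend()
plt.show()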
def plot_event(tstart, tend, path="./bst_files/", plot_goes=False,
               goes_path="./goes_files/", background_sub=False,
               save_plot=None, **kwargs):
    """
    Function to plot the dynamic spectrum for a given date.

    Parameters
    ----------
    tstart : ~`datetime.datetime`, ~`str`
        start time
    tend : ~`datetime.datetime`, ~`str`
        end time
    path : ~`str`, optional
        directory containing the bst .dat files
    plot_goes : ~`boolean`, optional
        if True overplot the GOES XRS lightcurves
    goes_path : ~`str`, optional
        directory containing the GOES FITS files
    background_sub : ~`boolean`, optional
        if True apply background subtraction to the dynamic spectrum
    save_plot : ~`str`, optional
        if given, the filepath to save the plot to
    """
    if isinstance(tstart, str):
        tstart = parse_time(tstart).datetime
    if isinstance(tend, str):
        tend = parse_time(tend).datetime

    file = glob.glob(path + tstart.strftime("%Y%m%d*.dat"))
    if len(file) == 0:
        return
    spec_data, times, freq = read_bst_data(file[0])
    dynamic_spec = dynamic_spectra(spec_data, times, freq).crop_time(tstart,
                                                                     tend)
    if background_sub:
        dynamic_spec = dynamic_spec.background_sub1()

    if plot_goes:
        goes_file = Path(goes_path + tstart.strftime("go15%Y%m%d.fits"))
        if goes_file.exists():
            goes_ts = ts.TimeSeries(os.fspath(goes_file)).truncate(tstart,
                                                                   tend)
        else:
            try:
                goes_file = get_goes(tstart, tend)
                goes_ts = ts.TimeSeries(goes_file).truncate(tstart, tend)
            except Exception:
                print("can't get GOES XRS data")
                return

    fig, ax = plt.subplots(figsize=(10, 6))
    im = dynamic_spec.plot(**kwargs)
    if plot_goes:
        ax2 = ax.twinx()
        ax2.plot(goes_ts.to_dataframe()["xrsb"], color="k",
                 label=r"1-8 $\mathrm{\AA}$")
        ax2.plot(goes_ts.to_dataframe()["xrsa"], color="k", ls="dashed",
                 label=r"0.5-4 $\mathrm{\AA}$")
        ax2.set_ylabel(r"Flux Wm$^{-2}$")
        ax2.set_yscale("log")
        ax2.legend(loc='lower right')
        ax2.set_ylim(1e-9, 1e-3)
    ax.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))
    # fig.colorbar(im)
    fig.autofmt_xdate(rotation=45)
    plt.tight_layout()
    if save_plot is not None:
        plt.savefig(save_plot, dpi=200)
        plt.close()
    plt.show()
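# Example usage of plot_event (the date here is illustrative; it assumes bst
# and GOES files exist under the default paths, and any extra keyword
# arguments pass through to dynamic_spec.plot):
plot_event("2017-09-02 10:00", "2017-09-02 12:00",
           plot_goes=True, background_sub=True,
           save_plot="./event_20170902_dynamic_spectrum.png")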
#############################################################
# Now we can see that this returns just one file for the GOES 15 data.
# Let's now download this data using
# `~sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch`.

file_goes15 = Fido.fetch(result_goes15)

#############################################################
# Also just to note, this will download the file to the
# ``~/sunpy/data/`` directory on your local machine. You can also
# define where you want this to download to using the ``path`` keyword
# argument in `~sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch`
# (e.g. ``Fido.fetch(result, path="./")``).

#############################################################
# Let's now load this data into a `~sunpy.timeseries.TimeSeries`,
# and inspect the data using `~sunpy.timeseries.GenericTimeSeries.peek()`.

goes_15 = ts.TimeSeries(file_goes15)
goes_15.peek()

###############################################################
# We can also pull out the individual GOES channels and plot. The 0.5-4
# angstrom channel is known as the "xrsa" channel and the 1-8 angstrom
# channel is known as the "xrsb" channel.

fig, ax = plt.subplots()
ax.plot(goes_15.index, goes_15.quantity("xrsb"))
ax.set_ylabel("Flux (Wm$^{-2}$)")
ax.set_xlabel("Time")
fig.autofmt_xdate()
plt.show()

###############################################################
# We can also truncate the data for the time of the large flare,
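###############################################################
# A sketch of that truncation using
# `~sunpy.timeseries.GenericTimeSeries.truncate` (the flare interval below is
# illustrative, not taken from the data):

goes_flare = goes_15.truncate("2015-06-21 01:00", "2015-06-21 05:00")
goes_flare.peek()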
# sunspot number and radio flux will evolve. Predicted values are based on the
# consensus of the Solar Cycle 24 Prediction Panel.
#
# We will first search for and then download the data.

time_range = TimeRange("2008-06-01 00:00", Time.now())
result = Fido.search(a.Time(time_range), a.Instrument('noaa-indices'))
f_noaa_indices = Fido.fetch(result)
result = Fido.search(a.Time(time_range.end,
                            time_range.end + TimeDelta(4 * u.year)),
                     a.Instrument('noaa-predict'))
f_noaa_predict = Fido.fetch(result)

###############################################################################
# We then load them into individual `~sunpy.timeseries.TimeSeries` objects.

noaa = ts.TimeSeries(f_noaa_indices, source='noaaindices').truncate(time_range)
noaa_predict = ts.TimeSeries(f_noaa_predict, source='noaapredictindices')

###############################################################################
# Finally, we plot both ``noaa`` and ``noaa_predict`` for the sunspot number.
# In this case we use the S.I.D.C. Brussels International Sunspot Number (RI).
# The predictions provide both high and low values, which we plot below as
# ranges.

time_support()
plt.figure()
plt.plot(noaa.time, noaa.quantity('sunspot RI'), label='Sunspot Number')
plt.plot(noaa_predict.time, noaa_predict.quantity('sunspot'),
         color='grey', label='Near-term Prediction')
plt.fill_between(noaa_predict.time, noaa_predict.quantity('sunspot low'),
                 noaa_predict.quantity('sunspot high'),
                 alpha=0.3, color='grey')
save_dir = "/Users/laurahayes/ionospheric_work/ionospheric-analysis/magno_codes/plots_x_flares/" i = 0 for i in range(len(x_flares)): print("analyzing {:d}".format(i)) try: file_euve = glob.glob(euve_data_dir + parse_time( x_flares.iloc[i]['event_date']).strftime("*%Y%m%d.txt"))[0] file_magno = glob.glob(magno_data_dir + parse_time( x_flares.iloc[i]['event_date']).strftime("*%Y%m%d*.txt"))[0] goes_file = glob.glob(goes_data_dir + parse_time( x_flares.iloc[i]['event_date']).strftime("go15%Y%m%d.fits"))[0] data_euve = euve_to_series(file_euve) data_goes = ts.TimeSeries(goes_file) data_mag_bx, data_mag_by, data_mag_bz = mag_to_series(file_magno) gl = data_goes.to_dataframe()['xrsb'].truncate( x_flares.iloc[i]['start_time'], x_flares.iloc[i]['end_time']) gs = data_goes.to_dataframe()['xrsa'].truncate( x_flares.iloc[i]['start_time'], x_flares.iloc[i]['end_time']) data_mag_bx = data_mag_bx.truncate(x_flares.iloc[i]['start_time'], x_flares.iloc[i]['end_time']) data_mag_by = data_mag_by.truncate(x_flares.iloc[i]['start_time'], x_flares.iloc[i]['end_time']) data_mag_bz = data_mag_bz.truncate(x_flares.iloc[i]['start_time'], x_flares.iloc[i]['end_time']) data_euve = data_euve.truncate(x_flares.iloc[i]['start_time'], x_flares.iloc[i]['end_time'])
import matplotlib.pyplot as plt
import matplotlib
import pylab

import sunpy.map
import sunpy.data.sample
import sunpy.timeseries as ts
from sunpy.time import parse_time
from astropy import units as u
from astropy.coordinates import SkyCoord
from sunpy.net import hek

client = hek.HEKClient()

# GOES data for the timeseries
goes = ts.TimeSeries(sunpy.data.sample.GOES_XRS_TIMESERIES, source='XRS')
flares_hek = client.search(hek.attrs.Time('2011-06-07 00:00',
                                          '2011-06-07 23:59'),
                           hek.attrs.FL,
                           hek.attrs.FRM.Name == 'SWPC')

# AIA data for the map
my_map = sunpy.map.Map(sunpy.data.sample.AIA_171_IMAGE)
top_right = SkyCoord(1200 * u.arcsec, 0 * u.arcsec,
                     frame=my_map.coordinate_frame)
bottom_left = SkyCoord(500 * u.arcsec, -700 * u.arcsec,
                       frame=my_map.coordinate_frame)
my_submap = my_map.submap(bottom_left, top_right)

# Plot the figure
fig = plt.figure(figsize=(13, 6))
ax0 = pylab.axes([0.05, 0.09, 0.42, 0.8])
ax0.plot(goes.data['xrsb'], color='r', label=r'1-8 $\mathrm{\AA}$')
ax0.plot(goes.data['xrsa'], color='b', label=r'0.5-4 $\mathrm{\AA}$')
def sid_to_series(file, amp=False):
    sid = pd.read_csv(file, comment="#", names=["times", "data"])
    tt = parse_time(sid["times"]).datetime
    if amp:
        ser = pd.Series(calc_amp(sid["data"].values), index=tt)
    else:
        ser = pd.Series(sid["data"].values, index=tt)
    ser.sort_index(inplace=True)
    return ser


flare_start = "2013-05-22 12:00:00"
flare_end = "2013-05-22 15:00:00"

sid_data = sid_to_series(sid_file).truncate(flare_start, flare_end)
goes_data = ts.TimeSeries(goes_file).truncate(flare_start, flare_end)
gl = goes_data.to_dataframe()["xrsb"]
gs = goes_data.to_dataframe()["xrsa"]


def make_rhessi_lc(file):
    # file = 'hsi_spectrum_20130515_012024.fits'
    a = fits.open(file)
    start_time = a[0].header['DATE_OBS']
    t_start = datetime.datetime.strptime(
        start_time[0:10] + ' ' + start_time[11:], '%Y-%m-%d %H:%M:%S.%f')
    start_time_day = datetime.datetime.strptime(
        str(t_start)[0:10] + ' 00:00:00', '%Y-%m-%d %H:%M:%S')
    # print(a[1].data.columns)
    time = a[1].data['TIME']
    time_array = []
    for i in range(len(time)):
import datetime
from collections import OrderedDict

import numpy as np
import pandas as pd
import astropy.units as u
import sunpy.timeseries as ts

# The index of the SunPy TimeSeries is always datetime
base = datetime.datetime.today()
times = [base - datetime.timedelta(minutes=x) for x in range(24 * 60, 0, -1)]
intensity = np.sin(np.arange(0, 12 * np.pi, ((12 * np.pi) / (24 * 60))))

# This example shows how a TimeSeries object is made from a Pandas DataFrame
data = pd.DataFrame(intensity, index=times, columns=['intensity'])

# A TimeSeries can have metadata attached to it.
meta = OrderedDict({'key': 'value'})

# Astropy units are attached to the TimeSeries by passing them alongside the
# data. The units are stored in an OrderedDict object.
# Each key is the column name, and the value is the astropy unit for that
# column.
units = OrderedDict([('intensity', u.W / u.m**2)])
ts_custom = ts.TimeSeries(data, meta, units)

# Using sunpy.timeseries.TimeSeries.data will return a Pandas DataFrame of
# the TimeSeries object.
print(ts_custom.data)

# To view the units, sunpy.timeseries.TimeSeries.units can be used.
print(ts_custom.units)

# The values can be extracted along with their units as well:
# sunpy.timeseries.TimeSeries.quantity(column_name)[index]
print(ts_custom.quantity('intensity')[1])
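# A new column can also be added from an astropy Quantity, keeping its unit.
# A short sketch using sunpy.timeseries.GenericTimeSeries.add_column (the
# 'intensity_double' column name here is arbitrary):
doubled = ts_custom.quantity('intensity') * 2
ts_custom = ts_custom.add_column('intensity_double', doubled)
print(ts_custom.units)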
def plot_one(i):
    new_ts = pd.to_datetime(
        vlf_flares["event_starttime"].iloc[i]) - datetime.timedelta(minutes=5)
    new_te = pd.to_datetime(
        vlf_flares["event_endtime"].iloc[i]) + datetime.timedelta(minutes=5)

    # SID data
    sid_file = glob.glob(
        vlf_flares.iloc[i]["event_starttime"].strftime(sid_file_dir))[0]
    sid_data = sid_to_series(sid_file).truncate(new_ts, new_te)
    sid_data_db = sid_to_series(sid_file, amp=True).truncate(new_ts, new_te)

    # Smoothing window defined in terms of cadence
    window_sec = (sid_data.index[1] - sid_data.index[0]).total_seconds()
    window = int((3 * 60) / window_sec)
    if window % 2 == 0:
        window = window + 1
    sid_resample = pd.Series(savgol_filter(sid_data, int(window), 3),
                             index=sid_data.index)
    sid_resample_flare = sid_resample.truncate(
        vlf_flares["event_starttime"].iloc[i],
        vlf_flares["event_endtime"].iloc[i])
    sid_resample_db = pd.Series(savgol_filter(sid_data_db, int(window), 3),
                                index=sid_data_db.index)
    sid_resample_flare_db = sid_resample_db.truncate(
        vlf_flares["event_starttime"].iloc[i],
        vlf_flares["event_endtime"].iloc[i])

    # GOES data
    goes_file = glob.glob(pd.to_datetime(
        vlf_flares["event_starttime"].iloc[i]).strftime(goes_file_dir))[0]
    goes = ts.TimeSeries(goes_file).truncate(new_ts, new_te)
    gl = goes.to_dataframe()["xrsb"]
    gs = goes.to_dataframe()["xrsa"]
    gl_flare = gl.truncate(vlf_flares["event_starttime"].iloc[i],
                           vlf_flares["event_endtime"].iloc[i])
    gs_flare = gs.truncate(vlf_flares["event_starttime"].iloc[i],
                           vlf_flares["event_endtime"].iloc[i])

    euvs_flare = euvs_df.truncate(new_ts, new_te)

    # Plots
    fig, ax = plt.subplots(3, sharex=True)

    # GOES XRS
    ax[0].plot(gl, color="r", label=r"1-8$\mathrm{\AA}$")
    ax[0].plot(gs, color="b", label=r"0.5-4$\mathrm{\AA}$")
    ax[0].set_ylabel(r"Flux (Wm$^{-2}$)")
    ax[0].legend(loc="upper left")
    ax[0].set_yscale("log")

    # GOES EUVS
    ax[1].plot(euvs_flare.irrad_ly)

    # VLF data
    ax[2].plot(sid_data_db - sid_data_db[0], label="raw data", color="grey")
    ax[2].plot(sid_resample_flare_db - sid_resample_flare_db[0],
               label="2min resample", color="k")
    ax[2].legend(loc="upper left")

    tstart_str = pd.to_datetime(
        vlf_flares["event_starttime"].iloc[i]).strftime("%Y-%m-%dT%H:%M")
    ax[2].set_xlabel("Time {:s}".format(
        pd.to_datetime(
            vlf_flares["event_starttime"].iloc[i]).strftime("%Y-%m-%d %H:%M")))
    ax[2].xaxis.set_major_formatter(dates.DateFormatter("%H:%M"))

    for a in ax:
        a.axvline(gl_flare.index[np.argmax(gl_flare)], color="r")
        a.axvline(gs_flare.index[np.argmax(gs_flare)], color="b")
        a.axvline(pd.to_datetime(vlf_flares["event_starttime"].iloc[i]),
                  ls="dashed", color="grey")
        a.axvline(pd.to_datetime(vlf_flares["event_endtime"].iloc[i]),
                  ls="dashed", color="grey")
        a.axvline(sid_resample_flare.index[np.argmax(sid_resample_flare)],
                  color="k")

    plt.tight_layout()
    plt.subplots_adjust(hspace=0.01)
    plt.savefig("./test_plots/lyman_alpha_{:d}_{:s}.png".format(i, tstart_str))
    plt.close()
result_goes15 = Fido.search(a.Time(tstart, tend), a.Instrument("XRS"),
                            a.goes.SatelliteNumber(15))
print(result_goes15)

#############################################################
# Now we can see that this returns just one file for the GOES 15 data.
# Let's now download this data using
# `~sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch`.

file_goes15 = Fido.fetch(result_goes15)

#############################################################
# Let's now load this data into a `~sunpy.timeseries.TimeSeries`,
# and inspect the data using `~sunpy.timeseries.GenericTimeSeries.peek()`.

goes_15 = ts.TimeSeries(file_goes15)
goes_15.peek()

#############################################################
# The resulting `~sunpy.timeseries.TimeSeries` can be filtered by GOES quality
# flags. For more information refer to the
# `GOES Data Guide <https://satdat.ngdc.noaa.gov/sem/goes/data/science/xrs/GOES_13-15_XRS_Science-Quality_Data_Readme.pdf>`__.

df = goes_15.to_dataframe()
df = df[(df["xrsa_quality"] == 0) & (df["xrsb_quality"] == 0)]
goes_15 = ts.TimeSeries(df, goes_15.meta, goes_15.units)

###############################################################
# We can also pull out the individual GOES channels and plot. The 0.5-4
# angstrom channel is known as the "xrsa" channel and the 1-8 angstrom
# channel is known as the "xrsb" channel.