from datetime import datetime

import numpy as np
from sunpy.net import Fido, attrs as a
from sunpy.timeseries import TimeSeries


def get_goes(tr, t0, t1):
    """Fetch GOES XRS data for the time range ``tr`` and return the xrsa and
    xrsb channels restricted to the window [t0, t1]."""
    results = Fido.search(a.Time(tr), a.Instrument('XRS'))
    files = Fido.fetch(results)
    goes = TimeSeries(files, source='XRS')
    goes_df = goes.to_dataframe()

    # Rebuild datetime objects from the string form of each index entry.
    times = []
    for item in goes_df.index:
        s = str(item)
        times.append(datetime(int(s[0:4]), int(s[5:7]), int(s[8:10]),
                              int(s[11:13]), int(s[14:16])))
    times = np.array(times)

    # e.g. t0 = datetime(2019, 4, 9, 10, 0), t1 = datetime(2019, 4, 9, 15, 0)
    indices = np.where((times >= t0) & (times <= t1))[0]
    xrsa_data = goes_df['xrsa'].iloc[indices]
    xrsb_data = goes_df['xrsb'].iloc[indices]
    return xrsa_data, xrsb_data
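A leaner variant of the same windowing (a sketch, assuming the fetched files cover [t0, t1]): TimeSeries.truncate accepts datetime endpoints and replaces the manual string parsing of the index.

def get_goes_truncated(tr, t0, t1):
    results = Fido.search(a.Time(tr), a.Instrument('XRS'))
    goes = TimeSeries(Fido.fetch(results), source='XRS', concatenate=True)
    # truncate() returns a new TimeSeries restricted to [t0, t1].
    window = goes.truncate(t0, t1).to_dataframe()
    return window['xrsa'], window['xrsb']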
from flask import Response, jsonify, request
from sunpy.net import Fido, attrs
from sunpy.time import TimeRange
from sunpy.timeseries import TimeSeries


def goes_data():
    begin = request.args["begin"]
    end = request.args["end"]
    try:
        tr = TimeRange(begin, end)
    except ValueError:
        return jsonify({"message": "Date error"}), 400
    results = Fido.search(attrs.Time(tr), attrs.Instrument("XRS"))
    files = Fido.fetch(results)
    goes = TimeSeries(files)
    # Transform the XRSTimeSeries object into a DataFrame to ease manipulation.
    goes = goes.to_dataframe()
    # get_correct_goes_index() is a project-local helper that snaps the
    # requested bounds onto the DataFrame's index.
    begin, end = get_correct_goes_index(goes.index, begin, end)
    goes = goes[begin:end]
    # Convert the index entries to strings so they serialise cleanly to JSON.
    goes.index = goes.index.map(str)
    return Response(goes.to_json(), mimetype='application/json')
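For completeness, a minimal sketch of wiring this view into an app; the route name and app layout are assumptions, not part of the original code.

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/goes', 'goes_data', goes_data)

# Example request (hypothetical URL):
#   GET /goes?begin=2011-06-07T04:00&end=2011-06-07T12:00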
import datetime

from sunpy.net import Fido, attrs as a, hek
from sunpy.time import TimeRange
from sunpy.timeseries import TimeSeries


def graph_sp(self):
    # Fetch the last two days of GOES XRS data.
    now = datetime.datetime.now()
    length = datetime.timedelta(hours=24 * 2)
    tr = TimeRange([now - length, now])
    results = Fido.search(a.Time(tr), a.Instrument('XRS'))
    print("Files found:", results)
    files = Fido.fetch(results)
    goes = TimeSeries(files, source='XRS', concatenate=True)
    # goes = database.add_from_vso_query_result(results)

    client = hek.HEKClient()
    flares_hek = client.search(hek.attrs.Time(tr.start, tr.end),
                               hek.attrs.FL,
                               hek.attrs.FRM.Name == 'SWPC')

    # out = goes.to_dataframe().to_json(orient='index', date_format='iso')
    print(goes.to_dataframe())
    # Smooth with a 20-sample rolling mean and return the result as CSV.
    out = goes.to_dataframe().rolling(20, min_periods=1).mean().to_csv()
    print(flares_hek)
    return out
import matplotlib.pyplot as plt

from sunpy.net import Fido, attrs as a
from sunpy.time import parse_time
from sunpy.timeseries import TimeSeries

###############################################################################
# Let's grab GOES XRS data for a particular time of interest and the HEK flare
# data for this time from the NOAA Space Weather Prediction Center (SWPC).

tr = a.Time('2011-06-07 04:00', '2011-06-07 12:00')
results = Fido.search(
    tr,
    a.Instrument.xrs & a.goes.SatelliteNumber(15) |
    a.hek.FL & (a.hek.FRM.Name == 'SWPC'))

###############################################################################
# Then download the XRS data and load it into a TimeSeries.

files = Fido.fetch(results)
goes = TimeSeries(files)

###############################################################################
# Next let's retrieve the `~sunpy.net.hek.HEKTable` from the Fido result
# and then load the first row from the HEK results into ``flares_hek``.

hek_results = results['hek']
flares_hek = hek_results[0]

###############################################################################
# Let's plot everything together.

fig, ax = plt.subplots()
goes.plot()
ax.axvline(parse_time(flares_hek['event_peaktime']).datetime)
ax.axvspan(parse_time(flares_hek['event_starttime']).datetime,
           parse_time(flares_hek['event_endtime']).datetime,
           alpha=0.2)
plt.show()
How to find minimum or maximum peaks in a TimeSeries.
Note: Peak finding is a complex problem that has many potential solutions, and
this example is just one method of many.
"""
import numpy as np
import matplotlib.pyplot as plt

from sunpy.timeseries import TimeSeries
from sunpy.data.sample import NOAAINDICES_TIMESERIES as noaa_ind

##############################################################################
# We will now create a TimeSeries object from an observational data source.
# We will also truncate it to do the analysis on a smaller time duration of
# ten years.

ts_noaa_ind = TimeSeries(noaa_ind, source='NOAAIndices')
my_timeseries = ts_noaa_ind.truncate('1991/01/01', '2001/01/01')
fig, ax = plt.subplots()
my_timeseries.plot()

##############################################################################
# To find extrema in any TimeSeries, we first define a function findpeaks that
# takes as input an iterable data series and a DELTA value. The DELTA value
# controls how much difference between values in the TimeSeries defines an
# extremum point. Inside the function, we iterate over the data values of the
# TimeSeries and consider a point to be a local maximum if it has the maximal
# value and was preceded (to the left) by a value lower by DELTA. Similar
# logic applies to finding a local minimum.


def findpeaks(series, DELTA):
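    # A minimal sketch of the body the comment above describes (an
    # assumption, not the original implementation): track a running
    # extremum and confirm a peak once the series retreats from it by DELTA.
    minpeaks, maxpeaks = [], []
    mn, mx = np.inf, -np.inf
    lookformax = True
    for time_pos, value in series.items():
        if value > mx:
            mx, mxpos = value, time_pos
        if value < mn:
            mn, mnpos = value, time_pos
        if lookformax and value < mx - DELTA:
            maxpeaks.append((mxpos, mx))  # confirmed local maximum
            mn, mnpos = value, time_pos
            lookformax = False
        elif not lookformax and value > mn + DELTA:
            minpeaks.append((mnpos, mn))  # confirmed local minimum
            mx, mxpos = value, time_pos
            lookformax = True
    return minpeaks, maxpeaks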
import matplotlib
import matplotlib.pyplot as plt

from sunpy.net import hek
from sunpy.time import parse_time
from sunpy.timeseries import TimeSeries

client = hek.HEKClient()

goes_file = 'go1520110607.fits'
goes = TimeSeries(goes_file)
goes_df = goes.to_dataframe()

flares_hek = client.search(
    hek.attrs.Time('2011-06-07 00:00', '2011-06-07 23:59'),
    hek.attrs.FL, hek.attrs.FRM.Name == 'SWPC')

matplotlib.rcParams['savefig.pad_inches'] = 0.2

fig, ax = plt.subplots(figsize=(6, 4))
plt.plot(goes_df['xrsb'], color='r', label=r'1-8 $\mathrm{\AA}$')
plt.plot(goes_df['xrsa'], color='b', label=r'0.5-4 $\mathrm{\AA}$')
ax.set_yscale('log')
ax.set_ylim(1e-9, 1e-3)
ax.set_ylabel('Watts m$^{-2}$')
ax.set_xlabel('Time (UT) 2011-06-07')
ax.set_title('GOES X-ray flux')
ax.axvline(parse_time(flares_hek[0].get('event_peaktime')).to_datetime(),
           ls='dashed', color='grey', label='Flare peak')
ax.legend()
plt.show()
How to smooth a TimeSeries using a convolution filter kernel from
`~astropy.convolution` and the `~astropy.convolution.convolve` function.
"""
import matplotlib.pyplot as plt
from astropy.convolution import Box1DKernel, convolve

from sunpy.data.sample import GOES_XRS_TIMESERIES
from sunpy.timeseries import TimeSeries

###############################################################################
# Let's first create a TimeSeries from sample data.

goes_lc = TimeSeries(GOES_XRS_TIMESERIES).truncate('2011/06/07 06:10',
                                                   '2011/06/07 07:00')

###############################################################################
# Now we will extract data values from the TimeSeries and apply a boxcar
# filter to get smooth data. Boxcar smoothing is equivalent to taking our
# signal and using it to make a new signal where each element is the average
# of w adjacent elements. Here we will use astropy's convolve function with a
# "boxcar" kernel of width w = 50.

goes_lc = goes_lc.add_column(
    'xrsa_smoothed',
    convolve(goes_lc.quantity('xrsa'), kernel=Box1DKernel(50)))

###############################################################################
# Plotting the original and smoothed time series.
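A plausible completion of the plotting step (a sketch; only the column names above are taken from the original):

fig, ax = plt.subplots()
ax.plot(goes_lc.to_dataframe()['xrsa'], label='xrsa')
ax.plot(goes_lc.to_dataframe()['xrsa_smoothed'], label='xrsa smoothed')
ax.set_yscale('log')
ax.set_ylabel('Watts m$^{-2}$')
ax.legend()
plt.show()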
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.dates as dates
from datetime import datetime

from sunpy.net import Fido, attrs as a, hek
from sunpy.time import TimeRange
from sunpy.timeseries import TimeSeries

mpl.rc('font', size=12)

begin_time = datetime(2017, 9, 2, 15, 0, 14)
fin_time = datetime(2017, 9, 2, 17, 30, 14)

# Candidate tick positions every ten minutes (the original stepped with a
# no-op `i + 10`; a 10-minute stride appears to be the intent).
firsts = []
for i in range(begin_time.minute, fin_time.minute, 10):
    firsts.append(datetime(2017, 9, 2, 15, i, 14))

plt.figure(figsize=(12, 9))
tr = TimeRange(['2017-09-02 10:25:00', '2017-09-02 19:05:00'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
files = Fido.fetch(results)
goes = TimeSeries(files)

client = hek.HEKClient()
flares_hek = client.search(hek.attrs.Time(tr.start, tr.end),
                           hek.attrs.FL,
                           hek.attrs.FRM.Name == 'SWPC')

goes.peek()
# plt.xticks(firsts)
# plt.gca().xaxis.set_major_locator(dates.HourLocator())
plt.gca().xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))
plt.xlim(begin_time, fin_time)
plt.savefig("goes_02092017.png")
import numpy as np
import pandas

from sunpy.timeseries import TimeSeries


def remove_lytaf_events_from_timeseries(ts, artifacts=None,
                                        return_artifacts=False,
                                        force_use_local_lytaf=False):
    """
    Removes periods of LYRA artifacts defined in LYTAF from a TimeSeries.

    Parameters
    ----------
    ts : `sunpy.timeseries.TimeSeries`
    artifacts : `list`
        Sets the artifact types to be removed. For a list of artifact types
        see reference [1]. For example, if a user wants to remove only large
        angle rotations, listed at reference [1] as LAR, set
        artifacts=["LAR"]. The default is that no artifacts will be removed.
    return_artifacts : `bool`
        Set to True to return a `numpy.recarray` containing the start time,
        end time and type of all artifacts removed. Default=False.
    force_use_local_lytaf : `bool`
        Ensures the current local versions of the lytaf files are not
        replaced by up-to-date online versions, even if the current local
        lytaf files do not cover the entire input time range. Default=False.

    Returns
    -------
    ts_new : `sunpy.timeseries.TimeSeries`
        Copy of the input TimeSeries with the periods corresponding to
        artifacts removed.
    artifact_status : `dict`
        Dictionary of four entries containing information on what artifacts
        were found, removed, etc. from the time series.

        | **artifact_status["lytaf"]** : `numpy.recarray`
        |     The full LYRA annotation file for the time series time range,
        |     output by get_lytaf_events().
        | **artifact_status["removed"]** : `numpy.recarray`
        |     Artifacts which were found and removed from the time series.
        | **artifact_status["not_removed"]** : `numpy.recarray`
        |     Artifacts which were found but not removed because they were
        |     not included in the user-defined artifacts kwarg.
        | **artifact_status["not_found"]** : `list` of strings
        |     Artifacts listed to be removed by the user via the artifacts
        |     kwarg which were not found in the time series time range.

    Notes
    -----
    This function is intended to take TimeSeries objects as input, but the
    deprecated LightCurve is still supported here.

    References
    ----------
    [1] http://proba2.oma.be/data/TARDIS

    Examples
    --------
    Remove LARs (Large Angle Rotations) from a TimeSeries for 4-Dec-2014:

    >>> import sunpy.timeseries as ts
    >>> import sunpy.data.sample  # doctest: +REMOTE_DATA
    >>> from sunkit_instruments.lyra import remove_lytaf_events_from_timeseries
    >>> lyrats = ts.TimeSeries(sunpy.data.sample.LYRA_LEVEL3_TIMESERIES, source='LYRA')  # doctest: +REMOTE_DATA
    >>> ts_nolars = remove_lytaf_events_from_timeseries(lyrats, artifacts=["LAR"])  # doctest: +REMOTE_DATA

    To also retrieve information on the artifacts during that day:

    >>> ts_nolars, artifact_status = remove_lytaf_events_from_timeseries(
    ...     lyrats, artifacts=["LAR"], return_artifacts=True)  # doctest: +REMOTE_DATA
    """
    # Remove artifacts from the time series.
    ts_ds = ts.to_dataframe()
    data_columns = ts_ds.columns
    time, channels, artifact_status = _remove_lytaf_events(
        ts_ds.index,
        channels=[np.asanyarray(ts_ds[col]) for col in data_columns],
        artifacts=artifacts,
        return_artifacts=True,
        force_use_local_lytaf=force_use_local_lytaf)
    # Create a new copy of the time series and replace the data with the
    # artifact-free version.
    data = pandas.DataFrame(
        index=time,
        data={col: channels[i] for i, col in enumerate(data_columns)})
    ts_new = TimeSeries(data, ts.meta)
    if return_artifacts:
        return ts_new, artifact_status
    return ts_new
import matplotlib.pyplot as plt

from sunpy.timeseries import TimeSeries
from sunpy.time import TimeRange, parse_time
from sunpy.net import hek, Fido, attrs as a

###############################################################################
# Let's first grab GOES XRS data for a particular time of interest.

tr = TimeRange(['2011-06-07 04:00', '2011-06-07 12:00'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
results

###############################################################################
# Then download the data and load it into a TimeSeries.

files = Fido.fetch(results)
goes = TimeSeries(files)

###############################################################################
# Next let's grab the HEK data for this time from the NOAA Space Weather
# Prediction Center (SWPC).

client = hek.HEKClient()
flares_hek = client.search(hek.attrs.Time(tr.start, tr.end),
                           hek.attrs.FL,
                           hek.attrs.FRM.Name == 'SWPC')

###############################################################################
# Finally let's plot everything together.

fig, ax = plt.subplots()
goes.plot()
ax.axvline(parse_time(flares_hek[0].get('event_peaktime')).plot_date)
##############################################################################
# Start by importing the necessary modules.

import numpy as np
import matplotlib.pyplot as plt

from sunpy.timeseries import TimeSeries
from sunpy.data.sample import NOAAINDICES_TIMESERIES as noaa_ind

##############################################################################
# We will now create a TimeSeries object from an observational data source.
# We will also truncate it to do the analysis on a smaller time duration of
# ten years.

ts_noaa_ind = TimeSeries(noaa_ind, source='NOAAIndices')
my_timeseries = ts_noaa_ind.truncate('1991/01/01', '2001/01/01')
my_timeseries.peek()

##############################################################################
# To find extrema in any TimeSeries, we first define a function findpeaks that
# takes as input an iterable data series and a DELTA value. The DELTA value
# controls how much difference between values in the TimeSeries defines an
# extremum point. Inside the function, we iterate over the data values of the
# TimeSeries and consider a point to be a local maximum if it has the maximal
# value and was preceded (to the left) by a value lower by DELTA. Similar
# logic applies to finding a local minimum.


def findpeaks(series, DELTA):
    """
import datetime
import matplotlib.pyplot as plt
from matplotlib import dates, colors

from sunpy.timeseries import TimeSeries
from sunpy.time import TimeRange, parse_time
from sunpy.net import hek, Fido, attrs as a

"""
tr = TimeRange(['2017-09-10 12:00', '2017-09-10 16:00'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
results
files = Fido.fetch(results)
goes = TimeSeries(files)
"""

goes_file = '/Users/cmaguir4/sunpy/data/go1520170910.fits'
goes = TimeSeries(goes_file, source='XRS')

goes_df = goes.to_dataframe()
xr = goes_df['xrsa']
xr1 = goes_df['xrsb']
time_goes = goes_df.index

font = {'size': 10, 'color': 'k'}
plt.figure(figsize=(8, 5))
ax2 = plt.subplot(1, 1, 1)
plt.plot(time_goes, xr1, '-',
         label='GOES 1.0-8.0 ' + r'$\AA$')
@classmethod
def fromFile(cls, file):
    # Build a view-model wrapper around a TimeSeries loaded from file.
    series = TimeSeries(file)
    model = TimeSeriesModel(series)
    return cls(model)
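A hedged usage sketch; the enclosing class name and the FITS filename below are hypothetical, chosen only for illustration.

view = TimeSeriesView.fromFile('go1520110607.fits')  # hypothetical names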
Smoothing of TimeSeries data using convolution filters
======================================================

How to smooth a TimeSeries using a convolution filter kernel from
`~astropy.convolution` and the `~astropy.convolution.convolve` function.
"""
import matplotlib.pyplot as plt
from astropy.convolution import convolve, Box1DKernel

from sunpy.timeseries import TimeSeries
from sunpy.data.sample import NOAAINDICES_TIMESERIES as noaa_ind

###############################################################################
# Let's first create a TimeSeries from sample data.

ts_noaa_ind = TimeSeries(noaa_ind, source='NOAAIndices')

###############################################################################
# Now we will extract data values from the TimeSeries and apply a boxcar
# filter to get smooth data. Boxcar smoothing is equivalent to taking our
# signal and using it to make a new signal where each element is the average
# of w adjacent elements. Here we will use astropy's convolve function with a
# "boxcar" kernel of width w = 10.

ts_noaa_ind = ts_noaa_ind.add_column(
    'sunspot SWO Smoothed',
    convolve(ts_noaa_ind.quantity('sunspot SWO'), kernel=Box1DKernel(10)))

###############################################################################
# Plotting the original and smoothed time series.

plt.ylabel('Sunspot Number')
plt.xlabel('Time')
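A plausible completion of the plotting step (a sketch; the column names come from the code above, the rest is assumed):

plt.title('Smoothing of Time Series')
plt.plot(ts_noaa_ind.to_dataframe()['sunspot SWO'], label='original')
plt.plot(ts_noaa_ind.to_dataframe()['sunspot SWO Smoothed'], label='smoothed')
plt.legend()
plt.show()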
## Sunspots and Flares

# Flares are best seen near the edge of the solar disk, where we see them
# extending out into space. SunPy can be used to visualise these flares. The
# following code snippet from the SunPy docs shows an M2.5 flare that
# occurred on the 7th of June 2011.

aia_cutout03_map = sunpy.map.Map(sample_data.AIA_193_CUTOUT03_IMAGE)
fig = plt.figure(5)
ax = fig.add_subplot(111, projection=aia_cutout03_map)
aia_cutout03_map.plot()
plt.show()

# I encourage you to delve into the documentation for this code snippet. You
# will find examples there of plotting the X-ray flux for the event and
# creating a series of images showing how the flare's shape changed over time.

# We saw sunspots in our earlier images. Let's use SunPy to plot NOAA data
# for the number of sunspots as a function of time.

ts_noaa_ind = TimeSeries(noaa_ind, source='NOAAIndices')
fig = plt.figure(6)
plt.ylabel('Sunspot Number')
plt.xlabel('Time')
plt.title('Sunspots Time Series')
plt.plot(ts_noaa_ind.to_dataframe()['sunspot SWO'])
plt.show()

# The Sun has an 11-year cycle, which you can see from the graph. You can
# also see that sunspot activity at the peak of the cycle has been decreasing
# over the last two decades. Visit the docs to see this code snippet and the
# smoothed (time-averaged) version of this series.

## Challenge
# 1) Make LASCO plots for the other detectors
import matplotlib.pyplot as plt

from sunpy.timeseries import TimeSeries
from sunpy.time import TimeRange, parse_time
from sunpy.net import hek, Fido, attrs as a

###############################################################################
# Let's first grab GOES XRS data for a particular time of interest.

tr = TimeRange(['2011-06-07 04:00', '2011-06-07 12:00'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
results

###############################################################################
# Then download the data and load it into a TimeSeries.

files = Fido.fetch(results)
goes = TimeSeries(files)

###############################################################################
# Next let's grab the HEK data for this time from the NOAA Space Weather
# Prediction Center (SWPC).

client = hek.HEKClient()
flares_hek = client.search(hek.attrs.Time(tr.start, tr.end),
                           hek.attrs.FL,
                           hek.attrs.FRM.Name == 'SWPC')

###############################################################################
# Finally let's plot everything together.

goes.peek()
plt.axvline(parse_time(flares_hek[0].get('event_peaktime')).datetime)
plt.axvspan(parse_time(flares_hek[0].get('event_starttime')).datetime,
            parse_time(flares_hek[0].get('event_endtime')).datetime,
            alpha=0.2)
plt.show()
kernel from `~astropy.convolution` and the `~astropy.convolution.convolve`
function.
"""
##############################################################################
# Start by importing the necessary modules.

import matplotlib.pyplot as plt
from astropy.convolution import convolve, Box1DKernel

from sunpy.timeseries import TimeSeries
from sunpy.data.sample import NOAAINDICES_TIMESERIES as noaa_ind

###############################################################################
# Let's first create a TimeSeries from sample data.

ts_noaa_ind = TimeSeries(noaa_ind, source='NOAAIndices')

###############################################################################
# Now we will extract data values from the TimeSeries and apply a boxcar
# filter to get smooth data. Boxcar smoothing is equivalent to taking our
# signal and using it to make a new signal where each element is the average
# of w adjacent elements. Here we will use astropy's convolve function with a
# "boxcar" kernel of width w = 10.

# Apply the convolution filter.
ts_noaa_ind.data['sunspot SWO Smoothed'] = convolve(
    ts_noaa_ind.data['sunspot SWO'].values, kernel=Box1DKernel(10))

# Plotting the original and smoothed time series.
plt.ylabel('Sunspot Number')
plt.xlabel('Time')
plt.title('Smoothing of Time Series')
from sunpy.timeseries import TimeSeries


def read_solar_data():
    # Load the NOAA solar indices text file and return it as a DataFrame.
    noaa = TimeSeries('./../data/RecentIndices.txt', source='NOAAIndices')
    df = noaa.to_dataframe()
    return df
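A hedged usage sketch (the exact column names depend on the NOAA RecentIndices.txt format):

df = read_solar_data()
print(df.columns)  # e.g. 'sunspot SWO', 'sunspot RI', ...
print(df.tail())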
This example illustrates how to find minimum or maximum peaks in a TimeSeries.
Note: Peak finding is a complex problem that has many potential solutions, and
this example is just one method of many.
"""
import matplotlib.pyplot as plt
import numpy as np

from sunpy.data.sample import GOES_XRS_TIMESERIES
from sunpy.timeseries import TimeSeries

##############################################################################
# We will now create a TimeSeries object from an observational data source.
# We will also truncate it to do the analysis on a smaller time window.

goes_lc = TimeSeries(GOES_XRS_TIMESERIES)
my_timeseries = goes_lc.truncate('2011/06/07 06:10', '2011/06/07 09:00')
fig, ax = plt.subplots()
my_timeseries.plot()

##############################################################################
# To find extrema in any TimeSeries, we first define a function findpeaks that
# takes as input an iterable data series and a DELTA value. The DELTA value
# controls how much difference between values in the TimeSeries defines an
# extremum point. Inside the function, we iterate over the data values of the
# TimeSeries and consider a point to be a local maximum if it has the maximal
# value and was preceded (to the left) by a value lower by DELTA. Similar
# logic applies to finding a local minimum.


def findpeaks(series, DELTA):
import datetime

import numpy as np
import omnireader
import pandas as pd
from sunpy.net import Fido, attrs as a
from sunpy.timeseries import TimeSeries


def download_omni_text(input_datetime):
    t_start = input_datetime - datetime.timedelta(1)
    t_end = (input_datetime + datetime.timedelta(1)
             + datetime.timedelta(minutes=10))
    t_start_day = input_datetime
    t_end_day = input_datetime + datetime.timedelta(minutes=1439)

    # -------------------------------------------------------- #
    #  OMNI data - includes solar wind and geomagnetic params   #
    # -------------------------------------------------------- #
    # Get the OMNI data.
    omniInt = omnireader.omni_interval(t_start, t_end, '5min',
                                       cdf_or_txt='txt')
    # print(omniInt.cdfs[0].vars)  # prints all available OMNI variables
    epochs = omniInt['Epoch']  # time array for the OMNI 5-minute data
    By, Bz = omniInt['BY_GSM'], omniInt['BZ_GSM']
    AE, SymH = omniInt['AE_INDEX'], omniInt['SYM_H']
    vsw, psw = omniInt['flow_speed'], omniInt['Pressure']
    borovsky_reader = omnireader.borovsky(omniInt)
    borovsky = borovsky_reader()
    # newell_reader = omnireader.newell(omniInt)
    # newell = newell_reader()

    def NewellCF_calc(v, bz, by):
        # v expected in km/s; the magnetic field components in nT.
        bt = np.sqrt(by**2 + bz**2)
        bztemp = bz.copy()  # copy so the caller's array is not mutated
        bztemp[bz == 0] = 0.001
        # Calculate the clock angle (theta_c = tc).
        tc = np.arctan2(by, bztemp)
        neg_tc = bt * np.cos(tc) * bz < 0
        tc[neg_tc] = tc[neg_tc] + np.pi
        sintc = np.abs(np.sin(tc / 2.))
        return (v**1.33333) * (sintc**2.66667) * (bt**0.66667)

    newell = NewellCF_calc(vsw, Bz, By)

    proton_flux_10MeV = omniInt['PR-FLX_10']
    proton_flux_30MeV = omniInt['PR-FLX_30']
    proton_flux_60MeV = omniInt['PR-FLX_60']

    # Calculate the clock angle in degrees, wrapped to [0, 360).
    clock_angle = np.degrees(np.arctan2(By, Bz))
    clock_angle[clock_angle < 0] = clock_angle[clock_angle < 0] + 360.
    print('Got 5 minute data')

    omniInt_1hr = omnireader.omni_interval(t_start, t_end, 'hourly',
                                           cdf_or_txt='txt')
    epochs_1hr = omniInt_1hr['Epoch']  # datetime timestamps
    F107, KP = omniInt_1hr['F10_INDEX'], omniInt_1hr['KP']
    print('Got hourly data')

    # -------------------------------------------------------- #
    #  GOES X-ray data - channel 1-8 A, defines the flare class #
    # -------------------------------------------------------- #
    results = Fido.search(a.Time(t_start, t_end), a.Instrument('XRS'))
    files = Fido.fetch(results)
    goes = TimeSeries(files, concatenate=True)
    goes_l = goes.to_dataframe()['xrsb']
    print('Got GOES data')

    # -------------------------------------------------------- #
    #  Resample data to 1 min to match the GNSS CHAIN network   #
    # -------------------------------------------------------- #
    def resample_1min(values, index):
        # Forward-fill onto a 1-minute grid, clipped to the target day.
        return (pd.Series(values, index=index).resample('1T').ffill()
                .truncate(t_start_day, t_end_day))

    # Resample the OMNI solar wind data.
    By_data = resample_1min(By, epochs)
    Bz_data = resample_1min(Bz, epochs)
    AE_data = resample_1min(AE, epochs)
    SymH_data = resample_1min(SymH, epochs)
    vsw_data = resample_1min(vsw, epochs)
    psw_data = resample_1min(psw, epochs)
    borovsky_data = resample_1min(borovsky, epochs)
    newell_data = resample_1min(newell, epochs)
    proton_10_data = resample_1min(proton_flux_10MeV, epochs)
    proton_30_data = resample_1min(proton_flux_30MeV, epochs)
    proton_60_data = resample_1min(proton_flux_60MeV, epochs)
    clock_angle_data = resample_1min(clock_angle, epochs)
    F107data = resample_1min(F107, epochs_1hr)
    KPdata = resample_1min(KP, epochs_1hr)

    # Helper to find data at previous time intervals.
    def roll_back(data, minutes=1):
        ts = t_start_day - datetime.timedelta(minutes=minutes)
        te = t_end_day - datetime.timedelta(minutes=minutes)
        shifted = pd.Series(data, index=epochs).resample('1T').ffill()
        new_data = shifted.truncate(ts, te)
        return pd.Series(np.array(new_data), index=By_data.index)

    # Calculate rolled-back time series - 15 and 30 minutes previous.
    By_15 = roll_back(By, minutes=15)
    By_30 = roll_back(By, minutes=30)
    Bz_15 = roll_back(Bz, minutes=15)
    Bz_30 = roll_back(Bz, minutes=30)
    AE_15 = roll_back(AE, minutes=15)
    AE_30 = roll_back(AE, minutes=30)
    SymH_15 = roll_back(SymH, minutes=15)
    SymH_30 = roll_back(SymH, minutes=30)
    vsw_15 = roll_back(vsw, minutes=15)
    vsw_30 = roll_back(vsw, minutes=30)
    psw_15 = roll_back(psw, minutes=15)
    psw_30 = roll_back(psw, minutes=30)
    borovsky_15 = roll_back(borovsky, minutes=15)
    borovsky_30 = roll_back(borovsky, minutes=30)
    newell_15 = roll_back(newell, minutes=15)
    newell_30 = roll_back(newell, minutes=30)
    clock_angle_15 = roll_back(clock_angle, minutes=15)
    clock_angle_30 = roll_back(clock_angle, minutes=30)

    # Resample the GOES X-ray flux.
    goes_data = goes_l.resample('1T').mean().truncate(t_start_day, t_end_day)

    # Put everything in a DataFrame and save it.
    dataframe = pd.DataFrame()
    dataframe['Bz - 0min [nT]'] = Bz_data
    dataframe['Bz - 15min [nT]'] = Bz_15
    dataframe['Bz - 30min [nT]'] = Bz_30
    dataframe['By - 0min [nT]'] = By_data
    dataframe['By - 15min [nT]'] = By_15
    dataframe['By - 30min [nT]'] = By_30
    dataframe['Vsw - 0min [km/s]'] = vsw_data
    dataframe['Vsw - 15min [km/s]'] = vsw_15
    dataframe['Vsw - 30min [km/s]'] = vsw_30
    dataframe['Psw - 0min [nPa]'] = psw_data
    dataframe['Psw - 15min [nPa]'] = psw_15
    dataframe['Psw - 30min [nPa]'] = psw_30
    dataframe['AE - 0min [nT]'] = AE_data
    dataframe['AE - 15min [nT]'] = AE_15
    dataframe['AE - 30min [nT]'] = AE_30
    dataframe['SymH - 0min [nT]'] = SymH_data
    dataframe['SymH - 15min [nT]'] = SymH_15
    dataframe['SymH - 30min [nT]'] = SymH_30
    dataframe['Clock Angle - 0min [deg]'] = clock_angle_data
    dataframe['Clock Angle - 15min [deg]'] = clock_angle_15
    dataframe['Clock Angle - 30min [deg]'] = clock_angle_30
    dataframe['Newell CF - 0min [m/s^(4/3) T^(2/3)]'] = newell_data
    dataframe['Newell CF - 15min [m/s^(4/3) T^(2/3)]'] = newell_15
    dataframe['Newell CF - 30min [m/s^(4/3) T^(2/3)]'] = newell_30
    dataframe['Borovsky CF - 0min [nT km/s]'] = borovsky_data
    dataframe['Borovsky CF - 15min [nT km/s]'] = borovsky_15
    dataframe['Borovsky CF - 30min [nT km/s]'] = borovsky_30
    dataframe['Kp [dimensionless]'] = KPdata
    dataframe['F107 [sfu=10^-22 W/m^2/hz]'] = F107data
    dataframe['Proton 10MeV'] = proton_10_data
    dataframe['Proton 30MeV'] = proton_30_data
    dataframe['Proton 60MeV'] = proton_60_data
    dataframe['GOES X-ray Wm^-2'] = goes_data

    # Replace the 9999.99 fill values with NaNs.
    dataframe_nan = dataframe.replace(9999.99, np.nan)

    filepath = '/Users/ryanmcgranaghan/Documents/Conferences/ISSI_2018/ISSI_geospaceParticles/solar_data/'
    filename = (filepath + 'solardata' + input_datetime.strftime('%Y')
                + '_' + input_datetime.strftime('%j') + '.csv')
    print('output solardata file location = {}'.format(filename))
    dataframe_nan.to_csv(filename, index_label='Datetime')
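A hedged usage sketch; the date is an arbitrary example and the output lands at the hard-coded filepath above:

download_omni_text(datetime.datetime(2017, 9, 2))
# writes solardata2017_245.csv (day-of-year 245) into the solar_data folder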
# (Commented-out remnants of an earlier intensity/electron-density estimate.)
# I_0 = np.nanmean(datatest[yc-refw:yc+refw, xc-refw:xc+refw])
# I_0 = np.nanmean(datatest[600-refw:600+refw, 700-refw:700+refw])
# I_0 = 160
# grid_Isc = np.empty(np.shape(datatest)) * np.nan  # same size as the data,
# # but empty; it will be filled with I_sc values
# for idx, item in enumerate(enc_coor):
#     grid_Isc[item[1], item[0]] = datatest[item[1], item[0]]
# grid_Ne = ((grid_Isc / 200) / I_0) * (1 / grid_Th)
# grid_ne = grid_Ne / (0.725 * 100000000)
del datatest

"""Coupling with previous code"""
"""---------------------------"""
# GOES data
tr = TimeRange(['2017-09-10 12:30', '2017-09-10 21:30'])
results = Fido.search(a.Time(tr), a.Instrument('XRS'))
files = Fido.fetch(results)
goes = TimeSeries(files)

tci = datetime.datetime.strptime('2017-09-10 12:28:41.40',
                                 '%Y-%m-%d %H:%M:%S.%f')
tcf = datetime.datetime.strptime('2017-09-10 21:22:41.30',
                                 '%Y-%m-%d %H:%M:%S.%f')

# Note: this is the long-wavelength (1-8 A) xrsb channel, although the
# original variable was misleadingly named xrsa.
xrsb = goes.to_dataframe()['xrsb']

client = hek.HEKClient()
flares_hek = client.search(hek.attrs.Time(tr.start, tr.end),
                           hek.attrs.FL,
                           hek.attrs.FRM.Name == 'SWPC')

# Restrict the GOES flux and timestamps to the [tci, tcf] window.
mask = (xrsb.index >= tci) & (xrsb.index <= tcf)
xrgoes = xrsb[mask].to_numpy()
xrtiempo = xrsb.index[mask].to_numpy()

### Reading and sorting HMI meanvalue files ###
### Choose between averages or rebinned files ###
mypathI = 'Stokes_IQUV_averages/'
from sunpy.net import Fido
from sunpy.net import attrs as a
from sunpy.timeseries import TimeSeries

###############################################################################
# `sunpy.net.Fido` is the primary interface to search for and download data,
# and it will automatically search CDAWeb when the ``cdaweb.Dataset``
# attribute is provided to the search. To look up the different dataset IDs
# available, you can use the form at https://cdaweb.gsfc.nasa.gov/index.html/

trange = a.Time('2021/07/01', '2021/07/08')
dataset = a.cdaweb.Dataset('SOLO_L2_MAG-RTN-NORMAL-1-MINUTE')
result = Fido.search(trange, dataset)

###############################################################################
# Let's inspect the results. We can see that there are seven files, one for
# each day within the query.

print(result)

###############################################################################
# Let's download the first two files.

downloaded_files = Fido.fetch(result[0, 0:2])
print(downloaded_files)

###############################################################################
# Finally we can load and take a look at the data using
# `~sunpy.timeseries.TimeSeries`. This requires an installation of the cdflib
# Python library to read the CDF files.

solo_mag = TimeSeries(downloaded_files, concatenate=True)
print(solo_mag.columns)
solo_mag.peek(['B_RTN_0', 'B_RTN_1', 'B_RTN_2'])
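For further analysis it is often convenient to drop to a pandas DataFrame (a brief sketch; the column names are those printed above):

solo_mag_df = solo_mag.to_dataframe()
print(solo_mag_df[['B_RTN_0', 'B_RTN_1', 'B_RTN_2']].describe())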