def pull_ACE(t1, t2):
    """Fetch ACE solar-wind (SWE) and magnetic-field (MFI) data from CDAWeb.

    Arguments:
        t1, t2 -- start/end of the time span, as matplotlib date numbers

    Returns:
        tuple -- (swe_data, mfi_data) exactly as returned by cdas.get_data
    """
    # Convert the matplotlib date numbers to datetimes once, up front.
    start = mdate.num2date(t1)
    end = mdate.num2date(t2)
    swe_data = cdas.get_data(
        'sp_phys', 'AC_H0_SWE', start, end,
        ['Np', 'Vp', 'Tpr', 'V_GSE', 'SC_pos_GSE'])
    mfi_data = cdas.get_data(
        'sp_phys', 'AC_H0_MFI', start, end,
        ['BGSEc'])
    return swe_data, mfi_data
def pull_OMNI_BSN(t1, t2):
    """Fetch OMNI bow-shock-nose (BSN) position data from CDAWeb.

    Arguments:
        t1, t2 -- start/end of the time span, as matplotlib date numbers

    Returns:
        np.ndarray -- structured array with fields 't' (matplotlib date
        number) and 'bsn_x_gse'/'bsn_y_gse'/'bsn_z_gse' (BSN position,
        GSE coordinates)
    """
    data = cdas.get_data('sp_phys', 'OMNI_HRO_1MIN',
                         mdate.num2date(t1), mdate.num2date(t2),
                         ['BSN_x', 'BSN_y', 'BSN_z'])
    dtype = np.dtype([('t', '<f8'), ('bsn_x_gse', '<f8'),
                      ('bsn_y_gse', '<f8'), ('bsn_z_gse', '<f8')])
    BSN = np.ndarray(len(data['EPOCH_TIME']), dtype=dtype)
    BSN['bsn_x_gse'] = data['X_(BSN),_GSE']
    BSN['bsn_y_gse'] = data['Y_(BSN),_GSE']
    BSN['bsn_z_gse'] = data['Z_(BSN),_GSE']
    # date2num accepts a sequence, so convert every epoch in one vectorized
    # call (consistent with pull_ACE_year/pull_GOES_year) instead of looping
    # element by element.
    BSN['t'] = mdate.date2num(data['EPOCH_TIME'])
    return BSN
# Script chunk: download one year of ACE magnetic-field (AC_H0_MFI) and
# solar-wind (AC_H0_SWE) data from CDAWeb.
# Assumes year_str, homedir, datetime, and cdas are defined earlier in the file.
year = int(year_str)
datetimeStart = datetime(year, 1, 1, 0, 0, 0)
datetimeEnd = datetime(year, 12, 31, 23, 59, 59)
# If the cache is turned on, do not download from one dataset more than once;
# there is a bug that causes an error. Make sure to download every variable
# you need from one dataset at once.
# cdas.set_cache(True, '/Users/jz0006/GoogleDrive/MyResearchFolder/FluxRope/PythonPlotCode/data_cache')
cdas.set_cache(True, homedir + '/GoogleDrive/GS/data_cache')
# Download magnetic field data from ACE.
# The dimension of AC_H0_MFI['BGSEc'] is N x 3 (N rows, 3 columns).
print('Downloading data from AC_H0_MFI...')
# AC_H0_MFI [Available Time Range: 1997/09/02 00:00:12 - 2016/12/24 23:59:56]
AC_H0_MFI = cdas.get_data('istp_public', 'AC_H0_MFI', datetimeStart, datetimeEnd, ['BGSEc'], cdf=True)
print('Done.')
# Download solar wind data: Np, V_GSE, and thermal speed.
print('Downloading data from AC_H0_SWE...')
# Np: Solar Wind Proton Number Density, scalar.
# V_GSE: Solar Wind Velocity in GSE coord., 3 components.
# Tpr: radial component of the proton temperature. The radial component of
# the proton temperature is the (1,1) component of the temperature tensor,
# along the radial direction. It is obtained by integration of the ion
# (proton) distribution function.
# alpha_ratio: alpha to proton density ratio.
# AC_H0_SWE [Available Time Range: 1998/02/04 00:00:31 - 2016/11/27 23:59:51]
# NOTE(review): this chunk is truncated in the source — the get_data call
# below is missing its closing arguments and parenthesis.
AC_H0_SWE = cdas.get_data('istp_public', 'AC_H0_SWE', datetimeStart, datetimeEnd,
                          ['Np', 'V_GSE', 'Tpr', 'alpha_ratio'],
# Script chunk: download one year (2015) of WIND magnetic-field (WI_H0_MFI)
# and solar-wind (WI_K0_SWE) data from CDAWeb.
# Assumes homedir, datetime, and cdas are defined earlier in the file.
year = 2015
datetimeStart = datetime(year, 1, 1, 0, 0, 0)
datetimeEnd = datetime(year, 12, 31, 23, 59, 59)
# If the cache is turned on, do not download from one dataset more than once;
# there is a bug that causes an error. Make sure to download every variable
# you need from one dataset at once.
# cdas.set_cache(True, '/Users/jz0006/GoogleDrive/MyResearchFolder/FluxRope/PythonPlotCode/data_cache')
cdas.set_cache(True, homedir + '/GoogleDrive/GS/data_cache')
# Download magnetic field data.
# The dimension of WI_H0_MFI['BGSE'] is N x 3 (N rows, 3 columns).
print('Downloading data from WI_H0_MFI...')
WI_H0_MFI = cdas.get_data('istp_public', 'WI_H0_MFI', datetimeStart, datetimeEnd, ['BGSE'], cdf=True)
print('Done.')
# Download solar wind data.
print('Downloading data from WI_K0_SWE...')
WI_K0_SWE = cdas.get_data('istp_public', 'WI_K0_SWE', datetimeStart, datetimeEnd, ['Np', 'V_GSE', 'THERMAL_SPD'], cdf=True)
print('Done.')
# Download electron temperature data. Unit is Kelvin.
# WI_H0_SWE time span = [1994/12/29 00:00:02, 2001/05/31 23:59:57]
# Script chunk: build a one-per-minute timestamp series for the year and,
# when the requested range lies within WI_H4_SWE coverage, download electron
# pitch-angle distribution data.
# Assumes year, datetimeStart, datetimeEnd, calendar, time, np, datetime,
# timedelta, and cdas are defined earlier in the file.
timeRangeInMinutes = (365 + calendar.isleap(year)) * 24 * 60
# One timestamp per minute starting at Jan 1 00:00:00.
timeStampSeries = np.asarray([
    datetime(year, 1, 1, 0, 0, 0) + timedelta(minutes=x)
    for x in range(0, timeRangeInMinutes)
])
# WI_H4_SWE Available Time Range: 1994/11/30 00:00:20 - 2001/07/10 00:00:35
# If time range is within the range covered by WI_H4_SWE.
# NOTE(review): the source is whitespace-collapsed; everything using
# WI_H4_SWE must live inside this if-block, so it is indented accordingly.
if (datetimeStart >= datetime(1994, 11, 30, 0, 0, 20)) and (
        datetimeEnd <= datetime(2001, 7, 10, 0, 0, 35)):
    print('Downloading data from WI_H4_SWE...')
    print(time.ctime())
    #WI_H4_SWE = cdas.get_data('istp_public', 'WI_H4_SWE', datetimeStart, datetimeEnd, ['f_pitch_E09', 'f_pitch_E10'], cdf=True)
    WI_H4_SWE = cdas.get_data('istp_public', 'WI_H4_SWE', datetimeStart, datetimeEnd, ['f_pitch_E09'], cdf=True)
    print(time.ctime())
    print('Done.')
    #print(WI_H4_SWE.keys())
    # Extract data from cdf file.
    print('Extracting data from cdf file...')
    print('Extracting ElectronPitch_Epoch...')
    ElectronPitch_Epoch = WI_H4_SWE['Epoch'][...]
    print('Extracting ElectronPitch_Flux...')
    # Only the 94 eV channel is extracted; the 138 eV channel (f_pitch_E10)
    # is commented out above and below.
    ElectronPitch_Flux_094eV = WI_H4_SWE['f_pitch_E09'][...]
    #ElectronPitch_Flux_138eV = WI_H4_SWE['f_pitch_E10'][...]
    print(ElectronPitch_Flux_094eV.shape)
    print('Extracting ElectronPitch_PitchAngle...')
    ElectronPitch_PitchAngle = WI_H4_SWE['Pitch_Angle'][...]
def pull_ACE_B_year(year, filepath=''):
    '''
    Pull a year of ACE MFI data from CDAWeb, clean it, and store it in a
    location specified in config.par

    Arguments:
        year(int) -- The year for which data will be pulled
        filepath(str) -- Unused; the path is always read from config.par
                         (kept for backward compatibility)

    Returns:
        int: Function finished indicator
    '''
    filepath = uf.get_parameter('filepath')

    # Check if there's a Data folder there; if not, make it.
    if not os.path.exists(filepath + 'Data/'):
        os.makedirs(filepath + 'Data/')

    filename = filepath + 'Data/ACE_B_' + str(year) + '.npy'

    # Check if file already exists.
    if os.path.exists(filename):
        print('File ' + 'ACE_B_' + str(year) + '.npy' +
              ' already exists! Skipping...')
        return 1

    print('Pulling ACE mfi data from ' + str(year))
    uf.status(0)
    ACE_B_dtype = np.dtype([('t', 'f8'), ('B', '3f8')])
    ACE_B = np.ndarray(0, dtype=ACE_B_dtype)

    # Pull the data from CDAWeb in month chunks.
    for i in range(1, 13):
        t1 = datetime.datetime(year, i, 1)
        if i + 1 < 13:
            t2 = datetime.datetime(year, i + 1, 1)
        else:
            t2 = datetime.datetime(year + 1, 1, 1)

        mfi_data = cdas.get_data('sp_phys', 'AC_H0_MFI', t1, t2, ['BGSEc'])

        # Down-sample by a factor of 4 via collapse_down.
        ACE_B_month = np.ndarray(len(mfi_data['EPOCH']) // 4,
                                 dtype=ACE_B_dtype)
        # BUGFIX: removed a duplicate np.transpose([...]) expression whose
        # result was computed and immediately discarded.
        ACE_B_month['B'] = np.transpose([
            collapse_down(mfi_data['BX_GSE'], 4),
            collapse_down(mfi_data['BY_GSE'], 4),
            collapse_down(mfi_data['BZ_GSE'], 4)
        ])
        ACE_B_month['t'] = collapse_down(mdate.date2num(mfi_data['EPOCH']), 4)

        # Clean bad data (fill values are large negative numbers).
        ACE_B_month['B'][ACE_B_month['B'] < -10**30] = np.nan

        # Append to the full array.
        ACE_B = np.append(ACE_B, ACE_B_month)
        uf.status(int((i / 12) * 100))

    np.save(filename, ACE_B)
    print(str(year) + ' finished!')
    print('File saved to ' + filename)
    # BUGFIX: added the finished indicator promised by the docstring,
    # consistent with pull_ACE_year.
    return 1
def pull_ACE_year(year):
    '''
    Pull a year of ACE SWE data from CDAWeb, clean it, and store it in a
    location specified in config.par

    Arguments:
        year(int) -- The year for which data will be pulled

    Returns:
        int: Function finished indicator
    '''
    print('Pulling data for ' + str(year))
    filepath = uf.get_parameter('filepath')

    # Make sure the Data folder exists.
    if not os.path.exists(filepath + 'Data/'):
        os.makedirs(filepath + 'Data/')

    filename = filepath + 'Data/ACE_' + str(year) + '.npy'

    # Nothing to do if the yearly file has already been saved.
    if os.path.exists(filename):
        print('File ' + 'ACE_' + str(year) + '.npy' +
              ' already exists! Skipping...')
        return 1

    # Structured dtype holding time, position, velocity, density, dynamic
    # pressure, and speed for each record.
    ACE_dtype = np.dtype([('t', 'f8'), ('pos', '3f8'), ('v', '3f8'),
                          ('n', 'f8'), ('p', 'f8'), ('spd', 'f8')])
    ACE = np.ndarray(0, dtype=ACE_dtype)

    print('Pulling ACE swe data from ' + str(year))
    uf.status(0)

    # Fetch the data one calendar month at a time.
    for month in range(1, 13):
        t1 = datetime.datetime(year, month, 1)
        # The month ends at the first day of the following month
        # (or of the following year, for December).
        t2 = (datetime.datetime(year, month + 1, 1)
              if month < 12 else datetime.datetime(year + 1, 1, 1))

        swe_data = cdas.get_data('sp_phys', 'AC_H0_SWE', t1, t2,
                                 ['Np', 'Vp', 'V_GSE', 'SC_pos_GSE'])

        # Temporary structure for this month's records.
        ACE_month = np.ndarray(len(swe_data['EPOCH']), dtype=ACE_dtype)

        # Copy the CDAWeb variables into the structured array.
        ACE_month['t'] = mdate.date2num(swe_data['EPOCH'])
        ACE_month['pos'] = np.transpose([
            swe_data['ACE_X-GSE'], swe_data['ACE_Y-GSE'],
            swe_data['ACE_Z-GSE']
        ])
        ACE_month['n'] = swe_data['H_DENSITY']
        ACE_month['v'] = np.transpose([
            swe_data['VX_(GSE)'], swe_data['VY_(GSE)'],
            swe_data['VZ_(GSE)']
        ])

        # Clean up ACE data: replace fill values (large negative numbers)
        # with NaN.
        ACE_month['n'][ACE_month['n'] < -10**30] = np.nan
        ACE_month['v'][ACE_month['v'] < -10**30] = np.nan

        # Derived quantities: flow speed and dynamic pressure (units: nPa).
        ACE_month['spd'] = np.sqrt(np.sum(ACE_month['v']**2, axis=1))
        ACE_month['p'] = 1.6726 * 10**(
            -6) * ACE_month['n'] * ACE_month['spd']**2

        ACE = np.append(ACE, ACE_month)
        uf.status(int((month / 12) * 100))

    np.save(filename, ACE)
    print(str(year) + ' finished!')
    print('File saved to ' + filename)
    return 1
def pull_GOES_year(year, filepath=''):
    '''
    Pull a year of GOES data from CDAWeb, clean it, and store it in a
    location specified in config.par. Which GOES satellite data comes from
    depends on the year. 2000-2003 pulls GOES 10, 2004-2009 pulls GOES 12.

    Arguments:
        year(int) -- The year for which data will be pulled
        filepath(str) -- Unused; the path is always read from config.par
                         (kept for backward compatibility)

    Returns:
        int: Function finished indicator (1 on success / already done,
        -1 if no satellite is defined for the requested year)
    '''
    filepath = uf.get_parameter('filepath')

    # Check if there's a Data folder there; if not, make it.
    if not os.path.exists(filepath + 'Data/'):
        os.makedirs(filepath + 'Data/')

    filename = filepath + 'Data/GOES_' + str(year) + '.npy'

    # Check if file already exists.
    if os.path.exists(filename):
        print('File ' + 'GOES_' + str(year) + '.npy' +
              ' already exists! Skipping...')
        return 1

    print('Pulling GOES data from ' + str(year))
    uf.status(0)
    GOES_dtype = np.dtype([('t', 'f8'), ('pos', '3f8'), ('B', '3f8')])
    GOES = np.ndarray(0, dtype=GOES_dtype)

    # This maps a given year to a GOES satellite.
    GOES_dict = {
        2000: 10, 2001: 10, 2002: 10, 2003: 10,
        2004: 12, 2005: 12, 2006: 12, 2007: 12, 2008: 12, 2009: 12
    }
    # This dict serves to map a GOES satellite to its name in CDAS and its
    # associated variable names.
    GOES_names = {
        10: ['G0_K0_MAG', 'SC_pos_se', 'B_GSE_c'],
        12: ['GOES12_K0_MAG', 'SC_pos_se', 'B_GSE_c']
    }
    # BUGFIX: explicit membership test replaces a bare try/except around an
    # unused probe assignment (x = GOES_dict[year]).
    if year not in GOES_dict:
        print("Year is not defined yet, try another one.")
        return -1

    dataset, *variables = GOES_names[GOES_dict[year]]

    # Again, go month by month.
    for i in range(1, 13):
        t1 = datetime.datetime(year, i, 1)
        if i + 1 < 13:
            t2 = datetime.datetime(year, i + 1, 1)
        else:
            t2 = datetime.datetime(year + 1, 1, 1)

        try:
            goes_data = cdas.get_data('sp_phys', dataset, t1, t2, variables)
        except Exception:
            # BUGFIX: narrowed from a bare except. A month with no data is
            # best-effort: report it and move on to the next month.
            import calendar
            print('No data found for ' + calendar.month_name[i] + ' ' +
                  str(year))
            continue

        GOES_month = np.ndarray(len(goes_data['EPOCH']), dtype=GOES_dtype)
        GOES_month['pos'] = np.transpose([
            goes_data['GSE_X'],
            goes_data['GSE_Y'],
            goes_data['GSE_Z'],
        ])
        GOES_month['B'] = np.transpose([
            goes_data['BX_GSE'],
            goes_data['BY_GSE'],
            goes_data['BZ_GSE'],
        ])
        GOES_month['t'] = mdate.date2num(goes_data['EPOCH'])

        # Clean bad data (fill values are large negative numbers).
        GOES_month['B'][GOES_month['B'] < -10**30] = np.nan

        # Append to the full array.
        GOES = np.append(GOES, GOES_month)
        uf.status(int((i / 12) * 100))

    np.save(filename, GOES)
    print(str(year) + ' finished!')
    print('File saved to ' + filename)
    # BUGFIX: added the finished indicator promised by the docstring.
    return 1