def load_spin(self, time):

    # Compute the requested time both as a standard-format string
    # and a "datetime" object.

    time_str = calc_time_str(time)
    time_epc = calc_time_epc(time)

    # Construct a list of the dates requested.

    req_date = []

    tm_strt = time_epc - timedelta(seconds=self.buf)
    tm_stop = time_epc + timedelta(seconds=self.buf)

    dt_strt = datetime(tm_strt.year, tm_strt.month, tm_strt.day)
    dt_stop = datetime(tm_stop.year, tm_stop.month, tm_stop.day)

    dt_i = dt_strt

    while (dt_i <= dt_stop):
        date_i = (calc_time_str(dt_i))[0:10]
        req_date.append(date_i)
        dt_i += timedelta(1)

    # For each date in "req_date", load the data (if necessary).

    for date in req_date:
        self.load_date(date)

    # Compute the absolute time difference between the requested
    # time and the timestamp of each loaded datum.

    adt = [abs((t - time_epc).total_seconds())
                                         for t in self.arr_spin_t]

    # Determine the ordering of the absolute time differences.

    arg = sorted(range(len(adt)), key=adt.__getitem__)

    # If the smallest time difference is greater than the
    # tolerance, return 'None'.

    if (adt[arg[0]] > self.buf):
        return None

    # Compute the median spin rate of the "self.win" data with the
    # smallest time differences.

    w = median([self.arr_spin_w[arg[i]] for i in range(self.win)])

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    # Return the spin period.

    return (2. * pi / w)
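# Note.  A minimal usage sketch for "load_spin" above (the archive
#        instance name and time string are hypothetical, not part of
#        this module):
#
#            arc = spin_arcv()
#            per = arc.load_spin('2008-11-04/12:00:00.000')
#
#            if (per is None):
#                print('No spin data within tolerance.')
#            else:
#                print('Spin period: {:.3f} s'.format(per))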
def load_rang(self, time_strt, dur_sec):

    # Compute the requested start and stop times as values, as
    # strings, and as "datetime" epochs.

    time_strt_val = calc_time_val(time_strt)
    time_stop_val = calc_time_val(time_strt_val + dur_sec)

    time_strt_str = calc_time_str(time_strt_val)
    time_stop_str = calc_time_str(time_stop_val)

    time_strt_epc = calc_time_epc(time_strt_val)
    time_stop_epc = calc_time_epc(time_stop_val)

    # Construct an array of the dates requested.

    date_req = array([])

    date_i = (calc_time_str(time_strt_val - self.buf))[0:10]
    time_i = calc_time_val(date_i + '/00:00:00.000')

    while (time_i < (time_stop_val + self.buf)):

        # Add the current date to the array of dates to be
        # loaded.

        date_req = append(date_req, [date_i])

        # Move on to the next date.

        # Note.  This may look a bit odd, but it may be necessary
        #        to avoid issues with leap seconds.  An additional
        #        leap-second concern is the possibility of
        #        "date_req" containing duplicates, but that
        #        shouldn't be too much of an issue even if it does
        #        occur.

        time_i = time_i + 86400.
        date_i = (calc_time_str(time_i))[0:10]
        time_i = calc_time_val(date_i + '/00:00:00.000')

    # For each date in "date_req", load the data (if necessary).

    [self.load_date(dt) for dt in date_req]

    # Identify and extract the requested range of Wind/MFI data.

    tk = where((self.mfi_t >=
                       (time_strt_epc - timedelta(0, self.tol))) &
               (self.mfi_t <=
                       (time_stop_epc + timedelta(0, self.tol))))
    tk = tk[0]

    n_tk = len(tk)

    if (n_tk <= 0):

        self.mesg_txt('none')

        ret_t   = array([])
        ret_b_x = array([])
        ret_b_y = array([])
        ret_b_z = array([])

    else:

        ret_t   = self.mfi_t[tk]
        ret_b_x = self.mfi_b_x[tk]
        ret_b_y = self.mfi_b_y[tk]
        ret_b_z = self.mfi_b_z[tk]

        srt = argsort(ret_t)

        ret_t   = ret_t[srt]
        ret_b_x = ret_b_x[srt]
        ret_b_y = ret_b_y[srt]
        ret_b_z = ret_b_z[srt]

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    # Return the requested range of Wind/MFI data.

    return (list(ret_t), list(ret_b_x),
            list(ret_b_y), list(ret_b_z))
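# Note.  A minimal usage sketch for "load_rang" above (hypothetical
#        archive instance): request one hour of Wind/MFI data and
#        unpack the four parallel lists it returns.
#
#            (mfi_t, mfi_b_x,
#             mfi_b_y, mfi_b_z) = arc.load_rang(
#                                     '2008-11-04/00:00:00', 3600.)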
def load_spec(self, time, get_prev=False, get_next=False):

    # If both "get_????" keywords are "True", abort.

    if ((get_prev) and (get_next)):
        self.mesg_txt('none')
        return None

    # Convert/standardize the requested time.

    time_req_str = calc_time_str(time)
    time_req_val = calc_time_val(time)
    time_req_epc = calc_time_epc(time)

    # Extract the requested date (as a string) and the requested
    # time (as a float indicating seconds since midnight).
    # Likewise, determine the dates of the previous and next days.

    date_req_str = time_req_str[0:10]
    scnd_req_val = time_req_val - calc_time_val(date_req_str)

    date_pre_str = (calc_time_str(time_req_val - 86400.))[0:10]
    date_nex_str = (calc_time_str(time_req_val + 86400.))[0:10]

    # Load all the spectra from the requested date.  If the
    # requested time is within "self.buf" seconds of either the
    # previous or next day, load all the spectra from that date as
    # well.

    # Note.  There is no need to check here whether a date has
    #        already been loaded as that's the first thing that
    #        "self.load_date( )" does.

    self.load_date(date_req_str)

    if (scnd_req_val <= self.buf):
        self.load_date(date_pre_str)

    if ((86400. - scnd_req_val) <= self.buf):
        self.load_date(date_nex_str)

    # If no spectra have been loaded, abort.

    if (len(self.arr_tag) == 0):
        self.mesg_txt('none')
        return None

    # Locate the spectrum whose timestamp is closest to the one
    # requested.

    adt = [abs(tag.epoch - time_req_epc) for tag in self.arr_tag]

    adt_min = min(adt)

    tk = [a for a in range(len(adt)) if adt[a] == adt_min][0]

    if (get_prev):
        tk -= 1

    if (get_next):
        tk += 1

    if ((tk < 0) or (tk >= len(self.arr_tag))):
        self.mesg_txt('none')
        return None

    # If the selected spectrum is not within the requested
    # tolerance (e.g., in case of a long data gap), abort.

    if ((adt[tk]).total_seconds() > self.tol):
        self.mesg_txt('none')
        return None

    # Extract the spectrum to be returned.

    cdf = self.arr_cdf[self.arr_tag[tk].c]
    s   = self.arr_tag[tk].s

    # Find the actual number of voltage bins.

    n_bin_max = 31
    n_dir     = 20

    for n_bin_1 in range(n_bin_max):
        # Stop before indexing past the final bin.
        if (n_bin_1 == n_bin_max - 1):
            break
        if (cdf['cup1_EperQ'][s][n_bin_1] >=
                               cdf['cup1_EperQ'][s][n_bin_1 + 1]):
            break
    n_bin_1 += 1

    for n_bin_2 in range(n_bin_max):
        # Stop before indexing past the final bin.
        if (n_bin_2 == n_bin_max - 1):
            break
        if (cdf['cup2_EperQ'][s][n_bin_2] >=
                               cdf['cup2_EperQ'][s][n_bin_2 + 1]):
            break
    n_bin_2 += 1

    n_bin = min([n_bin_1, n_bin_2])

    # Assign all retrieved data to parameter values.

    time = cdf['Epoch'][s]

    elev = [float(cdf['inclination_angle'][0]),
            float(cdf['inclination_angle'][1])]

    azim = [[float(cdf['cup1_azimuth'][s][d]) for d in range(n_dir)],
            [float(cdf['cup2_azimuth'][s][d]) for d in range(n_dir)]]

    volt_cen = [[float(cdf['cup1_EperQ'][s][b]) for b in range(n_bin)],
                [float(cdf['cup2_EperQ'][s][b]) for b in range(n_bin)]]

    volt_del = [[float(cdf['cup1_EperQ_DEL'][s][b]) for b in range(n_bin)],
                [float(cdf['cup2_EperQ_DEL'][s][b]) for b in range(n_bin)]]

    curr = [[[float(cdf['cup1_qflux'][s][d][b]) for b in range(n_bin)]
                                                for d in range(n_dir)],
            [[float(cdf['cup2_qflux'][s][d][b]) for b in range(n_bin)]
                                                for d in range(n_dir)]]

    spec = fc_spec(n_bin, elev=elev, azim=azim, volt_cen=volt_cen,
                   volt_del=volt_del, curr=curr, time=time)

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    return spec
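# Note.  A minimal usage sketch for "load_spec" above (hypothetical
#        archive instance): fetch the "fc_spec" nearest the requested
#        time, or step to a chronological neighbor with one of the
#        "get_????" keywords (setting both returns 'None').
#
#            spec      = arc.load_spec('2008-11-04/12:00:00')
#            spec_prev = arc.load_spec('2008-11-04/12:00:00',
#                                      get_prev=True)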
def load_spec(self, time, get_prev=False, get_next=False,
              tmin=None, tmax=None):

    # If both "get_????" keywords are "True", abort.

    if ((get_prev) and (get_next)):
        self.mesg_txt('none')
        return None

    # Convert/standardize the requested time.

    time_req_str = calc_time_str(time)
    time_req_val = calc_time_val(time)
    time_req_epc = calc_time_epc(time)

    # Extract the requested date (as a string) and the requested
    # time (as a float indicating seconds since midnight).
    # Likewise, determine the dates of the previous and next days.

    date_req_str = time_req_str[0:10]
    scnd_req_val = time_req_val - calc_time_val(date_req_str)

    date_pre_str = (calc_time_str(time_req_val - 86400.))[0:10]
    date_nex_str = (calc_time_str(time_req_val + 86400.))[0:10]

    # Load all the spectra from the requested date.  If the
    # requested time is within "self.buf" seconds of either the
    # previous or next day, load all the spectra from that date as
    # well.

    # Note.  There is no need to check here whether a date has
    #        already been loaded as that's the first thing that
    #        "self.load_date( )" does.

    self.load_date(date_req_str)

    if (scnd_req_val <= self.buf):
        self.load_date(date_pre_str)

    if ((86400. - scnd_req_val) <= self.buf):
        self.load_date(date_nex_str)

    # If no spectra have been loaded, abort.

    if (self.n_fc <= 0):
        self.mesg_txt('none')
        return None

    # Identify the subset of spectra with timestamps between "tmin"
    # and "tmax".

    if (tmin is not None):
        con_tmin = (self.fc_time_epc >= calc_time_epc(tmin))
    else:
        con_tmin = tile(True, self.n_fc)

    if (tmax is not None):
        con_tmax = (self.fc_time_epc <= calc_time_epc(tmax))
    else:
        con_tmax = tile(True, self.n_fc)

    tk_con = where(con_tmin & con_tmax)[0]

    # If no spectra had timestamps in the specified range, abort.

    if (len(tk_con) <= 0):
        self.mesg_txt('none')
        return None

    # Compute the time difference between the timestamps within the
    # "tm??" range and the requested time.  Identify the smallest
    # absolute value in this array and the index of the
    # corresponding spectrum.

    dt = array([(epc - time_req_epc).total_seconds()
                for epc in self.fc_time_epc[tk_con]])

    dt_abs = abs(dt)

    dt_abs_min = amin(dt_abs)

    tk_dt = where(dt_abs == dt_abs_min)[0][0]

    tk_req = tk_con[tk_dt]

    # Set the spectrum with index "tk_req" to be returned.  If the
    # (chronologically) next or previous spectrum has been
    # requested, find it and set it to be returned instead.

    tk = tk_req

    if ((get_prev) and (not get_next)):
        tk_sub = where(dt < dt[tk_dt])[0]
        if (len(tk_sub) <= 0):
            self.mesg_txt('none')
            return None
        tk_dt_prev = where(dt == amax(dt[tk_sub]))[0][0]
        tk = tk_con[tk_dt_prev]

    if ((get_next) and (not get_prev)):
        tk_sub = where(dt > dt[tk_dt])[0]
        if (len(tk_sub) <= 0):
            self.mesg_txt('none')
            return None
        tk_dt_next = where(dt == amin(dt[tk_sub]))[0][0]
        tk = tk_con[tk_dt_next]

    # If the selected spectrum is not within the requested
    # tolerance, abort.

    if (abs((self.fc_time_epc[tk] -
             time_req_epc).total_seconds()) > self.tol):
        self.mesg_txt('none')
        return None

    # Extract the spectrum to be returned.

    ret_time_epc   = self.fc_time_epc[tk]

    ret_cup1_azm   = self.fc_cup1_azm[tk]
    ret_cup2_azm   = self.fc_cup2_azm[tk]

    ret_cup1_c_vol = self.fc_cup1_c_vol[tk]
    ret_cup2_c_vol = self.fc_cup2_c_vol[tk]

    ret_cup1_d_vol = self.fc_cup1_d_vol[tk]
    ret_cup2_d_vol = self.fc_cup2_d_vol[tk]

    ret_cup1_cur   = self.fc_cup1_cur[tk]
    ret_cup2_cur   = self.fc_cup2_cur[tk]

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    # Return the selected spectrum to the user.

    return (ret_time_epc,
            ret_cup1_azm, ret_cup2_azm,
            ret_cup1_c_vol, ret_cup2_c_vol,
            ret_cup1_d_vol, ret_cup2_d_vol,
            ret_cup1_cur, ret_cup2_cur)
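# Note.  A minimal usage sketch for this variant of "load_spec"
#        (hypothetical archive instance): restrict the search to a
#        window with "tmin"/"tmax" and unpack the returned tuple.
#
#            ret = arc.load_spec('2008-11-04/12:00:00',
#                                tmin='2008-11-04/11:00:00',
#                                tmax='2008-11-04/13:00:00')
#
#            if (ret is not None):
#                (time_epc, cup1_azm, cup2_azm,
#                 cup1_c_vol, cup2_c_vol,
#                 cup1_d_vol, cup2_d_vol,
#                 cup1_cur, cup2_cur) = ret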
def load_date(self, date_str):

    # Determine whether or not the requested date has already been
    # loaded.  If it has, abort.

    if (self.n_date > 0):
        tk = where(self.date_str == date_str)[0]
        if (len(tk) > 0):
            return

    # Extract the year, month, and day of the requested date.

    year = int(date_str[0:4])
    mon  = int(date_str[5:7])
    day  = int(date_str[8:10])

    # Attempt to load and extract data from the appropriate file.

    # Note.  The default data file format is CDF, and the code will
    #        attempt to download the appropriate CDF file from
    #        CDAWeb if it doesn't find one in the specified
    #        directory.  However, the user may also request that
    #        IDL "SAVE" files be used instead.

    if (self.use_idl):

        # Determine the name of the file that contains data from
        # the requested date.

        doy = 1 + int(round(
                  (datetime(year, mon, day) -
                   datetime(year, 1, 1)).total_seconds() / 86400.))

        doy_min = int(round(20. * floor(doy / 20.)))
        doy_max = int(round(20. * ceil((doy + 1) / 20.)))

        str_year    = '{:4}'.format(year)
        str_doy_min = '{:03}'.format(doy_min)
        str_doy_max = '{:03}'.format(doy_max)

        fl = 'wind_mag.' + str_year + '.' + \
             str_doy_min + '.' + str_doy_max + '.idl'

        fl_path = os.path.join(self.path, fl)

        # If the file exists, attempt to load it; otherwise,
        # abort.

        self.mesg_txt('load', date_str)

        if (os.path.isfile(fl_path)):
            try:
                dat = readsav(fl_path)
            except:
                self.mesg_txt('fail', date_str)
                return
        else:
            self.mesg_txt('fail', date_str)
            return

        # Determine the number of data loaded.  If none were
        # loaded, abort.

        n_sub = len(dat.doymag)

        if (n_sub <= 0):
            self.mesg_txt('fail', date_str)
            return

        # Extract the data from the loaded file.

        sub_doy = dat.doymag
        sub_b_x = dat.bxmag
        sub_b_y = dat.bymag
        sub_b_z = dat.bzmag

        sub_ind = tile(-1, len(dat.doymag))

        # Convert the loaded time from floating-point day-of-year
        # to "datetime" epoch.

        sub_t = array([datetime(year, 1, 1) +
                       timedelta(doy - 1.) for doy in sub_doy])

        # Construct an array of dates associated with the file
        # that was loaded.  For each of the 20 day-of-year values
        # that could (hypothetically, at least) be stored in the
        # file, determine whether that value is valid, and, if it
        # is, add it to the array of dates.  While doing this,
        # also populate the "sub_ind" array.

        new_date_str = array([])
        new_date_ind = array([])
        n_new_date   = 0

        for d in range(20):

            # Determine the "d"-th day-of-year value associated
            # with this file.

            doy_d = doy_min + d

            # If this day-of-year value is too small, move on
            # to the next one.

            if (doy_d <= 0):
                continue

            # Construct a "datetime" object to represent this
            # day-of-year value.

            # Note.  Noon is chosen for the time of day to
            #        avoid any potential issues with leap
            #        seconds.

            time_epc_d   = datetime(year, 1, 1, 12) + \
                           timedelta(doy_d - 1)
            time_epc_d_1 = datetime(year, 1, 1) + \
                           timedelta(doy_d - 1)
            time_epc_d_2 = datetime(year, 1, 1) + \
                           timedelta(doy_d)

            # If the "datetime" object indicates a year other
            # than the one associated with the file, continue
            # on to the next day-of-year value.

            if (time_epc_d.year != year):
                continue

            # Since this day-of-year value is valid, enter it
            # into the array of dates.

            date_str_d = calc_time_str(time_epc_d)[0:10]
            date_ind_d = self.t_date + n_new_date

            new_date_str = append(new_date_str, [date_str_d])
            new_date_ind = append(new_date_ind, [date_ind_d])

            n_new_date += 1

            # Select all data associated with this date and
            # assign each the date index.

            tk_d = where((sub_t >= time_epc_d_1) &
                         (sub_t <  time_epc_d_2))[0]

            n_tk_d = len(tk_d)

            if (n_tk_d > 0):
                sub_ind[tk_d] = date_ind_d

        # Select those data which seem to have valid (versus
        # fill) values.

        tk = where((abs(sub_b_x) < 1000.) &
                   (abs(sub_b_y) < 1000.) &
                   (abs(sub_b_z) < 1000.) &
                   (sub_ind >= 0))[0]

        n_tk = len(tk)

    else:

        # Determine the name of the file that contains data from
        # the requested date.

        str_year = date_str[0:4]
        str_mon  = date_str[5:7]
        str_day  = date_str[8:10]

        if (self.use_k0):
            fl0 = 'wi_k0_mfi_' + \
                  str_year + str_mon + str_day + '_v??.cdf'
        else:
            fl0 = 'wi_h0_mfi_' + \
                  str_year + str_mon + str_day + '_v??.cdf'

        fl0_path = os.path.join(self.path, fl0)

        gb = glob(fl0_path)

        # If the file does not exist, attempt to download it.

        if (len(gb) > 0):
            fl_path = gb[-1]
        else:
            try:
                self.mesg_txt('ftp', date_str)
                ftp = FTP('cdaweb.gsfc.nasa.gov')
                ftp.login()
                ftp.cwd('pub/data/wind/mfi/')
                if (self.use_k0):
                    ftp.cwd('mfi_k0/')
                else:
                    ftp.cwd('mfi_h0/')
                ftp.cwd(str_year)
                ls = ftp.nlst(fl0)
                fl = ls[-1]
                fl_path = os.path.join(self.path, fl)
                ftp.retrbinary("RETR " + fl,
                               open(fl_path, 'wb').write)
            except:
                self.mesg_txt('fail', date_str)
                return

        # If the file now exists, try to load it; otherwise,
        # abort.

        self.mesg_txt('load', date_str)

        if (os.path.isfile(fl_path)):
            try:
                cdf = pycdf.CDF(fl_path)
            except:
                self.mesg_txt('fail', date_str)
                return
        else:
            self.mesg_txt('fail', date_str)
            return

        # Extract the data from the loaded file.

        if (self.use_k0):
            sub_t   = cdf['Epoch'][:]
            sub_b_x = cdf['BGSEc'][:, 0]
            sub_b_y = cdf['BGSEc'][:, 1]
            sub_b_z = cdf['BGSEc'][:, 2]
            sub_pnt = cdf['N'][:]
        else:
            sub_t   = cdf['Epoch3'][:, 0]
            sub_b_x = cdf['B3GSE'][:, 0]
            sub_b_y = cdf['B3GSE'][:, 1]
            sub_b_z = cdf['B3GSE'][:, 2]
            sub_pnt = cdf['NUM3_PTS'][:, 0]

        sub_ind = tile(self.t_date, len(sub_t))

        # Select those data which seem to have valid (versus
        # fill) values.

        tk = where(sub_pnt > 0)[0]

        n_tk = len(tk)

        # Copy the date associated with this file into an array.

        new_date_str = [date_str]
        new_date_ind = [self.t_date]
        n_new_date   = 1

    # Append any valid, newly-loaded data to the saved arrays.

    if (n_tk > 0):
        self.mfi_t   = append(self.mfi_t,   sub_t[tk])
        self.mfi_b_x = append(self.mfi_b_x, sub_b_x[tk])
        self.mfi_b_y = append(self.mfi_b_y, sub_b_y[tk])
        self.mfi_b_z = append(self.mfi_b_z, sub_b_z[tk])
        self.mfi_ind = append(self.mfi_ind, sub_ind[tk])

    # Append the array of loaded dates with the date(s) loaded in
    # this call of this function.

    self.date_str = append(self.date_str, new_date_str)
    self.date_ind = append(self.date_ind, new_date_ind)

    self.n_date += n_new_date
    self.t_date += n_new_date

    # Request a clean-up of the files in the data directory.

    self.cleanup_file()
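# Note.  The IDL "SAVE" files above group data into 20-day blocks of
#        day-of-year values.  A worked example of the block arithmetic
#        used to build the file name (same floor/ceil as imported by
#        this module):
#
#            doy     = 309                                # 2008-11-04
#            doy_min = int(round(20. * floor(doy / 20.)))        # 300
#            doy_max = int(round(20. * ceil((doy + 1) / 20.)))   # 320
#
#        so this date would be read from "wind_mag.2008.300.320.idl".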
def load_spec( self, time, dur, fc_bins ) :

    # Convert/standardize the requested time.

    time_req_str = calc_time_str( time )
    time_req_val = calc_time_val( time )
    time_req_epc = calc_time_epc( time )

    # Extract the requested date (as a string) and the requested
    # time (as a float indicating seconds since midnight).
    # Likewise, determine the dates of the previous and next days.

    date_req_str = time_req_str[0:10]
    scnd_req_val = time_req_val - calc_time_val( date_req_str )

    date_pre_str = ( calc_time_str( time_req_val - 86400. ) )[0:10]
    date_nex_str = ( calc_time_str( time_req_val + 86400. ) )[0:10]

    # Load all the spectra from the requested date.  If the
    # requested time is within "self.buf" seconds of either the
    # previous or next day, load all the spectra from that date as
    # well.

    # Note.  There is no need to check here whether a date has
    #        already been loaded as that's the first thing that
    #        "self.load_date( )" does.

    self.load_date( date_req_str )

    if ( scnd_req_val <= self.buf ) :
        self.load_date( date_pre_str )

    if ( ( 86400. - scnd_req_val ) <= self.buf ) :
        self.load_date( date_nex_str )

    # If no spectra have been loaded, abort.

    if ( len( self.arr_tag ) == 0 ) :
        self.mesg_txt( 'none' )
        return []

    # Locate the spectrum whose timestamp is closest to that of the
    # current FC spectrum.

    dt  = [ datetime( 1970, 1, 1 )
            + timedelta( seconds=tag.epoch )
            - self.core.fc_spec['time'] for tag in self.arr_tag ]
    adt = [ abs( del_t ) for del_t in dt ]

    adt_min = min( adt )

    tk = [ a for a in range( len( adt ) ) if adt[a] == adt_min ][0]

    if ( ( tk < 0 ) or ( tk >= len( self.arr_tag ) ) ) :
        self.mesg_txt( 'none' )
        return []

    # Determine how many more PESA-L spectra exist within the
    # duration of the FC spectrum.

    num_spec = len( where( [ ( del_t >=
                               timedelta( seconds=-1. * dur / fc_bins ) )
                             and ( del_t <= timedelta( seconds=dur ) )
                             for del_t in dt ] )[0] )

    # If the selected spectrum is not within the requested
    # tolerance (e.g., in case of a long data gap), abort.

    if ( ( adt[tk] ).total_seconds() > self.tol ) :
        self.mesg_txt( 'none' )
        return []

    # Get the PESA-L spectra that lie within this time range.

    spec = []

    if ( num_spec == 1 ) :
        plur = 'spectrum'
    else :
        plur = 'spectra'

    self.mesg_txt( 'load',
                   ( str( num_spec ) + ' ' + plur + ' found' ) )

    for n in range( num_spec ) :

        # Extract the spectrum to be returned.

        cdf = self.arr_cdf[ self.arr_tag[tk+n].c ]
        s   = self.arr_tag[tk+n].s

        # Assign all retrieved data to parameter values.

        t_strt   = cdf['sec_beg'][s]
        t_stop   = cdf['sec_end'][s]
        elev_cen = cdf['the'][s]
        the_del  = cdf['d_the'][s]
        azim_cen = cdf['phi'][s]
        phi_del  = cdf['d_phi'][s]
        volt_cen = cdf['nrg'][s]
        volt_del = cdf['d_nrg'][s]
        psd      = cdf['psd'][s]

        spec = spec + [ pl_spec( t_strt=t_strt, t_stop=t_stop,
                                 elev_cen=elev_cen,
                                 the_del=the_del,
                                 azim_cen=azim_cen,
                                 phi_del=phi_del,
                                 volt_cen=volt_cen,
                                 volt_del=volt_del,
                                 psd=psd               ) ]

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date( )

    return spec
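# Note.  A minimal usage sketch for this PESA-L variant of "load_spec"
#        (hypothetical archive instance and values): "dur" is the
#        duration in seconds of the concurrent FC spectrum and
#        "fc_bins" is its number of voltage bins; a (possibly empty)
#        list of "pl_spec" objects is returned.
#
#            specs = arc.load_spec( '2008-11-04/12:00:00',
#                                   dur=92., fc_bins=31 )
#            for spec in specs :
#                ...  # process each PESA-L spectrum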