def __init__(self, time_strt, time_stop):

    # Inherit all attributes of an instance of "QDialog".

    super(dialog_auto_prog, self).__init__()

    # Make this a non-modal dialog (i.e., allow the user to still
    # interact with the main application window).

    self.setModal(False)

    # Set the title of this dialog window.

    self.setWindowTitle('Progress')

    # Give this widget a grid layout, "self.grd".

    self.grd = QGridLayout()
    self.grd.setContentsMargins(6, 6, 6, 6)

    self.setLayout(self.grd)

    # Initialize the progress bar and set its minimum, maximum, and
    # initial values.

    self.bar = QProgressBar()

    self.bar.setMinimum(calc_time_val(time_strt))
    self.bar.setMaximum(calc_time_val(time_stop))

    self.bar.setValue(self.bar.minimum())

    # Initialize the event button.

    self.btn_exit = event_PushButton(self, 'exit', 'Close')

    # Initialize the label.

    self.lab = QLabel('Note: closing this window will *NOT* ' +
                      'interrupt the automated analysis.')
    self.lab.setWordWrap(True)

    # Row by row, add the bar and buttons to the grid layout.

    self.grd.addWidget(self.bar, 0, 0, 1, 1)
    self.grd.addWidget(self.btn_exit, 0, 1, 1, 1)
    self.grd.addWidget(self.lab, 1, 0, 1, 2)

    # Display this dialog.

    self.show()
def updt_bar(self, time):

    # Convert this function's argument (i.e., the timestamp of the
    # current spectrum) to Unix time.

    time_curr = calc_time_val(time)

    # If necessary, adjust the minimum or maximum of the progress
    # bar based on the new timestamp.

    if (time_curr < self.bar.minimum()):
        self.bar.setMinimum(time_curr)

    if (time_curr > self.bar.maximum()):
        self.bar.setMaximum(time_curr)

    # Update the value of the progress bar.

    self.bar.setValue(time_curr)
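# Usage sketch (hypothetical driver code, not part of this module): an
# automated-analysis loop could create the dialog once and then push each
# spectrum's timestamp into it, e.g.,
#
#     prog = dialog_auto_prog('2005-01-01/00:00:00', '2005-01-02/00:00:00')
#     for time in spectrum_timestamps:      # assumed list of timestamps
#         run_analysis(time)                # assumed analysis call
#         prog.updt_bar(time)
#
# Since the dialog is non-modal, closing it only hides the progress bar; it
# does not interrupt the loop that calls "updt_bar".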
def load_rang(self, time_strt, dur_sec):

    # Compute the requested start and stop times as values, as
    # strings, and as "datetime" epochs.

    time_strt_val = calc_time_val(time_strt)
    time_stop_val = calc_time_val(time_strt_val + dur_sec)

    time_strt_str = calc_time_str(time_strt_val)
    time_stop_str = calc_time_str(time_stop_val)

    time_strt_epc = calc_time_epc(time_strt_val)
    time_stop_epc = calc_time_epc(time_stop_val)

    # Construct an array of the dates requested.

    date_req = array([])

    date_i = (calc_time_str(time_strt_val - self.buf))[0:10]
    time_i = calc_time_val(date_i + '/00:00:00.000')

    while (time_i < (time_stop_val + self.buf)):

        # Add the current date to the array of dates to be
        # loaded.

        date_req = append(date_req, [date_i])

        # Move on to the next date.
        # Note.  This may look a bit odd, but it may be necessary
        #        to avoid issues with leap seconds.  An
        #        additional leap-second concern is the possibility
        #        of "date_req" containing duplicates, but that
        #        shouldn't be too much of an issue even if it
        #        does occur.

        time_i = time_i + 86400.
        date_i = (calc_time_str(time_i))[0:10]
        time_i = calc_time_val(date_i + '/00:00:00.000')

    # For each date in "date_req", load the data (if necessary).

    [self.load_date(dt) for dt in date_req]

    # Identify and extract the requested range of Wind/MFI data.

    tk = where((self.mfi_t >= (time_strt_epc - timedelta(0, self.tol))) &
               (self.mfi_t <= (time_stop_epc + timedelta(0, self.tol))))
    tk = tk[0]

    n_tk = len(tk)

    if (n_tk <= 0):

        self.mesg_txt('none')

        ret_t   = array([])
        ret_b_x = array([])
        ret_b_y = array([])
        ret_b_z = array([])

    else:

        ret_t   = self.mfi_t[tk]
        ret_b_x = self.mfi_b_x[tk]
        ret_b_y = self.mfi_b_y[tk]
        ret_b_z = self.mfi_b_z[tk]

        srt = argsort(ret_t)

        ret_t   = ret_t[srt]
        ret_b_x = ret_b_x[srt]
        ret_b_y = ret_b_y[srt]
        ret_b_z = ret_b_z[srt]

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    # Return the requested range of Wind/MFI data.

    return (list(ret_t), list(ret_b_x),
            list(ret_b_y), list(ret_b_z))
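# Usage sketch (hypothetical caller, not part of this module): requesting one
# hour of Wind/MFI data starting at a given timestamp might look like
#
#     (mfi_t, mfi_b_x, mfi_b_y, mfi_b_z) = \
#             mfi_arcv.load_rang('2005-01-01/12:00:00', 3600.)
#
# where "mfi_arcv" is assumed to be an instance of this archive class.  The
# four returned lists are time-sorted and restricted to the requested range
# (padded by "self.tol" seconds on either side).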
def load_spec(self, time, get_prev=False, get_next=False):

    # If both "get_????" keywords are "True", abort.

    if ((get_prev) and (get_next)):
        self.mesg_txt('none')
        return None

    # Convert/standardize the requested time.

    time_req_str = calc_time_str(time)
    time_req_val = calc_time_val(time)
    time_req_epc = calc_time_epc(time)

    # Extract the requested date (as a string) and the requested time
    # (as a float indicating seconds since midnight).  Likewise,
    # determine the date of the previous day and the date of the
    # next day.

    date_req_str = time_req_str[0:10]
    scnd_req_val = time_req_val - calc_time_val(date_req_str)

    date_pre_str = (calc_time_str(time_req_val - 86400.))[0:10]
    date_nex_str = (calc_time_str(time_req_val + 86400.))[0:10]

    # Load all the spectra from the requested date.  If the
    # requested time is within "self.buf" seconds of either the
    # previous or next day, load all the spectra from that date as
    # well.
    # Note.  There is no need to check here whether a date has
    #        already been loaded as that's the first thing that
    #        "self.load_date( )" does.

    self.load_date(date_req_str)

    if (scnd_req_val <= self.buf):
        self.load_date(date_pre_str)

    if ((86400. - scnd_req_val) <= self.buf):
        self.load_date(date_nex_str)

    # If no spectra have been loaded, abort.

    if (len(self.arr_tag) == 0):
        self.mesg_txt('none')
        return None

    # Locate the spectrum whose timestamp is closest to the
    # one requested.

    adt = [abs(tag.epoch - time_req_epc) for tag in self.arr_tag]

    adt_min = min(adt)

    tk = [a for a in range(len(adt)) if adt[a] == adt_min][0]

    if (get_prev):
        tk -= 1

    if (get_next):
        tk += 1

    if ((tk < 0) or (tk >= len(self.arr_tag))):
        self.mesg_txt('none')
        return None

    # If the selected spectrum is not within the request tolerance
    # (e.g., in the case of a long data gap), abort.

    if ((adt[tk]).total_seconds() > self.tol):
        self.mesg_txt('none')
        return None

    # Extract the spectrum to be returned.

    cdf = self.arr_cdf[self.arr_tag[tk].c]
    s = self.arr_tag[tk].s

    # Find the actual number of voltage bins.

    n_bin_max = 31
    n_dir = 20

    for n_bin_1 in range(n_bin_max):
        if (n_bin_1 == n_bin_max + 1):
            break
        if (cdf['cup1_EperQ'][s][n_bin_1] >=
                cdf['cup1_EperQ'][s][n_bin_1 + 1]):
            break
        n_bin_1 += 1

    for n_bin_2 in range(n_bin_max):
        if (n_bin_2 == n_bin_max + 1):
            break
        if (cdf['cup2_EperQ'][s][n_bin_2] >=
                cdf['cup2_EperQ'][s][n_bin_2 + 1]):
            break
        n_bin_2 += 1

    n_bin = min([n_bin_1, n_bin_2])

    # Assign all retrieved data to parameter values.

    time = cdf['Epoch'][s]

    elev = [float(cdf['inclination_angle'][0]),
            float(cdf['inclination_angle'][1])]

    azim = [[float(cdf['cup1_azimuth'][s][d]) for d in range(n_dir)],
            [float(cdf['cup2_azimuth'][s][d]) for d in range(n_dir)]]

    volt_cen = [[float(cdf['cup1_EperQ'][s][b]) for b in range(n_bin)],
                [float(cdf['cup2_EperQ'][s][b]) for b in range(n_bin)]]

    volt_del = [[float(cdf['cup1_EperQ_DEL'][s][b]) for b in range(n_bin)],
                [float(cdf['cup2_EperQ_DEL'][s][b]) for b in range(n_bin)]]

    curr = [[[float(cdf['cup1_qflux'][s][d][b]) for b in range(n_bin)]
             for d in range(n_dir)],
            [[float(cdf['cup2_qflux'][s][d][b]) for b in range(n_bin)]
             for d in range(n_dir)]]

    spec = fc_spec(n_bin, elev=elev, azim=azim, volt_cen=volt_cen,
                   volt_del=volt_del, curr=curr, time=time)

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    return spec
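# The two "n_bin" loops above estimate the number of valid voltage bins by
# walking each cup's E/Q table until it stops increasing strictly.  A minimal
# standalone sketch of the same monotonicity idea (hypothetical helper, not
# used by this module; its run-length convention may differ by one from the
# index-based count above) is:
#
#     def count_incr_bins(eperq):
#         # Return the length of the leading strictly-increasing run.
#         n = 1
#         while ((n < len(eperq)) and (eperq[n] > eperq[n - 1])):
#             n += 1
#         return n
#
# so that, e.g., count_incr_bins([150., 162., 175., 175., 0.]) returns 3.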
def load_date(self, date_str):

    # Determine whether or not the requested date has already been
    # loaded.  If it has, abort.

    if (self.n_date > 0):

        tk = where(self.date_str == date_str)[0]

        if (len(tk) > 0):
            return

    # Extract the year, month, and day portions of the "date_str"
    # string.

    str_year = date_str[0:4]
    str_mon = date_str[5:7]
    str_day = date_str[8:10]

    # Attempt to load and extract data from the appropriate file.
    # Note.  The default data file format is CDF, and the code will
    #        attempt to download the appropriate CDF file from
    #        CDAWeb if it doesn't find one in the specified
    #        directory.  However, the user may also request that
    #        IDL "SAVE" files be used instead.

    if (self.use_idl):

        # Determine the path and name of the file corresponding
        # to the requested date.

        fl = ('wind_janus_fc_' + str_year + '-' +
              str_mon + '-' + str_day + '.idl')

        fl_path = os.path.join(self.path, fl)

        # If the file exists, attempt to load it; otherwise,
        # abort.

        self.mesg_txt('load', date_str)

        if (os.path.isfile(fl_path)):
            try:
                dat = readsav(fl_path)
            except:
                self.mesg_txt('fail', date_str)
                return
        else:
            self.mesg_txt('fail', date_str)
            return

        # Determine the number of spectra loaded.  If no spectra
        # were loaded, return.

        n_sub = len(dat.sec)

        if (n_sub <= 0):
            self.mesg_txt('fail', date_str)
            return

        # Separate the loaded data into parameter arrays.

        sub_time_val = dat.sec + calc_time_val(date_str)
        sub_time_epc = array(
            [calc_time_epc(t_val) for t_val in sub_time_val])

        sub_cup1_azm = dat.cup1_angles
        sub_cup2_azm = dat.cup2_angles

        sub_cup1_c_vol = dat.cup1_eperq
        sub_cup2_c_vol = dat.cup2_eperq

        sub_cup1_d_vol = dat.cup1_eqdel
        sub_cup2_d_vol = dat.cup2_eqdel

        sub_cup1_cur = 1E12 * array([
            transpose(dat.currents[s, 0, :, :] + dat.currents[s, 2, :, :])
            for s in range(n_sub)])
        sub_cup2_cur = 1E12 * array([
            transpose(dat.currents[s, 1, :, :] + dat.currents[s, 3, :, :])
            for s in range(n_sub)])

        sub_ind = tile(self.t_date, n_sub)

    else:

        # Determine the name of the file that contains data from
        # the requested date.

        fl0 = ('wi_sw-ion-dist_swe-faraday_' +
               str_year + str_mon + str_day + '_v??.cdf')

        fl0_path = os.path.join(self.path, fl0)

        gb = glob(fl0_path)

        # If the file does not exist, attempt to download it.

        if (len(gb) > 0):
            fl_path = gb[-1]
        else:
            try:
                self.mesg_txt('ftp', date_str)
                ftp = FTP('cdaweb.gsfc.nasa.gov')
                ftp.login()
                ftp.cwd('pub/data/wind/swe/swe_faraday/')
                ftp.cwd(str_year)
                ls = ftp.nlst(fl0)
                fl = ls[-1]
                fl_path = os.path.join(self.path, fl)
                ftp.retrbinary("RETR " + fl,
                               open(fl_path, 'wb').write)
            except:
                self.mesg_txt('fail', date_str)
                return

        # If the file now exists, try to load it; otherwise,
        # abort.

        self.mesg_txt('load', date_str)

        if (os.path.isfile(fl_path)):
            try:
                cdf = pycdf.CDF(fl_path)
            except:
                self.mesg_txt('fail', date_str)
                return
        else:
            self.mesg_txt('fail', date_str)
            return

        # Separate the loaded data into parameter arrays, and
        # determine the number of spectra loaded.

        sub_time_epc = array(cdf['Epoch'])

        sub_cup1_azm = array(cdf['cup1_azimuth'])
        sub_cup2_azm = array(cdf['cup2_azimuth'])

        sub_cup1_c_vol = array(cdf['cup1_EperQ'])
        sub_cup2_c_vol = array(cdf['cup2_EperQ'])

        sub_cup1_d_vol = array(cdf['cup1_EperQ_DEL'])
        sub_cup2_d_vol = array(cdf['cup2_EperQ_DEL'])

        sub_cup1_cur = array(cdf['cup1_qflux'])
        sub_cup2_cur = array(cdf['cup2_qflux'])

        n_sub = len(sub_time_epc)

        sub_ind = tile(self.t_date, n_sub)

    # Add the loaded and formatted Wind/FC spectra to the archive.
    self.fc_time_epc = append(self.fc_time_epc, sub_time_epc, axis=0)

    self.fc_cup1_azm = append(self.fc_cup1_azm, sub_cup1_azm, axis=0)
    self.fc_cup2_azm = append(self.fc_cup2_azm, sub_cup2_azm, axis=0)

    self.fc_cup1_c_vol = append(self.fc_cup1_c_vol, sub_cup1_c_vol, axis=0)
    self.fc_cup2_c_vol = append(self.fc_cup2_c_vol, sub_cup2_c_vol, axis=0)

    self.fc_cup1_d_vol = append(self.fc_cup1_d_vol, sub_cup1_d_vol, axis=0)
    self.fc_cup2_d_vol = append(self.fc_cup2_d_vol, sub_cup2_d_vol, axis=0)

    self.fc_cup1_cur = append(self.fc_cup1_cur, sub_cup1_cur, axis=0)
    self.fc_cup2_cur = append(self.fc_cup2_cur, sub_cup2_cur, axis=0)

    self.fc_ind = append(self.fc_ind, sub_ind, axis=0)

    self.n_fc = self.n_fc + n_sub

    # Append this date to the array of loaded dates.

    self.date_str = append(self.date_str, [date_str])
    self.date_ind = append(self.date_ind, [self.n_date])

    self.n_date += 1
    self.t_date += 1

    # Request a clean-up of the files in the data directory.

    self.cleanup_file()
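# Usage sketch (hypothetical caller): "load_date" is normally invoked
# indirectly through "load_spec" / "load_rang", but it can also be called
# directly to pre-load a day's worth of spectra, e.g.,
#
#     fc_arcv.load_date('2005-01-01')    # "fc_arcv": assumed archive instance
#
# Repeated calls with the same date string are cheap, since the method
# returns immediately if that date already appears in "self.date_str".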
def load_spec(self, time, get_prev=False, get_next=False,
              tmin=None, tmax=None):

    # If both "get_????" keywords are "True", abort.

    if ((get_prev) and (get_next)):
        self.mesg_txt('none')
        return None

    # Convert/standardize the requested time.

    time_req_str = calc_time_str(time)
    time_req_val = calc_time_val(time)
    time_req_epc = calc_time_epc(time)

    # Extract the requested date (as a string) and the requested time
    # (as a float indicating seconds since midnight).  Likewise,
    # determine the date of the previous day and the date of the
    # next day.

    date_req_str = time_req_str[0:10]
    scnd_req_val = time_req_val - calc_time_val(date_req_str)

    date_pre_str = (calc_time_str(time_req_val - 86400.))[0:10]
    date_nex_str = (calc_time_str(time_req_val + 86400.))[0:10]

    # Load all the spectra from the requested date.  If the
    # requested time is within "self.buf" seconds of either the
    # previous or next day, load all the spectra from that date as
    # well.
    # Note.  There is no need to check here whether a date has
    #        already been loaded as that's the first thing that
    #        "self.load_date( )" does.

    self.load_date(date_req_str)

    if (scnd_req_val <= self.buf):
        self.load_date(date_pre_str)

    if ((86400. - scnd_req_val) <= self.buf):
        self.load_date(date_nex_str)

    # If no spectra have been loaded, abort.

    if (self.n_fc <= 0):
        self.mesg_txt('none')
        return None

    # Identify the subset of spectra with timestamps between "tmin"
    # and "tmax".

    if (tmin is not None):
        con_tmin = (self.fc_time_epc >= calc_time_epc(tmin))
    else:
        con_tmin = tile(True, self.n_fc)

    if (tmax is not None):
        con_tmax = (self.fc_time_epc <= calc_time_epc(tmax))
    else:
        con_tmax = tile(True, self.n_fc)

    tk_con = where(con_tmin & con_tmax)[0]

    # If no spectra had timestamps in the specified range, abort.

    if (len(tk_con) <= 0):
        self.mesg_txt('none')
        return None

    # Compute the time difference between the timestamps within the
    # "tm??" range and the requested time.  Identify the smallest
    # absolute value in this array and the index of the
    # corresponding spectrum.

    dt = array([(epc - time_req_epc).total_seconds()
                for epc in self.fc_time_epc[tk_con]])

    dt_abs = abs(dt)

    dt_abs_min = amin(dt_abs)

    tk_dt = where(dt_abs == dt_abs_min)[0][0]
    tk_req = tk_con[tk_dt]

    # Set the spectrum with index "tk_req" to be returned.  If the
    # (chronologically) next or previous spectrum has been
    # requested, find it and set it to be returned instead.

    tk = tk_req

    if ((get_prev) and (not get_next)):

        tk_sub = where(dt < dt[tk_dt])[0]

        if (len(tk_sub) <= 0):
            self.mesg_txt('none')
            return None

        tk_dt_prev = where(dt == amax(dt[tk_sub]))[0][0]

        tk = tk_con[tk_dt_prev]

    if ((get_next) and (not get_prev)):

        tk_sub = where(dt > dt[tk_dt])[0]

        if (len(tk_sub) <= 0):
            self.mesg_txt('none')
            return None

        tk_dt_next = where(dt == amin(dt[tk_sub]))[0][0]

        tk = tk_con[tk_dt_next]

    # If the selected spectrum is not within the request tolerance,
    # abort.

    if (abs((self.fc_time_epc[tk] -
             time_req_epc).total_seconds()) > self.tol):
        self.mesg_txt('none')
        return None

    # Extract the spectrum to be returned.

    ret_time_epc = self.fc_time_epc[tk]

    ret_cup1_azm = self.fc_cup1_azm[tk]
    ret_cup2_azm = self.fc_cup2_azm[tk]

    ret_cup1_c_vol = self.fc_cup1_c_vol[tk]
    ret_cup2_c_vol = self.fc_cup2_c_vol[tk]

    ret_cup1_d_vol = self.fc_cup1_d_vol[tk]
    ret_cup2_d_vol = self.fc_cup2_d_vol[tk]

    ret_cup1_cur = self.fc_cup1_cur[tk]
    ret_cup2_cur = self.fc_cup2_cur[tk]

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date()

    # Return the selected spectrum to the user.
    return (ret_time_epc,
            ret_cup1_azm, ret_cup2_azm,
            ret_cup1_c_vol, ret_cup2_c_vol,
            ret_cup1_d_vol, ret_cup2_d_vol,
            ret_cup1_cur, ret_cup2_cur)
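# Usage sketch (hypothetical caller): the nearest spectrum to a timestamp can
# be requested, and its components unpacked, as, e.g.,
#
#     ret = fc_arcv.load_spec('2005-01-01/12:00:00',
#                             tmin='2005-01-01/11:00:00',
#                             tmax='2005-01-01/13:00:00')
#     if ret is not None:
#         (t_epc, azm1, azm2, c_vol1, c_vol2,
#          d_vol1, d_vol2, cur1, cur2) = ret
#
# where "fc_arcv" is assumed to be an instance of this archive class.  The
# "None" check matters because the method returns "None" whenever no spectrum
# falls within "self.tol" seconds of the requested time.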
def load_spec( self, time, dur, fc_bins ) :

    # Convert/standardize the requested time.

    time_req_str = calc_time_str( time )
    time_req_val = calc_time_val( time )
    time_req_epc = calc_time_epc( time )

    # Extract the requested date (as a string) and the requested time
    # (as a float indicating seconds since midnight).  Likewise,
    # determine the date of the previous day and the date of the
    # next day.

    date_req_str = time_req_str[0:10]
    scnd_req_val = time_req_val - calc_time_val( date_req_str )

    date_pre_str = ( calc_time_str( time_req_val - 86400. ) )[0:10]
    date_nex_str = ( calc_time_str( time_req_val + 86400. ) )[0:10]

    # Load all the spectra from the requested date.  If the
    # requested time is within "self.buf" seconds of either the
    # previous or next day, load all the spectra from that date as
    # well.
    # Note.  There is no need to check here whether a date has
    #        already been loaded as that's the first thing that
    #        "self.load_date( )" does.

    self.load_date( date_req_str )

    if ( scnd_req_val <= self.buf ) :
        self.load_date( date_pre_str )

    if ( ( 86400. - scnd_req_val ) <= self.buf ) :
        self.load_date( date_nex_str )

    # If no spectra have been loaded, abort.

    if ( len( self.arr_tag ) == 0 ) :
        self.mesg_txt( 'none' )
        return []

    # Locate the spectrum whose timestamp is closest to the
    # one requested.

    dt  = [ datetime( 1970, 1, 1 ) + timedelta( seconds=tag.epoch )
            - self.core.fc_spec['time'] for tag in self.arr_tag ]
    adt = [ abs( del_t ) for del_t in dt ]

    adt_min = min( adt )

    dt_min = dt[ where( [ del_t == adt_min for del_t in adt ] )[0][0] ]

    tk = [ a for a in range( len( adt ) ) if adt[a] == adt_min ][0]

    # if ( get_prev ) :
    #     tk -= 1
    # if ( get_next ) :
    #     tk += 1

    if ( ( tk < 0 ) or ( tk >= len( self.arr_tag ) ) ) :
        self.mesg_txt( 'none' )
        return []

    # Determine how many more PESA-L spectra exist within the
    # duration of the FC spectrum.

    num_spec = len( where( [ ( del_t >= timedelta( seconds=-1.*dur/fc_bins )
                               and del_t <= timedelta( seconds=dur ) )
                             for del_t in dt ] )[0] )

    # If the selected spectrum is not within the request tolerance
    # (e.g., in the case of a long data gap), abort.

    if ( ( adt[tk] ).total_seconds() > self.tol ) :
        self.mesg_txt( 'none' )
        return []

    # Get the PL spectra that lie within this time.

    spec = []

    if num_spec == 1 :
        plur = 'spectrum'
    else :
        plur = 'spectra'

    self.mesg_txt( 'load', ( str( num_spec ) + ' ' + plur + ' found' ) )

    for n in range( num_spec ) :

        # Extract the spectrum to be returned.

        cdf = self.arr_cdf[ self.arr_tag[tk+n].c ]
        s   = self.arr_tag[ tk+n ].s

        # Assign all retrieved data to parameter values.

        t_strt   = cdf['sec_beg'][s]
        t_stop   = cdf['sec_end'][s]
        elev_cen = cdf['the'][s]
        the_del  = cdf['d_the'][s]
        azim_cen = cdf['phi'][s]
        phi_del  = cdf['d_phi'][s]
        volt_cen = cdf['nrg'][s]
        volt_del = cdf['d_nrg'][s]
        psd      = cdf['psd'][s]

        spec = spec + [ pl_spec( t_strt=t_strt, t_stop=t_stop,
                                 elev_cen=elev_cen, the_del=the_del,
                                 azim_cen=azim_cen, phi_del=phi_del,
                                 volt_cen=volt_cen, volt_del=volt_del,
                                 psd=psd ) ]

    # Request a cleanup of the data loaded into this archive.

    self.cleanup_date( )

    return spec
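# Usage sketch (hypothetical caller): the PESA-L spectra overlapping an FC
# spectrum of duration "dur" seconds with "fc_bins" voltage bins could be
# retrieved as, e.g.,
#
#     pl_specs = pl_arcv.load_spec( '2005-01-01/12:00:00', 90., 30 )
#     for pl in pl_specs :    # empty list if nothing was found in range
#         process( pl )       # "process" is an assumed analysis call
#
# where "pl_arcv" is assumed to be an instance of this archive class and the
# duration and bin-count values are placeholders.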