Code Example #1
File: janus_fc_arcv.py  Project: blalterman/FC
    def num_spec(self, tmin=None, tmax=None):

        # CAUTION!  This function only counts spectra that have already
        #           been loaded into this archive from the data files.

        # First, handle the easy cases: i.e., the case of no spectra
        # having been loaded into the archive and the case of both
        # "tm??" values being "None".

        if (self.n_fc == 0):
            return 0

        if ((tmin is None) and (tmax is None)):
            return self.n_fc

        # Identify the subset of spectra with timestamps between "tmin"
        # and "tmax" and return the size of that subset.

        if (tmin is not None):
            con_tmin = (self.fc_time_epc >= calc_time_epc(tmin))
        else:
            con_tmin = tile(True, self.n_fc)

        if (tmax is not None):
            con_tmax = (self.fc_time_epc <= calc_time_epc(tmax))
        else:
            con_tmax = tile(True, self.n_fc)

        tk_con = where(con_tmin & con_tmax)[0]

        return len(tk_con)
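
For reference, below is a minimal, self-contained sketch of the same boolean-mask counting pattern using plain numpy; the epoch array and bounds are hypothetical stand-ins for "fc_time_epc", "tmin", and "tmax".

# Hypothetical sample data standing in for the archive's "fc_time_epc".
from datetime import datetime, timedelta
import numpy as np

epochs = np.array([datetime(2010, 1, 1) + timedelta(minutes=92 * i)
                   for i in range(10)])

tmin = datetime(2010, 1, 1, 3, 0, 0)
tmax = datetime(2010, 1, 1, 12, 0, 0)

# Build one boolean mask per bound; a missing bound selects everything.
con_tmin = (epochs >= tmin) if tmin is not None else np.tile(True, len(epochs))
con_tmax = (epochs <= tmax) if tmax is not None else np.tile(True, len(epochs))

# The number of spectra in range is the size of the combined selection.
print(len(np.where(con_tmin & con_tmax)[0]))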
Code Example #2
    def rtrv_time(self):

        # Attempt to retrieve each timestamp.

        # Note.  If the function "calc_time_epc" is given input that it
        #        cannot process, it should return "None".

        self.time_strt = calc_time_epc(str(self.txt_strt.text()))
        self.time_stop = calc_time_epc(str(self.txt_stop.text()))
        self.get_next = self.box_next.isChecked()
        self.err_halt = self.box_halt.isChecked()

        # Validate the timestamps and update the text color of the text
        # boxes appropriately.

        self.vldt_time()
Code Example #3
    def load_spin(self, time):

        # Compute the requested time both as a standard-format string
        # and a "datetime" object.

        time_str = calc_time_str(time)
        time_epc = calc_time_epc(time)

        # Construct a list of the dates requested.

        req_date = []

        tm_strt = time_epc - timedelta(seconds=self.buf)
        tm_stop = time_epc + timedelta(seconds=self.buf)

        dt_strt = datetime(tm_strt.year, tm_strt.month, tm_strt.day)
        dt_stop = datetime(tm_stop.year, tm_stop.month, tm_stop.day)

        dt_i = dt_strt

        while (dt_i <= dt_stop):
            date_i = (calc_time_str(dt_i))[0:10]
            req_date.append(date_i)
            dt_i += timedelta(1)

        # For each date in "req_date", load the data (if necessary).

        for date in req_date:
            self.load_date(date)

        # Compute the absolute time difference between the requested
        # time and the timestamp of each loaded datum.

        adt = [abs((t - time_epc).total_seconds()) for t in self.arr_spin_t]

        # Determine the ordering of the absolute time differences.

        arg = sorted(range(len(adt)), key=adt.__getitem__)

        # If the smallest time difference exceeds the buffer
        # ("self.buf"), return 'None'.

        if (adt[arg[0]] > self.buf):
            return None

        # Compute the median spin rate for the data with the smallest
        # time differences.

        w = median([self.arr_spin_w[arg[i]] for i in range(self.win)])

        # Request a cleanup of the data loaded into this archive.

        self.cleanup_date()

        # Return the spin period.

        return (2. * pi / w)
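
The closing steps above reduce to: order the samples by absolute time offset, take the median angular rate of the nearest "self.win" samples, and convert it to a period.  A short sketch with hypothetical sample values:

from math import pi
from numpy import argsort, median

adt   = [4.1, 0.7, 2.3, 9.0, 1.5]        # |t - t_req| for each loaded datum, s
w_all = [1.68, 1.70, 1.69, 1.71, 1.67]   # angular spin rates, rad/s
win   = 3                                # number of nearest samples to use

arg = argsort(adt)                       # indices ordered by time offset

# Median angular rate of the "win" closest samples, then the spin period.
w = median([w_all[arg[i]] for i in range(win)])
print(2. * pi / w)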
Code Example #4
    def load_rang(self, time_strt, dur_sec):

        # Compute the requested start and stop times as values, as
        # strings, and as "datetime" epochs.

        # Diagnostic output of the requested start time and duration.
        print(time_strt, dur_sec)
        time_strt_val = calc_time_val(time_strt)
        time_stop_val = calc_time_val(time_strt_val + dur_sec)

        time_strt_str = calc_time_str(time_strt_val)
        time_stop_str = calc_time_str(time_stop_val)

        time_strt_epc = calc_time_epc(time_strt_val)
        time_stop_epc = calc_time_epc(time_stop_val)

        # Construct an array of the dates requested.

        date_req = array([])

        date_i = (calc_time_str(time_strt_val - self.buf))[0:10]
        time_i = calc_time_val(date_i + '/00:00:00.000')

        while (time_i < (time_stop_val + self.buf)):

            # Add the current date to the array of dates to be
            # loaded.

            date_req = append(date_req, [date_i])

            # Move on to the next date.

            # Note.  This may look a bit odd, but may be necessary
            #        to avoid issues with leap seconds.  An
            #        additional leap-second concern is the possibility
            #        of "date_req" containing duplicates, but that
            #        shouldn't be too much of an issue even if it
            #        does occur.

            time_i = time_i + 86400.
            date_i = (calc_time_str(time_i))[0:10]
            time_i = calc_time_val(date_i + '/00:00:00.000')

        # For each date in "date_req", load the data (if necessary).

        for dt in date_req:
            self.load_date(dt)

        # Identify and extract the requested range of Wind/MFI data.

        tk = where((self.mfi_t >= (time_strt_epc - timedelta(0, self.tol)))
                   & (self.mfi_t <= (time_stop_epc + timedelta(0, self.tol))))
        tk = tk[0]

        n_tk = len(tk)

        if (n_tk <= 0):

            self.mesg_txt('none')

            ret_t = array([])
            ret_b_x = array([])
            ret_b_y = array([])
            ret_b_z = array([])

        else:

            ret_t = self.mfi_t[tk]
            ret_b_x = self.mfi_b_x[tk]
            ret_b_y = self.mfi_b_y[tk]
            ret_b_z = self.mfi_b_z[tk]

            srt = argsort(ret_t)

            ret_t = ret_t[srt]
            ret_b_x = ret_b_x[srt]
            ret_b_y = ret_b_y[srt]
            ret_b_z = ret_b_z[srt]

        # Request a cleanup of the data loaded into this archive.

        self.cleanup_date()

        # Return the requested range of Wind/MFI data.

        return (list(ret_t), list(ret_b_x), list(ret_b_y), list(ret_b_z))
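
The day-by-day loop above re-anchors each step to midnight through a date string so that leap seconds cannot make it drift.  A self-contained sketch of the same enumeration, using plain datetime arithmetic in place of the janus "calc_time_*" helpers (an assumption made purely for illustration):

from datetime import datetime, timedelta

strt = datetime(2008, 11, 4, 22, 30, 0)   # hypothetical start time
stop = strt + timedelta(seconds=7200.)    # hypothetical stop time
buf  = 3600.                              # padding on either side, s

# Start from midnight of the first padded day and step one day at a time.
day = datetime(*(strt - timedelta(seconds=buf)).timetuple()[:3])
date_req = []

while day < (stop + timedelta(seconds=buf)):
    date_req.append(day.strftime('%Y-%m-%d'))
    day += timedelta(days=1)

print(date_req)   # ['2008-11-04', '2008-11-05']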
Code Example #5
File: janus_fc_arcv.py  Project: qudsiramiz/FC
    def load_spec(self, time, get_prev=False, get_next=False):

        # If both "get_????" keywords are "True", abort.

        if ((get_prev) and (get_next)):
            self.mesg_txt('none')
            return None

        # Convert/standardize the requested time.

        time_req_str = calc_time_str(time)
        time_req_val = calc_time_val(time)
        time_req_epc = calc_time_epc(time)

        # Extract requested date (as a string) and the requested time
        # (as a float indicating seconds since midnight).  Likewise,
        # determine the date of the previous day and the date of the
        # next day.

        date_req_str = time_req_str[0:10]
        scnd_req_val = time_req_val - calc_time_val(date_req_str)

        date_pre_str = (calc_time_str(time_req_val - 86400.))[0:10]
        date_nex_str = (calc_time_str(time_req_val + 86400.))[0:10]

        # Load all the spectra from the requested date.  If the
        # requested time is within "self.buf" seconds of either the
        # previous or next day, load all the spectra from that date as
        # well.

        # Note.  There is no need to check here whether a date has
        #        already been loaded as that's the first thing that
        #        "self.load_date( )" does.

        self.load_date(date_req_str)

        if (scnd_req_val <= self.buf):
            self.load_date(date_pre_str)

        if ((86400. - scnd_req_val) <= self.buf):
            self.load_date(date_nex_str)

        # If no spectra have been loaded, abort.

        if (len(self.arr_tag) == 0):
            self.mesg_txt('none')
            return None

        # Locate the spectrum whose timestamp is closest to the
        # one requested.

        adt = [abs(tag.epoch - time_req_epc) for tag in self.arr_tag]

        adt_min = min(adt)

        tk = [a for a in range(len(adt)) if adt[a] == adt_min][0]

        if (get_prev):
            tk -= 1
        if (get_next):
            tk += 1

        if ((tk < 0) or (tk >= len(self.arr_tag))):
            self.mesg_txt('none')
            return None

        # If the selected spectrum is not within the requested
        # tolerance (e.g., in the case of a long data gap), abort.

        if ((adt[tk]).total_seconds() > self.tol):
            self.mesg_txt('none')
            return None

        # Extract the spectrum to be returned.

        cdf = self.arr_cdf[self.arr_tag[tk].c]
        s = self.arr_tag[tk].s

        # Find the actual number of voltage bins.

        n_bin_max = 31
        n_dir = 20

        # Scan each cup's E/q table for the first non-increasing value;
        # stop at the last index so that the look-ahead never reads past
        # the end of the table.

        for n_bin_1 in range(n_bin_max):
            if (n_bin_1 == n_bin_max - 1):
                break
            if (cdf['cup1_EperQ'][s][n_bin_1] >=
                    cdf['cup1_EperQ'][s][n_bin_1 + 1]):
                break
        n_bin_1 += 1

        for n_bin_2 in range(n_bin_max):
            if (n_bin_2 == n_bin_max - 1):
                break
            if (cdf['cup2_EperQ'][s][n_bin_2] >=
                    cdf['cup2_EperQ'][s][n_bin_2 + 1]):
                break
        n_bin_2 += 1

        n_bin = min([n_bin_1, n_bin_2])

        # Assign all retrieved data to parameter values.

        time = cdf['Epoch'][s]

        elev = [
            float(cdf['inclination_angle'][0]),
            float(cdf['inclination_angle'][1])
        ]

        azim = [[float(cdf['cup1_azimuth'][s][d]) for d in range(n_dir)],
                [float(cdf['cup2_azimuth'][s][d]) for d in range(n_dir)]]

        volt_cen = [[float(cdf['cup1_EperQ'][s][b]) for b in range(n_bin)],
                    [float(cdf['cup2_EperQ'][s][b]) for b in range(n_bin)]]

        volt_del = [[float(cdf['cup1_EperQ_DEL'][s][b]) for b in range(n_bin)],
                    [float(cdf['cup2_EperQ_DEL'][s][b]) for b in range(n_bin)]]

        curr = [[[float(cdf['cup1_qflux'][s][d][b]) for b in range(n_bin)]
                 for d in range(n_dir)],
                [[float(cdf['cup2_qflux'][s][d][b]) for b in range(n_bin)]
                 for d in range(n_dir)]]


        spec = fc_spec(n_bin, elev=elev, azim=azim, volt_cen=volt_cen,
                       volt_del=volt_del, curr=curr, time=time)

        # Request a cleanup of the data loaded into this archive.

        self.cleanup_date()

        return spec
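
The two bin-count loops above amount to finding the length of the strictly increasing prefix of each cup's E/q table.  A compact numpy sketch of that idea, with a hypothetical table:

from numpy import array, diff, where

eperq = array([100., 120., 150., 190., 240., 240., 240.])  # trailing fill values

# Usable bins = length of the strictly increasing prefix.
drop = where(diff(eperq) <= 0)[0]
n_bin = (drop[0] + 1) if len(drop) > 0 else len(eperq)

print(n_bin)   # -> 5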
Code Example #6
    def user_event(self, event, fnc):

        # If the stop button has been pressed, inform the core to abort
        # the automatic analysis.

        if (fnc == 'stop'):

            # Set the core's indicator of a premature-stop request
            # to "True".

            self.core.stop_auto_run = True

            # Return.

            return

        # If a "thread_*" computation thread is running, abort.

        if (n_thread() != 0):
            return

        # If the "Moments" or "Non-Linear" button has been pressed,
        # execute the requested analysis and return.

        if (fnc == 'mom'):
            if (n_thread() == 0):
                Thread(target=thread_anls_mom, args=(self.core, )).start()
            return

        if (fnc == 'nln'):
            if (n_thread() == 0):
                Thread(target=thread_anls_nln, args=(self.core, )).start()
            return

        # If the "Options" button has been pressed, launch a dialog box
        # that will allow the user to alter various settings.

        if (fnc == 'opt'):

            # Launch a dialog box to request options from the user.

            dialog_opt(self.core)

            # Return.

            return

        # If the "Auto" button has been pressed, launch the auto-run
        # dialog box.  If the user-input is valid, start a thread to run
        # the reqested analyses on the specified spectra, enable the
        # "stop" button, and return.

        if (fnc == 'auto'):

            # WARNING!  THIS FEATURE IS INCOMPLETE.  DURING
            #           DEVELOPMENT, IT IS ONLY AVAILABLE IN
            #           DEBUGGING MODE.

            # If debugging mode is not active, alert the user and
            # abort.

            if (not self.core.debug):
                dialog_missing().alert()
                return

            # Attempt to find suggested settings for the automated
            # analysis based on the current spectrum loaded (if any)
            # and the previous requests of the user.

            self.req_auto_strt = self.core.time_txt

            if (self.req_auto_strt == ''):
                self.req_auto_stop = ''
                self.req_auto_next = False
            else:
                self.req_auto_next = True
                if (self.req_auto_stop != ''):
                    epc_strt = calc_time_epc(self.req_auto_strt)
                    epc_stop = calc_time_epc(self.req_auto_stop)
                    if ((epc_strt is None) or (epc_stop is None)):
                        self.req_auto_stop = ''
                    elif (epc_strt >= epc_stop):
                        self.req_auto_stop = ''

            # Launch a dialog box to request a range of times from
            # the user.

            time_rang = dialog_auto_ctrl(
                time_strt=self.req_auto_strt,
                time_stop=self.req_auto_stop,
                get_next=self.req_auto_next).get_time_rang()

            # If the range of times is invalid (which can happen if
            # the user cancels the dialog), return.

            if (time_rang is None):
                return

            # Store the new requested times from the user.

            self.req_auto_strt = time_rang[0]
            self.req_auto_stop = time_rang[1]
            self.req_auto_next = time_rang[2]
            self.req_auto_halt = time_rang[3]

            # Assuming that there still aren't any janus threads
            # running, start a new thread for the automatic analysis
            # and make the "stop" button available for the user to
            # abort that analysis (if so desired).

            if (n_thread() == 0):

                # Start a new thread that automatically loads
                # and processes each spectrum in the time range
                # specified by the user.

                Thread(target=thread_auto_run,
                       args=(self.core, self.req_auto_strt, self.req_auto_stop,
                             self.req_auto_next, self.req_auto_halt,
                             1)).start()

                # Hide the "auto" button and make the "stop"
                # button visible (so that the user can abort the
                # automatic analyis).

                self.btn_auto.setVisible(False)
                self.btn_stop.setVisible(True)

                self.dia_prog = dialog_auto_prog(self.req_auto_strt,
                                                 self.req_auto_stop)

            # Return.

            return
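
The handler above only launches an analysis when no janus computation thread is running.  A minimal sketch of that guard pattern, with a hypothetical busy flag standing in for the "n_thread( )" helper:

from threading import Thread

busy = {'worker': None}   # hypothetical stand-in for the thread counter

def run_analysis(task):
    # Refuse to start a second worker while one is still running.
    if busy['worker'] is not None and busy['worker'].is_alive():
        return
    def work():
        task()
        busy['worker'] = None
    busy['worker'] = Thread(target=work)
    busy['worker'].start()

run_analysis(lambda: print('analysis done'))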
Code Example #7
File: janus_fc_arcv.py  Project: blalterman/FC
    def load_date(self, date_str):

        # Determine whether or not the requested date has already been
        # loaded.  If it has, abort.

        if (self.n_date > 0):

            tk = where(self.date_str == date_str)[0]

            if (len(tk) > 0):
                return

        # Extract the year, month, and day portions of the "date_str"
        # string.

        str_year = date_str[0:4]
        str_mon = date_str[5:7]
        str_day = date_str[8:10]

        # Attempt to load and extract data from the appropriate file.

        # Note.  The default data file format is CDF, and the code will
        #        attempt to download the appropriate CDF file from
        #        CDAWeb if it doesn't find one in the specified
        #        directory.  However, the user may also request that
        #        IDL "SAVE" files be used instead.

        if (self.use_idl):

            # Determine the path and name of the file corresponding
            # to the requested date.

            fl = 'wind_janus_fc_' + str_year + '-' + \
                                    str_mon + '-' + str_day + '.idl'

            fl_path = os.path.join(self.path, fl)

            # If the file exists, attempt to load it; otherwise,
            # abort.

            self.mesg_txt('load', date_str)

            if (os.path.isfile(fl_path)):
                try:
                    dat = readsav(fl_path)
                except Exception:
                    self.mesg_txt('fail', date_str)
                    return
            else:
                self.mesg_txt('fail', date_str)
                return

            # Determine the number of spectra loaded.  If no spectra
            # were loaded, return.

            n_sub = len(dat.sec)

            if (n_sub <= 0):
                self.mesg_txt('fail', date_str)
                return

            # Separate the loaded data into parameter arrays.

            sub_time_val = dat.sec + calc_time_val(date_str)

            sub_time_epc = array(
                [calc_time_epc(t_val) for t_val in sub_time_val])

            sub_cup1_azm = dat.cup1_angles
            sub_cup2_azm = dat.cup2_angles
            sub_cup1_c_vol = dat.cup1_eperq
            sub_cup2_c_vol = dat.cup2_eperq
            sub_cup1_d_vol = dat.cup1_eqdel
            sub_cup2_d_vol = dat.cup2_eqdel

            sub_cup1_cur = 1E12 * array([
                transpose(dat.currents[s, 0, :, :] + dat.currents[s, 2, :, :])
                for s in range(n_sub)
            ])
            sub_cup2_cur = 1E12 * array([
                transpose(dat.currents[s, 1, :, :] + dat.currents[s, 3, :, :])
                for s in range(n_sub)
            ])

            sub_ind = tile(self.t_date, n_sub)

        else:

            # Determine the name of the file that contains data from
            # the requested date.

            fl0 = 'wi_sw-ion-dist_swe-faraday_' + \
                  str_year + str_mon + str_day + '_v??.cdf'

            fl0_path = os.path.join(self.path, fl0)

            gb = glob(fl0_path)

            # If the file does not exist, attempt to download it.

            if (len(gb) > 0):
                fl_path = gb[-1]
            else:
                try:
                    self.mesg_txt('ftp', date_str)
                    ftp = FTP('cdaweb.gsfc.nasa.gov')
                    ftp.login()
                    ftp.cwd('pub/data/wind/swe/swe_faraday/')
                    ftp.cwd(str_year)
                    ls = ftp.nlst(fl0)
                    fl = ls[-1]
                    fl_path = os.path.join(self.path, fl)
                    with open(fl_path, 'wb') as fh:
                        ftp.retrbinary("RETR " + fl, fh.write)
                except Exception:
                    self.mesg_txt('fail', date_str)
                    return

            # If the file now exists, try to load it; otherwise,
            # abort.

            self.mesg_txt('load', date_str)

            if (os.path.isfile(fl_path)):
                try:
                    cdf = pycdf.CDF(fl_path)
                except Exception:
                    self.mesg_txt('fail', date_str)
                    return
            else:
                self.mesg_txt('fail', date_str)
                return

            # Separate the loaded data into parameter arrays, and
            # determine the number of spectra loaded.

            sub_time_epc = array(cdf['Epoch'])
            sub_cup1_azm = array(cdf['cup1_azimuth'])
            sub_cup2_azm = array(cdf['cup2_azimuth'])
            sub_cup1_c_vol = array(cdf['cup1_EperQ'])
            sub_cup2_c_vol = array(cdf['cup2_EperQ'])
            sub_cup1_d_vol = array(cdf['cup1_EperQ_DEL'])
            sub_cup2_d_vol = array(cdf['cup2_EperQ_DEL'])
            sub_cup1_cur = array(cdf['cup1_qflux'])
            sub_cup2_cur = array(cdf['cup2_qflux'])

            n_sub = len(sub_time_epc)

            sub_ind = tile(self.t_date, n_sub)

        # Add the loaded and formatted Wind/FC spectra to the archive.

        self.fc_time_epc = append(self.fc_time_epc, sub_time_epc, axis=0)
        self.fc_cup1_azm = append(self.fc_cup1_azm, sub_cup1_azm, axis=0)
        self.fc_cup2_azm = append(self.fc_cup2_azm, sub_cup2_azm, axis=0)
        self.fc_cup1_c_vol = append(self.fc_cup1_c_vol, sub_cup1_c_vol, axis=0)
        self.fc_cup2_c_vol = append(self.fc_cup2_c_vol, sub_cup2_c_vol, axis=0)
        self.fc_cup1_d_vol = append(self.fc_cup1_d_vol, sub_cup1_d_vol, axis=0)
        self.fc_cup2_d_vol = append(self.fc_cup2_d_vol, sub_cup2_d_vol, axis=0)
        self.fc_cup1_cur = append(self.fc_cup1_cur, sub_cup1_cur, axis=0)
        self.fc_cup2_cur = append(self.fc_cup2_cur, sub_cup2_cur, axis=0)
        self.fc_ind = append(self.fc_ind, sub_ind, axis=0)

        self.n_fc = self.n_fc + n_sub

        # Append this date to the array of loaded dates.

        self.date_str = append(self.date_str, [date_str])
        self.date_ind = append(self.date_ind, [self.n_date])

        self.n_date += 1
        self.t_date += 1

        # Request a clean-up of the files in the data directory.

        self.cleanup_file()
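
The CDF branch above first looks for a matching local file and only then falls back to an anonymous FTP download.  A self-contained sketch of that fallback; the host, directory, and file pattern are taken from the code above, but whether the server still accepts anonymous FTP is not guaranteed:

import os
from ftplib import FTP
from glob import glob

path = '.'
fl0 = 'wi_sw-ion-dist_swe-faraday_20081104_v??.cdf'

gb = glob(os.path.join(path, fl0))

if len(gb) > 0:
    fl_path = gb[-1]                       # newest local version wins
else:
    ftp = FTP('cdaweb.gsfc.nasa.gov')      # anonymous CDAWeb FTP
    ftp.login()
    ftp.cwd('pub/data/wind/swe/swe_faraday/2008')
    fl = ftp.nlst(fl0)[-1]                 # newest matching remote version
    fl_path = os.path.join(path, fl)
    with open(fl_path, 'wb') as fh:
        ftp.retrbinary('RETR ' + fl, fh.write)
    ftp.quit()

print(fl_path)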
Code Example #8
File: janus_fc_arcv.py  Project: blalterman/FC
    def load_spec(self,
                  time,
                  get_prev=False,
                  get_next=False,
                  tmin=None,
                  tmax=None):

        # If both "get_????" keywords are "True", abort.

        if ((get_prev) and (get_next)):
            self.mesg_txt('none')
            return None

        # Convert/standardize the requested time.

        time_req_str = calc_time_str(time)
        time_req_val = calc_time_val(time)
        time_req_epc = calc_time_epc(time)

        # Extract requested date (as a string) and the requested time
        # (as a float indicating seconds since midnight).  Likewise,
        # determine the date of the previous day and the date of the
        # next day.

        date_req_str = time_req_str[0:10]
        scnd_req_val = time_req_val - calc_time_val(date_req_str)

        date_pre_str = (calc_time_str(time_req_val - 86400.))[0:10]
        date_nex_str = (calc_time_str(time_req_val + 86400.))[0:10]

        # Load all the spectra from the requested date.  If the
        # requested time is within "self.buf" seconds of either the
        # previous or next day, load all the spectra from that date as
        # well.

        # Note.  There is no need to check here whether a date has
        #        already been loaded as that's the first thing that
        #        "self.load_date( )" does.

        self.load_date(date_req_str)

        if (scnd_req_val <= self.buf):
            self.load_date(date_pre_str)

        if ((86400. - scnd_req_val) <= self.buf):
            self.load_date(date_nex_str)

        # If no spectra have been loaded, abort.

        if (self.n_fc <= 0):
            self.mesg_txt('none')
            return None

        # Identify the subset of spectra with timestamps between "tmin"
        # and "tmax".

        if (tmin is not None):
            con_tmin = (self.fc_time_epc >= calc_time_epc(tmin))
        else:
            con_tmin = tile(True, self.n_fc)

        if (tmax is not None):
            con_tmax = (self.fc_time_epc <= calc_time_epc(tmax))
        else:
            con_tmax = tile(True, self.n_fc)

        tk_con = where(con_tmin & con_tmax)[0]

        # If no spectra had timestamps in the specified range, abort.

        if (len(tk_con) <= 0):
            self.mesg_txt('none')
            return None

        # Compute the time difference between the timestamps within the
        # "tm??" range and the requested time.  Identify the index of
        # the smallest absolute value in this array and the index of the
        # corresponding spectrum.

        dt = array([(epc - time_req_epc).total_seconds()
                    for epc in self.fc_time_epc[tk_con]])

        dt_abs = abs(dt)

        dt_abs_min = amin(dt_abs)

        tk_dt = where(dt_abs == dt_abs_min)[0][0]

        tk_req = tk_con[tk_dt]

        # Set the spectrum with index "tk_req" to be returned.  If the
        # (chronologically) next or previous spectrum has been
        # requested, find it and set it to be returned instead.

        tk = tk_req

        if ((get_prev) and (not get_next)):

            tk_sub = where(dt < dt[tk_dt])[0]

            if (len(tk_sub) <= 0):
                self.mesg_txt('none')
                return None

            tk_dt_prev = where(dt == amax(dt[tk_sub]))[0][0]

            tk = tk_con[tk_dt_prev]

        if ((get_next) and (not get_prev)):

            tk_sub = where(dt > dt[tk_dt])[0]

            if (len(tk_sub) <= 0):
                self.mesg_txt('none')
                return None

            tk_dt_next = where(dt == amin(dt[tk_sub]))[0][0]

            tk = tk_con[tk_dt_next]

        # If the selected spectrum is not within the requested
        # tolerance, abort.

        if (abs(
            (self.fc_time_epc[tk] - time_req_epc).total_seconds()) > self.tol):
            self.mesg_txt('none')
            return None

        # Extract the spectrum to be returned.

        ret_time_epc = self.fc_time_epc[tk]
        ret_cup1_azm = self.fc_cup1_azm[tk]
        ret_cup2_azm = self.fc_cup2_azm[tk]
        ret_cup1_c_vol = self.fc_cup1_c_vol[tk]
        ret_cup2_c_vol = self.fc_cup2_c_vol[tk]
        ret_cup1_d_vol = self.fc_cup1_d_vol[tk]
        ret_cup2_d_vol = self.fc_cup2_d_vol[tk]
        ret_cup1_cur = self.fc_cup1_cur[tk]
        ret_cup2_cur = self.fc_cup2_cur[tk]

        # Request a cleanup of the data loaded into this archive.

        self.cleanup_date()

        # Return the selected spectrum to the user.

        return (ret_time_epc, ret_cup1_azm, ret_cup2_azm, ret_cup1_c_vol,
                ret_cup2_c_vol, ret_cup1_d_vol, ret_cup2_d_vol, ret_cup1_cur,
                ret_cup2_cur)
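
The selection logic above, reduced to its core: from the signed time differences, take the closest spectrum, and step to the chronologically previous or next one by looking for the largest smaller (or smallest larger) difference.  A short sketch with hypothetical offsets:

from numpy import abs, amax, amin, array, where

dt = array([-210., -95., -4., 88., 171.])   # seconds relative to the request

tk_dt = where(abs(dt) == amin(abs(dt)))[0][0]                    # closest: 2

tk_prev = where(dt == amax(dt[where(dt < dt[tk_dt])[0]]))[0][0]  # previous: 1
tk_next = where(dt == amin(dt[where(dt > dt[tk_dt])[0]]))[0][0]  # next: 3

print(tk_dt, tk_prev, tk_next)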
Code Example #9
	def load_spec( self, time, dur, fc_bins ) :	

		# Convert/standardize the requested time.

		time_req_str = calc_time_str( time )
		time_req_val = calc_time_val( time )
		time_req_epc = calc_time_epc( time )

		# Extract requested date (as a string) and the requested time
		# (as a float indicating seconds since midnight).  Likewise,
		# determine the date of the previous day and the date of the
		# next day.

		date_req_str = time_req_str[0:10]
		scnd_req_val = time_req_val - calc_time_val( date_req_str )

		date_pre_str = ( calc_time_str( time_req_val - 86400. ) )[0:10]
		date_nex_str = ( calc_time_str( time_req_val + 86400. ) )[0:10]

		# Load all the spectra from the requested date.  If the
		# requested time is within "self.buf" seconds of either the
		# previous or next day, load all the spectra from that date as
		# well.

		# Note.  There is no need to check here whether a date has
		#        already been loaded as that's the first thing that
		#        "self.load_date( )" does.

		self.load_date( date_req_str )		

		if ( scnd_req_val <= self.buf ) :
			self.load_date( date_pre_str )

		if ( ( 86400. - scnd_req_val ) <= self.buf ) :
			self.load_date( date_nex_str )

		# If no spectra have been loaded, abort.

		if ( len( self.arr_tag ) == 0 ) :
			self.mesg_txt( 'none' )
			return []

		# Locate the spectrum whose timestamp is closest to the
		# one requested.

		dt  = [ datetime(1970, 1, 1) + timedelta( seconds=tag.epoch ) -
		        self.core.fc_spec['time'] for tag in self.arr_tag      ]

		adt = [ abs( del_t ) for del_t in dt ]

		adt_min = min( adt )

		dt_min = dt[ where( [ del_t == adt_min
		                      for del_t in adt ] )[0][0] ]

		tk = [ a for a in range( len( adt ) ) if adt[a] == adt_min ][0]

#		if ( get_prev ) :
#			tk -= 1
#		if ( get_next ) :
#			tk +=1

		if( ( tk <  0                   ) or
		    ( tk >= len( self.arr_tag ) )    ) :
			self.mesg_txt( 'none' )
			return []

		# Determine how many more PESA-L spectra exist within the 
		# duration of the FC spectrum

		num_spec = len( where( [( del_t >= timedelta(seconds=-1.*dur/fc_bins) and
		                          del_t <= timedelta(seconds=dur) )
		                         for del_t in dt ] )[0] )

		# If the selected spectrum is not within the requested
		# tolerance (e.g., in the case of a long data gap), abort.

		if ( ( adt[tk] ).total_seconds() > self.tol ) :
			self.mesg_txt( 'none' )
			return []

		# Get the PL spectra that lie within this time

		spec = []

		if num_spec == 1 :
			plur = 'spectrum'
		else :
			plur = 'spectra'

		self.mesg_txt( 'load', (str(num_spec) + ' ' + plur + ' found') )

		for n in range( num_spec ) :

			# Extract the spectrum to be returned.

			cdf = self.arr_cdf[self.arr_tag[tk+n].c]
			s   = self.arr_tag[ tk+n ].s

			# Assign all retrieved data to parameter values.

			t_strt   = cdf['sec_beg'][s]

			t_stop   = cdf['sec_end'][s]

			elev_cen = cdf['the'][s]

			the_del  = cdf['d_the'][s]

			azim_cen = cdf['phi'][s]

			phi_del  = cdf['d_phi'][s]

			volt_cen = cdf['nrg'][s]

			volt_del = cdf['d_nrg'][s]

			psd      = cdf['psd'][s]

			spec = spec + [ pl_spec( t_strt=t_strt, t_stop=t_stop,
			                   elev_cen=elev_cen, the_del=the_del,
			                   azim_cen=azim_cen, phi_del=phi_del,
			                   volt_cen=volt_cen, volt_del=volt_del,
			                   psd=psd                           ) ]

		# Request a cleanup of the data loaded into this archive.

		self.cleanup_date( )	

		return spec
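
The window count above ("num_spec") keeps every spectrum whose offset lies within [-dur/fc_bins, dur] of the reference time.  A minimal sketch with hypothetical offsets and FC parameters:

from datetime import timedelta
from numpy import where

dt = [timedelta(seconds=s) for s in (-20., -1.5, 0.8, 30., 200.)]
dur = 92.       # hypothetical FC spectrum duration, s
fc_bins = 31    # hypothetical number of FC voltage bins

num_spec = len(where([(d >= timedelta(seconds=-1. * dur / fc_bins)) and
                      (d <= timedelta(seconds=dur))
                      for d in dt])[0])

print(num_spec)   # -> 3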