Exemple #1
0
    def main(self):
        """Process observations from the input data block for this stream.

        Waits for the input SMRB to be created, then loops: read the next
        observation header, build fold/search/transient processing commands
        from the header and stream configuration, and run the fold command
        in a thread whose output is relayed to the server log socket.
        """

        stream_id = self.id

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        db_id_in = self.cfg["PROCESSING_DATA_BLOCK"]
        db_id_out = self.cfg["SEND_DATA_BLOCK"]
        num_stream = self.cfg["NUM_STREAM"]
        cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

        db_key_in = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        db_id_in)
        db_key_out = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                         db_id_out)

        self.log(0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

        # create dspsr input file describing the input data block
        db_key_filename = "/tmp/spip_" + db_key_in + ".info"
        with open(db_key_filename, "w") as db_key_file:
            db_key_file.write("DADA INFO:\n")
            db_key_file.write("key " + db_key_in + "\n")

        gpu_id = self.cfg["GPU_ID_" + str(self.id)]
        prev_utc_start = ""

        (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

        # wait up to 10s for the SMRB to be created
        smrb_wait = 10
        cmd = "dada_dbmetric -k " + db_key_in
        self.binary_list.append(cmd)

        rval = 1
        while rval and smrb_wait > 0 and not self.quit_event.isSet():
            rval, lines = self.system(cmd)
            if rval:
                time.sleep(1)
            smrb_wait -= 1

        if rval:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                db_key_in)
            self.quit_event.set()

        else:

            while not self.quit_event.isSet():

                # block until the next observation header is available
                cmd = "dada_header -k " + db_key_in
                self.log(0, cmd)
                self.binary_list.append(cmd)
                rval, lines = self.system(cmd)
                self.binary_list.remove(cmd)

                if rval != 0:
                    # dada_header fails when the data block is destroyed at
                    # shutdown; only an error when no shutdown was requested
                    if self.quit_event.isSet():
                        self.log(2, cmd + " failed, but quit_event true")
                    else:
                        self.log(-2, cmd + " failed")
                        self.quit_event.set()

                elif len(lines) == 0:

                    self.log(-2, "header was empty")
                    self.quit_event.set()

                else:

                    header = Config.parseHeader(lines)

                    utc_start = header["UTC_START"]
                    self.log(1, "UTC_START=" + header["UTC_START"])
                    self.log(1, "RESOLUTION=" + header["RESOLUTION"])

                    # default processing commands simply flush the data blocks
                    fold_cmd = "dada_dbnull -s -k " + db_key_in
                    trans_cmd = "dada_dbnull -s -k " + db_key_out
                    search_cmd = "dada_dbnull -s -k " + db_key_in

                    # default working directory for the fold thread; replaced
                    # below for a valid observation (bug fix: fold_dir was
                    # unbound when an observation was ignored as repeated)
                    fold_dir = "/tmp"

                    if prev_utc_start == utc_start:
                        self.log(
                            -2, "UTC_START [" + utc_start +
                            "] repeated, ignoring observation")

                    else:
                        beam = self.cfg["BEAM_" + str(self.beam_id)]

                        # sanity check header against stream configuration
                        if float(bw) != float(header["BW"]):
                            self.log(
                                -1, "configured bandwidth [" + bw +
                                "] != header[" + header["BW"] + "]")
                        if float(cfreq) != float(header["FREQ"]):
                            self.log(
                                -1, "configured cfreq [" + cfreq +
                                "] != header[" + header["FREQ"] + "]")
                        if int(nchan) != int(header["NCHAN"]):
                            self.log(
                                -2, "configured nchan [" + nchan +
                                "] != header[" + header["NCHAN"] + "]")

                        source = header["SOURCE"]

                        # output directories
                        suffix = "/processing/" + beam + "/" + utc_start + \
                            "/" + source + "/" + cfreq
                        fold_dir = self.cfg["CLIENT_FOLD_DIR"] + suffix
                        trans_dir = self.cfg["CLIENT_TRANS_DIR"] + suffix
                        search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix

                        # processing modes from the header; default to fold
                        # only when the header does not specify them
                        try:
                            fold = (header["PERFORM_FOLD"] == "1")
                            search = (header["PERFORM_SEARCH"] == "1")
                            trans = (header["PERFORM_TRANS"] == "1")
                        except KeyError:
                            fold = True
                            search = False
                            trans = False

                        if fold:
                            os.makedirs(fold_dir, 0o755)
                            fold_cmd = "dspsr -Q " + db_key_filename + \
                                " -cuda " + gpu_id + \
                                " -minram 2048 -x 1024 -b 1024" + \
                                " -L 8 -Lmin 7 -no_dyn"

                            header_file = fold_dir + "/obs.header"
                            Config.writeDictToCFGFile(header, header_file)

                        if search or trans:
                            os.makedirs(search_dir, 0o755)
                            # bug fix: removed stray space so the output file
                            # is a single <utc_start>.fil argument
                            search_cmd = "digifil " + db_key_filename + \
                                " -c -B 10 -o " + utc_start + ".fil"
                            if trans:
                                search_cmd += " -k " + db_key_out

                        # bug fix: compare as integers; the original compared
                        # int(...) == "1" which is always False
                        if trans and int(self.cfg["NUM_SUBBAND"]) == 1:
                            os.makedirs(trans_dir, 0o755)
                            trans_cmd = "heimdall -k " + db_key_out + \
                                " -gpu_id 1"

                    # remember this observation so a re-read of the same
                    # header is ignored (bug fix: was never updated)
                    prev_utc_start = utc_start

                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])

                    # output pipe relaying the fold command's output to the
                    # server log
                    fold_log_pipe = LogSocket("fold_src", "fold_src",
                                              str(self.id), "stream", log_host,
                                              log_port, int(DL))
                    fold_log_pipe.connect()

                    self.binary_list.append(fold_cmd)

                    # create the processing thread, pinned to the configured
                    # CPU core via numactl
                    self.log(2, "creating processing threads")
                    cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
                    fold_thread = procThread(cmd, fold_dir, fold_log_pipe.sock,
                                             1)

                    # start processing threads
                    self.log(2, "starting processing threads")
                    self.log(1, "START " + fold_cmd)
                    fold_thread.start()

                    # wait for the observation to finish
                    self.log(2, "waiting for fold thread to terminate")
                    rval = fold_thread.join()
                    self.log(2, "fold thread joined")
                    self.log(1, "END   " + fold_cmd)

                    # remove the binary command from the kill list
                    self.binary_list.remove(fold_cmd)

                    if rval:
                        self.log(-2, "fold thread failed")
                        # bug fix: was the undefined global name quit_event
                        self.quit_event.set()

                    fold_log_pipe.close()

                self.log(1, "processing completed")
Exemple #2
0
    def main(self):
        """Top-level processing loop for a UWB stream daemon.

        Waits for the SMRB to exist, then repeatedly: reads an observation
        header, lets the child class prepare a processing command, and runs
        that command in a thread with output relayed to the server log
        socket.
        """

        self.log(2, "UWBProcDaemon::main configure_child()")
        self.configure_child()

        self.log(2, "UWBProcDaemon::main wait_for_smrb()")
        SMRBDaemon.waitForSMRB(self.db_key, self)

        if self.quit_event.isSet():
            self.log(
                -1,
                "UWBProcDaemon::main quit event was set after waiting for SMRB creation"
            )
            return

        # continuously run the main command waiting on the SMRB
        while not self.quit_event.isSet():

            # wait for the header to determine if folding is required
            cmd = "dada_header -k " + self.db_key + " -t " + self.tag
            self.log(2, "UWBProcDaemon::main " + cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0:
                # dada_header fails when the data block is destroyed at
                # shutdown; only an error when no shutdown was requested
                time.sleep(0.1)
                if self.quit_event.isSet():
                    self.log(
                        2, "UWBProcDaemon::main " + cmd +
                        " failed, but quit_event true")
                else:
                    self.log(-2, "UWBProcDaemon::main " + cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:

                self.log(-2, "UWBProcDaemon::main header was empty")
                self.quit_event.set()

            else:

                self.log(2, "UWBProcDaemon::main parsing header")
                self.header = Config.parseHeader(lines)

                # account for lower to upper sideband conversion
                if not abs(float(self.bw)) == float(self.header["BW"]):
                    self.log(
                        -1, "configured bandwidth [" + self.bw +
                        "] != self.header[" + self.header["BW"] + "]")
                if not float(self.cfreq) == float(self.header["FREQ"]):
                    self.log(
                        -1, "configured cfreq [" + self.cfreq +
                        "] != self.header[" + self.header["FREQ"] + "]")
                if not int(self.nchan) == int(self.header["NCHAN"]):
                    self.log(
                        -2, "configured nchan [" + self.nchan +
                        "] != self.header[" + self.header["NCHAN"] + "]")

                self.source = self.header["SOURCE"]
                self.utc_start = self.header["UTC_START"]

                # call the child class prepare method to build self.cmd
                self.log(2, "UWBProcDaemon::main prepare()")
                valid = self.prepare()

                if valid:

                    # ensure the output directory exists
                    self.log(
                        2, "UWBProcDaemon::main creating out_dir: " +
                        self.out_dir)
                    if not os.path.exists(self.out_dir):
                        os.makedirs(self.out_dir, 0o755)

                    # write the sub-bands header to the out_dir
                    header_file = self.out_dir + "/obs.header"
                    self.log(
                        2, "UWBProcDaemon::main writing obs.header to out_dir")
                    Config.writeDictToCFGFile(self.header, header_file)

                    # configure the output log pipe
                    self.log(
                        2, "UWBProcDaemon::main configuring output log pipe")
                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])
                    log_pipe = LogSocket(self.log_prefix, self.log_prefix,
                                         str(self.id), "stream", log_host,
                                         log_port, int(DL))
                    log_pipe.connect()

                    # get any modifications to the environment
                    env = self.getEnvironment()

                    # add the binary command to the kill list
                    self.binary_list.append(self.cmd)

                    # create the processing thread, pinned to the configured
                    # CPU core via numactl
                    self.log(
                        2, "UWBProcDaemon::main creating processing threads")
                    cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
                    proc_thread = UWBProcThread(self, cmd, log_pipe.sock, env,
                                                1)

                    # start processing threads
                    self.log(2,
                             "UWBProcDaemon::main starting processing thread")
                    proc_thread.start()

                    self.log(1, "START " + self.cmd)

                    # join processing threads
                    self.log(
                        2,
                        "UWBProcDaemon::main waiting for proc thread to terminate"
                    )
                    rval = proc_thread.join()
                    self.log(2, "UWBProcDaemon::main proc thread joined")

                    self.log(1, "END   " + self.cmd)

                    # remove the binary command from the list
                    self.binary_list.remove(self.cmd)

                    if rval:
                        self.log(-2, "UWBProcDaemon::main proc thread failed")
                        # bug fix: was the undefined global name quit_event
                        self.quit_event.set()

                    log_pipe.close()

                    # good practise in case the proc thread always fails
                    time.sleep(1)

                else:

                    # bug fix: message previously named the wrong class
                    self.log(2, "UWBProcDaemon::main skip this processing")
                    time.sleep(10)

            self.log(2, "UWBProcDaemon::main processing loop completed")
Exemple #3
0
    def main(self):
        """Main loop for the transpose daemon.

        Waits for the input SMRB, then repeatedly reads an observation
        header and runs the transpose command in a processing thread,
        relaying its output to the server log socket.
        """

        self.log(2, "MeerKATXposeDaemon::main configure_child()")
        self.configure_child()

        self.log(2, "MeerKATXposeDaemon::main wait_for_smrb()")
        self.wait_for_smrb()

        if self.quit_event.isSet():
            self.log(
                -1,
                "MeerKATXposeDaemon::main quit event was set after waiting for SMRB creation"
            )
            return

        self.prepare()

        # continuously run the main command waiting on the SMRB
        while not self.quit_event.isSet():

            tag = "meerkat_xpose_" + self.stream_id

            # block until the next observation header is available
            cmd = "dada_header -t " + tag + " -k " + self.db_key_in1
            self.log(1, cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd)
            self.binary_list.remove(cmd)

            if rval != 0:
                # dada_header fails when the data block is destroyed at
                # shutdown; only an error when no shutdown was requested
                if self.quit_event.isSet():
                    self.log(
                        2, "MeerKATXposeDaemon::main " + cmd +
                        " failed, but quit_event true")
                else:
                    self.log(-2, "MeerKATXposeDaemon::main " + cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:

                self.log(-2, "MeerKATXposeDaemon::main header was empty")
                self.quit_event.set()

            else:

                self.log(2, "MeerKATXposeDaemon::main parsing header")
                self.header = Config.parseHeader(lines)

                # configure the output log pipe
                self.log(
                    2,
                    "MeerKATXposeDaemon::main configuring output log pipe prefix="
                    + self.log_prefix)
                log_host = self.cfg["SERVER_HOST"]
                log_port = int(self.cfg["SERVER_LOG_PORT"])
                log_pipe = LogSocket(self.log_prefix, self.log_prefix,
                                     str(self.id), "stream", log_host,
                                     log_port, int(DL))
                log_pipe.connect()

                # add the binary command to the kill list
                self.binary_list.append(self.cmd)

                # create the processing thread
                self.log(
                    1, "MeerKATXposeDaemon::main creating processing thread")
                proc_thread = MeerKATXposeThread(self, self.cmd, log_pipe.sock,
                                                 1)

                # start the processing thread
                self.log(
                    1, "MeerKATXposeDaemon::main starting processing thread")
                proc_thread.start()

                # wait for the observation to finish
                self.log(
                    2,
                    "MeerKATXposeDaemon::main waiting for proc thread to terminate"
                )
                rval = proc_thread.join()
                self.log(2, "MeerKATXposeDaemon::main proc thread joined")

                # remove the binary command from the kill list
                self.binary_list.remove(self.cmd)

                if rval:
                    self.log(-2, "MeerKATXposeDaemon::main proc thread failed")
                    # bug fix: was the undefined global name quit_event
                    self.quit_event.set()

                log_pipe.close()

            self.log(1, "MeerKATXposeDaemon::main processing completed")
Exemple #4
0
  def main (self):
    """Top-level processing loop for a UWB stream daemon.

    Waits for the SMRB to exist, then repeatedly: reads an observation
    header, lets the child class prepare a processing command, and runs
    that command in a thread with output relayed to the server log socket.
    """

    self.log (2, "UWBProcDaemon::main configure_child()")
    self.configure_child()

    self.log (2, "UWBProcDaemon::main wait_for_smrb()")
    SMRBDaemon.waitForSMRB(self.db_key, self)

    if self.quit_event.isSet():
      self.log (-1, "UWBProcDaemon::main quit event was set after waiting for SMRB creation")
      return

    # continuously run the main command waiting on the SMRB
    while (not self.quit_event.isSet()):

      # wait for the header to determine if folding is required
      cmd = "dada_header -k " + self.db_key + " -t " + self.tag
      self.log(2, "UWBProcDaemon::main " + cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd, 2, True)
      self.binary_list.remove (cmd)

      if rval != 0:
        # dada_header fails when the data block is destroyed at shutdown;
        # only an error when no shutdown was requested
        time.sleep(0.1)
        if self.quit_event.isSet():
          self.log (2, "UWBProcDaemon::main " + cmd + " failed, but quit_event true")
        else:
          self.log (-2, "UWBProcDaemon::main " + cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:

        self.log (-2, "UWBProcDaemon::main header was empty")
        self.quit_event.set()

      else:

        self.log (2, "UWBProcDaemon::main parsing header")
        self.header = Config.parseHeader (lines)

        # account for lower to upper sideband conversion
        if not abs(float(self.bw)) == float(self.header["BW"]):
          self.log (-1, "configured bandwidth ["+self.bw+"] != self.header["+self.header["BW"]+"]")
        if not float(self.cfreq) == float(self.header["FREQ"]):
          self.log (-1, "configured cfreq ["+self.cfreq+"] != self.header["+self.header["FREQ"]+"]")
        if not int(self.nchan) == int(self.header["NCHAN"]):
          self.log (-2, "configured nchan ["+self.nchan+"] != self.header["+self.header["NCHAN"]+"]")

        self.source = self.header["SOURCE"]
        self.utc_start = self.header["UTC_START"]

        # call the child class prepare method to build self.cmd
        self.log (2, "UWBProcDaemon::main prepare()")
        valid = self.prepare()

        if valid:

          # ensure the output directory exists
          self.log (2, "UWBProcDaemon::main creating out_dir: " + self.out_dir)
          if not os.path.exists (self.out_dir):
            os.makedirs (self.out_dir, 0o755)

          # write the sub-bands header to the out_dir
          header_file = self.out_dir + "/obs.header"
          self.log (2, "UWBProcDaemon::main writing obs.header to out_dir")
          Config.writeDictToCFGFile (self.header, header_file)

          # configure the output log pipe
          self.log (2, "UWBProcDaemon::main configuring output log pipe")
          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])
          log_pipe = LogSocket (self.log_prefix, self.log_prefix,
                                str(self.id), "stream",
                                log_host, log_port, int(DL))
          log_pipe.connect()

          # get any modifications to the environment
          env = self.getEnvironment()

          # add the binary command to the kill list
          self.binary_list.append (self.cmd)

          # create the processing thread, pinned to the configured CPU core
          self.log (2, "UWBProcDaemon::main creating processing threads")
          cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
          proc_thread = UWBProcThread (self, cmd, log_pipe.sock, env, 1)

          # start processing threads
          self.log (2, "UWBProcDaemon::main starting processing thread")
          proc_thread.start()

          self.log (1, "START " + self.cmd)

          # join processing threads
          self.log (2, "UWBProcDaemon::main waiting for proc thread to terminate")
          rval = proc_thread.join()
          self.log (2, "UWBProcDaemon::main proc thread joined")

          self.log (1, "END   " + self.cmd)

          # remove the binary command from the list
          self.binary_list.remove (self.cmd)

          if rval:
            self.log (-2, "UWBProcDaemon::main proc thread failed")
            # bug fix: was the undefined global name quit_event
            self.quit_event.set()

          log_pipe.close()

          # good practise in case the proc thread always fails
          time.sleep(1)

        else:

          # bug fix: message previously named the wrong class
          self.log (2, "UWBProcDaemon::main skip this processing")
          time.sleep(10)

      self.log (2, "UWBProcDaemon::main processing loop completed")
Exemple #5
0
  def main (self):
    """Generate monitoring statistics and plots for a stream.

    Creates the configured plot generators, waits for the stats SMRB,
    then loops: read an observation header, run the stats binary in a
    thread and periodically render plots from the files it writes into
    the stats directory.
    """

    if self.gen_histogram:
      self.hg_plot = HistogramPlot()
      self.valid_plots.append("histogram")

    if self.gen_bandpass:
      self.bp_plot = BandpassPlot()
      self.valid_plots.append("bandpass")

    if self.gen_timeseries:
      self.ts_plot = TimeseriesPlot()
      self.valid_plots.append("timeseries")

    if self.gen_freqtime:
      self.ft_plot = FreqTimePlot()
      self.valid_plots.append("freqtime")

    # stats files are stored in flat directory structure
    # stats_dir / beam / cfreq
    if not os.path.exists(self.processing_dir):
      os.makedirs(self.processing_dir, 0o755)

    # get the data block keys
    db_prefix  = self.cfg["DATA_BLOCK_PREFIX"]
    db_id      = self.cfg["RECEIVING_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    stream_id  = str(self.id)
    self.debug("stream_id=" + str(self.id))
    self.db_key = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id)
    self.debug("db_key=" + self.db_key)

    # directories for live and archived stat files
    self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
    self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

    if not os.path.exists(self.stat_dir):
      os.makedirs(self.stat_dir, 0o755)
    if not os.path.exists(self.archived_dir):
      os.makedirs(self.archived_dir, 0o755)

    # configure the histogram plot with all channels included
    # (bug fix: hg_plot only exists when gen_histogram is enabled)
    if self.gen_histogram:
      self.hg_plot.configure (-1, self.histogram_abs_xmax)

    log = False
    zap = False
    transpose = False
    # configure the freq v time plot
    if self.gen_freqtime:
      self.ft_plot.configure (log, zap, transpose)

    # configure the bandpass plot (log scale)
    log = True
    if self.gen_bandpass:
      self.bp_plot.configure (log, zap, transpose)

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    # stat will use the stream config file created for the recv command
    self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
    while (not os.path.exists(self.stream_config_file)):
      self.debug("waiting for stream_config file [" + self.stream_config_file +"] to be created by recv")
      time.sleep(1)

    self.debug("wait_for_smrb()")
    smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    # the stat command does not change from observation to observation
    stat_cmd = self.build_cmd()

    while (not self.quit_event.isSet()):

      process_stats = True

      # wait for the header to determine when dbstats should run
      cmd = "dada_header -k " + self.db_key + " -t stat"
      self.info(cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd)
      self.binary_list.remove (cmd)

      # if the command returned ok and we have a header
      if rval != 0:
        if self.quit_event.isSet():
          self.debug(cmd + " failed, but quit_event true")
        else:
          self.error(cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        self.error("header was empty")
        self.quit_event.set()

      else:
        self.debug("parsing header")
        self.header = Config.parseHeader (lines)

        # skip stats processing when the input is zeroed
        # (bug fix: narrowed a bare except to KeyError)
        try:
          if self.header["ZERO_INPUT"] == "1":
            process_stats = False
        except KeyError:
          self.debug("ZERO_INPUT did not exist in header")

      if self.quit_event.isSet():
        self.debug("quit event set, exiting loop")
        continue

      if not process_stats:
        self.debug("not analyzing stats due to ZERO_INPUT")
        time.sleep(5)
        continue

      # create a log pipe for the stats command
      stat_log_pipe   = LogSocket ("stat_src", "stat_src", str(self.id), "stream",
                                   log_host, log_port, int(DL))

      # connect up the log file output
      stat_log_pipe.connect()

      # add this binary to the list of active commands
      kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
      self.info("kill_cmd=" + kill_cmd)
      self.binary_list.append (kill_cmd)

      self.log (1, "START " + stat_cmd)

      # initialize the stats thread
      stat_thread = dbstatsThread (stat_cmd, self.stat_dir, stat_log_pipe.sock, 2)

      self.debug("cmd=" + stat_cmd)

      self.debug("starting stat thread")
      stat_thread.start()
      self.debug("stat thread started")

      pref_freq = 0

      # while the stats binary runs, render plots from the files it writes
      while stat_thread.is_alive() and not self.quit_event.isSet():

        # get a list of all the files in stat_dir
        files = os.listdir (self.stat_dir)

        self.debug("found " + str(len(files)) + " in " + self.stat_dir)

        # if stat files exist in the directory
        if len(files) > 0:
          if self.gen_histogram:
            self.process_hg (pref_freq)
          if self.gen_bandpass:
            self.process_bp (pref_freq)
          if self.gen_freqtime:
            self.process_ft (pref_freq)
          if self.gen_timeseries:
            self.process_ts ()
          self.process_ms ()

          # publish the latest results under the shared lock
          self.results["lock"].acquire()

          pref_freq = self.pref_freq
          self.results["timestamp"] = times.getCurrentTime()
          self.results["valid"] = self.ms_valid
          if self.gen_histogram:
            self.results["valid"] |= self.hg_valid
          if self.gen_timeseries:
            self.results["valid"] |= self.ts_valid
          if self.gen_freqtime:
            self.results["valid"] |= self.ft_valid
          if self.gen_bandpass:
            self.results["valid"] |= self.bp_valid

          self.results["lock"].release()

        time.sleep(5)

      self.debug("joining stat thread")
      rval = stat_thread.join()
      self.debug("stat thread joined")

      self.log (1, "END   " + stat_cmd)

      if rval:
        self.log (-2, "stat thread failed")
        self.quit_event.set()
Exemple #6
0
    def main(self):

        if not os.path.exists(self.proc_dir):
            os.makedirs(self.proc_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))

        # 4 data blocks
        in_id = self.cfg["RECEIVING_DATA_BLOCK"]
        trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]

        # 4 data block keys
        in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
        trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        trans_id)
        out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        self.debug("SMRBDaemon.waitForSMRB()")
        smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

        if not smrb_exists:
            self.error("smrb["+str(self.id)+"] no valid SMRB with " +
                       "key=" + self.db_key)
            self.quit_event.set()
            return

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

        # this stat command will not change from observation to observation
        preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
            trans_key + " " + out_key + " -d " + \
            self.cfg["GPU_ID_" + stream_id]

        tag = "preproc" + stream_id

        # enter the main loop
        while (not self.quit_event.isSet()):

            # wait for the header to acquire the processing parameters
            cmd = "dada_header -k " + in_key + " -t " + tag
            self.debug(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0 or self.quit_event.isSet():
                return

            self.debug("parsing header")
            header = Config.parseHeader(lines)

            cmd = preproc_cmd

            utc_start = header["UTC_START"]
            source = header["SOURCE"]
            freq = header["FREQ"]

            # directory in which to run preprocessor
            proc_dir = self.proc_dir + "/" + utc_start + "/" + source + "/" + \
                freq

            if not os.path.exists(proc_dir):
                os.makedirs(proc_dir, 0755)

            # write the header to the proc_dir
            header_file = proc_dir + "/obs.header"
            self.debug("writing obs.header to out_dir")
            Config.writeDictToCFGFile(header, header_file)

            run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

            # presense of RFI reference is based on NPOL == 3
            have_rfi_reference_pol = (int(header["NPOL"]) == 3)

            # presence of a calibration signal
            run_calibration = (header["CAL_SIGNAL"] == "1")

            # run the transients processor
            # run_transients = (header["TRANSIENTS"] == "1")
            run_transients = False

            # RFI reference pol is assumed to be last pol
            if have_rfi_reference_pol:
                rfi_reference_pol = int(header["NPOL"]) - 1
                self.info("Header NPOL=" + str(int(header["NPOL"])) +
                          " RFI reference signal present in pol " +
                          str(rfi_reference_pol))
                cmd = cmd + " -r " + str(rfi_reference_pol)

            if run_adaptive_filter:
                self.info("Adaptive filter active")
                cmd = cmd + " -a "

            if run_calibration:
                self.info("Calibration active")
                try:
                    avg_time = header["TSYS_AVG_TIME"]
                except KeyError:
                    avg_time = "10"
                try:
                    freq_res = header["TSYS_FREQ_RES"]
                except KeyError:
                    freq_res = "1"
                cmd = cmd + " -c " + avg_time + " -e " + freq_res

            if run_transients:
                self.info("Transients active")
                cmd = cmd + " -f " + header["TRANS_TSAMP"]

            # AJ todo check the channelisation limits with Nuer
            if run_adaptive_filter or run_calibration or run_transients:
                cmd = cmd + " -n 1024"

            # create a log pipe for the stats command
            log_pipe = LogSocket("preproc_src", "preproc_src",
                                 str(self.id), "stream", log_host,
                                 log_port, int(DL))

            # connect up the log file output
            log_pipe.connect()

            # add this binary to the list of active commands
            self.binary_list.append("uwb_preprocessing_pipeline " + in_key)

            self.info("START " + cmd)

            # initialize the threads
            preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

            self.debug("starting preproc thread")
            preproc_thread.start()
            self.debug("preproc thread started")

            self.debug("joining preproc thread")
            rval = preproc_thread.join()
            self.debug("preproc thread joined")

            self.info("END     " + cmd)

            if rval:
                self.error("preproc thread failed")
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
                self.quit_event.set()
            else:
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
Exemple #7
0
    def main(self):
        """Main loop of the stream statistics daemon.

        Instantiates the enabled plot types, waits for the receiving
        SMRB and the stream config file to exist, then repeatedly:
        reads an observation header, runs the stats binary in a thread
        and converts the stat files it produces into plots/results,
        until the quit event is set.
        """

        # instantiate only the plotters that are enabled for this daemon
        if self.gen_histogram:
            self.hg_plot = HistogramPlot()
            self.valid_plots.append("histogram")

        if self.gen_bandpass:
            self.bp_plot = BandpassPlot()
            self.valid_plots.append("bandpass")

        if self.gen_timeseries:
            self.ts_plot = TimeseriesPlot()
            self.valid_plots.append("timeseries")

        if self.gen_freqtime:
            self.ft_plot = FreqTimePlot()
            self.valid_plots.append("freqtime")

        # stats files are stored in flat directory structure
        # stats_dir / beam / cfreq
        if not os.path.exists(self.processing_dir):
            os.makedirs(self.processing_dir, 0o755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        db_id = self.cfg["RECEIVING_DATA_BLOCK"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))
        self.db_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                          db_id)
        self.debug("db_key=" + self.db_key)

        # directories for live and archived stat files
        self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
        self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

        if not os.path.exists(self.stat_dir):
            os.makedirs(self.stat_dir, 0o755)
        if not os.path.exists(self.archived_dir):
            os.makedirs(self.archived_dir, 0o755)

        # configure the histogram plot with all channels included
        # (guarded: self.hg_plot only exists when gen_histogram is set)
        if self.gen_histogram:
            self.hg_plot.configure(-1, self.histogram_abs_xmax)

        log = False
        zap = False
        transpose = False
        # configure the freq v time plot
        if self.gen_freqtime:
            self.ft_plot.configure(log, zap, transpose)

        # configure the bandpass plot (log scale)
        log = True
        if self.gen_bandpass:
            self.bp_plot.configure(log, zap, transpose)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        # stat will use the stream config file created for the recv command
        self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
        while (not os.path.exists(self.stream_config_file)):
            self.debug("waiting for stream_config file [" +
                       self.stream_config_file + "] to be created by recv")
            time.sleep(1)

        self.debug("wait_for_smrb()")
        smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

        if not smrb_exists:
            self.error("smrb[" + str(self.id) + "] no valid SMRB with " +
                       "key=" + self.db_key)
            self.quit_event.set()
            return

        # the stat command does not change from observation to observation
        stat_cmd = self.build_cmd()

        while (not self.quit_event.isSet()):

            process_stats = True

            # wait for the header to determine when dbstats should run
            cmd = "dada_header -k " + self.db_key + " -t stat"
            self.info(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd)
            self.binary_list.remove(cmd)

            # if the command returned ok and we have a header
            if rval != 0:
                if self.quit_event.isSet():
                    self.debug(cmd + " failed, but quit_event true")
                else:
                    self.error(cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:
                self.error("header was empty")
                self.quit_event.set()

            else:
                self.debug("parsing header")
                self.header = Config.parseHeader(lines)

                # zero-copy observations bypass this data block, so there
                # is nothing for the stats binary to read
                try:
                    if self.header["ZERO_COPY"] == "1":
                        process_stats = False
                except KeyError:
                    self.debug("ZERO_COPY did not exist in header")

            if self.quit_event.isSet():
                self.debug("quit event set, exiting loop")
                continue

            if not process_stats:
                self.debug("not analyzing stats due to ZERO_COPY")
                time.sleep(5)
                continue

            # create a log pipe for the stats command
            stat_log_pipe = LogSocket("stat_src", "stat_src", str(self.id),
                                      "stream", log_host, log_port, int(DL))

            # connect up the log file output
            stat_log_pipe.connect()

            # add this binary to the list of active commands so that it
            # can be killed if the daemon is asked to quit
            kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
            self.info("kill_cmd=" + kill_cmd)
            self.binary_list.append(kill_cmd)

            self.info("START " + stat_cmd)

            # run the stats binary in a thread so that plots can be
            # generated concurrently as stat files appear
            stat_thread = dbstatsThread(stat_cmd, self.stat_dir,
                                        stat_log_pipe.sock, 2)

            self.debug("cmd=" + stat_cmd)

            self.debug("starting stat thread")
            stat_thread.start()
            self.debug("stat thread started")

            pref_freq = 0

            while stat_thread.is_alive() and not self.quit_event.isSet():

                # get a list of all the files in stat_dir
                files = os.listdir(self.stat_dir)

                self.debug("found " + str(len(files)) + " in " + self.stat_dir)

                # if stat files exist, regenerate the enabled plots and
                # publish validity flags under the shared results lock
                if len(files) > 0:
                    if self.gen_histogram:
                        self.process_hg(pref_freq)
                    if self.gen_bandpass:
                        self.process_bp(pref_freq)
                    if self.gen_freqtime:
                        self.process_ft(pref_freq)
                    if self.gen_timeseries:
                        self.process_ts()
                    self.process_ms()

                    self.results["lock"].acquire()

                    pref_freq = self.pref_freq
                    self.results["timestamp"] = times.getCurrentTime()
                    self.results["valid"] = self.ms_valid
                    if self.gen_histogram:
                        self.results["valid"] |= self.hg_valid
                    if self.gen_timeseries:
                        self.results["valid"] |= self.ts_valid
                    if self.gen_freqtime:
                        self.results["valid"] |= self.ft_valid
                    if self.gen_bandpass:
                        self.results["valid"] |= self.bp_valid

                    self.results["lock"].release()

                time.sleep(5)

            self.debug("joining stat thread")
            rval = stat_thread.join()
            self.debug("stat thread joined")

            # the binary has exited: release the log pipe and remove the
            # kill command so neither accumulates across observations
            stat_log_pipe.close()
            self.binary_list.remove(kill_cmd)

            self.info("END   " + stat_cmd)

            if rval:
                self.error("stat thread failed")
                self.quit_event.set()
Exemple #8
0
    def main(self):
        """Main loop of the UWB preprocessing daemon for one stream.

        Waits for the input SMRB, then for each observation: reads the
        DADA header, builds the uwb_preprocessing_pipeline command from
        the header parameters, runs it in a thread, and marks the
        observation finished when the pipeline exits.
        """

        if not os.path.exists(self.proc_dir):
            os.makedirs(self.proc_dir, 0o755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))

        # 3 data blocks: input, transients output, processed output
        in_id = self.cfg["RECEIVING_DATA_BLOCK"]
        trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]

        # corresponding data block keys
        in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
        trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        trans_id)
        out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        self.debug("SMRBDaemon.waitForSMRB()")
        smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

        if not smrb_exists:
            # report the key that was actually waited on (self.db_key is
            # never set by this daemon)
            self.error("smrb[" + str(self.id) + "] no valid SMRB with " +
                       "key=" + in_key)
            self.quit_event.set()
            return

        # determine the number of channels to be processed by this stream
        # (currently informational only)
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

        # this command will not change from observation to observation
        preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
            trans_key + " " + out_key + " -d " + \
            self.cfg["GPU_ID_" + stream_id]

        tag = "preproc" + stream_id

        # enter the main loop
        while (not self.quit_event.isSet()):

            # wait for the header to acquire the processing parameters
            cmd = "dada_header -k " + in_key + " -t " + tag
            self.debug(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0 or self.quit_event.isSet():
                return

            self.debug("parsing header")
            header = Config.parseHeader(lines)

            cmd = preproc_cmd

            utc_start = header["UTC_START"]
            source = header["SOURCE"]
            freq = header["FREQ"]

            # directory in which to run preprocessor
            proc_dir = self.proc_dir + "/" + utc_start + "/" + source + "/" + \
                freq

            if not os.path.exists(proc_dir):
                os.makedirs(proc_dir, 0o755)

            # write the header to the proc_dir
            header_file = proc_dir + "/obs.header"
            self.debug("writing obs.header to out_dir")
            Config.writeDictToCFGFile(header, header_file)

            run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

            # presence of RFI reference is based on NPOL == 3
            have_rfi_reference_pol = (int(header["NPOL"]) == 3)

            # presence of a calibration signal
            run_calibration = (header["CAL_SIGNAL"] == "1")

            # run the transients processor (currently disabled)
            # run_transients = (header["TRANSIENTS"] == "1")
            run_transients = False

            # RFI reference pol is assumed to be last pol
            if have_rfi_reference_pol:
                rfi_reference_pol = int(header["NPOL"]) - 1
                self.info("Header NPOL=" + str(int(header["NPOL"])) +
                          " RFI reference signal present in pol " +
                          str(rfi_reference_pol))
                cmd = cmd + " -r " + str(rfi_reference_pol)

            if run_adaptive_filter:
                self.info("Adaptive filter active")
                cmd = cmd + " -a "

            if run_calibration:
                self.info("Calibration active")
                # averaging time / frequency resolution default to 10s / 1
                # channel when not specified in the header
                try:
                    avg_time = header["TSYS_AVG_TIME"]
                except KeyError:
                    avg_time = "10"
                try:
                    freq_res = header["TSYS_FREQ_RES"]
                except KeyError:
                    freq_res = "1"
                cmd = cmd + " -c " + avg_time + " -e " + freq_res

            if run_transients:
                self.info("Transients active")
                cmd = cmd + " -f " + header["TRANS_TSAMP"]

            # AJ todo check the channelisation limits with Nuer
            if run_adaptive_filter or run_calibration or run_transients:
                cmd = cmd + " -n 1024"

            # create a log pipe for the preprocessing command
            log_pipe = LogSocket("preproc_src", "preproc_src",
                                 str(self.id), "stream", log_host,
                                 log_port, int(DL))

            # connect up the log file output
            log_pipe.connect()

            # add this binary to the list of active commands
            binary_tag = "uwb_preprocessing_pipeline " + in_key
            self.binary_list.append(binary_tag)

            self.info("START " + cmd)

            # run the pipeline in a thread, working in proc_dir
            preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

            self.debug("starting preproc thread")
            preproc_thread.start()
            self.debug("preproc thread started")

            self.debug("joining preproc thread")
            rval = preproc_thread.join()
            self.debug("preproc thread joined")

            # the pipeline has exited: release the log pipe and remove
            # the binary marker so neither accumulates across observations
            log_pipe.close()
            self.binary_list.remove(binary_tag)

            self.info("END     " + cmd)

            if rval:
                self.error("preproc thread failed")

            # always mark the observation finished so downstream daemons
            # can proceed, then quit if the pipeline failed
            touch_cmd = "touch " + proc_dir + "/obs.finished"
            rval2, lines = self.system(touch_cmd, 2)
            if rval:
                self.quit_event.set()
Exemple #9
0
  def main (self):
    """Main loop of the fold/search/transients processing daemon.

    Waits for the input SMRB, then for each observation reads the DADA
    header, builds dspsr/digifil/heimdall commands from the header and
    configuration, and runs the fold command in a thread (transients and
    search threads are currently commented out).
    """

    stream_id = self.id

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id_in  = self.cfg["PROCESSING_DATA_BLOCK"]
    db_id_out = self.cfg["SEND_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    # CPU core this stream's processing is pinned to via numactl
    cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

    db_key_in = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id_in)
    db_key_out = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id_out)

    self.log (0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

    # create dspsr input file for the data block
    db_key_filename = "/tmp/spip_" + db_key_in + ".info"
    db_key_file = open (db_key_filename, "w")
    db_key_file.write("DADA INFO:\n")
    db_key_file.write("key " +  db_key_in + "\n")
    db_key_file.close()

    gpu_id = self.cfg["GPU_ID_" + str(self.id)]
    # NOTE(review): prev_utc_start is compared below but never updated,
    # so the repeated-observation check can never trigger
    prev_utc_start = ""

    (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")

    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

    # wait up to 10s for the SMRB to be created
    smrb_wait = 10
    cmd = "dada_dbmetric -k " + db_key_in
    self.binary_list.append (cmd)

    # poll the data block once per second until it responds
    rval = 1
    while rval and smrb_wait > 0 and not self.quit_event.isSet():

      rval, lines = self.system (cmd)
      if rval:
        time.sleep(1)
      smrb_wait -= 1

    if rval:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + db_key_in)
      self.quit_event.set()

    else:

      while (not self.quit_event.isSet()):

        # block until the next observation's header is available
        cmd = "dada_header -k " + db_key_in
        self.log(0, cmd)
        self.binary_list.append (cmd)
        rval, lines = self.system (cmd)
        self.binary_list.remove (cmd)

        # if the command returned ok and we have a header
        if rval != 0:
          if self.quit_event.isSet():
            self.log (2, cmd + " failed, but quit_event true")
          else:
            self.log (-2, cmd + " failed")
            self.quit_event.set()

        elif len(lines) == 0:
        
          self.log (-2, "header was empty")
          self.quit_event.set()
        
        else:

          header = Config.parseHeader (lines)

          utc_start = header["UTC_START"]
          self.log (1, "UTC_START=" + header["UTC_START"])
          self.log (1, "RESOLUTION=" + header["RESOLUTION"])

          # default processing commands: sink the data blocks so the
          # ring buffers drain even when no processing is configured
          fold_cmd = "dada_dbnull -s -k " + db_key_in
          trans_cmd = "dada_dbnull -s -k " + db_key_out
          search_cmd = "dada_dbnull -s -k " + db_key_in

          if prev_utc_start == utc_start:
            self.log (-2, "UTC_START [" + utc_start + "] repeated, ignoring observation")
          
          else: 
            beam = self.cfg["BEAM_" + str(self.beam_id)]

            # sanity-check the header against the stream configuration
            if not float(bw) == float(header["BW"]):
              self.log (-1, "configured bandwidth ["+bw+"] != header["+header["BW"]+"]")
            if not float(cfreq) == float(header["FREQ"]):
              self.log (-1, "configured cfreq ["+cfreq+"] != header["+header["FREQ"]+"]")
            if not int(nchan) == int(header["NCHAN"]):
              self.log (-2, "configured nchan ["+nchan+"] != header["+header["NCHAN"]+"]")

            source = header["SOURCE"]

            # output directories 
            suffix     = "/processing/" + beam + "/" + utc_start + "/" + source + "/" + cfreq
            fold_dir   = self.cfg["CLIENT_FOLD_DIR"]   + suffix
            trans_dir  = self.cfg["CLIENT_TRANS_DIR"]  + suffix
            search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix
            
            fold = False
            search = False
            trans = False 
          
            # fall back to fold-only when the PERFORM_* keys are absent
            try:
              fold = (header["PERFORM_FOLD"] == "1")
              search = (header["PERFORM_SEARCH"] == "1")
              trans = (header["PERFORM_TRANS"] == "1")
            except KeyError as e:
              fold = True
              search = False
              trans = False 

            if fold:
              os.makedirs (fold_dir, 0755)
              # NOTE(review): fold_cmd is assigned three times; only the
              # last assignment takes effect, the first two are dead
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -overlap -minram 4000 -x 16384 -b 1024 -L 5 -no_dyn"
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 512 -b 1024 -L 10 -no_dyn -skz -skzs 4 -skzm 128 -skz_no_tscr -skz_no_fscr"
              #fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 2048 -b 1024 -Lmin 7 -L 8 -no_dyn"
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -minram 2048 -x 1024 -b 1024 -L 8 -Lmin 7 -no_dyn"
              #fold_cmd = "dada_dbdisk -k " + db_key_in + " -s -D " + fold_dir

              header_file = fold_dir + "/obs.header"
              Config.writeDictToCFGFile (header, header_file)

            if search or trans:
              os.makedirs (search_dir, 0755)
              search_cmd = "digifil " + db_key_filename + " -c -B 10 -o " + utc_start + " .fil"
              if trans:
                search_cmd += " -k " + db_key_out

            # NOTE(review): int(...) == "1" compares int to str and is
            # always False, so this branch can never run
            if trans and int(self.cfg["NUM_SUBBAND"] ) == "1":
              os.makedirs (trans_dir, 0755)
              trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])

          # setup output pipes
          fold_log_pipe = LogSocket ("fold_src", "fold_src", str(self.id), "stream",
                                       log_host, log_port, int(DL))

          #trans_log_pipe  = LogSocket ("trans_src", "trans_src", str(self.id), "stream",
          #                             log_host, log_port, int(DL))
          #search_log_pipe = LogSocket ("search_src", "search_src", str(self.id), "stream",
          #                             log_host, log_port, int(DL))

          fold_log_pipe.connect()

          self.binary_list.append (fold_cmd)
          #self.binary_list.append (trans_cmd)
          #self.binary_list.append (search_cmd)

          # create processing threads
          # NOTE(review): fold_dir is used as the working directory even
          # when fold is False, in which case it was never created —
          # TODO confirm procThread tolerates a missing cwd
          self.log (2, "creating processing threads")      
          cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
          fold_thread = procThread (cmd, fold_dir, fold_log_pipe.sock, 1)

          #trans_thread = procThread (trans_cmd, self.log_sock.sock, 2)
          #search_thread = procThread (search_cmd, self.log_sock.sock, 2)

          # start processing threads
          self.log (2, "starting processing threads")      
          self.log (1, "START " + fold_cmd)      
          fold_thread.start()
          #trans_thread.start()
          #search_thread.start()

          # join processing threads
          self.log (2, "waiting for fold thread to terminate")
          rval = fold_thread.join() 
          self.log (2, "fold thread joined")
          self.log (1, "END   " + fold_cmd)      

          # remove the binary command from the list
          self.binary_list.remove (fold_cmd)

          if rval:
            self.log (-2, "fold thread failed")
            # NOTE(review): missing self. — this raises NameError at
            # runtime instead of setting the quit event
            quit_event.set()

          #self.log (2, "joining trans thread")
          #rval = trans_thread.join() 
          #self.log (2, "trans thread joined")
          #if rval:
          #  self.log (-2, "trans thread failed")
          #  quit_event.set()

          #self.log (2, "joining search thread")
          #rval = search_thread.join() 
          #self.log (2, "search thread joined")
          #if rval:
          #  self.log (-2, "search thread failed")
          #  quit_event.set()

          fold_log_pipe.close()
          #trans_log_pipe.close()
          #search_log_pipe.close()

        self.log (1, "processing completed")