Example #1
  def main (self):

    self.log (2, "UWBProcDaemon::main configure_child()")
    self.configure_child()

    self.log (2, "UWBProcDaemon::main wait_for_smrb()")
    SMRBDaemon.waitForSMRB(self.db_key, self)

    if self.quit_event.isSet():
      self.log (-1, "UWBProcDaemon::main quit event was set after waiting for SMRB creation")
      return

    # continuously run the main command waiting on the SMRB
    while (not self.quit_event.isSet()):

      # wait for the header to determine if folding is required
      cmd = "dada_header -k " + self.db_key + " -t " + self.tag
      self.log(2, "UWBProcDaemon::main " + cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd, 2, True)
      self.binary_list.remove (cmd)

      # check that the command succeeded and returned a header
      if rval != 0:
        time.sleep(0.1)
        if self.quit_event.isSet():
          self.log (2, "UWBProcDaemon::main " + cmd + " failed, but quit_event true")
        else:
          self.log (-2, "UWBProcDaemon::main " + cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        
        self.log (-2, "UWBProcDaemon::main header was empty")
        self.quit_event.set()
        
      else:

        self.log (2, "UWBProcDaemon::main parsing header")
        self.header = Config.parseHeader (lines)

        # account for lower to upper sideband conversion
        if not abs(float(self.bw)) == float(self.header["BW"]):
          self.log (-1, "configured bandwidth ["+self.bw+"] != self.header["+self.header["BW"]+"]")
        if not float(self.cfreq) == float(self.header["FREQ"]):
          self.log (-1, "configured cfreq ["+self.cfreq+"] != self.header["+self.header["FREQ"]+"]")
        if not int(self.nchan) == int(self.header["NCHAN"]):
          self.log (-2, "configured nchan ["+self.nchan+"] != self.header["+self.header["NCHAN"]+"]")

        self.source = self.header["SOURCE"]
        self.utc_start = self.header["UTC_START"]

        # call the child class prepare method
        self.log (2, "UWBProcDaemon::main prepare()")
        valid = self.prepare()

        if valid:

          # ensure the output directory exists
          self.log (2, "UWBProcDaemon::main creating out_dir: " + self.out_dir)
          if not os.path.exists (self.out_dir):
            os.makedirs (self.out_dir, 0755)

          # write the sub-bands header to the out_dir
          header_file = self.out_dir + "/obs.header"
          self.log (2, "UWBProcDaemon::main writing obs.header to out_dir")
          Config.writeDictToCFGFile (self.header, header_file)
    
          # configure the output pipe
          self.log (2, "UWBProcDaemon::main configuring output log pipe")
          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])
          log_pipe = LogSocket (self.log_prefix, self.log_prefix,
                                str(self.id), "stream",
                                log_host, log_port, int(DL))
          log_pipe.connect()

          # get any modifications to the environment
          env = self.getEnvironment()

          # add the binary command to the kill list
          self.binary_list.append (self.cmd)

          # create processing threads
          self.log (2, "UWBProcDaemon::main creating processing threads")      
          cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
          proc_thread = UWBProcThread (self, cmd, log_pipe.sock, env, 1)

          # start processing threads
          self.log (2, "UWBProcDaemon::main starting processing thread")
          proc_thread.start()

          self.log (1, "START " + self.cmd)

          # join processing threads
          self.log (2, "UWBProcDaemon::main waiting for proc thread to terminate")
          rval = proc_thread.join() 
          self.log (2, "UWBProcDaemon::main proc thread joined")

          self.log (1, "END   " + self.cmd)

          # remove the binary command from the list
          self.binary_list.remove (self.cmd)

          if rval:
            self.log (-2, "UWBProcDaemon::main proc thread failed")
            self.quit_event.set()

          log_pipe.close()

          # brief pause so a proc thread that fails immediately cannot respawn in a tight loop
          time.sleep(1)

        else:

          self.log (2, "MEERKATProcDaemon::main skip this processing")
          time.sleep(10)

      self.log (2, "UWBProcDaemon::main processing loop completed")
Example #2
    def main(self):

        self.log(2, "UWBProcDaemon::main configure_child()")
        self.configure_child()

        self.log(2, "UWBProcDaemon::main wait_for_smrb()")
        SMRBDaemon.waitForSMRB(self.db_key, self)

        if self.quit_event.isSet():
            self.log(
                -1,
                "UWBProcDaemon::main quit event was set after waiting for SMRB creation"
            )
            return

        # continuously run the main command waiting on the SMRB
        while (not self.quit_event.isSet()):

            # wait for the header to determine if folding is required
            cmd = "dada_header -k " + self.db_key + " -t " + self.tag
            self.log(2, "UWBProcDaemon::main " + cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            # check that the command succeeded and returned a header
            if rval != 0:
                time.sleep(0.1)
                if self.quit_event.isSet():
                    self.log(
                        2, "UWBProcDaemon::main " + cmd +
                        " failed, but quit_event true")
                else:
                    self.log(-2, "UWBProcDaemon::main " + cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:

                self.log(-2, "UWBProcDaemon::main header was empty")
                self.quit_event.set()

            else:

                self.log(2, "UWBProcDaemon::main parsing header")
                self.header = Config.parseHeader(lines)

                # account for lower to upper sideband conversion
                if not abs(float(self.bw)) == float(self.header["BW"]):
                    self.log(
                        -1, "configured bandwidth [" + self.bw +
                        "] != self.header[" + self.header["BW"] + "]")
                if not float(self.cfreq) == float(self.header["FREQ"]):
                    self.log(
                        -1, "configured cfreq [" + self.cfreq +
                        "] != self.header[" + self.header["FREQ"] + "]")
                if not int(self.nchan) == int(self.header["NCHAN"]):
                    self.log(
                        -2, "configured nchan [" + self.nchan +
                        "] != self.header[" + self.header["NCHAN"] + "]")

                self.source = self.header["SOURCE"]
                self.utc_start = self.header["UTC_START"]

                # call the child class prepare method
                self.log(2, "UWBProcDaemon::main prepare()")
                valid = self.prepare()

                if valid:

                    # ensure the output directory exists
                    self.log(
                        2, "UWBProcDaemon::main creating out_dir: " +
                        self.out_dir)
                    if not os.path.exists(self.out_dir):
                        os.makedirs(self.out_dir, 0755)

                    # write the sub-bands header to the out_dir
                    header_file = self.out_dir + "/obs.header"
                    self.log(
                        2, "UWBProcDaemon::main writing obs.header to out_dir")
                    Config.writeDictToCFGFile(self.header, header_file)

                    # configure the output pipe
                    self.log(
                        2, "UWBProcDaemon::main configuring output log pipe")
                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])
                    log_pipe = LogSocket(self.log_prefix, self.log_prefix,
                                         str(self.id), "stream", log_host,
                                         log_port, int(DL))
                    log_pipe.connect()

                    # get any modifications to the environment
                    env = self.getEnvironment()

                    # add the binary command to the kill list
                    self.binary_list.append(self.cmd)

                    # create processing threads
                    self.log(
                        2, "UWBProcDaemon::main creating processing threads")
                    cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
                    proc_thread = UWBProcThread(self, cmd, log_pipe.sock, env,
                                                1)

                    # start processing threads
                    self.log(2,
                             "UWBProcDaemon::main starting processing thread")
                    proc_thread.start()

                    self.log(1, "START " + self.cmd)

                    # join processing threads
                    self.log(
                        2,
                        "UWBProcDaemon::main waiting for proc thread to terminate"
                    )
                    rval = proc_thread.join()
                    self.log(2, "UWBProcDaemon::main proc thread joined")

                    self.log(1, "END   " + self.cmd)

                    # remove the binary command from the list
                    self.binary_list.remove(self.cmd)

                    if rval:
                        self.log(-2, "UWBProcDaemon::main proc thread failed")
                        self.quit_event.set()

                    log_pipe.close()

                    # brief pause so a proc thread that fails immediately
                    # cannot respawn in a tight loop
                    time.sleep(1)

                else:

                    self.log(2, "MEERKATProcDaemon::main skip this processing")
                    time.sleep(10)

            self.log(2, "UWBProcDaemon::main processing loop completed")
Example #3
  def main (self):

    if self.gen_histogram:
      self.hg_plot = HistogramPlot()
      self.valid_plots.append("histogram")

    if self.gen_bandpass:
      self.bp_plot = BandpassPlot()
      self.valid_plots.append("bandpass")

    if self.gen_timeseries:
      self.ts_plot = TimeseriesPlot()
      self.valid_plots.append("timeseries")

    if self.gen_freqtime:
      self.ft_plot = FreqTimePlot()
      self.valid_plots.append("freqtime")

    # stats files are stored in flat directory structure
    # stats_dir / beam / cfreq

    if not os.path.exists(self.processing_dir):
      os.makedirs(self.processing_dir, 0755) 

    # get the data block keys
    db_prefix  = self.cfg["DATA_BLOCK_PREFIX"]
    db_id      = self.cfg["RECEIVING_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    stream_id  = str(self.id)
    self.debug("stream_id=" + str(self.id))
    self.db_key = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id)
    self.debug("db_key=" + self.db_key)

    # directories for dbstats output and archived results
    self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
    self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

    if not os.path.exists(self.stat_dir):
      os.makedirs(self.stat_dir, 0755)
    if not os.path.exists(self.archived_dir):
      os.makedirs(self.archived_dir, 0755)

    # configure the histogram plot with all channels included
    if self.gen_histogram:
      self.hg_plot.configure (-1, self.histogram_abs_xmax)

    log = False
    zap = False
    transpose = False
    # configure the freq v time plot
    if self.gen_freqtime:
      self.ft_plot.configure (log, zap, transpose)

    # configure the bandpass plot
    log = True
    if self.gen_bandpass:
      self.bp_plot.configure (log, zap, transpose)

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    # stat will use the stream config file created for the recv command
    self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
    while (not os.path.exists(self.stream_config_file)):
      self.debug("waiting for stream_config file [" + self.stream_config_file +"] to be created by recv")
      time.sleep(1)    

    self.debug("wait_for_smrb()")
    smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    stat_cmd = self.build_cmd()

    while (not self.quit_event.isSet()):

      process_stats = True

      # wait for the header to determine when dbstats should run
      cmd = "dada_header -k " + self.db_key + " -t stat"
      self.info(cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd)
      self.binary_list.remove (cmd)

      # check that the command succeeded and returned a header
      if rval != 0:
        if self.quit_event.isSet():
          self.debug(cmd + " failed, but quit_event true")
        else:
          self.error(cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        self.error("header was empty")
        self.quit_event.set()

      else:
        self.debug("parsing header")
        self.header = Config.parseHeader (lines)

        try:
          if self.header["ZERO_INPUT"] == "1":
            process_stats = False
        except KeyError:
          self.debug("ZERO_INPUT did not exist in header")

      if self.quit_event.isSet():
        self.debug("quit event set, exiting loop")
        continue

      if not process_stats:
        self.debug("not analyzing stats due to ZERO_INPUT")
        time.sleep(5)
        continue

      # create a log pipe for the stats command
      stat_log_pipe   = LogSocket ("stat_src", "stat_src", str(self.id), "stream",
                                   log_host, log_port, int(DL))

      # connect up the log file output
      stat_log_pipe.connect()

      # add this binary to the list of active commands
      kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
      self.info("kill_cmd=" + kill_cmd)
      self.binary_list.append (kill_cmd)

      self.log (1, "START " + stat_cmd)

      # initialize the threads
      stat_thread = dbstatsThread (stat_cmd, self.stat_dir, stat_log_pipe.sock, 2)

      self.debug("cmd=" + stat_cmd)

      self.debug("starting stat thread")
      stat_thread.start()
      self.debug("stat thread started")

      pref_freq = 0

      while stat_thread.is_alive() and not self.quit_event.isSet():

        # get a list of all the files in stat_dir
        files = os.listdir (self.stat_dir)

        self.debug("found " + str(len(files)) + " in " + self.stat_dir)

        # if stat files exist in the directory
        if len(files) > 0:
          if self.gen_histogram:
            self.process_hg (pref_freq)
          if self.gen_bandpass:
            self.process_bp (pref_freq)
          if self.gen_freqtime:
            self.process_ft (pref_freq)
          if self.gen_timeseries:
            self.process_ts ()
          self.process_ms ()

          self.results["lock"].acquire()

          pref_freq = self.pref_freq
          self.results["timestamp"] = times.getCurrentTime()
          self.results["valid"] = self.ms_valid
          if self.gen_histogram:
            self.results["valid"] |= self.hg_valid
          if self.gen_timeseries:
            self.results["valid"] |= self.ts_valid
          if self.gen_freqtime:
            self.results["valid"] |= self.ft_valid
          if self.gen_bandpass:
            self.results["valid"] |= self.bp_valid

          self.results["lock"].release()

        time.sleep(5)

      self.debug("joining stat thread")
      rval = stat_thread.join()
      self.debug("stat thread joined")

      self.log (1, "END   " + stat_cmd)

      if rval:
        self.log (-2, "stat thread failed")
        self.quit_event.set()
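
Example #3 pairs a long-running dbstats thread with a polling loop: while the thread is alive, the daemon lists the output directory every few seconds, regenerates whichever plots are enabled, and publishes a timestamp and validity flag into a lock-protected results dictionary. A stripped-down sketch of that monitor loop is shown below; the directory, interval and result keys are illustrative only.

import os
import threading
import time

def monitor_stats(worker, stat_dir, results, lock, stop_event, interval=5):
    # Poll the stats directory while the worker thread runs; publish results
    # under the lock so readers in other threads see a consistent snapshot.
    while worker.is_alive() and not stop_event.is_set():
        files = os.listdir(stat_dir)
        if files:
            with lock:
                results["timestamp"] = time.time()
                results["valid"] = True
        time.sleep(interval)
    worker.join()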
Example #4
    def main(self):

        if not os.path.exists(self.proc_dir):
            os.makedirs(self.proc_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))

        # 3 data blocks: receiving, transients, processing
        in_id = self.cfg["RECEIVING_DATA_BLOCK"]
        trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]

        # 3 data block keys
        in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
        trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        trans_id)
        out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        self.debug("SMRBDaemon.waitForSMRB()")
        smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

        if not smrb_exists:
            self.error("smrb["+str(self.id)+"] no valid SMRB with " +
                       "key=" + self.db_key)
            self.quit_event.set()
            return

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

        # this stat command will not change from observation to observation
        preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
            trans_key + " " + out_key + " -d " + \
            self.cfg["GPU_ID_" + stream_id]

        tag = "preproc" + stream_id

        # enter the main loop
        while (not self.quit_event.isSet()):

            # wait for the header to acquire the processing parameters
            cmd = "dada_header -k " + in_key + " -t " + tag
            self.debug(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0 or self.quit_event.isSet():
                return

            self.debug("parsing header")
            header = Config.parseHeader(lines)

            cmd = preproc_cmd

            utc_start = header["UTC_START"]
            source = header["SOURCE"]
            freq = header["FREQ"]

            # directory in which to run preprocessor
            proc_dir = self.proc_dir + "/" + utc_start + "/" + source + "/" + \
                freq

            if not os.path.exists(proc_dir):
                os.makedirs(proc_dir, 0755)

            # write the header to the proc_dir
            header_file = proc_dir + "/obs.header"
            self.debug("writing obs.header to out_dir")
            Config.writeDictToCFGFile(header, header_file)

            run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

            # presence of an RFI reference signal is indicated by NPOL == 3
            have_rfi_reference_pol = (int(header["NPOL"]) == 3)

            # presence of a calibration signal
            run_calibration = (header["CAL_SIGNAL"] == "1")

            # run the transients processor
            # run_transients = (header["TRANSIENTS"] == "1")
            run_transients = False

            # RFI reference pol is assumed to be last pol
            if have_rfi_reference_pol:
                rfi_reference_pol = int(header["NPOL"]) - 1
                self.info("Header NPOL=" + str(int(header["NPOL"])) +
                          " RFI reference signal present in pol " +
                          str(rfi_reference_pol))
                cmd = cmd + " -r " + str(rfi_reference_pol)

            if run_adaptive_filter:
                self.info("Adaptive filter active")
                cmd = cmd + " -a "

            if run_calibration:
                self.info("Calibration active")
                try:
                    avg_time = header["TSYS_AVG_TIME"]
                except KeyError:
                    avg_time = "10"
                try:
                    freq_res = header["TSYS_FREQ_RES"]
                except KeyError:
                    freq_res = "1"
                cmd = cmd + " -c " + avg_time + " -e " + freq_res

            if run_transients:
                self.info("Transients active")
                cmd = cmd + " -f " + header["TRANS_TSAMP"]

            # AJ todo check the channelisation limits with Nuer
            if run_adaptive_filter or run_calibration or run_transients:
                cmd = cmd + " -n 1024"

            # create a log pipe for the stats command
            log_pipe = LogSocket("preproc_src", "preproc_src",
                                 str(self.id), "stream", log_host,
                                 log_port, int(DL))

            # connect up the log file output
            log_pipe.connect()

            # add this binary to the list of active commands
            self.binary_list.append("uwb_preprocessing_pipeline " + in_key)

            self.info("START " + cmd)

            # initialize the threads
            preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

            self.debug("starting preproc thread")
            preproc_thread.start()
            self.debug("preproc thread started")

            self.debug("joining preproc thread")
            rval = preproc_thread.join()
            self.debug("preproc thread joined")

            self.info("END     " + cmd)

            if rval:
                self.error("preproc thread failed")
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
                self.quit_event.set()
            else:
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
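
Example #4 assembles the preprocessing command incrementally, appending option flags according to values parsed from the observation header and falling back to defaults for optional keys. The helper below sketches that flag-building step on its own; the flag letters mirror the ones used above, but the header keys, defaults and helper name are illustrative.

def build_preproc_flags(base_cmd, header):
    # Append flags to the fixed base command based on header values,
    # using defaults when optional calibration keys are missing.
    cmd = base_cmd
    filtering = False
    npol = int(header.get("NPOL", "2"))
    if npol == 3:                                   # RFI reference pol present
        cmd += " -r " + str(npol - 1)
    if header.get("ADAPTIVE_FILTER") == "1":
        cmd += " -a"
        filtering = True
    if header.get("CAL_SIGNAL") == "1":
        cmd += " -c " + header.get("TSYS_AVG_TIME", "10")
        cmd += " -e " + header.get("TSYS_FREQ_RES", "1")
        filtering = True
    if filtering:
        cmd += " -n 1024"                           # fixed channelisation
    return cmd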
Example #5
    def main(self):

        if self.gen_histogram:
            self.hg_plot = HistogramPlot()
            self.valid_plots.append("histogram")

        if self.gen_bandpass:
            self.bp_plot = BandpassPlot()
            self.valid_plots.append("bandpass")

        if self.gen_timeseries:
            self.ts_plot = TimeseriesPlot()
            self.valid_plots.append("timeseries")

        if self.gen_freqtime:
            self.ft_plot = FreqTimePlot()
            self.valid_plots.append("freqtime")

        # stats files are stored in flat directory structure
        # stats_dir / beam / cfreq

        if not os.path.exists(self.processing_dir):
            os.makedirs(self.processing_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        db_id = self.cfg["RECEIVING_DATA_BLOCK"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))
        self.db_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                          db_id)
        self.debug("db_key=" + self.db_key)

        # directories for dbstats output and archived results
        self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
        self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

        if not os.path.exists(self.stat_dir):
            os.makedirs(self.stat_dir, 0755)
        if not os.path.exists(self.archived_dir):
            os.makedirs(self.archived_dir, 0755)

        # configure the histogram plot with all channels included
        if self.gen_histogram:
            self.hg_plot.configure(-1, self.histogram_abs_xmax)

        log = False
        zap = False
        transpose = False
        # configure the freq v time plot
        if self.gen_freqtime:
            self.ft_plot.configure(log, zap, transpose)

        # configure the bandpass plot
        log = True
        if self.gen_bandpass:
            self.bp_plot.configure(log, zap, transpose)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        # stat will use the stream config file created for the recv command
        self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
        while (not os.path.exists(self.stream_config_file)):
            self.debug("waiting for stream_config file [" +
                       self.stream_config_file + "] to be created by recv")
            time.sleep(1)

        self.debug("wait_for_smrb()")
        smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

        if not smrb_exists:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                self.db_key)
            self.quit_event.set()
            return

        stat_cmd = self.build_cmd()

        while (not self.quit_event.isSet()):

            process_stats = True

            # wait for the header to determine when dbstats should run
            cmd = "dada_header -k " + self.db_key + " -t stat"
            self.info(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd)
            self.binary_list.remove(cmd)

            # check that the command succeeded and returned a header
            if rval != 0:
                if self.quit_event.isSet():
                    self.debug(cmd + " failed, but quit_event true")
                else:
                    self.error(cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:
                self.error("header was empty")
                self.quit_event.set()

            else:
                self.debug("parsing header")
                self.header = Config.parseHeader(lines)

                try:
                    if self.header["ZERO_COPY"] == "1":
                        process_stats = False
                except KeyError:
                    self.debug("ZERO_COPY did not exist in header")

            if self.quit_event.isSet():
                self.debug("quit event set, exiting loop")
                continue

            if not process_stats:
                self.debug("not analyzing stats due to ZERO_COPY")
                time.sleep(5)
                continue

            # create a log pipe for the stats command
            stat_log_pipe = LogSocket("stat_src", "stat_src", str(self.id),
                                      "stream", log_host, log_port, int(DL))

            # connect up the log file output
            stat_log_pipe.connect()

            # add this binary to the list of active commands
            kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
            self.info("kill_cmd=" + kill_cmd)
            self.binary_list.append(kill_cmd)

            self.log(1, "START " + stat_cmd)

            # initialize the threads
            stat_thread = dbstatsThread(stat_cmd, self.stat_dir,
                                        stat_log_pipe.sock, 2)

            self.debug("cmd=" + stat_cmd)

            self.debug("starting stat thread")
            stat_thread.start()
            self.debug("stat thread started")

            pref_freq = 0

            while stat_thread.is_alive() and not self.quit_event.isSet():

                # get a list of all the files in stat_dir
                files = os.listdir(self.stat_dir)

                self.debug("found " + str(len(files)) + " in " + self.stat_dir)

                # if stat files exist in the directory
                if len(files) > 0:
                    if self.gen_histogram:
                        self.process_hg(pref_freq)
                    if self.gen_bandpass:
                        self.process_bp(pref_freq)
                    if self.gen_freqtime:
                        self.process_ft(pref_freq)
                    if self.gen_timeseries:
                        self.process_ts()
                    self.process_ms()

                    self.results["lock"].acquire()

                    pref_freq = self.pref_freq
                    self.results["timestamp"] = times.getCurrentTime()
                    self.results["valid"] = self.ms_valid
                    if self.gen_histogram:
                        self.results["valid"] |= self.hg_valid
                    if self.gen_timeseries:
                        self.results["valid"] |= self.ts_valid
                    if self.gen_freqtime:
                        self.results["valid"] |= self.ft_valid
                    if self.gen_bandpass:
                        self.results["valid"] |= self.bp_valid

                    self.results["lock"].release()

                time.sleep(5)

            self.debug("joining stat thread")
            rval = stat_thread.join()
            self.debug("stat thread joined")

            self.log(1, "END   " + stat_cmd)

            if rval:
                self.log(-2, "stat thread failed")
                self.quit_event.set()
Example #6
  def main (self):

    db_id = self.cfg["RECEIVING_DATA_BLOCK"]
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    self.db_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id)

    # wait for the SMRB to exist before continuing
    self.log(2, "main: SMRBDaemon.waitForSMRB()")
    smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

    # don't proceed without an SMRB
    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    # configuration file for recvsim stream
    self.local_config = self.getConfiguration()
    self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"

    self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
    self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))

    self.configured = True
    self.running = False
    env = self.getEnvironment()

    # external control loop to allow for reconfiguration of RECV
    while not self.quit_event.isSet():

      while not self.quit_event.isSet() and not self.configured:
        self.log(3, "main: waiting for configuration")
        sleep(1)

      if self.quit_event.isSet():
        return

      Config.writeDictToCFGFile (self.local_config, self.local_config_file)
      self.log(3, "main: configured")

      cmd = self.getCommand(self.local_config_file)
      self.binary_list.append (cmd)

      self.log(3, "main: sleep(1)")
      sleep(1)

      self.log(3, "main: log_pipe = LogSocket(recvsim_src))")
      log_pipe = LogSocket ("recvsim_src", "recvsim_src", str(self.id), "stream",
                            self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                            int(DL))

      self.log(3, "main: log_pipe.connect()")
      log_pipe.connect()

      self.log(3, "main: sleep(1)")
      sleep(1)

      self.running = True

      self.log(1, "START " + cmd)

      # this should be a persistent / blocking command 
      rval = self.system_piped (cmd, log_pipe.sock)

      self.running = False

      self.binary_list.remove (cmd)

      self.log(1, "END   " + cmd)

      if rval:
        if not self.quit_event.isSet():
          self.log (-2, cmd + " failed with return value " + str(rval))

      log_pipe.close ()
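
Example #6 differs from the others in that its command is persistent: the receive simulator runs until the observation (or daemon) stops, so the outer loop exists only to rewrite the configuration file and restart the command. A compact sketch of that restart loop, with the log pipe and configuration handling reduced to callables, is given below; the function and argument names are placeholders.

import subprocess
import time

def receive_control_loop(stop_event, write_config, build_cmd):
    # Each pass writes the current configuration, then blocks on the
    # (persistent) receive command; on exit, loop and pick up new config.
    while not stop_event.is_set():
        write_config()
        cmd = build_cmd()
        rval = subprocess.call(cmd, shell=True)
        if rval != 0 and not stop_event.is_set():
            # unexpected exit: pause briefly before restarting
            time.sleep(1)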
Example #7
    def main(self):

        if not os.path.exists(self.proc_dir):
            os.makedirs(self.proc_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))

        # 3 data blocks: receiving, transients, processing
        in_id = self.cfg["RECEIVING_DATA_BLOCK"]
        trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]

        # 3 data block keys
        in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
        trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        trans_id)
        out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        self.debug("SMRBDaemon.waitForSMRB()")
        smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

        if not smrb_exists:
            self.error("smrb[" + str(self.id) + "] no valid SMRB with " +
                       "key=" + self.db_key)
            self.quit_event.set()
            return

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

        # this stat command will not change from observation to observation
        preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
            trans_key + " " + out_key + " -d " + \
            self.cfg["GPU_ID_" + stream_id]

        tag = "preproc" + stream_id

        # enter the main loop
        while (not self.quit_event.isSet()):

            # wait for the header to acquire the processing parameters
            cmd = "dada_header -k " + in_key + " -t " + tag
            self.debug(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0 or self.quit_event.isSet():
                return

            self.debug("parsing header")
            header = Config.parseHeader(lines)

            cmd = preproc_cmd

            utc_start = header["UTC_START"]
            source = header["SOURCE"]
            freq = header["FREQ"]

            # directory in which to run preprocessor
            proc_dir = self.proc_dir + "/" + utc_start + "/" + source + "/" + \
                freq

            if not os.path.exists(proc_dir):
                os.makedirs(proc_dir, 0755)

            # write the header to the proc_dir
            header_file = proc_dir + "/obs.header"
            self.debug("writing obs.header to out_dir")
            Config.writeDictToCFGFile(header, header_file)

            run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

            # presence of an RFI reference signal is indicated by NPOL == 3
            have_rfi_reference_pol = (int(header["NPOL"]) == 3)

            # presence of a calibration signal
            run_calibration = (header["CAL_SIGNAL"] == "1")

            # run the transients processor
            # run_transients = (header["TRANSIENTS"] == "1")
            run_transients = False

            # RFI reference pol is assumed to be last pol
            if have_rfi_reference_pol:
                rfi_reference_pol = int(header["NPOL"]) - 1
                self.info("Header NPOL=" + str(int(header["NPOL"])) +
                          " RFI reference signal present in pol " +
                          str(rfi_reference_pol))
                cmd = cmd + " -r " + str(rfi_reference_pol)

            if run_adaptive_filter:
                self.info("Adaptive filter active")
                cmd = cmd + " -a "

            if run_calibration:
                self.info("Calibration active")
                try:
                    avg_time = header["TSYS_AVG_TIME"]
                except KeyError:
                    avg_time = "10"
                try:
                    freq_res = header["TSYS_FREQ_RES"]
                except KeyError:
                    freq_res = "1"
                cmd = cmd + " -c " + avg_time + " -e " + freq_res

            if run_transients:
                self.info("Transients active")
                cmd = cmd + " -f " + header["TRANS_TSAMP"]

            # AJ todo check the channelisation limits with Nuer
            if run_adaptive_filter or run_calibration or run_transients:
                cmd = cmd + " -n 1024"

            # create a log pipe for the stats command
            log_pipe = LogSocket("preproc_src", "preproc_src", str(self.id),
                                 "stream", log_host, log_port, int(DL))

            # connect up the log file output
            log_pipe.connect()

            # add this binary to the list of active commands
            self.binary_list.append("uwb_preprocessing_pipeline " + in_key)

            self.info("START " + cmd)

            # initialize the threads
            preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

            self.debug("starting preproc thread")
            preproc_thread.start()
            self.debug("preproc thread started")

            self.debug("joining preproc thread")
            rval = preproc_thread.join()
            self.debug("preproc thread joined")

            self.info("END     " + cmd)

            if rval:
                self.error("preproc thread failed")
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
                self.quit_event.set()
            else:
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)