Example #1
  def waitForSMRB (self):

    db_id = self.cfg["RECEIVING_DATA_BLOCK"]
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    self.db_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id)

    # port of the SMRB daemon for this stream
    smrb_port = SMRBDaemon.getDBMonPort(self.id)

    # wait up to 60s for the SMRB to be created
    smrb_wait = 60

    smrb_exists = False
    while not smrb_exists and smrb_wait > 0 and not self.quit_event.isSet():

      self.log(2, "trying to open connection to SMRB")
      smrb_sock = sockets.openSocket (DL, "localhost", smrb_port, 1)
      if smrb_sock:
        smrb_sock.send ("smrb_status\r\n")
        junk = smrb_sock.recv (65536)
        smrb_sock.close()
        smrb_exists = True
      else:
        sleep (1)
        smrb_wait -= 1

    return smrb_exists
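
sockets.openSocket and SMRBDaemon.getDBMonPort are spip helpers not shown here. As a rough sketch of the same retry pattern using only the Python standard library (the status string and the one-second cadence are taken from the snippet above; the helper name is hypothetical):

import socket
import time

def wait_for_status_port (host, port, retries=60):
  # poll the status port once per second until it accepts a connection
  for _ in range(retries):
    try:
      sock = socket.create_connection ((host, port), timeout=1)
    except socket.error:
      time.sleep(1)
      continue
    try:
      sock.send ("smrb_status\r\n")
      sock.recv (65536)
    finally:
      sock.close()
    return True
  return False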
Example #2
    def getCommand(self, config_file):
        self.log(2, "MeerKATRecvDaemon::getCommand()")

        db_id = self.cfg["RECEIVING_DATA_BLOCK"]
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        key1 = SMRBDaemon.getDBKey(db_prefix, 0, num_stream, db_id)
        key2 = SMRBDaemon.getDBKey(db_prefix, 1, num_stream, db_id)

        cmd = self.cfg["STREAM_BINARY"] + " " + config_file + " " + key1 + " " + key2 \
                + " -b " + self.cpu_core \
                + " -c " + self.ctrl_port

        if int(self.local_config["NCHAN"]) <= 1024:
            if self.cfg["CONFIG_NAME"] == "meerkat_cx5_1k2k":
                cmd = cmd + " -f spead2k"
            else:
                cmd = cmd + " -f spead1k"
        else:
            cmd = cmd + " -f spead"

        # hack for sub-band mode
        if self.id != "0":
            #cmd = "meerkat_dummyserver -b " + self.cpu_core + " -c " + self.ctrl_port
            cmd = "sleep 86400"

        return cmd
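
getCommand only assembles a flat command string; launching it is left to the daemon. If it were run directly, one hedged option (assuming the arguments contain no shell metacharacters; run_command is a hypothetical helper) is to tokenize the string first:

import shlex
import subprocess

def run_command (cmd):
  # split the flat command string into an argv list and execute it
  args = shlex.split(cmd)
  proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  out, err = proc.communicate()
  return proc.returncode, out, err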
Example #3
    def waitForSMRB(self):

        db_id = self.cfg["PROCESSING_DATA_BLOCK"]
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        self.db_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                          db_id)

        # port of the SMRB daemon for this stream
        smrb_port = SMRBDaemon.getDBMonPort(self.id)

        # wait up to 60s for the SMRB to be created
        smrb_wait = 60

        smrb_exists = False
        while (not smrb_exists and smrb_wait > 0
               and not self.quit_event.isSet()):

            self.log(2, "trying to open connection to SMRB")
            smrb_sock = sockets.openSocket(DL, "localhost", smrb_port, 1)
            if smrb_sock:
                smrb_sock.send("smrb_status\r\n")
                junk = smrb_sock.recv(65536)
                smrb_sock.close()
                smrb_exists = True
            else:
                sleep(1)
                smrb_wait -= 1

        return smrb_exists
Example #4
    def configure_child(self):

        self.stream_id = self.id

        # get the data block keys
        self.db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        self.db_id = self.cfg["PROCESSING_DATA_BLOCK"]
        self.num_stream = self.cfg["NUM_STREAM"]
        self.cpu_core = self.cfg["STREAM_PROC_CORE_" + self.stream_id]

        self.db_key = SMRBDaemon.getDBKey(self.db_prefix, self.stream_id,
                                          self.num_stream, self.db_id)

        self.log(2, "UWBProcDaemon::configure db_key=" + self.db_key)

        # GPU to use for signal processing
        self.gpu_id = self.cfg["GPU_ID_" + str(self.id)]

        (host, self.beam_id,
         self.subband_id) = self.cfg["STREAM_" + self.stream_id].split(":")
        (self.cfreq, self.bw,
         self.nchan) = self.cfg["SUBBAND_CONFIG_" + self.subband_id].split(":")
        self.beam = self.cfg["BEAM_" + str(self.beam_id)]

        self.log(2, "UWBProcDaemon::configure done")
Example #5
  def main (self):

    # lookup the short hostname of this machine
    hostname = getHostNameShort()

    # get the site configuration
    config = Config()

    # prepare header using configuration file parameters
    fixed_config = config.getStreamConfigFixed(self.id)

    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id = self.cfg["RECEIVING_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    db_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id)

    cmd = "dada_diskdb -k " + db_key + " -z -s " + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000000000000000.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000000000000000.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000034359738368.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000068719476736.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000103079215104.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000137438953472.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000171798691840.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000206158430208.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000240518168576.000000.dada"

    self.log (0, "cmd=" + cmd)
    (rval, lines) = self.system (cmd)
    self.log (0, "rval=" + str(rval))
    for line in lines:
      self.log (0, line)
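
Assembling the -f list through string concatenation is easy to get wrong (a missing space silently fuses two arguments). A sketch of the same command built from a Python list instead (two of the file names from the snippet; db_key is a placeholder):

db_key = "dada"  # placeholder
files = [
  "/data/spip/first_light/single_dish/2016-04-28-13:27:30_0000000000000000.000000.dada",
  "/data/spip/first_light/single_dish/2016-04-28-13:27:30_0000034359738368.000000.dada",
]
cmd = "dada_diskdb -k " + db_key + " -z -s " + \
      " ".join(["-f " + f for f in files])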
Example #6
    def configure_child(self):

        self.stream_id = self.id

        # get the data block keys
        self.db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        self.db_id_in = self.cfg["RECEIVING_DATA_BLOCK"]
        self.db_id_out = self.cfg["PROCESSING_DATA_BLOCK"]
        self.num_stream = self.cfg["NUM_STREAM"]
        self.cpu_core = self.cfg["STREAM_PROC_CORE_" + self.stream_id]

        # shared memory keys
        self.db_key_in1 = SMRBDaemon.getDBKey(self.db_prefix, 0,
                                              self.num_stream, self.db_id_in)
        self.db_key_in2 = SMRBDaemon.getDBKey(self.db_prefix, 1,
                                              self.num_stream, self.db_id_in)
        self.db_key_out = SMRBDaemon.getDBKey(self.db_prefix, self.stream_id,
                                              self.num_stream, self.db_id_out)

        self.log(
            0, "MeerKATXposeDaemon::configure db_key_in1=" + self.db_key_in1 +
            " db_key_in2=" + self.db_key_in2)

        # GPU to use for signal processing
        self.gpu_id = self.cfg["GPU_ID_" + str(self.id)]
        if "," in self.gpu_id:
            self.gpu_id = self.gpu_id.split(",")[0]
        self.log(0, "MeerKATXposeDaemon::configure gpu_id=" + str(self.gpu_id))

        (host, self.beam_id,
         self.subband_id) = self.cfg["STREAM_" + self.stream_id].split(":")
        (cfreq1, bw1, nchan1) = self.cfg["SUBBAND_CONFIG_0"].split(":")
        (cfreq2, bw2, nchan2) = self.cfg["SUBBAND_CONFIG_1"].split(":")

        self.cfreq = (float(cfreq1) + float(cfreq2)) / 2
        self.bw = float(bw1) + float(bw2)
        self.nchan = int(nchan1) + int(nchan2)

        self.beam = self.cfg["BEAM_" + str(self.beam_id)]

        self.log(0, "MeerKATXposeDaemon::configure done")
Example #7
def getSMRBCapacity(stream_ids, quit_event, dl):

    smrbs = {}
    rval = 0

    for stream_id in stream_ids:

        if quit_event.isSet():
            continue
        port = SMRBDaemon.getDBMonPort(stream_id)
        sock = sockets.openSocket(dl, "localhost", port, 1)
        if sock:
            sock.send("smrb_status\n")
            data = sock.recv(65536)
            smrbs[stream_id] = json.loads(data)
            sock.close()

    return rval, smrbs
Example #8
def getSMRBCapacity (stream_ids, quit_event, dl):

  smrbs = {}
  rval = 0

  for stream_id in stream_ids:

    if quit_event.isSet(): 
      continue
    port = SMRBDaemon.getDBMonPort (stream_id)
    sock = sockets.openSocket (dl, "localhost", port, 1)
    if sock:
      try:
        sock.settimeout(1)
        sock.send ("smrb_status\n")
        data = sock.recv(65536)
      except socket.error, e:
        print "socket connection to SMRB failed"
      else:
        smrbs[stream_id] = json.loads(data)
      sock.close()

  return rval, smrbs
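
The snippet above is Python 2 (except socket.error, e and the print statement). A hedged Python 3 rendering of the same loop, with the standard socket module standing in for the project's sockets helper and a ports dict standing in for SMRBDaemon.getDBMonPort:

import json
import socket

def get_smrb_capacity (stream_ids, quit_event, ports):
    smrbs = {}
    for stream_id in stream_ids:
        if quit_event.is_set():
            continue
        try:
            sock = socket.create_connection(("localhost", ports[stream_id]),
                                            timeout=1)
        except OSError:
            continue
        try:
            sock.sendall(b"smrb_status\n")
            data = sock.recv(65536)
            smrbs[stream_id] = json.loads(data)
        except OSError:
            print("socket connection to SMRB failed")
        finally:
            sock.close()
    return 0, smrbs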
Example #9
  def getCommand (self, config_file):
    self.log (2, "MeerKATRecvDaemon::getCommand()")

    db_id = self.cfg["RECEIVING_DATA_BLOCK"]
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id)

    cmd = self.cfg["STREAM_BINARY"] + " " + config_file + " " + key \
            + " -b " + self.cpu_core \
            + " -c " + self.ctrl_port

    if int(self.local_config["NCHAN"]) <= 1024:
      if (self.cfg["CONFIG_NAME"] == "meerkat_cx5_1k2k" or
         self.cfg["CONFIG_NAME"] == "meerkat_cx4_1k2k"):
        cmd = cmd + " -f spead2k"
      else:
        cmd = cmd + " -f spead1k"
    else:
      cmd = cmd + " -f spead"

    return cmd
Example #10
File: uwb_proc.py Project: ajameson/spip
  def configure_child (self):

    self.stream_id = self.id

    # get the data block keys
    self.db_prefix  = self.cfg["DATA_BLOCK_PREFIX"]
    self.db_id      = self.cfg["PROCESSING_DATA_BLOCK"]
    self.num_stream = self.cfg["NUM_STREAM"]
    self.cpu_core   = self.cfg["STREAM_PROC_CORE_" + self.stream_id]

    self.db_key = SMRBDaemon.getDBKey (self.db_prefix, self.stream_id, self.num_stream, self.db_id)

    self.log (2, "UWBProcDaemon::configure db_key=" + self.db_key)

    # GPU to use for signal processing
    self.gpu_id = self.cfg["GPU_ID_" + str(self.id)]

    (host, self.beam_id, self.subband_id) = self.cfg["STREAM_" + self.stream_id].split(":")
    (self.cfreq, self.bw, self.nchan) = self.cfg["SUBBAND_CONFIG_" + self.subband_id].split(":")
    self.beam = self.cfg["BEAM_" + str(self.beam_id)]

    self.log (2, "UWBProcDaemon::configure done")
Example #11
    def main(self):

        self.log(2, "UWBProcDaemon::main configure_child()")
        self.configure_child()

        self.log(2, "UWBProcDaemon::main wait_for_smrb()")
        SMRBDaemon.waitForSMRB(self.db_key, self)

        if self.quit_event.isSet():
            self.log(
                -1,
                "UWBProcDaemon::main quit event was set after waiting for SMRB creation"
            )
            return

        # continuously run the main command waiting on the SMRB
        while (not self.quit_event.isSet()):

            # wait for the header to determine if folding is required
            cmd = "dada_header -k " + self.db_key + " -t " + self.tag
            self.log(2, "UWBProcDaemon::main " + cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            # check that the command succeeded and returned a header
            if rval != 0:
                time.sleep(0.1)
                if self.quit_event.isSet():
                    self.log(
                        2, "UWBProcDaemon::main " + cmd +
                        " failed, but quit_event true")
                else:
                    self.log(-2, "UWBProcDaemon::main " + cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:

                self.log(-2, "UWBProcDaemon::main header was empty")
                self.quit_event.set()

            else:

                self.log(2, "UWBProcDaemon::main parsing header")
                self.header = Config.parseHeader(lines)

                # account for lower to upper sideband conversion
                if not abs(float(self.bw)) == float(self.header["BW"]):
                    self.log(
                        -1, "configured bandwidth [" + self.bw +
                        "] != self.header[" + self.header["BW"] + "]")
                if not float(self.cfreq) == float(self.header["FREQ"]):
                    self.log(
                        -1, "configured cfreq [" + self.cfreq +
                        "] != self.header[" + self.header["FREQ"] + "]")
                if not int(self.nchan) == int(self.header["NCHAN"]):
                    self.log(
                        -2, "configured nchan [" + self.nchan +
                        "] != self.header[" + self.header["NCHAN"] + "]")

                self.source = self.header["SOURCE"]
                self.utc_start = self.header["UTC_START"]

                # call the child class prepare method
                self.log(2, "UWBProcDaemon::main prepare()")
                valid = self.prepare()

                if valid:

                    # ensure the output directory exists
                    self.log(
                        2, "UWBProcDaemon::main creating out_dir: " +
                        self.out_dir)
                    if not os.path.exists(self.out_dir):
                        os.makedirs(self.out_dir, 0755)

                    # write the sub-bands header to the out_dir
                    header_file = self.out_dir + "/obs.header"
                    self.log(
                        2, "UWBProcDaemon::main writing obs.header to out_dir")
                    Config.writeDictToCFGFile(self.header, header_file)

                    # configure the output pipe
                    self.log(
                        2, "UWBProcDaemon::main configuring output log pipe")
                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])
                    log_pipe = LogSocket(self.log_prefix, self.log_prefix,
                                         str(self.id), "stream", log_host,
                                         log_port, int(DL))
                    log_pipe.connect()

                    # get any modifications to the environment
                    env = self.getEnvironment()

                    # add the binary command to the kill list
                    self.binary_list.append(self.cmd)

                    # create processing threads
                    self.log(
                        2, "UWBProcDaemon::main creating processing threads")
                    cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
                    proc_thread = UWBProcThread(self, cmd, log_pipe.sock, env,
                                                1)

                    # start processing threads
                    self.log(2,
                             "UWBProcDaemon::main starting processing thread")
                    proc_thread.start()

                    self.log(1, "START " + self.cmd)

                    # join processing threads
                    self.log(
                        2,
                        "UWBProcDaemon::main waiting for proc thread to terminate"
                    )
                    rval = proc_thread.join()
                    self.log(2, "UWBProcDaemon::main proc thread joined")

                    self.log(1, "END   " + self.cmd)

                    # remove the binary command from the list
                    self.binary_list.remove(self.cmd)

                    if rval:
                        self.log(-2, "UWBProcDaemon::main proc thread failed")
                        self.quit_event.set()

                    log_pipe.close()

                    # good practice in case the proc thread always fails immediately
                    time.sleep(1)

                else:

                    self.log(2, "MEERKATProcDaemon::main skip this processing")
                    time.sleep(10)

            self.log(2, "UWBProcDaemon::main processing loop completed")
Example #12
    def main(self):

        stream_id = self.id

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        db_id_in = self.cfg["PROCESSING_DATA_BLOCK"]
        db_id_out = self.cfg["SEND_DATA_BLOCK"]
        num_stream = self.cfg["NUM_STREAM"]
        cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

        db_key_in = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        db_id_in)
        db_key_out = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                         db_id_out)

        self.log(0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

        # create dspsr input file for the data block
        db_key_filename = "/tmp/spip_" + db_key_in + ".info"
        db_key_file = open(db_key_filename, "w")
        db_key_file.write("DADA INFO:\n")
        db_key_file.write("key " + db_key_in + "\n")
        db_key_file.close()

        gpu_id = self.cfg["GPU_ID_" + str(self.id)]
        prev_utc_start = ""

        (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")

        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

        # wait up to 10s for the SMRB to be created
        smrb_wait = 10
        cmd = "dada_dbmetric -k " + db_key_in
        self.binary_list.append(cmd)

        rval = 1
        while rval and smrb_wait > 0 and not self.quit_event.isSet():

            rval, lines = self.system(cmd)
            if rval:
                time.sleep(1)
            smrb_wait -= 1

        if rval:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                db_key_in)
            self.quit_event.set()

        else:

            while (not self.quit_event.isSet()):

                cmd = "dada_header -k " + db_key_in
                self.log(0, cmd)
                self.binary_list.append(cmd)
                rval, lines = self.system(cmd)
                self.binary_list.remove(cmd)

                # check that the command succeeded and returned a header
                if rval != 0:
                    if self.quit_event.isSet():
                        self.log(2, cmd + " failed, but quit_event true")
                    else:
                        self.log(-2, cmd + " failed")
                        self.quit_event.set()

                elif len(lines) == 0:

                    self.log(-2, "header was empty")
                    self.quit_event.set()

                else:

                    header = Config.parseHeader(lines)

                    utc_start = header["UTC_START"]
                    self.log(1, "UTC_START=" + header["UTC_START"])
                    self.log(1, "RESOLUTION=" + header["RESOLUTION"])

                    # default processing commands
                    fold_cmd = "dada_dbnull -s -k " + db_key_in
                    trans_cmd = "dada_dbnull -s -k " + db_key_out
                    search_cmd = "dada_dbnull -s -k " + db_key_in

                    if prev_utc_start == utc_start:
                        self.log(
                            -2, "UTC_START [" + utc_start +
                            "] repeated, ignoring observation")

                    else:
                        beam = self.cfg["BEAM_" + str(self.beam_id)]

                        if not float(bw) == float(header["BW"]):
                            self.log(
                                -1, "configured bandwidth [" + bw +
                                "] != header[" + header["BW"] + "]")
                        if not float(cfreq) == float(header["FREQ"]):
                            self.log(
                                -1, "configured cfreq [" + cfreq +
                                "] != header[" + header["FREQ"] + "]")
                        if not int(nchan) == int(header["NCHAN"]):
                            self.log(
                                -2, "configured nchan [" + nchan +
                                "] != header[" + header["NCHAN"] + "]")

                        source = header["SOURCE"]

                        # output directories
                        suffix = "/processing/" + beam + "/" + utc_start + "/" + source + "/" + cfreq
                        fold_dir = self.cfg["CLIENT_FOLD_DIR"] + suffix
                        trans_dir = self.cfg["CLIENT_TRANS_DIR"] + suffix
                        search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix

                        fold = False
                        search = False
                        trans = False

                        try:
                            fold = (header["PERFORM_FOLD"] == "1")
                            search = (header["PERFORM_SEARCH"] == "1")
                            trans = (header["PERFORM_TRANS"] == "1")
                        except KeyError as e:
                            fold = True
                            search = False
                            trans = False

                        if fold:
                            os.makedirs(fold_dir, 0755)
                            fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -overlap -minram 4000 -x 16384 -b 1024 -L 5 -no_dyn"
                            fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 512 -b 1024 -L 10 -no_dyn -skz -skzs 4 -skzm 128 -skz_no_tscr -skz_no_fscr"
                            #fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 2048 -b 1024 -Lmin 7 -L 8 -no_dyn"
                            fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -minram 2048 -x 1024 -b 1024 -L 8 -Lmin 7 -no_dyn"
                            #fold_cmd = "dada_dbdisk -k " + db_key_in + " -s -D " + fold_dir

                            header_file = fold_dir + "/obs.header"
                            Config.writeDictToCFGFile(header, header_file)

                        if search or trans:
                            os.makedirs(search_dir, 0755)
                            search_cmd = "digifil " + db_key_filename + " -c -B 10 -o " + utc_start + " .fil"
                            if trans:
                                search_cmd += " -k " + db_key_out

                        if trans and int(self.cfg["NUM_SUBBAND"]) == 1:
                            os.makedirs(trans_dir, 0755)
                            trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

                        # remember this UTC_START so repeats are ignored
                        prev_utc_start = utc_start

                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])

                    # setup output pipes
                    fold_log_pipe = LogSocket("fold_src", "fold_src",
                                              str(self.id), "stream", log_host,
                                              log_port, int(DL))

                    #trans_log_pipe  = LogSocket ("trans_src", "trans_src", str(self.id), "stream",
                    #                             log_host, log_port, int(DL))
                    #search_log_pipe = LogSocket ("search_src", "search_src", str(self.id), "stream",
                    #                             log_host, log_port, int(DL))

                    fold_log_pipe.connect()

                    self.binary_list.append(fold_cmd)
                    #self.binary_list.append (trans_cmd)
                    #self.binary_list.append (search_cmd)

                    # create processing threads
                    self.log(2, "creating processing threads")
                    cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
                    fold_thread = procThread(cmd, fold_dir, fold_log_pipe.sock,
                                             1)

                    #trans_thread = procThread (trans_cmd, self.log_sock.sock, 2)
                    #search_thread = procThread (search_cmd, self.log_sock.sock, 2)

                    # start processing threads
                    self.log(2, "starting processing threads")
                    self.log(1, "START " + fold_cmd)
                    fold_thread.start()
                    #trans_thread.start()
                    #search_thread.start()

                    # join processing threads
                    self.log(2, "waiting for fold thread to terminate")
                    rval = fold_thread.join()
                    self.log(2, "fold thread joined")
                    self.log(1, "END   " + fold_cmd)

                    # remove the binary command from the list
                    self.binary_list.remove(fold_cmd)

                    if rval:
                        self.log(-2, "fold thread failed")
                        self.quit_event.set()

                    #self.log (2, "joining trans thread")
                    #rval = trans_thread.join()
                    #self.log (2, "trans thread joined")
                    #if rval:
                    #  self.log (-2, "trans thread failed")
                    #  quit_event.set()

                    #self.log (2, "joining search thread")
                    #rval = search_thread.join()
                    #self.log (2, "search thread joined")
                    #if rval:
                    #  self.log (-2, "search thread failed")
                    #  quit_event.set()

                    fold_log_pipe.close()
                    #trans_log_pipe.close()
                    #search_log_pipe.close()

                self.log(1, "processing completed")
Example #13
File: uwb_proc.py Project: ajameson/spip
  def main (self):

    self.log (2, "UWBProcDaemon::main configure_child()")
    self.configure_child()

    self.log (2, "UWBProcDaemon::main wait_for_smrb()")
    SMRBDaemon.waitForSMRB(self.db_key, self)

    if self.quit_event.isSet():
      self.log (-1, "UWBProcDaemon::main quit event was set after waiting for SMRB creation")
      return

    # continuously run the main command waiting on the SMRB
    while (not self.quit_event.isSet()):

      # wait for the header to determine if folding is required
      cmd = "dada_header -k " + self.db_key + " -t " + self.tag
      self.log(2, "UWBProcDaemon::main " + cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd, 2, True)
      self.binary_list.remove (cmd)

      # check that the command succeeded and returned a header
      if rval != 0:
        time.sleep(0.1)
        if self.quit_event.isSet():
          self.log (2, "UWBProcDaemon::main " + cmd + " failed, but quit_event true")
        else:
          self.log (-2, "UWBProcDaemon::main " + cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        
        self.log (-2, "UWBProcDaemon::main header was empty")
        self.quit_event.set()
        
      else:

        self.log (2, "UWBProcDaemon::main parsing header")
        self.header = Config.parseHeader (lines)

        # account for lower to upper sideband conversion
        if not abs(float(self.bw)) == float(self.header["BW"]):
          self.log (-1, "configured bandwidth ["+self.bw+"] != self.header["+self.header["BW"]+"]")
        if not float(self.cfreq) == float(self.header["FREQ"]):
          self.log (-1, "configured cfreq ["+self.cfreq+"] != self.header["+self.header["FREQ"]+"]")
        if not int(self.nchan) == int(self.header["NCHAN"]):
          self.log (-2, "configured nchan ["+self.nchan+"] != self.header["+self.header["NCHAN"]+"]")

        self.source = self.header["SOURCE"]
        self.utc_start = self.header["UTC_START"]

        # call the child class prepare method
        self.log (2, "UWBProcDaemon::main prepare()")
        valid = self.prepare()

        if valid:

          # ensure the output directory exists
          self.log (2, "UWBProcDaemon::main creating out_dir: " + self.out_dir)
          if not os.path.exists (self.out_dir):
            os.makedirs (self.out_dir, 0755)

          # write the sub-bands header to the out_dir
          header_file = self.out_dir + "/obs.header"
          self.log (2, "UWBProcDaemon::main writing obs.header to out_dir")
          Config.writeDictToCFGFile (self.header, header_file)
    
          # configure the output pipe
          self.log (2, "UWBProcDaemon::main configuring output log pipe")
          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])
          log_pipe = LogSocket (self.log_prefix, self.log_prefix,
                                str(self.id), "stream",
                                log_host, log_port, int(DL))
          log_pipe.connect()

          # get any modifications to the environment
          env = self.getEnvironment()

          # add the binary command to the kill list
          self.binary_list.append (self.cmd)

          # create processing threads
          self.log (2, "UWBProcDaemon::main creating processing threads")      
          cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
          proc_thread = UWBProcThread (self, cmd, log_pipe.sock, env, 1)

          # start processing threads
          self.log (2, "UWBProcDaemon::main starting processing thread")
          proc_thread.start()

          self.log (1, "START " + self.cmd)

          # join processing threads
          self.log (2, "UWBProcDaemon::main waiting for proc thread to terminate")
          rval = proc_thread.join() 
          self.log (2, "UWBProcDaemon::main proc thread joined")

          self.log (1, "END   " + self.cmd)

          # remove the binary command from the list
          self.binary_list.remove (self.cmd)

          if rval:
            self.log (-2, "UWBProcDaemon::main proc thread failed")
            self.quit_event.set()

          log_pipe.close()

          # good practice in case the proc thread always fails immediately
          time.sleep(1)

        else:

          self.log (2, "MEERKATProcDaemon::main skip this processing")
          time.sleep(10)

      self.log (2, "UWBProcDaemon::main processing loop completed")
Example #14
    def main(self):

        if self.gen_histogram:
            self.hg_plot = HistogramPlot()
            self.valid_plots.append("histogram")

        if self.gen_bandpass:
            self.bp_plot = BandpassPlot()
            self.valid_plots.append("bandpass")

        if self.gen_timeseries:
            self.ts_plot = TimeseriesPlot()
            self.valid_plots.append("timeseries")

        if self.gen_freqtime:
            self.ft_plot = FreqTimePlot()
            self.valid_plots.append("freqtime")

        # stats files are stored in flat directory structure
        # stats_dir / beam / cfreq

        if not os.path.exists(self.processing_dir):
            os.makedirs(self.processing_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        db_id = self.cfg["RECEIVING_DATA_BLOCK"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))
        self.db_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                          db_id)
        self.debug("db_key=" + self.db_key)

        # start dbstats in a separate thread
        self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
        self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

        if not os.path.exists(self.stat_dir):
            os.makedirs(self.stat_dir, 0755)
        if not os.path.exists(self.archived_dir):
            os.makedirs(self.archived_dir, 0755)

        # configure the histogram plot with all channels included
        if self.gen_histogram:
            self.hg_plot.configure(-1, self.histogram_abs_xmax)

        log = False
        zap = False
        transpose = False
        # configure the freq v time plot
        if self.gen_freqtime:
            self.ft_plot.configure(log, zap, transpose)

        # configure the bandpass plot
        log = True
        if self.gen_bandpass:
            self.bp_plot.configure(log, zap, transpose)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        # stat will use the stream config file created for the recv command
        self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
        while (not os.path.exists(self.stream_config_file)):
            self.debug("waiting for stream_config file [" +
                       self.stream_config_file + "] to be created by recv")
            time.sleep(1)

        self.debug("wait_for_smrb()")
        smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

        if not smrb_exists:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                self.db_key)
            self.quit_event.set()
            return

        stat_cmd = self.build_cmd()

        while (not self.quit_event.isSet()):

            process_stats = True

            # wait for the header to determine when dbstats should run
            cmd = "dada_header -k " + self.db_key + " -t stat"
            self.info(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd)
            self.binary_list.remove(cmd)

            # check that the command succeeded and returned a header
            if rval != 0:
                if self.quit_event.isSet():
                    self.debug(cmd + " failed, but quit_event true")
                else:
                    self.error(cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:
                self.error("header was empty")
                self.quit_event.set()

            else:
                self.debug("parsing header")
                self.header = Config.parseHeader(lines)

                try:
                    if self.header["ZERO_COPY"] == "1":
                        process_stats = False
                except KeyError:
                    self.debug("ZERO_COPY did not exist in header")

            if self.quit_event.isSet():
                self.debug("quit event set, exiting loop")
                continue

            if not process_stats:
                self.debug("not analyzing stats due to ZERO_COPY")
                time.sleep(5)
                continue

            # create a log pipe for the stats command
            stat_log_pipe = LogSocket("stat_src", "stat_src", str(self.id),
                                      "stream", log_host, log_port, int(DL))

            # connect up the log file output
            stat_log_pipe.connect()

            # add this binary to the list of active commands
            kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
            self.info("kill_cmd=" + kill_cmd)
            self.binary_list.append(kill_cmd)

            self.log(1, "START " + stat_cmd)

            # initialize the threads
            stat_thread = dbstatsThread(stat_cmd, self.stat_dir,
                                        stat_log_pipe.sock, 2)

            self.debug("cmd=" + stat_cmd)

            self.debug("starting stat thread")
            stat_thread.start()
            self.debug("stat thread started")

            pref_freq = 0

            while stat_thread.is_alive() and not self.quit_event.isSet():

                # get a list of all the files in stat_dir
                files = os.listdir(self.stat_dir)

                self.debug("found " + str(len(files)) + " in " + self.stat_dir)

                # if stat files exist in the directory
                if len(files) > 0:
                    if self.gen_histogram:
                        self.process_hg(pref_freq)
                    if self.gen_bandpass:
                        self.process_bp(pref_freq)
                    if self.gen_freqtime:
                        self.process_ft(pref_freq)
                    if self.gen_timeseries:
                        self.process_ts()
                    self.process_ms()

                    self.results["lock"].acquire()

                    pref_freq = self.pref_freq
                    self.results["timestamp"] = times.getCurrentTime()
                    self.results["valid"] = self.ms_valid
                    if self.gen_histogram:
                        self.results["valid"] |= self.hg_valid
                    if self.gen_timeseries:
                        self.results["valid"] |= self.ts_valid
                    if self.gen_freqtime:
                        self.results["valid"] |= self.ft_valid
                    if self.gen_bandpass:
                        self.results["valid"] |= self.bp_valid

                    self.results["lock"].release()

                time.sleep(5)

            self.debug("joining stat thread")
            rval = stat_thread.join()
            self.debug("stat thread joined")

            self.log(1, "END   " + stat_cmd)

            if rval:
                self.log(-2, "stat thread failed")
                self.quit_event.set()
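
The acquire/release pair around self.results can also be written as a with statement, which releases the lock even if one of the plot steps raises. A minimal sketch, assuming results["lock"] is a threading.Lock:

import threading

results = {"lock": threading.Lock(), "valid": False}

with results["lock"]:
    # updates here are visible to other threads once the lock is released,
    # and the lock is released even if an exception is raised
    results["valid"] = True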
Example #15
  def main (self):

    db_id = self.cfg["RECEIVING_DATA_BLOCK"]
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    self.db_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id)

    # wait for the SMRB to exist before continuing
    self.log(2, "main: SMRBDaemon.waitForSMRB()")
    smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

    # don't proceed without an SMRB
    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    # configuration file for recvsim stream
    self.local_config = self.getConfiguration()
    self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"

    self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
    self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))

    self.configured = True
    self.running = False
    env = self.getEnvironment()

    # external control loop to allow for reconfiguration of RECV
    while not self.quit_event.isSet():

      while not self.quit_event.isSet() and not self.configured:
        self.log(3, "main: waiting for configuration")
        sleep(1)

      if self.quit_event.isSet():
        return

      Config.writeDictToCFGFile (self.local_config, self.local_config_file)
      self.log(3, "main: configured")

      cmd = self.getCommand(self.local_config_file)
      self.binary_list.append (cmd)

      self.log(3, "main: sleep(1)")
      sleep(1)

      self.log(3, "main: log_pipe = LogSocket(recvsim_src))")
      log_pipe = LogSocket ("recvsim_src", "recvsim_src", str(self.id), "stream",
                            self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                            int(DL))

      self.log(3, "main: log_pipe.connect()")
      log_pipe.connect()

      self.log(3, "main: sleep(1)")
      sleep(1)

      self.running = True

      self.log(1, "START " + cmd)

      # this should be a persistent / blocking command 
      rval = self.system_piped (cmd, log_pipe.sock)

      self.running = False

      self.binary_list.remove (cmd)

      self.log(1, "END   " + cmd)

      if rval:
        if not self.quit_event.isSet():
          self.log (-2, cmd + " failed with return value " + str(rval))

      log_pipe.close ()
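
Config.writeDictToCFGFile is part of spip and its output format is not shown here. Purely as a labeled assumption, a minimal stand-in that writes one whitespace-separated KEY VALUE pair per line:

def write_dict_to_cfg_file (config, filename):
  # ASSUMPTION: the real Config.writeDictToCFGFile emits whitespace-
  # separated KEY VALUE lines; this stand-in is illustrative only
  fptr = open(filename, "w")
  for key in sorted(config.keys()):
    fptr.write(key + " " + str(config[key]) + "\n")
  fptr.close()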
Example #16
  def main (self):

    stream_id = self.id

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id_in  = self.cfg["PROCESSING_DATA_BLOCK"]
    db_id_out = self.cfg["SEND_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

    db_key_in = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id_in)
    db_key_out = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id_out)

    self.log (0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

    # create dspsr input file for the data block
    db_key_filename = "/tmp/spip_" + db_key_in + ".info"
    db_key_file = open (db_key_filename, "w")
    db_key_file.write("DADA INFO:\n")
    db_key_file.write("key " +  db_key_in + "\n")
    db_key_file.close()

    gpu_id = self.cfg["GPU_ID_" + str(self.id)]
    prev_utc_start = ""

    (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")

    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

    # wait up to 10s for the SMRB to be created
    smrb_wait = 10
    cmd = "dada_dbmetric -k " + db_key_in
    self.binary_list.append (cmd)

    rval = 1
    while rval and smrb_wait > 0 and not self.quit_event.isSet():

      rval, lines = self.system (cmd)
      if rval:
        time.sleep(1)
      smrb_wait -= 1

    if rval:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + db_key_in)
      self.quit_event.set()

    else:

      while (not self.quit_event.isSet()):

        cmd = "dada_header -k " + db_key_in
        self.log(0, cmd)
        self.binary_list.append (cmd)
        rval, lines = self.system (cmd)
        self.binary_list.remove (cmd)

        # check that the command succeeded and returned a header
        if rval != 0:
          if self.quit_event.isSet():
            self.log (2, cmd + " failed, but quit_event true")
          else:
            self.log (-2, cmd + " failed")
            self.quit_event.set()

        elif len(lines) == 0:
        
          self.log (-2, "header was empty")
          self.quit_event.set()
        
        else:

          header = Config.parseHeader (lines)

          utc_start = header["UTC_START"]
          self.log (1, "UTC_START=" + header["UTC_START"])
          self.log (1, "RESOLUTION=" + header["RESOLUTION"])

          # default processing commands
          fold_cmd = "dada_dbnull -s -k " + db_key_in
          trans_cmd = "dada_dbnull -s -k " + db_key_out
          search_cmd = "dada_dbnull -s -k " + db_key_in

          if prev_utc_start == utc_start:
            self.log (-2, "UTC_START [" + utc_start + "] repeated, ignoring observation")
          
          else: 
            beam = self.cfg["BEAM_" + str(self.beam_id)]

            if not float(bw) == float(header["BW"]):
              self.log (-1, "configured bandwidth ["+bw+"] != header["+header["BW"]+"]")
            if not float(cfreq) == float(header["FREQ"]):
              self.log (-1, "configured cfreq ["+cfreq+"] != header["+header["FREQ"]+"]")
            if not int(nchan) == int(header["NCHAN"]):
              self.log (-2, "configured nchan ["+nchan+"] != header["+header["NCHAN"]+"]")

            source = header["SOURCE"]

            # output directories 
            suffix     = "/processing/" + beam + "/" + utc_start + "/" + source + "/" + cfreq
            fold_dir   = self.cfg["CLIENT_FOLD_DIR"]   + suffix
            trans_dir  = self.cfg["CLIENT_TRANS_DIR"]  + suffix
            search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix
            
            fold = False
            search = False
            trans = False 
          
            try:
              fold = (header["PERFORM_FOLD"] == "1")
              search = (header["PERFORM_SEARCH"] == "1")
              trans = (header["PERFORM_TRANS"] == "1")
            except KeyError as e:
              fold = True
              search = False
              trans = False 

            if fold:
              os.makedirs (fold_dir, 0755)
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -overlap -minram 4000 -x 16384 -b 1024 -L 5 -no_dyn"
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 512 -b 1024 -L 10 -no_dyn -skz -skzs 4 -skzm 128 -skz_no_tscr -skz_no_fscr"
              #fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 2048 -b 1024 -Lmin 7 -L 8 -no_dyn"
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -minram 2048 -x 1024 -b 1024 -L 8 -Lmin 7 -no_dyn"
              #fold_cmd = "dada_dbdisk -k " + db_key_in + " -s -D " + fold_dir

              header_file = fold_dir + "/obs.header"
              Config.writeDictToCFGFile (header, header_file)

            if search or trans:
              os.makedirs (search_dir, 0755)
              search_cmd = "digifil " + db_key_filename + " -c -B 10 -o " + utc_start + " .fil"
              if trans:
                search_cmd += " -k " + db_key_out

            if trans and int(self.cfg["NUM_SUBBAND"]) == 1:
              os.makedirs (trans_dir, 0755)
              trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

            # remember this UTC_START so repeats are ignored
            prev_utc_start = utc_start

          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])

          # setup output pipes
          fold_log_pipe = LogSocket ("fold_src", "fold_src", str(self.id), "stream",
                                       log_host, log_port, int(DL))

          #trans_log_pipe  = LogSocket ("trans_src", "trans_src", str(self.id), "stream",
          #                             log_host, log_port, int(DL))
          #search_log_pipe = LogSocket ("search_src", "search_src", str(self.id), "stream",
          #                             log_host, log_port, int(DL))

          fold_log_pipe.connect()

          self.binary_list.append (fold_cmd)
          #self.binary_list.append (trans_cmd)
          #self.binary_list.append (search_cmd)

          # create processing threads
          self.log (2, "creating processing threads")      
          cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
          fold_thread = procThread (cmd, fold_dir, fold_log_pipe.sock, 1)

          #trans_thread = procThread (trans_cmd, self.log_sock.sock, 2)
          #search_thread = procThread (search_cmd, self.log_sock.sock, 2)

          # start processing threads
          self.log (2, "starting processing threads")      
          self.log (1, "START " + fold_cmd)      
          fold_thread.start()
          #trans_thread.start()
          #search_thread.start()

          # join processing threads
          self.log (2, "waiting for fold thread to terminate")
          rval = fold_thread.join() 
          self.log (2, "fold thread joined")
          self.log (1, "END   " + fold_cmd)      

          # remove the binary command from the list
          self.binary_list.remove (fold_cmd)

          if rval:
            self.log (-2, "fold thread failed")
            self.quit_event.set()

          #self.log (2, "joining trans thread")
          #rval = trans_thread.join() 
          #self.log (2, "trans thread joined")
          #if rval:
          #  self.log (-2, "trans thread failed")
          #  quit_event.set()

          #self.log (2, "joining search thread")
          #rval = search_thread.join() 
          #self.log (2, "search thread joined")
          #if rval:
          #  self.log (-2, "search thread failed")
          #  quit_event.set()

          fold_log_pipe.close()
          #trans_log_pipe.close()
          #search_log_pipe.close()

        self.log (1, "processing completed")
Example #17
  def main (self):

    in_ids = self.cfg["RECEIVING_DATA_BLOCKS"].split(" ")
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    pola_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, in_ids[0])
    polb_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, in_ids[1])
    out_id = self.cfg["PROCESSING_DATA_BLOCK"]
    out_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, out_id)
    self.log(2, "pola_key="+pola_key+" polb_key="+polb_key+" out_key="+out_key)

    # wait up to 10s for the SMRB to be created
    smrb_wait = 10
    cmd = "dada_dbmetric -k " + out_key
    self.binary_list.append (cmd)

    rval = 1
    while rval and smrb_wait > 0 and not self.quit_event.isSet():
      self.log(2, "MergeDaemon::main smrb_wait="+str(smrb_wait))
      rval, lines = self.system (cmd)
      self.log(2, "MergeDaemon::main rval="+str(rval) + " lines="+str(lines))
      if rval:
        self.log(2, "waiting for SMRB to be created")
        sleep(1)
      smrb_wait  = smrb_wait - 1

    if rval:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " + "key=" + out_key)
      self.quit_event.set()

    else:

      # get the site configuration: settings that the backend
      # configuration does not affect
      config = Config()

      # generate the front-end configuration file for this stream;
      # this does not change from observation to observation
      local_config = config.getStreamConfigFixed(self.id)

      # write this config to file
      config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
      Config.writeDictToCFGFile (local_config, config_file)

      stream_core = self.cfg["STREAM_CORE_" + str(self.id)]  

      # TODO CPU/RAM affinity
      cmd = "dada_dbmergedb -w -s " + pola_key + " " + polb_key + " "  + out_key + " -v"
      self.binary_list.append (cmd)

      self.log(2, "MergeDaemon::main log_pipe = LogSocket(merge_src))")
      log_pipe = LogSocket ("merge_src", "merge_src", str(self.id), "stream",
                            self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                            int(DL))

      self.log(2, "MergeDaemon::main log_pipe.connect()")
      log_pipe.connect()

      while not self.quit_event.isSet():

        self.log(2, "MergeDaemon::main sleep(1)")
        sleep(1)
     
        # this should be a persistent / blocking command 
        self.log(2, "MergeDaemon::main " + cmd)
        rval = self.system_piped (cmd, log_pipe.sock)
        if rval:
          self.log (-2, cmd + " failed with return value " + str(rval))
          self.quit_event.set()

      self.log(2, "MergeDaemon::main closing log_pipe")
      log_pipe.close ()
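
The dada_dbmetric polling loop appears here and in several other daemons. A sketch of it extracted into a reusable helper (system stands in for the daemon's command runner; the helper name is hypothetical):

from time import sleep

def wait_for_data_block (system, db_key, quit_event, retries=10):
  # poll dada_dbmetric until the data block exists or retries run out
  cmd = "dada_dbmetric -k " + db_key
  rval = 1
  while rval and retries > 0 and not quit_event.isSet():
    rval, lines = system (cmd)
    if rval:
      sleep(1)
    retries -= 1
  return rval == 0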
Example #18
  def main (self):

    if self.gen_histogram:
      self.hg_plot = HistogramPlot()
      self.valid_plots.append("histogram")

    if self.gen_bandpass:
      self.bp_plot = BandpassPlot()
      self.valid_plots.append("bandpass")

    if self.gen_timeseries:
      self.ts_plot = TimeseriesPlot()
      self.valid_plots.append("timeseries")

    if self.gen_freqtime:
      self.ft_plot = FreqTimePlot()
      self.valid_plots.append("freqtime")

    # stats files are stored in flat directory structure
    # stats_dir / beam / cfreq

    if not os.path.exists(self.processing_dir):
      os.makedirs(self.processing_dir, 0755) 

    # get the data block keys
    db_prefix  = self.cfg["DATA_BLOCK_PREFIX"]
    db_id      = self.cfg["RECEIVING_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    stream_id  = str(self.id)
    self.debug("stream_id=" + str(self.id))
    self.db_key = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id)
    self.debug("db_key=" + self.db_key)

    # start dbstats in a separate thread
    self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
    self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

    if not os.path.exists(self.stat_dir):
      os.makedirs(self.stat_dir, 0755)
    if not os.path.exists(self.archived_dir):
      os.makedirs(self.archived_dir, 0755)

    # configure the histogram plot with all channels included
    if self.gen_histogram:
      self.hg_plot.configure (-1, self.histogram_abs_xmax)

    log = False
    zap = False
    transpose = False
    # configure the freq v time plot
    if self.gen_freqtime:
      self.ft_plot.configure (log, zap, transpose)

    # configure the bandpass plot
    log = True
    if self.gen_bandpass:
      self.bp_plot.configure (log, zap, transpose)

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    # stat will use the stream config file created for the recv command
    self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
    while (not os.path.exists(self.stream_config_file)):
      self.debug("waiting for stream_config file [" + self.stream_config_file +"] to be created by recv")
      time.sleep(1)    

    self.debug("wait_for_smrb()")
    smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    stat_cmd = self.build_cmd()

    while (not self.quit_event.isSet()):

      process_stats = True

      # wait for the header to determine when dbstats should run
      cmd = "dada_header -k " + self.db_key + " -t stat"
      self.info(cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd)
      self.binary_list.remove (cmd)

      # check that the command succeeded and returned a header
      if rval != 0:
        if self.quit_event.isSet():
          self.debug(cmd + " failed, but quit_event true")
        else:
          self.error(cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        self.error("header was empty")
        self.quit_event.set()

      else:
        self.debug("parsing header")
        self.header = Config.parseHeader (lines)

        try:
          if self.header["ZERO_INPUT"] == "1":
            process_stats = False
        except KeyError:
          self.debug("ZERO_INPUT did not exist in header")

      if self.quit_event.isSet():
        self.debug("quit event set, exiting loop")
        continue

      if not process_stats:
        self.debug("not analyzing stats due to ZERO_INPUT")
        time.sleep(5)
        continue

      # create a log pipe for the stats command
      stat_log_pipe   = LogSocket ("stat_src", "stat_src", str(self.id), "stream",
                                   log_host, log_port, int(DL))

      # connect up the log file output
      stat_log_pipe.connect()

      # add this binary to the list of active commands
      kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
      self.info("kill_cmd=" + kill_cmd)
      self.binary_list.append (kill_cmd)

      self.log (1, "START " + stat_cmd)

      # initialize the threads
      stat_thread = dbstatsThread (stat_cmd, self.stat_dir, stat_log_pipe.sock, 2)

      self.debug("cmd=" + stat_cmd)

      self.debug("starting stat thread")
      stat_thread.start()
      self.debug("stat thread started")

      pref_freq = 0

      while stat_thread.is_alive() and not self.quit_event.isSet():

        # get a list of all the files in stat_dir
        files = os.listdir (self.stat_dir)

        self.debug("found " + str(len(files)) + " in " + self.stat_dir)

        # if stat files exist in the directory
        if len(files) > 0:
          if self.gen_histogram:
            self.process_hg (pref_freq)
          if self.gen_bandpass:
            self.process_bp (pref_freq)
          if self.gen_freqtime:
            self.process_ft (pref_freq)
          if self.gen_timeseries:
            self.process_ts ()
          self.process_ms ()

          self.results["lock"].acquire()

          pref_freq = self.pref_freq
          self.results["timestamp"] = times.getCurrentTime()
          self.results["valid"] = self.ms_valid
          if self.gen_histogram:
            self.results["valid"] |= self.hg_valid
          if self.gen_timeseries:
            self.results["valid"] |= self.ts_valid
          if self.gen_freqtime:
            self.results["valid"] |= self.ft_valid
          if self.gen_bandpass:
            self.results["valid"] |= self.bp_valid

          self.results["lock"].release()

        time.sleep(5)

      self.debug("joining stat thread")
      rval = stat_thread.join()
      self.debug("stat thread joined")

      self.log (1, "END   " + stat_cmd)

      if rval:
        self.log (-2, "stat thread failed")
        self.quit_event.set()
Example #19
    def main(self):

        if not os.path.exists(self.proc_dir):
            os.makedirs(self.proc_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))

        # 3 data blocks
        in_id = self.cfg["RECEIVING_DATA_BLOCK"]
        trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]

        # 3 data block keys
        in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
        trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        trans_id)
        out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        self.debug("SMRBDaemon.waitForSMRB()")
        smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

        if not smrb_exists:
            self.error("smrb["+str(self.id)+"] no valid SMRB with " +
                       "key=" + self.db_key)
            self.quit_event.set()
            return

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

        # this preprocessing command will not change from observation to observation
        preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
            trans_key + " " + out_key + " -d " + \
            self.cfg["GPU_ID_" + stream_id]

        tag = "preproc" + stream_id

        # enter the main loop
        while (not self.quit_event.isSet()):

            # wait for the header to acquire the processing parameters
            cmd = "dada_header -k " + in_key + " -t " + tag
            self.debug(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0 or self.quit_event.isSet():
                return

            self.debug("parsing header")
            header = Config.parseHeader(lines)

            cmd = preproc_cmd

            utc_start = header["UTC_START"]
            source = header["SOURCE"]
            freq = header["FREQ"]

            # directory in which to run preprocessor
            proc_dir = self.proc_dir + "/" + utc_start + "/" + source + "/" + \
                freq

            if not os.path.exists(proc_dir):
                os.makedirs(proc_dir, 0o755)

            # write the header to the proc_dir
            header_file = proc_dir + "/obs.header"
            self.debug("writing obs.header to out_dir")
            Config.writeDictToCFGFile(header, header_file)

            run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

            # presence of an RFI reference signal is indicated by NPOL == 3
            have_rfi_reference_pol = (int(header["NPOL"]) == 3)

            # presence of a calibration signal
            run_calibration = (header["CAL_SIGNAL"] == "1")

            # run the transients processor
            # run_transients = (header["TRANSIENTS"] == "1")
            run_transients = False

            # RFI reference pol is assumed to be last pol
            if have_rfi_reference_pol:
                rfi_reference_pol = int(header["NPOL"]) - 1
                self.info("Header NPOL=" + str(int(header["NPOL"])) +
                          " RFI reference signal present in pol " +
                          str(rfi_reference_pol))
                cmd = cmd + " -r " + str(rfi_reference_pol)

            if run_adaptive_filter:
                self.info("Adaptive filter active")
                cmd = cmd + " -a "

            if run_calibration:
                self.info("Calibration active")
                try:
                    avg_time = header["TSYS_AVG_TIME"]
                except KeyError:
                    avg_time = "10"
                try:
                    freq_res = header["TSYS_FREQ_RES"]
                except KeyError:
                    freq_res = "1"
                cmd = cmd + " -c " + avg_time + " -e " + freq_res

            if run_transients:
                self.info("Transients active")
                cmd = cmd + " -f " + header["TRANS_TSAMP"]

            # AJ todo check the channelisation limits with Nuer
            if run_adaptive_filter or run_calibration or run_transients:
                cmd = cmd + " -n 1024"

            # create a log pipe for the preprocessing command
            log_pipe = LogSocket("preproc_src", "preproc_src",
                                 str(self.id), "stream", log_host,
                                 log_port, int(DL))

            # connect up the log file output
            log_pipe.connect()

            # add this binary to the list of active commands
            self.binary_list.append("uwb_preprocessing_pipeline " + in_key)

            self.info("START " + cmd)

            # initialize the threads
            preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

            self.debug("starting preproc thread")
            preproc_thread.start()
            self.debug("preproc thread started")

            self.debug("joining preproc thread")
            rval = preproc_thread.join()
            self.debug("preproc thread joined")

            self.info("END     " + cmd)

            if rval:
                self.error("preproc thread failed")
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
                self.quit_event.set()
            else:
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
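
Config.parseHeader is referenced throughout but not shown. DADA headers are plain "KEY value" text, so a compatible parser can be sketched as below; the exact comment and whitespace handling are assumptions.

def parseHeader(lines):
    # hypothetical sketch: turn 'KEY value' lines from dada_header
    # into a dict, ignoring blank lines and '#' comments
    header = {}
    for line in lines:
        line = line.strip()
        if len(line) == 0 or line.startswith("#"):
            continue
        parts = line.split(None, 1)
        if len(parts) == 2:
            header[parts[0]] = parts[1].strip()
    return header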
Example #20
    def main(self):

        db_id = self.cfg["PROCESSING_DATA_BLOCK"]
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        self.db_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                          db_id)
        self.log(0, "db_key=" + self.db_key)

        # wait up to 10s for the SMRB to be created
        smrb_wait = 10
        cmd = "dada_dbmetric -k " + self.db_key
        self.binary_list.append(cmd)

        rval = 1
        while rval and smrb_wait > 0 and not self.quit_event.isSet():

            rval, lines = self.system(cmd)
            if rval:
                sleep(1)
            smrb_wait -= 1

        if rval:
            self.log(-2, "smrb[" + str(self.id) +
                     "] no valid SMRB with key=" + self.db_key)
            self.quit_event.set()

        else:

            local_config = self.getConfiguration()

            self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
            self.ctrl_port = str(
                int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))

            # write this config to file
            local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
            self.log(1, "main: creating " + local_config_file)
            Config.writeDictToCFGFile(local_config, local_config_file)

            env = self.getEnvironment()

            cmd = self.getCommand(local_config_file)
            self.binary_list.append(cmd)

            self.log(3, "main: sleep(1)")
            sleep(1)

            self.log(3, "main: log_pipe = LogSocket(recvsim_src))")
            log_pipe = LogSocket("recvsim_src", "recvsim_src", str(self.id),
                                 "stream", self.cfg["SERVER_HOST"],
                                 self.cfg["SERVER_LOG_PORT"], int(DL))

            self.log(3, "main: log_pipe.connect()")
            log_pipe.connect()

            self.log(3, "main: sleep(1)")
            sleep(1)

            # this should be a persistent / blocking command
            rval = self.system_piped(cmd, log_pipe.sock)

            if rval:
                self.log(-2, cmd + " failed with return value " + str(rval))
            self.quit_event.set()

            log_pipe.close()
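
SMRBDaemon.getDBKey is the one helper every example relies on. PSRDADA data blocks are addressed by hexadecimal System V IPC keys, so one plausible sketch is below; the offset arithmetic is an assumption (PSRDADA reserves two keys per block, one for the header ring and one for the data ring, hence the factor of 2).

def getDBKey(prefix, stream_id, num_stream, db_id):
    # hypothetical sketch: derive a unique hex shared-memory key from
    # the configured prefix plus a per-stream, per-block offset
    index = (int(db_id) * int(num_stream) + int(stream_id)) * 2
    return prefix + "%03x" % index

Under these assumptions, prefix "a" with stream 0 of 4 and data block 1 would yield "a008".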
Example #22
    def read_obs(self, xml):
        self.log(1, "KAT7ReadDaemon::read_obs()")
        self.log(1, "KAT7ReadDaemon::read_obs xml=" + str(xml))

        # launch 2 threads, one for each pol and wait for them to finish
        pol1_dir = xml['pol1']['@dir']
        pol2_dir = xml['pol2']['@dir']

        pol1_nfiles = xml['pol1']['@nfiles']
        pol2_nfiles = xml['pol2']['@nfiles']

        pol1_files = []
        pol2_files = []

        for file in xml['pol1']['file']:
            pol1_files.append(pol1_dir + "/" + file['#text'])
        for file in xml['pol2']['file']:
            pol2_files.append(pol2_dir + "/" + file['#text'])

        # launch a thread for each dbdisk to read the .dada files into
        # a dada buffer
        in_ids = self.cfg["RECEIVING_DATA_BLOCKS"].split(" ")
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        pol1_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                       in_ids[0])
        pol2_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                       in_ids[1])

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        # create a log pipe for each diskdb thread
        pol1_log_pipe = LogSocket("pol1_src", "pol1_src", str(self.id),
                                  "stream", log_host, log_port, int(DL))
        pol2_log_pipe = LogSocket("pol2_src", "pol2_src", str(self.id),
                                  "stream", log_host, log_port, int(DL))

        pol1_log_pipe.connect()
        pol2_log_pipe.connect()

        self.binary_list.append("dada_diskdb -k " + pol1_key)
        self.binary_list.append("dada_diskdb -k " + pol2_key)

        # create processing threads
        self.log(1, "creating processing threads")
        pol1_thread = diskdbThread(self, pol1_key, pol1_files,
                                   pol1_log_pipe.sock)
        pol2_thread = diskdbThread(self, pol2_key, pol2_files,
                                   pol2_log_pipe.sock)

        # start processing threads
        self.log(1, "starting processing threads")
        pol1_thread.start()
        pol2_thread.start()

        # join processing threads
        self.log(2, "waiting for diskdb threads to terminate")
        rval = pol1_thread.join()
        self.log(2, "pol1 thread joined")
        if rval:
            self.log(-2, "pol1 thread failed")
            self.quit_event.set()

        rval = pol2_thread.join()
        self.log(2, "pol2 thread joined")
        if rval:
            self.log(-2, "pol2 thread failed")
            self.quit_event.set()
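
diskdbThread is not shown either. Given the call sites, a minimal sketch — one dada_diskdb invocation per polarisation, each input file appended with -f, the exit code handed back through join() — might be:

import threading

class diskdbThread(threading.Thread):
    # hypothetical sketch: read .dada files into a ring buffer with
    # dada_diskdb, logging via the parent daemon's piped system call
    def __init__(self, parent, key, files, pipe):
        threading.Thread.__init__(self)
        self.parent = parent
        self.key = key
        self.files = files
        self.pipe = pipe
        self.rval = -1

    def run(self):
        cmd = "dada_diskdb -k " + self.key
        for f in self.files:
            cmd = cmd + " -f " + f
        self.rval = self.parent.system_piped(cmd, self.pipe)

    def join(self):
        threading.Thread.join(self)
        return self.rval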
Example #23
    def main(self):

        in_ids = self.cfg["RECEIVING_DATA_BLOCKS"].split(" ")
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        pola_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                       in_ids[0])
        polb_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                       in_ids[1])
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]
        out_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream, out_id)
        self.log(
            2, "pola_key=" + pola_key + " polb_key=" + polb_key + " out_key=" +
            out_key)

        # wait up to 10s for the SMRB to be created
        smrb_wait = 10
        cmd = "dada_dbmetric -k " + out_key
        self.binary_list.append(cmd)

        rval = 1
        while rval and smrb_wait > 0 and not self.quit_event.isSet():
            self.log(2, "MergeDaemon::main smrb_wait=" + str(smrb_wait))
            rval, lines = self.system(cmd)
            self.log(
                2,
                "MergeDaemon::main rval=" + str(rval) + " lines=" + str(lines))
            if rval:
                self.log(2, "waiting for SMRB to be created")
                sleep(1)
            smrb_wait = smrb_wait - 1

        if rval:
            self.log(-2, "smrb[" + str(self.id) +
                     "] no valid SMRB with key=" + out_key)
            self.quit_event.set()

        else:

            # get the site configuration: settings the backend configuration
            # does not affect
            config = Config()

            # generate the front-end configuration file for this stream;
            # it does not change from observation to observation
            local_config = config.getStreamConfigFixed(self.id)

            # write this config to file
            config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
            Config.writeDictToCFGFile(local_config, config_file)

            stream_core = self.cfg["STREAM_CORE_" + str(self.id)]

            # TODO CPU/RAM affinity
            cmd = "dada_dbmergedb -w -s " + pola_key + " " + polb_key + " " + out_key + " -v"
            self.binary_list.append(cmd)

            self.log(2, "MergeDaemon::main log_pipe = LogSocket(merge_src))")
            log_pipe = LogSocket("merge_src", "merge_src", str(self.id),
                                 "stream", self.cfg["SERVER_HOST"],
                                 self.cfg["SERVER_LOG_PORT"], int(DL))

            self.log(2, "MergeDaemon::main log_pipe.connect()")
            log_pipe.connect()

            while not self.quit_event.isSet():

                self.log(2, "MergeDaemon::main sleep(1)")
                sleep(1)

                # this should be a persistent / blocking command
                self.log(2, "MergeDaemon::main " + cmd)
                rval = self.system_piped(cmd, log_pipe.sock)
                if rval:
                    self.log(-2,
                             cmd + " failed with return value " + str(rval))
                    self.quit_event.set()

            self.log(2, "MergeDaemon::main closing log_pipe")
            log_pipe.close()
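
system_piped appears in Examples #20 and #23 as the way a persistent command is attached to a log socket. A minimal sketch of such a method, assuming a connected TCP socket whose descriptor can be handed to the child process on POSIX:

import subprocess

def system_piped(self, cmd, pipe):
    # hypothetical sketch: run cmd with stdout/stderr directed at the
    # connected log socket, returning the command's exit code
    proc = subprocess.Popen(cmd, shell=True,
                            stdout=pipe.fileno(),
                            stderr=subprocess.STDOUT)
    return proc.wait()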
Example #24
  def read_obs (self, xml):
    self.log(1, "KAT7ReadDaemon::read_obs()")
    self.log(1, "KAT7ReadDaemon::read_obs xml="+str(xml))

    # launch 2 threads, one for each pol and wait for them to finish
    pol1_dir = xml['pol1']['@dir']
    pol2_dir = xml['pol2']['@dir']

    pol1_nfiles = xml['pol1']['@nfiles']
    pol2_nfiles = xml['pol2']['@nfiles']

    pol1_files = []
    pol2_files = []

    for file in xml['pol1']['file']:
      pol1_files.append(pol1_dir + "/" + file['#text'])
    for file in xml['pol2']['file']:
      pol2_files.append(pol2_dir + "/" + file['#text'])

    # launch a thread for each dbdisk to read the .dada files into 
    # a dada buffer
    in_ids = self.cfg["RECEIVING_DATA_BLOCKS"].split(" ")
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    pol1_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, in_ids[0])
    pol2_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, in_ids[1])

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    # create a log pipe for each diskdb thread
    pol1_log_pipe   = LogSocket ("pol1_src", "pol1_src", str(self.id), "stream",
                                 log_host, log_port, int(DL))
    pol2_log_pipe   = LogSocket ("pol2_src", "pol2_src", str(self.id), "stream",
                                 log_host, log_port, int(DL))

    pol1_log_pipe.connect()
    pol2_log_pipe.connect()

    self.binary_list.append ("dada_diskdb -k " + pol1_key)
    self.binary_list.append ("dada_diskdb -k " + pol2_key)

    # create processing threads
    self.log (1, "creating processing threads")
    pol1_thread = diskdbThread (self, pol1_key, pol1_files, pol1_log_pipe.sock)
    pol2_thread = diskdbThread (self, pol2_key, pol2_files, pol2_log_pipe.sock)

    # start processing threads
    self.log (1, "starting processing threads")
    pol1_thread.start()
    pol2_thread.start()

    # join processing threads
    self.log (2, "waiting for diskdb threads to terminate")
    rval = pol1_thread.join()
    self.log (2, "pol1 thread joined")
    if rval:
      self.log (-2, "pol1 thread failed")
      self.quit_event.set()

    rval = pol2_thread.join()
    self.log (2, "pol2 thread joined")
    if rval:
      self.log (-2, "pol2 thread failed")
      self.quit_event.set()
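
LogSocket is constructed the same way in every example: source and destination tags, stream ID, entity type, server host and port, and a verbosity level, after which the daemon uses its .sock attribute directly. A minimal sketch consistent with that usage (the retry and message-framing behaviour of the real class are unknown):

import socket

class LogSocket(object):
    # hypothetical sketch: TCP connection to the server's log port;
    # daemons attach subprocess output to self.sock
    def __init__(self, source, dest, id, type, host, port, dl):
        self.source = source
        self.dest = dest
        self.id = id
        self.type = type
        self.host = host
        self.port = int(port)
        self.dl = dl
        self.sock = None

    def connect(self):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.host, self.port))

    def close(self):
        if self.sock:
            self.sock.close()
            self.sock = None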
Example #25
File: kat7_gen.py  Project: ajameson/spip
  def gen_obs (self, fixed_config, message):

    self.log(1, "gen_obs: " + str(message))
    header = Config.readDictFromString(message)

    in_ids = self.cfg["RECEIVING_DATA_BLOCKS"].split(" ")
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    num_stream = self.cfg["NUM_STREAM"]
    pol1_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, in_ids[0])
    pol2_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, in_ids[1])

    # generate the header files to be used by GEN_BINARY
    header1_file = "/tmp/kat7_gen1_" + header["UTC_START"] + "." + self.id + ".header"
    header2_file = "/tmp/ket7_gen2_" + header["UTC_START"] + "." + self.id + ".header"

    header["HDR_VERSION"] = "1.0"
    fixed_config["NPOL"] = "1"
    fixed_config["BYTES_PER_SECOND"] = str(float(fixed_config["BYTES_PER_SECOND"])/2)
    fixed_config["RESOLUTION"] = str(float(fixed_config["RESOLUTION"])/2)
    #fixed_config["FILE_SIZE"] = str(float(fixed_config["FILE_SIZE"])/2)
    header["PICOSECONDS"] = "0";
    header["ADC_COUNTS"] = "0";

    # include the fixed configuration
    header.update(fixed_config)

    # transmit rate in MB/s (BYTES_PER_SECOND / 1e6)
    transmit_rate = float(header["BYTES_PER_SECOND"]) / 1000000.0

    self.log(3, "gen_obs: writing header to " + header1_file + " and " + header2_file)
    Config.writeDictToCFGFile (header, header1_file)
    Config.writeDictToCFGFile (header, header2_file)

    stream_core = self.cfg["STREAM_GEN_CORE_" + str(self.id)]

    tobs = "60"
    if header["TOBS"] != "":
      tobs = header["TOBS"]

    cmd1 = "dada_junkdb -k " + pol1_key \
          + " -R " + str(transmit_rate) \
          + " -t " + tobs \
          + " -g " + header1_file
    self.binary_list.append (cmd1)

    cmd2 = "dada_junkdb -k " + pol2_key \
          + " -R " + str(transmit_rate) \
          + " -t " + tobs \
          + " -g " + header2_file
    self.binary_list.append (cmd2)

    sleep(1)

    log_pipe1 = LogSocket ("gen1_src", "gen1_src", str(self.id), "stream",
                        self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                        int(DL))
    log_pipe2 = LogSocket ("gen2_src", "gen2_src", str(self.id), "stream",
                        self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                        int(DL))

    log_pipe1.connect()
    log_pipe2.connect()

    sleep(1)

    pol1_thread = genThread (cmd1, log_pipe1.sock)
    pol2_thread = genThread (cmd2, log_pipe2.sock)

    # start processing threads
    self.log (1, "starting processing threads")
    pol1_thread.start()
    pol2_thread.start()

    # join processing threads
    self.log (2, "waiting for gen threads to terminate")
    rval = pol1_thread.join()
    self.log (2, "pol1 thread joined")
    if rval:
      self.log (-2, "pol1 thread failed")
      self.quit_event.set()

    rval = pol2_thread.join()
    self.log (2, "pol2 thread joined")
    if rval:
      self.log (-2, "pol2 thread failed")
      self.quit_event.set()

    log_pipe1.close ()
    log_pipe2.close ()
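
Config.writeDictToCFGFile is the inverse of the parseHeader sketch earlier: it serialises the merged header dict into the "KEY value" file that dada_junkdb consumes via -g. A compatible sketch (the key padding width is an assumption):

def writeDictToCFGFile(cfg, filename):
    # hypothetical sketch: one 'KEY value' line per entry, keys padded
    # for readability, as PSRDADA tools expect
    fptr = open(filename, "w")
    for key in sorted(cfg.keys()):
        fptr.write(key.ljust(19) + " " + str(cfg[key]) + "\n")
    fptr.close()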
Example #26
    def gen_obs(self, fixed_config, message):

        self.log(1, "gen_obs: " + str(message))
        header = Config.readDictFromString(message)

        in_ids = self.cfg["RECEIVING_DATA_BLOCKS"].split(" ")
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        pol1_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                       in_ids[0])
        pol2_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                       in_ids[1])

        # generate the header files to be used by GEN_BINARY
        header1_file = "/tmp/kat7_gen1_" + header[
            "UTC_START"] + "." + self.id + ".header"
        header2_file = "/tmp/ket7_gen2_" + header[
            "UTC_START"] + "." + self.id + ".header"

        header["HDR_VERSION"] = "1.0"
        fixed_config["NPOL"] = "1"
        fixed_config["BYTES_PER_SECOND"] = str(
            float(fixed_config["BYTES_PER_SECOND"]) / 2)
        fixed_config["RESOLUTION"] = str(float(fixed_config["RESOLUTION"]) / 2)
        #fixed_config["FILE_SIZE"] = str(float(fixed_config["FILE_SIZE"])/2)
        header["PICOSECONDS"] = "0"
        header["ADC_COUNTS"] = "0"

        # include the fixed configuration
        header.update(fixed_config)

        # transmit rate in MB/s (BYTES_PER_SECOND / 1e6)
        transmit_rate = float(header["BYTES_PER_SECOND"]) / 1000000.0

        self.log(
            3, "gen_obs: writing header to " + header1_file + " and " +
            header2_file)
        Config.writeDictToCFGFile(header, header1_file)
        Config.writeDictToCFGFile(header, header2_file)

        stream_core = self.cfg["STREAM_GEN_CORE_" + str(self.id)]

        tobs = "60"
        if header["TOBS"] != "":
            tobs = header["TOBS"]

        cmd1 = "dada_junkdb -k " + pol1_key \
              + " -R " + str(transmit_rate) \
              + " -t " + tobs \
              + " -g " + header1_file
        self.binary_list.append(cmd1)

        cmd2 = "dada_junkdb -k " + pol2_key \
              + " -R " + str(transmit_rate) \
              + " -t " + tobs \
              + " -g " + header2_file
        self.binary_list.append(cmd2)

        sleep(1)

        log_pipe1 = LogSocket("gen1_src", "gen1_src", str(self.id), "stream",
                              self.cfg["SERVER_HOST"],
                              self.cfg["SERVER_LOG_PORT"], int(DL))
        log_pipe2 = LogSocket("gen2_src", "gen2_src", str(self.id), "stream",
                              self.cfg["SERVER_HOST"],
                              self.cfg["SERVER_LOG_PORT"], int(DL))

        log_pipe1.connect()
        log_pipe2.connect()

        sleep(1)

        pol1_thread = genThread(cmd1, log_pipe1.sock)
        pol2_thread = genThread(cmd2, log_pipe2.sock)

        # start processing threads
        self.log(1, "starting processing threads")
        pol1_thread.start()
        pol2_thread.start()

        # join processing threads
        self.log(2, "waiting for gen threads to terminate")
        rval = pol1_thread.join()
        self.log(2, "pol1 thread joined")
        if rval:
            self.log(-2, "pol1 thread failed")
            self.quit_event.set()

        rval = pol2_thread.join()
        self.log(2, "pol2 thread joined")
        if rval:
            self.log(-2, "pol2 thread failed")
            self.quit_event.set()

        log_pipe1.close()
        log_pipe2.close()