Example 1
  def main (self):

    # determine the short hostname of this machine
    hostname = getHostNameShort()

    # get the site configuration
    config = Config()

    # prepare header using configuration file parameters
    fixed_config = config.getStreamConfigFixed(self.id)

    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id = self.cfg["RECEIVING_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    db_key = SMRBDaemon.getDBKey (db_prefix, self.id, num_stream, db_id)

    cmd = "dada_diskdb -k " + db_key + " -z -s " + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000000000000000.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000000000000000.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000034359738368.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000068719476736.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000103079215104.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000137438953472.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000171798691840.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000206158430208.000000.dada" + \
          "-f /data/spip/first_light/single_dish/2016-04-28-13:27:30_0000240518168576.000000.dada"

    self.log (0, "cmd=" + cmd)
    (rval, lines) = self.system (cmd)
    self.log (0, "rval=" + str(rval))
    for line in lines:
      self.log (0, line)
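
The long run of hard-coded -f arguments above is brittle. A minimal sketch of building the same command from a directory listing, assuming the .dada files all sit in one directory (the directory path and db_key below are placeholder values, not from the example):

import glob

db_key = "dada"  # placeholder data block key
data_dir = "/data/spip/first_light/single_dish"  # placeholder directory

cmd = "dada_diskdb -k " + db_key + " -z -s"
for dada_file in sorted(glob.glob(data_dir + "/*.dada")):
  cmd += " -f " + dada_file
print(cmd)
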
Example 2
  def acquire_obs_header (self, in_dir):
    """Generate the obs.header file for the whole band from sub-bands."""

    # test if already exists
    if os.path.exists (in_dir + "/obs.header"):
      self.log(2, "RepackDaemon::acquire_obs_header obs.header file already existed")
      return (0, "")
  
    subband_freqs = self.get_subbands (in_dir)

    # start with header file from first sub-band
    if not os.path.exists (in_dir + "/" + subband_freqs[0] + "/obs.header"):
      self.log(2, "RepackDaemon::acquire_obs_header first sub-band obs.header did not exist")
      return (1, "first sub-band header file did not exist")

    self.log (2, "RepackDaemon::acquire_obs_header header_file[0]=" + in_dir + "/" + subband_freqs[0] + "/obs.header")
    header = Config.readCFGFileIntoDict (in_dir + "/" + subband_freqs[0] + "/obs.header")

    # merge the headers from the other sub-bands
    for i in range(1,len(subband_freqs)):
      subband_header_file = in_dir + "/" + subband_freqs[i] + "/obs.header"
      self.log (2, "RepackDaemon::acquire_obs_header header_file[" + str(i)+ "]=" + subband_header_file)
      if os.path.exists (subband_header_file):
        header_sub = Config.readCFGFileIntoDict (subband_header_file)
        header = Config.mergeHeaderFreq (header, header_sub)
      else:
        return (1, "not all sub-band header files present")

    # write the combined header
    self.log (2, "RepackDaemon::acquire_obs_header writing header to " + in_dir + "/" + "obs.header")
    Config.writeDictToCFGFile (header, in_dir + "/" + "obs.header")

    return (0, "")
Example 3
    def getStreamConfigFixed(self, id):

        cfg = Config.getStreamConfigFixed(self, id)

        cfg["NPOL"] = Config.getStreamParam(self.config, "NPOL", str(id))

        (cfg["DATA_HOST"],
         cfg["DATA_PORT"]) = self.config["STREAM_UDP_" + str(id)].split(":")
        cfg["UDP_NSAMP"] = "2048"

        return cfg
Example 4
  def generateObsInfoDat (self, finished_subdir, completed_subdir):

    obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
    obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
    obs_info_dat_file = self.completed_dir + "/" + completed_subdir + "/obs_info.dat"

    proposal_id = ""
   
    if not os.path.exists (obs_info_dat_file):

      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat creating obs_info.dat")

      if os.path.exists(obs_results_file):
        self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: reading " + obs_results_file)
        obs_results = Config.readCFGFileIntoDict(obs_results_file)
      else:
        self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: generating results")
        obs_results = self.computeResults (self.finished_dir + "/" + finished_subdir)

      if not os.path.exists(obs_header_file):
        self.log (-1, "MeerKATArchiverDaemon::generateObsInfoDat: " + obs_header_file + " did not exist")
        return ("fail", "obs.header file did not exist")
      obs_header = Config.readCFGFileIntoDict(obs_header_file)

      obs_info_dat = {}
  
      obs_info_dat["observer"] = self.extractKey(obs_header ,"OBSERVER")
      obs_info_dat["program_block_id"] = self.extractKey(obs_header, "PROGRAM_BLOCK_ID")
      obs_info_dat["targets"] = "['" + self.extractKey(obs_header,"SOURCE") + "']"
      obs_info_dat["mode"] = self.extractKey(obs_header,"MODE")
      obs_info_dat["sb_id_code"] = self.extractKey(obs_header,"SCHEDULE_BLOCK_ID")
      obs_info_dat["target_duration"] = self.extractKey(obs_results, "length")
      obs_info_dat["target_snr"] = self.extractKey(obs_results, "snr")
      obs_info_dat["proposal_id"] = self.extractKey(obs_header, "PROPOSAL_ID")
      obs_info_dat["description"] = self.extractKey(obs_header, "DESCRIPTION")
      obs_info_dat["backend_args"] = "TBD"
      obs_info_dat["experiment_id"] = self.extractKey(obs_header, "EXPERIMENT_ID")
      obs_info_dat["adc_sync_time"] = self.extractKey(obs_header, "ADC_SYNC_TIME")
      obs_info_dat["precisetime_fraction"] = self.extractKey(obs_header, "PRECISETIME_FRACTION_AVG")
      obs_info_dat["utc_start_offset_picoseconds"] = self.extractKey(obs_header, "PICOSECONDS")

      fold_mode = self.extractKey(obs_header, "PERFORM_FOLD")
      search_mode = self.extractKey(obs_header, "PERFORM_SEARCH")

      Config.writeDictToColonSVFile(obs_info_dat, obs_info_dat_file)

      proposal_id = obs_info_dat["proposal_id"]

    else:
      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat obs_info.dat existed")

    return ("ok", proposal_id)
Example 5
  def generateObsJSON(self, finished_subdir, completed_subdir):

    obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
    obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
    obs_info_json_file = self.completed_dir + "/" + completed_subdir + "/obs_info.json"

    proposal_id = ""

    if not os.path.exists (obs_info_json_file):

      self.debug("creating obs_info.json")

      if os.path.exists(obs_results_file):
        self.debug("reading " + obs_results_file)
        obs_results = Config.readCFGFileIntoDict(obs_results_file)
      else:
        self.debug("generating results")
        obs_results = self.computeResults (self.finished_dir + "/" + finished_subdir)

      if not os.path.exists(obs_header_file):
        self.warn(obs_header_file + " did not exist")
        return ("fail", "obs.header file did not exist")
      obs_header = Config.readCFGFileIntoDict(obs_header_file)

      data = {}
      data["ProductType"] = "USEabcProduct"
      data["Description"] = self.extractKey(obs_header, "DESCRIPTION")
      data["SchedulelBockIdCode"] = self.extractKey(obs_header,"SCHEDULE_BLOCK_ID")
      data["ProposalId"] = self.extractKey(obs_header, "PROPOSAL_ID")

      utc_start = self.extractKey(obs_header, "UTC_START")
      data["StartTime"] = times.reformatUTCTime(utc_start, "%Y-%m-%dT%H:%M:%S%Z")
      data["Observer"] = self.extractKey(obs_header ,"OBSERVER")
      data["ProgramBlockId"] = self.extractKey(obs_header, "PROGRAM_BLOCK_ID")
      data["Duration"] = float(self.extractKey(obs_results, "length"))
      data["Bandwidth"] = float(self.extractKey(obs_header, "BW"))
      data["CenterFrequency"] = float(self.extractKey(obs_header, "FREQ"))
      data["NumFreqChannels"] = int(self.extractKey(obs_header, "NCHAN"))
      data["ChannelWidth"] = float(data["CenterFrequency"]) / float(data["NumFreqChannels"])

      json_data = json.dumps(data)
      fptr = open(obs_info_json_file, 'w')
      fptr.write(json_data)
      fptr.close()

    else:
      self.debug("obs_info.json existed")

    return ("ok", "")
Example 6
  def generateObsInfoDat (self, finished_subdir, completed_subdir):

    obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
    obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
    obs_info_dat_file = self.completed_dir + "/" + completed_subdir + "/obs_info.dat"
   
    if not os.path.exists (obs_info_dat_file):

      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat creating obs_info.dat")

      if os.path.exists(obs_results_file):
        self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: reading " + obs_results_file)
        obs_results = Config.readCFGFileIntoDict(obs_results_file)
      else:
        self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat: generating results")
        obs_results = self.computeResults (self.finished_dir + "/" + finished_subdir)

      if not os.path.exists(obs_header_file):
        self.log (-1, "MeerKATArchiverDaemon::generateObsInfoDat: " + obs_header_file + " did not exist")
        return ("fail", "obs.header file did not exist")
      obs_header = Config.readCFGFileIntoDict(obs_header_file)

      obs_info_dat = {}
  
      obs_info_dat["observer"] = self.extractKey(obs_header ,"OBSERVER")
      obs_info_dat["program_block_id"] = self.extractKey(obs_header, "PROGRAM_BLOCK_ID")
      obs_info_dat["targets"] = "['" + self.extractKey(obs_header,"SOURCE") + "']"
      obs_info_dat["mode"] = self.extractKey(obs_header,"MODE")
      obs_info_dat["sb_id_code"] = self.extractKey(obs_header,"SCHEDULE_BLOCK_ID")
      obs_info_dat["target_duration"] = self.extractKey(obs_results, "length")
      obs_info_dat["target_snr"] = self.extractKey(obs_results, "snr")
      obs_info_dat["proposal_id"] = self.extractKey(obs_header, "PROPOSAL_ID")
      obs_info_dat["description"] = self.extractKey(obs_header, "DESCRIPTION")
      obs_info_dat["backend_args"] = "TBD"
      obs_info_dat["experiment_id"] = self.extractKey(obs_header, "EXPERIMENT_ID")
      obs_info_dat["adc_sync_time"] = self.extractKey(obs_header, "ADC_SYNC_TIME")
      obs_info_dat["precisetime_fraction"] = self.extractKey(obs_header, "PRECISETIME_FRACTION_AVG")
      obs_info_dat["utc_start_offset_picoseconds"] = self.extractKey(obs_header, "PICOSECONDS")

      fold_mode = self.extractKey(obs_header, "PERFORM_FOLD")
      search_mode = self.extractKey(obs_header, "PERFORM_SEARCH")

      Config.writeDictToColonSVFile(obs_info_dat, obs_info_dat_file)

    else:
      self.log (2, "MeerKATArchiverDaemon::generateObsInfoDat obs_info.dat existed")

    return ("ok", "")
Example 7
  def prepare_observation (self, beam, utc_start, source, mode, streams):

    if mode == "fold":
      base_dir = self.cfg["SERVER_FOLD_DIR"]
    elif mode == "search":
      base_dir = self.cfg["SERVER_SEARCH_DIR"]
    elif mode == "continuum":
      base_dir = self.cfg["SERVER_CONTINUUM_DIR"]
    elif mode == "spectral_line":
      base_dir = self.cfg["SERVER_SPECTRAL_LINE_DIR"]
    elif mode == "vlbi":
      base_dir = self.cfg["SERVER_VLBI_DIR"]
    elif mode == "tran":
      base_dir = self.cfg["SERVER_TRANSIENTS_DIR"]
    else:
      return (0, "Unrecognized processing mode: " + mode)

    processing_dir = base_dir + "/processing"

    # create the directory structure
    src_dir = processing_dir + "/" + beam + "/" + utc_start + "/" + source + "/"
    if not os.path.exists(src_dir):
      try:
        os.makedirs (src_dir, 0755)
      except Exception as e:
        self.info("could not create dir " + src_dir + ": " + str(e))

    # write stream information to file
    fptr = open (src_dir + "/obs.info", "w")
    fptr.write(Config.writePaddedString("NUM_STREAM", len(streams)) + "\n")

    for i in range(len(streams)):

      # get the stream configuration
      (freq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(streams[i])].split(":")

      # create the output sub-band dir
      if not os.path.exists(src_dir + "/" + freq):
        try:
          os.makedirs (src_dir + "/" + freq, 0755)
        except Exception as e:
          self.info("could not create dir " + src_dir + "/" + freq + ":" + str(e))

      # write the configuration to the file
      fptr.write(Config.writePaddedString("SUBBAND_" + str(i), \
                 self.cfg["SUBBAND_CONFIG_" + str(streams[i])]) + "\n")

    fptr.close()
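
Assuming Config.writePaddedString pads the key out to a fixed width, the obs.info file written above would look roughly like this (values hypothetical):

NUM_STREAM          2
SUBBAND_0           1100.0:200.0:1024
SUBBAND_1           1300.0:200.0:1024
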
Example 8
    def getCommand(self, config_file):

        # get the beam name for the stream
        (host, self.beam_id,
         self.subband_id) = self.cfg["STREAM_" + self.id].split(":")
        beam = self.cfg["BEAM_" + str(self.beam_id)]

        npol = Config.getStreamParam(self.cfg, "NPOL", self.id)

        cmd = self.cfg["STREAM_BINARY"] + " -k " + self.db_key \
                + " -b " + self.cpu_core \
                + " -c " + self.ctrl_port \
                + " -D " + self.cfg["CLIENT_STATS_DIR"] + "/" + beam \
                + " -p " + npol \
                + " -s " + str(self.id) \
                + " -f dualvdif" \
                + " " + config_file

        #cmd = self.cfg["STREAM_BINARY"] + " -k " + self.db_key \
        #        + " -b " + self.cpu_core \
        #        + " -c " + self.ctrl_port \
        #        + " -f dualvdif" \
        #        + " " + config_file

        return cmd
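
Substituting placeholder configuration values, the assembled command has this shape (every token except the fixed flags comes from the config):

<STREAM_BINARY> -k <db_key> -b <cpu_core> -c <ctrl_port> -D <CLIENT_STATS_DIR>/<beam> -p <npol> -s <stream_id> -f dualvdif <config_file>
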
Example 9
    def getStreamConfigFixed(self, id):

        cfg = Config.getStreamConfigFixed(self, id)

        (cfg["DATA_HOST_0"],
         cfg["DATA_HOST_1"]) = self.config["DATA_HOST_0"].split(",")
        (cfg["DATA_MCAST_0"],
         cfg["DATA_MCAST_1"]) = self.config["DATA_MCAST_0"].split(",")
        (cfg["DATA_PORT_0"],
         cfg["DATA_PORT_1"]) = self.config["DATA_PORT_0"].split(",")

        (cfg["META_HOST_0"],
         cfg["META_HOST_1"]) = self.config["META_HOST_0"].split(",")
        (cfg["META_MCAST_0"],
         cfg["META_MCAST_1"]) = self.config["META_MCAST_0"].split(",")
        (cfg["META_PORT_0"],
         cfg["META_PORT_1"]) = self.config["META_PORT_0"].split(",")

        cfg["ADC_SAMPLE_RATE"] = self.config["ADC_SAMPLE_RATE"]

        (freq, bw, nchan) = self.config["SUBBAND_CONFIG_" +
                                        cfg["STREAM_SUBBAND_ID"]].split(":")
        chan_bw = float(bw) / float(nchan)
        cfg["FREQ"] = str(float(freq) - chan_bw / 2)

        return cfg
Example 10
  def getStreamConfigFixed (self, id):

    cfg = Config.getStreamConfigFixed (self, id)

    (cfg["DATA_HOST_0"], cfg["DATA_HOST_1"]) = self.config["DATA_HOST"].split(",")
    (cfg["DATA_MCAST_0"], cfg["DATA_MCAST_1"]) = self.config["DATA_MCAST"].split(",")
    (cfg["DATA_PORT_0"], cfg["DATA_PORT_1"]) = self.config["DATA_PORT"].split(",")

    (cfg["META_HOST_0"], cfg["META_HOST_1"]) = self.config["META_HOST"].split(",")
    (cfg["META_MCAST_0"], cfg["META_MCAST_1"]) = self.config["META_MCAST"].split(",")
    (cfg["META_PORT_0"], cfg["META_PORT_1"]) = self.config["META_PORT"].split(",")

    cfg["ADC_SAMPLE_RATE"] = self.config["ADC_SAMPLE_RATE"]

    (freq, bw, nchan) = self.config["SUBBAND_CONFIG_" + str(id)].split(":")

    # MeerKAT's convention is that the CFREQ is offset by chan_bw/2
    chan_bw = float(bw) / float(nchan)
    cfg["FREQ"] = str(float(freq) - chan_bw / 2)

    cfg["BW"] = bw
    cfg["NCHAN"] = nchan
    cfg["NPOL"] = "2"

    (start_chan, end_chan) = self.config["SUBBAND_CHANS_" + str(id)].split(":")
    cfg["START_CHANNEL"] = start_chan
    cfg["END_CHANNEL"]   = end_chan

    return cfg
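
The chan_bw/2 offset is easiest to see with numbers; a worked example with a hypothetical sub-band configured as "1284.0:856.0:1024":

freq, bw, nchan = 1284.0, 856.0, 1024
chan_bw = bw / nchan          # 0.8359375 MHz per channel
print(freq - chan_bw / 2)     # 1283.58203125, written to cfg["FREQ"]
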
Example 11
    def acquire_obs_header(self, in_dir):
        """Generate the obs.header file for the whole band from sub-bands."""

        # test if already exists
        if os.path.exists(in_dir + "/obs.header"):
            self.log(
                2,
                "RepackDaemon::acquire_obs_header obs.header file already existed"
            )
            return (0, "")

        subband_freqs = self.get_subbands(in_dir)

        # start with header file from first sub-band
        if not os.path.exists(in_dir + "/" + subband_freqs[0] + "/obs.header"):
            self.log(
                2,
                "RepackDaemon::acquire_obs_header first sub-band obs.header did not exist"
            )
            return (1, "first sub-band header file did not exist")

        self.log(
            2, "RepackDaemon::acquire_obs_header header_file[0]=" + in_dir +
            "/" + subband_freqs[0] + "/obs.header")
        header = Config.readCFGFileIntoDict(in_dir + "/" + subband_freqs[0] +
                                            "/obs.header")

        # merge the headers from the other sub-bands
        for i in range(1, len(subband_freqs)):
            subband_header_file = in_dir + "/" + subband_freqs[
                i] + "/obs.header"
            self.log(
                2, "RepackDaemon::acquire_obs_header header_file[" + str(i) +
                "]=" + subband_header_file)
            if os.path.exists(subband_header_file):
                header_sub = Config.readCFGFileIntoDict(subband_header_file)
                header = Config.mergeHeaderFreq(header, header_sub)
            else:
                return (1, "not all sub-band header files present")

        # write the combined header
        self.log(
            2, "RepackDaemon::acquire_obs_header writing header to " + in_dir +
            "/" + "obs.header")
        Config.writeDictToCFGFile(header, in_dir + "/" + "obs.header")

        return (0, "")
Example 12
    def generateObsInfoDat(self, finished_subdir, completed_subdir):

        obs_results_file = self.finished_dir + "/" + finished_subdir + "/obs.results"
        obs_header_file = self.completed_dir + "/" + completed_subdir + "/obs.header"
        obs_info_dat_file = self.completed_dir + "/" + completed_subdir + "/obs_info.dat"

        if not os.path.exists(obs_info_dat_file):

            self.log(
                2,
                "MeerKATArchiverDaemon::generateObsInfoDat creating obs_info.dat"
            )

            obs_results = Config.readCFGFileIntoDict(obs_results_file)
            obs_header = Config.readCFGFileIntoDict(obs_header_file)
            obs_info_dat = {}

            obs_info_dat["observer"] = self.extractKey(obs_header, "OBSERVER")
            obs_info_dat["program_block_id"] = self.extractKey(
                obs_header, "PROGRAM_BLOCK_ID")
            obs_info_dat["targets"] = "['" + self.extractKey(
                obs_header, "SOURCE") + "']"
            obs_info_dat["mode"] = self.extractKey(obs_header, "MODE")
            obs_info_dat["sb_id_code"] = self.extractKey(
                obs_header, "SCHEDULE_BLOCK_ID")
            obs_info_dat["target_duration"] = self.extractKey(
                obs_results, "length")
            obs_info_dat["target_snr"] = self.extractKey(obs_results, "snr")
            obs_info_dat["proposal_id"] = self.extractKey(
                obs_header, "PROPOSAL_ID")
            obs_info_dat["description"] = self.extractKey(
                obs_header, "DESCRIPTION")
            obs_info_dat["backend_args"] = "TBD"
            obs_info_dat["experiment_id"] = self.extractKey(
                obs_header, "EXPERIMENT_ID")

            Config.writeDictToColonSVFile(obs_info_dat, obs_info_dat_file)

        else:
            self.log(
                2,
                "MeerKATArchiverDaemon::generateObsInfoDat obs_info.dat existed"
            )

        return ("ok", "")
Example 13
    def prepare_observation(self, beam, utc_start, source, mode, streams):

        if mode == "fold":
            base_dir = self.cfg["SERVER_FOLD_DIR"]
        elif mode == "search":
            base_dir = self.cfg["SERVER_SEARCH_DIR"]
        elif mode == "continuum":
            base_dir = self.cfg["SERVER_CONTINUUM_DIR"]
        elif mode == "spectral_line":
            base_dir = self.cfg["SERVER_SPECTRAL_LINE_DIR"]
        elif mode == "vlbi":
            base_dir = self.cfg["SERVER_VLBI_DIR"]
        elif mode == "tran":
            base_dir = self.cfg["SERVER_TRANSIENTS_DIR"]
        else:
            return (0, "Unrecognized processing mode: " + mode)

        processing_dir = base_dir + "/processing"

        # create the directory structure
        src_dir = processing_dir + "/" + beam + "/" + utc_start + "/" + source + "/"
        if not os.path.exists(src_dir):
            os.makedirs(src_dir, 0755)

        # write stream information to file
        fptr = open(src_dir + "/obs.info", "w")
        fptr.write(Config.writePaddedString("NUM_STREAM", len(streams)) + "\n")

        for i in range(len(streams)):

            # get the stream configuration
            (freq, bw,
             nchan) = self.cfg["SUBBAND_CONFIG_" + str(streams[i])].split(":")

            # create the output sub-band dir
            if not os.path.exists(src_dir + "/" + freq):
                os.makedirs(src_dir + "/" + freq, 0755)

            # write the configuration to the file
            fptr.write(Config.writePaddedString("SUBBAND_" + str(i), \
                       self.cfg["SUBBAND_CONFIG_" + str(streams[i])]) + "\n")

        fptr.close()
Example 14
  def get_subbands (self, obs_dir):

    subband_freqs = []

    # read obs.info file to find the subbands
    if not os.path.exists (obs_dir + "/obs.info"):
      self.log(0, "RepackDaemon::get_subbands " + obs_dir + "/obs.info file did not exist")
      return subband_freqs

    info = Config.readCFGFileIntoDict (obs_dir + "/obs.info")
    num_streams = info["NUM_STREAM"]
    for i in range(int(num_streams)):
      (freq, bw, beam) = info["SUBBAND_" + str(i)].split(":")
      subband_freqs.append(freq)
    self.log(2, "RepackDaemon::get_subbands subband_freqs=" + str(subband_freqs))
    return subband_freqs
Example 15
    def build_cmd (self):

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(self.id)].split(":")

        # npol may vary from stream to stream
        npol = Config.getStreamParam (self.cfg, "NPOL", self.id)

        # this stat command will not change from observation to observation
        stat_cmd = self.cfg["STREAM_STATS_BINARY"] + \
                " -k " + self.db_key + \
                " " + self.stream_config_file + \
                " -D " + self.stat_dir + \
                " -n " + nchan + \
                " -p " + npol 

        return stat_cmd
Example 16
    def build_cmd(self):

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(self.id)].split(":")

        # npol may vary from stream to stream
        npol = Config.getStreamParam(self.cfg, "NPOL", self.id)

        # this stat command will not change from observation to observation
        stat_cmd = self.cfg["STREAM_STATS_BINARY"] + \
                " -k " + self.db_key + \
                " " + self.stream_config_file + \
                " -D " + self.stat_dir + \
                " -n " + nchan + \
                " -p " + npol

        return stat_cmd
Example 17
  def patch_psrfits_header (self, input_dir, input_file):

    header_file = input_dir + "/obs.header"
    self.log(3, "patch_psrfits_header: header_file="+header_file)

    header = Config.readCFGFileIntoDict (input_dir + "/obs.header")

    new = {}
    new["obs:observer"] = header["OBSERVER"] 
    new["obs:projid"]   = header["PID"]
    new["be:nrcvr"]     = "2"
    new["be:phase"]     = header["BACKEND_PHASE"] # Phase convention of backend

    # need to know what these mean!
    new["rcvr:hand"]    = header["RCVR_HAND"] # handedness of the receiver
    new["rcvr:sa"]      = "0"     # Advised by D.Manchester
    new["be:tcycle"]    = "8"     # Correlator cycle time
    new["be:dcc"]       = "1"
    new["sub:nsblk"]    = "1"     # Samples/row (SEARCH mode, else 1)
  
    # this needs to come from CAM, hack for now
    new["ext:trk_mode"] = "TRACK" # Tracking mode
    new["ext:bpa"]      = "0" # Beam position angle [?]
    new["ext:bmaj"]     = "0" # Beam major axis [degrees]
    new["ext:bmin"]     = "0" # Beam minor axis [degrees]

    # 31-10-2018 Dick Manchester requested that these parameters be set to 
    # the values output by the PFB FPGAs, not what DSPSR sees in the header
    (freq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(self.id)].split(":")
    new["ext:obsfreq"]  = freq
    new["ext:obsbw"]    = bw
    new["ext:obsnchan"] = nchan

    new["ext:stp_crd1"] = header["RA"]
    new["ext:stp_crd2"] = header["DEC"]
    new["ext:stt_date"] = header["UTC_START"][0:10]
    new["ext:stt_time"] = header["UTC_START"][11:19]

    # create the psredit command necessary to apply "new"
    cmd = "psredit -m -c " + ",".join(['%s=%s' % (key, value) for (key, value) in new.items()]) + " " + input_file
    rval, lines = self.system(cmd, 2)
    if rval:
      return rval, lines[0]
    return 0, ""
Example 18
    def getMuxedStreamConfigFixed(self, id):

        cfg = Config.getStreamConfigFixed(self, id)

        (cfg["DATA_HOST_0"],
         cfg["DATA_HOST_1"]) = self.config["DATA_HOST"].split(",")
        (cfg["DATA_MCAST_0"],
         cfg["DATA_MCAST_1"]) = self.config["DATA_MCAST"].split(",")
        (cfg["DATA_PORT_0"],
         cfg["DATA_PORT_1"]) = self.config["DATA_PORT"].split(",")

        (cfg["META_HOST_0"],
         cfg["META_HOST_1"]) = self.config["META_HOST"].split(",")
        (cfg["META_MCAST_0"],
         cfg["META_MCAST_1"]) = self.config["META_MCAST"].split(",")
        (cfg["META_PORT_0"],
         cfg["META_PORT_1"]) = self.config["META_PORT"].split(",")

        cfg["ADC_SAMPLE_RATE"] = self.config["ADC_SAMPLE_RATE"]

        (freq1, bw1, nchan1) = self.config["SUBBAND_CONFIG_0"].split(":")
        (freq2, bw2, nchan2) = self.config["SUBBAND_CONFIG_1"].split(":")

        freq = (float(freq1) + float(freq2)) / 2

        bw = float(bw1) + float(bw2)
        nchan = int(nchan1) + int(nchan2)

        # MeerKAT's convention is that the CFREQ is offset by chan_bw/2
        chan_bw = float(bw1) / float(nchan1)

        cfg["FREQ"] = str(float(freq) - chan_bw / 2)
        cfg["BW"] = str(bw)
        cfg["NCHAN"] = str(nchan)
        cfg["NPOL"] = "2"

        (start_chan1, end_chan1) = self.config["SUBBAND_CHANS_0"].split(":")
        (start_chan2, end_chan2) = self.config["SUBBAND_CHANS_1"].split(":")
        cfg["START_CHANNEL"] = start_chan1
        cfg["END_CHANNEL"] = end_chan2

        return cfg
Example 19
    def get_subbands(self, obs_dir):

        subband_freqs = []

        # read obs.info file to find the subbands
        if not os.path.exists(obs_dir + "/obs.info"):
            self.log(
                0, "RepackDaemon::get_subbands " + obs_dir +
                "/obs.info file did not exist")
            return subband_freqs

        info = Config.readCFGFileIntoDict(obs_dir + "/obs.info")
        num_streams = info["NUM_STREAM"]
        for i in range(int(num_streams)):
            (freq, bw, beam) = info["SUBBAND_" + str(i)].split(":")
            subband_freqs.append(freq)
        self.log(
            2,
            "RepackDaemon::get_subbands subband_freqs=" + str(subband_freqs))
        return subband_freqs
Example 20
  def get_out_cfreq (self, obs_dir):

    # read obs.info file to find the subbands
    if not os.path.exists (obs_dir + "/obs.info"):
      self.log(0, "RepackDaemon::get_out_cfreq obs.info file did not exist")
      return (False, 0)

    info = Config.readCFGFileIntoDict (obs_dir + "/obs.info")
    num_streams = info["NUM_STREAM"]
    freq_low  = float(1e12)
    freq_high = float(-1e12)

    for i in range(int(num_streams)):
      (freq, bw, beam) = info["SUBBAND_" + str(i)].split(":")
      freq_low  = min (freq_low, float(freq) - (float(bw)/2.0))
      freq_high = max (freq_high, float(freq) + (float(bw)/2.0))

    cfreq = int(freq_low + ((freq_high - freq_low) / 2.0))
    self.log(2, "RepackDaemon::get_out_cfreq low=" + str(freq_low) + " high=" + str(freq_high) + " cfreq=" + str(cfreq))
    return (True, cfreq)
Example 21
  def patch_psrfits_header (self, input_dir, input_file):

    header_file = input_dir + "/obs.header"
    self.log(3, "patch_psrfits_header: header_file="+header_file)

    header = Config.readCFGFileIntoDict (input_dir + "/obs.header")

    new = {}
    new["obs:observer"] = header["OBSERVER"] 
    new["obs:projid"]   = header["PID"]

    # constants that currently do not flow through CAM
    new["be:nrcvr"]     = "2"

    # need to know what these mean!
    new["be:phase"]     = "+1"    # Phase convention of backend
    new["be:tcycle"]    = "8"     # Correlator cycle time
    new["be:dcc"]       = "0"     # Downconversion conjugation corrected
    new["sub:nsblk"]    = "1"     # Samples/row (SEARCH mode, else 1)
  
    # this needs to come from CAM, hack for now
    new["ext:trk_mode"] = "TRACK" # Tracking mode
    new["ext:bpa"]      = "0" # Beam position angle [?]
    new["ext:bmaj"]     = "0" # Beam major axis [degrees]
    new["ext:bmin"]     = "0" # Beam minor axis [degrees]

    new["ext:obsfreq"]  = header["FREQ"]
    new["ext:obsbw"]    = header["BW"]
    new["ext:obsnchan"] = header["NCHAN"]

    new["ext:stp_crd1"] = header["RA"]
    new["ext:stp_crd2"] = header["DEC"]
    new["ext:stt_date"] = header["UTC_START"][0:10]
    new["ext:stt_time"] = header["UTC_START"][11:19]

    # create the psredit command necessary to apply "new"
    cmd = "psredit -m -c " + ",".join(['%s=%s' % (key, value) for (key, value) in new.items()]) + " " + input_file
    rval, lines = self.system(cmd, 2)
    if rval:
      return rval, lines[0]
    return 0, ""
Example 22
    def get_out_cfreq(self, obs_dir):

        # read obs.info file to find the subbands
        if not os.path.exists(obs_dir + "/obs.info"):
            self.log(
                0, "RepackDaemon::get_out_cfreq obs.info file did not exist")
            return (False, 0)

        info = Config.readCFGFileIntoDict(obs_dir + "/obs.info")
        num_streams = info["NUM_STREAM"]
        freq_low = float(1e12)
        freq_high = float(-1e12)

        for i in range(int(num_streams)):
            (freq, bw, beam) = info["SUBBAND_" + str(i)].split(":")
            freq_low = min(freq_low, float(freq) - (float(bw) / 2.0))
            freq_high = max(freq_high, float(freq) + (float(bw) / 2.0))

        cfreq = int(freq_low + ((freq_high - freq_low) / 2.0))
        self.log(
            2, "RepackDaemon::get_out_cfreq low=" + str(freq_low) + " high=" +
            str(freq_high) + " cfreq=" + str(cfreq))
        return (True, cfreq)
Example 23
  def getCommand (self, config_file):

    # get the beam name for the stream
    (host, self.beam_id, self.subband_id) = self.cfg["STREAM_" + self.id].split(":")
    beam = self.cfg["BEAM_" + str(self.beam_id)]

    npol = Config.getStreamParam (self.cfg, "NPOL", self.id)

    cmd = self.cfg["STREAM_BINARY"] + " -k " + self.db_key \
            + " -b " + self.cpu_core \
            + " -c " + self.ctrl_port \
            + " -D " + self.cfg["CLIENT_STATS_DIR"] + "/" + beam \
            + " -p " + npol \
            + " -s " + str(self.id) \
            + " -f dualvdif" \
            + " " + config_file

    #cmd = self.cfg["STREAM_BINARY"] + " -k " + self.db_key \
    #        + " -b " + self.cpu_core \
    #        + " -c " + self.ctrl_port \
    #        + " -f dualvdif" \
    #        + " " + config_file

    return cmd
Example 24
  def getMuxedStreamConfigFixed (self, id):

    cfg = Config.getStreamConfigFixed (self, id)

    (cfg["DATA_HOST_0"], cfg["DATA_HOST_1"]) = self.config["DATA_HOST"].split(",")
    (cfg["DATA_MCAST_0"], cfg["DATA_MCAST_1"]) = self.config["DATA_MCAST"].split(",")
    (cfg["DATA_PORT_0"], cfg["DATA_PORT_1"]) = self.config["DATA_PORT"].split(",")

    (cfg["META_HOST_0"], cfg["META_HOST_1"]) = self.config["META_HOST"].split(",")
    (cfg["META_MCAST_0"], cfg["META_MCAST_1"]) = self.config["META_MCAST"].split(",")
    (cfg["META_PORT_0"], cfg["META_PORT_1"]) = self.config["META_PORT"].split(",")

    cfg["ADC_SAMPLE_RATE"] = self.config["ADC_SAMPLE_RATE"]

    (freq1, bw1, nchan1) = self.config["SUBBAND_CONFIG_0"].split(":")
    (freq2, bw2, nchan2) = self.config["SUBBAND_CONFIG_1"].split(":")
 
    freq = (float(freq1) + float(freq2)) / 2

    bw = float(bw1) + float(bw2)
    nchan = int(nchan1) + int(nchan2)

    # MeerKAT's convention is that the CFREQ is offset by chan_bw/2
    chan_bw = float(bw1) / float(nchan1)

    cfg["FREQ"] = str(float(freq) - chan_bw / 2)
    cfg["BW"] = str(bw)
    cfg["NCHAN"] = str(nchan)
    cfg["NPOL"] = "2"

    (start_chan1, end_chan1) = self.config["SUBBAND_CHANS_0"].split(":")
    (start_chan2, end_chan2) = self.config["SUBBAND_CHANS_1"].split(":")
    cfg["START_CHANNEL"] = start_chan1
    cfg["END_CHANNEL"]   = end_chan2
    
    return cfg
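
A worked example of the muxed-band arithmetic above, with two hypothetical adjacent sub-bands:

freq1, bw1, nchan1 = 1100.0, 200.0, 1024   # SUBBAND_CONFIG_0 (hypothetical)
freq2, bw2, nchan2 = 1300.0, 200.0, 1024   # SUBBAND_CONFIG_1 (hypothetical)

freq  = (freq1 + freq2) / 2    # 1200.0, centre of the combined band
bw    = bw1 + bw2              # 400.0
nchan = nchan1 + nchan2        # 2048
chan_bw = bw1 / nchan1         # 0.1953125
print(freq - chan_bw / 2)      # 1199.90234375, written to cfg["FREQ"]
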
Example 25
    def getStreamConfigFixed(self, id):

        cfg = Config.getStreamConfigFixed(self, id)

        (cfg["DATA_HOST_0"],
         cfg["DATA_HOST_1"]) = self.config["DATA_HOST"].split(",")
        (cfg["DATA_MCAST_0"],
         cfg["DATA_MCAST_1"]) = self.config["DATA_MCAST"].split(",")
        (cfg["DATA_PORT_0"],
         cfg["DATA_PORT_1"]) = self.config["DATA_PORT"].split(",")

        (cfg["META_HOST_0"],
         cfg["META_HOST_1"]) = self.config["META_HOST"].split(",")
        (cfg["META_MCAST_0"],
         cfg["META_MCAST_1"]) = self.config["META_MCAST"].split(",")
        (cfg["META_PORT_0"],
         cfg["META_PORT_1"]) = self.config["META_PORT"].split(",")

        cfg["ADC_SAMPLE_RATE"] = self.config["ADC_SAMPLE_RATE"]

        (freq, bw, nchan) = self.config["SUBBAND_CONFIG_" + str(id)].split(":")

        # MeerKAT's convention is that the CFREQ is offset by chan_bw/2
        chan_bw = float(bw) / float(nchan)
        cfg["FREQ"] = str(float(freq) - chan_bw / 2)

        cfg["BW"] = bw
        cfg["NCHAN"] = nchan
        cfg["NPOL"] = "2"

        (start_chan,
         end_chan) = self.config["SUBBAND_CHANS_" + str(id)].split(":")
        cfg["START_CHANNEL"] = start_chan
        cfg["END_CHANNEL"] = end_chan

        return cfg
Example 26
    def main(self):

        control_thread = []
        self.client_threads = {}
        self.reload_clients = {}
        self.server_thread = []
        self.system_lock = threading.Lock()

        # find matching client streams for this host
        client_streams = []
        for istream in range(int(self.cfg["NUM_STREAM"])):
            (req_host, beam_id,
             subband_id) = Config.getStreamConfig(istream, self.cfg)
            if req_host == self.req_host:
                client_streams.append(istream)

        # find matching server stream
        server_streams = []
        if self.cfg["SERVER_HOST"] == self.req_host:
            server_streams.append(-1)

        daemon_states = {}

        for stream in server_streams:
            self.log(2, "main: client_thread[-1] = clientThread(-1)")
            daemon_states[-1] = {}
            server_thread = clientThread(-1, self, daemon_states[-1])
            self.log(2, "main: client_thread[-1].start()")
            server_thread.start()
            self.log(2, "main: client_thread[-1] started")

        sleep(1)

        # start a control thread for each stream
        for stream in client_streams:
            daemon_states[stream] = {}
            self.log(
                2, "main: client_thread[" + str(stream) +
                "] = clientThread (" + str(stream) + ")")
            self.reload_clients[stream] = False
            self.client_threads[stream] = clientThread(stream, self,
                                                       daemon_states[stream])
            self.log(2, "main: client_thread[" + str(stream) + "].start()")
            self.client_threads[stream].start()
            self.log(2, "main: client_thread[" + str(stream) + "] started!")

        # main thread
        disks_to_monitor = [self.cfg["CLIENT_DIR"]]

        # create socket for LMC commands
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((self.req_host, int(self.cfg["LMC_PORT"])))
        sock.listen(5)

        can_read = [sock]
        can_write = []
        can_error = []
        timeout = 1
        hw_poll = 5
        counter = 0

        sensors = {}

        # monitor / control loop
        while not self.quit_event.isSet():

            self.log(3, "Main Loop: counter=" + str(counter))

            while (counter == 0):

                self.log(2, "Refreshing monitoring points")

                self.log(3, "main: getDiskCapacity ()")
                rval, disks = lmc_mon.getDiskCapacity(disks_to_monitor, DL)
                self.log(3, "main: " + str(disks))

                self.log(3, "main: getLoads()")
                rval, loads = lmc_mon.getLoads(DL)
                self.log(3, "main: " + str(loads))

                self.log(3,
                         "main: getSMRBCapacity(" + str(client_streams) + ")")
                rval, smrbs = lmc_mon.getSMRBCapacity(client_streams,
                                                      self.quit_event, DL)
                self.log(3, "main: " + str(smrbs))

                self.log(3, "main: getIPMISensors()")
                rval, sensors = lmc_mon.getIPMISensors(DL)
                self.log(3, "main: " + str(sensors))

                counter = hw_poll

            self.log(
                3, "main: calling select len(can_read)=" + str(len(can_read)))
            timeout = 1
            did_read = []
            did_write = []
            did_error = []

            try:
                did_read, did_write, did_error = select.select(
                    can_read, can_write, can_error, timeout)
            except select.error as e:
                self.quit_event.set()
            else:
                self.log(
                    3, "main: read=" + str(len(did_read)) + " write=" +
                    str(len(did_write)) + " error=" + str(len(did_error)))

            if (len(did_read) > 0):
                for handle in did_read:
                    if (handle == sock):
                        (new_conn, addr) = sock.accept()
                        self.log(2,
                                 "main: accept connection from " + repr(addr))
                        # add the accepted connection to can_read
                        can_read.append(new_conn)
                        # new_conn.send("Welcome to the LMC interface\r\n")

                    # an accepted connection must have generated some data
                    else:
                        try:
                            raw = handle.recv(4096)
                        except socket.error as e:
                            if e.errno == errno.ECONNRESET:
                                self.log(2, "main: closing connection")
                                handle.close()
                                for i, x in enumerate(can_read):
                                    if (x == handle):
                                        del can_read[i]
                            else:
                                raise e
                        else:
                            message = raw.strip()
                            self.log(2, "main: message='" + message + "'")

                            if len(message) == 0:
                                self.log(2, "main: closing connection")
                                handle.close()
                                for i, x in enumerate(can_read):
                                    if (x == handle):
                                        del can_read[i]

                            else:
                                xml = xmltodict.parse(message)

                                command = xml["lmc_cmd"]["command"]

                                if command == "reload_clients":
                                    self.log(1, "Reloading clients")
                                    for stream in client_streams:
                                        self.reload_clients[stream] = True

                                    all_reloaded = False
                                    while (not self.quit_event.isSet()
                                           and not all_reloaded):
                                        all_reloaded = True
                                        for stream in client_streams:
                                            if not self.reload_clients[stream]:
                                                all_reloaded = False
                                        if not all_reloaded:
                                            self.log(
                                                1,
                                                "Waiting for clients to reload"
                                            )
                                            sleep(1)

                                    self.log(1, "Clients reloaded")
                                    response = "<lmc_reply>OK</lmc_reply>"

                                if command == "daemon_status":
                                    response = ""
                                    response += "<lmc_reply>"

                                    for stream in server_streams:
                                        response += "<stream id='" + str(
                                            stream) + "'>"
                                        for daemon in daemon_states[
                                                stream].keys():
                                            response += "<daemon name='" + daemon + "'>" + str(
                                                daemon_states[stream]
                                                [daemon]) + "</daemon>"
                                        response += "</stream>"

                                    for stream in client_streams:
                                        response += "<stream id='" + str(
                                            stream) + "'>"
                                        for daemon in daemon_states[
                                                stream].keys():
                                            response += "<daemon name='" + daemon + "'>" + str(
                                                daemon_states[stream]
                                                [daemon]) + "</daemon>"
                                        response += "</stream>"
                                    response += "</lmc_reply>"

                                elif command == "host_status":
                                    response = "<lmc_reply>"

                                    for disk in disks.keys():
                                        percent_full = 1.0 - (
                                            float(disks[disk]["available"]) /
                                            float(disks[disk]["size"]))
                                        response += "<disk mount='" + disk + "' percent_full='" + str(
                                            percent_full) + "'>"
                                        response += "<size units='MB'>" + disks[
                                            disk]["size"] + "</size>"
                                        response += "<used units='MB'>" + disks[
                                            disk]["used"] + "</used>"
                                        response += "<available units='MB'>" + disks[
                                            disk]["available"] + "</available>"
                                        response += "</disk>"

                                    for stream in smrbs.keys():
                                        for key in smrbs[stream].keys():
                                            smrb = smrbs[stream][key]
                                            response += "<smrb stream='" + str(
                                                stream) + "' key='" + str(
                                                    key) + "'>"
                                            response += "<header_block nbufs='" + str(
                                                smrb['hdr']
                                                ['nbufs']) + "'>" + str(
                                                    smrb['hdr']['full']
                                                ) + "</header_block>"
                                            response += "<data_block nbufs='" + str(
                                                smrb['data']['nbufs']
                                            ) + "'>" + str(smrb['data']['full']
                                                           ) + "</data_block>"
                                            response += "</smrb>"

                                    response += "<system_load ncore='" + loads[
                                        "ncore"] + "'>"
                                    response += "<load1>" + loads[
                                        "1min"] + "</load1>"
                                    response += "<load5>" + loads[
                                        "5min"] + "</load5>"
                                    response += "<load15>" + loads[
                                        "15min"] + "</load15>"
                                    response += "</system_load>"

                                    response += "<sensors>"
                                    for sensor in sensors.keys():
                                        response += "<metric name='" + sensor + "' units='" + sensors[
                                            sensor]["units"] + "'>" + sensors[
                                                sensor]["value"] + "</metric>"
                                    response += "</sensors>"

                                    response += "</lmc_reply>"

                                else:
                                    response = "<lmc_reply>OK</lmc_reply>"

                                self.log(2, "-> " + response)

                                handle.send(response + "\r\n")
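
The monitor/control loop above expects one XML document per request and reads xml["lmc_cmd"]["command"]. A minimal client-side sketch consistent with that parsing (the host and port are placeholders; the reply is terminated by \r\n):

import socket
import xmltodict

msg = "<lmc_cmd><command>host_status</command></lmc_cmd>"
assert xmltodict.parse(msg)["lmc_cmd"]["command"] == "host_status"

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 32000))   # placeholder host and LMC_PORT
sock.send((msg + "\r\n").encode())
print(sock.recv(65536))
sock.close()
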
Example 27
  def main (self):

    stream_id = self.id

    # get the data block keys
    db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
    db_id_in  = self.cfg["PROCESSING_DATA_BLOCK"]
    db_id_out = self.cfg["SEND_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

    db_key_in = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id_in)
    db_key_out = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id_out)

    self.log (0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

    # create dspsr input file for the data block
    db_key_filename = "/tmp/spip_" + db_key_in + ".info"
    db_key_file = open (db_key_filename, "w")
    db_key_file.write("DADA INFO:\n")
    db_key_file.write("key " +  db_key_in + "\n")
    db_key_file.close()

    gpu_id = self.cfg["GPU_ID_" + str(self.id)]
    prev_utc_start = ""

    (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")

    (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

    # wait up to 10s for the SMRB to be created
    smrb_wait = 10
    cmd = "dada_dbmetric -k " + db_key_in
    self.binary_list.append (cmd)

    rval = 1
    while rval and smrb_wait > 0 and not self.quit_event.isSet():

      rval, lines = self.system (cmd)
      if rval:
        time.sleep(1)
      smrb_wait -= 1

    if rval:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + db_key_in)
      self.quit_event.set()

    else:

      while (not self.quit_event.isSet()):

        cmd = "dada_header -k " + db_key_in
        self.log(0, cmd)
        self.binary_list.append (cmd)
        rval, lines = self.system (cmd)
        self.binary_list.remove (cmd)

        # if the command returned ok and we have a header
        if rval != 0:
          if self.quit_event.isSet():
            self.log (2, cmd + " failed, but quit_event true")
          else:
            self.log (-2, cmd + " failed")
            self.quit_event.set()

        elif len(lines) == 0:
        
          self.log (-2, "header was empty")
          self.quit_event.set()
        
        else:

          header = Config.parseHeader (lines)

          utc_start = header["UTC_START"]
          self.log (1, "UTC_START=" + header["UTC_START"])
          self.log (1, "RESOLUTION=" + header["RESOLUTION"])

          # default processing commands
          fold_cmd = "dada_dbnull -s -k " + db_key_in
          trans_cmd = "dada_dbnull -s -k " + db_key_out
          search_cmd = "dada_dbnull -s -k " + db_key_in

          if prev_utc_start == utc_start:
            self.log (-2, "UTC_START [" + utc_start + "] repeated, ignoring observation")
          
          else: 
            beam = self.cfg["BEAM_" + str(self.beam_id)]

            if not float(bw) == float(header["BW"]):
              self.log (-1, "configured bandwidth ["+bw+"] != header["+header["BW"]+"]")
            if not float(cfreq) == float(header["FREQ"]):
              self.log (-1, "configured cfreq ["+cfreq+"] != header["+header["FREQ"]+"]")
            if not int(nchan) == int(header["NCHAN"]):
              self.log (-2, "configured nchan ["+nchan+"] != header["+header["NCHAN"]+"]")

            source = header["SOURCE"]

            # output directories 
            suffix     = "/processing/" + beam + "/" + utc_start + "/" + source + "/" + cfreq
            fold_dir   = self.cfg["CLIENT_FOLD_DIR"]   + suffix
            trans_dir  = self.cfg["CLIENT_TRANS_DIR"]  + suffix
            search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix
            
            fold = False
            search = False
            trans = False 
          
            try:
              fold = (header["PERFORM_FOLD"] == "1")
              search = (header["PERFORM_SEARCH"] == "1")
              trans = (header["PERFORM_TRANS"] == "1")
            except KeyError as e:
              fold = True
              search = False
              trans = False 

            if fold:
              os.makedirs (fold_dir, 0755)
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -overlap -minram 4000 -x 16384 -b 1024 -L 5 -no_dyn"
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 512 -b 1024 -L 10 -no_dyn -skz -skzs 4 -skzm 128 -skz_no_tscr -skz_no_fscr"
              #fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 2048 -b 1024 -Lmin 7 -L 8 -no_dyn"
              fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -minram 2048 -x 1024 -b 1024 -L 8 -Lmin 7 -no_dyn"
              #fold_cmd = "dada_dbdisk -k " + db_key_in + " -s -D " + fold_dir

              header_file = fold_dir + "/obs.header"
              Config.writeDictToCFGFile (header, header_file)

            if search or trans:
              os.makedirs (search_dir, 0755)
              search_cmd = "digifil " + db_key_filename + " -c -B 10 -o " + utc_start + " .fil"
              if trans:
                search_cmd += " -k " + db_key_out

            if trans and int(self.cfg["NUM_SUBBAND"]) == 1:
              os.makedirs (trans_dir, 0755)
              trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])

          # setup output pipes
          fold_log_pipe = LogSocket ("fold_src", "fold_src", str(self.id), "stream",
                                       log_host, log_port, int(DL))

          #trans_log_pipe  = LogSocket ("trans_src", "trans_src", str(self.id), "stream",
          #                             log_host, log_port, int(DL))
          #search_log_pipe = LogSocket ("search_src", "search_src", str(self.id), "stream",
          #                             log_host, log_port, int(DL))

          fold_log_pipe.connect()

          self.binary_list.append (fold_cmd)
          #self.binary_list.append (trans_cmd)
          #self.binary_list.append (search_cmd)

          # create processing threads
          self.log (2, "creating processing threads")      
          cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
          fold_thread = procThread (cmd, fold_dir, fold_log_pipe.sock, 1)

          #trans_thread = procThread (trans_cmd, self.log_sock.sock, 2)
          #search_thread = procThread (search_cmd, self.log_sock.sock, 2)

          # start processing threads
          self.log (2, "starting processing threads")      
          self.log (1, "START " + fold_cmd)      
          fold_thread.start()
          #trans_thread.start()
          #search_thread.start()

          # join processing threads
          self.log (2, "waiting for fold thread to terminate")
          rval = fold_thread.join() 
          self.log (2, "fold thread joined")
          self.log (1, "END   " + fold_cmd)      

          # remove the binary command from the list
          self.binary_list.remove (fold_cmd)

          if rval:
            self.log (-2, "fold thread failed")
            self.quit_event.set()

          #self.log (2, "joining trans thread")
          #rval = trans_thread.join() 
          #self.log (2, "trans thread joined")
          #if rval:
          #  self.log (-2, "trans thread failed")
          #  quit_event.set()

          #self.log (2, "joining search thread")
          #rval = search_thread.join() 
          #self.log (2, "search thread joined")
          #if rval:
          #  self.log (-2, "search thread failed")
          #  quit_event.set()

          fold_log_pipe.close()
          #trans_log_pipe.close()
          #search_log_pipe.close()

        self.log (1, "processing completed")
Example 28
  def issue_start_cmd (self, xml):

    self.log(2, "TCSDaemon::issue_start_cmd nbeam=" + xml['obs_cmd']['beam_configuration']['nbeam']['#text'])

    # determine which beams this command corresponds to
    for ibeam in range(int(xml['obs_cmd']['beam_configuration']['nbeam']['#text'])):
      state = xml['obs_cmd']['beam_configuration']['beam_state_' + str(ibeam)]['#text']
      self.log(2, "TCSDaemon::issue_start_cmd beam state=" + state)
      if state == "1" or state == "on":
        b = xml['obs_cmd']['beam_configuration']['beam_state_' + str(ibeam)]['@name']
        self.log(2, "TCSDaemon::issue_start_cmd beam name=" + b)
        if b in self.beam_states.keys():

          self.log(2, "TCSDaemon::issue_start_cmd config=" + str(self.beam_states[b]["config"].keys()))

          obs_config = {}

          self.beam_states[b]["lock"].acquire()

          utc_start = "unset"
          source = "unset"

          # add source parameters
          s = self.beam_states[b]["config"]["source_parameters"]
          for k in s.keys():
            key = s[k]["@key"]
            val = s[k]["#text"]
            obs_config[key] = val
            self.log (1, key + "=" + val)
            if key == "SOURCE":
              source = val
          # add the observation parameters
          o = self.beam_states[b]["config"]["observation_parameters"]

          self.log(1, "TCSDaemon::issue_start_cmd o=" + str(o))
          self.log(1, "TCSDaemon::issue_start_cmd checking value of supplied UTC start: [" + o["utc_start"]["#text"] + "]" )

          # if no UTC_START has been specified, set it to now + start_offset_seconds
          if o["utc_start"]["#text"] == "None":
            utc_start = times.getUTCTime(self.start_offset_seconds)
            o["utc_start"]["#text"] = utc_start
            self.log(1, "TCSDaemon::issue_start_cmd utc_start=" + utc_start)

          else:
            self.log(1, "TCSDaemon::issue_start_cmd utc_start already set " + o["utc_start"]["#text"])

          for k in o.keys():
            key = o[k]["@key"]
            try:
              val = o[k]["#text"]
            except KeyError as e:
              val = ''
            obs_config[key] = val
            self.log(1, key + "=" + val)

          # add the calibration parameters
          o = self.beam_states[b]["config"]["calibration_parameters"]
          for k in o.keys():
            key = o[k]["@key"]
            try:
              val = o[k]["#text"]
            except KeyError as e:
              val = ''
            obs_config[key] = val
            self.log(1, key + "=" + val)

            # hack for DSPSR requiring this parameter
            if key == "CAL_FREQ":
              obs_config["CALFREQ"] = val

          # extract the stream information
          s = self.beam_states[b]["config"]["stream_configuration"]

          # determine the number of streams present in the configure command
          nstream = s["nstream"]["#text"]
          if int(nstream) != int(self.cfg["NUM_STREAM"]):
            self.log(1, "TCSDaemon::issue_start_cmd number of streams in config and command did not match")

          # record which streams are processing which modes
          stream_modes = {}

          # work out which streams correspond to these beams
          for istream in range(int(nstream)):

            stream_active = False
            stream_xml = self.beam_states[b]["config"]["stream" + str(istream)]

            # make a deep copy of the common configuration
            stream_config = copy.deepcopy (obs_config)

            # inject custom fields into header
            custom = stream_xml["custom_parameters"]
            for k in custom.keys():
              key = custom[k]["@key"]
              try:
                val = custom[k]["#text"]
              except KeyError as e:
                val = ''
              stream_config[key] = val
              self.log(2, key + "=" + val)

            modes = stream_xml["processing_modes"]
            for k in modes.keys():
              key = modes[k]["@key"]
              val = modes[k]["#text"]
              stream_config[key] = val
              self.log(2, key + "=" + val)

              # inject processing parameters into header
              if val == "true" or val == "1":
                if not (k in stream_modes.keys()):
                  stream_modes[k] = []
                stream_modes[k].append(istream)
                stream_active = True
                self.log (2, "TCSDaemon::issue_start_cmd mode=" + k)
                p = stream_xml[k + "_processing_parameters"]
                for l in p.keys():
                  pkey = p[l]["@key"]
                  try:
                    pval = p[l]["#text"]
                  except KeyError as e:
                    pval = ''
                  stream_config[pkey] = pval
                  self.log(2, pkey + "=" + pval)

            # ensure the start command is set
            stream_config["COMMAND"] = "START"
            stream_config["OBS_OFFSET"] = "0"

            # convert to a single ascii string
            obs_header = Config.writeDictToString (stream_config)

            (host, beam_idx, subband) = self.cfg["STREAM_"+str(istream)].split(":")
            beam = self.cfg["BEAM_" + beam_idx]

            # connect to streams for this beam only
            if stream_active and beam == b:
              self.log(2, "TCSDaemon::issue_start_cmd host="+host+" beam="+beam+" subband="+subband)

              # control port for this recv stream
              ctrl_port = int(self.cfg["STREAM_CTRL_PORT"]) + istream

              self.log(2, host + ":"  + str(ctrl_port) + " <- start")

              # connect to recv agent and provide observation configuration
              self.log(2, "TCSDaemon::issue_start_cmd openSocket("+host+","+str(ctrl_port)+")")
              recv_sock = sockets.openSocket (DL, host, ctrl_port, 5)
              if recv_sock:
                self.log(3, "TCSDaemon::issue_start_cmd sending obs_header length=" + str(len(obs_header)))
                recv_sock.send(obs_header)
                self.log(3, "TCSDaemon::issue_start_cmd header sent")
                recv_sock.close()
                self.log(3, "TCSDaemon::issue_start_cmd socket closed")
              else:
                self.log(-2, "TCSDaemon::issue_start_cmd failed to connect to "+host+":"+str(ctrl_port))

              # connect to spip_gen and issue start command for UTC
              # assumes gen host is the same as the recv host!
              # gen_port = int(self.cfg["STREAM_GEN_PORT"]) + istream
              # sock = sockets.openSocket (DL, host, gen_port, 1)
              # if sock:
              #   sock.send(obs_header)
              #   sock.close()

          utc_start = self.beam_states[b]["config"]["observation_parameters"]["utc_start"]["#text"]

          # update the dict of observing info for this beam
          self.beam_states[b]["state"] = "Recording"
          self.beam_states[b]["lock"].release()

          # now handle the active streams
          for mode in stream_modes.keys():
            self.log(1, "TCSDaemon::issue_start_cmd mode=" + mode + " streams=" + str(stream_modes[mode]))
            self.prepare_observation (beam, utc_start, source, mode, stream_modes[mode])
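
For reference, issue_start_cmd walks an xmltodict-style nested dictionary. A minimal, hypothetical obs_cmd fragment consistent with the key accesses above (element and attribute names are inferred from this method, not from a schema):

# hypothetical fragment matching the key accesses in issue_start_cmd
xml = {
    "obs_cmd": {
        "beam_configuration": {
            "nbeam": {"#text": "1"},
            "beam_state_0": {"@name": "B0", "#text": "on"},
        },
    }
}
nbeam = int(xml["obs_cmd"]["beam_configuration"]["nbeam"]["#text"])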
Example No. 29
0
    def main(self):

        self.log(2, "UWBProcDaemon::main configure_child()")
        self.configure_child()

        self.log(2, "UWBProcDaemon::main wait_for_smrb()")
        SMRBDaemon.waitForSMRB(self.db_key, self)

        if self.quit_event.isSet():
            self.log(
                -1,
                "UWBProcDaemon::main quit event was set after waiting for SMRB creation"
            )
            return

        # continuously run the main command waiting on the SMRB
        while (not self.quit_event.isSet()):

            # wait for the header to determine if folding is required
            cmd = "dada_header -k " + self.db_key + " -t " + self.tag
            self.log(2, "UWBProcDaemon::main " + cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            # check that the command succeeded and returned a header
            if rval != 0:
                time.sleep(0.1)
                if self.quit_event.isSet():
                    self.log(
                        2, "UWBProcDaemon::main " + cmd +
                        " failed, but quit_event true")
                else:
                    self.log(-2, "UWBProcDaemon::main " + cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:

                self.log(-2, "UWBProcDaemon::main header was empty")
                self.quit_event.set()

            else:

                self.log(2, "UWBProcDaemon::main parsing header")
                self.header = Config.parseHeader(lines)

                # account for lower to upper sideband conversion
                if not abs(float(self.bw)) == float(self.header["BW"]):
                    self.log(
                        -1, "configured bandwidth [" + self.bw +
                        "] != self.header[" + self.header["BW"] + "]")
                if not float(self.cfreq) == float(self.header["FREQ"]):
                    self.log(
                        -1, "configured cfreq [" + self.cfreq +
                        "] != self.header[" + self.header["FREQ"] + "]")
                if not int(self.nchan) == int(self.header["NCHAN"]):
                    self.log(
                        -2, "configured nchan [" + self.nchan +
                        "] != self.header[" + self.header["NCHAN"] + "]")

                self.source = self.header["SOURCE"]
                self.utc_start = self.header["UTC_START"]

                # call the child class prepare method
                self.log(2, "UWBProcDaemon::main prepare()")
                valid = self.prepare()

                if valid:

                    # ensure the output directory exists
                    self.log(
                        2, "UWBProcDaemon::main creating out_dir: " +
                        self.out_dir)
                    if not os.path.exists(self.out_dir):
                        os.makedirs(self.out_dir, 0755)

                    # write the sub-bands header to the out_dir
                    header_file = self.out_dir + "/obs.header"
                    self.log(
                        2, "UWBProcDaemon::main writing obs.header to out_dir")
                    Config.writeDictToCFGFile(self.header, header_file)

                    # configure the output pipe
                    self.log(
                        2, "UWBProcDaemon::main configuring output log pipe")
                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])
                    log_pipe = LogSocket(self.log_prefix, self.log_prefix,
                                         str(self.id), "stream", log_host,
                                         log_port, int(DL))
                    log_pipe.connect()

                    # get any modifications to the environment
                    env = self.getEnvironment()

                    # add the binary command to the kill list
                    self.binary_list.append(self.cmd)

                    # create processing threads
                    self.log(
                        2, "UWBProcDaemon::main creating processing threads")
                    cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
                    proc_thread = UWBProcThread(self, cmd, log_pipe.sock, env,
                                                1)

                    # start processing threads
                    self.log(2,
                             "UWBProcDaemon::main starting processing thread")
                    proc_thread.start()

                    self.log(1, "START " + self.cmd)

                    # join processing threads
                    self.log(
                        2,
                        "UWBProcDaemon::main waiting for proc thread to terminate"
                    )
                    rval = proc_thread.join()
                    self.log(2, "UWBProcDaemon::main proc thread joined")

                    self.log(1, "END   " + self.cmd)

                    # remove the binary command from the list
                    self.binary_list.remove(self.cmd)

                    if rval:
                        self.log(-2, "UWBProcDaemon::main proc thread failed")
                        self.quit_event.set()

                    log_pipe.close()

                    # good practice: avoid a tight restart loop if the proc
                    # thread fails immediately
                    time.sleep(1)

                else:

                    self.log(2, "MEERKATProcDaemon::main skip this processing")
                    time.sleep(10)

            self.log(2, "UWBProcDaemon::main processing loop completed")
Example No. 30
0
    def process_gains(self, proc_dir, send_dir):

        # find the most recent gains file
        files = [
            file for file in os.listdir(proc_dir)
            if file.lower().endswith(".gains")
        ]
        self.trace("files=" + str(files))

        if len(files) > 0:

            gains_time_file = proc_dir + "/gains.time"
            cmd = ""

            # combine all the gains files together
            if os.path.exists(gains_time_file):
                cmd = "uwb_adaptive_filter_tappend " + gains_time_file
                for file in files:
                    cmd = cmd + " " + proc_dir + "/" + file
                cmd = cmd + " " + gains_time_file
            else:
                if len(files) == 1:
                    cmd = "cp " + proc_dir + "/" + files[0] + " " + \
                        gains_time_file
                else:
                    cmd = "uwb_adaptive_filter_tappend"
                    for file in files:
                        cmd = cmd + " " + proc_dir + "/" + file
                    cmd = cmd + " " + gains_time_file

            self.info(cmd)
            rval, lines = self.system(cmd, 2)
            if not rval == 0:
                self.warn("failed to tappend gains files")
                return

            # read the data from file into a numpy array
            file_size = os.path.getsize(gains_time_file)
            header_size = 4096
            data_size = file_size - header_size

            gains_file = open(gains_time_file, "rb")
            header_str = gains_file.read(header_size)
            header = Config.readDictFromString(header_str)

            npol = int(header["NPOL"])
            ndim = int(header["NDIM"])
            nchan = int(header["NCHAN"])
            nant = int(header["NANT"])
            nbit = int(header["NBIT"])
            freq = float(header["FREQ"])
            bw = float(header["BW"])
            tsamp = float(header["TSAMP"])

            self.info("npol=" + str(npol) + " ndim=" + str(ndim) + " nchan=" +
                      str(nchan) + " nant=" + str(nant) + " nbit=" +
                      str(nbit) + " freq=" + str(freq) + " bw=" + str(bw))

            # compute the sample geometry; the float32 read below assumes NBIT == 32
            bytes_per_sample = (npol * ndim * nchan * nant * nbit) / 8
            ndat = data_size / bytes_per_sample
            nval = ndat * nant * nchan * npol * ndim

            self.info("ndat=" + str(ndat) + " bytes_per_sample=" +
                      str(bytes_per_sample) + " nval=" + str(nval))

            raw = np.fromfile(gains_file, dtype=np.float32, count=nval)
            gains_file.close()

            self.info("np.shape(raw)=" + str(np.shape(raw)))
            self.info("npol=" + str(npol) + " nchan=" + str(nchan))

            # reshape the raw data into a numpy array with specified dimensions
            data = raw.reshape((ndat, nant, npol, nchan, ndim))

            # acquire the results lock
            self.results["lock"].acquire()

            # generate an empty numpy array with ndat values
            xvals = np.zeros(ndat)
            gains_time = np.zeros((npol, ndat))
            gains_freq = np.zeros((npol, nchan))

            for idat in range(ndat):
                xvals[idat] = float(idat) * tsamp / 1e6
                for ipol in range(npol):
                    for isig in range(nant):
                        for ichan in range(nchan):
                            power = 0
                            for idim in range(ndim):
                                g = data[idat][isig][ipol][ichan][idim]
                                power += g * g
                            if power > gains_time[ipol][idat]:
                                gains_time[ipol][idat] = power
                            if idat == ndat - 1:
                                gains_freq[ipol][ichan] = power

            if npol == 1:
                labels = ["AA+BB"]
                colours = ["red"]
            elif npol == 2:
                labels = ["AA", "BB"]
                colours = ["red", "green"]
            else:
                labels = []
                colours = []

            self.gains_time_plot.plot(240, 180, True, xvals, gains_time,
                                      labels, colours)
            self.results["gainstime_lo"] = self.gains_time_plot.getRawImage()

            self.gains_time_plot.plot(1024, 768, False, xvals, gains_time,
                                      labels, colours)
            self.results["gainstime_hi"] = self.gains_time_plot.getRawImage()

            self.gains_freq_plot.plot_npol(240, 180, True, nchan, freq, bw,
                                           gains_freq, labels)
            self.results["gainsfreq_lo"] = self.gains_freq_plot.getRawImage()

            self.gains_freq_plot.plot_npol(1024, 768, False, nchan, freq, bw,
                                           gains_freq, labels)
            self.results["gainsfreq_hi"] = self.gains_freq_plot.getRawImage()

            self.gains_valid = True

            self.results["lock"].release()

            for file in files:
                os.rename(proc_dir + "/" + file, send_dir + "/" + file)
        return len(files)
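
The nested loops above touch every (dat, ant, pol, chan, dim) element in Python, which is slow for large files. A vectorised numpy equivalent over the same data array is sketched below; it reproduces the loops exactly, including the detail that gains_freq keeps only the last antenna written at the final time sample:

import numpy as np

# data has shape (ndat, nant, npol, nchan, ndim); power sums g^2 over dim
power = np.sum(data ** 2, axis=4)            # (ndat, nant, npol, nchan)
xvals = np.arange(ndat) * tsamp / 1e6        # seconds
gains_time = power.max(axis=(1, 3)).T        # (npol, ndat): max over ant, chan
gains_freq = power[-1, -1]                   # (npol, nchan): last sample, last antenna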
Example No. 31
0
    def main(self):

        stream_id = self.id

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        db_id_in = self.cfg["PROCESSING_DATA_BLOCK"]
        db_id_out = self.cfg["SEND_DATA_BLOCK"]
        num_stream = self.cfg["NUM_STREAM"]
        cpu_core = self.cfg["STREAM_PROC_CORE_" + stream_id]

        db_key_in = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        db_id_in)
        db_key_out = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                         db_id_out)

        self.log(0, "db_key_in=" + db_key_in + " db_key_out=" + db_key_out)

        # create dspsr input file for the data block
        db_key_filename = "/tmp/spip_" + db_key_in + ".info"
        db_key_file = open(db_key_filename, "w")
        db_key_file.write("DADA INFO:\n")
        db_key_file.write("key " + db_key_in + "\n")
        db_key_file.close()

        gpu_id = self.cfg["GPU_ID_" + str(self.id)]
        prev_utc_start = ""

        (host, beam, subband) = self.cfg["STREAM_" + stream_id].split(":")

        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + subband].split(":")

        # wait up to 10s for the SMRB to be created
        smrb_wait = 10
        cmd = "dada_dbmetric -k " + db_key_in
        self.binary_list.append(cmd)

        rval = 1
        while rval and smrb_wait > 0 and not self.quit_event.isSet():

            rval, lines = self.system(cmd)
            if rval:
                time.sleep(1)
            smrb_wait -= 1

        if rval:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                db_key_in)
            self.quit_event.set()

        else:

            while (not self.quit_event.isSet()):

                cmd = "dada_header -k " + db_key_in
                self.log(0, cmd)
                self.binary_list.append(cmd)
                rval, lines = self.system(cmd)
                self.binary_list.remove(cmd)

                # check that the command succeeded and returned a header
                if rval != 0:
                    if self.quit_event.isSet():
                        self.log(2, cmd + " failed, but quit_event true")
                    else:
                        self.log(-2, cmd + " failed")
                        self.quit_event.set()

                elif len(lines) == 0:

                    self.log(-2, "header was empty")
                    self.quit_event.set()

                else:

                    header = Config.parseHeader(lines)

                    utc_start = header["UTC_START"]
                    self.log(1, "UTC_START=" + header["UTC_START"])
                    self.log(1, "RESOLUTION=" + header["RESOLUTION"])

                    # default processing commands
                    fold_cmd = "dada_dbnull -s -k " + db_key_in
                    trans_cmd = "dada_dbnull -s -k " + db_key_out
                    search_cmd = "dada_dbnull -s -k " + db_key_in

                    if prev_utc_start == utc_start:
                        self.log(
                            -2, "UTC_START [" + utc_start +
                            "] repeated, ignoring observation")

                    else:
                        beam = self.cfg["BEAM_" + str(self.beam_id)]

                        if not float(bw) == float(header["BW"]):
                            self.log(
                                -1, "configured bandwidth [" + bw +
                                "] != header[" + header["BW"] + "]")
                        if not float(cfreq) == float(header["FREQ"]):
                            self.log(
                                -1, "configured cfreq [" + cfreq +
                                "] != header[" + header["FREQ"] + "]")
                        if not int(nchan) == int(header["NCHAN"]):
                            self.log(
                                -2, "configured nchan [" + nchan +
                                "] != header[" + header["NCHAN"] + "]")

                        source = header["SOURCE"]

                        # output directories
                        suffix = "/processing/" + beam + "/" + utc_start + "/" + source + "/" + cfreq
                        fold_dir = self.cfg["CLIENT_FOLD_DIR"] + suffix
                        trans_dir = self.cfg["CLIENT_TRANS_DIR"] + suffix
                        search_dir = self.cfg["CLIENT_SEARCH_DIR"] + suffix

                        fold = False
                        search = False
                        trans = False

                        try:
                            fold = (header["PERFORM_FOLD"] == "1")
                            search = (header["PERFORM_SEARCH"] == "1")
                            trans = (header["PERFORM_TRANS"] == "1")
                        except KeyError as e:
                            fold = True
                            search = False
                            trans = False

                        if fold:
                            os.makedirs(fold_dir, 0755)
                            fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -overlap -minram 4000 -x 16384 -b 1024 -L 5 -no_dyn"
                            fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 512 -b 1024 -L 10 -no_dyn -skz -skzs 4 -skzm 128 -skz_no_tscr -skz_no_fscr"
                            #fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -D 0 -minram 2048 -b 1024 -Lmin 7 -L 8 -no_dyn"
                            fold_cmd = "dspsr -Q " + db_key_filename + " -cuda " + gpu_id + " -minram 2048 -x 1024 -b 1024 -L 8 -Lmin 7 -no_dyn"
                            #fold_cmd = "dada_dbdisk -k " + db_key_in + " -s -D " + fold_dir

                            header_file = fold_dir + "/obs.header"
                            Config.writeDictToCFGFile(header, header_file)

                        if search or trans:
                            os.makedirs(search_dir, 0755)
                            search_cmd = "digifil " + db_key_filename + " -c -B 10 -o " + utc_start + " .fil"
                            if trans:
                                search_cmd += " -k " + db_key_out

                        if trans and int(self.cfg["NUM_SUBBAND"]) == "1":
                            os.makedirs(trans_dir, 0755)
                            trans_cmd = "heimdall -k " + db_key_out + " -gpu_id 1"

                    log_host = self.cfg["SERVER_HOST"]
                    log_port = int(self.cfg["SERVER_LOG_PORT"])

                    # setup output pipes
                    fold_log_pipe = LogSocket("fold_src", "fold_src",
                                              str(self.id), "stream", log_host,
                                              log_port, int(DL))

                    #trans_log_pipe  = LogSocket ("trans_src", "trans_src", str(self.id), "stream",
                    #                             log_host, log_port, int(DL))
                    #search_log_pipe = LogSocket ("search_src", "search_src", str(self.id), "stream",
                    #                             log_host, log_port, int(DL))

                    fold_log_pipe.connect()

                    self.binary_list.append(fold_cmd)
                    #self.binary_list.append (trans_cmd)
                    #self.binary_list.append (search_cmd)

                    # create processing threads
                    self.log(2, "creating processing threads")
                    cmd = "numactl -C " + cpu_core + " -- " + fold_cmd
                    fold_thread = procThread(cmd, fold_dir, fold_log_pipe.sock,
                                             1)

                    #trans_thread = procThread (trans_cmd, self.log_sock.sock, 2)
                    #search_thread = procThread (search_cmd, self.log_sock.sock, 2)

                    # start processing threads
                    self.log(2, "starting processing threads")
                    self.log(1, "START " + fold_cmd)
                    fold_thread.start()
                    #trans_thread.start()
                    #search_thread.start()

                    # join processing threads
                    self.log(2, "waiting for fold thread to terminate")
                    rval = fold_thread.join()
                    self.log(2, "fold thread joined")
                    self.log(1, "END   " + fold_cmd)

                    # remove the binary command from the list
                    self.binary_list.remove(fold_cmd)

                    if rval:
                        self.log(-2, "fold thread failed")
                        self.quit_event.set()

                    #self.log (2, "joining trans thread")
                    #rval = trans_thread.join()
                    #self.log (2, "trans thread joined")
                    #if rval:
                    #  self.log (-2, "trans thread failed")
                    #  quit_event.set()

                    #self.log (2, "joining search thread")
                    #rval = search_thread.join()
                    #self.log (2, "search thread joined")
                    #if rval:
                    #  self.log (-2, "search thread failed")
                    #  quit_event.set()

                    fold_log_pipe.close()
                    #trans_log_pipe.close()
                    #search_log_pipe.close()

                self.log(1, "processing completed")
Example No. 32
0
    def patch_psrfits_header(self, input_dir, input_file):

        header_file = input_dir + "/obs.header"
        self.log(3, "patch_psrfits_header: header_file=" + header_file)

        header = Config.readCFGFileIntoDict(input_dir + "/obs.header")

        new = {}
        new["obs:observer"] = header["OBSERVER"]
        new["obs:projid"] = header["PID"]

        new["be:nrcvr"] = header["NPOL"]

        try:
            val = header["RCVR_HAND"]
            new["rcvr:hand"] = val
        except KeyError as e:
            self.log(2, "patch_psrfits_header: RCVR_HAND not set in header")

        try:
            val = header["BACKEND_PHASE"]  # Phase convention of backend
            new["be:phase"] = val
        except KeyError as e:
            self.log(2,
                     "patch_psrfits_header: BACKEND_PHASE not set in header")

        try:
            val = header["FOLD_OUTTSUBINT"]  # Correlator cycle time
            new["be:tcycle"] = val
        except KeyError as e:
            self.log(
                2, "patch_psrfits_header: FOLD_OUTTSUBINT not set in header")

        new["be:dcc"] = "0"  # Downconversion conjugation corrected
        new["sub:nsblk"] = "1"  # Samples/row (SEARCH mode, else 1)

        # this needs to come from CAM, hack for now
        new["ext:trk_mode"] = "TRACK"  # Tracking mode
        new["ext:bpa"] = "0"  # Beam position angle [?]
        new["ext:bmaj"] = "0"  # Beam major axis [degrees]
        new["ext:bmin"] = "0"  # Beam minor axis [degrees]

        self.log(
            3,
            "RepackDaemon::patch_psrfits_header freq=" + str(header["FREQ"]))

        new["ext:obsfreq"] = header["FREQ"]
        new["ext:obsbw"] = header["BW"]
        new["ext:obsnchan"] = header["NCHAN"]

        new["ext:stt_crd1"] = header["RA"]
        new["ext:stt_crd2"] = header["DEC"]
        new["ext:stp_crd1"] = header["RA"]
        new["ext:stp_crd2"] = header["DEC"]
        new["ext:stt_date"] = header["UTC_START"][0:10]
        new["ext:stt_time"] = header["UTC_START"][11:19]

        # build psredit command, in-place modification
        cmd = "psredit -m"

        try:
            itrf = header["ITRF"]
            (x, y, z) = itrf.split(",")
            new["itrf:ant_x"] = x
            new["itrf:ant_y"] = y
            new["itrf:ant_z"] = z
            cmd = cmd + " -a itrf"

        except KeyError as e:
            self.log(2, "patch_psrfits_header: ITRF not set in header")

        # create the psredit command necessary to apply "new"
        cmd = cmd + " -c " + ",".join(
            ['%s=%s' % (key, value)
             for (key, value) in new.items()]) + " " + input_file
        rval, lines = self.system(cmd, 2)
        if rval:
            return rval, lines[0]
        return 0, ""
Example No. 33
0
  def main (self):

    control_thread = []
  
    # the threads for each instance of beams, streams and server    
    self.stream_threads = {}
    self.beam_threads = {}
    self.server_thread = []

    self.reload_beams = {}
    self.reload_streams = {}
    self.system_lock = threading.Lock()

    # find matching client streams for this host
    self.host_streams = []
    for istream in range(int(self.cfg["NUM_STREAM"])):
      (req_host, beam_id, subband_id) = Config.getStreamConfig (istream, self.cfg)
      if req_host == self.req_host and not istream in self.host_streams:
        self.host_streams.append(istream)

    # find matching client streams for this host
    self.host_beams = []
    for istream in range(int(self.cfg["NUM_STREAM"])):
      (req_host, beam_id, subband_id) = Config.getStreamConfig (istream, self.cfg)
      if req_host == self.req_host and not beam_id in self.host_beams:
        self.host_beams.append(beam_id)

    # find matching server stream
    self.host_servers = []
    if self.cfg["SERVER_HOST"] == self.req_host:
      self.host_servers.append(-1)

    self.server_daemon_states = {}
    self.stream_daemon_states = {}
    self.beam_daemon_states = {}

 
    # configure disk systems to monitor
    if len(self.host_servers) > 0:
      self.disks_to_monitor = [self.cfg["SERVER_DIR"]]
    else:
      self.disks_to_monitor = [self.cfg["CLIENT_DIR"]]

    # gather some initial statistics
    self.gather_stats()

    # start server thread
    for stream in self.host_servers:
      self.debug("server_thread["+str(stream)+"] = streamThread(-1)")
      self.server_daemon_states[stream] = {}
      self.server_thread = serverThread(stream, self, self.server_daemon_states[stream])
      self.debug("server_thread["+str(stream)+"].start()")
      self.server_thread.start()
      self.debug("server_thread["+str(stream)+"] started")

    sleep(5)
    self.log(1, "Server threads started: " + str(len(self.host_servers)))

    # start a thread for each stream
    for stream in self.host_streams:
      self.stream_daemon_states[stream] = {}
      self.debug("stream_threads["+str(stream)+"] = streamThread ("+str(stream)+")")
      self.reload_streams[stream] = False
      self.stream_threads[stream] = streamThread (stream, self, self.stream_daemon_states[stream])
      self.debug("stream_threads["+str(stream)+"].start()")
      self.stream_threads[stream].start()
      self.debug("stream_thread["+str(stream)+"] started!")

    self.log(1, "Stream threads started: " + str(len(self.host_streams)))

    # start a thread for each beam
    for beam in self.host_beams:
      self.beam_daemon_states[beam] = {}
      self.debug("beam_threads["+str(beam)+"] = beamThread ("+str(beam)+")")
      self.reload_beams[beam] = False
      self.beam_threads[beam] = beamThread (beam, self, self.beam_daemon_states[beam])
      self.debug("beam_threads["+str(beam)+"].start()")
      self.beam_threads[beam].start()
      self.debug("beam_thread["+str(beam)+"] started!")

    self.log(1, "Beam threads started: " + str(len(self.host_beams)))

    # main thread

    hw_poll = 5
    counter = 0 

    first_time = True

    self.debug("starting main loop quit_event=" + str(self.quit_event.isSet()))

    # control loop
    while not self.quit_event.isSet():

      # update the monitoring points
      if counter == 0:

        self.debug("refreshing monitoring points")
        # the resource lock is skipped on the very first pass, so the
        # release must be guarded the same way as the acquire
        if not first_time:
          self.resource_lock.acquire ()

        self.trace("self.gather_stats()")
        self.gather_stats()

        if not first_time:
          self.resource_lock.release ()
        first_time = False

        self.trace("monitoring points refreshed")

        counter = hw_poll

      if not self.quit_event.isSet():
        sleep(1)

      counter -= 1

    self.debug("quit_event set, asking lmcThreads to terminate")
    self.concludeThreads()

    self.debug("done")
Example No. 34
0
  def collect_data(self, dir, beam, utc_start, source):

    data = self.results[utc_start][source]
 
    data["beam"] = beam 
    data["utc_start"] = utc_start
    data["source"] = source
    data["index"] = self.source_number
    self.source_number += 1

    # find the header filename
    cmd = "find " + dir + " -mindepth 1 -maxdepth 1 -type f -name 'obs.header*' | head -n 1"
    rval, lines = self.system (cmd, 3)
    if rval:
      return ("fail", data)
  
    header_file = lines[0]
    self.log (3, "collect_data: header_file=" + header_file)

    # read the contents of the header
    header = Config.readCFGFileIntoDict (header_file)

    data["centre_frequency"] = header["FREQ"]
    data["bandwidth"] = header["BW"]
    data["nchannels"] = header["NCHAN"]
    data["ra"] = header["RA"]
    data["dec"] = header["DEC"]
    data["mode"] = header["MODE"]
    data["project_id"] = header["PID"]
    data["subarray_id"] = "N/A"
    data["dir"] = dir
    data["length"] = "-1"
    data["snr"] = "-1"

    # convert entire header into XML
    data["header"] = ""
    keys = header.keys()
    keys.sort()
    for key in keys:
      data["header"] += "<" + key + ">" + header[key] + "</" + key + ">"

    psrplot_opts = "-c x:view='(0.0,1.0)' -c y:view='(0.0,1.0)' -g 160x120 -D -/png"

    time_sum_file = dir + "/time.sum"
    # find the path to the archives for plotting
    if os.path.exists(time_sum_file):
      data["time_sum"] = time_sum_file

      data["time_vs_phase"] = {}
      data["time_vs_phase"]["xres"] = 160
      data["time_vs_phase"]["yres"] = 120

      time_plot_file = dir + "/time.png"
      # if the plot does not exist, create it
      if not os.path.exists (time_plot_file):
        cmd = "psrplot -p time " + time_sum_file + " -jDp " + psrplot_opts
        rval, data["time_vs_phase"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to generate time plot")
        fptr = open (time_plot_file, "wb")
        fptr.write(data["time_vs_phase"]["raw"])
        fptr.close()

      # read the created plot from the file system
      else:
        rval, data["time_vs_phase"]["raw"] = self.system_raw ("cat " + dir +"/time.png", 3)

    freq_sum_file = dir + "/freq.sum"
    if os.path.exists(freq_sum_file):
      data["freq_sum"] = freq_sum_file

      # generate the freq plot
      data["freq_vs_phase"] = {}
      data["freq_vs_phase"]["xres"] = 160
      data["freq_vs_phase"]["yres"] = 120

      freq_plot_file = dir + "/freq.png"
      if not os.path.exists (freq_plot_file):
        cmd = "psrplot -p freq " + freq_sum_file + " -jDp " + psrplot_opts
        rval, data["freq_vs_phase"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to generate freq.png")
        fptr = open (freq_plot_file, "wb")
        fptr.write(data["freq_vs_phase"]["raw"])
        fptr.close()
      else:
        rval, data["freq_vs_phase"]["raw"] = self.system_raw ("cat " + dir +"/freq.png", 3)

      # generate the flux plot
      data["flux_vs_phase"] = {}
      data["flux_vs_phase"]["xres"] = 160
      data["flux_vs_phase"]["yres"] = 120

      flux_plot_file = dir + "/flux.png"
      if not os.path.exists (flux_plot_file):
        cmd = "psrplot -p flux " + freq_sum_file + " -jFDp " + psrplot_opts
        rval, data["flux_vs_phase"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to create flux plot")
        fptr = open (flux_plot_file, "wb")
        fptr.write(data["flux_vs_phase"]["raw"])
        fptr.close()
      else:
        rval, data["flux_vs_phase"]["raw"] = self.system_raw ("cat " + dir +"/flux.png", 3)

    band_file = dir + "/band.last"
    if os.path.exists(band_file):
      data["band_last"] = band_file

      data["bandpass"] = {}
      data["bandpass"]["xres"] = 160
      data["bandpass"]["yres"] = 120
      band_plot_file = dir + "/band.png"
      if not os.path.exists (band_plot_file):
        cmd = "psrplot -p b " + band_file + " -x -lpol=0,1 -N2,1 " + psrplot_opts
        rval, data["bandpass"]["raw"] = self.system_raw (cmd, 3)
        if rval < 0:
          return (rval, "failed to create band plot")
        fptr = open (band_plot_file, "wb")
        fptr.write(data["bandpass"]["raw"])
        fptr.close()
      else:
        rval, data["bandpass"]["raw"] = self.system_raw ("cat " + band_plot_file, 3)

    # find the results filename
    results_file = dir + "/obs.results"
    if os.path.exists(results_file):
      self.log (3, "collect_data: results_file=" + results_file)
      results = Config.readCFGFileIntoDict (results_file)
      data["snr"] = results["snr"]
      data["length"] = results["length"]
    else:
      if os.path.exists(freq_sum_file):
        cmd = "psrstat -jFDp -c snr " + freq_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
        rval, lines = self.system (cmd, 3)
        if rval < 0:
          return (rval, "failed to extract snr from freq.sum")
        data["snr"] = lines[0]

      # determine the length of the observation
      if os.path.exists(time_sum_file):
        cmd = "psrstat -c length " + time_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
        rval, lines = self.system (cmd, 3)
        if rval < 0:
          return (rval, "failed to extract length from time.sum")
        data["length"] = lines[0]

      # write these values to the sum file
      fptr = open (results_file, "w")
      fptr.write("snr\t" + data["snr"] + "\n")
      fptr.write("length\t" + data["length"] + "\n")
      fptr.close()

    return ("ok", "collected")
Example No. 35
0
  def issue_stop_cmd (self, xml):

    self.log(2, "issue_stop_cmd()")

    # determine which beams this command corresponds to
    for ibeam in range(int(xml['obs_cmd']['beam_configuration']['nbeam']['#text'])):
      state = xml['obs_cmd']['beam_configuration']['beam_state_' + str(ibeam)]['#text']
      if state == "1" or state == "on":
        b = xml['obs_cmd']['beam_configuration']['beam_state_' + str(ibeam)]['@name']
        if b in self.beam_states.keys():

          self.log(1, "issue_stop_cmd: beam=" + b)
          obs = {}

          self.beam_states[b]["lock"].acquire()
          self.beam_states[b]["state"] = "Stopping"
          obs["COMMAND"] = "STOP"

          # inject the observation parameters
          o = self.beam_states[b]["config"]["observation_parameters"]

          # if no UTC_STOP has been specified, set it to now
          if o["utc_stop"]["#text"] == "None":
            o["utc_stop"]["#text"] = times.getUTCTime()
          obs["UTC_STOP"] = o["utc_stop"]["#text"]
          self.beam_states[b]["lock"].release()

          # convert to a single ascii string
          obs_header = Config.writeDictToString (obs)

          # work out which streams correspond to these beams
          for istream in range(int(self.cfg["NUM_STREAM"])):
            (host, beam_idx, subband) = self.cfg["STREAM_"+str(istream)].split(":")
            beam = self.cfg["BEAM_" + beam_idx]
            self.log(2, "issue_stop_cmd: host="+host+" beam="+beam+" subband="+subband)

            # connect to streams for this beam only
            if beam == b:

              # control port for this recv stream
              ctrl_port = int(self.cfg["STREAM_CTRL_PORT"]) + istream

              # connect to recv agent and provide observation configuration
              self.log(3, "issue_stop_cmd: openSocket("+host+","+str(ctrl_port)+")")
              sock = sockets.openSocket (DL, host, ctrl_port, 1)
              if sock:
                self.log(3, "issue_stop_cmd: sending obs_header len=" + str(len(obs_header)))
                sock.send(obs_header)
                self.log(3, "issue_stop_cmd: command sent")
                sock.close()
                self.log(3, "issue_stop_cmd: socket closed")

              # connect to spip_gen and issue stop command for UTC
              # assumes gen host is the same as the recv host!
              # gen_port = int(self.cfg["STREAM_GEN_PORT"]) + istream
              # sock = sockets.openSocket (DL, host, gen_port, 1)
              # if sock:
              #   sock.send(obs_header)
              #   sock.close()

          # update the dict of observing info for this beam
          self.beam_states[b]["lock"].acquire()
          self.beam_states[b]["state"] = "Idle"
          self.beam_states[b]["lock"].release()
Example No. 37
0
    def collect_data(self, dir, beam, utc_start, source):

        data = self.results[utc_start][source]

        data["beam"] = beam
        data["utc_start"] = utc_start
        data["source"] = source
        data["index"] = self.source_number
        self.source_number += 1

        # find the header filename
        cmd = "find " + dir + " -mindepth 1 -maxdepth 1 -type f -name 'obs.header*' | head -n 1"
        rval, lines = self.system(cmd, 3)
        if rval:
            return ("fail", data)

        if not len(lines) == 1:
            return ("fail", data)
        header_file = lines[0]

        self.log(3, "collect_data: header_file=" + header_file)

        # read the contents of the header
        header = Config.readCFGFileIntoDict(header_file)

        data["centre_frequency"] = header["FREQ"]
        data["bandwidth"] = header["BW"]
        data["nchannels"] = header["NCHAN"]
        data["ra"] = header["RA"]
        data["dec"] = header["DEC"]
        data["mode"] = header["MODE"]
        data["project_id"] = header["PID"]
        data["subarray_id"] = "N/A"
        data["dir"] = dir
        data["length"] = "-1"
        data["snr"] = "-1"

        # convert entire header into XML
        data["header"] = ""
        keys = header.keys()
        keys.sort()
        for key in keys:
            data["header"] += "<" + key + ">" + header[key] + "</" + key + ">"

        psrplot_opts = "-c x:view='(0.0,1.0)' -c y:view='(0.0,1.0)' -g 160x120 -D -/png"

        time_sum_file = dir + "/time.sum"
        # find the path to the archives for plotting
        if os.path.exists(time_sum_file):
            data["time_sum"] = time_sum_file

            data["time_vs_phase"] = {}
            data["time_vs_phase"]["xres"] = 160
            data["time_vs_phase"]["yres"] = 120

            time_plot_file = dir + "/time.png"
            # if the plot does not exist, create it
            if not os.path.exists(time_plot_file):
                cmd = "psrplot -p time " + time_sum_file + " -jDp " + psrplot_opts
                rval, data["time_vs_phase"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to generate time plot")
                fptr = open(time_plot_file, "wb")
                fptr.write(data["time_vs_phase"]["raw"])
                fptr.close()

            # read the created plot from the file system
            else:
                rval, data["time_vs_phase"]["raw"] = self.system_raw(
                    "cat " + dir + "/time.png", 3)

        freq_sum_file = dir + "/freq.sum"
        if os.path.exists(freq_sum_file):
            data["freq_sum"] = freq_sum_file

            # generate the freq plot
            data["freq_vs_phase"] = {}
            data["freq_vs_phase"]["xres"] = 160
            data["freq_vs_phase"]["yres"] = 120

            freq_plot_file = dir + "/freq.png"
            if not os.path.exists(freq_plot_file):
                cmd = "psrplot -p freq " + freq_sum_file + " -jDp " + psrplot_opts
                rval, data["freq_vs_phase"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to generate freq.png")
                fptr = open(freq_plot_file, "wb")
                fptr.write(data["freq_vs_phase"]["raw"])
                fptr.close()
            else:
                rval, data["freq_vs_phase"]["raw"] = self.system_raw(
                    "cat " + dir + "/freq.png", 3)

            # generate the flux plot
            data["flux_vs_phase"] = {}
            data["flux_vs_phase"]["xres"] = 160
            data["flux_vs_phase"]["yres"] = 120

            flux_plot_file = dir + "/flux.png"
            if not os.path.exists(flux_plot_file):
                cmd = "psrplot -p flux " + freq_sum_file + " -jFDp " + psrplot_opts
                rval, data["flux_vs_phase"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to create flux plot")
                fptr = open(flux_plot_file, "wb")
                fptr.write(data["flux_vs_phase"]["raw"])
                fptr.close()
            else:
                rval, data["flux_vs_phase"]["raw"] = self.system_raw(
                    "cat " + dir + "/flux.png", 3)

        band_file = dir + "/band.last"
        if os.path.exists(band_file):
            data["band_last"] = band_file

            data["bandpass"] = {}
            data["bandpass"]["xres"] = 160
            data["bandpass"]["yres"] = 120
            band_plot_file = dir + "/band.png"
            if not os.path.exists(band_plot_file):
                cmd = "psrplot -p b " + band_file + " -x -lpol=0,1 -N2,1 -c log=1 " + psrplot_opts
                rval, data["bandpass"]["raw"] = self.system_raw(cmd, 3)
                if rval < 0:
                    return (rval, "failed to create band plot")
                fptr = open(band_plot_file, "wb")
                fptr.write(data["bandpass"]["raw"])
                fptr.close()
            else:
                rval, data["bandpass"]["raw"] = self.system_raw(
                    "cat " + band_plot_file, 3)

        # find the results filename
        results_file = dir + "/obs.results"
        if os.path.exists(results_file):
            self.log(3, "collect_data: results_file=" + results_file)
            results = Config.readCFGFileIntoDict(results_file)
            data["snr"] = results["snr"]
            data["length"] = results["length"]
        else:
            if os.path.exists(freq_sum_file):
                cmd = "psrstat -jFDp -c snr " + freq_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
                rval, lines = self.system(cmd, 3)
                if rval < 0:
                    return (rval, "failed to extract snr from freq.sum")
                data["snr"] = lines[0]

            # determine the length of the observation
            if os.path.exists(time_sum_file):
                cmd = "psrstat -c length " + time_sum_file + " | awk -F= '{printf(\"%f\",$2)}'"
                rval, lines = self.system(cmd, 3)
                if rval < 0:
                    return (rval, "failed to extract length from time.sum")
                data["length"] = lines[0]

            # write these values to the sum file
            fptr = open(results_file, "w")
            fptr.write("snr\t" + data["snr"] + "\n")
            fptr.write("length\t" + data["length"] + "\n")
            fptr.close()

        return ("ok", "collected")
Example No. 38
0
  def main (self):

    if self.gen_histogram:
      self.hg_plot = HistogramPlot()
      self.valid_plots.append("histogram")

    if self.gen_bandpass:
      self.bp_plot = BandpassPlot()
      self.valid_plots.append("bandpass")

    if self.gen_timeseries:
      self.ts_plot = TimeseriesPlot()
      self.valid_plots.append("timeseries")

    if self.gen_freqtime:
      self.ft_plot = FreqTimePlot()
      self.valid_plots.append("freqtime")

    # stats files are stored in flat directory structure
    # stats_dir / beam / cfreq

    if not os.path.exists(self.processing_dir):
      os.makedirs(self.processing_dir, 0755) 

    # get the data block keys
    db_prefix  = self.cfg["DATA_BLOCK_PREFIX"]
    db_id      = self.cfg["RECEIVING_DATA_BLOCK"]
    num_stream = self.cfg["NUM_STREAM"]
    stream_id  = str(self.id)
    self.debug("stream_id=" + str(self.id))
    self.db_key = SMRBDaemon.getDBKey (db_prefix, stream_id, num_stream, db_id)
    self.debug("db_key=" + self.db_key)

    # start dbstats in a separate thread
    self.stat_dir = self.processing_dir + "/" + self.beam_name + "/" + self.cfreq
    self.archived_dir = self.processing_dir + "/archived/" + self.beam_name + "/" + self.cfreq

    if not os.path.exists(self.stat_dir):
      os.makedirs(self.stat_dir, 0755)
    if not os.path.exists(self.archived_dir):
      os.makedirs(self.archived_dir, 0755)

    # configure the histogram plot with all channels included
    if self.gen_histogram:
      self.hg_plot.configure (-1, self.histogram_abs_xmax)

    log = False
    zap = False
    transpose = False
    # configure the freq v time plot
    if self.gen_freqtime:
      self.ft_plot.configure (log, zap, transpose)

    # configure the bandpass plot
    log = True
    if self.gen_bandpass:
      self.bp_plot.configure (log, zap, transpose)

    log_host = self.cfg["SERVER_HOST"]
    log_port = int(self.cfg["SERVER_LOG_PORT"])

    # stat will use the stream config file created for the recv command
    self.stream_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
    while (not os.path.exists(self.stream_config_file)) and (not self.quit_event.isSet()):
      self.debug("waiting for stream_config file [" + self.stream_config_file + "] to be created by recv")
      time.sleep(1)

    self.debug("wait_for_smrb()")
    smrb_exists = SMRBDaemon.waitForSMRB(self.db_key, self)

    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    stat_cmd = self.build_cmd()

    while (not self.quit_event.isSet()):

      process_stats = True

      # wait for the header to determine when dbstats should run
      cmd = "dada_header -k " + self.db_key + " -t stat"
      self.info(cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd)
      self.binary_list.remove (cmd)

      # check that the command succeeded and returned a header
      if rval != 0:
        if self.quit_event.isSet():
          self.debug(cmd + " failed, but quit_event true")
        else:
          self.error(cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        self.error("header was empty")
        self.quit_event.set()

      else:
        self.debug("parsing header")
        self.header = Config.parseHeader (lines)

        try:
          if self.header["ZERO_INPUT"] == "1":
            process_stats = False
        except KeyError:
          self.debug("ZERO_INPUT did not exist in header")

      if self.quit_event.isSet():
        self.debug("quit event set, exiting loop")
        continue

      if not process_stats:
        self.debug("not analyzing stats due to ZERO_INPUT")
        time.sleep(5)
        continue

      # create a log pipe for the stats command
      stat_log_pipe   = LogSocket ("stat_src", "stat_src", str(self.id), "stream",
                                   log_host, log_port, int(DL))

      # connect up the log file output
      stat_log_pipe.connect()

      # add this binary to the list of active commands
      kill_cmd = self.cfg["STREAM_STATS_BINARY"] + " -k " + self.db_key
      self.info("kill_cmd=" + kill_cmd)
      self.binary_list.append (kill_cmd)

      self.log (1, "START " + stat_cmd)

      # initialize the threads
      stat_thread = dbstatsThread (stat_cmd, self.stat_dir, stat_log_pipe.sock, 2)

      self.debug("cmd=" + stat_cmd)

      self.debug("starting stat thread")
      stat_thread.start()
      self.debug("stat thread started")

      pref_freq = 0

      while stat_thread.is_alive() and not self.quit_event.isSet():

        # get a list of all the files in stat_dir
        files = os.listdir (self.stat_dir)

        self.debug("found " + str(len(files)) + " in " + self.stat_dir)

        # if stat files exist in the directory
        if len(files) > 0:
          if self.gen_histogram:
            self.process_hg (pref_freq)
          if self.gen_bandpass:
            self.process_bp (pref_freq)
          if self.gen_freqtime:
            self.process_ft (pref_freq)
          if self.gen_timeseries:
            self.process_ts ()
          self.process_ms ()

          self.results["lock"].acquire()

          pref_freq = self.pref_freq
          self.results["timestamp"] = times.getCurrentTime()
          self.results["valid"] = self.ms_valid
          if self.gen_histogram:
            self.results["valid"] |= self.hg_valid
          if self.gen_timeseries:
            self.results["valid"] |= self.ts_valid
          if self.gen_freqtime:
            self.results["valid"] |= self.ft_valid
          if self.gen_bandpass:
            self.results["valid"] |= self.bp_valid

          self.results["lock"].release()

        time.sleep(5)

      self.debug("joining stat thread")
      rval = stat_thread.join()
      self.debug("stat thread joined")

      self.log (1, "END   " + stat_cmd)

      if rval:
        self.log (-2, "stat thread failed")
        self.quit_event.set()
Example No. 39
0
    def main(self):

        self.log(2, "MeerKATXposeDaemon::main configure_child()")
        self.configure_child()

        self.log(2, "MeerKATXposeDaemon::main wait_for_smrb()")
        self.wait_for_smrb()

        if self.quit_event.isSet():
            self.log(
                -1,
                "MeerKATXposeDaemon::main quit event was set after waiting for SMRB creation"
            )
            return

        self.prepare()

        # continuously run the main command waiting on the SMRB
        while (not self.quit_event.isSet()):

            tag = "meerkat_xpose_" + self.stream_id

            # wait for the header to determine if folding is required
            cmd = "dada_header -t " + tag + " -k " + self.db_key_in1
            self.log(1, cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd)
            self.binary_list.remove(cmd)

            # check that the command succeeded and returned a header
            if rval != 0:
                if self.quit_event.isSet():
                    self.log(
                        2, "MeerKATXposeDaemon::main " + cmd +
                        " failed, but quit_event true")
                else:
                    self.log(-2, "MeerKATXposeDaemon::main " + cmd + " failed")
                    self.quit_event.set()

            elif len(lines) == 0:

                self.log(-2, "MeerKATXposeDaemon::main header was empty")
                self.quit_event.set()

            else:

                self.log(2, "MeerKATXposeDaemon::main parsing header")
                self.header = Config.parseHeader(lines)

                #if not float(self.bw) == float(self.header["BW"]):
                #  self.log (-1, "configured bandwidth ["+self.bw+"] != self.header["+self.header["BW"]+"]")
                #if not float(self.cfreq) == float(self.header["FREQ"]):
                #  self.log (-1, "configured cfreq ["+self.cfreq+"] != self.header["+self.header["FREQ"]+"]")
                #if not int(self.nchan) == int(self.header["NCHAN"]):
                #  self.log (-2, "configured nchan ["+self.nchan+"] != self.header["+self.header["NCHAN"]+"]")

                # configure the output pipe
                self.log(
                    2,
                    "MeerKATXposeDaemon::main configuring output log pipe prefix="
                    + self.log_prefix)
                log_host = self.cfg["SERVER_HOST"]
                log_port = int(self.cfg["SERVER_LOG_PORT"])
                log_pipe = LogSocket(self.log_prefix, self.log_prefix,
                                     str(self.id), "stream", log_host,
                                     log_port, int(DL))
                log_pipe.connect()

                # add the binary command to the kill list
                self.binary_list.append(self.cmd)

                # create processing threads
                self.log(
                    1, "MeerKATXposeDaemon::main creating processing thread")
                proc_thread = MeerKATXposeThread(self, self.cmd, log_pipe.sock,
                                                 1)

                # start processing threads
                self.log(
                    1, "MeerKATXposeDaemon::main starting processing thread")
                proc_thread.start()

                # join processing threads
                self.log(
                    2,
                    "MeerKATXposeDaemon::main waiting for proc thread to terminate"
                )
                rval = proc_thread.join()
                self.log(2, "MeerKATXposeDaemon::main proc thread joined")

                # remove the binary command from the list
                self.binary_list.remove(self.cmd)

                if rval:
                    self.log(-2, "MeerKATXposeDaemon::main proc thread failed")
                    self.quit_event.set()

                log_pipe.close()

            self.log(1, "MeerKATXposeDaemon::main processing completed")
Example no. 40
    def main(self):

        db_id = self.cfg["PROCESSING_DATA_BLOCK"]
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        self.db_key = SMRBDaemon.getDBKey(db_prefix, self.id, num_stream,
                                          db_id)
        self.log(0, "db_key=" + self.db_key)

        # wait up to 10s for the SMRB to be created
        smrb_wait = 10
        cmd = "dada_dbmetric -k " + self.db_key
        self.binary_list.append(cmd)

        rval = 1
        while rval and smrb_wait > 0 and not self.quit_event.isSet():

            rval, lines = self.system(cmd)
            if rval:
                sleep(1)
            smrb_wait -= 1

        if rval:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                self.db_key)
            self.quit_event.set()

        else:

            local_config = self.getConfiguration()

            self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
            self.ctrl_port = str(
                int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))

            # write this config to file
            local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"
            self.log(1, "main: creating " + local_config_file)
            Config.writeDictToCFGFile(local_config, local_config_file)

            env = self.getEnvironment()

            cmd = self.getCommand(local_config_file)
            self.binary_list.append(cmd)

            self.log(3, "main: sleep(1)")
            sleep(1)

            self.log(3, "main: log_pipe = LogSocket(recvsim_src))")
            log_pipe = LogSocket("recvsim_src", "recvsim_src", str(self.id),
                                 "stream", self.cfg["SERVER_HOST"],
                                 self.cfg["SERVER_LOG_PORT"], int(DL))

            self.log(3, "main: log_pipe.connect()")
            log_pipe.connect()

            self.log(3, "main: sleep(1)")
            sleep(1)

            # this should be a persistent / blocking command
            rval = self.system_piped(cmd, log_pipe.sock)

            if rval:
                self.log(-2, cmd + " failed with return value " + str(rval))
            self.quit_event.set()

            log_pipe.close()
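
The 10-second polling of dada_dbmetric is a recurring pattern in these daemons. A reusable sketch, assuming a system() helper that returns (rval, lines) as above and a threading.Event for shutdown:

from time import sleep

def wait_for_smrb(system, db_key, quit_event, timeout=10):
    # poll once per second until the ring buffer exists, the timeout
    # expires, or the daemon is asked to quit
    cmd = "dada_dbmetric -k " + db_key
    while timeout > 0 and not quit_event.isSet():
        rval, lines = system(cmd)
        if rval == 0:
            return True
        sleep(1)
        timeout -= 1
    return False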
Example no. 41
  def main (self):

    archives_glob = "*.ar"

    self.log (2, "main: beams=" + str(self.beams))

    # archives stored in directory structure
    #  beam / utc_start / source / cfreq / "fold"

    # summary data stored in
    #  beam / utc_start / source / freq.sum
    # out_cfreq = 0

    if not os.path.exists(self.processing_dir):
      os.makedirs(self.processing_dir, 0755) 
    if not os.path.exists(self.finished_dir):
      os.makedirs(self.finished_dir, 0755) 
    if not os.path.exists(self.archived_dir):
      os.makedirs(self.archived_dir, 0755) 

    self.log (2, "main: stream_id=" + str(self.id))

    while (not self.quit_event.isSet()):

      processed_this_loop = 0

      # check each beam for folded archives to process    
      for beam in self.beams:

        beam_dir = self.processing_dir + "/" + beam
        self.log (3, "main: beam=" + beam + " beam_dir=" + beam_dir)

        if not os.path.exists(beam_dir):
          os.makedirs(beam_dir, 0755)

        # get a list of all the recent observations
        cmd = "find " + beam_dir + " -mindepth 2 -maxdepth 2 -type d"
        rval, observations = self.system (cmd, 3)

        # for each observation      
        for observation in observations:
   
          # strip prefix 
          observation = observation[(len(beam_dir)+1):]

          (utc, source) = observation.split("/")

          if source == "stats":
            continue

          obs_dir = beam_dir + "/" + observation
          out_dir = self.archived_dir + "/" + beam + "/" + utc + "/" + source + "/" + str(self.out_cfreq)

          if not os.path.exists(out_dir):
            os.makedirs(out_dir, 0755)

          # if we have only 1 sub-band, then files can be processed immediately
          archives = {}
          for subband in self.subbands:
            self.log (3, "processing subband=" + str(subband))
            
            cmd = "find " + obs_dir + "/" + subband["cfreq"] + " -mindepth 1 -maxdepth 1 " + \
                  "-type f -name '" + archives_glob + "' -printf '%f\\n'"
            rval, files = self.system (cmd, 3)

            for file in files:
              if not file in archives:
                archives[file] = 0
              archives[file] += 1

          # if a file meets the subband count it is ripe for processing
          files = archives.keys()
          files.sort()

          for file in files:

            # only process a file once it is present in every sub-band
            if archives[file] == len(self.subbands):

              processed_this_loop += 1
              self.log (1, observation + ": processing " + file)

              if len(self.subbands) > 1:
                self.log (2, "main: process_subband()")
                (rval, response) = self.process_subband (obs_dir, out_dir, source, file)
                if rval:
                  self.log (-1, "failed to process sub-bands for " + file + ": " + response)
              else:
                input_file = obs_dir + "/" + self.subbands[0]["cfreq"] + "/" + file
                self.log (2, "main: process_archive() " + input_file)
                (rval, response) = self.process_archive (obs_dir, input_file, out_dir, source)
                if rval:
                  self.log (-1, "failed to process " + file + ": " + response)

          if len(files) > 0:
            # now process the sum files to produce plots etc
            self.log (2, "main: process_observation("+beam+","+utc+","+source+","+obs_dir+")")
            (rval, response) = self.process_observation (beam, utc, source, obs_dir)
            if rval:
              self.log (-1, "failed to process observation: " + response)

          # if the proc has marked this observation as finished
          all_finished = True
          any_failed = False

          # perhaps a file was produced whilst the previous list was being processed,
          # do another pass
          if len(files) > 0:
            all_finished = False

          for subband in self.subbands:
            filename = obs_dir + "/" + subband["cfreq"] + "/obs.finished"
            if os.path.exists(filename):
              if os.path.getmtime(filename) + 10 > time.time():
                all_finished = False
            else:
              all_finished = False
            filename = obs_dir + "/" + subband["cfreq"] + "/obs.failed"
            if os.path.exists(filename):
              any_failed = True
         
          # the observation has failed, cleanup
          if any_failed:
            self.log (1, observation + ": processing -> failed")
            all_finished = False

            fail_parent_dir = self.failed_dir + "/" + beam + "/" + utc
            if not os.path.exists(fail_parent_dir):
              os.makedirs(fail_parent_dir, 0755)
            fail_dir = self.failed_dir + "/" + beam + "/" + utc + "/" + source
            self.log (2, "main: fail_observation("+obs_dir+")")
            (rval, response) = self.fail_observation (beam, obs_dir, fail_dir, out_dir)
            if rval:
              self.log (-1, "failed to finalise observation: " + response)

          # The observation has finished, cleanup
          if all_finished: 
            self.log (1, observation + ": processing -> finished")

            fin_parent_dir = self.finished_dir + "/" + beam + "/" + utc
            if not os.path.exists(fin_parent_dir):
              os.makedirs(fin_parent_dir, 0755)

            fin_dir = self.finished_dir + "/" + beam + "/" + utc + "/" + source
            self.log (2, "main: finalise_observation("+obs_dir+")")
            (rval, response) = self.finalise_observation (beam, obs_dir, fin_dir, out_dir)
            if rval:
              self.log (-1, "failed to finalise observation: " + response)
            else:

              # merge the headers from each sub-band
              header = Config.readCFGFileIntoDict (fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.header")
              for i in range(1,len(self.subbands)):
                header_sub = Config.readCFGFileIntoDict (fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.header")
                header = Config.mergeHeaderFreq (header, header_sub)
                os.remove (fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.header")
                os.remove (fin_dir + "/" + self.subbands[i]["cfreq"] + "/obs.finished")
                os.removedirs (fin_dir + "/" + self.subbands[i]["cfreq"])
              os.remove (fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.header")
              os.remove (fin_dir + "/" + self.subbands[0]["cfreq"] + "/obs.finished")
              os.removedirs (fin_dir + "/" + self.subbands[0]["cfreq"])

              Config.writeDictToCFGFile (header, fin_dir + "/" + "obs.header")
              shutil.copyfile (fin_dir + "/obs.header", out_dir + "/obs.header")


      if processed_this_loop == 0:
        self.log (3, "time.sleep(1)")
        time.sleep(1)
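
The archives dictionary above implements a completeness test: a file is ready once the same name has been seen in every sub-band directory. The same idea in isolation, using collections.Counter:

from collections import Counter

def ready_archives(files_per_subband):
    # count how many sub-bands contain each filename; a file is ready
    # for processing when it appears in all of them
    counts = Counter()
    for files in files_per_subband:
        counts.update(files)
    nsub = len(files_per_subband)
    return sorted([f for f, n in counts.items() if n == nsub])

# e.g. ready_archives([["a.ar", "b.ar"], ["a.ar"]]) -> ["a.ar"]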
Example no. 42
    def process_cleaned(self, proc_dir, send_dir):

        files = [file for file in os.listdir(proc_dir)
                 if file.lower().endswith(".cleaned")]
        self.trace("files=" + str(files))

        if len(files) > 0:

            # process only the most recent file
            last_file = proc_dir + "/" + files[-1]
            self.info("file=" + last_file)

            # determine the size of the data payload from the file and header sizes
            file_size = os.path.getsize(last_file)
            header_size = 4096
            data_size = file_size - header_size

            # read the data from file into a numpy array
            fptr = open(last_file, "rb")
            header_str = fptr.read(header_size)
            header = Config.readDictFromString(header_str)

            npol = int(header["NPOL"])
            ndim = int(header["NDIM"])
            nchan = int(header["NCHAN"])
            nant = int(header["NANT"])
            nbit = int(header["NBIT"])
            freq = float(header["FREQ"])
            bw = float(header["BW"])
            self.info("npol=" + str(npol) + " ndim=" + str(ndim) + " nchan=" +
                      str(nchan) + " nant=" + str(nant) + " nbit=" +
                      str(nbit) + " freq=" + str(freq) + " bw=" + str(bw))

            bytes_per_sample = (npol * ndim * nchan * nant * nbit) / 8
            ndat = data_size / bytes_per_sample

            # TODO check that nbit==32 ndat==1 nant=1
            nval = ndat*nant*nchan*npol*ndim
            self.info("bytes_per_sample=" + str(bytes_per_sample) + " nval=" +
                      str(nval))

            raw = np.fromfile(fptr, dtype=np.float32, count=nval)
            fptr.close()

            # reshape the raw data into a numpy array with specified dimensions
            self.info("np.shape(raw)=" + str(np.shape(raw)))
            self.info("npol=" + str(npol) + " nchan=" + str(nchan))
            data = raw.reshape((npol, nchan))

            if npol == 1:
                labels = ["AA"]
            elif npol == 2:
                labels = ["AA", "BB"]
            else:
                labels = []

            # acquire the results lock
            self.results["lock"].acquire()

            self.info("len(data)=" + str(len(data)))

            # generate plots multi polarisation plots
            self.cleaned_plot.plot_npol(240, 180, True, nchan, freq, bw,
                                        data, labels)
            self.results["cleaned_lo"] = self.cleaned_plot.getRawImage()

            self.cleaned_plot.plot_npol(1024, 768, False, nchan, freq, bw,
                                        data, labels)
            self.results["cleaned_hi"] = self.cleaned_plot.getRawImage()
            self.cleaned_valid = True
            self.results["lock"].release()

            for file in files:
                os.rename(proc_dir + "/" + file, send_dir + "/" + file)
        return len(files)
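
The .cleaned files read above follow the usual DADA file layout: a fixed-size ASCII header (4096 bytes here) followed by 32-bit float samples. A minimal standalone reader under those assumptions:

import numpy as np

def read_stats_file(path, header_size=4096):
    # returns the raw ASCII header and the float32 payload that follows it
    fptr = open(path, "rb")
    header_str = fptr.read(header_size)
    data = np.fromfile(fptr, dtype=np.float32)
    fptr.close()
    return header_str, data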
Example no. 43
    def __init__(self):
        Config.__init__(self)
Example no. 45
    def process_gains(self, proc_dir, send_dir):

        # find the most recent gains file
        files = [file for file in os.listdir(proc_dir)
                 if file.lower().endswith(".gains")]
        self.trace("files=" + str(files))

        if len(files) > 0:

            gains_time_file = proc_dir + "/gains.time"
            cmd = ""

            # combine all the gains files together
            if os.path.exists(gains_time_file):
                cmd = "uwb_adaptive_filter_tappend " + gains_time_file
                for file in files:
                    cmd = cmd + " " + proc_dir + "/" + file
                cmd = cmd + " " + gains_time_file
            else:
                if len(files) == 1:
                    cmd = "cp " + proc_dir + "/" + files[0] + " " + \
                        gains_time_file
                else:
                    cmd = "uwb_adaptive_filter_tappend"
                    for file in files:
                        cmd = cmd + " " + proc_dir + "/" + file
                    cmd = cmd + " " + gains_time_file

            self.info(cmd)
            rval, lines = self.system(cmd, 2)
            if rval != 0:
                self.warn("failed to tappend gains files")
                return

            # determine the size of the data payload from the file and header sizes
            file_size = os.path.getsize(gains_time_file)
            header_size = 4096
            data_size = file_size - header_size

            gains_file = open(gains_time_file, "rb")
            header_str = gains_file.read(header_size)
            header = Config.readDictFromString(header_str)

            npol = int(header["NPOL"])
            ndim = int(header["NDIM"])
            nchan = int(header["NCHAN"])
            nant = int(header["NANT"])
            nbit = int(header["NBIT"])
            freq = float(header["FREQ"])
            bw = float(header["BW"])
            tsamp = float(header["TSAMP"])

            self.info("npol=" + str(npol) + " ndim=" + str(ndim) + " nchan=" +
                      str(nchan) + " nant=" + str(nant) + " nbit=" +
                      str(nbit) + " freq=" + str(freq) + " bw=" + str(bw))

            # check that the nbit is 32
            bytes_per_sample = (npol * ndim * nchan * nant * nbit) / 8
            ndat = data_size / bytes_per_sample
            nval = ndat*nant*nchan*npol*ndim

            self.info("ndat=" + str(ndat) + " bytes_per_sample=" +
                      str(bytes_per_sample) + " nval=" + str(nval))

            raw = np.fromfile(gains_file, dtype=np.float32, count=nval)
            gains_file.close()

            self.info("np.shape(raw)=" + str(np.shape(raw)))
            self.info("npol=" + str(npol) + " nchan=" + str(nchan))

            # reshape the raw data into a numpy array with specified dimensions
            data = raw.reshape((ndat, nant, npol, nchan, ndim))

            # acquire the results lock
            self.results["lock"].acquire()

            # generate an empty numpy array with ndat values
            xvals = np.zeros(ndat)
            gains_time = np.zeros((npol, ndat))
            gains_freq = np.zeros((npol, nchan))

            for idat in range(ndat):
                xvals[idat] = float(idat) * tsamp / 1e6
                for ipol in range(npol):
                    for isig in range(nant):
                        for ichan in range(nchan):
                            power = 0
                            for idim in range(ndim):
                                g = data[idat][isig][ipol][ichan][idim]
                                power += g * g
                            if power > gains_time[ipol][idat]:
                                gains_time[ipol][idat] = power
                            if idat == ndat-1:
                                gains_freq[ipol][ichan] = power

            if npol == 1:
                labels = ["AA+BB"]
                colours = ["red"]
            elif npol == 2:
                labels = ["AA", "BB"]
                colours = ["red", "green"]
            else:
                labels = []
                colours = []

            self.gains_time_plot.plot(240, 180, True, xvals, gains_time,
                                      labels, colours)
            self.results["gainstime_lo"] = self.gains_time_plot.getRawImage()

            self.gains_time_plot.plot(1024, 768, False, xvals, gains_time,
                                      labels, colours)
            self.results["gainstime_hi"] = self.gains_time_plot.getRawImage()

            self.gains_freq_plot.plot_npol(240, 180, True, nchan, freq, bw,
                                           gains_freq, labels)
            self.results["gainsfreq_lo"] = self.gains_freq_plot.getRawImage()

            self.gains_freq_plot.plot_npol(1024, 768, False, nchan, freq, bw,
                                           gains_freq, labels)
            self.results["gainsfreq_hi"] = self.gains_freq_plot.getRawImage()

            self.gains_valid = True

            self.results["lock"].release()

            for file in files:
                os.rename(proc_dir + "/" + file, send_dir + "/" + file)
        return len(files)
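
The quadruple loop computing gains_time and gains_freq can be expressed in a few vectorized numpy operations. A sketch equivalent to the code above, for data already reshaped to (ndat, nant, npol, nchan, ndim):

import numpy as np

def compute_gains(data, tsamp):
    # power per (time, antenna, pol, channel): sum of squares over ndim
    power = np.sum(data.astype(np.float64) ** 2, axis=4)
    # maximum over antennas and channels -> shape (npol, ndat)
    gains_time = power.max(axis=(1, 3)).T
    # spectrum at the final sample and antenna -> shape (npol, nchan)
    gains_freq = power[-1, -1, :, :]
    # sample times in seconds (TSAMP is in microseconds)
    xvals = np.arange(power.shape[0]) * tsamp / 1e6
    return xvals, gains_time, gains_freq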
Example no. 46
  def prepare (self):

    self.log (2, "UWBFoldDaemon::prepare UTC_START=" + self.header["UTC_START"])
    self.log (2, "UWBFoldDaemon::prepare RESOLUTION=" + self.header["RESOLUTION"])

    # default processing commands
    self.cmd = "dada_dbnull -s -k " + self.db_key

    # check if FOLD mode has been requested in the header
    try:
      fold = (self.header["PERFORM_FOLD"] in ["1", "true"])
    except KeyError as e:
      fold = False

    # if no folding has been requested return
    if not fold:
      return False

    # output directory for FOLD mode
    self.out_dir = self.cfg["CLIENT_FOLD_DIR"] + "/processing/" + self.beam + "/" \
                   + self.utc_start + "/" + self.source + "/" + self.cfreq

    # create DSPSR input file for the data block
    db_key_filename = "/tmp/spip_" + self.db_key + ".info"
    if not os.path.exists (db_key_filename):
      db_key_file = open (db_key_filename, "w")
      db_key_file.write("DADA INFO:\n")
      db_key_file.write("key " +  self.db_key + "\n")
      db_key_file.close()

    # create DSPSR viewing file for the data block
    view_key_filename = "/tmp/spip_" + self.db_key + ".viewer"
    if not  os.path.exists (view_key_filename):
      view_key_file = open (view_key_filename, "w")
      view_key_file.write("DADA INFO:\n")
      view_key_file.write("key " +  self.db_key + "\n")
      view_key_file.write("viewer\n")
      view_key_file.close()

    outnstokes = -1
    outtsub = -1
    dm = -1
    outnbin = -1
    innchan = int(self.header["NCHAN"])
    outnchan = innchan
    sk = False
    sk_threshold = -1
    sk_nsamps = -1
    mode = "PSR"

    try:
      outnstokes = int(self.header["FOLD_OUTNSTOKES"])
    except (KeyError, ValueError):
      outnstokes = 4

    try:
      outtsub = int(self.header["FOLD_OUTTSUBINT"])
    except (KeyError, ValueError):
      outtsub = 10

    try:
      outnbin = int(self.header["FOLD_OUTNBIN"])
    except (KeyError, ValueError):
      outnbin = 1024

    try:
      outnchan = int(self.header["FOLD_OUTNCHAN"])
    except (KeyError, ValueError):
      outnchan = 0
      innchan = 0

    try:
      mode = self.header["MODE"]
    except KeyError:
      mode = "PSR"

    try:
      dm = float(self.header["DM"])
    except (KeyError, ValueError):
      dm = -1

    try:
      sk = self.header["FOLD_SK"] == "1"
    except KeyError:
      sk = False

    try:
      sk_threshold = int(self.header["FOLD_SK_THRESHOLD"])
    except (KeyError, ValueError):
      sk_threshold = 3

    try:
      sk_nsamps = int(self.header["FOLD_SK_NSAMPS"])
    except (KeyError, ValueError):
      sk_nsamps = 1024


    # configure the command to be run
    self.cmd = "dspsr -Q " + db_key_filename + " -minram 2048 -cuda " + self.gpu_id + " -no_dyn"

    # handle detection options
    if outnstokes >= 1 and outnstokes <= 4:
      # hack for NPOL==1
      if self.header["NPOL"] == "1":
        self.cmd = self.cmd + " -d 1"
      else:
        self.cmd = self.cmd + " -d " + str(outnstokes)
    elif outnstokes == -1:
      self.log(2, "using stokes IQUV default for DSPSR")
    else:
      self.log(-1, "ignoring invalid outnstokes of " + str(outnstokes))

    # handle channelisation
    if outnchan > innchan:
      if outnchan % innchan == 0:
        if mode == "PSR":
          self.cmd = self.cmd + " -F " + str(outnchan) + ":D"
        else:
          self.cmd = self.cmd + " -F " + str(outnchan) + ":" + str(outnchan*4)
      else:
        self.log(-1, "output channelisation was not a multiple of input channelisation")
    else:
      self.log(-2, "requested output channelisation [" + str(outnchan) + "] " + \
               "less than input channelisation [" + str(innchan) + "]")

    # handle output binning
    if outnbin > 0:
      self.cmd = self.cmd + " -b " + str(outnbin)

    # subint is required
    self.cmd = self.cmd + " -L " + str(outtsub)
    mintsub = outtsub - 5
    if mintsub > 0:
      self.cmd = self.cmd + " -Lmin " + str(mintsub)

    # if observing a pulsar
    if mode == "PSR":

      # handle a custom DM
      if dm >= 0:
        self.cmd = self.cmd + " -D " + str(dm)

      # if the SK options are active
      if sk:
        self.cmd = self.cmd + " -skz -skz_no_fscr"

        if sk_threshold != -1:
          self.cmd = self.cmd + " -skzs " + str(sk_threshold)

        if sk_nsamps != -1:
          self.cmd = self.cmd + " -skzm " + str(sk_nsamps)

      # if we are trialing the wideband predictor mode
      if self.wideband_predictor:

        # create a copy of the header to modify
        fullband_header = copy.deepcopy (self.header)

        nchan_total = 0
        freq_low = 1e12
        freq_high = -1e12

        # now update the key parameters of the header
        for i in range(int(self.cfg["NUM_STREAM"])):
          (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + str(i)].split(":")
          nchan_total += int(nchan)
          half_chan_bw = abs(float(bw)) / 2.0
          freq_low_subband = float(cfreq) - half_chan_bw
          freq_high_subband = float(cfreq) + half_chan_bw
          if freq_low_subband < freq_low:
            freq_low = freq_low_subband
          if freq_high_subband > freq_high:
            freq_high = freq_high_subband

        bw = (freq_high - freq_low)
        fullband_header["NCHAN"] = str(nchan_total)
        fullband_header["BW"] = str(bw)
        fullband_header["FREQ"] = str(freq_low + bw/2)
        self.info("fullband predictor: NCHAN=" + fullband_header["NCHAN"] +
                  " BW=" + fullband_header["BW"] + " FREQ=" +
                  fullband_header["FREQ"])

        # create the output directory
        if not os.path.exists (self.out_dir):
          os.makedirs (self.out_dir, 0755)

        # write the sub-bands header to the out_dir
        dummy_file = self.out_dir + "/obs.dummy"
        Config.writeDictToCFGFile (fullband_header, dummy_file, prepend='DUMMY')
 
        # generate an ephemeris file
        ephemeris_file = self.out_dir + "/pulsar.eph"
        cmd = "psrcat -all -e " + self.header["SOURCE"] + " > " + ephemeris_file
        rval, lines = self.system(cmd, 1)

        # generate the tempo2 predictor
        cmd = "t2pred " + ephemeris_file + " " + dummy_file
        rval, lines = self.system(cmd, 1, False, self.getEnvironment())

        # copy the predictor file to the out_dir
        predictor_file = self.out_dir + "/pulsar.pred"
        cmd = "cp /tmp/tempo2/uwb" + str(self.id) + "/t2pred.dat " + predictor_file
        rval, lines = self.system(cmd, 1)

        # append the ephemeris and predictor to DSPSR command line
        self.cmd = self.cmd + " -E " + ephemeris_file + " -P " + predictor_file


    # set the optimal filterbank kernel length
    self.cmd = self.cmd + " -fft-bench"

    self.log_prefix = "fold_src"

    return True
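
The long run of try/except blocks in prepare() reads optional header parameters with typed defaults. A small hypothetical helper that captures the pattern (not part of the spip API):

def header_get(header, key, default, cast=str):
    # return header[key] converted by cast, or default if the key is
    # missing or the value is malformed
    try:
        return cast(header[key])
    except (KeyError, ValueError):
        return default

# e.g. outnbin = header_get(self.header, "FOLD_OUTNBIN", 1024, int)
#      sk = header_get(self.header, "FOLD_SK", "0") == "1"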
Example no. 47
  def main (self):

    self.log(2, "RecvDaemon::main self.waitForSMRB()")
    smrb_exists = self.waitForSMRB()

    if not smrb_exists:
      self.log(-2, "smrb["+str(self.id)+"] no valid SMRB with " +
                  "key=" + self.db_key)
      self.quit_event.set()
      return

    # configuration file for recv stream
    self.local_config = self.getConfiguration()
    self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"

    self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
    self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))
  
    self.configured = True
    self.running = False
    env = self.getEnvironment()

    # external control loop to allow for reconfiguration of RECV
    while not self.quit_event.isSet():
    
      self.log(2, "RecvDaemon::main waiting for configuration")
      while not self.quit_event.isSet() and not self.configured:
        sleep(1) 
      if self.quit_event.isSet():
        return
      Config.writeDictToCFGFile (self.local_config, self.local_config_file)
      self.log(2, "RecvDaemon:: configured")

      cmd = self.getCommand(self.local_config_file)
      self.binary_list.append (cmd)

      self.log(3, "RecvDaemon::main sleep(1)")
      sleep(1)

      self.log(2, "RecvDaemon::main log_pipe = LogSocket(recv_src)")
      log_pipe = LogSocket ("recv_src", "recv_src", str(self.id), "stream",
                            self.cfg["SERVER_HOST"], self.cfg["SERVER_LOG_PORT"],
                            int(DL))

      self.log(2, "RecvDaemon::main log_pipe.connect()")
      log_pipe.connect()

      self.log(2, "RecvDaemon::main sleep(1)")
      sleep(1)

      self.running = True

      self.numa_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
      self.numa_node = self.cfg["STREAM_NUMA_" + str(self.id)]

      recv_cmd = "numactl -C " + self.numa_core + " --membind=" + self.numa_node + " -- " + cmd
 
      self.log(1, "START  " + cmd)
    
      # this should be a persistent / blocking command 
      rval = self.system_piped (recv_cmd, log_pipe.sock, 2, env)

      self.log(1, "END    " + cmd)

      self.running = False 
      self.binary_list.remove (cmd)

      if rval:
        if not self.quit_event.isSet():
          self.log (-2, cmd + " failed with return value " + str(rval))

      log_pipe.close ()
Example no. 48
  def load_finished (self):

    # read the most recently finished observations
    for b in self.beam_states.keys():

      # TODO check this for SERVER / BEAM
      beam_dir = self.fold_dir + "/finished/" + b

      cmd = "find " + beam_dir + " -mindepth 2 -maxdepth 2 -type d | sort | tail -n 1"
      rval, observation = self.system (cmd, 3)

      if len(observation) == 0:
        return

      # strip prefix 
      observation = observation[0][(len(beam_dir)+1):]

      self.log (1, "main: " + observation)
      (utc, source) = observation.split("/")

      obs_dir = beam_dir + "/" + utc + "/" + source

      self.log(2, "load_finished: reading configuration for " + b + "/" + utc + "/" + source)

      if os.path.exists (obs_dir + "/obs.header"):
        header = Config.readCFGFileIntoDict(obs_dir + "/obs.header")
        self.beam_states[b]["lock"].acquire()

        self.beam_states[b]["config"]["source_parameters"]["name"]["#text"] = header["SOURCE"]
        self.beam_states[b]["config"]["source_parameters"]["name"]["@epoch"] = "J2000"

        self.beam_states[b]["config"]["source_parameters"]["ra"]["#text"] = header["RA"]
        self.beam_states[b]["config"]["source_parameters"]["ra"]["@units"] = "hh:mm:ss"
        self.beam_states[b]["config"]["source_parameters"]["dec"]["#text"] = header["DEC"]
        self.beam_states[b]["config"]["source_parameters"]["dec"]["@units"] = "dd:mm:ss"

        self.beam_states[b]["config"]["observation_parameters"]["observer"]["#text"] = header["OBSERVER"]
        self.beam_states[b]["config"]["observation_parameters"]["project_id"]["#text"] = header["PID"]
        self.beam_states[b]["config"]["observation_parameters"]["mode"]["#text"] = header["MODE"]
        self.beam_states[b]["config"]["observation_parameters"]["calfreq"]["#text"] = header["CALFREQ"]
        self.beam_states[b]["config"]["observation_parameters"]["tobs"]["#text"] = header["TOBS"]
        self.beam_states[b]["config"]["observation_parameters"]["utc_start"]["#text"] = header["UTC_START"]
        self.beam_states[b]["config"]["observation_parameters"]["utc_stop"]["#text"] = ""

        self.beam_states[b]["config"]["calibration_parameters"]["signal"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["freq"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["phase"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["duty_cycle"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["epoch"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["tsys_avg_time"]["#text"] = ""
        self.beam_states[b]["config"]["calibration_parameters"]["tsys_freq_resolution"]["#text"] = ""

        self.load_header_param(b, "calibration_parameters", "signal", header, "CAL_SIGNAL")
        self.load_header_param(b, "calibration_parameters", "freq", header, "CAL_FREQ")
        self.load_header_param(b, "calibration_parameters", "phase", header, "CAL_PHASE")
        self.load_header_param(b, "calibration_parameters", "duty_cycle", header, "CAL_DUTY_CYCLE")
        self.load_header_param(b, "calibration_parameters", "epoch", header, "CAL_EPOCH")
        self.load_header_param(b, "calibration_parameters", "tsys_avg_time", header, "TSYS_AVG_TIME")
        self.load_header_param(b, "calibration_parameters", "tsys_freq_resolution", header, "TSYS_FREQ_RES")

        self.beam_states[b]["config"]["stream_configuration"]["nstream"]["#text"] = "0"

        self.beam_states[b]["state"] = "Idle"

        self.beam_states[b]["lock"].release()
Example no. 49
  def main (self):

    self.log (2, "UWBProcDaemon::main configure_child()")
    self.configure_child()

    self.log (2, "UWBProcDaemon::main wait_for_smrb()")
    SMRBDaemon.waitForSMRB(self.db_key, self)

    if self.quit_event.isSet():
      self.log (-1, "UWBProcDaemon::main quit event was set after waiting for SMRB creation")
      return

    # continuously run the main command waiting on the SMRB
    while (not self.quit_event.isSet()):

      # wait for the header to determine if folding is required
      cmd = "dada_header -k " + self.db_key + " -t " + self.tag
      self.log(2, "UWBProcDaemon::main " + cmd)
      self.binary_list.append (cmd)
      rval, lines = self.system (cmd, 2, True)
      self.binary_list.remove (cmd)

      # if the command returned ok and we have a header
      if rval != 0:
        time.sleep(0.1)
        if self.quit_event.isSet():
          self.log (2, "UWBProcDaemon::main " + cmd + " failed, but quit_event true")
        else:
          self.log (-2, "UWBProcDaemon::main " + cmd + " failed")
          self.quit_event.set()

      elif len(lines) == 0:
        
        self.log (-2, "UWBProcDaemon::main header was empty")
        self.quit_event.set()
        
      else:

        self.log (2, "UWBProcDaemon::main parsing header")
        self.header = Config.parseHeader (lines)

        # account for lower to upper sideband conversion
        if not abs(float(self.bw)) == float(self.header["BW"]):
          self.log (-1, "configured bandwidth ["+self.bw+"] != self.header["+self.header["BW"]+"]")
        if not float(self.cfreq) == float(self.header["FREQ"]):
          self.log (-1, "configured cfreq ["+self.cfreq+"] != self.header["+self.header["FREQ"]+"]")
        if not int(self.nchan) == int(self.header["NCHAN"]):
          self.log (-2, "configured nchan ["+self.nchan+"] != self.header["+self.header["NCHAN"]+"]")

        self.source = self.header["SOURCE"]
        self.utc_start = self.header["UTC_START"]

        # call the child class prepare method
        self.log (2, "UWBProcDaemon::main prepare()")
        valid = self.prepare()

        if valid:

          # ensure the output directory exists
          self.log (2, "UWBProcDaemon::main creating out_dir: " + self.out_dir)
          if not os.path.exists (self.out_dir):
            os.makedirs (self.out_dir, 0755)

          # write the sub-bands header to the out_dir
          header_file = self.out_dir + "/obs.header"
          self.log (2, "UWBProcDaemon::main writing obs.header to out_dir")
          Config.writeDictToCFGFile (self.header, header_file)
    
          # configure the output pipe
          self.log (2, "UWBProcDaemon::main configuring output log pipe")
          log_host = self.cfg["SERVER_HOST"]
          log_port = int(self.cfg["SERVER_LOG_PORT"])
          log_pipe = LogSocket (self.log_prefix, self.log_prefix,
                                str(self.id), "stream",
                                log_host, log_port, int(DL))
          log_pipe.connect()

          # get any modifications to the environment
          env = self.getEnvironment()

          # add the binary command to the kill list
          self.binary_list.append (self.cmd)

          # create processing threads
          self.log (2, "UWBProcDaemon::main creating processing threads")      
          cmd = "numactl -C " + self.cpu_core + " -- " + self.cmd
          proc_thread = UWBProcThread (self, cmd, log_pipe.sock, env, 1)

          # start processing threads
          self.log (2, "UWBProcDaemon::main starting processing thread")
          proc_thread.start()

          self.log (1, "START " + self.cmd)

          # join processing threads
          self.log (2, "UWBProcDaemon::main waiting for proc thread to terminate")
          rval = proc_thread.join() 
          self.log (2, "UWBProcDaemon::main proc thread joined")

          self.log (1, "END   " + self.cmd)

          # remove the binary command from the list
          self.binary_list.remove (self.cmd)

          if rval:
            self.log (-2, "UWBProcDaemon::main proc thread failed")
            self.quit_event.set()

          log_pipe.close()

          # good practise in case the proc thread always fails
          time.sleep(1)

        else:

          self.log (2, "MEERKATProcDaemon::main skip this processing")
          time.sleep(10)

      self.log (2, "UWBProcDaemon::main processing loop completed")
Example no. 50
    def main(self):

        if not os.path.exists(self.proc_dir):
            os.makedirs(self.proc_dir, 0755)

        # get the data block keys
        db_prefix = self.cfg["DATA_BLOCK_PREFIX"]
        num_stream = self.cfg["NUM_STREAM"]
        stream_id = str(self.id)
        self.debug("stream_id=" + str(self.id))

        # 4 data blocks
        in_id = self.cfg["RECEIVING_DATA_BLOCK"]
        trans_id = self.cfg["TRANSIENTS_DATA_BLOCK"]
        out_id = self.cfg["PROCESSING_DATA_BLOCK"]

        # 4 data block keys
        in_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, in_id)
        trans_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream,
                                        trans_id)
        out_key = SMRBDaemon.getDBKey(db_prefix, stream_id, num_stream, out_id)

        log_host = self.cfg["SERVER_HOST"]
        log_port = int(self.cfg["SERVER_LOG_PORT"])

        self.debug("SMRBDaemon.waitForSMRB()")
        smrb_exists = SMRBDaemon.waitForSMRB(in_key, self)

        if not smrb_exists:
            self.error("smrb["+str(self.id)+"] no valid SMRB with " +
                       "key=" + self.db_key)
            self.quit_event.set()
            return

        # determine the number of channels to be processed by this stream
        (cfreq, bw, nchan) = self.cfg["SUBBAND_CONFIG_" + stream_id].split(":")

        # this stat command will not change from observation to observation
        preproc_cmd = "uwb_preprocessing_pipeline " + in_key + " " + \
            trans_key + " " + out_key + " -d " + \
            self.cfg["GPU_ID_" + stream_id]

        tag = "preproc" + stream_id

        # enter the main loop
        while (not self.quit_event.isSet()):

            # wait for the header to acquire the processing parameters
            cmd = "dada_header -k " + in_key + " -t " + tag
            self.debug(cmd)
            self.binary_list.append(cmd)
            rval, lines = self.system(cmd, 2, True)
            self.binary_list.remove(cmd)

            if rval != 0 or self.quit_event.isSet():
                return

            self.debug("parsing header")
            header = Config.parseHeader(lines)

            cmd = preproc_cmd

            utc_start = header["UTC_START"]
            source = header["SOURCE"]
            freq = header["FREQ"]

            # directory in which to run preprocessor
            proc_dir = self.proc_dir + "/" + utc_start + "/" + source + "/" + \
                freq

            if not os.path.exists(proc_dir):
                os.makedirs(proc_dir, 0755)

            # write the header to the proc_dir
            header_file = proc_dir + "/obs.header"
            self.debug("writing obs.header to out_dir")
            Config.writeDictToCFGFile(header, header_file)

            run_adaptive_filter = (header["ADAPTIVE_FILTER"] == "1")

            # presence of RFI reference is based on NPOL == 3
            have_rfi_reference_pol = (int(header["NPOL"]) == 3)

            # presence of a calibration signal
            run_calibration = (header["CAL_SIGNAL"] == "1")

            # run the transients processor
            # run_transients = (header["TRANSIENTS"] == "1")
            run_transients = False

            # RFI reference pol is assumed to be last pol
            if have_rfi_reference_pol:
                rfi_reference_pol = int(header["NPOL"]) - 1
                self.info("Header NPOL=" + str(int(header["NPOL"])) +
                          " RFI reference signal present in pol " +
                          str(rfi_reference_pol))
                cmd = cmd + " -r " + str(rfi_reference_pol)

            if run_adaptive_filter:
                self.info("Adaptive filter active")
                cmd = cmd + " -a "

            if run_calibration:
                self.info("Calibration active")
                try:
                    avg_time = header["TSYS_AVG_TIME"]
                except KeyError:
                    avg_time = "10"
                try:
                    freq_res = header["TSYS_FREQ_RES"]
                except KeyError:
                    freq_res = "1"
                cmd = cmd + " -c " + avg_time + " -e " + freq_res

            if run_transients:
                self.info("Transients active")
                cmd = cmd + " -f " + header["TRANS_TSAMP"]

            # AJ todo check the channelisation limits with Nuer
            if run_adaptive_filter or run_calibration or run_transients:
                cmd = cmd + " -n 1024"

            # create a log pipe for the stats command
            log_pipe = LogSocket("preproc_src", "preproc_src",
                                 str(self.id), "stream", log_host,
                                 log_port, int(DL))

            # connect up the log file output
            log_pipe.connect()

            # add this binary to the list of active commands
            self.binary_list.append("uwb_preprocessing_pipeline " + in_key)

            self.info("START " + cmd)

            # initialize the threads
            preproc_thread = preprocThread(cmd, proc_dir, log_pipe.sock, 2)

            self.debug("starting preproc thread")
            preproc_thread.start()
            self.debug("preproc thread started")

            self.debug("joining preproc thread")
            rval = preproc_thread.join()
            self.debug("preproc thread joined")

            self.info("END     " + cmd)

            if rval:
                self.error("preproc thread failed")
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
                self.quit_event.set()
            else:
                cmd = "touch " + proc_dir + "/obs.finished"
                rval, lines = self.system(cmd, 2)
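
The command assembled above grows a base pipeline invocation with switches selected by header flags. The same composition in isolation (flag letters as in the code above; dict.get supplies the fall-back values the try/except blocks provide):

def build_preproc_cmd(base_cmd, header):
    cmd = base_cmd
    npol = int(header["NPOL"])
    # the RFI reference signal, when present, is the last polarisation
    if npol == 3:
        cmd += " -r " + str(npol - 1)
    if header.get("ADAPTIVE_FILTER") == "1":
        cmd += " -a "
    if header.get("CAL_SIGNAL") == "1":
        cmd += " -c " + header.get("TSYS_AVG_TIME", "10") + \
               " -e " + header.get("TSYS_FREQ_RES", "1")
    return cmd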
Example no. 51
    def main(self):

        # open a listening socket to receive the data files to read
        hostname = getHostNameShort()

        # get the site configurationa
        config = Config()

        # prepare header using configuration file parameters
        fixed_config = config.getStreamConfigFixed(self.id)

        if DL > 1:
            self.log(1, "NBIT\t" + fixed_config["NBIT"])
            self.log(1, "NDIM\t" + fixed_config["NDIM"])
            self.log(1, "NCHAN\t" + fixed_config["NCHAN"])
            self.log(1, "TSAMP\t" + fixed_config["TSAMP"])
            self.log(1, "BW\t" + fixed_config["BW"])
            self.log(1, "FREQ\t" + fixed_config["FREQ"])
            self.log(1, "START_CHANNEL\t" + fixed_config["START_CHANNEL"])
            self.log(1, "END_CHANNEL\t" + fixed_config["END_CHANNEL"])

        self.log(1, "ReadDaemon::main self.list_obs()")
        list_xml_str = self.list_obs()
        list_xml = xmltodict.parse(list_xml_str)
        first_obs = list_xml['observation_list']['observation'][0]
        print str(first_obs)
        self.read_obs(first_obs)
        #self.log(1, "ReadDaemon::main " + str(xml))

        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((hostname, int(self.cfg["STREAM_READ_PORT"]) + int(self.id)))
        sock.listen(1)

        can_read = [sock]
        can_write = []
        can_error = []

        while not self.quit_event.isSet():

            timeout = 1

            did_read = []
            did_write = []
            did_error = []

            try:
                # wait for some activity on the control socket
                self.log(3, "main: select")
                did_read, did_write, did_error = select.select(
                    can_read, can_write, can_error, timeout)
                self.log(
                    3, "main: read=" + str(len(did_read)) + " write=" +
                    str(len(did_write)) + " error=" + str(len(did_error)))
            except select.error as e:
                if e[0] == errno.EINTR:
                    self.log(0, "SIGINT received during select, exiting")
                    self.quit_event.set()
                else:
                    raise

            if (len(did_read) > 0):
                for handle in did_read:
                    if (handle == sock):
                        (new_conn, addr) = sock.accept()
                        self.log(1,
                                 "main: accept connection from " + repr(addr))

                        # add the accepted connection to can_read
                        can_read.append(new_conn)

                    # an accepted connection must have generated some data
                    else:

                        message = handle.recv(4096).strip()
                        self.log(
                            3, "commandThread: message='" + str(message) + "'")

                        xml = xmltodict.parse(message)
                        self.log(3,
                                 "commandThread: xml='" + str(xml) + "'")

                        if (len(message) == 0):
                            self.log(1, "commandThread: closing connection")
                            handle.close()
                            for i, x in enumerate(can_read):
                                if (x == handle):
                                    del can_read[i]
                        else:

                            if xml['command'] == "list_obs":
                                self.log(1, "command [" + xml['command'] + "]")
                                self.list_obs()
                                response = "OK"

                            elif xml['command'] == "read_obs":
                                self.log(1, "command [" + xml['command'] + "]")
                                self.read_obs()
                                response = "OK"

                            else:
                                self.log(
                                    -1, "unrecognized command [" +
                                    xml['command'] + "]")
                                response = "FAIL"

                            self.log(3, "-> " + response)
                            xml_response = "<read_response>" + response + "</read_response>"
                            handle.send(xml_response)
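
The control socket above follows the classic select() server shape: the listening socket and every accepted connection share one read set, and an empty recv() marks a closed peer. The skeleton, reduced to its essentials:

import select

def serve_once(listen_sock, can_read, timeout=1):
    # wait for activity on the listening socket or any open connection
    readable, _, _ = select.select(can_read, [], [], timeout)
    for handle in readable:
        if handle is listen_sock:
            (conn, addr) = listen_sock.accept()
            can_read.append(conn)
        else:
            message = handle.recv(4096).strip()
            if len(message) == 0:
                # an empty read means the peer closed the connection
                handle.close()
                can_read.remove(handle)
            else:
                handle.send(b"<read_response>OK</read_response>")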
Example no. 52
    def main(self):

        self.log(2, "main: self.waitForSMRB()")
        smrb_exists = self.waitForSMRB()

        if not smrb_exists:
            self.log(
                -2, "smrb[" + str(self.id) + "] no valid SMRB with " + "key=" +
                self.db_key)
            self.quit_event.set()
            return

        # configuration file for recv stream
        self.local_config = self.getConfiguration()
        self.local_config_file = "/tmp/spip_stream_" + str(self.id) + ".cfg"

        self.cpu_core = self.cfg["STREAM_RECV_CORE_" + str(self.id)]
        self.ctrl_port = str(int(self.cfg["STREAM_CTRL_PORT"]) + int(self.id))

        self.configured = True
        self.running = False
        env = self.getEnvironment()

        # external control loop to allow for reconfiguration of RECV
        while not self.quit_event.isSet():

            self.log(3, "main: waiting for configuration")
            while not self.quit_event.isSet() and not self.configured:
                sleep(1)
            if self.quit_event.isSet():
                return
            Config.writeDictToCFGFile(self.local_config,
                                      self.local_config_file)
            self.log(3, "main: configured")

            cmd = self.getCommand(self.local_config_file)
            self.binary_list.append(cmd)

            self.log(3, "main: sleep(1)")
            sleep(1)

            self.log(3, "main: log_pipe = LogSocket(recv_src)")
            log_pipe = LogSocket("recv_src", "recv_src", str(self.id),
                                 "stream", self.cfg["SERVER_HOST"],
                                 self.cfg["SERVER_LOG_PORT"], int(DL))

            self.log(3, "main: log_pipe.connect()")
            log_pipe.connect()

            self.log(3, "main: sleep(1)")
            sleep(1)

            self.running = True

            recv_cmd = "numactl -C 6 -- " + cmd

            # this should be a persistent / blocking command
            rval = self.system_piped(recv_cmd, log_pipe.sock, int(DL), env)

            self.running = False
            self.binary_list = []

            if rval:
                if not self.quit_event.isSet():
                    self.log(-2,
                             cmd + " failed with return value " + str(rval))
            log_pipe.close()
Example no. 53
  def main (self):

    self.debug("starting main loop")

    while not self.quit_event.isSet():

      # for each directory that has a completed dir
      for proc_type in self.proc_types:

        self.debug("proc_type=" + proc_type)

        # for each configured beam (there is only 1 for UWB)
        for beam in self.beams:

          self.debug("beam=" + beam)

          if self.quit_event.isSet():
            self.info("quit_event true [1]")
            continue

          # the input and output directories
          send_dir = self.send_dirs[proc_type] + "/" + beam
          junk_dir = self.junk_dirs[proc_type] + "/" + beam
          sent_dir = self.sent_dirs[proc_type] + "/" + beam

          if not os.path.exists(send_dir):
            self.warn("send_dir [" + send_dir + "] did not exist")
            os.makedirs(send_dir, 0755)

          if not os.path.exists(sent_dir):
            self.warn("sent_dir [" + sent_dir + "] did not exist")
            os.makedirs(sent_dir, 0755)

          if not os.path.exists(junk_dir):
            self.warn("sent_dir [" + junk_dir + "] did not exist")
            os.makedirs(junk_dir, 0755)

          # look for observations that have been completed and have / BEAM / utc / source / CFREQ
          self.debug("looking for obs.finished in " + send_dir + "/<UTC>/<SOURCE>/" + self.cfreq)
          cmd = "find " + send_dir + " -type f -path '*/" + self.cfreq + "/obs.finished' -mmin +1 | sort"
          rval, fin_files = self.system(cmd, 2)
          if rval:
            self.warn("find command failed: " + fin_files[0])
            sleep(1)
            continue
      
          self.debug("assessing obs.finished observations") 
          # transfer the completed directory to herschel
          for path in fin_files:

            if self.quit_event.isSet():
              self.info("quit_event true [2]")
              continue

            # strip dir prefix
            subpath = path [(len(send_dir)+1):] 

            # extract the the beam, utc, source and cfreq
            (utc, source, cfreq, file) = subpath.split("/")
            utc_source = utc + "/" + source

            self.debug("found obs to transfer " + utc_source)

            # finished and completed directories
            completed_subdir = utc_source + "/" + cfreq

            # determine the size of the data to be transferred
            cmd = "du -sb " + send_dir + "/" + completed_subdir + " | awk '{print $1}'"
            rval, size = self.system(cmd, 2)
            if rval:
              self.warn("failed to determine size of " + completed_subdir)
            else:
              self.debug("transferring " + (str(float(size[0])/1048576)) + " MB")
      
            # change to the beam directory
            os.chdir (send_dir)

            transfer = True
            # check the header file
            header_file = send_dir + "/" + completed_subdir + "/obs.header"
            if os.path.exists (header_file):
              header = Config.readCFGFileIntoDict (header_file)
              self.debug("utc=" + utc + " source=" + source + " pid=" + header["PID"])
              if header["PID"] == "P999":
                transfer = False

            if transfer:
            
              self.debug("get_rsync_from_stream (" + str(self.id) + ")")
              (username, server, module) = self.get_rsync_from_stream (self.id)
              self.debug("rsync stream=" + str(self.id)+ " user="******" server=" + server + " module=" + module)

              # build the rsync command TODO handle fold/search/etc
              cmd = "rsync ./" + completed_subdir + " " + \
                    username + "@" + server + "::" + module + "/" + proc_type + "/ " + \
                    self.rsync_options + " --exclude='obs.finished'"
  
              # run the rsync command
              transfer_rate = ""
              transfer_success = True
              rval, lines = self.system (cmd, 2)
              if rval:
                transfer_success = False
                self.warn("failed to transfer " + completed_subdir)
                # TODO add support for terminating the transfer early

              else:

                # parse the transfer speed
                for line in lines:
                  if line.find ("bytes/sec") != -1:
                    transfer_rate = line
  
                # transfer the obs.finished file
                cmd = "rsync ./" + completed_subdir + "/obs.finished " + \
                      username + "@" + server + "::" + \
                      module + "/" + proc_type + "/ " + self.rsync_options
  
                # run the rsync command
                rval, size = self.system (cmd, 2)
                if rval:
                  transfer_success = False
                  self.warn("failed to transfer " + completed_subdir + "/obs.finished")
             
                if transfer_success:
                  # create a parent directory in the transferred dir
                  try:
                    os.makedirs(sent_dir + "/" + utc_source, 0755)
                  except OSError, e:
                    self.debug(str(e))

                  # now move this observation from send to sent
                  cmd = "mv " + send_dir + "/" + utc_source + "/" + cfreq + " " + sent_dir + "/" + utc_source
                  rval, lines = self.system(cmd, 2)

                  self.clean_utc_source_dir (send_dir + "/" + utc_source)
                  self.info(proc_type + " " + utc_source + "/" + cfreq + " transferred to " + module + ": " + transfer_rate)
                else:
                  self.info(proc_type + " " + utc_source + "/" + cfreq + " failed to transfer")
            else:

              # create a parent directory in the transferred dir
              try:
                os.makedirs(junk_dir + "/" + utc_source, 0755)
              except OSError, e:
                self.debug(str(e))

              # now move this observation from send to junk
              cmd = "mv " + send_dir + "/" + utc_source + "/" + cfreq + " " + junk_dir + "/" + utc_source + "/"
              rval, lines = self.system(cmd, 2)

              self.clean_utc_source_dir (send_dir + "/" + utc_source)
              self.info(proc_type + " " + utc_source + "/" + cfreq + " junked")
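The loop above transfers each observation in two phases: the payload first with --exclude='obs.finished', then the obs.finished marker on its own, so the receiving end only sees the marker once the data files are complete. A minimal sketch of that pattern, assuming a hypothetical transfer_obs helper; the remote string and option list are illustrative:

import subprocess

def transfer_obs (local_dir, remote, rsync_options):
  # phase 1: payload only, with the completion marker excluded
  cmd = ["rsync", local_dir, remote] + rsync_options + ["--exclude=obs.finished"]
  if subprocess.call (cmd) != 0:
    return False
  # phase 2: the marker, sent only after the payload has arrived intact
  cmd = ["rsync", local_dir + "/obs.finished", remote] + rsync_options
  return subprocess.call (cmd) == 0

# e.g. transfer_obs ("./<utc>/<source>/<cfreq>", "user@server::module/fold/", ["-a", "--stats"])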
Example n. 54
  def main (self):

    # open a listening socket to receive the data files to read
    hostname = getHostNameShort()

    # get the site configuration
    config = Config()

    # prepare header using configuration file parameters
    fixed_config = config.getStreamConfigFixed(self.id)

    if DL > 1:
      self.log(1, "NBIT\t"  + fixed_config["NBIT"])
      self.log(1, "NDIM\t"  + fixed_config["NDIM"])
      self.log(1, "NCHAN\t" + fixed_config["NCHAN"])
      self.log(1, "TSAMP\t" + fixed_config["TSAMP"])
      self.log(1, "BW\t"    + fixed_config["BW"])
      self.log(1, "FREQ\t"  + fixed_config["FREQ"])
      self.log(1, "START_CHANNEL\t"  + fixed_config["START_CHANNEL"])
      self.log(1, "END_CHANNEL\t"  + fixed_config["END_CHANNEL"])

    self.log(1, "ReadDaemon::main self.list_obs()")
    list_xml_str = self.list_obs()
    list_xml = xmltodict.parse (list_xml_str)
    first_obs = list_xml['observation_list']['observation'][0]
    self.log(1, "ReadDaemon::main first_obs=" + str(first_obs))
    self.read_obs (first_obs)
    #self.log(1, "ReadDaemon::main " + str(xml))

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((hostname, int(self.cfg["STREAM_READ_PORT"]) + int(self.id)))
    sock.listen(1)

    can_read = [sock]
    can_write = []
    can_error = []

    while not self.quit_event.isSet():

      timeout = 1

      did_read = []
      did_write = []
      did_error = []

      try:
        # wait for some activity on the control socket
        self.log(3, "main: select")
        did_read, did_write, did_error = select.select(can_read, can_write, can_error, timeout)
        self.log(3, "main: read="+str(len(did_read))+" write="+
                    str(len(did_write))+" error="+str(len(did_error)))
      except select.error as e:
        if e[0] == errno.EINTR:
          self.log(0, "SIGINT received during select, exiting")
          self.quit_event.set()
        else:
          raise

      if (len(did_read) > 0):
        for handle in did_read:
          if (handle == sock):
            (new_conn, addr) = sock.accept()
            self.log(1, "main: accept connection from "+repr(addr))

            # add the accepted connection to can_read
            can_read.append(new_conn)

          # an accepted connection must have generated some data
          else:

            message = handle.recv(4096).strip()
            self.log(3, "commandThread: message='" + str(message) +"'")
            
            if (len(message) == 0):
              self.log(1, "commandThread: closing connection")
              handle.close()
              for i, x in enumerate(can_read):
                if (x == handle):
                  del can_read[i]
            else:

              # parse the XML command only once the message is known to be non-empty
              xml = xmltodict.parse (message)
              self.log(3, "commandThread: xml='" + str(xml) + "'")

              if xml['command'] == "list_obs":
                self.log (1, "command ["+xml['command'] + "]")
                self.list_obs ()
                response = "OK"

              elif xml['command'] == "read_obs":
                self.log (1, "command ["+xml['command'] + "]")
                self.read_obs ()
                response = "OK"

              else:
                self.log (-1, "unrecognized command ["+xml['command'] + "]")
                response = "FAIL"
  
              self.log(3, "-> " + response)
              xml_response = "<read_response>" + response + "</read_response>"
              handle.send (xml_response)
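Each control connection above exchanges a single XML document: the client sends one <command> element and the daemon replies with one <read_response> element. A minimal sketch of such a client, assuming the daemon runs on localhost; the port below is illustrative and would normally be STREAM_READ_PORT plus the stream id:

import socket

def send_command (host, port, command):
  # connect to the read daemon's control socket
  sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  sock.connect((host, port))
  # the daemon parses this with xmltodict and dispatches on xml['command']
  sock.send("<command>" + command + "</command>")
  response = sock.recv(4096)
  sock.close()
  return response

# e.g. send_command("localhost", 32100, "list_obs")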
Example n. 55
  def __init__ (self):
    Config.__init__(self)